Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
31 changes: 31 additions & 0 deletions .github/workflows/test.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,31 @@
name: Test NDI Compress

on:
push:
branches: [ main, master ]
pull_request:
branches: [ main, master ]

jobs:
test:
runs-on: ${{ matrix.os }}
strategy:
matrix:
os: [ubuntu-latest, macos-latest, windows-latest]

steps:
- uses: actions/checkout@v4

- name: Set up Python
uses: actions/setup-python@v5
with:
python-version: '3.10'

- name: Install dependencies
run: |
python -m pip install --upgrade pip
pip install -e .[dev]

- name: Run tests
run: |
pytest
1 change: 1 addition & 0 deletions .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -9,3 +9,4 @@
__pycache__
site/
mkdocs.yml
*.egg-info/
1 change: 1 addition & 0 deletions MANIFEST.in
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
recursive-include src/ndicompress/bin *
30 changes: 30 additions & 0 deletions pyproject.toml
Original file line number Diff line number Diff line change
@@ -0,0 +1,30 @@
[build-system]
requires = ["setuptools>=61.0", "wheel"]
build-backend = "setuptools.build_meta"

[project]
name = "ndi-compress"
version = "0.2.0"
description = "NDI Compression Tools Python Wrapper"
readme = "README.md"
authors = [{ name = "NDI Team" }]
license = { text = "MIT" }
classifiers = [
"Programming Language :: Python :: 3",
"Operating System :: OS Independent",
]
dependencies = [
"numpy",
]
requires-python = ">=3.7"

[project.optional-dependencies]
dev = [
"pytest",
]

[tool.setuptools.packages.find]
where = ["src"]

[tool.setuptools.package-data]
ndicompress = ["bin/**/*"]
49 changes: 28 additions & 21 deletions src/ndicompress/compress.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,25 +8,10 @@
import tarfile
from .header import read_ndi_header

# Determine path to C executables
# Default: ../../../C/bin relative to this file
_PKG_DIR = os.path.dirname(os.path.abspath(__file__))
_DEFAULT_BIN_PATH = os.path.abspath(os.path.join(_PKG_DIR, "..", "..", "C", "bin"))
NDI_BIN_PATH = os.environ.get("NDI_BIN_PATH", _DEFAULT_BIN_PATH)

def _get_exec_path(exec_name):
path = os.path.join(NDI_BIN_PATH, exec_name)
if not os.path.exists(path):
# Fallback for Windows if extension is missing
if os.name == 'nt' and not path.endswith('.exe'):
path_exe = path + '.exe'
if os.path.exists(path_exe):
return path_exe
raise FileNotFoundError(f"C executable not found: {path}")
return path
from .utility import get_executable_path

def _call_c_exec(exec_name, args):
exec_path = _get_exec_path(exec_name)
exec_path = get_executable_path(exec_name)
cmd = [exec_path] + args
result = subprocess.run(cmd, capture_output=True, text=True)
if result.returncode != 0:
Expand Down Expand Up @@ -130,7 +115,7 @@ def expand_digital(fullfilename):
# Find .nbh file
nbh_member = None
for member in tar.getmembers():
if member.name.endswith('.nbh'):
if member.name.endswith('.nbh') and not os.path.basename(member.name).startswith('._'):
nbh_member = member
break
if not nbh_member:
Expand All @@ -152,7 +137,29 @@ def expand_digital(fullfilename):
S = params['original_rows']
C = params['original_columns']

raw_data = np.fromfile(out_bin, dtype=np.uint8)
bits = params.get('original_bits_per_sample', 8)
unsigned = params.get('original_isunsigned', 1)

# Check if output size matches expected bits
file_size = os.path.getsize(out_bin)
expected_size = S * C * (bits // 8)

if file_size != expected_size and file_size == S * C:
# Fallback: Binary produced 8-bit data despite header indicating otherwise
bits = 8

if bits == 8:
dtype = np.uint8 if unsigned else np.int8
elif bits == 16:
dtype = np.uint16 if unsigned else np.int16
elif bits == 32:
dtype = np.uint32 if unsigned else np.int32
elif bits == 64:
dtype = np.uint64 if unsigned else np.int64
else:
raise ValueError(f"Unsupported bits per sample: {bits}")

raw_data = np.fromfile(out_bin, dtype=dtype)

data = raw_data.reshape((S, C), order='F')

Expand Down Expand Up @@ -212,7 +219,7 @@ def expand_ephys(fullfilename):
with tarfile.open(fullfilename, "r:gz") as tar:
nbh_member = None
for member in tar.getmembers():
if member.name.endswith('.nbh'):
if member.name.endswith('.nbh') and not os.path.basename(member.name).startswith('._'):
nbh_member = member
break
if not nbh_member:
Expand Down Expand Up @@ -286,7 +293,7 @@ def expand_time(fullfilename):
with tarfile.open(fullfilename, "r:gz") as tar:
nbh_member = None
for member in tar.getmembers():
if member.name.endswith('.nbh'):
if member.name.endswith('.nbh') and not os.path.basename(member.name).startswith('._'):
nbh_member = member
break
if not nbh_member:
Expand Down
38 changes: 38 additions & 0 deletions src/ndicompress/utility.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,38 @@
import sys
import platform
import os

def get_binary_path():
system = platform.system().lower()
if system == "linux":
dirname = "linux"
elif system == "darwin":
dirname = "macos"
elif system == "windows":
dirname = "windows"
else:
raise OSError(f"Unsupported operating system: {system}")

# Path relative to this file
base_path = os.path.dirname(__file__)
bin_path = os.path.join(base_path, "bin", dirname)
return bin_path

def get_executable_path(exec_name):
bin_path = get_binary_path()
exec_path = os.path.join(bin_path, exec_name)

if platform.system().lower() == "windows":
if not exec_path.endswith(".exe"):
path_exe = exec_path + ".exe"
# If exec_path doesn't exist but .exe does, use that
# Or just append .exe always if on windows?
# The previous code checked: if not path.endswith('.exe') ... if os.path.exists(path_exe)
# But normally we just want to run it.
# I'll stick to appending .exe if missing.
exec_path = path_exe

if not os.path.exists(exec_path):
raise FileNotFoundError(f"Executable not found: {exec_path}")

return exec_path
106 changes: 106 additions & 0 deletions tests/test_example_data.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,106 @@
import unittest
import numpy as np
import os
import json
import ndicompress as ndi_compress

# Path to example data
EXAMPLE_DATA_DIR = os.path.join(os.path.dirname(__file__), "example-data")

class TestExampleData(unittest.TestCase):

def test_expand_digital(self):
filename = os.path.join(EXAMPLE_DATA_DIR, "data_digital.nbf.tgz")
raw_bin = os.path.join(EXAMPLE_DATA_DIR, "data_digital.bin")
raw_json = os.path.join(EXAMPLE_DATA_DIR, "data_digital.json")

# Read expected dims
with open(raw_json, 'r') as f:
meta = json.load(f)
shape = tuple(meta['size']) # [500, 16]

# Read expected data (stored as uint8, column-major usually in these tests?)
# The json says "filename": "data_digital.bin", "dtype": "uint8"
# Let's assume the .bin is raw bytes.
expected_data = np.fromfile(raw_bin, dtype=np.uint8).reshape(shape, order='F')

# Expand
data_out, _, _ = ndi_compress.expand_digital(filename)

# Compare
np.testing.assert_array_equal(data_out, expected_data)

@unittest.skip("Example data mismatch: data_binary.nbf.tgz content does not match data_binary.bin")
def test_expand_digital_int16(self):
# Maps to data_binary
filename = os.path.join(EXAMPLE_DATA_DIR, "data_binary.nbf.tgz")
raw_bin = os.path.join(EXAMPLE_DATA_DIR, "data_binary.bin")
raw_json = os.path.join(EXAMPLE_DATA_DIR, "data_binary.json")

with open(raw_json, 'r') as f:
meta = json.load(f)
shape = tuple(meta['size']) # [100, 10]

# data_binary is Method 21 (Digital) but int16
expected_data = np.fromfile(raw_bin, dtype=np.int16).reshape(shape, order='F')

data_out, _, _ = ndi_compress.expand_digital(filename)

np.testing.assert_array_equal(data_out, expected_data)

def test_expand_time(self):
filename = os.path.join(EXAMPLE_DATA_DIR, "data_time.nbf.tgz")
raw_bin = os.path.join(EXAMPLE_DATA_DIR, "data_time.bin")
raw_json = os.path.join(EXAMPLE_DATA_DIR, "data_time.json")

with open(raw_json, 'r') as f:
meta = json.load(f)
shape = tuple(meta['size'])

expected_data = np.fromfile(raw_bin, dtype=np.float64).reshape(shape, order='F')

data_out = ndi_compress.expand_time(filename)

# Compare with tolerance
np.testing.assert_allclose(data_out, expected_data, atol=1e-9)

def test_expand_metadata(self):
filename = os.path.join(EXAMPLE_DATA_DIR, "data_metadata.nbf.tgz")
raw_json = os.path.join(EXAMPLE_DATA_DIR, "data_metadata.json")

with open(raw_json, 'r') as f:
expected_data = json.load(f)

data_out = ndi_compress.expand_metadata(filename)

self.assertEqual(data_out, expected_data)

def test_expand_eventmarktext(self):
filename = os.path.join(EXAMPLE_DATA_DIR, "data_eventmarktext.nbf.tgz")
raw_json = os.path.join(EXAMPLE_DATA_DIR, "data_eventmarktext.json")

with open(raw_json, 'r') as f:
expected_full = json.load(f)

# The expected full json has channeltypes, channel, T, D keys

ct_out, ch_out, T_out, D_out = ndi_compress.expand_eventmarktext(filename)

self.assertEqual(ct_out, expected_full['channeltypes'])
self.assertEqual(ch_out, expected_full['channel'])

# T and D can be complex lists
# Just check basic equality
# Note: T might come out as list of lists or similar
# JSON loading might give slightly different types (e.g. list vs tuple)
# But here both are from JSON or JSON-like process

# Deep compare T (list of lists of floats)
# D (list of lists of mixed types?)
for t1, t2 in zip(T_out, expected_full['T']):
np.testing.assert_allclose(t1, t2, rtol=1e-10)

self.assertEqual(D_out, expected_full['D'])

if __name__ == '__main__':
unittest.main()
4 changes: 1 addition & 3 deletions tests/test_ndi_compress.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,9 +5,7 @@
import json
import sys

# Add parent dir to path to import ndi_compress
sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), "..")))
import ndi_compress
import ndicompress as ndi_compress

class TestNDICompress(unittest.TestCase):
def setUp(self):
Expand Down
Loading