Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
24 commits
Select commit Hold shift + click to select a range
7505223
Add `edf` and `gb` adapters for `tiled<0.1.0-b16`
Wiebke Oct 16, 2025
d772743
Add placeholders for adapter tests
Wiebke Oct 16, 2025
45a12dc
Elevate edf function adapter to class
Wiebke Oct 16, 2025
7dfbfc3
Elevate gb function adapter to class
Wiebke Mar 18, 2026
20f1e92
Move txt parsing to separate `metadata` module
Wiebke Mar 18, 2026
6c41eef
Add unit tests and example config for edf and gb adapters
Wiebke Mar 19, 2026
3b643a1
Guard against spec duplication on adapter init
Wiebke Mar 19, 2026
78d965a
Add `__init__.py` to `bl733` and `bl733/adapters`
Wiebke Mar 20, 2026
b333712
Update base image to `ghcr.io/bluesky/tiled:0.2.8` and make `tiled[cl…
Wiebke Mar 25, 2026
94bbd01
Add `mypy` pre-commit and update others
Wiebke Mar 25, 2026
24e032b
Refactor tests to use fixtures and remove backend fixture
Wiebke Mar 26, 2026
260ce9d
Add `__init__.py` files to tests and tests/bl733 directories
Wiebke Mar 26, 2026
a790c9c
Add mypy configuration `.mypy.ini` and update type hints in code
Wiebke Mar 26, 2026
a8b4424
Add `als_tiled[bl733]` to test dependencies,
Wiebke Mar 26, 2026
b44c4f9
Update mypy configuration for tests to not ignore errors
Wiebke Mar 26, 2026
9a6dd60
Tests need Tiled server dependencies
Wiebke Mar 26, 2026
7531ae7
Expand mypy type checks to include tests
Wiebke Mar 26, 2026
32598f8
Use already defined detector size constants for tests
Wiebke Mar 26, 2026
26de2ff
Only check header instead of full file for gb adapter
Wiebke Mar 27, 2026
deb0f68
Normalize date format across edf and gb adapters
Wiebke Mar 27, 2026
96894d6
Install optional bl733 dependencies in docker image
Wiebke Mar 27, 2026
de65382
Debug logs for loading files
Wiebke Mar 27, 2026
1036bae
No need to guard against date not being present in header
Wiebke Mar 27, 2026
23723e3
Delete `test_main.py` placeholder
Wiebke Mar 27, 2026
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion .github/workflows/ci-cd.yml
Original file line number Diff line number Diff line change
Expand Up @@ -47,7 +47,7 @@ jobs:

- name: Type checking with mypy
run: |
mypy src
mypy src tests

test:
runs-on: ubuntu-latest
Expand Down
24 changes: 24 additions & 0 deletions .mypy.ini
Original file line number Diff line number Diff line change
@@ -0,0 +1,24 @@
[mypy]
python_version = 3.11
mypy_path = src
ignore_errors = True
ignore_missing_imports = True
disallow_untyped_defs = False

[mypy-als_tiled.bl733.adapters.*]
ignore_errors = False
ignore_missing_imports = False
check_untyped_defs = True
disallow_untyped_defs = True
disallow_incomplete_defs = True
disallow_untyped_calls = True

[mypy-tests.*]
ignore_errors = False
disallow_untyped_defs = False

[mypy-fabio]
ignore_missing_imports = True

[mypy-tiled.*]
ignore_missing_imports = True
14 changes: 10 additions & 4 deletions .pre-commit-config.yaml
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
repos:
- repo: https://github.com/pre-commit/pre-commit-hooks
rev: v4.4.0
rev: v6.0.0
hooks:
- id: trailing-whitespace
- id: end-of-file-fixer
Expand All @@ -10,19 +10,25 @@ repos:
- id: debug-statements

- repo: https://github.com/psf/black
rev: 23.9.1
rev: 26.3.1
hooks:
- id: black
language_version: python3

- repo: https://github.com/pycqa/isort
rev: 5.12.0
rev: 8.0.1
hooks:
- id: isort
args: ["--profile", "black"]

- repo: https://github.com/pycqa/flake8
rev: 6.1.0
rev: 7.3.0
hooks:
- id: flake8
args: [--max-line-length=88, --extend-ignore=E203]

- repo: https://github.com/pre-commit/mirrors-mypy
rev: v1.19.1
hooks:
- id: mypy
additional_dependencies: [types-cachetools]
4 changes: 2 additions & 2 deletions Containerfile
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
FROM ghcr.io/bluesky/tiled:0.2.3 AS base
FROM ghcr.io/bluesky/tiled:0.2.8 AS base

USER root

Expand All @@ -9,4 +9,4 @@ USER app
COPY --chown=app:app pyproject.toml README.md ./
COPY --chown=app:app src/ ./src/
RUN python -m ensurepip
RUN python -m pip install --upgrade --no-cache-dir .
RUN python -m pip install --upgrade --no-cache-dir ".[bl733]"
21 changes: 21 additions & 0 deletions example_configs/bl733/config.yaml
Original file line number Diff line number Diff line change
@@ -0,0 +1,21 @@
file_extensions:
edf: application/x-edf
gb: application/x-gb

# Placeholder for future exporters that would allow download of arrays as edf or gb
# To be determined if this should include download of metadata as txt files
#media_types:
# array:
# application/x-edf: als_tiled.bl733.adapters.edf:export_edf
# application/x-gb: als_tiled.bl733.adapters.gb:export_gb

trees:
- path: /
tree: tiled.catalog:from_uri
args:
uri: ./data/catalog.db
readable_storage: [./data]
init_if_not_exists: true
adapters_by_mimetype:
application/x-edf: als_tiled.bl733.adapters.edf:EDFAdapter
application/x-gb: als_tiled.bl733.adapters.gb:GeneralBinaryPilatus2MAdapter
21 changes: 8 additions & 13 deletions pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -22,13 +22,14 @@ classifiers = [
]
requires-python = ">=3.11"
dependencies = [

"tiled[client]>=0.2.8",
]

[project.optional-dependencies]
dev = [
"pytest>=7.0",
"pytest-cov",
"pytest-asyncio",
"black",
"isort",
"flake8",
Expand All @@ -38,14 +39,14 @@ dev = [
test = [
"pytest>=7.0",
"pytest-cov",
"pytest-asyncio",
"als_tiled[bl733,tiled-all]",
]

tiled_all = [
"tiled[all]"
tiled-all = [
"tiled[all]>=0.2.8",
]

tiled_client = [
"tiled[client]"
bl733 = [
"fabio",
]

[project.urls]
Expand Down Expand Up @@ -85,12 +86,6 @@ profile = "black"
multi_line_output = 3
line_length = 88

[tool.mypy]
python_version = "3.11"
warn_return_any = true
warn_unused_configs = true
disallow_untyped_defs = true

[tool.pytest.ini_options]
minversion = "7.0"
addopts = "-ra -q --cov=als_tiled --cov-report=term-missing"
Expand Down
Empty file added src/als_tiled/bl733/__init__.py
Empty file.
Empty file.
76 changes: 76 additions & 0 deletions src/als_tiled/bl733/adapters/edf.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,76 @@
import logging
from datetime import datetime
from typing import Any, Optional

import fabio
from tiled.adapters.array import ArrayAdapter
from tiled.adapters.utils import init_adapter_from_catalog
from tiled.catalog.orm import Node
from tiled.structures.array import ArrayStructure
from tiled.structures.core import Spec, StructureFamily
from tiled.structures.data_source import DataSource
from tiled.type_aliases import JSON
from tiled.utils import path_from_uri

from als_tiled.bl733.adapters.metadata import parse_txt_accompanying_edf

logger = logging.getLogger(__name__)


class EDFAdapter(ArrayAdapter):
    """Tiled array adapter for `.edf` files (e.g. PILATUS3 2M) at ALS beamline 7.3.3."""

    structure_family = StructureFamily.array

    def __init__(
        self,
        data_uri: str,
        structure: Optional[ArrayStructure] = None,
        metadata: Optional[JSON] = None,
        specs: Optional[list[Spec]] = None,
        **kwargs: Optional[Any],
    ) -> None:
        """Load the EDF image at *data_uri* and merge its header with companion `.txt` metadata."""
        filepath = path_from_uri(data_uri)
        logger.debug("Loading EDF file produced by ALS beamline 7.3.3: %s", filepath)

        with fabio.open(filepath) as edf_file:
            array = edf_file.data
            header = edf_file.header

        # Normalize the EDF "Date" header field to ISO 8601 so dates are
        # consistent across the edf and gb adapters.
        acquired = datetime.strptime(header["Date"], "%a %b %d %H:%M:%S %Y")
        header["Date"] = acquired.isoformat()

        merged_metadata = {
            **(metadata or {}),
            **header,
            **parse_txt_accompanying_edf(filepath),
        }

        # Tag the node with the beamline-specific spec, guarding against
        # duplication when specs are passed in from the catalog.
        edf_spec = Spec("als-bl733-edf", version="1.0")
        all_specs = list(specs or [])
        if edf_spec not in all_specs:
            all_specs.append(edf_spec)

        super().__init__(
            array=array,
            structure=structure or ArrayStructure.from_array(array),
            metadata=merged_metadata,
            specs=all_specs,
            **kwargs,
        )

    @classmethod
    def from_catalog(
        cls,
        data_source: DataSource,
        node: Node,
        /,
        **kwargs: Optional[Any],
    ) -> "EDFAdapter":
        """Construct the adapter from a catalog-stored data source and node."""
        return init_adapter_from_catalog(cls, data_source, node, **kwargs)

    @classmethod
    def from_uris(
        cls,
        data_uri: str,
        **kwargs: Optional[Any],
    ) -> "EDFAdapter":
        """Construct the adapter directly from a data URI."""
        return cls(data_uri, **kwargs)
146 changes: 146 additions & 0 deletions src/als_tiled/bl733/adapters/gb.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,146 @@
import logging
import pathlib
from datetime import datetime
from typing import Any, Optional

import fabio
import numpy as np
from tiled.adapters.array import ArrayAdapter
from tiled.adapters.utils import init_adapter_from_catalog
from tiled.catalog.orm import Node
from tiled.structures.array import ArrayStructure
from tiled.structures.core import Spec, StructureFamily
from tiled.structures.data_source import DataSource
from tiled.type_aliases import JSON
from tiled.utils import path_from_uri

from als_tiled.bl733.adapters.metadata import parse_txt_accompanying_edf

logger = logging.getLogger(__name__)

# Pixel dimensions for the PILATUS 2M detector at ALS beamline 7.3.3
PILATUS_2M_PIXELS_X = 1475
PILATUS_2M_PIXELS_Y = 1679


class GeneralBinaryPilatus2MAdapter(ArrayAdapter):
    """Tiled array adapter for stitched PILATUS 2M `.gb` images from ALS beamline 7.3.3."""

    structure_family = StructureFamily.array

    def __init__(
        self,
        data_uri: str,
        structure: Optional[ArrayStructure] = None,
        metadata: Optional[JSON] = None,
        specs: Optional[list[Spec]] = None,
        **kwargs: Optional[Any],
    ) -> None:
        """Load the raw little-endian float32 image and merge metadata from its EDF companions."""
        filepath_gb = path_from_uri(data_uri)
        logger.debug("Loading GB file produced by ALS beamline 7.3.3: %s", filepath_gb)

        raw = np.fromfile(filepath_gb, dtype="<f4")
        expected_size = PILATUS_2M_PIXELS_X * PILATUS_2M_PIXELS_Y
        if raw.size != expected_size:
            raise ValueError(
                f"Data size ({raw.size}) does not match expected size "
                f"({expected_size})."
            )
        image = raw.reshape((PILATUS_2M_PIXELS_Y, PILATUS_2M_PIXELS_X))

        merged_metadata = {
            **(metadata or {}),
            **GeneralBinaryPilatus2MAdapter._parse_accompanying_metadata(filepath_gb),
        }

        # Tag the node with the beamline-specific spec, guarding against
        # duplication when specs are passed in from the catalog.
        gb_spec = Spec("als-bl733-gb", version="1.0")
        all_specs = list(specs or [])
        if gb_spec not in all_specs:
            all_specs.append(gb_spec)

        super().__init__(
            array=image,
            structure=structure or ArrayStructure.from_array(image),
            metadata=merged_metadata,
            specs=all_specs,
            **kwargs,
        )

    @classmethod
    def from_catalog(
        cls,
        data_source: DataSource,
        node: Node,
        /,
        **kwargs: Optional[Any],
    ) -> "GeneralBinaryPilatus2MAdapter":
        """Construct the adapter from a catalog-stored data source and node."""
        return init_adapter_from_catalog(cls, data_source, node, **kwargs)

    @classmethod
    def from_uris(
        cls,
        data_uri: str,
        **kwargs: Optional[Any],
    ) -> "GeneralBinaryPilatus2MAdapter":
        """Construct the adapter directly from a data URI."""
        return cls(data_uri, **kwargs)

    @staticmethod
    def _read_edf(filepath_edf: pathlib.Path) -> tuple[dict[str, Any], datetime | None]:
        """Read one EDF file and its companion .txt, returning (metadata, date)."""
        txt_metadata = parse_txt_accompanying_edf(filepath_edf)
        if not filepath_edf.is_file():
            logger.warning(
                f"GeneralBinary file is missing accompanying EDF file {filepath_edf}."
            )
            return txt_metadata, None
        # Read only the header; the pixel data lives in the .gb file itself.
        header = fabio.openheader(filepath_edf).header
        acquired = datetime.strptime(header["Date"], "%a %b %d %H:%M:%S %Y")
        return {**txt_metadata, **header}, acquired

    @staticmethod
    def _parse_accompanying_metadata(filepath_gb: pathlib.Path) -> dict[str, Any]:
        """Read the hi and lo EDF companions for a .gb file and merge their metadata."""
        # Companion paths follow the naming convention of swapping "sfloat"
        # for "hi" / "lo" and the .gb suffix for .edf.
        base_edf = str(filepath_gb.with_suffix(".edf"))
        path_hi = pathlib.Path(base_edf.replace("sfloat", "hi"))
        path_lo = pathlib.Path(base_edf.replace("sfloat", "lo"))

        meta_hi, date_hi = GeneralBinaryPilatus2MAdapter._read_edf(path_hi)
        meta_lo, date_lo = GeneralBinaryPilatus2MAdapter._read_edf(path_lo)

        combined = GeneralBinaryPilatus2MAdapter._combine_metadata(meta_hi, meta_lo)

        # Record the most recent acquisition time, if any companion had one.
        available_dates = [d for d in (date_hi, date_lo) if d is not None]
        if available_dates:
            combined["Date"] = max(available_dates).isoformat()

        return combined

    @staticmethod
    def _combine_metadata(
        metadata_hi: dict[str, Any], metadata_lo: dict[str, Any]
    ) -> dict[str, Any]:
        """Combine metadata from hi and lo EDF files.

        Keys with identical values are kept once. Keys with different values are
        suffixed with _hi and _lo.
        """
        merged: dict[str, Any] = {}
        for key in set(metadata_hi) | set(metadata_lo):
            value_hi = metadata_hi.get(key)
            value_lo = metadata_lo.get(key)
            if value_hi == value_lo:
                merged[key] = value_hi
                continue
            if value_hi is not None:
                merged[f"{key}_hi"] = value_hi
            if value_lo is not None:
                merged[f"{key}_lo"] = value_lo
        return merged
Loading
Loading