From a7855df37fe402fe9afd50d1b32dbd2bcdc7d1ea Mon Sep 17 00:00:00 2001 From: Johannes Kasimir Date: Tue, 24 Jun 2025 14:09:51 +0200 Subject: [PATCH 01/29] feat: initial beer mcstas workflow --- docs/user-guide/beer/beer_mcstas.ipynb | 85 ++++++++++++++++++++ docs/user-guide/beer/index.md | 11 +++ docs/user-guide/index.md | 1 + src/ess/beer/__init__.py | 25 ++++++ src/ess/beer/clustering.py | 90 +++++++++++++++++++++ src/ess/beer/io.py | 105 +++++++++++++++++++++++++ src/ess/beer/workflow.py | 28 +++++++ 7 files changed, 345 insertions(+) create mode 100644 docs/user-guide/beer/beer_mcstas.ipynb create mode 100644 docs/user-guide/beer/index.md create mode 100644 src/ess/beer/__init__.py create mode 100644 src/ess/beer/clustering.py create mode 100644 src/ess/beer/io.py create mode 100644 src/ess/beer/workflow.py diff --git a/docs/user-guide/beer/beer_mcstas.ipynb b/docs/user-guide/beer/beer_mcstas.ipynb new file mode 100644 index 00000000..ea0069b2 --- /dev/null +++ b/docs/user-guide/beer/beer_mcstas.ipynb @@ -0,0 +1,85 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": null, + "id": "0", + "metadata": {}, + "outputs": [], + "source": [ + "%matplotlib ipympl" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "1", + "metadata": {}, + "outputs": [], + "source": [ + "import scipp as sc\n", + "\n", + "from ess.beer import BeerMcStasWorkflow\n", + "from ess.reduce.nexus.types import Filename, SampleRun\n", + "from ess.reduce.time_of_flight.types import DetectorTofData" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "2", + "metadata": {}, + "outputs": [], + "source": [ + "wf = BeerMcStasWorkflow()" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "3", + "metadata": {}, + "outputs": [], + "source": [ + "wf[Filename[SampleRun]] = '/home/johannes/work/diffraction/beer/duplex.h5'\n", + "da = wf.compute(DetectorTofData[SampleRun])\n", + "da.bins.coords['d'] = (sc.constants.h / sc.constants.m_n * da.bins.coords['tof'] / da.bins.coords['sin_theta_L'] / 2).to(unit='angstrom')\n", + "da.bins.concat().hist(d=2000).plot()" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "4", + "metadata": {}, + "outputs": [], + "source": [ + "wf[Filename[SampleRun]] = '/home/johannes/work/diffraction/beer/silicon.h5'\n", + "da = wf.compute(DetectorTofData[SampleRun])\n", + "da.bins.coords['d'] = (sc.constants.h / sc.constants.m_n * da.bins.coords['tof'] / da.bins.coords['sin_theta_L'] / 2).to(unit='angstrom')\n", + "da.bins.concat().hist(d=2000).plot()" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.10.18" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/docs/user-guide/beer/index.md b/docs/user-guide/beer/index.md new file mode 100644 index 00000000..b7a5348d --- /dev/null +++ b/docs/user-guide/beer/index.md @@ -0,0 +1,11 @@ +# BEER + +## Reduction Workflows + +```{toctree} +--- +maxdepth: 1 +--- + +beer-mcstas +``` diff --git a/docs/user-guide/index.md b/docs/user-guide/index.md index 77190bc4..6f2e3ade 100644 --- a/docs/user-guide/index.md +++ b/docs/user-guide/index.md @@ -7,6 +7,7 @@ maxdepth: 1 installation dream/index +beer/index sns-instruments/index common/index ``` 
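The notebook above converts event time of flight to d-spacing by combining the de Broglie relation (lambda = h*tof / (m_n*L)) with Bragg's law (lambda = 2*d*sin(theta)), i.e. d = h*tof / (2*m_n*L*sin(theta)). A minimal standalone sketch of that conversion with scipp, using illustrative placeholder values (the numbers below are assumptions for demonstration, not taken from the simulation files):

import scipp as sc

# Illustrative inputs (placeholders, not from the BEER McStas files)
tof = sc.scalar(0.08, unit='s')          # time of flight
Ltotal = sc.scalar(158.0, unit='m')      # total flight path length
two_theta = sc.scalar(90.0, unit='deg')  # scattering angle

# lambda = h * tof / (m_n * Ltotal), then Bragg: d = lambda / (2 sin(theta))
wavelength = (sc.constants.h / sc.constants.m_n * tof / Ltotal).to(unit='angstrom')
d = wavelength / (2 * sc.sin(two_theta.to(unit='rad') / 2))
print(d)  # roughly 1.4 angstrom for these placeholder values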
diff --git a/src/ess/beer/__init__.py b/src/ess/beer/__init__.py new file mode 100644 index 00000000..1455c430 --- /dev/null +++ b/src/ess/beer/__init__.py @@ -0,0 +1,25 @@ +# SPDX-License-Identifier: BSD-3-Clause +# Copyright (c) 2025 Scipp contributors (https://github.com/scipp) + +""" +Components for BEER +""" + +import importlib.metadata + +from .io import load_beer_mcstas +from .workflow import BeerMcStasWorkflow, default_parameters + +try: + __version__ = importlib.metadata.version("essdiffraction") +except importlib.metadata.PackageNotFoundError: + __version__ = "0.0.0" + +del importlib + +__all__ = [ + 'BeerMcStasWorkflow', + '__version__', + 'default_parameters', + 'load_beer_mcstas', +] diff --git a/src/ess/beer/clustering.py b/src/ess/beer/clustering.py new file mode 100644 index 00000000..30764f82 --- /dev/null +++ b/src/ess/beer/clustering.py @@ -0,0 +1,90 @@ +import scipp as sc +from scipy.signal import find_peaks + + +def cluster_events_by_streak(da: sc.DataArray) -> sc.DataArray: + h = da.hist(coarse_d=1000) + i_peaks, _ = find_peaks(h.data.values, height=40, distance=3) + i_valleys, _ = find_peaks( + h.data.values.max() - h.data.values, distance=3, height=h.data.values.max() / 2 + ) + + valleys = sc.array( + dims=['coarse_d'], + values=h.coords['coarse_d'].values[i_valleys], + unit=h.coords['coarse_d'].unit, + ) + peaks = sc.array( + dims=['coarse_d'], + values=h.coords['coarse_d'].values[i_peaks], + unit=h.coords['coarse_d'].unit, + ) + + has_peak = peaks.bin(coarse_d=valleys).bins.size().data.to(dtype='bool') + has_peak_left = sc.concat( + (has_peak, sc.array(dims=['coarse_d'], values=[False], unit=None)), 'coarse_d' + ) + has_peak_right = sc.concat( + ( + sc.array(dims=['coarse_d'], values=[False], unit=None), + has_peak, + ), + 'coarse_d', + ) + filtered_valleys = valleys[has_peak_left | has_peak_right] + has_peak = peaks.bin(coarse_d=filtered_valleys).bins.size().data + b = da.bin(coarse_d=filtered_valleys).assign_masks( + no_peak=has_peak != sc.scalar(1, unit=None) + ) + b.coords['peak'] = ( + peaks.bin(coarse_d=filtered_valleys).bins.coords['coarse_d'].bins.mean() + ) + return b + + +def compute_tof_in_each_cluster(b: sc.DataArray) -> None: + '''Fits a line through each cluster, the intercept of the line is t0. + The line is fitted using linear regression with an outlier removal procedure. + + The algorithm is: + + 1. Fit line through clusters. + 2. Mask points that are "outliers" based on the criteria that they are too far + from the line in the ``t`` variable. + This means they don't seem to have the same time of flight origin as the rest + of the points in the cluster. + 3. Go back to 1. and iterate until convergence. A few iterations should be enough. 
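+
+    As a sketch of step 1: ``_linear_regression_by_bin`` below evaluates the
+    weighted least-squares closed form per cluster, using the event weights as
+    weights, with ``x = sin_theta_L`` and ``y = t``::
+
+        slope = cov(x, y) / var(x)
+        t0    = mean(y) - slope * mean(x)
+
+    The fitted intercept ``t0`` is taken as the time-of-flight origin of the
+    streak, so ``tof = t - t0`` for every event in that cluster.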
+ ''' + for _ in range(3): + s, t0 = _linear_regression_by_bin( + b.bins.coords['sin_theta_L'], b.bins.coords['t'], b + ) + b.bins.masks['too_far_from_center'] = ( + sc.abs( + sc.values(t0) + + sc.values(s) * b.bins.coords['sin_theta_L'] + - b.bins.coords['t'] + ) + > sc.scalar(3e-4, unit='s') + ).data + + b.coords['t0'] = sc.values(t0).data + b.bins.coords['tof'] = (b.bins.coords['t'] - sc.values(t0)).data + + +def _linear_regression_by_bin( + x: sc.Variable, y: sc.Variable, w: sc.DataArray +) -> tuple[sc.DataArray, sc.DataArray]: + w = sc.values(w) + tot_w = w.bins.sum() + + avg_x = (w * x).bins.sum() / tot_w + avg_y = (w * y).bins.sum() / tot_w + + cov_xy = (w * (x - avg_x) * (y - avg_y)).bins.sum() / tot_w + var_x = (w * (x - avg_x) ** 2).bins.sum() / tot_w + + b1 = cov_xy / var_x + b0 = avg_y - b1 * avg_x + + return b1, b0 diff --git a/src/ess/beer/io.py b/src/ess/beer/io.py new file mode 100644 index 00000000..ad98db7b --- /dev/null +++ b/src/ess/beer/io.py @@ -0,0 +1,105 @@ +# SPDX-License-Identifier: BSD-3-Clause +# Copyright (c) 2025 Scipp contributors (https://github.com/scipp) +import h5py +import numpy as np +import scipp as sc +from scipp import constants + + +def _load_h5(group: h5py.Group | str, *paths: str): + if isinstance(group, str): + with h5py.File(group) as group: + yield from _load_h5(group, *paths) + return + for path in paths: + g = group + for p in path.strip('/').split('/'): + g = _unique_child_group_h5(g, p) if p.startswith('NX') else g.get(p) + yield g + + +def _unique_child_group_h5( + group: h5py.Group, + nx_class: str, +) -> h5py.Group | None: + out = None + for v in group.values(): + if v.attrs.get("NX_class") == nx_class.encode(): + if out is None: + out = v + else: + raise ValueError( + f'Expected exactly one {nx_class} group, but found more' + ) + return out + + +def load_beer_mcstas(f): + if isinstance(f, str): + with h5py.File(f) as ff: + return load_beer_mcstas(ff) + + data, events, params, sample_pos, chopper_pos = _load_h5( + f, + 'NXentry/NXdetector/bank01_events_dat_list_p_x_y_n_id_t', + 'NXentry/NXdetector/bank01_events_dat_list_p_x_y_n_id_t/events', + 'NXentry/simulation/Param', + '/entry1/instrument/components/0189_sampleMantid/Position', + '/entry1/instrument/components/0020_cMCC/Position', + ) + da = sc.DataArray( + sc.array(dims=['events'], values=events[:, 0], variances=events[:, 0] ** 2), + ) + for name, value in data.attrs.items(): + da.coords[name] = sc.scalar(value.decode()) + + for i, label in enumerate(data.attrs["ylabel"].decode().strip().split(' ')): + if label == 'p': + continue + da.coords[label] = sc.array(dims=['events'], values=events[:, i]) + for k, v in params.items(): + v = v[0] + if isinstance(v, bytes): + v = v.decode() + da.coords[k] = sc.scalar(v) + + da.coords['sample_position'] = sc.vector(sample_pos[:], unit='m') + da.coords['detector_position'] = sc.vector( + list(map(float, da.coords['position'].value.split(' '))), unit='m' + ) + da.coords['chopper_position'] = sc.vector(chopper_pos[:], unit='m') + da.coords['x'].unit = 'm' + da.coords['y'].unit = 'm' + da.coords['t'].unit = 's' + da.coords['L1'] = sc.norm( + da.coords['sample_position'] - da.coords['chopper_position'] + ) - sc.scalar(0.854, unit='m') # note! 
fudge factor + da.coords['L2'] = sc.norm( + da.coords['detector_position'] - da.coords['sample_position'] + ) + da.coords['two_theta'] = sc.scalar(np.pi / 2, unit='rad') + sc.atan2( + y=da.coords['x'], x=da.coords['L2'] + ) + da.coords['sin_theta'] = sc.sin(da.coords['two_theta'] / 2) + da.coords['L'] = da.coords['L1'] + sc.sqrt( + da.coords['x'] ** 2 + da.coords['y'] ** 2 + da.coords['L2'] ** 2 + ) + + # TODO: approximate t0 should depend on chopper information + da.coords['t0'] = sc.scalar(0.0066, unit='s') + da.coords['coarse_d'] = ( + constants.h + / constants.m_n + * (da.coords['t'] - da.coords['t0']) + / da.coords['sin_theta'] + / da.coords['L'] + / 2 + ).to(unit='angstrom') + da.coords['sin_theta_L'] = da.coords['sin_theta'] * da.coords['L'] + + # TODO: limits should be user configurable + da.masks['two_theta'] = ( + da.coords['two_theta'] >= sc.scalar(105, unit='deg').to(unit='rad') + ) | (da.coords['two_theta'] <= sc.scalar(75, unit='deg').to(unit='rad')) + + return da diff --git a/src/ess/beer/workflow.py b/src/ess/beer/workflow.py new file mode 100644 index 00000000..d17546c5 --- /dev/null +++ b/src/ess/beer/workflow.py @@ -0,0 +1,28 @@ +# SPDX-License-Identifier: BSD-3-Clause +# Copyright (c) 2025 Scipp contributors (https://github.com/scipp) +import sciline as sl + +from ess.reduce.nexus.types import DetectorData, Filename, SampleRun +from ess.reduce.time_of_flight.types import DetectorTofData + +from .clustering import cluster_events_by_streak, compute_tof_in_each_cluster +from .io import load_beer_mcstas + + +def load_data(fname: Filename[SampleRun]) -> DetectorData[SampleRun]: + return DetectorData[SampleRun](load_beer_mcstas(fname)) + + +def cluster_events_and_compute_tof( + da: DetectorData[SampleRun], +) -> DetectorTofData[SampleRun]: + da = cluster_events_by_streak(da) + compute_tof_in_each_cluster(da) + return DetectorTofData[SampleRun](da) + + +default_parameters = {} + + +def BeerMcStasWorkflow(): + return sl.Pipeline((load_data, cluster_events_and_compute_tof)) From b52260a5debf8fd3b9749dfe5327112188714085 Mon Sep 17 00:00:00 2001 From: Johannes Kasimir Date: Tue, 24 Jun 2025 14:44:36 +0200 Subject: [PATCH 02/29] fix: add data --- docs/user-guide/beer/beer_mcstas.ipynb | 5 ++-- src/ess/beer/data.py | 41 ++++++++++++++++++++++++++ 2 files changed, 44 insertions(+), 2 deletions(-) create mode 100644 src/ess/beer/data.py diff --git a/docs/user-guide/beer/beer_mcstas.ipynb b/docs/user-guide/beer/beer_mcstas.ipynb index ea0069b2..b8e56ecc 100644 --- a/docs/user-guide/beer/beer_mcstas.ipynb +++ b/docs/user-guide/beer/beer_mcstas.ipynb @@ -20,6 +20,7 @@ "import scipp as sc\n", "\n", "from ess.beer import BeerMcStasWorkflow\n", + "from ess.beer.data import mcstas_silicon_medium_resolution, mcstas_duplex_medium_resolution\n", "from ess.reduce.nexus.types import Filename, SampleRun\n", "from ess.reduce.time_of_flight.types import DetectorTofData" ] @@ -41,7 +42,7 @@ "metadata": {}, "outputs": [], "source": [ - "wf[Filename[SampleRun]] = '/home/johannes/work/diffraction/beer/duplex.h5'\n", + "wf[Filename[SampleRun]] = mcstas_duplex_medium_resolution()\n", "da = wf.compute(DetectorTofData[SampleRun])\n", "da.bins.coords['d'] = (sc.constants.h / sc.constants.m_n * da.bins.coords['tof'] / da.bins.coords['sin_theta_L'] / 2).to(unit='angstrom')\n", "da.bins.concat().hist(d=2000).plot()" @@ -54,7 +55,7 @@ "metadata": {}, "outputs": [], "source": [ - "wf[Filename[SampleRun]] = '/home/johannes/work/diffraction/beer/silicon.h5'\n", + "wf[Filename[SampleRun]] = 
mcstas_silicon_medium_resolution()\n", "da = wf.compute(DetectorTofData[SampleRun])\n", "da.bins.coords['d'] = (sc.constants.h / sc.constants.m_n * da.bins.coords['tof'] / da.bins.coords['sin_theta_L'] / 2).to(unit='angstrom')\n", "da.bins.concat().hist(d=2000).plot()" diff --git a/src/ess/beer/data.py b/src/ess/beer/data.py new file mode 100644 index 00000000..839f2614 --- /dev/null +++ b/src/ess/beer/data.py @@ -0,0 +1,41 @@ +# SPDX-License-Identifier: BSD-3-Clause +# Copyright (c) 2025 Scipp contributors (https://github.com/scipp) +"""Data for tests and documentation with BEER.""" + +_version = "1" + +__all__ = ["mcstas_duplex_medium_resolution", "mcstas_silicon_medium_resolution"] + + +def _make_pooch(): + import pooch + + return pooch.create( + path=pooch.os_cache("ess/beer"), + env="ESS_DATA_DIR", + base_url="https://public.esss.dk/groups/scipp/ess/beer/{version}/", + version=_version, + registry={ + "duplex.h5": "md5:ebb3f9694ffdd0949f342bd0deaaf627", + "silicon.h5": "md5:3425ae2b2fe7a938c6f0a4afeb9e0819", + }, + ) + + +_pooch = _make_pooch() + + +def mcstas_duplex_medium_resolution() -> str: + """ + Simulated intensity from duplex sample with + medium resolution chopper configuration. + """ + return _pooch.fetch('duplex.h5') + + +def mcstas_silicon_medium_resolution() -> str: + """ + Simulated intensity from silicon sample with + medium resolution chopper configuration. + """ + return _pooch.fetch('silicon.h5') From ef5802ada80e2bb211d2cb41d6cac3196fa988f2 Mon Sep 17 00:00:00 2001 From: Johannes Kasimir Date: Tue, 24 Jun 2025 14:58:36 +0200 Subject: [PATCH 03/29] docs: fix name --- docs/user-guide/beer/index.md | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/docs/user-guide/beer/index.md b/docs/user-guide/beer/index.md index b7a5348d..327613e4 100644 --- a/docs/user-guide/beer/index.md +++ b/docs/user-guide/beer/index.md @@ -6,6 +6,5 @@ --- maxdepth: 1 --- - -beer-mcstas +beer_mcstas ``` From 5a19e76e80ba71f3b1f3f8421b7adb296aac834e Mon Sep 17 00:00:00 2001 From: Johannes Kasimir Date: Wed, 25 Jun 2025 10:45:15 +0200 Subject: [PATCH 04/29] refactor: remove unnecessary coordinates and coord transformations --- src/ess/beer/io.py | 33 +++++++++++++-------------------- 1 file changed, 13 insertions(+), 20 deletions(-) diff --git a/src/ess/beer/io.py b/src/ess/beer/io.py index ad98db7b..fbdf1058 100644 --- a/src/ess/beer/io.py +++ b/src/ess/beer/io.py @@ -3,7 +3,6 @@ import h5py import numpy as np import scipp as sc -from scipp import constants def _load_h5(group: h5py.Group | str, *paths: str): @@ -71,31 +70,25 @@ def load_beer_mcstas(f): da.coords['x'].unit = 'm' da.coords['y'].unit = 'm' da.coords['t'].unit = 's' - da.coords['L1'] = sc.norm( + + z = sc.norm(da.coords['detector_position'] - da.coords['sample_position']) + L1 = sc.norm( da.coords['sample_position'] - da.coords['chopper_position'] ) - sc.scalar(0.854, unit='m') # note! 
fudge factor - da.coords['L2'] = sc.norm( - da.coords['detector_position'] - da.coords['sample_position'] - ) + L2 = sc.sqrt(da.coords['x'] ** 2 + da.coords['y'] ** 2 + z**2) + da.coords['L'] = L1 + L2 da.coords['two_theta'] = sc.scalar(np.pi / 2, unit='rad') + sc.atan2( - y=da.coords['x'], x=da.coords['L2'] - ) - da.coords['sin_theta'] = sc.sin(da.coords['two_theta'] / 2) - da.coords['L'] = da.coords['L1'] + sc.sqrt( - da.coords['x'] ** 2 + da.coords['y'] ** 2 + da.coords['L2'] ** 2 + y=da.coords['x'], x=z ) + # Save some space + da.coords.pop('x') + da.coords.pop('y') + da.coords.pop('n') + da.coords.pop('id') + # TODO: approximate t0 should depend on chopper information - da.coords['t0'] = sc.scalar(0.0066, unit='s') - da.coords['coarse_d'] = ( - constants.h - / constants.m_n - * (da.coords['t'] - da.coords['t0']) - / da.coords['sin_theta'] - / da.coords['L'] - / 2 - ).to(unit='angstrom') - da.coords['sin_theta_L'] = da.coords['sin_theta'] * da.coords['L'] + da.coords['approximate_t0'] = sc.scalar(0.0066, unit='s') # TODO: limits should be user configurable da.masks['two_theta'] = ( From 8331450e4c90131d2d72f27bb286f346cdd4c83d Mon Sep 17 00:00:00 2001 From: Johannes Kasimir Date: Wed, 25 Jun 2025 10:49:04 +0200 Subject: [PATCH 05/29] refactor: make clustering step configurable + separate tof code from clustering --- src/ess/beer/clustering.py | 68 ++++++++++--------------------------- src/ess/beer/conversions.py | 56 ++++++++++++++++++++++++++++++ src/ess/beer/types.py | 15 ++++++++ src/ess/beer/workflow.py | 23 +++++-------- 4 files changed, 98 insertions(+), 64 deletions(-) create mode 100644 src/ess/beer/conversions.py create mode 100644 src/ess/beer/types.py diff --git a/src/ess/beer/clustering.py b/src/ess/beer/clustering.py index 30764f82..efa077c8 100644 --- a/src/ess/beer/clustering.py +++ b/src/ess/beer/clustering.py @@ -1,8 +1,21 @@ import scipp as sc +from scipp import constants from scipy.signal import find_peaks +from .types import DetectorData, RunType, StreakClusteredData + + +def cluster_events_by_streak(da: DetectorData[RunType]) -> StreakClusteredData[RunType]: + da = da.copy(deep=False) + da.coords['coarse_d'] = ( + constants.h + / constants.m_n + * (da.coords['t'] - da.coords['approximate_t0']) + / sc.sin(da.coords['two_theta'] / 2) + / da.coords['L'] + / 2 + ).to(unit='angstrom') -def cluster_events_by_streak(da: sc.DataArray) -> sc.DataArray: h = da.hist(coarse_d=1000) i_peaks, _ = find_peaks(h.data.values, height=40, distance=3) i_valleys, _ = find_peaks( @@ -36,55 +49,10 @@ def cluster_events_by_streak(da: sc.DataArray) -> sc.DataArray: b = da.bin(coarse_d=filtered_valleys).assign_masks( no_peak=has_peak != sc.scalar(1, unit=None) ) - b.coords['peak'] = ( - peaks.bin(coarse_d=filtered_valleys).bins.coords['coarse_d'].bins.mean() - ) + b = b.drop_coords(('coarse_d',)) + b = b.bins.drop_coords(('coarse_d',)) + b = b.rename_dims(coarse_d='streak') return b -def compute_tof_in_each_cluster(b: sc.DataArray) -> None: - '''Fits a line through each cluster, the intercept of the line is t0. - The line is fitted using linear regression with an outlier removal procedure. - - The algorithm is: - - 1. Fit line through clusters. - 2. Mask points that are "outliers" based on the criteria that they are too far - from the line in the ``t`` variable. - This means they don't seem to have the same time of flight origin as the rest - of the points in the cluster. - 3. Go back to 1. and iterate until convergence. A few iterations should be enough. 
- ''' - for _ in range(3): - s, t0 = _linear_regression_by_bin( - b.bins.coords['sin_theta_L'], b.bins.coords['t'], b - ) - b.bins.masks['too_far_from_center'] = ( - sc.abs( - sc.values(t0) - + sc.values(s) * b.bins.coords['sin_theta_L'] - - b.bins.coords['t'] - ) - > sc.scalar(3e-4, unit='s') - ).data - - b.coords['t0'] = sc.values(t0).data - b.bins.coords['tof'] = (b.bins.coords['t'] - sc.values(t0)).data - - -def _linear_regression_by_bin( - x: sc.Variable, y: sc.Variable, w: sc.DataArray -) -> tuple[sc.DataArray, sc.DataArray]: - w = sc.values(w) - tot_w = w.bins.sum() - - avg_x = (w * x).bins.sum() / tot_w - avg_y = (w * y).bins.sum() / tot_w - - cov_xy = (w * (x - avg_x) * (y - avg_y)).bins.sum() / tot_w - var_x = (w * (x - avg_x) ** 2).bins.sum() / tot_w - - b1 = cov_xy / var_x - b0 = avg_y - b1 * avg_x - - return b1, b0 +providers = (cluster_events_by_streak,) diff --git a/src/ess/beer/conversions.py b/src/ess/beer/conversions.py new file mode 100644 index 00000000..41bd815a --- /dev/null +++ b/src/ess/beer/conversions.py @@ -0,0 +1,56 @@ +import scipp as sc + +from .types import DetectorTofData, RunType, StreakClusteredData + + +def compute_tof_in_each_cluster( + da: StreakClusteredData[RunType], +) -> DetectorTofData[RunType]: + '''Fits a line through each cluster, the intercept of the line is t0. + The line is fitted using linear regression with an outlier removal procedure. + + The algorithm is: + + 1. Use least squares to fit line through clusters. + 2. Mask points that are "outliers" based on the criteria that they are too far + from the line in the ``t`` variable. + This means they don't seem to have the same time of flight origin as the rest + of the points in the cluster, and probably should belong to another cluster or + are part of the background. + 3. Go back to 1) and iterate until convergence. A few iterations should be enough. 
+ ''' + sin_theta_L = sc.sin(da.bins.coords['two_theta'] / 2) * da.bins.coords['L'] + t = da.bins.coords['t'] + for _ in range(3): + s, t0 = _linear_regression_by_bin(sin_theta_L, t, da) + da = da.bins.assign_masks( + too_far_from_center=( + sc.abs(sc.values(t0) + sc.values(s) * sin_theta_L - t) + > sc.scalar(3e-4, unit='s') + ).data + ) + + da = da.assign_coords(t0=sc.values(t0).data) + da = da.bins.assign_coords(tof=(t - sc.values(t0)).data) + return da + + +def _linear_regression_by_bin( + x: sc.Variable, y: sc.Variable, w: sc.DataArray +) -> tuple[sc.DataArray, sc.DataArray]: + w = sc.values(w) + tot_w = w.bins.sum() + + avg_x = (w * x).bins.sum() / tot_w + avg_y = (w * y).bins.sum() / tot_w + + cov_xy = (w * (x - avg_x) * (y - avg_y)).bins.sum() / tot_w + var_x = (w * (x - avg_x) ** 2).bins.sum() / tot_w + + b1 = cov_xy / var_x + b0 = avg_y - b1 * avg_x + + return b1, b0 + + +providers = (compute_tof_in_each_cluster,) diff --git a/src/ess/beer/types.py b/src/ess/beer/types.py new file mode 100644 index 00000000..7a0dae55 --- /dev/null +++ b/src/ess/beer/types.py @@ -0,0 +1,15 @@ +import sciline +import scipp as sc + +from ess.reduce.nexus.types import DetectorData, Filename, RunType, SampleRun +from ess.reduce.time_of_flight.types import DetectorTofData + + +class StreakClusteredData(sciline.Scope[RunType, sc.DataArray], sc.DataArray): + """Detector data binned by streak""" + + +DetectorData = DetectorData +Filename = Filename +SampleRun = SampleRun +DetectorTofData = DetectorTofData diff --git a/src/ess/beer/workflow.py b/src/ess/beer/workflow.py index d17546c5..dab2b91a 100644 --- a/src/ess/beer/workflow.py +++ b/src/ess/beer/workflow.py @@ -2,27 +2,22 @@ # Copyright (c) 2025 Scipp contributors (https://github.com/scipp) import sciline as sl -from ess.reduce.nexus.types import DetectorData, Filename, SampleRun -from ess.reduce.time_of_flight.types import DetectorTofData - -from .clustering import cluster_events_by_streak, compute_tof_in_each_cluster +from .clustering import providers as clustering_providers +from .conversions import providers as conversion_providers from .io import load_beer_mcstas +from .types import DetectorData, Filename, RunType, SampleRun -def load_data(fname: Filename[SampleRun]) -> DetectorData[SampleRun]: +def load_mcstas(fname: Filename[SampleRun]) -> DetectorData[SampleRun]: return DetectorData[SampleRun](load_beer_mcstas(fname)) -def cluster_events_and_compute_tof( - da: DetectorData[SampleRun], -) -> DetectorTofData[SampleRun]: - da = cluster_events_by_streak(da) - compute_tof_in_each_cluster(da) - return DetectorTofData[SampleRun](da) - - default_parameters = {} def BeerMcStasWorkflow(): - return sl.Pipeline((load_data, cluster_events_and_compute_tof)) + return sl.Pipeline( + (load_mcstas, *clustering_providers, *conversion_providers), + params=default_parameters, + constraints={RunType: (SampleRun,)}, + ) From c82445e16111662925c4d043f8ef2a484d6707fa Mon Sep 17 00:00:00 2001 From: Johannes Kasimir Date: Wed, 25 Jun 2025 10:50:15 +0200 Subject: [PATCH 06/29] docs: add header and 2D figure illustrating the intensity --- docs/user-guide/beer/beer_mcstas.ipynb | 30 ++++++++++++++++++++------ 1 file changed, 24 insertions(+), 6 deletions(-) diff --git a/docs/user-guide/beer/beer_mcstas.ipynb b/docs/user-guide/beer/beer_mcstas.ipynb index b8e56ecc..5c279c05 100644 --- a/docs/user-guide/beer/beer_mcstas.ipynb +++ b/docs/user-guide/beer/beer_mcstas.ipynb @@ -10,10 +10,18 @@ "%matplotlib ipympl" ] }, + { + "cell_type": "markdown", + "id": "1", + "metadata": {}, 
+ "source": [ + "# BEER McStas data reduction" + ] + }, { "cell_type": "code", "execution_count": null, - "id": "1", + "id": "2", "metadata": {}, "outputs": [], "source": [ @@ -28,7 +36,7 @@ { "cell_type": "code", "execution_count": null, - "id": "2", + "id": "3", "metadata": {}, "outputs": [], "source": [ @@ -38,28 +46,38 @@ { "cell_type": "code", "execution_count": null, - "id": "3", + "id": "4", "metadata": {}, "outputs": [], "source": [ "wf[Filename[SampleRun]] = mcstas_duplex_medium_resolution()\n", "da = wf.compute(DetectorTofData[SampleRun])\n", - "da.bins.coords['d'] = (sc.constants.h / sc.constants.m_n * da.bins.coords['tof'] / da.bins.coords['sin_theta_L'] / 2).to(unit='angstrom')\n", + "da.bins.coords['d'] = (sc.constants.h / sc.constants.m_n * da.bins.coords['tof'] / sc.sin(da.bins.coords['two_theta'] / 2) / da.bins.coords['L'] / 2).to(unit='angstrom')\n", "da.bins.concat().hist(d=2000).plot()" ] }, { "cell_type": "code", "execution_count": null, - "id": "4", + "id": "5", "metadata": {}, "outputs": [], "source": [ "wf[Filename[SampleRun]] = mcstas_silicon_medium_resolution()\n", "da = wf.compute(DetectorTofData[SampleRun])\n", - "da.bins.coords['d'] = (sc.constants.h / sc.constants.m_n * da.bins.coords['tof'] / da.bins.coords['sin_theta_L'] / 2).to(unit='angstrom')\n", + "da.bins.coords['d'] = (sc.constants.h / sc.constants.m_n * da.bins.coords['tof'] / sc.sin(da.bins.coords['two_theta'] / 2) / da.bins.coords['L'] / 2).to(unit='angstrom')\n", "da.bins.concat().hist(d=2000).plot()" ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "6", + "metadata": {}, + "outputs": [], + "source": [ + "da.bins.concat().hist(two_theta=1000, t=1000).plot(norm='log')" + ] } ], "metadata": { From 749313468c84094f1cfe491214fcf06475473faf Mon Sep 17 00:00:00 2001 From: Johannes Kasimir Date: Wed, 2 Jul 2025 10:21:44 +0200 Subject: [PATCH 07/29] docs: add ground truth peak positions to notebook --- docs/user-guide/beer/beer_mcstas.ipynb | 10 ++++++--- src/ess/beer/data.py | 30 ++++++++++++++++++++++++++ 2 files changed, 37 insertions(+), 3 deletions(-) diff --git a/docs/user-guide/beer/beer_mcstas.ipynb b/docs/user-guide/beer/beer_mcstas.ipynb index 5c279c05..670b791b 100644 --- a/docs/user-guide/beer/beer_mcstas.ipynb +++ b/docs/user-guide/beer/beer_mcstas.ipynb @@ -28,7 +28,7 @@ "import scipp as sc\n", "\n", "from ess.beer import BeerMcStasWorkflow\n", - "from ess.beer.data import mcstas_silicon_medium_resolution, mcstas_duplex_medium_resolution\n", + "from ess.beer.data import mcstas_silicon_medium_resolution, mcstas_duplex_medium_resolution, duplex_peaks_array, silicon_peaks_array\n", "from ess.reduce.nexus.types import Filename, SampleRun\n", "from ess.reduce.time_of_flight.types import DetectorTofData" ] @@ -53,7 +53,9 @@ "wf[Filename[SampleRun]] = mcstas_duplex_medium_resolution()\n", "da = wf.compute(DetectorTofData[SampleRun])\n", "da.bins.coords['d'] = (sc.constants.h / sc.constants.m_n * da.bins.coords['tof'] / sc.sin(da.bins.coords['two_theta'] / 2) / da.bins.coords['L'] / 2).to(unit='angstrom')\n", - "da.bins.concat().hist(d=2000).plot()" + "p = da.bins.concat().hist(d=2000).plot()\n", + "p.ax.vlines(duplex_peaks_array().values, 0, 20000, linestyle='--', color='black', lw=0.5, label='true $d_{hkl}$')\n", + "p" ] }, { @@ -66,7 +68,9 @@ "wf[Filename[SampleRun]] = mcstas_silicon_medium_resolution()\n", "da = wf.compute(DetectorTofData[SampleRun])\n", "da.bins.coords['d'] = (sc.constants.h / sc.constants.m_n * da.bins.coords['tof'] / sc.sin(da.bins.coords['two_theta'] / 2) 
/ da.bins.coords['L'] / 2).to(unit='angstrom')\n", - "da.bins.concat().hist(d=2000).plot()" + "p = da.bins.concat().hist(d=2000).plot()\n", + "p.ax.vlines(silicon_peaks_array().values, 0, 20000, linestyle='--', color='black', lw=0.5, label='true $d_{hkl}$')\n", + "p" ] }, { diff --git a/src/ess/beer/data.py b/src/ess/beer/data.py index 839f2614..357af819 100644 --- a/src/ess/beer/data.py +++ b/src/ess/beer/data.py @@ -2,6 +2,8 @@ # Copyright (c) 2025 Scipp contributors (https://github.com/scipp) """Data for tests and documentation with BEER.""" +import scipp as sc + _version = "1" __all__ = ["mcstas_duplex_medium_resolution", "mcstas_silicon_medium_resolution"] @@ -18,6 +20,8 @@ def _make_pooch(): registry={ "duplex.h5": "md5:ebb3f9694ffdd0949f342bd0deaaf627", "silicon.h5": "md5:3425ae2b2fe7a938c6f0a4afeb9e0819", + "silicon-dhkl.tab": "md5:90bedceb23245b045ce1ed0170c1313b", + "duplex-dhkl.tab": "md5:b4c6c2fcd66466ad291f306b2d6b346e", }, ) @@ -39,3 +43,29 @@ def mcstas_silicon_medium_resolution() -> str: medium resolution chopper configuration. """ return _pooch.fetch('silicon.h5') + + +def duplex_peaks() -> str: + return _pooch.fetch('duplex-dhkl.tab') + + +def silicon_peaks() -> str: + return _pooch.fetch('silicon-dhkl.tab') + + +def duplex_peaks_array() -> sc.Variable: + with open(duplex_peaks()) as f: + return sc.array( + dims='d', + values=sorted([float(x) for x in f.read().split('\n') if x != '']), + unit='angstrom', + ) + + +def silicon_peaks_array() -> sc.Variable: + with open(silicon_peaks()) as f: + return sc.array( + dims='d', + values=sorted([float(x) for x in f.read().split('\n') if x != '']), + unit='angstrom', + ) From ebd10d50a1df695c814e724c6ee869b1959e16cb Mon Sep 17 00:00:00 2001 From: Johannes Kasimir Date: Wed, 2 Jul 2025 10:22:20 +0200 Subject: [PATCH 08/29] fix: coordinate tranformations --- src/ess/beer/io.py | 13 ++++--------- 1 file changed, 4 insertions(+), 9 deletions(-) diff --git a/src/ess/beer/io.py b/src/ess/beer/io.py index fbdf1058..14d74a7b 100644 --- a/src/ess/beer/io.py +++ b/src/ess/beer/io.py @@ -1,7 +1,6 @@ # SPDX-License-Identifier: BSD-3-Clause # Copyright (c) 2025 Scipp contributors (https://github.com/scipp) import h5py -import numpy as np import scipp as sc @@ -44,7 +43,7 @@ def load_beer_mcstas(f): 'NXentry/NXdetector/bank01_events_dat_list_p_x_y_n_id_t/events', 'NXentry/simulation/Param', '/entry1/instrument/components/0189_sampleMantid/Position', - '/entry1/instrument/components/0020_cMCC/Position', + '/entry1/instrument/components/0017_cMCA/Position', ) da = sc.DataArray( sc.array(dims=['events'], values=events[:, 0], variances=events[:, 0] ** 2), @@ -72,14 +71,10 @@ def load_beer_mcstas(f): da.coords['t'].unit = 's' z = sc.norm(da.coords['detector_position'] - da.coords['sample_position']) - L1 = sc.norm( - da.coords['sample_position'] - da.coords['chopper_position'] - ) - sc.scalar(0.854, unit='m') # note! 
fudge factor + L1 = sc.norm(da.coords['sample_position'] - da.coords['chopper_position']) L2 = sc.sqrt(da.coords['x'] ** 2 + da.coords['y'] ** 2 + z**2) da.coords['L'] = L1 + L2 - da.coords['two_theta'] = sc.scalar(np.pi / 2, unit='rad') + sc.atan2( - y=da.coords['x'], x=z - ) + da.coords['two_theta'] = sc.acos(-da.coords['x'] / L2) # Save some space da.coords.pop('x') @@ -88,7 +83,7 @@ def load_beer_mcstas(f): da.coords.pop('id') # TODO: approximate t0 should depend on chopper information - da.coords['approximate_t0'] = sc.scalar(0.0066, unit='s') + da.coords['approximate_t0'] = sc.scalar(0.006, unit='s') # TODO: limits should be user configurable da.masks['two_theta'] = ( From 4db8df760cfbc155614bc7617c8845059f1e5dec Mon Sep 17 00:00:00 2001 From: Johannes Kasimir Date: Wed, 2 Jul 2025 10:23:15 +0200 Subject: [PATCH 09/29] feat: mask regions where streaks are too close --- src/ess/beer/conversions.py | 34 +++++++++++++++++++++++++++++----- 1 file changed, 29 insertions(+), 5 deletions(-) diff --git a/src/ess/beer/conversions.py b/src/ess/beer/conversions.py index 41bd815a..ee445e4d 100644 --- a/src/ess/beer/conversions.py +++ b/src/ess/beer/conversions.py @@ -1,3 +1,4 @@ +import numpy as np import scipp as sc from .types import DetectorTofData, RunType, StreakClusteredData @@ -21,13 +22,36 @@ def compute_tof_in_each_cluster( ''' sin_theta_L = sc.sin(da.bins.coords['two_theta'] / 2) * da.bins.coords['L'] t = da.bins.coords['t'] - for _ in range(3): + for _ in range(5): s, t0 = _linear_regression_by_bin(sin_theta_L, t, da) + + s_left = sc.array(dims=s.dims, values=np.roll(s.values, 1), unit=s.unit) + s_right = sc.array(dims=s.dims, values=np.roll(s.values, -1), unit=s.unit) + t0_left = sc.array(dims=t0.dims, values=np.roll(t0.values, 1), unit=t0.unit) + t0_right = sc.array(dims=t0.dims, values=np.roll(t0.values, -1), unit=t0.unit) + + # Distance from point to line through cluster + distance_to_self = sc.abs(sc.values(t0) + sc.values(s) * sin_theta_L - t) + # Distance from this cluster line to next before cluster line + distance_self_to_left = sc.abs( + sc.values(t0_left) + + sc.values(s_left) * sin_theta_L + - (sc.values(t0) + sc.values(s) * sin_theta_L) + ) + # Distance from this cluster line to next after cluster line + distance_self_to_right = sc.abs( + sc.values(t0_right) + + sc.values(s_right) * sin_theta_L + - (sc.values(t0) + sc.values(s) * sin_theta_L) + ) + da = da.bins.assign_masks( - too_far_from_center=( - sc.abs(sc.values(t0) + sc.values(s) * sin_theta_L - t) - > sc.scalar(3e-4, unit='s') - ).data + # TODO: Find suitable masking parameters for other chopper settings + too_far_from_center=(distance_to_self > sc.scalar(3e-4, unit='s')).data, + too_close_to_other=( + (distance_self_to_left < sc.scalar(8e-4, unit='s')) + | (distance_self_to_right < sc.scalar(8e-4, unit='s')) + ).data, ) da = da.assign_coords(t0=sc.values(t0).data) From a83254b4f3c07950bb14441fc8b4e0e4446f3fe3 Mon Sep 17 00:00:00 2001 From: Johannes Kasimir Date: Mon, 7 Jul 2025 15:08:02 +0200 Subject: [PATCH 10/29] feat: add workflow for BEER frame unwrapping when peak positions are known --- docs/user-guide/beer/beer_mcstas.ipynb | 392 ++++++++++++++++++++++++- src/ess/beer/__init__.py | 7 +- src/ess/beer/clustering.py | 8 +- src/ess/beer/conversions.py | 119 +++++++- src/ess/beer/io.py | 7 +- src/ess/beer/types.py | 16 + src/ess/beer/workflow.py | 78 ++++- 7 files changed, 601 insertions(+), 26 deletions(-) diff --git a/docs/user-guide/beer/beer_mcstas.ipynb b/docs/user-guide/beer/beer_mcstas.ipynb index 
670b791b..13988a3f 100644 --- a/docs/user-guide/beer/beer_mcstas.ipynb +++ b/docs/user-guide/beer/beer_mcstas.ipynb @@ -27,10 +27,13 @@ "source": [ "import scipp as sc\n", "\n", - "from ess.beer import BeerMcStasWorkflow\n", + "from ess.beer import BeerMcStasWorkflow, BeerMcStasWorkflowKnownPeaks\n", "from ess.beer.data import mcstas_silicon_medium_resolution, mcstas_duplex_medium_resolution, duplex_peaks_array, silicon_peaks_array\n", "from ess.reduce.nexus.types import Filename, SampleRun\n", - "from ess.reduce.time_of_flight.types import DetectorTofData" + "from ess.reduce.time_of_flight.types import DetectorTofData\n", + "from ess.beer.types import *\n", + "\n", + "dspacing = sc.linspace('dspacing', 0.8, 2.2, 2000, unit='angstrom')" ] }, { @@ -40,7 +43,11 @@ "metadata": {}, "outputs": [], "source": [ - "wf = BeerMcStasWorkflow()" + "%%time\n", + "wf = BeerMcStasWorkflowKnownPeaks()\n", + "wf[Filename[SampleRun]] = mcstas_duplex_medium_resolution()\n", + "da = wf.compute(DetectorTofData[SampleRun])\n", + "da.hist(dspacing=dspacing).plot()" ] }, { @@ -50,10 +57,12 @@ "metadata": {}, "outputs": [], "source": [ + "%%time\n", + "wf = BeerMcStasWorkflow()\n", "wf[Filename[SampleRun]] = mcstas_duplex_medium_resolution()\n", "da = wf.compute(DetectorTofData[SampleRun])\n", - "da.bins.coords['d'] = (sc.constants.h / sc.constants.m_n * da.bins.coords['tof'] / sc.sin(da.bins.coords['two_theta'] / 2) / da.bins.coords['L'] / 2).to(unit='angstrom')\n", - "p = da.bins.concat().hist(d=2000).plot()\n", + "da.bins.coords['dspacing'] = (sc.constants.h / sc.constants.m_n * da.bins.coords['tof'] / sc.sin(da.bins.coords['two_theta'] / 2) / da.bins.coords['Ltotal'] / 2).to(unit='angstrom')\n", + "p = da.bins.concat().hist(dspacing=dspacing).plot()\n", "p.ax.vlines(duplex_peaks_array().values, 0, 20000, linestyle='--', color='black', lw=0.5, label='true $d_{hkl}$')\n", "p" ] @@ -65,10 +74,27 @@ "metadata": {}, "outputs": [], "source": [ + "%%time\n", + "wf = BeerMcStasWorkflowKnownPeaks()\n", + "wf[DHKLList] = sc.array(dims=('peaks',), values=[3.1353, 1.92, 1.6374, 1.5677, 1.3576, 1.2458, 1.1085, 1.0451, 1.0451, 0.96, 0.9179, 0.9051, 0.8586, 0.8281, 0.8187, 0.7838, 0.7604, 0.7604, 0.7257, 0.707 , 0.707], unit='angstrom')\n", + "wf[Filename[SampleRun]] = mcstas_silicon_medium_resolution()\n", + "da = wf.compute(DetectorTofData[SampleRun])\n", + "da.hist(dspacing=dspacing).plot()" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "6", + "metadata": {}, + "outputs": [], + "source": [ + "%%time\n", + "wf = BeerMcStasWorkflow()\n", "wf[Filename[SampleRun]] = mcstas_silicon_medium_resolution()\n", "da = wf.compute(DetectorTofData[SampleRun])\n", - "da.bins.coords['d'] = (sc.constants.h / sc.constants.m_n * da.bins.coords['tof'] / sc.sin(da.bins.coords['two_theta'] / 2) / da.bins.coords['L'] / 2).to(unit='angstrom')\n", - "p = da.bins.concat().hist(d=2000).plot()\n", + "da.bins.coords['dspacing'] = (sc.constants.h / sc.constants.m_n * da.bins.coords['tof'] / sc.sin(da.bins.coords['two_theta'] / 2) / da.bins.coords['Ltotal'] / 2).to(unit='angstrom')\n", + "p = da.bins.concat().hist(dspacing=dspacing).plot()\n", "p.ax.vlines(silicon_peaks_array().values, 0, 20000, linestyle='--', color='black', lw=0.5, label='true $d_{hkl}$')\n", "p" ] @@ -76,11 +102,359 @@ { "cell_type": "code", "execution_count": null, - "id": "6", + "id": "7", + "metadata": {}, + "outputs": [], + "source": [ + "da.bins.concat().hist(two_theta=1000, event_time_offset=1000).plot(norm='log')" + ] + }, + { + "cell_type": 
"markdown", + "id": "8", + "metadata": {}, + "source": [ + "## Automatic peak finder and reduction for different chopper modes" + ] + }, + { + "cell_type": "markdown", + "id": "9", + "metadata": {}, + "source": [ + "### Mode 7" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "10", + "metadata": {}, + "outputs": [], + "source": [ + "wf = BeerMcStasWorkflow()\n", + "wf[Filename[SampleRun]] = '/home/johannes/work/diffraction/beer/mc-stitch/data/mccode_mode7/mccode.h5'\n", + "wf.compute(DetectorData[SampleRun]).hist(two_theta=1000, event_time_offset=1000).plot(norm='log')" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "11", + "metadata": {}, + "outputs": [], + "source": [ + "%%time\n", + "wf[MaxTimeOffset] = sc.scalar(6e-4, unit='s')\n", + "wf[MinTimeToNextStreak] = sc.scalar(8e-4/10, unit='s')\n", + "da = wf.compute(DetectorTofData[SampleRun])\n", + "da.bins.coords['dspacing'] = (sc.constants.h / sc.constants.m_n * da.bins.coords['tof'] / sc.sin(da.bins.coords['two_theta'] / 2) / da.bins.coords['Ltotal'] / 2).to(unit='angstrom')\n", + "p = da.bins.concat().hist(dspacing=dspacing).plot()\n", + "p.ax.vlines(duplex_peaks_array().values, 0, 20000, linestyle='--', color='black', lw=0.5, label='true $d_{hkl}$')\n", + "p" + ] + }, + { + "cell_type": "markdown", + "id": "12", + "metadata": {}, + "source": [ + "### Mode 8" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "13", + "metadata": {}, + "outputs": [], + "source": [ + "wf = BeerMcStasWorkflow()\n", + "wf[Filename[SampleRun]] = '/home/johannes/work/diffraction/beer/mc-stitch/data/mccode_mode8/mccode.h5'\n", + "wf.compute(DetectorData[SampleRun]).hist(two_theta=1000, event_time_offset=1000).plot(norm='log')" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "14", + "metadata": {}, + "outputs": [], + "source": [ + "wf[MaxTimeOffset] = sc.scalar(6e-4, unit='s')\n", + "wf[MinTimeToNextStreak] = sc.scalar(8e-4/10, unit='s')\n", + "da = wf.compute(DetectorTofData[SampleRun])\n", + "da.bins.coords['dspacing'] = (sc.constants.h / sc.constants.m_n * da.bins.coords['tof'] / sc.sin(da.bins.coords['two_theta'] / 2) / da.bins.coords['Ltotal'] / 2).to(unit='angstrom')\n", + "p = da.bins.concat().hist(dspacing=dspacing).plot()\n", + "p.ax.vlines(duplex_peaks_array().values, 0, 20000, linestyle='--', color='black', lw=0.5, label='true $d_{hkl}$')\n", + "p" + ] + }, + { + "cell_type": "markdown", + "id": "15", + "metadata": {}, + "source": [ + "### Mode 9" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "16", + "metadata": {}, + "outputs": [], + "source": [ + "wf = BeerMcStasWorkflow()\n", + "wf[Filename[SampleRun]] = mcstas_duplex_medium_resolution() # mode 9\n", + "wf.compute(DetectorData[SampleRun]).hist(two_theta=1000, event_time_offset=1000).plot(norm='log')" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "17", + "metadata": {}, + "outputs": [], + "source": [ + "wf[MaxTimeOffset] = sc.scalar(3e-4, unit='s')\n", + "wf[MinTimeToNextStreak] = sc.scalar(8e-4, unit='s')\n", + "da = wf.compute(DetectorTofData[SampleRun])\n", + "da.bins.coords['dspacing'] = (sc.constants.h / sc.constants.m_n * da.bins.coords['tof'] / sc.sin(da.bins.coords['two_theta'] / 2) / da.bins.coords['Ltotal'] / 2).to(unit='angstrom')\n", + "p = da.bins.concat().hist(dspacing=dspacing).plot()\n", + "p.ax.vlines(duplex_peaks_array().values, 0, 20000, linestyle='--', color='black', lw=0.5, label='true $d_{hkl}$')\n", + "p" + ] + }, + { + "cell_type": 
"code", + "execution_count": null, + "id": "18", + "metadata": {}, + "outputs": [], + "source": [ + "da = da.copy()\n", + "da.masks.clear()\n", + "da.bins.masks['too_far_from_center'] = ~da.bins.masks.pop('too_far_from_center')\n", + "da.bins.masks['too_close_to_other'] = ~da.bins.masks.pop('too_close_to_other')\n", + "\n", + "da.bins.concat().hist(two_theta=1000, event_time_offset=1000).plot(norm='log')" + ] + }, + { + "cell_type": "markdown", + "id": "19", + "metadata": {}, + "source": [ + "### Mode 10" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "20", + "metadata": {}, + "outputs": [], + "source": [ + "wf = BeerMcStasWorkflow()\n", + "wf[Filename[SampleRun]] = '/home/johannes/work/diffraction/beer/mc-stitch/data/mccode_mode10/mccode.h5'\n", + "wf.compute(DetectorData[SampleRun]).hist(two_theta=1000, event_time_offset=1000).plot(norm='log')" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "21", "metadata": {}, "outputs": [], "source": [ - "da.bins.concat().hist(two_theta=1000, t=1000).plot(norm='log')" + "wf[MaxTimeOffset] = sc.scalar(3e-4/2, unit='s')\n", + "wf[MinTimeToNextStreak] = sc.scalar(8e-4/2, unit='s')\n", + "da = wf.compute(DetectorTofData[SampleRun])\n", + "da.bins.coords['dspacing'] = (sc.constants.h / sc.constants.m_n * da.bins.coords['tof'] / sc.sin(da.bins.coords['two_theta'] / 2) / da.bins.coords['Ltotal'] / 2).to(unit='angstrom')\n", + "p = da.bins.concat().hist(dspacing=dspacing).plot()\n", + "p.ax.vlines(duplex_peaks_array().values, 0, 20000, linestyle='--', color='black', lw=0.5, label='true $d_{hkl}$')\n", + "p" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "22", + "metadata": {}, + "outputs": [], + "source": [ + "da = da.copy()\n", + "da.masks.clear()\n", + "da.bins.masks['too_far_from_center'] = ~da.bins.masks.pop('too_far_from_center')\n", + "da.bins.masks['too_close_to_other'] = ~da.bins.masks.pop('too_close_to_other')\n", + "\n", + "da.bins.concat().hist(two_theta=1000, event_time_offset=1000).plot(norm='log')" + ] + }, + { + "cell_type": "markdown", + "id": "23", + "metadata": {}, + "source": [ + "### Mode 16" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "24", + "metadata": {}, + "outputs": [], + "source": [ + "wf = BeerMcStasWorkflow()\n", + "wf[Filename[SampleRun]] = '/home/johannes/work/diffraction/beer/mc-stitch/data/mccode_mode16/mccode.h5'\n", + "wf.compute(DetectorData[SampleRun]).hist(two_theta=1000, event_time_offset=1000).plot(norm='log')" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "25", + "metadata": {}, + "outputs": [], + "source": [ + "wf[MaxTimeOffset] = sc.scalar(3e-4/2, unit='s')\n", + "wf[MinTimeToNextStreak] = sc.scalar(8e-4/2, unit='s')\n", + "da = wf.compute(DetectorTofData[SampleRun])\n", + "da.bins.coords['dspacing'] = (sc.constants.h / sc.constants.m_n * da.bins.coords['tof'] / sc.sin(da.bins.coords['two_theta'] / 2) / da.bins.coords['Ltotal'] / 2).to(unit='angstrom')\n", + "p = da.bins.concat().hist(dspacing=dspacing).plot()\n", + "p.ax.vlines(duplex_peaks_array().values, 0, 20000, linestyle='--', color='black', lw=0.5, label='true $d_{hkl}$')\n", + "p" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "26", + "metadata": {}, + "outputs": [], + "source": [ + "da = da.copy()\n", + "da.masks.clear()\n", + "da.bins.masks['too_far_from_center'] = ~da.bins.masks.pop('too_far_from_center')\n", + "da.bins.masks['too_close_to_other'] = ~da.bins.masks.pop('too_close_to_other')\n", + "\n", + 
"da.bins.concat().hist(two_theta=1000, event_time_offset=1000).plot(norm='log')" + ] + }, + { + "cell_type": "markdown", + "id": "27", + "metadata": {}, + "source": [ + "## Reuction with known peak workflow" + ] + }, + { + "cell_type": "markdown", + "id": "28", + "metadata": {}, + "source": [ + "## Mode 7" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "29", + "metadata": {}, + "outputs": [], + "source": [ + "wf = BeerMcStasWorkflowKnownPeaks()\n", + "wf[Filename[SampleRun]] = '/home/johannes/work/diffraction/beer/mc-stitch/data/mccode_mode7/mccode.h5'\n", + "da = wf.compute(DetectorTofData[SampleRun])\n", + "da.hist(dspacing=dspacing).plot()" + ] + }, + { + "cell_type": "markdown", + "id": "30", + "metadata": {}, + "source": [ + "## Mode 8" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "31", + "metadata": {}, + "outputs": [], + "source": [ + "wf = BeerMcStasWorkflowKnownPeaks()\n", + "wf[Filename[SampleRun]] = '/home/johannes/work/diffraction/beer/mc-stitch/data/mccode_mode8/mccode.h5'\n", + "da = wf.compute(DetectorTofData[SampleRun])\n", + "da.hist(dspacing=dspacing).plot()" + ] + }, + { + "cell_type": "markdown", + "id": "32", + "metadata": {}, + "source": [ + "## Mode 9" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "33", + "metadata": {}, + "outputs": [], + "source": [ + "wf = BeerMcStasWorkflowKnownPeaks()\n", + "wf[Filename[SampleRun]] = mcstas_duplex_medium_resolution()\n", + "da = wf.compute(DetectorTofData[SampleRun])\n", + "da.hist(dspacing=dspacing).plot()" + ] + }, + { + "cell_type": "markdown", + "id": "34", + "metadata": {}, + "source": [ + "## Mode 10" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "35", + "metadata": {}, + "outputs": [], + "source": [ + "wf = BeerMcStasWorkflowKnownPeaks()\n", + "wf[Filename[SampleRun]] = '/home/johannes/work/diffraction/beer/mc-stitch/data/mccode_mode10/mccode.h5'\n", + "da = wf.compute(DetectorTofData[SampleRun])\n", + "da.hist(dspacing=dspacing).plot()" + ] + }, + { + "cell_type": "markdown", + "id": "36", + "metadata": {}, + "source": [ + "## Mode 16" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "37", + "metadata": {}, + "outputs": [], + "source": [ + "wf = BeerMcStasWorkflowKnownPeaks()\n", + "wf[Time0] = sc.scalar(0.5 * 17/360 /28, unit='s')\n", + "wf[Filename[SampleRun]] = '/home/johannes/work/diffraction/beer/mc-stitch/data/mccode_mode16/mccode.h5'\n", + "da = wf.compute(DetectorTofData[SampleRun])\n", + "da.hist(dspacing=dspacing).plot()" ] } ], diff --git a/src/ess/beer/__init__.py b/src/ess/beer/__init__.py index 1455c430..9bdc8157 100644 --- a/src/ess/beer/__init__.py +++ b/src/ess/beer/__init__.py @@ -8,7 +8,11 @@ import importlib.metadata from .io import load_beer_mcstas -from .workflow import BeerMcStasWorkflow, default_parameters +from .workflow import ( + BeerMcStasWorkflow, + BeerMcStasWorkflowKnownPeaks, + default_parameters, +) try: __version__ = importlib.metadata.version("essdiffraction") @@ -19,6 +23,7 @@ __all__ = [ 'BeerMcStasWorkflow', + 'BeerMcStasWorkflowKnownPeaks', '__version__', 'default_parameters', 'load_beer_mcstas', diff --git a/src/ess/beer/clustering.py b/src/ess/beer/clustering.py index efa077c8..54a967d8 100644 --- a/src/ess/beer/clustering.py +++ b/src/ess/beer/clustering.py @@ -10,14 +10,16 @@ def cluster_events_by_streak(da: DetectorData[RunType]) -> StreakClusteredData[R da.coords['coarse_d'] = ( constants.h / constants.m_n - * (da.coords['t'] - da.coords['approximate_t0']) 
+ * (da.coords['event_time_offset'] - da.coords['approximate_t0']) / sc.sin(da.coords['two_theta'] / 2) - / da.coords['L'] + / da.coords['L0'] / 2 ).to(unit='angstrom') h = da.hist(coarse_d=1000) - i_peaks, _ = find_peaks(h.data.values, height=40, distance=3) + i_peaks, _ = find_peaks( + h.data.values, height=(2 * sc.values(h).median()).value, distance=3 + ) i_valleys, _ = find_peaks( h.data.values.max() - h.data.values, distance=3, height=h.data.values.max() / 2 ) diff --git a/src/ess/beer/conversions.py b/src/ess/beer/conversions.py index ee445e4d..700c3131 100644 --- a/src/ess/beer/conversions.py +++ b/src/ess/beer/conversions.py @@ -1,11 +1,28 @@ import numpy as np import scipp as sc - -from .types import DetectorTofData, RunType, StreakClusteredData +import scipp.constants +import scippneutron as scn + +from .types import ( + DetectorData, + DetectorTofData, + DHKLList, + ElasticCoordTransformGraph, + MaxTimeOffset, + MinTimeToNextStreak, + ModDt, + ModShift, + ModTwidth, + RunType, + StreakClusteredData, + Time0, +) def compute_tof_in_each_cluster( da: StreakClusteredData[RunType], + max_distance_from_streak_line: MaxTimeOffset, + min_distance_to_neighbor_line: MinTimeToNextStreak, ) -> DetectorTofData[RunType]: '''Fits a line through each cluster, the intercept of the line is t0. The line is fitted using linear regression with an outlier removal procedure. @@ -20,9 +37,9 @@ def compute_tof_in_each_cluster( are part of the background. 3. Go back to 1) and iterate until convergence. A few iterations should be enough. ''' - sin_theta_L = sc.sin(da.bins.coords['two_theta'] / 2) * da.bins.coords['L'] - t = da.bins.coords['t'] - for _ in range(5): + sin_theta_L = sc.sin(da.bins.coords['two_theta'] / 2) * da.bins.coords['Ltotal'] + t = da.bins.coords['event_time_offset'] + for _ in range(15): s, t0 = _linear_regression_by_bin(sin_theta_L, t, da) s_left = sc.array(dims=s.dims, values=np.roll(s.values, 1), unit=s.unit) @@ -44,13 +61,12 @@ def compute_tof_in_each_cluster( + sc.values(s_right) * sin_theta_L - (sc.values(t0) + sc.values(s) * sin_theta_L) ) - da = da.bins.assign_masks( # TODO: Find suitable masking parameters for other chopper settings - too_far_from_center=(distance_to_self > sc.scalar(3e-4, unit='s')).data, + too_far_from_center=(distance_to_self > max_distance_from_streak_line).data, too_close_to_other=( - (distance_self_to_left < sc.scalar(8e-4, unit='s')) - | (distance_self_to_right < sc.scalar(8e-4, unit='s')) + (distance_self_to_left < min_distance_to_neighbor_line) + | (distance_self_to_right < min_distance_to_neighbor_line) ).data, ) @@ -77,4 +93,89 @@ def _linear_regression_by_bin( return b1, b0 +def _compute_d(event_time_offset, theta, dhkl_list, mod_twidth, mod_shift, L0): + sinth = sc.sin(theta) + t = event_time_offset + + d = sc.empty(dims=sinth.dims, shape=sinth.shape, unit=dhkl_list[0].unit) + d[:] = sc.scalar(float('nan'), unit=dhkl_list[0].unit) + + dtfound = sc.empty(dims=sinth.dims, shape=sinth.shape, dtype='float64', unit=t.unit) + dtfound[:] = sc.scalar(float('nan'), unit=t.unit) + + const = ( + 2 * (1.0 + mod_shift) * sinth * L0 / (scipp.constants.h / scipp.constants.m_n) + ).to(unit=f'{event_time_offset.unit}/angstrom') + + for dhkl in dhkl_list: + dt = sc.abs(t - dhkl * const) + dt_in_range = dt < mod_twidth / 2 + no_dt_found = sc.isnan(dtfound) + dtfound = sc.where(dt_in_range, sc.where(no_dt_found, dt, dtfound), dtfound) + d = sc.where( + dt_in_range, + sc.where(no_dt_found, dhkl, sc.scalar(float('nan'), unit=dhkl.unit)), + d, + ) + + return d + + +def 
_tof_from_dhkl( + event_time_offset, theta, coarse_dhkl, Ltotal, mod_shift, mod_dt, time0 +): + # tref = 2 * (1.0 + mod_shift) * d_hkl * sinth / hm * L0 + # tc = t - time0 - tref + # dt = np.floor(tc / mod_dt + 0.5) * mod_dt + time0 + c = (-2 * (1.0 + mod_shift) / (scipp.constants.h / scipp.constants.m_n)).to( + unit=f'{event_time_offset.unit}/m/angstrom' + ) + out = sc.sin(theta) + out *= c + out *= coarse_dhkl + out *= Ltotal + out += event_time_offset + out -= time0 + out /= mod_dt + out += 0.5 + sc.floor(out, out=out) + out *= mod_dt + out += time0 + out *= -1 + out += event_time_offset + return out + + +def tof_from_known_dhkl_graph( + mod_shift: ModShift, + mod_dt: ModDt, + mod_twidth: ModTwidth, + time0: Time0, + dhkl_list: DHKLList, +) -> ElasticCoordTransformGraph: + return { + **scn.conversion.graph.tof.elastic("tof"), + 'mod_twidth': lambda: mod_twidth, + 'mod_dt': lambda: mod_dt, + 'time0': lambda: time0, + 'mod_shift': lambda: mod_shift, + 'tof': _tof_from_dhkl, + 'coarse_dhkl': _compute_d, + 'theta': lambda two_theta: two_theta / 2, + 'dhkl_list': lambda: dhkl_list, + } + + +def compute_tof_from_known_peaks( + da: DetectorData[RunType], graph: ElasticCoordTransformGraph +) -> DetectorTofData[RunType]: + return da.transform_coords( + ('dspacing', 'tof', 'coarse_dhkl'), graph=graph, keep_intermediate=False + ) + + +convert_from_known_peaks_providers = ( + tof_from_known_dhkl_graph, + compute_tof_from_known_peaks, +) providers = (compute_tof_in_each_cluster,) diff --git a/src/ess/beer/io.py b/src/ess/beer/io.py index 14d74a7b..eaeccaa2 100644 --- a/src/ess/beer/io.py +++ b/src/ess/beer/io.py @@ -73,7 +73,9 @@ def load_beer_mcstas(f): z = sc.norm(da.coords['detector_position'] - da.coords['sample_position']) L1 = sc.norm(da.coords['sample_position'] - da.coords['chopper_position']) L2 = sc.sqrt(da.coords['x'] ** 2 + da.coords['y'] ** 2 + z**2) - da.coords['L'] = L1 + L2 + # Source is assumed to be at the origin + da.coords['L0'] = L1 + L2 + sc.norm(da.coords['chopper_position']) + da.coords['Ltotal'] = L1 + L2 da.coords['two_theta'] = sc.acos(-da.coords['x'] / L2) # Save some space @@ -83,11 +85,12 @@ def load_beer_mcstas(f): da.coords.pop('id') # TODO: approximate t0 should depend on chopper information - da.coords['approximate_t0'] = sc.scalar(0.006, unit='s') + da.coords['approximate_t0'] = sc.scalar(6e-3, unit='s') # TODO: limits should be user configurable da.masks['two_theta'] = ( da.coords['two_theta'] >= sc.scalar(105, unit='deg').to(unit='rad') ) | (da.coords['two_theta'] <= sc.scalar(75, unit='deg').to(unit='rad')) + da.coords['event_time_offset'] = da.coords.pop('t') return da diff --git a/src/ess/beer/types.py b/src/ess/beer/types.py index 7a0dae55..6e7b473f 100644 --- a/src/ess/beer/types.py +++ b/src/ess/beer/types.py @@ -1,3 +1,5 @@ +from typing import NewType + import sciline import scipp as sc @@ -13,3 +15,17 @@ class StreakClusteredData(sciline.Scope[RunType, sc.DataArray], sc.DataArray): Filename = Filename SampleRun = SampleRun DetectorTofData = DetectorTofData + + +MaxTimeOffset = NewType('MaxTimeOffset', sc.Variable) +MinTimeToNextStreak = NewType('MinTimeToNextStreak', sc.Variable) + +PeakList = NewType('PeakList', sc.Variable) + +ElasticCoordTransformGraph = NewType("ElasticCoordTransformGraph", dict) + +ModShift = NewType('ModShift', sc.Variable) +ModTwidth = NewType('ModTwidth', sc.Variable) +ModDt = NewType('ModDt', sc.Variable) +Time0 = NewType('Time0', sc.Variable) +DHKLList = NewType('DHKLList', sc.Variable) diff --git a/src/ess/beer/workflow.py 
b/src/ess/beer/workflow.py index dab2b91a..b9ddb899 100644 --- a/src/ess/beer/workflow.py +++ b/src/ess/beer/workflow.py @@ -1,18 +1,84 @@ # SPDX-License-Identifier: BSD-3-Clause # Copyright (c) 2025 Scipp contributors (https://github.com/scipp) import sciline as sl +import scipp as sc from .clustering import providers as clustering_providers +from .conversions import convert_from_known_peaks_providers from .conversions import providers as conversion_providers from .io import load_beer_mcstas -from .types import DetectorData, Filename, RunType, SampleRun +from .types import ( + DetectorData, + DHKLList, + Filename, + MaxTimeOffset, + MinTimeToNextStreak, + ModDt, + ModShift, + ModTwidth, + RunType, + SampleRun, + Time0, +) def load_mcstas(fname: Filename[SampleRun]) -> DetectorData[SampleRun]: return DetectorData[SampleRun](load_beer_mcstas(fname)) -default_parameters = {} +default_parameters = { + ModTwidth: sc.scalar(0.003, unit='s'), + ModShift: sc.scalar(5e-4), + ModDt: sc.scalar(4.464e-4, unit='s'), + Time0: sc.scalar(1.16 * 17 / 360 / 28, unit='s'), + MaxTimeOffset: sc.scalar(3e-4, unit='s'), + MinTimeToNextStreak: sc.scalar(8e-4, unit='s'), + DHKLList: sc.array( + dims=('peaks',), + values=[ + 2.1055, + 2.0407, + 1.8234, + 1.443, + 1.2893, + 1.1782, + 1.0996, + 1.0527, + 1.0204, + 0.9126, + 0.9117, + 0.8366, + 0.8331, + 0.8154, + 0.7713, + 0.7444, + 0.7215, + 0.7018, + 0.7018, + 0.6802, + 0.6802, + 0.6453, + 0.6447, + 0.6164, + 0.6153, + 0.6078, + 0.6078, + 0.5891, + 0.5766, + 0.566, + 0.566, + 0.5561, + 0.5498, + 0.5269, + 0.5264, + 0.5107, + 0.5107, + 0.5102, + 0.5057, + ], + unit='angstrom', + ), +} def BeerMcStasWorkflow(): @@ -21,3 +87,11 @@ def BeerMcStasWorkflow(): params=default_parameters, constraints={RunType: (SampleRun,)}, ) + + +def BeerMcStasWorkflowKnownPeaks(): + return sl.Pipeline( + (load_mcstas, *convert_from_known_peaks_providers), + params=default_parameters, + constraints={RunType: (SampleRun,)}, + ) From 4ca920464b8c72b3e4220c207b2ea96038974c51 Mon Sep 17 00:00:00 2001 From: Johannes Kasimir Date: Mon, 7 Jul 2025 15:33:04 +0200 Subject: [PATCH 11/29] ci: add example data from different chopper settings --- src/ess/beer/data.py | 15 +++++++++------ 1 file changed, 9 insertions(+), 6 deletions(-) diff --git a/src/ess/beer/data.py b/src/ess/beer/data.py index 357af819..942afbe5 100644 --- a/src/ess/beer/data.py +++ b/src/ess/beer/data.py @@ -6,7 +6,7 @@ _version = "1" -__all__ = ["mcstas_duplex_medium_resolution", "mcstas_silicon_medium_resolution"] +__all__ = ["mcstas_duplex", "mcstas_silicon_medium_resolution"] def _make_pooch(): @@ -18,7 +18,11 @@ def _make_pooch(): base_url="https://public.esss.dk/groups/scipp/ess/beer/{version}/", version=_version, registry={ - "duplex.h5": "md5:ebb3f9694ffdd0949f342bd0deaaf627", + "duplex-mode07.h5": "md5:e8d44197e4bc6a84ab9265bfabd96efe", + "duplex-mode08.h5": "md5:7cd2cf86d5d98fe07097ff98b250ba9b", + "duplex-mode09.h5": "md5:ebb3f9694ffdd0949f342bd0deaaf627", + "duplex-mode10.h5": "md5:559e7fc0cce265f5102520e382ee5b26", + "duplex-mode16.h5": "md5:2ccd05832bbc8a087a731b37364b995d", "silicon.h5": "md5:3425ae2b2fe7a938c6f0a4afeb9e0819", "silicon-dhkl.tab": "md5:90bedceb23245b045ce1ed0170c1313b", "duplex-dhkl.tab": "md5:b4c6c2fcd66466ad291f306b2d6b346e", @@ -29,12 +33,11 @@ def _make_pooch(): _pooch = _make_pooch() -def mcstas_duplex_medium_resolution() -> str: +def mcstas_duplex(mode: int) -> str: """ - Simulated intensity from duplex sample with - medium resolution chopper configuration. 
+ Simulated intensity from duplex sample with ``mode`` chopper configuration. """ - return _pooch.fetch('duplex.h5') + return _pooch.fetch(f'duplex-mode{mode:02}.h5') def mcstas_silicon_medium_resolution() -> str: From b427c3bc9866acdc4fa1172377e6b7f0442f5ee1 Mon Sep 17 00:00:00 2001 From: Johannes Kasimir Date: Mon, 7 Jul 2025 15:34:17 +0200 Subject: [PATCH 12/29] docs: update notebook with new files --- docs/user-guide/beer/beer_mcstas.ipynb | 28 ++++++++++++-------------- 1 file changed, 13 insertions(+), 15 deletions(-) diff --git a/docs/user-guide/beer/beer_mcstas.ipynb b/docs/user-guide/beer/beer_mcstas.ipynb index 13988a3f..f68f3600 100644 --- a/docs/user-guide/beer/beer_mcstas.ipynb +++ b/docs/user-guide/beer/beer_mcstas.ipynb @@ -28,7 +28,7 @@ "import scipp as sc\n", "\n", "from ess.beer import BeerMcStasWorkflow, BeerMcStasWorkflowKnownPeaks\n", - "from ess.beer.data import mcstas_silicon_medium_resolution, mcstas_duplex_medium_resolution, duplex_peaks_array, silicon_peaks_array\n", + "from ess.beer.data import mcstas_silicon_medium_resolution, mcstas_duplex, duplex_peaks_array, silicon_peaks_array\n", "from ess.reduce.nexus.types import Filename, SampleRun\n", "from ess.reduce.time_of_flight.types import DetectorTofData\n", "from ess.beer.types import *\n", @@ -45,7 +45,7 @@ "source": [ "%%time\n", "wf = BeerMcStasWorkflowKnownPeaks()\n", - "wf[Filename[SampleRun]] = mcstas_duplex_medium_resolution()\n", + "wf[Filename[SampleRun]] = mcstas_duplex(9)\n", "da = wf.compute(DetectorTofData[SampleRun])\n", "da.hist(dspacing=dspacing).plot()" ] @@ -59,7 +59,7 @@ "source": [ "%%time\n", "wf = BeerMcStasWorkflow()\n", - "wf[Filename[SampleRun]] = mcstas_duplex_medium_resolution()\n", + "wf[Filename[SampleRun]] = mcstas_duplex(9)\n", "da = wf.compute(DetectorTofData[SampleRun])\n", "da.bins.coords['dspacing'] = (sc.constants.h / sc.constants.m_n * da.bins.coords['tof'] / sc.sin(da.bins.coords['two_theta'] / 2) / da.bins.coords['Ltotal'] / 2).to(unit='angstrom')\n", "p = da.bins.concat().hist(dspacing=dspacing).plot()\n", @@ -133,7 +133,7 @@ "outputs": [], "source": [ "wf = BeerMcStasWorkflow()\n", - "wf[Filename[SampleRun]] = '/home/johannes/work/diffraction/beer/mc-stitch/data/mccode_mode7/mccode.h5'\n", + "wf[Filename[SampleRun]] = mcstas_duplex(7)\n", "wf.compute(DetectorData[SampleRun]).hist(two_theta=1000, event_time_offset=1000).plot(norm='log')" ] }, @@ -170,7 +170,7 @@ "outputs": [], "source": [ "wf = BeerMcStasWorkflow()\n", - "wf[Filename[SampleRun]] = '/home/johannes/work/diffraction/beer/mc-stitch/data/mccode_mode8/mccode.h5'\n", + "wf[Filename[SampleRun]] = mcstas_duplex(8)\n", "wf.compute(DetectorData[SampleRun]).hist(two_theta=1000, event_time_offset=1000).plot(norm='log')" ] }, @@ -206,7 +206,7 @@ "outputs": [], "source": [ "wf = BeerMcStasWorkflow()\n", - "wf[Filename[SampleRun]] = mcstas_duplex_medium_resolution() # mode 9\n", + "wf[Filename[SampleRun]] = mcstas_duplex(9)\n", "wf.compute(DetectorData[SampleRun]).hist(two_theta=1000, event_time_offset=1000).plot(norm='log')" ] }, @@ -217,8 +217,6 @@ "metadata": {}, "outputs": [], "source": [ - "wf[MaxTimeOffset] = sc.scalar(3e-4, unit='s')\n", - "wf[MinTimeToNextStreak] = sc.scalar(8e-4, unit='s')\n", "da = wf.compute(DetectorTofData[SampleRun])\n", "da.bins.coords['dspacing'] = (sc.constants.h / sc.constants.m_n * da.bins.coords['tof'] / sc.sin(da.bins.coords['two_theta'] / 2) / da.bins.coords['Ltotal'] / 2).to(unit='angstrom')\n", "p = da.bins.concat().hist(dspacing=dspacing).plot()\n", @@ -257,7 +255,7 @@ "outputs": 
[], "source": [ "wf = BeerMcStasWorkflow()\n", - "wf[Filename[SampleRun]] = '/home/johannes/work/diffraction/beer/mc-stitch/data/mccode_mode10/mccode.h5'\n", + "wf[Filename[SampleRun]] = mcstas_duplex(10)\n", "wf.compute(DetectorData[SampleRun]).hist(two_theta=1000, event_time_offset=1000).plot(norm='log')" ] }, @@ -308,7 +306,7 @@ "outputs": [], "source": [ "wf = BeerMcStasWorkflow()\n", - "wf[Filename[SampleRun]] = '/home/johannes/work/diffraction/beer/mc-stitch/data/mccode_mode16/mccode.h5'\n", + "wf[Filename[SampleRun]] = mcstas_duplex(16)\n", "wf.compute(DetectorData[SampleRun]).hist(two_theta=1000, event_time_offset=1000).plot(norm='log')" ] }, @@ -367,7 +365,7 @@ "outputs": [], "source": [ "wf = BeerMcStasWorkflowKnownPeaks()\n", - "wf[Filename[SampleRun]] = '/home/johannes/work/diffraction/beer/mc-stitch/data/mccode_mode7/mccode.h5'\n", + "wf[Filename[SampleRun]] = mcstas_duplex(7)\n", "da = wf.compute(DetectorTofData[SampleRun])\n", "da.hist(dspacing=dspacing).plot()" ] @@ -388,7 +386,7 @@ "outputs": [], "source": [ "wf = BeerMcStasWorkflowKnownPeaks()\n", - "wf[Filename[SampleRun]] = '/home/johannes/work/diffraction/beer/mc-stitch/data/mccode_mode8/mccode.h5'\n", + "wf[Filename[SampleRun]] = mcstas_duplex(8)\n", "da = wf.compute(DetectorTofData[SampleRun])\n", "da.hist(dspacing=dspacing).plot()" ] @@ -409,7 +407,7 @@ "outputs": [], "source": [ "wf = BeerMcStasWorkflowKnownPeaks()\n", - "wf[Filename[SampleRun]] = mcstas_duplex_medium_resolution()\n", + "wf[Filename[SampleRun]] = mcstas_duplex(9)\n", "da = wf.compute(DetectorTofData[SampleRun])\n", "da.hist(dspacing=dspacing).plot()" ] @@ -430,7 +428,7 @@ "outputs": [], "source": [ "wf = BeerMcStasWorkflowKnownPeaks()\n", - "wf[Filename[SampleRun]] = '/home/johannes/work/diffraction/beer/mc-stitch/data/mccode_mode10/mccode.h5'\n", + "wf[Filename[SampleRun]] = mcstas_duplex(10)\n", "da = wf.compute(DetectorTofData[SampleRun])\n", "da.hist(dspacing=dspacing).plot()" ] @@ -452,7 +450,7 @@ "source": [ "wf = BeerMcStasWorkflowKnownPeaks()\n", "wf[Time0] = sc.scalar(0.5 * 17/360 /28, unit='s')\n", - "wf[Filename[SampleRun]] = '/home/johannes/work/diffraction/beer/mc-stitch/data/mccode_mode16/mccode.h5'\n", + "wf[Filename[SampleRun]] = mcstas_duplex(16)\n", "da = wf.compute(DetectorTofData[SampleRun])\n", "da.hist(dspacing=dspacing).plot()" ] From bf88dbfa16d0f05c131d1770ef573cef0b82f1ee Mon Sep 17 00:00:00 2001 From: Johannes Kasimir Date: Tue, 8 Jul 2025 13:58:40 +0200 Subject: [PATCH 13/29] feat: read both banks --- src/ess/beer/clustering.py | 2 ++ src/ess/beer/conversions.py | 10 ++++++++++ src/ess/beer/io.py | 27 +++++++++++++++++++-------- 3 files changed, 31 insertions(+), 8 deletions(-) diff --git a/src/ess/beer/clustering.py b/src/ess/beer/clustering.py index 54a967d8..1e3c15c7 100644 --- a/src/ess/beer/clustering.py +++ b/src/ess/beer/clustering.py @@ -6,6 +6,8 @@ def cluster_events_by_streak(da: DetectorData[RunType]) -> StreakClusteredData[RunType]: + if isinstance(da, sc.DataGroup): + return sc.DataGroup({k: cluster_events_by_streak(v) for k, v in da.items()}) da = da.copy(deep=False) da.coords['coarse_d'] = ( constants.h diff --git a/src/ess/beer/conversions.py b/src/ess/beer/conversions.py index 700c3131..cd55f9ce 100644 --- a/src/ess/beer/conversions.py +++ b/src/ess/beer/conversions.py @@ -37,6 +37,16 @@ def compute_tof_in_each_cluster( are part of the background. 3. Go back to 1) and iterate until convergence. A few iterations should be enough. 
''' + if isinstance(da, sc.DataGroup): + return sc.DataGroup( + { + k: compute_tof_in_each_cluster( + v, max_distance_from_streak_line, min_distance_to_neighbor_line + ) + for k, v in da.items() + } + ) + sin_theta_L = sc.sin(da.bins.coords['two_theta'] / 2) * da.bins.coords['Ltotal'] t = da.bins.coords['event_time_offset'] for _ in range(15): diff --git a/src/ess/beer/io.py b/src/ess/beer/io.py index eaeccaa2..a9c148a9 100644 --- a/src/ess/beer/io.py +++ b/src/ess/beer/io.py @@ -32,15 +32,11 @@ def _unique_child_group_h5( return out -def load_beer_mcstas(f): - if isinstance(f, str): - with h5py.File(f) as ff: - return load_beer_mcstas(ff) - +def _load_beer_mcstas(f, bank=1): data, events, params, sample_pos, chopper_pos = _load_h5( f, - 'NXentry/NXdetector/bank01_events_dat_list_p_x_y_n_id_t', - 'NXentry/NXdetector/bank01_events_dat_list_p_x_y_n_id_t/events', + f'NXentry/NXdetector/bank{bank:02}_events_dat_list_p_x_y_n_id_t', + f'NXentry/NXdetector/bank{bank:02}_events_dat_list_p_x_y_n_id_t/events', 'NXentry/simulation/Param', '/entry1/instrument/components/0189_sampleMantid/Position', '/entry1/instrument/components/0017_cMCA/Position', @@ -76,7 +72,9 @@ def load_beer_mcstas(f): # Source is assumed to be at the origin da.coords['L0'] = L1 + L2 + sc.norm(da.coords['chopper_position']) da.coords['Ltotal'] = L1 + L2 - da.coords['two_theta'] = sc.acos(-da.coords['x'] / L2) + da.coords['two_theta'] = sc.acos( + (-da.coords['x'] if bank == 1 else da.coords['x']) / L2 + ) # Save some space da.coords.pop('x') @@ -94,3 +92,16 @@ def load_beer_mcstas(f): da.coords['event_time_offset'] = da.coords.pop('t') return da + + +def load_beer_mcstas(f): + if isinstance(f, str): + with h5py.File(f) as ff: + return load_beer_mcstas(ff) + + return sc.DataGroup( + { + 'bank1': _load_beer_mcstas(f, bank=1), + 'bank2': _load_beer_mcstas(f, bank=2), + } + ) From 5929740f0332b41019858524f1cb284dfcf80741 Mon Sep 17 00:00:00 2001 From: Johannes Kasimir Date: Tue, 8 Jul 2025 13:59:05 +0200 Subject: [PATCH 14/29] docs: update to display both banks --- docs/user-guide/beer/beer_mcstas.ipynb | 49 +++++++++++++------------- 1 file changed, 25 insertions(+), 24 deletions(-) diff --git a/docs/user-guide/beer/beer_mcstas.ipynb b/docs/user-guide/beer/beer_mcstas.ipynb index f68f3600..c2547e13 100644 --- a/docs/user-guide/beer/beer_mcstas.ipynb +++ b/docs/user-guide/beer/beer_mcstas.ipynb @@ -26,6 +26,7 @@ "outputs": [], "source": [ "import scipp as sc\n", + "import scippneutron as scn\n", "\n", "from ess.beer import BeerMcStasWorkflow, BeerMcStasWorkflowKnownPeaks\n", "from ess.beer.data import mcstas_silicon_medium_resolution, mcstas_duplex, duplex_peaks_array, silicon_peaks_array\n", @@ -57,12 +58,12 @@ "metadata": {}, "outputs": [], "source": [ - "%%time\n", + "#%%time\n", "wf = BeerMcStasWorkflow()\n", "wf[Filename[SampleRun]] = mcstas_duplex(9)\n", "da = wf.compute(DetectorTofData[SampleRun])\n", - "da.bins.coords['dspacing'] = (sc.constants.h / sc.constants.m_n * da.bins.coords['tof'] / sc.sin(da.bins.coords['two_theta'] / 2) / da.bins.coords['Ltotal'] / 2).to(unit='angstrom')\n", - "p = da.bins.concat().hist(dspacing=dspacing).plot()\n", + "da = da.transform_coords(('dspacing',), graph=scn.conversion.graph.tof.elastic('tof'),)\n", + "p = da.hist(dspacing=dspacing, dim=da.dims).plot()\n", "p.ax.vlines(duplex_peaks_array().values, 0, 20000, linestyle='--', color='black', lw=0.5, label='true $d_{hkl}$')\n", "p" ] @@ -93,8 +94,8 @@ "wf = BeerMcStasWorkflow()\n", "wf[Filename[SampleRun]] = 
mcstas_silicon_medium_resolution()\n", "da = wf.compute(DetectorTofData[SampleRun])\n", - "da.bins.coords['dspacing'] = (sc.constants.h / sc.constants.m_n * da.bins.coords['tof'] / sc.sin(da.bins.coords['two_theta'] / 2) / da.bins.coords['Ltotal'] / 2).to(unit='angstrom')\n", - "p = da.bins.concat().hist(dspacing=dspacing).plot()\n", + "da = da.transform_coords(('dspacing',), graph=scn.conversion.graph.tof.elastic('tof'),)\n", + "p = da.hist(dspacing=dspacing, dim=da.dims).plot()\n", "p.ax.vlines(silicon_peaks_array().values, 0, 20000, linestyle='--', color='black', lw=0.5, label='true $d_{hkl}$')\n", "p" ] @@ -106,7 +107,7 @@ "metadata": {}, "outputs": [], "source": [ - "da.bins.concat().hist(two_theta=1000, event_time_offset=1000).plot(norm='log')" + "da['bank2'].bins.concat().hist(two_theta=1000, event_time_offset=1000).plot(norm='log')" ] }, { @@ -134,7 +135,7 @@ "source": [ "wf = BeerMcStasWorkflow()\n", "wf[Filename[SampleRun]] = mcstas_duplex(7)\n", - "wf.compute(DetectorData[SampleRun]).hist(two_theta=1000, event_time_offset=1000).plot(norm='log')" + "wf.compute(DetectorData[SampleRun])['bank1'].hist(two_theta=1000, event_time_offset=1000).plot(norm='log')" ] }, { @@ -148,8 +149,8 @@ "wf[MaxTimeOffset] = sc.scalar(6e-4, unit='s')\n", "wf[MinTimeToNextStreak] = sc.scalar(8e-4/10, unit='s')\n", "da = wf.compute(DetectorTofData[SampleRun])\n", - "da.bins.coords['dspacing'] = (sc.constants.h / sc.constants.m_n * da.bins.coords['tof'] / sc.sin(da.bins.coords['two_theta'] / 2) / da.bins.coords['Ltotal'] / 2).to(unit='angstrom')\n", - "p = da.bins.concat().hist(dspacing=dspacing).plot()\n", + "da = da.transform_coords(('dspacing',), graph=scn.conversion.graph.tof.elastic('tof'),)\n", + "p = da.hist(dspacing=dspacing, dim=da.dims).plot()\n", "p.ax.vlines(duplex_peaks_array().values, 0, 20000, linestyle='--', color='black', lw=0.5, label='true $d_{hkl}$')\n", "p" ] @@ -171,7 +172,7 @@ "source": [ "wf = BeerMcStasWorkflow()\n", "wf[Filename[SampleRun]] = mcstas_duplex(8)\n", - "wf.compute(DetectorData[SampleRun]).hist(two_theta=1000, event_time_offset=1000).plot(norm='log')" + "wf.compute(DetectorData[SampleRun])['bank1'].hist(two_theta=1000, event_time_offset=1000).plot(norm='log')" ] }, { @@ -184,8 +185,8 @@ "wf[MaxTimeOffset] = sc.scalar(6e-4, unit='s')\n", "wf[MinTimeToNextStreak] = sc.scalar(8e-4/10, unit='s')\n", "da = wf.compute(DetectorTofData[SampleRun])\n", - "da.bins.coords['dspacing'] = (sc.constants.h / sc.constants.m_n * da.bins.coords['tof'] / sc.sin(da.bins.coords['two_theta'] / 2) / da.bins.coords['Ltotal'] / 2).to(unit='angstrom')\n", - "p = da.bins.concat().hist(dspacing=dspacing).plot()\n", + "da = da.transform_coords(('dspacing',), graph=scn.conversion.graph.tof.elastic('tof'),)\n", + "p = da.hist(dspacing=dspacing, dim=da.dims).plot()\n", "p.ax.vlines(duplex_peaks_array().values, 0, 20000, linestyle='--', color='black', lw=0.5, label='true $d_{hkl}$')\n", "p" ] @@ -207,7 +208,7 @@ "source": [ "wf = BeerMcStasWorkflow()\n", "wf[Filename[SampleRun]] = mcstas_duplex(9)\n", - "wf.compute(DetectorData[SampleRun]).hist(two_theta=1000, event_time_offset=1000).plot(norm='log')" + "wf.compute(DetectorData[SampleRun])['bank1'].hist(two_theta=1000, event_time_offset=1000).plot(norm='log')" ] }, { @@ -218,8 +219,8 @@ "outputs": [], "source": [ "da = wf.compute(DetectorTofData[SampleRun])\n", - "da.bins.coords['dspacing'] = (sc.constants.h / sc.constants.m_n * da.bins.coords['tof'] / sc.sin(da.bins.coords['two_theta'] / 2) / da.bins.coords['Ltotal'] / 2).to(unit='angstrom')\n", - "p 
= da.bins.concat().hist(dspacing=dspacing).plot()\n", + "da = da.transform_coords(('dspacing',), graph=scn.conversion.graph.tof.elastic('tof'),)\n", + "p = da.hist(dspacing=dspacing, dim=da.dims).plot()\n", "p.ax.vlines(duplex_peaks_array().values, 0, 20000, linestyle='--', color='black', lw=0.5, label='true $d_{hkl}$')\n", "p" ] @@ -231,7 +232,7 @@ "metadata": {}, "outputs": [], "source": [ - "da = da.copy()\n", + "da = da['bank2'].copy()\n", "da.masks.clear()\n", "da.bins.masks['too_far_from_center'] = ~da.bins.masks.pop('too_far_from_center')\n", "da.bins.masks['too_close_to_other'] = ~da.bins.masks.pop('too_close_to_other')\n", @@ -256,7 +257,7 @@ "source": [ "wf = BeerMcStasWorkflow()\n", "wf[Filename[SampleRun]] = mcstas_duplex(10)\n", - "wf.compute(DetectorData[SampleRun]).hist(two_theta=1000, event_time_offset=1000).plot(norm='log')" + "wf.compute(DetectorData[SampleRun])['bank1'].hist(two_theta=1000, event_time_offset=1000).plot(norm='log')" ] }, { @@ -269,8 +270,8 @@ "wf[MaxTimeOffset] = sc.scalar(3e-4/2, unit='s')\n", "wf[MinTimeToNextStreak] = sc.scalar(8e-4/2, unit='s')\n", "da = wf.compute(DetectorTofData[SampleRun])\n", - "da.bins.coords['dspacing'] = (sc.constants.h / sc.constants.m_n * da.bins.coords['tof'] / sc.sin(da.bins.coords['two_theta'] / 2) / da.bins.coords['Ltotal'] / 2).to(unit='angstrom')\n", - "p = da.bins.concat().hist(dspacing=dspacing).plot()\n", + "da = da.transform_coords(('dspacing',), graph=scn.conversion.graph.tof.elastic('tof'),)\n", + "p = da.hist(dspacing=dspacing, dim=da.dims).plot()\n", "p.ax.vlines(duplex_peaks_array().values, 0, 20000, linestyle='--', color='black', lw=0.5, label='true $d_{hkl}$')\n", "p" ] @@ -282,7 +283,7 @@ "metadata": {}, "outputs": [], "source": [ - "da = da.copy()\n", + "da = da['bank1'].copy()\n", "da.masks.clear()\n", "da.bins.masks['too_far_from_center'] = ~da.bins.masks.pop('too_far_from_center')\n", "da.bins.masks['too_close_to_other'] = ~da.bins.masks.pop('too_close_to_other')\n", @@ -307,7 +308,7 @@ "source": [ "wf = BeerMcStasWorkflow()\n", "wf[Filename[SampleRun]] = mcstas_duplex(16)\n", - "wf.compute(DetectorData[SampleRun]).hist(two_theta=1000, event_time_offset=1000).plot(norm='log')" + "wf.compute(DetectorData[SampleRun])['bank1'].hist(two_theta=1000, event_time_offset=1000).plot(norm='log')" ] }, { @@ -320,8 +321,8 @@ "wf[MaxTimeOffset] = sc.scalar(3e-4/2, unit='s')\n", "wf[MinTimeToNextStreak] = sc.scalar(8e-4/2, unit='s')\n", "da = wf.compute(DetectorTofData[SampleRun])\n", - "da.bins.coords['dspacing'] = (sc.constants.h / sc.constants.m_n * da.bins.coords['tof'] / sc.sin(da.bins.coords['two_theta'] / 2) / da.bins.coords['Ltotal'] / 2).to(unit='angstrom')\n", - "p = da.bins.concat().hist(dspacing=dspacing).plot()\n", + "da = da.transform_coords(('dspacing',), graph=scn.conversion.graph.tof.elastic('tof'),)\n", + "p = da.hist(dspacing=dspacing, dim=da.dims).plot()\n", "p.ax.vlines(duplex_peaks_array().values, 0, 20000, linestyle='--', color='black', lw=0.5, label='true $d_{hkl}$')\n", "p" ] @@ -333,7 +334,7 @@ "metadata": {}, "outputs": [], "source": [ - "da = da.copy()\n", + "da = da['bank1'].copy()\n", "da.masks.clear()\n", "da.bins.masks['too_far_from_center'] = ~da.bins.masks.pop('too_far_from_center')\n", "da.bins.masks['too_close_to_other'] = ~da.bins.masks.pop('too_close_to_other')\n", From 074b90cf7368560e43c4004c037eb8808bbf995b Mon Sep 17 00:00:00 2001 From: Johannes Kasimir Date: Wed, 9 Jul 2025 10:21:48 +0200 Subject: [PATCH 15/29] fix: remove peak list from default parameters --- 
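With the default peak list removed by this patch, the known-peaks workflow requires `DHKLList` to be supplied explicitly; the notebook cells updated below do this. A minimal, self-contained version of the call pattern, assembled from those cells, is:

```python
from ess.beer import BeerMcStasWorkflowKnownPeaks
from ess.beer.data import duplex_peaks_array, mcstas_duplex
from ess.beer.types import DHKLList
from ess.reduce.nexus.types import Filename, SampleRun
from ess.reduce.time_of_flight.types import DetectorTofData

wf = BeerMcStasWorkflowKnownPeaks()
wf[DHKLList] = duplex_peaks_array()          # the peak list is now a required user input
wf[Filename[SampleRun]] = mcstas_duplex(9)
da = wf.compute(DetectorTofData[SampleRun])
```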
docs/user-guide/beer/beer_mcstas.ipynb | 10 ++++-- src/ess/beer/workflow.py | 46 -------------------------- 2 files changed, 8 insertions(+), 48 deletions(-) diff --git a/docs/user-guide/beer/beer_mcstas.ipynb b/docs/user-guide/beer/beer_mcstas.ipynb index c2547e13..6db781d6 100644 --- a/docs/user-guide/beer/beer_mcstas.ipynb +++ b/docs/user-guide/beer/beer_mcstas.ipynb @@ -46,6 +46,7 @@ "source": [ "%%time\n", "wf = BeerMcStasWorkflowKnownPeaks()\n", + "wf[DHKLList] = duplex_peaks_array()\n", "wf[Filename[SampleRun]] = mcstas_duplex(9)\n", "da = wf.compute(DetectorTofData[SampleRun])\n", "da.hist(dspacing=dspacing).plot()" @@ -58,7 +59,7 @@ "metadata": {}, "outputs": [], "source": [ - "#%%time\n", + "%%time\n", "wf = BeerMcStasWorkflow()\n", "wf[Filename[SampleRun]] = mcstas_duplex(9)\n", "da = wf.compute(DetectorTofData[SampleRun])\n", @@ -77,7 +78,7 @@ "source": [ "%%time\n", "wf = BeerMcStasWorkflowKnownPeaks()\n", - "wf[DHKLList] = sc.array(dims=('peaks',), values=[3.1353, 1.92, 1.6374, 1.5677, 1.3576, 1.2458, 1.1085, 1.0451, 1.0451, 0.96, 0.9179, 0.9051, 0.8586, 0.8281, 0.8187, 0.7838, 0.7604, 0.7604, 0.7257, 0.707 , 0.707], unit='angstrom')\n", + "wf[DHKLList] = silicon_peaks_array()\n", "wf[Filename[SampleRun]] = mcstas_silicon_medium_resolution()\n", "da = wf.compute(DetectorTofData[SampleRun])\n", "da.hist(dspacing=dspacing).plot()" @@ -366,6 +367,7 @@ "outputs": [], "source": [ "wf = BeerMcStasWorkflowKnownPeaks()\n", + "wf[DHKLList] = duplex_peaks_array()\n", "wf[Filename[SampleRun]] = mcstas_duplex(7)\n", "da = wf.compute(DetectorTofData[SampleRun])\n", "da.hist(dspacing=dspacing).plot()" @@ -387,6 +389,7 @@ "outputs": [], "source": [ "wf = BeerMcStasWorkflowKnownPeaks()\n", + "wf[DHKLList] = duplex_peaks_array()\n", "wf[Filename[SampleRun]] = mcstas_duplex(8)\n", "da = wf.compute(DetectorTofData[SampleRun])\n", "da.hist(dspacing=dspacing).plot()" @@ -408,6 +411,7 @@ "outputs": [], "source": [ "wf = BeerMcStasWorkflowKnownPeaks()\n", + "wf[DHKLList] = duplex_peaks_array()\n", "wf[Filename[SampleRun]] = mcstas_duplex(9)\n", "da = wf.compute(DetectorTofData[SampleRun])\n", "da.hist(dspacing=dspacing).plot()" @@ -429,6 +433,7 @@ "outputs": [], "source": [ "wf = BeerMcStasWorkflowKnownPeaks()\n", + "wf[DHKLList] = duplex_peaks_array()\n", "wf[Filename[SampleRun]] = mcstas_duplex(10)\n", "da = wf.compute(DetectorTofData[SampleRun])\n", "da.hist(dspacing=dspacing).plot()" @@ -450,6 +455,7 @@ "outputs": [], "source": [ "wf = BeerMcStasWorkflowKnownPeaks()\n", + "wf[DHKLList] = duplex_peaks_array()\n", "wf[Time0] = sc.scalar(0.5 * 17/360 /28, unit='s')\n", "wf[Filename[SampleRun]] = mcstas_duplex(16)\n", "da = wf.compute(DetectorTofData[SampleRun])\n", diff --git a/src/ess/beer/workflow.py b/src/ess/beer/workflow.py index b9ddb899..c335d587 100644 --- a/src/ess/beer/workflow.py +++ b/src/ess/beer/workflow.py @@ -9,7 +9,6 @@ from .io import load_beer_mcstas from .types import ( DetectorData, - DHKLList, Filename, MaxTimeOffset, MinTimeToNextStreak, @@ -33,51 +32,6 @@ def load_mcstas(fname: Filename[SampleRun]) -> DetectorData[SampleRun]: Time0: sc.scalar(1.16 * 17 / 360 / 28, unit='s'), MaxTimeOffset: sc.scalar(3e-4, unit='s'), MinTimeToNextStreak: sc.scalar(8e-4, unit='s'), - DHKLList: sc.array( - dims=('peaks',), - values=[ - 2.1055, - 2.0407, - 1.8234, - 1.443, - 1.2893, - 1.1782, - 1.0996, - 1.0527, - 1.0204, - 0.9126, - 0.9117, - 0.8366, - 0.8331, - 0.8154, - 0.7713, - 0.7444, - 0.7215, - 0.7018, - 0.7018, - 0.6802, - 0.6802, - 0.6453, - 0.6447, - 0.6164, - 0.6153, - 0.6078, - 
0.6078, - 0.5891, - 0.5766, - 0.566, - 0.566, - 0.5561, - 0.5498, - 0.5269, - 0.5264, - 0.5107, - 0.5107, - 0.5102, - 0.5057, - ], - unit='angstrom', - ), } From 54c5472ff35ea5e43e1eeca8509a39d6a82b7a09 Mon Sep 17 00:00:00 2001 From: Johannes Kasimir Date: Wed, 9 Jul 2025 10:41:47 +0200 Subject: [PATCH 16/29] fix: remove dspacing coord from DetectorTofData --- docs/user-guide/beer/beer_mcstas.ipynb | 7 +++++++ src/ess/beer/conversions.py | 12 ++++-------- src/ess/beer/types.py | 2 +- 3 files changed, 12 insertions(+), 9 deletions(-) diff --git a/docs/user-guide/beer/beer_mcstas.ipynb b/docs/user-guide/beer/beer_mcstas.ipynb index 6db781d6..171945de 100644 --- a/docs/user-guide/beer/beer_mcstas.ipynb +++ b/docs/user-guide/beer/beer_mcstas.ipynb @@ -49,6 +49,7 @@ "wf[DHKLList] = duplex_peaks_array()\n", "wf[Filename[SampleRun]] = mcstas_duplex(9)\n", "da = wf.compute(DetectorTofData[SampleRun])\n", + "da = da.transform_coords(('dspacing',), graph=scn.conversion.graph.tof.elastic('tof'),)\n", "da.hist(dspacing=dspacing).plot()" ] }, @@ -81,6 +82,7 @@ "wf[DHKLList] = silicon_peaks_array()\n", "wf[Filename[SampleRun]] = mcstas_silicon_medium_resolution()\n", "da = wf.compute(DetectorTofData[SampleRun])\n", + "da = da.transform_coords(('dspacing',), graph=scn.conversion.graph.tof.elastic('tof'),)\n", "da.hist(dspacing=dspacing).plot()" ] }, @@ -370,6 +372,7 @@ "wf[DHKLList] = duplex_peaks_array()\n", "wf[Filename[SampleRun]] = mcstas_duplex(7)\n", "da = wf.compute(DetectorTofData[SampleRun])\n", + "da = da.transform_coords(('dspacing',), graph=scn.conversion.graph.tof.elastic('tof'),)\n", "da.hist(dspacing=dspacing).plot()" ] }, @@ -392,6 +395,7 @@ "wf[DHKLList] = duplex_peaks_array()\n", "wf[Filename[SampleRun]] = mcstas_duplex(8)\n", "da = wf.compute(DetectorTofData[SampleRun])\n", + "da = da.transform_coords(('dspacing',), graph=scn.conversion.graph.tof.elastic('tof'),)\n", "da.hist(dspacing=dspacing).plot()" ] }, @@ -414,6 +418,7 @@ "wf[DHKLList] = duplex_peaks_array()\n", "wf[Filename[SampleRun]] = mcstas_duplex(9)\n", "da = wf.compute(DetectorTofData[SampleRun])\n", + "da = da.transform_coords(('dspacing',), graph=scn.conversion.graph.tof.elastic('tof'),)\n", "da.hist(dspacing=dspacing).plot()" ] }, @@ -436,6 +441,7 @@ "wf[DHKLList] = duplex_peaks_array()\n", "wf[Filename[SampleRun]] = mcstas_duplex(10)\n", "da = wf.compute(DetectorTofData[SampleRun])\n", + "da = da.transform_coords(('dspacing',), graph=scn.conversion.graph.tof.elastic('tof'),)\n", "da.hist(dspacing=dspacing).plot()" ] }, @@ -459,6 +465,7 @@ "wf[Time0] = sc.scalar(0.5 * 17/360 /28, unit='s')\n", "wf[Filename[SampleRun]] = mcstas_duplex(16)\n", "da = wf.compute(DetectorTofData[SampleRun])\n", + "da = da.transform_coords(('dspacing',), graph=scn.conversion.graph.tof.elastic('tof'),)\n", "da.hist(dspacing=dspacing).plot()" ] } diff --git a/src/ess/beer/conversions.py b/src/ess/beer/conversions.py index cd55f9ce..06a42578 100644 --- a/src/ess/beer/conversions.py +++ b/src/ess/beer/conversions.py @@ -1,13 +1,11 @@ import numpy as np import scipp as sc import scipp.constants -import scippneutron as scn from .types import ( DetectorData, DetectorTofData, DHKLList, - ElasticCoordTransformGraph, MaxTimeOffset, MinTimeToNextStreak, ModDt, @@ -16,6 +14,7 @@ RunType, StreakClusteredData, Time0, + TofCoordTransformGraph, ) @@ -162,9 +161,8 @@ def tof_from_known_dhkl_graph( mod_twidth: ModTwidth, time0: Time0, dhkl_list: DHKLList, -) -> ElasticCoordTransformGraph: +) -> TofCoordTransformGraph: return { - 
**scn.conversion.graph.tof.elastic("tof"), 'mod_twidth': lambda: mod_twidth, 'mod_dt': lambda: mod_dt, 'time0': lambda: time0, @@ -177,11 +175,9 @@ def tof_from_known_dhkl_graph( def compute_tof_from_known_peaks( - da: DetectorData[RunType], graph: ElasticCoordTransformGraph + da: DetectorData[RunType], graph: TofCoordTransformGraph ) -> DetectorTofData[RunType]: - return da.transform_coords( - ('dspacing', 'tof', 'coarse_dhkl'), graph=graph, keep_intermediate=False - ) + return da.transform_coords(('tof',), graph=graph, keep_intermediate=False) convert_from_known_peaks_providers = ( diff --git a/src/ess/beer/types.py b/src/ess/beer/types.py index 6e7b473f..5de9ab10 100644 --- a/src/ess/beer/types.py +++ b/src/ess/beer/types.py @@ -22,7 +22,7 @@ class StreakClusteredData(sciline.Scope[RunType, sc.DataArray], sc.DataArray): PeakList = NewType('PeakList', sc.Variable) -ElasticCoordTransformGraph = NewType("ElasticCoordTransformGraph", dict) +TofCoordTransformGraph = NewType("TofCoordTransformGraph", dict) ModShift = NewType('ModShift', sc.Variable) ModTwidth = NewType('ModTwidth', sc.Variable) From b3d1bfe9d2d6053bcbb0a30b3e623905e12c74da Mon Sep 17 00:00:00 2001 From: Johannes Kasimir Date: Wed, 9 Jul 2025 11:23:01 +0200 Subject: [PATCH 17/29] fix: simplify fit to streaks --- docs/user-guide/beer/beer_mcstas.ipynb | 8 +---- src/ess/beer/conversions.py | 46 +++++++------------------- src/ess/beer/types.py | 1 - src/ess/beer/workflow.py | 2 -- 4 files changed, 13 insertions(+), 44 deletions(-) diff --git a/docs/user-guide/beer/beer_mcstas.ipynb b/docs/user-guide/beer/beer_mcstas.ipynb index 171945de..eaf6e49f 100644 --- a/docs/user-guide/beer/beer_mcstas.ipynb +++ b/docs/user-guide/beer/beer_mcstas.ipynb @@ -150,7 +150,7 @@ "source": [ "%%time\n", "wf[MaxTimeOffset] = sc.scalar(6e-4, unit='s')\n", - "wf[MinTimeToNextStreak] = sc.scalar(8e-4/10, unit='s')\n", + "#wf[MinTimeToNextStreak] = sc.scalar(8e-4/10, unit='s')\n", "da = wf.compute(DetectorTofData[SampleRun])\n", "da = da.transform_coords(('dspacing',), graph=scn.conversion.graph.tof.elastic('tof'),)\n", "p = da.hist(dspacing=dspacing, dim=da.dims).plot()\n", @@ -186,7 +186,6 @@ "outputs": [], "source": [ "wf[MaxTimeOffset] = sc.scalar(6e-4, unit='s')\n", - "wf[MinTimeToNextStreak] = sc.scalar(8e-4/10, unit='s')\n", "da = wf.compute(DetectorTofData[SampleRun])\n", "da = da.transform_coords(('dspacing',), graph=scn.conversion.graph.tof.elastic('tof'),)\n", "p = da.hist(dspacing=dspacing, dim=da.dims).plot()\n", @@ -238,7 +237,6 @@ "da = da['bank2'].copy()\n", "da.masks.clear()\n", "da.bins.masks['too_far_from_center'] = ~da.bins.masks.pop('too_far_from_center')\n", - "da.bins.masks['too_close_to_other'] = ~da.bins.masks.pop('too_close_to_other')\n", "\n", "da.bins.concat().hist(two_theta=1000, event_time_offset=1000).plot(norm='log')" ] @@ -271,7 +269,6 @@ "outputs": [], "source": [ "wf[MaxTimeOffset] = sc.scalar(3e-4/2, unit='s')\n", - "wf[MinTimeToNextStreak] = sc.scalar(8e-4/2, unit='s')\n", "da = wf.compute(DetectorTofData[SampleRun])\n", "da = da.transform_coords(('dspacing',), graph=scn.conversion.graph.tof.elastic('tof'),)\n", "p = da.hist(dspacing=dspacing, dim=da.dims).plot()\n", @@ -289,7 +286,6 @@ "da = da['bank1'].copy()\n", "da.masks.clear()\n", "da.bins.masks['too_far_from_center'] = ~da.bins.masks.pop('too_far_from_center')\n", - "da.bins.masks['too_close_to_other'] = ~da.bins.masks.pop('too_close_to_other')\n", "\n", "da.bins.concat().hist(two_theta=1000, event_time_offset=1000).plot(norm='log')" ] @@ -322,7 +318,6 @@ 
"outputs": [], "source": [ "wf[MaxTimeOffset] = sc.scalar(3e-4/2, unit='s')\n", - "wf[MinTimeToNextStreak] = sc.scalar(8e-4/2, unit='s')\n", "da = wf.compute(DetectorTofData[SampleRun])\n", "da = da.transform_coords(('dspacing',), graph=scn.conversion.graph.tof.elastic('tof'),)\n", "p = da.hist(dspacing=dspacing, dim=da.dims).plot()\n", @@ -340,7 +335,6 @@ "da = da['bank1'].copy()\n", "da.masks.clear()\n", "da.bins.masks['too_far_from_center'] = ~da.bins.masks.pop('too_far_from_center')\n", - "da.bins.masks['too_close_to_other'] = ~da.bins.masks.pop('too_close_to_other')\n", "\n", "da.bins.concat().hist(two_theta=1000, event_time_offset=1000).plot(norm='log')" ] diff --git a/src/ess/beer/conversions.py b/src/ess/beer/conversions.py index 06a42578..ee7a7a5d 100644 --- a/src/ess/beer/conversions.py +++ b/src/ess/beer/conversions.py @@ -1,4 +1,3 @@ -import numpy as np import scipp as sc import scipp.constants @@ -7,7 +6,6 @@ DetectorTofData, DHKLList, MaxTimeOffset, - MinTimeToNextStreak, ModDt, ModShift, ModTwidth, @@ -21,7 +19,6 @@ def compute_tof_in_each_cluster( da: StreakClusteredData[RunType], max_distance_from_streak_line: MaxTimeOffset, - min_distance_to_neighbor_line: MinTimeToNextStreak, ) -> DetectorTofData[RunType]: '''Fits a line through each cluster, the intercept of the line is t0. The line is fitted using linear regression with an outlier removal procedure. @@ -39,9 +36,7 @@ def compute_tof_in_each_cluster( if isinstance(da, sc.DataGroup): return sc.DataGroup( { - k: compute_tof_in_each_cluster( - v, max_distance_from_streak_line, min_distance_to_neighbor_line - ) + k: compute_tof_in_each_cluster(v, max_distance_from_streak_line) for k, v in da.items() } ) @@ -49,44 +44,27 @@ def compute_tof_in_each_cluster( sin_theta_L = sc.sin(da.bins.coords['two_theta'] / 2) * da.bins.coords['Ltotal'] t = da.bins.coords['event_time_offset'] for _ in range(15): - s, t0 = _linear_regression_by_bin(sin_theta_L, t, da) - - s_left = sc.array(dims=s.dims, values=np.roll(s.values, 1), unit=s.unit) - s_right = sc.array(dims=s.dims, values=np.roll(s.values, -1), unit=s.unit) - t0_left = sc.array(dims=t0.dims, values=np.roll(t0.values, 1), unit=t0.unit) - t0_right = sc.array(dims=t0.dims, values=np.roll(t0.values, -1), unit=t0.unit) + s, t0 = _linear_regression_by_bin(sin_theta_L, t, da.data) # Distance from point to line through cluster distance_to_self = sc.abs(sc.values(t0) + sc.values(s) * sin_theta_L - t) - # Distance from this cluster line to next before cluster line - distance_self_to_left = sc.abs( - sc.values(t0_left) - + sc.values(s_left) * sin_theta_L - - (sc.values(t0) + sc.values(s) * sin_theta_L) - ) - # Distance from this cluster line to next after cluster line - distance_self_to_right = sc.abs( - sc.values(t0_right) - + sc.values(s_right) * sin_theta_L - - (sc.values(t0) + sc.values(s) * sin_theta_L) - ) + da = da.bins.assign_masks( - # TODO: Find suitable masking parameters for other chopper settings - too_far_from_center=(distance_to_self > max_distance_from_streak_line).data, - too_close_to_other=( - (distance_self_to_left < min_distance_to_neighbor_line) - | (distance_self_to_right < min_distance_to_neighbor_line) - ).data, + too_far_from_center=(distance_to_self > max_distance_from_streak_line), ) - da = da.assign_coords(t0=sc.values(t0).data) - da = da.bins.assign_coords(tof=(t - sc.values(t0)).data) + da = da.assign_coords(t0=sc.values(t0)) + da = da.bins.assign_coords(tof=(t - sc.values(t0))) return da def _linear_regression_by_bin( - x: sc.Variable, y: sc.Variable, w: 
sc.DataArray -) -> tuple[sc.DataArray, sc.DataArray]: + x: sc.Variable, y: sc.Variable, w: sc.Variable +) -> tuple[sc.Variable, sc.Variable]: + '''Performs a weighted linear regression of the points + in the binned variables ``x`` and ``y`` weighted by ``w``. + Returns ``b1`` and ``b0`` such that ``y = b1 * x + b0``. + ''' w = sc.values(w) tot_w = w.bins.sum() diff --git a/src/ess/beer/types.py b/src/ess/beer/types.py index 5de9ab10..1ac57719 100644 --- a/src/ess/beer/types.py +++ b/src/ess/beer/types.py @@ -18,7 +18,6 @@ class StreakClusteredData(sciline.Scope[RunType, sc.DataArray], sc.DataArray): MaxTimeOffset = NewType('MaxTimeOffset', sc.Variable) -MinTimeToNextStreak = NewType('MinTimeToNextStreak', sc.Variable) PeakList = NewType('PeakList', sc.Variable) diff --git a/src/ess/beer/workflow.py b/src/ess/beer/workflow.py index c335d587..5803ea75 100644 --- a/src/ess/beer/workflow.py +++ b/src/ess/beer/workflow.py @@ -11,7 +11,6 @@ DetectorData, Filename, MaxTimeOffset, - MinTimeToNextStreak, ModDt, ModShift, ModTwidth, @@ -31,7 +30,6 @@ def load_mcstas(fname: Filename[SampleRun]) -> DetectorData[SampleRun]: ModDt: sc.scalar(4.464e-4, unit='s'), Time0: sc.scalar(1.16 * 17 / 360 / 28, unit='s'), MaxTimeOffset: sc.scalar(3e-4, unit='s'), - MinTimeToNextStreak: sc.scalar(8e-4, unit='s'), } From 760628f16bac421ccdd96c49058a3d5b133ab372 Mon Sep 17 00:00:00 2001 From: Johannes Kasimir Date: Wed, 9 Jul 2025 11:36:19 +0200 Subject: [PATCH 18/29] docs: type annotations --- src/ess/beer/conversions.py | 32 +++++++++++++++++++++++++------- 1 file changed, 25 insertions(+), 7 deletions(-) diff --git a/src/ess/beer/conversions.py b/src/ess/beer/conversions.py index ee7a7a5d..bdd6f9b6 100644 --- a/src/ess/beer/conversions.py +++ b/src/ess/beer/conversions.py @@ -80,13 +80,22 @@ def _linear_regression_by_bin( return b1, b0 -def _compute_d(event_time_offset, theta, dhkl_list, mod_twidth, mod_shift, L0): +def _compute_d( + event_time_offset: sc.Variable, + theta: sc.Variable, + dhkl_list: sc.Variable, + mod_twidth: sc.Variable, + mod_shift: sc.Variable, + L0: sc.Variable, +) -> sc.Variable: + """Determines the ``d_hkl`` peak each event belongs to, + given a list of known peaks.""" + # Source: https://www2.mcstas.org/download/components/3.4/contrib/NPI_tof_dhkl_detector.comp sinth = sc.sin(theta) t = event_time_offset d = sc.empty(dims=sinth.dims, shape=sinth.shape, unit=dhkl_list[0].unit) d[:] = sc.scalar(float('nan'), unit=dhkl_list[0].unit) - dtfound = sc.empty(dims=sinth.dims, shape=sinth.shape, dtype='float64', unit=t.unit) dtfound[:] = sc.scalar(float('nan'), unit=t.unit) @@ -109,11 +118,20 @@ def _compute_d(event_time_offset, theta, dhkl_list, mod_twidth, mod_shift, L0): def _tof_from_dhkl( - event_time_offset, theta, coarse_dhkl, Ltotal, mod_shift, mod_dt, time0 -): - # tref = 2 * (1.0 + mod_shift) * d_hkl * sinth / hm * L0 - # tc = t - time0 - tref - # dt = np.floor(tc / mod_dt + 0.5) * mod_dt + time0 + event_time_offset: sc.Variable, + theta: sc.Variable, + coarse_dhkl: sc.Variable, + Ltotal: sc.Variable, + mod_shift: sc.Variable, + mod_dt: sc.Variable, + time0: sc.Variable, +) -> sc.Variable: + '''Computes tof for BEER given the dhkl peak that the event belongs to''' + # Source: https://www2.mcstas.org/download/components/3.4/contrib/NPI_tof_dhkl_detector.comp + # tref = 2 * (1.0 + mod_shift) * d_hkl * sin(theta) / hm * Ltotal + # tc = event_time_zero - time0 - tref + # dt = floor(tc / mod_dt + 0.5) * mod_dt + time0 + # tof = event_time_offset - dt c = (-2 * (1.0 + mod_shift) / 
(scipp.constants.h / scipp.constants.m_n)).to( unit=f'{event_time_offset.unit}/m/angstrom' ) From 81b7f8fcb38883644001da454e38eeb3ed174865 Mon Sep 17 00:00:00 2001 From: Johannes Kasimir Date: Wed, 9 Jul 2025 13:17:37 +0200 Subject: [PATCH 19/29] feat: make two theta mask user configurable --- src/ess/beer/io.py | 5 ----- src/ess/beer/types.py | 4 ++++ src/ess/beer/workflow.py | 22 ++++++++++++++++++++-- 3 files changed, 24 insertions(+), 7 deletions(-) diff --git a/src/ess/beer/io.py b/src/ess/beer/io.py index a9c148a9..7c19970c 100644 --- a/src/ess/beer/io.py +++ b/src/ess/beer/io.py @@ -85,11 +85,6 @@ def _load_beer_mcstas(f, bank=1): # TODO: approximate t0 should depend on chopper information da.coords['approximate_t0'] = sc.scalar(6e-3, unit='s') - # TODO: limits should be user configurable - da.masks['two_theta'] = ( - da.coords['two_theta'] >= sc.scalar(105, unit='deg').to(unit='rad') - ) | (da.coords['two_theta'] <= sc.scalar(75, unit='deg').to(unit='rad')) - da.coords['event_time_offset'] = da.coords.pop('t') return da diff --git a/src/ess/beer/types.py b/src/ess/beer/types.py index 1ac57719..d127c319 100644 --- a/src/ess/beer/types.py +++ b/src/ess/beer/types.py @@ -1,3 +1,4 @@ +from collections.abc import Callable from typing import NewType import sciline @@ -18,6 +19,9 @@ class StreakClusteredData(sciline.Scope[RunType, sc.DataArray], sc.DataArray): MaxTimeOffset = NewType('MaxTimeOffset', sc.Variable) +TwoThetaMaskFunction = NewType( + 'TwoThetaMaskFunction', Callable[[sc.Variable], sc.Variable] +) PeakList = NewType('PeakList', sc.Variable) diff --git a/src/ess/beer/workflow.py b/src/ess/beer/workflow.py index 5803ea75..77f21e54 100644 --- a/src/ess/beer/workflow.py +++ b/src/ess/beer/workflow.py @@ -17,11 +17,25 @@ RunType, SampleRun, Time0, + TwoThetaMaskFunction, ) -def load_mcstas(fname: Filename[SampleRun]) -> DetectorData[SampleRun]: - return DetectorData[SampleRun](load_beer_mcstas(fname)) +def load_mcstas( + fname: Filename[SampleRun], two_theta_mask: TwoThetaMaskFunction +) -> DetectorData[SampleRun]: + da = DetectorData[SampleRun](load_beer_mcstas(fname)) + da = ( + sc.DataGroup( + { + k: v.assign_masks(two_theta=two_theta_mask(v.coords['two_theta'])) + for k, v in da.items() + } + ) + if isinstance(da, sc.DataGroup) + else da.assign_masks(two_theta=two_theta_mask(da.coords['two_theta'])) + ) + return da default_parameters = { @@ -30,6 +44,10 @@ def load_mcstas(fname: Filename[SampleRun]) -> DetectorData[SampleRun]: ModDt: sc.scalar(4.464e-4, unit='s'), Time0: sc.scalar(1.16 * 17 / 360 / 28, unit='s'), MaxTimeOffset: sc.scalar(3e-4, unit='s'), + TwoThetaMaskFunction: lambda two_theta: ( + (two_theta >= sc.scalar(105, unit='deg').to(unit='rad', dtype='float64')) + | (two_theta <= sc.scalar(75, unit='deg').to(unit='rad', dtype='float64')) + ), } From 9eb489f550418c3745a098afc8df2b9fe413118a Mon Sep 17 00:00:00 2001 From: Johannes Kasimir Date: Wed, 9 Jul 2025 13:19:06 +0200 Subject: [PATCH 20/29] fix --- src/ess/beer/workflow.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/ess/beer/workflow.py b/src/ess/beer/workflow.py index 77f21e54..4ff1e793 100644 --- a/src/ess/beer/workflow.py +++ b/src/ess/beer/workflow.py @@ -24,7 +24,7 @@ def load_mcstas( fname: Filename[SampleRun], two_theta_mask: TwoThetaMaskFunction ) -> DetectorData[SampleRun]: - da = DetectorData[SampleRun](load_beer_mcstas(fname)) + da = load_beer_mcstas(fname) da = ( sc.DataGroup( { @@ -35,7 +35,7 @@ def load_mcstas( if isinstance(da, sc.DataGroup) else 
da.assign_masks(two_theta=two_theta_mask(da.coords['two_theta'])) ) - return da + return DetectorData[SampleRun](da) default_parameters = { From 460103ef4dfc87cd76de4dbfa19fbcc86ca0a75e Mon Sep 17 00:00:00 2001 From: Johannes Kasimir Date: Wed, 9 Jul 2025 14:59:14 +0200 Subject: [PATCH 21/29] typo --- docs/user-guide/beer/beer_mcstas.ipynb | 1 - 1 file changed, 1 deletion(-) diff --git a/docs/user-guide/beer/beer_mcstas.ipynb b/docs/user-guide/beer/beer_mcstas.ipynb index eaf6e49f..14a0d8c4 100644 --- a/docs/user-guide/beer/beer_mcstas.ipynb +++ b/docs/user-guide/beer/beer_mcstas.ipynb @@ -150,7 +150,6 @@ "source": [ "%%time\n", "wf[MaxTimeOffset] = sc.scalar(6e-4, unit='s')\n", - "#wf[MinTimeToNextStreak] = sc.scalar(8e-4/10, unit='s')\n", "da = wf.compute(DetectorTofData[SampleRun])\n", "da = da.transform_coords(('dspacing',), graph=scn.conversion.graph.tof.elastic('tof'),)\n", "p = da.hist(dspacing=dspacing, dim=da.dims).plot()\n", From 103acddbbe470c23b93f41ce23dfee876f1fe7b8 Mon Sep 17 00:00:00 2001 From: Johannes Kasimir Date: Wed, 9 Jul 2025 15:02:43 +0200 Subject: [PATCH 22/29] tests: can apply workflows and get sensible result --- tests/beer/mcstas_reduction_test.py | 53 +++++++++++++++++++++++++++++ 1 file changed, 53 insertions(+) create mode 100644 tests/beer/mcstas_reduction_test.py diff --git a/tests/beer/mcstas_reduction_test.py b/tests/beer/mcstas_reduction_test.py new file mode 100644 index 00000000..e12cb6a8 --- /dev/null +++ b/tests/beer/mcstas_reduction_test.py @@ -0,0 +1,53 @@ +import numpy as np +import scipp as sc +import scippneutron as scn +from scipp.testing import assert_allclose + +from ess.beer import BeerMcStasWorkflow, BeerMcStasWorkflowKnownPeaks +from ess.beer.data import duplex_peaks_array, mcstas_duplex +from ess.beer.types import DHKLList +from ess.reduce.nexus.types import Filename, SampleRun +from ess.reduce.time_of_flight.types import DetectorTofData + + +def test_can_reduce_using_known_peaks_workflow(): + wf = BeerMcStasWorkflowKnownPeaks() + wf[DHKLList] = duplex_peaks_array() + wf[Filename[SampleRun]] = mcstas_duplex(7) + da = wf.compute(DetectorTofData[SampleRun]) + assert 'bank1' in da + assert 'bank2' in da + da = da['bank1'] + assert 'tof' in da.coords + # assert dataarray has all coords required to compute dspacing + da = da.transform_coords( + ('dspacing',), + graph=scn.conversion.graph.tof.elastic('tof'), + ) + h = da.hist(dspacing=2000, dim=da.dims) + max_peak_d = sc.midpoints(h['dspacing', np.argmax(h.values)].coords['dspacing'])[0] + assert_allclose( + max_peak_d, + sc.scalar(2.0407, unit='angstrom'), + atol=sc.scalar(1e-3, unit='angstrom'), + ) + + +def test_can_reduce_using_unknown_peaks_workflow(): + wf = BeerMcStasWorkflow() + wf[Filename[SampleRun]] = mcstas_duplex(7) + da = wf.compute(DetectorTofData[SampleRun]) + assert 'bank1' in da + assert 'bank2' in da + da = da['bank1'] + da = da.transform_coords( + ('dspacing',), + graph=scn.conversion.graph.tof.elastic('tof'), + ) + h = da.hist(dspacing=2000, dim=da.dims) + max_peak_d = sc.midpoints(h['dspacing', np.argmax(h.values)].coords['dspacing'])[0] + assert_allclose( + max_peak_d, + sc.scalar(2.0407, unit='angstrom'), + atol=sc.scalar(1e-3, unit='angstrom'), + ) From 49ec31a7d5f6fd09174e265d7ff55cb9ccce5d07 Mon Sep 17 00:00:00 2001 From: Johannes Kasimir Date: Wed, 9 Jul 2025 16:03:41 +0200 Subject: [PATCH 23/29] test: relax tolerance --- tests/beer/mcstas_reduction_test.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/beer/mcstas_reduction_test.py 
b/tests/beer/mcstas_reduction_test.py index e12cb6a8..b0068dec 100644 --- a/tests/beer/mcstas_reduction_test.py +++ b/tests/beer/mcstas_reduction_test.py @@ -29,7 +29,7 @@ def test_can_reduce_using_known_peaks_workflow(): assert_allclose( max_peak_d, sc.scalar(2.0407, unit='angstrom'), - atol=sc.scalar(1e-3, unit='angstrom'), + atol=sc.scalar(1e-2, unit='angstrom'), ) @@ -49,5 +49,5 @@ def test_can_reduce_using_unknown_peaks_workflow(): assert_allclose( max_peak_d, sc.scalar(2.0407, unit='angstrom'), - atol=sc.scalar(1e-3, unit='angstrom'), + atol=sc.scalar(1e-2, unit='angstrom'), ) From 038d80d0adee0c1bbf7aa9f2feae4d4077103562 Mon Sep 17 00:00:00 2001 From: Johannes Kasimir Date: Wed, 9 Jul 2025 16:04:11 +0200 Subject: [PATCH 24/29] docs: compare to true peak positions for both workflows --- docs/user-guide/beer/beer_mcstas.ipynb | 51 +++++++++++++------------- 1 file changed, 25 insertions(+), 26 deletions(-) diff --git a/docs/user-guide/beer/beer_mcstas.ipynb b/docs/user-guide/beer/beer_mcstas.ipynb index 14a0d8c4..472437b0 100644 --- a/docs/user-guide/beer/beer_mcstas.ipynb +++ b/docs/user-guide/beer/beer_mcstas.ipynb @@ -34,7 +34,17 @@ "from ess.reduce.time_of_flight.types import DetectorTofData\n", "from ess.beer.types import *\n", "\n", - "dspacing = sc.linspace('dspacing', 0.8, 2.2, 2000, unit='angstrom')" + "dspacing = sc.linspace('dspacing', 0.8, 2.2, 2000, unit='angstrom')\n", + "\n", + "def duplex_ground_truth_lines(p):\n", + " 'Helper to display the true peak positions for comparison'\n", + " p.ax.vlines(duplex_peaks_array().values, 0, 20000, linestyle='--', color='black', lw=0.5, label='true $d_{hkl}$')\n", + " return p\n", + "\n", + "def silicon_ground_truth_lines(p):\n", + " 'Helper to display the true peak positions for comparison'\n", + " p.ax.vlines(silicon_peaks_array().values, 0, 20000, linestyle='--', color='black', lw=0.5, label='true $d_{hkl}$')\n", + " return p" ] }, { @@ -50,7 +60,7 @@ "wf[Filename[SampleRun]] = mcstas_duplex(9)\n", "da = wf.compute(DetectorTofData[SampleRun])\n", "da = da.transform_coords(('dspacing',), graph=scn.conversion.graph.tof.elastic('tof'),)\n", - "da.hist(dspacing=dspacing).plot()" + "duplex_ground_truth_lines(da.hist(dspacing=dspacing).plot())" ] }, { @@ -65,9 +75,7 @@ "wf[Filename[SampleRun]] = mcstas_duplex(9)\n", "da = wf.compute(DetectorTofData[SampleRun])\n", "da = da.transform_coords(('dspacing',), graph=scn.conversion.graph.tof.elastic('tof'),)\n", - "p = da.hist(dspacing=dspacing, dim=da.dims).plot()\n", - "p.ax.vlines(duplex_peaks_array().values, 0, 20000, linestyle='--', color='black', lw=0.5, label='true $d_{hkl}$')\n", - "p" + "duplex_ground_truth_lines(da.hist(dspacing=dspacing, dim=da.dims).plot())" ] }, { @@ -83,7 +91,8 @@ "wf[Filename[SampleRun]] = mcstas_silicon_medium_resolution()\n", "da = wf.compute(DetectorTofData[SampleRun])\n", "da = da.transform_coords(('dspacing',), graph=scn.conversion.graph.tof.elastic('tof'),)\n", - "da.hist(dspacing=dspacing).plot()" + "da = da.transform_coords(('dspacing',), graph=scn.conversion.graph.tof.elastic('tof'),)\n", + "silicon_ground_truth_lines(da.hist(dspacing=dspacing).plot())" ] }, { @@ -98,9 +107,7 @@ "wf[Filename[SampleRun]] = mcstas_silicon_medium_resolution()\n", "da = wf.compute(DetectorTofData[SampleRun])\n", "da = da.transform_coords(('dspacing',), graph=scn.conversion.graph.tof.elastic('tof'),)\n", - "p = da.hist(dspacing=dspacing, dim=da.dims).plot()\n", - "p.ax.vlines(silicon_peaks_array().values, 0, 20000, linestyle='--', color='black', lw=0.5, label='true 
$d_{hkl}$')\n", - "p" + "silicon_ground_truth_lines(da.hist(dspacing=dspacing, dim=da.dims).plot())" ] }, { @@ -187,9 +194,7 @@ "wf[MaxTimeOffset] = sc.scalar(6e-4, unit='s')\n", "da = wf.compute(DetectorTofData[SampleRun])\n", "da = da.transform_coords(('dspacing',), graph=scn.conversion.graph.tof.elastic('tof'),)\n", - "p = da.hist(dspacing=dspacing, dim=da.dims).plot()\n", - "p.ax.vlines(duplex_peaks_array().values, 0, 20000, linestyle='--', color='black', lw=0.5, label='true $d_{hkl}$')\n", - "p" + "duplex_ground_truth_lines(da.hist(dspacing=dspacing, dim=da.dims).plot())" ] }, { @@ -221,9 +226,7 @@ "source": [ "da = wf.compute(DetectorTofData[SampleRun])\n", "da = da.transform_coords(('dspacing',), graph=scn.conversion.graph.tof.elastic('tof'),)\n", - "p = da.hist(dspacing=dspacing, dim=da.dims).plot()\n", - "p.ax.vlines(duplex_peaks_array().values, 0, 20000, linestyle='--', color='black', lw=0.5, label='true $d_{hkl}$')\n", - "p" + "duplex_ground_truth_lines(da.hist(dspacing=dspacing, dim=da.dims).plot())" ] }, { @@ -270,9 +273,7 @@ "wf[MaxTimeOffset] = sc.scalar(3e-4/2, unit='s')\n", "da = wf.compute(DetectorTofData[SampleRun])\n", "da = da.transform_coords(('dspacing',), graph=scn.conversion.graph.tof.elastic('tof'),)\n", - "p = da.hist(dspacing=dspacing, dim=da.dims).plot()\n", - "p.ax.vlines(duplex_peaks_array().values, 0, 20000, linestyle='--', color='black', lw=0.5, label='true $d_{hkl}$')\n", - "p" + "duplex_ground_truth_lines(da.hist(dspacing=dspacing, dim=da.dims).plot())" ] }, { @@ -319,9 +320,7 @@ "wf[MaxTimeOffset] = sc.scalar(3e-4/2, unit='s')\n", "da = wf.compute(DetectorTofData[SampleRun])\n", "da = da.transform_coords(('dspacing',), graph=scn.conversion.graph.tof.elastic('tof'),)\n", - "p = da.hist(dspacing=dspacing, dim=da.dims).plot()\n", - "p.ax.vlines(duplex_peaks_array().values, 0, 20000, linestyle='--', color='black', lw=0.5, label='true $d_{hkl}$')\n", - "p" + "duplex_ground_truth_lines(da.hist(dspacing=dspacing, dim=da.dims).plot())" ] }, { @@ -366,7 +365,7 @@ "wf[Filename[SampleRun]] = mcstas_duplex(7)\n", "da = wf.compute(DetectorTofData[SampleRun])\n", "da = da.transform_coords(('dspacing',), graph=scn.conversion.graph.tof.elastic('tof'),)\n", - "da.hist(dspacing=dspacing).plot()" + "duplex_ground_truth_lines(da.hist(dspacing=dspacing).plot())" ] }, { @@ -389,7 +388,7 @@ "wf[Filename[SampleRun]] = mcstas_duplex(8)\n", "da = wf.compute(DetectorTofData[SampleRun])\n", "da = da.transform_coords(('dspacing',), graph=scn.conversion.graph.tof.elastic('tof'),)\n", - "da.hist(dspacing=dspacing).plot()" + "duplex_ground_truth_lines(da.hist(dspacing=dspacing).plot())" ] }, { @@ -412,7 +411,7 @@ "wf[Filename[SampleRun]] = mcstas_duplex(9)\n", "da = wf.compute(DetectorTofData[SampleRun])\n", "da = da.transform_coords(('dspacing',), graph=scn.conversion.graph.tof.elastic('tof'),)\n", - "da.hist(dspacing=dspacing).plot()" + "duplex_ground_truth_lines(da.hist(dspacing=dspacing).plot())" ] }, { @@ -435,7 +434,7 @@ "wf[Filename[SampleRun]] = mcstas_duplex(10)\n", "da = wf.compute(DetectorTofData[SampleRun])\n", "da = da.transform_coords(('dspacing',), graph=scn.conversion.graph.tof.elastic('tof'),)\n", - "da.hist(dspacing=dspacing).plot()" + "duplex_ground_truth_lines(da.hist(dspacing=dspacing).plot())" ] }, { @@ -459,7 +458,7 @@ "wf[Filename[SampleRun]] = mcstas_duplex(16)\n", "da = wf.compute(DetectorTofData[SampleRun])\n", "da = da.transform_coords(('dspacing',), graph=scn.conversion.graph.tof.elastic('tof'),)\n", - "da.hist(dspacing=dspacing).plot()" + 
"duplex_ground_truth_lines(da.hist(dspacing=dspacing).plot())" ] } ], From eaf582aed099e27c419d2537303c3ae597b32026 Mon Sep 17 00:00:00 2001 From: Johannes Kasimir Date: Tue, 15 Jul 2025 10:18:09 +0200 Subject: [PATCH 25/29] fix: remove unused coordinates --- src/ess/beer/io.py | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/src/ess/beer/io.py b/src/ess/beer/io.py index 7c19970c..95a4bffa 100644 --- a/src/ess/beer/io.py +++ b/src/ess/beer/io.py @@ -45,21 +45,24 @@ def _load_beer_mcstas(f, bank=1): sc.array(dims=['events'], values=events[:, 0], variances=events[:, 0] ** 2), ) for name, value in data.attrs.items(): - da.coords[name] = sc.scalar(value.decode()) + if name in ('position',): + da.coords[name] = sc.scalar(value.decode()) for i, label in enumerate(data.attrs["ylabel"].decode().strip().split(' ')): if label == 'p': continue da.coords[label] = sc.array(dims=['events'], values=events[:, i]) + for k, v in params.items(): v = v[0] if isinstance(v, bytes): v = v.decode() - da.coords[k] = sc.scalar(v) + if k in ('mode', 'sample_filename'): + da.coords[k] = sc.scalar(v) da.coords['sample_position'] = sc.vector(sample_pos[:], unit='m') da.coords['detector_position'] = sc.vector( - list(map(float, da.coords['position'].value.split(' '))), unit='m' + list(map(float, da.coords.pop('position').value.split(' '))), unit='m' ) da.coords['chopper_position'] = sc.vector(chopper_pos[:], unit='m') da.coords['x'].unit = 'm' @@ -80,7 +83,6 @@ def _load_beer_mcstas(f, bank=1): da.coords.pop('x') da.coords.pop('y') da.coords.pop('n') - da.coords.pop('id') # TODO: approximate t0 should depend on chopper information da.coords['approximate_t0'] = sc.scalar(6e-3, unit='s') From 13976b87a04680b6d2281645475ca2005213a054 Mon Sep 17 00:00:00 2001 From: Johannes Kasimir Date: Tue, 15 Jul 2025 10:34:54 +0200 Subject: [PATCH 26/29] fix: move approximate t0 to clustering routine where it is used --- src/ess/beer/clustering.py | 6 +++++- src/ess/beer/io.py | 3 --- 2 files changed, 5 insertions(+), 4 deletions(-) diff --git a/src/ess/beer/clustering.py b/src/ess/beer/clustering.py index 1e3c15c7..73e1f1bb 100644 --- a/src/ess/beer/clustering.py +++ b/src/ess/beer/clustering.py @@ -9,10 +9,14 @@ def cluster_events_by_streak(da: DetectorData[RunType]) -> StreakClusteredData[R if isinstance(da, sc.DataGroup): return sc.DataGroup({k: cluster_events_by_streak(v) for k, v in da.items()}) da = da.copy(deep=False) + + # TODO: approximate t0 should depend on chopper information + approximate_t0 = sc.scalar(6e-3, unit='s') + da.coords['coarse_d'] = ( constants.h / constants.m_n - * (da.coords['event_time_offset'] - da.coords['approximate_t0']) + * (da.coords['event_time_offset'] - approximate_t0) / sc.sin(da.coords['two_theta'] / 2) / da.coords['L0'] / 2 diff --git a/src/ess/beer/io.py b/src/ess/beer/io.py index 95a4bffa..fa85573a 100644 --- a/src/ess/beer/io.py +++ b/src/ess/beer/io.py @@ -84,9 +84,6 @@ def _load_beer_mcstas(f, bank=1): da.coords.pop('y') da.coords.pop('n') - # TODO: approximate t0 should depend on chopper information - da.coords['approximate_t0'] = sc.scalar(6e-3, unit='s') - da.coords['event_time_offset'] = da.coords.pop('t') return da From f81231696e6510d57b149ce8caf2a3dd21556b28 Mon Sep 17 00:00:00 2001 From: Johannes Kasimir Date: Fri, 18 Jul 2025 11:27:16 +0200 Subject: [PATCH 27/29] docs: update si example file --- src/ess/beer/data.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/ess/beer/data.py b/src/ess/beer/data.py index 942afbe5..8c41595e 
100644 --- a/src/ess/beer/data.py +++ b/src/ess/beer/data.py @@ -23,7 +23,7 @@ def _make_pooch(): "duplex-mode09.h5": "md5:ebb3f9694ffdd0949f342bd0deaaf627", "duplex-mode10.h5": "md5:559e7fc0cce265f5102520e382ee5b26", "duplex-mode16.h5": "md5:2ccd05832bbc8a087a731b37364b995d", - "silicon.h5": "md5:3425ae2b2fe7a938c6f0a4afeb9e0819", + "silicon-mode09.h5": "md5:aa068d46dc7efc303b68a13e527e2607", "silicon-dhkl.tab": "md5:90bedceb23245b045ce1ed0170c1313b", "duplex-dhkl.tab": "md5:b4c6c2fcd66466ad291f306b2d6b346e", }, @@ -45,7 +45,7 @@ def mcstas_silicon_medium_resolution() -> str: Simulated intensity from silicon sample with medium resolution chopper configuration. """ - return _pooch.fetch('silicon.h5') + return _pooch.fetch('silicon-mode09.h5') def duplex_peaks() -> str: From f44f0ebcba3254f9265e389ab6985adbedc48521 Mon Sep 17 00:00:00 2001 From: Johannes Kasimir Date: Thu, 24 Jul 2025 14:53:35 +0200 Subject: [PATCH 28/29] fix: don't read data several times --- src/ess/beer/io.py | 1 + 1 file changed, 1 insertion(+) diff --git a/src/ess/beer/io.py b/src/ess/beer/io.py index fa85573a..d42e1cd1 100644 --- a/src/ess/beer/io.py +++ b/src/ess/beer/io.py @@ -41,6 +41,7 @@ def _load_beer_mcstas(f, bank=1): '/entry1/instrument/components/0189_sampleMantid/Position', '/entry1/instrument/components/0017_cMCA/Position', ) + events = events[()] da = sc.DataArray( sc.array(dims=['events'], values=events[:, 0], variances=events[:, 0] ** 2), ) From eb92d7f4fd13f159291e0be4b0ec4689b3d3617e Mon Sep 17 00:00:00 2001 From: Johannes Kasimir Date: Mon, 28 Jul 2025 16:05:57 +0200 Subject: [PATCH 29/29] docs: add tof-diagram notebook --- docs/user-guide/beer/tof_diagrams.ipynb | 724 ++++++++++++++ pyproject.toml | 3 + src/ess/beer/choppers.py | 1200 +++++++++++++++++++++++ 3 files changed, 1927 insertions(+) create mode 100644 docs/user-guide/beer/tof_diagrams.ipynb create mode 100644 src/ess/beer/choppers.py diff --git a/docs/user-guide/beer/tof_diagrams.ipynb b/docs/user-guide/beer/tof_diagrams.ipynb new file mode 100644 index 00000000..d328c202 --- /dev/null +++ b/docs/user-guide/beer/tof_diagrams.ipynb @@ -0,0 +1,724 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "id": "0", + "metadata": { + "editable": true, + "slideshow": { + "slide_type": "" + }, + "tags": [] + }, + "source": [ + "# BEER TOF diagram\n", + "\n", + "This notebook serves to check the TOF diagrams of the BEER instrument chopper cascade. \n", + "The operation modes are identical to the ones defined in ESS-0238217 - *Neutron Optics Report for the BEER Instrument*. \n", + "\n", + "The definition of the chopper modes, the plotting and calculating functions are located in a separate Python file, which is imported below (`beer_choppers_tof`). The predefined chopper mode can be listed and changed, or you can create a new one. The position of the monitor can also be modified.\n", + "\n", + "## Basic description\n", + "### Python packages\n", + "\n", + "This Jupyter notebook uses the following packages for plotting and interactions: `matplotlib` and `ipywidgets`.\n", + "\n", + "### BEER choppers TOF package\n", + "\n", + "In the `beer_choppers_tof` package, there is a dependency on `scipp`, `scippneutron`, `plopp` and `tof` packages, which can be installed via `pip install scippneutron tof`. The necessary version of the `tof` package to work with is >= **25.5.0**. 
\n", + "\n", + "*Note:* If you use **conda**, use the appropriate command for installing the package into that environment.\n", + "\n", + "When the package is imported, it defines and fills up some general variables which can be used:
\n", + "`class beer_mode` - a structure which holds the defined chopper setting for the operation mode (see below)
\n", + "`modes` - list of all predefined chopper modes
\n", + "`detectors` - predefined detectors setting
\n", + "`lambda_0` - mean wavelength
\n", + "\n", + "**Definition of the units:**
\n", + "`Hz` - Hertz
\n", + "`deg` - degree
\n", + "`m` - meter
\n", + "`A` - Angstrom
\n", + "`s` - seconds
\n", + "Don't use the variables with the same names.\n", + "\n", + "### Useful functions\n", + "`load_modes` - recreates/resets `modes` with the predefined ones
\n", + "*Note: this function is called automatically during the import of the package `beer_choppers_tof`*
\n", + "`print_modes_info`, `print_choppers_info`, `print_detectors_info` - print info about available items
\n", + "`draw_chopper` - draws graphical representation of the chopper
\n", + "`run_and_plot_mode` - runs the simulation and plots the TOF diagram
\n", + "`plot_choppers` - provides output from `run_and_plot_mode` and plots the choppers info
\n", + "`plot_detectors` - provides output from `run_and_plot_mode` and plots the detector info
\n", + "`get_wave_for_all_modes` - calculates the TOF from all the modes and stores the information of the wavelength distribution for the future comparison
\n", + "`plot_comparison` - takes the results from `get_wave_for_all_modes` and plots several modes and/or pulses together\n", + "`plot_ess_pulse` - plots the ESS pulse structure
\n", + "\n", + "*Note:* All the indexes are zero-based! \n", + "\n", + "## Loading of the basic packages" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "1", + "metadata": {}, + "outputs": [], + "source": [ + "# ruff: noqa\n", + "from ipywidgets import widgets, interactive\n", + "import matplotlib.pyplot as plt\n", + "import numpy as np\n", + "import ess.beer.choppers as beer\n", + "\n", + "%matplotlib widget" + ] + }, + { + "cell_type": "markdown", + "id": "2", + "metadata": {}, + "source": [ + "## The ESS pulse\n", + "Visualisation of the ESS pulse structure." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "3", + "metadata": {}, + "outputs": [], + "source": [ + "save_ess_button = widgets.Button(description='Save to PDF')\n", + "data_ess_button = widgets.Button(description='Save data')\n", + "output_ess = widgets.Output()\n", + "\n", + "# display the selection, buttons and output\n", + "display(widgets.HBox([save_ess_button, data_ess_button]), output_ess)\n", + "\n", + "with output_ess:\n", + " ess_pulse = beer.plot_ess_pulse()\n", + "\n", + "def save_ess_on_click(b):\n", + " \"\"\" Save plot to PDF \"\"\" \n", + " with output_ess:\n", + " ess_pulse.savefig('ESS_source_pulse.pdf', format='pdf')\n", + " print('Figure saved to PDF.')\n", + " \n", + "def data_ess_on_click(b):\n", + " \"\"\" Save plotted data to ASCII file \"\"\"\n", + " with output_ess:\n", + " for ax in ess_pulse.axes:\n", + " xl = ax.xaxis.get_label().get_text().split(' ')[0]\n", + " yl = ax.yaxis.get_label().get_text().split(':')[0]\n", + " a = np.array(ax.lines[0].get_xydata())\n", + " fl = f'ESS-pulse-{xl}_{yl}.dat'\n", + " np.savetxt(fl, a, delimiter=\" \")\n", + " print(f'Data saved to \"{fl}\".')\n", + "\n", + "save_ess_button.on_click(save_ess_on_click)\n", + "data_ess_button.on_click(data_ess_on_click)" + ] + }, + { + "cell_type": "markdown", + "id": "4", + "metadata": {}, + "source": [ + "## Pre-loaded modes\n", + "Summary information of all standard pre-loaded modes of the BEER instrument. \n", + "You can list all the modes when not providing any parameter to the function or put ```index``` or ```mode_id``` string to get specific mode information.\n", + "\n", + "*Examples:*\n", + "\n", + "```beer.print_modes_info('M2')```\n", + "\n", + "```beer.print_modes_info('4')```\n", + "\n", + "```beer.print_modes_info('all')```" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "5", + "metadata": {}, + "outputs": [], + "source": [ + "beer.print_modes_info('all')" + ] + }, + { + "cell_type": "markdown", + "id": "6", + "metadata": {}, + "source": [ + "## Chopper visualisation\n", + "`beer.print_choppers_info` prints a list of all choppers in the selected mode, and `beer.draw_chopper` shows the graphic representation of the chopper disk. The choppers are drawn in the time t$_0$ configuration.
\n", + "\n", + "*Usage:* Change the indexes as necessary. If you want to draw more choppers, copy and adapt the code in the next cell. \n", + "\n", + "*Examples:* \n", + "```beer.print_choppers_info(5)``` \n", + "```beer.draw_chopper(5, 1)```" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "7", + "metadata": {}, + "outputs": [], + "source": [ + "mode_index = beer.get_mode_index('PS2') # getting mode index by string\n", + "chopper_index = beer.get_chopper_index(mode_index, 'PSC2') # getting chopper index by name\n", + "\n", + "beer.print_choppers_info(mode_index) # list all choppers in the mode\n", + "beer.draw_chopper(mode_index, chopper_index) # draw the chopper" + ] + }, + { + "cell_type": "markdown", + "id": "8", + "metadata": {}, + "source": [ + "## Show TOF diagram\n", + "\n", + "### Mode updates\n", + "In BEER basic setup, the **FC1A** chopper has a frequency of 28 Hz. This setting allows penetration of the neutrons with a wavelength above 15 Å to go through the chopper cascade, each 2nd pulse via **FC1A** and each 6th pulse via **FC2A**. \n", + "\n", + "The intensity of those long-wavelength neutrons is very small. You can omit it by setting ```wave_max``` to 15 Å. \n", + "To fully suppress the overlap, **FC1A** could be run with a frequency of 14 Hz. The code to change the setting of the frequency is implemented below and can be uncommented when necessary. One can also play with the chopper additional shift if necessary.
\n", + "\n", + "*Note:* Be aware that the change in the frequency of the mode affects the other results visualisation. To reload predefined modes use `beer.load_modes()` function.\n", + "\n", + "You can skip the following cell if you want to continue with the original modes." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "9", + "metadata": {}, + "outputs": [], + "source": [ + "beer.load_modes()\n", + "##### Code to change the frequency of the FC1A chopper ######\n", + "# For the Pulse shaping modes affected, the frequency of the FC1A chopper \n", + "# has to be changed to 14 Hz from 28 Hz, no shift of the offset is needed\n", + "new_fc1a_freq = 14.\n", + "\n", + "modes = ['PS1', 'PS2', 'PS3']\n", + "choppers = ['FC1A']\n", + "\n", + "for mode in modes:\n", + " m_index = beer.get_mode_index(mode)\n", + " for chopper in choppers:\n", + " ch_index = beer.get_chopper_index(m_index, chopper)\n", + " beer.modes[m_index].choppers[ch_index].frequency = new_fc1a_freq * beer.Hz # change the frequency\n", + "\n", + "# For the Modulation modes affected, the frequency of the FC1A chopper \n", + "# has to be changed to 14 Hz, and the shift of the offset to 15º from the original 6º\n", + "modes = ['8X - M0', '8X - M2', '8X - M3', '16X - M0', '16X - M2', '16X - M3']\n", + "choppers = ['FC1A']\n", + "\n", + "for mode in modes:\n", + " m_index = beer.get_mode_index(mode)\n", + " for chopper in choppers:\n", + " ch_index = beer.get_chopper_index(m_index, chopper)\n", + " beer.modes[m_index].choppers[ch_index].frequency = new_fc1a_freq * beer.Hz # change the frequency\n", + " off = 15*beer.deg + beer.modes[m_index].choppers[ch_index].close[0]/2\n", + " beer.modes[m_index].chopper_offset[ch_index] = off # change the offset\n", + "\n", + "# recalculate the chopper phase shifts\n", + "for mode in beer.modes:\n", + " mode.update_chopper_phases()\n", + "##############################################################\n", + "\n", + "###### Adding some testing modes ######\n", + "###### First additional mode\n", + "fMC = 84.\n", + "mode = beer.beer_mode(2.1 * beer.A)\n", + "mode.caption = 'modulation (HF) 4X - M0+M1'\n", + "# distance freq. offset title opening/closing\n", + "mode.add_chopper(8.283*beer.m, new_fc1a_freq*beer.Hz, (72/2+15)*beer.deg, 'FC1A', [0], [72])\n", + "mode.add_chopper(9.300*beer.m, (fMC)*beer.Hz, 2.5*beer.deg, 'MCA',\n", + " list(on for on in np.arange(0.0, 360, 45)),\n", + " list(on for on in np.arange(5.0, 360, 45)))\n", + "mode.add_chopper(9.350*beer.m, (fMC/4)*beer.Hz, 2.5*beer.deg, 'MCB',\n", + " list(on for on in np.arange(0.0, 360, 22.5)),\n", + " list(on for on in np.arange(5.0, 360, 22.5)))\n", + "mode.add_chopper(79.975*beer.m, 14*beer.Hz, 175/2*beer.deg, 'FC2A', [0], [175])\n", + "modes.append(mode)\n", + "\n", + "beer.modes.append(mode)\n", + "\n", + "###### Second additional mode\n", + "fMC = 140.\n", + "mode = beer.beer_mode(2.1 * beer.A)\n", + "mode.caption = 'modulation (MR) 4X - M2'\n", + "# distance freq. 
offset title opening/closing\n", + "mode.add_chopper(8.283*beer.m, new_fc1a_freq*beer.Hz, (72/2+15)*beer.deg, 'FC1A', [0], [72])\n", + "mode.add_chopper(9.300*beer.m, fMC*beer.Hz, 2.5*beer.deg, 'MCA',\n", + " list(on for on in np.arange(0.0, 360, 45)),\n", + " list(on for on in np.arange(5.0, 360, 45)))\n", + "mode.add_chopper(9.350*beer.m, (fMC/4)*beer.Hz, 2.5*beer.deg, 'MCB',\n", + " list(on for on in np.arange(0.0, 360, 22.5)),\n", + " list(on for on in np.arange(5.0, 360, 22.5)))\n", + "mode.add_chopper(79.975*beer.m, 14*beer.Hz, 175/2*beer.deg, 'FC2A', [0], [175])\n", + "modes.append(mode)\n", + "\n", + "beer.modes.append(mode)\n", + "\n", + "###### Third additional mode\n", + "fMC = 280.\n", + "mode = beer.beer_mode(2.1 * beer.A)\n", + "mode.caption = 'modulation (HR) 4X - M3'\n", + "# distance freq. offset title opening/closing\n", + "mode.add_chopper(8.283*beer.m, new_fc1a_freq*beer.Hz, (72/2+15)*beer.deg, 'FC1A', [0], [72])\n", + "mode.add_chopper(9.300*beer.m, fMC*beer.Hz, 2.5*beer.deg, 'MCA',\n", + " list(on for on in np.arange(0.0, 360, 45)),\n", + " list(on for on in np.arange(5.0, 360, 45)))\n", + "mode.add_chopper( 9.350*beer.m, (fMC/4)*beer.Hz, 2.5*beer.deg, 'MCB',\n", + " list(on for on in np.arange(0.0, 360, 22.5)),\n", + " list(on for on in np.arange(5.0, 360, 22.5)))\n", + "mode.add_chopper(79.975*beer.m, 14.*beer.Hz, 175/2*beer.deg, 'FC2A', [0], [175])\n", + "modes.append(mode)\n", + "\n", + "beer.modes.append(mode)\n", + "\n", + "###### Fourth additional mode\n", + "fMC = 280.\n", + "mode = beer.beer_mode(2.1 * beer.A)\n", + "mode.caption = 'modulation (HR) 4X (JS) - M3'\n", + "# distance freq. offset title opening/closing\n", + "mode.add_chopper( 8.283*beer.m, new_fc1a_freq*beer.Hz, (72/2+15)*beer.deg, 'FC1A', [0], [72])\n", + "mode.add_chopper( 9.300*beer.m, (fMC/2)*beer.Hz, 2.5*beer.deg, 'MCA',\n", + " list(on for on in np.arange(0.0, 360, 45)),\n", + " list(on for on in np.arange(5.0, 360, 45)))\n", + "mode.add_chopper( 9.350*beer.m, fMC*beer.Hz, 2.5*beer.deg, 'MCB',\n", + " list(on for on in np.arange(0.0, 360, 22.5)),\n", + " list(on for on in np.arange(5.0, 360, 22.5)))\n", + "mode.add_chopper(79.975*beer.m, 14*beer.Hz, 175/2*beer.deg, 'FC2A', [0], [175])\n", + "modes.append(mode)\n", + "\n", + "beer.modes.append(mode)\n", + "\n", + "###### Fifth additional mode\n", + "mode = beer.beer_mode(2.1 * beer.A)\n", + "mode.caption = f'modulation 4X ({fMC} Hz - double frame)'\n", + "# distance freq. offset title opening/closing\n", + "mode.add_chopper( 8.283*beer.m, 7*beer.Hz, (72/2+12)*beer.deg, 'FC1A', [0], [72])\n", + "mode.add_chopper( 9.300*beer.m, fMC*beer.Hz, 2.5*beer.deg, 'MCA',\n", + " list(on for on in np.arange(0.0, 360, 45)),\n", + " list(on for on in np.arange(5.0, 360, 45)))\n", + "mode.add_chopper( 9.350*beer.m, (fMC/4)*beer.Hz, 2.5*beer.deg, 'MCB',\n", + " list(on for on in np.arange(0.0, 360, 22.5)),\n", + " list(on for on in np.arange(5.0, 360, 22.5)))\n", + "mode.add_chopper(79.975*beer.m, 7*beer.Hz, 175/2*beer.deg, 'FC2A', [0], [175])\n", + "modes.append(mode)\n", + "\n", + "beer.modes.append(mode)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "10", + "metadata": {}, + "outputs": [], + "source": [ + "# beer.print_modes_info('all')" + ] + }, + { + "cell_type": "markdown", + "id": "11", + "metadata": {}, + "source": [ + "### TOF plotting\n", + "The defined modes and number of simulated pulses can be selected.
\n", + "You can adjust the maximum wavelength (```wave_max```) in the simulation. ESS source provides neutrons up to $\\lambda_{max} = 20$ Å. The default value is 15 Å.
\n", + "\n", + "*Note:* You can increase the amount of neutrons to simulate when the statistic is necessary. " + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "12", + "metadata": {}, + "outputs": [], + "source": [ + "index, pulses = 8, 2 # start with PS2 and two pulses\n", + "wave_max = 20 # change if necessary - maximum 20\n", + "\n", + "select_mode = widgets.Dropdown(options=list(f'{mode.caption}' for mode in beer.modes),\n", + " value=f'{beer.modes[index].caption}', \n", + " description='Mode: ')\n", + "select_pulses = widgets.IntSlider(min=1, max=6, value=pulses, step=1, description='Pulses:')\n", + "recalculate_button = widgets.Button(description='Recalculate', disabled=True)\n", + "save_button = widgets.Button(description='Save to PDF', disabled=True)\n", + "output_tof = widgets.Output() \n", + "\n", + "# display the selection, buttons and output\n", + "display(widgets.HBox([select_mode, select_pulses, recalculate_button, save_button]), output_tof)\n", + "\n", + "with output_tof:\n", + " res = beer.run_and_plot_mode('', pulses, wmax=wave_max, mode_index=index, neutrons=1_000_000)\n", + " save_button.disabled = False\n", + "\n", + "# definition of the events\n", + "def select_mode_on_change(change):\n", + " global index \n", + " index = change.new\n", + " recalculate_button.disabled = False\n", + " save_button.disabled = True\n", + " output_tof.clear_output()\n", + " with output_tof:\n", + " plt.close(res[2])\n", + "\n", + "def pulses_on_change(change):\n", + " global pulses\n", + " pulses = change.new\n", + " recalculate_button.disabled = False\n", + " save_button.disabled = True\n", + " output_tof.clear_output()\n", + " with output_tof:\n", + " plt.close(res[2])\n", + "\n", + "def recalculate_on_click(b):\n", + " global res\n", + " output_tof.clear_output()\n", + " with output_tof:\n", + " res = beer.run_and_plot_mode('', pulses, wmax=wave_max, mode_index=index, neutrons=1_000_000)\n", + " is_ready = True\n", + " recalculate_button.disabled = True\n", + " save_button.disabled = False\n", + "\n", + "def save_on_click(b):\n", + " with output_tof:\n", + " res[2].savefig(f'BEER-TOF-{beer.modes[index].caption}.pdf', format='pdf')\n", + " print('Figure saved to PDF.')\n", + " \n", + "select_mode.observe(select_mode_on_change, names='index')\n", + "select_pulses.observe(pulses_on_change, names='value')\n", + "recalculate_button.on_click(recalculate_on_click)\n", + "save_button.on_click(save_on_click)" + ] + }, + { + "cell_type": "markdown", + "id": "13", + "metadata": {}, + "source": [ + "## Analysis of the above simulated TOF\n", + "\n", + "### Detector information\n", + "Select the available detector and the number of pulses to plot." 
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "14", + "metadata": {}, + "outputs": [], + "source": [ + "d_index = -1 # print the last detector (sample position) as default\n", + "select_det = widgets.Dropdown(options=list(f'{det.name}' for det in beer.detectors),\n", + " value=f'{beer.detectors[d_index].name}', description='Detector: ')\n", + "save_det_button = widgets.Button(description='Save to PDF')\n", + "dat_det_button = widgets.Button(description='Save data')\n", + "d_pulses = []\n", + "det_name = beer.detectors[0].name\n", + "da_toa = res[1].detectors[det_name].data.sum('event').values\n", + "\n", + "for i in range(pulses):\n", + " # evaluating if the pulse is empty or not and disabling the checkbox\n", + " enable = da_toa[i] > 0\n", + " \n", + " select = widgets.Checkbox(value=True if i == 0 else False, description=f'Pulse:{i+1}',\n", + " disabled=False if enable != 0 else True)\n", + " d_pulses.append(select)\n", + "\n", + "output_det = widgets.Output()\n", + "\n", + "# display the selection, buttons and output\n", + "display(widgets.HBox([select_det, save_det_button, dat_det_button]), \n", + " widgets.HBox([cb for cb in d_pulses]), output_det)\n", + "\n", + "def get_d_pulse_indexes():\n", + " out = []\n", + " for i, check in enumerate(d_pulses):\n", + " if check.value: out.append(i) \n", + " return out\n", + "\n", + "with output_det:\n", + " fig_det = beer.plot_detectors(res[1], res[0], d_index, get_d_pulse_indexes())\n", + "\n", + "# definition of the events\n", + "def select_det_on_change(change):\n", + " global d_index, fig_det\n", + " d_index = change.new\n", + " output_det.clear_output()\n", + " with output_det:\n", + " plt.close(fig_det)\n", + " fig_det = beer.plot_detectors(res[1], res[0], d_index, get_d_pulse_indexes())\n", + "\n", + "def save_det_on_click(b):\n", + " with output_det:\n", + " fig_det.savefig(f'{beer.detectors[d_index].name} - {beer.modes[res[0]].caption}.pdf', format='pdf')\n", + " print('Figure saved to PDF.')\n", + "\n", + "def dat_det_on_click(b):\n", + " with output_det:\n", + " for ax in fig_det.axes:\n", + " xl = ax.xaxis.get_label().get_text().split(' ')[0]\n", + " yl = ax.yaxis.get_label().get_text().split(' ')[0]\n", + " for line in ax.lines:\n", + " a = np.array(line.get_xydata())\n", + " fl = f'{beer.detectors[d_index].name}-{beer.modes[res[0]].caption}'\n", + " \n", + " fl = f\"{fl}-{line.get_label().replace(': ', '_')}_{xl}_{yl}.dat\"\n", + " np.savetxt(fl, a, delimiter=\" \")\n", + " print(f'Data saved to \"{fl}\".')\n", + " \n", + "def d_pulses_on_change(change):\n", + " global fig_det\n", + " output_det.clear_output()\n", + " with output_det:\n", + " plt.close(fig_det)\n", + " pulse = get_d_pulse_indexes()\n", + " if len(pulse) == 0:\n", + " d_pulses[0].value = True\n", + " else:\n", + " fig_det = beer.plot_detectors(res[1], res[0], d_index, pulse)\n", + " \n", + "select_det.observe(select_det_on_change, names='index')\n", + "save_det_button.on_click(save_det_on_click)\n", + "dat_det_button.on_click(dat_det_on_click)\n", + "for check in d_pulses:\n", + " check.observe(d_pulses_on_change, names='value')" + ] + }, + { + "cell_type": "markdown", + "id": "15", + "metadata": {}, + "source": [ + "### Chopper information\n", + "Select the available chopper and the number of pulses to plot. 
" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "16", + "metadata": {}, + "outputs": [], + "source": [ + "chop_index = 0\n", + "select_chop = widgets.Dropdown(options=list(f'{chop.name}' for chop in beer.modes[index].choppers),\n", + " value=f'{beer.modes[index].choppers[chop_index].name}', \n", + " description='Chopper: ')\n", + "save_chop_button = widgets.Button(description='Save to PDF')\n", + "dat_chop_button = widgets.Button(description='Save data')\n", + "c_pulses = []\n", + "det_name = beer.detectors[0].name\n", + "da_toa = res[1].detectors[det_name].data.sum('event').values\n", + "\n", + "for i in range(pulses):\n", + " # evaluating if the pulse is empty or not and disabling the checkbox\n", + " enable = da_toa[i] > 0\n", + "\n", + " select = widgets.Checkbox(value=True if i == 0 else False, \n", + " description=f'Pulse:{i+1}',\n", + " disabled=False if enable != 0 else True)\n", + " c_pulses.append(select)\n", + "\n", + "output_chop = widgets.Output()\n", + "\n", + "# display the selection, buttons and output\n", + "display(widgets.HBox([select_chop, save_chop_button, dat_chop_button]), \n", + " widgets.HBox([cb for cb in c_pulses]), output_chop)\n", + "\n", + "def get_c_pulse_indexes():\n", + " out = []\n", + " for i, check in enumerate(c_pulses):\n", + " if check.value: out.append(i) \n", + " return out\n", + "\n", + "with output_chop:\n", + " fig_chop = beer.plot_choppers(res[1], res[0], chop_index, get_c_pulse_indexes())\n", + "\n", + "# definition of the events\n", + "def select_chop_on_change(change):\n", + " global chop_index, fig_chop\n", + " chop_index = change.new\n", + " output_chop.clear_output()\n", + "\n", + " with output_chop:\n", + " plt.close(fig_chop)\n", + " fig_chop = beer.plot_choppers(res[1], res[0], chop_index, get_c_pulse_indexes())\n", + "\n", + "def save_chop_on_click(b):\n", + " with output_chop:\n", + " fig_chop.savefig(f'{beer.modes[res[0]].choppers[chop_index].name}-{beer.modes[res[0]].caption}.pdf',\n", + " format='pdf')\n", + " print('Figure saved to PDF.')\n", + "\n", + "def dat_chop_on_click(b):\n", + " with output_chop:\n", + " for ax in fig_chop.axes:\n", + " xl = ax.xaxis.get_label().get_text().split(' ')[0]\n", + " yl = ax.yaxis.get_label().get_text().split(' ')[0]\n", + " for line in ax.lines:\n", + " a = np.array(line.get_xydata())\n", + " fl = f'{beer.modes[res[0]].choppers[chop_index].name}-{beer.modes[res[0]].caption}'\n", + " fl = f\"{fl}-{line.get_label().replace(': ', '_')}_{xl}_{yl}.dat\"\n", + " np.savetxt(fl, a, delimiter=\" \")\n", + " print(f'Data saved to \"{fl}\".')\n", + " \n", + "def c_pulses_on_change(change):\n", + " global fig_chop\n", + " output_chop.clear_output()\n", + " with output_chop:\n", + " plt.close(fig_chop)\n", + " pulse = get_c_pulse_indexes()\n", + " if len(pulse) == 0:\n", + " check_pulses[0].value = True\n", + " else:\n", + " fig_chop = beer.plot_choppers(res[1], res[0], chop_index, pulse)\n", + " \n", + "select_chop.observe(select_chop_on_change, names='index')\n", + "save_chop_button.on_click(save_chop_on_click)\n", + "dat_chop_button.on_click(dat_chop_on_click)\n", + "for check in c_pulses:\n", + " check.observe(c_pulses_on_change, names='value')" + ] + }, + { + "cell_type": "markdown", + "id": "17", + "metadata": {}, + "source": [ + "## Comparison between various modes on the wavelength level\n", + "\n", + "### Simulate all the modes\n", + "Variable ```waves``` store the wavelength results for all run modes. 
" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "18", + "metadata": {}, + "outputs": [], + "source": [ + "waves = beer.get_wave_for_all_modes(neutrons=500_000, pulses=2, wmax=20.0)" + ] + }, + { + "cell_type": "markdown", + "id": "19", + "metadata": {}, + "source": [ + "### Plot the comparison\n", + "Below are some predefined comparisons. " + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "20", + "metadata": {}, + "outputs": [], + "source": [ + "beer.plot_comparison(waves, ['DS0', 'DS1'], [[0], [0, 1]], save_fig=False)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "21", + "metadata": {}, + "outputs": [], + "source": [ + "beer.plot_comparison(waves, ['PS1', 'PS2', 'PS3'], save_fig=False)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "22", + "metadata": {}, + "outputs": [], + "source": [ + "beer.plot_comparison(waves, ['8X - M0', '8X - M2', '8X - M3'], save_fig=False)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "23", + "metadata": {}, + "outputs": [], + "source": [ + "beer.plot_comparison(waves, ['16X - M0', '16X - M2', '16X - M3'], save_fig=False)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "24", + "metadata": {}, + "outputs": [], + "source": [ + "beer.plot_comparison(waves, ['8X - M0', '16X - M0', 'PS1', 'PS2'], save_fig=False)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "25", + "metadata": {}, + "outputs": [], + "source": [ + "beer.plot_comparison(waves, ['4X - M2', '8X - M0', '8X - M2', '16X - M2', 'PS2'], save_fig=False)" + ] + }, + { + "cell_type": "markdown", + "id": "26", + "metadata": {}, + "source": [ + "**end of notebook**" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.10.18" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/pyproject.toml b/pyproject.toml index f329fe32..8aeab917 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -110,6 +110,9 @@ pydocstyle.convention = "numpy" "S101", # asserts are used for demonstration and are safe in notebooks "T201", # printing is ok for demonstration purposes ] +"docs/user-guide/beer/tof_diagrams.ipynb" = [ + "PGH004", # file contains too many issues - allow blanket ignore +] [tool.ruff.format] quote-style = "preserve" diff --git a/src/ess/beer/choppers.py b/src/ess/beer/choppers.py new file mode 100644 index 00000000..70fc7d5a --- /dev/null +++ b/src/ess/beer/choppers.py @@ -0,0 +1,1200 @@ +#!/usr/bin/env python3 +""" +TOF diagrams for the BEER instrument + +The functions for plotting TOF diagrams and the predefined modes of operation +for the BEER instrument. 
+ +@author: premysl.beran@ess.eu +""" + +# %% Basic import + +import sys +import time + +import matplotlib as mpl +import matplotlib.pyplot as plt +import numpy as np +import plopp as pp +import scipp as sc + +# import scippneutron as scn +import tof +from scippneutron.chopper import disk_chopper +from tof.chopper import AntiClockwise, Clockwise +from tof.utils import wavelength_to_speed as w2s + +mpl.rcParams.update({'font.size': 12}) + +# %% Definition of the units (needed for Scipp) + +Hz = sc.Unit('Hz') +deg = sc.Unit('deg') +m = sc.Unit('m') +A = sc.Unit('angstrom') +s = sc.Unit('second') +ms = sc.Unit('ms') + +# %% ESS pulse plot + + +def plot_ess_pulse(save_fig=False): + """ + Plot the basic ESS pulse + + Parameters + ---------- + save_fig : Boolean, optional, The default is False. + Do you want to save the plot as PDF? + + Returns + ------- + ess_pulse : matplotlib figure + The figure of the ESS pulse in Matplotlib figure format + + """ + + # definition of the ESS pulse + pulse = tof.Source(neutrons=1_000_000, facility='ess', pulses=1) + + # plot the chart + pulse_plot = pulse.plot(bins=1000) + ess_pulse = pulse_plot.fig + + ess_pulse.set_size_inches(9, 3.5) + ess_pulse.suptitle('ESS source pulse') + ess_pulse.set_layout_engine(layout='constrained') + pulse_plot.show() + + if save_fig: + ess_pulse.savefig('ESS_source_pulse.pdf', format='pdf') + + return ess_pulse + + +# %% Definition of the "modes" class and some functions + + +# setup the BEER mode +class beer_mode: + """ + Class containing the choppers setup. + """ + + def __init__(self, lambda_): + self.choppers = [] + self.caption = '' + self.lambda_0 = lambda_ + self.chopper_offset = [] + self.t0_dist = [] + + def __repr__(self) -> str: + t = '' + for i, chopper in enumerate(self.choppers): + off = ( + self.chopper_offset[i] - chopper.close[0] / 2 + if "PS" not in chopper.name + else self.chopper_offset[i] + ) + t += ( + f'{chopper.name:4s} -> freq: {chopper.frequency.value:3.0f} Hz; ' + f'dis: {chopper.distance.value:6.3f} m; ' + f'phase: {chopper.phase.value:6.2f}º; ' + f'dir: {"-->" if chopper.direction == Clockwise else "<--"}; ' + f'offset: {off.value:3.0f}º ' + f'{"*" if "PS" not in chopper.name else ""} \n' + ) + + return ( + f'Mode description: {self.caption}\n' + f'Number of choppers: {len(self.choppers)}\n' + f'Nominal wavelength: {self.lambda_0.value} Å\n{t}' + '* - frame overlap offset expressed as the shift ' + 'from the opening centre\n' + ) + + def get_tof(self, distance, wavelength): + """ + Calculate time-of-flight of neutrons of given wavelength at given distance + """ + return (distance.to(unit='m') / w2s(wavelength.to(unit='angstrom'))).to( + unit='s' + ) + + def get_phase(self, distance, frequency, ang_offset): + """ + Calculate the angular phase shift of the chopper + """ + return ( + (self.get_tof(distance, self.lambda_0) * frequency.to(unit='Hz')) + * (360 * deg) + + ang_offset.to(unit='deg') + ).to(unit='deg') + + def add_chopper( + self, + dist, + freq, + offset, + caption, + starts, + stops, + rotation=None, + dist_for_phase=None, + ): + """ + fill up the chopper variable + dist_for_phase - + for PSC choppers, the phase is calculated at the middle distance + """ + if dist_for_phase is None: + dist_for_phase = dist + + self.chopper_offset.append(offset.to(unit='deg')) + self.t0_dist.append(dist_for_phase.to(unit='m')) + self.choppers.append( + tof.Chopper( + frequency=freq.to(unit='Hz'), + open=sc.array(dims=['cutout'], unit='deg', values=starts), + close=sc.array(dims=['cutout'], unit='deg', 
values=stops), + distance=dist.to(unit='m'), + name=caption, + direction=Clockwise if rotation is None else rotation, + phase=self.get_phase( + dist_for_phase, + freq, + (0.5 * 0.00286 * s) * (360 * deg) * freq # middle of the pulse + - offset.to(unit='deg'), + ), + ) + ) # user offset + + def update_chopper_phases(self): + for i, chopper in enumerate(self.choppers): + off = self.chopper_offset[i] + chopper.phase = self.get_phase( + self.t0_dist[i], + chopper.frequency, + (0.5 * 0.00286 * s) + * (360 * deg) + * chopper.frequency # middle of the pulse + - off, + ) # user offset + + +# %% Detector definition + +# define the detectors and monitors +detectors = [ + tof.Detector(distance=10.0 * m, name='monitor-10m'), + tof.Detector(distance=28.198 * m, name='bunker exit'), + tof.Detector(distance=82.0 * m, name='80 m chopper'), + tof.Detector(distance=158.0 * m, name='sample position'), +] + +# %% Modes definition + +###### initialisation of all modes +modes = [] + +# definition of the nominal wavelength +lambda_0 = 2.1 * A + + +def load_modes(): + global modes + modes = [] + + ### Maximum intensity - F0 ### + # No choppers - all choppers stops open + + # create the mode and add choppers with the proper setting for the mode + mode = beer_mode(lambda_0) + mode.caption = 'maximum intensity - F0' + modes.append(mode) + + ### Maximum experimental beam - F1/F2 ### + # Only FC2A is running + + # create the mode and add choppers with the proper setting for the mode + mode = beer_mode(3.1 * A) # lambda_0) + mode.caption = 'maximum experimental beam - F1+F2' + # distance freq. offset title opening/closing + mode.add_chopper(79.975 * m, 14 * Hz, 175 / 2 * deg, 'FC2A', [0], [175]) + modes.append(mode) + + ### Pulse shaping mode - high flux - single frame - PS0/PS1 ### + # This mode runs **PSC1**, **PSC3**, **FC1A** and **FC2A**. + + # create the mode and add choppers with the proper setting for the mode + mode = beer_mode(lambda_0) + mode.caption = 'pulse shaping (HF) - PS0+PS1' + # distance freq. offset title opening/closing distance for phase calculation + mode.add_chopper( + 6.450 * m, 168 * Hz, 0 * deg, 'PSC1', [0], [144], AntiClockwise, 6.9125 * m + ) + mode.add_chopper( + 7.375 * m, 168 * Hz, 0 * deg, 'PSC3', [0], [144], Clockwise, 6.9125 * m + ) + mode.add_chopper(8.283 * m, 28 * Hz, (72 / 2 + 6) * deg, 'FC1A', [0], [72]) + mode.add_chopper(79.975 * m, 14 * Hz, 175 / 2 * deg, 'FC2A', [0], [175]) + modes.append(mode) + + ### Pulse shaping mode - medium resolution - single frame - PS2 ### + # This mode runs **PSC1**, **PSC2**, **FC1A** and **FC2A**. + + # **distance for phase** corresponds to the distance between PSC choppers which is + # used for phase shift calculation, if not provided, primary distance is used + + # create the mode and add choppers with the proper setting for the mode + mode = beer_mode(lambda_0) + mode.caption = 'pulse shaping (MR) - PS2' + # distance freq. offset title opening/closing distance for phase calculation + mode.add_chopper( + 6.450 * m, 168 * Hz, 0 * deg, 'PSC1', [0], [144], AntiClockwise, 6.650 * m + ) + mode.add_chopper( + 6.850 * m, 168 * Hz, 0 * deg, 'PSC2', [0], [144], Clockwise, 6.650 * m + ) + mode.add_chopper(8.283 * m, 28 * Hz, (72 / 2 + 6) * deg, 'FC1A', [0], [72]) + mode.add_chopper(79.975 * m, 14 * Hz, 175 / 2 * deg, 'FC2A', [0], [175]) + modes.append(mode) + + ### Pulse shaping mode - high resolution - single frame - PS3 ### + # This mode runs **PSC1**, **PSC2**, **FC1A** and **FC2A**. 
+ + # **distance for phase** corresponds to the distance between PSC choppers which is + # used for phase shift calculation, if not provided, primary distance is used + + # create the mode and add choppers with the proper setting for the mode + mode = beer_mode(lambda_0) + mode.caption = 'pulse shaping (HR) - PS3' + # distance freq. offset title opening/closing distance for phase calculation + mode.add_chopper( + 6.450 * m, 168 * Hz, 0 * deg, 'PSC1', [0], [144], AntiClockwise, 6.550 * m + ) + mode.add_chopper( + 6.650 * m, 168 * Hz, 0 * deg, 'PSC2', [0], [144], Clockwise, 6.550 * m + ) + mode.add_chopper(8.283 * m, 28 * Hz, (72 / 2 + 6) * deg, 'FC1A', [0], [72]) + mode.add_chopper(79.975 * m, 14 * Hz, 175 / 2 * deg, 'FC2A', [0], [175]) + modes.append(mode) + + ### Modulation mode (HF) - 8 opening - single frame - M0/M1 ### + # This mode runs **MCA** with 8 openings, **FC1A** and **FC2A**. + # The frequency of the **MCA** is 70 Hz. + + # create the mode and add choppers with the proper setting for the mode + mode = beer_mode(lambda_0) + fMC = 70 + + # clear up the choppers and adjust the proper for the mode + mode.caption = 'modulation (HF) 8X - M0+M1' + # distance freq. offset title opening/closing + mode.add_chopper(8.283 * m, 28 * Hz, (72 / 2 + 6) * deg, 'FC1A', [0], [72]) + mode.add_chopper( + 9.300 * m, + fMC * Hz, + 2.5 * deg, + 'MCA', + list(np.arange(0, 360, 45)), # opening + list(np.arange(5, 360, 45)), + ) # closing + mode.add_chopper(79.975 * m, 14 * Hz, 175 / 2 * deg, 'FC2A', [0], [175]) + modes.append(mode) + + ### Modulation mode (MR) - 8 opening - single frame - M2 ### + # This mode runs **MCA** with 8 openings, **FC1A** and **FC2A**. + # The frequency of the **MCA** is 140 Hz. + + # create the mode and add choppers with the proper setting for the mode + mode = beer_mode(lambda_0) + fMC = 140 + + # clear up the choppers and adjust the proper for the mode + mode.caption = 'modulation (MR) 8X - M2' + # distance freq. offset title opening/closing + mode.add_chopper(8.283 * m, 28 * Hz, (72 / 2 + 6) * deg, 'FC1A', [0], [72]) + mode.add_chopper( + 9.300 * m, + fMC * Hz, + 2.5 * deg, + 'MCA', + list(np.arange(0, 360, 45)), # opening + list(np.arange(5, 360, 45)), + ) # closing + mode.add_chopper(79.975 * m, 14 * Hz, 175 / 2 * deg, 'FC2A', [0], [175]) + modes.append(mode) + + ### Modulation mode (HR) - 8 opening - single frame - M3 ### + # This mode runs **MCA** with 8 openings, **FC1A** and **FC2A**. + # The frequency of the **MCA** is 280 Hz. + + # create the mode and add choppers with the proper setting for the mode + mode = beer_mode(lambda_0) + fMC = 280 + + # clear up the choppers and adjust the proper for the mode + mode.caption = 'modulation (HR) 8X - M3' + # distance freq. offset title opening/closing + mode.add_chopper(8.283 * m, 28 * Hz, (72 / 2 + 6) * deg, 'FC1A', [0], [72]) + mode.add_chopper( + 9.300 * m, + fMC * Hz, + 2.5 * deg, + 'MCA', + list(np.arange(0, 360, 45)), # opening + list(np.arange(5, 360, 45)), + ) # closing + mode.add_chopper(79.975 * m, 14 * Hz, 175 / 2 * deg, 'FC2A', [0], [175]) + modes.append(mode) + + ### Modulation mode - 8 opening - double frame ### + # This mode runs **MCA** with 8 openings and **FC1A** and **FC2A** + # in reduced speed to double the wavelength frame. + # The frequency of the **MCA** chopper can be adjusted as necessary. 
+ + # create the mode and add choppers with the proper setting for the mode + mode = beer_mode(lambda_0) + fMC = 280 + + # clear up the choppers and adjust the proper for the mode + mode.caption = f'modulation 8X ({fMC} Hz - double frame)' + # distance freq. offset title opening/closing + mode.add_chopper(8.283 * m, 7 * Hz, (72 / 2 + 12) * deg, 'FC1A', [0], [72]) + mode.add_chopper( + 9.300 * m, + fMC * Hz, + 2.5 * deg, + 'MCA', + list(np.arange(0, 360, 45)), + list(np.arange(5, 360, 45)), + ) + mode.add_chopper(79.975 * m, 7 * Hz, 175 / 2 * deg, 'FC2A', [0], [175]) + modes.append(mode) + + ### Modulation mode (HF)- 16 opening - single frame - M0/M1 ### + # This mode runs **MCB** with 8 openings, **FC1A** and **FC2A**. + # The frequency of the **MCB** is 70 Hz. + + # create the mode and add choppers with the proper setting for the mode + mode = beer_mode(lambda_0) + fMC = 70 + + # clear up the choppers and adjust the proper for the mode + mode.caption = 'modulation (HF) 16X - M0+M1' + # distance freq. offset title opening/closing + mode.add_chopper(8.283 * m, 28 * Hz, (72 / 2 + 6) * deg, 'FC1A', [0], [72]) + mode.add_chopper( + 9.350 * m, + fMC * Hz, + 2.5 * deg, + 'MCB', + list(np.arange(0, 360, 22.5)), + list(np.arange(5, 360, 22.5)), + ) + mode.add_chopper(79.975 * m, 14 * Hz, 175 / 2 * deg, 'FC2A', [0], [175]) + modes.append(mode) + + ### Modulation mode (MR) - 16 opening - single frame - M2 ### + # This mode runs **MCB** with 8 openings, **FC1A** and **FC2A**. + # The frequency of the **MCB** is 140 Hz. + + # create the mode and add choppers with the proper setting for the mode + mode = beer_mode(lambda_0) + fMC = 140 + + # clear up the choppers and adjust the proper for the mode + mode.caption = 'modulation (MR) 16X - M2' + # distance freq. offset title opening/closing + mode.add_chopper(8.283 * m, 28 * Hz, (72 / 2 + 6) * deg, 'FC1A', [0], [72]) + mode.add_chopper( + 9.350 * m, + fMC * Hz, + 2.5 * deg, + 'MCB', + list(np.arange(0, 360, 22.5)), + list(np.arange(5, 360, 22.5)), + ) + mode.add_chopper(79.975 * m, 14 * Hz, 175 / 2 * deg, 'FC2A', [0], [175]) + modes.append(mode) + + ### Modulation mode (HR) - 16 opening - single frame - M3 ### + # This mode runs **MCB** with 8 openings, **FC1A** and **FC2A**. + # The frequency of the **MCB** is 280 Hz. + + # create the mode and add choppers with the proper setting for the mode + mode = beer_mode(lambda_0) + fMC = 280 + + # clear up the choppers and adjust the proper for the mode + mode.caption = 'modulation (HR) 16X - M3' + # distance freq. offset title opening/closing + mode.add_chopper(8.283 * m, 28 * Hz, (72 / 2 + 6) * deg, 'FC1A', [0], [72]) + mode.add_chopper( + 9.350 * m, + fMC * Hz, + 2.5 * deg, + 'MCB', + list(np.arange(0, 360, 22.5)), + list(np.arange(5, 360, 22.5)), + ) + mode.add_chopper(79.975 * m, 14 * Hz, 175 / 2 * deg, 'FC2A', [0], [175]) + modes.append(mode) + + ### Modulation mode - 16 opening - double frame ### + # This mode runs **MCB** with 16 openings and **FC1A** and **FC2A** + # in reduced speed to double the wavelength frame. + # The frequency of the **MCB** chopper can be adjusted as necessary. + + # create the mode and add choppers with the proper setting for the mode + mode = beer_mode(lambda_0) + fMC = 280 + + # clear up the choppers and adjust the proper for the mode + mode.caption = f'modulation 16X ({fMC} Hz, double frame)' + # distance freq. 
offset title opening/closing + mode.add_chopper(8.283 * m, 7 * Hz, (72 / 2 + 12) * deg, 'FC1A', [0], [72]) + mode.add_chopper( + 9.350 * m, + fMC * Hz, + 2.5 * deg, + 'MCB', + list(np.arange(0, 360, 22.5)), + list(np.arange(5, 360, 22.5)), + ) + mode.add_chopper(79.975 * m, 7 * Hz, 175 / 2 * deg, 'FC2A', [0], [175]) + + modes.append(mode) + + ### SANS + modulation mode - double frame - DS0 ### + # This mode runs **MCC** with 8 openings (180 + 7x4) and **FC1A** and **FC2A** + # in reduced speed to double the wavelength frame. + # The frequency of the ***MCC*** chopper should be 70 Hz. + + # create the mode and add choppers with the proper setting for the mode + mode = beer_mode(4.0 * A) + fMC = 70 + + # clear up the choppers and adjust the proper for the mode + mode.caption = 'SANS + modulation - DS0' + # distance freq. offset title opening/closing + mode.add_chopper(8.283 * m, 7 * Hz, (72 / 2 + 12) * deg, 'FC1A', [0], [72]) + mode.add_chopper( + 9.875 * m, + fMC * Hz, + (180 + 2.5) * deg, + 'MCC', + list(np.arange(0, 150, 22.5)) + [160.0], # noqa: RUF005 + list(np.arange(5, 150, 22.5)) + [340.0], # noqa: RUF005 + ) + mode.add_chopper(79.975 * m, 7 * Hz, 175 / 2 * deg, 'FC2A', [0], [175]) + modes.append(mode) + + ### SANS + pulse shaping mode - alternating frame - DS1 ### + # This mode runs **PSC1**, **PSC3**, **FC1A**, **FC1B** and **FC2B**. + + # create the mode and add choppers with the proper setting for the mode + mode = beer_mode(lambda_0) + mode.caption = 'SANS + pulse shaping (HF) - DS1' + # distance freq. offset title opening/closing distance for phase calculation + mode.add_chopper( + 6.450 * m, 168 * Hz, 0 * deg, 'PSC1', [0], [144], AntiClockwise, 6.9125 * m + ) + mode.add_chopper( + 7.375 * m, 168 * Hz, 0 * deg, 'PSC3', [0], [144], Clockwise, 6.9125 * m + ) + mode.add_chopper(8.283 * m, 14 * Hz, (72 / 2 - 9) * deg, 'FC1A', [0], [72]) + mode.add_chopper(8.317 * m, 63 * Hz, 180 / 2 * deg, 'FC1B', [0], [180]) + mode.add_chopper(80.025 * m, 7 * Hz, 85 / 2 * deg, 'FC2B', [0], [85]) + modes.append(mode) + + +# loading the modes +load_modes() +# %% Mode info print + + +def print_modes_info(mode_id='all', index=-1): + """ + Print the summary of the loaded modes + + Parameters + ---------- + mode_id : String, optional, default 'all' + Mode ID string included in the caption. Try following F0, F1, F2, PS0, PS1, PS2, + PS3, M0, M1, M2, M3, IM0, IM1, DS0, DS1, or print the modes first. 
+ + index : Integer, optional, default -1 + Number of pulses to be simulated + + """ + print(f'In total {len(modes)} modes were loaded.') # noqa: T201 + if (index == -1) and (mode_id == 'all'): + for i, mode in enumerate(modes): + print(f'Mode {i + 1} (index: {i})\n{mode}') # noqa: T201 + elif (index != -1) and (mode_id == 'all'): + print(f'Mode {index + 1} (index: {index})\n{modes[index]}') # noqa: T201 + elif (index == -1) and (mode_id != 'all'): + i = get_mode_index(mode_id) + print(f'Mode {i + 1} (index: {i})\n{modes[i]}') # noqa: T201 + + +def get_mode_index(mode_id, verbose=False): + """ + Search the index of the mode based on the mode ID + + Parameters + ---------- + mode_id : String + String contained in the caption of the mode + verbose : Boolean + Whether the info is printed in the output + + Returns + ------- + mode_index : Integer + Index of the mode in the all modes list (zero-based) + + """ + result = -1 + for i, mode in enumerate(modes): + if mode_id in mode.caption: + if verbose: + print(f'Mode {mode_id} has index: {i}') # noqa: T201 + result = i + break # get just the first appearance + + if result == -1: + result = 0 + print(f'No mode has ID: "{mode_id}". Selecting mode "{modes[result].caption}"') # noqa: T201 + + return result + + +# %% TOF diagrams + + +# definition of the function for running and plotting +def run_and_plot_mode( + mode_id, n_pulses, neutrons=100_000, wmax=15.0, mode_index=-1, save_fig=False +): + """ + Run selected mode and plot the TOF diagram. Number of neutrons is set to smaller + value by default (100_000) to get data quickly. For better statistic increase it. + + Parameters + ---------- + mode_id : String + Mode ID string included in the caption. Try following F0, F1, F2, PS0, PS1, PS2, + PS3, M0, M1, M2, M3, IM0, IM1, DS0, DS1, or print the modes first. + + n_pulses : Integer + Number of pulses to be simulated + + neutrons : Integer, optional, default 100_000 + Number of neutron to simulate per pulse. + + wmax : Float, optional, default 15.0 + Maximum wavelength simulated. + + mode_index : Integer, optional, default -1 + Alternatively mode index can be used instead of mode ID. + Useful when ID has not specific string sequence as double frame ones. + If index is provided them mode_id is overwritten. + mode_index is zero based! + + save_fig : Boolean, optional, default False + Save chart to the PDF file? + + Returns + ------- + mode_index : Integer + Index of selected mode ID, useful for plotting detector's and chopper's info + Zero based! + + tof_res : tof Results + Information of choppers and detectors, useful for plotting + + tof_fig : tof matplotlib figure + TOF diagram in Matplotlib format + """ + tof_res = [] + + # search the index of the mode based on the mode string + if mode_index == -1: + mode_index = get_mode_index(mode_id) + + print('Running and plotting the model ... 
please wait.') # noqa: T201 + + # we have to run the model + pulse_ = tof.Source( + neutrons=neutrons, facility='ess', pulses=n_pulses, wmax=wmax * A + ) + model_tof = tof.Model( + source=pulse_, choppers=modes[mode_index].choppers, detectors=detectors + ) + tof_res = model_tof.run() + + # plotting the tof + tof_plot = tof_res.plot(visible_rays=5000, blocked_rays=1000, figsize=(9, 6)) + tof_fig = tof_plot.fig + tof_ax = tof_plot.ax + + # some adjustment + tof_ax.set_title(f'BEER TOF - {modes[mode_index].caption}') + tof_fig.set_layout_engine(layout='tight') + tof_fig.canvas.manager.set_window_title(f'BEER TOF - {modes[mode_index].caption}') + + plt.show() + + print(f'Number of simulated pulses: {n_pulses}') # noqa: T201 + print(modes[mode_index]) # noqa: T201 + + if save_fig: + tof_fig.savefig(f'BEER_TOF_{modes[mode_index].caption}.pdf', format='pdf') + + return mode_index, tof_res, tof_fig + + +# %% Plotting the results from detectors + + +def plot_detectors(tof_res, mode_index, det_index=0, pulse_list=None, save_fig=False): + """ + Plotting the chart with information about the pulses at the detectors + + Parameters + ---------- + tof_res : tof Results + Output of the function plot + + mode_index : Integer + Index of the plotted and run mode. Zero-based! + + det_index : Integer + Index of the detector to plot. Use the print_detectors function to get info + about available detectors. Zero-based! + + pulse_list : [Integer] default [0] + List of indexes of the pulses to plot. Zero-based! + + save_fig : Boolean, optional default False + Save the chart to the PDF file? + + Returns + ------- + fig_det : matplotlib figure + The figure of the detector charts in Matplotlib figure format + """ + if pulse_list is None: + pulse_list = [0] + # chart definition + fig_det, ax_det = plt.subplots(1, 2, figsize=(9, 4)) + + # getting the data as the histograms + det_name = detectors[det_index].name + da_toa = tof_res.detectors[det_name].toa.data.copy() + # create DataGroup where a DataArray is associated with a pulse + tofs = sc.DataGroup() + for i in range(da_toa.sizes['pulse']): + if da_toa['pulse', i].data.sum('event').value: + tofs[f'pulse:{i}'] = da_toa['pulse', i].hist(toa=3000) + else: + tofs[f'pulse:{i}'] = da_toa['pulse', i] + + da_wlgth = tof_res.detectors[det_name].wavelength.data.copy() + waves = sc.DataGroup() + for i in range(da_wlgth.sizes['pulse']): + if da_wlgth['pulse', i].data.sum('event').value: + waves[f'pulse:{i}'] = da_wlgth['pulse', i].hist(wavelength=300) + else: + waves[f'pulse:{i}'] = da_wlgth['pulse', i] + + info = '' + + # converting toa to ms and consider midpoints of tof and wlgth + for value in pulse_list: + tofs[f'pulse:{value}'].coords['toa'] = sc.midpoints( + tofs[f'pulse:{value}'].coords['toa'].to(unit='ms') + ) + waves[f'pulse:{value}'].coords['wavelength'] = sc.midpoints( + waves[f'pulse:{value}'].coords['wavelength'] + ) + + # plotting data + pp.plot( + {f'Pulse: {value + 1}': tofs[f'pulse:{value}'] for value in pulse_list}, + ax=ax_det[0], + linestyle='solid', + marker='', + ) + pp.plot( + {f'Pulse: {value + 1}': waves[f'pulse:{value}'] for value in pulse_list}, + ax=ax_det[1], + linestyle='solid', + marker='', + ) + + # order the pulses by TOF + order = [ + [value, tofs[f'pulse:{value}'].coords['toa'].mean().value] + for value in pulse_list + ] + order = sorted(order, key=lambda column: column[1], reverse=False) + + # calculate info about the frame crossing + for index, _ in enumerate(order): + if len(order) > 1 and index < len(order) - 1: + y0 = 
tofs[f'pulse:{order[index][0]}'].data.values + tof0 = tofs[f'pulse:{order[index][0]}'].coords['toa'].values[y0 > 0].max() + y1 = tofs[f'pulse:{order[index + 1][0]}'].data.values + tof1 = ( + tofs[f'pulse:{order[index + 1][0]}'].coords['toa'].values[y1 > 0].min() + ) + info += ( + f'P{order[index][0] + 1}:{order[index + 1][0] + 1} ' + f'{"gap" if tof1 - tof0 >= 0 else "overlap"} = {tof1 - tof0:0.3f} ms' + ) + if index != len(order) - 2: + info += '\n' + + # show the info + ax_det[0].text( + 0.03, + 0.96, + f'Frame crossing info\n{info}', + fontsize=8, + ha='left', + va='top', + transform=ax_det[0].transAxes, + bbox=dict(facecolor='white', edgecolor='gray', boxstyle='round', alpha=0.6), # noqa: C408 + ) + + # info about wavelength + info = '' + for index, value in enumerate(pulse_list): + count_wave = waves[f'pulse:{value}'].data.values + wave = waves[f'pulse:{value}'].coords['wavelength'].values[count_wave > 0] + w_max = wave.max() + w_min = wave.min() + w_band = w_max - w_min + w_center = (w_max + w_min) / 2 + info += ( + f'P{value + 1}: ' + r'$\Delta \lambda$ = ' + f'{w_band:0.3f} Å;' + r' $\lambda_{mid}$' + f' = {w_center:0.3f} Å' + ) + if index != len(pulse_list) - 1: + info += '\n' + + ax_det[1].text( + 0.03, + 0.96, + f'{info}', + fontsize=8, + ha='left', + va='top', + transform=ax_det[1].transAxes, + bbox=dict(facecolor='white', edgecolor='gray', boxstyle='round', alpha=0.6), # noqa: C408 + ) + + # chart adjustment + ax_det[0].set_ylabel('[counts]') + ax_det[0].legend(fontsize=9, loc='upper right') + + ax_det[1].set_ylabel('[counts]') + ax_det[1].legend(fontsize=9, loc='upper right') + + fig_det.canvas.manager.set_window_title(f'{det_name} - {modes[mode_index].caption}') + fig_det.suptitle(f'{det_name} - {modes[mode_index].caption}') + fig_det.tight_layout() + + if save_fig: + fig_det.savefig(f'{det_name}-{modes[mode_index].caption}.pdf', format='pdf') + + plt.show() + + return fig_det + + +def print_detectors_info(): + """ + Print the detectors info with indexes + """ + print(f'In total {len(detectors)} detectors are loaded.') # noqa: T201 + for i, detector in enumerate(detectors): + print( # noqa: T201 + f'Detector [index {i}], name: "{detector.name}", ' + f'distance: {detector.distance.value} [m]' + ) + + +# %% Plot the results from the choppers + + +def plot_choppers(tof_res, mode_index, chop_index, pulse_list=None, save_fig=False): + """ + Plotting the chart with information about the pulses at the choppers + + Parameters + ---------- + tof_res : tof Results + Output of the function plot + + mode_index : Integer + Index of the plotted and run mode. Zero based! + + chop_index : Integer + Index of the chopper to plot. Use print_choppers function to get info + about available choppers. Zero based! + + pulse_list : [Integer] default [0] + List of indexes of the pulses to plot. Zero based! + + save_fig : Boolean, optional default False + Save the chart to the PDF file? 
+ + Returns + ------- + fig_chop : matplotlib figure + The figure of the chopper chart in Matplotlib figure format + """ + if pulse_list is None: + pulse_list = [0] + # chart definition + fig_chop, ax_chops = plt.subplots(1, 2, figsize=(9, 4)) + + info = '' + det_name = modes[mode_index].choppers[chop_index].name + + for i in pulse_list: + # getting the tofs data not binned yet + tofs = tof_res.choppers[det_name].toa.data['pulse', i].copy() + tofs.coords['toa'] = tofs.coords['toa'].to(unit='ms') + + # blocked + tofs_ = tofs.copy() + # tofs_ = sc.DataArray(data=tofs.data, coords=tofs.coords) + # tofs_.masks['visible'] = ~reduce(lambda a, b: a | b, tofs.masks.values()) + + del tofs.masks['blocked_by_me'] + + # get proper binning to align visible and blocked + toa_bins = sc.linspace( + dim='toa', + num=300, + unit=tofs.coords['toa'].unit, + start=tofs.coords['toa'].min().value, + stop=tofs.coords['toa'].max().value, + ) + tofs = tofs.hist(toa=toa_bins) + tofs_ = tofs_.hist(toa=toa_bins) + + # getting the wavelength data not binned yet visible and blocked + waves = tof_res.choppers[det_name].wavelength.data['pulse', i].copy() + # blocked + waves_ = waves.copy() + + del waves.masks['blocked_by_me'] + + # get proper binning to align visible and blocked + wlgth_bins = sc.linspace( + dim='wavelength', + num=300, + unit='angstrom', + start=waves.coords['wavelength'].min().value, + stop=waves.coords['wavelength'].max().value, + ) + waves = waves.hist(wavelength=wlgth_bins) + waves_ = waves_.hist(wavelength=wlgth_bins) + + ax_chops[0].plot(tofs, label=f'transmitted - P{i + 1}') + ax_chops[0].plot(tofs_, label=f'blocked - P{i + 1}') + ax_chops[1].plot(waves, label=f'transmitted - P{i + 1}') + ax_chops[1].plot(waves_, label=f'blocked - P{i + 1}') + + info += f'(P{i + 1}) = {tofs_.sum().value / (tofs_.sum().value + tofs.sum().value) * 100:0.1f}% ' # noqa: E501 + + # put some info + ax_chops[0].text( + 0.5, + 1.06, + f'Blocked ratio {info}', + fontsize=8, + ha='center', + va='top', + transform=ax_chops[0].transAxes, + bbox=dict(facecolor='white', edgecolor='gray', boxstyle='round', alpha=0.6), # noqa: C408 + ) + # chart adjustment + ax_chops[0].set_xlabel('toa [ms]') + ax_chops[0].set_ylabel('[counts]') + ax_chops[0].legend(fontsize=9) + + ax_chops[1].set_xlabel('Wavelength [Å]') + ax_chops[1].set_ylabel('[counts]') + ax_chops[1].legend(fontsize=9) + + fig_chop.canvas.manager.set_window_title( + f'{det_name} - {modes[mode_index].caption}' + ) + fig_chop.suptitle(f'{det_name} - {modes[mode_index].caption}') + fig_chop.tight_layout() + + if save_fig: + fig_chop.savefig(f'{det_name}-{modes[mode_index].caption}.pdf', format='pdf') + + plt.show() + + return fig_chop + + +def print_choppers_info(mode_index): + """ + Print the chopper information for selected mode index + + Parameters + ---------- + mode_index : Integer + Index of the mode for which to print the choppers' info. Zero-based! + + """ + print( # noqa: T201 + f'For selected mode [{mode_index}] "{modes[mode_index].caption}": ' + f'{len(modes[mode_index].choppers)} chopper(s) is/are defined.' 
+ ) + for i, chopper in enumerate(modes[mode_index].choppers): + print( # noqa: T201 + f'Chopper[{i}] name: "{chopper.name}", ' + f'dist: {chopper.distance.value:6.3f} [m], ' + f'freq: {chopper.frequency.value:3.0f} [Hz], ' + f'phase: {chopper.phase.value:6.2f} [º], ' + f'cutouts: {len(chopper.open):2d}, ' + f'dir: {"-->" if chopper.direction == Clockwise else "<--"}' + ) + + +def get_chopper_index(mode_index, chopper_id, verbose=False): + """ + Search the index of the chopper in the mode based on the chopper ID + + Parameters + ---------- + mode_index : Integer + Index of the mode in the all modes list (zero-based) + chopper_id : String + String contained in the caption of the chopper + verbose : Boolean + Whether the info is printed in the output + + Returns + ------- + chopper_id : String + String contained in the caption of the chopper + + """ + result = -1 + for i, chopper in enumerate(modes[mode_index].choppers): + if chopper_id in chopper.name: + if verbose: + print(f'Chopper {chopper_id} has index: {i}') # noqa: T201 + result = i + break # get just the first appearance + + if result == -1: + result = 0 + print( # noqa: T201 + f'No chopper has ID: "{chopper_id}". Selecting chopper ' + f'"{modes[mode_index].chopper[result].name}"' + ) + + return result + + +def draw_chopper(mode_index, chopper_index): + """ + Graphical visualisation of the chopper for selected mode + Works only in Jupyter notebook! + + Parameters + ---------- + mode_index : Integer + Index of the mode. Zero-based! + + chopper_index : Integer + Index of the chopper for which to display chopper. Zero-based! + + """ + to_show = sc.DataGroup() + chop = modes[mode_index].choppers[chopper_index] + shift = chop.phase.value * (1 if chop.direction == Clockwise else -1) + # shift = chop.phase.value + + to_show[chop.name] = disk_chopper.DiskChopper( # scn.chopper.DiskChopper( + axle_position=sc.vector(value=np.array([0, 0, chop.distance.value]), unit='m'), + frequency=chop.frequency * (-1 if chop.direction == Clockwise else 1), + beam_position=0.0 * sc.Unit('deg'), + phase=chop.phase, + slit_begin=sc.array( + dims=['slit'], values=[i.value + shift for i in chop.open], unit='deg' + ), + slit_end=sc.array( + dims=['slit'], values=[i.value + shift for i in chop.close], unit='deg' + ), + slit_height=0.05 * m, + radius=0.375 * m, + ) + return to_show[chop.name] + + +# %% Comparison of the modes + + +def progressbar(it, prefix="", size=60, out=sys.stdout): + count = len(it) + start = time.time() + + def show(j): + x = int(size * j / count) + remaining = ((time.time() - start) / j) * (count - j) + + mins, sec = divmod(remaining, 60) + time_str = f"{int(mins):02}:{sec:05.2f}" + + print( + f"{prefix}[{'█' * x}{('.' * (size - x))}] {j}/{count} Estimated wait {time_str} ", # noqa: E501 + end='\r', + file=out, + flush=True, + ) + + for i, item in enumerate(it): + yield item + show(i + 1) + print("\n", flush=True, file=out) + + +def get_wave_for_all_modes(neutrons=1_000_000, pulses=2, wmax=15.0): + """ + Run the simulation for all the modes and store the wavelength related results. + + Parameters + ---------- + neutrons : Integer, optional + Number of neutron to simulate per pulse. The default is 1_000_000. + + pulses : Integer, optional + Number of pulses to simulate, The default is 2. + + wmax : Float, optional + Maximum wavelength simulated. The default is 15.0. + + Returns + ------- + waves : List of results + List of TOF simulated wavelength related results. 
+ """ + waves = [] + for i in progressbar(range(len(modes)), 'Computing: ', 40): + pulse_ = tof.Source( + neutrons=neutrons, facility='ess', pulses=pulses, wmax=wmax * A + ) + model_ = tof.Model( + source=pulse_, detectors=detectors, choppers=modes[i].choppers + ) + res_ = model_.run() + + # data added from sample detector not binned + # (binning adjusted during comparison) + waves.append(res_.detectors['sample position'].wavelength.data) + print(f'{len(waves)} models were run.') # noqa: T201 + return waves + + +def plot_comparison(waves, what, pulses=None, save_fig=False): + """ + Plot the chart comparing wavelength distribution of various modes and pulses. + + Parameters + ---------- + waves : List of results + Wavelength events from running all the modes. Use output from + "get_wave_for_all_modes" function. + + what : List of strings + Part of the mode's name which you want to compare. Be specific, if two + occurrences happened then longer text (ex. "8X - M2"). + + pulses : List of array, optional, The default is None + The list of Integer array representing what pulse to plot from each + mode. Ex: [[0],[1, 2]] - for the first mode in "what" plot the first + pulse and for the second mode plot pulse 2 and 3 (zero based!). + If not provided, only first pulse plotted for each mode. The number of + members has to be the same as for "what". + + save_fig : Boolean, Optional, The default is False. + Do you want to save the chart as PDF? + """ + + # plot definition + fig_mode_comp, mode_comp_ax = plt.subplots(1, 1, figsize=(9, 6)) + + # adjusting pulses if None + if pulses is None: + pulses = [] + for _ in range(len(what)): + pulses.append([0]) + + # getting min and max for proper binning + w_min, w_max = 100, 0 + index = 0 + for i, wave in enumerate(waves): + if any( + select in modes[i].caption for select in what + ): # select what you want to compare + for j in pulses[index]: + w_min = min(w_min, wave['pulse', j].coords['wavelength'].min().value) + w_max = max(w_max, wave['pulse', j].coords['wavelength'].max().value) + index += 1 + bins = sc.linspace( + dim='wavelength', num=300, unit='angstrom', start=w_min, stop=w_max + ) + + # plotting the comparison + index = 0 + for i, wave in enumerate(waves): + if any( + select in modes[i].caption for select in what + ): # select what you want to compare + for j in pulses[index]: + info = '' + if len(pulses[index]) > 1: + info = f' - P{j + 1}' + + wave_ = wave['pulse', j].hist(wavelength=bins) + mode_comp_ax.plot( + sc.midpoints(wave_.coords['wavelength']), + wave_, + ('--' if 'modulation' in modes[i].caption else '-'), + label=modes[i].caption + info, + ) + index += 1 + + mode_comp_ax.legend(fontsize=10) + mode_comp_ax.set_title('BEER modes intensity comparison at sample position') + mode_comp_ax.set_xlabel('Wavelength [Å]') + mode_comp_ax.set_ylabel('[counts]') + + fig_mode_comp.canvas.manager.set_window_title( + 'BEER modes intensity comparison at sample position' + ) + fig_mode_comp.tight_layout() + + if save_fig: + fig_mode_comp.savefig( + 'BEER_modes_intensity_comparison_at_sample_position.pdf', format='pdf' + )
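+
+
+# %% Example usage
+
+if __name__ == '__main__':
+    # Minimal usage sketch: simulate one of the predefined modes, plot the
+    # detector and chopper information, and compare the wavelength
+    # distributions of a few modes at the sample position.  The mode IDs,
+    # pulse count and neutron count are only illustrative values.
+    mode_idx, results, tof_fig = run_and_plot_mode('PS2', 2, neutrons=100_000)
+    plot_detectors(results, mode_idx, det_index=-1, pulse_list=[0])
+    plot_choppers(results, mode_idx, chop_index=0, pulse_list=[0])
+
+    waves = get_wave_for_all_modes(neutrons=100_000, pulses=2)
+    plot_comparison(waves, ['PS1', 'PS2', 'PS3'])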