diff --git a/docs/user-guide/common/beam-center-finder.ipynb b/docs/user-guide/common/beam-center-finder.ipynb index 45ee74b8..a1a68357 100644 --- a/docs/user-guide/common/beam-center-finder.ipynb +++ b/docs/user-guide/common/beam-center-finder.ipynb @@ -79,7 +79,7 @@ "outputs": [], "source": [ "workflow[BeamCenter] = sc.vector([0, 0, 0], unit='m')\n", - "raw = workflow.compute(DetectorData[SampleRun])['spectrum', :61440]\n", + "raw = workflow.compute(RawDetector[SampleRun])['spectrum', :61440]\n", "\n", "p = isis.plot_flat_detector_xy(raw.hist(), norm='log')\n", "p.ax.plot(0, 0, '+', color='k', ms=10)\n", diff --git a/docs/user-guide/isis/sans2d.ipynb b/docs/user-guide/isis/sans2d.ipynb index d7329e59..01d93f44 100644 --- a/docs/user-guide/isis/sans2d.ipynb +++ b/docs/user-guide/isis/sans2d.ipynb @@ -82,8 +82,8 @@ "metadata": {}, "source": [ "The workflow can be visualized as a graph.\n", - "For readability we show only sub-workflow for computing `IofQ[Sample]`.\n", - "The workflow can actually compute the full `BackgroundSubtractedIofQ`, which applies and equivalent workflow to the background run, before a subtraction step:" + "For readability we show only the sub-workflow for computing `IntensityQ[Sample]`.\n", + "The workflow can actually compute the full `BackgroundSubtractedIntensityQ`, which applies an equivalent workflow to the background run, before a subtraction step:" ] }, { @@ -94,7 +94,7 @@ "outputs": [], "source": [ "# left-right layout works better for this graph\n", - "workflow.visualize(IofQ[SampleRun], graph_attr={\"rankdir\": \"LR\"})" + "workflow.visualize(IntensityQ[SampleRun], graph_attr={\"rankdir\": \"LR\"})" ] }, { @@ -224,7 +224,7 @@ "metadata": {}, "outputs": [], "source": [ - "result = workflow.compute(BackgroundSubtractedIofQ)\n", + "result = workflow.compute(BackgroundSubtractedIntensityQ)\n", "result.hist().plot(scale={'Q': 'log'}, norm='log')" ] }, @@ -264,7 +264,7 @@ "outputs": [], "source": [ "workflow[UncertaintyBroadcastMode] = 
UncertaintyBroadcastMode.drop\n", - "result_drop = workflow.compute(BackgroundSubtractedIofQ)\n", + "result_drop = workflow.compute(BackgroundSubtractedIntensityQ)\n", "# Reset the UnsertaintyBroadcastMode to the old value\n", "workflow[UncertaintyBroadcastMode] = UncertaintyBroadcastMode.upper_bound\n", "sc.DataGroup(upper_bound=result, dropped=result_drop).hist().plot(norm='log')" @@ -323,7 +323,7 @@ " WavelengthScaledQ[SampleRun, Numerator],\n", " WavelengthScaledQ[SampleRun, Denominator],\n", ")\n", - "iofqs = (IofQ[SampleRun], IofQ[BackgroundRun], BackgroundSubtractedIofQ)\n", + "iofqs = (IntensityQ[SampleRun], IntensityQ[BackgroundRun], BackgroundSubtractedIntensityQ)\n", "keys = (*monitors, MaskedData[SampleRun], *parts, *iofqs)\n", "\n", "results = workflow.compute(keys)\n", @@ -393,7 +393,7 @@ "metadata": {}, "outputs": [], "source": [ - "result = workflow.compute(BackgroundSubtractedIofQ)\n", + "result = workflow.compute(BackgroundSubtractedIntensityQ)\n", "result" ] }, diff --git a/docs/user-guide/isis/zoom.ipynb b/docs/user-guide/isis/zoom.ipynb index f572ff3e..7b69647f 100644 --- a/docs/user-guide/isis/zoom.ipynb +++ b/docs/user-guide/isis/zoom.ipynb @@ -165,7 +165,7 @@ "outputs": [], "source": [ "# left-right layout works better for this graph\n", - "workflow.visualize(IofQ[SampleRun], graph_attr={'rankdir': 'LR'})" + "workflow.visualize(IntensityQ[SampleRun], graph_attr={'rankdir': 'LR'})" ] }, { @@ -210,7 +210,7 @@ "metadata": {}, "outputs": [], "source": [ - "da = workflow.compute(IofQ[SampleRun])\n", + "da = workflow.compute(IntensityQ[SampleRun])\n", "da.plot(norm='log', scale={'Q': 'log'})" ] }, @@ -237,7 +237,7 @@ " WavelengthScaledQ[SampleRun, Numerator],\n", " WavelengthScaledQ[SampleRun, Denominator],\n", ")\n", - "iofqs = (IofQ[SampleRun],)\n", + "iofqs = (IntensityQ[SampleRun],)\n", "keys = (*monitors, MaskedData[SampleRun], *parts, *iofqs)\n", "\n", "results = workflow.compute(keys)\n", @@ -277,7 +277,7 @@ "source": [ "## Computing 
Qx/Qy\n", "\n", - "To compute $I(Q_{x}, Q_{y})$ instead of the one-dimensional $I(Q)$, we can compute `IofQxy` instead of `IofQ`.\n", + "To compute $I(Q_{x}, Q_{y})$ instead of the one-dimensional $I(Q)$, we can compute `IntensityQxy` instead of `IntensityQ`.\n", "For this to work, we need to define `QxBins` and `QyBins` in our parameters:" ] }, { @@ -291,7 +291,7 @@ "workflow[QxBins] = sc.linspace('Qx', start=-0.5, stop=0.5, num=101, unit='1/angstrom')\n", "workflow[QyBins] = sc.linspace('Qy', start=-0.8, stop=0.8, num=101, unit='1/angstrom')\n", "\n", - "iqxqy = workflow.compute(IofQxy[SampleRun])\n", + "iqxqy = workflow.compute(IntensityQxy[SampleRun])\n", "iqxqy.plot(norm='log', aspect='equal')" ] } diff --git a/docs/user-guide/loki/loki-direct-beam.ipynb b/docs/user-guide/loki/loki-direct-beam.ipynb index 46f1f5f9..14bf6626 100644 --- a/docs/user-guide/loki/loki-direct-beam.ipynb +++ b/docs/user-guide/loki/loki-direct-beam.ipynb @@ -95,8 +95,8 @@ "metadata": {}, "source": [ "The workflow can be visualized as a graph.\n", - "For readability we show only sub-workflow for computing `IofQ[Sample]`.\n", - "The workflow can actually compute the full `BackgroundSubtractedIofQ`, which applies and equivalent workflow to the background run, before a subtraction step:" + "For readability we show only the sub-workflow for computing `IntensityQ[Sample]`.\n", + "The workflow can actually compute the full `BackgroundSubtractedIntensityQ`, which applies an equivalent workflow to the background run, before a subtraction step:" ] }, { @@ -106,7 +106,7 @@ "metadata": {}, "outputs": [], "source": [ - "workflow.visualize(IofQ[SampleRun], compact=True, graph_attr={'rankdir': 'LR'})" + "workflow.visualize(IntensityQ[SampleRun], compact=True, graph_attr={'rankdir': 'LR'})" ] }, { @@ -546,7 +546,7 @@ "metadata": {}, "outputs": [], "source": [ - "workflow.visualize(BackgroundSubtractedIofQ, compact=True, graph_attr={'rankdir': 'LR'})" + "workflow.visualize(BackgroundSubtractedIntensityQ, 
compact=True, graph_attr={'rankdir': 'LR'})" ] }, { diff --git a/docs/user-guide/loki/loki-iofq.ipynb b/docs/user-guide/loki/loki-iofq.ipynb index 90988ed7..ee6646d1 100644 --- a/docs/user-guide/loki/loki-iofq.ipynb +++ b/docs/user-guide/loki/loki-iofq.ipynb @@ -173,7 +173,7 @@ "metadata": {}, "outputs": [], "source": [ - "workflow.visualize(BackgroundSubtractedIofQ, compact=True, graph_attr={\"rankdir\": \"LR\"})" + "workflow.visualize(BackgroundSubtractedIntensityQ, compact=True, graph_attr={\"rankdir\": \"LR\"})" ] }, { @@ -210,7 +210,7 @@ "metadata": {}, "outputs": [], "source": [ - "da = workflow.compute(BackgroundSubtractedIofQ)\n", + "da = workflow.compute(BackgroundSubtractedIntensityQ)\n", "da.plot(norm=\"log\", title=\"ISIS polymer\")" ] }, @@ -230,7 +230,7 @@ "outputs": [], "source": [ "workflow[WavelengthBands] = sc.linspace(\"wavelength\", 1.0, 13.0, 11, unit=\"angstrom\")\n", - "da_bands = workflow.compute(BackgroundSubtractedIofQ)\n", + "da_bands = workflow.compute(BackgroundSubtractedIntensityQ)\n", "da_bands" ] }, @@ -287,7 +287,7 @@ "outputs": [], "source": [ "# I(Q)\n", - "da = workflow.compute(BackgroundSubtractedIofQ)\n", + "da = workflow.compute(BackgroundSubtractedIntensityQ)\n", "da.plot(norm=\"log\", title=\"AgBeh sample\")" ] }, @@ -315,7 +315,7 @@ "workflow[BeamCenter] = sans.beam_center_from_center_of_mass(workflow)\n", "\n", "# I(Q)\n", - "da = workflow.compute(BackgroundSubtractedIofQ)\n", + "da = workflow.compute(BackgroundSubtractedIntensityQ)\n", "da.plot(norm=\"log\", title=\"Porous silica\")" ] }, @@ -343,7 +343,7 @@ "workflow[BeamCenter] = sans.beam_center_from_center_of_mass(workflow)\n", "\n", "# I(Q)\n", - "da = workflow.compute(BackgroundSubtractedIofQ)\n", + "da = workflow.compute(BackgroundSubtractedIntensityQ)\n", "da.plot(norm=\"log\", title=\"deut-SDS\")" ] } diff --git a/docs/user-guide/loki/workflow-widget-loki.ipynb b/docs/user-guide/loki/workflow-widget-loki.ipynb index 93eaeb36..9091cfe6 100644 --- 
a/docs/user-guide/loki/workflow-widget-loki.ipynb +++ b/docs/user-guide/loki/workflow-widget-loki.ipynb @@ -50,11 +50,11 @@ "keys, values = zip(*select.options, strict=True)\n", "ind = keys.index(\"LokiAtLarmorTutorialWorkflow\")\n", "select.value = values[ind]\n", - "# Select IofQ[SampleRun] output\n", + "# Select IntensityQ[SampleRun] output\n", "wfw = widget.children[1].children[0]\n", "outputs = wfw.output_selection_box.typical_outputs_widget\n", "keys, values = zip(*outputs.options, strict=True)\n", - "ind = keys.index(\"IofQ[SampleRun]\")\n", + "ind = keys.index(\"IntensityQ[SampleRun]\")\n", "outputs.value = (values[ind],)\n", "# Refresh parameters\n", "pbox = wfw.parameter_box\n", diff --git a/pyproject.toml b/pyproject.toml index 4e80336d..b29ab68e 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -32,7 +32,7 @@ requires-python = ">=3.11" dependencies = [ "dask>=2022.1.0", "graphviz>=0.20", - "essreduce>=25.10.1", + "essreduce>=25.11.0", "numpy>=1.26.4", "pandas>=2.1.2", "plopp>=25.03.0", diff --git a/requirements/base.in b/requirements/base.in index f2526649..e770d5d4 100644 --- a/requirements/base.in +++ b/requirements/base.in @@ -4,7 +4,7 @@ # The following was generated by 'tox -e deps', DO NOT EDIT MANUALLY! dask>=2022.1.0 graphviz>=0.20 -essreduce>=25.10.1 +essreduce>=25.11.0 numpy>=1.26.4 pandas>=2.1.2 plopp>=25.03.0 diff --git a/requirements/base.txt b/requirements/base.txt index f9f11792..4144a855 100644 --- a/requirements/base.txt +++ b/requirements/base.txt @@ -1,4 +1,4 @@ -# SHA1:4070c98e9bfb765d14716fce509312b2102adb65 +# SHA1:00b9bcd18f145ced32ae50ff97e174fb9fc96093 # # This file was generated by pip-compile-multi. 
# To update, run: @@ -11,7 +11,7 @@ asttokens==3.0.0 # via stack-data click==8.3.0 # via dask -cloudpickle==3.1.1 +cloudpickle==3.1.2 # via dask comm==0.2.3 # via ipywidgets @@ -29,13 +29,13 @@ dnspython==2.8.0 # via email-validator email-validator==2.3.0 # via scippneutron -essreduce==25.10.1 +essreduce==25.11.0 # via -r base.in executing==2.2.1 # via stack-data fonttools==4.60.1 # via matplotlib -fsspec==2025.9.0 +fsspec==2025.10.0 # via dask graphviz==0.21 # via -r base.in @@ -53,13 +53,13 @@ ipython==9.6.0 # via ipywidgets ipython-pygments-lexers==1.1.1 # via ipython -ipywidgets==8.1.7 +ipywidgets==8.1.8 # via # ipydatawidgets # pythreejs jedi==0.19.2 # via ipython -jupyterlab-widgets==3.0.15 +jupyterlab-widgets==3.0.16 # via ipywidgets kiwisolver==1.4.9 # via matplotlib @@ -138,11 +138,11 @@ pytz==2025.2 # via pandas pyyaml==6.0.3 # via dask -sciline==25.8.0 +sciline==25.11.1 # via # -r base.in # essreduce -scipp==25.8.0 +scipp==25.11.0 # via # -r base.in # essreduce @@ -183,7 +183,6 @@ typing-extensions==4.15.0 # ipython # pydantic # pydantic-core - # sciline # typing-inspection typing-inspection==0.4.2 # via pydantic @@ -191,7 +190,7 @@ tzdata==2025.2 # via pandas wcwidth==0.2.14 # via prompt-toolkit -widgetsnbextension==4.0.14 +widgetsnbextension==4.0.15 # via ipywidgets zipp==3.23.0 # via importlib-metadata diff --git a/requirements/dev.txt b/requirements/dev.txt index 0ebd00b6..4afeb88a 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -79,7 +79,7 @@ pip-compile-multi==3.2.2 # via -r dev.in pip-tools==7.5.1 # via pip-compile-multi -plumbum==1.9.0 +plumbum==1.10.0 # via copier prometheus-client==0.23.1 # via jupyter-server @@ -111,7 +111,7 @@ toposort==1.10 # via pip-compile-multi uri-template==1.3.0 # via jsonschema -webcolors==24.11.1 +webcolors==25.10.0 # via jsonschema websocket-client==1.9.0 # via jupyter-server diff --git a/requirements/docs.txt b/requirements/docs.txt index e5d5574e..75f10a5e 100644 --- a/requirements/docs.txt +++ 
b/requirements/docs.txt @@ -106,7 +106,7 @@ platformdirs==4.5.0 # pooch pooch==1.8.2 # via -r docs.in -psutil==7.1.2 +psutil==7.1.3 # via ipykernel pydantic-settings==2.11.0 # via autodoc-pydantic diff --git a/requirements/nightly.txt b/requirements/nightly.txt index cf369a7f..03e9bcf1 100644 --- a/requirements/nightly.txt +++ b/requirements/nightly.txt @@ -18,7 +18,7 @@ charset-normalizer==3.4.4 # via requests click==8.3.0 # via dask -cloudpickle==3.1.1 +cloudpickle==3.1.2 # via dask comm==0.2.3 # via ipywidgets @@ -42,7 +42,7 @@ executing==2.2.1 # via stack-data fonttools==4.60.1 # via matplotlib -fsspec==2025.9.0 +fsspec==2025.10.0 # via dask graphviz==0.21 # via -r nightly.in @@ -64,13 +64,13 @@ ipython==9.6.0 # via ipywidgets ipython-pygments-lexers==1.1.1 # via ipython -ipywidgets==8.1.7 +ipywidgets==8.1.8 # via # ipydatawidgets # pythreejs jedi==0.19.2 # via ipython -jupyterlab-widgets==3.0.15 +jupyterlab-widgets==3.0.16 # via ipywidgets kiwisolver==1.4.10rc0 # via matplotlib @@ -207,7 +207,6 @@ typing-extensions==4.15.0 # ipython # pydantic # pydantic-core - # sciline # typing-inspection typing-inspection==0.4.2 # via pydantic @@ -217,7 +216,7 @@ urllib3==2.5.0 # via requests wcwidth==0.2.14 # via prompt-toolkit -widgetsnbextension==4.0.14 +widgetsnbextension==4.0.15 # via ipywidgets zipp==3.23.0 # via importlib-metadata diff --git a/src/ess/isissans/general.py b/src/ess/isissans/general.py index f5231d96..ce9e90da 100644 --- a/src/ess/isissans/general.py +++ b/src/ess/isissans/general.py @@ -14,24 +14,24 @@ from ess.reduce.nexus.types import NeXusTransformation, Position from ess.sans.types import ( BeamCenter, - CalibratedDetector, - CalibratedMonitor, - DetectorData, DetectorIDs, DetectorPixelShape, DetectorPositionOffset, + EmptyDetector, + EmptyMonitor, Incident, Measurement, - MonitorData, MonitorPositionOffset, MonitorType, NeXusComponent, NeXusMonitorName, NonBackgroundWavelengthRange, + RawDetector, + RawMonitor, RunType, SampleRun, 
ScatteringRunType, - TofData, + TofDetector, TofMonitor, Transmission, ) @@ -128,7 +128,7 @@ def get_calibrated_isis_detector( detector: NeXusComponent[snx.NXdetector, RunType], *, offset: DetectorPositionOffset[RunType], -) -> CalibratedDetector[RunType]: +) -> EmptyDetector[RunType]: """ Replacement for :py:func:`ess.reduce.nexus.workflow.get_calibrated_detector`. @@ -142,7 +142,7 @@ def get_calibrated_isis_detector( """ da = detector['data'] position = detector['data'].coords['position'] - return CalibratedDetector[RunType]( + return EmptyDetector[RunType]( da.assign_coords(position=position + offset.to(unit=position.unit)) ) @@ -191,29 +191,29 @@ def get_monitor_data( def dummy_assemble_detector_data( - detector: CalibratedDetector[RunType], -) -> DetectorData[RunType]: + detector: EmptyDetector[RunType], +) -> RawDetector[RunType]: """Dummy assembly of detector data, detector already contains neutron data.""" - return DetectorData[RunType](detector) + return RawDetector[RunType](detector) def dummy_assemble_monitor_data( - monitor: CalibratedMonitor[RunType, MonitorType], -) -> MonitorData[RunType, MonitorType]: + monitor: EmptyMonitor[RunType, MonitorType], +) -> RawMonitor[RunType, MonitorType]: """Dummy assembly of monitor data, monitor already contains neutron data.""" - return MonitorData[RunType, MonitorType](monitor) + return RawMonitor[RunType, MonitorType](monitor) def data_to_tof( - da: DetectorData[ScatteringRunType], -) -> TofData[ScatteringRunType]: + da: RawDetector[ScatteringRunType], +) -> TofDetector[ScatteringRunType]: """Dummy conversion of data to time-of-flight data. The data already has a time-of-flight coordinate.""" - return TofData[ScatteringRunType](da) + return TofDetector[ScatteringRunType](da) def monitor_to_tof( - da: MonitorData[RunType, MonitorType], + da: RawMonitor[RunType, MonitorType], ) -> TofMonitor[RunType, MonitorType]: """Dummy conversion of monitor data to time-of-flight data. 
The monitor data already has a time-of-flight coordinate.""" @@ -262,7 +262,7 @@ def lab_frame_transform() -> NeXusTransformation[snx.NXdetector, ScatteringRunTy ) -def get_detector_ids_from_sample_run(data: TofData[SampleRun]) -> DetectorIDs: +def get_detector_ids_from_sample_run(data: TofDetector[SampleRun]) -> DetectorIDs: """Extract detector IDs from sample run. This overrides the function in the masking module which gets the detector IDs from diff --git a/src/ess/isissans/sans2d.py b/src/ess/isissans/sans2d.py index 37d4530f..f57736fc 100644 --- a/src/ess/isissans/sans2d.py +++ b/src/ess/isissans/sans2d.py @@ -8,7 +8,7 @@ from ess.reduce.workflow import register_workflow from ess.sans import SansWorkflow from ess.sans.parameters import typical_outputs -from ess.sans.types import BeamCenter, CalibratedDetector, DetectorMasks, SampleRun +from ess.sans.types import BeamCenter, DetectorMasks, EmptyDetector, SampleRun from .general import default_parameters from .io import load_tutorial_direct_beam, load_tutorial_run @@ -26,7 +26,7 @@ def detector_edge_mask( - beam_center: BeamCenter, sample: CalibratedDetector[SampleRun] + beam_center: BeamCenter, sample: EmptyDetector[SampleRun] ) -> DetectorEdgeMask: # These values were determined by hand before the beam center was available. # We therefore undo the shift introduced by the beam center. @@ -39,7 +39,7 @@ def detector_edge_mask( def sample_holder_mask( beam_center: BeamCenter, - sample: CalibratedDetector[SampleRun], + sample: EmptyDetector[SampleRun], low_counts_threshold: LowCountThreshold, ) -> SampleHolderMask: # These values were determined by hand before the beam center was available. 
diff --git a/src/ess/loki/live.py b/src/ess/loki/live.py index ac490eb6..b0335dd8 100644 --- a/src/ess/loki/live.py +++ b/src/ess/loki/live.py @@ -17,22 +17,22 @@ from ess.sans import with_pixel_mask_filenames from ess.sans.types import ( BackgroundRun, - BackgroundSubtractedIofQ, - BackgroundSubtractedIofQxy, + BackgroundSubtractedIntensityQ, + BackgroundSubtractedIntensityQxy, BeamCenter, CorrectForGravity, Denominator, - DetectorData, DirectBeamFilename, EmptyBeamRun, Filename, Incident, - IofQ, - IofQxy, + IntensityQ, + IntensityQxy, Numerator, QBins, QxBins, QyBins, + RawDetector, ReducedQ, ReducedQxy, ReturnEvents, @@ -49,7 +49,7 @@ RawDetectorView = NewType('RawDetectorView', sc.DataArray) -def _raw_detector_view(data: DetectorData[SampleRun]) -> RawDetectorView: +def _raw_detector_view(data: RawDetector[SampleRun]) -> RawDetectorView: """Very simple raw detector view for initial testing.""" # Instead of histogramming concrete x and y (which leads to artifacts), another # quick option is to slice/sum some dimensions. 
But it will not give true positions: @@ -181,9 +181,9 @@ def make_sample_run_workflow( try: workflow.compute(Filename[BackgroundRun]) except sciline.UnsatisfiedRequirement: - iofq_keys = (IofQ[SampleRun], IofQxy[SampleRun]) + iofq_keys = (IntensityQ[SampleRun], IntensityQxy[SampleRun]) else: - iofq_keys = (BackgroundSubtractedIofQ, BackgroundSubtractedIofQxy) + iofq_keys = (BackgroundSubtractedIntensityQ, BackgroundSubtractedIntensityQxy) outputs.update(dict(zip(('I(Q)', '$I(Q_x, Q_y)$'), iofq_keys, strict=True))) factories = AccumulatorFactories(accum=streaming.RollingAccumulator, window=20) diff --git a/src/ess/loki/workflow.py b/src/ess/loki/workflow.py index c34b8073..0f358a5a 100644 --- a/src/ess/loki/workflow.py +++ b/src/ess/loki/workflow.py @@ -17,14 +17,12 @@ BackgroundRun, BeamCenter, DetectorBankSizes, - DetectorData, DetectorPixelShape, DirectBeam, DirectBeamFilename, EmptyBeamRun, Filename, Incident, - MonitorData, MonitorType, NeXusComponent, NeXusDetectorName, @@ -32,10 +30,12 @@ NonBackgroundWavelengthRange, PixelMaskFilename, PixelShapePath, + RawDetector, + RawMonitor, RunType, SampleRun, ScatteringRunType, - TofData, + TofDetector, TofMonitor, Transmission, TransmissionRun, @@ -66,13 +66,13 @@ def _convert_to_tof(da: sc.DataArray) -> sc.DataArray: def data_to_tof( - da: DetectorData[ScatteringRunType], -) -> TofData[ScatteringRunType]: - return TofData[ScatteringRunType](_convert_to_tof(da)) + da: RawDetector[ScatteringRunType], +) -> TofDetector[ScatteringRunType]: + return TofDetector[ScatteringRunType](_convert_to_tof(da)) def monitor_to_tof( - da: MonitorData[RunType, MonitorType], + da: RawMonitor[RunType, MonitorType], ) -> TofMonitor[RunType, MonitorType]: return TofMonitor[RunType, MonitorType](_convert_to_tof(da)) diff --git a/src/ess/sans/__init__.py b/src/ess/sans/__init__.py index d4e41373..e7e07d04 100644 --- a/src/ess/sans/__init__.py +++ b/src/ess/sans/__init__.py @@ -15,7 +15,7 @@ ) from .beam_center_finder import 
beam_center_from_center_of_mass from .direct_beam import direct_beam -from .types import BackgroundSubtractedIofQ, IofQ, ReturnEvents, SampleRun +from .types import BackgroundSubtractedIntensityQ, IntensityQ, ReturnEvents, SampleRun from .workflow import ( SansWorkflow, providers, @@ -33,8 +33,8 @@ del importlib __all__ = [ - 'BackgroundSubtractedIofQ', - 'IofQ', + 'BackgroundSubtractedIntensityQ', + 'IntensityQ', 'ReturnEvents', 'SampleRun', 'SansWorkflow', diff --git a/src/ess/sans/beam_center_finder.py b/src/ess/sans/beam_center_finder.py index ef9e82ef..ae4324e1 100644 --- a/src/ess/sans/beam_center_finder.py +++ b/src/ess/sans/beam_center_finder.py @@ -18,7 +18,7 @@ CleanDirectBeam, DetectorBankSizes, DimsToKeep, - IofQ, + IntensityQ, MaskedData, NeXusComponent, QBins, @@ -171,7 +171,7 @@ def _iofq_in_quadrants( for i, quad in enumerate(quadrants): # Select pixels based on phi sel = (phi >= phi_bins[i]) & (phi < phi_bins[i + 1]) - # The beam center is applied when computing CalibratedDetector, set quadrant + # The beam center is applied when computing EmptyDetector, set quadrant # *before* that step. 
workflow[NeXusComponent[snx.NXdetector, SampleRun]] = sc.DataGroup( data=detector[sel] @@ -179,7 +179,7 @@ def _iofq_in_quadrants( # MaskedData would be computed automatically, but we did it above already workflow[MaskedData[SampleRun]] = calibrated[sel] workflow[CleanDirectBeam] = norm if norm.dims == ('wavelength',) else norm[sel] - out[quad] = workflow.compute(IofQ[SampleRun]) + out[quad] = workflow.compute(IntensityQ[SampleRun]) return out diff --git a/src/ess/sans/conversions.py b/src/ess/sans/conversions.py index ce157dd9..3404dcfc 100644 --- a/src/ess/sans/conversions.py +++ b/src/ess/sans/conversions.py @@ -13,18 +13,20 @@ from .common import mask_range from .types import ( - CleanQ, - CleanQxy, - CleanSummedQ, - CleanSummedQxy, - CleanWavelength, + CorrectedDetector, + CorrectedQ, + CorrectedQxy, CorrectForGravity, Denominator, - IofQPart, + DimsToKeep, + IntensityQPart, MaskedData, MonitorTerm, MonitorType, Numerator, + QBins, + QxBins, + QyBins, RunType, ScatteringRunType, TofMonitor, @@ -165,14 +167,14 @@ def monitor_to_wavelength( def detector_to_wavelength( detector: MaskedData[ScatteringRunType], graph: ElasticCoordTransformGraph, -) -> CleanWavelength[ScatteringRunType, Numerator]: - return CleanWavelength[ScatteringRunType, Numerator]( +) -> CorrectedDetector[ScatteringRunType, Numerator]: + return CorrectedDetector[ScatteringRunType, Numerator]( detector.transform_coords('wavelength', graph=graph, keep_inputs=False) ) def mask_wavelength_q( - da: CleanSummedQ[ScatteringRunType, Numerator], mask: WavelengthMask + da: CorrectedQ[ScatteringRunType, Numerator], mask: WavelengthMask ) -> WavelengthScaledQ[ScatteringRunType, Numerator]: if mask is not None: da = mask_range(da, mask=mask) @@ -180,7 +182,7 @@ def mask_wavelength_q( def mask_wavelength_qxy( - da: CleanSummedQxy[ScatteringRunType, Numerator], mask: WavelengthMask + da: CorrectedQxy[ScatteringRunType, Numerator], mask: WavelengthMask ) -> WavelengthScaledQxy[ScatteringRunType, Numerator]: if 
mask is not None: da = mask_range(da, mask=mask) @@ -188,7 +190,7 @@ def mask_wavelength_qxy( def mask_and_scale_wavelength_q( - da: CleanSummedQ[ScatteringRunType, Denominator], + da: CorrectedQ[ScatteringRunType, Denominator], mask: WavelengthMask, wavelength_term: MonitorTerm[ScatteringRunType], uncertainties: UncertaintyBroadcastMode, @@ -200,7 +202,7 @@ def mask_and_scale_wavelength_q( def mask_and_scale_wavelength_qxy( - da: CleanSummedQxy[ScatteringRunType, Denominator], + da: CorrectedQxy[ScatteringRunType, Denominator], mask: WavelengthMask, wavelength_term: MonitorTerm[ScatteringRunType], uncertainties: UncertaintyBroadcastMode, @@ -212,40 +214,107 @@ def mask_and_scale_wavelength_qxy( def _compute_Q( - data: sc.DataArray, graph: ElasticCoordTransformGraph, target: tuple[str, ...] + data: sc.DataArray, + graph: ElasticCoordTransformGraph, + target: tuple[str, ...], + edges: dict[str, sc.Variable], + dims_to_keep: tuple[str, ...], ) -> sc.DataArray: # Keep naming of wavelength dim, subsequent steps use a (Q[xy], wavelength) binning. - return CleanQ[ScatteringRunType, IofQPart]( - data.transform_coords( - target, - graph=graph, - keep_intermediate=False, - rename_dims=False, - ) + data_q = data.transform_coords( + target, + graph=graph, + keep_intermediate=False, + rename_dims=False, + ) + dims_to_reduce = set(data_q.dims) - {'wavelength'} - set(dims_to_keep or ()) + return (data_q.hist if data_q.bins is None else data_q.bin)( + **edges, dim=dims_to_reduce ) def compute_Q( - data: CleanWavelength[ScatteringRunType, IofQPart], + data: CorrectedDetector[ScatteringRunType, IntensityQPart], + q_bins: QBins, + dims_to_keep: DimsToKeep, graph: ElasticCoordTransformGraph, -) -> CleanQ[ScatteringRunType, IofQPart]: +) -> CorrectedQ[ScatteringRunType, IntensityQPart]: """ Convert a data array from wavelength to Q. 
+ We then combine data from all pixels into a single I(Q) spectrum: + + * In the case of event data, events in all bins are concatenated + * In the case of dense data, counts in all spectra are summed + + Parameters + ---------- + data: + A DataArray containing the data that is to be converted to Q. + q_bins: + The binning in Q to be used. + dims_to_keep: + Dimensions that should not be reduced and thus still be present in the final + I(Q) result (this is typically the layer dimension). + graph: + The coordinate transformation graph to use. + + Returns + ------- + : + The input data converted to Q and then summed over all detector pixels. """ - return CleanQ[ScatteringRunType, IofQPart]( - _compute_Q(data=data, graph=graph, target=('Q',)) + return CorrectedQ[ScatteringRunType, IntensityQPart]( + _compute_Q( + data=data, + graph=graph, + target=('Q',), + edges={'Q': q_bins}, + dims_to_keep=dims_to_keep, + ) ) def compute_Qxy( - data: CleanWavelength[ScatteringRunType, IofQPart], + data: CorrectedDetector[ScatteringRunType, IntensityQPart], + qx_bins: QxBins, + qy_bins: QyBins, + dims_to_keep: DimsToKeep, graph: ElasticCoordTransformGraph, -) -> CleanQxy[ScatteringRunType, IofQPart]: +) -> CorrectedQxy[ScatteringRunType, IntensityQPart]: """ Convert a data array from wavelength to Qx and Qy. + We then combine data from all pixels into a single I(Qx, Qy) spectrum: + + * In the case of event data, events in all bins are concatenated + * In the case of dense data, counts in all spectra are summed + + Parameters + ---------- + data: + A DataArray containing the data that is to be converted to Q. + qx_bins: + The binning in Qx to be used. + qy_bins: + The binning in Qy to be used. + dims_to_keep: + Dimensions that should not be reduced and thus still be present in the final + I(Qx, Qy) result (this is typically the layer dimension). + graph: + The coordinate transformation graph to use. 
+ + Returns + ------- + : + The input data converted to Qx and Qy and then summed over all detector pixels. """ - return CleanQxy[ScatteringRunType, IofQPart]( - _compute_Q(data=data, graph=graph, target=('Qx', 'Qy')) + return CorrectedQxy[ScatteringRunType, IntensityQPart]( + _compute_Q( + data=data, + graph=graph, + target=('Qx', 'Qy'), + edges={'Qy': qy_bins, 'Qx': qx_bins}, + dims_to_keep=dims_to_keep, + ) ) diff --git a/src/ess/sans/direct_beam.py b/src/ess/sans/direct_beam.py index db2bd86a..0a7b6100 100644 --- a/src/ess/sans/direct_beam.py +++ b/src/ess/sans/direct_beam.py @@ -7,7 +7,7 @@ from .i_of_q import resample_direct_beam from .types import ( BackgroundRun, - BackgroundSubtractedIofQ, + BackgroundSubtractedIntensityQ, Denominator, DirectBeam, Numerator, @@ -130,9 +130,9 @@ def direct_beam(*, workflow: Pipeline, I0: sc.Variable, niter: int = 5) -> list[ # The first time we compute I(Q), the direct beam function is not in the # parameters, nor given by any providers, so it will be considered flat. 
workflow[WavelengthBands] = full_wavelength_range - iofq_full = workflow.compute(BackgroundSubtractedIofQ) + iofq_full = workflow.compute(BackgroundSubtractedIntensityQ) workflow[WavelengthBands] = bands - iofq_bands = workflow.compute(BackgroundSubtractedIofQ) + iofq_bands = workflow.compute(BackgroundSubtractedIntensityQ) if direct_beam_function is None: # Make a flat direct beam diff --git a/src/ess/sans/i_of_q.py b/src/ess/sans/i_of_q.py index 8348b6fc..8ccc3e55 100644 --- a/src/ess/sans/i_of_q.py +++ b/src/ess/sans/i_of_q.py @@ -9,28 +9,18 @@ from .logging import get_logger from .types import ( BackgroundRun, - BackgroundSubtractedIofQ, - BackgroundSubtractedIofQxy, + BackgroundSubtractedIntensityQ, + BackgroundSubtractedIntensityQxy, CleanDirectBeam, - CleanMonitor, - CleanQ, - CleanQxy, - CleanSummedQ, - CleanSummedQxy, - DimsToKeep, + CorrectedMonitor, DirectBeam, - IofQ, - IofQPart, - IofQxy, + IntensityQ, + IntensityQxy, MonitorType, NonBackgroundWavelengthRange, - QBins, - QxBins, - QyBins, ReturnEvents, RunType, SampleRun, - ScatteringRunType, WavelengthBins, WavelengthMonitor, ) @@ -41,7 +31,7 @@ def preprocess_monitor_data( wavelength_bins: WavelengthBins, non_background_range: NonBackgroundWavelengthRange, uncertainties: UncertaintyBroadcastMode, -) -> CleanMonitor[RunType, MonitorType]: +) -> CorrectedMonitor[RunType, MonitorType]: """ Prepare monitor data for computing the transmission fraction. The input data are first converted to wavelength (if needed). 
@@ -88,7 +78,7 @@ def preprocess_monitor_data( monitor -= broadcast_uncertainties( background, prototype=monitor, mode=uncertainties ) - return CleanMonitor(monitor) + return CorrectedMonitor(monitor) def resample_direct_beam( @@ -135,81 +125,6 @@ def resample_direct_beam( return CleanDirectBeam(func(wavelength_bins, midpoints=True)) -def bin_in_q( - data: CleanQ[ScatteringRunType, IofQPart], - q_bins: QBins, - dims_to_keep: DimsToKeep, -) -> CleanSummedQ[ScatteringRunType, IofQPart]: - """ - Merges data from all pixels into a single I(Q) spectrum: - - * In the case of event data, events in all bins are concatenated - * In the case of dense data, counts in all spectra are summed - - Parameters - ---------- - data: - A DataArray containing the data that is to be converted to Q. - q_bins: - The binning in Q to be used. - dims_to_keep: - Dimensions that should not be reduced and thus still be present in the final - I(Q) result (this is typically the layer dimension). - - Returns - ------- - : - The input data converted to Q and then summed over all detector pixels. - """ - out = _bin_in_q(data=data, edges={'Q': q_bins}, dims_to_keep=dims_to_keep) - return CleanSummedQ[ScatteringRunType, IofQPart](out) - - -def bin_in_qxy( - data: CleanQxy[ScatteringRunType, IofQPart], - qx_bins: QxBins, - qy_bins: QyBins, - dims_to_keep: DimsToKeep, -) -> CleanSummedQxy[ScatteringRunType, IofQPart]: - """ - Merges data from all pixels into a single I(Q) spectrum: - - * In the case of event data, events in all bins are concatenated - * In the case of dense data, counts in all spectra are summed - - Parameters - ---------- - data: - A DataArray containing the data that is to be converted to Q. - qx_bins: - The binning in Qx to be used. - qy_bins: - The binning in Qy to be used. - dims_to_keep: - Dimensions that should not be reduced and thus still be present in the final - I(Q) result (this is typically the layer dimension). 
- - Returns - ------- - : - The input data converted to Qx and Qy and then summed over all detector pixels. - """ - # We make Qx the inner dim, such that plots naturally show Qx on the x-axis. - out = _bin_in_q( - data=data, - edges={'Qy': qy_bins, 'Qx': qx_bins}, - dims_to_keep=dims_to_keep, - ) - return CleanSummedQxy[ScatteringRunType, IofQPart](out) - - -def _bin_in_q( - data: sc.DataArray, edges: dict[str, sc.Variable], dims_to_keep: tuple[str, ...] -) -> sc.DataArray: - dims_to_reduce = set(data.dims) - {'wavelength'} - set(dims_to_keep or ()) - return (data.hist if data.bins is None else data.bin)(**edges, dim=dims_to_reduce) - - def _subtract_background( sample: sc.DataArray, background: sc.DataArray, @@ -225,11 +140,11 @@ def _subtract_background( def subtract_background( - sample: IofQ[SampleRun], - background: IofQ[BackgroundRun], + sample: IntensityQ[SampleRun], + background: IntensityQ[BackgroundRun], return_events: ReturnEvents, -) -> BackgroundSubtractedIofQ: - return BackgroundSubtractedIofQ( +) -> BackgroundSubtractedIntensityQ: + return BackgroundSubtractedIntensityQ( _subtract_background( sample=sample, background=background, return_events=return_events ) @@ -237,11 +152,11 @@ def subtract_background( def subtract_background_xy( - sample: IofQxy[SampleRun], - background: IofQxy[BackgroundRun], + sample: IntensityQxy[SampleRun], + background: IntensityQxy[BackgroundRun], return_events: ReturnEvents, -) -> BackgroundSubtractedIofQxy: - return BackgroundSubtractedIofQxy( +) -> BackgroundSubtractedIntensityQxy: + return BackgroundSubtractedIntensityQxy( _subtract_background( sample=sample, background=background, return_events=return_events ) @@ -251,8 +166,6 @@ def subtract_background_xy( providers = ( preprocess_monitor_data, resample_direct_beam, - bin_in_q, - bin_in_qxy, subtract_background, subtract_background_xy, ) diff --git a/src/ess/sans/io.py b/src/ess/sans/io.py index 472fc625..6c48ddee 100644 --- a/src/ess/sans/io.py +++ 
b/src/ess/sans/io.py @@ -6,7 +6,7 @@ from scippnexus.application_definitions import nxcansas from .types import ( - BackgroundSubtractedIofQ, + BackgroundSubtractedIntensityQ, MaskedDetectorIDs, Measurement, OutFilename, @@ -16,7 +16,7 @@ def save_background_subtracted_iofq( *, - iofq: BackgroundSubtractedIofQ, + iofq: BackgroundSubtractedIntensityQ, out_filename: OutFilename, measurement: Measurement, ) -> None: diff --git a/src/ess/sans/masking.py b/src/ess/sans/masking.py index fe1ad33b..228e2209 100644 --- a/src/ess/sans/masking.py +++ b/src/ess/sans/masking.py @@ -8,19 +8,19 @@ import scipp as sc from .types import ( - CalibratedDetector, DetectorIDs, DetectorMasks, + EmptyDetector, MaskedData, MaskedDetectorIDs, PixelMaskFilename, SampleRun, ScatteringRunType, - TofData, + TofDetector, ) -def get_detector_ids_from_detector(data: CalibratedDetector[SampleRun]) -> DetectorIDs: +def get_detector_ids_from_detector(data: EmptyDetector[SampleRun]) -> DetectorIDs: """Extract detector IDs from a detector.""" return DetectorIDs( data.coords[ @@ -52,7 +52,7 @@ def to_detector_mask( def apply_pixel_masks( - data: TofData[ScatteringRunType], + data: TofDetector[ScatteringRunType], masks: DetectorMasks, ) -> MaskedData[ScatteringRunType]: """Apply pixel-specific masks to raw data. 
diff --git a/src/ess/sans/normalization.py b/src/ess/sans/normalization.py index 4b859ac1..d93411ff 100644 --- a/src/ess/sans/normalization.py +++ b/src/ess/sans/normalization.py @@ -4,24 +4,26 @@ import scippnexus as snx from scipp.core import concepts -from ess.reduce.nexus.types import CalibratedBeamline, NeXusTransformation +from ess.reduce.nexus.types import NeXusTransformation from ess.reduce.uncertainty import UncertaintyBroadcastMode, broadcast_uncertainties from .types import ( CleanDirectBeam, - CleanMonitor, - CleanWavelength, + CorrectedDetector, + CorrectedMonitor, Denominator, DetectorMasks, DetectorPixelShape, EmptyBeamRun, + EmptyDetector, Incident, - IofQ, - IofQPart, - IofQxy, + IntensityQ, + IntensityQPart, + IntensityQxy, MaskedSolidAngle, MonitorTerm, Numerator, + Position, ProcessedWavelengthBands, ReducedQ, ReducedQxy, @@ -39,9 +41,10 @@ def solid_angle( - data: CalibratedBeamline[ScatteringRunType], + data: EmptyDetector[ScatteringRunType], pixel_shape: DetectorPixelShape[ScatteringRunType], transform: NeXusTransformation[snx.NXdetector, ScatteringRunType], + sample_position: Position[snx.NXsample, ScatteringRunType], ) -> SolidAngle[ScatteringRunType]: """ Solid angle for cylindrical pixels. 
@@ -75,7 +78,7 @@ def solid_angle( length = sc.norm(cylinder_axis) omega = _approximate_solid_angle_for_cylinder_shaped_pixel_of_detector( - pixel_position=data.coords['position'] - data.coords['sample_position'], + pixel_position=data.coords['position'] - sample_position, cylinder_axis=cylinder_axis, radius=radius, length=length, @@ -134,12 +137,14 @@ def _approximate_solid_angle_for_cylinder_shaped_pixel_of_detector( def transmission_fraction( - sample_incident_monitor: CleanMonitor[TransmissionRun[ScatteringRunType], Incident], - sample_transmission_monitor: CleanMonitor[ + sample_incident_monitor: CorrectedMonitor[ + TransmissionRun[ScatteringRunType], Incident + ], + sample_transmission_monitor: CorrectedMonitor[ TransmissionRun[ScatteringRunType], Transmission ], - direct_incident_monitor: CleanMonitor[EmptyBeamRun, Incident], - direct_transmission_monitor: CleanMonitor[EmptyBeamRun, Transmission], + direct_incident_monitor: CorrectedMonitor[EmptyBeamRun, Incident], + direct_transmission_monitor: CorrectedMonitor[EmptyBeamRun, Transmission], ) -> TransmissionFraction[ScatteringRunType]: """ Approximation based on equations in @@ -173,7 +178,7 @@ def transmission_fraction( def norm_monitor_term( - incident_monitor: CleanMonitor[ScatteringRunType, Incident], + incident_monitor: CorrectedMonitor[ScatteringRunType, Incident], transmission_fraction: TransmissionFraction[ScatteringRunType], ) -> MonitorTerm[ScatteringRunType]: """ @@ -207,7 +212,7 @@ def norm_detector_term( solid_angle: MaskedSolidAngle[ScatteringRunType], direct_beam: CleanDirectBeam, uncertainties: UncertaintyBroadcastMode, -) -> CleanWavelength[ScatteringRunType, Denominator]: +) -> CorrectedDetector[ScatteringRunType, Denominator]: """ Compute the detector-dependent contribution to the denominator term of I(Q). 
@@ -248,7 +253,7 @@ def norm_detector_term( ) # Convert wavelength coordinate to midpoints for future histogramming out.coords['wavelength'] = sc.midpoints(out.coords['wavelength']) - return CleanWavelength[ScatteringRunType, Denominator](out) + return CorrectedDetector[ScatteringRunType, Denominator](out) def process_wavelength_bands( @@ -406,17 +411,17 @@ def _reduce(part: sc.DataArray, /, *, bands: ProcessedWavelengthBands) -> sc.Dat def reduce_q( - data: WavelengthScaledQ[ScatteringRunType, IofQPart], + data: WavelengthScaledQ[ScatteringRunType, IntensityQPart], bands: ProcessedWavelengthBands, -) -> ReducedQ[ScatteringRunType, IofQPart]: - return ReducedQ[ScatteringRunType, IofQPart](_reduce(data, bands=bands)) +) -> ReducedQ[ScatteringRunType, IntensityQPart]: + return ReducedQ[ScatteringRunType, IntensityQPart](_reduce(data, bands=bands)) def reduce_qxy( - data: WavelengthScaledQxy[ScatteringRunType, IofQPart], + data: WavelengthScaledQxy[ScatteringRunType, IntensityQPart], bands: ProcessedWavelengthBands, -) -> ReducedQxy[ScatteringRunType, IofQPart]: - return ReducedQxy[ScatteringRunType, IofQPart](_reduce(data, bands=bands)) +) -> ReducedQxy[ScatteringRunType, IntensityQPart]: + return ReducedQxy[ScatteringRunType, IntensityQPart](_reduce(data, bands=bands)) def normalize_q( @@ -424,8 +429,8 @@ def normalize_q( denominator: ReducedQ[ScatteringRunType, Denominator], return_events: ReturnEvents, uncertainties: UncertaintyBroadcastMode, -) -> IofQ[ScatteringRunType]: - return IofQ[ScatteringRunType]( +) -> IntensityQ[ScatteringRunType]: + return IntensityQ[ScatteringRunType]( _normalize( numerator=numerator, denominator=denominator, @@ -440,8 +445,8 @@ def normalize_qxy( denominator: ReducedQxy[ScatteringRunType, Denominator], return_events: ReturnEvents, uncertainties: UncertaintyBroadcastMode, -) -> IofQxy[ScatteringRunType]: - return IofQxy[ScatteringRunType]( +) -> IntensityQxy[ScatteringRunType]: + return IntensityQxy[ScatteringRunType]( _normalize( 
numerator=numerator, denominator=denominator, diff --git a/src/ess/sans/parameters.py b/src/ess/sans/parameters.py index 350791f3..d940dbcf 100644 --- a/src/ess/sans/parameters.py +++ b/src/ess/sans/parameters.py @@ -21,8 +21,8 @@ from ..sans.types import ( BackgroundRun, - BackgroundSubtractedIofQ, - BackgroundSubtractedIofQxy, + BackgroundSubtractedIntensityQ, + BackgroundSubtractedIntensityQxy, BeamCenter, CorrectForGravity, DirectBeam, @@ -30,8 +30,8 @@ EmptyBeamRun, Filename, Incident, - IofQ, - IofQxy, + IntensityQ, + IntensityQxy, MaskedData, NeXusDetectorName, NeXusMonitorName, @@ -116,12 +116,12 @@ ) typical_outputs = ( - BackgroundSubtractedIofQ, - BackgroundSubtractedIofQxy, - IofQ[SampleRun], - IofQxy[SampleRun], - IofQ[BackgroundRun], - IofQxy[BackgroundRun], + BackgroundSubtractedIntensityQ, + BackgroundSubtractedIntensityQxy, + IntensityQ[SampleRun], + IntensityQxy[SampleRun], + IntensityQ[BackgroundRun], + IntensityQxy[BackgroundRun], MaskedData[BackgroundRun], MaskedData[SampleRun], WavelengthMonitor[SampleRun, Incident], diff --git a/src/ess/sans/types.py b/src/ess/sans/types.py index a031beca..8500b9ac 100644 --- a/src/ess/sans/types.py +++ b/src/ess/sans/types.py @@ -16,14 +16,14 @@ from ess.reduce.uncertainty import UncertaintyBroadcastMode as _UncertaintyBroadcastMode BackgroundRun = reduce_t.BackgroundRun -CalibratedDetector = reduce_t.CalibratedDetector -CalibratedMonitor = reduce_t.CalibratedMonitor -DetectorData = reduce_t.DetectorData +EmptyDetector = reduce_t.EmptyDetector +EmptyMonitor = reduce_t.EmptyMonitor +RawDetector = reduce_t.RawDetector DetectorPositionOffset = reduce_t.DetectorPositionOffset EmptyBeamRun = reduce_t.EmptyBeamRun Filename = reduce_t.Filename Incident = reduce_t.IncidentMonitor -MonitorData = reduce_t.MonitorData +RawMonitor = reduce_t.RawMonitor MonitorPositionOffset = reduce_t.MonitorPositionOffset NeXusMonitorName = reduce_t.NeXusName NeXusComponent = reduce_t.NeXusComponent @@ -48,13 +48,13 @@ 
UncertaintyBroadcastMode = _UncertaintyBroadcastMode -# 1.3 Numerator and denominator of IofQ +# 1.3 Numerator and denominator of IntensityQ Numerator = NewType('Numerator', sc.DataArray) -"""Numerator of IofQ""" +"""Numerator of IntensityQ""" Denominator = NewType('Denominator', sc.DataArray) -"""Denominator of IofQ""" -IofQPart = TypeVar('IofQPart', Numerator, Denominator) -"""TypeVar used for specifying Numerator or Denominator of IofQ""" +"""Denominator of IntensityQ""" +IntensityQPart = TypeVar('IntensityQPart', Numerator, Denominator) +"""TypeVar used for specifying Numerator or Denominator of IntensityQ""" # 1.4 Entry paths in NeXus files PixelShapePath = NewType('PixelShapePath', str) @@ -100,13 +100,13 @@ QBins = NewType('QBins', sc.Variable) -"""Q binning used when computing IofQ""" +"""Q binning used when computing IntensityQ""" QxBins = NewType('QxBins', sc.Variable) -"""Qx binning used when computing IofQxy""" +"""Qx binning used when computing IntensityQxy""" QyBins = NewType('QyBins', sc.Variable) -"""Qy binning used when computing IofQxy""" +"""Qy binning used when computing IntensityQxy""" NonBackgroundWavelengthRange = NewType( 'NonBackgroundWavelengthRange', sc.Variable | None @@ -175,7 +175,7 @@ class MaskedSolidAngle(sciline.Scope[ScatteringRunType, sc.DataArray], sc.DataAr """Same as :py:class:`SolidAngle`, but with pixel masks applied""" -class TofData(sciline.Scope[ScatteringRunType, sc.DataArray], sc.DataArray): +class TofDetector(sciline.Scope[ScatteringRunType, sc.DataArray], sc.DataArray): """Data with a time-of-flight coordinate""" @@ -191,14 +191,14 @@ class MaskedData(sciline.Scope[ScatteringRunType, sc.DataArray], sc.DataArray): class MonitorTerm(sciline.Scope[ScatteringRunType, sc.DataArray], sc.DataArray): - """Monitor-dependent factor of the Normalization term (numerator) for IofQ.""" + """Monitor-dependent factor of the Normalization term (numerator) for IntensityQ.""" -class CleanWavelength( - sciline.Scope[ScatteringRunType, 
IofQPart, sc.DataArray], sc.DataArray +class CorrectedDetector( + sciline.Scope[ScatteringRunType, IntensityQPart, sc.DataArray], sc.DataArray ): """ - Prerequisite for IofQ numerator or denominator. + Prerequisite for IntensityQ numerator or denominator. This can either be the sample or background counts, converted to wavelength, or the respective normalization terms computed from the respective solid angle, @@ -207,60 +207,56 @@ class CleanWavelength( class WavelengthScaledQ( - sciline.Scope[ScatteringRunType, IofQPart, sc.DataArray], sc.DataArray + sciline.Scope[ScatteringRunType, IntensityQPart, sc.DataArray], sc.DataArray ): - """Result of applying wavelength scaling/masking to :py:class:`CleanSummedQ`""" + """Result of applying wavelength scaling/masking to :py:class:`CorrectedQ`""" class WavelengthScaledQxy( - sciline.Scope[ScatteringRunType, IofQPart, sc.DataArray], sc.DataArray + sciline.Scope[ScatteringRunType, IntensityQPart, sc.DataArray], sc.DataArray ): - """Result of applying wavelength scaling/masking to :py:class:`CleanSummedQxy`""" + """Result of applying wavelength scaling/masking to :py:class:`CorrectedQxy`""" -class CleanQ(sciline.Scope[ScatteringRunType, IofQPart, sc.DataArray], sc.DataArray): - """Result of converting :py:class:`CleanWavelengthMasked` to Q""" - - -class CleanQxy(sciline.Scope[ScatteringRunType, IofQPart, sc.DataArray], sc.DataArray): - """Result of converting :py:class:`CleanWavelengthMasked` to Qx and Qy""" - - -class CleanSummedQ( - sciline.Scope[ScatteringRunType, IofQPart, sc.DataArray], sc.DataArray +class CorrectedQ( + sciline.Scope[ScatteringRunType, IntensityQPart, sc.DataArray], sc.DataArray ): - """Result of histogramming/binning :py:class:`CleanQ` over all pixels into Q bins""" + """Result of computing Q coordinate and histogramming/binning into Q bins""" -class CleanSummedQxy( - sciline.Scope[ScatteringRunType, IofQPart, sc.DataArray], sc.DataArray +class CorrectedQxy( + sciline.Scope[ScatteringRunType, 
IntensityQPart, sc.DataArray], sc.DataArray ): - """Result of histogramming/binning :py:class:`CleanQxy` over all pixels into Qx and - Qy bins""" + """Result of computing Qx and Qy coordinates and histogramming/binning into + Qx and Qy bins""" -class ReducedQ(sciline.Scope[ScatteringRunType, IofQPart, sc.DataArray], sc.DataArray): - """Result of reducing :py:class:`CleanSummedQ` over the wavelength dimensions""" +class ReducedQ( + sciline.Scope[ScatteringRunType, IntensityQPart, sc.DataArray], sc.DataArray +): + """Result of reducing :py:class:`CorrectedQ` over the wavelength dimensions""" class ReducedQxy( - sciline.Scope[ScatteringRunType, IofQPart, sc.DataArray], sc.DataArray + sciline.Scope[ScatteringRunType, IntensityQPart, sc.DataArray], sc.DataArray ): - """Result of reducing :py:class:`CleanSummedQxy` over the wavelength dimensions""" + """Result of reducing :py:class:`CorrectedQxy` over the wavelength dimensions""" -class IofQ(sciline.Scope[ScatteringRunType, sc.DataArray], sc.DataArray): +class IntensityQ(sciline.Scope[ScatteringRunType, sc.DataArray], sc.DataArray): """I(Q)""" -class IofQxy(sciline.Scope[ScatteringRunType, sc.DataArray], sc.DataArray): +class IntensityQxy(sciline.Scope[ScatteringRunType, sc.DataArray], sc.DataArray): """I(Qx, Qy)""" -BackgroundSubtractedIofQ = NewType('BackgroundSubtractedIofQ', sc.DataArray) +BackgroundSubtractedIntensityQ = NewType('BackgroundSubtractedIntensityQ', sc.DataArray) """I(Q) with background (given by I(Q) of the background run) subtracted""" -BackgroundSubtractedIofQxy = NewType('BackgroundSubtractedIofQxy', sc.DataArray) +BackgroundSubtractedIntensityQxy = NewType( + 'BackgroundSubtractedIntensityQxy', sc.DataArray +) """I(Qx, Qy) with background (given by I(Qx, Qy) of the background run) subtracted""" @@ -270,7 +266,7 @@ class WavelengthMonitor( """Monitor data converted to wavelength""" -class CleanMonitor(sciline.Scope[RunType, MonitorType, sc.DataArray], sc.DataArray): +class 
CorrectedMonitor(sciline.Scope[RunType, MonitorType, sc.DataArray], sc.DataArray): """Monitor data cleaned of background counts""" diff --git a/tests/io_test.py b/tests/io_test.py index 9b66a976..7b3cf6ca 100644 --- a/tests/io_test.py +++ b/tests/io_test.py @@ -9,15 +9,15 @@ from scippnexus.application_definitions import nxcansas from ess.sans.io import save_background_subtracted_iofq -from ess.sans.types import BackgroundSubtractedIofQ, Measurement, OutFilename +from ess.sans.types import BackgroundSubtractedIntensityQ, Measurement, OutFilename @pytest.mark.parametrize('use_edges', [True, False]) def test_save_background_subtracted_iofq(use_edges, tmp_path): - def background_subtracted_iofq() -> BackgroundSubtractedIofQ: + def background_subtracted_iofq() -> BackgroundSubtractedIntensityQ: i = sc.arange('Q', 0.0, 400.0) i.variances = i.values / 10 - return BackgroundSubtractedIofQ( + return BackgroundSubtractedIntensityQ( sc.DataArray( i, coords={ diff --git a/tests/isissans/sans2d_reduction_test.py b/tests/isissans/sans2d_reduction_test.py index ee45fd06..9a625cb3 100644 --- a/tests/isissans/sans2d_reduction_test.py +++ b/tests/isissans/sans2d_reduction_test.py @@ -12,22 +12,22 @@ from ess.isissans import MonitorOffset, SampleOffset, sans2d from ess.sans.types import ( BackgroundRun, - BackgroundSubtractedIofQ, + BackgroundSubtractedIntensityQ, BeamCenter, - CalibratedDetector, CorrectForGravity, - DetectorData, DimsToKeep, DirectBeam, DirectBeamFilename, EmptyBeamRun, + EmptyDetector, Filename, Incident, - IofQ, + IntensityQ, MaskedData, NeXusMonitorName, NonBackgroundWavelengthRange, QBins, + RawDetector, ReturnEvents, SampleRun, SolidAngle, @@ -103,36 +103,38 @@ def pipeline(): def test_can_create_pipeline(pipeline): - pipeline.get(IofQ[SampleRun]) + pipeline.get(IntensityQ[SampleRun]) @pytest.mark.parametrize( 'uncertainties', [UncertaintyBroadcastMode.drop, UncertaintyBroadcastMode.upper_bound], ) -def 
test_pipeline_can_compute_background_subtracted_IofQ(pipeline, uncertainties): +def test_pipeline_can_compute_background_subtracted_IntensityQ(pipeline, uncertainties): pipeline[UncertaintyBroadcastMode] = uncertainties - result = pipeline.compute(BackgroundSubtractedIofQ) + result = pipeline.compute(BackgroundSubtractedIntensityQ) assert result.dims == ('Q',) -def test_pipeline_can_compute_background_subtracted_IofQ_in_wavelength_bands(pipeline): +def test_pipeline_can_compute_background_subtracted_IntensityQ_in_wavelength_bands( + pipeline, +): pipeline[WavelengthBands] = sc.linspace( 'wavelength', start=2.0, stop=16.0, num=11, unit='angstrom' ) - result = pipeline.compute(BackgroundSubtractedIofQ) + result = pipeline.compute(BackgroundSubtractedIntensityQ) assert result.dims == ('band', 'Q') assert result.sizes['band'] == 10 def test_pipeline_wavelength_bands_is_optional(pipeline): pipeline[BeamCenter] = sans.beam_center_from_center_of_mass(pipeline) - noband = pipeline.compute(BackgroundSubtractedIofQ) + noband = pipeline.compute(BackgroundSubtractedIntensityQ) assert pipeline.compute(WavelengthBands) is None band = sc.linspace('wavelength', 2.0, 16.0, num=2, unit='angstrom') pipeline[WavelengthBands] = band assert sc.identical(band, pipeline.compute(WavelengthBands)) - withband = pipeline.compute(BackgroundSubtractedIofQ) + withband = pipeline.compute(BackgroundSubtractedIntensityQ) assert sc.identical(noband, withband) @@ -141,7 +143,7 @@ def test_workflow_is_deterministic(pipeline): pipeline[BeamCenter] = sans.beam_center_from_center_of_mass(pipeline) # This is Sciline's default scheduler, but we want to be explicit here scheduler = sciline.scheduler.DaskScheduler() - graph = pipeline.get(IofQ[SampleRun], scheduler=scheduler) + graph = pipeline.get(IntensityQ[SampleRun], scheduler=scheduler) reference = graph.compute().data result = graph.compute().data assert sc.identical(sc.values(result), sc.values(reference)) @@ -153,7 +155,7 @@ def 
test_pipeline_raises_VariancesError_if_normalization_errors_not_dropped(pipe ) pipeline[UncertaintyBroadcastMode] = UncertaintyBroadcastMode.fail with pytest.raises(sc.VariancesError): - pipeline.compute(BackgroundSubtractedIofQ) + pipeline.compute(BackgroundSubtractedIntensityQ) def test_uncertainty_broadcast_mode_drop_yields_smaller_variances(pipeline): @@ -162,14 +164,14 @@ def test_uncertainty_broadcast_mode_drop_yields_smaller_variances(pipeline): dim='Q', start=0.01, stop=0.5, num=141, unit='1/angstrom' ) pipeline[UncertaintyBroadcastMode] = UncertaintyBroadcastMode.drop - drop = pipeline.compute(IofQ[SampleRun]).data + drop = pipeline.compute(IntensityQ[SampleRun]).data pipeline[UncertaintyBroadcastMode] = UncertaintyBroadcastMode.upper_bound - upper_bound = pipeline.compute(IofQ[SampleRun]).data + upper_bound = pipeline.compute(IntensityQ[SampleRun]).data assert sc.all(sc.variances(drop) < sc.variances(upper_bound)).value -def test_pipeline_can_visualize_background_subtracted_IofQ(pipeline): - pipeline.visualize(BackgroundSubtractedIofQ) +def test_pipeline_can_visualize_background_subtracted_IntensityQ(pipeline): + pipeline.visualize(BackgroundSubtractedIntensityQ) def test_pipeline_can_compute_intermediate_results(pipeline): @@ -178,7 +180,7 @@ def test_pipeline_can_compute_intermediate_results(pipeline): def pixel_dependent_direct_beam( - filename: DirectBeamFilename, shape: CalibratedDetector[SampleRun] + filename: DirectBeamFilename, shape: EmptyDetector[SampleRun] ) -> DirectBeam: direct_beam = isis.io.load_tutorial_direct_beam(filename) sizes = {'spectrum': shape.sizes['spectrum'], **direct_beam.sizes} @@ -193,7 +195,7 @@ def test_pixel_dependent_direct_beam_is_supported(pipeline, uncertainties): pipeline[UncertaintyBroadcastMode] = uncertainties pipeline.insert(pixel_dependent_direct_beam) pipeline[BeamCenter] = sc.vector([0, 0, 0], unit='m') - result = pipeline.compute(BackgroundSubtractedIofQ) + result = 
pipeline.compute(BackgroundSubtractedIntensityQ) assert result.dims == ('Q',) @@ -202,7 +204,7 @@ def test_pixel_dependent_direct_beam_is_supported(pipeline, uncertainties): def test_beam_center_from_center_of_mass_is_close_to_verified_result(pipeline): center = sans.beam_center_from_center_of_mass(pipeline) - # This is the result obtained from Mantid, using the full IofQ + # This is the result obtained from Mantid, using the full IntensityQ # calculation. The difference is about 3 mm in X or Y, probably due to a bias # introduced by the sample holder, which the center-of-mass approach cannot ignore. assert sc.allclose(center, MANTID_BEAM_CENTER, atol=sc.scalar(3e-3, unit='m')) @@ -224,7 +226,7 @@ def test_beam_center_finder_without_direct_beam_reproduces_verified_result(pipel def test_beam_center_can_get_closer_to_verified_result_with_low_counts_mask(pipeline): def low_counts_mask( - sample: DetectorData[SampleRun], + sample: RawDetector[SampleRun], low_counts_threshold: sans2d.LowCountThreshold, ) -> sans2d.SampleHolderMask: return sans2d.SampleHolderMask(sample.hist().data < low_counts_threshold) @@ -284,9 +286,9 @@ def test_beam_center_finder_works_with_pixel_dependent_direct_beam(pipeline): def test_workflow_runs_without_gravity_if_beam_center_is_provided(pipeline): pipeline[CorrectForGravity] = False - da = pipeline.compute(DetectorData[SampleRun]) + da = pipeline.compute(RawDetector[SampleRun]) del da.coords['gravity'] - pipeline[DetectorData[SampleRun]] = da + pipeline[RawDetector[SampleRun]] = da pipeline[BeamCenter] = MANTID_BEAM_CENTER - result = pipeline.compute(BackgroundSubtractedIofQ) + result = pipeline.compute(BackgroundSubtractedIntensityQ) assert result.dims == ('Q',) diff --git a/tests/isissans/zoom_reduction_test.py b/tests/isissans/zoom_reduction_test.py index bb5987e5..6908acbc 100644 --- a/tests/isissans/zoom_reduction_test.py +++ b/tests/isissans/zoom_reduction_test.py @@ -12,8 +12,8 @@ DetectorPositionOffset, Filename, Incident, - IofQ, - 
IofQxy, + IntensityQ, + IntensityQxy, NeXusMonitorName, NonBackgroundWavelengthRange, QBins, @@ -72,19 +72,19 @@ def test_can_create_pipeline(pipeline): pipeline = sans.with_pixel_mask_filenames( pipeline, isis.data.zoom_tutorial_mask_filenames() ) - pipeline.get(IofQ[SampleRun]) + pipeline.get(IntensityQ[SampleRun]) -def test_pipeline_can_compute_IofQ(pipeline): +def test_pipeline_can_compute_IntensityQ(pipeline): pipeline[BeamCenter] = sc.vector([0, 0, 0], unit='m') pipeline = sans.with_pixel_mask_filenames( pipeline, isis.data.zoom_tutorial_mask_filenames() ) - result = pipeline.compute(IofQ[SampleRun]) + result = pipeline.compute(IntensityQ[SampleRun]) assert result.dims == ('Q',) -def test_pipeline_can_compute_IofQxQy(pipeline): +def test_pipeline_can_compute_IntensityQxQy(pipeline): pipeline[BeamCenter] = sc.vector([0, 0, 0], unit='m') pipeline = sans.with_pixel_mask_filenames( pipeline, isis.data.zoom_tutorial_mask_filenames() @@ -96,5 +96,5 @@ def test_pipeline_can_compute_IofQxQy(pipeline): dim='Qy', start=-0.8, stop=0.8, num=101, unit='1/angstrom' ) - result = pipeline.compute(IofQxy[SampleRun]) + result = pipeline.compute(IntensityQxy[SampleRun]) assert result.dims == ('Qy', 'Qx') diff --git a/tests/loki/iofq_test.py b/tests/loki/iofq_test.py index 90d8549b..e2c4825b 100644 --- a/tests/loki/iofq_test.py +++ b/tests/loki/iofq_test.py @@ -14,16 +14,16 @@ from ess.sans.conversions import ElasticCoordTransformGraph from ess.sans.types import ( BackgroundRun, - BackgroundSubtractedIofQ, - BackgroundSubtractedIofQxy, + BackgroundSubtractedIntensityQ, + BackgroundSubtractedIntensityQxy, BeamCenter, - CleanWavelength, + CorrectedDetector, CorrectForGravity, Denominator, DimsToKeep, Filename, - IofQ, - IofQxy, + IntensityQ, + IntensityQxy, MaskedData, Numerator, QBins, @@ -44,7 +44,7 @@ def test_can_create_pipeline(): pipeline = make_workflow() pipeline[BeamCenter] = sc.vector([0, 0, 0], unit='m') - pipeline.get(BackgroundSubtractedIofQ) + 
pipeline.get(BackgroundSubtractedIntensityQ) def test_can_create_pipeline_with_pixel_masks(): @@ -53,7 +53,7 @@ def test_can_create_pipeline_with_pixel_masks(): pipeline, loki.data.loki_tutorial_mask_filenames() ) pipeline[BeamCenter] = sc.vector([0, 0, 0], unit='m') - pipeline.get(BackgroundSubtractedIofQ) + pipeline.get(BackgroundSubtractedIntensityQ) @pytest.mark.parametrize( @@ -61,7 +61,7 @@ def test_can_create_pipeline_with_pixel_masks(): [UncertaintyBroadcastMode.drop, UncertaintyBroadcastMode.upper_bound], ) @pytest.mark.parametrize('qxy', [False, True]) -def test_pipeline_can_compute_IofQ(uncertainties, qxy: bool): +def test_pipeline_can_compute_IntensityQ(uncertainties, qxy: bool): pipeline = make_workflow(no_masks=False) pipeline[UncertaintyBroadcastMode] = uncertainties pipeline = sans.with_pixel_mask_filenames( @@ -69,20 +69,20 @@ def test_pipeline_can_compute_IofQ(uncertainties, qxy: bool): ) pipeline[BeamCenter] = sans.beam_center_from_center_of_mass(pipeline) if qxy: - result = pipeline.compute(BackgroundSubtractedIofQxy) + result = pipeline.compute(BackgroundSubtractedIntensityQxy) assert result.dims == ('Qy', 'Qx') assert sc.identical(result.coords['Qx'], pipeline.compute(QxBins)) assert sc.identical(result.coords['Qy'], pipeline.compute(QyBins)) assert result.sizes['Qx'] == 90 assert result.sizes['Qy'] == 77 else: - result = pipeline.compute(BackgroundSubtractedIofQ) + result = pipeline.compute(BackgroundSubtractedIntensityQ) assert result.dims == ('Q',) assert sc.identical(result.coords['Q'], pipeline.compute(QBins)) assert result.sizes['Q'] == 100 if uncertainties == UncertaintyBroadcastMode.drop: test_dir = os.path.dirname(os.path.abspath(__file__)) - name = Path(f'reference_IofQ{"xy" if qxy else ""}_{uncertainties}.hdf5') + name = Path(f'reference_IntensityQ{"xy" if qxy else ""}_{uncertainties}.hdf5') reference = sc.io.load_hdf5(test_dir / name) assert_identical(result, reference) @@ -94,20 +94,20 @@ def 
test_pipeline_can_compute_IofQ(uncertainties, qxy: bool): @pytest.mark.parametrize( 'target', [ - IofQ[SampleRun], - IofQxy[SampleRun], - BackgroundSubtractedIofQ, - BackgroundSubtractedIofQxy, + IntensityQ[SampleRun], + IntensityQxy[SampleRun], + BackgroundSubtractedIntensityQ, + BackgroundSubtractedIntensityQxy, ], ) -def test_pipeline_can_compute_IofQ_in_event_mode(uncertainties, target): +def test_pipeline_can_compute_IntensityQ_in_event_mode(uncertainties, target): pipeline = make_workflow() pipeline[UncertaintyBroadcastMode] = uncertainties pipeline[BeamCenter] = sans.beam_center_from_center_of_mass(pipeline) reference = pipeline.compute(target) pipeline[ReturnEvents] = True result = pipeline.compute(target) - qxy = target in (IofQxy[SampleRun], BackgroundSubtractedIofQxy) + qxy = target in (IntensityQxy[SampleRun], BackgroundSubtractedIntensityQxy) assert result.bins is not None assert result.dims == ('Qy', 'Qx') if qxy else ('Q',) assert sc.allclose( @@ -133,7 +133,7 @@ def test_pipeline_can_compute_IofQ_in_event_mode(uncertainties, target): @pytest.mark.parametrize('qxy', [False, True]) -def test_pipeline_can_compute_IofQ_in_wavelength_bands(qxy: bool): +def test_pipeline_can_compute_IntensityQ_in_wavelength_bands(qxy: bool): pipeline = make_workflow() pipeline[WavelengthBands] = sc.linspace( 'wavelength', @@ -143,14 +143,14 @@ def test_pipeline_can_compute_IofQ_in_wavelength_bands(qxy: bool): ) pipeline[BeamCenter] = _compute_beam_center() result = pipeline.compute( - BackgroundSubtractedIofQxy if qxy else BackgroundSubtractedIofQ + BackgroundSubtractedIntensityQxy if qxy else BackgroundSubtractedIntensityQ ) assert result.dims == ('band', 'Qy', 'Qx') if qxy else ('band', 'Q') assert result.sizes['band'] == 10 @pytest.mark.parametrize('qxy', [False, True]) -def test_pipeline_can_compute_IofQ_in_overlapping_wavelength_bands(qxy: bool): +def test_pipeline_can_compute_IntensityQ_in_overlapping_wavelength_bands(qxy: bool): pipeline = make_workflow() # Bands 
have double the width edges = pipeline.compute(WavelengthBins) @@ -160,19 +160,19 @@ def test_pipeline_can_compute_IofQ_in_overlapping_wavelength_bands(qxy: bool): ).transpose() pipeline[BeamCenter] = _compute_beam_center() result = pipeline.compute( - BackgroundSubtractedIofQxy if qxy else BackgroundSubtractedIofQ + BackgroundSubtractedIntensityQxy if qxy else BackgroundSubtractedIntensityQ ) assert result.dims == ('band', 'Qy', 'Qx') if qxy else ('band', 'Q') assert result.sizes['band'] == 10 @pytest.mark.parametrize('qxy', [False, True]) -def test_pipeline_can_compute_IofQ_in_layers(qxy: bool): +def test_pipeline_can_compute_IntensityQ_in_layers(qxy: bool): pipeline = make_workflow() pipeline[DimsToKeep] = ['layer'] pipeline[BeamCenter] = _compute_beam_center() result = pipeline.compute( - BackgroundSubtractedIofQxy if qxy else BackgroundSubtractedIofQ + BackgroundSubtractedIntensityQxy if qxy else BackgroundSubtractedIntensityQ ) assert result.dims == ('layer', 'Qy', 'Qx') if qxy else ('layer', 'Q') assert result.sizes['layer'] == 4 @@ -182,7 +182,7 @@ def _compute_beam_center(): return sans.beam_center_from_center_of_mass(make_workflow()) -def test_pipeline_can_compute_IofQ_merging_events_from_multiple_runs(): +def test_pipeline_can_compute_IntensityQ_merging_events_from_multiple_runs(): sample_runs = [ loki.data.loki_tutorial_sample_run_60250(), loki.data.loki_tutorial_sample_run_60339(), @@ -200,22 +200,22 @@ def test_pipeline_can_compute_IofQ_merging_events_from_multiple_runs(): pipeline = sans.with_sample_runs(pipeline, runs=sample_runs) pipeline = sans.with_background_runs(pipeline, runs=background_runs) - result = pipeline.compute(BackgroundSubtractedIofQ) + result = pipeline.compute(BackgroundSubtractedIntensityQ) assert result.dims == ('Q',) - result = pipeline.compute(BackgroundSubtractedIofQxy) + result = pipeline.compute(BackgroundSubtractedIntensityQxy) assert result.dims == ('Qy', 'Qx') -def test_pipeline_can_compute_IofQ_by_bank(): +def 
test_pipeline_can_compute_IntensityQ_by_bank(): pipeline = make_workflow() pipeline[BeamCenter] = _compute_beam_center() pipeline = sans.with_banks(pipeline, banks=['larmor_detector']) - results = sciline.compute_mapped(pipeline, BackgroundSubtractedIofQ) + results = sciline.compute_mapped(pipeline, BackgroundSubtractedIntensityQ) assert results['larmor_detector'].dims == ('Q',) -def test_pipeline_can_compute_IofQ_merging_events_from_multiple_runs_by_bank(): +def test_pipeline_can_compute_IntensityQ_merging_events_from_multiple_runs_by_bank(): sample_runs = [ loki.data.loki_tutorial_sample_run_60250(), loki.data.loki_tutorial_sample_run_60339(), @@ -229,7 +229,7 @@ def test_pipeline_can_compute_IofQ_merging_events_from_multiple_runs_by_bank(): pipeline = sans.with_sample_runs(pipeline, runs=sample_runs) pipeline = sans.with_background_runs(pipeline, runs=background_runs) - key = BackgroundSubtractedIofQ + key = BackgroundSubtractedIntensityQ reference = pipeline.compute(key) pipeline = sans.with_banks( @@ -241,7 +241,7 @@ def test_pipeline_can_compute_IofQ_merging_events_from_multiple_runs_by_bank(): assert_identical(sc.values(results['bank1']), sc.values(reference)) -def test_pipeline_IofQ_merging_events_yields_consistent_results(): +def test_pipeline_IntensityQ_merging_events_yields_consistent_results(): N = 3 center = _compute_beam_center() pipeline_single = make_workflow() @@ -252,8 +252,8 @@ def test_pipeline_IofQ_merging_events_yields_consistent_results(): pipeline_triple = sans.with_sample_runs(pipeline_single, runs=sample_runs) pipeline_triple = sans.with_background_runs(pipeline_triple, runs=background_runs) - iofq1 = pipeline_single.compute(BackgroundSubtractedIofQ) - iofq3 = pipeline_triple.compute(BackgroundSubtractedIofQ) + iofq1 = pipeline_single.compute(BackgroundSubtractedIntensityQ) + iofq3 = pipeline_triple.compute(BackgroundSubtractedIntensityQ) assert sc.allclose(sc.values(iofq1.data), sc.values(iofq3.data)) assert 
sc.identical(iofq1.coords['Q'], iofq3.coords['Q']) assert all(sc.variances(iofq1.data) > sc.variances(iofq3.data)) @@ -291,13 +291,13 @@ def test_phi_with_gravity(): pipeline = make_workflow() pipeline[BeamCenter] = _compute_beam_center() pipeline[CorrectForGravity] = False - data_no_grav = pipeline.compute(CleanWavelength[SampleRun, Numerator]).flatten( + data_no_grav = pipeline.compute(CorrectedDetector[SampleRun, Numerator]).flatten( to='pixel' ) graph_no_grav = pipeline.compute(ElasticCoordTransformGraph) pipeline[CorrectForGravity] = True data_with_grav = ( - pipeline.compute(CleanWavelength[SampleRun, Numerator]) + pipeline.compute(CorrectedDetector[SampleRun, Numerator]) .flatten(to='pixel') .hist(wavelength=sc.linspace('wavelength', 1.0, 12.0, 101, unit='angstrom')) ) diff --git a/tests/loki/workflow_test.py b/tests/loki/workflow_test.py index 74900d5b..6dbb8630 100644 --- a/tests/loki/workflow_test.py +++ b/tests/loki/workflow_test.py @@ -11,10 +11,10 @@ from ess.reduce import workflow from ess.sans.types import ( BackgroundRun, - BackgroundSubtractedIofQ, + BackgroundSubtractedIntensityQ, BeamCenter, Filename, - IofQ, + IntensityQ, PixelMaskFilename, QBins, ReturnEvents, @@ -40,7 +40,7 @@ class MyWorkflow: ... def test_loki_workflow_parameters_returns_filtered_params(): wf = LokiAtLarmorWorkflow() - parameters = workflow.get_parameters(wf, (IofQ[SampleRun],)) + parameters = workflow.get_parameters(wf, (IntensityQ[SampleRun],)) assert Filename[SampleRun] in parameters assert Filename[BackgroundRun] not in parameters @@ -64,7 +64,7 @@ def test_loki_workflow_compute_with_single_pixel_mask(): # For simplicity, insert a fake beam center instead of computing it. wf[BeamCenter] = sc.vector([0.0, 0.0, 0.0], unit='m') - result = wf.compute(BackgroundSubtractedIofQ) + result = wf.compute(BackgroundSubtractedIntensityQ) assert result.dims == ('Q',) assert sc.identical(result.coords['Q'], wf.compute(QBins)) assert result.sizes['Q'] == 100