diff --git a/src/ess/sans/common.py b/src/ess/sans/common.py
index e3de665c..4073f22e 100644
--- a/src/ess/sans/common.py
+++ b/src/ess/sans/common.py
@@ -61,12 +61,12 @@ def mask_range(
     coord = (
         da.bins.constituents['data'].coords[dim]
-        if da.bins is not None
+        if da.bins is not None and dim in da.bins.coords
        else da.coords[dim]
     )
     edges = edges.to(unit=coord.unit)
     lu = sc.DataArray(data=mask.data, coords={dim: edges})
-    if da.bins is not None:
+    if da.bins is not None and dim in da.bins.coords:
         if dim not in da.coords:
             underlying = da.bins.coords[dim]
             new_bins = np.union1d(
diff --git a/src/ess/sans/normalization.py b/src/ess/sans/normalization.py
index 4b859ac1..40b17991 100644
--- a/src/ess/sans/normalization.py
+++ b/src/ess/sans/normalization.py
@@ -392,7 +392,7 @@ def _reduce(part: sc.DataArray, /, *, bands: ProcessedWavelengthBands) -> sc.Dat
         Q-dependent data, ready for normalization.
     """
     wav = 'wavelength'
-    if part.bins is not None:
+    if part.bins is not None and wav in part.bins.coords:
         # If in event mode the desired wavelength binning has not been applied, we need
         # it for splitting by bands, or restricting the range in case of a single band.
         part = part.bin(wavelength=sc.sort(bands.flatten(to=wav), wav))
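
Note on the two guards added above: both handle binned data where the dimension being masked or reduced ('wavelength') exists only as a top-level coordinate and not as an event coordinate, so the event-mode branch must not be taken just because `da.bins is not None`. A minimal sketch of such a layout, using only generic scipp calls (the `table_xyz` helper and the coordinate values are illustrative, not part of this change):

    import scipp as sc

    table = sc.data.table_xyz(100)  # random table with event-like x, y, z coords
    da = table.bin(x=10)            # binned in x; x, y, z become event (bins) coords
    # Attach 'wavelength' only as a top-level, per-bin coordinate.
    da.coords['wavelength'] = sc.linspace('x', 1.0, 2.0, num=10, unit='angstrom')

    da.bins is not None             # True
    'x' in da.bins.coords           # True  -> event-mode handling applies
    'wavelength' in da.bins.coords  # False -> fall back to da.coords['wavelength']
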
diff --git a/src/ess/sans/qresolution.py b/src/ess/sans/qresolution.py
new file mode 100644
index 00000000..d6df2d0e
--- /dev/null
+++ b/src/ess/sans/qresolution.py
@@ -0,0 +1,260 @@
+# SPDX-License-Identifier: BSD-3-Clause
+# Copyright (c) 2024 Scipp contributors (https://github.com/scipp)
+"""Q-resolution calculation for SANS data."""
+
+from typing import NewType
+
+import numpy as np
+import scipp as sc
+from scipp.constants import pi
+
+from .common import mask_range
+from .conversions import ElasticCoordTransformGraph, sans_elastic
+from .i_of_q import resample_direct_beam
+from .normalization import _reduce
+from .types import (
+    CalibratedBeamline,
+    DetectorMasks,
+    DimsToKeep,
+    ProcessedWavelengthBands,
+    QBins,
+    SampleRun,
+    WavelengthBins,
+    WavelengthMask,
+)
+
+DeltaR = NewType("DeltaR", sc.Variable)
+"""Virtual ring width on the detector."""
+
+SampleApertureRadius = NewType("SampleApertureRadius", sc.Variable)
+"""Sample aperture radius, R2."""
+SourceApertureRadius = NewType("SourceApertureRadius", sc.Variable)
+"""Source aperture radius, R1."""
+SigmaModerator = NewType("SigmaModerator", sc.DataArray)
+"""
+Moderator wavelength spread as a function of wavelength.
+
+This is derived from ModeratorTimeSpread.
+"""
+CollimationLength = NewType("CollimationLength", sc.Variable)
+"""Collimation length."""
+ModeratorTimeSpreadFilename = NewType("ModeratorTimeSpreadFilename", str)
+ModeratorTimeSpread = NewType("ModeratorTimeSpread", sc.DataArray)
+"""Moderator time-spread as a function of wavelength."""
+
+
+QResolutionByPixel = NewType("QResolutionByPixel", sc.DataArray)
+QResolutionByWavelength = NewType("QResolutionByWavelength", sc.DataArray)
+QResolution = NewType("QResolution", sc.DataArray)
+
+
+def load_isis_moderator_time_spread(
+    filename: ModeratorTimeSpreadFilename,
+) -> ModeratorTimeSpread:
+    """
+    Load moderator time spread from an ISIS moderator file.
+
+    The file looks as follows:
+
+    .. code-block:: text
+
+        Fri 08-Aug-2015, LET exptl data (FWHM/2.35) [...]
+
+        61 0 0 0 1 61 0
+        0 0 0 0
+        3 (F12.5,2E14.6)
+        0.00000 2.257600E+01 0.000000E+00
+        0.50000 2.677152E+01 0.000000E+00
+        1.00000 3.093920E+01 0.000000E+00
+        1.50000 3.507903E+01 0.000000E+00
+        2.00000 3.919100E+01 0.000000E+00
+
+    The first column is the wavelength in Angstrom, the second is the time spread in
+    microseconds. The third column is the error on the time spread, which we ignore.
+    """
+    wavelength, time_spread = np.loadtxt(
+        filename, skiprows=5, usecols=(0, 1), unpack=True
+    )
+    wav = 'wavelength'
+    return ModeratorTimeSpread(
+        sc.DataArray(
+            data=sc.array(dims=[wav], values=time_spread, unit='us'),
+            coords={wav: sc.array(dims=[wav], values=wavelength, unit='angstrom')},
+        )
+    )
+
+
+def moderator_time_spread_to_wavelength_spread(
+    moderator_time_spread: ModeratorTimeSpread,
+    beamline: CalibratedBeamline[SampleRun],
+    wavelength_bins: WavelengthBins,
+    masks: DetectorMasks,
+) -> SigmaModerator:
+    """
+    Convert the moderator time spread to a wavelength spread and mask pixels.
+
+    This resamples the raw moderator time spread to the wavelength bins used in the
+    data reduction. The result is a DataArray with the wavelength spread as a function
+    of pixel and wavelength.
+
+    Parameters
+    ----------
+    moderator_time_spread:
+        Moderator time spread.
+    beamline:
+        Beamline geometry information required for converting time spread to
+        wavelength spread. The latter depends on the pixel position via the
+        sample-detector distance L2.
+    wavelength_bins:
+        Binning in wavelength.
+    masks:
+        Detector masks.
+
+    Returns
+    -------
+    :
+        Wavelength spread of the moderator.
+    """
+    # We want to convert a small time-of-flight spread to a wavelength spread. As this
+    # time spread is assumed to be symmetric around the base time-of-flight we do not
+    # want to correct for gravity here as it would skew the result.
+    graph = sans_elastic(correct_for_gravity=False)
+    dtof = resample_direct_beam(moderator_time_spread, wavelength_bins=wavelength_bins)
+    # We would like to "transform" the *data*, but we only have transform_coords, so
+    # there is some back and forth between data and coords here.
+    dummy = beamline.broadcast(sizes={**beamline.sizes, **dtof.sizes})
+    dummy.data = sc.empty(sizes=dummy.sizes)
+    da = (
+        dummy.assign_coords(tof=dtof.data)
+        .assign_masks(masks)
+        .transform_coords('wavelength', graph=graph, keep_inputs=False)
+    )
+    da.data = da.coords.pop('wavelength')
+    return SigmaModerator(da.assign_coords(wavelength=wavelength_bins))
+
+
+def q_resolution_by_pixel(
+    sigma_moderator: SigmaModerator,
+    source_aperture: SourceApertureRadius,
+    sample_aperture: SampleApertureRadius,
+    collimation_length: CollimationLength,
+    delta_r: DeltaR,
+    graph: ElasticCoordTransformGraph,
+) -> QResolutionByPixel:
+    """
+    Calculate the Q-resolution per pixel and wavelength.
+
+    This is a Gaussian approximation to the Q-resolution (Mildner and Carpenter). It
+    is likely not sufficient for the long ESS pulse. Based on communication with
+    Andrew Jackson, this "approximation works OK when you have all your Q points from
+    a single planar detector (as on SANS2D) and was implemented by Richard Heenan as a
+    'hack' to get a resolution value of some kind".
+
+    Parameters
+    ----------
+    sigma_moderator:
+        Moderator wavelength spread as a function of pixel and wavelength.
+    source_aperture:
+        Source aperture radius, R1.
+    sample_aperture:
+        Sample aperture radius, R2.
+    collimation_length:
+        Collimation length.
+    delta_r:
+        Virtual ring width on the detector.
+    graph:
+        Coordinate transformation graph for elastic scattering.
+
+    Returns
+    -------
+    :
+        Q-resolution per pixel and wavelength.
+    """
+    wavelength_bins = sigma_moderator.coords['wavelength']
+    delta_lambda = wavelength_bins[1:] - wavelength_bins[:-1]
+    result = sigma_moderator.assign_coords(
+        wavelength=sc.midpoints(wavelength_bins)
+    ).transform_coords('Q', graph=graph, keep_inputs=True)
+    L2 = result.coords["L2"]
+    L3 = sc.reciprocal(sc.reciprocal(collimation_length) + sc.reciprocal(L2))
+    pixel_term = (
+        3 * ((source_aperture / collimation_length) ** 2 + (sample_aperture / L3) ** 2)
+        + (delta_r / L2) ** 2
+    )
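+    # The in-place arithmetic below evaluates, per pixel and wavelength bin, the
+    # Mildner-Carpenter-style Gaussian variance
+    #
+    #   sigma_Q^2 = [ (pi^2 / 3) * (3*(R1/L1)^2 + 3*(R2/L3)^2 + (dR/L2)^2)
+    #                 + Q^2 * (sigma_lambda^2 + delta_lambda^2 / 12) ] / lambda^2
+    #
+    # with L1 the collimation length, L2 the sample-detector distance,
+    # 1/L3 = 1/L1 + 1/L2, sigma_lambda the moderator wavelength spread, and
+    # delta_lambda the wavelength bin width.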
+    # Written in multiple steps to avoid allocation of intermediate arrays. This
+    # makes this step about twice as fast (but computing Q above dominates anyway).
+    result.data *= result.data
+    result.data += delta_lambda**2 / 12
+    result.data *= result.coords['Q'] ** 2
+    result.data += (pi**2 / 3) * pixel_term
+    result = sc.sqrt(result)
+    result /= result.coords['wavelength']
+    return QResolutionByPixel(result)
+
+
+def q_resolution_by_wavelength(
+    data: QResolutionByPixel,
+    q_bins: QBins,
+    dims_to_keep: DimsToKeep,
+    mask: WavelengthMask,
+) -> QResolutionByWavelength:
+    """
+    Compute the masked Q-resolution in (Q, lambda) space.
+
+    Parameters
+    ----------
+    data:
+        Q-resolution per pixel and wavelength.
+    q_bins:
+        Binning in Q.
+    dims_to_keep:
+        Detector dimensions that should not be reduced.
+    mask:
+        Wavelength mask.
+
+    Returns
+    -------
+    :
+        Q-resolution in (Q, lambda) space.
+    """
+    dims = [dim for dim in data.dims if dim not in (*dims_to_keep, 'wavelength')]
+    resolution = data.bin(Q=q_bins, dim=dims)
+    if mask is not None:
+        resolution = mask_range(resolution, mask=mask)
+    return QResolutionByWavelength(resolution)
+
+
+def reduce_resolution_q(
+    data: QResolutionByWavelength, bands: ProcessedWavelengthBands
+) -> QResolution:
+    """
+    Q-resolution reduced over the wavelength dimension.
+
+    The result depends only on Q, or (Q, wavelength_band) if wavelength bands are
+    used.
+
+    Note that the result is *binned data*, with each bin entry representing a specific
+    input pixel and wavelength. The final resolution can be obtained, e.g., by
+    computing `resolution.bins.max()`, assuming `resolution` is the output of this
+    function.
+
+    Parameters
+    ----------
+    data:
+        Q-resolution in (Q, lambda) space.
+    bands:
+        Wavelength bands.
+
+    Returns
+    -------
+    :
+        Reduced Q-resolution.
+    """
+    return QResolution(_reduce(data, bands=bands))
+
+
+providers = (
+    load_isis_moderator_time_spread,
+    moderator_time_spread_to_wavelength_spread,
+    q_resolution_by_pixel,
+    q_resolution_by_wavelength,
+    reduce_resolution_q,
+)
diff --git a/src/ess/sans/types.py b/src/ess/sans/types.py
index a5e21bca..e17c719a 100644
--- a/src/ess/sans/types.py
+++ b/src/ess/sans/types.py
@@ -16,6 +16,7 @@ from ess.reduce.uncertainty import UncertaintyBroadcastMode as _UncertaintyBroadcastMode
 BackgroundRun = reduce_t.BackgroundRun
+CalibratedBeamline = reduce_t.CalibratedBeamline
 CalibratedDetector = reduce_t.CalibratedDetector
 CalibratedMonitor = reduce_t.CalibratedMonitor
 DetectorData = reduce_t.DetectorData
diff --git a/src/ess/sans/workflow.py b/src/ess/sans/workflow.py
index 15a1a567..287d097a 100644
--- a/src/ess/sans/workflow.py
+++ b/src/ess/sans/workflow.py
@@ -9,7 +9,7 @@ from ess.reduce.nexus.workflow import GenericNeXusWorkflow
 from ess.reduce.parameter import parameter_mappers
-from . import common, conversions, i_of_q, masking, normalization
+from . import common, conversions, i_of_q, masking, normalization, qresolution
 from .types import (
     BackgroundRun,
     CleanSummedQ,
@@ -151,6 +151,7 @@ def with_background_runs(
     *masking.providers,
     *normalization.providers,
     common.beam_center_to_detector_position_offset,
+    *qresolution.providers,
 )
 """
 List of providers for setting up a Sciline pipeline.