Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
58 changes: 29 additions & 29 deletions plugins/bm29/hplc.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,7 +10,7 @@
__contact__ = "Jerome.Kieffer@ESRF.eu"
__license__ = "MIT"
__copyright__ = "European Synchrotron Radiation Facility, Grenoble, France"
__date__ = "21/02/2025"
__date__ = "27/05/2025"
__status__ = "development"
__version__ = "0.3.0"

Expand Down Expand Up @@ -51,7 +51,7 @@

def smooth_chromatogram(signal, window):
"""smooth-out the chromatogram

    :param signal: the chromatogram as 1d array
:param window: the size of the window
"""
Expand All @@ -73,7 +73,7 @@ def smooth_chromatogram(signal, window):
def search_peaks(signal, wmin=10, scale=0.9):
"""
Label all peak regions of chromatogram.

    :param signal: smooth signal
:param wmin: minimum width for a peak. smaller ones are discarded.
:param scale: shrink factor (i.e. <1 for the search zone)
Expand Down Expand Up @@ -108,14 +108,14 @@ def search_peaks(signal, wmin=10, scale=0.9):
def build_background(I, std=None, keep=0.3):
"""
Build a background from a SVD and search for the frames looking most like the background.

1. build a coarse approximation based on the SVD.
2. measure the distance (cormap) of every single frame to the fundamental of the SVD
3. average frames that looks most like the coarse approximation (with deviation)

:param I: 2D array of shape (nframes, nbins)
:param std: same as I but with the standard deviation.
:param keep: fraction of frames to consider for background (<1!), 30% looks like a good guess
:param keep: fraction of frames to consider for background (<1!), 30% looks like a good guess
:return: (bg_avg, bg_std, indexes), each 1d of size nbins. + the index of the frames to keep
"""
U, S, V = numpy.linalg.svd(I.T, full_matrices=False)
Expand All @@ -135,21 +135,21 @@ def build_background(I, std=None, keep=0.3):

class HPLC(Plugin):
""" Rebuild the complete chromatogram and perform basic analysis on it.

Typical JSON file:
{
"integrated_files": ["img_001.h5", "img_002.h5"],
"output_file": "hplc.h5"
"ispyb": {
"url": "http://ispyb.esrf.fr:1234",
"pyarch": "/data/pyarch/mx1234/sample",
"pyarch": "/data/pyarch/mx1234/sample",
"measurement_id": -1,
"collection_id": -1
},
"nmf_components": 5,
"nmf_components": 5,
"wait_for": [jobid_img001, jobid_img002],
"plugin_name": "bm29.hplc"
}
}
"""
NMF_COMP = 5
"Default number of Non-negative matrix factorisation components. Corresponds to the number of species"
Expand Down Expand Up @@ -214,7 +214,7 @@ def process(self):
self.to_pyarch["sample_name"] = self.juices[0].sample.name
if not self.input.get("no_ispyb"):
self.send_to_ispyb()
# self.output["icat"] =
# self.output["icat"] =
self.send_to_icat()

def teardown(self):
Expand All @@ -233,7 +233,7 @@ def create_nexus(self):
title='BioSaxs HPLC experiment',
force_time=get_isotime())
entry_grp["version"] = __version__
nxs.h5.attrs["default"] = entry_grp.name
nxs.h5.attrs["default"] = entry_grp.name.strip("/")

# Configuration
cfg_grp = nxs.new_class(entry_grp, "configuration", "NXnote")
Expand Down Expand Up @@ -285,7 +285,8 @@ def create_nexus(self):
frame_ds.attrs["long_name"] = "frame index"
hplc_data.attrs["signal"] = "sum"
hplc_data.attrs["axes"] = "frame_ids"
chroma_grp.attrs["default"] = entry_grp.attrs["default"] = hplc_data.name
chroma_grp.attrs["default"] = posixpath.relpath(hplc_data.name, chroma_grp.name)
entry_grp.attrs["default"] = posixpath.relpath(hplc_data.name, entry_grp.name)
time_ds = hplc_data.create_dataset("timestamps", data=timestamps, dtype=numpy.uint32)
time_ds.attrs["interpretation"] = "spectrum"
time_ds.attrs["long_name"] = "Time stamps (s)"
Expand Down Expand Up @@ -340,7 +341,7 @@ def create_nexus(self):
chroma_data.attrs["SILX_style"] = NORMAL_STYLE

svd_grp.create_dataset("eigenvalues", data=S[:r], dtype=numpy.float32)
svd_grp.attrs["default"] = chroma_data.name
svd_grp.attrs["default"] = posixpath.relpath(chroma_data.name, svd_grp.name)

# Process 3: NMF matrix decomposition
nmf_grp = nxs.new_class(entry_grp, "3_NMF", "NXprocess")
Expand Down Expand Up @@ -370,7 +371,7 @@ def create_nexus(self):
chroma_ds.attrs["interpretation"] = "spectrum"
chroma_data.attrs["signal"] = "H"
chroma_data.attrs["SILX_style"] = NORMAL_STYLE
nmf_grp.attrs["default"] = chroma_data.name
nmf_grp.attrs["default"] = posixpath.relpath(chroma_data.name, nmf_grp.name)

# Process 5: Background estimation
bg_grp = nxs.new_class(entry_grp, "4_background", "NXprocess")
Expand All @@ -397,7 +398,7 @@ def create_nexus(self):
bg_q_ds.attrs["long_name"] = f"Scattering vector q ({radius_unit}⁻¹)"
bg_std_ds = bg_data.create_dataset("errors", data=numpy.ascontiguousarray(bg_std, dtype=numpy.float32))
bg_std_ds.attrs["interpretation"] = "spectrum"
bg_grp.attrs["default"] = bg_data.name
bg_grp.attrs["default"] = posixpath.relpath(bg_data.name, bg_grp.name)
I_sub = I - bg_avg
Istd_sub = numpy.sqrt(sigma ** 2 + bg_std ** 2)

Expand Down Expand Up @@ -434,7 +435,7 @@ def one_fraction(self, fraction, index, nxs, top_grp):
:param index: index of the fraction
:param nxs: opened Nexus file object
:param top_grp: top level nexus group to start building into.

"""
q = self.juices[0].q
unit = self.juices[0].unit
Expand All @@ -458,7 +459,7 @@ def one_fraction(self, fraction, index, nxs, top_grp):
avg_data.attrs["title"] = f"{sample.name}, frames {fraction.start}-{fraction.stop} averaged, buffer subtracted"
avg_data.attrs["signal"] = "I"
avg_data.attrs["axes"] = radial_unit
f_grp.attrs["default"] = avg_data.name
f_grp.attrs["default"] = posixpath.relpath(avg_data.name, f_grp.name)
avg_q_ds = avg_data.create_dataset(radial_unit,
data=numpy.ascontiguousarray(q, dtype=numpy.float32))
avg_q_ds.attrs["units"] = unit_name
Expand Down Expand Up @@ -610,9 +611,9 @@ def one_fraction(self, fraction, index, nxs, top_grp):
guinier_data_attrs["signal"] = "logI"
guinier_data_attrs["axes"] = "q2"
guinier_data_attrs["auxiliary_signals"] = "fit"
guinier_grp.attrs["default"] = guinier_data.name
guinier_grp.attrs["default"] = posixpath.relpath(guinier_data.name, guinier_grp.name)
if guinier is None:
f_grp.attrs["default"] = avg_data.name
f_grp.attrs["default"] = posixpath.relpath(avg_data.name, f_grp.name)
self.log_error("No Guinier region found, data of dubious quality", do_raise=False)
return

Expand All @@ -625,7 +626,7 @@ def one_fraction(self, fraction, index, nxs, top_grp):
kratky_data = nxs.new_class(kratky_grp, "results", "NXdata")
kratky_data.attrs["SILX_style"] = NORMAL_STYLE
kratky_data.attrs["title"] = "Dimensionless Kratky plots"
kratky_grp.attrs["default"] = kratky_data.name
kratky_grp.attrs["default"] = posixpath.relpath(kratky_data.name, kratky_grp.name)

# Stage #5 Kratky plot generation:
Rg = guinier.Rg
Expand Down Expand Up @@ -762,7 +763,7 @@ def one_fraction(self, fraction, index, nxs, top_grp):
bift_ds = avg_data.create_dataset("BIFT", data=T.dot(stats.density_avg).astype(numpy.float32))
bift_ds.attrs["interpretation"] = "spectrum"
avg_data.attrs["auxiliary_signals"] = "BIFT"
bift_grp.attrs["default"] = bift_data.name
bift_grp.attrs["default"] = posixpath.relpath(bift_data.name, bift_grp.name)

def build_ispyb_group(self, nxs, top_grp):
"""Build the ispyb group inside the HDF5/Nexus file and all associated calculation
Expand Down Expand Up @@ -942,7 +943,7 @@ def send_to_ispyb(self):
else:
self.log_warning(f"Not sending to ISPyB: no valid URL in {self.ispyb}")

def send_to_icat(self):
def send_to_icat(self):
to_icat = copy.copy(self.to_pyarch)
to_icat["experiment_type"] = "hplc"
to_icat["sample"] = self.juices[0].sample
Expand All @@ -954,8 +955,8 @@ def send_to_icat(self):
return send_icat(sample=self.juices[0].sample,
raw=os.path.dirname(os.path.abspath(self.input_files[0])),
path=os.path.dirname(os.path.abspath(self.output_file)),
data=to_icat,
gallery=gallery,
data=to_icat,
gallery=gallery,
metadata=metadata)

def save_csv(self, filename, sum_I, Rg):
Expand All @@ -970,7 +971,6 @@ def save_csv(self, filename, sum_I, Rg):
lines.append("")
with open(filename, "w") as csv:
csv.write(os.linesep.join(lines))







41 changes: 23 additions & 18 deletions plugins/bm29/integrate.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,7 @@
__contact__ = "Jerome.Kieffer@ESRF.eu"
__license__ = "MIT"
__copyright__ = "European Synchrotron Radiation Facility, Grenoble, France"
__date__ = "21/02/2025"
__date__ = "04/06/2025"
__status__ = "development"
__version__ = "0.3.0"

Expand All @@ -20,6 +20,7 @@
import json
import logging
import copy
import posixpath
from collections import namedtuple
from urllib3.util import parse_url
from dahu.plugin import Plugin
Expand Down Expand Up @@ -79,6 +80,7 @@ class IntegrateMultiframe(Plugin):
"fidelity_rel": 1e-3,
"hplc_mode": 0,
"timeout": 10,
"average_out_monitor_values": False, # use this to work around noisy beam stop diode reading.
"sample": {
"name": "bsa",
"description": "protein description like Bovine Serum Albumin",
Expand Down Expand Up @@ -185,6 +187,8 @@ def setup(self, kwargs=None):
else:
self.energy = numpy.float32(self.energy) # It is important to fix the datatype of the energy
self.monitor_values = numpy.array(self.input.get("monitor_values", 1), dtype=numpy.float64)
if self.input.get("average_out_monitor_values"):
self.monitor_values = numpy.zeros_like(self.monitor_values) + self.monitor_values.mean()
self.normalization_factor = float(self.input.get("normalization_factor", 1))
self.scale_factor = float(self.input.get("exposure_time", 1)) / self.normalization_factor

Expand Down Expand Up @@ -232,7 +236,7 @@ def process(self):
self.create_nexus()
self.output["memcached"] = self.send_to_memcached()
self.send_to_ispyb()
#self.output["icat"] =
#self.output["icat"] =
self.send_to_icat()

def wait_file(self, filename, timeout=None):
Expand Down Expand Up @@ -277,7 +281,7 @@ def create_nexus(self):
entry_grp = nxs.new_entry("entry", self.input.get("plugin_name", "dahu"),
title='BioSaxs multiframe integration',
force_time=get_isotime(creation_time))
nxs.h5.attrs["default"] = entry_grp.name
nxs.h5.attrs["default"] = entry_grp.name.strip("/")

# Configuration
cfg_grp = nxs.new_class(entry_grp, "configuration", "NXnote")
Expand Down Expand Up @@ -432,11 +436,12 @@ def create_nexus(self):
hplc_data.attrs["axes"] = "frame_ids"

if self.input.get("hplc_mode"):
entry_grp.attrs["default"] = entry_grp.attrs["default"] = integration_grp.attrs["default"] = hplc_data.name
entry_grp.attrs["default"] = posixpath.relpath(hplc_data.name, entry_grp.name)
integration_grp.attrs["default"] = posixpath.relpath(hplc_data.name, integration_grp.name)
self.log_warning("HPLC mode detected, stopping after frame per frame integration")
return

integration_grp.attrs["default"] = integration_data.name
integration_grp.attrs["default"] = posixpath.relpath(integration_data.name, integration_grp.name)

# Process 2: Freesas cormap
cormap_grp = nxs.new_class(entry_grp, "2_correlation_mapping", "NXprocess")
Expand Down Expand Up @@ -466,7 +471,7 @@ def create_nexus(self):

to_merge_ds = cormap_data.create_dataset("to_merge", data=numpy.arange(*cormap_results.tomerge, dtype=numpy.uint16))
to_merge_ds.attrs["long_name"] = "Index of equivalent frames"
cormap_grp.attrs["default"] = cormap_data.name
cormap_grp.attrs["default"] = posixpath.relpath(cormap_data.name, cormap_grp.name)
if self.ispyb.url:
self.to_pyarch["merged"] = cormap_results.tomerge

Expand Down Expand Up @@ -497,11 +502,11 @@ def create_nexus(self):
int_std_ds.attrs["interpretation"] = "image"
int_std_ds.attrs["formula"] = "sqrt(sum_i(variance_i)/sum_i(normalization_i))"
int_std_ds.attrs["method"] = "Propagated error from weighted mean assuming Poissonian behaviour of every data-point"

int_nrm_ds = average_data.create_dataset("normalization", data=norm)
int_nrm_ds.attrs["formula"] = "sum_i(normalization_i))"
average_grp.attrs["default"] = average_data.name

average_grp.attrs["default"] = posixpath.relpath(average_data.name, average_grp.name)

# Process 4: Azimuthal integration of the time average image
ai2_grp = nxs.new_class(entry_grp, "4_azimuthal_integration", "NXprocess")
Expand All @@ -518,7 +523,7 @@ def create_nexus(self):
ai2_grp["configuration"] = integration_grp["configuration"]
# ai2_grp["polarization_factor"] = integration_grp["polarization_factor"]
# ai2_grp["integration_method"] = integration_grp["integration_method"]
ai2_grp.attrs["default"] = ai2_data.name
ai2_grp.attrs["default"] = posixpath.relpath(ai2_data.name, ai2_grp.name)

# Stage 4 processing
intensity_std = res3.deviation
Expand Down Expand Up @@ -552,7 +557,7 @@ def create_nexus(self):
ai2_std_ds.attrs["interpretation"] = "spectrum"
ai2_int_ds.attrs["units"] = "arbitrary"
# Finally declare the default entry and default dataset ...
entry_grp.attrs["default"] = ai2_data.name
entry_grp.attrs["default"] = posixpath.relpath(ai2_data.name, entry_grp.name)

# Export this to the output JSON
# self.output["q"] = res2.radial
Expand Down Expand Up @@ -606,7 +611,7 @@ def process3_average(self, tomerge):
valid_slice = slice(*tomerge)
mask = self.ai.detector.mask
sum_data = (self.input_frames[valid_slice]).sum(axis=0)
sum_norm = self.scale_factor * sum(self.monitor_values[valid_slice])
sum_norm = self.scale_factor * sum(self.monitor_values[valid_slice])
if numexpr is not None:
# Numexpr is many-times faster than numpy when it comes to element-wise operations
intensity_avg = numexpr.evaluate("where(mask==0, sum_data/sum_norm, 0.0)")
Expand All @@ -629,9 +634,9 @@ def send_to_ispyb(self):
self.log_warning(f"Not sending to ISPyB: no valid URL {self.ispyb.url}")

def send_to_icat(self):
#Some more metadata for iCat, as strings:
#Some more metadata for iCat, as strings:
to_icat = copy.copy(self.to_pyarch)
to_icat["experiment_type"] = "hplc" if self.input.get("hplc_mode") else "sample-changer"
to_icat["experiment_type"] = "hplc" if self.input.get("hplc_mode") else "sample-changer"
to_icat["sample"] = self.sample
to_icat["SAXS_maskFile"] = self.mask
to_icat["SAXS_waveLength"] = str(self.ai.wavelength)
Expand All @@ -645,13 +650,13 @@ def send_to_icat(self):
f2d = self.ai.getFit2D()
to_icat["SAXS_beam_center_x"] = str(f2d["centerX"])
to_icat["SAXS_beam_center_y"] = str(f2d["centerY"])

metadata = {"scanType": "integration"}
return send_icat(sample=self.sample.name,
raw=os.path.dirname(os.path.dirname(os.path.abspath(self.input_file))),
path=os.path.dirname(os.path.abspath(self.output_file)),
data=to_icat,
gallery=self.ispyb.gallery or os.path.join(os.path.dirname(os.path.abspath(self.output_file)), "gallery"),
data=to_icat,
gallery=self.ispyb.gallery or os.path.join(os.path.dirname(os.path.abspath(self.output_file)), "gallery"),
metadata=metadata)

def send_to_memcached(self):
Expand All @@ -661,5 +666,5 @@ def send_to_memcached(self):
for k in sorted(self.to_memcached.keys(), key=lambda i:self.to_memcached[i].nbytes):
key = f"{key_base}_{k}"
dico[key] = json.dumps(self.to_memcached[k], cls=NumpyEncoder)
return to_memcached(dico)
return to_memcached(dico)

10 changes: 5 additions & 5 deletions plugins/bm29/nexus.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@
__contact__ = "Jerome.Kieffer@ESRF.eu"
__license__ = "MIT"
__copyright__ = "European Synchrotron Radiation Facility, Grenoble, France"
__date__ = "21/02/2025"
__date__ = "27/05/2025"
__status__ = "production"
__docformat__ = 'restructuredtext'

Expand Down Expand Up @@ -85,8 +85,8 @@ class Nexus:
TODO: make it thread-safe !!!
"""

def __init__(self, filename, mode=None,
creator=None,
def __init__(self, filename, mode=None,
creator=None,
timeout=None,
start_time=None):
"""
Expand Down Expand Up @@ -244,7 +244,7 @@ def new_entry(self, entry="entry", program_name="pyFAI",
nb_entries = len(self.get_entries())
entry = "%s_%04i" % (entry, nb_entries)
entry_grp = self.h5.require_group(entry)
self.h5.attrs["default"] = entry
self.h5.attrs["default"] = entry_grp.name.strip("/")
entry_grp.attrs["NX_class"] = "NXentry"
entry_grp["title"] = str(title)
entry_grp["program_name"] = program_name
Expand All @@ -269,7 +269,7 @@ def new_instrument(self, entry="entry", instrument_name="id00",):
def new_class(grp, name, class_type="NXcollection"):
"""
create a new sub-group with type class_type

:param grp: parent group
:param name: name of the sub-group
:param class_type: NeXus class name
Expand Down
Loading