From 1aeeb324a9acf3a4df8fe5c17d1dffc2ed8af623 Mon Sep 17 00:00:00 2001 From: Jerome Kieffer Date: Tue, 27 May 2025 16:17:31 +0200 Subject: [PATCH 1/7] fix relative path for default dataset --- plugins/bm29/hplc.py | 58 ++++++++++++++++++++++---------------------- 1 file changed, 29 insertions(+), 29 deletions(-) diff --git a/plugins/bm29/hplc.py b/plugins/bm29/hplc.py index b5bf3ea..5e3a8da 100644 --- a/plugins/bm29/hplc.py +++ b/plugins/bm29/hplc.py @@ -10,7 +10,7 @@ __contact__ = "Jerome.Kieffer@ESRF.eu" __license__ = "MIT" __copyright__ = "European Synchrotron Radiation Facility, Grenoble, France" -__date__ = "21/02/2025" +__date__ = "27/05/2025" __status__ = "development" __version__ = "0.3.0" @@ -51,7 +51,7 @@ def smooth_chromatogram(signal, window): """smooth-out the chromatogram - + :param signal: the chomatogram as 1d array :param window: the size of the window """ @@ -73,7 +73,7 @@ def smooth_chromatogram(signal, window): def search_peaks(signal, wmin=10, scale=0.9): """ Label all peak regions of chromatogram. - + :param signal=smooth signal :param wmin: minimum width for a peak. smaller ones are discarded. :param scale: shrink factor (i.e. <1 for the search zone) @@ -108,14 +108,14 @@ def search_peaks(signal, wmin=10, scale=0.9): def build_background(I, std=None, keep=0.3): """ Build a background from a SVD and search for the frames looking most like the background. - + 1. build a coarse approximation based on the SVD. 2. measure the distance (cormap) of every single frame to the fundamental of the SVD 3. average frames that looks most like the coarse approximation (with deviation) - + :param I: 2D array of shape (nframes, nbins) :param std: same as I but with the standard deviation. - :param keep: fraction of frames to consider for background (<1!), 30% looks like a good guess + :param keep: fraction of frames to consider for background (<1!), 30% looks like a good guess :return: (bg_avg, bg_std, indexes), each 1d of size nbins. 
+ the index of the frames to keep """ U, S, V = numpy.linalg.svd(I.T, full_matrices=False) @@ -135,21 +135,21 @@ def build_background(I, std=None, keep=0.3): class HPLC(Plugin): """ Rebuild the complete chromatogram and perform basic analysis on it. - + Typical JSON file: { "integrated_files": ["img_001.h5", "img_002.h5"], "output_file": "hplc.h5" "ispyb": { "url": "http://ispyb.esrf.fr:1234", - "pyarch": "/data/pyarch/mx1234/sample", + "pyarch": "/data/pyarch/mx1234/sample", "measurement_id": -1, "collection_id": -1 }, - "nmf_components": 5, + "nmf_components": 5, "wait_for": [jobid_img001, jobid_img002], "plugin_name": "bm29.hplc" - } + } """ NMF_COMP = 5 "Default number of Non-negative matrix factorisation components. Correspond to the number of spieces" @@ -214,7 +214,7 @@ def process(self): self.to_pyarch["sample_name"] = self.juices[0].sample.name if not self.input.get("no_ispyb"): self.send_to_ispyb() - # self.output["icat"] = + # self.output["icat"] = self.send_to_icat() def teardown(self): @@ -233,7 +233,7 @@ def create_nexus(self): title='BioSaxs HPLC experiment', force_time=get_isotime()) entry_grp["version"] = __version__ - nxs.h5.attrs["default"] = entry_grp.name + nxs.h5.attrs["default"] = entry_grp.name.strip("/") # Configuration cfg_grp = nxs.new_class(entry_grp, "configuration", "NXnote") @@ -285,7 +285,8 @@ def create_nexus(self): frame_ds.attrs["long_name"] = "frame index" hplc_data.attrs["signal"] = "sum" hplc_data.attrs["axes"] = "frame_ids" - chroma_grp.attrs["default"] = entry_grp.attrs["default"] = hplc_data.name + chroma_grp.attrs["default"] = posixpath.relpath(hplc_data.name, chroma_grp.name) + entry_grp.attrs["default"] = posixpath.relpath(hplc_data.name, entry_grp.name) time_ds = hplc_data.create_dataset("timestamps", data=timestamps, dtype=numpy.uint32) time_ds.attrs["interpretation"] = "spectrum" time_ds.attrs["long_name"] = "Time stamps (s)" @@ -340,7 +341,7 @@ def create_nexus(self): chroma_data.attrs["SILX_style"] = NORMAL_STYLE 
svd_grp.create_dataset("eigenvalues", data=S[:r], dtype=numpy.float32) - svd_grp.attrs["default"] = chroma_data.name + svd_grp.attrs["default"] = posixpath.relpath(chroma_data.name, svd_grp.name) # Process 3: NMF matrix decomposition nmf_grp = nxs.new_class(entry_grp, "3_NMF", "NXprocess") @@ -370,7 +371,7 @@ def create_nexus(self): chroma_ds.attrs["interpretation"] = "spectrum" chroma_data.attrs["signal"] = "H" chroma_data.attrs["SILX_style"] = NORMAL_STYLE - nmf_grp.attrs["default"] = chroma_data.name + nmf_grp.attrs["default"] = posixpath.relpath(chroma_data.name, nmf_grp.name) # Process 5: Background estimation bg_grp = nxs.new_class(entry_grp, "4_background", "NXprocess") @@ -397,7 +398,7 @@ def create_nexus(self): bg_q_ds.attrs["long_name"] = f"Scattering vector q ({radius_unit}⁻¹)" bg_std_ds = bg_data.create_dataset("errors", data=numpy.ascontiguousarray(bg_std, dtype=numpy.float32)) bg_std_ds.attrs["interpretation"] = "spectrum" - bg_grp.attrs["default"] = bg_data.name + bg_grp.attrs["default"] = posixpath.relpath(bg_data.name, bg_grp.name) I_sub = I - bg_avg Istd_sub = numpy.sqrt(sigma ** 2 + bg_std ** 2) @@ -434,7 +435,7 @@ def one_fraction(self, fraction, index, nxs, top_grp): :param index: index of the fraction :param nxs: opened Nexus file object :param top_grp: top level nexus group to start building into. 
- + """ q = self.juices[0].q unit = self.juices[0].unit @@ -458,7 +459,7 @@ def one_fraction(self, fraction, index, nxs, top_grp): avg_data.attrs["title"] = f"{sample.name}, frames {fraction.start}-{fraction.stop} averaged, buffer subtracted" avg_data.attrs["signal"] = "I" avg_data.attrs["axes"] = radial_unit - f_grp.attrs["default"] = avg_data.name + f_grp.attrs["default"] = posixpath.relpath(avg_data.name, f_grp.name) avg_q_ds = avg_data.create_dataset(radial_unit, data=numpy.ascontiguousarray(q, dtype=numpy.float32)) avg_q_ds.attrs["units"] = unit_name @@ -610,9 +611,9 @@ def one_fraction(self, fraction, index, nxs, top_grp): guinier_data_attrs["signal"] = "logI" guinier_data_attrs["axes"] = "q2" guinier_data_attrs["auxiliary_signals"] = "fit" - guinier_grp.attrs["default"] = guinier_data.name + guinier_grp.attrs["default"] = posixpath.relpath(guinier_data.name, guinier_grp.name) if guinier is None: - f_grp.attrs["default"] = avg_data.name + f_grp.attrs["default"] = posixpath.relpath(avg_data.name, f_grp.name) self.log_error("No Guinier region found, data of dubious quality", do_raise=False) return @@ -625,7 +626,7 @@ def one_fraction(self, fraction, index, nxs, top_grp): kratky_data = nxs.new_class(kratky_grp, "results", "NXdata") kratky_data.attrs["SILX_style"] = NORMAL_STYLE kratky_data.attrs["title"] = "Dimensionless Kratky plots" - kratky_grp.attrs["default"] = kratky_data.name + kratky_grp.attrs["default"] = posixpath.relpath(kratky_data.name, kratky_grp.name) # Stage #5 Kratky plot generation: Rg = guinier.Rg @@ -762,7 +763,7 @@ def one_fraction(self, fraction, index, nxs, top_grp): bift_ds = avg_data.create_dataset("BIFT", data=T.dot(stats.density_avg).astype(numpy.float32)) bift_ds.attrs["interpretation"] = "spectrum" avg_data.attrs["auxiliary_signals"] = "BIFT" - bift_grp.attrs["default"] = bift_data.name + bift_grp.attrs["default"] = posixpath.relpath(bift_data.name, bift_grp.name) def build_ispyb_group(self, nxs, top_grp): """Build the ispyb group 
inside the HDF5/Nexus file and all associated calculation @@ -942,7 +943,7 @@ def send_to_ispyb(self): else: self.log_warning(f"Not sending to ISPyB: no valid URL in {self.ispyb}") - def send_to_icat(self): + def send_to_icat(self): to_icat = copy.copy(self.to_pyarch) to_icat["experiment_type"] = "hplc" to_icat["sample"] = self.juices[0].sample @@ -954,8 +955,8 @@ def send_to_icat(self): return send_icat(sample=self.juices[0].sample, raw=os.path.dirname(os.path.abspath(self.input_files[0])), path=os.path.dirname(os.path.abspath(self.output_file)), - data=to_icat, - gallery=gallery, + data=to_icat, + gallery=gallery, metadata=metadata) def save_csv(self, filename, sum_I, Rg): @@ -970,7 +971,6 @@ def save_csv(self, filename, sum_I, Rg): lines.append("") with open(filename, "w") as csv: csv.write(os.linesep.join(lines)) - - - - \ No newline at end of file + + + From d6be44d310c95fbae5ecf573855de30cb21ee143 Mon Sep 17 00:00:00 2001 From: Jerome Kieffer Date: Tue, 27 May 2025 16:20:42 +0200 Subject: [PATCH 2/7] fix relative path in default attrs --- plugins/bm29/integrate.py | 38 ++++++++++++++++++++------------------ 1 file changed, 20 insertions(+), 18 deletions(-) diff --git a/plugins/bm29/integrate.py b/plugins/bm29/integrate.py index d0e0b2f..5bb0e07 100644 --- a/plugins/bm29/integrate.py +++ b/plugins/bm29/integrate.py @@ -11,7 +11,7 @@ __contact__ = "Jerome.Kieffer@ESRF.eu" __license__ = "MIT" __copyright__ = "European Synchrotron Radiation Facility, Grenoble, France" -__date__ = "21/02/2025" +__date__ = "27/05/2025" __status__ = "development" __version__ = "0.3.0" @@ -20,6 +20,7 @@ import json import logging import copy +import posixpath from collections import namedtuple from urllib3.util import parse_url from dahu.plugin import Plugin @@ -232,7 +233,7 @@ def process(self): self.create_nexus() self.output["memcached"] = self.send_to_memcached() self.send_to_ispyb() - #self.output["icat"] = + #self.output["icat"] = self.send_to_icat() def wait_file(self, 
filename, timeout=None): @@ -277,7 +278,7 @@ def create_nexus(self): entry_grp = nxs.new_entry("entry", self.input.get("plugin_name", "dahu"), title='BioSaxs multiframe integration', force_time=get_isotime(creation_time)) - nxs.h5.attrs["default"] = entry_grp.name + nxs.h5.attrs["default"] = entry_grp.name.strip("/") # Configuration cfg_grp = nxs.new_class(entry_grp, "configuration", "NXnote") @@ -432,11 +433,12 @@ def create_nexus(self): hplc_data.attrs["axes"] = "frame_ids" if self.input.get("hplc_mode"): - entry_grp.attrs["default"] = entry_grp.attrs["default"] = integration_grp.attrs["default"] = hplc_data.name + entry_grp.attrs["default"] = posixpath.relpath(hplc_data.name, entry_grp.name) + integration_grp.attrs["default"] = posixpath.relpath(hplc_data.name, integration_grp.name) self.log_warning("HPLC mode detected, stopping after frame per frame integration") return - integration_grp.attrs["default"] = integration_data.name + integration_grp.attrs["default"] = posixpath.relpath(integration_data.name, integration_grp.name) # Process 2: Freesas cormap cormap_grp = nxs.new_class(entry_grp, "2_correlation_mapping", "NXprocess") @@ -466,7 +468,7 @@ def create_nexus(self): to_merge_ds = cormap_data.create_dataset("to_merge", data=numpy.arange(*cormap_results.tomerge, dtype=numpy.uint16)) to_merge_ds.attrs["long_name"] = "Index of equivalent frames" - cormap_grp.attrs["default"] = cormap_data.name + cormap_grp.attrs["default"] = posixpath.relpath(cormap_data.name, cormap_grp.name) if self.ispyb.url: self.to_pyarch["merged"] = cormap_results.tomerge @@ -497,11 +499,11 @@ def create_nexus(self): int_std_ds.attrs["interpretation"] = "image" int_std_ds.attrs["formula"] = "sqrt(sum_i(variance_i)/sum_i(normalization_i))" int_std_ds.attrs["method"] = "Propagated error from weighted mean assuming poissonian behavour of every data-point" - + int_nrm_ds = average_data.create_dataset("normalization", data=norm) int_nrm_ds.attrs["formula"] = "sum_i(normalization_i))" - - 
average_grp.attrs["default"] = average_data.name + + average_grp.attrs["default"] = posixpath.relpath(average_data.name, average_grp.name) # Process 4: Azimuthal integration of the time average image ai2_grp = nxs.new_class(entry_grp, "4_azimuthal_integration", "NXprocess") @@ -518,7 +520,7 @@ def create_nexus(self): ai2_grp["configuration"] = integration_grp["configuration"] # ai2_grp["polarization_factor"] = integration_grp["polarization_factor"] # ai2_grp["integration_method"] = integration_grp["integration_method"] - ai2_grp.attrs["default"] = ai2_data.name + ai2_grp.attrs["default"] = posixpath.relpath(ai2_data.name, ai2_grp.name) # Stage 4 processing intensity_std = res3.deviation @@ -552,7 +554,7 @@ def create_nexus(self): ai2_std_ds.attrs["interpretation"] = "spectrum" ai2_int_ds.attrs["units"] = "arbitrary" # Finally declare the default entry and default dataset ... - entry_grp.attrs["default"] = ai2_data.name + entry_grp.attrs["default"] = posixpath.relpath(ai2_data.name, entry_grp.name) # Export this to the output JSON # self.output["q"] = res2.radial @@ -606,7 +608,7 @@ def process3_average(self, tomerge): valid_slice = slice(*tomerge) mask = self.ai.detector.mask sum_data = (self.input_frames[valid_slice]).sum(axis=0) - sum_norm = self.scale_factor * sum(self.monitor_values[valid_slice]) + sum_norm = self.scale_factor * sum(self.monitor_values[valid_slice]) if numexpr is not None: # Numexpr is many-times faster than numpy when it comes to element-wise operations intensity_avg = numexpr.evaluate("where(mask==0, sum_data/sum_norm, 0.0)") @@ -629,9 +631,9 @@ def send_to_ispyb(self): self.log_warning(f"Not sending to ISPyB: no valid URL {self.ispyb.url}") def send_to_icat(self): - #Some more metadata for iCat, as strings: + #Some more metadata for iCat, as strings: to_icat = copy.copy(self.to_pyarch) - to_icat["experiment_type"] = "hplc" if self.input.get("hplc_mode") else "sample-changer" + to_icat["experiment_type"] = "hplc" if 
self.input.get("hplc_mode") else "sample-changer" to_icat["sample"] = self.sample to_icat["SAXS_maskFile"] = self.mask to_icat["SAXS_waveLength"] = str(self.ai.wavelength) @@ -645,13 +647,13 @@ def send_to_icat(self): f2d = self.ai.getFit2D() to_icat["SAXS_beam_center_x"] = str(f2d["centerX"]) to_icat["SAXS_beam_center_y"] = str(f2d["centerY"]) - + metadata = {"scanType": "integration"} return send_icat(sample=self.sample.name, raw=os.path.dirname(os.path.dirname(os.path.abspath(self.input_file))), path=os.path.dirname(os.path.abspath(self.output_file)), - data=to_icat, - gallery=self.ispyb.gallery or os.path.join(os.path.dirname(os.path.abspath(self.output_file)), "gallery"), + data=to_icat, + gallery=self.ispyb.gallery or os.path.join(os.path.dirname(os.path.abspath(self.output_file)), "gallery"), metadata=metadata) def send_to_memcached(self): @@ -661,5 +663,5 @@ def send_to_memcached(self): for k in sorted(self.to_memcached.keys(), key=lambda i:self.to_memcached[i].nbytes): key = f"{key_base}_{k}" dico[key] = json.dumps(self.to_memcached[k], cls=NumpyEncoder) - return to_memcached(dico) + return to_memcached(dico) From dd24a14e010be1b21050c6716b74343583282489 Mon Sep 17 00:00:00 2001 From: Jerome Kieffer Date: Tue, 27 May 2025 16:24:56 +0200 Subject: [PATCH 3/7] fix relative path for default --- plugins/bm29/nexus.py | 10 ++++----- plugins/bm29/subtracte.py | 44 +++++++++++++++++++-------------------- 2 files changed, 27 insertions(+), 27 deletions(-) diff --git a/plugins/bm29/nexus.py b/plugins/bm29/nexus.py index b8d214a..411f949 100644 --- a/plugins/bm29/nexus.py +++ b/plugins/bm29/nexus.py @@ -4,7 +4,7 @@ __contact__ = "Jerome.Kieffer@ESRF.eu" __license__ = "MIT" __copyright__ = "European Synchrotron Radiation Facility, Grenoble, France" -__date__ = "21/02/2025" +__date__ = "27/05/2025" __status__ = "production" __docformat__ = 'restructuredtext' @@ -85,8 +85,8 @@ class Nexus: TODO: make it thread-safe !!! 
""" - def __init__(self, filename, mode=None, - creator=None, + def __init__(self, filename, mode=None, + creator=None, timeout=None, start_time=None): """ @@ -244,7 +244,7 @@ def new_entry(self, entry="entry", program_name="pyFAI", nb_entries = len(self.get_entries()) entry = "%s_%04i" % (entry, nb_entries) entry_grp = self.h5.require_group(entry) - self.h5.attrs["default"] = entry + self.h5.attrs["default"] = entry_grp.name.strip("/") entry_grp.attrs["NX_class"] = "NXentry" entry_grp["title"] = str(title) entry_grp["program_name"] = program_name @@ -269,7 +269,7 @@ def new_instrument(self, entry="entry", instrument_name="id00",): def new_class(grp, name, class_type="NXcollection"): """ create a new sub-group with type class_type - + :param grp: parent group :param name: name of the sub-group :param class_type: NeXus class name diff --git a/plugins/bm29/subtracte.py b/plugins/bm29/subtracte.py index 75246df..0431c89 100644 --- a/plugins/bm29/subtracte.py +++ b/plugins/bm29/subtracte.py @@ -3,17 +3,17 @@ """Data Analysis plugin for BM29: BioSaxs -* SubtractBuffer: Search for the equivalence of buffers, average them and subtract from sample signal. -* SaxsAnalysis: Performs Guinier + Kratky + IFT, generates plots +* SubtractBuffer: Search for the equivalence of buffers, average them and subtract from sample signal. 
+* SaxsAnalysis: Performs Guinier + Kratky + IFT, generates plots """ __authors__ = ["Jérôme Kieffer"] __contact__ = "Jerome.Kieffer@ESRF.eu" __license__ = "MIT" __copyright__ = "European Synchrotron Radiation Facility, Grenoble, France" -__date__ = "24/02/2025" +__date__ = "27/05/2025" __status__ = "development" -__version__ = "0.3.0" +__version__ = "0.3.0" import os import posixpath @@ -141,7 +141,7 @@ def teardown(self): self.output["Dmax"] = self.Dmax self.output["Vc"] = self.Vc self.output["mass"] = self.mass - + #teardown everything else: if self.nxs is not None: self.nxs.close() @@ -169,7 +169,7 @@ def process(self): raise(err) else: self.send_to_ispyb() - self.send_to_icat() + self.send_to_icat() self.output["memcached"] = self.send_to_memcached() @@ -209,7 +209,7 @@ def create_nexus(self): entry_grp = nxs.new_entry("entry", self.input.get("plugin_name", "dahu"), title='BioSaxs buffer subtraction', force_time=get_isotime()) - nxs.h5.attrs["default"] = entry_grp.name + nxs.h5.attrs["default"] = entry_grp.name.stip["/"] # Configuration cfg_grp = nxs.new_class(entry_grp, "configuration", "NXnote") @@ -271,7 +271,7 @@ def create_nexus(self): to_merge_ds = cormap_data.create_dataset("to_merge", data=to_merge_idx) # self.log_warning(f"to_merge: {tomerge}") to_merge_ds.attrs["long_name"] = "Index of equivalent frames" - cormap_grp.attrs["default"] = cormap_data.name + cormap_grp.attrs["default"] = posixpath.relpath(cormap_data.name, cormap_grp.name) # Process 2: Image processing: subtraction with standard deviation average_grp = nxs.new_class(entry_grp, "2_buffer_subtraction", "NXprocess") @@ -283,7 +283,7 @@ def create_nexus(self): average_data.attrs["signal"] = "intensity_normed" # Stage 2 processing - # Nota: This formula takes into account the number of input frames in each averaged buffer ! + # Nota: This formula takes into account the number of input frames in each averaged buffer ! 
# avg = Σdata / Σnorm # var = sigma² = ΣV / Σnorm # TODO implement those math using numexpr: @@ -310,7 +310,7 @@ def create_nexus(self): int_std_ds.attrs["interpretation"] = "image" int_std_ds.attrs["formula"] = "sqrt( sample_variance + weighted_mean(buffer_variance_i) )" int_std_ds.attrs["method"] = "quadratic sum of sample error and buffer errors" - average_grp.attrs["default"] = average_data.name + average_grp.attrs["default"] = posixpath.relpath(average_data.name, average_grp.name) key_cache = KeyCache(self.sample_juice.npt, self.sample_juice.unit, self.sample_juice.poni, self.sample_juice.mask, self.sample_juice.energy) ai = get_integrator(key_cache) @@ -346,7 +346,7 @@ def create_nexus(self): ai2_data.attrs["title"] = "%s, subtracted" % self.sample_juice.sample.name ai2_data.attrs["signal"] = "I" ai2_data.attrs["axes"] = radial_unit - ai2_grp.attrs["default"] = ai2_data.name + ai2_grp.attrs["default"] = posixpath.relpath(ai2_data.name, ai2_grp.name) cfg_grp = nxs.new_class(ai2_grp, "configuration", "NXnote") cfg_grp.create_dataset("data", data=json.dumps(ai.get_config(), indent=2, separators=(",\r\n", ": "))) cfg_grp.create_dataset("format", data="text/json") @@ -393,7 +393,7 @@ def create_nexus(self): # Finally declare the default entry and default dataset ... 
# overlay the BIFT fitted data on top of the scattering curve - entry_grp.attrs["default"] = ai2_data.name + entry_grp.attrs["default"] = posixpath.relpath(ai2_data.name, entry_grp.name) # Process 4: Guinier analysis guinier_grp = nxs.new_class(entry_grp, "4_Guinier_analysis", "NXprocess") @@ -527,9 +527,9 @@ def create_nexus(self): guinier_data_attrs["signal"] = "logI" guinier_data_attrs["axes"] = "q2" guinier_data_attrs["auxiliary_signals"] = "fit" - guinier_grp.attrs["default"] = guinier_data.name + guinier_grp.attrs["default"] = posixpath.relpath(guinier_data.name, guinier_grp.name) if guinier is None: - entry_grp.attrs["default"] = ai2_data.name + entry_grp.attrs["default"] = posixpath.relpath(ai2_data.name, entry_grp.name) self.log_error("No Guinier region found, data of dubious quality", do_raise=True) # Process 5: Kratky plot @@ -541,7 +541,7 @@ def create_nexus(self): kratky_data = nxs.new_class(kratky_grp, "results", "NXdata") kratky_data.attrs["SILX_style"] = NORMAL_STYLE kratky_data.attrs["title"] = "Dimensionless Kratky plots" - kratky_grp.attrs["default"] = kratky_data.name + kratky_grp.attrs["default"] = posixpath.relpath(kratky_data.name, kratky_grp.name) # Stage #5 Kratky plot generation: Rg = guinier.Rg @@ -552,9 +552,9 @@ def create_nexus(self): qRg_ds = kratky_data.create_dataset("qRg", data=xdata.astype(numpy.float32)) qRg_ds.attrs["interpretation"] = "spectrum" qRg_ds.attrs["long_name"] = "q·Rg (unit-less)" - + #Nota the "/" hereafter is chr(8725), the division sign and not the usual slash - k_ds = kratky_data.create_dataset("q2Rg2I∕I0", data=ydata.astype(numpy.float32)) + k_ds = kratky_data.create_dataset("q2Rg2I∕I0", data=ydata.astype(numpy.float32)) k_ds.attrs["interpretation"] = "spectrum" k_ds.attrs["long_name"] = "q²Rg²I(q)/I₀" ke_ds = kratky_data.create_dataset("errors", data=dy.astype(numpy.float32)) @@ -681,7 +681,7 @@ def create_nexus(self): bift_ds = ai2_data.create_dataset("BIFT", 
data=T.dot(stats.density_avg).astype(numpy.float32)) bift_ds.attrs["interpretation"] = "spectrum" ai2_data.attrs["auxiliary_signals"] = "BIFT" - bift_grp.attrs["default"] = bift_data.name + bift_grp.attrs["default"] = posixpath.relpath(bift_data.name, bift_grp.name) self.to_pyarch["bift"] = stats @staticmethod @@ -739,7 +739,7 @@ def send_to_ispyb(self): else: self.log_warning("Not sending to ISPyB: no valid URL %s" % self.ispyb.url) - def send_to_icat(self): + def send_to_icat(self): to_icat = copy.copy(self.to_pyarch) to_icat["experiment_type"] = "sample-changer" if self.sample_juice is None: @@ -752,8 +752,8 @@ def send_to_icat(self): return send_icat(sample=self.sample_juice.sample, raw=raw, path=os.path.dirname(os.path.abspath(self.output_file)), - data=to_icat, - gallery=self.ispyb.gallery or os.path.join(os.path.dirname(os.path.abspath(self.output_file)), "gallery"), + data=to_icat, + gallery=self.ispyb.gallery or os.path.join(os.path.dirname(os.path.abspath(self.output_file)), "gallery"), metadata=metadata) def send_to_memcached(self): @@ -764,5 +764,5 @@ def send_to_memcached(self): key = key_base + "_" + k dico[key] = json.dumps(self.to_memcached[k], cls=NumpyEncoder) - return to_memcached(dico) + return to_memcached(dico) From e660028c50162cd0dc22e30397700a6e8512c878 Mon Sep 17 00:00:00 2001 From: Jerome Kieffer Date: Tue, 27 May 2025 16:27:43 +0200 Subject: [PATCH 4/7] fix path for ID02 as well --- plugins/id02/nexus.py | 10 +++++----- plugins/id02/single_detector.py | 6 +++--- plugins/id02/xpcs.py | 15 ++++++++------- 3 files changed, 16 insertions(+), 15 deletions(-) diff --git a/plugins/id02/nexus.py b/plugins/id02/nexus.py index 3978bf3..a27296f 100644 --- a/plugins/id02/nexus.py +++ b/plugins/id02/nexus.py @@ -4,7 +4,7 @@ __contact__ = "Jerome.Kieffer@ESRF.eu" __license__ = "MIT" __copyright__ = "European Synchrotron Radiation Facility, Grenoble, France" -__date__ = "20/02/2025" +__date__ = "27/05/2025" __status__ = "production" __docformat__ = 
'restructuredtext' @@ -86,8 +86,8 @@ class Nexus: TODO: make it thread-safe !!! """ - def __init__(self, filename, mode=None, - creator=None, + def __init__(self, filename, mode=None, + creator=None, timeout=None, start_time=None): """ @@ -233,7 +233,7 @@ def new_entry(self, entry="entry", program_name="pyFAI", nb_entries = len(self.get_entries()) entry = "%s_%04i" % (entry, nb_entries) entry_grp = self.h5.require_group(entry) - self.h5.attrs["default"] = entry + self.h5.attrs["default"] = entry.strip("/") entry_grp.attrs["NX_class"] = "NXentry" entry_grp["title"] = str(title) entry_grp["program_name"] = program_name @@ -258,7 +258,7 @@ def new_instrument(self, entry="entry", instrument_name="id00",): def new_class(grp, name, class_type="NXcollection"): """ create a new sub-group with type class_type - + :param grp: parent group :param name: name of the sub-group :param class_type: NeXus class name diff --git a/plugins/id02/single_detector.py b/plugins/id02/single_detector.py index 33dcb1f..0c87450 100644 --- a/plugins/id02/single_detector.py +++ b/plugins/id02/single_detector.py @@ -8,7 +8,7 @@ __contact__ = "Jerome.Kieffer@ESRF.eu" __license__ = "MIT" __copyright__ = "European Synchrotron Radiation Facility, Grenoble, France" -__date__ = "20/02/2025" +__date__ = "27/05/2025" __status__ = "development" __version__ = "0.9.3" @@ -683,7 +683,7 @@ def create_hdf5(self): nxprocess["date"] = isotime nxprocess["processing_type"] = ensure_str(ext) nxdata = nxs.new_class(nxprocess, "result_" + ext, class_type="NXdata") - entry.attrs["default"] = nxdata.name + entry.attrs["default"] = posixpath.relpath(nxdata.name, entry.name) metadata_grp = nxprocess.require_group("parameters") for key, val in self.metadata.items(): @@ -870,7 +870,7 @@ def grpdeepcopy(name, obj): **compression) nxdata.attrs["signal"] = "data" # output_ds.attrs["signal"] = "1" - entry.attrs["default"] = nxdata.name + entry.attrs["default"] = posixpath.relpath(nxdata.name, entry.name) if 
self.variance_formula is not None: error_ds = nxdata.create_dataset("data_errors", shape, dtype=numpy.float32, diff --git a/plugins/id02/xpcs.py b/plugins/id02/xpcs.py index 2fcbed3..f9ed98b 100644 --- a/plugins/id02/xpcs.py +++ b/plugins/id02/xpcs.py @@ -1,5 +1,5 @@ """ -X-ray photon correlation spectroscopy plugin for ID02 +X-ray photon correlation spectroscopy plugin for ID02 """ @@ -7,7 +7,7 @@ __contact__ = "Jerome.Kieffer@ESRF.eu" __license__ = "MIT" __copyright__ = "European Synchrotron Radiation Facility, Grenoble, France" -__date__ = "20/02/2025" +__date__ = "27/05/2025" __status__ = "development" __version__ = "0.1.1" @@ -41,7 +41,7 @@ class XPCS(Plugin): - """This plugin does pixel correlation for XPCS and averages the signal from various bins provided in the qmask. + """This plugin does pixel correlation for XPCS and averages the signal from various bins provided in the qmask. Minimalistic example: { @@ -70,13 +70,13 @@ class XPCS(Plugin): "q_mask": "qmask.npy", "beamstop_mask": "mask.npy" , "directbeam_x": 104, #pixel - "directbeam_y": 157, #pixel + "directbeam_y": 157, #pixel }, "correlator":{ "name": "MatMulCorrelator", "dtype": "uint8" } - + } """ @@ -251,7 +251,7 @@ def save_results(self, result): program_name=self.input.get("plugin_name", "dahu"), title="XPCS experiment", force_time=self.start_time) - nxs.h5.attrs["default"] = entry_grp.name + nxs.h5.attrs["default"] = entry_grp.name.strip("/") # Sample description, provided by the input sample_grp = nxs.new_class(entry_grp, "sample", "NXsample") @@ -310,7 +310,8 @@ def save_results(self, result): qmask_ds.attrs["interpretation"] = "image" qmask_ds.attrs["long_name"] = "mask with bins averaged (0=masked-out)" - entry_grp.attrs["default"] = xpcs_grp.attrs["default"] = xpcs_data.name + entry_grp.attrs["default"] = posixpath.relpath(xpcs_data.name, entry_grp.name) + xpcs_grp.attrs["default"] = posixpath.relpath(xpcs_data.name, xpcs_grp.name) result_ds = xpcs_data.create_dataset("g2", data=result.res, 
chunks=True, **COMPRESSION) result_ds.attrs["interpretation"] = "spectrum" errors_ds = xpcs_data.create_dataset("errors", data=result.dev, chunks=True, **COMPRESSION) From 5c5fc53d3281b36705700820c417af9b7487a6da Mon Sep 17 00:00:00 2001 From: Jerome Kieffer Date: Tue, 27 May 2025 16:30:45 +0200 Subject: [PATCH 5/7] missing import --- plugins/id02/xpcs.py | 1 + 1 file changed, 1 insertion(+) diff --git a/plugins/id02/xpcs.py b/plugins/id02/xpcs.py index f9ed98b..5979500 100644 --- a/plugins/id02/xpcs.py +++ b/plugins/id02/xpcs.py @@ -13,6 +13,7 @@ import os import json +import posixpath import logging logger = logging.getLogger("id02.xpcs") From 9eddd1c3ef10f7d084c7965fa7a4e5f2e2199fa3 Mon Sep 17 00:00:00 2001 From: Jerome Kieffer Date: Tue, 27 May 2025 16:44:51 +0200 Subject: [PATCH 6/7] typo --- plugins/bm29/subtracte.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/plugins/bm29/subtracte.py b/plugins/bm29/subtracte.py index 0431c89..4ea0cbe 100644 --- a/plugins/bm29/subtracte.py +++ b/plugins/bm29/subtracte.py @@ -209,7 +209,7 @@ def create_nexus(self): entry_grp = nxs.new_entry("entry", self.input.get("plugin_name", "dahu"), title='BioSaxs buffer subtraction', force_time=get_isotime()) - nxs.h5.attrs["default"] = entry_grp.name.stip["/"] + nxs.h5.attrs["default"] = entry_grp.name.strip("/") # Configuration cfg_grp = nxs.new_class(entry_grp, "configuration", "NXnote") From 3957d2022d7906751de1d20986be901a3539f518 Mon Sep 17 00:00:00 2001 From: Jerome Kieffer Date: Wed, 4 Jun 2025 13:28:22 +0200 Subject: [PATCH 7/7] quick fix for noisy beam stop diode. 
--- plugins/bm29/integrate.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/plugins/bm29/integrate.py b/plugins/bm29/integrate.py index 5bb0e07..c447015 100644 --- a/plugins/bm29/integrate.py +++ b/plugins/bm29/integrate.py @@ -11,7 +11,7 @@ __contact__ = "Jerome.Kieffer@ESRF.eu" __license__ = "MIT" __copyright__ = "European Synchrotron Radiation Facility, Grenoble, France" -__date__ = "27/05/2025" +__date__ = "04/06/2025" __status__ = "development" __version__ = "0.3.0" @@ -80,6 +80,7 @@ class IntegrateMultiframe(Plugin): "fidelity_rel": 1e-3, "hplc_mode": 0, "timeout": 10, + "average_out_monitor_values": False, # use this to work around noisy beam stop diode reading. "sample": { "name": "bsa", "description": "protein description like Bovine Serum Albumin", @@ -186,6 +187,8 @@ def setup(self, kwargs=None): else: self.energy = numpy.float32(self.energy) # It is important to fix the datatype of the energy self.monitor_values = numpy.array(self.input.get("monitor_values", 1), dtype=numpy.float64) + if self.input.get("average_out_monitor_values"): + self.monitor_values = numpy.zeros_like(self.monitor_values) + self.monitor_values.mean() self.normalization_factor = float(self.input.get("normalization_factor", 1)) self.scale_factor = float(self.input.get("exposure_time", 1)) / self.normalization_factor