From 4d65341f0321af1bb804365243079948e68aa8fb Mon Sep 17 00:00:00 2001 From: Jerome Kieffer Date: Fri, 21 Feb 2025 14:00:30 +0100 Subject: [PATCH 01/18] there will be 2 memcached servers --- plugins/bm29/integrate.py | 28 +++++++++------------------- plugins/bm29/memcached.py | 33 +++++++++++++++++++++++++++++++++ plugins/bm29/meson.build | 3 ++- plugins/bm29/subtracte.py | 31 +++++++++++-------------------- 4 files changed, 55 insertions(+), 40 deletions(-) create mode 100644 plugins/bm29/memcached.py diff --git a/plugins/bm29/integrate.py b/plugins/bm29/integrate.py index 12a06de..d551dc9 100644 --- a/plugins/bm29/integrate.py +++ b/plugins/bm29/integrate.py @@ -11,7 +11,7 @@ __contact__ = "Jerome.Kieffer@ESRF.eu" __license__ = "MIT" __copyright__ = "European Synchrotron Radiation Facility, Grenoble, France" -__date__ = "03/12/2024" +__date__ = "21/02/2025" __status__ = "development" __version__ = "0.3.0" @@ -36,6 +36,7 @@ method, polarization_factor, Nexus, get_isotime, SAXS_STYLE, NORMAL_STYLE, \ create_nexus_sample from .ispyb import IspybConnector, NumpyEncoder +from .memcached import to_memcached logger = logging.getLogger("bm29.integrate") @@ -45,12 +46,6 @@ logger.error("Numexpr is not installed, falling back on numpy's implementations") numexpr = None -try: - import memcache -except (ImportError, ModuleNotFoundError): - memcache = None - - IntegrationResult = namedtuple("IntegrationResult", "radial intensity sigma") CormapResult = namedtuple("CormapResult", "probability count tomerge") AverageResult = namedtuple("AverageResult", "average deviation normalization") @@ -651,15 +646,10 @@ def send_to_ispyb(self): def send_to_memcached(self): "Send the content of self.to_memcached to the storage" - keys = {} - rc = {} - if memcache is not None: - mc = memcache.Client([('stanza', 11211)]) - key_base = self.output_file - for k in sorted(self.to_memcached.keys(), key=lambda i:self.to_memcached[i].nbytes): - key = key_base + "_" + k - keys[k] = key - value = 
json.dumps(self.to_memcached[k], cls=NumpyEncoder) - rc[k] = mc.set(key, value) - self.log_warning(f"Return codes for memcached {rc}") - return keys + dico={} + key_base = self.output_file + for k in sorted(self.to_memcached.keys(), key=lambda i:self.to_memcached[i].nbytes): + key = f"{key_base}_{k}" + dico[key] = json.dumps(self.to_memcached[k], cls=NumpyEncoder) + return to_memcached(dico) + diff --git a/plugins/bm29/memcached.py b/plugins/bm29/memcached.py new file mode 100644 index 0000000..252bc58 --- /dev/null +++ b/plugins/bm29/memcached.py @@ -0,0 +1,33 @@ +#!/usr/bin/env python3 +# -*- coding: utf-8 -*- + +"""Data Analysis plugin for BM29: BioSaxs + +Connection to Memcached +""" + +__authors__ = ["Jérôme Kieffer"] +__contact__ = "Jerome.Kieffer@ESRF.eu" +__license__ = "MIT" +__copyright__ = "European Synchrotron Radiation Facility, Grenoble, France" +__date__ = "21/02/2025" +__status__ = "development" +__version__ = "0.3.0" + +import socket +try: + import memcache +except (ImportError, ModuleNotFoundError): + memcache = None + +SERVER = "localhost" + + +def to_memcached(dico): + rc = {} + if memcache is not None: + mc = memcache.Client([(SERVER, 11211)]) + rc["server"] = socket.getfqdn()+":11211" + for k, v in dico.items(): + rc[k] = mc.set(k, v) + return rc diff --git a/plugins/bm29/meson.build b/plugins/bm29/meson.build index dc1832a..8c84090 100644 --- a/plugins/bm29/meson.build +++ b/plugins/bm29/meson.build @@ -5,7 +5,8 @@ py.install_sources( [ 'integrate.py', 'ispyb.py', 'nexus.py', - 'subtracte.py' + 'subtracte.py', + 'memcached.py', ], pure: false, # Will be installed next to binaries subdir: 'dahu/plugins/bm29' # Folder relative to site-packages to install to diff --git a/plugins/bm29/subtracte.py b/plugins/bm29/subtracte.py index 83122f0..efaaadd 100644 --- a/plugins/bm29/subtracte.py +++ b/plugins/bm29/subtracte.py @@ -11,9 +11,9 @@ __contact__ = "Jerome.Kieffer@ESRF.eu" __license__ = "MIT" __copyright__ = "European Synchrotron Radiation Facility, 
Grenoble, France" -__date__ = "03/12/2024" +__date__ = "21/02/2025" __status__ = "development" -__version__ = "0.2.1" +__version__ = "0.3.0" import os import json @@ -42,11 +42,7 @@ polarization_factor, method, Nexus, get_isotime, SAXS_STYLE, NORMAL_STYLE, \ Sample, create_nexus_sample from .ispyb import IspybConnector, NumpyEncoder - -try: - import memcache -except (ImportError, ModuleNotFoundError): - memcache = None +from .memcached import to_memcached NexusJuice = namedtuple("NexusJuice", "filename h5path npt unit q I sigma poni mask energy polarization method signal2d error2d normalization sample") @@ -736,7 +732,7 @@ def send_to_ispyb(self): if self.ispyb.url and parse_url(self.ispyb.url).host: ispyb = IspybConnector(*self.ispyb) ispyb.send_subtracted(self.to_pyarch) - self.to_pyarch["experiment_type"]="sampleChanger" + self.to_pyarch["experiment_type"] = "sampleChanger" self.to_pyarch["sample"] = self.sample_juice.sample ispyb.send_icat(data=self.to_pyarch) else: @@ -745,16 +741,11 @@ def send_to_ispyb(self): def send_to_memcached(self): "Send the content of self.to_memcached to the storage" - keys = {} - rc = {} - if memcache is not None: - mc = memcache.Client([('stanza', 11211)]) - key_base = self.output_file - for k in sorted(self.to_memcached.keys(), key=lambda i:self.to_memcached[i].nbytes): - key = key_base + "_" + k - keys[k] = key - value = json.dumps(self.to_memcached[k], cls=NumpyEncoder) - rc[k] = mc.set(key, value) - self.log_warning(f"Return codes for memcached {rc}") - return keys + dico = {} + key_base = self.output_file + for k in sorted(self.to_memcached.keys(), key=lambda i:self.to_memcached[i].nbytes): + key = key_base + "_" + k + dico[key] = json.dumps(self.to_memcached[k], cls=NumpyEncoder) + + return to_memcached(dico) From 3f75d3f71c9d8fd69de65ca7b26a20e9830abea3 Mon Sep 17 00:00:00 2001 From: Jerome Kieffer Date: Fri, 21 Feb 2025 15:38:01 +0100 Subject: [PATCH 02/18] Declare to the icat catalog separately from ispyb --- 
plugins/bm29/hplc.py | 32 +++++++-- plugins/bm29/icat.py | 138 ++++++++++++++++++++++++++++++++++++++ plugins/bm29/integrate.py | 58 +++++++++------- plugins/bm29/ispyb.py | 4 +- plugins/bm29/meson.build | 1 + plugins/bm29/subtracte.py | 17 ++++- 6 files changed, 214 insertions(+), 36 deletions(-) create mode 100644 plugins/bm29/icat.py diff --git a/plugins/bm29/hplc.py b/plugins/bm29/hplc.py index 3366349..4d545ef 100644 --- a/plugins/bm29/hplc.py +++ b/plugins/bm29/hplc.py @@ -10,9 +10,9 @@ __contact__ = "Jerome.Kieffer@ESRF.eu" __license__ = "MIT" __copyright__ = "European Synchrotron Radiation Facility, Grenoble, France" -__date__ = "03/12/2024" +__date__ = "21/02/2025" __status__ = "development" -__version__ = "0.2.0" +__version__ = "0.3.0" import time import os @@ -20,6 +20,7 @@ import math from math import log, pi import posixpath +import copy from collections import namedtuple from urllib3.util import parse_url from dahu.plugin import Plugin @@ -42,6 +43,8 @@ polarization_factor, method, Nexus, get_isotime, SAXS_STYLE, NORMAL_STYLE, \ Sample, create_nexus_sample from .ispyb import IspybConnector +from .icat import send_icat + NexusJuice = namedtuple("NexusJuice", "filename h5path npt unit idx Isum q I sigma poni mask energy polarization method sample timestamps") @@ -933,10 +936,25 @@ def send_to_ispyb(self): if self.ispyb and self.ispyb.url and parse_url(self.ispyb.url).host: ispyb = IspybConnector(*self.ispyb) ispyb.send_hplc(self.to_pyarch) - self.to_pyarch["experiment_type"]="hplc" - if "volume" in self.to_pyarch: - self.to_pyarch.pop("volume") - self.to_pyarch["sample"] = self.juices[0].sample - ispyb.send_icat(data=self.to_pyarch) else: self.log_warning(f"Not sending to ISPyB: no valid URL in {self.ispyb}") + + + + self.to_pyarch["sample"] = self.juices[0].sample + ispyb.send_icat(data=self.to_pyarch) + + + def send_to_icat(self): + to_icat = copy.copy(self.to_pyarch) + to_icat["experiment_type"] = "hplc" + to_icat["sample"] = self.juices[0].sample + 
if "volume" in to_icat: + to_icat.pop("volume") + metadata = {"scanType": "hplc"} + return send_icat(sample=self.juices[0].sample, + raw=os.path.dirname(os.path.abspath(self.input_files[0])), + path=os.path.dirname(os.path.abspath(self.output_file)), + data=to_icat, + gallery=self.ispyb.gallery or os.path.join(os.path.dirname(os.path.abspath(self.output_file)), "gallery"), + metadata=metadata) diff --git a/plugins/bm29/icat.py b/plugins/bm29/icat.py new file mode 100644 index 0000000..27eed15 --- /dev/null +++ b/plugins/bm29/icat.py @@ -0,0 +1,138 @@ +#!/usr/bin/env python3 +# -*- coding: utf-8 -*- + +"""Data Analysis plugin for BM29: BioSaxs + +Everything to send data to iCat, the data catalogue + +""" + +__authors__ = ["Jérôme Kieffer"] +__contact__ = "Jerome.Kieffer@ESRF.eu" +__license__ = "MIT" +__copyright__ = "European Synchrotron Radiation Facility, Grenoble, France" +__date__ = "21/02/2025" +__status__ = "development" +version = "0.3.0" + + +import os +import tempfile +import logging +logger = logging.getLogger(__name__) +try: + from pyicat_plus.client.main import IcatClient +except ImportError: + logger.error("iCat connection will no work") + IcatClient = None + +def _ensure_gallery(gallery): + if gallery: + gallery = os.path.abspath(gallery) + if not os.path.isdir(gallery): + try: + os.makedirs(gallery) + except Exception as err: + logger.warning(f"Unable to create dir {gallery}. {type(err)}: {err}") + else: + logger.error("No `gallery` destination provided ... 
things will go wrong") + gallery = tempfile.gettempdir() + return gallery + + +def send_icat(proposal=None, beamline=None, sample=None, dataset=None, path=None, raw=None, data=None, gallery=None, metadata=None): + """Send some data to icat, the data-catalogue + + :param proposal: mx1324 + :param beamline: name of the beamline + :param sample: sample name as registered in icat + :param dataset: name given by BLISS + :param path: directory name where processed data are staying + :param raw: directory name of the raw data (not the processed ones) + :param data: dict with all data sent to iCat + :param gallery: path with the gallery directory + :param metadata: dict with additional metadata (could be overwritten by this function) + :return: data sent to icat as a dict + """ + gallery = _ensure_gallery(gallery) + tmp = gallery.strip("/").split("/") + idx_process = [i for i,j in enumerate(tmp) if j.lower().startswith("process")][-1] + if tmp[idx_process] == "processed": + assert idx_process>=6 + if proposal is None: + proposal = tmp[idx_process-6] + if beamline is None: + beamline = tmp[idx_process-5] + if sample is None: + sample = tmp[idx_process-2] + if dataset is None: + dataset = tmp[idx_process+1] + if path is None: + path = os.path.dirname(gallery) + if raw is None: + raw = os.path.abspath(gallery[:gallery.lower().index("process")]) + elif tmp[idx_process] == "PROCESSED_DATA": + if proposal is None: + proposal = tmp[idx_process-3] + if beamline is None: + beamline = tmp[idx_process-2] + if sample is None: + sample = tmp[idx_process+1] + if dataset is None: + dataset = tmp[idx_process+2] + if path is None: + path = os.path.dirname(gallery) + if raw is None: + raw = os.path.dirname(os.path.dirname(os.path.abspath(gallery.replace("PROCESSED_DATA", "RAW_DATA")))) + else: + logger.error("Unrecognized path layout") + + if metadata is None: + metadata = {} + metadata["definition"] = "SAXS" + metadata["Sample_name"] = sample + + for k,v in data.items(): + if 
isinstance(k, str) and k.startswith("SAXS_"): + metadata[k] = v + sample = data.get("sample") + if sample: + metadata["SAXS_concentration"] = str(sample.concentration) + metadata["SAXS_code"] = sample.name + metadata["SAXS_comments"] = sample.description + metadata["SAXS_storage_temperature"] = str(sample.temperature_env) + metadata["SAXS_exposure_temperature"] = str(sample.temperature) + if sample.hplc: + metadata["SAXS_column_type"] = sample.hplc + #"buffer": "description of buffer, pH, ...", + + guinier = data.get("guinier") + if guinier: + metadata["SAXS_guinier_rg"] = f"{guinier.Rg:.1f}±{guinier.sigma_Rg:.1f}" + metadata["SAXS_guinier_points"] = f"{guinier.start_point}-{guinier.end_point}" + metadata["SAXS_guinier_i0"] = f"{guinier.I0:.1f}±{guinier.sigma_I0:.1f}" + + bift = data.get("bift") + if bift: + metadata["SAXS_rg"] = f"{bift.Rg_avg:.1f}±{bift.Rg_std:.1f}" + metadata["SAXS_d_max"] = f"{bift.Dmax_avg:.1f}±{bift.Dmax_std:.1f}" + + tomerge = data.get("merged") + if tomerge: + metadata["SAXS_frames_averaged"] = f"{tomerge[0]}-{tomerge[1]}" + + volume = data.get("volume") + if volume: + metadata["SAXS_porod_volume"] = str(volume) + #Other metadata one may collect ... 
+ metadata["SAXS_experiment_type"]= data.get("experiment_type", "UNKNOWN") + metadata["datasetName"] = dataset + icat_client = IcatClient(metadata_urls=["bcu-mq-01.esrf.fr:61613", "bcu-mq-02.esrf.fr:61613"]) + kwargs = {"beamline":beamline, + "proposal":proposal, + "dataset":dataset, + "path":path, + "metadata":metadata, + "raw":[raw]} + icat_client.store_processed_data(**kwargs) + return kwargs diff --git a/plugins/bm29/integrate.py b/plugins/bm29/integrate.py index d551dc9..3fafcf1 100644 --- a/plugins/bm29/integrate.py +++ b/plugins/bm29/integrate.py @@ -19,6 +19,7 @@ import time import json import logging +import copy from collections import namedtuple from urllib3.util import parse_url from dahu.plugin import Plugin @@ -230,6 +231,7 @@ def process(self): self.create_nexus() self.output["memcached"] = self.send_to_memcached() self.send_to_ispyb() + self.output["icat"] = self.send_to_icat() def wait_file(self, filename, timeout=None): """Wait for a file to appear on a filesystem @@ -617,32 +619,38 @@ def process3_average(self, tomerge): return AverageResult(intensity_avg, intensity_std, sum_norm) def send_to_ispyb(self): - if self.ispyb.url and parse_url(self.ispyb.url).host: - ispyb = IspybConnector(*self.ispyb) - if self.input.get("hplc_mode"): - self.to_pyarch["experiment_type"]="hplc" - else: + if self.input.get("hplc_mode") == 0: + if self.ispyb.url and parse_url(self.ispyb.url).host: + ispyb = IspybConnector(*self.ispyb) ispyb.send_averaged(self.to_pyarch) - self.to_pyarch["experiment_type"]="sample-changer" - #Some more metadata for iCat, as strings: - self.to_pyarch["sample"] = self.sample - self.to_pyarch["SAXS_maskFile"] = self.mask - self.to_pyarch["SAXS_waveLength"] = str(self.ai.wavelength) - self.to_pyarch["SAXS_normalisation"] = str(self.normalization_factor) - self.to_pyarch["SAXS_diode_currents"] = str(self.monitor_values) - self.to_pyarch["SAXS_numberFrames"] = str(self.nb_frames) - self.to_pyarch["SAXS_timePerFrame"] = 
self.input.get("exposure_time", "?") - self.to_pyarch["SAXS_detector_distance"] = str(self.ai.dist) - self.to_pyarch["SAXS_pixelSizeX"] = str(self.ai.detector.pixel2) - self.to_pyarch["SAXS_pixelSizeY"] = str(self.ai.detector.pixel1) - f2d = self.ai.getFit2D() - self.to_pyarch["SAXS_beam_center_x"] = str(f2d["centerX"]) - self.to_pyarch["SAXS_beam_center_y"] = str(f2d["centerY"]) - - icat = ispyb.send_icat(data=self.to_pyarch) - #self.log_warning("Sent to icat: " + str(icat)) - else: - self.log_warning("Not sending to ISPyB: no valid URL %s" % self.ispyb.url) + else: + self.log_warning(f"Not sending to ISPyB: no valid URL {self.ispyb.url}") + + def send_to_icat(self): + #Some more metadata for iCat, as strings: + to_icat = copy.copy(self.to_pyarch) + to_icat["experiment_type"] = "hplc" if self.input.get("hplc_mode") else "sample-changer" + to_icat["sample"] = self.sample.name + to_icat["SAXS_maskFile"] = self.mask + to_icat["SAXS_waveLength"] = str(self.ai.wavelength) + to_icat["SAXS_normalisation"] = str(self.normalization_factor) + to_icat["SAXS_diode_currents"] = str(self.monitor_values) + to_icat["SAXS_numberFrames"] = str(self.nb_frames) + to_icat["SAXS_timePerFrame"] = self.input.get("exposure_time", "?") + to_icat["SAXS_detector_distance"] = str(self.ai.dist) + to_icat["SAXS_pixelSizeX"] = str(self.ai.detector.pixel2) + to_icat["SAXS_pixelSizeY"] = str(self.ai.detector.pixel1) + f2d = self.ai.getFit2D() + to_icat["SAXS_beam_center_x"] = str(f2d["centerX"]) + to_icat["SAXS_beam_center_y"] = str(f2d["centerY"]) + + metadata = {"scanType": "integration"} + return send_icat(sample=self.sample.name, + raw=os.path.dirname(os.path.abspath(self.input_file)), + path=os.path.dirname(os.path.abspath(self.output_file)), + data=to_icat, + gallery=self.ispyb.gallery or os.path.join(os.path.dirname(os.path.abspath(self.output_file)), "gallery"), + metadata=metadata) def send_to_memcached(self): "Send the content of self.to_memcached to the storage" diff --git 
a/plugins/bm29/ispyb.py b/plugins/bm29/ispyb.py index cbd4de4..d35b898 100644 --- a/plugins/bm29/ispyb.py +++ b/plugins/bm29/ispyb.py @@ -11,7 +11,7 @@ __contact__ = "Jerome.Kieffer@ESRF.eu" __license__ = "MIT" __copyright__ = "European Synchrotron Radiation Facility, Grenoble, France" -__date__ = "10/12/2024" +__date__ = "21/02/2025" __status__ = "development" version = "0.2.3" @@ -98,6 +98,8 @@ def __repr__(self): def send_icat(self, proposal=None, beamline=None, sample=None, dataset=None, path=None, raw=None, data=None): """ + DEPRECATED CODE ! + :param proposal: mx1324 :param beamline: name of the beamline :param sample: sample name as registered in icat diff --git a/plugins/bm29/meson.build b/plugins/bm29/meson.build index 8c84090..3c08e41 100644 --- a/plugins/bm29/meson.build +++ b/plugins/bm29/meson.build @@ -7,6 +7,7 @@ py.install_sources( [ 'nexus.py', 'subtracte.py', 'memcached.py', + 'icat.py' ], pure: false, # Will be installed next to binaries subdir: 'dahu/plugins/bm29' # Folder relative to site-packages to install to diff --git a/plugins/bm29/subtracte.py b/plugins/bm29/subtracte.py index efaaadd..7226771 100644 --- a/plugins/bm29/subtracte.py +++ b/plugins/bm29/subtracte.py @@ -17,6 +17,7 @@ import os import json +import copy from math import log, pi from collections import namedtuple from urllib3.util import parse_url @@ -43,6 +44,7 @@ Sample, create_nexus_sample from .ispyb import IspybConnector, NumpyEncoder from .memcached import to_memcached +from .icat import send_icat NexusJuice = namedtuple("NexusJuice", "filename h5path npt unit q I sigma poni mask energy polarization method signal2d error2d normalization sample") @@ -139,6 +141,7 @@ def teardown(self): self.output["Vc"] = self.Vc self.output["mass"] = self.mass self.output["memcached"] = self.send_to_memcached() + self.output["icat"] = self.send_to_icat() #teardown everything else: if self.nxs is not None: self.nxs.close() @@ -732,12 +735,20 @@ def send_to_ispyb(self): if self.ispyb.url 
and parse_url(self.ispyb.url).host: ispyb = IspybConnector(*self.ispyb) ispyb.send_subtracted(self.to_pyarch) - self.to_pyarch["experiment_type"] = "sampleChanger" - self.to_pyarch["sample"] = self.sample_juice.sample - ispyb.send_icat(data=self.to_pyarch) else: self.log_warning("Not sending to ISPyB: no valid URL %s" % self.ispyb.url) + def send_to_icat(self): + to_icat = copy.copy(self.to_pyarch) + to_icat["experiment_type"] = "sample-changer" + to_icat["sample"] = self.sample_juice.sample + metadata = {"scanType": "subtraction"} + return send_icat(sample=self.sample_juice.sample, + raw=os.path.dirname(os.path.abspath(self.sample_file)), + path=os.path.dirname(os.path.abspath(self.output_file)), + data=to_icat, + gallery=self.ispyb.gallery or os.path.join(os.path.dirname(os.path.abspath(self.output_file)), "gallery"), + metadata=metadata) def send_to_memcached(self): "Send the content of self.to_memcached to the storage" From 4bfe5f0670a2d323ecc274f3281fa9e3af2baf19 Mon Sep 17 00:00:00 2001 From: Jerome Kieffer Date: Fri, 21 Feb 2025 16:43:32 +0100 Subject: [PATCH 03/18] Include the export of HPLC data as CSV file --- plugins/bm29/hplc.py | 33 ++++++++++++++++++++++++--------- 1 file changed, 24 insertions(+), 9 deletions(-) diff --git a/plugins/bm29/hplc.py b/plugins/bm29/hplc.py index 4d545ef..73c398e 100644 --- a/plugins/bm29/hplc.py +++ b/plugins/bm29/hplc.py @@ -132,6 +132,7 @@ def build_background(I, std=None, keep=0.3): return bg_avg, bg_std, to_keep + class HPLC(Plugin): """ Rebuild the complete chromatogram and perform basic analysis on it. 
@@ -213,6 +214,7 @@ def process(self): self.to_pyarch["sample_name"] = self.juices[0].sample.name if not self.input.get("no_ispyb"): self.send_to_ispyb() + self.output["icat"] = self.send_to_icat() def teardown(self): Plugin.teardown(self) @@ -939,22 +941,35 @@ def send_to_ispyb(self): else: self.log_warning(f"Not sending to ISPyB: no valid URL in {self.ispyb}") - - - self.to_pyarch["sample"] = self.juices[0].sample - ispyb.send_icat(data=self.to_pyarch) - - def send_to_icat(self): to_icat = copy.copy(self.to_pyarch) to_icat["experiment_type"] = "hplc" to_icat["sample"] = self.juices[0].sample - if "volume" in to_icat: - to_icat.pop("volume") + # if "volume" in to_icat: + # to_icat.pop("volume") metadata = {"scanType": "hplc"} + gallery=self.ispyb.gallery or os.path.join(os.path.dirname(os.path.abspath(self.output_file)), "gallery") + self.save_csv(os.path.join(gallery, "chromatogram.csv"), to_icat.get("sum_I"), to_icat.get("Rg")) return send_icat(sample=self.juices[0].sample, raw=os.path.dirname(os.path.abspath(self.input_files[0])), path=os.path.dirname(os.path.abspath(self.output_file)), data=to_icat, - gallery=self.ispyb.gallery or os.path.join(os.path.dirname(os.path.abspath(self.output_file)), "gallery"), + gallery=gallery, metadata=metadata) + + def save_csv(self, filename, sum_I, Rg): + dirname = os.path.dirname(filename) + if not os.path.isdir(dirname): + os.makedirs(dirname, exist_ok=True) + lines = ["id,ΣI,Rg"] + idx = 0 + for I,rg in zip(sum_I, Rg): + lines.append(f"{idx},{I},{rg}") + idx+=1 + lines.append("") + with open(filename, "w") as csv: + csv.write(os.linesep.join(lines)) + + + + \ No newline at end of file From a791ff5ea8a2b924e1e1f77008653aa9d8f4783f Mon Sep 17 00:00:00 2001 From: Jerome Kieffer Date: Fri, 21 Feb 2025 16:45:14 +0100 Subject: [PATCH 04/18] update version --- version.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/version.py b/version.py index 62934b4..b6513f0 100755 --- a/version.py +++ b/version.py 
@@ -48,7 +48,7 @@ __contact__ = "Jerome.Kieffer@ESRF.eu" __license__ = "MIT" __copyright__ = "European Synchrotron Radiation Facility, Grenoble, France" -__date__ = "22/02/2024" +__date__ = "21/02/2025" __status__ = "producton" __docformat__ = 'restructuredtext' RELEASE_LEVEL_VALUE = {"dev": 0, @@ -60,8 +60,8 @@ MAJOR = 2025 MINOR = 2 -MICRO = 0 -RELEV = "final" # <16 +MICRO = 1 +RELEV = "dev" # <16 SERIAL = 0 # <16 date = __date__ From 5157e1e5f0a86ad402eadd5ca7f386f367b26d52 Mon Sep 17 00:00:00 2001 From: Jerome Kieffer Date: Fri, 21 Feb 2025 17:15:10 +0100 Subject: [PATCH 05/18] Fix a couple of bugs spotted on the beamline --- plugins/bm29/hplc.py | 4 ++-- plugins/bm29/nexus.py | 35 +++++++++++++++++++++++------------ 2 files changed, 25 insertions(+), 14 deletions(-) diff --git a/plugins/bm29/hplc.py b/plugins/bm29/hplc.py index 73c398e..25a80bb 100644 --- a/plugins/bm29/hplc.py +++ b/plugins/bm29/hplc.py @@ -945,8 +945,8 @@ def send_to_icat(self): to_icat = copy.copy(self.to_pyarch) to_icat["experiment_type"] = "hplc" to_icat["sample"] = self.juices[0].sample - # if "volume" in to_icat: - # to_icat.pop("volume") + if "volume" in to_icat: + to_icat.pop("volume") metadata = {"scanType": "hplc"} gallery=self.ispyb.gallery or os.path.join(os.path.dirname(os.path.abspath(self.output_file)), "gallery") self.save_csv(os.path.join(gallery, "chromatogram.csv"), to_icat.get("sum_I"), to_icat.get("Rg")) diff --git a/plugins/bm29/nexus.py b/plugins/bm29/nexus.py index 3978bf3..b8d214a 100644 --- a/plugins/bm29/nexus.py +++ b/plugins/bm29/nexus.py @@ -4,7 +4,7 @@ __contact__ = "Jerome.Kieffer@ESRF.eu" __license__ = "MIT" __copyright__ = "European Synchrotron Radiation Facility, Grenoble, France" -__date__ = "20/02/2025" +__date__ = "21/02/2025" __status__ = "production" __docformat__ = 'restructuredtext' @@ -13,7 +13,6 @@ import time import logging import h5py -import atexit logger = logging.getLogger(__name__) @@ -137,7 +136,6 @@ def __init__(self, filename, mode=None, 
self.file_handle = None self.h5 = h5py.File(self.filename, mode=self.mode) self.to_close = [] - atexit.register(self.close) if not pre_existing: self.h5.attrs["NX_class"] = "NXroot" self.h5.attrs["file_time"] = get_isotime(start_time) @@ -145,18 +143,31 @@ def __init__(self, filename, mode=None, self.h5.attrs["HDF5_Version"] = h5py.version.hdf5_version self.h5.attrs["creator"] = creator or self.__class__.__name__ + def __del__(self): + self.close() + def close(self, end_time=None): """ - close the filename and update all entries + Close the file and update all entries. """ - if self.mode != "r": - end_time = get_isotime(end_time) - for entry in self.to_close: - entry["end_time"] = end_time - self.h5.attrs["file_update_time"] = get_isotime() - self.h5.close() - if self.file_handle: - self.file_handle.close() + try: + if self.mode != "r": + if self.h5: + end_time = get_isotime(end_time) + while self.to_close: + entry = self.to_close.pop() + entry["end_time"] = end_time + self.h5.attrs["file_update_time"] = get_isotime() + except Exception as error: + sys.stderr.write(f"{type(error)}: {error},\nwhile finalizing Nexus file\n") + + try: + if self.h5: + self.h5.close() + if self.file_handle: + self.file_handle.close() + except Exception as error: + sys.stderr.write(f"Error closing file: {error}\n") # Context manager for "with" statement compatibility def __enter__(self, *arg, **kwarg): From c4fd0ba3f203918a6d12cab2e8008ed9be3a043d Mon Sep 17 00:00:00 2001 From: Jerome Kieffer Date: Fri, 21 Feb 2025 17:26:41 +0100 Subject: [PATCH 06/18] Disable the saving of large arrays --- plugins/bm29/hplc.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/plugins/bm29/hplc.py b/plugins/bm29/hplc.py index 25a80bb..b5bf3ea 100644 --- a/plugins/bm29/hplc.py +++ b/plugins/bm29/hplc.py @@ -214,7 +214,8 @@ def process(self): self.to_pyarch["sample_name"] = self.juices[0].sample.name if not self.input.get("no_ispyb"): self.send_to_ispyb() - self.output["icat"] = 
self.send_to_icat() + # self.output["icat"] = + self.send_to_icat() def teardown(self): Plugin.teardown(self) From d43211d171f7b18c8e0bcfc31f9a99a24984053b Mon Sep 17 00:00:00 2001 From: Jerome Kieffer Date: Mon, 24 Feb 2025 12:09:09 +0100 Subject: [PATCH 07/18] Debug session live on the BM29 beamline ... Move code in production, integrate + subtract work OK HPLC untested --- build-deb.sh | 4 +- debug/debug_deadlock.py | 50 -------- debug/debug_id02_filter.py | 58 ---------- debug/debug_id02_single.py | 66 ----------- debug/demo_distortion.json | 7 -- debug/issue4.json | 230 ------------------------------------- debug/test_distortion.py | 79 ------------- debug/test_tango_server.py | 85 -------------- plugins/bm29/icat.py | 21 ++-- plugins/bm29/integrate.py | 6 +- plugins/bm29/memcached.py | 2 +- plugins/bm29/subtracte.py | 6 +- 12 files changed, 23 insertions(+), 591 deletions(-) delete mode 100644 debug/debug_deadlock.py delete mode 100755 debug/debug_id02_filter.py delete mode 100644 debug/debug_id02_single.py delete mode 100644 debug/demo_distortion.json delete mode 100644 debug/issue4.json delete mode 100644 debug/test_distortion.py delete mode 100755 debug/test_tango_server.py diff --git a/build-deb.sh b/build-deb.sh index 35740b4..ca4e799 100755 --- a/build-deb.sh +++ b/build-deb.sh @@ -1,8 +1,8 @@ #!/bin/sh export PATH=$PATH:/opt/bliss/conda/venv/dahu/bin rm -rf deb_dist/ -/usr/bin/python3 -m pip wheel . -wheel2deb --output-dir deb_dist --exclude numpy* +/usr/bin/python3 -m pip wheel . --no-cache-dir +wheel2deb default --output-dir deb_dist --exclude numpy* cd deb_dist/python3-dahu*_amd64 dpkg-buildpackage -r -uc -us cd .. 
diff --git a/debug/debug_deadlock.py b/debug/debug_deadlock.py deleted file mode 100644 index cca5e91..0000000 --- a/debug/debug_deadlock.py +++ /dev/null @@ -1,50 +0,0 @@ -#!/usr/bin/python - -import os -import PyTango, json - -saxs = { - "npt2_rad": 1000, - "dark_filter_quantil_lower": 0.1, - "dark_filter_quantil_upper": 0.9, - "regrouping_mask_filename": "mask-05m.edf", - "npt2_azim": 360, - "job_id": 2, - "dark_filename": "/nobackup/lid02gpu12/nl16b_saxs_02229_dark.h5", - "to_save": "raw sub flat norm azim ave", - "dark_filter": "quantil", - "output_dir": "/mntdirect/_data_opid02_inhouse/com/20141216d/cor", - "image_file": "/nobackup/lid02gpu12/nl16b_saxs_02229.h5", - "npt1_rad": 1000, - "dark_filter_quantil": 0.5, - "DetectorName": "saxs", - "plugin_name": "id02.singledetector", - "flat_filename": "/data/opid02/archive/setup/spatcorr-files/saxs/flat_saxs_2x2.edf", - "distortion_filename": "/data/opid02/archive/setup/spatcorr-files/saxs/SpatCorrRayonix_2b2.dat", - "c216_filename": "/nobackup/lid02gpu12/nl16b_scalers_02229.h5"} - -waxs = { - "npt2_rad": 1000, - "dark_filter_quantil_lower": 0.1, - "dark_filter_quantil_upper": 0.9, - "regrouping_mask_filename": "mask-waxs.edf", - "npt2_azim": 360, - "job_id": 3, - "dark_filename": "/nobackup/lid02gpu11/nl16b_waxs_02229_dark.h5", - "to_save": "raw sub flat norm azim ave", - "dark_filter": "quantil", - "output_dir": "/mntdirect/_data_opid02_inhouse/com/20141216d/cor", - "image_file": "/nobackup/lid02gpu11/nl16b_waxs_02229.h5", - "npt1_rad": 1000, - "dark_filter_quantil": 0.5, - "DetectorName": "waxs", - "plugin_name": "id02.singledetector", - "c216_filename": "/nobackup/lid02gpu12/nl16b_scalers_02229.h5" -} - -dahu = PyTango.DeviceProxy("DAU/dahu/1") -js = dahu.startJob(["id02.singledetector",json.dumps(saxs)]) -jw = dahu.startJob(["id02.singledetector",json.dumps(waxs)]) - -dahu.collectStatistics() -print(dahu.statisticsCollected) \ No newline at end of file diff --git a/debug/debug_id02_filter.py 
b/debug/debug_id02_filter.py deleted file mode 100755 index 1a1e5d2..0000000 --- a/debug/debug_id02_filter.py +++ /dev/null @@ -1,58 +0,0 @@ -#!/usr/bin/python -import sys, os, time, json - -if "TANGO_HOST" not in os.environ: - raise RuntimeError("No TANGO_HOST defined") -import PyTango - -dahu = PyTango.DeviceProxy("DAU/dahu/1") -plugin = "id02.filter" -data = { "image_file": "/nobackup/lid02gpu11/FRELON/test_laurent_saxs_0000.h5", - #"entry": "entry_0000" - "output_format": "edf", - "output_file": "/nobackup/lid02gpu12/dark.edf", - "filter": "median", - #"cutoff": 0 - #threshold:0, - #"format": "edf", - #"dark_file": filename, - #"flat_file": filename, -# "do_dark":false, -# "do_flat":false, - } -print(dahu.initPlugin(plugin)) -pid = dahu.startJob([plugin, json.dumps(data)]) -print("%s id: %i" % (plugin, pid)) -print("Input: %s" % dahu.getJobInput(pid)) -print("Output: %s" % dahu.getJobOutput(pid)) -print("state: %s" % dahu.getJobState(pid)) -while dahu.getJobState(pid) not in ["success", "failure"]: - time.sleep(1) -print("output:") -print(dahu.getJobOutput(pid)) -if dahu.getJobState(pid) == "failure": - print("Error:") - print(dahu.getJobError(pid)) - -data = { - "dark_filter_loq": 0.9, - "npt2_rad": 1000, - "dark_filter_quantil_lower": 0.1, - "dark_filter_quantil_upper": 0.9, - "regrouping_mask_filename": "mask-10m.edf", - "npt2_azim": 360, - "job_id": 66, - "dark_filename": "/mntdirect/_data_opid02_inhouse/com/20141104/nk04_saxs_-0001_dark.h5", - "to_save": "[\"azim\",\"ave\"]", - "dark_filter": "quantil", - "output_dir": "/mntdirect/_data_opid02_inhouse/com/20141104/cor", - "image_file": "/mntdirect/_data_opid02_inhouse/com/20141104/nk04_saxs_-0001.h5", - "npt1_rad": 1000, - "dark_filter_quantil": 0.5, - "DetectorName": "saxs", - "plugin_name": "id02.singledetector", - "flat_filename": "/data/opid02/archive/setup/spatcorr-files/saxs/flat_saxs_1x1.h5", - "dark_filter_pq": 0.5, - "distortion_filename": 
"/data/opid02/archive/setup/spatcorr-files/saxs/spline_saxs_1x1.dat", - "c216_filename": "/mntdirect/_data_opid02_inhouse/com/20141104/nk04_scalers_-0001.h5" -} diff --git a/debug/debug_id02_single.py b/debug/debug_id02_single.py deleted file mode 100644 index 7eff209..0000000 --- a/debug/debug_id02_single.py +++ /dev/null @@ -1,66 +0,0 @@ -#!/usr/bin/python -import sys, os, time, json - -if "TANGO_HOST" not in os.environ: - raise RuntimeError("No TANGO_HOST defined") -import PyTango - -dahu = PyTango.DeviceProxy("DAU/dahu/1") -plugin = "id02.SingleDetector" -data = {"DetectorName": 'Rayonix', - "image_file": "/nobackup/lid02gpu11/FRELON/test_laurent_saxs_0000.h5", - #"entry": "entry_0000" - #"hdf5": "/entry_0000/id02/data - "output_dir": "/nobackup/lid02gpu12", - "PSize_1": 2.4e-05, - "PSize_2": 2.4e-05, - "BSize_1":1, - "BSize_2":1, - "Center_1": 512, - "Center_2": 512, - "DDummy": 1, - "SampleDistance":14.9522, - "c216_filename": "/nobackup/lid02gpu11/metadata/test.h5", - "WaveLength": 9.95058e-11, - "Dummy":-10, - "output_dir": "/nobackup/lid02gpu12/output", - # "do_dark":false, - # "do_flat":false, - "npt2_azim": 360, - "npt2_rad" : 500, - "npt1_rad" : 1000, - "to_save": ["raw", "azim", "ave"], - } -data = { - "npt2_rad": 1000, - "dark_filter_quantil_lower": 0.1, - "dark_filter_quantil_upper": 0.9, - "regrouping_mask_filename": "mask-10m.edf", - "npt2_azim": 360, - "dark_filename": "/mntdirect/_data_opid02_inhouse/com/20141104/nk04_saxs_00048_dark.h5", - "to_save": "raw dark flat dist norm azim ave", - "dark_filter": "quantil", - "output_dir": "/mntdirect/_data_opid02_inhouse/com/20141104/cor", - "image_file": "/mntdirect/_data_opid02_inhouse/com/20141104/nk04_saxs_00047.h5", - "npt1_rad": 1000, - "DetectorName": "saxs", - "flat_filename": "/data/opid02/archive/setup/spatcorr-files/saxs/flat_saxs_2x2.edf", - "distortion_filename": "/data/opid02/archive/setup/spatcorr-files/saxs/SpatCorrRayonix_2b2.dat", - "c216_filename": 
"/mntdirect/_data_opid02_inhouse/com/20141104/nk04_scalers_00048.h5" -} - - - -print(dahu.initPlugin(plugin)) -pid = dahu.startJob([plugin, json.dumps(data)]) -print("%s id: %i" % (plugin, pid)) -print("Input: %s" % dahu.getJobInput(pid)) -print("Output: %s" % dahu.getJobOutput(pid)) -print("state: %s" % dahu.getJobState(pid)) -while dahu.getJobState(pid) not in ["success", "failure"]: - time.sleep(1) -print("output:") -print(dahu.getJobOutput(pid)) -if dahu.getJobState(pid) == "failure": - print("Error:") - print(dahu.getJobError(pid)) diff --git a/debug/demo_distortion.json b/debug/demo_distortion.json deleted file mode 100644 index e29d117..0000000 --- a/debug/demo_distortion.json +++ /dev/null @@ -1,7 +0,0 @@ -{ - "output_hdf5_dataset": "/testout/data", - "distortion_spline": "/nobackup/lid02gpu11/FReLoN_1024_1024.spline", - "input_hdf5_dataset": "/lima_test_hdf5_0000_0000/Data/Image", - "output_hdf5_filename": "/nobackup/lid02gpu12/dahu.h5", - "input_hdf5_filename": "/nobackup/lid02gpu11/lima_test_0000.h5" -} diff --git a/debug/issue4.json b/debug/issue4.json deleted file mode 100644 index 0c4cd31..0000000 --- a/debug/issue4.json +++ /dev/null @@ -1,230 +0,0 @@ -{ - "HS32N05": "AUX1", - "HS32N04": "PIN42", - "HS32N07": "PIN7", - "HS32N06": "PIN6", - "HS32N01": "TIME", - "HS32N03": "PIN41", - "HS32N02": "PIN3", - "HS32N09": "PIN1", - "HS32N08": "PIN8", - "job_id": 21, - "HSI1Factor": "1", - "instrument": "id02", - "HS32F09": 135192000, - "HS32F08": 168159000, - "PrimarySlitInfo": "VxH:p1=0.90x0.90,p2=0.90x0.90", - "HS32F05": 1, - "HS32F04": 287887000, - "HS32F07": 1, - "HS32F06": 967341, - "HS32F01": 1e-06, - "HS32F03": 288583000, - "HS32F02": 100000, - "HSI0Factor": "0.956645", - "HMStartTime": "2014-10-16T16:17:36.514547+0200", - "HSI0": 4, - "HSI1": 8, - "HSTime": 1, - "HS32N12": "PIN4", - "HS32N13": "PIN51", - "HS32N10": "PIN2", - "HS32N11": "AUX2", - "HS32N16": "THC2", - "HS32N14": "PIN52", - "HS32N15": "THC1", - "ShutterOpeningTime": "0.005 s", - 
"HS32F12": 163180000, - "HS32F13": 1, - "HS32F10": 10000, - "HS32F11": 1, - "HS32F16": 0.000995868, - "HS32F14": 1, - "HS32F15": 0.000995868, - "HS32Len": 16, - "HS32N": [ - "TIME", - "PIN3", - "PIN41", - "PIN42", - "AUX1", - "PIN6", - "PIN7", - "PIN8", - "PIN1", - "PIN2", - "AUX2", - "PIN4", - "PIN51", - "PIN52", - "THC1", - "THC2" - ], - "MonochromatorInfo": "egy=12500.0eV,theta=9.127deg,hgt=11.7mm,tilt=4.670deg,tra=1.963mm", - "c216": "id02/c216/0", - "HS32Z": [ - 0.0, - 57348.3, - 627.4, - 83.1, - 0.0, - 0.0, - 4832.57, - 346.7, - 95.1, - 134.9, - 0.0, - 5978.6, - 325.9, - 184.1, - 170.0, - 170.0 - ], - "DetTubeInfo": "VxH:detbox=0.000x0.000x0.000,dettab=0.000x0.000", - "Info": { - "ShutterOpeningTime": "0.005 s", - "AttenuatorInfo": "att=0,att1=no_filter,att2=no_filter,att3=no_filter,att4=no_filter", - "SlitInfo": "VxH:s1=1.000x1.000,s2=1.200x1.200,s3=0.200x0.100,s4=0.300x0.300,s5=0.800x0.800", - "StationInfo": "ID02", - "MirrorInfo": "rz=-3.531mrad,ty=0.714mm,ty1=2.303mm,ty2=-0.874mm,tz1=-0.070mm,tz2=0.010mm,mir2rz=2.000mrad", - "MachineInfo": "Ie=-00.09mA,u35u=374.001mm/-0.004mm,u21m=454.996mm/-0.005mm,u21d=395.002mm/-0.000mm", - "plugin_name": "id02.metadata", - "MonochromatorInfo": "egy=12500.0eV,theta=9.127deg,hgt=11.7mm,tilt=4.670deg,tra=1.963mm", - "ShutterClosingTime": "0.005 s", - "PrimarySlitInfo": "VxH:p1=0.90x0.90,p2=0.90x0.90", - "hdf5_filename": "/mntdirect/_data_opid02_inhouse/com/20141008/data3/nj17d_scalers_00002.h5", - "entry": "entry", - "DetTubeInfo": "VxH:detbox=0.000x0.000x0.000,dettab=0.000x0.000", - "ExperimentInfo": "INHOUSE", - "DettubeInfo": "VxH:detbox=0.000x0.000x0.000,dettab=0.000x0.000", - "ProposalInfo": "Prevost_DODAB" - }, - "plugin_name": "id02.metadata", - "DettubeInfo": "VxH:detbox=0.000x0.000x0.000,dettab=0.000x0.000", - "HS32Z01": 0, - "HS32Z03": 627.4, - "HS32Z02": 57348.3, - "HS32Z05": 0, - "HS32Z04": 83.1, - "HS32Z07": 4832.57, - "HS32Z06": 0, - "HS32Z09": 95.1, - "HS32Z08": 346.7, - "StationInfo": "ID02", - 
"SlitInfo": "VxH:s1=1.000x1.000,s2=1.200x1.200,s3=0.200x0.100,s4=0.300x0.300,s5=0.800x0.800", - "HS32Z16": 170, - "HS32Z14": 184.1, - "HS32Z15": 170, - "HS32Z12": 5978.6, - "HS32Z13": 325.9, - "HS32Z10": 134.9, - "HS32Z11": 0, - "ExperimentInfo": "INHOUSE", - "ProposalInfo": "Prevost_DODAB", - "hdf5_filename": "/mntdirect/_data_opid02_inhouse/com/20141008/data3/nj17d_scalers_00002.h5", - "HS32F": [ - 1e-06, - 100000.0, - 288583000.0, - 287887000.0, - 1.0, - 967341.0, - 1.0, - 168159000.0, - 135192000.0, - 10000.0, - 1.0, - 163180000.0, - 1.0, - 1.0, - 0.000995868, - 0.000995868 - ], - "HMStartEpoch": 1413469056.51455, - "HS32Depth": 32, - "MachineInfo": "Ie=-00.09mA,u35u=374.001mm/-0.004mm,u21m=454.996mm/-0.005mm,u21d=395.002mm/-0.000mm", - "MirrorInfo": "rz=-3.531mrad,ty=0.714mm,ty1=2.303mm,ty2=-0.874mm,tz1=-0.070mm,tz2=0.010mm,mir2rz=2.000mrad", - "AttenuatorInfo": "att=0,att1=no_filter,att2=no_filter,att3=no_filter,att4=no_filter", - "ShutterClosingTime": 0.005, - "entry": "entry" -} - -origin_input= -{ - "HS32N05": "AUX1", - "HS32N04": "PIN42", - "HS32N07": "PIN7", - "HS32N06": "PIN6", - "HS32N01": "TIME", - "HS32N03": "PIN41", - "HS32N02": "PIN3", - "HS32N09": "PIN1", - "HS32N08": "PIN8", - "job_id": 21, - "HSI1Factor": "1", - "instrument": "id02", - "HS32F09": 135192000, - "HS32F08": 168159000, - "PrimarySlitInfo": "VxH:p1=0.90x0.90,p2=0.90x0.90", - "HS32F05": 1, - "HS32F04": 287887000, - "HS32F07": 1, - "HS32F06": 967341, - "HS32F01": 1e-06, - "HS32F03": 288583000, - "HS32F02": 100000, - "HSI0Factor": "0.956645", - "HMStartTime": "2014-10-16T16:17:36.514547+0200", - "HSI0": 4, - "HSI1": 8, - "HSTime": 1, - "HS32N12": "PIN4", - "HS32N13": "PIN51", - "HS32N10": "PIN2", - "HS32N11": "AUX2", - "HS32N16": "THC2", - "HS32N14": "PIN52", - "HS32N15": "THC1", - "ShutterOpeningTime": "0.005 s", - "HS32F12": 163180000, - "HS32F13": 1, - "HS32F10": 10000, - "HS32F11": 1, - "HS32F16": 0.000995868, - "HS32F14": 1, - "HS32F15": 0.000995868, - "HS32Len": 16, - 
"MonochromatorInfo": "egy=12500.0eV,theta=9.127deg,hgt=11.7mm,tilt=4.670deg,tra=1.963mm", - "c216": "id02/c216/0", - "DetTubeInfo": "VxH:detbox=0.000x0.000x0.000,dettab=0.000x0.000", - "plugin_name": "id02.metadata", - "DettubeInfo": "VxH:detbox=0.000x0.000x0.000,dettab=0.000x0.000", - "HS32Z01": 0, - "HS32Z03": 627.4, - "HS32Z02": 57348.3, - "HS32Z05": 0, - "HS32Z04": 83.1, - "HS32Z07": 4832.57, - "HS32Z06": 0, - "HS32Z09": 95.1, - "HS32Z08": 346.7, - "StationInfo": "ID02", - "SlitInfo": "VxH:s1=1.000x1.000,s2=1.200x1.200,s3=0.200x0.100,s4=0.300x0.300,s5=0.800x0.800", - "HS32Z16": 170, - "HS32Z14": 184.1, - "HS32Z15": 170, - "HS32Z12": 5978.6, - "HS32Z13": 325.9, - "HS32Z10": 134.9, - "HS32Z11": 0, - "ExperimentInfo": "INHOUSE", - "ProposalInfo": "Prevost_DODAB", - "hdf5_filename": "/mntdirect/_data_opid02_inhouse/com/20141008/data3/nj17d_scalers_00002.h5", - "HMStartEpoch": 1413469056.51455, - "HS32Depth": 32, - "MachineInfo": "Ie=-00.09mA,u35u=374.001mm/-0.004mm,u21m=454.996mm/-0.005mm,u21d=395.002mm/-0.000mm", - "MirrorInfo": "rz=-3.531mrad,ty=0.714mm,ty1=2.303mm,ty2=-0.874mm,tz1=-0.070mm,tz2=0.010mm,mir2rz=2.000mrad", - "AttenuatorInfo": "att=0,att1=no_filter,att2=no_filter,att3=no_filter,att4=no_filter", - "ShutterClosingTime": "0.005 s", - "entry": "entry" -} diff --git a/debug/test_distortion.py b/debug/test_distortion.py deleted file mode 100644 index 08eae0b..0000000 --- a/debug/test_distortion.py +++ /dev/null @@ -1,79 +0,0 @@ -#!/usr/bin/python -import sys, os, time, json, glob - -if "TANGO_HOST" not in os.environ: - raise RuntimeError("No TANGO_HOST defined") -import PyTango - -dahu = PyTango.DeviceProxy("DAU/dahu/1") - -def status(pid): - try: - res = dahu.getJobState(pid) - except: - print("Communication failure with Dahu via Tango") - else: - return res - - -data = { - #"output_hdf5_filename_base": "/nobackup/lid02gpu12/test", - "output_hdf5_filename_base": "/data/opid02/inhouse/test_dahu/test", - "output_hdf5_dataset":"entry_000/data", - 
#"distortion_spline": "/users/kieffer/workspace/pyFAI/test/testimages/frelon.spline", - "input_hdf5_filename": "/nobackup/lid02gpu11/FRELON/test_laurent_saxs_0000.h5", - - "input_hdf5_dataset": "/entry_0000/measurement/detector/data" -} -kwargs = "{\"x\":5}" - -for i in range(1): - for plugin in [ "example.cube","example.square","example.sleep"]: - pid = dahu.startJob([plugin, kwargs]) -print("%s id: %i"%(plugin, pid)) -print("Input: %s"% dahu.getJobInput(pid)) -print("Output: %s"% dahu.getJobOutput(pid)) -print("state: %s"% status(pid)) - - -def startjob(spline=None,method=None,device=None,wg=None): - if method: - data["method"] = method - if wg: - data["workgroup"] = wg - if device: - data["device"] = device - if spline: - data["distortion_spline"] = spline - data["output_hdf5_filename"] = "%s_%04i.h5"%(data["output_hdf5_filename_base"],len(glob.glob(data["output_hdf5_filename_base"]+"*"))) - print(data) - pid = dahu.startJob(["id02.distortion", json.dumps(data)]) - print("%s id: %i"%(plugin, pid)) - print("Input: %s"% dahu.getJobInput(pid)) - print("Output: %s"% dahu.getJobOutput(pid)) - print("state: %s"% status(pid)) - while status(pid) not in ["success", "failure"]: - time.sleep(1) - print("output:") - output =dahu.getJobOutput(pid) - print(output) - output - print("Average write speed: %f MB/s"%(os.path.getsize(data["output_hdf5_filename"])/json.loads(output)["job_runtime"]/2**20)) - -for i in ((None,None,None,None), - ("/users/kieffer/workspace/pyFAI/test/testimages/frelon.spline",None,None,None), - ("/users/kieffer/workspace/pyFAI/test/testimages/frelon.spline","lut",None,None), - ("/users/kieffer/workspace/pyFAI/test/testimages/frelon.spline","csr",None,None), - ("/users/kieffer/workspace/pyFAI/test/testimages/frelon.spline","lut","gpu",8), - ("/users/kieffer/workspace/pyFAI/test/testimages/frelon.spline","lut","cpu",8), - ("/users/kieffer/workspace/pyFAI/test/testimages/frelon.spline","csr","gpu",8), - 
("/users/kieffer/workspace/pyFAI/test/testimages/frelon.spline","csr","cpu",8), - ("/users/kieffer/workspace/pyFAI/test/testimages/frelon.spline","lut","gpu",32), - ("/users/kieffer/workspace/pyFAI/test/testimages/frelon.spline","lut","cpu",32), - ("/users/kieffer/workspace/pyFAI/test/testimages/frelon.spline","csr","gpu",32), - ("/users/kieffer/workspace/pyFAI/test/testimages/frelon.spline","csr","cpu",32), - - ): - startjob(*i) - -dahu.collectStatistics() \ No newline at end of file diff --git a/debug/test_tango_server.py b/debug/test_tango_server.py deleted file mode 100755 index 9cf001d..0000000 --- a/debug/test_tango_server.py +++ /dev/null @@ -1,85 +0,0 @@ -#!/usr/bin/python -import sys, os, time, json - -if "TANGO_HOST" not in os.environ: - raise RuntimeError("No TANGO_HOST defined") -import PyTango - -dahu = PyTango.DeviceProxy("DAU/dahu/1") - - - -kwargs = "{\"x\":5}" - -for i in range(1): - for plugin in [ "example.cube", "example.square", "example.sleep"]: - pid = dahu.startJob([plugin, kwargs]) -print("%s id: %i" % (plugin, pid)) -print("Input: %s" % dahu.getJobInput(pid)) -print("Output: %s" % dahu.getJobOutput(pid)) -print("state: %s" % dahu.getJobState(pid)) -# dahu.collectStatistics() -# print(dahu.getStatistics()) - - -def startjob(method, device, wg): - data_input = json.loads(open("demo_distortion.json").read()) - data_input["method"] = method - data_input["device"] = device - data_input["workgroup"] = wg - data = json.dumps(data_input) - print(data) - pid = dahu.startJob(["id02.distortion", data]) - print("%s id: %i" % (plugin, pid)) - print("Input: %s" % dahu.getJobInput(pid)) - print("Output: %s" % dahu.getJobOutput(pid)) - print("state: %s" % dahu.getJobState(pid)) - while dahu.getJobState(pid) not in ["success", "failure"]: - time.sleep(1) - -# for device in (None,"CPU", "GPU"): -# for method in ("csr","lut"): -# for wg in [1,2,4,8,16,32]: -# if (method != "CSR" or device == "None")and wg > 1: -# continue -# else: -# 
startjob(method,device,wg) - -data = { - "hdf5_filename":"/nobackup/lid02gpu11/metadata/test.h5", - "entry": "entry", - "instrument":"id02", - "c216":"id02/c216/0", - "HS32F": [1e-06, 1, 7763480, 8176290, 342239, 967341, 5541980, 1739160, 2753.61, 1351920, 140000000, 16719500, 1, 0.000995868, 0.000995868, 1], - "HS32Z": [0, 0, 383.55, 126.4, 6582.1, 6973.6, 162.95, 0, 221.2, 207.8, 315.26, 145.11, 323.76, 170, 170, 228.4], - "HS32N": ["TIME", "AUX1", "PIN41", "PIN42", "PIN5", "PIN6", "PIN7", "PIN8", "PIN1", "PIN2", "PIN3", "PIN4", "AUX2", "THC1", "THC2", "PRESS"], - "HSI0": 12, - "HSI1": 7, - "HSTime": 1, - "HMStartEpoch": 1405087717.12159, - "HMStartTime": "2014-07-11T16:08:37.121591+0200", - "info": {"DetectorInfo":"VxH:detbox=14952.235x0.000x1.000,dettab=-62.000x-245.000", - "ExperimentInfo":"0", - "MachineInfo": "Ie=183.27mA,u35u=100.000mm/0.001mm,u21m=100.000mm/0.000mm,u21d=100.000mm/-0.000mm", - "MirrorInfo": "rz=-3.600mrad,ty=0.455mm,ty1=2.075mm,ty2=-1.165mm,tz1=-0.030mm,tz2=-0.090mm,mir2rz=2.000mrad", - "OpticsInfo": "egy=12460.0eV,theta=9.132deg,hgt=11.7mm,tilt=4.440deg,tra=1.963mm", - "ProposalInfo": 0, - "StationInfo": "ID02" - } - } - -pid = dahu.startJob(["id02.metadata", json.dumps(data)]) -print("%s id: %i" % (plugin, pid)) -print("Input: %s" % dahu.getJobInput(pid)) -print("Output: %s" % dahu.getJobOutput(pid)) -print("state: %s" % dahu.getJobState(pid)) -while dahu.getJobState(pid) not in ["success", "failure"]: - time.sleep(1) - -# dahu.collectStatistics() -# time.sleep(5) -# print(dahu.getStatistics()) -print("Input: %s" % dahu.getJobInput(pid)) -print("Output: %s" % dahu.getJobOutput(pid)) -print("state: %s" % dahu.getJobState(pid)) -print("error: %s" % dahu.getJobError(pid)) diff --git a/plugins/bm29/icat.py b/plugins/bm29/icat.py index 27eed15..eaca474 100644 --- a/plugins/bm29/icat.py +++ b/plugins/bm29/icat.py @@ -96,15 +96,18 @@ def send_icat(proposal=None, beamline=None, sample=None, dataset=None, path=None if isinstance(k, str) and 
k.startswith("SAXS_"): metadata[k] = v sample = data.get("sample") - if sample: - metadata["SAXS_concentration"] = str(sample.concentration) - metadata["SAXS_code"] = sample.name - metadata["SAXS_comments"] = sample.description - metadata["SAXS_storage_temperature"] = str(sample.temperature_env) - metadata["SAXS_exposure_temperature"] = str(sample.temperature) - if sample.hplc: - metadata["SAXS_column_type"] = sample.hplc - #"buffer": "description of buffer, pH, ...", + if sample is not None: + if isinstance(sample, str): + metadata["SAXS_code"] = sample + else: + metadata["SAXS_concentration"] = str(sample.concentration) + metadata["SAXS_code"] = sample.name + metadata["SAXS_comments"] = sample.description + metadata["SAXS_storage_temperature"] = str(sample.temperature_env) + metadata["SAXS_exposure_temperature"] = str(sample.temperature) + if sample.hplc: + metadata["SAXS_column_type"] = sample.hplc + #"buffer": "description of buffer, pH, ...", guinier = data.get("guinier") if guinier: diff --git a/plugins/bm29/integrate.py b/plugins/bm29/integrate.py index 3fafcf1..4a59187 100644 --- a/plugins/bm29/integrate.py +++ b/plugins/bm29/integrate.py @@ -37,6 +37,7 @@ method, polarization_factor, Nexus, get_isotime, SAXS_STYLE, NORMAL_STYLE, \ create_nexus_sample from .ispyb import IspybConnector, NumpyEncoder +from .icat import send_icat from .memcached import to_memcached @@ -231,7 +232,8 @@ def process(self): self.create_nexus() self.output["memcached"] = self.send_to_memcached() self.send_to_ispyb() - self.output["icat"] = self.send_to_icat() + #self.output["icat"] = + self.send_to_icat() def wait_file(self, filename, timeout=None): """Wait for a file to appear on a filesystem @@ -630,7 +632,7 @@ def send_to_icat(self): #Some more metadata for iCat, as strings: to_icat = copy.copy(self.to_pyarch) to_icat["experiment_type"] = "hplc" if self.input.get("hplc_mode") else "sample-changer" - to_icat["sample"] = self.sample.name + to_icat["sample"] = self.sample 
to_icat["SAXS_maskFile"] = self.mask to_icat["SAXS_waveLength"] = str(self.ai.wavelength) to_icat["SAXS_normalisation"] = str(self.normalization_factor) diff --git a/plugins/bm29/memcached.py b/plugins/bm29/memcached.py index 252bc58..4bd8432 100644 --- a/plugins/bm29/memcached.py +++ b/plugins/bm29/memcached.py @@ -28,6 +28,6 @@ def to_memcached(dico): if memcache is not None: mc = memcache.Client([(SERVER, 11211)]) rc["server"] = socket.getfqdn()+":11211" - for k, v in dico.items: + for k, v in dico.items(): rc[k] = mc.set(k, v) return rc diff --git a/plugins/bm29/subtracte.py b/plugins/bm29/subtracte.py index 7226771..8e7567b 100644 --- a/plugins/bm29/subtracte.py +++ b/plugins/bm29/subtracte.py @@ -140,8 +140,10 @@ def teardown(self): self.output["Dmax"] = self.Dmax self.output["Vc"] = self.Vc self.output["mass"] = self.mass - self.output["memcached"] = self.send_to_memcached() - self.output["icat"] = self.send_to_icat() + #self.output["memcached"] = + self.send_to_memcached() + #self.output["icat"] = + self.send_to_icat() #teardown everything else: if self.nxs is not None: self.nxs.close() From e4d38793c9b40109ce320f607584c606d4469cb3 Mon Sep 17 00:00:00 2001 From: Jerome Kieffer Date: Mon, 24 Feb 2025 17:50:43 +0100 Subject: [PATCH 08/18] Issue with the type of run_number --- plugins/bm29/ispyb.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/plugins/bm29/ispyb.py b/plugins/bm29/ispyb.py index d35b898..af3e470 100644 --- a/plugins/bm29/ispyb.py +++ b/plugins/bm29/ispyb.py @@ -11,7 +11,7 @@ __contact__ = "Jerome.Kieffer@ESRF.eu" __license__ = "MIT" __copyright__ = "European Synchrotron Radiation Facility, Grenoble, France" -__date__ = "21/02/2025" +__date__ = "24/02/2025" __status__ = "development" version = "0.2.3" @@ -108,6 +108,7 @@ def send_icat(self, proposal=None, beamline=None, sample=None, dataset=None, pat :param raw: directory name of the raw data (not the processed ones) :param data: dict with all data sent to ISpyB """ + 
logger.error("Deprecated code `ispyb.IspybConnector.send_icat()` Switch to `icat.send_icat()`") tmp = self.gallery.strip("/").split("/") idx_process = [i for i,j in enumerate(tmp) if j.lower().startswith("process")][-1] if tmp[idx_process] == "processed": @@ -309,7 +310,6 @@ def send_subtracted(self, data): :param data: a dict with all information to be saved in Ispyb """ - run_number = list(self.run_number) guinier = data.get("guinier") gnom = data.get("bift") subtracted = data.get("subtracted") @@ -338,7 +338,7 @@ def send_subtracted(self, data): densityPlot = "" self.client.service.addSubtraction(str(self.experiment_id), - str(run_number), + str(self.run_number), str(guinier.Rg if guinier else -1), str(guinier.sigma_Rg if guinier else -1), str(guinier.I0 if guinier else -1), From 45b63b4c62b47b1cafb3a7286bcda3ebde0deb2e Mon Sep 17 00:00:00 2001 From: Jerome Kieffer Date: Mon, 24 Feb 2025 18:04:09 +0100 Subject: [PATCH 09/18] Check if it works better ... --- plugins/bm29/ispyb.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/plugins/bm29/ispyb.py b/plugins/bm29/ispyb.py index af3e470..4081595 100644 --- a/plugins/bm29/ispyb.py +++ b/plugins/bm29/ispyb.py @@ -338,7 +338,7 @@ def send_subtracted(self, data): densityPlot = "" self.client.service.addSubtraction(str(self.experiment_id), - str(self.run_number), + str([self.run_number]), str(guinier.Rg if guinier else -1), str(guinier.sigma_Rg if guinier else -1), str(guinier.I0 if guinier else -1), From 285f0eadcb3489ca3a75b82631f7ddd7d4fdc836 Mon Sep 17 00:00:00 2001 From: Jerome Kieffer Date: Mon, 24 Feb 2025 18:07:30 +0100 Subject: [PATCH 10/18] another attempt --- plugins/bm29/ispyb.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/plugins/bm29/ispyb.py b/plugins/bm29/ispyb.py index 4081595..5967692 100644 --- a/plugins/bm29/ispyb.py +++ b/plugins/bm29/ispyb.py @@ -338,7 +338,7 @@ def send_subtracted(self, data): densityPlot = "" 
self.client.service.addSubtraction(str(self.experiment_id), - str([self.run_number]), + [str(self.run_number)], str(guinier.Rg if guinier else -1), str(guinier.sigma_Rg if guinier else -1), str(guinier.I0 if guinier else -1), From 6e59f09a487e7540f889097c0307c6ea26bea594 Mon Sep 17 00:00:00 2001 From: Jerome Kieffer Date: Mon, 24 Feb 2025 18:15:59 +0100 Subject: [PATCH 11/18] debug --- plugins/bm29/ispyb.py | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/plugins/bm29/ispyb.py b/plugins/bm29/ispyb.py index 5967692..063daf0 100644 --- a/plugins/bm29/ispyb.py +++ b/plugins/bm29/ispyb.py @@ -310,6 +310,10 @@ def send_subtracted(self, data): :param data: a dict with all information to be saved in Ispyb """ + try: + run_number = list(self.run_number) + except TypeError: + run_number = [self.run_number] guinier = data.get("guinier") gnom = data.get("bift") subtracted = data.get("subtracted") @@ -336,9 +340,9 @@ def send_subtracted(self, data): densityPlot = self.density_plot(gnom, basename) else: densityPlot = "" - + print(run_number) self.client.service.addSubtraction(str(self.experiment_id), - [str(self.run_number)], + str(run_number), str(guinier.Rg if guinier else -1), str(guinier.sigma_Rg if guinier else -1), str(guinier.I0 if guinier else -1), From 5ce47f18320b13295ab35daee9e8af536e7ec37b Mon Sep 17 00:00:00 2001 From: Jerome Kieffer Date: Mon, 24 Feb 2025 18:17:42 +0100 Subject: [PATCH 12/18] less verbose --- plugins/bm29/ispyb.py | 1 - 1 file changed, 1 deletion(-) diff --git a/plugins/bm29/ispyb.py b/plugins/bm29/ispyb.py index 063daf0..d27b7ae 100644 --- a/plugins/bm29/ispyb.py +++ b/plugins/bm29/ispyb.py @@ -340,7 +340,6 @@ def send_subtracted(self, data): densityPlot = self.density_plot(gnom, basename) else: densityPlot = "" - print(run_number) self.client.service.addSubtraction(str(self.experiment_id), str(run_number), str(guinier.Rg if guinier else -1), From a395efb7b14a920baa44f0cc93e0485d85c107dd Mon Sep 17 00:00:00 2001 From: 
Jerome Kieffer Date: Mon, 24 Feb 2025 18:31:54 +0100 Subject: [PATCH 13/18] No ispyb communication if broken --- plugins/bm29/subtracte.py | 15 ++++++++------- 1 file changed, 8 insertions(+), 7 deletions(-) diff --git a/plugins/bm29/subtracte.py b/plugins/bm29/subtracte.py index 8e7567b..8883a4c 100644 --- a/plugins/bm29/subtracte.py +++ b/plugins/bm29/subtracte.py @@ -11,7 +11,7 @@ __contact__ = "Jerome.Kieffer@ESRF.eu" __license__ = "MIT" __copyright__ = "European Synchrotron Radiation Facility, Grenoble, France" -__date__ = "21/02/2025" +__date__ = "24/02/2025" __status__ = "development" __version__ = "0.3.0" @@ -140,10 +140,7 @@ def teardown(self): self.output["Dmax"] = self.Dmax self.output["Vc"] = self.Vc self.output["mass"] = self.mass - #self.output["memcached"] = - self.send_to_memcached() - #self.output["icat"] = - self.send_to_icat() + #teardown everything else: if self.nxs is not None: self.nxs.close() @@ -170,8 +167,9 @@ def process(self): self.log_warning(f"Processing failed and unable to send remaining data to ISPyB: {type(err2)} {err2}\n{traceback.format_exc(limit=10)}") raise(err) else: - self.send_to_ispyb() - + self.send_to_ispyb() + self.send_to_icat() + self.output["memcached"] = self.send_to_memcached() def validate_buffer(self, buffer_file): @@ -743,6 +741,9 @@ def send_to_ispyb(self): def send_to_icat(self): to_icat = copy.copy(self.to_pyarch) to_icat["experiment_type"] = "sample-changer" + if self.sample_juice is None: + self.log_warning("Sample_juice is None in send_to_icat. 
Not sending garbage") + return to_icat["sample"] = self.sample_juice.sample metadata = {"scanType": "subtraction"} return send_icat(sample=self.sample_juice.sample, From ecec94bf6ad2e9a4eef1fc3b6f91546f8a46ffcb Mon Sep 17 00:00:00 2001 From: Jerome Kieffer Date: Tue, 25 Feb 2025 16:39:51 +0100 Subject: [PATCH 14/18] debug from the beamline --- plugins/bm29/icat.py | 7 +++++-- plugins/bm29/integrate.py | 2 +- 2 files changed, 6 insertions(+), 3 deletions(-) diff --git a/plugins/bm29/icat.py b/plugins/bm29/icat.py index eaca474..20a93f2 100644 --- a/plugins/bm29/icat.py +++ b/plugins/bm29/icat.py @@ -48,7 +48,7 @@ def send_icat(proposal=None, beamline=None, sample=None, dataset=None, path=None :param sample: sample name as registered in icat :param dataset: name given by BLISS :param path: directory name where processed data are staying - :param raw: directory name of the raw data (not the processed ones) + :param raw: list of directory name of the raw data (not the processed ones) :param data: dict with all data sent to iCat :param gallery: path with the gallery directory :param metadata: dict with additionnal metadata (could be overwritten by this function @@ -127,6 +127,8 @@ def send_icat(proposal=None, beamline=None, sample=None, dataset=None, path=None volume = data.get("volume") if volume: metadata["SAXS_porod_volume"] = str(volume) + if not isinstance(raw, list): + raw = [raw] #Other metadata one may collect ... 
metadata["SAXS_experiment_type"]= data.get("experiment_type", "UNKNOWN") metadata["datasetName"] = dataset @@ -136,6 +138,7 @@ def send_icat(proposal=None, beamline=None, sample=None, dataset=None, path=None "dataset":dataset, "path":path, "metadata":metadata, - "raw":[raw]} + "raw":raw} + #print(kwargs) icat_client.store_processed_data(**kwargs) return kwargs diff --git a/plugins/bm29/integrate.py b/plugins/bm29/integrate.py index 4a59187..d0e0b2f 100644 --- a/plugins/bm29/integrate.py +++ b/plugins/bm29/integrate.py @@ -648,7 +648,7 @@ def send_to_icat(self): metadata = {"scanType": "integration"} return send_icat(sample=self.sample.name, - raw=os.path.dirname(os.path.abspath(self.input_file)), + raw=os.path.dirname(os.path.dirname(os.path.abspath(self.input_file))), path=os.path.dirname(os.path.abspath(self.output_file)), data=to_icat, gallery=self.ispyb.gallery or os.path.join(os.path.dirname(os.path.abspath(self.output_file)), "gallery"), From 56d31b92b0173b2601db5ffe0101f5c4d940663b Mon Sep 17 00:00:00 2001 From: Jerome Kieffer Date: Tue, 25 Feb 2025 16:52:53 +0100 Subject: [PATCH 15/18] subtracted data has several sources --- plugins/bm29/subtracte.py | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/plugins/bm29/subtracte.py b/plugins/bm29/subtracte.py index 8883a4c..75246df 100644 --- a/plugins/bm29/subtracte.py +++ b/plugins/bm29/subtracte.py @@ -16,6 +16,7 @@ __version__ = "0.3.0" import os +import posixpath import json import copy from math import log, pi @@ -719,7 +720,7 @@ def read_nexus(filename): norm = img_grp["normalization"][()] if "normalization" in img_grp else None # Read the sample description: sample_grp = nxsr.get_class(entry_grp, class_type="NXsample")[0] - sample_name = sample_grp.name + sample_name = posixpath.basename(sample_grp.name) buffer = sample_grp["buffer"][()] if "buffer" in sample_grp else "" concentration = sample_grp["concentration"][()] if "concentration" in sample_grp else "" @@ -746,8 +747,10 @@ def 
send_to_icat(self): return to_icat["sample"] = self.sample_juice.sample metadata = {"scanType": "subtraction"} + raw = [os.path.dirname(os.path.abspath(i)) for i in self.buffer_files] + raw.append(os.path.dirname(os.path.abspath(self.sample_file))) return send_icat(sample=self.sample_juice.sample, - raw=os.path.dirname(os.path.abspath(self.sample_file)), + raw=raw, path=os.path.dirname(os.path.abspath(self.output_file)), data=to_icat, gallery=self.ispyb.gallery or os.path.join(os.path.dirname(os.path.abspath(self.output_file)), "gallery"), From ebe64f0b50dfce5c577e249eb235904e10df8e00 Mon Sep 17 00:00:00 2001 From: Jerome Kieffer Date: Tue, 25 Feb 2025 18:30:35 +0100 Subject: [PATCH 16/18] fix sample being a namedtuple --- plugins/bm29/icat.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/plugins/bm29/icat.py b/plugins/bm29/icat.py index 20a93f2..3cc5cc1 100644 --- a/plugins/bm29/icat.py +++ b/plugins/bm29/icat.py @@ -90,18 +90,18 @@ def send_icat(proposal=None, beamline=None, sample=None, dataset=None, path=None if metadata is None: metadata = {} metadata["definition"] = "SAXS", - metadata["Sample_name"] = sample + # metadata["Sample_name"] = sample for k,v in data.items(): if isinstance(k, str) and k.startswith("SAXS_"): metadata[k] = v - sample = data.get("sample") + sample = data.get("sample", sample) if sample is not None: if isinstance(sample, str): - metadata["SAXS_code"] = sample + metadata["Sample_name"] = metadata["SAXS_code"] = sample else: metadata["SAXS_concentration"] = str(sample.concentration) - metadata["SAXS_code"] = sample.name + metadata["Sample_name"] = metadata["SAXS_code"] = sample.name metadata["SAXS_comments"] = sample.description metadata["SAXS_storage_temperature"] = str(sample.temperature_env) metadata["SAXS_exposure_temperature"] = str(sample.temperature) From 4b112713439df130bab2ae63435b93e157cc1f5b Mon Sep 17 00:00:00 2001 From: Jerome Kieffer Date: Mon, 17 Mar 2025 16:50:45 +0100 Subject: [PATCH 17/18] 
Implement new metadata to DRAC --- plugins/bm29/icat.py | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/plugins/bm29/icat.py b/plugins/bm29/icat.py index 3cc5cc1..15549b9 100644 --- a/plugins/bm29/icat.py +++ b/plugins/bm29/icat.py @@ -119,6 +119,8 @@ def send_icat(proposal=None, beamline=None, sample=None, dataset=None, path=None if bift: metadata["SAXS_rg"] = f"{bift.Rg_avg:.1f}±{bift.Rg_std:.1f}" metadata["SAXS_d_max"] = f"{bift.Dmax_avg:.1f}±{bift.Dmax_std:.1f}" + metadata["SAXS_chi2r"] = str(bift.chi2r_avg) + metadata["SAXS_chi2r_error"] = str(bift.chi2r_std) tomerge = data.get("merged") if tomerge: @@ -127,6 +129,14 @@ def send_icat(proposal=None, beamline=None, sample=None, dataset=None, path=None volume = data.get("volume") if volume: metadata["SAXS_porod_volume"] = str(volume) + rti = data.get("rti") + if rti: + "Vc sigma_Vc Qr sigma_Qr mass sigma_mass" + metadata["SAXS_vc"] = str(rti.Vc) + metadata["SAXS_vc_error"] = str(rti.sigma_Vc) + metadata["SAXS_mass"] = str(rti.mass) + metadata["SAXS_mass_error"] = str(rti.sigma_mass) + if not isinstance(raw, list): raw = [raw] #Other metadata one may collect ... 
From ae47210301d63aaf1d7aed82734e3a1fb41bef50 Mon Sep 17 00:00:00 2001 From: Jerome Kieffer Date: Tue, 18 Mar 2025 14:04:45 +0100 Subject: [PATCH 18/18] more metadata for icat --- plugins/bm29/icat.py | 20 ++++++++++++-------- 1 file changed, 12 insertions(+), 8 deletions(-) diff --git a/plugins/bm29/icat.py b/plugins/bm29/icat.py index 15549b9..e6eac7a 100644 --- a/plugins/bm29/icat.py +++ b/plugins/bm29/icat.py @@ -111,16 +111,20 @@ def send_icat(proposal=None, beamline=None, sample=None, dataset=None, path=None guinier = data.get("guinier") if guinier: - metadata["SAXS_guinier_rg"] = f"{guinier.Rg:.1f}±{guinier.sigma_Rg:.1f}" - metadata["SAXS_guinier_points"] = f"{guinier.start_point}-{guinier.end_point}" + metadata["SAXS_guinier_rg"] = f"{guinier.Rg:.1f}" + metadata["SAXS_guinier_sigma_rg"] = f"{guinier.sigma_Rg:.1f}" + metadata["SAXS_guinier_points_start"] = f"{guinier.start_point}" + metadata["SAXS_guinier_points_end"] = f"{guinier.end_point}" metadata["SAXS_guinier_i0"] = f"{guinier.I0:.1f}±{guinier.sigma_I0:.1f}" bift = data.get("bift") if bift: metadata["SAXS_rg"] = f"{bift.Rg_avg:.1f}±{bift.Rg_std:.1f}" + metadata["SAXS_rg_std"] = f"{bift.Rg_avg:.1f}" + metadata["SAXS_rg_avg"] = f"{bift.Rg_std:.1f}" metadata["SAXS_d_max"] = f"{bift.Dmax_avg:.1f}±{bift.Dmax_std:.1f}" - metadata["SAXS_chi2r"] = str(bift.chi2r_avg) - metadata["SAXS_chi2r_error"] = str(bift.chi2r_std) + metadata["SAXS_chi2r"] = f"{bift.chi2r_avg:.2f}" + metadata["SAXS_chi2r_error"] = f"{bift.chi2r_std:.2f}" tomerge = data.get("merged") if tomerge: @@ -132,10 +136,10 @@ def send_icat(proposal=None, beamline=None, sample=None, dataset=None, path=None rti = data.get("rti") if rti: "Vc sigma_Vc Qr sigma_Qr mass sigma_mass" - metadata["SAXS_vc"] = str(rti.Vc) - metadata["SAXS_vc_error"] = str(rti.sigma_Vc) - metadata["SAXS_mass"] = str(rti.mass) - metadata["SAXS_mass_error"] = str(rti.sigma_mass) + metadata["SAXS_vc"] = f"{rti.Vc:.2f}" + metadata["SAXS_vc_error"] = f"{rti.sigma_Vc:.2f}" + 
metadata["SAXS_mass"] = f"{rti.mass:.2f}" + metadata["SAXS_mass_error"] = f"{rti.sigma_mass:.2f}" if not isinstance(raw, list): raw = [raw]