Skip to content
Open

Bm29 #101

Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
58 commits
Select commit Hold shift + click to select a range
4670521
WIP
kif Apr 3, 2025
438a411
fix import
kif Apr 3, 2025
97a42db
propagate the type of experiment to icat
kif Apr 22, 2025
a9a2a67
Merge remote-tracking branch 'origin/BM29' into BM29
kif Apr 22, 2025
908c186
belt&suspenders for directory creation
kif Apr 22, 2025
755fa66
Merge remote-tracking branch 'origin/master' into BM29
kif Apr 22, 2025
78a4eab
Merge remote-tracking branch 'slavia2/debian' into BM29
kif Apr 22, 2025
c84900a
track plugin used
Apr 22, 2025
904242c
Merge remote-tracking branch 'origin/BM29' into BM29
Apr 22, 2025
00f4b83
Merge remote-tracking branch 'lintaillefer/BM29' into BM29
Apr 22, 2025
f7ba0de
save as zip hplc dataset
kif Apr 22, 2025
292d786
Merge remote-tracking branch 'lintaillefer/BM29' into BM29
Apr 22, 2025
2429214
typo
kif Apr 22, 2025
3daaa0c
Merge remote-tracking branch 'lintaillefer/BM29' into BM29
Apr 22, 2025
c07fe88
typo2
kif Apr 22, 2025
4c70cc7
Merge remote-tracking branch 'lintaillefer/BM29' into BM29
Apr 22, 2025
b8f3bc6
namedtuple
kif Apr 22, 2025
c6bd845
Merge remote-tracking branch 'lintaillefer/BM29' into BM29
Apr 22, 2025
150c985
DIscard common prefix
kif Apr 22, 2025
fb553e1
Merge remote-tracking branch 'lintaillefer/BM29' into BM29
Apr 22, 2025
87d612c
Add docstring
kif Apr 22, 2025
a94ebf6
Merge remote-tracking branch 'lintaillefer/BM29' into BM29
Apr 22, 2025
081a0ff
memcache key length:
kif Apr 22, 2025
729f629
typo
kif Apr 22, 2025
e0ca321
Merge branch 'BM29' into BM29_mesh
kif Apr 28, 2025
b5a9579
Work on the mesh scan for BM29
kif Apr 28, 2025
9b7b468
Merge remote-tracking branch 'origin/BM29_mesh' into BM29
May 12, 2025
0f26992
fix import of azimuthalIntegrator`
kif May 12, 2025
b2b51db
Merge remote-tracking branch 'slavia/BM29' into BM29
May 12, 2025
afd503c
WIP
kif May 14, 2025
0328862
Merge remote-tracking branch 'stanza/BM29' into BM29_mesh
kif May 14, 2025
1621d5f
Merge branch 'BM29_relpath' into BM29
kif Jun 4, 2025
0204a48
fix the reading of default attribute
kif Jun 4, 2025
1432c2b
few typos
kif Jun 4, 2025
26e12cb
Export SC experiment as zip format
kif Jun 10, 2025
0015631
typo
kif Jun 10, 2025
9297826
increment version
kif Jun 10, 2025
3583355
typo
kif Jun 10, 2025
9451a63
bytes issues
kif Jun 10, 2025
2467ad1
typo
kif Jun 10, 2025
9633e34
Code not yet ready for production. commented
kif Jun 10, 2025
287a57d
read all data from individual frames...
kif Jun 10, 2025
51b1382
typo
kif Jun 10, 2025
ad34ff5
Merge remote-tracking branch 'origin/master' into BM29
kif Jun 13, 2025
855248d
Merge branch 'BM29_mesh' into BM29
kif Jun 13, 2025
1b5c04b
remove debugging
kif Jun 13, 2025
1b685c2
make "results" -> "result" to be consistent with pyFAI
kif Jun 17, 2025
e471c5e
fix saving config
kif Jun 18, 2025
9e80a58
implement the saving of the map_ptr
kif Jun 18, 2025
127e2c2
make mesh-scan work ...
kif Jun 23, 2025
e05ba4e
fix Kratky plot display
kif Jun 25, 2025
4a36be2
start implementing transposition
kif Dec 16, 2025
f7c2854
update doc
kif Feb 5, 2026
b3b4d13
Merge branch 'BM29'
kif Feb 5, 2026
b0349e8
implement a noise measurement for the monitor value
kif Feb 5, 2026
a54ee9f
Merge remote-tracking branch 'origin/master' into BM29
kif Feb 23, 2026
57fbb1e
calculate stats before assuming everything's gonna be OK
kif Feb 23, 2026
68e7b40
f-string formating
kif Feb 23, 2026
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
13 changes: 7 additions & 6 deletions build-deb.sh
Original file line number Diff line number Diff line change
Expand Up @@ -68,12 +68,12 @@ then
bookworm)
debian_version=12
;;
trixie)
debian_version=13
;;
sid)
debian_version=13
;;
trixie)
debian_version=13
;;
sid)
debian_version=13
;;
esac
fi

Expand Down Expand Up @@ -109,6 +109,7 @@ optional arguments:
--debian10 Simulate a debian 10 Buster system
--debian11 Simulate a debian 11 Bullseye system
--debian12 Simulate a debian 12 Bookworm system
--debian13 Simulate a debian 13 Trixie system
"

install=0
Expand Down
6 changes: 3 additions & 3 deletions doc/source/dahu.rst
Original file line number Diff line number Diff line change
Expand Up @@ -11,15 +11,15 @@ The *dahu* server executes **jobs**:
* The job (de-) serializes JSON strings coming from/returning to Tango
* Jobs are executed asynchronously, the request for calculation is answered instantaneously with a *jobid* (an integer, unique for the process).
* The *jobid* can be used to poll the server for the status of the job or for manual synchronization (mind that Tango can time-out!).
* When jobs are finished, the client is notified via Tango events about the status
* When jobs are finished, the client is notified via **Tango events** about the status change
* Results can be retrieved after the job has finished.

Jobs execute **plugin**:
------------------------

* Plugins are written in Python (extension in Cython or OpenCL are common)
* Plugins are written in Python (extensions in Cython or OpenCL are common)
* Plugins can be classes or simple functions
* The input and output MUST be JSON-seriablisable as simple dictionnaries
* The input and output MUST be JSON-serializable as simple dictionaries
* Plugins are dynamically loaded from Python modules
* Plugins can be profiled for performance analysis

Expand Down
4 changes: 3 additions & 1 deletion plugins/bm29/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,14 +11,16 @@
__contact__ = "Jerome.Kieffer@ESRF.eu"
__license__ = "MIT"
__copyright__ = "European Synchrotron Radiation Facility, Grenoble, France"
__date__ = "03/12/2024"
__date__ = "05/05/2025"
__status__ = "development"
__version__ = "0.2.0"

from dahu.factory import register
from .integrate import IntegrateMultiframe
from .subtracte import SubtractBuffer
from .hplc import HPLC
from .mesh import Mesh
register(IntegrateMultiframe, fqn="bm29.integratemultiframe")
register(SubtractBuffer, fqn="bm29.subtractbuffer")
register(HPLC, fqn="bm29.hplc")
register(Mesh, fqn="bm29.mesh")
85 changes: 64 additions & 21 deletions plugins/bm29/hplc.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,7 +10,7 @@
__contact__ = "Jerome.Kieffer@ESRF.eu"
__license__ = "MIT"
__copyright__ = "European Synchrotron Radiation Facility, Grenoble, France"
__date__ = "27/05/2025"
__date__ = "23/02/2026"
__status__ = "development"
__version__ = "0.3.0"

Expand All @@ -21,6 +21,7 @@
from math import log, pi
import posixpath
import copy
import zipfile
from collections import namedtuple
from urllib3.util import parse_url
from dahu.plugin import Plugin
Expand All @@ -29,11 +30,12 @@
logger = logging.getLogger("bm29.hplc")
import numpy
import h5py
import pyFAI, pyFAI.azimuthalIntegrator, pyFAI.units
import pyFAI, pyFAI.integrator.azimuthal, pyFAI.units
from pyFAI.method_registry import IntegrationMethod
import freesas, freesas.cormap, freesas.invariants
from freesas.autorg import auto_gpa, autoRg, auto_guinier
from freesas.bift import BIFT
from freesas.app.extract_ascii import write_ascii
from scipy.optimize import minimize
import scipy.signal
import scipy.ndimage
Expand Down Expand Up @@ -132,6 +134,41 @@ def build_background(I, std=None, keep=0.3):
return bg_avg, bg_std, to_keep


def save_zip(filename, config, I, sigma):
    """Save a stack of intensity curves into a zip-file, one dat-file per frame.

    :param filename: name of the zip-file to create (truncated/overwritten if it exists)
    :param config: a NexusJuice namedtuple; only the ``q`` array and the
                   ``sample`` description are used here.
    :param I: 2D array with the intensities of the stack of curves
    :param sigma: 2D array with the uncertainties of the stack of frames
    :return: nothing
    """
    base = os.path.splitext(os.path.basename(filename))[0]
    # Template for the name of each dat-file stored inside the archive.
    destz = base + "_%04i.dat"
    # Header entries shared by every frame of the stack.
    common = {"q": config.q}
    if config.sample:
        sample = config.sample
        if sample.name:
            # Fixed: was `common["sample"]: sample.name`, a bare annotation
            # expression that never stored the value.
            common["sample"] = sample.name
        if sample.buffer:
            common["buffer"] = sample.buffer
        if sample.temperature_env:
            common["storage temperature"] = sample.temperature_env
        if sample.temperature:
            common["exposure temperature"] = sample.temperature
        if sample.concentration:
            common["concentration"] = sample.concentration
    with zipfile.ZipFile(filename, "w") as z:
        for idx, (i, s) in enumerate(zip(I, sigma)):
            # Shallow copy is enough: per-frame keys are replaced, shared
            # header values are read-only.
            frame = copy.copy(common)
            frame["I"] = i
            frame["std"] = s
            z.writestr(destz % idx, write_ascii(frame))


class HPLC(Plugin):
""" Rebuild the complete chromatogram and perform basic analysis on it.
Expand Down Expand Up @@ -291,13 +328,15 @@ def create_nexus(self):
time_ds.attrs["interpretation"] = "spectrum"
time_ds.attrs["long_name"] = "Time stamps (s)"

integration_data = nxs.new_class(chroma_grp, "results", "NXdata")
integration_data = nxs.new_class(chroma_grp, "result", "NXdata")
chroma_grp.attrs["title"] = str(self.juices[0].sample)

int_ds = integration_data.create_dataset("I", data=numpy.ascontiguousarray(I, dtype=numpy.float32))
std_ds = integration_data.create_dataset("errors", data=numpy.ascontiguousarray(sigma, dtype=numpy.float32))
q_ds = integration_data.create_dataset("q", data=self.juices[0].q)
q_ds.attrs["interpretation"] = "spectrum"
q_ds.attrs["unit"] = unit_name
q_ds.attrs["long_name"] = "Scattering vector q (nm⁻¹)"
integration_data.attrs["signal"] = "I"
integration_data.attrs["axes"] = [".", "q"]
integration_data.attrs["SILX_style"] = SAXS_STYLE
Expand All @@ -309,6 +348,9 @@ def create_nexus(self):
int_ds.attrs["scale"] = "log"
std_ds.attrs["interpretation"] = "spectrum"

save_zip(os.path.splitext(self.output_file)[0]+".zip",
self.juices[0], I, sigma)

# Process 2: SVD decomposition
svd_grp = nxs.new_class(entry_grp, "2_SVD", "NXprocess")
svd_grp["sequence_index"] = self.sequence_index()
Expand Down Expand Up @@ -385,7 +427,7 @@ def create_nexus(self):
self.to_pyarch["buffer_frames"] = to_keep
self.to_pyarch["buffer_I"] = bg_avg
self.to_pyarch["buffer_Stdev"] = bg_std
bg_data = nxs.new_class(bg_grp, "results", "NXdata")
bg_data = nxs.new_class(bg_grp, "result", "NXdata")
bg_data.attrs["signal"] = "I"
bg_data.attrs["SILX_style"] = SAXS_STYLE
bg_data.attrs["axes"] = radial_unit
Expand Down Expand Up @@ -493,16 +535,15 @@ def one_fraction(self, fraction, index, nxs, top_grp):
guinier_autorg = nxs.new_class(guinier_grp, "autorg", "NXcollection")
guinier_gpa = nxs.new_class(guinier_grp, "gpa", "NXcollection")
guinier_guinier = nxs.new_class(guinier_grp, "guinier", "NXcollection")
guinier_data = nxs.new_class(guinier_grp, "results", "NXdata")
guinier_data = nxs.new_class(guinier_grp, "result", "NXdata")
guinier_data.attrs["SILX_style"] = NORMAL_STYLE
guinier_data.attrs["title"] = "Guinier analysis"
# Stage4 processing: autorg and auto_gpa
sasm = numpy.vstack((q, I_frc, sigma_frc)).T

try:
gpa = auto_gpa(sasm)
except Exception as error:
guinier_gpa["Failed"] = "%s: %s" % (error.__class__.__name__, error)
guinier_gpa["Failed"] = f"{error.__class__.__name__}: {error}"
gpa = None
else:
# "Rg sigma_Rg I0 sigma_I0 start_point end_point quality aggregated"
Expand All @@ -520,7 +561,7 @@ def one_fraction(self, fraction, index, nxs, top_grp):
try:
guinier = auto_guinier(sasm)
except Exception as error:
guinier_guinier["Failed"] = "%s: %s" % (error.__class__.__name__, error)
guinier_guinier["Failed"] = f"{error.__class__.__name__}: {error}"
guinier = None
else:
# "Rg sigma_Rg I0 sigma_I0 start_point end_point quality aggregated"
Expand All @@ -540,7 +581,7 @@ def one_fraction(self, fraction, index, nxs, top_grp):
try:
autorg = autoRg(sasm)
except Exception as err:
guinier_autorg["Failed"] = "%s: %s" % (err.__class__.__name__, err)
guinier_autorg["Failed"] = f"{err.__class__.__name__}: {err}"
autorg = None
else:
if autorg.Rg < 0:
Expand Down Expand Up @@ -594,7 +635,7 @@ def one_fraction(self, fraction, index, nxs, top_grp):
dlogI = err[mask] / logI
q2_ds = guinier_data.create_dataset("q2", data=q2.astype(numpy.float32))
q2_ds.attrs["unit"] = radius_unit + "⁻²"
q2_ds.attrs["long_name"] = "q² (%s⁻²)" % radius_unit
q2_ds.attrs["long_name"] = f"q² ({radius_unit}⁻²)"
q2_ds.attrs["interpretation"] = "spectrum"
lnI_ds = guinier_data.create_dataset("logI", data=logI.astype(numpy.float32))
lnI_ds.attrs["long_name"] = "log(I)"
Expand Down Expand Up @@ -623,7 +664,7 @@ def one_fraction(self, fraction, index, nxs, top_grp):
kratky_grp["program"] = "freesas.autorg"
kratky_grp["version"] = freesas.version
kratky_grp["date"] = get_isotime()
kratky_data = nxs.new_class(kratky_grp, "results", "NXdata")
kratky_data = nxs.new_class(kratky_grp, "result", "NXdata")
kratky_data.attrs["SILX_style"] = NORMAL_STYLE
kratky_data.attrs["title"] = "Dimensionless Kratky plots"
kratky_grp.attrs["default"] = posixpath.relpath(kratky_data.name, kratky_grp.name)
Expand All @@ -637,21 +678,21 @@ def one_fraction(self, fraction, index, nxs, top_grp):
qRg_ds = kratky_data.create_dataset("qRg", data=xdata.astype(numpy.float32))
qRg_ds.attrs["interpretation"] = "spectrum"
qRg_ds.attrs["long_name"] = "q·Rg (unit-less)"
k_ds = kratky_data.create_dataset("q2Rg2I/I0", data=ydata.astype(numpy.float32))
k_ds = kratky_data.create_dataset("q2Rg2I÷I0", data=ydata.astype(numpy.float32))
k_ds.attrs["interpretation"] = "spectrum"
k_ds.attrs["long_name"] = "q²Rg²I(q)/I₀"
ke_ds = kratky_data.create_dataset("errors", data=dy.astype(numpy.float32))
ke_ds.attrs["interpretation"] = "spectrum"
kratky_data_attrs = kratky_data.attrs
kratky_data_attrs["signal"] = "q2Rg2I/I0"
kratky_data_attrs["axes"] = "qRg"
kratky_data_attrs["signal"] = k_ds.name
kratky_data_attrs["axes"] = qRg_ds.name

# stage 6: Rambo-Tainer invariant
rti_grp = nxs.new_class(f_grp, "4_invariants", "NXprocess")
rti_grp["sequence_index"] = self.sequence_index()
rti_grp["program"] = "freesas.invariants"
rti_grp["version"] = freesas.version
rti_data = nxs.new_class(rti_grp, "results", "NXdata")
rti_data = nxs.new_class(rti_grp, "result", "NXdata")
# average_data.attrs["SILX_style"] = SAXS_STYLE
# average_data.attrs["signal"] = "intensity_normed"
# Rambo_Tainer
Expand Down Expand Up @@ -688,7 +729,7 @@ def one_fraction(self, fraction, index, nxs, top_grp):
bift_grp["program"] = "freesas.bift"
bift_grp["version"] = freesas.version
bift_grp["date"] = get_isotime()
bift_data = nxs.new_class(bift_grp, "results", "NXdata")
bift_data = nxs.new_class(bift_grp, "result", "NXdata")
bift_data.attrs["SILX_style"] = NORMAL_STYLE
bift_data.attrs["title"] = "Pair distance distribution function p(r)"

Expand Down Expand Up @@ -726,11 +767,11 @@ def one_fraction(self, fraction, index, nxs, top_grp):
res = minimize(bo.opti_evidence, (Dmax, log(alpha)), args=(npt, use_wisdom), method="powell")
cfg_grp["Powell_steps"] = res.nfev
cfg_grp["Monte-Carlo_steps"] = 0
stats = bo.calc_stats()
except Exception as error:
bift_grp["Failed"] = "%s: %s" % (error.__class__.__name__, error)
bift_grp["Failed"] = f"{error.__class__.__name__}: {error}"
bo = None
else:
stats = bo.calc_stats()
bift_grp["alpha"] = stats.alpha_avg
bift_grp["alpha_error"] = stats.alpha_std
self.Dmax = bift_grp["Dmax"] = stats.Dmax_avg
Expand All @@ -750,7 +791,7 @@ def one_fraction(self, fraction, index, nxs, top_grp):
r_ds.attrs["interpretation"] = "spectrum"

r_ds.attrs["unit"] = radius_unit
r_ds.attrs["long_name"] = "radius r(%s)" % radius_unit
r_ds.attrs["long_name"] = f"radius r({radius_unit})"
p_ds = bift_data.create_dataset("p(r)", data=stats.density_avg.astype(numpy.float32))
p_ds.attrs["interpretation"] = "spectrum"
bift_data["errors"] = stats.density_std
Expand Down Expand Up @@ -882,13 +923,13 @@ def read_nexus(filename):
entry_name = nxsr.h5.attrs["default"]
entry_grp = nxsr.h5[entry_name]
h5path = entry_grp.name
nxdata_grp = nxsr.h5[entry_grp.attrs["default"]]
nxdata_grp = entry_grp[entry_grp.attrs["default"]]
assert nxdata_grp.name.endswith("hplc") # we are reading HPLC data
signal = nxdata_grp.attrs["signal"]
axis = nxdata_grp.attrs["axes"]
Isum = nxdata_grp[signal][()]
idx = nxdata_grp[axis][()]
integrated = nxdata_grp.parent["results"]
integrated = nxdata_grp.parent["result"]
signal = integrated.attrs["signal"]
I = integrated[signal][()]
axes = integrated.attrs["axes"][-1]
Expand Down Expand Up @@ -956,6 +997,7 @@ def send_to_icat(self):
raw=os.path.dirname(os.path.abspath(self.input_files[0])),
path=os.path.dirname(os.path.abspath(self.output_file)),
data=to_icat,
dataset="HPLC",
gallery=gallery,
metadata=metadata)

Expand All @@ -974,3 +1016,4 @@ def save_csv(self, filename, sum_I, Rg):




Loading
Loading