From 1d37d1aa0cbb4369230a63f0e7a2770af7291067 Mon Sep 17 00:00:00 2001 From: "J. Sebastian Paez" Date: Sat, 28 Mar 2026 13:30:24 -0700 Subject: [PATCH 01/64] feat: pyaccess --- Cargo.lock | 110 +++++ Cargo.toml | 3 +- python/timsquery_pyo3/Cargo.toml | 17 + python/timsquery_pyo3/README.md | 228 +++++++++++ .../examples/streaming_example.py | 163 ++++++++ python/timsquery_pyo3/pyproject.toml | 16 + python/timsquery_pyo3/src/chromatogram.rs | 82 ++++ python/timsquery_pyo3/src/elution_group.rs | 109 +++++ python/timsquery_pyo3/src/index.rs | 382 ++++++++++++++++++ python/timsquery_pyo3/src/iterator.rs | 221 ++++++++++ python/timsquery_pyo3/src/lib.rs | 52 +++ python/timsquery_pyo3/src/numpy_utils.rs | 12 + python/timsquery_pyo3/src/spectrum.rs | 165 ++++++++ python/timsquery_pyo3/src/tolerance.rs | 204 ++++++++++ rust/timsquery/src/models/base/arrays.rs | 4 + rust/timsquery/src/models/indexed_data.rs | 47 +++ rust/timsquery/src/traits/queriable_data.rs | 29 +- rust/timsquery_cli/src/processing.rs | 9 +- 18 files changed, 1838 insertions(+), 15 deletions(-) create mode 100644 python/timsquery_pyo3/Cargo.toml create mode 100644 python/timsquery_pyo3/README.md create mode 100644 python/timsquery_pyo3/examples/streaming_example.py create mode 100644 python/timsquery_pyo3/pyproject.toml create mode 100644 python/timsquery_pyo3/src/chromatogram.rs create mode 100644 python/timsquery_pyo3/src/elution_group.rs create mode 100644 python/timsquery_pyo3/src/index.rs create mode 100644 python/timsquery_pyo3/src/iterator.rs create mode 100644 python/timsquery_pyo3/src/lib.rs create mode 100644 python/timsquery_pyo3/src/numpy_utils.rs create mode 100644 python/timsquery_pyo3/src/spectrum.rs create mode 100644 python/timsquery_pyo3/src/tolerance.rs diff --git a/Cargo.lock b/Cargo.lock index 48ff6f0..4b562d8 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -3272,6 +3272,15 @@ dependencies = [ "web-time", ] +[[package]] +name = "indoc" +version = "2.0.7" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "79cf5c93f93228cf8efb3ba362535fb11199ac548a09ce117c9b1adc3030d706" +dependencies = [ + "rustversion", +] + [[package]] name = "insta" version = "1.45.0" @@ -3954,6 +3963,22 @@ dependencies = [ "syn 2.0.111", ] +[[package]] +name = "numpy" +version = "0.25.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "29f1dee9aa8d3f6f8e8b9af3803006101bb3653866ef056d530d53ae68587191" +dependencies = [ + "libc", + "ndarray", + "num-complex", + "num-integer", + "num-traits", + "pyo3", + "pyo3-build-config", + "rustc-hash 2.1.1", +] + [[package]] name = "objc" version = "0.2.7" @@ -4677,6 +4702,68 @@ dependencies = [ "num-traits", ] +[[package]] +name = "pyo3" +version = "0.25.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8970a78afe0628a3e3430376fc5fd76b6b45c4d43360ffd6cdd40bdde72b682a" +dependencies = [ + "indoc", + "libc", + "memoffset", + "once_cell", + "portable-atomic", + "pyo3-build-config", + "pyo3-ffi", + "pyo3-macros", + "unindent", +] + +[[package]] +name = "pyo3-build-config" +version = "0.25.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "458eb0c55e7ece017adeba38f2248ff3ac615e53660d7c71a238d7d2a01c7598" +dependencies = [ + "once_cell", + "target-lexicon", +] + +[[package]] +name = "pyo3-ffi" +version = "0.25.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7114fe5457c61b276ab77c5055f206295b812608083644a5c5b2640c3102565c" +dependencies = [ + "libc", + "pyo3-build-config", +] + +[[package]] +name = "pyo3-macros" +version = "0.25.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a8725c0a622b374d6cb051d11a0983786448f7785336139c3c94f5aa6bef7e50" +dependencies = [ + "proc-macro2", + "pyo3-macros-backend", + "quote", + "syn 2.0.111", +] + +[[package]] +name = "pyo3-macros-backend" +version = "0.25.1" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "4109984c22491085343c05b0dbc54ddc405c3cf7b4374fc533f5c3313a572ccc" +dependencies = [ + "heck", + "proc-macro2", + "pyo3-build-config", + "quote", + "syn 2.0.111", +] + [[package]] name = "quick-error" version = "2.0.1" @@ -5760,6 +5847,12 @@ dependencies = [ "syn 2.0.111", ] +[[package]] +name = "target-lexicon" +version = "0.13.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "adb6935a6f5c20170eeceb1a3835a49e12e19d792f6dd344ccc76a985ca5a6ca" + [[package]] name = "tempfile" version = "3.24.0" @@ -5966,6 +6059,17 @@ dependencies = [ "tracing-subscriber", ] +[[package]] +name = "timsquery_pyo3" +version = "0.26.0" +dependencies = [ + "numpy", + "pyo3", + "rayon", + "timscentroid", + "timsquery", +] + [[package]] name = "timsquery_viewer" version = "0.26.0" @@ -6386,6 +6490,12 @@ version = "0.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b4ac048d71ede7ee76d585517add45da530660ef4390e49b098733c6e897f254" +[[package]] +name = "unindent" +version = "0.2.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7264e107f553ccae879d21fbea1d6724ac785e8c3bfc762137959b5802826ef3" + [[package]] name = "unit-prefix" version = "0.5.2" diff --git a/Cargo.toml b/Cargo.toml index 19db98d..4ae61fc 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -8,7 +8,8 @@ members = [ "rust/timsseek_cli", "rust/timsquery", "rust/timsquery_cli", - "rust/timsquery_viewer" + "rust/timsquery_viewer", + "python/timsquery_pyo3" ] [workspace.package] diff --git a/python/timsquery_pyo3/Cargo.toml b/python/timsquery_pyo3/Cargo.toml new file mode 100644 index 0000000..618f615 --- /dev/null +++ b/python/timsquery_pyo3/Cargo.toml @@ -0,0 +1,17 @@ +[package] +name = "timsquery_pyo3" +version.workspace = true +edition.workspace = true +license.workspace = true + +[lib] +name = "timsquery_pyo3" +crate-type = ["cdylib"] + +[dependencies] +timsquery = { path = 
"../../rust/timsquery" } +timscentroid = { path = "../../rust/timscentroid" } + +rayon = { workspace = true } +pyo3 = { version = "0.25", features = ["extension-module"] } +numpy = "0.25" diff --git a/python/timsquery_pyo3/README.md b/python/timsquery_pyo3/README.md new file mode 100644 index 0000000..91e3be6 --- /dev/null +++ b/python/timsquery_pyo3/README.md @@ -0,0 +1,228 @@ +# timsquery_pyo3 + +Python bindings for [timsquery](../../rust/timsquery/), a Rust library for querying timsTOF mass spectrometry data. + +## Installation + +Requires a Rust toolchain and Python >= 3.9. + +```bash +cd python/timsquery_pyo3 +pip install maturin +maturin develop --release +``` + +## Quick start + +```python +import timsquery_pyo3 as tq + +# 1. Load an index from a .d directory (builds + caches on first run) +# or from a pre-built .d.idx cache +index = tq.PyTimsIndex("path/to/experiment.d") + +# 2. Set up tolerances (or just use defaults: 20ppm, 5min RT, 3% mobility, 0.1Da quad) +tolerance = tq.PyTolerance.default() + +# Override a single dimension: +tolerance = tq.PyTolerance.default().with_mz(tq.PyMzTolerance.ppm(10.0, 10.0)) + +# Or build from scratch: +tolerance = tq.PyTolerance( + mz=tq.PyMzTolerance.ppm(15.0, 15.0), + rt=tq.PyRtTolerance.minutes(3.0, 3.0), + mobility=tq.PyMobilityTolerance.pct(5.0, 5.0), + quad=tq.PyQuadTolerance.absolute(0.1, 0.1), +) + +# 3. Define an elution group (one precursor + its fragments) +eg = tq.PyElutionGroup( + id=1, + precursor_mz=500.0, + precursor_charge=2, + rt_seconds=300.0, + mobility=0.85, + fragment_mzs=[600.1, 700.2, 800.3], + fragment_labels=[0, 1, 2], + precursor_labels=[0, 1, -1], # M0, M+1, M-1 isotopes (optional, defaults to [0]) +) + +# 4. Query +result = index.query_chromatogram(eg, tolerance) + +# 5. 
Access results as numpy arrays +result.fragment_intensities # shape (n_fragments, n_cycles), dtype float32 +result.precursor_intensities # shape (n_precursors, n_cycles), dtype float32 + +result.fragment_labels # [(label, mz), ...] — row order matches the array +result.precursor_labels # [(isotope_offset, mz), ...] +result.rt_range_ms # (start_ms, end_ms) +result.num_cycles # number of RT points +result.id # elution group id +``` + +## Batch queries (parallel) + +```python +elution_groups = [eg1, eg2, eg3, ...] # list of PyElutionGroup + +# Shared tolerance (applied to all queries) +results = index.query_chromatograms_batch(elution_groups, tolerance) + +# Per-query tolerance (list must match length) +results = index.query_chromatograms_batch(elution_groups, [tol1, tol2, tol3]) +``` + +## Aggregator reuse + +Avoid repeated allocations by reusing a `ChromatogramResult` across queries. +The internal `Vec` capacity grows to the largest elution group and stays there. + +```python +result = index.query_chromatogram(eg1, tolerance) # first query — allocates + +index.query_chromatogram_into(result, eg2, tolerance) # reuses allocation +index.query_chromatogram_into(result, eg3, tolerance) # same allocation +``` + +## Streaming queries (iterator in, iterator out) + +For large-scale workloads, stream elution groups from any Python iterator. +Internally uses chunked rayon parallelism and reuses collector allocations +across chunks — after the first chunk, allocations settle and only `memcpy` +into numpy remains. + +```python +# Any iterable works — generator, list, map, etc. 
+eg_iter = (make_eg(row) for row in dataframe.itertuples()) + +# Shared tolerance +for arrays in index.query_chromatograms_iter(eg_iter, tolerance, chunk_size=256): + arrays.id # int + arrays.precursor_intensities # np.ndarray (n_prec, n_cycles), float32 + arrays.fragment_intensities # np.ndarray (n_frag, n_cycles), float32 + arrays.precursor_labels # list[(isotope_offset, mz)] + arrays.fragment_labels # list[(label, mz)] + arrays.rt_range_ms # (start_ms, end_ms) + arrays.num_cycles # int + +# Per-query tolerance (iterable consumed in lockstep with elution groups) +tol_iter = (make_tol(row) for row in dataframe.itertuples()) +for arrays in index.query_chromatograms_iter(eg_iter, tol_iter, chunk_size=256): + ... +``` + +`ChromatogramArrays` is a lightweight frozen object that owns its numpy arrays. +The iterator's internal collector pool is never exposed — it just keeps reusing +the same Rust-side buffers across chunks. + +## Spectral queries + +### Summed intensity per ion + +```python +result = index.query_spectrum(eg, tolerance) +result.precursor_intensities # list[float] — one total intensity per precursor +result.fragment_intensities # list[float] — one total intensity per fragment +result.precursor_labels # list[(isotope_offset, mz)] +result.fragment_labels # list[(label, mz)] +result.id # int +``` + +### Intensity-weighted mean m/z and mobility + +```python +result = index.query_mz_mobility(eg, tolerance) +result.precursor_stats # list[(weight, mean_mz, mean_mobility)] +result.fragment_stats # list[(weight, mean_mz, mean_mobility)] +result.precursor_labels # list[(isotope_offset, mz)] +result.fragment_labels # list[(label, mz)] +result.id # int +``` + +Each stats tuple contains: +- `weight` — total accumulated intensity +- `mean_mz` — intensity-weighted mean m/z (NaN if no peaks found) +- `mean_mobility` — intensity-weighted mean ion mobility in 1/K0 (NaN if no peaks found) + +## Tolerance reference + +Each dimension has its own type with `@staticmethod` 
constructors: + +| Type | Constructors | +|---|---| +| `PyMzTolerance` | `.ppm(low, high)`, `.absolute(low, high)` | +| `PyRtTolerance` | `.minutes(low, high)`, `.pct(low, high)`, `.unrestricted()` | +| `PyMobilityTolerance` | `.absolute(low, high)`, `.pct(low, high)`, `.unrestricted()` | +| `PyQuadTolerance` | `.absolute(low, high)` | + +Tolerances are symmetric ranges expressed as positive values. +A tolerance of `(5.0, 5.0)` on a value of `100.0` gives the range `[95.0, 105.0]`. + +`PyTolerance` supports a builder pattern for overriding individual dimensions: + +```python +tol = tq.PyTolerance.default() # start from defaults +tol = tol.with_mz(tq.PyMzTolerance.ppm(10.0, 10.0)) +tol = tol.with_rt(tq.PyRtTolerance.unrestricted()) +tol = tol.with_mobility(tq.PyMobilityTolerance.absolute(0.05, 0.05)) +tol = tol.with_quad(tq.PyQuadTolerance.absolute(0.2, 0.2)) +``` + +## Lazy vs eager loading + +```python +# Eager (default): loads entire index into memory — faster queries +index = tq.PyTimsIndex("experiment.d") + +# Lazy: loads from cached .idx on demand — faster startup, lower memory +index = tq.PyTimsIndex("experiment.d.idx", prefer_lazy=True) + +index.is_lazy # bool +``` + +## RT / cycle mapping + +```python +index.num_cycles # total MS1 cycles in the acquisition +index.rt_range_ms # (start_ms, end_ms) +index.rt_values_ms # list[int] — RT in ms for every cycle index + +# Convert between seconds and cycle indices +idx = index.rt_seconds_to_cycle_index(300.0) # nearest cycle index +rt = index.cycle_index_to_rt_ms(idx) # back to ms (raises IndexError if OOB) +``` + +Useful for building an RT axis aligned with chromatogram arrays: + +```python +import numpy as np +result = index.query_chromatogram(eg, tolerance) +rt_axis = np.array(index.rt_values_ms, dtype=np.float32) / 1000.0 # seconds +# rt_axis and result.fragment_intensities share the cycle dimension +``` + +## Current limitations + +- **Fragment keys are `usize` only.** The Rust library is generic over key types 
+ (e.g. `IonAnnot`), but this binding fixes `T = usize` for simplicity. +- **Intensity values are `f32` only.** +- **PointIntensityAggregator not yet exposed.** + +## Roadmap + +- [x] **Aggregator reuse** — `query_chromatogram_into` reuses a `ChromatogramCollector` + allocation across queries, avoiding repeated allocation. +- [x] **SpectralCollector** — `query_spectrum` (summed f32) and `query_mz_mobility` + (intensity-weighted mean m/z + mobility) per ion. +- [ ] **PointIntensityAggregator** — single scalar total intensity per elution group. +- [ ] **IonAnnot key type** — support `IonAnnot` fragment labels alongside `usize`, + enabling richer annotation round-trips between Python and Rust. +- [ ] **Zero-copy array access** — return numpy views backed by Rust-owned memory + instead of copying, for large-scale workloads. +- [x] **CycleToRTMapping exposure** — `rt_seconds_to_cycle_index`, `cycle_index_to_rt_ms`, + `rt_values_ms`, `num_cycles`, `rt_range_ms` on `PyTimsIndex`. +- [ ] **Library file I/O** — read DIA-NN / Spectronaut libraries directly into + lists of `PyElutionGroup`, removing boilerplate on the Python side. +- [x] **Streaming queries** — `query_chromatograms_iter` streams from any Python + iterator with chunked rayon parallelism and internal collector reuse. diff --git a/python/timsquery_pyo3/examples/streaming_example.py b/python/timsquery_pyo3/examples/streaming_example.py new file mode 100644 index 0000000..e0c231f --- /dev/null +++ b/python/timsquery_pyo3/examples/streaming_example.py @@ -0,0 +1,163 @@ +# /// script +# /// dependencies = [ +# /// "timsquery_pyo3", +# /// "numpy", +# /// ] +# /// +# /// [tool.uv.sources] +# /// timsquery_pyo3 = { path = ".." } +# /// +""" +Streaming chromatogram extraction from timsTOF data. + +This example demonstrates the three query modes in timsquery_pyo3: + + 1. Single query — one elution group at a time + 2. Aggregator reuse — reuse allocations across sequential queries + 3. 
Streaming iterator — iterator-in, iterator-out with chunked parallelism + +Usage: + uv run examples/streaming_example.py +""" + +import sys +import time + +import numpy as np +import timsquery_pyo3 as tq + + +# --------------------------------------------------------------------------- +# Example elution groups (from the timsquery_cli templates) +# --------------------------------------------------------------------------- + +EXAMPLE_ELUTION_GROUPS = [ + dict( + id=0, + precursor_mz=723.844601280237, + precursor_charge=2, + rt_seconds=302.2712, + mobility=0.9851410984992981, + fragment_mzs=[147.1128, 74.06004, 248.1604, 124.58387, 347.22889, 174.11808, 418.26601], + fragment_labels=[0, 1, 2, 3, 4, 5, 6], + precursor_labels=[0, 1, 2], + ), + dict( + id=1, + precursor_mz=723.844601280237, + precursor_charge=1, + rt_seconds=354.2712, + mobility=0.9851410984992981, + fragment_mzs=[147.1128, 74.06004, 248.1604, 124.58387, 347.22889, 174.11808, 418.26601], + fragment_labels=[0, 1, 2, 3, 4, 5, 6], + precursor_labels=[0], + ), +] + + +def make_elution_groups(n: int): + """Generate n elution groups by cycling through the templates with shifted RTs.""" + for i in range(n): + template = EXAMPLE_ELUTION_GROUPS[i % len(EXAMPLE_ELUTION_GROUPS)] + yield tq.PyElutionGroup( + id=i, + precursor_mz=template["precursor_mz"], + precursor_charge=template["precursor_charge"], + # Shift RT slightly for each group so they aren't identical + rt_seconds=template["rt_seconds"] + (i * 0.5), + mobility=template["mobility"], + fragment_mzs=template["fragment_mzs"], + fragment_labels=template["fragment_labels"], + precursor_labels=template["precursor_labels"], + ) + + +def main(): + if len(sys.argv) < 2: + print(__doc__) + print("Error: please provide a path to a .d or .d.idx file.") + sys.exit(1) + + data_path = sys.argv[1] + n_queries = 200 + + # ------------------------------------------------------------------ + # Load the index + # 
------------------------------------------------------------------ + print(f"Loading index from: {data_path}") + t0 = time.perf_counter() + index = tq.PyTimsIndex(data_path) + print(f" loaded in {time.perf_counter() - t0:.2f}s ({index})") + + # ------------------------------------------------------------------ + # Set up tolerances — narrow search window + # ------------------------------------------------------------------ + tolerance = tq.PyTolerance( + mz=tq.PyMzTolerance.ppm(10.0, 10.0), + rt=tq.PyRtTolerance.minutes(0.5, 0.5), + mobility=tq.PyMobilityTolerance.pct(5.0, 5.0), + quad=tq.PyQuadTolerance.absolute(1.05, 1.05), + ) + + # ------------------------------------------------------------------ + # Mode 1: Single queries + # ------------------------------------------------------------------ + print(f"\n--- Mode 1: Single queries ({n_queries} queries) ---") + egs = list(make_elution_groups(n_queries)) + + t0 = time.perf_counter() + for eg in egs: + result = index.query_chromatogram(eg, tolerance) + dt = time.perf_counter() - t0 + print(f" {dt:.3f}s total, {dt / n_queries * 1000:.2f}ms per query") + print(f" last result: {result}") + + # ------------------------------------------------------------------ + # Mode 2: Aggregator reuse (query_chromatogram_into) + # ------------------------------------------------------------------ + print(f"\n--- Mode 2: Aggregator reuse ({n_queries} queries) ---") + + t0 = time.perf_counter() + result = index.query_chromatogram(egs[0], tolerance) + for eg in egs[1:]: + index.query_chromatogram_into(result, eg, tolerance) + dt = time.perf_counter() - t0 + print(f" {dt:.3f}s total, {dt / n_queries * 1000:.2f}ms per query") + print(f" last result: {result}") + + # ------------------------------------------------------------------ + # Mode 3: Streaming iterator (query_chromatograms_iter) + # ------------------------------------------------------------------ + print(f"\n--- Mode 3: Streaming iterator ({n_queries} queries, 
chunk_size=64) ---") + + t0 = time.perf_counter() + total_signal = 0.0 + count = 0 + for arrays in index.query_chromatograms_iter( + make_elution_groups(n_queries), tolerance, chunk_size=64 + ): + total_signal += arrays.fragment_intensities.sum() + count += 1 + dt = time.perf_counter() - t0 + print(f" {dt:.3f}s total, {dt / n_queries * 1000:.2f}ms per query") + print(f" yielded {count} results, total fragment signal: {total_signal:.1f}") + + # ------------------------------------------------------------------ + # Inspect one result in detail + # ------------------------------------------------------------------ + print("\n--- Inspecting a single result ---") + eg = egs[0] + result = index.query_chromatogram(eg, tolerance) + print(f" Elution group: {eg}") + print(f" Result: {result}") + print(f" Precursor shape: {result.precursor_intensities.shape}") + print(f" Fragment shape: {result.fragment_intensities.shape}") + print(f" Precursor labels: {result.precursor_labels}") + print(f" Fragment labels: {result.fragment_labels}") + print(f" RT range (ms): {result.rt_range_ms}") + print(f" Num cycles: {result.num_cycles}") + print(f" Fragment TIC per ion: {result.fragment_intensities.sum(axis=1)}") + + +if __name__ == "__main__": + main() diff --git a/python/timsquery_pyo3/pyproject.toml b/python/timsquery_pyo3/pyproject.toml new file mode 100644 index 0000000..6577d5f --- /dev/null +++ b/python/timsquery_pyo3/pyproject.toml @@ -0,0 +1,16 @@ +[build-system] +requires = ["maturin>=1.0,<2.0"] +build-backend = "maturin" + +[project] +name = "timsquery_pyo3" +requires-python = ">=3.9" +classifiers = [ + "Programming Language :: Rust", + "Programming Language :: Python :: Implementation :: CPython", +] +dynamic = ["version"] + +[tool.maturin] +features = ["pyo3/extension-module"] +manifest-path = "Cargo.toml" diff --git a/python/timsquery_pyo3/src/chromatogram.rs b/python/timsquery_pyo3/src/chromatogram.rs new file mode 100644 index 0000000..9363123 --- /dev/null +++ 
b/python/timsquery_pyo3/src/chromatogram.rs @@ -0,0 +1,82 @@ +use numpy::PyArray2; +use pyo3::prelude::*; +use timsquery::ChromatogramCollector; + +use crate::numpy_utils::array2d_to_numpy; + +/// Result of a chromatogram query. +/// +/// Contains 2D numpy arrays for precursor and fragment intensity traces +/// across retention time cycles. Shaped (n_ions, n_cycles). +#[pyclass] +pub struct PyChromatogramResult { + pub(crate) collector: ChromatogramCollector, +} + +impl PyChromatogramResult { + pub fn new(collector: ChromatogramCollector) -> Self { + Self { collector } + } +} + +#[pymethods] +impl PyChromatogramResult { + #[getter] + fn precursor_intensities<'py>(&self, py: Python<'py>) -> PyResult>> { + array2d_to_numpy(py, &self.collector.precursors.arr) + } + + #[getter] + fn fragment_intensities<'py>(&self, py: Python<'py>) -> PyResult>> { + array2d_to_numpy(py, &self.collector.fragments.arr) + } + + #[getter] + fn precursor_labels(&self) -> Vec<(i8, f64)> { + self.collector + .precursors + .mz_order + .iter() + .map(|(k, mz)| (*k, *mz)) + .collect() + } + + #[getter] + fn fragment_labels(&self) -> Vec<(usize, f64)> { + self.collector + .fragments + .mz_order + .iter() + .map(|(k, mz)| (*k, *mz)) + .collect() + } + + #[getter] + fn rt_range_ms(&self) -> (u32, u32) { + let r = self.collector.rt_range_milis(); + (r.start(), r.end()) + } + + #[getter] + fn num_cycles(&self) -> usize { + self.collector.num_cycles() + } + + #[getter] + fn id(&self) -> u64 { + self.collector.eg.id() + } + + fn __repr__(&self) -> String { + format!( + "ChromatogramResult(id={}, precursors={}x{}, fragments={}x{}, rt_ms=({}, {}))", + self.collector.eg.id(), + self.collector.precursors.arr.nrows(), + self.collector.precursors.arr.ncols(), + self.collector.fragments.arr.nrows(), + self.collector.fragments.arr.ncols(), + self.collector.rt_range_milis().start(), + self.collector.rt_range_milis().end(), + ) + } +} diff --git a/python/timsquery_pyo3/src/elution_group.rs 
b/python/timsquery_pyo3/src/elution_group.rs new file mode 100644 index 0000000..fe482da --- /dev/null +++ b/python/timsquery_pyo3/src/elution_group.rs @@ -0,0 +1,109 @@ +use pyo3::prelude::*; +use timsquery::TimsElutionGroup; +use timsquery::tinyvec::tiny_vec; + +/// An elution group defines a query target: one precursor and its fragments. +/// +/// NOTE: Fragment labels are `usize` only in this binding. This is a deliberate +/// simplification — the Rust side is generic over `T: KeyLike` but we monomorphize +/// to `usize` here for a clean Python interface. Other key types (e.g. `IonAnnot`) +/// may be added in future versions. +#[pyclass] +#[derive(Debug, Clone)] +pub struct PyElutionGroup { + pub(crate) inner: TimsElutionGroup, +} + +#[pymethods] +impl PyElutionGroup { + /// Create a new ElutionGroup. + /// + /// Args: + /// id: Unique identifier. + /// precursor_mz: Monoisotopic precursor m/z. + /// precursor_charge: Charge state. + /// rt_seconds: Expected retention time in seconds. + /// mobility: Expected ion mobility (1/K0). + /// fragment_mzs: List of fragment m/z values. + /// fragment_labels: List of integer labels (one per fragment, same length as fragment_mzs). + /// precursor_labels: Isotope offset labels (e.g. [0, 1, -1] for M0, M+1, M-1). 
+ #[new] + #[pyo3(signature = (id, precursor_mz, precursor_charge, rt_seconds, mobility, fragment_mzs, fragment_labels, precursor_labels=None))] + fn new( + id: u64, + precursor_mz: f64, + precursor_charge: u8, + rt_seconds: f32, + mobility: f32, + fragment_mzs: Vec, + fragment_labels: Vec, + precursor_labels: Option>, + ) -> PyResult { + let precursor_labels_tv = match precursor_labels { + Some(labels) => labels.into_iter().collect(), + None => tiny_vec![0i8], + }; + let fragment_labels_tv = fragment_labels.into_iter().collect(); + + let eg = TimsElutionGroup::builder() + .id(id) + .precursor(precursor_mz, precursor_charge) + .mobility_ook0(mobility) + .rt_seconds(rt_seconds) + .fragment_mzs(fragment_mzs) + .fragment_labels(fragment_labels_tv) + .precursor_labels(precursor_labels_tv) + .try_build() + .map_err(|e| PyErr::new::(format!("{:?}", e)))?; + + Ok(Self { inner: eg }) + } + + #[getter] + fn id(&self) -> u64 { + self.inner.id() + } + + #[getter] + fn precursor_mz(&self) -> f64 { + self.inner.precursor_mz() + } + + #[getter] + fn precursor_charge(&self) -> u8 { + self.inner.precursor_charge() + } + + #[getter] + fn rt_seconds(&self) -> f32 { + self.inner.rt_seconds() + } + + #[getter] + fn mobility(&self) -> f32 { + self.inner.mobility_ook0() + } + + #[getter] + fn num_fragments(&self) -> usize { + self.inner.fragment_count() + } + + #[getter] + fn num_precursors(&self) -> usize { + self.inner.precursor_count() + } + + fn __repr__(&self) -> String { + format!( + "ElutionGroup(id={}, mz={:.4}, charge={}, rt={:.1}s, mob={:.3}, frags={}, precs={})", + self.inner.id(), + self.inner.precursor_mz(), + self.inner.precursor_charge(), + self.inner.rt_seconds(), + self.inner.mobility_ook0(), + self.inner.fragment_count(), + self.inner.precursor_count(), + ) + } +} diff --git a/python/timsquery_pyo3/src/index.rs b/python/timsquery_pyo3/src/index.rs new file mode 100644 index 0000000..d49f427 --- /dev/null +++ b/python/timsquery_pyo3/src/index.rs @@ -0,0 +1,382 @@ +use 
std::sync::Arc; + +use pyo3::prelude::*; +use timscentroid::rt_mapping::RTIndex; +use timsquery::serde::IndexedPeaksHandle; +use timsquery::traits::queriable_data::QueriableData; +use timsquery::{ + ChromatogramCollector, MzMobilityStatsCollector, OptionallyRestricted, SpectralCollector, + Tolerance, +}; + +use crate::chromatogram::PyChromatogramResult; +use crate::elution_group::PyElutionGroup; +use crate::iterator::PyChromatogramIterator; +use crate::spectrum::{PyMzMobilityResult, PySpectralResult}; +use crate::tolerance::PyTolerance; + +/// Compute the RT range in milliseconds for a chromatogram query. +/// +/// If the tolerance is restricted, returns the tolerance-derived range. +/// If unrestricted, falls back to the full acquisition RT range from the +/// cycle mapping (giving a chromatogram spanning the entire run). +pub(crate) fn rt_range_ms_for_chromatogram( + tol: &Tolerance, + rt_seconds: f32, + handle: &IndexedPeaksHandle, +) -> PyResult> { + match tol.rt_range_as_milis(rt_seconds) { + OptionallyRestricted::Restricted(range) => Ok(range), + OptionallyRestricted::Unrestricted => { + let (start, end) = handle.ms1_cycle_mapping().range_milis(); + timsquery::TupleRange::try_new(start, end).map_err(|e| { + PyErr::new::(format!( + "Empty RT range in index: {:?}", + e + )) + }) + } + } +} + +/// Resolved tolerances: either one shared or one per query. +pub(crate) enum ResolvedTolerances { + Single(Tolerance), + PerQuery(Vec), +} + +impl ResolvedTolerances { + /// Extract from a Python object: either a PyTolerance or a list of PyTolerance. 
+ pub fn from_py(py: Python<'_>, obj: &PyObject, expected_len: Option) -> PyResult { + // Try single PyTolerance first + if let Ok(tol) = obj.extract::>(py) { + return Ok(Self::Single(tol.inner.clone())); + } + + // Try list of PyTolerance + let list: Vec> = obj.extract(py).map_err(|_| { + PyErr::new::( + "tolerance must be a PyTolerance or a list of PyTolerance", + ) + })?; + + if let Some(expected) = expected_len { + if list.len() != expected { + return Err(PyErr::new::(format!( + "tolerance list length ({}) must match elution_groups length ({})", + list.len(), + expected, + ))); + } + } + + Ok(Self::PerQuery( + list.iter().map(|t| t.inner.clone()).collect(), + )) + } + + /// Get the tolerance for query at index i. + pub fn get(&self, i: usize) -> &Tolerance { + match self { + Self::Single(tol) => tol, + Self::PerQuery(tols) => &tols[i], + } + } +} + +/// A loaded timsTOF index, ready for querying. +/// +/// Wraps either an eager (fully in-memory) or lazy (on-demand parquet) +/// indexed peaks handle. +#[pyclass] +pub struct PyTimsIndex { + pub(crate) handle: Arc, +} + +#[pymethods] +impl PyTimsIndex { + /// Load a timsTOF index from a .d directory or .d.idx cache. + /// + /// Args: + /// path: Path to the .d file or .d.idx cached index. + /// prefer_lazy: If True, prefer lazy loading for cached indexes (default: False). + #[new] + #[pyo3(signature = (path, prefer_lazy=false))] + fn new(path: &str, prefer_lazy: bool) -> PyResult { + let config = timsquery::serde::IndexLoadConfig { + prefer_lazy, + ..Default::default() + }; + let handle = timsquery::serde::load_index_auto(path, Some(config)) + .map_err(|e| PyErr::new::(format!("{:?}", e)))?; + Ok(Self { + handle: Arc::new(handle), + }) + } + + /// Query a single elution group and return a ChromatogramResult. + /// + /// Args: + /// elution_group: The query target. + /// tolerance: Search tolerances. + /// + /// Returns: + /// ChromatogramResult with precursor and fragment intensity arrays. 
+ fn query_chromatogram( + &self, + elution_group: &PyElutionGroup, + tolerance: &PyTolerance, + ) -> PyResult { + let eg = elution_group.inner.clone(); + let tol = &tolerance.inner; + let rt_range_ms = rt_range_ms_for_chromatogram(tol, eg.rt_seconds(), &self.handle)?; + let ref_rt = self.handle.ms1_cycle_mapping(); + + let mut collector = ChromatogramCollector::::new(eg, rt_range_ms, ref_rt) + .map_err(|e| PyErr::new::(format!("{:?}", e)))?; + + self.handle.add_query(&mut collector, tol); + + Ok(PyChromatogramResult::new(collector)) + } + + /// Re-query into an existing ChromatogramResult, reusing its allocation. + /// + /// The internal arrays are reset and refilled. The Vec capacity is preserved, + /// so repeated calls with similarly-sized elution groups avoid reallocation. + /// + /// Args: + /// result: A previously returned ChromatogramResult (mutated in place). + /// elution_group: The new query target. + /// tolerance: Search tolerances. + fn query_chromatogram_into( + &self, + result: &mut PyChromatogramResult, + elution_group: &PyElutionGroup, + tolerance: &PyTolerance, + ) -> PyResult<()> { + let eg = elution_group.inner.clone(); + let tol = &tolerance.inner; + let rt_range_ms = rt_range_ms_for_chromatogram(tol, eg.rt_seconds(), &self.handle)?; + let ref_rt = self.handle.ms1_cycle_mapping(); + + result + .collector + .try_reset_with(eg, rt_range_ms, ref_rt) + .map_err(|e| PyErr::new::(format!("{:?}", e)))?; + + self.handle.add_query(&mut result.collector, tol); + + Ok(()) + } + + /// Query a single elution group for total summed intensity per ion. + /// + /// Returns one f32 intensity value per precursor/fragment (no RT dimension). + /// + /// Args: + /// elution_group: The query target. + /// tolerance: Search tolerances. 
+ fn query_spectrum( + &self, + elution_group: &PyElutionGroup, + tolerance: &PyTolerance, + ) -> PyResult { + let eg = elution_group.inner.clone(); + let tol = &tolerance.inner; + let mut collector = SpectralCollector::::new(eg); + self.handle.add_query(&mut collector, tol); + Ok(PySpectralResult::new(collector)) + } + + /// Query a single elution group for intensity-weighted mean m/z and mobility. + /// + /// Returns (weight, mean_mz, mean_mobility) per precursor/fragment. + /// NaN values indicate no peaks were found for that ion. + /// + /// Args: + /// elution_group: The query target. + /// tolerance: Search tolerances. + fn query_mz_mobility( + &self, + elution_group: &PyElutionGroup, + tolerance: &PyTolerance, + ) -> PyResult { + let eg = elution_group.inner.clone(); + let tol = &tolerance.inner; + let mut collector = SpectralCollector::::new(eg); + self.handle.add_query(&mut collector, tol); + Ok(PyMzMobilityResult::new(collector)) + } + + /// Query multiple elution groups in parallel (via rayon). + /// + /// Args: + /// elution_groups: List of query targets. + /// tolerance: A single PyTolerance (shared) or a list of PyTolerance + /// (one per elution group, must match length). + /// + /// Returns: + /// List of ChromatogramResult, one per elution group. 
+ fn query_chromatograms_batch( + &self, + py: Python<'_>, + elution_groups: Vec>, + tolerance: PyObject, + ) -> PyResult> { + let n = elution_groups.len(); + let tolerances = ResolvedTolerances::from_py(py, &tolerance, Some(n))?; + let ref_rt = self.handle.ms1_cycle_mapping(); + + let mut collectors: Vec> = elution_groups + .iter() + .enumerate() + .map(|(i, eg)| { + let inner = eg.inner.clone(); + let tol = tolerances.get(i); + let rt_range_ms = + rt_range_ms_for_chromatogram(tol, inner.rt_seconds(), &self.handle)?; + ChromatogramCollector::::new(inner, rt_range_ms, ref_rt).map_err(|e| { + PyErr::new::(format!("{:?}", e)) + }) + }) + .collect::>>()?; + + // Release GIL for the parallel query work + let handle = &*self.handle; + py.allow_threads(|| match &tolerances { + ResolvedTolerances::Single(tol) => { + handle.par_add_query_multi( + &mut collectors[..], + rayon::iter::repeat_n(tol, n), + ); + } + ResolvedTolerances::PerQuery(tols) => { + handle.par_add_query_multi(&mut collectors[..], &tols[..]); + } + }); + + Ok(collectors + .into_iter() + .map(PyChromatogramResult::new) + .collect()) + } + + /// Streaming query over an iterator of elution groups. + /// + /// Internally reuses collector allocations and processes queries in parallel + /// chunks via rayon. Yields lightweight ChromatogramArrays with owned numpy + /// arrays. + /// + /// Args: + /// elution_groups: Any Python iterable of ElutionGroup objects. + /// tolerance: A single PyTolerance (shared) or a Python iterable of + /// PyTolerance (one per elution group, consumed in lockstep). + /// chunk_size: Number of queries per parallel batch (default: 256). + /// + /// Returns: + /// An iterator yielding ChromatogramArrays. 
+ #[pyo3(signature = (elution_groups, tolerance, chunk_size=None))] + fn query_chromatograms_iter( + &self, + py: Python<'_>, + elution_groups: PyObject, + tolerance: PyObject, + chunk_size: Option, + ) -> PyResult { + let eg_iter = elution_groups.call_method0(py, "__iter__")?; + + // Check if tolerance is a single PyTolerance or an iterable + let tol_source = match tolerance.extract::>(py) { + Ok(tol_ref) => { + crate::iterator::ToleranceSource::Single(tol_ref.inner.clone()) + } + Err(_) => { + let tol_iter = tolerance.call_method0(py, "__iter__").map_err(|_| { + PyErr::new::( + "tolerance must be a PyTolerance or an iterable of PyTolerance", + ) + })?; + crate::iterator::ToleranceSource::PerQuery(tol_iter) + } + }; + + Ok(PyChromatogramIterator::new( + Arc::clone(&self.handle), + tol_source, + eg_iter, + chunk_size.unwrap_or(256), + )) + } + + /// Convert a retention time in seconds to the nearest cycle index. + fn rt_seconds_to_cycle_index(&self, rt_seconds: f32) -> usize { + let rt_ms = (rt_seconds * 1000.0) as u32; + self.handle + .ms1_cycle_mapping() + .ms_to_closest_index(rt_ms) + .index() + } + + /// Convert a cycle index to retention time in milliseconds. + /// + /// Raises IndexError if the index is out of bounds. + fn cycle_index_to_rt_ms(&self, index: u32) -> PyResult { + use timscentroid::rt_mapping::MS1CycleIndex; + let idx = MS1CycleIndex::new(index); + self.handle + .ms1_cycle_mapping() + .rt_milis_for_index(&idx) + .map_err(|_| { + PyErr::new::(format!( + "Cycle index {} out of bounds (num_cycles={})", + index, + self.handle.ms1_cycle_mapping().len(), + )) + }) + } + + /// All cycle retention times in milliseconds, as a list. + /// + /// Index i corresponds to cycle i. Useful for building an RT axis + /// to align with chromatogram arrays. 
+ #[getter] + fn rt_values_ms(&self) -> Vec { + let mapping = self.handle.ms1_cycle_mapping(); + (0..mapping.len() as u32) + .map(|i| { + use timscentroid::rt_mapping::MS1CycleIndex; + mapping + .rt_milis_for_index(&MS1CycleIndex::new(i)) + .unwrap() + }) + .collect() + } + + /// Total number of MS1 cycles in the acquisition. + #[getter] + fn num_cycles(&self) -> usize { + self.handle.ms1_cycle_mapping().len() + } + + /// Full acquisition RT range as (start_ms, end_ms). + #[getter] + fn rt_range_ms(&self) -> (u32, u32) { + self.handle.ms1_cycle_mapping().range_milis() + } + + /// Whether this index is lazily loaded. + #[getter] + fn is_lazy(&self) -> bool { + self.handle.is_lazy() + } + + fn __repr__(&self) -> String { + let mode = if self.handle.is_lazy() { + "lazy" + } else { + "eager" + }; + format!("TimsIndex(mode={})", mode) + } +} diff --git a/python/timsquery_pyo3/src/iterator.rs b/python/timsquery_pyo3/src/iterator.rs new file mode 100644 index 0000000..557f84a --- /dev/null +++ b/python/timsquery_pyo3/src/iterator.rs @@ -0,0 +1,221 @@ +use std::collections::VecDeque; +use std::sync::Arc; + +use pyo3::prelude::*; +use timsquery::serde::IndexedPeaksHandle; +use timsquery::traits::queriable_data::QueriableData; +use timsquery::{ChromatogramCollector, Tolerance}; + +use crate::elution_group::PyElutionGroup; +use crate::index::rt_range_ms_for_chromatogram; +use crate::numpy_utils::array2d_to_numpy; +use crate::tolerance::PyTolerance; + +/// Source of tolerances: either one shared or one per query from a Python iterator. +pub enum ToleranceSource { + Single(Tolerance), + PerQuery(PyObject), +} + +/// Lightweight result yielded by the streaming iterator. +/// +/// Owns materialized numpy arrays and metadata. The iterator's internal +/// collector pool is never exposed — it reuses Rust-side buffers across chunks. 
+#[pyclass(frozen)] +pub struct PyChromatogramArrays { + #[pyo3(get)] + id: u64, + precursor_intensities: PyObject, + fragment_intensities: PyObject, + #[pyo3(get)] + precursor_labels: Vec<(i8, f64)>, + #[pyo3(get)] + fragment_labels: Vec<(usize, f64)>, + #[pyo3(get)] + rt_range_ms: (u32, u32), + #[pyo3(get)] + num_cycles: usize, +} + +#[pymethods] +impl PyChromatogramArrays { + #[getter] + fn precursor_intensities<'py>(&self, py: Python<'py>) -> Bound<'py, PyAny> { + self.precursor_intensities.clone_ref(py).into_bound(py) + } + + #[getter] + fn fragment_intensities<'py>(&self, py: Python<'py>) -> Bound<'py, PyAny> { + self.fragment_intensities.clone_ref(py).into_bound(py) + } + + fn __repr__(&self) -> String { + format!( + "ChromatogramArrays(id={}, precursors={}, fragments={}, cycles={})", + self.id, + self.precursor_labels.len(), + self.fragment_labels.len(), + self.num_cycles, + ) + } +} + +fn extract_arrays( + py: Python<'_>, + collector: &ChromatogramCollector, +) -> PyResult { + let prec_np = array2d_to_numpy(py, &collector.precursors.arr)?; + let frag_np = array2d_to_numpy(py, &collector.fragments.arr)?; + let rt = collector.rt_range_milis(); + + Ok(PyChromatogramArrays { + id: collector.eg.id(), + precursor_intensities: prec_np.into_any().unbind(), + fragment_intensities: frag_np.into_any().unbind(), + precursor_labels: collector + .precursors + .mz_order + .iter() + .map(|(k, mz)| (*k, *mz)) + .collect(), + fragment_labels: collector + .fragments + .mz_order + .iter() + .map(|(k, mz)| (*k, *mz)) + .collect(), + rt_range_ms: (rt.start(), rt.end()), + num_cycles: collector.num_cycles(), + }) +} + +/// Streaming chromatogram iterator with internal collector reuse. 
+#[pyclass] +pub struct PyChromatogramIterator { + handle: Arc, + tol_source: ToleranceSource, + eg_source: PyObject, + pool: Vec>, + chunk_tolerances: Vec, + buffer: VecDeque, + chunk_size: usize, + exhausted: bool, +} + +impl PyChromatogramIterator { + pub fn new( + handle: Arc, + tol_source: ToleranceSource, + eg_source: PyObject, + chunk_size: usize, + ) -> Self { + Self { + handle, + tol_source, + eg_source, + pool: Vec::with_capacity(chunk_size), + chunk_tolerances: Vec::with_capacity(chunk_size), + buffer: VecDeque::with_capacity(chunk_size), + chunk_size, + exhausted: false, + } + } + + fn fill_buffer(&mut self, py: Python<'_>) -> PyResult<()> { + let ref_rt = self.handle.ms1_cycle_mapping(); + let mut n_this_chunk = 0; + self.chunk_tolerances.clear(); + + for i in 0..self.chunk_size { + let next_result = self.eg_source.call_method0(py, "__next__"); + match next_result { + Ok(obj) => { + let eg_ref: PyRef<'_, PyElutionGroup> = obj.extract(py)?; + let eg = eg_ref.inner.clone(); + + let tol = match &self.tol_source { + ToleranceSource::Single(t) => t.clone(), + ToleranceSource::PerQuery(iter_obj) => { + let tol_obj = iter_obj.call_method0(py, "__next__").map_err(|e| { + if e.is_instance_of::(py) { + PyErr::new::( + "tolerance iterator exhausted before elution_groups iterator", + ) + } else { + e + } + })?; + let tol_ref: PyRef<'_, PyTolerance> = tol_obj.extract(py)?; + tol_ref.inner.clone() + } + }; + + let rt_range_ms = + rt_range_ms_for_chromatogram(&tol, eg.rt_seconds(), &self.handle)?; + + if i < self.pool.len() { + self.pool[i] + .try_reset_with(eg, rt_range_ms, ref_rt) + .map_err(|e| { + PyErr::new::(format!("{e:?}")) + })?; + } else { + let collector = + ChromatogramCollector::::new(eg, rt_range_ms, ref_rt) + .map_err(|e| { + PyErr::new::(format!( + "{e:?}" + )) + })?; + self.pool.push(collector); + } + self.chunk_tolerances.push(tol); + n_this_chunk += 1; + } + Err(err) if err.is_instance_of::(py) => { + self.exhausted = true; + break; + } + Err(err) 
=> return Err(err), + } + } + + if n_this_chunk == 0 { + return Ok(()); + } + + // Release GIL for the parallel query work + let pool_slice = &mut self.pool[..n_this_chunk]; + let tol_slice = &self.chunk_tolerances[..]; + let handle = &self.handle; + py.allow_threads(|| { + handle.par_add_query_multi(pool_slice, tol_slice); + }); + + for collector in &self.pool[..n_this_chunk] { + self.buffer.push_back(extract_arrays(py, collector)?); + } + + Ok(()) + } +} + +#[pymethods] +impl PyChromatogramIterator { + fn __iter__(slf: PyRef<'_, Self>) -> PyRef<'_, Self> { + slf + } + + fn __next__(&mut self, py: Python<'_>) -> PyResult> { + if let Some(arrays) = self.buffer.pop_front() { + return Ok(Some(arrays)); + } + + if self.exhausted { + return Ok(None); + } + + self.fill_buffer(py)?; + Ok(self.buffer.pop_front()) + } +} diff --git a/python/timsquery_pyo3/src/lib.rs b/python/timsquery_pyo3/src/lib.rs new file mode 100644 index 0000000..a74908a --- /dev/null +++ b/python/timsquery_pyo3/src/lib.rs @@ -0,0 +1,52 @@ +//! PyO3 bindings for timsquery. +//! +//! This crate exposes a Python-friendly interface to the timsquery library +//! for querying timsTOF mass spectrometry data. +//! +//! # Key simplifications +//! +//! - **Fragment key type is `usize` only.** The Rust library is generic over +//! `T: KeyLike`, but this binding monomorphizes to `usize` for simplicity. +//! Other key types (e.g. `IonAnnot`) may be added in future versions. +//! +//! - **Intensity type is `f32` only.** Chromatogram and spectral (f32) results +//! use `f32` intensities. The MzMobility variant uses `MzMobilityStatsCollector`. 
+ +mod chromatogram; +mod elution_group; +mod index; +pub(crate) mod iterator; +mod numpy_utils; +mod spectrum; +mod tolerance; + +use pyo3::prelude::*; + +#[pymodule] +fn timsquery_pyo3(m: &Bound<'_, PyModule>) -> PyResult<()> { + // Tolerance types + m.add_class::()?; + m.add_class::()?; + m.add_class::()?; + m.add_class::()?; + m.add_class::()?; + + // Query definition + m.add_class::()?; + + // Index + m.add_class::()?; + + // Chromatogram results + m.add_class::()?; + + // Spectral results + m.add_class::()?; + m.add_class::()?; + + // Streaming iterator types + m.add_class::()?; + m.add_class::()?; + + Ok(()) +} diff --git a/python/timsquery_pyo3/src/numpy_utils.rs b/python/timsquery_pyo3/src/numpy_utils.rs new file mode 100644 index 0000000..796d148 --- /dev/null +++ b/python/timsquery_pyo3/src/numpy_utils.rs @@ -0,0 +1,12 @@ +use numpy::{PyArray1, PyArray2, PyArrayMethods}; +use pyo3::prelude::*; +use timsquery::Array2D; + +/// Copy an Array2D into a new numpy 2D array (safe, no unsafe blocks). +pub fn array2d_to_numpy<'py>( + py: Python<'py>, + arr: &Array2D, +) -> PyResult>> { + let flat = PyArray1::from_slice(py, arr.as_flat_slice()); + flat.reshape((arr.nrows(), arr.ncols())) +} diff --git a/python/timsquery_pyo3/src/spectrum.rs b/python/timsquery_pyo3/src/spectrum.rs new file mode 100644 index 0000000..a289d5c --- /dev/null +++ b/python/timsquery_pyo3/src/spectrum.rs @@ -0,0 +1,165 @@ +use pyo3::prelude::*; +use timsquery::{MzMobilityStatsCollector, SpectralCollector}; + +/// Result of a spectral query — total summed intensity per ion. +/// +/// Each precursor/fragment gets a single f32 intensity value (summed +/// across all matching peaks within the tolerance window). +/// +/// NOTE: Uses `usize` fragment keys and `f32` intensities. 
+#[pyclass(frozen)] +pub struct PySpectralResult { + collector: SpectralCollector, +} + +impl PySpectralResult { + pub fn new(collector: SpectralCollector) -> Self { + Self { collector } + } +} + +#[pymethods] +impl PySpectralResult { + /// Total intensity per precursor isotope. + #[getter] + fn precursor_intensities(&self) -> Vec { + self.collector + .iter_precursors() + .map(|(_, val)| *val) + .collect() + } + + /// Total intensity per fragment ion. + #[getter] + fn fragment_intensities(&self) -> Vec { + self.collector + .iter_fragments() + .map(|(_, val)| *val) + .collect() + } + + /// List of (isotope_label, mz) tuples for each precursor. + #[getter] + fn precursor_labels(&self) -> Vec<(i8, f64)> { + self.collector + .iter_precursors() + .map(|((label, mz), _)| (label, mz)) + .collect() + } + + /// List of (fragment_label, mz) tuples for each fragment. + #[getter] + fn fragment_labels(&self) -> Vec<(usize, f64)> { + self.collector + .iter_fragments() + .map(|((label, mz), _)| (*label, *mz)) + .collect() + } + + /// The elution group id. + #[getter] + fn id(&self) -> u64 { + self.collector.eg.id() + } + + fn __repr__(&self) -> String { + let prec_sum: f32 = self.collector.iter_precursors().map(|(_, v)| v).sum(); + let frag_sum: f32 = self.collector.iter_fragments().map(|(_, v)| v).sum(); + format!( + "SpectralResult(id={}, precursors={}, fragments={}, prec_total={:.1}, frag_total={:.1})", + self.collector.eg.id(), + self.collector.eg.precursor_count(), + self.collector.eg.fragment_count(), + prec_sum, + frag_sum, + ) + } +} + +/// Stats for a single ion from MzMobilityStatsCollector. +/// +/// Exposed as a tuple: (weight, mean_mz, mean_mobility). +/// If no peaks were found, mean_mz and mean_mobility are NaN. +fn stats_to_tuple(stats: &MzMobilityStatsCollector) -> (f64, f64, f64) { + ( + stats.weight(), + stats.mean_mz().unwrap_or(f64::NAN), + stats.mean_mobility().unwrap_or(f64::NAN), + ) +} + +/// Result of an m/z + mobility stats query. 
+/// +/// Each precursor/fragment gets intensity-weighted running statistics: +/// - weight: total accumulated intensity +/// - mean_mz: intensity-weighted mean m/z +/// - mean_mobility: intensity-weighted mean ion mobility (1/K0) +/// +/// Stats are returned as (weight, mean_mz, mean_mobility) tuples. +/// NaN values indicate no peaks were found for that ion. +/// +/// NOTE: Uses `usize` fragment keys. +#[pyclass(frozen)] +pub struct PyMzMobilityResult { + collector: SpectralCollector, +} + +impl PyMzMobilityResult { + pub fn new(collector: SpectralCollector) -> Self { + Self { collector } + } +} + +#[pymethods] +impl PyMzMobilityResult { + /// Stats per precursor isotope: list of (weight, mean_mz, mean_mobility). + #[getter] + fn precursor_stats(&self) -> Vec<(f64, f64, f64)> { + self.collector + .iter_precursors() + .map(|(_, stats)| stats_to_tuple(stats)) + .collect() + } + + /// Stats per fragment ion: list of (weight, mean_mz, mean_mobility). + #[getter] + fn fragment_stats(&self) -> Vec<(f64, f64, f64)> { + self.collector + .iter_fragments() + .map(|(_, stats)| stats_to_tuple(stats)) + .collect() + } + + /// List of (isotope_label, mz) tuples for each precursor. + #[getter] + fn precursor_labels(&self) -> Vec<(i8, f64)> { + self.collector + .iter_precursors() + .map(|((label, mz), _)| (label, mz)) + .collect() + } + + /// List of (fragment_label, mz) tuples for each fragment. + #[getter] + fn fragment_labels(&self) -> Vec<(usize, f64)> { + self.collector + .iter_fragments() + .map(|((label, mz), _)| (*label, *mz)) + .collect() + } + + /// The elution group id. 
+ #[getter] + fn id(&self) -> u64 { + self.collector.eg.id() + } + + fn __repr__(&self) -> String { + format!( + "MzMobilityResult(id={}, precursors={}, fragments={})", + self.collector.eg.id(), + self.collector.eg.precursor_count(), + self.collector.eg.fragment_count(), + ) + } +} diff --git a/python/timsquery_pyo3/src/tolerance.rs b/python/timsquery_pyo3/src/tolerance.rs new file mode 100644 index 0000000..e4bbe60 --- /dev/null +++ b/python/timsquery_pyo3/src/tolerance.rs @@ -0,0 +1,204 @@ +use pyo3::prelude::*; + +#[pyclass(frozen)] +#[derive(Debug, Clone)] +pub struct PyMzTolerance { + pub(crate) inner: timsquery::models::tolerance::MzTolerance, +} + +#[pymethods] +impl PyMzTolerance { + /// Create a parts-per-million tolerance: ±(low_ppm, high_ppm). + #[staticmethod] + fn ppm(low: f64, high: f64) -> Self { + Self { + inner: timsquery::models::tolerance::MzTolerance::Ppm((low, high)), + } + } + + /// Create an absolute (dalton) tolerance: ±(low_da, high_da). + #[staticmethod] + fn absolute(low: f64, high: f64) -> Self { + Self { + inner: timsquery::models::tolerance::MzTolerance::Absolute((low, high)), + } + } + + fn __repr__(&self) -> String { + format!("{:?}", self.inner) + } +} + +#[pyclass(frozen)] +#[derive(Debug, Clone)] +pub struct PyRtTolerance { + pub(crate) inner: timsquery::models::tolerance::RtTolerance, +} + +#[pymethods] +impl PyRtTolerance { + /// Create a fixed-minutes tolerance: ±(low_min, high_min). + #[staticmethod] + fn minutes(low: f32, high: f32) -> Self { + Self { + inner: timsquery::models::tolerance::RtTolerance::Minutes((low, high)), + } + } + + /// Create a percentage tolerance: ±(low_pct, high_pct). + #[staticmethod] + fn pct(low: f32, high: f32) -> Self { + Self { + inner: timsquery::models::tolerance::RtTolerance::Pct((low, high)), + } + } + + /// No restriction on retention time. 
+ #[staticmethod] + fn unrestricted() -> Self { + Self { + inner: timsquery::models::tolerance::RtTolerance::Unrestricted, + } + } + + fn __repr__(&self) -> String { + format!("{:?}", self.inner) + } +} + +#[pyclass(frozen)] +#[derive(Debug, Clone)] +pub struct PyMobilityTolerance { + pub(crate) inner: timsquery::models::tolerance::MobilityTolerance, +} + +#[pymethods] +impl PyMobilityTolerance { + /// Create an absolute (1/K0) tolerance: ±(low, high). + #[staticmethod] + fn absolute(low: f32, high: f32) -> Self { + Self { + inner: timsquery::models::tolerance::MobilityTolerance::Absolute((low, high)), + } + } + + /// Create a percentage tolerance: ±(low_pct, high_pct). + #[staticmethod] + fn pct(low: f32, high: f32) -> Self { + Self { + inner: timsquery::models::tolerance::MobilityTolerance::Pct((low, high)), + } + } + + /// No restriction on ion mobility. + #[staticmethod] + fn unrestricted() -> Self { + Self { + inner: timsquery::models::tolerance::MobilityTolerance::Unrestricted, + } + } + + fn __repr__(&self) -> String { + format!("{:?}", self.inner) + } +} + +#[pyclass(frozen)] +#[derive(Debug, Clone)] +pub struct PyQuadTolerance { + pub(crate) inner: timsquery::models::tolerance::QuadTolerance, +} + +#[pymethods] +impl PyQuadTolerance { + /// Create an absolute (dalton) quadrupole tolerance: ±(low_da, high_da). + #[staticmethod] + fn absolute(low: f32, high: f32) -> Self { + Self { + inner: timsquery::models::tolerance::QuadTolerance::Absolute((low, high)), + } + } + + fn __repr__(&self) -> String { + format!("{:?}", self.inner) + } +} + +#[pyclass(frozen)] +#[derive(Debug, Clone)] +pub struct PyTolerance { + pub(crate) inner: timsquery::Tolerance, +} + +#[pymethods] +impl PyTolerance { + /// Construct a Tolerance from per-dimension tolerance objects. 
+ #[new] + fn new( + mz: &PyMzTolerance, + rt: &PyRtTolerance, + mobility: &PyMobilityTolerance, + quad: &PyQuadTolerance, + ) -> Self { + Self { + inner: timsquery::Tolerance { + ms: mz.inner.clone(), + rt: rt.inner.clone(), + mobility: mobility.inner.clone(), + quad: quad.inner.clone(), + }, + } + } + + /// Default tolerance: 20 ppm m/z, ±5 min RT, 3% mobility, 0.1 Da quad. + #[staticmethod] + fn default() -> Self { + Self { + inner: timsquery::Tolerance::default(), + } + } + + /// Return a new Tolerance with the m/z tolerance replaced. + fn with_mz(&self, mz: &PyMzTolerance) -> Self { + Self { + inner: timsquery::Tolerance { + ms: mz.inner.clone(), + ..self.inner.clone() + }, + } + } + + /// Return a new Tolerance with the RT tolerance replaced. + fn with_rt(&self, rt: &PyRtTolerance) -> Self { + Self { + inner: timsquery::Tolerance { + rt: rt.inner.clone(), + ..self.inner.clone() + }, + } + } + + /// Return a new Tolerance with the mobility tolerance replaced. + fn with_mobility(&self, mobility: &PyMobilityTolerance) -> Self { + Self { + inner: timsquery::Tolerance { + mobility: mobility.inner.clone(), + ..self.inner.clone() + }, + } + } + + /// Return a new Tolerance with the quad tolerance replaced. 
+ fn with_quad(&self, quad: &PyQuadTolerance) -> Self { + Self { + inner: timsquery::Tolerance { + quad: quad.inner.clone(), + ..self.inner.clone() + }, + } + } + + fn __repr__(&self) -> String { + format!("{:?}", self.inner) + } +} diff --git a/rust/timsquery/src/models/base/arrays.rs b/rust/timsquery/src/models/base/arrays.rs index fb176f5..80a136d 100644 --- a/rust/timsquery/src/models/base/arrays.rs +++ b/rust/timsquery/src/models/base/arrays.rs @@ -281,6 +281,10 @@ impl Array2D { Ok(()) } + pub fn as_flat_slice(&self) -> &[T] { + &self.values + } + pub fn nrows(&self) -> usize { self.n_row } diff --git a/rust/timsquery/src/models/indexed_data.rs b/rust/timsquery/src/models/indexed_data.rs index a78e16a..b061255 100644 --- a/rust/timsquery/src/models/indexed_data.rs +++ b/rust/timsquery/src/models/indexed_data.rs @@ -455,6 +455,53 @@ impl QueriableData> for IndexedPeaks } } +impl QueriableData> for IndexedPeaksHandle { + fn add_query(&self, aggregator: &mut SpectralCollector, tolerance: &Tolerance) { + match self { + IndexedPeaksHandle::Eager(eager) => eager.add_query(aggregator, tolerance), + IndexedPeaksHandle::Lazy(lazy) => { + let ranges = QueryRanges::from_elution_group(aggregator, tolerance, |rt| { + lazy.rt_ms_to_cycle_index(rt) + }); + + let cycle_range_u32 = match ranges.ms1_cycle_range { + Restricted(x) => Restricted( + TupleRange::try_new(x.start().as_u32(), x.end().as_u32()).unwrap(), + ), + Unrestricted => Unrestricted, + }; + + aggregator + .iter_mut_precursors() + .for_each(|((_idx, mz), ion)| { + let mz_range = tolerance.mz_range_f32(mz as f32); + lazy.query_peaks_ms1(mz_range, cycle_range_u32, ranges.im_range) + .for_each(|peak| { + *ion += peak.intensity; + }); + }); + + aggregator + .iter_mut_fragments() + .for_each(|((_idx, mz), ion)| { + let mz_range = tolerance.mz_range_f32(*mz as f32); + let results = lazy.query_peaks_ms2( + ranges.quad_range, + mz_range, + cycle_range_u32, + ranges.im_range, + ); + for (_isolation_scheme, peaks) in 
results { + for peak in peaks { + *ion += peak.intensity; + } + } + }); + } + } + } +} + impl QueriableData> for IndexedPeaksHandle { diff --git a/rust/timsquery/src/traits/queriable_data.rs b/rust/timsquery/src/traits/queriable_data.rs index a20a715..661642b 100644 --- a/rust/timsquery/src/traits/queriable_data.rs +++ b/rust/timsquery/src/traits/queriable_data.rs @@ -79,22 +79,29 @@ where /// Execute multiple queries in parallel, one per aggregator. /// - /// This method processes a batch of aggregators using Rayon parallelism. - /// Each aggregator is queried independently against the same indexed data. + /// Zips aggregators with tolerances and processes each pair in parallel via + /// Rayon. Both parameters accept any `IntoParallelIterator`, so you can pass: /// - /// Useful for: - /// - Batch processing multiple elution groups - /// - Parallelizing across precursors - /// - Speeding up large-scale data extraction + /// - `&mut [QA]` for aggregators (the common case) + /// - `&[Tolerance]` or `&Vec` for per-query tolerances + /// - `rayon::iter::repeatn(&tol, n)` for a single shared tolerance /// /// # Arguments /// - /// - `queriable_aggregators`: Slice of aggregators to process in parallel - /// - `tolerance`: Shared tolerance applied to all queries - fn par_add_query_multi(&self, queriable_aggregators: &mut [QA], tolerance: &Tolerance) { + /// - `queriable_aggregators`: Parallel iterator of mutable aggregator references + /// - `tolerances`: Parallel iterator of tolerance references (one per aggregator) + fn par_add_query_multi<'a, A, T>(&self, queriable_aggregators: A, tolerances: T) + where + QA: 'a, + A: IntoParallelIterator, + A::Iter: IndexedParallelIterator, + T: IntoParallelIterator, + T::Iter: IndexedParallelIterator, + { queriable_aggregators - .par_iter_mut() - .for_each(|queriable_aggregator| self.add_query(queriable_aggregator, tolerance)); + .into_par_iter() + .zip(tolerances) + .for_each(|(agg, tol)| self.add_query(agg, tol)); } } diff --git 
a/rust/timsquery_cli/src/processing.rs b/rust/timsquery_cli/src/processing.rs index 9bdd5dd..37795da 100644 --- a/rust/timsquery_cli/src/processing.rs +++ b/rust/timsquery_cli/src/processing.rs @@ -120,13 +120,16 @@ impl AggregatorContainer { pub fn add_query(&mut self, index: &IndexedTimstofPeaks, tolerance: &Tolerance) { match self { AggregatorContainer::Point(aggregators) => { - index.par_add_query_multi(aggregators, tolerance); + let n = aggregators.len(); + index.par_add_query_multi(aggregators, rayon::iter::repeat_n(tolerance, n)); } AggregatorContainer::Chromatogram(aggregators) => { - index.par_add_query_multi(aggregators, tolerance); + let n = aggregators.len(); + index.par_add_query_multi(aggregators, rayon::iter::repeat_n(tolerance, n)); } AggregatorContainer::Spectrum(aggregators) => { - index.par_add_query_multi(aggregators, tolerance); + let n = aggregators.len(); + index.par_add_query_multi(aggregators, rayon::iter::repeat_n(tolerance, n)); } } } From aca217ac903cfdbb950866a4f6fa4818c2f8f728 Mon Sep 17 00:00:00 2001 From: "J. Sebastian Paez" Date: Thu, 9 Apr 2026 19:18:31 -0700 Subject: [PATCH 02/64] refactor: remove dead uncalibrated scoring path from pipeline.rs MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Delete `build_narrow_context`, `process_query`, and `process_batch` — the old uncalibrated pipeline that is no longer called. The CLI exclusively uses `prescore_batch` + `score_calibrated_batch`. Also remove unused `info` import and clean up the stale module-level doc comment. 
--- .gitignore | 7 + Cargo.toml | 10 + TODO.md | 0 .../timsseek_rescore/feateng.py | 36 +- rust/calibrt/src/grid.rs | 41 +- rust/calibrt/src/lib.rs | 2 + rust/calibrt/src/pathfinding.rs | 4 +- rust/timsseek/DATA_FLOW.md | 366 +++++ rust/timsseek/src/ml/cv.rs | 14 +- rust/timsseek/src/ml/qvalues.rs | 98 +- rust/timsseek/src/rt_calibration.rs | 181 +-- rust/timsseek/src/scoring/apex_finding.rs | 553 ++++--- rust/timsseek/src/scoring/mod.rs | 12 +- rust/timsseek/src/scoring/pipeline.rs | 530 +++++-- .../src/scoring/scores/apex_features.rs | 1285 +++++++++++++++++ .../scores/coelution/coelution_score.rs | 181 --- .../src/scoring/scores/coelution/mod.rs | 2 - .../timsseek/src/scoring/scores/corr_v_ref.rs | 85 -- rust/timsseek/src/scoring/scores/mod.rs | 4 +- rust/timsseek/src/scoring/scores/scribe.rs | 2 + rust/timsseek/src/scoring/search_results.rs | 157 +- rust/timsseek/src/scoring/timings.rs | 18 + rust/timsseek_cli/src/cli.rs | 7 + rust/timsseek_cli/src/main.rs | 17 + rust/timsseek_cli/src/processing.rs | 533 ++++++- 25 files changed, 3243 insertions(+), 902 deletions(-) delete mode 100644 TODO.md create mode 100644 rust/timsseek/DATA_FLOW.md create mode 100644 rust/timsseek/src/scoring/scores/apex_features.rs delete mode 100644 rust/timsseek/src/scoring/scores/coelution/coelution_score.rs delete mode 100644 rust/timsseek/src/scoring/scores/coelution/mod.rs delete mode 100644 rust/timsseek/src/scoring/scores/corr_v_ref.rs create mode 100644 rust/timsseek/src/scoring/scores/scribe.rs diff --git a/.gitignore b/.gitignore index 87a941c..64baf74 100644 --- a/.gitignore +++ b/.gitignore @@ -37,3 +37,10 @@ results.json *search_results*/* wandb/* +docs/superpowers/* +.plans/* +.claude/* +.prompts/* +sh*/ +*plan*.md + diff --git a/Cargo.toml b/Cargo.toml index 4ae61fc..1377d99 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -11,6 +11,16 @@ members = [ "rust/timsquery_viewer", "python/timsquery_pyo3" ] +default-members = [ + "rust/calibrt", + "rust/micromzpaf", + 
"rust/timscentroid", + "rust/timsseek", + "rust/timsseek_cli", + "rust/timsquery", + "rust/timsquery_cli", + "rust/timsquery_viewer" +] [workspace.package] version = "0.26.0" diff --git a/TODO.md b/TODO.md deleted file mode 100644 index e69de29..0000000 diff --git a/python/timsseek_rescore/timsseek_rescore/feateng.py b/python/timsseek_rescore/timsseek_rescore/feateng.py index 908cee4..04fae44 100644 --- a/python/timsseek_rescore/timsseek_rescore/feateng.py +++ b/python/timsseek_rescore/timsseek_rescore/feateng.py @@ -165,7 +165,9 @@ def to_mokapot_df( "delta_next", "delta_second_next", "apex_lazyerscore", - "apex_lazyerscore_vs_baseline", + "split_product_score", + "cosine_au_score", + "scribe_au_score", "ms2_isotope_lazyerscore", "ms2_lazyerscore", "ms2_isotope_lazyerscore_ratio", @@ -174,6 +176,8 @@ def to_mokapot_df( # TODO: consider clamping instead of logging here. "sq_delta_theo_rt", "calibrated_sq_delta_theo_rt", + "delta_group", + "delta_group_ratio", ) imputable_cols = ( # Abs impute @@ -229,13 +233,26 @@ def to_mokapot_df( "precursor_mz", "precursor_mobility_query", "obs_mobility", - "ms2_cosine_ref_similarity", - "ms2_coelution_score", - "ms1_cosine_ref_similarity", - "ms1_coelution_score", - "ms1_corr_v_gauss", - "ms2_corr_v_gauss", + "peak_shape", + "ratio_cv", + "centered_apex", + "precursor_coelution", + "fragment_coverage", + "precursor_apex_match", + "xic_quality", + "fragment_apex_agreement", + "isotope_correlation", + "gaussian_correlation", + "per_frag_gaussian_corr", + "coelution_gradient_cosine", + "coelution_gradient_scribe", + "cosine_weighted_coelution", + "cosine_gradient_consistency", + "scribe_weighted_coelution", + "scribe_gradient_consistency", "nqueries", + "lazyscore_z", + "lazyscore_vs_baseline", # Intensity ratios "ms1_inten_ratio_2", "ms2_inten_ratio_4", @@ -250,10 +267,7 @@ def to_mokapot_df( # Cycle counts "raising_cycles", "falling_cycles", - "apex_norm_lazyerscore_vs_baseline", - # ... 
- "delta_group_ratio", - "delta_group", + # delta_group and delta_group_ratio are in loggable_cols ) + loggable_cols + imputable_cols diff --git a/rust/calibrt/src/grid.rs b/rust/calibrt/src/grid.rs index 2764096..c2321ed 100644 --- a/rust/calibrt/src/grid.rs +++ b/rust/calibrt/src/grid.rs @@ -44,6 +44,9 @@ impl Grid { weight: 0.0, }, suppressed: false, + sum_wx: 0.0, + sum_wy: 0.0, + sum_w: 0.0, }); } } @@ -83,6 +86,9 @@ impl Grid { let index = gy * self.bins + gx; if let Some(node) = self.nodes.get_mut(index) { node.center.weight += weight; + node.sum_wx += x * weight; + node.sum_wy += y * weight; + node.sum_w += weight; } Ok(()) @@ -142,6 +148,18 @@ impl Grid { if non_suppressed_sum == 0.0 { return Err(CalibRtError::NoPoints); } + + // Replace bin centers with weighted centroids for non-suppressed nodes. + // Clamp to grid range to avoid interpolation boundary issues. + for node in self.nodes.iter_mut() { + if !node.suppressed && node.sum_w > 0.0 { + let cx = (node.sum_wx / node.sum_w).clamp(self.x_range.0, self.x_range.1); + let cy = (node.sum_wy / node.sum_w).clamp(self.y_range.0, self.y_range.1); + node.center.x = cx; + node.center.y = cy; + } + } + Ok(()) } } @@ -151,6 +169,10 @@ impl Grid { pub(crate) struct Node { pub(crate) center: Point, pub(crate) suppressed: bool, + // Weighted centroid accumulators + sum_wx: f64, + sum_wy: f64, + sum_w: f64, } #[cfg(test)] @@ -239,15 +261,8 @@ mod tests { non_suppressed.len() ); - // Verify the specific nodes - assert!( - non_suppressed.contains(&(0, 2, 9.0)), - "Node at (0,2) with weight 9.0 should be non-suppressed" - ); - assert!( - non_suppressed.contains(&(2, 1, 8.0)), - "Node at (2,1) with weight 8.0 should be non-suppressed" - ); + assert!(non_suppressed.contains(&(0, 2, 9.0))); + assert!(non_suppressed.contains(&(2, 1, 8.0))); } #[test] @@ -281,17 +296,15 @@ mod tests { let non_suppressed = print_grid_state(&grid); // Only the center cell (1,1) with weight 9 should be non-suppressed + // It is the max in both 
its row (row 1: 4,9,6) and column (col 1: 2,9,8) assert_eq!( non_suppressed.len(), 1, - "Expected 1 non-suppressed node (the global max), found {}", + "Expected 1 non-suppressed node, found {}", non_suppressed.len() ); - assert!( - non_suppressed.contains(&(1, 1, 9.0)), - "Node at (1,1) with weight 9.0 should be non-suppressed" - ); + assert!(non_suppressed.contains(&(1, 1, 9.0))); } #[test] diff --git a/rust/calibrt/src/lib.rs b/rust/calibrt/src/lib.rs index 92a91b7..bb15cda 100644 --- a/rust/calibrt/src/lib.rs +++ b/rust/calibrt/src/lib.rs @@ -114,6 +114,8 @@ impl CalibrationCurve { // Find the partition point; first element >= x_val. let i = self.points.partition_point(|p| p.x < x_val); + // Clamp to [1, slopes.len()] — partition_point can return 0 when x_val == first_x + let i = i.max(1).min(self.slopes.len()); Ok(self.predict_with_index(x_val, i)) } diff --git a/rust/calibrt/src/pathfinding.rs b/rust/calibrt/src/pathfinding.rs index fb53d5a..3abd76f 100644 --- a/rust/calibrt/src/pathfinding.rs +++ b/rust/calibrt/src/pathfinding.rs @@ -35,7 +35,9 @@ pub(crate) fn find_optimal_path(nodes: &mut [crate::grid::Node]) -> Vec lookback { i - lookback } else { 0 }; + for j in start..i { // Only create edges where both dimensions increase (monotonic constraint) if nodes[i].center.x > nodes[j].center.x && nodes[i].center.y > nodes[j].center.y { let dx = nodes[i].center.x - nodes[j].center.x; diff --git a/rust/timsseek/DATA_FLOW.md b/rust/timsseek/DATA_FLOW.md new file mode 100644 index 0000000..3aeaf08 --- /dev/null +++ b/rust/timsseek/DATA_FLOW.md @@ -0,0 +1,366 @@ +# Scoring Pipeline Data Flow + +## Overview + +The timsseek scoring pipeline uses a global two-pass architecture: + +``` +Phase 1: Broad prescore → collect top-2000 calibrants +Phase 2: Calibrate iRT→RT + derive tolerances from calibrant errors +Phase 3: Narrow calibrated extraction → full scoring → results +Post: Target/decoy competition → ML rescoring → parquet output +``` + +### Conceptual Decomposition + 
+The pipeline optimizes three independent concerns per peptide: + +1. **Apex finding** — locate the correct elution peak in time. Uses the composite apex profile (`cos³ × I × (0.5 + S_norm)`) for peak-picking. Quality measured by whether the detected apex RT matches the true elution time. + +2. **Feature extraction** — at the detected apex, compute features that discriminate true peptides from false matches. The 11 apex features, split product scores, lazyscore baseline stats, m/z/mobility errors, and relative intensities all serve this purpose. + +3. **Post-processing** — refine target/decoy assignments using inter-peptide relationships. Currently: target-decoy competition (dedup by sequence, compete within decoy groups). Potential: overlapping fragment ion competition (if two candidates share fragments and RT, the weaker one is likely false). + +## Entry Point + +**`main()`** — `timsseek_cli/src/main.rs` + +1. Parse CLI args (`Cli` struct — `cli.rs`) + - `--speclib-file` — main spectral library + - `--calib-lib` — optional calibration library (different RT scale OK) + - `--dotd-files` — raw instrument files + - `--output-dir`, `--overwrite`, `--config`, `--decoy-strategy` +2. Load/merge config (JSON config + CLI overrides) +3. Validate inputs (speclib, raw files, output dir, calib lib if provided) +4. For each .d file → `process_single_file()`: + - Load raw data as `IndexedTimstofPeaks` via `load_index_auto()` + - Extract DIA fragmentation m/z range from frame reader + - Build `ScoringPipeline { index, tolerances, fragmented_range }` + - Call `process_speclib()` + +**`process_speclib()`** — `timsseek_cli/src/processing.rs` + +1. Load main speclib: `Speclib::from_file(path, decoy_strategy)` → `Vec` +2. Optionally load calibration library (`--calib-lib`) +3. If calib lib provided: `check_rt_scale_compatibility()` — warns if RT ranges have < 50% overlap +4. 
Call `main_loop(speclib, calib_lib, pipeline, chunk_size, output)` + +## Phase 1: Broad Prescore + +**Goal:** Find the top-2000 best-scoring peptides for calibration. + +``` +main_loop() + → phase1_prescore() processing.rs + → pipeline.prescore_batch() pipeline.rs + → pipeline.prescore() pipeline.rs [per peptide] + → build_candidate_context() pipeline.rs + → buffer.find_apex_location() apex_finding.rs +``` + +Both `prescore_batch` and `score_calibrated_batch` have serial paths gated behind `--features serial_scoring` for instrumentation/debugging. + +### `build_candidate_context()` +**In:** `QueryItemToScore` (peptide with iRT, m/z, fragments, expected intensities) +**Out:** `(PeptideMetadata, ScoringContext)` + +1. Compute RT range from `self.tolerances.prescore` (broad: ±5 min or unrestricted) +2. Create `ChromatogramCollector` — allocates cycle×ion intensity arrays +3. `index.add_query(&mut agg, &prescore_tolerance)` — extract peaks from raw data +4. `filter_zero_intensity_ions()` — drop ions with no signal +5. `select_top_n_fragments(n=8)` — keep only top-8 by predicted intensity + +### `find_apex_location()` +**In:** `ScoringContext` (chromatogram data + expected intensities) +**Out:** `ApexLocation { score, retention_time_ms, apex_cycle, raising_cycles, falling_cycles }` + +1. **`compute_pass_1()`** — single pass over all fragments × cycles: + - **Cosine**: `dot(obs, sqrt(expected)) / (||obs|| × ||sqrt(expected)||)` per cycle + - **Scribe**: SSE of sqrt-normalized observed vs predicted distributions → `-ln(SSE)` + - **Lazyscore**: `lnfact(Σ ln(intensity))` per cycle + - **Log-intensity**: `ln(1 + Σ raw_intensity)` per cycle + - **Precursor trace**: summed MS1 precursor intensity (keys ≥ 0) + +2. **`compute_main_score_trace()`** — apex profile: + ``` + C(t) = cos(t)³ × I(t) + S(t) = scribe(t) × I(t) + S_norm = (S - min(S)) / (max(S) - min(S)) + apex_profile(t) = C(t) × (0.5 + S_norm(t)) + ``` + +3. **Peak-pick** on apex profile → argmax +4. 
**Split product score** for calibrant ranking: + - Independent argmax on cosine and scribe profiles + - Area-uniqueness (hw=5): `AU = peak_area × (1 + 200 × peak_area/total)` + - Coelution-gradient (hw=20, gradient hw=10): pairwise fragment correlation + - `base_score = cos_AU × cos_CG × scr_AU × scr_CG` + +### `prescore_batch()` +- Parallel (or serial with `--features serial_scoring`) iteration +- Per-thread bounded min-heap (`CalibrantHeap`, capacity=2000) +- Pushes `CalibrantCandidate { score, apex_rt_seconds, speclib_index }` +- Merge heaps across threads → top-2000 globally + +## Phase 2: Calibration + +**Goal:** Fit iRT→RT curve, measure instrument errors, derive tolerances. + +``` +main_loop() + → [build precursor+fragment lookup if calib lib] processing.rs + → calibrate_from_phase1() processing.rs +``` + +### Calib lib matching (when `--calib-lib` is used) + +Builds a lookup: `HashMap<(quantized_mz_0.01Da, charge), Vec<(rt, sorted_fragment_mzs)>>` from the main speclib. For each calibrant from the calib lib, matches by: +1. Same precursor m/z (within 0.01 Da) + same charge +2. ≥ 5 shared fragment masses (within 0.01 Da, sorted merge) +3. Among matches, pick the one with most shared fragments (break ties by closest RT) + +This maps the calibrant's observed apex RT to the **main speclib's iRT** for curve fitting. 
+ +### Step A: Fit iRT → RT curve + +For each calibrant: +- `x = iRT` (from main speclib if using calib lib, else from phase1 lib) +- `y = observed apex RT` (from Phase 1) + +Call `calibrate_with_ranges()` (calibrt crate): +- 100×100 grid, AND-intersection NMS +- Non-suppressed cells use **weighted centroids** (not bin centers) +- Pathfinding: optimal ascending path with bounded lookback L=30 +- Result: piecewise-linear `CalibrationCurve` + +### Step B: Measure m/z and mobility errors + +For each calibrant, re-query the index at apex RT with broad tolerance: +```rust +SpectralCollector +``` +Extract observed precursor m/z and mobility: +- `mz_error_ppm = (obs_mz - expected_mz) / expected_mz × 1e6` +- `mobility_error_pct = (obs_mob - expected_mob) / expected_mob × 100` + +### Step C: Derive tolerances + +- **RT**: `tolerance = rt_sigma_factor × MAD(|residuals|) / 60` (min 0.5 min) +- **m/z**: `asymmetric_tolerance(errors, sigma=2.0)` → `(left_ppm, right_ppm)` +- **mobility**: `asymmetric_tolerance(errors, sigma=3.0)` → `(left_pct, right_pct)` + +Asymmetric formula: `left = max(min_val, -(mean - σ×std))`, `right = max(min_val, mean + σ×std)` + +**Output:** `CalibrationResult { cal_curve, rt_tolerance_minutes, mz_tolerance_ppm, mobility_tolerance_pct }` + +## Phase 3: Calibrated Scoring + +**Goal:** Re-extract every peptide with calibrated RT + derived tolerances, compute full features. 
+ +``` +main_loop() + → phase3_score() processing.rs + → pipeline.score_calibrated_batch() pipeline.rs + → score_calibrated_extraction() pipeline.rs [per peptide] + → build_calibrated_context() pipeline.rs + → buffer.find_apex() apex_finding.rs + → execute_secondary_query() pipeline.rs + → finalize_results() pipeline.rs +``` + +### `build_calibrated_context()` +**In:** `QueryItemToScore` + `CalibrationResult` +**Out:** `(PeptideMetadata, ScoringContext)` + +Key difference from Phase 1: +- RT = `calibration.convert_irt(original_irt)` (not the raw iRT) +- Tolerance = `calibration.get_tolerance(mz, mobility, rt)` (narrow, asymmetric) + +### `find_apex()` — full scoring +**In:** `ScoringContext` (narrow extraction) +**Out:** `ApexScore` + +Same as `find_apex_location()` (compute_pass_1 + main_score), then additionally: + +1. **`extract_apex_score()`**: + - Peak-pick with delta scoring (mask primary, find 2nd/3rd peaks) + - Split product: `SplitProductScore` with 9 component fields + - Joint precursor-fragment apex: `argmax(C(t) × (0.5 + P(t)/max(P)))` + - **11 apex features** at joint apex (`compute_apex_features()`): + + | Feature | Description | + |---------|------------| + | peak_shape | 0.5×symmetry + 0.5×sharpness around apex | + | ratio_cv | 1/(1+CV) of obs/predicted ratios at apex | + | centered_apex | 1 - |apex - center| / (n_cycles/2) | + | precursor_coelution | Pearson(precursor, summed_fragments) ±10 cycles | + | fragment_coverage | Fraction of fragments with intensity > 0 | + | precursor_apex_match | 0.5×proximity + 0.5×fraction | + | xic_quality | Min XIC signal quality across fragments | + | fragment_apex_agreement | Pearson(expected, observed per-fragment peaks) | + | isotope_correlation | Correlation between precursor isotope patterns | + | gaussian_correlation | Correlation vs Gaussian peak shape | + | per_frag_gaussian_corr | Mean per-fragment Gaussian correlation | + + - **Weighted product score**: + ``` + score = base_score × Π(offset_k + scale_k × 
feature_k) + ``` + With 11 (offset, scale) pairs from `SCORING_WEIGHTS` + +### `execute_secondary_query()` — two-pass spectral refinement +**In:** `QueryItemToScore` + `ApexScore` +**Out:** `(SpectralCollector, SpectralCollector)` + +1. **Pass 1**: Query at apex RT with secondary tolerance → observed mobility +2. **Pass 2**: Query at apex RT + observed mobility with 3% mobility tolerance + - Main pattern → `MzMobilityStatsCollector` (m/z, mobility, intensity stats) + - Isotope pattern (+1 Da offset) → `f32` intensities + +### `finalize_results()` +**In:** metadata + ApexScore + secondary collectors +**Out:** `IonSearchResults` + +Via `SearchResultBuilder`: +1. `MzMobilityOffsets::new()` — top-3 MS1 + top-7 MS2 m/z/mobility errors +2. `RelativeIntensities::new()` — log-normalized MS1/MS2 intensities +3. `compute_secondary_lazyscores()` — main + isotope lazyscores + ratio +4. Populate all ~90 fields from ApexScore features, split product, offsets + +## Post-Processing + +**`target_decoy_compete()`** — `processing.rs` +1. Deduplicate by (sequence, charge, m/z) — keep best score +2. Sort by (decoy_group_id, charge, score desc) +3. Compute delta_group scores between target/decoy pairs +4. Keep one winner per (decoy_group_id, charge) + +**`rescore()`** — `timsseek/src/ml/qvalues.rs` +1. Extract features from each `IonSearchResults` via `as_feature()` (see Feature Vector below) +2. 3-fold cross-validated gradient boosting (forust) + - 1000 iterations, LR=0.1, max_depth=6, no early stopping + - Subsample=0.8, colsample_bytree=0.8, min_leaf_weight=5 +3. Predict discriminant scores +4. 
Assign q-values: `q = cummin(decoys / targets)` + +**Output:** Parquet file via `ResultParquetWriter` + +## Feature Vector (GBM input) + +The `as_feature()` method produces the following features for rescoring: + +**Context features:** +- `precursor_mz` (binned to 5 Da), `charge`, `mobility_query`, `rt_query` (rounded) +- `nqueries` (fragment count after filtering) + +**Primary scores:** +- `main_score`, `main_score/delta_next`, `delta_next`, `delta_second_next` + +**RT/mobility deltas:** +- `obs_rt`, `obs_mobility`, `delta_theo_rt`, `sq_delta_theo_rt` +- `delta_ms1_ms2_mobility`, `sq_delta_ms1_ms2_mobility` +- `calibrated_sq_delta_theo_rt`, `recalibrated_query_rt` + +**Peak shape:** +- `raising_cycles`, `falling_cycles` + +**MS2 scores:** +- `npeaks`, `apex_lazyerscore`, `ln(ms2_summed_intensity)` +- `ms2_lazyerscore`, `ms2_isotope_lazyerscore`, `ms2_isotope_ratio` +- `lazyscore_z`, `lazyscore_vs_baseline` + +**Split product & apex features (19 values):** +- `ln(split_product_score)`, `ln(cosine_au)`, `ln(scribe_au)` +- `coelution_gradient_cosine`, `coelution_gradient_scribe` +- `cosine_weighted_coelution`, `cosine_gradient_consistency` +- `scribe_weighted_coelution`, `scribe_gradient_consistency` +- 11 apex features (peak_shape through per_frag_gaussian_corr) + +**Per-ion errors (20 values):** +- 7× `ms2_mz_error`, 7× `ms2_mobility_error` +- 3× `ms1_mz_error`, 3× `ms1_mobility_error` + +**Intensities:** +- `ln(ms1_summed_precursor_intensity)` +- 3× `ms1_inten_ratio`, 7× `ms2_inten_ratio` + +**Target-decoy competition:** +- `delta_group`, `delta_group_ratio` + +**Interaction features:** +- `main_score × delta_next` +- `split_product_score × fragment_coverage` + +**Summary error features:** +- Mean |ms2_mz_error|, Mean |ms2_mobility_error| +- Mean |ms1_mz_error|, Mean |ms1_mobility_error| + +**Derived features:** +- Max fragment intensity ratio (dominance of strongest fragment) + +## Key Types + +``` +QueryItemToScore +├── digest: DigestSlice sequence + 
modifications +├── query: TimsElutionGroup precursor/fragment m/z, RT, mobility +└── expected_intensity: ExpectedIntensities + +ChromatogramCollector +├── fragments: MzMajorIntensityArray [n_fragments × n_cycles] +├── precursors: MzMajorIntensityArray [n_precursors × n_cycles] +└── eg: TimsElutionGroup + +ScoreTraces 6 per-cycle vectors +├── ms2_cosine_ref_sim, ms2_lazyscore, ms2_scribe +├── ms2_log_intensity, ms1_precursor_trace +└── main_score composite apex profile + +ApexLocation Phase 1 result (lightweight) +├── score: f32 split product base score +└── retention_time_ms: u32 + +ApexScore Phase 3 result (full) +├── score: f32 weighted product score +├── split_product: SplitProductScore 9 component scores +├── features: ApexFeatures 11 apex-local features +├── delta_next, delta_second_next peak discrimination +├── lazyscore_z: f32 Poisson Z-score (peak vs baseline) +└── lazyscore_vs_baseline: f32 signal-to-noise (k/lambda) + +CalibrationResult immutable, speclib not mutated +├── cal_curve: RTCalibration iRT → calibrated RT +├── rt_tolerance_minutes: f32 +├── mz_tolerance_ppm: (f64, f64) asymmetric +└── mobility_tolerance_pct: (f32, f32) asymmetric + +IonSearchResults ~90 fields → parquet +``` + +## Constants + +| Constant | Value | Location | +|----------|-------|----------| +| TOP_N_FRAGMENTS | 8 | pipeline.rs | +| NUM_MS1_IONS | 3 | scoring/mod.rs | +| NUM_MS2_IONS | 7 | scoring/mod.rs | +| SCRIBE_FLOOR | -100.0 | scores/scribe.rs | +| n_calibrants | 2000 | CalibrationConfig | +| grid_size | 100 | CalibrationConfig | +| rt_sigma_factor | 3.0 | CalibrationConfig | +| min_rt_tolerance_minutes | 0.5 | CalibrationConfig | +| mz_sigma | 2.0 | CalibrationConfig | +| mobility_sigma | 3.0 | CalibrationConfig | +| MIN_SHARED_FRAGMENTS | 5 | processing.rs (calib lib matching) | +| Area-uniqueness hw | 5 | apex_features.rs | +| Coelution hw | 20 | apex_features.rs | +| Gradient hw | 10 | apex_features.rs | +| GBM iterations | 1000 | cv.rs | +| GBM learning_rate | 0.1 | 
cv.rs | +| GBM max_depth | 6 | cv.rs | +| GBM min_leaf_weight | 5.0 | cv.rs | +| GBM subsample | 0.8 | cv.rs | +| GBM colsample_bytree | 0.8 | cv.rs | +| GBM early_stopping | None | cv.rs | +| CV folds | 3 | qvalues.rs | +| Pathfinding lookback | 30 | pathfinding.rs | diff --git a/rust/timsseek/src/ml/cv.rs b/rust/timsseek/src/ml/cv.rs index 22bc822..d07614c 100644 --- a/rust/timsseek/src/ml/cv.rs +++ b/rust/timsseek/src/ml/cv.rs @@ -134,32 +134,32 @@ impl Clone for GBMConfig { impl Default for GBMConfig { fn default() -> Self { GBMConfig { - iterations: 500, - learning_rate: 0.3, + iterations: 1000, + learning_rate: 0.1, max_depth: 6, max_leaves: usize::MAX, l1: 0., l2: 1., gamma: 0., max_delta_step: 0., - min_leaf_weight: 2., + min_leaf_weight: 5., base_score: 0.5, nbins: 256, parallel: true, allow_missing_splits: true, monotone_constraints: None, - subsample: 1.0, + subsample: 0.8, top_rate: 0.1, other_rate: 0.2, - colsample_bytree: 1.0, + colsample_bytree: 0.8, seed: 0, missing: f64::NAN, create_missing_branch: false, - sample_method: SampleMethod::None, + sample_method: SampleMethod::Random, grow_policy: GrowPolicy::DepthWise, evaluation_metric: Some(Metric::LogLoss), // evaluation_metric: None, - early_stopping_rounds: Some(20), + early_stopping_rounds: None, initialize_base_score: true, terminate_missing_features: HashSet::new(), missing_node_treatment: MissingNodeTreatment::AssignToParent, diff --git a/rust/timsseek/src/ml/qvalues.rs b/rust/timsseek/src/ml/qvalues.rs index 3e85c15..be8ba8a 100644 --- a/rust/timsseek/src/ml/qvalues.rs +++ b/rust/timsseek/src/ml/qvalues.rs @@ -89,7 +89,7 @@ pub fn rescore( data.shuffle(&mut rand::rng()); - let mut scorer = CrossValidatedScorer::new_from_shuffled(5, data, config); + let mut scorer = CrossValidatedScorer::new_from_shuffled(3, data, config); scorer .fit(&mut DataBuffer::default(), &mut DataBuffer::default()) .unwrap(); @@ -105,6 +105,12 @@ pub fn rescore( use crate::IonSearchResults; +fn mean_abs_error(errs: 
&[f32]) -> f64 { + let (sum, n) = errs.iter().filter(|e| e.is_finite() && **e != 0.0) + .fold((0.0f64, 0u32), |(s, n), &e| (s + (e as f64).abs(), n + 1)); + if n > 0 { sum / n as f64 } else { f64::NAN } +} + impl FeatureLike for IonSearchResults { fn as_feature(&self) -> impl IntoIterator + '_ { let Self { @@ -129,7 +135,6 @@ impl FeatureLike for IonSearchResults { sq_delta_theo_rt, calibrated_sq_delta_theo_rt, delta_ms1_ms2_mobility, - // ms1_ms2_correlation, sq_delta_ms1_ms2_mobility, raising_cycles, falling_cycles, @@ -137,22 +142,36 @@ impl FeatureLike for IonSearchResults { // MS2 npeaks, apex_lazyerscore, - apex_lazyerscore_vs_baseline, - apex_norm_lazyerscore_vs_baseline, - ms2_cosine_ref_similarity, - ms2_coelution_score, - ms2_corr_v_gauss, ms2_summed_transition_intensity, ms2_lazyerscore, ms2_isotope_lazyerscore, ms2_isotope_lazyerscore_ratio, + lazyscore_z, + lazyscore_vs_baseline, + + // Split product & apex features + split_product_score, + cosine_au_score, + scribe_au_score, + coelution_gradient_cosine, + coelution_gradient_scribe, + cosine_weighted_coelution, + cosine_gradient_consistency, + scribe_weighted_coelution, + scribe_gradient_consistency, + peak_shape, + ratio_cv, + centered_apex, + precursor_coelution, + fragment_coverage, + precursor_apex_match, + xic_quality, + fragment_apex_agreement, + isotope_correlation, + gaussian_correlation, + per_frag_gaussian_corr, // MS2 - Split - // Flattening manually bc serde(flatten) - // is not supported by csv ... 
- // https, - // Q, - // A, ms2_mz_error_0, ms2_mz_error_1, ms2_mz_error_2, @@ -169,10 +188,7 @@ impl FeatureLike for IonSearchResults { ms2_mobility_error_6, // MS1 - ms1_cosine_ref_similarity, - ms1_coelution_score, ms1_summed_precursor_intensity, - ms1_corr_v_gauss, // MS1 Split ms1_mz_error_0, @@ -223,15 +239,33 @@ impl FeatureLike for IonSearchResults { // MS2 npeaks as f64, apex_lazyerscore as f64, - apex_lazyerscore_vs_baseline as f64, - apex_norm_lazyerscore_vs_baseline as f64, - ms2_cosine_ref_similarity as f64, - ms2_coelution_score as f64, - ms2_corr_v_gauss as f64, (ms2_summed_transition_intensity as f64).ln_1p(), ms2_lazyerscore as f64, ms2_isotope_lazyerscore as f64, ms2_isotope_lazyerscore_ratio as f64, + lazyscore_z as f64, + lazyscore_vs_baseline as f64, + // Split product & apex features + (split_product_score as f64).ln_1p(), + (cosine_au_score as f64).ln_1p(), + (scribe_au_score as f64).ln_1p(), + coelution_gradient_cosine as f64, + coelution_gradient_scribe as f64, + cosine_weighted_coelution as f64, + cosine_gradient_consistency as f64, + scribe_weighted_coelution as f64, + scribe_gradient_consistency as f64, + peak_shape as f64, + ratio_cv as f64, + centered_apex as f64, + precursor_coelution as f64, + fragment_coverage as f64, + precursor_apex_match as f64, + xic_quality as f64, + fragment_apex_agreement as f64, + isotope_correlation as f64, + gaussian_correlation as f64, + per_frag_gaussian_corr as f64, // MS2 - Split ms2_mz_error_0 as f64, ms2_mz_error_1 as f64, @@ -247,12 +281,9 @@ impl FeatureLike for IonSearchResults { ms2_mobility_error_4 as f64, ms2_mobility_error_5 as f64, ms2_mobility_error_6 as f64, - // MS as f641 - ms1_cosine_ref_similarity as f64, - ms1_coelution_score as f64, + // MS1 (ms1_summed_precursor_intensity as f64).ln_1p(), - ms1_corr_v_gauss as f64, - // MS1 Spli as f64t + // MS1 Split ms1_mz_error_0 as f64, ms1_mz_error_1 as f64, ms1_mz_error_2 as f64, @@ -274,6 +305,23 @@ impl FeatureLike for IonSearchResults { 
delta_group_ratio as f64, recalibrated_query_rt as f64, calibrated_sq_delta_theo_rt as f64, + // Derived intensity features + { + // Max fragment intensity ratio (dominance of strongest fragment) + let ratios = [ms2_inten_ratio_0, ms2_inten_ratio_1, ms2_inten_ratio_2, + ms2_inten_ratio_3, ms2_inten_ratio_4, ms2_inten_ratio_5, ms2_inten_ratio_6]; + ratios.iter().filter(|r| r.is_finite()).fold(f32::NEG_INFINITY, |a, &b| a.max(b)) as f64 + }, + // Interaction features + (main_score * delta_next) as f64, // score × peak separation + (split_product_score * fragment_coverage) as f64, // base score × coverage + // Summary error features + mean_abs_error(&[ms2_mz_error_0, ms2_mz_error_1, ms2_mz_error_2, + ms2_mz_error_3, ms2_mz_error_4, ms2_mz_error_5, ms2_mz_error_6]), + mean_abs_error(&[ms2_mobility_error_0, ms2_mobility_error_1, ms2_mobility_error_2, + ms2_mobility_error_3, ms2_mobility_error_4, ms2_mobility_error_5, ms2_mobility_error_6]), + mean_abs_error(&[ms1_mz_error_0, ms1_mz_error_1, ms1_mz_error_2]), + mean_abs_error(&[ms1_mobility_error_0, ms1_mobility_error_1, ms1_mobility_error_2]), ] } diff --git a/rust/timsseek/src/rt_calibration.rs b/rust/timsseek/src/rt_calibration.rs index 6b379a6..f950f31 100644 --- a/rust/timsseek/src/rt_calibration.rs +++ b/rust/timsseek/src/rt_calibration.rs @@ -1,131 +1,88 @@ -use crate::{ - IonSearchResults, - Speclib, -}; +use crate::ScorerQueriable; +use crate::scoring::pipeline::ScoringPipeline; pub use calibrt::{ CalibRtError, CalibrationCurve as RTCalibration, Point, calibrate_with_ranges, }; -use tracing::warn; +use timsquery::Tolerance; +use timsquery::models::tolerance::{ + MobilityTolerance, + MzTolerance, + QuadTolerance, + RtTolerance, +}; -// It is significantly cheaper to project the spectral library to the observed data -// instead of the other way around. so we need to train the classifier with x = theoretical -// and y = observed. +/// Immutable calibration result. 
Provides RT conversion and per-query tolerance +/// without mutating the speclib. +pub struct CalibrationResult { + cal_curve: RTCalibration, + rt_tolerance_minutes: f32, + mz_tolerance_ppm: (f64, f64), + mobility_tolerance_pct: (f32, f32), +} -impl From<&IonSearchResults> for Point { - fn from(val: &IonSearchResults) -> Self { - Point { - x: val.precursor_rt_query_seconds as f64, - y: val.obs_rt_seconds as f64, - weight: 1.0, +impl CalibrationResult { + pub fn new( + cal_curve: RTCalibration, + rt_tolerance_minutes: f32, + mz_tolerance_ppm: (f64, f64), + mobility_tolerance_pct: (f32, f32), + ) -> Self { + Self { + cal_curve, + rt_tolerance_minutes, + mz_tolerance_ppm, + mobility_tolerance_pct, } - // weight: val.main_score.ln_1p().max(0.0) as f64, // very non-good - // so just weight equally for now. - // weight: val.main_score as f64, // works ok... - // In theory we dont want to use target-decoy info here, to prevent biasing. - // weight: if val.qvalue < 0.1 { 1.0 } else { 0.0 }, - // weight: 1.0, } -} -#[cfg_attr( - feature = "instrumentation", - tracing::instrument(skip_all, level = "trace") -)] -pub fn recalibrate_speclib( - speclib: &mut Speclib, - calib_data: &[IonSearchResults], -) -> Result { - let mut min_x = f64::INFINITY; - let mut max_x = f64::NEG_INFINITY; - let mut min_y = f64::INFINITY; - let mut max_y = f64::NEG_INFINITY; - - calib_data.iter().for_each(|spec| { - let y = spec.obs_rt_seconds as f64; - if y < min_y { - min_y = y; - } - if y > max_y { - max_y = y; - } - let x = spec.precursor_rt_query_seconds as f64; - if x < min_x { - min_x = x; - } - if x > max_x { - max_x = x; + /// Convert indexed RT to calibrated absolute RT (seconds). 
+ pub fn convert_irt(&self, irt_seconds: f32) -> f32 { + match self.cal_curve.predict(irt_seconds as f64) { + Ok(rt) => rt as f32, + Err(CalibRtError::OutOfBounds(rt)) => rt as f32, + Err(_) => irt_seconds, } - }); - - let cal_res = calibrate_with_ranges( - calib_data - .iter() - .map(Point::from) - .collect::>() - .as_slice(), - (min_x, max_x), - (min_y, max_y), - 50, - ); + } - let mut oob_preds = (0, f32::MAX, f32::MIN); - let mut cool_preds = 0; - match cal_res { - Ok(cal_curve) => { - speclib.elems.iter_mut().for_each(|spec| { - let pred = cal_curve.predict(spec.query.rt_seconds() as f64); - let pred = match pred { - Ok(pred_rt) => { - // I can get this by total - oob but this feels safer ... even if its a - // hair slower. - cool_preds += 1; - pred_rt as f32 - } - Err(CalibRtError::OutOfBounds(pred_rt)) => { - oob_preds.0 += 1; - let query_rt = spec.query.rt_seconds(); - oob_preds.1 = oob_preds.1.min(query_rt); - oob_preds.2 = oob_preds.2.max(query_rt); + /// Get per-query tolerance. Initially uniform; future: position-dependent. + pub fn get_tolerance(&self, _mz: f64, _mobility: f32, _rt: f32) -> Tolerance { + Tolerance { + ms: MzTolerance::Ppm(self.mz_tolerance_ppm), + rt: RtTolerance::Minutes((self.rt_tolerance_minutes, self.rt_tolerance_minutes)), + mobility: MobilityTolerance::Pct(self.mobility_tolerance_pct), + quad: QuadTolerance::Absolute((0.1, 0.1)), + } + } - pred_rt as f32 - } - Err(_) => { - panic!("Unexpected error during RT prediction"); - } - }; - // True impass ... - spec.query.set_rt_seconds(pred); - }); - if oob_preds.0 > 0 { - warn!( - "{}/{} out of bounds RT predictions (min RT {}s, max RT {}s)", - oob_preds.0, - cool_preds + oob_preds.0, - oob_preds.1, - oob_preds.2 - ); - } + /// Fallback when calibration fails: identity RT mapping, secondary tolerance. 
+ pub fn fallback(pipeline: &ScoringPipeline) -> Self { + let range = pipeline.index.ms1_cycle_mapping().range_milis(); + let start = range.0 as f64 / 1000.0; + let end = range.1 as f64 / 1000.0; + let points = vec![ + Point { + x: start, + y: start, + weight: 1.0, + }, + Point { + x: end, + y: end, + weight: 1.0, + }, + ]; + let cal_curve = calibrate_with_ranges(&points, (start, end), (start, end), 10) + .expect("Identity calibration should not fail"); - Ok(cal_curve) + Self { + cal_curve, + rt_tolerance_minutes: 1.0, + mz_tolerance_ppm: (10.0, 10.0), + mobility_tolerance_pct: (5.0, 5.0), } - - Err(e) => Err(e), } } -pub fn recalibrate_results(calibration: &RTCalibration, results: &mut [IonSearchResults]) { - for v in results.iter_mut() { - let pred_rt = calibration - .predict(v.precursor_rt_query_seconds as f64) - .unwrap_or_else(|e| match e { - CalibRtError::OutOfBounds(x) => x, - _ => panic!("Unexpected error during RT prediction"), - }); - v.recalibrated_query_rt = pred_rt as f32; - v.calibrated_sq_delta_theo_rt = (v.obs_rt_seconds - v.recalibrated_query_rt).powi(2); - v.delta_theo_rt = v.obs_rt_seconds - v.recalibrated_query_rt; - } -} diff --git a/rust/timsseek/src/scoring/apex_finding.rs b/rust/timsseek/src/scoring/apex_finding.rs index ec72550..09b1974 100644 --- a/rust/timsseek/src/scoring/apex_finding.rs +++ b/rust/timsseek/src/scoring/apex_finding.rs @@ -28,7 +28,6 @@ use std::fmt::Display; use super::{ - COELUTION_WINDOW_WIDTH, NUM_MS1_IONS, NUM_MS2_IONS, }; @@ -38,8 +37,15 @@ use crate::models::{ DigestSlice, ExpectedIntensities, }; -use crate::scoring::scores::coelution::coelution_score::coelution_vref_score_filter_into; -use crate::scoring::scores::corr_v_ref; +use crate::scoring::scores::apex_features::{ + ApexFeatures, + SplitProductScore, + compute_apex_features, + compute_split_product, + compute_weighted_score, + find_joint_apex, +}; +use crate::scoring::scores::scribe::SCRIBE_FLOOR; use crate::utils::top_n_array::TopNArray; use 
serde::Serialize; use timsquery::models::aggregators::ChromatogramCollector; @@ -106,35 +112,48 @@ pub struct ScoringContext { pub query_values: ChromatogramCollector, } -/// The result of the apex finding process. +/// Lightweight result from Phase 1 apex finding. +/// Contains only the apex location and a basic score for calibrant ranking. +#[derive(Debug, Clone, Copy)] +pub struct ApexLocation { + /// Basic score (apex profile peak value) for ranking. + pub score: f32, + /// Retention time at the apex (ms). + pub retention_time_ms: u32, + /// Local cycle index of the apex within the extraction. + pub apex_cycle: usize, + /// Peak shape metrics for baseline computation. + pub raising_cycles: u8, + pub falling_cycles: u8, +} + +/// The result of the full scoring process (Phase 3). #[derive(Debug, Clone, Copy)] pub struct ApexScore { - /// The main composite score (higher is better). + /// The final weighted product score (higher is better). pub score: f32, - /// Difference to the next best peak. - pub delta_next: f32, - /// Difference to the third best peak. - pub delta_second_next: f32, /// Retention time at the apex (ms). pub retention_time_ms: u32, + /// Local cycle index of the joint apex. 
+ pub joint_apex_cycle: usize, - // --- MS2 Features --- - pub ms2_cosine_ref_sim: f32, - pub ms2_coelution_score: f32, - pub ms2_summed_intensity: f32, - pub npeaks: u8, + // --- Split product components --- + pub split_product: SplitProductScore, + + // --- 11 features at joint apex --- + pub features: ApexFeatures, + + // --- Peak discrimination --- + pub delta_next: f32, + pub delta_second_next: f32, + + // --- Retained from current (used downstream) --- pub lazyscore: f32, pub lazyscore_vs_baseline: f32, pub lazyscore_z: f32, - pub ms2_corr_v_gauss: f32, - - // --- MS1 Features --- - pub ms1_corr_v_gauss: f32, - pub ms1_cosine_ref_sim: f32, - pub ms1_coelution_score: f32, + pub npeaks: u8, + pub ms2_summed_intensity: f32, pub ms1_summed_intensity: f32, - - // --- Shape Features --- pub raising_cycles: u8, pub falling_cycles: u8, } @@ -142,64 +161,58 @@ pub struct ApexScore { /// Stores time-resolved scores for every cycle in the chromatogram. #[derive(Debug, Clone, Serialize)] pub struct ScoreTraces { - pub ms1_cosine_ref_sim: Vec, - pub ms1_coelution_score: Vec, - pub ms1_corr_v_gauss: Vec, + /// Per-cycle cosine similarity (sqrt-transformed expected). pub ms2_cosine_ref_sim: Vec, - pub ms2_coelution_score: Vec, + /// Per-cycle lazyscore (kept for baseline lambda computation). pub ms2_lazyscore: Vec, - pub ms2_corr_v_gauss: Vec, + /// Per-cycle Scribe score. + pub ms2_scribe: Vec, + /// Per-cycle log1p(sum(fragment_intensities)). + pub ms2_log_intensity: Vec, + /// Per-cycle summed precursor intensity (keys >= 0 only). + pub ms1_precursor_trace: Vec, + /// Composite apex profile for peak picking. 
pub main_score: Vec, } impl ScoreTraces { pub fn new_with_capacity(capacity: usize) -> Self { Self { - ms1_cosine_ref_sim: Vec::with_capacity(capacity), - ms1_coelution_score: Vec::with_capacity(capacity), - ms1_corr_v_gauss: Vec::with_capacity(capacity), ms2_cosine_ref_sim: Vec::with_capacity(capacity), - ms2_coelution_score: Vec::with_capacity(capacity), ms2_lazyscore: Vec::with_capacity(capacity), - ms2_corr_v_gauss: Vec::with_capacity(capacity), + ms2_scribe: Vec::with_capacity(capacity), + ms2_log_intensity: Vec::with_capacity(capacity), + ms1_precursor_trace: Vec::with_capacity(capacity), main_score: Vec::with_capacity(capacity), } } pub fn clear(&mut self) { - self.ms1_cosine_ref_sim.clear(); - self.ms1_coelution_score.clear(); - self.ms1_corr_v_gauss.clear(); self.ms2_cosine_ref_sim.clear(); - self.ms2_coelution_score.clear(); self.ms2_lazyscore.clear(); - self.ms2_corr_v_gauss.clear(); + self.ms2_scribe.clear(); + self.ms2_log_intensity.clear(); + self.ms1_precursor_trace.clear(); self.main_score.clear(); } /// Resize all buffers to the specified length (filling with 0.0). pub fn resize(&mut self, len: usize) { - self.ms1_cosine_ref_sim.resize(len, 0.0); - self.ms1_coelution_score.resize(len, 0.0); - self.ms1_corr_v_gauss.resize(len, 0.0); self.ms2_cosine_ref_sim.resize(len, 0.0); - self.ms2_coelution_score.resize(len, 0.0); self.ms2_lazyscore.resize(len, 0.0); - self.ms2_corr_v_gauss.resize(len, 0.0); - // main_score is computed later, so we just reserve/clear usually, - // but for safety we can resize it too. 
+ self.ms2_scribe.resize(len, 0.0); + self.ms2_log_intensity.resize(len, 0.0); + self.ms1_precursor_trace.resize(len, 0.0); self.main_score.resize(len, 0.0); } pub fn iter_scores(&self) -> impl Iterator + '_ { vec![ - ("ms1_cosine_ref_sim", &self.ms1_cosine_ref_sim[..]), - ("ms1_coelution_score", &self.ms1_coelution_score[..]), - ("ms1_corr_v_gauss", &self.ms1_corr_v_gauss[..]), ("ms2_cosine_ref_sim", &self.ms2_cosine_ref_sim[..]), - ("ms2_coelution_score", &self.ms2_coelution_score[..]), ("ms2_lazyscore", &self.ms2_lazyscore[..]), - ("ms2_corr_v_gauss", &self.ms2_corr_v_gauss[..]), + ("ms2_scribe", &self.ms2_scribe[..]), + ("ms2_log_intensity", &self.ms2_log_intensity[..]), + ("ms1_precursor_trace", &self.ms1_precursor_trace[..]), ("main_score", &self.main_score[..]), ] .into_iter() @@ -215,10 +228,14 @@ pub struct ApexFinder { #[derive(Debug)] struct ApexFinderBuffers { + /// Cosine numerator: sum(obs * sqrt(exp)) per cycle. temp_ms2_dot_prod: Vec, + /// Cosine denominator: sum(obs^2) per cycle. temp_ms2_norm_sq_obs: Vec, - temp_ms1_dot_prod: Vec, - temp_ms1_norm_sq_obs: Vec, + /// Scribe: sum(sqrt(obs)) per cycle. + temp_sqrt_sum: Vec, + /// Log-intensity: sum(obs) per cycle (finalized as log1p). + temp_raw_intensity_sum: Vec, } impl ApexFinderBuffers { @@ -226,27 +243,23 @@ impl ApexFinderBuffers { Self { temp_ms2_dot_prod: vec![0.0f32; size], temp_ms2_norm_sq_obs: vec![0.0f32; size], - temp_ms1_dot_prod: vec![0.0f32; size], - temp_ms1_norm_sq_obs: vec![0.0f32; size], + temp_sqrt_sum: vec![0.0f32; size], + temp_raw_intensity_sum: vec![0.0f32; size], } } fn clear(&mut self) { - // I can maybe cut some corners by not zeroing the whole vec, - // but just resizing later. - // Since every value is over-written anyway. - // ... This feels safer though. 
self.temp_ms2_dot_prod.fill(0.0); self.temp_ms2_norm_sq_obs.fill(0.0); - self.temp_ms1_dot_prod.fill(0.0); - self.temp_ms1_norm_sq_obs.fill(0.0); + self.temp_sqrt_sum.fill(0.0); + self.temp_raw_intensity_sum.fill(0.0); } fn resize(&mut self, len: usize) { self.temp_ms2_dot_prod.resize(len, 0.0); self.temp_ms2_norm_sq_obs.resize(len, 0.0); - self.temp_ms1_dot_prod.resize(len, 0.0); - self.temp_ms1_norm_sq_obs.resize(len, 0.0); + self.temp_sqrt_sum.resize(len, 0.0); + self.temp_raw_intensity_sum.resize(len, 0.0); } } @@ -258,7 +271,87 @@ impl ApexFinder { } } - /// Find the peptide apex within the provided scoring context. + /// Build cosine and scribe profiles from traces. + /// cosine_profile[i] = cos^3 * intensity, scribe_profile[i] = scribe * intensity. + fn build_profiles(&self) -> (Vec, Vec) { + let n = self.traces.ms2_cosine_ref_sim.len(); + let mut cosine_profile = Vec::with_capacity(n); + let mut scribe_profile = Vec::with_capacity(n); + for i in 0..n { + let cos = self.traces.ms2_cosine_ref_sim[i]; + let intensity = self.traces.ms2_log_intensity[i]; + cosine_profile.push(cos * cos * cos * intensity); + scribe_profile.push(self.traces.ms2_scribe[i] * intensity); + } + (cosine_profile, scribe_profile) + } + + /// Phase 1: Find apex location using broad extraction. + /// + /// Returns a lightweight `ApexLocation` with just the peak location and + /// a basic score (apex profile value). Sufficient for calibrant ranking. 
+ #[cfg_attr( + feature = "instrumentation", + tracing::instrument(skip(self, scoring_ctx, rt_mapper), level = "trace") + )] + pub fn find_apex_location( + &mut self, + scoring_ctx: &ScoringContext, + rt_mapper: &dyn Fn(usize) -> u32, + ) -> Result { + let collector = &scoring_ctx.query_values; + let n_cycles = collector.num_cycles(); + + self.traces.clear(); + self.traces.resize(n_cycles); + self.buffers.clear(); + self.buffers.resize(n_cycles); + + self.compute_pass_1(scoring_ctx)?; + self.compute_main_score_trace(); + + // Peak-pick on apex profile + let peak_picker = PeakPicker::new(&self.traces.main_score); + let (max_val, max_loc) = match peak_picker.next_peak() { + Some(p) => p, + None => { + return Err(DataProcessingError::ExpectedNonEmptyData { + context: Some("No main score found".into()), + }); + } + }; + if max_val == 0.0 { + return Err(DataProcessingError::ExpectedNonEmptyData { + context: Some("No non-0 main score".into()), + }); + } + + let (raising_cycles, falling_cycles) = self.calculate_rise_and_fall_cycles(max_loc); + let cycle_offset = scoring_ctx.query_values.cycle_offset(); + let retention_time_ms = rt_mapper(max_loc + cycle_offset); + + // Compute split product score for calibrant ranking + let (cosine_profile, scribe_profile) = self.build_profiles(); + + let split_product = compute_split_product( + &cosine_profile, + &scribe_profile, + &scoring_ctx.query_values.fragments, + &scoring_ctx.expected_intensities.fragment_intensities, + ); + + Ok(ApexLocation { + score: split_product.base_score, + retention_time_ms, + apex_cycle: max_loc, + raising_cycles, + falling_cycles, + }) + } + + /// Phase 3: Full scoring on a (narrow) extraction. + /// + /// Computes traces, apex profile, split product, 11 features, and weighted score. #[cfg_attr( feature = "instrumentation", tracing::instrument(skip(self, scoring_ctx, rt_mapper), level = "trace") @@ -277,26 +370,18 @@ impl ApexFinder { self.buffers.clear(); self.buffers.resize(n_cycles); - // 2. 
Compute scores (Two-Pass approach) + // 2. Compute per-cycle scores (single pass) self.compute_pass_1(scoring_ctx)?; - self.compute_pass_2(scoring_ctx)?; - - // 3. Smooth scores - self.smooth_scores(); - // 4. Compute Main Score (Composite) + // 3. Compute apex profile (cos^3 * I combined with scribe * I) self.compute_main_score_trace(); - // 5. Find Apex and Extract Features + // 4. Find apex and extract features self.extract_apex_score(scoring_ctx, &rt_mapper) } - /// Pass 1: Scores that depend only on individual ion traces. - /// - Lazyscore (Hyperscore approximation) - /// - Cosine Similarity vs Expected Intensities - /// - Gaussian Shape Correlation - /// - /// Can be an error if no valid ions are found for scoring. + /// Single-pass scoring: cosine (sqrt-transformed), scribe, lazyscore, + /// log-intensity, and precursor trace. #[cfg_attr( feature = "instrumentation", tracing::instrument(skip_all, level = "trace") @@ -308,15 +393,18 @@ impl ApexFinder { let collector = &scoring_ctx.query_values; // --- MS2 (Fragments) --- - // We use accumulators to compute cosine similarity and lazyscore in one go. 
- // Lazyscore ~ Sum(ln(1 + intensity)) - // Cosine ~ DotProduct(obs, exp) / (Norm(obs) * Norm(exp)) - let ms2_dot_prod = &mut self.buffers.temp_ms2_dot_prod; let ms2_norm_sq_obs = &mut self.buffers.temp_ms2_norm_sq_obs; - let mut ms2_norm_sq_exp = 0.0f32; // Scalar, since expected is a single vector + let sqrt_sum = &mut self.buffers.temp_sqrt_sum; + let raw_sum = &mut self.buffers.temp_raw_intensity_sum; + // Sum of sqrt(expected) for cosine norm: ||sqrt(exp)||^2 = sum(exp) + let mut ms2_sum_exp = 0.0f32; - for ((key, _mz), chrom) in collector.fragments.iter_mzs() { + // Pre-compute pred_norm for scribe + let mut pred_norms: Vec<(usize, f32)> = Vec::new(); + let mut pred_sqrt_sum = 0.0f32; + + for (row_idx, ((key, _mz), chrom)) in collector.fragments.iter_mzs().enumerate() { let expected = scoring_ctx .expected_intensities .fragment_intensities @@ -327,178 +415,145 @@ impl ApexFinder { if expected <= 0.0 { continue; } - ms2_norm_sq_exp += expected * expected; + + let sqrt_exp = expected.sqrt(); + ms2_sum_exp += expected; // sqrt_exp * sqrt_exp = expected + pred_norms.push((row_idx, expected.sqrt())); + pred_sqrt_sum += sqrt_exp; for (i, &intensity) in chrom.iter().enumerate() { if intensity > 0.0 { - // Lazyscore: lnfact of sum of logs... simplified here to sum of logs - // Actual implementation in `hyperscore.rs` uses `lnfact_f32(sum(ln(x)))`. - // We'll accumulate the log sums here. 
- let ln_val = intensity.max(1.0).ln(); - self.traces.ms2_lazyscore[i] += ln_val; - - // Cosine parts - ms2_dot_prod[i] += intensity * expected; + // Lazyscore accumulation + self.traces.ms2_lazyscore[i] += intensity.max(1.0).ln(); + // Cosine: dot(obs, sqrt(exp)) + ms2_dot_prod[i] += intensity * sqrt_exp; + // Cosine: obs norm ms2_norm_sq_obs[i] += intensity * intensity; + // Scribe: sqrt(obs) sum + sqrt_sum[i] += intensity.sqrt(); } + // Raw intensity sum (for log-intensity, includes zeros) + raw_sum[i] += intensity.max(0.0); } } - // Finalize MS2 Cosine & Lazyscore - let ms2_norm_exp = ms2_norm_sq_exp.sqrt(); - for i in 0..self.traces.ms2_cosine_ref_sim.len() { - // Finalize Lazyscore + // Finalize cosine, lazyscore, log-intensity + let norm_sqrt_exp = ms2_sum_exp.sqrt(); // ||sqrt(exp)|| = sqrt(sum(exp)) + let n = self.traces.ms2_cosine_ref_sim.len(); + for i in 0..n { + // Lazyscore self.traces.ms2_lazyscore[i] = crate::utils::math::lnfact_f32(self.traces.ms2_lazyscore[i]); - // Finalize Cosine + // Cosine (sqrt-transformed expected) let obs_norm = ms2_norm_sq_obs[i].sqrt(); - if obs_norm > 0.0 && ms2_norm_exp > 0.0 { - self.traces.ms2_cosine_ref_sim[i] = ms2_dot_prod[i] / (obs_norm * ms2_norm_exp); - // Clip to valid range and min value + if obs_norm > 0.0 && norm_sqrt_exp > 0.0 { self.traces.ms2_cosine_ref_sim[i] = - self.traces.ms2_cosine_ref_sim[i].max(1e-3).min(1.0); + (ms2_dot_prod[i] / (obs_norm * norm_sqrt_exp)).clamp(1e-3, 1.0); } else { self.traces.ms2_cosine_ref_sim[i] = 1e-3; } + + // Log-intensity + self.traces.ms2_log_intensity[i] = raw_sum[i].ln_1p(); } - // --- MS2 Gaussian Correlation --- - // We can reuse the existing function from `corr_v_ref` as it takes `MzMajorIntensityArray`. - // It iterates ions internally. 
- corr_v_ref::calculate_cosine_with_ref_gaussian_into( - &collector.fragments, - |_| true, // Filter: take all - &mut self.traces.ms2_corr_v_gauss, - )?; - - // --- MS1 (Precursors) --- - // Similar logic for MS1 - let ms1_dot_prod = &mut self.buffers.temp_ms1_dot_prod; - let ms1_norm_sq_obs = &mut self.buffers.temp_ms1_norm_sq_obs; - let mut ms1_norm_sq_exp = 0.0f32; + // Finalize scribe (inline, reusing sqrt_sum buffer from accumulation above) + if pred_sqrt_sum > 0.0 && !pred_norms.is_empty() { + // Normalize pred_norms in-place + for entry in pred_norms.iter_mut() { + entry.1 /= pred_sqrt_sum; + } - for ((key, _mz), chrom) in collector.precursors.iter_mzs() { - let expected = scoring_ctx - .expected_intensities - .precursor_intensities - .get(key) - .copied() - .unwrap_or(0.0); - if expected <= 0.0 { - continue; + // Pass B: accumulate SSE + for &(row_idx, pred_norm_i) in &pred_norms { + let row = collector + .fragments + .get_row_idx(row_idx) + .expect("row_idx from enumeration must be valid"); + for (t, &intensity) in row.iter().enumerate() { + if sqrt_sum[t] == 0.0 { + continue; + } + let obs_norm_i = if intensity > 0.0 { + intensity.sqrt() / sqrt_sum[t] + } else { + 0.0 + }; + let diff = obs_norm_i - pred_norm_i; + self.traces.ms2_scribe[t] += diff * diff; + } } - ms1_norm_sq_exp += expected * expected; - for (i, &intensity) in chrom.iter().enumerate() { - if intensity > 0.0 { - ms1_dot_prod[i] += intensity * expected; - ms1_norm_sq_obs[i] += intensity * intensity; + // Finalize scribe: -log(sse) + for t in 0..n { + if sqrt_sum[t] == 0.0 { + self.traces.ms2_scribe[t] = SCRIBE_FLOOR; + } else { + let sse = self.traces.ms2_scribe[t].max(f32::EPSILON); + self.traces.ms2_scribe[t] = -sse.ln(); } } + } else { + self.traces.ms2_scribe.fill(SCRIBE_FLOOR); } - let ms1_norm_exp = ms1_norm_sq_exp.sqrt(); - for i in 0..self.traces.ms1_cosine_ref_sim.len() { - let obs_norm = ms1_norm_sq_obs[i].sqrt(); - if obs_norm > 0.0 && ms1_norm_exp > 0.0 { - 
self.traces.ms1_cosine_ref_sim[i] = ms1_dot_prod[i] / (obs_norm * ms1_norm_exp); - self.traces.ms1_cosine_ref_sim[i] = - self.traces.ms1_cosine_ref_sim[i].max(1e-3).min(1.0); - } else { - self.traces.ms1_cosine_ref_sim[i] = 1e-3; + // --- MS1 Precursor trace --- + for ((key, _mz), chrom) in collector.precursors.iter_mzs() { + if *key < 0 { + continue; // Skip decoy isotope keys + } + for (i, &intensity) in chrom.iter().enumerate() { + if intensity > 0.0 { + self.traces.ms1_precursor_trace[i] += intensity; + } } } - corr_v_ref::calculate_cosine_with_ref_gaussian_into( - &collector.precursors, - |&k| k >= 0, // Filter: Ignore negative keys (decoys/invalid) - &mut self.traces.ms1_corr_v_gauss, - )?; - - Ok(()) - } - - /// Pass 2: Scores that depend on the results of Pass 1. - /// - Coelution Score (compares individual ion traces vs the aggregated Lazyscore trace) - #[cfg_attr( - feature = "instrumentation", - tracing::instrument(skip_all, level = "trace") - )] - fn compute_pass_2( - &mut self, - scoring_ctx: &ScoringContext, - ) -> Result<(), DataProcessingError> { - let collector = &scoring_ctx.query_values; - - // Apply smoothing to Lazyscore BEFORE using it as a reference for Coelution - // Note: The original code smoothed EVERYTHING at the end. - // But Coelution uses `ms2_lazyscore` as the reference shape. - // Does it use the smoothed or raw? - // Checking `calculate_scores.rs`: `smooth_scores()` happens AFTER `calculate_coelution_scores`. - // So it uses the RAW lazyscore. Okay, we proceed with raw. 
- - // MS2 Coelution - coelution_vref_score_filter_into( - &collector.fragments, - &self.traces.ms2_lazyscore, - COELUTION_WINDOW_WIDTH, - &|_| true, - &mut self.traces.ms2_coelution_score, - )?; - - // MS1 Coelution (vs MS2 Lazyscore reference) - coelution_vref_score_filter_into( - &collector.precursors, - &self.traces.ms2_lazyscore, - COELUTION_WINDOW_WIDTH, - &|x: &i8| *x >= 0i8, - &mut self.traces.ms1_coelution_score, - )?; - Ok(()) } - #[cfg_attr( - feature = "instrumentation", - tracing::instrument(skip_all, level = "trace") - )] - fn smooth_scores(&mut self) { - gaussblur_in_place(&mut self.traces.ms2_lazyscore); - gaussblur_in_place(&mut self.traces.ms1_coelution_score); - gaussblur_in_place(&mut self.traces.ms2_coelution_score); - gaussblur_in_place(&mut self.traces.ms2_cosine_ref_sim); - gaussblur_in_place(&mut self.traces.ms1_cosine_ref_sim); - gaussblur_in_place(&mut self.traces.ms2_corr_v_gauss); - gaussblur_in_place(&mut self.traces.ms1_corr_v_gauss); - } - + /// Compute the apex profile from cosine and scribe traces. 
+ /// + /// apex_profile(t) = C(t) * (0.5 + S_norm(t)) + /// where C(t) = cosine(t)^3 * I(t) + /// S(t) = scribe(t) * I(t) + /// S_norm = (S - min(S)) / (max(S) - min(S)) #[cfg_attr( feature = "instrumentation", tracing::instrument(skip_all, level = "trace") )] fn compute_main_score_trace(&mut self) { - let len = self.traces.ms1_corr_v_gauss.len(); + let len = self.traces.ms2_cosine_ref_sim.len(); self.traces.main_score.clear(); self.traces.main_score.reserve(len); - const MS1_SCALING: f32 = 0.75; - const MS1_OFFSET: f32 = 0.25; + // Compute S(t) = scribe(t) * I(t), find min/max for normalization + let mut s_min = f32::INFINITY; + let mut s_max = f32::NEG_INFINITY; + // Temp: compute S values inline for i in 0..len { - let ms1_cos_score = - MS1_SCALING + (MS1_OFFSET * self.traces.ms1_coelution_score[i].max(1e-3).powi(2)); - let ms1_gauss_score = - MS1_SCALING + (MS1_OFFSET * self.traces.ms1_corr_v_gauss[i].max(1e-3).powi(2)); - - let mut loc_score = 1.0; - loc_score *= ms1_cos_score; - loc_score *= ms1_gauss_score; - loc_score *= self.traces.ms2_cosine_ref_sim[i].max(1e-3).powi(2); - loc_score *= self.traces.ms2_coelution_score[i].max(1e-3).powi(2); - loc_score *= self.traces.ms2_corr_v_gauss[i].max(1e-3).powi(2); - - self.traces.main_score.push(loc_score); + let s = self.traces.ms2_scribe[i] * self.traces.ms2_log_intensity[i]; + s_min = s_min.min(s); + s_max = s_max.max(s); + } + + let s_range = s_max - s_min; + + for i in 0..len { + let cos = self.traces.ms2_cosine_ref_sim[i]; + let intensity = self.traces.ms2_log_intensity[i]; + let c = cos * cos * cos * intensity; // cos^3 * I + + let s = self.traces.ms2_scribe[i] * intensity; + let s_norm = if s_range > 0.0 { + (s - s_min) / s_range + } else { + 0.5 // Degrade to cosine-only when scribe is constant + }; + + self.traces.main_score.push(c * (0.5 + s_norm)); } } @@ -525,39 +580,64 @@ impl ApexFinder { }); } - // Calculate Peak Shape (Raising/Falling) to determine width + // Peak shape (rise/fall) for delta 
computation let (raising_cycles, falling_cycles) = self.calculate_rise_and_fall_cycles(max_loc); - // Mask the current peak + // Mask and find next peaks for delta scores peak_picker.mask_peak(max_loc, raising_cycles as usize, falling_cycles as usize, 2); - - // Find next peaks let (next_val, next_loc) = peak_picker.next_peak().unwrap_or((0.0, max_loc)); - - // For second next, we need to mask the 'next' peak properly. let (next_raise, next_fall) = self.calculate_rise_and_fall_cycles(next_loc); peak_picker.mask_peak(next_loc, next_raise as usize, next_fall as usize, 1); - let (second_next_val, _) = peak_picker.next_peak().unwrap_or((0.0, max_loc)); let delta_next = max_val - next_val; let delta_second_next = max_val - second_next_val; - // Extract features at max_loc + // Build intermediate profiles for split product and features + let (cosine_profile, scribe_profile) = self.build_profiles(); + + // Split product score (independent cosine/scribe apexes) + let split_product = compute_split_product( + &cosine_profile, + &scribe_profile, + &scoring_ctx.query_values.fragments, + &scoring_ctx.expected_intensities.fragment_intensities, + ); + + // Joint precursor-fragment apex + let joint_apex = find_joint_apex(&cosine_profile, &self.traces.ms1_precursor_trace); + + // 11 features at joint apex + let n_cycles = cosine_profile.len(); + let features = compute_apex_features( + &scoring_ctx.query_values.fragments, + &scoring_ctx.query_values.precursors, + &scoring_ctx.expected_intensities, + &cosine_profile, + &self.traces.ms1_precursor_trace, + joint_apex, + n_cycles, + ); + + // Weighted final score + let score = compute_weighted_score(split_product.base_score, &features); + + // RT at joint apex let cycle_offset = scoring_ctx.query_values.cycle_offset(); - let global_loc = max_loc + cycle_offset; + let global_loc = joint_apex + cycle_offset; let retention_time_ms = rt_mapper(global_loc); - let (ms1_summed_intensity, _ms1_npeaks) = - 
self.sum_intensities_at(&scoring_ctx.query_values.precursors, max_loc); + // Intensity counts at joint apex + let (ms1_summed_intensity, _) = + self.sum_intensities_at(&scoring_ctx.query_values.precursors, joint_apex); let (ms2_summed_intensity, ms2_npeaks) = - self.sum_intensities_at(&scoring_ctx.query_values.fragments, max_loc); + self.sum_intensities_at(&scoring_ctx.query_values.fragments, joint_apex); - // Calculate Lambda (Baseline noise level) + // Lazyscore baseline stats let lambda = self.calculate_baseline_lambda(max_loc, raising_cycles, falling_cycles); - let k = self.traces.ms2_lazyscore[max_loc] as f64; + let k = self.traces.ms2_lazyscore[joint_apex] as f64; let norm_lazy_std = lambda.sqrt().max(1.0) as f32; - let lazyscore_z = self.traces.ms2_lazyscore[max_loc] / norm_lazy_std; + let lazyscore_z = self.traces.ms2_lazyscore[joint_apex] / norm_lazy_std; if lazyscore_z.is_nan() { return Err(DataProcessingError::ExpectedFiniteNonNanData { @@ -566,21 +646,18 @@ impl ApexFinder { } Ok(ApexScore { - score: max_val, + score, + retention_time_ms, + joint_apex_cycle: joint_apex, + split_product, + features, delta_next, delta_second_next, - retention_time_ms, - ms2_cosine_ref_sim: self.traces.ms2_cosine_ref_sim[max_loc], - ms2_coelution_score: self.traces.ms2_coelution_score[max_loc], - ms2_summed_intensity, - npeaks: ms2_npeaks as u8, - lazyscore: self.traces.ms2_lazyscore[max_loc], + lazyscore: self.traces.ms2_lazyscore[joint_apex], lazyscore_vs_baseline: (k / lambda) as f32, lazyscore_z, - ms2_corr_v_gauss: self.traces.ms2_corr_v_gauss[max_loc], - ms1_corr_v_gauss: self.traces.ms1_corr_v_gauss[max_loc], - ms1_cosine_ref_sim: self.traces.ms1_cosine_ref_sim[max_loc], - ms1_coelution_score: self.traces.ms1_coelution_score[max_loc], + npeaks: ms2_npeaks as u8, + ms2_summed_intensity, ms1_summed_intensity, raising_cycles, falling_cycles, diff --git a/rust/timsseek/src/scoring/mod.rs b/rust/timsseek/src/scoring/mod.rs index 9b6d5f2..c86ba95 100644 --- 
a/rust/timsseek/src/scoring/mod.rs +++ b/rust/timsseek/src/scoring/mod.rs @@ -7,19 +7,17 @@ mod scores; pub mod search_results; pub mod timings; -// RN I am not the biggest fan of exposig this -pub use scores::{ - coelution, - hyperscore, -}; +pub use scores::hyperscore; pub use pipeline::{ + CalibrantCandidate, + CalibrantHeap, + CalibrationConfig, ScoringPipeline, ToleranceHierarchy, }; pub use search_results::IonSearchResults; -pub use timings::ScoreTimings; +pub use timings::{PipelineTimings, ScoreTimings}; pub const NUM_MS2_IONS: usize = 7; pub const NUM_MS1_IONS: usize = 3; -pub const COELUTION_WINDOW_WIDTH: usize = 7; diff --git a/rust/timsseek/src/scoring/pipeline.rs b/rust/timsseek/src/scoring/pipeline.rs index 1357b02..c076e01 100644 --- a/rust/timsseek/src/scoring/pipeline.rs +++ b/rust/timsseek/src/scoring/pipeline.rs @@ -5,27 +5,19 @@ //! The scoring pipeline processes thousands of peptide queries per second. To achieve this //! throughput, it's critical to minimize allocations in the hot path. //! -//! ## Why `process_query()` Exists -//! //! Each scoring operation requires several buffers: //! - `ApexFinder` holds time-series feature buffers (size varies with query) //! - Chromatogram collectors (size varies with query complexity) //! -//! Creating a new `ApexFinder` for every query incurs allocation overhead. The solution -//! is **buffer reuse**: -//! -//! 1. `process_query()` accepts a mutable `&mut ApexFinder` reference -//! 2. `process_batch()` uses Rayon's `map_init()` to create one buffer per thread -//! 3. Each thread reuses its buffer across thousands of queries +//! `prescore_batch` and `score_calibrated_batch` use Rayon's `map_init()` / `fold` to create +//! one `ApexFinder` buffer per thread, which is reused across thousands of queries. //! //! ## Scoring Pipeline //! -//! The scoring process has four stages with separated metadata and scoring data: +//! The scoring process has two phases: //! -//! 1. 
**Context Build** (27% of time): Extract chromatograms, separate metadata from scoring data -//! 2. **Apex Finding** (62% of time): Find peak apex using time-series features (bottleneck) -//! 3. **Refinement** (11% of time): Two-pass query at detected apex with narrow tolerances -//! 4. **Finalization** (<1% of time): Assemble final results with calculated offsets and metadata +//! 1. **Prescore** (Phase 1): Broad extraction + `find_apex_location` — yields calibrant candidates. +//! 2. **Calibrated scoring** (Phase 3): Narrow calibrated extraction + `find_apex` + secondary query. use crate::errors::DataProcessingError; use crate::utils::elution_group_ops::isotope_offset_fragments; @@ -54,10 +46,11 @@ use timsquery::{ use super::accumulator::IonSearchAccumulator; use super::apex_finding::{ ApexFinder, + ApexLocation, ApexScore, + PeptideMetadata, RelativeIntensities, }; -// use super::calculate_scores::RelativeIntensities; // Removed use super::full_results::FullQueryResult; use super::hyperscore::single_lazyscore; use super::offsets::MzMobilityOffsets; @@ -66,10 +59,8 @@ use super::search_results::{ SearchResultBuilder, }; use super::timings::ScoreTimings; -use tracing::{ - info, - warn, -}; +use crate::rt_calibration::CalibrationResult; +use tracing::warn; /// Hierarchical tolerance configuration for the scoring pipeline. /// @@ -92,6 +83,172 @@ impl ToleranceHierarchy { } } +/// Lightweight calibrant candidate — just enough to re-query in Phase 2. +/// Implements Ord by score (ascending) for use in BinaryHeap>. 
+#[derive(Debug, Clone)] +pub struct CalibrantCandidate { + pub score: f32, + pub apex_rt_seconds: f32, + pub speclib_index: usize, +} + +impl PartialEq for CalibrantCandidate { + fn eq(&self, other: &Self) -> bool { + self.score == other.score + } +} + +impl Eq for CalibrantCandidate {} + +impl PartialOrd for CalibrantCandidate { + fn partial_cmp(&self, other: &Self) -> Option { + self.score.partial_cmp(&other.score) + } +} + +impl Ord for CalibrantCandidate { + fn cmp(&self, other: &Self) -> std::cmp::Ordering { + self.partial_cmp(other).unwrap_or(std::cmp::Ordering::Equal) + } +} + +/// Bounded min-heap: keeps only the top-N candidates by score. +/// Uses Reverse so the *smallest* score is at the top (ejected first). +pub struct CalibrantHeap { + heap: std::collections::BinaryHeap>, + capacity: usize, +} + +impl CalibrantHeap { + pub fn new(capacity: usize) -> Self { + Self { + heap: std::collections::BinaryHeap::with_capacity(capacity + 1), + capacity, + } + } + + pub fn push(&mut self, candidate: CalibrantCandidate) { + if !candidate.score.is_finite() || candidate.score <= 0.0 { + return; + } + if self.heap.len() < self.capacity { + self.heap.push(std::cmp::Reverse(candidate)); + } else if let Some(std::cmp::Reverse(min)) = self.heap.peek() { + if candidate.score > min.score { + self.heap.pop(); + self.heap.push(std::cmp::Reverse(candidate)); + } + } + } + + pub fn merge(mut self, other: Self) -> Self { + for item in other.heap { + self.push(item.0); + } + self + } + + pub fn into_vec(self) -> Vec { + self.heap.into_iter().map(|std::cmp::Reverse(c)| c).collect() + } + + pub fn len(&self) -> usize { + self.heap.len() + } +} + +/// Calibration configuration — all tunable parameters with defaults. 
+#[derive(Debug, Clone)] +pub struct CalibrationConfig { + pub n_calibrants: usize, + pub grid_size: usize, + pub mz_sigma: f32, + pub mobility_sigma: f32, + pub rt_sigma_factor: f32, + pub min_rt_tolerance_minutes: f32, + pub lowess_frac: f32, + pub calibration_query_rt_window_minutes: f32, + pub dp_lookback: usize, +} + +impl Default for CalibrationConfig { + fn default() -> Self { + Self { + n_calibrants: 2000, + grid_size: 100, + mz_sigma: 2.0, + mobility_sigma: 3.0, + rt_sigma_factor: 3.0, + min_rt_tolerance_minutes: 0.5, + lowess_frac: 0.5, + calibration_query_rt_window_minutes: 0.5, + dp_lookback: 30, + } + } +} + +/// Number of top fragments to retain for scoring (by predicted intensity). +const TOP_N_FRAGMENTS: usize = 8; + +/// Retain only the top `n` fragments by predicted intensity. +/// +/// Removes lower-ranked fragments from the chromatogram collector (fragments array + eg) +/// and from expected intensities, maintaining the invariant that all three agree on count. +fn select_top_n_fragments( + agg: &mut ChromatogramCollector, + expected: &mut crate::ExpectedIntensities, + n: usize, +) { + let n_frags = agg.fragments.num_ions(); + if n_frags <= n { + return; + } + + // Build (positional_index, key, expected_intensity) for all fragments + let mut indexed: Vec<(usize, T, f32)> = agg + .fragments + .iter_mzs() + .enumerate() + .map(|(idx, ((key, _mz), _chrom))| { + let intensity = expected + .fragment_intensities + .get(key) + .copied() + .unwrap_or(0.0); + (idx, key.clone(), intensity) + }) + .collect(); + + // Sort descending by expected intensity + indexed.sort_by(|a, b| b.2.partial_cmp(&a.2).unwrap_or(std::cmp::Ordering::Equal)); + + // Collect indices and keys to drop (everything beyond top N) + let mut to_drop: Vec<(usize, T)> = indexed + .into_iter() + .skip(n) + .map(|(idx, key, _)| (idx, key)) + .collect(); + + // Sort drop-indices descending so highest index is removed first (avoids shift) + to_drop.sort_by(|a, b| b.0.cmp(&a.0)); + + for (idx, 
key) in to_drop { + agg.fragments + .drop_row_idx(idx) + .expect("index should be in bounds"); + agg.eg + .try_drop_fragment(&key) + .expect("key should exist in eg"); + expected.fragment_intensities.remove(&key); + } + + debug_assert_eq!(agg.fragments.num_ions(), agg.eg.fragment_count()); + debug_assert_eq!( + agg.fragments.num_ions(), + expected.fragment_intensities.len() + ); +} + /// Filter out zero-intensity ions and update expected intensities in one pass. /// /// This maintains index alignment by removing ions from the chromatogram collector @@ -242,6 +399,9 @@ impl ScoringPipeline { let mut expected_intensities = item.expected_intensity.clone(); filter_zero_intensity_ions(&mut agg, &mut expected_intensities); + // Retain only top-N fragments by predicted intensity for scoring + select_top_n_fragments(&mut agg, &mut expected_intensities, TOP_N_FRAGMENTS); + let metadata = super::apex_finding::PeptideMetadata { digest: item.digest.clone(), charge: item.query.precursor_charge(), @@ -377,64 +537,6 @@ impl ScoringPipeline { } impl ScoringPipeline { - pub fn process_query( - &self, - item: QueryItemToScore, - buffer: &mut ApexFinder, - timings: &mut ScoreTimings, - ) -> Option { - let st = Instant::now(); - let (metadata, scoring_ctx) = match self.build_candidate_context(&item) { - Ok(result) => result, - Err(SkippingReason::RetentionTimeOutOfBounds) => { - return None; - } - }; - timings.prescore += st.elapsed(); - - if scoring_ctx - .expected_intensities - .fragment_intensities - .is_empty() - { - return None; - } - - let st = Instant::now(); - let apex_score = - match buffer.find_apex(&scoring_ctx, &|idx| self.map_rt_index_to_milis(idx)) { - Ok(score) => score, - Err(_e) => { - return None; - } - }; - timings.localize += st.elapsed(); - - let st = Instant::now(); - let (inner_collector, isotope_collector) = self.execute_secondary_query(&item, &apex_score); - timings.secondary_query += st.elapsed(); - - let nqueries = 
scoring_ctx.query_values.fragments.num_ions() as u8; - - let st = Instant::now(); - let out = self.finalize_results( - &metadata, - nqueries, - &apex_score, - &inner_collector, - &isotope_collector, - ); - timings.finalization += st.elapsed(); - - match out { - Ok(res) => Some(res), - Err(e) => { - warn!("Error in scoring: {:?}", e); - None - } - } - } - pub fn process_query_full( &self, item: QueryItemToScore, @@ -473,19 +575,158 @@ impl ScoringPipeline { }) } + /// Build a chromatogram extraction using calibrated RT and per-query tolerance. + /// The speclib is NOT mutated — CalibrationResult provides the RT conversion. + #[cfg_attr( + feature = "instrumentation", + tracing::instrument(skip_all, level = "trace") + )] + fn build_calibrated_context( + &self, + item: &QueryItemToScore, + calibration: &CalibrationResult, + ) -> Result< + ( + super::apex_finding::PeptideMetadata, + super::apex_finding::ScoringContext, + ), + SkippingReason, + > { + let original_irt = item.query.rt_seconds(); + let calibrated_rt = calibration.convert_irt(original_irt); + let tolerance = calibration.get_tolerance( + item.query.mono_precursor_mz(), + item.query.mobility_ook0(), + calibrated_rt, + ); + + let calibrated_query = item.query.clone().with_rt_seconds(calibrated_rt); + + let max_range = self.index.ms1_cycle_mapping().range_milis(); + let max_range = TupleRange::try_new(max_range.0, max_range.1) + .expect("Reference RTs should be sorted and valid"); + let rt_range = match tolerance.rt_range_as_milis(calibrated_rt) { + OptionallyRestricted::Unrestricted => max_range, + OptionallyRestricted::Restricted(r) => r, + }; + + if !max_range.intersects(rt_range) { + return Err(SkippingReason::RetentionTimeOutOfBounds); + } + + let mut agg = ChromatogramCollector::new( + calibrated_query, + rt_range, + self.index.ms1_cycle_mapping(), + ) + .map_err(|_| SkippingReason::RetentionTimeOutOfBounds)?; + + self.index.add_query(&mut agg, &tolerance); + + let mut expected_intensities = 
item.expected_intensity.clone(); + filter_zero_intensity_ions(&mut agg, &mut expected_intensities); + select_top_n_fragments(&mut agg, &mut expected_intensities, TOP_N_FRAGMENTS); + + let metadata = super::apex_finding::PeptideMetadata { + digest: item.digest.clone(), + charge: item.query.precursor_charge(), + library_id: agg.eg.id() as u32, + ref_rt_seconds: calibrated_rt, + ref_mobility_ook0: item.query.mobility_ook0(), + ref_precursor_mz: item.query.mono_precursor_mz(), + }; + + let scoring_ctx = super::apex_finding::ScoringContext { + expected_intensities, + query_values: agg, + }; + + Ok((metadata, scoring_ctx)) + } + + /// Phase 3: Score a peptide using calibrated extraction window. + /// Expects narrow calibrated extraction (from CalibrationResult). #[cfg_attr( feature = "instrumentation", - tracing::instrument(skip(self, items_to_score), level = "trace") + tracing::instrument(skip_all, level = "trace") )] - pub fn process_batch( + pub fn score_calibrated_extraction( + &self, + item: &QueryItemToScore, + calibration: &CalibrationResult, + buffer: &mut ApexFinder, + timings: &mut ScoreTimings, + ) -> Option { + let st = Instant::now(); + let (metadata, scoring_ctx) = + tracing::span!(tracing::Level::TRACE, "score_calibrated::extraction").in_scope( + || match self.build_calibrated_context(item, calibration) { + Ok(result) => Some(result), + Err(_) => None, + }, + )?; + timings.prescore += st.elapsed(); + + if scoring_ctx + .expected_intensities + .fragment_intensities + .is_empty() + { + return None; + } + + let st = Instant::now(); + let apex_score = + tracing::span!(tracing::Level::TRACE, "score_calibrated::apex_scoring").in_scope( + || { + buffer + .find_apex(&scoring_ctx, &|idx| self.map_rt_index_to_milis(idx)) + .ok() + }, + )?; + timings.localize += st.elapsed(); + + let st = Instant::now(); + let (inner_collector, isotope_collector) = + tracing::span!(tracing::Level::TRACE, "score_calibrated::secondary_query") + .in_scope(|| 
self.execute_secondary_query(item, &apex_score)); + timings.secondary_query += st.elapsed(); + + let nqueries = scoring_ctx.query_values.fragments.num_ions() as u8; + let st = Instant::now(); + let out = tracing::span!(tracing::Level::TRACE, "score_calibrated::finalize").in_scope( + || { + self.finalize_results( + &metadata, + nqueries, + &apex_score, + &inner_collector, + &isotope_collector, + ) + }, + ); + timings.finalization += st.elapsed(); + + match out { + Ok(res) => Some(res), + Err(e) => { + warn!("Error in scoring: {:?}", e); + None + } + } + } + + /// Phase 3 batch: Score all peptides with calibrated tolerances. + #[cfg_attr( + feature = "instrumentation", + tracing::instrument(skip_all, level = "trace") + )] + pub fn score_calibrated_batch( &self, items_to_score: &[QueryItemToScore], + calibration: &CalibrationResult, ) -> (Vec, ScoreTimings) { - let num_input_items = items_to_score.len(); - let loc_score_start = Instant::now(); - let init_fn = || ApexFinder::new(self.num_cycles()); - let filter_fn = |x: &&QueryItemToScore| { let tmp = x.query.get_precursor_mz_limits(); let lims = TupleRange::try_new(tmp.0, tmp.1).expect("Should already be ordered"); @@ -495,13 +736,13 @@ impl ScoringPipeline { #[cfg(not(feature = "serial_scoring"))] let results: IonSearchAccumulator = { items_to_score - .into_par_iter() - .with_min_len(512) + .par_iter() .filter(filter_fn) .map_init(init_fn, |scorer, item| { - let mut timings = ScoreTimings::default(); - let maybe_score = self.process_query(item.clone(), scorer, &mut timings); - (maybe_score, timings) + let mut t = ScoreTimings::default(); + let result = + self.score_calibrated_extraction(item, calibration, scorer, &mut t); + (result, t) }) .collect() }; @@ -513,26 +754,113 @@ impl ScoringPipeline { .iter() .filter(filter_fn) .map(|item| { - let mut timings = ScoreTimings::default(); - let maybe_score = self.process_query(item.clone(), &mut scorer, &mut timings); - (maybe_score, timings) + let mut t = 
ScoreTimings::default(); + let result = + self.score_calibrated_extraction(item, calibration, &mut scorer, &mut t); + (result, t) }) .collect() }; - let elapsed = loc_score_start.elapsed(); - let avg_speed = - std::time::Duration::from_nanos(elapsed.as_nanos() as u64 / num_input_items as u64); - let throughput = num_input_items as f64 / elapsed.as_secs_f64(); - let million_per_min = 1e-6 * throughput * 60.0; - info!( - "Scoring {} items took: {:?} throughput: {:#.1}/s, million_per_min: {:#.1}, avg: {:?}", - num_input_items, elapsed, throughput, million_per_min, avg_speed - ); + (results.res, results.timings) + } - info!("{:?}", results.timings); + /// Phase 1: Lightweight prescore — broad extraction + find_apex_location only. + /// Returns the apex location (with split product score) and metadata. + #[cfg_attr( + feature = "instrumentation", + tracing::instrument(skip_all, level = "trace") + )] + pub fn prescore( + &self, + item: &QueryItemToScore, + buffer: &mut ApexFinder, + ) -> Option<(ApexLocation, PeptideMetadata)> { + let (metadata, scoring_ctx) = tracing::span!(tracing::Level::TRACE, "prescore::extraction") + .in_scope(|| match self.build_candidate_context(item) { + Ok(result) => Some(result), + Err(SkippingReason::RetentionTimeOutOfBounds) => None, + })?; - (results.res, results.timings) + if scoring_ctx + .expected_intensities + .fragment_intensities + .is_empty() + { + return None; + } + + let apex_location = + tracing::span!(tracing::Level::TRACE, "prescore::scoring").in_scope(|| { + buffer + .find_apex_location(&scoring_ctx, &|idx| self.map_rt_index_to_milis(idx)) + .ok() + })?; + + Some((apex_location, metadata)) + } + + /// Phase 1 batch: Prescore all peptides, collecting top-N calibrant candidates via bounded heaps. 
+ #[cfg_attr( + feature = "instrumentation", + tracing::instrument(skip_all, level = "trace") + )] + pub fn prescore_batch( + &self, + items_to_score: &[QueryItemToScore], + speclib_offset: usize, + config: &CalibrationConfig, + ) -> CalibrantHeap { + let filter_fn = |x: &&QueryItemToScore| { + let tmp = x.query.get_precursor_mz_limits(); + let lims = TupleRange::try_new(tmp.0, tmp.1).expect("Should already be ordered"); + self.fragmented_range.intersects(lims) + }; + + #[cfg(not(feature = "serial_scoring"))] + let heap: CalibrantHeap = { + let init_fn = + || (ApexFinder::new(self.num_cycles()), CalibrantHeap::new(config.n_calibrants)); + + items_to_score + .par_iter() + .enumerate() + .filter(|(_, x)| filter_fn(x)) + .fold(init_fn, |(mut scorer, mut heap), (chunk_idx, item)| { + if let Some((loc, _meta)) = self.prescore(item, &mut scorer) { + heap.push(CalibrantCandidate { + score: loc.score, + apex_rt_seconds: loc.retention_time_ms as f32 / 1000.0, + speclib_index: speclib_offset + chunk_idx, + }); + } + (scorer, heap) + }) + .map(|(_, heap)| heap) + .reduce( + || CalibrantHeap::new(config.n_calibrants), + |a, b| a.merge(b), + ) + }; + + #[cfg(feature = "serial_scoring")] + let heap: CalibrantHeap = { + let mut scorer = ApexFinder::new(self.num_cycles()); + let mut heap = CalibrantHeap::new(config.n_calibrants); + for (chunk_idx, item) in items_to_score.iter().enumerate().filter(|(_, x)| filter_fn(x)) + { + if let Some((loc, _meta)) = self.prescore(item, &mut scorer) { + heap.push(CalibrantCandidate { + score: loc.score, + apex_rt_seconds: loc.retention_time_ms as f32 / 1000.0, + speclib_index: speclib_offset + chunk_idx, + }); + } + } + heap + }; + + heap } fn map_rt_index_to_milis(&self, rt_index: usize) -> u32 { diff --git a/rust/timsseek/src/scoring/scores/apex_features.rs b/rust/timsseek/src/scoring/scores/apex_features.rs new file mode 100644 index 0000000..e83ec77 --- /dev/null +++ b/rust/timsseek/src/scoring/scores/apex_features.rs @@ -0,0 +1,1285 @@ +//! 
Apex-local scoring functions for the METHODS.md Phase 3 scoring pipeline. +//! +//! These functions are computed once at specific apex locations, not per-cycle. +//! They operate on windows of the raw chromatogram data and implement the +//! 11 feature functions plus helper structs described in METHODS.md Sections 3.1-3.5. + +use std::collections::HashMap; +use timsquery::models::MzMajorIntensityArray; +use timsquery::traits::KeyLike; + +// --------------------------------------------------------------------------- +// Structs +// --------------------------------------------------------------------------- + +/// The 11 apex-local features described in METHODS.md Section 3.4. +#[derive(Debug, Clone, Copy)] +pub struct ApexFeatures { + pub peak_shape: f32, + pub ratio_cv: f32, + pub centered_apex: f32, + pub precursor_coelution: f32, + pub fragment_coverage: f32, + pub precursor_apex_match: f32, + pub xic_quality: f32, + pub fragment_apex_agreement: f32, + pub isotope_correlation: f32, + pub gaussian_correlation: f32, + pub per_frag_gaussian_corr: f32, +} + +/// Result of the area-uniqueness calculation (METHODS.md Section 3.1). +#[derive(Debug, Clone, Copy)] +pub struct AreaUniquenessResult { + pub au_score: f32, +} + +/// Result of the coelution-gradient quality calculation (METHODS.md Section 3.2). +#[derive(Debug, Clone, Copy)] +pub struct CoelutionGradientResult { + pub weighted_coelution: f32, + pub gradient_consistency: f32, + pub combined: f32, +} + +/// Split product score computed from independent cosine and scribe apexes (METHODS.md Section 3.1). 
+#[derive(Debug, Clone, Copy)] +pub struct SplitProductScore { + pub cosine_au: f32, + pub cosine_cg: f32, + pub scribe_au: f32, + pub scribe_cg: f32, + pub base_score: f32, + pub cosine_weighted_coelution: f32, + pub cosine_gradient_consistency: f32, + pub scribe_weighted_coelution: f32, + pub scribe_gradient_consistency: f32, +} + +// --------------------------------------------------------------------------- +// Scoring weights: (offset, scale) pairs for each feature. +// Final score = base * product(offset + scale * feature_k) +// See METHODS.md Section 3.5. +// --------------------------------------------------------------------------- + +pub const SCORING_WEIGHTS: [(f32, f32); 11] = [ + (1.0, 3.5), // peak_shape + (1.0, 3.0), // ratio_cv + (1.0, 4.5), // centered_apex + (1.0, 2.3), // precursor_coelution + (0.32, 1.0), // fragment_coverage + (1.0, 8.0), // precursor_apex_match + (1.0, 7.8), // xic_quality + (0.43, 1.0), // fragment_apex_agreement + (1.0, 2.6), // isotope_correlation + (0.27, 1.0), // gaussian_correlation + (0.65, 1.0), // per_frag_gaussian_corr +]; + +// --------------------------------------------------------------------------- +// Private helpers +// --------------------------------------------------------------------------- + +/// Pearson correlation between two equal-length slices. +/// Returns 0.0 if either has zero variance. +fn pearson_correlation(a: &[f32], b: &[f32]) -> f32 { + debug_assert_eq!(a.len(), b.len()); + let n = a.len(); + if n == 0 { + return 0.0; + } + let mean_a: f32 = a.iter().sum::() / n as f32; + let mean_b: f32 = b.iter().sum::() / n as f32; + + let mut cov = 0.0f32; + let mut var_a = 0.0f32; + let mut var_b = 0.0f32; + for i in 0..n { + let da = a[i] - mean_a; + let db = b[i] - mean_b; + cov += da * db; + var_a += da * da; + var_b += db * db; + } + let denom = (var_a * var_b).sqrt(); + if denom < 1e-12 { + return 0.0; + } + cov / denom +} + +/// Center a window (subtract mean) and normalize to unit length. 
+/// Writes into `out` (must be same length as `window`). +/// Returns `false` if the norm is zero (all-constant input), leaving `out` zeroed. +fn center_normalize(window: &[f32], out: &mut [f32]) -> bool { + debug_assert_eq!(window.len(), out.len()); + let n = window.len(); + if n == 0 { + return false; + } + let mean: f32 = window.iter().sum::() / n as f32; + let mut norm_sq = 0.0f32; + for i in 0..n { + let v = window[i] - mean; + out[i] = v; + norm_sq += v * v; + } + let norm = norm_sq.sqrt(); + if norm < 1e-12 { + for v in out.iter_mut() { + *v = 0.0; + } + return false; + } + for v in out.iter_mut() { + *v /= norm; + } + true +} + +/// Estimate FWHM of a profile around the apex by descending from the peak +/// to find the half-maximum width on both sides. +fn estimate_fwhm(profile: &[f32], apex: usize) -> f32 { + if profile.is_empty() { + return 2.0; + } + let peak_val = profile[apex]; + if peak_val <= 0.0 { + return 2.0; + } + let half_max = peak_val * 0.5; + + // Descend left + let mut left_dist = 0.0f32; + for i in (0..apex).rev() { + if profile[i] <= half_max { + // Linear interpolation between i and i+1 + let above = profile[i + 1]; + let below = profile[i]; + let frac = if (above - below).abs() > 1e-12 { + (above - half_max) / (above - below) + } else { + 0.5 + }; + left_dist = (apex - i - 1) as f32 + frac; + break; + } + if i == 0 { + left_dist = apex as f32; + } + } + + // Descend right + let mut right_dist = 0.0f32; + for i in (apex + 1)..profile.len() { + if profile[i] <= half_max { + let above = profile[i - 1]; + let below = profile[i]; + let frac = if (above - below).abs() > 1e-12 { + (above - half_max) / (above - below) + } else { + 0.5 + }; + right_dist = (i - apex - 1) as f32 + frac; + break; + } + if i == profile.len() - 1 { + right_dist = (profile.len() - 1 - apex) as f32; + } + } + + let fwhm = (left_dist + right_dist).max(1.0); + fwhm +} + +// --------------------------------------------------------------------------- +// Public functions 
+// --------------------------------------------------------------------------- + +/// Area-uniqueness score around an apex (METHODS.md Section 3.1). +/// +/// `signal` is the per-cycle profile (e.g. cosine_profile or scribe_profile). +/// `apex` is the argmax index within `signal`. +/// `hw` is the half-width of the peak window (typically 5). +/// +/// AU = peak_area * (1 + 200 * peak_area / total_area) +pub fn area_uniqueness(signal: &[f32], apex: usize, hw: usize) -> AreaUniquenessResult { + let n = signal.len(); + if n == 0 { + return AreaUniquenessResult { + au_score: 0.0, + }; + } + let lo = apex.saturating_sub(hw); + let hi = (apex + hw + 1).min(n); + + let peak_area: f32 = signal[lo..hi].iter().sum(); + let total: f32 = signal.iter().sum(); + + if total <= 0.0 { + return AreaUniquenessResult { + au_score: 0.0, + }; + } + + let uniqueness = peak_area / total; + let au_score = peak_area * (1.0 + 200.0 * uniqueness); + + AreaUniquenessResult { au_score } +} + +/// Coelution-gradient quality at a given apex (METHODS.md Section 3.2). +/// +/// Measures whether the fragment XICs co-elute and change in the same direction. +/// Weights use raw predicted intensity fractions (NOT sqrt-transformed). +/// +/// Returns `combined = 1.0` if fewer than 2 active fragments in the window. 
+pub fn coelution_gradient( + fragments: &MzMajorIntensityArray, + expected: &HashMap, + apex: usize, + coelution_hw: usize, + gradient_hw: usize, +) -> CoelutionGradientResult { + let n_cycles = fragments.num_cycles(); + if n_cycles == 0 { + return CoelutionGradientResult { + weighted_coelution: 0.0, + gradient_consistency: 0.0, + combined: 1.0, + }; + } + + // Collect fragment windows and weights + let coel_lo = apex.saturating_sub(coelution_hw); + let coel_hi = (apex + coelution_hw + 1).min(n_cycles); + let coel_len = coel_hi - coel_lo; + + let grad_lo = apex.saturating_sub(gradient_hw); + let grad_hi = (apex + gradient_hw + 1).min(n_cycles); + let grad_len = grad_hi - grad_lo; + + // We work with up to ~16 fragments; use small vecs. + let mut windows: Vec> = Vec::new(); + let mut weights: Vec = Vec::new(); + let mut grad_windows: Vec> = Vec::new(); + + let weight_sum: f32 = expected.values().sum(); + if weight_sum <= 0.0 { + return CoelutionGradientResult { + weighted_coelution: 0.0, + gradient_consistency: 0.0, + combined: 1.0, + }; + } + + for ((key, _mz), chrom) in fragments.iter_mzs() { + let y_hat = expected.get(key).copied().unwrap_or(0.0); + if y_hat <= 0.0 { + continue; + } + + // Coelution window + let coel_slice = &chrom[coel_lo..coel_hi]; + let mut normed = vec![0.0f32; coel_len]; + if center_normalize(coel_slice, &mut normed) { + windows.push(normed); + weights.push(y_hat / weight_sum); + } + + // Gradient window: first differences + if grad_len >= 2 { + let grad_slice = &chrom[grad_lo..grad_hi]; + let diffs: Vec = grad_slice.windows(2).map(|w| w[1] - w[0]).collect(); + let mut normed_grad = vec![0.0f32; diffs.len()]; + if center_normalize(&diffs, &mut normed_grad) { + grad_windows.push(normed_grad); + } + } + } + + let n_active = windows.len(); + + // Need at least 2 active fragments for pairwise correlations + if n_active < 2 { + return CoelutionGradientResult { + weighted_coelution: 0.0, + gradient_consistency: 0.0, + combined: 1.0, + }; + } + + 
// Weighted coelution: pairwise dot products (already centered+normalized = correlations) + let mut wcoel_num = 0.0f32; + let mut wcoel_den = 0.0f32; + for i in 0..n_active { + for j in (i + 1)..n_active { + let corr: f32 = windows[i] + .iter() + .zip(windows[j].iter()) + .map(|(a, b)| a * b) + .sum(); + let w = weights[i] * weights[j]; + wcoel_num += w * corr; + wcoel_den += w; + } + } + let weighted_coelution = if wcoel_den > 0.0 { + wcoel_num / wcoel_den + } else { + 0.0 + }; + + // Gradient consistency: unweighted mean of upper-triangle correlations + let n_grad = grad_windows.len(); + let gradient_consistency = if n_grad >= 2 { + let mut sum_corr = 0.0f32; + let mut count = 0u32; + for i in 0..n_grad { + for j in (i + 1)..n_grad { + let corr: f32 = grad_windows[i] + .iter() + .zip(grad_windows[j].iter()) + .map(|(a, b)| a * b) + .sum(); + sum_corr += corr; + count += 1; + } + } + if count > 0 { + sum_corr / count as f32 + } else { + 0.0 + } + } else { + 0.0 + }; + + // Combined: (1 + 10 * max(wcoel, 0)) * (1 + max(grad, 0)) + let combined = + (1.0 + 10.0 * weighted_coelution.max(0.0)) * (1.0 + gradient_consistency.max(0.0)); + + CoelutionGradientResult { + weighted_coelution, + gradient_consistency, + combined, + } +} + +/// Split product score from independent cosine and scribe apexes (METHODS.md Section 3.1). +/// +/// Each profile finds its own argmax, computes area-uniqueness (hw=5) and +/// coelution-gradient (coelution_hw=20, gradient_hw=10) at that apex. 
+/// base = cos_AU * cos_CG * scr_AU * scr_CG +pub fn compute_split_product( + cosine_profile: &[f32], + scribe_profile: &[f32], + fragments: &MzMajorIntensityArray, + expected: &HashMap, +) -> SplitProductScore { + // Independent argmax on each profile + let cos_apex = argmax(cosine_profile); + let scr_apex = argmax(scribe_profile); + + // Area-uniqueness with hw=5 + let cos_au = area_uniqueness(cosine_profile, cos_apex, 5); + let scr_au = area_uniqueness(scribe_profile, scr_apex, 5); + + // Coelution-gradient at each apex + let cos_cg = coelution_gradient(fragments, expected, cos_apex, 20, 10); + let scr_cg = coelution_gradient(fragments, expected, scr_apex, 20, 10); + + let base_score = cos_au.au_score * cos_cg.combined * scr_au.au_score * scr_cg.combined; + + SplitProductScore { + cosine_au: cos_au.au_score, + cosine_cg: cos_cg.combined, + scribe_au: scr_au.au_score, + scribe_cg: scr_cg.combined, + base_score, + cosine_weighted_coelution: cos_cg.weighted_coelution, + cosine_gradient_consistency: cos_cg.gradient_consistency, + scribe_weighted_coelution: scr_cg.weighted_coelution, + scribe_gradient_consistency: scr_cg.gradient_consistency, + } +} + +/// Find the joint precursor-fragment apex (METHODS.md Section 3.3). +/// +/// joint(t) = C(t) * (0.5 + P(t) / max(P)) +/// If max(P) == 0, degrades to joint(t) = C(t) * 0.5 (pure fragment apex). +pub fn find_joint_apex(cosine_profile: &[f32], precursor_trace: &[f32]) -> usize { + let max_p = precursor_trace + .iter() + .copied() + .fold(0.0f32, f32::max); + + let mut best_val = f32::NEG_INFINITY; + let mut best_idx = 0usize; + + let n = cosine_profile.len().min(precursor_trace.len()); + for t in 0..n { + let p_factor = if max_p > 0.0 { + 0.5 + precursor_trace[t] / max_p + } else { + 0.5 + }; + let joint = cosine_profile[t] * p_factor; + if joint > best_val { + best_val = joint; + best_idx = t; + } + } + best_idx +} + +/// Compute all 11 apex-local features at the joint apex (METHODS.md Section 3.4). 
+/// +/// `fragments` and `precursors` are the raw chromatogram data. +/// `expected` contains both fragment and precursor predicted intensities. +/// `cosine_profile` is C(t) = cos(t)^3 * I(t). +/// `precursor_trace` is the summed precursor intensity trace. +/// `joint_apex` is the cycle index from `find_joint_apex`. +/// `n_cycles` is the total number of cycles in the extraction window. +pub fn compute_apex_features( + fragments: &MzMajorIntensityArray, + precursors: &MzMajorIntensityArray, + expected: &crate::models::ExpectedIntensities, + cosine_profile: &[f32], + precursor_trace: &[f32], + joint_apex: usize, + n_cycles: usize, +) -> ApexFeatures { + let apex = joint_apex; + + // ---- Peak Shape (Section 3.4) ---- + let peak_shape = compute_peak_shape(cosine_profile, apex, 10); + + // ---- Ratio CV (Section 3.4) ---- + let ratio_cv = compute_ratio_cv(fragments, &expected.fragment_intensities, apex); + + // ---- Centered Apex (Section 3.4) ---- + let centered_apex = if n_cycles > 0 { + let half = n_cycles as f32 / 2.0; + (1.0 - (apex as f32 - half).abs() / half).max(0.0) + } else { + 0.0 + }; + + // ---- Precursor Coelution (Section 3.4) ---- + let precursor_coelution = + compute_precursor_coelution(fragments, precursor_trace, apex, 10, n_cycles); + + // ---- Fragment Coverage (Section 3.4) ---- + let fragment_coverage = compute_fragment_coverage(fragments, apex); + + // ---- Precursor Apex Match (Section 3.4) ---- + let precursor_apex_match = + compute_precursor_apex_match(precursor_trace, apex, n_cycles); + + // ---- XIC Quality (Section 3.4) ---- + let xic_quality = compute_xic_quality(fragments, apex, 8); + + // ---- Fragment Apex Agreement (Section 3.4) ---- + let fragment_apex_agreement = compute_fragment_apex_agreement(fragments, apex); + + // ---- Isotope Correlation (Section 3.4) ---- + let isotope_correlation = + compute_isotope_correlation(precursors, &expected.precursor_intensities, apex); + + // ---- Gaussian Correlation (Section 3.4) ---- + let 
gaussian_correlation = compute_gaussian_correlation(cosine_profile, apex, 15); + + // ---- Per-Fragment Gaussian Correlation (Section 3.4) ---- + let per_frag_gaussian_corr = compute_per_frag_gaussian_corr(fragments, apex, 10); + + ApexFeatures { + peak_shape, + ratio_cv, + centered_apex, + precursor_coelution, + fragment_coverage, + precursor_apex_match, + xic_quality, + fragment_apex_agreement, + isotope_correlation, + gaussian_correlation, + per_frag_gaussian_corr, + } +} + +/// Compute the final weighted score (METHODS.md Section 3.5). +/// +/// score = base * product(offset_k + scale_k * feature_k) +pub fn compute_weighted_score(base: f32, features: &ApexFeatures) -> f32 { + let feature_values = [ + features.peak_shape, + features.ratio_cv, + features.centered_apex, + features.precursor_coelution, + features.fragment_coverage, + features.precursor_apex_match, + features.xic_quality, + features.fragment_apex_agreement, + features.isotope_correlation, + features.gaussian_correlation, + features.per_frag_gaussian_corr, + ]; + + let mut score = base; + for (i, &fval) in feature_values.iter().enumerate() { + let (offset, scale) = SCORING_WEIGHTS[i]; + score *= offset + scale * fval; + } + score +} + +// --------------------------------------------------------------------------- +// Feature implementation helpers (private) +// --------------------------------------------------------------------------- + +fn argmax(slice: &[f32]) -> usize { + if slice.is_empty() { + return 0; + } + let mut best = f32::NEG_INFINITY; + let mut idx = 0; + for (i, &v) in slice.iter().enumerate() { + if v > best { + best = v; + idx = i; + } + } + idx +} + +/// Peak shape = 0.5 * symmetry + 0.5 * sharpness (Section 3.4). 
+fn compute_peak_shape(profile: &[f32], apex: usize, hw: usize) -> f32 { + let n = profile.len(); + if n == 0 { + return 0.5; + } + + let lo = apex.saturating_sub(hw); + let hi = (apex + hw + 1).min(n); + + let left_len = apex - lo; + let right_len = hi - 1 - apex; + let flank_len = left_len.min(right_len); + + if flank_len < 2 { + return 0.5; + } + + // Symmetry: Pearson correlation between left flank (reversed) and right flank + let left_rev: Vec = (0..flank_len) + .map(|i| profile[apex - 1 - i]) + .collect(); + let right: Vec = (0..flank_len) + .map(|i| profile[apex + 1 + i]) + .collect(); + let symmetry = pearson_correlation(&left_rev, &right).clamp(0.0, 1.0); + + // Sharpness: 1 - mean(edges) / peak + let peak_val = profile[apex]; + let sharpness = if peak_val > 0.0 { + let edge_mean = (profile[lo] + profile[hi - 1]) * 0.5; + (1.0 - edge_mean / peak_val).max(0.0) + } else { + 0.0 + }; + + 0.5 * symmetry + 0.5 * sharpness +} + +/// Ratio CV: consistency of observed-to-predicted ratios at apex (Section 3.4). +fn compute_ratio_cv( + fragments: &MzMajorIntensityArray, + expected: &HashMap, + apex: usize, +) -> f32 { + let n_cycles = fragments.num_cycles(); + if apex >= n_cycles { + return 0.0; + } + + let mut ratios = Vec::new(); + for ((key, _mz), chrom) in fragments.iter_mzs() { + let y_hat = expected.get(key).copied().unwrap_or(0.0); + if y_hat > 0.0 && apex < chrom.len() && chrom[apex] > 0.0 { + ratios.push(chrom[apex] / y_hat); + } + } + + if ratios.len() < 3 { + return 0.0; + } + + let mean: f32 = ratios.iter().sum::() / ratios.len() as f32; + if mean <= 0.0 { + return 0.0; + } + let var: f32 = ratios.iter().map(|r| (r - mean).powi(2)).sum::() / ratios.len() as f32; + let cv = var.sqrt() / mean; + 1.0 / (1.0 + cv) +} + +/// Precursor coelution: Pearson correlation between precursor trace and summed +/// fragment trace in a +/-hw window around apex (Section 3.4). 
+fn compute_precursor_coelution( + fragments: &MzMajorIntensityArray, + precursor_trace: &[f32], + apex: usize, + hw: usize, + n_cycles: usize, +) -> f32 { + let lo = apex.saturating_sub(hw); + let hi = (apex + hw + 1).min(n_cycles); + if hi <= lo || hi > precursor_trace.len() { + return 0.0; + } + + // Sum fragment traces in the window + let win_len = hi - lo; + let mut frag_sum = vec![0.0f32; win_len]; + for ((_key, _mz), chrom) in fragments.iter_mzs() { + for i in 0..win_len { + let idx = lo + i; + if idx < chrom.len() { + frag_sum[i] += chrom[idx]; + } + } + } + + let prec_win = &precursor_trace[lo..hi]; + pearson_correlation(prec_win, &frag_sum).max(0.0) +} + +/// Fragment coverage: fraction of fragments with nonzero intensity at apex (Section 3.4). +fn compute_fragment_coverage( + fragments: &MzMajorIntensityArray, + apex: usize, +) -> f32 { + let n_frags = fragments.num_ions(); + if n_frags == 0 { + return 0.0; + } + let mut count = 0u32; + for ((_key, _mz), chrom) in fragments.iter_mzs() { + if apex < chrom.len() && chrom[apex] > 0.0 { + count += 1; + } + } + count as f32 / n_frags as f32 +} + +/// Precursor apex match = 0.5 * proximity + 0.5 * fraction (Section 3.4). +fn compute_precursor_apex_match( + precursor_trace: &[f32], + apex: usize, + n_cycles: usize, +) -> f32 { + if precursor_trace.is_empty() || n_cycles == 0 { + return 0.0; + } + + // Proximity: max(0, 1 - |t*_P - t*| / (T/4)) + let prec_apex = argmax(precursor_trace); + let quarter = n_cycles as f32 / 4.0; + let proximity = if quarter > 0.0 { + (1.0 - (prec_apex as f32 - apex as f32).abs() / quarter).max(0.0) + } else { + 0.0 + }; + + // Fraction: P(t*) / sum(P) + let total_p: f32 = precursor_trace.iter().sum(); + let fraction = if total_p > 0.0 && apex < precursor_trace.len() { + precursor_trace[apex] / total_p + } else { + 0.0 + }; + + 0.5 * proximity + 0.5 * fraction +} + +/// XIC quality: mean per-fragment chromatographic peak quality (Section 3.4). 
+/// For each fragment in a +/-hw window: +/// alignment = max(0, 1 - d_i / (w/2)) +/// sharpness = 1 - mean(edges) / peak +/// xic_i = 0.5 * alignment + 0.5 * sharpness +fn compute_xic_quality( + fragments: &MzMajorIntensityArray, + apex: usize, + hw: usize, +) -> f32 { + let n_cycles = fragments.num_cycles(); + let n_frags = fragments.num_ions(); + if n_frags == 0 || n_cycles == 0 { + return 0.0; + } + + let lo = apex.saturating_sub(hw); + let hi = (apex + hw + 1).min(n_cycles); + let win_len = hi - lo; + + if win_len < 2 { + return 0.0; + } + + let half_w = win_len as f32 / 2.0; + let mut sum_quality = 0.0f32; + + for ((_key, _mz), chrom) in fragments.iter_mzs() { + let window = &chrom[lo..hi]; + + // Find local max in window + let (local_max_val, local_max_idx) = window + .iter() + .enumerate() + .fold((0.0f32, 0usize), |(best_v, best_i), (i, &v)| { + if v > best_v { + (v, i) + } else { + (best_v, best_i) + } + }); + + if local_max_val <= 0.0 { + // Fragment contributes 0 + continue; + } + + // Alignment: distance from local max to expected apex position in window + let apex_in_window = apex - lo; + let d = (local_max_idx as f32 - apex_in_window as f32).abs(); + let alignment = (1.0 - d / half_w).max(0.0); + + // Sharpness: 1 - mean(edges) / peak + let edge_mean = (window[0] + window[win_len - 1]) * 0.5; + let sharpness = (1.0 - edge_mean / local_max_val).max(0.0); + + sum_quality += 0.5 * alignment + 0.5 * sharpness; + } + + sum_quality / n_frags as f32 +} + +/// Fragment apex agreement: fraction of fragments whose argmax is within +/-2 +/// of the joint apex (Section 3.4). 
+fn compute_fragment_apex_agreement( + fragments: &MzMajorIntensityArray, + apex: usize, +) -> f32 { + let n_frags = fragments.num_ions(); + if n_frags == 0 { + return 0.0; + } + + let mut count = 0u32; + for ((_key, _mz), chrom) in fragments.iter_mzs() { + // Single pass: find argmax and check if max > 0 + let (frag_apex, max_val) = chrom.iter().enumerate() + .fold((0usize, 0.0f32), |(bi, bv), (i, &v)| if v > bv { (i, v) } else { (bi, bv) }); + if max_val <= 0.0 { + continue; + } + if (frag_apex as i64 - apex as i64).unsigned_abs() <= 2 { + count += 1; + } + } + + count as f32 / n_frags as f32 +} + +/// Isotope correlation: cosine between observed and expected precursor isotope +/// envelope at the apex cycle (Section 3.4). +/// +/// Uses existing predicted precursor_intensities (keys 0, 1, 2) rather than +/// re-implementing averagine — the codebase already has sequence-specific predictions. +fn compute_isotope_correlation( + precursors: &MzMajorIntensityArray, + expected_precursor: &HashMap, + apex: usize, +) -> f32 { + let n_cycles = precursors.num_cycles(); + if apex >= n_cycles { + return 0.0; + } + + // Collect observed and expected for isotope keys 0, 1, 2 + let mut obs = [0.0f32; 3]; + let mut exp = [0.0f32; 3]; + let mut n_valid = 0u32; + + for iso_key in 0i8..=2i8 { + if let Some(row) = precursors.get_row(&iso_key) { + if apex < row.len() { + obs[iso_key as usize] = row[apex]; + } + } + if let Some(&v) = expected_precursor.get(&iso_key) { + exp[iso_key as usize] = v; + } + if obs[iso_key as usize] > 0.0 && exp[iso_key as usize] > 0.0 { + n_valid += 1; + } + } + + if n_valid < 2 { + return 0.0; + } + + // Cosine similarity + let dot: f32 = obs.iter().zip(exp.iter()).map(|(a, b)| a * b).sum(); + let norm_obs = obs.iter().map(|v| v * v).sum::().sqrt(); + let norm_exp = exp.iter().map(|v| v * v).sum::().sqrt(); + + if norm_obs < 1e-12 || norm_exp < 1e-12 { + return 0.0; + } + (dot / (norm_obs * norm_exp)).clamp(0.0, 1.0) +} + +/// Build a Gaussian reference 
vector centered at `apex` within window `[lo, lo+win_len)`. +fn build_gaussian_reference(win_len: usize, lo: usize, apex: usize, sigma: f32) -> Vec { + (0..win_len) + .map(|i| { + let t = (lo + i) as f32 - apex as f32; + (-t * t / (2.0 * sigma * sigma)).exp() + }) + .collect() +} + +/// Gaussian correlation: Pearson correlation between the combined elution profile +/// and an ideal Gaussian centered at the apex (Section 3.4). +/// +/// sigma is estimated from the observed FWHM: sigma = FWHM / 2.355 +fn compute_gaussian_correlation(profile: &[f32], apex: usize, hw: usize) -> f32 { + let n = profile.len(); + if n == 0 { + return 0.0; + } + + let lo = apex.saturating_sub(hw); + let hi = (apex + hw + 1).min(n); + let win_len = hi - lo; + if win_len < 3 { + return 0.0; + } + + let fwhm = estimate_fwhm(profile, apex); + let sigma = (fwhm / 2.355).max(0.5); + let gaussian = build_gaussian_reference(win_len, lo, apex, sigma); + + let window = &profile[lo..hi]; + pearson_correlation(window, &gaussian).max(0.0) +} + +/// Per-fragment Gaussian correlation: mean per-fragment correlation with a Gaussian +/// reference (Section 3.4, inspired by Beta-DIA). 
+/// +/// sigma = max(window_size / 6, 1) +fn compute_per_frag_gaussian_corr( + fragments: &MzMajorIntensityArray, + apex: usize, + hw: usize, +) -> f32 { + let n_cycles = fragments.num_cycles(); + if n_cycles == 0 { + return 0.0; + } + + let lo = apex.saturating_sub(hw); + let hi = (apex + hw + 1).min(n_cycles); + let win_len = hi - lo; + if win_len < 3 { + return 0.0; + } + + let sigma = (win_len as f32 / 6.0).max(1.0); + let gaussian = build_gaussian_reference(win_len, lo, apex, sigma); + + let mut sum_corr = 0.0f32; + let mut n_active = 0u32; + + for ((_key, _mz), chrom) in fragments.iter_mzs() { + let max_val = chrom.iter().copied().fold(0.0f32, f32::max); + if max_val <= 0.0 { + continue; + } + let window = &chrom[lo..hi]; + let corr = pearson_correlation(window, &gaussian).max(0.0); + sum_corr += corr; + n_active += 1; + } + + if n_active == 0 { + return 0.0; + } + sum_corr / n_active as f32 +} + +// --------------------------------------------------------------------------- +// Tests +// --------------------------------------------------------------------------- + +#[cfg(test)] +mod tests { + use super::*; + use timsquery::models::Array2D; + + /// Helper: build a MzMajorIntensityArray from a Vec of (key, mz, intensities) tuples. 
+ fn make_fragments( + ions: Vec<(T, f64, Vec)>, + ) -> MzMajorIntensityArray { + let n_cycles = ions.first().map(|(_, _, v)| v.len()).unwrap_or(0); + let n_ions = ions.len(); + let mz_order: Vec<(T, f64)> = ions.iter().map(|(k, mz, _)| (k.clone(), *mz)).collect(); + let flat: Vec = ions.into_iter().flat_map(|(_, _, v)| v).collect(); + let arr = Array2D::from_flat_vector(flat, n_ions, n_cycles).unwrap(); + MzMajorIntensityArray { + arr, + mz_order, + cycle_offset: 0, + } + } + + // --- Test 3: area_uniqueness --- + + #[test] + fn test_area_uniqueness_clear_peak() { + // A clear peak at index 10, surrounded by near-zero + let mut signal = vec![0.0f32; 21]; + signal[8] = 1.0; + signal[9] = 5.0; + signal[10] = 10.0; + signal[11] = 5.0; + signal[12] = 1.0; + + let result = area_uniqueness(&signal, 10, 5); + + // peak_area = sum of signal[5..=15] = 0+0+0+1+5+10+5+1+0+0+0 = 22 + // total = 22 + // uniqueness = 22/22 = 1.0 + // au_score = 22 * (1 + 200*1.0) = 22 * 201 = 4422 + assert!((result.au_score - 4422.0).abs() < 1.0); + } + + #[test] + fn test_area_uniqueness_all_zeros() { + let signal = vec![0.0f32; 20]; + let result = area_uniqueness(&signal, 10, 5); + assert_eq!(result.au_score, 0.0); + } + + #[test] + fn test_area_uniqueness_boundary_clamping() { + // Apex near left edge + let mut signal = vec![0.0f32; 10]; + signal[0] = 5.0; + signal[1] = 10.0; + signal[2] = 5.0; + + let result = area_uniqueness(&signal, 1, 5); + // Window: [0..min(7,10)] = [0..7], peak_area = 5+10+5+0+0+0+0 = 20 + // total = 20 + assert!(result.au_score > 0.0); + + // Apex near right edge + let mut signal2 = vec![0.0f32; 10]; + signal2[8] = 10.0; + signal2[9] = 5.0; + + let result2 = area_uniqueness(&signal2, 9, 5); + assert!(result2.au_score > 0.0); + } + + // --- Test 4: coelution_gradient --- + + #[test] + fn test_coelution_gradient_identical_fragments() { + // Two fragments with identical chromatograms => perfect coelution (wcoel = 1) + let peak: Vec = (0..41) + .map(|i| { + let x = (i as 
f32 - 20.0) / 5.0; + (-x * x / 2.0).exp() * 100.0 + }) + .collect(); + + let fragments = make_fragments(vec![ + ("a".to_string(), 100.0, peak.clone()), + ("b".to_string(), 200.0, peak.clone()), + ]); + + let expected: HashMap = + [("a".to_string(), 1.0), ("b".to_string(), 1.0)] + .into_iter() + .collect(); + + let result = coelution_gradient(&fragments, &expected, 20, 20, 10); + + // Identical traces => correlation should be ~1.0 + assert!( + result.weighted_coelution > 0.95, + "wcoel = {} should be ~1.0", + result.weighted_coelution + ); + assert!(result.combined > 1.0); + } + + #[test] + fn test_coelution_gradient_anti_correlated() { + // Two fragments with anti-correlated chromatograms + let n = 41; + let peak_a: Vec = (0..n) + .map(|i| { + let x = (i as f32 - 10.0) / 3.0; + (-x * x / 2.0).exp() * 100.0 + }) + .collect(); + let peak_b: Vec = (0..n) + .map(|i| { + let x = (i as f32 - 30.0) / 3.0; + (-x * x / 2.0).exp() * 100.0 + }) + .collect(); + + let fragments = make_fragments(vec![ + ("a".to_string(), 100.0, peak_a), + ("b".to_string(), 200.0, peak_b), + ]); + + let expected: HashMap = + [("a".to_string(), 1.0), ("b".to_string(), 1.0)] + .into_iter() + .collect(); + + let result = coelution_gradient(&fragments, &expected, 20, 20, 10); + // Anti-correlated: wcoel should be negative or near zero + assert!( + result.weighted_coelution < 0.5, + "wcoel = {} should be low for anti-correlated traces", + result.weighted_coelution + ); + } + + #[test] + fn test_coelution_gradient_single_fragment() { + // Single fragment: fewer than 2 active => combined = 1.0 + let peak: Vec = (0..41) + .map(|i| { + let x = (i as f32 - 20.0) / 5.0; + (-x * x / 2.0).exp() * 100.0 + }) + .collect(); + + let fragments = + make_fragments(vec![("a".to_string(), 100.0, peak)]); + + let expected: HashMap = + [("a".to_string(), 1.0)].into_iter().collect(); + + let result = coelution_gradient(&fragments, &expected, 20, 20, 10); + assert!( + (result.combined - 1.0).abs() < 1e-6, + "single 
fragment => combined should be 1.0, got {}", + result.combined + ); + } + + // --- Test 5: split product with offset peaks --- + + #[test] + fn test_split_product_offset_peaks() { + let n = 50; + // Cosine profile peaks at 20, scribe at 30 + let cosine_profile: Vec = (0..n) + .map(|i| { + let x = (i as f32 - 20.0) / 3.0; + (-x * x / 2.0).exp() * 10.0 + }) + .collect(); + let scribe_profile: Vec = (0..n) + .map(|i| { + let x = (i as f32 - 30.0) / 3.0; + (-x * x / 2.0).exp() * 8.0 + }) + .collect(); + + let peak: Vec = (0..n) + .map(|i| { + let x = (i as f32 - 25.0) / 5.0; + (-x * x / 2.0).exp() * 100.0 + }) + .collect(); + + let fragments = make_fragments(vec![ + ("a".to_string(), 100.0, peak.clone()), + ("b".to_string(), 200.0, peak), + ]); + let expected: HashMap = + [("a".to_string(), 1.0), ("b".to_string(), 1.0)] + .into_iter() + .collect(); + + let result = compute_split_product(&cosine_profile, &scribe_profile, &fragments, &expected); + assert!(result.base_score > 0.0); + assert!(result.cosine_au > 0.0); + assert!(result.scribe_au > 0.0); + } + + // --- Test 6: joint apex --- + + #[test] + fn test_joint_apex_with_precursor() { + let n = 30; + // Fragment profile peaks at 15 + let cosine_profile: Vec = (0..n) + .map(|i| { + let x = (i as f32 - 15.0) / 3.0; + (-x * x / 2.0).exp() * 10.0 + }) + .collect(); + // Precursor trace also peaks at 15 + let precursor_trace: Vec = (0..n) + .map(|i| { + let x = (i as f32 - 15.0) / 4.0; + (-x * x / 2.0).exp() * 5.0 + }) + .collect(); + + let apex = find_joint_apex(&cosine_profile, &precursor_trace); + assert_eq!(apex, 15); + } + + #[test] + fn test_joint_apex_without_precursor() { + let n = 30; + let cosine_profile: Vec = (0..n) + .map(|i| { + let x = (i as f32 - 15.0) / 3.0; + (-x * x / 2.0).exp() * 10.0 + }) + .collect(); + // Zero precursor + let precursor_trace = vec![0.0f32; n]; + + let apex = find_joint_apex(&cosine_profile, &precursor_trace); + // Should still find the cosine peak at 15 + assert_eq!(apex, 15); + } + 
+ // --- Test 7: individual feature tests --- + + #[test] + fn test_fragment_coverage() { + // 4 fragments, 2 have signal at apex + let fragments = make_fragments(vec![ + ("a".to_string(), 100.0, vec![0.0, 5.0, 0.0]), + ("b".to_string(), 200.0, vec![0.0, 0.0, 0.0]), + ("c".to_string(), 300.0, vec![0.0, 3.0, 0.0]), + ("d".to_string(), 400.0, vec![0.0, 0.0, 0.0]), + ]); + let cov = compute_fragment_coverage(&fragments, 1); + assert!((cov - 0.5).abs() < 1e-6, "expected 0.5, got {}", cov); + } + + #[test] + fn test_ratio_cv_uniform_ratios() { + // All fragments have the same obs/pred ratio => CV = 0 => ratio_cv = 1.0 + let fragments = make_fragments(vec![ + ("a".to_string(), 100.0, vec![0.0, 2.0, 0.0]), + ("b".to_string(), 200.0, vec![0.0, 4.0, 0.0]), + ("c".to_string(), 300.0, vec![0.0, 6.0, 0.0]), + ("d".to_string(), 400.0, vec![0.0, 8.0, 0.0]), + ]); + let expected: HashMap = [ + ("a".to_string(), 1.0), + ("b".to_string(), 2.0), + ("c".to_string(), 3.0), + ("d".to_string(), 4.0), + ] + .into_iter() + .collect(); + let cv = compute_ratio_cv(&fragments, &expected, 1); + assert!( + (cv - 1.0).abs() < 1e-6, + "uniform ratios => cv should be 1.0, got {}", + cv + ); + } + + #[test] + fn test_isotope_correlation_perfect_match() { + // Observed matches expected perfectly + let precursors = make_fragments(vec![ + (0i8, 500.0, vec![0.0, 0.6, 0.0]), + (1i8, 500.5, vec![0.0, 0.3, 0.0]), + (2i8, 501.0, vec![0.0, 0.1, 0.0]), + ]); + let expected: HashMap = + [(0i8, 0.6), (1, 0.3), (2, 0.1)].into_iter().collect(); + let iso = compute_isotope_correlation(&precursors, &expected, 1); + assert!( + (iso - 1.0).abs() < 1e-4, + "perfect match => iso should be ~1.0, got {}", + iso + ); + } + + // --- Test 8: weighted score --- + + #[test] + fn test_weighted_score_all_zero_features() { + let features = ApexFeatures { + peak_shape: 0.0, + ratio_cv: 0.0, + centered_apex: 0.0, + precursor_coelution: 0.0, + fragment_coverage: 0.0, + precursor_apex_match: 0.0, + xic_quality: 0.0, + 
fragment_apex_agreement: 0.0, + isotope_correlation: 0.0, + gaussian_correlation: 0.0, + per_frag_gaussian_corr: 0.0, + }; + let score = compute_weighted_score(100.0, &features); + // product of offsets: 1*1*1*1*0.32*1*1*0.43*1*0.27*0.65 + let expected = 100.0 * 1.0 * 1.0 * 1.0 * 1.0 * 0.32 * 1.0 * 1.0 * 0.43 * 1.0 * 0.27 + * 0.65; + assert!( + (score - expected).abs() < 1e-3, + "score {} != expected {}", + score, + expected + ); + } + + #[test] + fn test_weighted_score_all_one_features() { + let features = ApexFeatures { + peak_shape: 1.0, + ratio_cv: 1.0, + centered_apex: 1.0, + precursor_coelution: 1.0, + fragment_coverage: 1.0, + precursor_apex_match: 1.0, + xic_quality: 1.0, + fragment_apex_agreement: 1.0, + isotope_correlation: 1.0, + gaussian_correlation: 1.0, + per_frag_gaussian_corr: 1.0, + }; + let score = compute_weighted_score(1.0, &features); + // product of (offset + scale): 4.5 * 4.0 * 5.5 * 3.3 * 1.32 * 9.0 * 8.8 * 1.43 * 3.6 * 1.27 * 1.65 + let expected = 4.5 * 4.0 * 5.5 * 3.3 * 1.32 * 9.0 * 8.8 * 1.43 * 3.6 * 1.27 * 1.65; + assert!( + (score - expected).abs() / expected < 1e-3, + "score {} != expected {}", + score, + expected + ); + } +} diff --git a/rust/timsseek/src/scoring/scores/coelution/coelution_score.rs b/rust/timsseek/src/scoring/scores/coelution/coelution_score.rs deleted file mode 100644 index 89b4113..0000000 --- a/rust/timsseek/src/scoring/scores/coelution/coelution_score.rs +++ /dev/null @@ -1,181 +0,0 @@ -use crate::errors::DataProcessingError; -use crate::utils::correlation::rolling_cosine_similarity; -use timsquery::models::{ - Array2D, - MzMajorIntensityArray, -}; -use tracing::trace; - -// /// Calculates the coelution score of a set of chromatograms. 
-// /// -// /// # Example -// /// -// /// ``` -// /// use timsquery::Array2D; -// /// use timsseek::scoring::coelution; -// /// -// /// let slices = Array2D::new( -// /// vec![[0., 1., 3., 22., 5.], -// /// [0., 2., 4., 20., 5.], -// /// [0., 1., 2., 19., 2.]]).unwrap(); -// /// let window = 3; -// /// // Note that the generic type parameter is the top N of scores that will -// /// // be averaged to report the coelution. -// /// let scores = coelution::coelution_score_arr::<3>(&slices, window).unwrap(); -// /// assert_eq!(scores, [0.0, 0.9866667, 0.9939658, 0.9849558, 0.0]); -// /// ``` -// pub fn coelution_score_arr<'a, const TOP_N: usize>( -// slices: &'a Array2D, -// window_size: usize, -// ) -> Result + 'a, DataProcessingError> { -// let filter: Option bool> = None; -// coelution_score_iter_filter::(slices, window_size, filter) -// } - -// Assuming these types are in the current scope: -// use crate::{Array2D, DataProcessingError, TopNArray, rolling_cosine_similarity}; - -/// Calculates the coelution score of a set of chromatograms, returning a lazy iterator. 
-fn coelution_vref_score_filter_onto( - slices: &Array2D, - ref_slice: &[f32], - window_size: usize, - filter: impl Fn(usize) -> bool, - buffer: &mut Vec, -) -> Result<(), DataProcessingError> { - if slices.ncols() < window_size { - trace!("Not enough data to calculate coelution score"); - return Err(DataProcessingError::ExpectedNonEmptyData { - context: Some(format!( - "Not enough columns in slices to apply the rolling window of size {}", - window_size - )), - }); - } - - let num_elems = (0..slices.nrows()).filter(|&i| filter(i)).count(); - if num_elems == 0 { - trace!("No valid slices after filtering"); - return Err(DataProcessingError::ExpectedNonEmptyData { - context: Some("No valid slices after filtering, check your filter function".into()), - }); - } - let norm_factor = 1f32 / num_elems as f32; - if num_elems > 150 { - trace!( - "There are too many valid slices after filtering, probably an mz-major and an rt-major array got mixed up" - ); - // TODO: make this a more specific error - return Err(DataProcessingError::ExpectedNonEmptyData { context: Some( - "Too many valid slices after filtering, probably an mz-major and an rt-major array got mixed up".into() - ) }); - } - buffer.clear(); - buffer.resize(slices.ncols(), 0.0); - - // Collect all rolling similarity calculations v the reference. - // This still computes the similarities upfront, but the aggregation into the final - // score is done lazily, one time-point at a time. - let res: Result<(), DataProcessingError> = (0..slices.nrows()) - .filter(|&i| filter(i)) - .try_for_each(|i| { - let slice1 = slices.get_row(i).expect("Row index i is within bounds"); - let iter = rolling_cosine_similarity(slice1, ref_slice, window_size)?; - for (i, v) in iter.enumerate() { - if v.is_nan() { - continue; - } - buffer[i] += v.max(0.0) - } - Ok(()) - }); - res?; - - for x in buffer.iter_mut() { - *x *= norm_factor; - } - Ok(()) -} - -/// Calculates the coelution score for a set of chromatograms against a reference slice. 
-/// -/// This function is a variant of `coelution_vref_score_filter_onto` that works with -/// an `MzMajorIntensityArray`. It uses the m/z order within the `slices` to filter -/// which chromatograms to include in the score calculation. -/// -/// # Arguments -/// -/// * `slices` - An `MzMajorIntensityArray` containing the intensity data. The rows of this -/// array correspond to different m/z values, and the columns correspond to -/// different time points or cycles. The `mz_order` field of this struct provides -/// the mapping from row index to m/z value. -/// * `ref_slice` - A slice representing the reference chromatogram. -/// * `window` - The size of the rolling window for the cosine similarity calculation. -/// * `filter` - A closure that takes a key (of type `K`) from the `mz_order` and returns -/// `true` if the corresponding chromatogram should be included in the calculation. -/// * `buffer` - A mutable buffer to store the resulting coelution scores. -pub fn coelution_vref_score_filter_into<'a, K: Clone + Eq>( - slices: &'a MzMajorIntensityArray, - ref_slice: &'a [f32], - window: usize, - filter: &'a impl Fn(&K) -> bool, - buffer: &mut Vec, -) -> Result<(), DataProcessingError> { - let inner_filter = |i| slices.mz_order.get(i).is_some_and(|(k, _mz)| filter(k)); - coelution_vref_score_filter_onto(&slices.arr, ref_slice, window, inner_filter, buffer)?; - Ok(()) -} - -#[cfg(test)] -mod tests { - use super::*; - - fn assert_close_enough(a: &[f32], b: &[f32], tol: f32) { - assert_eq!(a.len(), b.len()); - for (i, (aa, bb)) in a.iter().zip(b.iter()).enumerate() { - assert!( - (aa - bb).abs() < tol, - "Failed at index {}; Expected {:?}, got {:?}, within left: {:?}, right: {:?}", - i, - aa, - bb, - a, - b - ); - } - } - - #[test] - fn test_coelution_vref_score_filter_into() { - // 1. 
Setup the MzMajorIntensityArray - let mz_order = vec![(1, 100.0), (2, 200.0)]; - let n_cycles = 4; - let cycle_offset = 0; - let mut slices = - MzMajorIntensityArray::try_new_empty(mz_order.into(), n_cycles, cycle_offset).unwrap(); - slices.arr = Array2D::new(vec![ - [1.0, 1.0, 0.0, 0.0], // id = 1 - [0.0, 0.0, 1.0, 1.0], // id = 2 - ]) - .unwrap(); - - // 2. Setup other parameters - let ref_slice = vec![1.0, 1.0, 0.0, 0.0]; - let window = 3; - let mut buffer = Vec::new(); - - // 3. Define a filter and call the function - let filter = |k: &i32| *k == 1 || *k == 2; // Include both - coelution_vref_score_filter_into(&slices, &ref_slice, window, &filter, &mut buffer) - .unwrap(); - - // 4. Define expected results and assert - // For row 1 vs ref_slice (self): rolling cos sim is roughly [0, 1, 1, 0] - // For row 2 vs ref_slice (orthogonal): rolling cos sim is roughly [0, 0, 0, 0] - // The sum of similarities is [0.0, 1.0, 1.0, 0.0] - // The number of elements is 2, so the norm_factor is 0.5. 
- // Expected buffer = sum * norm_factor - let expected = vec![0.0, 0.5, 0.5, 0.0]; - assert_close_enough(&buffer, &expected, 1e-7); - } -} diff --git a/rust/timsseek/src/scoring/scores/coelution/mod.rs b/rust/timsseek/src/scoring/scores/coelution/mod.rs deleted file mode 100644 index 2061691..0000000 --- a/rust/timsseek/src/scoring/scores/coelution/mod.rs +++ /dev/null @@ -1,2 +0,0 @@ -pub mod coelution_score; -// pub use coelution_score::coelution_score_arr; diff --git a/rust/timsseek/src/scoring/scores/corr_v_ref.rs b/rust/timsseek/src/scoring/scores/corr_v_ref.rs deleted file mode 100644 index 924b2b4..0000000 --- a/rust/timsseek/src/scoring/scores/corr_v_ref.rs +++ /dev/null @@ -1,85 +0,0 @@ -use crate::errors::DataProcessingError; -use crate::utils::correlation::cosine_similarity; -use timsquery::models::MzMajorIntensityArray; -use timsquery::traits::key_like::KeyLike; - -// From https://doi.org/10.1101/2024.11.19.624419 -const REF_GAUSSIAN: [f32; 7] = [0.0044, 0.054, 0.242, 0.399, 0.242, 0.054, 0.0044]; -const REF_GAUSS_OFFSET: usize = 4; - -// Note that padding is not hadled here, so the output will be smaller than the input. 
-fn slide_cosine_v_gaussian( - slice: &[f32], -) -> impl Iterator> + '_ { - slice - .windows(7) - .map(|window| cosine_similarity(window, &REF_GAUSSIAN)) -} - -pub fn calculate_cosine_with_ref_gaussian_into( - slices: &MzMajorIntensityArray, - filter: impl Fn(&FH) -> bool, - buffer: &mut Vec, -) -> Result<(), DataProcessingError> { - buffer.clear(); - buffer.resize(slices.arr.ncols(), 0.0); - - let ratio = 1f32 / slices.arr.nrows() as f32; - slices.iter_mzs().try_for_each(|((k, _mz), slc)| { - if !filter(k) { - return Ok(()); - } - for (i, v) in slide_cosine_v_gaussian(slc).enumerate() { - match v { - Err(e) => return Err(e), - Ok(v) => { - if v.is_nan() { - continue; - } - buffer[i + REF_GAUSS_OFFSET] += v.max(0.0) * ratio; - } - } - } - Ok(()) - }) -} - -// pub fn calculate_cosine_with_ref_gaussian( -// slices: &MzMajorIntensityArray, -// filter: impl Fn(&FH) -> bool, -// ) -> Result, DataProcessingError> { -// let mut result = vec![0.0; slices.arr.ncols()]; -// calculate_cosine_with_ref_gaussian_into(slices, filter, &mut result)?; -// -// Ok(result) -// } - -#[test] -fn test_calculate_cosine_with_ref_gaussian() { - let test_vec = (0..3).flat_map(|_| REF_GAUSSIAN).collect::>(); - let out = slide_cosine_v_gaussian(&test_vec).collect::, DataProcessingError>>(); - let expect_out = [ - 1.0000001, - 0.7786916, - 0.36945745, - 0.122937046, - 0.122937046, - 0.3694575, - 0.77869165, - 1.0000001, - 0.7786916, - 0.36945745, - 0.122937046, - 0.122937046, - 0.3694575, - 0.77869165, - 1.0000001, - ]; - assert!(out.is_ok()); - - let out = out.unwrap(); - println!("{:?}", out); - for (a, b) in out.iter().zip(expect_out.iter()) { - assert!((a - b).abs() < 1e-6); - } -} diff --git a/rust/timsseek/src/scoring/scores/mod.rs b/rust/timsseek/src/scoring/scores/mod.rs index efdbcbd..1ac4801 100644 --- a/rust/timsseek/src/scoring/scores/mod.rs +++ b/rust/timsseek/src/scoring/scores/mod.rs @@ -1,3 +1,3 @@ -pub mod coelution; -pub mod corr_v_ref; +pub mod apex_features; pub mod 
hyperscore; +pub mod scribe; diff --git a/rust/timsseek/src/scoring/scores/scribe.rs b/rust/timsseek/src/scoring/scores/scribe.rs new file mode 100644 index 0000000..c625516 --- /dev/null +++ b/rust/timsseek/src/scoring/scores/scribe.rs @@ -0,0 +1,2 @@ +/// Floor value for Scribe score when no signal is observed. +pub const SCRIBE_FLOOR: f32 = -100.0; diff --git a/rust/timsseek/src/scoring/search_results.rs b/rust/timsseek/src/scoring/search_results.rs index a25fd18..8ed7645 100644 --- a/rust/timsseek/src/scoring/search_results.rs +++ b/rust/timsseek/src/scoring/search_results.rs @@ -47,27 +47,43 @@ pub struct SearchResultBuilder<'q> { // ms1_ms2_correlation: SetField, npeaks: SetField, apex_lazyerscore: SetField, - apex_lazyerscore_vs_baseline: SetField, - apex_norm_lazyerscore_vs_baseline: SetField, - ms2_cosine_ref_similarity: SetField, - ms2_coelution_score: SetField, ms2_summed_transition_intensity: SetField, - ms2_corr_v_gauss: SetField, ms2_lazyerscore: SetField, ms2_isotope_lazyerscore: SetField, ms2_isotope_lazyerscore_ratio: SetField, + lazyscore_z: SetField, + lazyscore_vs_baseline: SetField, ms2_mz_errors: SetField<[f32; NUM_MS2_IONS]>, ms2_mobility_errors: SetField<[f32; NUM_MS2_IONS]>, - ms1_cosine_ref_similarity: SetField, - ms1_coelution_score: SetField, ms1_summed_precursor_intensity: SetField, - ms1_corr_v_gauss: SetField, ms1_mz_errors: SetField<[f32; NUM_MS1_IONS]>, ms1_mobility_errors: SetField<[f32; NUM_MS1_IONS]>, + // Split product & apex features + split_product_score: SetField, + cosine_au_score: SetField, + scribe_au_score: SetField, + coelution_gradient_cosine: SetField, + coelution_gradient_scribe: SetField, + cosine_weighted_coelution: SetField, + cosine_gradient_consistency: SetField, + scribe_weighted_coelution: SetField, + scribe_gradient_consistency: SetField, + peak_shape: SetField, + ratio_cv: SetField, + centered_apex: SetField, + precursor_coelution: SetField, + fragment_coverage: SetField, + precursor_apex_match: SetField, + 
xic_quality: SetField, + fragment_apex_agreement: SetField, + isotope_correlation: SetField, + gaussian_correlation: SetField, + per_frag_gaussian_corr: SetField, + relative_intensities: SetField, raising_cycles: SetField, falling_cycles: SetField, @@ -168,21 +184,18 @@ impl<'q> SearchResultBuilder<'q> { pub fn with_apex_score(mut self, main_score: &ApexScore) -> Self { let ApexScore { score, + retention_time_ms, + joint_apex_cycle: _, + split_product, + features, delta_next, delta_second_next, - ms2_cosine_ref_sim, - ms2_coelution_score, - ms1_coelution_score, - ms1_cosine_ref_sim, lazyscore, lazyscore_vs_baseline, lazyscore_z, - ms2_corr_v_gauss, - ms1_corr_v_gauss, npeaks, - ms1_summed_intensity, ms2_summed_intensity, - retention_time_ms, + ms1_summed_intensity, raising_cycles, falling_cycles, } = *main_score; @@ -190,20 +203,36 @@ impl<'q> SearchResultBuilder<'q> { self.main_score = SetField::Some(score); self.delta_next = SetField::Some(delta_next); self.delta_second_next = SetField::Some(delta_second_next); - self.rt_seconds = SetField::Some(retention_time_ms as f32 / 1000.0); - self.ms2_cosine_ref_similarity = SetField::Some(ms2_cosine_ref_sim); - self.ms2_coelution_score = SetField::Some(ms2_coelution_score); - self.ms1_coelution_score = SetField::Some(ms1_coelution_score); - self.ms1_cosine_ref_similarity = SetField::Some(ms1_cosine_ref_sim); + + self.split_product_score = SetField::Some(split_product.base_score); + self.cosine_au_score = SetField::Some(split_product.cosine_au); + self.scribe_au_score = SetField::Some(split_product.scribe_au); + self.coelution_gradient_cosine = SetField::Some(split_product.cosine_cg); + self.coelution_gradient_scribe = SetField::Some(split_product.scribe_cg); + self.cosine_weighted_coelution = SetField::Some(split_product.cosine_weighted_coelution); + self.cosine_gradient_consistency = SetField::Some(split_product.cosine_gradient_consistency); + self.scribe_weighted_coelution = 
SetField::Some(split_product.scribe_weighted_coelution); + self.scribe_gradient_consistency = SetField::Some(split_product.scribe_gradient_consistency); + + self.peak_shape = SetField::Some(features.peak_shape); + self.ratio_cv = SetField::Some(features.ratio_cv); + self.centered_apex = SetField::Some(features.centered_apex); + self.precursor_coelution = SetField::Some(features.precursor_coelution); + self.fragment_coverage = SetField::Some(features.fragment_coverage); + self.precursor_apex_match = SetField::Some(features.precursor_apex_match); + self.xic_quality = SetField::Some(features.xic_quality); + self.fragment_apex_agreement = SetField::Some(features.fragment_apex_agreement); + self.isotope_correlation = SetField::Some(features.isotope_correlation); + self.gaussian_correlation = SetField::Some(features.gaussian_correlation); + self.per_frag_gaussian_corr = SetField::Some(features.per_frag_gaussian_corr); + self.apex_lazyerscore = SetField::Some(lazyscore); - self.apex_lazyerscore_vs_baseline = SetField::Some(lazyscore_vs_baseline); - self.apex_norm_lazyerscore_vs_baseline = SetField::Some(lazyscore_z); + self.lazyscore_z = SetField::Some(lazyscore_z); + self.lazyscore_vs_baseline = SetField::Some(lazyscore_vs_baseline); self.npeaks = SetField::Some(npeaks); self.ms1_summed_precursor_intensity = SetField::Some(ms1_summed_intensity); self.ms2_summed_transition_intensity = SetField::Some(ms2_summed_intensity); - self.ms2_corr_v_gauss = SetField::Some(ms2_corr_v_gauss); - self.ms1_corr_v_gauss = SetField::Some(ms1_corr_v_gauss); self.raising_cycles = SetField::Some(raising_cycles); self.falling_cycles = SetField::Some(falling_cycles); } @@ -297,17 +326,33 @@ impl<'q> SearchResultBuilder<'q> { falling_cycles: expect_some!(falling_cycles), apex_lazyerscore: expect_some!(apex_lazyerscore), - apex_lazyerscore_vs_baseline: expect_some!(apex_lazyerscore_vs_baseline), - // ms1_ms2_correlation: self - // .ms1_ms2_correlation - // .expect_some("ms1_ms2_correlation", 
"ms1_ms2_correlation")?, - apex_norm_lazyerscore_vs_baseline: expect_some!(apex_norm_lazyerscore_vs_baseline), - ms2_cosine_ref_similarity: expect_some!(ms2_cosine_ref_similarity), - ms2_corr_v_gauss: expect_some!(ms2_corr_v_gauss), ms2_summed_transition_intensity: expect_some!(ms2_summed_transition_intensity), ms2_lazyerscore: expect_some!(ms2_lazyerscore), ms2_isotope_lazyerscore: expect_some!(ms2_isotope_lazyerscore), ms2_isotope_lazyerscore_ratio: expect_some!(ms2_isotope_lazyerscore_ratio), + lazyscore_z: expect_some!(lazyscore_z), + lazyscore_vs_baseline: expect_some!(lazyscore_vs_baseline), + + split_product_score: expect_some!(split_product_score), + cosine_au_score: expect_some!(cosine_au_score), + scribe_au_score: expect_some!(scribe_au_score), + coelution_gradient_cosine: expect_some!(coelution_gradient_cosine), + coelution_gradient_scribe: expect_some!(coelution_gradient_scribe), + cosine_weighted_coelution: expect_some!(cosine_weighted_coelution), + cosine_gradient_consistency: expect_some!(cosine_gradient_consistency), + scribe_weighted_coelution: expect_some!(scribe_weighted_coelution), + scribe_gradient_consistency: expect_some!(scribe_gradient_consistency), + peak_shape: expect_some!(peak_shape), + ratio_cv: expect_some!(ratio_cv), + centered_apex: expect_some!(centered_apex), + precursor_coelution: expect_some!(precursor_coelution), + fragment_coverage: expect_some!(fragment_coverage), + precursor_apex_match: expect_some!(precursor_apex_match), + xic_quality: expect_some!(xic_quality), + fragment_apex_agreement: expect_some!(fragment_apex_agreement), + isotope_correlation: expect_some!(isotope_correlation), + gaussian_correlation: expect_some!(gaussian_correlation), + per_frag_gaussian_corr: expect_some!(per_frag_gaussian_corr), ms2_mz_error_0: mz2_e0, ms2_mz_error_1: mz2_e1, @@ -324,14 +369,7 @@ impl<'q> SearchResultBuilder<'q> { ms2_mobility_error_5: mob2_e5, ms2_mobility_error_6: mob2_e6, - ms2_coelution_score: 
expect_some!(ms2_coelution_score), - ms1_cosine_ref_similarity: expect_some!(ms1_cosine_ref_similarity), ms1_summed_precursor_intensity: expect_some!(ms1_summed_precursor_intensity), - ms1_corr_v_gauss: expect_some!(ms1_corr_v_gauss), - ms1_coelution_score: expect_some!(ms1_coelution_score), - // ms1_coelution_score: self - // .ms1_coelution_score - // .expect_some("ms1_coelution_score", "ms1_coelution_score")?, ms1_mz_error_0: mz1_e0, ms1_mz_error_1: mz1_e1, ms1_mz_error_2: mz1_e2, @@ -356,8 +394,8 @@ impl<'q> SearchResultBuilder<'q> { qvalue: f32::NAN, delta_group: f32::NAN, delta_group_ratio: f32::NAN, - recalibrated_query_rt: f32::NAN, - calibrated_sq_delta_theo_rt: f32::NAN, + recalibrated_query_rt: ref_rt, + calibrated_sq_delta_theo_rt: sq_delta_theo_rt, }; Ok(results) @@ -403,22 +441,36 @@ pub struct IonSearchResults { // MS2 pub npeaks: u8, pub apex_lazyerscore: f32, - pub apex_lazyerscore_vs_baseline: f32, - pub apex_norm_lazyerscore_vs_baseline: f32, - pub ms2_cosine_ref_similarity: f32, - pub ms2_coelution_score: f32, - pub ms2_corr_v_gauss: f32, pub ms2_summed_transition_intensity: f32, pub ms2_lazyerscore: f32, pub ms2_isotope_lazyerscore: f32, pub ms2_isotope_lazyerscore_ratio: f32, + pub lazyscore_z: f32, + pub lazyscore_vs_baseline: f32, + + // Split product & apex features + pub split_product_score: f32, + pub cosine_au_score: f32, + pub scribe_au_score: f32, + pub coelution_gradient_cosine: f32, + pub coelution_gradient_scribe: f32, + pub cosine_weighted_coelution: f32, + pub cosine_gradient_consistency: f32, + pub scribe_weighted_coelution: f32, + pub scribe_gradient_consistency: f32, + pub peak_shape: f32, + pub ratio_cv: f32, + pub centered_apex: f32, + pub precursor_coelution: f32, + pub fragment_coverage: f32, + pub precursor_apex_match: f32, + pub xic_quality: f32, + pub fragment_apex_agreement: f32, + pub isotope_correlation: f32, + pub gaussian_correlation: f32, + pub per_frag_gaussian_corr: f32, // MS2 - Split - // Flattening manually bc 
serde(flatten) - // is not supported by csv ... - // https://github.com/BurntSushi/rust-csv/pull/223 - // Q: Is it supported by parquet? - // A: As of 2025-May-20, it is not. pub ms2_mz_error_0: f32, pub ms2_mz_error_1: f32, pub ms2_mz_error_2: f32, @@ -435,10 +487,7 @@ pub struct IonSearchResults { pub ms2_mobility_error_6: f32, // MS1 - pub ms1_cosine_ref_similarity: f32, - pub ms1_coelution_score: f32, pub ms1_summed_precursor_intensity: f32, - pub ms1_corr_v_gauss: f32, // MS1 Split pub ms1_mz_error_0: f32, diff --git a/rust/timsseek/src/scoring/timings.rs b/rust/timsseek/src/scoring/timings.rs index ac03abf..3f0a3ff 100644 --- a/rust/timsseek/src/scoring/timings.rs +++ b/rust/timsseek/src/scoring/timings.rs @@ -58,3 +58,21 @@ impl std::ops::AddAssign for ScoreTimings { self.finalization += rhs.finalization; } } + +/// Phase-level + stage-level timing for the two-pass pipeline. +/// All durations are in milliseconds. +#[derive(Debug, Default, Serialize)] +pub struct PipelineTimings { + /// Wall time for Phase 1 (broad prescore, all peptides). + pub phase1_prescore_ms: u64, + /// Wall time for Phase 2 (calibration: RT fit + error measurement). + pub phase2_calibration_ms: u64, + /// Time spent building calibrated chromatograms in Phase 3. + pub phase3_prescore_ms: u64, + /// Time spent finding peak apex in Phase 3 (typically the bottleneck). + pub phase3_localize_ms: u64, + /// Time spent on secondary spectral query in Phase 3. + pub phase3_secondary_query_ms: u64, + /// Time spent assembling final results in Phase 3. + pub phase3_finalization_ms: u64, +} diff --git a/rust/timsseek_cli/src/cli.rs b/rust/timsseek_cli/src/cli.rs index 70657c4..9a5bf1e 100644 --- a/rust/timsseek_cli/src/cli.rs +++ b/rust/timsseek_cli/src/cli.rs @@ -26,6 +26,13 @@ pub struct Cli { #[arg(short, long)] pub speclib_file: Option, + /// Path to a calibration library (optional). 
+ /// If provided, Phase 1 prescore uses this library for calibrant selection, + /// while Phase 3 scoring uses the main speclib. + /// If not provided, the main speclib is used for both phases. + #[arg(long)] + pub calib_lib: Option, + /// Path to the output directory #[arg(short, long)] pub output_dir: Option, diff --git a/rust/timsseek_cli/src/main.rs b/rust/timsseek_cli/src/main.rs index 8a4f839..fa5b286 100644 --- a/rust/timsseek_cli/src/main.rs +++ b/rust/timsseek_cli/src/main.rs @@ -51,6 +51,7 @@ static GLOBAL: MiMalloc = MiMalloc; struct ValidatedInputs { dotd_files: Vec, speclib_path: std::path::PathBuf, + calib_lib_path: Option, output_directory: std::path::PathBuf, overwrite: bool, } @@ -102,6 +103,18 @@ fn validate_inputs( } info!("✓ Speclib file exists: {:?}", speclib_path); + // Validate calib lib if provided + let calib_lib_path = args.calib_lib.clone(); + if let Some(ref path) = calib_lib_path { + if !path.exists() { + return Err(errors::CliError::Io { + source: "Calibration library file does not exist".to_string(), + path: Some(path.to_string_lossy().to_string()), + }); + } + info!("✓ Calibration library exists: {:?}", path); + } + // Validate all raw files exist for dotd_file in &dotd_files { if !dotd_file.exists() { @@ -183,6 +196,7 @@ fn validate_inputs( Ok(ValidatedInputs { dotd_files, speclib_path, + calib_lib_path, output_directory, overwrite: args.overwrite, }) @@ -222,6 +236,7 @@ fn get_frag_range(file: &TimsTofPath) -> TupleRange { fn process_single_file( dotd_file: &std::path::Path, speclib_path: &std::path::Path, + calib_lib_path: Option<&std::path::Path>, config: &Config, base_output_dir: &std::path::Path, overwrite: bool, @@ -298,6 +313,7 @@ fn process_single_file( // Process speclib processing::process_speclib( speclib_path, + calib_lib_path, &pipeline, config.analysis.chunk_size, &file_output_config, @@ -472,6 +488,7 @@ fn main() -> std::result::Result<(), errors::CliError> { match process_single_file( dotd_file, 
&validated.speclib_path, + validated.calib_lib_path.as_deref(), &config, &validated.output_directory, validated.overwrite, diff --git a/rust/timsseek_cli/src/processing.rs b/rust/timsseek_cli/src/processing.rs index 44c2d0f..d4eac76 100644 --- a/rust/timsseek_cli/src/processing.rs +++ b/rust/timsseek_cli/src/processing.rs @@ -6,15 +6,32 @@ use indicatif::{ use std::path::Path; use std::time::Instant; use timsquery::IndexedTimstofPeaks; +use timsquery::MzMobilityStatsCollector; +use timsquery::SpectralCollector; +use timsquery::Tolerance; +use timsquery::models::tolerance::{ + MobilityTolerance, + MzTolerance, + QuadTolerance, + RtTolerance, +}; use timsseek::data_sources::speclib::Speclib; use timsseek::errors::TimsSeekError; use timsseek::ml::qvalues::report_qvalues_at_thresholds; use timsseek::ml::rescore; use timsseek::rt_calibration::{ - recalibrate_results, - recalibrate_speclib, + CalibRtError, + CalibrationResult, + Point, + calibrate_with_ranges, +}; +use timsseek::scoring::{ + CalibrantCandidate, + CalibrantHeap, + CalibrationConfig, + PipelineTimings, + ScoreTimings, }; -use timsseek::scoring::ScoreTimings; use timsseek::scoring::pipeline::ScoringPipeline; use timsseek::scoring::search_results::{ IonSearchResults, @@ -22,87 +39,176 @@ use timsseek::scoring::search_results::{ }; use timsseek::{ DecoyStrategy, + IonAnnot, ScorerQueriable, }; use tracing::{ debug, info, + warn, }; +/// Check that two speclibs are on a compatible RT scale. +/// Warns loudly if the RT ranges don't overlap, which would produce a useless calibration. 
+fn check_rt_scale_compatibility(main_lib: &Speclib, calib_lib: &Speclib) { + fn rt_range(lib: &Speclib) -> (f32, f32) { + let mut min_rt = f32::INFINITY; + let mut max_rt = f32::NEG_INFINITY; + for item in lib.as_slice() { + let rt = item.query.rt_seconds(); + min_rt = min_rt.min(rt); + max_rt = max_rt.max(rt); + } + (min_rt, max_rt) + } + + let (main_min, main_max) = rt_range(main_lib); + let (calib_min, calib_max) = rt_range(calib_lib); + + info!( + "RT ranges — main speclib: [{:.1}, {:.1}]s, calib lib: [{:.1}, {:.1}]s", + main_min, main_max, calib_min, calib_max + ); + + // Check overlap + let overlap_start = main_min.max(calib_min); + let overlap_end = main_max.min(calib_max); + + if overlap_start >= overlap_end { + warn!("!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!"); + warn!("!! RT SCALE MISMATCH: main speclib and calibration library !!"); + warn!("!! have NO overlapping RT range. The calibration will be !!"); + warn!("!! meaningless. Ensure both libraries use the same iRT !!"); + warn!("!! scale (e.g., both from the same prediction model). !!"); + warn!("!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!"); + return; + } + + let main_span = main_max - main_min; + let overlap_span = overlap_end - overlap_start; + let overlap_pct = overlap_span / main_span * 100.0; + + if overlap_pct < 50.0 { + warn!("!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!"); + warn!( + "!! RT SCALE WARNING: only {:.0}% overlap between main speclib !!", overlap_pct + ); + warn!("!! and calibration library. Calibration may be unreliable. !!"); + warn!("!! Ensure both libraries use the same iRT scale. 
!!"); + warn!("!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!"); + } else if overlap_pct < 80.0 { + warn!( + "RT overlap between main speclib and calib lib is {:.0}% — may affect calibration at the extremes", + overlap_pct + ); + } +} + #[cfg_attr( feature = "instrumentation", tracing::instrument(skip_all, level = "trace") )] pub fn main_loop( - // query_iterator: impl ExactSizeIterator, - // # I would like this to be streaming - mut query_iterator: Speclib, + speclib: Speclib, + calib_lib: Option, pipeline: &ScoringPipeline, chunk_size: usize, out_path: &OutputConfig, -) -> std::result::Result { - let total = query_iterator.len(); - let mut chunk_num = 0; - let mut nqueried = 0; - let mut nwritten = 0; - let start = Instant::now(); - - let mut all_timings = ScoreTimings::default(); - let style = ProgressStyle::with_template( - "{spinner:.green} [{elapsed_precise}] [{wide_bar:.cyan/blue}] {pos}/{len} ({eta})", - ) - .unwrap(); - let mut results: Vec = Vec::new(); - query_iterator - .as_slice() - .chunks(chunk_size) - .progress_with_style(style) - .for_each(|chunk| { - nqueried += chunk.len(); - // Parallelism happens here within the process_batch function - let (mut out, timings): (Vec, ScoreTimings) = pipeline.process_batch(chunk); - all_timings += timings; - nwritten += out.len(); - out.sort_unstable_by(|x, y| x.main_score.partial_cmp(&y.main_score).unwrap()); - debug!("Worst score in chunk: {:#?}", out[0]); - if let Some(last) = out.last() { - debug!("Best Score in chunk: {:#?}", last); - } - results.extend(out.iter().cloned()); - - chunk_num += 1; - let pct_done = (nqueried as f64 / total as f64) * 100.0; - let estimated_total = start.elapsed().as_secs_f64() * (100.0 / pct_done); - let eta = (estimated_total - start.elapsed().as_secs_f64()).max(0.0); - let eta_duration = std::time::Duration::from_secs_f64(eta); - info!( - "Processed chunk {}, total queries {}, total written {}, elapsed {:?}, {:.2}% done, ETA {:?}", - chunk_num, nqueried, 
nwritten, start.elapsed(), pct_done, eta_duration - ); - }); +) -> std::result::Result { + let calib_config = CalibrationConfig::default(); - info!("Processed {} queries, found {} results", nqueried, nwritten); + // === PHASE 1: Broad prescore -> collect top calibrants === + // Use calibration library if provided, otherwise fall back to main speclib + let phase1_lib = calib_lib.as_ref().unwrap_or(&speclib); + if let Some(ref clib) = calib_lib { + info!( + "Phase 1: Broad prescore using calibration library ({} entries)...", + clib.len() + ); + check_rt_scale_compatibility(&speclib, clib); + } else { + info!("Phase 1: Broad prescore (unrestricted RT)..."); + } + let phase1_start = Instant::now(); + let calibrants = phase1_prescore(phase1_lib, pipeline, chunk_size, &calib_config); + let phase1_ms = phase1_start.elapsed().as_millis() as u64; info!( - "Finished processing {} chunks in {:?}", - chunk_num, - start.elapsed() + "Phase 1 complete: {} calibrant candidates in {}ms", + calibrants.len(), + phase1_ms ); - let mut results = target_decoy_compete(results); - - // Sort in descending order of score - results.sort_unstable_by(|x, y| y.main_score.partial_cmp(&x.main_score).unwrap()); - assert!(results.first().unwrap().main_score >= results.last().unwrap().main_score); + // === PHASE 2: Calibration (fit RT + measure errors + derive tolerances) === + // Build lookup from main speclib when using a separate calib lib. + // Maps (quantized_precursor_mz, charge) -> Vec<(rt, sorted_fragment_mzs)>. + // Matching requires same precursor (0.01 Da) + charge + at least 5 shared fragment masses. 
+ let main_lookup: Option< + std::collections::HashMap<(i64, u8), Vec<(f32, Vec)>>, + > = if calib_lib.is_some() { + let mut map: std::collections::HashMap<(i64, u8), Vec<(f32, Vec)>> = + std::collections::HashMap::new(); + for item in speclib.as_slice() { + let mz_key = (item.query.mono_precursor_mz() * 100.0).round() as i64; + let charge = item.query.precursor_charge(); + let mut frag_mzs: Vec = item + .query + .iter_fragments() + .map(|(_, mz)| (mz * 100.0).round() as i64) + .collect(); + frag_mzs.sort_unstable(); + map.entry((mz_key, charge)) + .or_default() + .push((item.query.rt_seconds(), frag_mzs)); + } + info!( + "Built precursor+fragment lookup with {} unique (mz, charge) buckets from main speclib", + map.len() + ); + Some(map) + } else { + None + }; - match recalibrate_speclib(&mut query_iterator, &results) { + info!("Phase 2: Calibration..."); + let phase2_start = Instant::now(); + let calibration = match calibrate_from_phase1( + calibrants, + phase1_lib, + main_lookup.as_ref(), + pipeline, + &calib_config, + ) { Ok(calib) => { - info!("Recalibrated speclib retention times based on search results"); - recalibrate_results(&calib, results.as_mut_slice()); + info!("Calibration succeeded"); + calib } Err(e) => { - tracing::error!("Error recalibrating speclib retention times: {:?}", e); + tracing::error!("Calibration failed: {:?}. 
Using fallback.", e); + CalibrationResult::fallback(pipeline) } }; + let phase2_ms = phase2_start.elapsed().as_millis() as u64; + + // === PHASE 3: Narrow scoring with calibrated tolerances === + info!("Phase 3: Scoring with calibrated extraction..."); + let phase3_start = Instant::now(); + let mut phase3_timings = ScoreTimings::default(); + let results = phase3_score( + &speclib, + pipeline, + &calibration, + chunk_size, + &mut phase3_timings, + ); + info!( + "Phase 3 complete: {} scored peptides in {:?}", + results.len(), + phase3_start.elapsed() + ); + + // === Post-processing === + let mut results = target_decoy_compete(results); + results.sort_unstable_by(|x, y| y.main_score.partial_cmp(&x.main_score).unwrap()); let data = rescore(results); for val in report_qvalues_at_thresholds(&data, &[0.01, 0.05, 0.1, 0.5, 1.0]) { @@ -130,9 +236,296 @@ pub fn main_loop( pq_writer.close(); info!("Wrote final results to {:?}", out_path_pq); - Ok(all_timings) + Ok(PipelineTimings { + phase1_prescore_ms: phase1_ms, + phase2_calibration_ms: phase2_ms, + phase3_prescore_ms: phase3_timings.prescore.as_millis() as u64, + phase3_localize_ms: phase3_timings.localize.as_millis() as u64, + phase3_secondary_query_ms: phase3_timings.secondary_query.as_millis() as u64, + phase3_finalization_ms: phase3_timings.finalization.as_millis() as u64, + }) +} + +#[cfg_attr( + feature = "instrumentation", + tracing::instrument(skip_all, level = "trace") +)] +fn phase1_prescore( + speclib: &Speclib, + pipeline: &ScoringPipeline, + chunk_size: usize, + config: &CalibrationConfig, +) -> Vec { + let style = ProgressStyle::with_template( + "{spinner:.green} Phase 1 [{elapsed_precise}] [{wide_bar:.cyan/blue}] {pos}/{len} ({eta})", + ) + .unwrap(); + + let mut global_heap = CalibrantHeap::new(config.n_calibrants); + let mut offset = 0usize; + + for chunk in speclib.as_slice().chunks(chunk_size).progress_with_style(style) { + let chunk_heap = pipeline.prescore_batch(chunk, offset, config); + global_heap = 
global_heap.merge(chunk_heap); + offset += chunk.len(); + } + + global_heap.into_vec() +} + +#[cfg_attr( + feature = "instrumentation", + tracing::instrument(skip_all, level = "trace") +)] +/// Count shared fragment m/z values between two sorted lists (within 0.01 Da = 1 unit of quantized m/z). +fn count_shared_fragments(a: &[i64], b: &[i64]) -> usize { + let mut i = 0; + let mut j = 0; + let mut count = 0; + while i < a.len() && j < b.len() { + let diff = a[i] - b[j]; + if diff.abs() <= 1 { + count += 1; + i += 1; + j += 1; + } else if diff < 0 { + i += 1; + } else { + j += 1; + } + } + count +} + +const MIN_SHARED_FRAGMENTS: usize = 5; + +fn calibrate_from_phase1( + candidates: Vec, + phase1_lib: &Speclib, + main_lookup: Option<&std::collections::HashMap<(i64, u8), Vec<(f32, Vec)>>>, + pipeline: &ScoringPipeline, + config: &CalibrationConfig, +) -> Result { + // === Step A: Fit iRT -> RT curve === + // When a separate calib lib is used, we need the main speclib's iRT as x. + // The calibration curve must map main_speclib_irt -> observed_rt. 
+ let points: Vec = candidates + .iter() + .filter_map(|c| { + let calib_item = &phase1_lib.as_slice()[c.speclib_index]; + + let irt_for_curve = match main_lookup { + Some(lookup) => { + let mz_key = + (calib_item.query.mono_precursor_mz() * 100.0).round() as i64; + let charge = calib_item.query.precursor_charge(); + let bucket = lookup.get(&(mz_key, charge))?; + + // Build sorted fragment m/z list for the calib entry + let mut calib_frags: Vec = calib_item + .query + .iter_fragments() + .map(|(_, mz)| (mz * 100.0).round() as i64) + .collect(); + calib_frags.sort_unstable(); + + // Find best match: most shared fragments, break ties by closest RT + let calib_rt = calib_item.query.rt_seconds(); + bucket + .iter() + .filter_map(|(main_rt, main_frags)| { + let shared = count_shared_fragments(&calib_frags, main_frags); + if shared >= MIN_SHARED_FRAGMENTS { + Some((shared, (main_rt - calib_rt).abs(), *main_rt)) + } else { + None + } + }) + // Best = most shared fragments, then closest RT + .min_by(|a, b| b.0.cmp(&a.0).then(a.1.partial_cmp(&b.1).unwrap())) + .map(|(_, _, rt)| rt)? 
+ } + None => calib_item.query.rt_seconds(), + }; + + Some(Point { + x: irt_for_curve as f64, + y: c.apex_rt_seconds as f64, + weight: 1.0, + }) + }) + .collect(); + + if main_lookup.is_some() { + info!( + "Calibration: {} of {} calibrants matched in main speclib (>={} shared fragments)", + points.len(), + candidates.len(), + MIN_SHARED_FRAGMENTS, + ); + } + + let (min_x, max_x, min_y, max_y) = points.iter().fold( + ( + f64::INFINITY, + f64::NEG_INFINITY, + f64::INFINITY, + f64::NEG_INFINITY, + ), + |(mnx, mxx, mny, mxy), p| (mnx.min(p.x), mxx.max(p.x), mny.min(p.y), mxy.max(p.y)), + ); + + let cal_curve = + calibrate_with_ranges(&points, (min_x, max_x), (min_y, max_y), config.grid_size)?; + + // === Step B: Measure m/z and mobility errors at calibrant apexes === + let query_tolerance = Tolerance { + ms: MzTolerance::Ppm((10.0, 10.0)), + rt: RtTolerance::Minutes(( + config.calibration_query_rt_window_minutes, + config.calibration_query_rt_window_minutes, + )), + mobility: MobilityTolerance::Pct((5.0, 5.0)), + quad: QuadTolerance::Absolute((0.1, 0.1)), + }; + + let mut mz_errors_ppm: Vec = Vec::with_capacity(candidates.len()); + let mut mobility_errors_pct: Vec = Vec::with_capacity(candidates.len()); + + for candidate in &candidates { + let item = &phase1_lib.as_slice()[candidate.speclib_index]; + let query_at_apex = item + .query + .clone() + .with_rt_seconds(candidate.apex_rt_seconds); + let mut agg: SpectralCollector = + SpectralCollector::new(query_at_apex); + pipeline.index.add_query(&mut agg, &query_tolerance); + + for ((_key, expected_mz), stats) in agg.iter_precursors() { + if let (Ok(obs_mz), Ok(obs_mob)) = (stats.mean_mz(), stats.mean_mobility()) { + let mz_err = (obs_mz - expected_mz) / expected_mz * 1e6; + mz_errors_ppm.push(mz_err as f32); + + let expected_mob = item.query.mobility_ook0() as f64; + let mob_err = (obs_mob - expected_mob) / expected_mob * 100.0; + mobility_errors_pct.push(mob_err as f32); + break; + } + } + } + + // === Step C: Derive 
tolerances from error distributions === + let rt_tolerance_minutes = { + let mut abs_residuals: Vec = points + .iter() + .map(|p| { + let predicted = cal_curve.predict(p.x).unwrap_or(p.y); + (p.y - predicted).abs() + }) + .collect(); + abs_residuals.sort_by(|a, b| a.partial_cmp(b).unwrap()); + let mad_seconds = abs_residuals + .get(abs_residuals.len() / 2) + .copied() + .unwrap_or(0.0); + info!( + "RT residuals: MAD={:.1}s, n={}", + mad_seconds, abs_residuals.len() + ); + (config.rt_sigma_factor * mad_seconds as f32 / 60.0).max(config.min_rt_tolerance_minutes) + }; + + let mz_tolerance_ppm = { + let (l, r) = asymmetric_tolerance(&mz_errors_ppm, config.mz_sigma, 0.1); + (l as f64, r as f64) + }; + + let mobility_tolerance_pct = + asymmetric_tolerance(&mobility_errors_pct, config.mobility_sigma, 0.1); + + info!( + "Calibration: RT tol={:.2} min, m/z tol=({:.1}, {:.1}) ppm, mob tol=({:.1}, {:.1}) %", + rt_tolerance_minutes, + mz_tolerance_ppm.0, + mz_tolerance_ppm.1, + mobility_tolerance_pct.0, + mobility_tolerance_pct.1, + ); + + Ok(CalibrationResult::new( + cal_curve, + rt_tolerance_minutes, + mz_tolerance_ppm, + mobility_tolerance_pct, + )) } +#[cfg_attr( + feature = "instrumentation", + tracing::instrument(skip_all, level = "trace") +)] +fn phase3_score( + speclib: &Speclib, + pipeline: &ScoringPipeline, + calibration: &CalibrationResult, + chunk_size: usize, + timings: &mut ScoreTimings, +) -> Vec { + let style = ProgressStyle::with_template( + "{spinner:.green} Phase 3 [{elapsed_precise}] [{wide_bar:.cyan/blue}] {pos}/{len} ({eta})", + ) + .unwrap(); + + let total_peptides = speclib.as_slice().len(); + let mut results = Vec::new(); + + for chunk in speclib + .as_slice() + .chunks(chunk_size) + .progress_with_style(style) + { + let (batch_results, batch_timings) = + pipeline.score_calibrated_batch(chunk, calibration); + *timings += batch_timings; + results.extend(batch_results); + } + + let skipped = total_peptides - results.len(); + if skipped > total_peptides / 
20 { + warn!( + "{}/{} peptides produced no Phase 3 result (>{:.0}%). \ + If this is unexpected, check calibration quality.", + skipped, + total_peptides, + (skipped as f64 / total_peptides as f64) * 100.0 + ); + } + + results +} + +/// Derive asymmetric tolerance from error distribution. +#[cfg_attr( + feature = "instrumentation", + tracing::instrument(skip_all, level = "trace") +)] +fn asymmetric_tolerance(errors: &[f32], n_sigma: f32, min_val: f32) -> (f32, f32) { + if errors.is_empty() { + return (min_val, min_val); + } + let mean = errors.iter().sum::() / errors.len() as f32; + let variance = errors.iter().map(|e| (e - mean).powi(2)).sum::() / errors.len() as f32; + let std = variance.sqrt(); + let left = (-(mean - n_sigma * std)).max(min_val); + let right = (mean + n_sigma * std).max(min_val); + (left, right) +} + +#[cfg_attr( + feature = "instrumentation", + tracing::instrument(skip_all, level = "trace") +)] fn target_decoy_compete(mut results: Vec) -> Vec { // TODO: re-implement so we dont drop results but instead just flag them as rejected (maybe // a slice where we push rejected results to the end and keep the trailing slice as the "active") @@ -251,12 +644,12 @@ fn target_decoy_compete(mut results: Vec) -> Vec, pipeline: &ScoringPipeline, chunk_size: usize, output: &OutputConfig, decoy_strategy: DecoyStrategy, ) -> std::result::Result<(), TimsSeekError> { - // TODO: I should probably "inline" this function with the main loop info!("Building database from speclib file {:?}", path); info!("Decoy generation strategy: {}", decoy_strategy); @@ -270,7 +663,23 @@ pub fn process_speclib( elap_time, path.display() ); - let timings = main_loop(speclib, pipeline, chunk_size, output)?; + + let calib_lib = match calib_lib_path { + Some(p) => { + info!("Loading calibration library from {:?}", p); + let st = std::time::Instant::now(); + let lib = Speclib::from_file(p, decoy_strategy)?; + info!( + "Loaded calibration library of length {} in {:?}", + lib.len(), + 
st.elapsed() + ); + Some(lib) + } + None => None, + }; + + let timings = main_loop(speclib, calib_lib, pipeline, chunk_size, output)?; let perf_report = serde_json::to_string_pretty(&timings).map_err(|e| TimsSeekError::ParseError { msg: format!("Error serializing performance report to JSON: {}", e), From 0c9d253380c094b14d2075445e3ec05d0b80c78d Mon Sep 17 00:00:00 2001 From: "J. Sebastian Paez" Date: Thu, 9 Apr 2026 19:23:10 -0700 Subject: [PATCH 03/64] refactor: rename ScoringPipeline to Scorer, inline tolerance fields Delete ToleranceHierarchy struct and inline its prescore/secondary fields directly onto Scorer as broad_tolerance and secondary_tolerance. The tertiary_tolerance() method is inlined at its single call site. Update all re-exports, consumer function signatures, and construction sites. --- rust/timsseek/src/lib.rs | 3 +- rust/timsseek/src/rt_calibration.rs | 4 +-- rust/timsseek/src/scoring/mod.rs | 3 +- rust/timsseek/src/scoring/pipeline.rs | 48 +++++++++------------------ rust/timsseek/src/traits.rs | 23 +++++-------- rust/timsseek_cli/src/main.rs | 21 +++++------- rust/timsseek_cli/src/processing.rs | 12 +++---- 7 files changed, 43 insertions(+), 71 deletions(-) diff --git a/rust/timsseek/src/lib.rs b/rust/timsseek/src/lib.rs index cf25376..4408baa 100644 --- a/rust/timsseek/src/lib.rs +++ b/rust/timsseek/src/lib.rs @@ -25,8 +25,7 @@ pub use models::{ }; pub use scoring::{ IonSearchResults, - ScoringPipeline, - ToleranceHierarchy, + Scorer, }; pub use timsquery::ion::{ IonAnnot, diff --git a/rust/timsseek/src/rt_calibration.rs b/rust/timsseek/src/rt_calibration.rs index f950f31..2a4302e 100644 --- a/rust/timsseek/src/rt_calibration.rs +++ b/rust/timsseek/src/rt_calibration.rs @@ -1,5 +1,5 @@ use crate::ScorerQueriable; -use crate::scoring::pipeline::ScoringPipeline; +use crate::scoring::pipeline::Scorer; pub use calibrt::{ CalibRtError, CalibrationCurve as RTCalibration, @@ -58,7 +58,7 @@ impl CalibrationResult { } /// Fallback when calibration 
fails: identity RT mapping, secondary tolerance. - pub fn fallback(pipeline: &ScoringPipeline) -> Self { + pub fn fallback(pipeline: &Scorer) -> Self { let range = pipeline.index.ms1_cycle_mapping().range_milis(); let start = range.0 as f64 / 1000.0; let end = range.1 as f64 / 1000.0; diff --git a/rust/timsseek/src/scoring/mod.rs b/rust/timsseek/src/scoring/mod.rs index c86ba95..a3afa4b 100644 --- a/rust/timsseek/src/scoring/mod.rs +++ b/rust/timsseek/src/scoring/mod.rs @@ -13,8 +13,7 @@ pub use pipeline::{ CalibrantCandidate, CalibrantHeap, CalibrationConfig, - ScoringPipeline, - ToleranceHierarchy, + Scorer, }; pub use search_results::IonSearchResults; pub use timings::{PipelineTimings, ScoreTimings}; diff --git a/rust/timsseek/src/scoring/pipeline.rs b/rust/timsseek/src/scoring/pipeline.rs index c076e01..7f39551 100644 --- a/rust/timsseek/src/scoring/pipeline.rs +++ b/rust/timsseek/src/scoring/pipeline.rs @@ -62,27 +62,6 @@ use super::timings::ScoreTimings; use crate::rt_calibration::CalibrationResult; use tracing::warn; -/// Hierarchical tolerance configuration for the scoring pipeline. -/// -/// Progressive refinement: prescore (broad RT) -> secondary (narrow RT) -> tertiary (tight mobility). -#[derive(Debug, Clone)] -pub struct ToleranceHierarchy { - /// Broad search for prescore phase (full RT range, e.g., +/- 5 min). - pub prescore: Tolerance, - - /// Refined search at detected apex (narrow RT window, e.g., +/- 30-60s). - pub secondary: Tolerance, -} - -impl ToleranceHierarchy { - /// Creates tertiary tolerance with 3% mobility constraints for isotope matching. - pub fn tertiary_tolerance(&self) -> Tolerance { - self.secondary - .clone() - .with_mobility_tolerance(MobilityTolerance::Pct((3.0, 3.0))) - } -} - /// Lightweight calibrant candidate — just enough to re-query in Phase 2. /// Implements Ord by score (ascending) for use in BinaryHeap>. 
#[derive(Debug, Clone)] @@ -322,12 +301,15 @@ fn compute_secondary_lazyscores( /// /// Pipeline stages: build context → find apex → refine → finalize. /// Uses progressive tolerance refinement, metadata separation, and buffer reuse for high throughput. -pub struct ScoringPipeline { +pub struct Scorer { /// Indexed peak data that implements the required query aggregators. pub index: I, - /// Hierarchical tolerance configuration for progressive refinement. - pub tolerances: ToleranceHierarchy, + /// Broad tolerance used during the prescore phase. + pub broad_tolerance: Tolerance, + + /// Refined tolerance used at detected apex for secondary queries. + pub secondary_tolerance: Tolerance, /// m/z range where peptides were fragmented. /// Queries with precursors outside this range are filtered out. @@ -339,7 +321,7 @@ enum SkippingReason { RetentionTimeOutOfBounds, } -impl ScoringPipeline { +impl Scorer { #[cfg_attr( feature = "instrumentation", tracing::instrument(skip_all, level = "trace") @@ -358,8 +340,7 @@ impl ScoringPipeline { let max_range = TupleRange::try_new(max_range.0, max_range.1) .expect("Reference RTs should be sorted and valid"); let rt_range = match self - .tolerances - .prescore + .broad_tolerance .rt_range_as_milis(item.query.rt_seconds()) { OptionallyRestricted::Unrestricted => max_range, @@ -380,7 +361,7 @@ impl ScoringPipeline { ) { Ok(collector) => collector, Err(e) => { - let tol_range = self.tolerances.prescore.rt_range_as_milis(item.query.rt_seconds()); + let tol_range = self.broad_tolerance.rt_range_as_milis(item.query.rt_seconds()); panic!( "Failed to create ChromatogramCollector for query id {:#?}: {:?} with RT tolerance {:#?}", item.query, e, tol_range, @@ -391,7 +372,7 @@ impl ScoringPipeline { tracing::span!(tracing::Level::TRACE, "build_candidate_context::add_query").in_scope( || { - self.index.add_query(&mut agg, &self.tolerances.prescore); + self.index.add_query(&mut agg, &self.broad_tolerance); }, ); @@ -479,7 +460,7 @@ impl 
ScoringPipeline { let new_query = item.query.clone().with_rt_seconds(new_rt_seconds); let mut agg: SpectralCollector<_, MzMobilityStatsCollector> = SpectralCollector::new(new_query); - self.index.add_query(&mut agg, &self.tolerances.secondary); + self.index.add_query(&mut agg, &self.secondary_tolerance); // Calculate weighted mean mobility from observed data let mobility = Self::get_mobility(&agg); @@ -499,7 +480,10 @@ impl ScoringPipeline { let mut agg: SpectralCollector<_, MzMobilityStatsCollector> = SpectralCollector::new(new_query); - let tol_use = self.tolerances.tertiary_tolerance(); + let tol_use = self + .secondary_tolerance + .clone() + .with_mobility_tolerance(MobilityTolerance::Pct((3.0, 3.0))); self.index.add_query(&mut agg, &tol_use); self.index.add_query(&mut isotope_agg, &tol_use); @@ -536,7 +520,7 @@ impl ScoringPipeline { } } -impl ScoringPipeline { +impl Scorer { pub fn process_query_full( &self, item: QueryItemToScore, diff --git a/rust/timsseek/src/traits.rs b/rust/timsseek/src/traits.rs index 438726d..0d7e0fb 100644 --- a/rust/timsseek/src/traits.rs +++ b/rust/timsseek/src/traits.rs @@ -8,7 +8,7 @@ use timsquery::{ SpectralCollector, }; -/// Trait for indexed data that supports the aggregators needed by [`crate::ScoringPipeline`]. +/// Trait for indexed data that supports the aggregators needed by [`crate::scoring::pipeline::Scorer`]. /// /// This trait is a convenience bound that documents exactly what query capabilities /// the scoring engine requires. It's more specific than [`timsquery::GenerallyQueriable`] @@ -25,38 +25,33 @@ use timsquery::{ /// # Why This Trait? 
/// /// Instead of using the general `GenerallyQueriable` trait (which includes -/// `PointIntensityAggregator` that ScoringPipeline doesn't use), this trait: +/// `PointIntensityAggregator` that Scorer doesn't use), this trait: /// -/// - Documents exactly what ScoringPipeline needs +/// - Documents exactly what Scorer needs /// - Makes function signatures more explicit /// - Allows future pipeline variants to have different requirements /// /// # Example /// /// ```ignore -/// use timsseek::{ScoringPipeline, ScorerQueriable, ToleranceHierarchy}; +/// use timsseek::{Scorer, ScorerQueriable}; /// use timscentroid::IndexedTimstofPeaks; /// use timsquery::Tolerance; -/// use std::sync::Arc; /// /// # let peaks: IndexedTimstofPeaks = unimplemented!(); -/// # let ref_rt = Arc::new(vec![0u32; 100]); -/// # let prescore_tol = Tolerance::default(); +/// # let broad_tol = Tolerance::default(); /// # let secondary_tol = Tolerance::default(); /// # let fragmented_range = (400.0, 1200.0).try_into().unwrap(); /// /// // IndexedTimstofPeaks implements ScorerQueriable -/// let pipeline = ScoringPipeline { -/// index_cycle_rt_ms: ref_rt, +/// let scorer = Scorer { /// index: peaks, -/// tolerances: ToleranceHierarchy { -/// prescore: prescore_tol, -/// secondary: secondary_tol, -/// }, +/// broad_tolerance: broad_tol, +/// secondary_tolerance: secondary_tol, /// fragmented_range, /// }; /// -/// // ScoringPipeline can now use ChromatogramCollector, SpectralCollector variants +/// // Scorer can now use ChromatogramCollector, SpectralCollector variants /// ``` /// /// # Implementation Note diff --git a/rust/timsseek_cli/src/main.rs b/rust/timsseek_cli/src/main.rs index fa5b286..b2770f3 100644 --- a/rust/timsseek_cli/src/main.rs +++ b/rust/timsseek_cli/src/main.rs @@ -8,10 +8,7 @@ use timsquery::TimsTofPath; use timsquery::models::tolerance::RtTolerance; use timsquery::serde::load_index_auto; use timsquery::utils::TupleRange; -use timsseek::scoring::{ - ScoringPipeline, - 
ToleranceHierarchy, -}; +use timsseek::scoring::Scorer; use tracing::{ error, info, @@ -260,16 +257,14 @@ fn process_single_file( let fragmented_range = get_frag_range(&timstofpath); - let pipeline = ScoringPipeline { + let pipeline = Scorer { index, - tolerances: ToleranceHierarchy { - prescore: config.analysis.tolerance.clone(), - secondary: config - .analysis - .tolerance - .clone() - .with_rt_tolerance(RtTolerance::Minutes((0.2, 0.2))), - }, + broad_tolerance: config.analysis.tolerance.clone(), + secondary_tolerance: config + .analysis + .tolerance + .clone() + .with_rt_tolerance(RtTolerance::Minutes((0.2, 0.2))), fragmented_range, }; diff --git a/rust/timsseek_cli/src/processing.rs b/rust/timsseek_cli/src/processing.rs index d4eac76..09487f7 100644 --- a/rust/timsseek_cli/src/processing.rs +++ b/rust/timsseek_cli/src/processing.rs @@ -32,7 +32,7 @@ use timsseek::scoring::{ PipelineTimings, ScoreTimings, }; -use timsseek::scoring::pipeline::ScoringPipeline; +use timsseek::scoring::pipeline::Scorer; use timsseek::scoring::search_results::{ IonSearchResults, ResultParquetWriter, @@ -111,7 +111,7 @@ fn check_rt_scale_compatibility(main_lib: &Speclib, calib_lib: &Speclib) { pub fn main_loop( speclib: Speclib, calib_lib: Option, - pipeline: &ScoringPipeline, + pipeline: &Scorer, chunk_size: usize, out_path: &OutputConfig, ) -> std::result::Result { @@ -252,7 +252,7 @@ pub fn main_loop( )] fn phase1_prescore( speclib: &Speclib, - pipeline: &ScoringPipeline, + pipeline: &Scorer, chunk_size: usize, config: &CalibrationConfig, ) -> Vec { @@ -303,7 +303,7 @@ fn calibrate_from_phase1( candidates: Vec, phase1_lib: &Speclib, main_lookup: Option<&std::collections::HashMap<(i64, u8), Vec<(f32, Vec)>>>, - pipeline: &ScoringPipeline, + pipeline: &Scorer, config: &CalibrationConfig, ) -> Result { // === Step A: Fit iRT -> RT curve === @@ -467,7 +467,7 @@ fn calibrate_from_phase1( )] fn phase3_score( speclib: &Speclib, - pipeline: &ScoringPipeline, + pipeline: &Scorer, 
calibration: &CalibrationResult, chunk_size: usize, timings: &mut ScoreTimings, @@ -645,7 +645,7 @@ fn target_decoy_compete(mut results: Vec) -> Vec, - pipeline: &ScoringPipeline, + pipeline: &Scorer, chunk_size: usize, output: &OutputConfig, decoy_strategy: DecoyStrategy, From 6eb710a40bc852a8b54b29dc9de3a5674344da70 Mon Sep 17 00:00:00 2001 From: "J. Sebastian Paez" Date: Thu, 9 Apr 2026 19:25:20 -0700 Subject: [PATCH 04/64] feat: add ScoringFields, ScoredCandidate, CompetedCandidate, FinalResult types --- rust/timsseek/src/scoring/mod.rs | 2 + rust/timsseek/src/scoring/results.rs | 117 +++++++++++++++++++++++++++ 2 files changed, 119 insertions(+) create mode 100644 rust/timsseek/src/scoring/results.rs diff --git a/rust/timsseek/src/scoring/mod.rs b/rust/timsseek/src/scoring/mod.rs index a3afa4b..15d2d62 100644 --- a/rust/timsseek/src/scoring/mod.rs +++ b/rust/timsseek/src/scoring/mod.rs @@ -3,6 +3,7 @@ pub mod apex_finding; pub mod full_results; mod offsets; pub mod pipeline; +pub mod results; mod scores; pub mod search_results; pub mod timings; @@ -15,6 +16,7 @@ pub use pipeline::{ CalibrationConfig, Scorer, }; +pub use results::{ScoredCandidate, CompetedCandidate, FinalResult, ScoringFields}; pub use search_results::IonSearchResults; pub use timings::{PipelineTimings, ScoreTimings}; diff --git a/rust/timsseek/src/scoring/results.rs b/rust/timsseek/src/scoring/results.rs new file mode 100644 index 0000000..d76f5f2 --- /dev/null +++ b/rust/timsseek/src/scoring/results.rs @@ -0,0 +1,117 @@ +use serde::Serialize; + +pub const NUM_MS2_IONS: usize = 7; +pub const NUM_MS1_IONS: usize = 3; + +/// Shared scoring fields produced by Phase 3. Every field is guaranteed populated. 
+#[derive(Debug, Clone, Serialize)] +pub struct ScoringFields { + // Identity + pub sequence: String, + pub library_id: u32, + pub decoy_group_id: u32, + pub precursor_mz: f64, + pub precursor_charge: u8, + pub precursor_mobility: f32, + pub is_target: bool, + + // RT + pub query_rt_seconds: f32, + pub obs_rt_seconds: f32, + pub delta_rt: f32, + pub sq_delta_rt: f32, + pub calibrated_sq_delta_rt: f32, + pub recalibrated_rt: f32, + + // Mobility + pub obs_mobility: f32, + pub delta_ms1_ms2_mobility: f32, + pub sq_delta_ms1_ms2_mobility: f32, + + // Primary scores + pub main_score: f32, + pub delta_next: f32, + pub delta_second_next: f32, + + // Lazyscores + pub apex_lazyscore: f32, + pub ms2_lazyscore: f32, + pub ms2_isotope_lazyscore: f32, + pub ms2_isotope_lazyscore_ratio: f32, + pub lazyscore_z: f32, + pub lazyscore_vs_baseline: f32, + + // Split product (9 components) + pub split_product_score: f32, + pub cosine_au: f32, + pub scribe_au: f32, + pub cosine_cg: f32, + pub scribe_cg: f32, + pub cosine_weighted_coelution: f32, + pub cosine_gradient_consistency: f32, + pub scribe_weighted_coelution: f32, + pub scribe_gradient_consistency: f32, + + // 11 apex features + pub peak_shape: f32, + pub ratio_cv: f32, + pub centered_apex: f32, + pub precursor_coelution: f32, + pub fragment_coverage: f32, + pub precursor_apex_match: f32, + pub xic_quality: f32, + pub fragment_apex_agreement: f32, + pub isotope_correlation: f32, + pub gaussian_correlation: f32, + pub per_frag_gaussian_corr: f32, + + // Peak shape + pub rising_cycles: u8, + pub falling_cycles: u8, + + // Counts + pub npeaks: u8, + pub n_scored_fragments: u8, + + // Intensities + pub ms2_summed_intensity: f32, + pub ms1_summed_intensity: f32, + + // Per-ion errors (real arrays, not numbered fields) + pub ms2_mz_errors: [f32; NUM_MS2_IONS], + pub ms2_mobility_errors: [f32; NUM_MS2_IONS], + pub ms1_mz_errors: [f32; NUM_MS1_IONS], + pub ms1_mobility_errors: [f32; NUM_MS1_IONS], + + // Relative intensities + pub 
ms2_intensity_ratios: [f32; NUM_MS2_IONS], + pub ms1_intensity_ratios: [f32; NUM_MS1_IONS], +} + +/// Phase 3 output. All scoring fields guaranteed populated. +#[derive(Debug, Clone, Serialize)] +pub struct ScoredCandidate { + pub scoring: ScoringFields, +} + +/// After target-decoy competition. +#[derive(Debug, Clone, Serialize)] +pub struct CompetedCandidate { + pub scoring: ScoringFields, + pub delta_group: f32, + pub delta_group_ratio: f32, + /// Scratch field for CrossValidatedScorer (written during rescore) + pub(crate) discriminant_score: f32, + /// Scratch field for q-value assignment + pub(crate) qvalue: f32, +} + +/// After rescoring. Written to Parquet. +#[derive(Debug, Clone, Serialize)] +pub struct FinalResult { + pub scoring: ScoringFields, + pub delta_group: f32, + pub delta_group_ratio: f32, + pub discriminant_score: f32, + pub qvalue: f32, +} From 30c4fa9ec4b5bcf83f9fe3c1a74c3280a598d2e1 Mon Sep 17 00:00:00 2001 From: "J. Sebastian Paez" Date: Thu, 9 Apr 2026 19:29:14 -0700 Subject: [PATCH 05/64] feat: add ScoredCandidateBuilder, update finalize_results MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Ports SearchResultBuilder → ScoredCandidateBuilder in results.rs using the new ScoringFields field names and [f32; N] arrays for per-ion data. Updates finalize_results in pipeline.rs to use the new builder and return Result. Callers (Task 5) still expect IonSearchResults; those mismatches are intentional intermediate state. 
--- rust/timsseek/src/scoring/pipeline.rs | 13 +- rust/timsseek/src/scoring/results.rs | 344 ++++++++++++++++++++++++++ 2 files changed, 350 insertions(+), 7 deletions(-) diff --git a/rust/timsseek/src/scoring/pipeline.rs b/rust/timsseek/src/scoring/pipeline.rs index 7f39551..18ef9a4 100644 --- a/rust/timsseek/src/scoring/pipeline.rs +++ b/rust/timsseek/src/scoring/pipeline.rs @@ -54,10 +54,11 @@ use super::apex_finding::{ use super::full_results::FullQueryResult; use super::hyperscore::single_lazyscore; use super::offsets::MzMobilityOffsets; -use super::search_results::{ - IonSearchResults, - SearchResultBuilder, +use super::results::{ + ScoredCandidate, + ScoredCandidateBuilder, }; +use super::search_results::IonSearchResults; use super::timings::ScoreTimings; use crate::rt_calibration::CalibrationResult; use tracing::warn; @@ -502,14 +503,12 @@ impl Scorer { main_score: &ApexScore, inner_collector: &SpectralCollector, isotope_collector: &SpectralCollector, - ) -> Result { + ) -> Result { let offsets = MzMobilityOffsets::new(inner_collector, metadata.ref_mobility_ook0 as f64); let rel_inten = RelativeIntensities::new(inner_collector); let lazyscores = compute_secondary_lazyscores(inner_collector, isotope_collector); - let builder = SearchResultBuilder::default(); - - builder + ScoredCandidateBuilder::default() .with_metadata(metadata) .with_nqueries(nqueries) .with_sorted_offsets(&offsets) diff --git a/rust/timsseek/src/scoring/results.rs b/rust/timsseek/src/scoring/results.rs index d76f5f2..921a662 100644 --- a/rust/timsseek/src/scoring/results.rs +++ b/rust/timsseek/src/scoring/results.rs @@ -1,5 +1,14 @@ use serde::Serialize; +use super::apex_finding::{ + ApexScore, + PeptideMetadata, + RelativeIntensities, +}; +use super::offsets::MzMobilityOffsets; +use super::pipeline::SecondaryLazyScores; +use crate::errors::DataProcessingError; + pub const NUM_MS2_IONS: usize = 7; pub const NUM_MS1_IONS: usize = 3; @@ -115,3 +124,338 @@ pub struct FinalResult { pub 
discriminant_score: f32, pub qvalue: f32, } + +// --------------------------------------------------------------------------- +// Builder +// --------------------------------------------------------------------------- + +/// Tracks whether a field has been set or is still unset. +/// +/// Mirrors the `SetField` in `search_results.rs` but lives here so +/// `ScoredCandidateBuilder` is self-contained. +#[derive(Debug, Clone, Copy, Default)] +pub enum SetField { + Some(T), + #[default] + None, +} + +impl SetField { + pub fn is_some(&self) -> bool { + matches!(self, Self::Some(_)) + } + + pub fn expect_some(self, field_name: &'static str) -> Result { + match self { + Self::Some(v) => Ok(v), + Self::None => Err(DataProcessingError::ExpectedSetField { + field: field_name, + context: "".into(), + }), + } + } +} + +/// Builder that collects all inputs for a `ScoredCandidate` and validates +/// completeness in `finalize()`. +#[derive(Debug, Default)] +pub struct ScoredCandidateBuilder { + // --- Identity --- + sequence: SetField, + library_id: SetField, + decoy_group_id: SetField, + precursor_mz: SetField, + precursor_charge: SetField, + precursor_mobility: SetField, + is_target: SetField, + + // --- Reference RT / mobility (used to compute deltas) --- + ref_rt_seconds: SetField, + + // --- Observed RT / mobility --- + obs_rt_seconds: SetField, + obs_mobility: SetField, + delta_ms1_ms2_mobility: SetField, + + // --- Primary scores --- + main_score: SetField, + delta_next: SetField, + delta_second_next: SetField, + + // --- Lazyscores --- + apex_lazyscore: SetField, + ms2_lazyscore: SetField, + ms2_isotope_lazyscore: SetField, + ms2_isotope_lazyscore_ratio: SetField, + lazyscore_z: SetField, + lazyscore_vs_baseline: SetField, + + // --- Split product --- + split_product_score: SetField, + cosine_au: SetField, + scribe_au: SetField, + cosine_cg: SetField, + scribe_cg: SetField, + cosine_weighted_coelution: SetField, + cosine_gradient_consistency: SetField, + 
scribe_weighted_coelution: SetField, + scribe_gradient_consistency: SetField, + + // --- 11 apex features --- + peak_shape: SetField, + ratio_cv: SetField, + centered_apex: SetField, + precursor_coelution: SetField, + fragment_coverage: SetField, + precursor_apex_match: SetField, + xic_quality: SetField, + fragment_apex_agreement: SetField, + isotope_correlation: SetField, + gaussian_correlation: SetField, + per_frag_gaussian_corr: SetField, + + // --- Peak shape --- + rising_cycles: SetField, + falling_cycles: SetField, + + // --- Counts --- + npeaks: SetField, + n_scored_fragments: SetField, + + // --- Intensities --- + ms2_summed_intensity: SetField, + ms1_summed_intensity: SetField, + + // --- Per-ion errors --- + ms2_mz_errors: SetField<[f32; NUM_MS2_IONS]>, + ms2_mobility_errors: SetField<[f32; NUM_MS2_IONS]>, + ms1_mz_errors: SetField<[f32; NUM_MS1_IONS]>, + ms1_mobility_errors: SetField<[f32; NUM_MS1_IONS]>, + + // --- Relative intensities --- + relative_intensities: SetField, +} + +impl ScoredCandidateBuilder { + /// Populate identity fields and reference values from peptide metadata. + pub fn with_metadata(mut self, metadata: &PeptideMetadata) -> Self { + self.library_id = SetField::Some(metadata.library_id); + self.sequence = SetField::Some(String::from(metadata.digest.clone())); + self.is_target = SetField::Some(metadata.digest.decoy.is_target()); + self.decoy_group_id = SetField::Some(metadata.digest.decoy_group); + self.precursor_charge = SetField::Some(metadata.charge); + self.precursor_mz = SetField::Some(metadata.ref_precursor_mz); + self.precursor_mobility = SetField::Some(metadata.ref_mobility_ook0); + self.ref_rt_seconds = SetField::Some(metadata.ref_rt_seconds); + self + } + + /// Set the number of scored fragments (ions used during scoring). + pub fn with_nqueries(mut self, nqueries: u8) -> Self { + self.n_scored_fragments = SetField::Some(nqueries); + self + } + + /// Populate per-ion m/z and mobility error arrays plus observed mobility. 
+ pub fn with_sorted_offsets(mut self, offsets: &MzMobilityOffsets) -> Self { + self.ms1_mz_errors = SetField::Some(offsets.ms1_mz_errors()); + self.ms1_mobility_errors = SetField::Some(offsets.ms1_mobility_errors()); + self.ms2_mz_errors = SetField::Some(offsets.ms2_mz_errors()); + self.ms2_mobility_errors = SetField::Some(offsets.ms2_mobility_errors()); + + let mob_errors = offsets.avg_delta_mobs(); + let cum_err = mob_errors.0 + mob_errors.1; + let obs_mob = + (offsets.ref_mobility + cum_err.mean_mobility().unwrap_or(f64::NAN)) as f32; + let d_err = match (mob_errors.0.mean_mobility(), mob_errors.1.mean_mobility()) { + (Ok(mz), Ok(mob)) => mz - mob, + _ => f64::NAN, + }; + self.delta_ms1_ms2_mobility = SetField::Some(d_err as f32); + self.obs_mobility = SetField::Some(obs_mob); + self + } + + /// Populate secondary lazyscore fields from the isotope/inner collectors. + pub fn with_secondary_lazyscores(mut self, lazyscores: SecondaryLazyScores) -> Self { + self.ms2_lazyscore = SetField::Some(lazyscores.lazyscore); + self.ms2_isotope_lazyscore = SetField::Some(lazyscores.iso_lazyscore); + self.ms2_isotope_lazyscore_ratio = SetField::Some(lazyscores.ratio); + self + } + + /// Populate relative intensity arrays from the inner collector. + pub fn with_relative_intensities(mut self, relative_intensities: RelativeIntensities) -> Self { + self.relative_intensities = SetField::Some(relative_intensities); + self + } + + /// Populate all fields derived from the full apex score. 
+ pub fn with_apex_score(mut self, main_score: &ApexScore) -> Self { + let ApexScore { + score, + retention_time_ms, + joint_apex_cycle: _, + split_product, + features, + delta_next, + delta_second_next, + lazyscore, + lazyscore_vs_baseline, + lazyscore_z, + npeaks, + ms2_summed_intensity, + ms1_summed_intensity, + raising_cycles, + falling_cycles, + } = *main_score; + + self.main_score = SetField::Some(score); + self.delta_next = SetField::Some(delta_next); + self.delta_second_next = SetField::Some(delta_second_next); + self.obs_rt_seconds = SetField::Some(retention_time_ms as f32 / 1000.0); + + self.split_product_score = SetField::Some(split_product.base_score); + self.cosine_au = SetField::Some(split_product.cosine_au); + self.scribe_au = SetField::Some(split_product.scribe_au); + self.cosine_cg = SetField::Some(split_product.cosine_cg); + self.scribe_cg = SetField::Some(split_product.scribe_cg); + self.cosine_weighted_coelution = + SetField::Some(split_product.cosine_weighted_coelution); + self.cosine_gradient_consistency = + SetField::Some(split_product.cosine_gradient_consistency); + self.scribe_weighted_coelution = + SetField::Some(split_product.scribe_weighted_coelution); + self.scribe_gradient_consistency = + SetField::Some(split_product.scribe_gradient_consistency); + + self.peak_shape = SetField::Some(features.peak_shape); + self.ratio_cv = SetField::Some(features.ratio_cv); + self.centered_apex = SetField::Some(features.centered_apex); + self.precursor_coelution = SetField::Some(features.precursor_coelution); + self.fragment_coverage = SetField::Some(features.fragment_coverage); + self.precursor_apex_match = SetField::Some(features.precursor_apex_match); + self.xic_quality = SetField::Some(features.xic_quality); + self.fragment_apex_agreement = SetField::Some(features.fragment_apex_agreement); + self.isotope_correlation = SetField::Some(features.isotope_correlation); + self.gaussian_correlation = SetField::Some(features.gaussian_correlation); + 
self.per_frag_gaussian_corr = SetField::Some(features.per_frag_gaussian_corr); + + self.apex_lazyscore = SetField::Some(lazyscore); + self.lazyscore_z = SetField::Some(lazyscore_z); + self.lazyscore_vs_baseline = SetField::Some(lazyscore_vs_baseline); + self.npeaks = SetField::Some(npeaks); + self.ms1_summed_intensity = SetField::Some(ms1_summed_intensity); + self.ms2_summed_intensity = SetField::Some(ms2_summed_intensity); + self.rising_cycles = SetField::Some(raising_cycles); + self.falling_cycles = SetField::Some(falling_cycles); + + self + } + + /// Validate completeness and construct a `ScoredCandidate`. + pub fn finalize(self) -> Result { + macro_rules! expect_some { + ($field:ident) => { + self.$field.expect_some(stringify!($field))? + }; + } + + let obs_rt_seconds = expect_some!(obs_rt_seconds); + let ref_rt = expect_some!(ref_rt_seconds); + let delta_rt = obs_rt_seconds - ref_rt; + let sq_delta_rt = delta_rt * delta_rt; + + let delta_ms1_ms2_mobility = expect_some!(delta_ms1_ms2_mobility); + let sq_delta_ms1_ms2_mobility = delta_ms1_ms2_mobility * delta_ms1_ms2_mobility; + + let relints = expect_some!(relative_intensities); + let ms1_intensity_ratios = relints.ms1.get_values(); + let ms2_intensity_ratios = relints.ms2.get_values(); + + let scoring = ScoringFields { + // Identity + sequence: expect_some!(sequence), + library_id: expect_some!(library_id), + decoy_group_id: expect_some!(decoy_group_id), + precursor_mz: expect_some!(precursor_mz), + precursor_charge: expect_some!(precursor_charge), + precursor_mobility: expect_some!(precursor_mobility), + is_target: expect_some!(is_target), + + // RT + query_rt_seconds: ref_rt, + obs_rt_seconds, + delta_rt, + sq_delta_rt, + calibrated_sq_delta_rt: sq_delta_rt, + recalibrated_rt: ref_rt, + + // Mobility + obs_mobility: expect_some!(obs_mobility), + delta_ms1_ms2_mobility, + sq_delta_ms1_ms2_mobility, + + // Primary scores + main_score: expect_some!(main_score), + delta_next: expect_some!(delta_next), + 
delta_second_next: expect_some!(delta_second_next), + + // Lazyscores + apex_lazyscore: expect_some!(apex_lazyscore), + ms2_lazyscore: expect_some!(ms2_lazyscore), + ms2_isotope_lazyscore: expect_some!(ms2_isotope_lazyscore), + ms2_isotope_lazyscore_ratio: expect_some!(ms2_isotope_lazyscore_ratio), + lazyscore_z: expect_some!(lazyscore_z), + lazyscore_vs_baseline: expect_some!(lazyscore_vs_baseline), + + // Split product + split_product_score: expect_some!(split_product_score), + cosine_au: expect_some!(cosine_au), + scribe_au: expect_some!(scribe_au), + cosine_cg: expect_some!(cosine_cg), + scribe_cg: expect_some!(scribe_cg), + cosine_weighted_coelution: expect_some!(cosine_weighted_coelution), + cosine_gradient_consistency: expect_some!(cosine_gradient_consistency), + scribe_weighted_coelution: expect_some!(scribe_weighted_coelution), + scribe_gradient_consistency: expect_some!(scribe_gradient_consistency), + + // 11 apex features + peak_shape: expect_some!(peak_shape), + ratio_cv: expect_some!(ratio_cv), + centered_apex: expect_some!(centered_apex), + precursor_coelution: expect_some!(precursor_coelution), + fragment_coverage: expect_some!(fragment_coverage), + precursor_apex_match: expect_some!(precursor_apex_match), + xic_quality: expect_some!(xic_quality), + fragment_apex_agreement: expect_some!(fragment_apex_agreement), + isotope_correlation: expect_some!(isotope_correlation), + gaussian_correlation: expect_some!(gaussian_correlation), + per_frag_gaussian_corr: expect_some!(per_frag_gaussian_corr), + + // Peak shape + rising_cycles: expect_some!(rising_cycles), + falling_cycles: expect_some!(falling_cycles), + + // Counts + npeaks: expect_some!(npeaks), + n_scored_fragments: expect_some!(n_scored_fragments), + + // Intensities + ms2_summed_intensity: expect_some!(ms2_summed_intensity), + ms1_summed_intensity: expect_some!(ms1_summed_intensity), + + // Per-ion errors + ms2_mz_errors: expect_some!(ms2_mz_errors), + ms2_mobility_errors: 
expect_some!(ms2_mobility_errors), + ms1_mz_errors: expect_some!(ms1_mz_errors), + ms1_mobility_errors: expect_some!(ms1_mobility_errors), + + // Relative intensities + ms2_intensity_ratios, + ms1_intensity_ratios, + }; + + Ok(ScoredCandidate { scoring }) + } +} From 5a5e10b29a18da6dc2910e158666eeb716999607 Mon Sep 17 00:00:00 2001 From: "J. Sebastian Paez" Date: Thu, 9 Apr 2026 19:33:16 -0700 Subject: [PATCH 06/64] refactor: migrate accumulator and calibrated scoring to ScoredCandidate Replace all IonSearchResults references with ScoredCandidate in the accumulator, score_calibrated_extraction, score_calibrated_batch, and FullQueryResult. The pipeline now produces ScoredCandidate throughout; CLI callers in processing.rs will be updated in Task 6. --- rust/timsseek/src/scoring/accumulator.rs | 14 +++++++------- rust/timsseek/src/scoring/full_results.rs | 4 ++-- rust/timsseek/src/scoring/pipeline.rs | 5 ++--- 3 files changed, 11 insertions(+), 12 deletions(-) diff --git a/rust/timsseek/src/scoring/accumulator.rs b/rust/timsseek/src/scoring/accumulator.rs index 2dfe43a..ea1f1fa 100644 --- a/rust/timsseek/src/scoring/accumulator.rs +++ b/rust/timsseek/src/scoring/accumulator.rs @@ -3,7 +3,7 @@ //! This module provides efficient collection of scoring results from parallel iterators. //! It aggregates both successful search results and timing measurements across threads. -use super::search_results::IonSearchResults; +use super::results::ScoredCandidate; use super::timings::ScoreTimings; use rayon::iter::{ FromParallelIterator, @@ -26,7 +26,7 @@ use rayon::iter::{ /// This minimizes contention and allows efficient parallel collection of results. #[derive(Default)] pub(super) struct IonSearchAccumulator { - pub(super) res: Vec, + pub(super) res: Vec, pub(super) timings: ScoreTimings, } @@ -44,7 +44,7 @@ impl IonSearchAccumulator { /// /// Used in the fold phase of parallel collection. 
Successful results are collected, /// while `None` results (failed scoring) are discarded. - pub(super) fn fold(mut self, item: (Option, ScoreTimings)) -> Self { + pub(super) fn fold(mut self, item: (Option, ScoreTimings)) -> Self { if let Some(elem) = item.0 { self.res.push(elem); } @@ -53,20 +53,20 @@ impl IonSearchAccumulator { } } -impl FromIterator<(Option, ScoreTimings)> for IonSearchAccumulator { +impl FromIterator<(Option, ScoreTimings)> for IonSearchAccumulator { fn from_iter(iter: I) -> Self where - I: IntoIterator, ScoreTimings)>, + I: IntoIterator, ScoreTimings)>, { iter.into_iter() .fold(IonSearchAccumulator::default(), IonSearchAccumulator::fold) } } -impl FromParallelIterator<(Option, ScoreTimings)> for IonSearchAccumulator { +impl FromParallelIterator<(Option, ScoreTimings)> for IonSearchAccumulator { fn from_par_iter(par_iter: I) -> Self where - I: IntoParallelIterator, ScoreTimings)>, + I: IntoParallelIterator, ScoreTimings)>, { par_iter .into_par_iter() diff --git a/rust/timsseek/src/scoring/full_results.rs b/rust/timsseek/src/scoring/full_results.rs index 583b78a..1b3974b 100644 --- a/rust/timsseek/src/scoring/full_results.rs +++ b/rust/timsseek/src/scoring/full_results.rs @@ -1,6 +1,6 @@ use crate::IonAnnot; use crate::scoring::apex_finding::ScoreTraces; -use crate::scoring::search_results::IonSearchResults; +use crate::scoring::results::ScoredCandidate; use serde::Serialize; use timsquery::models::aggregators::ChromatogramCollector; @@ -9,5 +9,5 @@ pub struct FullQueryResult { pub main_score_elements: ScoreTraces, pub longitudinal_main_score: Vec, pub extractions: ChromatogramCollector, - pub search_results: IonSearchResults, + pub search_results: ScoredCandidate, } diff --git a/rust/timsseek/src/scoring/pipeline.rs b/rust/timsseek/src/scoring/pipeline.rs index 18ef9a4..bdc217d 100644 --- a/rust/timsseek/src/scoring/pipeline.rs +++ b/rust/timsseek/src/scoring/pipeline.rs @@ -58,7 +58,6 @@ use super::results::{ ScoredCandidate, 
ScoredCandidateBuilder, }; -use super::search_results::IonSearchResults; use super::timings::ScoreTimings; use crate::rt_calibration::CalibrationResult; use tracing::warn; @@ -639,7 +638,7 @@ impl Scorer { calibration: &CalibrationResult, buffer: &mut ApexFinder, timings: &mut ScoreTimings, - ) -> Option { + ) -> Option { let st = Instant::now(); let (metadata, scoring_ctx) = tracing::span!(tracing::Level::TRACE, "score_calibrated::extraction").in_scope( @@ -708,7 +707,7 @@ impl Scorer { &self, items_to_score: &[QueryItemToScore], calibration: &CalibrationResult, - ) -> (Vec, ScoreTimings) { + ) -> (Vec, ScoreTimings) { let init_fn = || ApexFinder::new(self.num_cycles()); let filter_fn = |x: &&QueryItemToScore| { let tmp = x.query.get_precursor_mz_limits(); From c4aaea79bd1f56a8ec8557ebb9d906196152b51d Mon Sep 17 00:00:00 2001 From: "J. Sebastian Paez" Date: Thu, 9 Apr 2026 19:39:21 -0700 Subject: [PATCH 07/64] =?UTF-8?q?refactor:=20typed=20pipeline=20stages=20(?= =?UTF-8?q?ScoredCandidate=20=E2=86=92=20CompetedCandidate=20=E2=86=92=20F?= =?UTF-8?q?inalResult)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- rust/timsseek/src/ml/qvalues.rs | 161 +++++++++++++++++++++++++-- rust/timsseek/src/scoring/results.rs | 33 ++++++ rust/timsseek_cli/src/processing.rs | 132 +++++++++++----------- 3 files changed, 251 insertions(+), 75 deletions(-) diff --git a/rust/timsseek/src/ml/qvalues.rs b/rust/timsseek/src/ml/qvalues.rs index be8ba8a..2298b6c 100644 --- a/rust/timsseek/src/ml/qvalues.rs +++ b/rust/timsseek/src/ml/qvalues.rs @@ -82,28 +82,28 @@ pub fn report_qvalues_at_thresholds( feature = "instrumentation", tracing::instrument(skip_all, level = "trace") )] -pub fn rescore( - mut data: Vec, -) -> Vec { +pub fn rescore(mut data: Vec) -> Vec { let config = GBMConfig::default(); data.shuffle(&mut rand::rng()); - let mut scorer = CrossValidatedScorer::new_from_shuffled(3, data, config); + let mut scorer = 
CrossValidatedScorer::::new_from_shuffled(3, data, config); scorer .fit(&mut DataBuffer::default(), &mut DataBuffer::default()) .unwrap(); - let mut out = scorer.score(); + let mut scored = scorer.score(); // Sort by score descending - out.par_sort_unstable_by(|a, b| b.get_score().total_cmp(&a.get_score())); - assign_qval(&mut out, |x| T::get_score(x) as f32); - debug!("Best:\n{:#?}", out.first()); - debug!("Worst:\n{:#?}", out.last()); - out + scored.par_sort_unstable_by(|a, b| b.get_score().total_cmp(&a.get_score())); + assign_qval(&mut scored, |x| CompetedCandidate::get_score(x) as f32); + debug!("Best:\n{:#?}", scored.first()); + debug!("Worst:\n{:#?}", scored.last()); + + scored.into_iter().map(|c| c.into_final()).collect() } use crate::IonSearchResults; +use crate::scoring::results::{CompetedCandidate, FinalResult}; fn mean_abs_error(errs: &[f32]) -> f64 { let (sum, n) = errs.iter().filter(|e| e.is_finite() && **e != 0.0) @@ -356,6 +356,147 @@ impl LabelledScore for IonSearchResults { } } +// --------------------------------------------------------------------------- +// CompetedCandidate: FeatureLike + LabelledScore +// --------------------------------------------------------------------------- + +impl FeatureLike for CompetedCandidate { + fn as_feature(&self) -> impl IntoIterator + '_ { + let s = &self.scoring; + + vec![ + (s.precursor_mz / 5.0).round(), + s.precursor_charge as f64, + s.precursor_mobility as f64, + s.query_rt_seconds.round() as f64, + s.n_scored_fragments as f64, + // Combined + s.main_score as f64, + (s.main_score / s.delta_next) as f64, + s.delta_next as f64, + s.delta_second_next as f64, + s.obs_rt_seconds as f64, + s.obs_mobility as f64, + s.delta_rt as f64, + s.sq_delta_rt as f64, + s.delta_ms1_ms2_mobility as f64, + s.sq_delta_ms1_ms2_mobility as f64, + s.rising_cycles as f64, + s.falling_cycles as f64, + // MS2 + s.npeaks as f64, + s.apex_lazyscore as f64, + (s.ms2_summed_intensity as f64).ln_1p(), + s.ms2_lazyscore as f64, + 
s.ms2_isotope_lazyscore as f64, + s.ms2_isotope_lazyscore_ratio as f64, + s.lazyscore_z as f64, + s.lazyscore_vs_baseline as f64, + // Split product & apex features + (s.split_product_score as f64).ln_1p(), + (s.cosine_au as f64).ln_1p(), + (s.scribe_au as f64).ln_1p(), + s.cosine_cg as f64, + s.scribe_cg as f64, + s.cosine_weighted_coelution as f64, + s.cosine_gradient_consistency as f64, + s.scribe_weighted_coelution as f64, + s.scribe_gradient_consistency as f64, + s.peak_shape as f64, + s.ratio_cv as f64, + s.centered_apex as f64, + s.precursor_coelution as f64, + s.fragment_coverage as f64, + s.precursor_apex_match as f64, + s.xic_quality as f64, + s.fragment_apex_agreement as f64, + s.isotope_correlation as f64, + s.gaussian_correlation as f64, + s.per_frag_gaussian_corr as f64, + // MS2 per-ion errors + s.ms2_mz_errors[0] as f64, + s.ms2_mz_errors[1] as f64, + s.ms2_mz_errors[2] as f64, + s.ms2_mz_errors[3] as f64, + s.ms2_mz_errors[4] as f64, + s.ms2_mz_errors[5] as f64, + s.ms2_mz_errors[6] as f64, + s.ms2_mobility_errors[0] as f64, + s.ms2_mobility_errors[1] as f64, + s.ms2_mobility_errors[2] as f64, + s.ms2_mobility_errors[3] as f64, + s.ms2_mobility_errors[4] as f64, + s.ms2_mobility_errors[5] as f64, + s.ms2_mobility_errors[6] as f64, + // MS1 + (s.ms1_summed_intensity as f64).ln_1p(), + // MS1 per-ion errors + s.ms1_mz_errors[0] as f64, + s.ms1_mz_errors[1] as f64, + s.ms1_mz_errors[2] as f64, + s.ms1_mobility_errors[0] as f64, + s.ms1_mobility_errors[1] as f64, + s.ms1_mobility_errors[2] as f64, + // Relative intensities + s.ms1_intensity_ratios[0] as f64, + s.ms1_intensity_ratios[1] as f64, + s.ms1_intensity_ratios[2] as f64, + s.ms2_intensity_ratios[0] as f64, + s.ms2_intensity_ratios[1] as f64, + s.ms2_intensity_ratios[2] as f64, + s.ms2_intensity_ratios[3] as f64, + s.ms2_intensity_ratios[4] as f64, + s.ms2_intensity_ratios[5] as f64, + s.ms2_intensity_ratios[6] as f64, + self.delta_group as f64, + self.delta_group_ratio as f64, + 
s.recalibrated_rt as f64, + s.calibrated_sq_delta_rt as f64, + // Derived intensity features + { + let ratios = &s.ms2_intensity_ratios; + ratios.iter().filter(|r| r.is_finite()).fold(f32::NEG_INFINITY, |a, &b| a.max(b)) as f64 + }, + // Interaction features + (s.main_score * s.delta_next) as f64, + (s.split_product_score * s.fragment_coverage) as f64, + // Summary error features + mean_abs_error(&s.ms2_mz_errors), + mean_abs_error(&s.ms2_mobility_errors), + mean_abs_error(&s.ms1_mz_errors), + mean_abs_error(&s.ms1_mobility_errors), + ] + } + + fn get_y(&self) -> f64 { + if self.scoring.is_target { 1.0 } else { 0.0 } + } + + fn assign_score(&mut self, score: f64) { + self.discriminant_score = score as f32; + } + + fn get_score(&self) -> f64 { + self.discriminant_score as f64 + } +} + +impl LabelledScore for CompetedCandidate { + fn get_label(&self) -> TargetDecoy { + if self.scoring.is_target { TargetDecoy::Target } else { TargetDecoy::Decoy } + } + fn assign_qval(&mut self, q: f32) { self.qvalue = q; } + fn get_qval(&self) -> f32 { self.qvalue } +} + +impl LabelledScore for FinalResult { + fn get_label(&self) -> TargetDecoy { + if self.scoring.is_target { TargetDecoy::Target } else { TargetDecoy::Decoy } + } + fn assign_qval(&mut self, q: f32) { self.qvalue = q; } + fn get_qval(&self) -> f32 { self.qvalue } +} + #[cfg(test)] mod tests { use super::*; diff --git a/rust/timsseek/src/scoring/results.rs b/rust/timsseek/src/scoring/results.rs index 921a662..215e97f 100644 --- a/rust/timsseek/src/scoring/results.rs +++ b/rust/timsseek/src/scoring/results.rs @@ -125,6 +125,39 @@ pub struct FinalResult { pub qvalue: f32, } +// --------------------------------------------------------------------------- +// Stage conversions +// --------------------------------------------------------------------------- + +impl ScoredCandidate { + /// Convert into a `CompetedCandidate` with the given delta-group values. 
+ /// + /// Items that are alone in their group (no competitor) should pass + /// `f32::NAN` for both deltas. + pub fn into_competed(self, delta_group: f32, delta_group_ratio: f32) -> CompetedCandidate { + CompetedCandidate { + scoring: self.scoring, + delta_group, + delta_group_ratio, + discriminant_score: f32::NAN, + qvalue: f32::NAN, + } + } +} + +impl CompetedCandidate { + /// Promote to a `FinalResult` (all fields frozen). + pub fn into_final(self) -> FinalResult { + FinalResult { + scoring: self.scoring, + delta_group: self.delta_group, + delta_group_ratio: self.delta_group_ratio, + discriminant_score: self.discriminant_score, + qvalue: self.qvalue, + } + } +} + // --------------------------------------------------------------------------- // Builder // --------------------------------------------------------------------------- diff --git a/rust/timsseek_cli/src/processing.rs b/rust/timsseek_cli/src/processing.rs index 09487f7..2bb9566 100644 --- a/rust/timsseek_cli/src/processing.rs +++ b/rust/timsseek_cli/src/processing.rs @@ -29,14 +29,12 @@ use timsseek::scoring::{ CalibrantCandidate, CalibrantHeap, CalibrationConfig, + CompetedCandidate, PipelineTimings, + ScoredCandidate, ScoreTimings, }; use timsseek::scoring::pipeline::Scorer; -use timsseek::scoring::search_results::{ - IonSearchResults, - ResultParquetWriter, -}; use timsseek::{ DecoyStrategy, IonAnnot, @@ -113,7 +111,7 @@ pub fn main_loop( calib_lib: Option, pipeline: &Scorer, chunk_size: usize, - out_path: &OutputConfig, + _out_path: &OutputConfig, ) -> std::result::Result { let calib_config = CalibrationConfig::default(); @@ -207,10 +205,12 @@ pub fn main_loop( ); // === Post-processing === - let mut results = target_decoy_compete(results); - results.sort_unstable_by(|x, y| y.main_score.partial_cmp(&x.main_score).unwrap()); + let mut competed = target_decoy_compete(results); + competed.sort_unstable_by(|x, y| { + y.scoring.main_score.partial_cmp(&x.scoring.main_score).unwrap() + }); - let data = 
rescore(results); + let data = rescore(competed); for val in report_qvalues_at_thresholds(&data, &[0.01, 0.05, 0.1, 0.5, 1.0]) { let (thresh, n_below_thresh, n_targets, n_decoys) = val; println!( @@ -218,23 +218,9 @@ pub fn main_loop( n_targets, n_decoys, thresh, n_below_thresh ); } - let out_path_pq = out_path.directory.join("results.parquet"); - let mut pq_writer = ResultParquetWriter::new(out_path_pq.clone(), 20_000).map_err(|e| { - tracing::error!( - "Error creating parquet writer for path {:?}: {}", - out_path_pq.clone(), - e - ); - TimsSeekError::Io { - path: out_path_pq.clone().into(), - source: e, - } - })?; - for res in data.into_iter() { - pq_writer.add(res); - } - pq_writer.close(); - info!("Wrote final results to {:?}", out_path_pq); + + // TODO: Task 7 — manual Parquet writer for FinalResult + let _ = &data; Ok(PipelineTimings { phase1_prescore_ms: phase1_ms, @@ -471,7 +457,7 @@ fn phase3_score( calibration: &CalibrationResult, chunk_size: usize, timings: &mut ScoreTimings, -) -> Vec { +) -> Vec { let style = ProgressStyle::with_template( "{spinner:.green} Phase 3 [{elapsed_precise}] [{wide_bar:.cyan/blue}] {pos}/{len} ({eta})", ) @@ -526,18 +512,17 @@ fn asymmetric_tolerance(errors: &[f32], n_sigma: f32, min_val: f32) -> (f32, f32 feature = "instrumentation", tracing::instrument(skip_all, level = "trace") )] -fn target_decoy_compete(mut results: Vec) -> Vec { +fn target_decoy_compete(mut results: Vec) -> Vec { // TODO: re-implement so we dont drop results but instead just flag them as rejected (maybe // a slice where we push rejected results to the end and keep the trailing slice as the "active") - // I KNOW this is an ugly place for a function... 
- fn glimpse_result_head(results: &[IonSearchResults]) -> Vec { + fn glimpse_result_head(results: &[ScoredCandidate]) -> Vec { results[..10.min(results.len())] .iter() .map(|x| { format!( "{} {} {} {}", - x.sequence, x.precursor_charge, x.precursor_mz, x.main_score + x.scoring.sequence, x.scoring.precursor_charge, x.scoring.precursor_mz, x.scoring.main_score ) }) .collect::>() @@ -545,16 +530,16 @@ fn target_decoy_compete(mut results: Vec) -> Vec std::cmp::Ordering::Less, (false, true) => std::cmp::Ordering::Greater, _ => std::cmp::Ordering::Equal, @@ -569,9 +554,9 @@ fn target_decoy_compete(mut results: Vec) -> Vec) -> Vec = vec![(f32::NAN, f32::NAN); results.len()]; let mut previous: Option<(u32, u8, usize, f32)> = None; for i in 0..results.len() { let current = &results[i]; - let current_key = (current.decoy_group_id, current.precursor_charge); + let current_key = (current.scoring.decoy_group_id, current.scoring.precursor_charge); if let Some((prev_group_id, prev_charge, prev_index, prev_score)) = previous { let prev_key = (prev_group_id, prev_charge); if current_key == prev_key { // This is the second item in a target/decoy pair - let delta_score = current.main_score - prev_score; - let delta_ratio = current.main_score / prev_score; + let delta_score = current.scoring.main_score - prev_score; + let delta_ratio = current.scoring.main_score / prev_score; - results[prev_index].delta_group = -delta_score; - results[prev_index].delta_group_ratio = delta_ratio; + delta_map[prev_index] = (-delta_score, delta_ratio); // Skip updating previous - we only compare first two items per group continue; @@ -620,26 +602,46 @@ fn target_decoy_compete(mut results: Vec) -> Vec = Vec::with_capacity(results.len()); + { + let mut last_key: Option<(u32, u8)> = None; + for i in 0..results.len() { + let key = (results[i].scoring.decoy_group_id, results[i].scoring.precursor_charge); + if last_key == Some(key) { + continue; // duplicate in same group + } + last_key = Some(key); + 
kept_indices.push(i); + } + } + + info!("Number of results after t/d competition: {}", kept_indices.len()); + + // Build CompetedCandidate vec from the kept indices. + // We need to pull elements out of `results` by index, but they are non-Copy. + // Convert the whole Vec into an indexed form we can drain. + let mut results_opt: Vec> = results.into_iter().map(Some).collect(); + let competed: Vec = kept_indices + .into_iter() + .map(|i| { + let (dg, dgr) = delta_map[i]; + results_opt[i] + .take() + .expect("index should be unique") + .into_competed(dg, dgr) + }) + .collect(); + + competed } pub fn process_speclib( From 0ff4ca756da8835a810266001f94981e0a00a8ea Mon Sep 17 00:00:00 2001 From: "J. Sebastian Paez" Date: Thu, 9 Apr 2026 19:43:25 -0700 Subject: [PATCH 08/64] feat: manual Parquet writer with exhaustive destructure --- Cargo.lock | 1 + rust/timsseek/Cargo.toml | 1 + rust/timsseek/src/scoring/mod.rs | 1 + rust/timsseek/src/scoring/parquet_writer.rs | 302 ++++++++++++++++++++ rust/timsseek_cli/src/processing.rs | 17 +- 5 files changed, 320 insertions(+), 2 deletions(-) create mode 100644 rust/timsseek/src/scoring/parquet_writer.rs diff --git a/Cargo.lock b/Cargo.lock index 4b562d8..51a8935 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -6117,6 +6117,7 @@ dependencies = [ name = "timsseek" version = "0.26.0" dependencies = [ + "arrow", "calibrt", "forust-ml", "micromzpaf", diff --git a/rust/timsseek/Cargo.toml b/rust/timsseek/Cargo.toml index da7439f..c1b809b 100644 --- a/rust/timsseek/Cargo.toml +++ b/rust/timsseek/Cargo.toml @@ -29,6 +29,7 @@ tracing = { workspace = true } rayon = { workspace = true } parquet_derive = { workspace = true } parquet = { workspace = true } +arrow = { workspace = true } [features] # This enables adding instrumentation to a lot of the scoring diff --git a/rust/timsseek/src/scoring/mod.rs b/rust/timsseek/src/scoring/mod.rs index 15d2d62..4341ecc 100644 --- a/rust/timsseek/src/scoring/mod.rs +++ b/rust/timsseek/src/scoring/mod.rs @@ 
-2,6 +2,7 @@ mod accumulator; pub mod apex_finding; pub mod full_results; mod offsets; +pub mod parquet_writer; pub mod pipeline; pub mod results; mod scores; diff --git a/rust/timsseek/src/scoring/parquet_writer.rs b/rust/timsseek/src/scoring/parquet_writer.rs new file mode 100644 index 0000000..38d2028 --- /dev/null +++ b/rust/timsseek/src/scoring/parquet_writer.rs @@ -0,0 +1,302 @@ +use arrow::array::*; +use arrow::datatypes::*; +use arrow::record_batch::RecordBatch; +use parquet::arrow::ArrowWriter; +use parquet::basic::Compression; +use parquet::file::properties::WriterProperties; +use std::fs::File; +use std::path::Path; +use std::sync::Arc; +use tracing::debug; + +use super::results::{FinalResult, ScoringFields}; + +// --------------------------------------------------------------------------- +// Macro: build typed columns from accessor closures +// --------------------------------------------------------------------------- + +macro_rules! columns { + ($results:expr; $( $name:expr => $dtype:expr, $array_type:ident ( $accessor:expr ) );* $(;)? 
) => {{ + let mut fields: Vec = Vec::new(); + let mut arrays: Vec> = Vec::new(); + $( + fields.push(Field::new($name, $dtype, true)); + arrays.push(Arc::new(<$array_type>::from_iter( + $results.iter().map($accessor) + ))); + )* + (fields, arrays) + }}; +} + +// --------------------------------------------------------------------------- +// Helper: expand fixed-size [f32; N] arrays into numbered columns +// --------------------------------------------------------------------------- + +fn expand_f32_array_columns( + results: &[FinalResult], + prefix: &str, + accessor: impl Fn(&FinalResult) -> &[f32; N], + fields: &mut Vec, + arrays: &mut Vec>, +) { + for i in 0..N { + fields.push(Field::new( + format!("{}_{}", prefix, i), + DataType::Float32, + true, + )); + arrays.push(Arc::new(Float32Array::from_iter( + results.iter().map(|r| Some(accessor(r)[i])), + ))); + } +} + +// --------------------------------------------------------------------------- +// Build a RecordBatch from a slice of FinalResult +// --------------------------------------------------------------------------- + +/// Build a `RecordBatch` from a slice of `FinalResult`. +/// +/// **COMPILE-TIME SAFETY:** The exhaustive destructure at the top ensures that +/// adding a field to `ScoringFields` or `FinalResult` without updating this +/// function causes a compile error. +pub fn build_record_batch(results: &[FinalResult]) -> RecordBatch { + // ----------------------------------------------------------------------- + // Exhaustive destructure for compile-time completeness check. + // Every field must be listed -- no `..` allowed. + // The bindings are prefixed with _ to suppress unused warnings; + // the actual column building uses accessor closures below. 
+ // ----------------------------------------------------------------------- + if let Some(r) = results.first() { + let FinalResult { + scoring: + ScoringFields { + sequence: _, + library_id: _, + decoy_group_id: _, + precursor_mz: _, + precursor_charge: _, + precursor_mobility: _, + is_target: _, + query_rt_seconds: _, + obs_rt_seconds: _, + delta_rt: _, + sq_delta_rt: _, + calibrated_sq_delta_rt: _, + recalibrated_rt: _, + obs_mobility: _, + delta_ms1_ms2_mobility: _, + sq_delta_ms1_ms2_mobility: _, + main_score: _, + delta_next: _, + delta_second_next: _, + apex_lazyscore: _, + ms2_lazyscore: _, + ms2_isotope_lazyscore: _, + ms2_isotope_lazyscore_ratio: _, + lazyscore_z: _, + lazyscore_vs_baseline: _, + split_product_score: _, + cosine_au: _, + scribe_au: _, + cosine_cg: _, + scribe_cg: _, + cosine_weighted_coelution: _, + cosine_gradient_consistency: _, + scribe_weighted_coelution: _, + scribe_gradient_consistency: _, + peak_shape: _, + ratio_cv: _, + centered_apex: _, + precursor_coelution: _, + fragment_coverage: _, + precursor_apex_match: _, + xic_quality: _, + fragment_apex_agreement: _, + isotope_correlation: _, + gaussian_correlation: _, + per_frag_gaussian_corr: _, + rising_cycles: _, + falling_cycles: _, + npeaks: _, + n_scored_fragments: _, + ms2_summed_intensity: _, + ms1_summed_intensity: _, + ms2_mz_errors: _, + ms2_mobility_errors: _, + ms1_mz_errors: _, + ms1_mobility_errors: _, + ms2_intensity_ratios: _, + ms1_intensity_ratios: _, + }, + delta_group: _, + delta_group_ratio: _, + discriminant_score: _, + qvalue: _, + } = r; + } + + // ----------------------------------------------------------------------- + // Scalar columns via the `columns!` macro + // ----------------------------------------------------------------------- + let (mut fields, mut arrays) = columns!(results; + // Identity + "sequence" => DataType::Utf8, StringArray(|r: &FinalResult| Some(r.scoring.sequence.as_str())); + "library_id" => DataType::UInt32, UInt32Array(|r: 
&FinalResult| Some(r.scoring.library_id)); + "decoy_group_id" => DataType::UInt32, UInt32Array(|r: &FinalResult| Some(r.scoring.decoy_group_id)); + "precursor_mz" => DataType::Float64, Float64Array(|r: &FinalResult| Some(r.scoring.precursor_mz)); + "precursor_charge" => DataType::UInt8, UInt8Array(|r: &FinalResult| Some(r.scoring.precursor_charge)); + "precursor_mobility" => DataType::Float32, Float32Array(|r: &FinalResult| Some(r.scoring.precursor_mobility)); + "is_target" => DataType::Boolean, BooleanArray(|r: &FinalResult| Some(r.scoring.is_target)); + + // RT + "query_rt_seconds" => DataType::Float32, Float32Array(|r: &FinalResult| Some(r.scoring.query_rt_seconds)); + "obs_rt_seconds" => DataType::Float32, Float32Array(|r: &FinalResult| Some(r.scoring.obs_rt_seconds)); + "delta_rt" => DataType::Float32, Float32Array(|r: &FinalResult| Some(r.scoring.delta_rt)); + "sq_delta_rt" => DataType::Float32, Float32Array(|r: &FinalResult| Some(r.scoring.sq_delta_rt)); + "calibrated_sq_delta_rt" => DataType::Float32, Float32Array(|r: &FinalResult| Some(r.scoring.calibrated_sq_delta_rt)); + "recalibrated_rt" => DataType::Float32, Float32Array(|r: &FinalResult| Some(r.scoring.recalibrated_rt)); + + // Mobility + "obs_mobility" => DataType::Float32, Float32Array(|r: &FinalResult| Some(r.scoring.obs_mobility)); + "delta_ms1_ms2_mobility" => DataType::Float32, Float32Array(|r: &FinalResult| Some(r.scoring.delta_ms1_ms2_mobility)); + "sq_delta_ms1_ms2_mobility" => DataType::Float32, Float32Array(|r: &FinalResult| Some(r.scoring.sq_delta_ms1_ms2_mobility)); + + // Primary scores + "main_score" => DataType::Float32, Float32Array(|r: &FinalResult| Some(r.scoring.main_score)); + "delta_next" => DataType::Float32, Float32Array(|r: &FinalResult| Some(r.scoring.delta_next)); + "delta_second_next" => DataType::Float32, Float32Array(|r: &FinalResult| Some(r.scoring.delta_second_next)); + + // Lazyscores + "apex_lazyscore" => DataType::Float32, Float32Array(|r: &FinalResult| 
Some(r.scoring.apex_lazyscore)); + "ms2_lazyscore" => DataType::Float32, Float32Array(|r: &FinalResult| Some(r.scoring.ms2_lazyscore)); + "ms2_isotope_lazyscore" => DataType::Float32, Float32Array(|r: &FinalResult| Some(r.scoring.ms2_isotope_lazyscore)); + "ms2_isotope_lazyscore_ratio" => DataType::Float32, Float32Array(|r: &FinalResult| Some(r.scoring.ms2_isotope_lazyscore_ratio)); + "lazyscore_z" => DataType::Float32, Float32Array(|r: &FinalResult| Some(r.scoring.lazyscore_z)); + "lazyscore_vs_baseline" => DataType::Float32, Float32Array(|r: &FinalResult| Some(r.scoring.lazyscore_vs_baseline)); + + // Split product (9 components) + "split_product_score" => DataType::Float32, Float32Array(|r: &FinalResult| Some(r.scoring.split_product_score)); + "cosine_au" => DataType::Float32, Float32Array(|r: &FinalResult| Some(r.scoring.cosine_au)); + "scribe_au" => DataType::Float32, Float32Array(|r: &FinalResult| Some(r.scoring.scribe_au)); + "cosine_cg" => DataType::Float32, Float32Array(|r: &FinalResult| Some(r.scoring.cosine_cg)); + "scribe_cg" => DataType::Float32, Float32Array(|r: &FinalResult| Some(r.scoring.scribe_cg)); + "cosine_weighted_coelution" => DataType::Float32, Float32Array(|r: &FinalResult| Some(r.scoring.cosine_weighted_coelution)); + "cosine_gradient_consistency" => DataType::Float32, Float32Array(|r: &FinalResult| Some(r.scoring.cosine_gradient_consistency)); + "scribe_weighted_coelution" => DataType::Float32, Float32Array(|r: &FinalResult| Some(r.scoring.scribe_weighted_coelution)); + "scribe_gradient_consistency" => DataType::Float32, Float32Array(|r: &FinalResult| Some(r.scoring.scribe_gradient_consistency)); + + // 11 apex features + "peak_shape" => DataType::Float32, Float32Array(|r: &FinalResult| Some(r.scoring.peak_shape)); + "ratio_cv" => DataType::Float32, Float32Array(|r: &FinalResult| Some(r.scoring.ratio_cv)); + "centered_apex" => DataType::Float32, Float32Array(|r: &FinalResult| Some(r.scoring.centered_apex)); + "precursor_coelution" => 
DataType::Float32, Float32Array(|r: &FinalResult| Some(r.scoring.precursor_coelution)); + "fragment_coverage" => DataType::Float32, Float32Array(|r: &FinalResult| Some(r.scoring.fragment_coverage)); + "precursor_apex_match" => DataType::Float32, Float32Array(|r: &FinalResult| Some(r.scoring.precursor_apex_match)); + "xic_quality" => DataType::Float32, Float32Array(|r: &FinalResult| Some(r.scoring.xic_quality)); + "fragment_apex_agreement" => DataType::Float32, Float32Array(|r: &FinalResult| Some(r.scoring.fragment_apex_agreement)); + "isotope_correlation" => DataType::Float32, Float32Array(|r: &FinalResult| Some(r.scoring.isotope_correlation)); + "gaussian_correlation" => DataType::Float32, Float32Array(|r: &FinalResult| Some(r.scoring.gaussian_correlation)); + "per_frag_gaussian_corr" => DataType::Float32, Float32Array(|r: &FinalResult| Some(r.scoring.per_frag_gaussian_corr)); + + // Peak shape + "rising_cycles" => DataType::UInt8, UInt8Array(|r: &FinalResult| Some(r.scoring.rising_cycles)); + "falling_cycles" => DataType::UInt8, UInt8Array(|r: &FinalResult| Some(r.scoring.falling_cycles)); + + // Counts + "npeaks" => DataType::UInt8, UInt8Array(|r: &FinalResult| Some(r.scoring.npeaks)); + "n_scored_fragments" => DataType::UInt8, UInt8Array(|r: &FinalResult| Some(r.scoring.n_scored_fragments)); + + // Intensities + "ms2_summed_intensity" => DataType::Float32, Float32Array(|r: &FinalResult| Some(r.scoring.ms2_summed_intensity)); + "ms1_summed_intensity" => DataType::Float32, Float32Array(|r: &FinalResult| Some(r.scoring.ms1_summed_intensity)); + + // FinalResult-level fields + "delta_group" => DataType::Float32, Float32Array(|r: &FinalResult| Some(r.delta_group)); + "delta_group_ratio" => DataType::Float32, Float32Array(|r: &FinalResult| Some(r.delta_group_ratio)); + "discriminant_score" => DataType::Float32, Float32Array(|r: &FinalResult| Some(r.discriminant_score)); + "qvalue" => DataType::Float32, Float32Array(|r: &FinalResult| Some(r.qvalue)) + ); + + // 
----------------------------------------------------------------------- + // Array columns: expand [f32; N] into numbered columns + // ----------------------------------------------------------------------- + expand_f32_array_columns(results, "ms2_mz_error", |r| &r.scoring.ms2_mz_errors, &mut fields, &mut arrays); + expand_f32_array_columns(results, "ms2_mobility_error", |r| &r.scoring.ms2_mobility_errors, &mut fields, &mut arrays); + expand_f32_array_columns(results, "ms1_mz_error", |r| &r.scoring.ms1_mz_errors, &mut fields, &mut arrays); + expand_f32_array_columns(results, "ms1_mobility_error", |r| &r.scoring.ms1_mobility_errors, &mut fields, &mut arrays); + expand_f32_array_columns(results, "ms2_intensity_ratio", |r| &r.scoring.ms2_intensity_ratios, &mut fields, &mut arrays); + expand_f32_array_columns(results, "ms1_intensity_ratio", |r| &r.scoring.ms1_intensity_ratios, &mut fields, &mut arrays); + + // ----------------------------------------------------------------------- + // Build the RecordBatch + // ----------------------------------------------------------------------- + let schema = Arc::new(Schema::new(fields)); + RecordBatch::try_new(schema, arrays).expect("schema/array mismatch in build_record_batch") +} + +// --------------------------------------------------------------------------- +// Buffered Parquet writer +// --------------------------------------------------------------------------- + +pub struct ResultParquetWriter { + writer: ArrowWriter, + buffer: Vec, + row_group_size: usize, +} + +impl ResultParquetWriter { + pub fn new(path: impl AsRef, row_group_size: usize) -> std::io::Result { + let file = match File::create_new(path.as_ref()) { + Ok(f) => f, + Err(err) => { + tracing::error!( + "Failed to create file {:?}: {}", + path.as_ref(), + err + ); + return Err(err); + } + }; + + // Build schema from a zero-row batch + let empty_batch = build_record_batch(&[]); + let schema = empty_batch.schema(); + + let props = WriterProperties::builder() + 
.set_compression(Compression::SNAPPY) + .build(); + + let writer = ArrowWriter::try_new(file, schema, Some(props)) + .map_err(|e| std::io::Error::new(std::io::ErrorKind::Other, e))?; + + Ok(Self { + writer, + buffer: Vec::with_capacity(row_group_size), + row_group_size, + }) + } + + pub fn add(&mut self, result: FinalResult) { + self.buffer.push(result); + if self.buffer.len() >= self.row_group_size { + self.flush(); + } + } + + fn flush(&mut self) { + if self.buffer.is_empty() { + return; + } + debug!("Flushing {} results to parquet", self.buffer.len()); + let batch = build_record_batch(&self.buffer); + self.writer.write(&batch).expect("parquet write failed"); + self.buffer.clear(); + } + + pub fn close(mut self) { + self.flush(); + self.writer.close().expect("parquet close failed"); + } +} diff --git a/rust/timsseek_cli/src/processing.rs b/rust/timsseek_cli/src/processing.rs index 2bb9566..2b1f4d2 100644 --- a/rust/timsseek_cli/src/processing.rs +++ b/rust/timsseek_cli/src/processing.rs @@ -219,8 +219,21 @@ pub fn main_loop( ); } - // TODO: Task 7 — manual Parquet writer for FinalResult - let _ = &data; + // Write final results to Parquet + let out_path_pq = _out_path.directory.join("results.parquet"); + let mut pq_writer = timsseek::scoring::parquet_writer::ResultParquetWriter::new( + &out_path_pq, + 20_000, + ) + .map_err(|e| TimsSeekError::Io { + path: out_path_pq.clone().into(), + source: e, + })?; + for res in data.into_iter() { + pq_writer.add(res); + } + pq_writer.close(); + info!("Wrote final results to {:?}", out_path_pq); Ok(PipelineTimings { phase1_prescore_ms: phase1_ms, From 302c926287ebcfc5088eec4668cd52062d534b2b Mon Sep 17 00:00:00 2001 From: "J. Sebastian Paez" Date: Thu, 9 Apr 2026 20:51:44 -0700 Subject: [PATCH 09/64] refactor: remove IonSearchResults, SearchResultBuilder, and parquet_derive Delete search_results.rs entirely and remove all references to IonSearchResults and SearchResultBuilder across the codebase. 
All consumers now use the new ScoredCandidate/CompetedCandidate/FinalResult types. Drop the parquet_derive dependency since the derive macro is no longer used. --- Cargo.lock | 13 - rust/timsseek/Cargo.toml | 1 - rust/timsseek/src/lib.rs | 9 +- rust/timsseek/src/ml/qvalues.rs | 246 -------- rust/timsseek/src/scoring/mod.rs | 2 - rust/timsseek/src/scoring/search_results.rs | 597 -------------------- 6 files changed, 4 insertions(+), 864 deletions(-) delete mode 100644 rust/timsseek/src/scoring/search_results.rs diff --git a/Cargo.lock b/Cargo.lock index 51a8935..f6d4dbd 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -4436,18 +4436,6 @@ dependencies = [ "zstd", ] -[[package]] -name = "parquet_derive" -version = "57.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "777a89bd0515e7948516ce6d3f41c16b3a32de71f7eb99c5fdb55456b695b227" -dependencies = [ - "parquet", - "proc-macro2", - "quote", - "syn 2.0.111", -] - [[package]] name = "paste" version = "1.0.15" @@ -6122,7 +6110,6 @@ dependencies = [ "forust-ml", "micromzpaf", "parquet", - "parquet_derive", "rand 0.9.2", "rayon", "regex", diff --git a/rust/timsseek/Cargo.toml b/rust/timsseek/Cargo.toml index c1b809b..547401c 100644 --- a/rust/timsseek/Cargo.toml +++ b/rust/timsseek/Cargo.toml @@ -27,7 +27,6 @@ serde = { workspace = true } serde_json = { workspace = true } tracing = { workspace = true } rayon = { workspace = true } -parquet_derive = { workspace = true } parquet = { workspace = true } arrow = { workspace = true } diff --git a/rust/timsseek/src/lib.rs b/rust/timsseek/src/lib.rs index 4408baa..89b51f9 100644 --- a/rust/timsseek/src/lib.rs +++ b/rust/timsseek/src/lib.rs @@ -12,10 +12,6 @@ pub mod traits; pub mod utils; pub use micromzpaf; -extern crate parquet; -#[macro_use] -extern crate parquet_derive; - pub use data_sources::Speclib; pub use models::{ DecoyStrategy, @@ -24,8 +20,11 @@ pub use models::{ QueryItemToScore, }; pub use scoring::{ - IonSearchResults, Scorer, + 
ScoredCandidate, + CompetedCandidate, + FinalResult, + ScoringFields, }; pub use timsquery::ion::{ IonAnnot, diff --git a/rust/timsseek/src/ml/qvalues.rs b/rust/timsseek/src/ml/qvalues.rs index 2298b6c..1f56e1d 100644 --- a/rust/timsseek/src/ml/qvalues.rs +++ b/rust/timsseek/src/ml/qvalues.rs @@ -102,7 +102,6 @@ pub fn rescore(mut data: Vec) -> Vec { scored.into_iter().map(|c| c.into_final()).collect() } -use crate::IonSearchResults; use crate::scoring::results::{CompetedCandidate, FinalResult}; fn mean_abs_error(errs: &[f32]) -> f64 { @@ -111,251 +110,6 @@ fn mean_abs_error(errs: &[f32]) -> f64 { if n > 0 { sum / n as f64 } else { f64::NAN } } -impl FeatureLike for IonSearchResults { - fn as_feature(&self) -> impl IntoIterator + '_ { - let Self { - sequence: _, - library_id: _, - decoy_group_id: _, - precursor_mz, - precursor_charge, - precursor_mobility_query, - precursor_rt_query_seconds, - recalibrated_query_rt, - nqueries, - is_target: _, - - // Combined - main_score, - delta_next, - delta_second_next, - obs_rt_seconds, - obs_mobility, - delta_theo_rt, - sq_delta_theo_rt, - calibrated_sq_delta_theo_rt, - delta_ms1_ms2_mobility, - sq_delta_ms1_ms2_mobility, - raising_cycles, - falling_cycles, - - // MS2 - npeaks, - apex_lazyerscore, - ms2_summed_transition_intensity, - ms2_lazyerscore, - ms2_isotope_lazyerscore, - ms2_isotope_lazyerscore_ratio, - lazyscore_z, - lazyscore_vs_baseline, - - // Split product & apex features - split_product_score, - cosine_au_score, - scribe_au_score, - coelution_gradient_cosine, - coelution_gradient_scribe, - cosine_weighted_coelution, - cosine_gradient_consistency, - scribe_weighted_coelution, - scribe_gradient_consistency, - peak_shape, - ratio_cv, - centered_apex, - precursor_coelution, - fragment_coverage, - precursor_apex_match, - xic_quality, - fragment_apex_agreement, - isotope_correlation, - gaussian_correlation, - per_frag_gaussian_corr, - - // MS2 - Split - ms2_mz_error_0, - ms2_mz_error_1, - ms2_mz_error_2, - 
ms2_mz_error_3, - ms2_mz_error_4, - ms2_mz_error_5, - ms2_mz_error_6, - ms2_mobility_error_0, - ms2_mobility_error_1, - ms2_mobility_error_2, - ms2_mobility_error_3, - ms2_mobility_error_4, - ms2_mobility_error_5, - ms2_mobility_error_6, - - // MS1 - ms1_summed_precursor_intensity, - - // MS1 Split - ms1_mz_error_0, - ms1_mz_error_1, - ms1_mz_error_2, - ms1_mobility_error_0, - ms1_mobility_error_1, - ms1_mobility_error_2, - - // Relative Intensities - ms1_inten_ratio_0, - ms1_inten_ratio_1, - ms1_inten_ratio_2, - - ms2_inten_ratio_0, - ms2_inten_ratio_1, - ms2_inten_ratio_2, - ms2_inten_ratio_3, - ms2_inten_ratio_4, - ms2_inten_ratio_5, - ms2_inten_ratio_6, - - discriminant_score: _, - qvalue: _, - delta_group, - delta_group_ratio, - } = *self; - - vec![ - (precursor_mz / 5.0).round(), - precursor_charge as f64, - precursor_mobility_query as f64, - precursor_rt_query_seconds.round() as f64, - nqueries as f64, - // Combined - main_score as f64, - (main_score / delta_next) as f64, - delta_next as f64, - delta_second_next as f64, - obs_rt_seconds as f64, - obs_mobility as f64, - delta_theo_rt as f64, - sq_delta_theo_rt as f64, - delta_ms1_ms2_mobility as f64, - sq_delta_ms1_ms2_mobility as f64, - raising_cycles as f64, - falling_cycles as f64, - // MS2 - npeaks as f64, - apex_lazyerscore as f64, - (ms2_summed_transition_intensity as f64).ln_1p(), - ms2_lazyerscore as f64, - ms2_isotope_lazyerscore as f64, - ms2_isotope_lazyerscore_ratio as f64, - lazyscore_z as f64, - lazyscore_vs_baseline as f64, - // Split product & apex features - (split_product_score as f64).ln_1p(), - (cosine_au_score as f64).ln_1p(), - (scribe_au_score as f64).ln_1p(), - coelution_gradient_cosine as f64, - coelution_gradient_scribe as f64, - cosine_weighted_coelution as f64, - cosine_gradient_consistency as f64, - scribe_weighted_coelution as f64, - scribe_gradient_consistency as f64, - peak_shape as f64, - ratio_cv as f64, - centered_apex as f64, - precursor_coelution as f64, - 
fragment_coverage as f64, - precursor_apex_match as f64, - xic_quality as f64, - fragment_apex_agreement as f64, - isotope_correlation as f64, - gaussian_correlation as f64, - per_frag_gaussian_corr as f64, - // MS2 - Split - ms2_mz_error_0 as f64, - ms2_mz_error_1 as f64, - ms2_mz_error_2 as f64, - ms2_mz_error_3 as f64, - ms2_mz_error_4 as f64, - ms2_mz_error_5 as f64, - ms2_mz_error_6 as f64, - ms2_mobility_error_0 as f64, - ms2_mobility_error_1 as f64, - ms2_mobility_error_2 as f64, - ms2_mobility_error_3 as f64, - ms2_mobility_error_4 as f64, - ms2_mobility_error_5 as f64, - ms2_mobility_error_6 as f64, - // MS1 - (ms1_summed_precursor_intensity as f64).ln_1p(), - // MS1 Split - ms1_mz_error_0 as f64, - ms1_mz_error_1 as f64, - ms1_mz_error_2 as f64, - ms1_mobility_error_0 as f64, - ms1_mobility_error_1 as f64, - ms1_mobility_error_2 as f64, - // Relative Intensities - ms1_inten_ratio_0 as f64, - ms1_inten_ratio_1 as f64, - ms1_inten_ratio_2 as f64, - ms2_inten_ratio_0 as f64, - ms2_inten_ratio_1 as f64, - ms2_inten_ratio_2 as f64, - ms2_inten_ratio_3 as f64, - ms2_inten_ratio_4 as f64, - ms2_inten_ratio_5 as f64, - ms2_inten_ratio_6 as f64, - delta_group as f64, - delta_group_ratio as f64, - recalibrated_query_rt as f64, - calibrated_sq_delta_theo_rt as f64, - // Derived intensity features - { - // Max fragment intensity ratio (dominance of strongest fragment) - let ratios = [ms2_inten_ratio_0, ms2_inten_ratio_1, ms2_inten_ratio_2, - ms2_inten_ratio_3, ms2_inten_ratio_4, ms2_inten_ratio_5, ms2_inten_ratio_6]; - ratios.iter().filter(|r| r.is_finite()).fold(f32::NEG_INFINITY, |a, &b| a.max(b)) as f64 - }, - // Interaction features - (main_score * delta_next) as f64, // score × peak separation - (split_product_score * fragment_coverage) as f64, // base score × coverage - // Summary error features - mean_abs_error(&[ms2_mz_error_0, ms2_mz_error_1, ms2_mz_error_2, - ms2_mz_error_3, ms2_mz_error_4, ms2_mz_error_5, ms2_mz_error_6]), - 
mean_abs_error(&[ms2_mobility_error_0, ms2_mobility_error_1, ms2_mobility_error_2, - ms2_mobility_error_3, ms2_mobility_error_4, ms2_mobility_error_5, ms2_mobility_error_6]), - mean_abs_error(&[ms1_mz_error_0, ms1_mz_error_1, ms1_mz_error_2]), - mean_abs_error(&[ms1_mobility_error_0, ms1_mobility_error_1, ms1_mobility_error_2]), - ] - } - - fn get_y(&self) -> f64 { - if self.is_target { 1.0 } else { 0.0 } - } - - fn assign_score(&mut self, score: f64) { - self.discriminant_score = score as f32; - } - - fn get_score(&self) -> f64 { - self.discriminant_score as f64 - } -} - -impl LabelledScore for IonSearchResults { - fn get_label(&self) -> TargetDecoy { - if self.is_target { - TargetDecoy::Target - } else { - TargetDecoy::Decoy - } - } - - fn assign_qval(&mut self, qval: f32) { - self.qvalue = qval; - } - - fn get_qval(&self) -> f32 { - self.qvalue - } -} - // --------------------------------------------------------------------------- // CompetedCandidate: FeatureLike + LabelledScore // --------------------------------------------------------------------------- diff --git a/rust/timsseek/src/scoring/mod.rs b/rust/timsseek/src/scoring/mod.rs index 4341ecc..1135eb6 100644 --- a/rust/timsseek/src/scoring/mod.rs +++ b/rust/timsseek/src/scoring/mod.rs @@ -6,7 +6,6 @@ pub mod parquet_writer; pub mod pipeline; pub mod results; mod scores; -pub mod search_results; pub mod timings; pub use scores::hyperscore; @@ -18,7 +17,6 @@ pub use pipeline::{ Scorer, }; pub use results::{ScoredCandidate, CompetedCandidate, FinalResult, ScoringFields}; -pub use search_results::IonSearchResults; pub use timings::{PipelineTimings, ScoreTimings}; pub const NUM_MS2_IONS: usize = 7; diff --git a/rust/timsseek/src/scoring/search_results.rs b/rust/timsseek/src/scoring/search_results.rs deleted file mode 100644 index 8ed7645..0000000 --- a/rust/timsseek/src/scoring/search_results.rs +++ /dev/null @@ -1,597 +0,0 @@ -use super::apex_finding::{ - ApexScore, - CandidateContext, - RelativeIntensities, 
-}; -use super::offsets::MzMobilityOffsets; -use super::pipeline::SecondaryLazyScores; -use super::{ - NUM_MS1_IONS, - NUM_MS2_IONS, -}; -use crate::IonAnnot; -use crate::errors::DataProcessingError; -use crate::models::{ - DecoyMarking, - DigestSlice, -}; -use parquet::file::writer::SerializedFileWriter; -use parquet::record::RecordWriter; -use serde::Serialize; -use std::fs::File; -use std::path::Path; -use timsquery::TimsElutionGroup; -use tracing::debug; - -#[derive(Debug, Default)] -pub struct SearchResultBuilder<'q> { - digest_slice: SetField<&'q DigestSlice>, - ref_eg: SetField<&'q TimsElutionGroup>, - decoy_marking: SetField, - library_id: SetField, - decoy_group_id: SetField, - charge: SetField, - nqueries: SetField, - - // Reference values for new metadata-based approach - ref_precursor_mz: SetField, - ref_rt_seconds: SetField, - ref_mobility: SetField, - - main_score: SetField, - delta_next: SetField, - delta_second_next: SetField, - rt_seconds: SetField, - observed_mobility: SetField, - delta_ms1_ms2_mobility: SetField, - // ms1_ms2_correlation: SetField, - npeaks: SetField, - apex_lazyerscore: SetField, - ms2_summed_transition_intensity: SetField, - ms2_lazyerscore: SetField, - ms2_isotope_lazyerscore: SetField, - ms2_isotope_lazyerscore_ratio: SetField, - lazyscore_z: SetField, - lazyscore_vs_baseline: SetField, - - ms2_mz_errors: SetField<[f32; NUM_MS2_IONS]>, - ms2_mobility_errors: SetField<[f32; NUM_MS2_IONS]>, - - ms1_summed_precursor_intensity: SetField, - - ms1_mz_errors: SetField<[f32; NUM_MS1_IONS]>, - ms1_mobility_errors: SetField<[f32; NUM_MS1_IONS]>, - - // Split product & apex features - split_product_score: SetField, - cosine_au_score: SetField, - scribe_au_score: SetField, - coelution_gradient_cosine: SetField, - coelution_gradient_scribe: SetField, - cosine_weighted_coelution: SetField, - cosine_gradient_consistency: SetField, - scribe_weighted_coelution: SetField, - scribe_gradient_consistency: SetField, - peak_shape: SetField, - 
ratio_cv: SetField, - centered_apex: SetField, - precursor_coelution: SetField, - fragment_coverage: SetField, - precursor_apex_match: SetField, - xic_quality: SetField, - fragment_apex_agreement: SetField, - isotope_correlation: SetField, - gaussian_correlation: SetField, - per_frag_gaussian_corr: SetField, - - relative_intensities: SetField, - raising_cycles: SetField, - falling_cycles: SetField, -} - -#[derive(Debug, Clone, Copy, Default)] -pub enum SetField { - Some(T), - #[default] - None, -} - -impl SetField { - pub fn is_some(&self) -> bool { - matches!(self, Self::Some(_)) - } - - pub fn expect_some( - self, - field_name: &'static str, - // msg: impl ToString, - ) -> Result { - match self { - Self::Some(v) => Ok(v), - Self::None => Err(DataProcessingError::ExpectedSetField { - field: field_name, - context: "".into(), - }), - } - } -} - -impl<'q> SearchResultBuilder<'q> { - pub fn with_candidate_context( - mut self, - candidate_context: &'q CandidateContext, - ) -> Self { - self.library_id = SetField::Some(candidate_context.query_values.eg.id() as u32); - self.digest_slice = SetField::Some(&candidate_context.label); - self.ref_eg = SetField::Some(&candidate_context.query_values.eg); - self.nqueries = SetField::Some(candidate_context.query_values.fragments.num_ions() as u8); - self.decoy_marking = SetField::Some(candidate_context.label.decoy); - self.charge = SetField::Some(candidate_context.charge); - self.decoy_group_id = SetField::Some(candidate_context.label.decoy_group); - self - } - - pub fn with_metadata(mut self, metadata: &'q super::apex_finding::PeptideMetadata) -> Self { - self.library_id = SetField::Some(metadata.library_id); - self.digest_slice = SetField::Some(&metadata.digest); - self.decoy_marking = SetField::Some(metadata.digest.decoy); - self.charge = SetField::Some(metadata.charge); - self.decoy_group_id = SetField::Some(metadata.digest.decoy_group); - - // Store ref values for later use in finalize() - self.ref_precursor_mz = 
SetField::Some(metadata.ref_precursor_mz); - self.ref_rt_seconds = SetField::Some(metadata.ref_rt_seconds); - self.ref_mobility = SetField::Some(metadata.ref_mobility_ook0); - - self - } - - pub fn with_nqueries(mut self, nqueries: u8) -> Self { - self.nqueries = SetField::Some(nqueries); - self - } - - pub fn with_sorted_offsets(mut self, offsets: &MzMobilityOffsets) -> Self { - self.ms1_mz_errors = SetField::Some(offsets.ms1_mz_errors()); - self.ms1_mobility_errors = SetField::Some(offsets.ms1_mobility_errors()); - self.ms2_mz_errors = SetField::Some(offsets.ms2_mz_errors()); - self.ms2_mobility_errors = SetField::Some(offsets.ms2_mobility_errors()); - - let mob_errors = offsets.avg_delta_mobs(); - let cum_err = mob_errors.0 + mob_errors.1; - let obs_mob = (offsets.ref_mobility + cum_err.mean_mobility().unwrap_or(f64::NAN)) as f32; - let d_err = match (mob_errors.0.mean_mobility(), mob_errors.1.mean_mobility()) { - (Ok(mz), Ok(mob)) => mz - mob, - _ => f64::NAN, - }; - self.delta_ms1_ms2_mobility = SetField::Some(d_err as f32); - self.observed_mobility = SetField::Some(obs_mob); - self - } - - pub fn with_secondary_lazyscores(mut self, lazyscores: SecondaryLazyScores) -> Self { - self.ms2_lazyerscore = SetField::Some(lazyscores.lazyscore); - self.ms2_isotope_lazyerscore = SetField::Some(lazyscores.iso_lazyscore); - self.ms2_isotope_lazyerscore_ratio = SetField::Some(lazyscores.ratio); - self - } - - pub fn with_relative_intensities(mut self, relative_intensities: RelativeIntensities) -> Self { - self.relative_intensities = SetField::Some(relative_intensities); - self - } - - pub fn with_apex_score(mut self, main_score: &ApexScore) -> Self { - let ApexScore { - score, - retention_time_ms, - joint_apex_cycle: _, - split_product, - features, - delta_next, - delta_second_next, - lazyscore, - lazyscore_vs_baseline, - lazyscore_z, - npeaks, - ms2_summed_intensity, - ms1_summed_intensity, - raising_cycles, - falling_cycles, - } = *main_score; - { - self.main_score = 
SetField::Some(score); - self.delta_next = SetField::Some(delta_next); - self.delta_second_next = SetField::Some(delta_second_next); - self.rt_seconds = SetField::Some(retention_time_ms as f32 / 1000.0); - - self.split_product_score = SetField::Some(split_product.base_score); - self.cosine_au_score = SetField::Some(split_product.cosine_au); - self.scribe_au_score = SetField::Some(split_product.scribe_au); - self.coelution_gradient_cosine = SetField::Some(split_product.cosine_cg); - self.coelution_gradient_scribe = SetField::Some(split_product.scribe_cg); - self.cosine_weighted_coelution = SetField::Some(split_product.cosine_weighted_coelution); - self.cosine_gradient_consistency = SetField::Some(split_product.cosine_gradient_consistency); - self.scribe_weighted_coelution = SetField::Some(split_product.scribe_weighted_coelution); - self.scribe_gradient_consistency = SetField::Some(split_product.scribe_gradient_consistency); - - self.peak_shape = SetField::Some(features.peak_shape); - self.ratio_cv = SetField::Some(features.ratio_cv); - self.centered_apex = SetField::Some(features.centered_apex); - self.precursor_coelution = SetField::Some(features.precursor_coelution); - self.fragment_coverage = SetField::Some(features.fragment_coverage); - self.precursor_apex_match = SetField::Some(features.precursor_apex_match); - self.xic_quality = SetField::Some(features.xic_quality); - self.fragment_apex_agreement = SetField::Some(features.fragment_apex_agreement); - self.isotope_correlation = SetField::Some(features.isotope_correlation); - self.gaussian_correlation = SetField::Some(features.gaussian_correlation); - self.per_frag_gaussian_corr = SetField::Some(features.per_frag_gaussian_corr); - - self.apex_lazyerscore = SetField::Some(lazyscore); - self.lazyscore_z = SetField::Some(lazyscore_z); - self.lazyscore_vs_baseline = SetField::Some(lazyscore_vs_baseline); - self.npeaks = SetField::Some(npeaks); - self.ms1_summed_precursor_intensity = 
SetField::Some(ms1_summed_intensity); - self.ms2_summed_transition_intensity = SetField::Some(ms2_summed_intensity); - self.raising_cycles = SetField::Some(raising_cycles); - self.falling_cycles = SetField::Some(falling_cycles); - } - - self - } - - pub fn finalize(self) -> Result { - macro_rules! expect_some { - ($field:ident) => { - self.$field.expect_some(stringify!($field))? - }; - } - - let [mz1_e0, mz1_e1, mz1_e2] = expect_some!(ms1_mz_errors); - let [mz2_e0, mz2_e1, mz2_e2, mz2_e3, mz2_e4, mz2_e5, mz2_e6] = expect_some!(ms2_mz_errors); - - let [mob1_e0, mob1_e1, mob1_e2] = expect_some!(ms1_mobility_errors); - let [ - mob2_e0, - mob2_e1, - mob2_e2, - mob2_e3, - mob2_e4, - mob2_e5, - mob2_e6, - ] = expect_some!(ms2_mobility_errors); - - let relints = expect_some!(relative_intensities); - let [int1_e0, int1_e1, int1_e2] = relints.ms1.get_values(); - let [ - int2_e0, - int2_e1, - int2_e2, - int2_e3, - int2_e4, - int2_e5, - int2_e6, - ] = relints.ms2.get_values(); - - // Use stored ref values or fallback to ref_eg if available - let ref_mz = if self.ref_precursor_mz.is_some() { - expect_some!(ref_precursor_mz) - } else { - let ref_eg = expect_some!(ref_eg); - ref_eg.mono_precursor_mz() - }; - - let ref_rt = if self.ref_rt_seconds.is_some() { - expect_some!(ref_rt_seconds) - } else { - let ref_eg = expect_some!(ref_eg); - ref_eg.rt_seconds() - }; - - let ref_mob = if self.ref_mobility.is_some() { - expect_some!(ref_mobility) - } else { - let ref_eg = expect_some!(ref_eg); - ref_eg.mobility_ook0() - }; - - let obs_rt_seconds = expect_some!(rt_seconds); - let delta_theo_rt = obs_rt_seconds - ref_rt; - let sq_delta_theo_rt = delta_theo_rt * delta_theo_rt; - - let delta_ms1_ms2_mobility = expect_some!(delta_ms1_ms2_mobility); - let sq_delta_ms1_ms2_mobility = delta_ms1_ms2_mobility * delta_ms1_ms2_mobility; - - let results = IonSearchResults { - sequence: String::from(expect_some!(digest_slice).clone()), - library_id: expect_some!(library_id), - decoy_group_id: 
expect_some!(decoy_group_id), - precursor_mz: ref_mz, - precursor_charge: expect_some!(charge), - precursor_mobility_query: ref_mob, - precursor_rt_query_seconds: ref_rt, - nqueries: expect_some!(nqueries), - is_target: expect_some!(decoy_marking).is_target(), - main_score: expect_some!(main_score), - delta_next: expect_some!(delta_next), - delta_second_next: expect_some!(delta_second_next), - delta_theo_rt, - sq_delta_theo_rt, - obs_rt_seconds, - obs_mobility: expect_some!(observed_mobility), - delta_ms1_ms2_mobility, - sq_delta_ms1_ms2_mobility, - npeaks: expect_some!(npeaks), - raising_cycles: expect_some!(raising_cycles), - falling_cycles: expect_some!(falling_cycles), - - apex_lazyerscore: expect_some!(apex_lazyerscore), - ms2_summed_transition_intensity: expect_some!(ms2_summed_transition_intensity), - ms2_lazyerscore: expect_some!(ms2_lazyerscore), - ms2_isotope_lazyerscore: expect_some!(ms2_isotope_lazyerscore), - ms2_isotope_lazyerscore_ratio: expect_some!(ms2_isotope_lazyerscore_ratio), - lazyscore_z: expect_some!(lazyscore_z), - lazyscore_vs_baseline: expect_some!(lazyscore_vs_baseline), - - split_product_score: expect_some!(split_product_score), - cosine_au_score: expect_some!(cosine_au_score), - scribe_au_score: expect_some!(scribe_au_score), - coelution_gradient_cosine: expect_some!(coelution_gradient_cosine), - coelution_gradient_scribe: expect_some!(coelution_gradient_scribe), - cosine_weighted_coelution: expect_some!(cosine_weighted_coelution), - cosine_gradient_consistency: expect_some!(cosine_gradient_consistency), - scribe_weighted_coelution: expect_some!(scribe_weighted_coelution), - scribe_gradient_consistency: expect_some!(scribe_gradient_consistency), - peak_shape: expect_some!(peak_shape), - ratio_cv: expect_some!(ratio_cv), - centered_apex: expect_some!(centered_apex), - precursor_coelution: expect_some!(precursor_coelution), - fragment_coverage: expect_some!(fragment_coverage), - precursor_apex_match: expect_some!(precursor_apex_match), - 
xic_quality: expect_some!(xic_quality), - fragment_apex_agreement: expect_some!(fragment_apex_agreement), - isotope_correlation: expect_some!(isotope_correlation), - gaussian_correlation: expect_some!(gaussian_correlation), - per_frag_gaussian_corr: expect_some!(per_frag_gaussian_corr), - - ms2_mz_error_0: mz2_e0, - ms2_mz_error_1: mz2_e1, - ms2_mz_error_2: mz2_e2, - ms2_mz_error_3: mz2_e3, - ms2_mz_error_4: mz2_e4, - ms2_mz_error_5: mz2_e5, - ms2_mz_error_6: mz2_e6, - ms2_mobility_error_0: mob2_e0, - ms2_mobility_error_1: mob2_e1, - ms2_mobility_error_2: mob2_e2, - ms2_mobility_error_3: mob2_e3, - ms2_mobility_error_4: mob2_e4, - ms2_mobility_error_5: mob2_e5, - ms2_mobility_error_6: mob2_e6, - - ms1_summed_precursor_intensity: expect_some!(ms1_summed_precursor_intensity), - ms1_mz_error_0: mz1_e0, - ms1_mz_error_1: mz1_e1, - ms1_mz_error_2: mz1_e2, - - ms1_mobility_error_0: mob1_e0, - ms1_mobility_error_1: mob1_e1, - ms1_mobility_error_2: mob1_e2, - - ms1_inten_ratio_0: int1_e0, - ms1_inten_ratio_1: int1_e1, - ms1_inten_ratio_2: int1_e2, - - ms2_inten_ratio_0: int2_e0, - ms2_inten_ratio_1: int2_e1, - ms2_inten_ratio_2: int2_e2, - ms2_inten_ratio_3: int2_e3, - ms2_inten_ratio_4: int2_e4, - ms2_inten_ratio_5: int2_e5, - ms2_inten_ratio_6: int2_e6, - - discriminant_score: f32::NAN, - qvalue: f32::NAN, - delta_group: f32::NAN, - delta_group_ratio: f32::NAN, - recalibrated_query_rt: ref_rt, - calibrated_sq_delta_theo_rt: sq_delta_theo_rt, - }; - - Ok(results) - } -} - -/// Contains the results as they will be serialized to -/// parquet. -/// -/// Napkin math ... as of Sept 23/2025 this struct is 265 bytes -/// Eyeballing a human proteome without mods is 1.8M peptides -/// So ... 500MB-ish / proteome in memory. 
-#[derive(Debug, Clone, Serialize, ParquetRecordWriter)] -pub struct IonSearchResults { - pub sequence: String, - pub library_id: u32, - pub decoy_group_id: u32, - pub precursor_mz: f64, - pub precursor_charge: u8, - pub precursor_mobility_query: f32, - pub precursor_rt_query_seconds: f32, - pub recalibrated_query_rt: f32, - pub nqueries: u8, - pub is_target: bool, - - // Combined - pub main_score: f32, - pub delta_next: f32, - pub delta_second_next: f32, - pub obs_rt_seconds: f32, - pub obs_mobility: f32, - pub delta_theo_rt: f32, - pub sq_delta_theo_rt: f32, - pub calibrated_sq_delta_theo_rt: f32, - pub delta_ms1_ms2_mobility: f32, - // ms1_ms2_correlation: f32, - pub sq_delta_ms1_ms2_mobility: f32, - pub raising_cycles: u8, - pub falling_cycles: u8, - pub delta_group: f32, - pub delta_group_ratio: f32, - - // MS2 - pub npeaks: u8, - pub apex_lazyerscore: f32, - pub ms2_summed_transition_intensity: f32, - pub ms2_lazyerscore: f32, - pub ms2_isotope_lazyerscore: f32, - pub ms2_isotope_lazyerscore_ratio: f32, - pub lazyscore_z: f32, - pub lazyscore_vs_baseline: f32, - - // Split product & apex features - pub split_product_score: f32, - pub cosine_au_score: f32, - pub scribe_au_score: f32, - pub coelution_gradient_cosine: f32, - pub coelution_gradient_scribe: f32, - pub cosine_weighted_coelution: f32, - pub cosine_gradient_consistency: f32, - pub scribe_weighted_coelution: f32, - pub scribe_gradient_consistency: f32, - pub peak_shape: f32, - pub ratio_cv: f32, - pub centered_apex: f32, - pub precursor_coelution: f32, - pub fragment_coverage: f32, - pub precursor_apex_match: f32, - pub xic_quality: f32, - pub fragment_apex_agreement: f32, - pub isotope_correlation: f32, - pub gaussian_correlation: f32, - pub per_frag_gaussian_corr: f32, - - // MS2 - Split - pub ms2_mz_error_0: f32, - pub ms2_mz_error_1: f32, - pub ms2_mz_error_2: f32, - pub ms2_mz_error_3: f32, - pub ms2_mz_error_4: f32, - pub ms2_mz_error_5: f32, - pub ms2_mz_error_6: f32, - pub 
ms2_mobility_error_0: f32, - pub ms2_mobility_error_1: f32, - pub ms2_mobility_error_2: f32, - pub ms2_mobility_error_3: f32, - pub ms2_mobility_error_4: f32, - pub ms2_mobility_error_5: f32, - pub ms2_mobility_error_6: f32, - - // MS1 - pub ms1_summed_precursor_intensity: f32, - - // MS1 Split - pub ms1_mz_error_0: f32, - pub ms1_mz_error_1: f32, - pub ms1_mz_error_2: f32, - pub ms1_mobility_error_0: f32, - pub ms1_mobility_error_1: f32, - pub ms1_mobility_error_2: f32, - - // Relative Intensities - pub ms1_inten_ratio_0: f32, - pub ms1_inten_ratio_1: f32, - pub ms1_inten_ratio_2: f32, - - pub ms2_inten_ratio_0: f32, - pub ms2_inten_ratio_1: f32, - pub ms2_inten_ratio_2: f32, - pub ms2_inten_ratio_3: f32, - pub ms2_inten_ratio_4: f32, - pub ms2_inten_ratio_5: f32, - pub ms2_inten_ratio_6: f32, - - pub discriminant_score: f32, - pub qvalue: f32, -} - -pub struct ResultParquetWriter { - row_group_size: usize, - writer: SerializedFileWriter, - buffer: Vec, -} - -impl ResultParquetWriter { - pub fn new(out_path: impl AsRef, row_group_size: usize) -> Result { - let file = match File::create_new(out_path.as_ref()) { - Ok(file) => file, - Err(err) => { - tracing::error!( - "Failed to open file {:?} with error: {}", - out_path.as_ref(), - err - ); - return Err(err); - } - }; - let results: &[IonSearchResults] = &[]; - let schema = results.schema().unwrap(); - let writer = SerializedFileWriter::new(file, schema, Default::default()).unwrap(); - Ok(Self { - buffer: Vec::with_capacity(row_group_size), - writer, - row_group_size, - }) - } - - fn flush_to_file(&mut self) { - debug!("Flushing {} results to file", self.buffer.len()); - let mut row_group = self.writer.next_row_group().unwrap(); - self.buffer - .as_slice() - .write_to_row_group(&mut row_group) - .unwrap(); - row_group.close().unwrap(); - self.buffer.clear(); - } - - pub fn add(&mut self, result: IonSearchResults) { - self.buffer.push(result); - if self.buffer.len() >= self.row_group_size { - self.flush_to_file(); - 
} - } - - pub fn close(mut self) { - // TODO: add some logging ... - if !self.buffer.is_empty() { - self.flush_to_file(); - } - self.writer.close().unwrap(); - } -} - -pub fn write_results_to_parquet + Clone>( - results: &[IonSearchResults], - out_path: P, -) -> std::result::Result<(), Box> { - // TODO: Implement multi chunk accumulator - let file = match File::create_new(out_path.clone()) { - Ok(file) => file, - Err(err) => { - tracing::error!( - "Failed to open file {:?} with error: {}", - out_path.as_ref(), - err - ); - return Err(Box::new(err)); - } - }; - let schema = results.schema().unwrap(); - let mut writer = SerializedFileWriter::new(file, schema, Default::default()).unwrap(); - let mut row_group = writer.next_row_group().unwrap(); - results.write_to_row_group(&mut row_group).unwrap(); - row_group.close().unwrap(); - writer.close().unwrap(); - - Ok(()) -} From 38479d1c783e90b96e32f0d7524339e3ce0521eb Mon Sep 17 00:00:00 2001 From: "J. Sebastian Paez" Date: Thu, 9 Apr 2026 20:59:08 -0700 Subject: [PATCH 10/64] refactor: apply all field and type renames (spec Section 4) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit - ScoreTraces → ElutionTraces; fields main_score → apex_profile, ms2_cosine_ref_sim → cosine_trace - ScoringContext → Extraction; field query_values → chromatograms - ApexLocation/ApexScore::raising_cycles → rising_cycles - PeptideMetadata::ref_rt_seconds → query_rt_seconds - build_candidate_context → build_broad_extraction in pipeline.rs - build_calibrated_context → build_calibrated_extraction in pipeline.rs - main_loop → execute_pipeline in processing.rs - process_speclib → run_pipeline in processing.rs / main.rs - Move SCRIBE_FLOOR from scribe.rs to apex_features.rs; delete scribe.rs - Update iter_scores() string literals: "main_score" → "apex_profile", "ms2_cosine_ref_sim" → "cosine_trace" - Update viewer files (computed_state.rs, plot_renderer.rs) for all renames --- 
rust/timsquery_viewer/src/computed_state.rs | 16 +-- rust/timsquery_viewer/src/plot_renderer.rs | 10 +- rust/timsseek/src/scoring/apex_finding.rs | 108 +++++++++--------- rust/timsseek/src/scoring/full_results.rs | 4 +- rust/timsseek/src/scoring/pipeline.rs | 40 +++---- rust/timsseek/src/scoring/results.rs | 10 +- .../src/scoring/scores/apex_features.rs | 3 + rust/timsseek/src/scoring/scores/mod.rs | 1 - rust/timsseek/src/scoring/scores/scribe.rs | 2 - rust/timsseek_cli/src/main.rs | 2 +- rust/timsseek_cli/src/processing.rs | 6 +- 11 files changed, 101 insertions(+), 101 deletions(-) delete mode 100644 rust/timsseek/src/scoring/scores/scribe.rs diff --git a/rust/timsquery_viewer/src/computed_state.rs b/rust/timsquery_viewer/src/computed_state.rs index eb653ff..7e442db 100644 --- a/rust/timsquery_viewer/src/computed_state.rs +++ b/rust/timsquery_viewer/src/computed_state.rs @@ -37,7 +37,7 @@ use timsquery::serde::IndexedPeaksHandle; use timsseek::scoring::apex_finding::{ ApexFinder, ApexScore, - ScoringContext, + Extraction, }; /// Result bundle from background chromatogram computation @@ -318,10 +318,10 @@ impl ComputedState { Ok(output) } - #[instrument(skip_all, fields(eg_id = %context.query_values.eg.id()))] + #[instrument(skip_all, fields(eg_id = %context.chromatograms.eg.id()))] fn find_apex( apex_finder: &mut ApexFinder, - context: &ScoringContext, + context: &Extraction, index: &IndexedPeaksHandle, ) -> Result { apex_finder.find_apex(context, &|idx| { @@ -333,11 +333,11 @@ impl ComputedState { let user_msg = match &x { DataProcessingError::ExpectedNonEmptyData { context: err_context } => { tracing::warn!( - "{:#?}", context.query_values.eg, + "{:#?}", context.chromatograms.eg, ); tracing::warn!( "Apex finding failed for elution group {}: No valid data found in context {:?}", - context.query_values.eg.id(), + context.chromatograms.eg.id(), err_context ); "No data found with the current tolerances. 
\ @@ -348,7 +348,7 @@ impl ComputedState { _ => { tracing::error!( "Apex finding failed for elution group {}: {:?}", - context.query_values.eg.id(), + context.chromatograms.eg.id(), x ); format!("Apex finding failed: {:?}", x) @@ -421,9 +421,9 @@ impl ComputedState { .apex_finder .get_or_insert_with(|| ApexFinder::new(num_cycles)); - let scoring_ctx = ScoringContext { + let scoring_ctx = Extraction { expected_intensities: expected_intensities.clone(), - query_values: collector.clone(), + chromatograms: collector.clone(), }; let apex_score = match Self::find_apex(apex_finder, &scoring_ctx, index) { diff --git a/rust/timsquery_viewer/src/plot_renderer.rs b/rust/timsquery_viewer/src/plot_renderer.rs index 129670a..ba711d1 100644 --- a/rust/timsquery_viewer/src/plot_renderer.rs +++ b/rust/timsquery_viewer/src/plot_renderer.rs @@ -15,7 +15,7 @@ use timscentroid::rt_mapping::{ }; use timsseek::scoring::apex_finding::{ ApexScore, - ScoreTraces, + ElutionTraces, }; use crate::chromatogram_processor::ChromatogramOutput; @@ -70,7 +70,7 @@ impl ScoreLines { #[instrument(skip_all)] pub(crate) fn from_scores( apex: ApexScore, - scores: &ScoreTraces, + scores: &ElutionTraces, mapper: &CycleToRTMapping, cycle_offset: usize, ) -> Self { @@ -81,7 +81,7 @@ impl ScoreLines { debug!("Max score for {}: {}", name, max_val); let norm_factor = max_val.max(1e-6); let inv_norm_factor = (1.0 / norm_factor) as f64; - let inv_norm_factor = if name == "main_score" { + let inv_norm_factor = if name == "apex_profile" { debug!("Main score trace length: {}", trace.len()); 1.0 } else { @@ -121,8 +121,8 @@ impl ScoreLines { } let main_score_line = lines - .pop_if(|x| x.name == "main_score") - .expect("There should be a main_score line"); + .pop_if(|x| x.name == "apex_profile") + .expect("There should be an apex_profile line"); let rt_seconds_range = ( lines diff --git a/rust/timsseek/src/scoring/apex_finding.rs b/rust/timsseek/src/scoring/apex_finding.rs index 09b1974..674530b 100644 --- 
a/rust/timsseek/src/scoring/apex_finding.rs +++ b/rust/timsseek/src/scoring/apex_finding.rs @@ -17,7 +17,7 @@ //! digest: digest_slice, //! charge: 2, //! expected_intensities: expected, -//! query_values: chromatogram_collector, +//! chromatograms: chromatogram_collector, //! }; //! //! // 3. Score (reusing the finder's internal buffers) @@ -39,13 +39,13 @@ use crate::models::{ }; use crate::scoring::scores::apex_features::{ ApexFeatures, + SCRIBE_FLOOR, SplitProductScore, compute_apex_features, compute_split_product, compute_weighted_score, find_joint_apex, }; -use crate::scoring::scores::scribe::SCRIBE_FLOOR; use crate::utils::top_n_array::TopNArray; use serde::Serialize; use timsquery::models::aggregators::ChromatogramCollector; @@ -90,7 +90,7 @@ pub struct PeptideMetadata { pub library_id: u32, /// Reference retention time (seconds) from library. - pub ref_rt_seconds: f32, + pub query_rt_seconds: f32, /// Reference ion mobility (ook0) from library. pub ref_mobility_ook0: f32, @@ -104,12 +104,12 @@ pub struct PeptideMetadata { /// This contains only the data needed for scoring calculations, /// separated from metadata for clarity and efficiency. #[derive(Debug)] -pub struct ScoringContext { +pub struct Extraction { /// The expected theoretical intensities of precursor and fragment ions. pub expected_intensities: ExpectedIntensities, /// The observed chromatogram data collected from the instrument. - pub query_values: ChromatogramCollector, + pub chromatograms: ChromatogramCollector, } /// Lightweight result from Phase 1 apex finding. @@ -123,7 +123,7 @@ pub struct ApexLocation { /// Local cycle index of the apex within the extraction. pub apex_cycle: usize, /// Peak shape metrics for baseline computation. 
- pub raising_cycles: u8, + pub rising_cycles: u8, pub falling_cycles: u8, } @@ -154,15 +154,15 @@ pub struct ApexScore { pub npeaks: u8, pub ms2_summed_intensity: f32, pub ms1_summed_intensity: f32, - pub raising_cycles: u8, + pub rising_cycles: u8, pub falling_cycles: u8, } /// Stores time-resolved scores for every cycle in the chromatogram. #[derive(Debug, Clone, Serialize)] -pub struct ScoreTraces { +pub struct ElutionTraces { /// Per-cycle cosine similarity (sqrt-transformed expected). - pub ms2_cosine_ref_sim: Vec, + pub cosine_trace: Vec, /// Per-cycle lazyscore (kept for baseline lambda computation). pub ms2_lazyscore: Vec, /// Per-cycle Scribe score. @@ -172,48 +172,48 @@ pub struct ScoreTraces { /// Per-cycle summed precursor intensity (keys >= 0 only). pub ms1_precursor_trace: Vec, /// Composite apex profile for peak picking. - pub main_score: Vec, + pub apex_profile: Vec, } -impl ScoreTraces { +impl ElutionTraces { pub fn new_with_capacity(capacity: usize) -> Self { Self { - ms2_cosine_ref_sim: Vec::with_capacity(capacity), + cosine_trace: Vec::with_capacity(capacity), ms2_lazyscore: Vec::with_capacity(capacity), ms2_scribe: Vec::with_capacity(capacity), ms2_log_intensity: Vec::with_capacity(capacity), ms1_precursor_trace: Vec::with_capacity(capacity), - main_score: Vec::with_capacity(capacity), + apex_profile: Vec::with_capacity(capacity), } } pub fn clear(&mut self) { - self.ms2_cosine_ref_sim.clear(); + self.cosine_trace.clear(); self.ms2_lazyscore.clear(); self.ms2_scribe.clear(); self.ms2_log_intensity.clear(); self.ms1_precursor_trace.clear(); - self.main_score.clear(); + self.apex_profile.clear(); } /// Resize all buffers to the specified length (filling with 0.0). 
pub fn resize(&mut self, len: usize) { - self.ms2_cosine_ref_sim.resize(len, 0.0); + self.cosine_trace.resize(len, 0.0); self.ms2_lazyscore.resize(len, 0.0); self.ms2_scribe.resize(len, 0.0); self.ms2_log_intensity.resize(len, 0.0); self.ms1_precursor_trace.resize(len, 0.0); - self.main_score.resize(len, 0.0); + self.apex_profile.resize(len, 0.0); } pub fn iter_scores(&self) -> impl Iterator + '_ { vec![ - ("ms2_cosine_ref_sim", &self.ms2_cosine_ref_sim[..]), + ("cosine_trace", &self.cosine_trace[..]), ("ms2_lazyscore", &self.ms2_lazyscore[..]), ("ms2_scribe", &self.ms2_scribe[..]), ("ms2_log_intensity", &self.ms2_log_intensity[..]), ("ms1_precursor_trace", &self.ms1_precursor_trace[..]), - ("main_score", &self.main_score[..]), + ("apex_profile", &self.apex_profile[..]), ] .into_iter() } @@ -222,7 +222,7 @@ impl ScoreTraces { /// The core engine for finding peptide apexes. #[derive(Debug)] pub struct ApexFinder { - pub traces: ScoreTraces, + pub traces: ElutionTraces, buffers: ApexFinderBuffers, } @@ -266,7 +266,7 @@ impl ApexFinderBuffers { impl ApexFinder { pub fn new(capacity: usize) -> Self { Self { - traces: ScoreTraces::new_with_capacity(capacity), + traces: ElutionTraces::new_with_capacity(capacity), buffers: ApexFinderBuffers::new(capacity), } } @@ -274,11 +274,11 @@ impl ApexFinder { /// Build cosine and scribe profiles from traces. /// cosine_profile[i] = cos^3 * intensity, scribe_profile[i] = scribe * intensity. 
fn build_profiles(&self) -> (Vec, Vec) { - let n = self.traces.ms2_cosine_ref_sim.len(); + let n = self.traces.cosine_trace.len(); let mut cosine_profile = Vec::with_capacity(n); let mut scribe_profile = Vec::with_capacity(n); for i in 0..n { - let cos = self.traces.ms2_cosine_ref_sim[i]; + let cos = self.traces.cosine_trace[i]; let intensity = self.traces.ms2_log_intensity[i]; cosine_profile.push(cos * cos * cos * intensity); scribe_profile.push(self.traces.ms2_scribe[i] * intensity); @@ -296,10 +296,10 @@ impl ApexFinder { )] pub fn find_apex_location( &mut self, - scoring_ctx: &ScoringContext, + scoring_ctx: &Extraction, rt_mapper: &dyn Fn(usize) -> u32, ) -> Result { - let collector = &scoring_ctx.query_values; + let collector = &scoring_ctx.chromatograms; let n_cycles = collector.num_cycles(); self.traces.clear(); @@ -311,7 +311,7 @@ impl ApexFinder { self.compute_main_score_trace(); // Peak-pick on apex profile - let peak_picker = PeakPicker::new(&self.traces.main_score); + let peak_picker = PeakPicker::new(&self.traces.apex_profile); let (max_val, max_loc) = match peak_picker.next_peak() { Some(p) => p, None => { @@ -326,8 +326,8 @@ impl ApexFinder { }); } - let (raising_cycles, falling_cycles) = self.calculate_rise_and_fall_cycles(max_loc); - let cycle_offset = scoring_ctx.query_values.cycle_offset(); + let (rising_cycles, falling_cycles) = self.calculate_rise_and_fall_cycles(max_loc); + let cycle_offset = scoring_ctx.chromatograms.cycle_offset(); let retention_time_ms = rt_mapper(max_loc + cycle_offset); // Compute split product score for calibrant ranking @@ -336,7 +336,7 @@ impl ApexFinder { let split_product = compute_split_product( &cosine_profile, &scribe_profile, - &scoring_ctx.query_values.fragments, + &scoring_ctx.chromatograms.fragments, &scoring_ctx.expected_intensities.fragment_intensities, ); @@ -344,7 +344,7 @@ impl ApexFinder { score: split_product.base_score, retention_time_ms, apex_cycle: max_loc, - raising_cycles, + rising_cycles, 
falling_cycles, }) } @@ -358,10 +358,10 @@ impl ApexFinder { )] pub fn find_apex( &mut self, - scoring_ctx: &ScoringContext, + scoring_ctx: &Extraction, rt_mapper: &dyn Fn(usize) -> u32, ) -> Result { - let collector = &scoring_ctx.query_values; + let collector = &scoring_ctx.chromatograms; let n_cycles = collector.num_cycles(); // 1. Reset buffers @@ -388,9 +388,9 @@ impl ApexFinder { )] fn compute_pass_1( &mut self, - scoring_ctx: &ScoringContext, + scoring_ctx: &Extraction, ) -> Result<(), DataProcessingError> { - let collector = &scoring_ctx.query_values; + let collector = &scoring_ctx.chromatograms; // --- MS2 (Fragments) --- let ms2_dot_prod = &mut self.buffers.temp_ms2_dot_prod; @@ -439,7 +439,7 @@ impl ApexFinder { // Finalize cosine, lazyscore, log-intensity let norm_sqrt_exp = ms2_sum_exp.sqrt(); // ||sqrt(exp)|| = sqrt(sum(exp)) - let n = self.traces.ms2_cosine_ref_sim.len(); + let n = self.traces.cosine_trace.len(); for i in 0..n { // Lazyscore self.traces.ms2_lazyscore[i] = @@ -448,10 +448,10 @@ impl ApexFinder { // Cosine (sqrt-transformed expected) let obs_norm = ms2_norm_sq_obs[i].sqrt(); if obs_norm > 0.0 && norm_sqrt_exp > 0.0 { - self.traces.ms2_cosine_ref_sim[i] = + self.traces.cosine_trace[i] = (ms2_dot_prod[i] / (obs_norm * norm_sqrt_exp)).clamp(1e-3, 1.0); } else { - self.traces.ms2_cosine_ref_sim[i] = 1e-3; + self.traces.cosine_trace[i] = 1e-3; } // Log-intensity @@ -524,9 +524,9 @@ impl ApexFinder { tracing::instrument(skip_all, level = "trace") )] fn compute_main_score_trace(&mut self) { - let len = self.traces.ms2_cosine_ref_sim.len(); - self.traces.main_score.clear(); - self.traces.main_score.reserve(len); + let len = self.traces.cosine_trace.len(); + self.traces.apex_profile.clear(); + self.traces.apex_profile.reserve(len); // Compute S(t) = scribe(t) * I(t), find min/max for normalization let mut s_min = f32::INFINITY; @@ -542,7 +542,7 @@ impl ApexFinder { let s_range = s_max - s_min; for i in 0..len { - let cos = 
self.traces.ms2_cosine_ref_sim[i]; + let cos = self.traces.cosine_trace[i]; let intensity = self.traces.ms2_log_intensity[i]; let c = cos * cos * cos * intensity; // cos^3 * I @@ -553,16 +553,16 @@ impl ApexFinder { 0.5 // Degrade to cosine-only when scribe is constant }; - self.traces.main_score.push(c * (0.5 + s_norm)); + self.traces.apex_profile.push(c * (0.5 + s_norm)); } } fn extract_apex_score( &self, - scoring_ctx: &ScoringContext, + scoring_ctx: &Extraction, rt_mapper: &dyn Fn(usize) -> u32, ) -> Result { - let mut peak_picker = PeakPicker::new(&self.traces.main_score); + let mut peak_picker = PeakPicker::new(&self.traces.apex_profile); // Find best peak let (max_val, max_loc) = match peak_picker.next_peak() { @@ -581,10 +581,10 @@ impl ApexFinder { } // Peak shape (rise/fall) for delta computation - let (raising_cycles, falling_cycles) = self.calculate_rise_and_fall_cycles(max_loc); + let (rising_cycles, falling_cycles) = self.calculate_rise_and_fall_cycles(max_loc); // Mask and find next peaks for delta scores - peak_picker.mask_peak(max_loc, raising_cycles as usize, falling_cycles as usize, 2); + peak_picker.mask_peak(max_loc, rising_cycles as usize, falling_cycles as usize, 2); let (next_val, next_loc) = peak_picker.next_peak().unwrap_or((0.0, max_loc)); let (next_raise, next_fall) = self.calculate_rise_and_fall_cycles(next_loc); peak_picker.mask_peak(next_loc, next_raise as usize, next_fall as usize, 1); @@ -600,7 +600,7 @@ impl ApexFinder { let split_product = compute_split_product( &cosine_profile, &scribe_profile, - &scoring_ctx.query_values.fragments, + &scoring_ctx.chromatograms.fragments, &scoring_ctx.expected_intensities.fragment_intensities, ); @@ -610,8 +610,8 @@ impl ApexFinder { // 11 features at joint apex let n_cycles = cosine_profile.len(); let features = compute_apex_features( - &scoring_ctx.query_values.fragments, - &scoring_ctx.query_values.precursors, + &scoring_ctx.chromatograms.fragments, + &scoring_ctx.chromatograms.precursors, 
&scoring_ctx.expected_intensities, &cosine_profile, &self.traces.ms1_precursor_trace, @@ -623,18 +623,18 @@ impl ApexFinder { let score = compute_weighted_score(split_product.base_score, &features); // RT at joint apex - let cycle_offset = scoring_ctx.query_values.cycle_offset(); + let cycle_offset = scoring_ctx.chromatograms.cycle_offset(); let global_loc = joint_apex + cycle_offset; let retention_time_ms = rt_mapper(global_loc); // Intensity counts at joint apex let (ms1_summed_intensity, _) = - self.sum_intensities_at(&scoring_ctx.query_values.precursors, joint_apex); + self.sum_intensities_at(&scoring_ctx.chromatograms.precursors, joint_apex); let (ms2_summed_intensity, ms2_npeaks) = - self.sum_intensities_at(&scoring_ctx.query_values.fragments, joint_apex); + self.sum_intensities_at(&scoring_ctx.chromatograms.fragments, joint_apex); // Lazyscore baseline stats - let lambda = self.calculate_baseline_lambda(max_loc, raising_cycles, falling_cycles); + let lambda = self.calculate_baseline_lambda(max_loc, rising_cycles, falling_cycles); let k = self.traces.ms2_lazyscore[joint_apex] as f64; let norm_lazy_std = lambda.sqrt().max(1.0) as f32; let lazyscore_z = self.traces.ms2_lazyscore[joint_apex] / norm_lazy_std; @@ -659,7 +659,7 @@ impl ApexFinder { npeaks: ms2_npeaks as u8, ms2_summed_intensity, ms1_summed_intensity, - raising_cycles, + rising_cycles, falling_cycles, }) } diff --git a/rust/timsseek/src/scoring/full_results.rs b/rust/timsseek/src/scoring/full_results.rs index 1b3974b..4d3c8f5 100644 --- a/rust/timsseek/src/scoring/full_results.rs +++ b/rust/timsseek/src/scoring/full_results.rs @@ -1,12 +1,12 @@ use crate::IonAnnot; -use crate::scoring::apex_finding::ScoreTraces; +use crate::scoring::apex_finding::ElutionTraces; use crate::scoring::results::ScoredCandidate; use serde::Serialize; use timsquery::models::aggregators::ChromatogramCollector; #[derive(Debug, Clone, Serialize)] pub struct FullQueryResult { - pub main_score_elements: ScoreTraces, + pub 
main_score_elements: ElutionTraces, pub longitudinal_main_score: Vec, pub extractions: ChromatogramCollector, pub search_results: ScoredCandidate, diff --git a/rust/timsseek/src/scoring/pipeline.rs b/rust/timsseek/src/scoring/pipeline.rs index bdc217d..30883d7 100644 --- a/rust/timsseek/src/scoring/pipeline.rs +++ b/rust/timsseek/src/scoring/pipeline.rs @@ -326,13 +326,13 @@ impl Scorer { feature = "instrumentation", tracing::instrument(skip_all, level = "trace") )] - fn build_candidate_context( + fn build_broad_extraction( &self, item: &QueryItemToScore, ) -> Result< ( super::apex_finding::PeptideMetadata, - super::apex_finding::ScoringContext, + super::apex_finding::Extraction, ), SkippingReason, > { @@ -352,7 +352,7 @@ impl Scorer { } let mut agg = tracing::span!( tracing::Level::TRACE, - "build_candidate_context::new_collector" + "build_broad_extraction::new_collector" ).in_scope(|| { match ChromatogramCollector::new( item.query.clone(), @@ -370,7 +370,7 @@ impl Scorer { } }); - tracing::span!(tracing::Level::TRACE, "build_candidate_context::add_query").in_scope( + tracing::span!(tracing::Level::TRACE, "build_broad_extraction::add_query").in_scope( || { self.index.add_query(&mut agg, &self.broad_tolerance); }, @@ -387,14 +387,14 @@ impl Scorer { digest: item.digest.clone(), charge: item.query.precursor_charge(), library_id: agg.eg.id() as u32, - ref_rt_seconds: item.query.rt_seconds(), + query_rt_seconds: item.query.rt_seconds(), ref_mobility_ook0: item.query.mobility_ook0(), ref_precursor_mz: item.query.mono_precursor_mz(), }; - let scoring_ctx = super::apex_finding::ScoringContext { + let scoring_ctx = super::apex_finding::Extraction { expected_intensities, - query_values: agg, + chromatograms: agg, }; Ok((metadata, scoring_ctx)) @@ -528,7 +528,7 @@ impl Scorer { // Re-implementing logic here because process_query consumes `item` and returns `Option`. // We want intermediate results for `FullQueryResult`. 
- let (metadata, scoring_ctx) = self.build_candidate_context(&item).map_err(|_| { + let (metadata, scoring_ctx) = self.build_broad_extraction(&item).map_err(|_| { DataProcessingError::ExpectedNonEmptyData { context: Some("RT out of bounds".into()), } @@ -537,7 +537,7 @@ impl Scorer { let apex_score = buffer.find_apex(&scoring_ctx, &|idx| self.map_rt_index_to_milis(idx))?; let (inner_collector, isotope_collector) = self.execute_secondary_query(&item, &apex_score); - let nqueries = scoring_ctx.query_values.fragments.num_ions() as u8; + let nqueries = scoring_ctx.chromatograms.fragments.num_ions() as u8; let search_results = self.finalize_results( &metadata, nqueries, @@ -546,12 +546,12 @@ impl Scorer { &isotope_collector, )?; - // Extract query_values before it's consumed - let extractions = scoring_ctx.query_values; + // Extract chromatograms before it's consumed + let extractions = scoring_ctx.chromatograms; Ok(FullQueryResult { main_score_elements: buffer.traces.clone(), - longitudinal_main_score: buffer.traces.main_score.clone(), + longitudinal_main_score: buffer.traces.apex_profile.clone(), extractions, search_results, }) @@ -563,14 +563,14 @@ impl Scorer { feature = "instrumentation", tracing::instrument(skip_all, level = "trace") )] - fn build_calibrated_context( + fn build_calibrated_extraction( &self, item: &QueryItemToScore, calibration: &CalibrationResult, ) -> Result< ( super::apex_finding::PeptideMetadata, - super::apex_finding::ScoringContext, + super::apex_finding::Extraction, ), SkippingReason, > { @@ -613,14 +613,14 @@ impl Scorer { digest: item.digest.clone(), charge: item.query.precursor_charge(), library_id: agg.eg.id() as u32, - ref_rt_seconds: calibrated_rt, + query_rt_seconds: calibrated_rt, ref_mobility_ook0: item.query.mobility_ook0(), ref_precursor_mz: item.query.mono_precursor_mz(), }; - let scoring_ctx = super::apex_finding::ScoringContext { + let scoring_ctx = super::apex_finding::Extraction { expected_intensities, - query_values: agg, + 
chromatograms: agg, }; Ok((metadata, scoring_ctx)) @@ -642,7 +642,7 @@ impl Scorer { let st = Instant::now(); let (metadata, scoring_ctx) = tracing::span!(tracing::Level::TRACE, "score_calibrated::extraction").in_scope( - || match self.build_calibrated_context(item, calibration) { + || match self.build_calibrated_extraction(item, calibration) { Ok(result) => Some(result), Err(_) => None, }, @@ -674,7 +674,7 @@ impl Scorer { .in_scope(|| self.execute_secondary_query(item, &apex_score)); timings.secondary_query += st.elapsed(); - let nqueries = scoring_ctx.query_values.fragments.num_ions() as u8; + let nqueries = scoring_ctx.chromatograms.fragments.num_ions() as u8; let st = Instant::now(); let out = tracing::span!(tracing::Level::TRACE, "score_calibrated::finalize").in_scope( || { @@ -759,7 +759,7 @@ impl Scorer { buffer: &mut ApexFinder, ) -> Option<(ApexLocation, PeptideMetadata)> { let (metadata, scoring_ctx) = tracing::span!(tracing::Level::TRACE, "prescore::extraction") - .in_scope(|| match self.build_candidate_context(item) { + .in_scope(|| match self.build_broad_extraction(item) { Ok(result) => Some(result), Err(SkippingReason::RetentionTimeOutOfBounds) => None, })?; diff --git a/rust/timsseek/src/scoring/results.rs b/rust/timsseek/src/scoring/results.rs index 215e97f..22c0d96 100644 --- a/rust/timsseek/src/scoring/results.rs +++ b/rust/timsseek/src/scoring/results.rs @@ -203,7 +203,7 @@ pub struct ScoredCandidateBuilder { is_target: SetField, // --- Reference RT / mobility (used to compute deltas) --- - ref_rt_seconds: SetField, + query_rt_seconds: SetField, // --- Observed RT / mobility --- obs_rt_seconds: SetField, @@ -279,7 +279,7 @@ impl ScoredCandidateBuilder { self.precursor_charge = SetField::Some(metadata.charge); self.precursor_mz = SetField::Some(metadata.ref_precursor_mz); self.precursor_mobility = SetField::Some(metadata.ref_mobility_ook0); - self.ref_rt_seconds = SetField::Some(metadata.ref_rt_seconds); + self.query_rt_seconds = 
SetField::Some(metadata.query_rt_seconds); self } @@ -339,7 +339,7 @@ impl ScoredCandidateBuilder { npeaks, ms2_summed_intensity, ms1_summed_intensity, - raising_cycles, + rising_cycles, falling_cycles, } = *main_score; @@ -380,7 +380,7 @@ impl ScoredCandidateBuilder { self.npeaks = SetField::Some(npeaks); self.ms1_summed_intensity = SetField::Some(ms1_summed_intensity); self.ms2_summed_intensity = SetField::Some(ms2_summed_intensity); - self.rising_cycles = SetField::Some(raising_cycles); + self.rising_cycles = SetField::Some(rising_cycles); self.falling_cycles = SetField::Some(falling_cycles); self @@ -395,7 +395,7 @@ impl ScoredCandidateBuilder { } let obs_rt_seconds = expect_some!(obs_rt_seconds); - let ref_rt = expect_some!(ref_rt_seconds); + let ref_rt = expect_some!(query_rt_seconds); let delta_rt = obs_rt_seconds - ref_rt; let sq_delta_rt = delta_rt * delta_rt; diff --git a/rust/timsseek/src/scoring/scores/apex_features.rs b/rust/timsseek/src/scoring/scores/apex_features.rs index e83ec77..90587a5 100644 --- a/rust/timsseek/src/scoring/scores/apex_features.rs +++ b/rust/timsseek/src/scoring/scores/apex_features.rs @@ -8,6 +8,9 @@ use std::collections::HashMap; use timsquery::models::MzMajorIntensityArray; use timsquery::traits::KeyLike; +/// Floor value for Scribe score when no signal is observed. 
+pub const SCRIBE_FLOOR: f32 = -100.0; + // --------------------------------------------------------------------------- // Structs // --------------------------------------------------------------------------- diff --git a/rust/timsseek/src/scoring/scores/mod.rs b/rust/timsseek/src/scoring/scores/mod.rs index 1ac4801..d749579 100644 --- a/rust/timsseek/src/scoring/scores/mod.rs +++ b/rust/timsseek/src/scoring/scores/mod.rs @@ -1,3 +1,2 @@ pub mod apex_features; pub mod hyperscore; -pub mod scribe; diff --git a/rust/timsseek/src/scoring/scores/scribe.rs b/rust/timsseek/src/scoring/scores/scribe.rs deleted file mode 100644 index c625516..0000000 --- a/rust/timsseek/src/scoring/scores/scribe.rs +++ /dev/null @@ -1,2 +0,0 @@ -/// Floor value for Scribe score when no signal is observed. -pub const SCRIBE_FLOOR: f32 = -100.0; diff --git a/rust/timsseek_cli/src/main.rs b/rust/timsseek_cli/src/main.rs index b2770f3..bdbc491 100644 --- a/rust/timsseek_cli/src/main.rs +++ b/rust/timsseek_cli/src/main.rs @@ -306,7 +306,7 @@ fn process_single_file( }; // Process speclib - processing::process_speclib( + processing::run_pipeline( speclib_path, calib_lib_path, &pipeline, diff --git a/rust/timsseek_cli/src/processing.rs b/rust/timsseek_cli/src/processing.rs index 2b1f4d2..8393aed 100644 --- a/rust/timsseek_cli/src/processing.rs +++ b/rust/timsseek_cli/src/processing.rs @@ -106,7 +106,7 @@ fn check_rt_scale_compatibility(main_lib: &Speclib, calib_lib: &Speclib) { feature = "instrumentation", tracing::instrument(skip_all, level = "trace") )] -pub fn main_loop( +pub fn execute_pipeline( speclib: Speclib, calib_lib: Option, pipeline: &Scorer, @@ -657,7 +657,7 @@ fn target_decoy_compete(mut results: Vec) -> Vec, pipeline: &Scorer, @@ -694,7 +694,7 @@ pub fn process_speclib( None => None, }; - let timings = main_loop(speclib, calib_lib, pipeline, chunk_size, output)?; + let timings = execute_pipeline(speclib, calib_lib, pipeline, chunk_size, output)?; let perf_report = 
serde_json::to_string_pretty(&timings).map_err(|e| TimsSeekError::ParseError { msg: format!("Error serializing performance report to JSON: {}", e), From 8a2d3326fbeae34528805b3dff0d23c46a71eeaa Mon Sep 17 00:00:00 2001 From: "J. Sebastian Paez" Date: Thu, 9 Apr 2026 21:01:56 -0700 Subject: [PATCH 11/64] refactor: plumb calibrated tolerances into execute_secondary_query, remove secondary_tolerance Add get_spectral_tolerance() and get_isotope_tolerance() to CalibrationResult, thread them as explicit parameters through execute_secondary_query and its callers, and delete the secondary_tolerance field from Scorer. --- rust/timsseek/src/rt_calibration.rs | 16 +++++++++++++++ rust/timsseek/src/scoring/pipeline.rs | 29 +++++++++++++-------------- rust/timsseek/src/traits.rs | 2 -- rust/timsseek_cli/src/main.rs | 6 ------ 4 files changed, 30 insertions(+), 23 deletions(-) diff --git a/rust/timsseek/src/rt_calibration.rs b/rust/timsseek/src/rt_calibration.rs index 2a4302e..7511cac 100644 --- a/rust/timsseek/src/rt_calibration.rs +++ b/rust/timsseek/src/rt_calibration.rs @@ -57,6 +57,22 @@ impl CalibrationResult { } } + /// Tolerance for the secondary spectral query at a detected apex. + pub fn get_spectral_tolerance(&self) -> Tolerance { + Tolerance { + ms: MzTolerance::Ppm(self.mz_tolerance_ppm), + rt: RtTolerance::Minutes((0.5 / 60.0, 0.5 / 60.0)), // ~0.5 seconds + mobility: MobilityTolerance::Pct(self.mobility_tolerance_pct), + quad: QuadTolerance::Absolute((0.1, 0.1)), + } + } + + /// Tight mobility tolerance for isotope pattern matching. + pub fn get_isotope_tolerance(&self) -> Tolerance { + self.get_spectral_tolerance() + .with_mobility_tolerance(MobilityTolerance::Pct((3.0, 3.0))) + } + /// Fallback when calibration fails: identity RT mapping, secondary tolerance. 
pub fn fallback(pipeline: &Scorer) -> Self { let range = pipeline.index.ms1_cycle_mapping().range_milis(); diff --git a/rust/timsseek/src/scoring/pipeline.rs b/rust/timsseek/src/scoring/pipeline.rs index 30883d7..a375187 100644 --- a/rust/timsseek/src/scoring/pipeline.rs +++ b/rust/timsseek/src/scoring/pipeline.rs @@ -32,7 +32,6 @@ use timscentroid::rt_mapping::{ MS1CycleIndex, RTIndex, }; -use timsquery::models::tolerance::MobilityTolerance; use timsquery::utils::TupleRange; use timsquery::{ ChromatogramCollector, @@ -308,9 +307,6 @@ pub struct Scorer { /// Broad tolerance used during the prescore phase. pub broad_tolerance: Tolerance, - /// Refined tolerance used at detected apex for secondary queries. - pub secondary_tolerance: Tolerance, - /// m/z range where peptides were fragmented. /// Queries with precursors outside this range are filtered out. pub fragmented_range: TupleRange, @@ -444,12 +440,14 @@ impl Scorer { /// Performs refined secondary query at detected apex with two-pass strategy. 
#[cfg_attr( feature = "instrumentation", - tracing::instrument(skip(self, item, main_score), level = "trace") + tracing::instrument(skip(self, item, main_score, spectral_tol, isotope_tol), level = "trace") )] fn execute_secondary_query( &self, item: &QueryItemToScore, main_score: &ApexScore, + spectral_tol: &Tolerance, + isotope_tol: &Tolerance, ) -> ( SpectralCollector, SpectralCollector, @@ -460,7 +458,7 @@ impl Scorer { let new_query = item.query.clone().with_rt_seconds(new_rt_seconds); let mut agg: SpectralCollector<_, MzMobilityStatsCollector> = SpectralCollector::new(new_query); - self.index.add_query(&mut agg, &self.secondary_tolerance); + self.index.add_query(&mut agg, spectral_tol); // Calculate weighted mean mobility from observed data let mobility = Self::get_mobility(&agg); @@ -480,13 +478,8 @@ impl Scorer { let mut agg: SpectralCollector<_, MzMobilityStatsCollector> = SpectralCollector::new(new_query); - let tol_use = self - .secondary_tolerance - .clone() - .with_mobility_tolerance(MobilityTolerance::Pct((3.0, 3.0))); - - self.index.add_query(&mut agg, &tol_use); - self.index.add_query(&mut isotope_agg, &tol_use); + self.index.add_query(&mut agg, isotope_tol); + self.index.add_query(&mut isotope_agg, isotope_tol); (agg, isotope_agg) } @@ -522,6 +515,7 @@ impl Scorer { pub fn process_query_full( &self, item: QueryItemToScore, + calibration: &CalibrationResult, ) -> Result { let mut buffer = ApexFinder::new(self.num_cycles()); @@ -535,7 +529,10 @@ impl Scorer { })?; let apex_score = buffer.find_apex(&scoring_ctx, &|idx| self.map_rt_index_to_milis(idx))?; - let (inner_collector, isotope_collector) = self.execute_secondary_query(&item, &apex_score); + let spectral_tol = calibration.get_spectral_tolerance(); + let isotope_tol = calibration.get_isotope_tolerance(); + let (inner_collector, isotope_collector) = + self.execute_secondary_query(&item, &apex_score, &spectral_tol, &isotope_tol); let nqueries = scoring_ctx.chromatograms.fragments.num_ions() as u8; 
let search_results = self.finalize_results( @@ -669,9 +666,11 @@ impl Scorer { timings.localize += st.elapsed(); let st = Instant::now(); + let spectral_tol = calibration.get_spectral_tolerance(); + let isotope_tol = calibration.get_isotope_tolerance(); let (inner_collector, isotope_collector) = tracing::span!(tracing::Level::TRACE, "score_calibrated::secondary_query") - .in_scope(|| self.execute_secondary_query(item, &apex_score)); + .in_scope(|| self.execute_secondary_query(item, &apex_score, &spectral_tol, &isotope_tol)); timings.secondary_query += st.elapsed(); let nqueries = scoring_ctx.chromatograms.fragments.num_ions() as u8; diff --git a/rust/timsseek/src/traits.rs b/rust/timsseek/src/traits.rs index 0d7e0fb..ca6065d 100644 --- a/rust/timsseek/src/traits.rs +++ b/rust/timsseek/src/traits.rs @@ -40,14 +40,12 @@ use timsquery::{ /// /// # let peaks: IndexedTimstofPeaks = unimplemented!(); /// # let broad_tol = Tolerance::default(); -/// # let secondary_tol = Tolerance::default(); /// # let fragmented_range = (400.0, 1200.0).try_into().unwrap(); /// /// // IndexedTimstofPeaks implements ScorerQueriable /// let scorer = Scorer { /// index: peaks, /// broad_tolerance: broad_tol, -/// secondary_tolerance: secondary_tol, /// fragmented_range, /// }; /// diff --git a/rust/timsseek_cli/src/main.rs b/rust/timsseek_cli/src/main.rs index bdbc491..8e87d6f 100644 --- a/rust/timsseek_cli/src/main.rs +++ b/rust/timsseek_cli/src/main.rs @@ -5,7 +5,6 @@ mod processing; use clap::Parser; use timsquery::TimsTofPath; -use timsquery::models::tolerance::RtTolerance; use timsquery::serde::load_index_auto; use timsquery::utils::TupleRange; use timsseek::scoring::Scorer; @@ -260,11 +259,6 @@ fn process_single_file( let pipeline = Scorer { index, broad_tolerance: config.analysis.tolerance.clone(), - secondary_tolerance: config - .analysis - .tolerance - .clone() - .with_rt_tolerance(RtTolerance::Minutes((0.2, 0.2))), fragmented_range, }; From 6d063813887718f7dd61708f5389876072fb54d4 
Mon Sep 17 00:00:00 2001 From: "J. Sebastian Paez" Date: Thu, 9 Apr 2026 21:05:25 -0700 Subject: [PATCH 12/64] feat: PipelineReport with full timing (phases 4-6) and q-value counts --- rust/timsseek/src/scoring/mod.rs | 2 +- rust/timsseek/src/scoring/pipeline.rs | 8 ++-- rust/timsseek/src/scoring/timings.rs | 67 ++++++++++++++------------- rust/timsseek_cli/src/processing.rs | 52 ++++++++++++++++----- 4 files changed, 82 insertions(+), 47 deletions(-) diff --git a/rust/timsseek/src/scoring/mod.rs b/rust/timsseek/src/scoring/mod.rs index 1135eb6..85d68dc 100644 --- a/rust/timsseek/src/scoring/mod.rs +++ b/rust/timsseek/src/scoring/mod.rs @@ -17,7 +17,7 @@ pub use pipeline::{ Scorer, }; pub use results::{ScoredCandidate, CompetedCandidate, FinalResult, ScoringFields}; -pub use timings::{PipelineTimings, ScoreTimings}; +pub use timings::{PipelineReport, ScoreTimings}; pub const NUM_MS2_IONS: usize = 7; pub const NUM_MS1_IONS: usize = 3; diff --git a/rust/timsseek/src/scoring/pipeline.rs b/rust/timsseek/src/scoring/pipeline.rs index a375187..f16bf08 100644 --- a/rust/timsseek/src/scoring/pipeline.rs +++ b/rust/timsseek/src/scoring/pipeline.rs @@ -644,7 +644,7 @@ impl Scorer { Err(_) => None, }, )?; - timings.prescore += st.elapsed(); + timings.extraction += st.elapsed(); if scoring_ctx .expected_intensities @@ -663,7 +663,7 @@ impl Scorer { .ok() }, )?; - timings.localize += st.elapsed(); + timings.scoring += st.elapsed(); let st = Instant::now(); let spectral_tol = calibration.get_spectral_tolerance(); @@ -671,7 +671,7 @@ impl Scorer { let (inner_collector, isotope_collector) = tracing::span!(tracing::Level::TRACE, "score_calibrated::secondary_query") .in_scope(|| self.execute_secondary_query(item, &apex_score, &spectral_tol, &isotope_tol)); - timings.secondary_query += st.elapsed(); + timings.spectral_query += st.elapsed(); let nqueries = scoring_ctx.chromatograms.fragments.num_ions() as u8; let st = Instant::now(); @@ -686,7 +686,7 @@ impl Scorer { ) }, ); - 
timings.finalization += st.elapsed(); + timings.assembly += st.elapsed(); match out { Ok(res) => Some(res), diff --git a/rust/timsseek/src/scoring/timings.rs b/rust/timsseek/src/scoring/timings.rs index 3f0a3ff..d795a2a 100644 --- a/rust/timsseek/src/scoring/timings.rs +++ b/rust/timsseek/src/scoring/timings.rs @@ -16,23 +16,23 @@ use std::time::Duration; /// /// ```ignore /// let (results, timings) = scorer.score_iter(&queries); -/// println!("Prescore: {}ms", timings.prescore.as_millis()); -/// println!("Localize: {}ms", timings.localize.as_millis()); +/// println!("Extraction: {}ms", timings.extraction.as_millis()); +/// println!("Scoring: {}ms", timings.scoring.as_millis()); /// ``` #[derive(Debug, Default)] pub struct ScoreTimings { - /// Time spent collecting chromatographic data (Stage 1: Prescore). - pub prescore: Duration, + /// Time spent collecting chromatographic data (Stage 1: Extraction). + pub extraction: Duration, - /// Time spent finding peak apex (Stage 2: Localization). + /// Time spent finding peak apex (Stage 2: Scoring). /// This is typically the bottleneck (~62% of total time). - pub localize: Duration, + pub scoring: Duration, - /// Time spent refining search at detected apex (Stage 3: Secondary Query). - pub secondary_query: Duration, + /// Time spent refining search at detected apex (Stage 3: Spectral Query). + pub spectral_query: Duration, - /// Time spent assembling final results (Stage 4: Finalization). - pub finalization: Duration, + /// Time spent assembling final results (Stage 4: Assembly). 
+ pub assembly: Duration, } impl Serialize for ScoreTimings { @@ -42,37 +42,42 @@ impl Serialize for ScoreTimings { { use serde::ser::SerializeStruct; let mut state = serializer.serialize_struct("ScoreTimings", 4)?; - state.serialize_field("prescore_ms", &self.prescore.as_millis())?; - state.serialize_field("localize_ms", &self.localize.as_millis())?; - state.serialize_field("secondary_query_ms", &self.secondary_query.as_millis())?; - state.serialize_field("finalization_ms", &self.finalization.as_millis())?; + state.serialize_field("extraction_ms", &self.extraction.as_millis())?; + state.serialize_field("scoring_ms", &self.scoring.as_millis())?; + state.serialize_field("spectral_query_ms", &self.spectral_query.as_millis())?; + state.serialize_field("assembly_ms", &self.assembly.as_millis())?; state.end() } } impl std::ops::AddAssign for ScoreTimings { fn add_assign(&mut self, rhs: Self) { - self.prescore += rhs.prescore; - self.localize += rhs.localize; - self.secondary_query += rhs.secondary_query; - self.finalization += rhs.finalization; + self.extraction += rhs.extraction; + self.scoring += rhs.scoring; + self.spectral_query += rhs.spectral_query; + self.assembly += rhs.assembly; } } -/// Phase-level + stage-level timing for the two-pass pipeline. -/// All durations are in milliseconds. +/// Full pipeline report: per-phase timings and result-quality metrics. +/// All timing fields are in milliseconds. #[derive(Debug, Default, Serialize)] -pub struct PipelineTimings { - /// Wall time for Phase 1 (broad prescore, all peptides). +pub struct PipelineReport { + // Timings (all in ms) pub phase1_prescore_ms: u64, - /// Wall time for Phase 2 (calibration: RT fit + error measurement). pub phase2_calibration_ms: u64, - /// Time spent building calibrated chromatograms in Phase 3. - pub phase3_prescore_ms: u64, - /// Time spent finding peak apex in Phase 3 (typically the bottleneck). - pub phase3_localize_ms: u64, - /// Time spent on secondary spectral query in Phase 3. 
- pub phase3_secondary_query_ms: u64, - /// Time spent assembling final results in Phase 3. - pub phase3_finalization_ms: u64, + pub phase3_extraction_ms: u64, + pub phase3_scoring_ms: u64, + pub phase3_spectral_query_ms: u64, + pub phase3_assembly_ms: u64, + pub phase4_competition_ms: u64, + pub phase5_rescore_ms: u64, + pub phase6_output_ms: u64, + + // Result quality + pub total_scored: usize, + pub total_after_competition: usize, + pub targets_at_1pct_qval: usize, + pub targets_at_5pct_qval: usize, + pub targets_at_10pct_qval: usize, } diff --git a/rust/timsseek_cli/src/processing.rs b/rust/timsseek_cli/src/processing.rs index 8393aed..58cecba 100644 --- a/rust/timsseek_cli/src/processing.rs +++ b/rust/timsseek_cli/src/processing.rs @@ -30,7 +30,7 @@ use timsseek::scoring::{ CalibrantHeap, CalibrationConfig, CompetedCandidate, - PipelineTimings, + PipelineReport, ScoredCandidate, ScoreTimings, }; @@ -112,7 +112,7 @@ pub fn execute_pipeline( pipeline: &Scorer, chunk_size: usize, _out_path: &OutputConfig, -) -> std::result::Result { +) -> std::result::Result { let calib_config = CalibrationConfig::default(); // === PHASE 1: Broad prescore -> collect top calibrants === @@ -204,22 +204,43 @@ pub fn execute_pipeline( phase3_start.elapsed() ); - // === Post-processing === + let total_scored = results.len(); + + // === PHASE 4: Target-decoy competition === + let phase4_start = Instant::now(); let mut competed = target_decoy_compete(results); competed.sort_unstable_by(|x, y| { y.scoring.main_score.partial_cmp(&x.scoring.main_score).unwrap() }); + let phase4_ms = phase4_start.elapsed().as_millis() as u64; + let total_after_competition = competed.len(); + // === PHASE 5: Rescore (GBM cross-validated discriminant) === + let phase5_start = Instant::now(); let data = rescore(competed); - for val in report_qvalues_at_thresholds(&data, &[0.01, 0.05, 0.1, 0.5, 1.0]) { - let (thresh, n_below_thresh, n_targets, n_decoys) = val; + let phase5_ms = 
phase5_start.elapsed().as_millis() as u64; + + // Collect q-value threshold counts and print summary + let qval_report = report_qvalues_at_thresholds(&data, &[0.01, 0.05, 0.1, 0.5, 1.0]); + let mut targets_at_1pct_qval = 0usize; + let mut targets_at_5pct_qval = 0usize; + let mut targets_at_10pct_qval = 0usize; + for &(thresh, n_below_thresh, n_targets, n_decoys) in &qval_report { println!( "Found {} targets and {} decoys at q-value threshold {:.2} ({} total)", n_targets, n_decoys, thresh, n_below_thresh ); + if (thresh - 0.01).abs() < 1e-6 { + targets_at_1pct_qval = n_targets; + } else if (thresh - 0.05).abs() < 1e-6 { + targets_at_5pct_qval = n_targets; + } else if (thresh - 0.10).abs() < 1e-6 { + targets_at_10pct_qval = n_targets; + } } - // Write final results to Parquet + // === PHASE 6: Write Parquet output === + let phase6_start = Instant::now(); let out_path_pq = _out_path.directory.join("results.parquet"); let mut pq_writer = timsseek::scoring::parquet_writer::ResultParquetWriter::new( &out_path_pq, @@ -233,15 +254,24 @@ pub fn execute_pipeline( pq_writer.add(res); } pq_writer.close(); + let phase6_ms = phase6_start.elapsed().as_millis() as u64; info!("Wrote final results to {:?}", out_path_pq); - Ok(PipelineTimings { + Ok(PipelineReport { phase1_prescore_ms: phase1_ms, phase2_calibration_ms: phase2_ms, - phase3_prescore_ms: phase3_timings.prescore.as_millis() as u64, - phase3_localize_ms: phase3_timings.localize.as_millis() as u64, - phase3_secondary_query_ms: phase3_timings.secondary_query.as_millis() as u64, - phase3_finalization_ms: phase3_timings.finalization.as_millis() as u64, + phase3_extraction_ms: phase3_timings.extraction.as_millis() as u64, + phase3_scoring_ms: phase3_timings.scoring.as_millis() as u64, + phase3_spectral_query_ms: phase3_timings.spectral_query.as_millis() as u64, + phase3_assembly_ms: phase3_timings.assembly.as_millis() as u64, + phase4_competition_ms: phase4_ms, + phase5_rescore_ms: phase5_ms, + phase6_output_ms: phase6_ms, + 
total_scored, + total_after_competition, + targets_at_1pct_qval, + targets_at_5pct_qval, + targets_at_10pct_qval, }) } From 373ff425d3659031230f55fa7ce2322dc8a99f73 Mon Sep 17 00:00:00 2001 From: "J. Sebastian Paez" Date: Thu, 9 Apr 2026 21:07:51 -0700 Subject: [PATCH 13/64] feat: wire dp_lookback from CalibrationConfig into pathfinding - Add `lookback: usize` parameter to `find_optimal_path` (calibrt), removing the hardcoded `let lookback = 30` - Add `lookback: usize` parameter to `calibrate_with_ranges`; update the `calibrate` convenience wrapper to pass 30 - Pass `config.dp_lookback` from `CalibrationConfig` through `calibrate_from_phase1` in processing.rs - Pass lookback=10 in the identity fallback in rt_calibration.rs (2-point curve needs no large window) - Remove unused `lowess_frac` field from `CalibrationConfig` - Update calibrt integration tests to supply the new lookback argument --- rust/calibrt/src/lib.rs | 5 +++-- rust/calibrt/src/pathfinding.rs | 3 +-- rust/calibrt/tests/tests.rs | 4 ++-- rust/timsseek/src/rt_calibration.rs | 2 +- rust/timsseek/src/scoring/pipeline.rs | 2 -- rust/timsseek_cli/src/processing.rs | 2 +- 6 files changed, 8 insertions(+), 10 deletions(-) diff --git a/rust/calibrt/src/lib.rs b/rust/calibrt/src/lib.rs index bb15cda..b3173b8 100644 --- a/rust/calibrt/src/lib.rs +++ b/rust/calibrt/src/lib.rs @@ -188,6 +188,7 @@ pub fn calibrate_with_ranges( x_range: (f64, f64), y_range: (f64, f64), grid_size: usize, + lookback: usize, ) -> Result { // Module 1: Grid data and apply nonmaximal suppression let mut grid = Grid::new(grid_size, x_range, y_range)?; @@ -203,7 +204,7 @@ pub fn calibrate_with_ranges( .collect(); // Module 2: Find the optimal ascending path - let optimal_path_points = pathfinding::find_optimal_path(&mut filtered_nodes); + let optimal_path_points = pathfinding::find_optimal_path(&mut filtered_nodes, lookback); // Module 3: Fit the final points and prepare for extrapolation let calcurve = 
CalibrationCurve::new(optimal_path_points); match &calcurve { @@ -262,5 +263,5 @@ pub fn calibrate(points: &[Point], grid_size: usize) -> Result Vec { +pub(crate) fn find_optimal_path(nodes: &mut [crate::grid::Node], lookback: usize) -> Vec { if nodes.is_empty() { return Vec::new(); } @@ -35,7 +35,6 @@ pub(crate) fn find_optimal_path(nodes: &mut [crate::grid::Node]) -> Vec lookback { i - lookback } else { 0 }; for j in start..i { // Only create edges where both dimensions increase (monotonic constraint) diff --git a/rust/calibrt/tests/tests.rs b/rust/calibrt/tests/tests.rs index e8d310b..f92b5e8 100644 --- a/rust/calibrt/tests/tests.rs +++ b/rust/calibrt/tests/tests.rs @@ -39,7 +39,7 @@ fn test_calibrate_zero_x_range() { y: 60.0, weight: 1.0, }]; - let result = calibrate_with_ranges(&points, (50.0, 50.0), (0.0, 100.0), 50); + let result = calibrate_with_ranges(&points, (50.0, 50.0), (0.0, 100.0), 50, 30); assert!(result.is_err()); } @@ -51,7 +51,7 @@ fn test_calibrate_zero_y_range() { y: 60.0, weight: 1.0, }]; - let result = calibrate_with_ranges(&points, (0.0, 100.0), (60.0, 60.0), 50); + let result = calibrate_with_ranges(&points, (0.0, 100.0), (60.0, 60.0), 50, 30); assert!(result.is_err()); } diff --git a/rust/timsseek/src/rt_calibration.rs b/rust/timsseek/src/rt_calibration.rs index 7511cac..ce9a0b9 100644 --- a/rust/timsseek/src/rt_calibration.rs +++ b/rust/timsseek/src/rt_calibration.rs @@ -90,7 +90,7 @@ impl CalibrationResult { weight: 1.0, }, ]; - let cal_curve = calibrate_with_ranges(&points, (start, end), (start, end), 10) + let cal_curve = calibrate_with_ranges(&points, (start, end), (start, end), 10, 10) .expect("Identity calibration should not fail"); Self { diff --git a/rust/timsseek/src/scoring/pipeline.rs b/rust/timsseek/src/scoring/pipeline.rs index f16bf08..6c9582a 100644 --- a/rust/timsseek/src/scoring/pipeline.rs +++ b/rust/timsseek/src/scoring/pipeline.rs @@ -144,7 +144,6 @@ pub struct CalibrationConfig { pub mobility_sigma: f32, pub 
rt_sigma_factor: f32, pub min_rt_tolerance_minutes: f32, - pub lowess_frac: f32, pub calibration_query_rt_window_minutes: f32, pub dp_lookback: usize, } @@ -158,7 +157,6 @@ impl Default for CalibrationConfig { mobility_sigma: 3.0, rt_sigma_factor: 3.0, min_rt_tolerance_minutes: 0.5, - lowess_frac: 0.5, calibration_query_rt_window_minutes: 0.5, dp_lookback: 30, } diff --git a/rust/timsseek_cli/src/processing.rs b/rust/timsseek_cli/src/processing.rs index 58cecba..471326a 100644 --- a/rust/timsseek_cli/src/processing.rs +++ b/rust/timsseek_cli/src/processing.rs @@ -405,7 +405,7 @@ fn calibrate_from_phase1( ); let cal_curve = - calibrate_with_ranges(&points, (min_x, max_x), (min_y, max_y), config.grid_size)?; + calibrate_with_ranges(&points, (min_x, max_x), (min_y, max_y), config.grid_size, config.dp_lookback)?; // === Step B: Measure m/z and mobility errors at calibrant apexes === let query_tolerance = Tolerance { From 93550bb7e7d8e132482717b2024002a60c2d0911 Mon Sep 17 00:00:00 2001 From: "J. 
Sebastian Paez" Date: Thu, 9 Apr 2026 21:11:09 -0700 Subject: [PATCH 14/64] chore: remove timsseek_rescore and timsseek_rts_receiver packages --- README.md | 6 - bench/wandb_bench.py | 63 +-- pyproject.toml | 7 - python/timsseek_rescore/pyproject.toml | 26 - .../timsseek_rescore/__init__.py | 0 .../timsseek_rescore/__main__.py | 4 - .../timsseek_rescore/backends/mlp.py | 517 ------------------ .../timsseek_rescore/backends/mokapot.py | 23 - .../timsseek_rescore/backends/xgb.py | 401 -------------- .../timsseek_rescore/timsseek_rescore/cli.py | 88 --- .../timsseek_rescore/datamodels.py | 16 - .../timsseek_rescore/feateng.py | 355 ------------ .../timsseek_rescore/folding.py | 40 -- .../timsseek_rescore/plotting.py | 209 ------- python/timsseek_rts_receiver/app.py | 81 --- python/timsseek_rts_receiver/pyproject.toml | 21 - .../timsseek_rts_receiver/__init__.py | 0 .../timsseek_rts_receiver/constants.py | 13 - .../timsseek_rts_receiver/io.py | 35 -- .../timsseek_rts_receiver/models.py | 248 --------- .../timsseek_rts_receiver/receiver.py | 201 ------- run.bash | 3 - serve.bash | 16 - 23 files changed, 2 insertions(+), 2371 deletions(-) delete mode 100644 python/timsseek_rescore/pyproject.toml delete mode 100644 python/timsseek_rescore/timsseek_rescore/__init__.py delete mode 100644 python/timsseek_rescore/timsseek_rescore/__main__.py delete mode 100644 python/timsseek_rescore/timsseek_rescore/backends/mlp.py delete mode 100644 python/timsseek_rescore/timsseek_rescore/backends/mokapot.py delete mode 100644 python/timsseek_rescore/timsseek_rescore/backends/xgb.py delete mode 100644 python/timsseek_rescore/timsseek_rescore/cli.py delete mode 100644 python/timsseek_rescore/timsseek_rescore/datamodels.py delete mode 100644 python/timsseek_rescore/timsseek_rescore/feateng.py delete mode 100644 python/timsseek_rescore/timsseek_rescore/folding.py delete mode 100644 python/timsseek_rescore/timsseek_rescore/plotting.py delete mode 100644 python/timsseek_rts_receiver/app.py 
delete mode 100644 python/timsseek_rts_receiver/pyproject.toml delete mode 100644 python/timsseek_rts_receiver/timsseek_rts_receiver/__init__.py delete mode 100644 python/timsseek_rts_receiver/timsseek_rts_receiver/constants.py delete mode 100644 python/timsseek_rts_receiver/timsseek_rts_receiver/io.py delete mode 100644 python/timsseek_rts_receiver/timsseek_rts_receiver/models.py delete mode 100644 python/timsseek_rts_receiver/timsseek_rts_receiver/receiver.py delete mode 100644 serve.bash diff --git a/README.md b/README.md index 7772f0a..d867896 100644 --- a/README.md +++ b/README.md @@ -94,9 +94,6 @@ cargo run --release --bin timsseek -- \ --speclib-file $SPECLIB_NAME \ --output-dir $RESULTS_DIR \ --dotd-file $DOTD_FILE $EXTRAS - -# Run rescoring with summary plotting and target-decoy competitions -uv run python -m timsseek_rescore --results_dir $RESULTS_DIR --output_dir $SUMMARY_DIR ``` #### On-Demand Search @@ -121,9 +118,6 @@ cargo run --bin timsseek_rts --release -- \ --dotd-file $RAW_FILE & SERVER_PID=$! 
-# To start the receiver, this sample app allows typing a peptide -# and visualizing the scores -uv run python -m streamlit run python/timsseek_rts_receiver/app.py kill $SERVER_PID wait diff --git a/bench/wandb_bench.py b/bench/wandb_bench.py index 51e3437..14f46a6 100644 --- a/bench/wandb_bench.py +++ b/bench/wandb_bench.py @@ -10,7 +10,6 @@ import json import subprocess import tempfile -import tomllib from contextlib import contextmanager from dataclasses import dataclass from pathlib import Path @@ -102,8 +101,6 @@ def run( ) # Results are now in a subdirectory named after the raw file raw_file_stem = self.raw_file_location.stem - file_results_dir = results_path / raw_file_stem - self._rescore(results_dir=file_results_dir, summary_dir=summary_dir) self.log_results(wandb_experiment, output_loc, raw_file_stem) @staticmethod @@ -152,38 +149,6 @@ def _run(config_path, speclib_path, output_path, raw_file): logger.info(f"Timsseek completed with return code {res.returncode}") return res - @staticmethod - def _rescore(results_dir: Path, summary_dir: Path): - args = [ - "uv", - "run", - "python", - "-m", - "timsseek_rescore", - "--results_dir", - str(results_dir), - "--output_dir", - str(summary_dir), - ] - stdout_file = summary_dir / "timsseek_stdout.log" - stderr_file = summary_dir / "timsseek_stderr.log" - - logger.info(f"Starting rescoring, logging to {stdout_file} and {stderr_file}") - try: - res = subprocess.run( - args, - stdout=open(stdout_file, "w"), - stderr=open(stderr_file, "w"), - check=True, - ) - finally: - # Log stdout and stderr - logger.info(stdout_file.read_text()) - logger.error(stderr_file.read_text()) - - logger.info(f"Rescoring completed with return code {res.returncode}") - return res - def log_results(self, wandb_experiment, results_loc, raw_file_stem): metrics = self.crunch_metrics(results_loc, raw_file_stem) with open("latest_metrics.json", "w") as f: @@ -214,38 +179,14 @@ def default_timsseek_config(): @staticmethod def 
crunch_metrics(output_dir: Path, raw_file_stem: str) -> dict[str, Any]: metrics = {} - xgboost_images = [ - ("variable_importance_plot", "importances.png"), - ("mass_error_plot", "mass_error_rt_1pct.png"), - ("mobility_error_plot", "mobility_error_rt_1pct.png"), - ("mz_mobility_plot", "mz_mobility_1pct.png"), - ("predicted_rt_obs_plot", "predicted_rt_obs_rt_1pct.png"), - ("mass_error_mz_1pct_plot", "mass_error_mz_1pct.png"), - ] - - for metric_name, file_name in xgboost_images: - img_path = output_dir / "summ" / "xgboost" / file_name - if img_path.exists(): - metrics[metric_name] = wandb.Image(img_path) - else: - logger.warning(f"Image {img_path} does not exist, skipping") - - report_toml = output_dir / "summ" / "xgboost" / "report.toml" - with open(report_toml, "rb") as f: - report = tomllib.load(f) - metrics.update(report["report"]) - performance_report_path = ( output_dir / "res" / raw_file_stem / "performance_report.json" ) if performance_report_path.exists(): with open(performance_report_path, "r") as f: - performance_report = json.load(f) - metrics.update(performance_report) + metrics.update(json.load(f)) else: - logger.warning( - f"Performance report {performance_report_path} does not exist" - ) + logger.warning(f"Performance report {performance_report_path} does not exist") return metrics diff --git a/pyproject.toml b/pyproject.toml index 1b5f346..ab6488a 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -5,8 +5,6 @@ requires-python = ">=3.11,<3.13" dependencies = [ "jupyter[python]>=1.1.1", "speclib_builder[ml]", - "timsseek_rescore", - "timsseek_rts_receiver", ] [dependency-groups] @@ -28,14 +26,10 @@ interactive = [ [tool.uv.sources] speclib_builder = { workspace = true } -timsseek_rescore = { workspace = true } -timsseek_rts_receiver = { workspace = true } [tool.uv.workspace] members = [ "python/speclib_builder", - "python/timsseek_rescore", - "python/timsseek_rts_receiver", ] [tool.ruff] @@ -51,7 +45,6 @@ select = ["E", "F", "T20", "I"] 
[tool.hatch.build.targets.wheel] packages = [ "python/speclib_builder", - "python/timsseek_rescore", ] [tool.bumpver] diff --git a/python/timsseek_rescore/pyproject.toml b/python/timsseek_rescore/pyproject.toml deleted file mode 100644 index 77963d8..0000000 --- a/python/timsseek_rescore/pyproject.toml +++ /dev/null @@ -1,26 +0,0 @@ -[build-system] -requires = ["hatchling"] -build-backend = "hatchling.build" - -[project] -name = "timsseek_rescore" -version = "0.26.0" -requires-python = ">=3.11,<3.13" -dependencies = [ - "polars", - "rich", - "matplotlib", - "numpy", - "tqdm", - "mokapot", - "xgboost", - "torch", - "uniplot", -] - -[tool.hatch.build.targets.wheel] -only-packages = true - -[tool.uv.sources] -# TODO: publish this package ... maybe ... -mokapot = { git = "https://github.com/jspaezp/mokapot.git", branch = "feat/re_add_confidence_api" } diff --git a/python/timsseek_rescore/timsseek_rescore/__init__.py b/python/timsseek_rescore/timsseek_rescore/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/python/timsseek_rescore/timsseek_rescore/__main__.py b/python/timsseek_rescore/timsseek_rescore/__main__.py deleted file mode 100644 index 8e5f130..0000000 --- a/python/timsseek_rescore/timsseek_rescore/__main__.py +++ /dev/null @@ -1,4 +0,0 @@ -from .cli import cli_main - -if __name__ == "__main__": - cli_main() diff --git a/python/timsseek_rescore/timsseek_rescore/backends/mlp.py b/python/timsseek_rescore/timsseek_rescore/backends/mlp.py deleted file mode 100644 index fcf5176..0000000 --- a/python/timsseek_rescore/timsseek_rescore/backends/mlp.py +++ /dev/null @@ -1,517 +0,0 @@ -from dataclasses import dataclass -from pathlib import Path -from typing import List, Optional - -import matplotlib.pyplot as plt -import mokapot -import numpy as np -import polars as pl -import torch -import torch.nn as nn -import torch.nn.functional as F -from rich.pretty import pprint -from torch.utils.data import DataLoader, TensorDataset -from uniplot import 
histogram - -from ..datamodels import Report -from ..folding import to_folds -from ..plotting import plot_importances - -# FFN_ACTIVATION = nn.SELU -FFN_ACTIVATION = nn.ReLU - - -class AsymmetricMarginBCELoss(nn.Module): - def __init__(self, *, margin_0=0.1, margin_1=0.4): - """ - margin_0: margin for negative class (0s) - margin_1: margin for positive class (1s) - - This loss pushes negative predictions further from decision boundary - - Larger margin_0 makes the model more conservative about predicting 1s - (reduces false positives) - Smaller margin_1 means we're more lenient about false negatives - Both margins are clamped to keep predictions in [0,1] range - """ - super().__init__() - self.margin_0 = margin_0 - self.margin_1 = margin_1 - - def forward(self, predictions, targets): - # Add margins to predictions based on true class - adjusted_preds = torch.where( - targets == 1, predictions + self.margin_1, predictions - self.margin_0 - ) - - # Clamp to valid probability range - adjusted_preds = torch.clamp(adjusted_preds, 0, 1) - - # Compute BCE loss - loss = F.binary_cross_entropy(adjusted_preds, targets, reduction="mean") - - return loss - - -class WeightedBCELoss(nn.Module): - def __init__(self, pos_weight=0.2, neg_weight=1.0): - """ - pos_weight: weight for positive class (1s) - neg_weight: weight for negative class (0s) - """ - super().__init__() - self.pos_weight = pos_weight - self.neg_weight = neg_weight - - def forward(self, predictions, targets): - # Create weight tensor based on targets - weights = torch.where( - targets == 1, - torch.tensor(self.pos_weight, device=targets.device), - torch.tensor(self.neg_weight, device=targets.device), - ) - - # Standard BCE loss - bce_loss = F.binary_cross_entropy(predictions, targets, reduction="none") - - # Apply weights - weighted_loss = weights * bce_loss - - return weighted_loss.mean() - - -class FocalLoss3(nn.Module): - def __init__(self, alpha=0.25, gamma=2.0, reduce=True): - """ - Focal Loss: (1 - p)^gamma * 
log(p) for positive class - p^gamma * log(1-p) for negative class - - alpha: weighing factor for positive class - gamma: focusing parameter that reduces the loss contribution from easy examples - """ - super().__init__() - if alpha < 0 or alpha > 1: - raise ValueError("Alpha must be in [0, 1]") - self.alpha = alpha - self.gamma = gamma - self.reduce = reduce - - def forward(self, predictions, targets): - # BCE loss - bce_loss = F.binary_cross_entropy(predictions, targets, reduction="none") - - # Focal term - pt = torch.where(targets == 1, predictions, 1 - predictions) - focal_term = (1 - pt) ** self.gamma - - # Alpha weighing - alpha_weight = torch.where( - targets == 1, - torch.tensor(self.alpha, device=targets.device), - torch.tensor(1 - self.alpha, device=targets.device), - ) - - loss = alpha_weight * focal_term * bce_loss - if self.reduce: - return loss.mean() - else: - return loss - - -class ResidualMLPBlock(nn.Module): - def __init__(self, input_dim: int, dropout: float = 0.0): - super().__init__() - self.input_layer = nn.Linear(input_dim, input_dim) - # self.activation = nn.SELU() - self.activation = FFN_ACTIVATION() - self.dropout = nn.Dropout(dropout) - self.batchnorm = nn.BatchNorm1d(input_dim) - - def forward(self, x): - residual = x - x = self.input_layer(x) - x = self.batchnorm(x) - x = self.activation(x) - x = self.dropout(x) - return x + residual - - -class ResidualMLPBlockI(nn.Module): - def __init__(self, input_dim: int, output_dim: int, dropout: float = 0.0): - super().__init__() - self.input_layer = nn.Linear(input_dim, output_dim) - # self.activation = nn.SELU() - self.activation = FFN_ACTIVATION() - self.dropout = nn.Dropout(dropout) - self.batchnorm = nn.BatchNorm1d(output_dim) - self.input_projection = nn.Linear(input_dim, output_dim) - - def forward(self, x): - residual = x - x = self.input_layer(x) - x = self.activation(x) - x = self.dropout(x) - x = self.batchnorm(x) - return x + self.input_projection(residual) - - -class 
BinaryClassifier(nn.Module): - def __init__( - self, - input_dim: int, - nhidden_layers: int = 2, - hidden_dims: int = 48, - dropout: float = 0.00, - ): - super().__init__() - - # ACTIVATION = nn.ReLU - # Selu seems to be critical to train deeper networks. - # ACTIVATION = nn.SELU - - layers = [] - layers.append(nn.BatchNorm1d(input_dim)) - layers.append( - ResidualMLPBlockI( - input_dim=input_dim, output_dim=hidden_dims, dropout=dropout - ) - ) - self.input_layer = nn.Sequential(*layers) - - layers = [] - for _ in range(nhidden_layers): - layers.append(ResidualMLPBlock(input_dim=hidden_dims, dropout=dropout)) - - self.hidden_layers = nn.ModuleList(layers) - - layers = [] - layers.append(nn.Linear(hidden_dims, 1)) - layers.append(nn.Sigmoid()) - self.output_layer = nn.Sequential(*layers) - - pprint(self) - nparams = sum(p.numel() for p in self.parameters()) - nparams_trainable = sum(p.numel() for p in self.parameters() if p.requires_grad) - pprint(f"Number of parameters: {nparams}") - pprint(f"Number of trainable parameters: {nparams_trainable}") - - def forward(self, x): - x = self.input_layer(x) - for layer in self.hidden_layers: - # x = x + layer(layer_norm(x)) - x = layer(x) - - return self.output_layer(x) - - def train_epoch(self, dataloader, device, optimizer, criterion) -> float: - self.train() - train_losses = [] - for x_batch, y_batch in dataloader: - x_batch, y_batch = x_batch.to(device), y_batch.to(device) - - optimizer.zero_grad() - y_pred = self(x_batch) - loss = criterion(y_pred, y_batch.view(-1, 1)) - loss.backward() - optimizer.step() - - train_losses.append(loss.item()) - - self.eval() - return np.mean(train_losses).item() - - def val_epoch(self, dataloader, device, criterion) -> float: - val_losses = [] - with torch.no_grad(): - for x_batch, y_batch in dataloader: - x_batch, y_batch = ( - x_batch.to(device), - y_batch.to(device), - ) - y_pred = self(x_batch) - val_loss = criterion(y_pred, y_batch.view(-1, 1)) - val_losses.append(val_loss.item()) - 
- avg_val_loss = np.mean(val_losses) - return avg_val_loss - - -@dataclass -class MLPKFoldModel: - """ - PyTorch implementation of K-Fold cross validation. - Each fold contains: - 1. One fold for training - 2. One fold for validation (early stopping) - 3. Remaining folds for inference - """ - - folds: List[tuple[torch.Tensor, torch.Tensor]] # List of (features, targets) tuples - models: List[Optional[BinaryClassifier]] - scores: List[Optional[torch.Tensor]] - device: torch.device - - def determine_device(self): - # Determine the device - if torch.cuda.is_available(): - self.device = torch.device("cuda") - print("Using CUDA (GPU) for training.") - elif hasattr(torch.backends, "mps") and torch.backends.mps.is_available(): - self.device = torch.device("mps") - print("Using MPS (Apple Silicon GPU) for training.") - else: - self.device = torch.device("cpu") - print("Using CPU for training.") - - @staticmethod - def from_folds( - folds: List[tuple[torch.Tensor, torch.Tensor]], device: torch.device - ): - return MLPKFoldModel( - folds=folds, - models=[None] * len(folds), - scores=[None] * len(folds), - device=device, - ) - - def train( - self, - batch_size: int = 250, - epochs: int = 20, - learning_rate: float = 1e-4, - pos_weight: float = 0.2, - **kwargs, - ): - for i in range(len(self.folds)): - print(f"Training model {i}/{len(self.folds)}") - - # Prepare data - train_data = self.folds[i] - val_data = self.folds[(i + 1) % len(self.folds)] - - train_dataset = TensorDataset(train_data[0], train_data[1]) - val_dataset = TensorDataset(val_data[0], val_data[1]) - - train_loader = DataLoader( - train_dataset, - batch_size=batch_size, - shuffle=True, - num_workers=0, - ) - val_loader = DataLoader( - val_dataset, - batch_size=batch_size, - num_workers=0, - ) - - # Initialize model - model = BinaryClassifier(input_dim=train_data[0].shape[1], **kwargs).to( - self.device - ) - optimizer = torch.optim.AdamW(model.parameters(), lr=learning_rate) - # Choose one of the following loss 
functions based on your needs: - # criterion = WeightedBCELoss(pos_weight=pos_weight, neg_weight=1.0) - # criterion = AsymmetricMarginBCELoss(margin_0=0.5, margin_1=0.2) - # criterion = WeightedBCELoss2(fneg_weight=pos_weight) - - # Usually gamma is positive bc the desire is to emphasize well classified - # BUT ... since we know we have a lot of false positives, we want to give - # "false targets" a lower weight. - criterion = FocalLoss3(alpha=pos_weight, gamma=0.5) - - best_val_loss = float("inf") - patience = 5 - patience_counter = 0 - best_model = None - - for epoch in range(epochs): - train_loss_mean = model.train_epoch( - dataloader=train_loader, - device=self.device, - optimizer=optimizer, - criterion=criterion, - ) - val_loss_mean = model.val_epoch( - dataloader=val_loader, - device=self.device, - criterion=criterion, - ) - - # Early stopping - print( - f"Epoch {epoch}: train_loss = {train_loss_mean:.4f}, val_loss = {val_loss_mean:.4f}" - ) - if val_loss_mean < best_val_loss: - best_val_loss = val_loss_mean - best_model = model.state_dict() - patience_counter = 0 - else: - patience_counter += 1 - if patience_counter >= patience: - print( - f"Early stopping at epoch {epoch}." 
- f" Best validation loss: {best_val_loss:.4f}" - ) - break - - # Save best model - model.load_state_dict(best_model) - self.models[i] = model - - def score(self, batch_size: int = 32): - for i in range(len(self.folds)): - print(f"Scoring fold {i}/{len(self.folds)}") - fold_data = self.folds[i] - dataset = TensorDataset(fold_data[0], fold_data[1]) - loader = DataLoader(dataset, batch_size=batch_size) - - scores_list = [] - - for j, model in enumerate(self.models): - if j == i or j == (i + 1) % len(self.folds): - continue - - model.eval() - fold_scores = [] - - with torch.no_grad(): - for x_batch, _ in loader: - x_batch = x_batch.to(self.device) - predictions = model(x_batch) - fold_scores.append(predictions.cpu()) - - scores_list.append(torch.cat(fold_scores)) - - self.scores[i] = torch.stack(scores_list).mean(dim=0) - - def get_importances2(self, feat_names: list[str] | None = None): - """ - Calculate feature importance using gradients after BatchNorm layer. - Uses hooks to capture intermediate gradients. 
- """ - importances = [] - - for model in self.models: - importance_dict = {} - post_bn_gradients = [] - - # Register hook to capture gradients after BatchNorm - def hook_fn(module, grad_input, grad_output): - # post_bn_gradients.append(grad_input[0].detach().cpu()) - post_bn_gradients.append(grad_output[0].detach().cpu()) - - # Get the BatchNorm layer - bn_layer = model.input_layer[0] # First layer is BatchNorm - hook = bn_layer.register_backward_hook(hook_fn) - - # Compute gradients for each fold - for i, (features, targets) in enumerate(self.folds): - features = features.to(self.device) - features.requires_grad_(True) - post_bn_gradients = [] # Reset for each batch - - # Forward and backward pass - output = model(features) - output.sum().backward() - - # Average gradients across samples in the fold - fold_grads = post_bn_gradients[0].abs().mean(dim=0) - - # Update importance dictionary - if not importance_dict: - if feat_names is None: - feat_names = [f"feature_{j}" for j in range(features.shape[1])] - importance_dict = { - k: fold_grads[j].item() for j, k in enumerate(feat_names) - } - else: - for j, k in enumerate(feat_names): - importance_dict[k] += fold_grads[j].item() - - features.requires_grad_(False) - model.zero_grad() - - for key in importance_dict: - importance_dict[key] /= len(self.folds) - - importances.append(importance_dict) - - hook.remove() - - imps_order = sorted(importances[0].items(), key=lambda x: x[1], reverse=True) - out = {k[0]: [w.get(k[0], 0) for w in importances] for k in imps_order} - return out - - def concat_scores(self): - if self.scores[0] is None: - raise ValueError("Scores not computed") - return torch.cat(self.scores) - - def concat_targets(self): - return torch.cat([fold[1] for fold in self.folds]) - - -def to_torch_folds(shuffled_df: pl.LazyFrame, num_folds: int, cols): - tmp = to_folds(shuffled_df=shuffled_df, cols=cols, num_folds=num_folds) - out = [ - (torch.from_numpy(x[0]).float(), torch.from_numpy(x[1]).float()) for x in 
tmp - ] - return out - - -def mlp_stuff( - df_use: pl.LazyFrame, cols, output_dir: Path, pos_weight: float = 0.05, **kwargs -): - pprint("Shuffling") - df_use = df_use.sample(frac=1).reset_index(drop=True, inplace=False) - folds = to_torch_folds(shuffled_df=df_use, num_folds=5, cols=cols) - fold_model = MLPKFoldModel.from_folds(folds, device="cpu") - # Moving the data back and forth is slower ... - # fold_model.determine_device() - fold_model.train(pos_weight=pos_weight, **kwargs) - fold_model.score() - importances = fold_model.get_importances2(feat_names=cols.feature_columns) - pprint(importances) - fig = plot_importances(importances) - outfile = output_dir / "importances_nn.png" - fig.savefig(outfile) - pprint(f"Wrote {outfile}") - plt.close() - - ctargs = fold_model.concat_targets().numpy().flatten() - cscores = fold_model.concat_scores().numpy().flatten() - df_use["rescore_score"] = cscores - df_use["qvalue"] = mokapot.qvalues.qvalues_from_scores(cscores, ctargs == 1) - df_use = df_use.sort_values("rescore_score", ascending=False) - outfile = output_dir / "rescored_values_nn.parquet" - df_use.to_parquet(outfile, index=False) - pprint(f"Wrote {outfile}") - order = np.argsort(-cscores) - ctargs = ctargs[order] - cscores = cscores[order] - qvals = mokapot.qvalues.qvalues_from_scores(cscores, ctargs == 1) - for ct in [0.01, 0.05, 0.1, 0.5, 1.0]: - num_at_thresh = int(np.sum(ctargs[qvals < ct])) - ssc = cscores[qvals < ct] - if len(ssc) == 0: - pprint(f"No scores at {ct}") - continue - score_at_thresh = np.min(ssc) - pprint(f"Score at {ct}: {score_at_thresh}") - pprint(f"Number of targets at {ct}: {num_at_thresh}") - - target_preds = cscores[ctargs == 1] - decoy_preds = cscores[ctargs == 0] - histogram(target_preds, title="Target scores") - histogram(decoy_preds, title="Decoy scores") - - report = Report( - targets_at_1=np.sum(ctargs[qvals < 0.01]).item(), - targets_at_5=np.sum(ctargs[qvals < 0.05]).item(), - targets_at_10=np.sum(ctargs[qvals < 0.1]).item(), - ) - 
pprint(report) - outfile = output_dir / "report_nn.toml" - report.save_to_toml(outfile) - pprint(f"Wrote {outfile}") - return np.sum(ctargs[qvals < 0.01]).item() diff --git a/python/timsseek_rescore/timsseek_rescore/backends/mokapot.py b/python/timsseek_rescore/timsseek_rescore/backends/mokapot.py deleted file mode 100644 index 5a17065..0000000 --- a/python/timsseek_rescore/timsseek_rescore/backends/mokapot.py +++ /dev/null @@ -1,23 +0,0 @@ -import mokapot -import numpy as np -from rich.pretty import pprint - -from ..feateng import to_mokapot - - -def mokapot_stuff(data, outdir): - ds = to_mokapot(data) - pprint("Brewing Mokapot") - models, scores = mokapot.brew([ds]) - qvals = mokapot.qvalues.qvalues_from_scores(scores[0], ds.targets) - for ct in [0.01, 0.05, 0.1, 0.5, 1.0]: - num_at_thresh = np.sum(ds.targets[qvals < ct]) - ssc = scores[0][qvals < ct] - if len(ssc) == 0: - pprint(f"No scores at {ct}") - continue - score_at_thresh = np.min(ssc) - pprint( - f"Mokapot Number of targets at {ct}: {num_at_thresh};" - f" Score: {score_at_thresh}" - ) diff --git a/python/timsseek_rescore/timsseek_rescore/backends/xgb.py b/python/timsseek_rescore/timsseek_rescore/backends/xgb.py deleted file mode 100644 index f55dccc..0000000 --- a/python/timsseek_rescore/timsseek_rescore/backends/xgb.py +++ /dev/null @@ -1,401 +0,0 @@ -from dataclasses import dataclass -from pathlib import Path - -import matplotlib.pyplot as plt -import mokapot -import numpy as np -import polars as pl -import xgboost as xgb -from rich.pretty import pprint -from uniplot import histogram - -from ..datamodels import Report -from ..feateng import to_mokapot_df -from ..folding import to_folds_xgb -from ..plotting import plot_importances - - -@dataclass -class KFoldModel: - """ - The idea here is to have N folds that will be used in this way: - 1. Train on 1 fold. - 2. Use 1 fold for early stopping. - 3. For prediction, use all other folds (models where the fold was not used for training or early stopping). 
- """ - - folds: list[xgb.DMatrix] - models: list[xgb.Booster | None] - scores: list[np.ndarray | None] - - @staticmethod - def from_folds(folds: list[xgb.DMatrix]): - return KFoldModel(folds, [None] * len(folds), [None] * len(folds)) - - def train(self): - for i in range(len(self.folds)): - pprint(f"Training model {i}/{len(self.folds)}") - model = self._train_fold(i) - self.models[i] = model - - def _train_fold(self, fold_index: int) -> xgb.Booster: - train = self.folds[fold_index] - early_stop = self.folds[(fold_index + 1) % len(self.folds)] - - model = self._train_model(train, early_stop) - return model - - def _train_model(self, train: xgb.DMatrix, early_stop: xgb.DMatrix) -> xgb.Booster: - model = xgb.train( - {"objective": "binary:logistic", "scale_pos_weight": 0.5}, - # {"objective": "binary:logistic"}, - train, - num_boost_round=500, - evals=[ - (early_stop, "validation"), - ], - early_stopping_rounds=5, - verbose_eval=1, - ) - return model - - def score(self): - for i in range(len(self.folds)): - pprint(f"Scoring fold {i}/{len(self.folds)}") - scores = [] - for j, model in enumerate(self.models): - if j == i: - continue - if j == (i + 1) % len(self.folds): - continue - scores.append(model.predict(self.folds[i])) - self.scores[i] = np.mean(scores, axis=0) - - def get_importances(self): - imps = [model.get_score(importance_type="gain") for model in self.models] - imps_order = sorted(imps[0].items(), key=lambda x: x[1], reverse=True) - out = {k[0]: [w.get(k[0], 0) for w in imps] for k in imps_order} - return out - - def concat_scores(self): - if self.scores[0] is None: - raise ValueError("Scores not computed") - return np.concatenate(self.scores) - - def concat_targets(self): - targets = [x.get_label() for x in self.folds] - return np.concatenate(targets) - - -def tdc(scores: np.ndarray[float], target: np.ndarray[bool], desc: bool = True): - """Estimate q-values using target decoy competition. 
- - Estimates q-values using the simple target decoy competition method. - For set of target and decoy PSMs meeting a specified score threshold, - the false discovery rate (FDR) is estimated as: - - ...math: - FDR = \frac{Decoys + 1}{Targets} - - More formally, let the scores of target and decoy PSMs be indicated as - :math:`f_1, f_2, ..., f_{m_f}` and :math:`d_1, d_2, ..., d_{m_d}`, - respectively. For a score threshold :math:`t`, the false discovery - rate is estimated as: - - ...math: - E\\{FDR(t)\\} = \frac{|\\{d_i > t; i=1, ..., m_d\\}| + 1} - {\\{|f_i > t; i=1, ..., m_f|\\}} - - The reported q-value for each PSM is the minimum FDR at which that - PSM would be accepted. - - With one exception, the lowest score will always have a q-value of 1.0. - - Parameters - ---------- - scores : numpy.ndarray of float - A 1D array containing the score to rank by - target : numpy.ndarray of bool - A 1D array indicating if the entry is from a target or decoy - hit. This should be boolean, where `True` indicates a target - and `False` indicates a decoy. `target[i]` is the label for - `metric[i]`; thus `target` and `metric` should be of - equal length. - desc : bool - Are higher scores better? `True` indicates that they are, - `False` indicates that they are not. - - Returns - ------- - numpy.ndarray - A 1D array with the estimated q-value for each entry. The - array is the same length as the `scores` and `target` arrays. - """ - # Since numpy 2.x relying in attribute errors is not viable here - # https://numpy.org/neps/nep-0050-scalar-promotion.html#impact-on-can-cast - # So I am manually checking the constraints. 
- if ( - np.issubdtype(target.dtype, np.integer) - and target.max() <= 1 - and target.min() >= 0 - ): - target = target.astype(bool) - - if np.issubdtype(target.dtype, np.floating): - like_one = target == np.ones_like(target) - like_zero = target == np.zeros_like(target) - if np.all(like_one | like_zero): - target = target.astype(bool) - - if not np.issubdtype(target.dtype, bool): - err = ValueError( - f"'target' should be boolean. passed type: {target.dtype}" - f" with value: {target}" - ) - raise err - - if scores.shape[0] != target.shape[0]: - raise ValueError("'scores' and 'target' must be the same length") - - # Unsigned integers can cause weird things to happen. - # Convert all scores to floats to for safety. - scores = scores.astype(np.float32) - - # Sort and estimate FDR - # Sort order is first by score and then ties are - # sorted by target - if desc: - srt_idx = np.lexsort((target, -scores)) - else: - srt_idx = np.lexsort((target, scores)) - - scores = scores[srt_idx] - target = target[srt_idx] - - cum_targets = target.cumsum() - cum_decoys = (~target).cumsum() - - # Handles zeros in denominator - fdr = np.divide( - (cum_decoys + 1), - cum_targets, - out=np.ones_like(cum_targets, dtype=np.float32), - where=(cum_targets != 0), - ) - # Clamp the FDR to 1.0 - fdr = np.minimum(fdr, 1.0) - - # Sort by scores and resolve ties with fdr - # This implementation is 1.5x slower with small data - # from 0.05 to 0.07 miliseconds. - # But up to 100x faster with large data - # and does not need compilation (or numba as a dependency) - # For the former implementation check the git history - if desc: - sorting = np.lexsort((fdr, scores)) - else: - sorting = np.lexsort((fdr, -scores)) - tmp = np.minimum.accumulate(fdr[sorting]) - # Set the FDR to 1 for the lowest score - # This prevent prevents a bug where features like the charge - # would seem to be very good, because ... 
since they tie a lot - # of PSMs, and we report the 'best' FDR for all ties, it would - # artifially yield very low q-values. - tmp[tmp == tmp[0]] = 1.0 - np_qval = np.flip(tmp)[np.argsort(srt_idx)] - - return np_qval - - -def xgboost_stuff(shuffled_df: pl.DataFrame, cols: list[str], output_dir: Path): - folds = to_folds_xgb(shuffled_df=shuffled_df, num_folds=5, cols=cols) - fold_model = KFoldModel.from_folds(folds) - fold_model.train() - fold_model.score() - importances = fold_model.get_importances() - pprint(importances) - fig = plot_importances(importances) - outfile = output_dir / "importances.png" - fig.savefig(outfile) - pprint(f"Wrote {outfile}") - plt.close() - - ctargs = fold_model.concat_targets() - cscores = fold_model.concat_scores() - shuffled_df["rescore_score"] = cscores - shuffled_df["qvalue"] = mokapot.qvalues.qvalues_from_scores(cscores, ctargs == 1) - shuffled_df = shuffled_df.sort_values("rescore_score", ascending=False) - outfile = output_dir / "rescored_values.parquet" - shuffled_df.to_parquet(outfile, index=False) - - one_pct_df = shuffled_df[shuffled_df["qvalue"] < 0.01] - if len(one_pct_df) > 0: - - def plot_hexbin( - target_df, - decoy_df, - x_col_label, - y_col_label, - outfile, - title, - add_one_to_one: bool = False, - ): - fig, ax = plt.subplots(1, 2, figsize=(10, 5)) - x_col, xlabel = x_col_label - y_col, ylabel = y_col_label - for i, (df, sub_title) in enumerate( - zip([target_df, decoy_df], ["Targets", "Decoys"]) - ): - ax[i].hexbin( - df[x_col], - df[y_col], - gridsize=50, - cmap="viridis", - bins="log", - ) - ax[i].set_title(f"{title} ({sub_title})") - ax[i].set_xlabel(xlabel) - ax[i].set_ylabel(ylabel) - if add_one_to_one: - min_val = min( - df[x_col].min(), - df[y_col].min(), - ) - max_val = max( - df[x_col].max(), - df[y_col].max(), - ) - ax[i].plot([min_val, max_val], [min_val, max_val], "r--", lw=1) - fig.tight_layout() - fig.savefig(outfile) - plt.close() - pprint(f"Wrote {outfile}") - - target_df = 
one_pct_df[one_pct_df["is_target"]] - decoy_df = one_pct_df[~one_pct_df["is_target"]] - - plot_hexbin( - target_df, - decoy_df, - ("ms2_mz_error_0", "Mass error (m/z)"), - ("obs_rt_seconds", "Observed RT (s)"), - output_dir / "mass_error_rt_1pct.png", - "1% FDR", - ) - - plot_hexbin( - target_df, - decoy_df, - ("ms2_mz_error_0", "Mass error (m/z)"), - ("precursor_mz", "Precursor m/z"), - output_dir / "mass_error_mz_1pct.png", - "1% FDR", - ) - - plot_hexbin( - target_df, - decoy_df, - ("obs_mobility", "Observed Mobility"), - ("obs_rt_seconds", "Observed RT (s)"), - output_dir / "mobility_rt_1pct.png", - "1% FDR", - ) - - # For mobility error, subtract precursor_mobility_query from obs_mobility - one_pct_df = one_pct_df.copy() - one_pct_df["mobility_error"] = ( - one_pct_df["obs_mobility"] - one_pct_df["precursor_mobility_query"] - ) - target_df = one_pct_df[one_pct_df["is_target"]] - decoy_df = one_pct_df[~one_pct_df["is_target"]] - plot_hexbin( - target_df, - decoy_df, - ("mobility_error", "Observed Mobility Error"), - ("obs_rt_seconds", "Observed RT (s)"), - output_dir / "mobility_error_rt_1pct.png", - "1% FDR", - ) - - plot_hexbin( - target_df, - decoy_df, - ("precursor_mz", "Precursor m/z"), - ("obs_mobility", "Observed Mobility"), - output_dir / "mz_mobility_1pct.png", - "1% FDR", - ) - - plot_hexbin( - target_df, - decoy_df, - ("precursor_rt_query_seconds", "Predicted RT (s)"), - ("obs_rt_seconds", "Observed RT (s)"), - output_dir / "predicted_rt_obs_rt_1pct.png", - "1% FDR", - add_one_to_one=True, - ) - else: - pprint("No values at 1% FDR") - - pprint(f"Wrote {outfile}") - order = np.argsort(-cscores) - ctargs = ctargs[order] - cscores = cscores[order] - cumtargs = np.cumsum(ctargs) - - target_preds = cscores[ctargs == 1] - decoy_preds = cscores[ctargs == 0] - - histogram(target_preds, title="Target scores") - histogram(decoy_preds, title="Decoy scores") - - qvals = mokapot.qvalues.qvalues_from_scores(cscores, ctargs == 1) - for ct in [0.01, 0.05, 0.1, 0.5, 
1.0]: - num_at_thresh = int(np.sum(ctargs[qvals < ct])) - ssc = cscores[qvals < ct] - if len(ssc) == 0: - pprint(f"No scores at {ct}") - continue - score_at_thresh = np.min(ssc) - pprint(f"Score at {ct}: {score_at_thresh}") - pprint(f"Number of targets at {ct}: {num_at_thresh}") - - report = Report( - targets_at_1=np.sum(ctargs[qvals < 0.01]).item(), - targets_at_5=np.sum(ctargs[qvals < 0.05]).item(), - targets_at_10=np.sum(ctargs[qvals < 0.1]).item(), - ) - pprint(report) - outfile = output_dir / "report.toml" - report.save_to_toml(outfile) - pprint(f"Wrote {outfile}") - - # plt.plot(qvals, cumtargs, label="All Scores") - - mask = qvals < 0.1 - plt.plot(qvals[mask], cumtargs[mask], label="QValues < 0.1") - plt.legend(loc="upper right") - plt.xlim(0, 0.1) - plt.title("Cumulative number of accepted peptides.") - outfile = output_dir / "plot_qvalues.png" - plt.savefig(outfile) - pprint(f"Wrote {outfile}") - plt.close() - - fig, ax = plt.subplots(1, 2, figsize=(10, 4)) - ax[0].hist(target_preds, alpha=0.5, bins=100, label="Targets") - ax[0].hist(decoy_preds, alpha=0.5, bins=100, label="Decoys") - # ax[0].axvline(x=score_at_onepct, color="k", linestyle="--", alpha=0.5) - ax[0].legend() - - ax[1].hist(target_preds, alpha=0.5, bins=100, label="Targets") - ax[1].hist(decoy_preds, alpha=0.5, bins=100, label="Decoys") - # ax[1].axvline(x=score_at_onepct, color="k", linestyle="--", alpha=0.5) - ax[1].set_yscale("log") - ax[1].legend() - plt.title("Histogram of 'rescoring' score.") - outfile = output_dir / "plot_hist.png" - plt.savefig(outfile) - pprint(f"Wrote {outfile}") - plt.close() diff --git a/python/timsseek_rescore/timsseek_rescore/cli.py b/python/timsseek_rescore/timsseek_rescore/cli.py deleted file mode 100644 index f7712cf..0000000 --- a/python/timsseek_rescore/timsseek_rescore/cli.py +++ /dev/null @@ -1,88 +0,0 @@ -import argparse -import logging -from pathlib import Path - -import polars as pl -from rich.pretty import pprint - -from .backends.mlp import mlp_stuff 
-from .backends.mokapot import mokapot_stuff -from .backends.xgb import xgboost_stuff -from .feateng import read_files, to_mokapot_df -from .plotting import main_score_hist, plot_scores_hist - - -# TODO: Rename -def main(args): - paths = [Path(p) for p in args.results_dir] - for p in paths: - if not p.exists(): - raise FileNotFoundError(f"Path {p} does not exist") - - outdir = Path(args.output_dir) - if not outdir.exists(): - outdir.mkdir(parents=True) - - xgb_out = outdir / "xgboost" - if not xgb_out.exists(): - xgb_out.mkdir(parents=True) - mlp_out = outdir / "mlp" - if not mlp_out.exists(): - mlp_out.mkdir(parents=True) - xgboost_part(paths, xgb_out) - # mlp_part(paths, mlp_out) - - -def xgboost_part(paths, outdir): - data = read_files(paths) - data = data.filter(pl.col("obs_mobility").is_not_nan()) - data, cols = to_mokapot_df(data, make_nonmissing=False, make_monotonic=False) - pprint("Shuffling") - data = data.sample(frac=1, random_state=42).reset_index(drop=True, inplace=False) - # This generates a pandas df ... 
- - main_score_hist(pl.from_pandas(data).lazy(), outdir) - plot_scores_hist(pl.from_pandas(data), cols.feature_columns, outdir) - # mokapot_stuff(data, outdir) - xgboost_stuff(data, cols, outdir) - - -def mlp_part(paths, outdir): - data = read_files(paths) - data, cols = to_mokapot_df(data, make_nonmissing=True, make_monotonic=True) - pprint("Shuffling") - data = data.sample(frac=1, random_state=42).reset_index(drop=True, inplace=False) - data = data.filter(pl.col("obs_mobility").is_not_nan()) - score = mlp_stuff(data, cols=cols, output_dir=outdir) - - -def build_parser(): - parser = argparse.ArgumentParser() - parser.add_argument( - "--results_dir", - type=str, - nargs="+", - help="Path to the directories containing the results.csv files", - ) - parser.add_argument( - "-o", "--output_dir", type=str, default=".", help="Output directory" - ) - return parser - - -def cli_main(): - logging.basicConfig( - level=logging.INFO, - format="%(levelname)s: %(message)s", - ) - - parser = build_parser() - args, unkargs = parser.parse_known_args() - if unkargs: - raise ValueError(f"Unknown arguments: {unkargs}") - - main(args) - - -if __name__ == "__main__": - cli_main() diff --git a/python/timsseek_rescore/timsseek_rescore/datamodels.py b/python/timsseek_rescore/timsseek_rescore/datamodels.py deleted file mode 100644 index 600f369..0000000 --- a/python/timsseek_rescore/timsseek_rescore/datamodels.py +++ /dev/null @@ -1,16 +0,0 @@ -from dataclasses import dataclass -from pathlib import Path - - -@dataclass -class Report: - targets_at_1: int - targets_at_5: int - targets_at_10: int - - def save_to_toml(self, path: Path): - with open(path, "w") as f: - f.write("[report]\n") - f.write(f"targets_at_1 = {self.targets_at_1}\n") - f.write(f"targets_at_5 = {self.targets_at_5}\n") - f.write(f"targets_at_10 = {self.targets_at_10}\n") diff --git a/python/timsseek_rescore/timsseek_rescore/feateng.py b/python/timsseek_rescore/timsseek_rescore/feateng.py deleted file mode 100644 index 
04fae44..0000000 --- a/python/timsseek_rescore/timsseek_rescore/feateng.py +++ /dev/null @@ -1,355 +0,0 @@ -import math -from pathlib import Path - -import mokapot -import pandas as pd -import polars as pl -from mokapot.column_defs import ColumnGroups, OptionalColumns -from rich.pretty import pprint -from tqdm.auto import tqdm - - -def lazy_abs_and_maxfill(df: pl.LazyFrame, columns: list[str]) -> pl.LazyFrame: - exprs_first = [] - exprs_later = [] - for column in columns: - exprs_first.append(pl.col(column).abs()) - exprs_later.append(pl.col(column).fill_nan(pl.col(column).max())) - - return df.with_columns(exprs_first).with_columns(exprs_later) - - -def lazy_zero_fill(df: pl.LazyFrame, columns: list[str]) -> pl.LazyFrame: - exprs = [] - for column in columns: - exprs.append(pl.col(column).fill_nan(0)) - return df.with_columns(exprs) - - -def log_cols(df: pl.LazyFrame, columns: list[str]) -> pl.LazyFrame: - exprs = [] - for col in columns: - exprs.append(pl.col(col).fill_nan(0).log1p().fill_nan(0)) - out = df.with_columns(exprs) - return out - - -def add_id(df: pl.LazyFrame) -> pl.LazyFrame: - # Add an id column ... 
pretty simple incremental integer - df = df.with_row_index(name="id") - return df - - -def check_noninf(df: pl.DataFrame, columns: list[str]): - pprint("Checking for infinite values") - any_inf = False - df_inf = df.select(columns).filter(pl.any_horizontal(pl.all().is_infinite())) - if len(df_inf) > 0: - pprint(df_inf) - pprint(df_inf[0].to_dict(as_series=False)) - for col in columns: - if df_inf[col].is_infinite().any(): - ninf = df_inf[col].is_infinite().sum() - pprint(f"Column {col} has infinite ({ninf}/{len(df)}) values") - any_inf = True - if any_inf: - raise ValueError("Data contains infinite values") - - -def check_nonnan(df: pl.LazyFrame, columns: list[str]): - pprint("Checking for NaN values") - nan_cols = [] - df_nan = df.select(columns).filter(pl.any_horizontal(pl.all().is_nan())) - pprint(df_nan) - for col in columns: - if df_nan[col].is_nan().any(): - pprint(f"Column {col} has NaN values") - nan_cols.append(col) - if nan_cols: - raise ValueError(f"Data contains NaN values: {nan_cols}") - - -def check_nonexp(df: pl.DataFrame, columns: list[str]): - # checks that things are not exponential - # The heuristic here is that stuff that is log transformed - # should only span 3 orders of magnitude - - norders = {} - failing_cols = [] - df = df.select(columns) - for col in columns: - try: - mag_diff = df[col].abs().max() - df[col].abs().min() - nmags = math.log10(mag_diff) - except ValueError as e: - if "math domain error" in str(e): - raise ValueError(f"Column {col} has 0 variance values") - norders[col] = nmags - if nmags > 3: - failing_cols.append(col) - pprint(norders) - if failing_cols: - ranges = [ - f"{col}: min={df[col].min()}, max={df[col].max()} norders={norders[col]}" - for col in failing_cols - ] - pprint(ranges) - raise ValueError(f"Data contains exponential values: {failing_cols}") - - -def cast_f32(df: pl.LazyFrame, cols: list[str]) -> pl.LazyFrame: - exprs = [] - for col in cols: - exprs.append(pl.col(col).cast(pl.Float32)) - return 
df.with_columns(exprs) - - -def ohe_charges(df: pl.LazyFrame, charges: list[int]) -> tuple[pl.LazyFrame, list[str]]: - exprs = [] - colnames = [] - for charge in charges: - colname = f"charge_{charge}" - colnames.append(colname) - exprs.append((pl.col("precursor_charge") == charge).alias(colname)) - return df.with_columns(exprs), tuple(colnames) - - -def scale_columns( - df: pl.LazyFrame, cols: list[tuple[str, float]] -) -> tuple[pl.LazyFrame, tuple[str, ...]]: - exprs = [] - cols_out = [] - for col, factor in cols: - exprs.append(pl.col(col).cast(pl.Float32) / factor) - cols_out.append(col) - return df.with_columns(exprs), tuple(cols_out) - - -def td_compete(df_use: pl.DataFrame) -> pl.DataFrame: - pprint("Stripping sequences") - stripped_seqs = ( - df_use["sequence"].str.replace_all("\/\d+", "").str.replace_all("\[.*?\]", "") - ) - mods = [ - tuple(x) - for x in df_use["sequence"] - .str.replace_all("\/\d+", "") - .str.extract_all("\[.*?\]") - .list.sort() - .to_list() - ] - df_use = df_use.with_columns( - td_id=pl.Series( - derive_td_pair( - stripped_seqs.to_list(), - mods, - charges=df_use["precursor_charge"].to_list(), - ) - ) - ) - - return df_use - - -def to_mokapot_df( - df: pl.LazyFrame, - make_nonmissing: bool = True, - make_monotonic: bool = True, - scale_cols: bool = True, -) -> tuple[pd.DataFrame, ColumnGroups]: - pprint("Starting to_mokapot") - loggable_cols = ( - # Log - "npeaks", - "main_score", - "delta_next", - "delta_second_next", - "apex_lazyerscore", - "split_product_score", - "cosine_au_score", - "scribe_au_score", - "ms2_isotope_lazyerscore", - "ms2_lazyerscore", - "ms2_isotope_lazyerscore_ratio", - "ms1_summed_precursor_intensity", - "ms2_summed_transition_intensity", - # TODO: consider clamping instead of logging here. 
- "sq_delta_theo_rt", - "calibrated_sq_delta_theo_rt", - "delta_group", - "delta_group_ratio", - ) - imputable_cols = ( - # Abs impute - "ms2_mz_error_0", - "ms2_mz_error_1", - "ms2_mz_error_2", - "ms2_mz_error_3", - "ms2_mz_error_4", - "ms2_mz_error_5", - "ms2_mz_error_6", - "ms2_mobility_error_0", - "ms2_mobility_error_1", - "ms2_mobility_error_2", - "ms2_mobility_error_3", - "ms2_mobility_error_4", - "ms2_mobility_error_5", - "ms2_mobility_error_6", - "ms1_mz_error_0", - "ms1_mz_error_1", - "ms1_mz_error_2", - "ms1_mobility_error_0", - "ms1_mobility_error_1", - "ms1_mobility_error_2", - "sq_delta_ms1_ms2_mobility", - "delta_ms1_ms2_mobility", - ) - scaling_cols = ( - ("precursor_rt_query_seconds", 60), - ("obs_rt_seconds", 60), - ("delta_theo_rt", 60), - ) - # zero_imputable_cols = ("ms1_ms2_correlation",) - zero_imputable_cols = () - generated_cols = [] - - if scale_cols: - df_use = log_cols(df, loggable_cols) - if make_monotonic: - df_use = lazy_abs_and_maxfill(df_use, imputable_cols) - if make_nonmissing: - df_use = lazy_zero_fill(df_use, zero_imputable_cols) - - df_use, scaling_cols = scale_columns(df_use, scaling_cols) - pprint("Collecting") - df_use, ohe_cols = ohe_charges(df_use, charges=[2, 3, 4]) - generated_cols += ohe_cols - df_use = add_id(df_use).collect(streaming=True) - df_use = td_compete(df_use) - - feat_cols = ( - ( - "precursor_charge", - "precursor_mz", - "precursor_mobility_query", - "obs_mobility", - "peak_shape", - "ratio_cv", - "centered_apex", - "precursor_coelution", - "fragment_coverage", - "precursor_apex_match", - "xic_quality", - "fragment_apex_agreement", - "isotope_correlation", - "gaussian_correlation", - "per_frag_gaussian_corr", - "coelution_gradient_cosine", - "coelution_gradient_scribe", - "cosine_weighted_coelution", - "cosine_gradient_consistency", - "scribe_weighted_coelution", - "scribe_gradient_consistency", - "nqueries", - "lazyscore_z", - "lazyscore_vs_baseline", - # Intensity ratios - "ms1_inten_ratio_2", - 
"ms2_inten_ratio_4", - "ms2_inten_ratio_6", - "ms1_inten_ratio_1", - "ms2_inten_ratio_2", - "ms2_inten_ratio_1", - "ms2_inten_ratio_3", - "ms1_inten_ratio_0", - "ms2_inten_ratio_5", - "ms2_inten_ratio_0", - # Cycle counts - "raising_cycles", - "falling_cycles", - # delta_group and delta_group_ratio are in loggable_cols - ) - + loggable_cols - + imputable_cols - + zero_imputable_cols - + tuple(generated_cols) - + scaling_cols - ) - - # This requires all columns to exist, so we do it after all preprocessing - df_use = cast_f32(df_use, feat_cols) - check_noninf(df_use, feat_cols) - if make_nonmissing: - check_nonnan(df_use, feat_cols) - if scale_cols: - check_nonexp(df_use, feat_cols) - - pprint("Converting to pandas") - df_use = df_use.to_pandas() - cols = ColumnGroups( - columns=df_use.columns, - target_column="is_target", - peptide_column="sequence", - # spectrum_columns=("id", "td_id"), - spectrum_columns=("id",), - feature_columns=feat_cols, - extra_confidence_level_columns=(), - optional_columns=OptionalColumns( - id=None, - filename=None, - scan=None, - calcmass="precursor_mz", - # charge="precursor_charge", - charge=None, - expmass=None, - rt="obs_rt_seconds", - protein=None, - ), - ) - nonfeat_cols = set(cols.columns) - set(cols.feature_columns) - pprint(f"Non-feature columns: {nonfeat_cols}") - return df_use, cols - - -def read_files(results_dirs: list[Path]) -> pl.LazyFrame: - files = set() - for results_dir in results_dirs: - files.update(results_dir.glob("results.parquet")) - - files = list(files) - pprint(f"Scanning {len(files)} files -> {files}") - data = pl.scan_parquet(files) - pprint("Done scanning") - return data - - -def derive_td_pair( - sequences: list[str], mods: list[tuple[str, ...]], charges: list[int] -) -> list[int]: - pprint("Deriving TD pairs") - td_pairs = {} - max_id = 0 - - out = [] - for seq, mod, charge in tqdm( - zip(sequences, mods, charges, strict=True), total=len(sequences) - ): - if (seq, mod, charge) in td_pairs: - 
out.append(td_pairs[(seq, mod, charge)]) - continue - - dec_seq = seq[0] + seq[1:-1][::-1] + seq[-1] - td_pairs[(seq, mod, charge)] = td_pairs[(dec_seq, mod, charge)] = max_id - out.append(max_id) - max_id += 1 - - pprint(f"Found {max_id} TD pairs in {len(sequences)} sequences/charge pairs") - return out - - -def to_mokapot(df: pl.LazyFrame) -> mokapot.LinearPsmDataset: - df_use, cols = to_mokapot_df(df) - return mokapot.LinearPsmDataset( - df_use, - column_groups=cols, - ) diff --git a/python/timsseek_rescore/timsseek_rescore/folding.py b/python/timsseek_rescore/timsseek_rescore/folding.py deleted file mode 100644 index 0fd9795..0000000 --- a/python/timsseek_rescore/timsseek_rescore/folding.py +++ /dev/null @@ -1,40 +0,0 @@ -import numpy as np -import pandas as pd -import xgboost as xgb -from mokapot.column_defs import ColumnGroups - - -def to_folds_xgb( - *, - shuffled_df: pd.DataFrame, - cols: ColumnGroups, - num_folds: int, -) -> list[xgb.DMatrix]: - tmp = to_folds(shuffled_df=shuffled_df, cols=cols, num_folds=num_folds) - out = [ - xgb.DMatrix( - x[0], - label=x[1], - feature_names=cols.feature_columns, - ) - for x in tmp - ] - return out - - -def to_folds( - *, - shuffled_df: pd.DataFrame, - cols: ColumnGroups, - num_folds: int, -) -> list[tuple[np.ndarray, np.ndarray]]: - out = np.array_split(shuffled_df, num_folds) - - out = [ - ( - x.loc[:, cols.feature_columns].to_numpy(), - np.where(x.loc[:, cols.target_column].to_numpy(), 1, 0), - ) - for x in out - ] - return out diff --git a/python/timsseek_rescore/timsseek_rescore/plotting.py b/python/timsseek_rescore/timsseek_rescore/plotting.py deleted file mode 100644 index 2cde3ed..0000000 --- a/python/timsseek_rescore/timsseek_rescore/plotting.py +++ /dev/null @@ -1,209 +0,0 @@ -from pathlib import Path - -import matplotlib.pyplot as plt -import numpy as np -import polars as pl -from rich.pretty import pprint - - -def plot_importances(importances: dict[str, list[float]]): - # Lollipop plot showing the importance 
of each feature - fig, ax = plt.subplots(figsize=(6, 8)) - rev_imps = [(k, v) for k, v in importances.items()] - rev_imps = rev_imps[::-1] - for feature, importance in rev_imps: - expanded_feat = [feature] * len(importance) - # Stem of the lollipop - ax.hlines(expanded_feat, 0, importance) - - # Dot at the top of the lollipop - ax.scatter(importance, expanded_feat, c="k", marker="o") - - # Rotate the labels - ax.tick_params(axis="x", rotation=45) - - ax.set_ylabel("Importance ('gain' as defined by xgboost)") - ax.set_xlabel("Feature") - # square root scale the x axis - ax.set_xscale("log") - fig.tight_layout() - return fig - - -def main_score_hist(df: pl.LazyFrame, output_dir: Path): - pprint("Plotting main scores") - scores_df = df.select(["is_target", "main_score", "precursor_charge"]).collect() - target_scores = scores_df.filter(pl.col("is_target") == "true")[ - "main_score" - ].to_numpy() - target_charges = scores_df.filter(pl.col("is_target") == "true")["precursor_charge"] - decoy_scores = scores_df.filter(pl.col("is_target") == "false")[ - "main_score" - ].to_numpy() - decoy_charges = scores_df.filter(pl.col("is_target") == "false")["precursor_charge"] - - pprint("Number of targets: {}".format(len(target_scores))) - pprint("Number of decoys: {}".format(len(decoy_scores))) - if (len(target_scores) + len(decoy_scores)) != len(scores_df): - raise ValueError("Error filtering targets and decoys") - - scores = np.log1p(np.concatenate((target_scores, decoy_scores))) - bins = np.histogram_bin_edges(scores[~np.isnan(scores)], bins=50) - - fig, ax = plt.subplots(nrows=2, ncols=1, figsize=(10, 8), sharex=True) - - ax[0].hist(scores, bins, alpha=0.5, label="All Scores") - ax[0].hist(target_scores, bins, alpha=0.5, label="Target Scores") - ax[0].hist(decoy_scores, bins, alpha=0.5, label="Decoy Scores") - ax[0].set_xlabel("Main Score (log1p)") - ax[0].set_ylabel("Count") - ax[0].legend(loc="upper right") - ax[0].set_yscale("log") - - ax[1].hist(scores, bins, alpha=0.5, 
label="All Scores") - ax[1].hist(target_scores, bins, alpha=0.5, label="Target Scores") - ax[1].hist(decoy_scores, bins, alpha=0.5, label="Decoy Scores") - ax[1].set_xlabel("Main Score (log1p)") - ax[1].set_ylabel("Count") - ax[1].legend(loc="upper right") - - target_file = output_dir / "plot_mainscores.png" - pprint(f"Saving plot to {target_file}") - plt.title("Histogram of 'main_score' scores.") - plt.savefig(target_file) - plt.close() - - ## Re-make the same plot but filtering for charge states - uniq_charges = np.unique(np.concatenate((target_charges, decoy_charges))) - pprint(uniq_charges) - - fig, ax = plt.subplots( - nrows=len(uniq_charges), ncols=1, figsize=(10, 8), sharex=True - ) - - for i, charge in enumerate(uniq_charges): - target_scores_loc = target_scores[target_charges == charge] - decoy_scores_loc = decoy_scores[decoy_charges == charge] - loc_scores = np.log1p(np.concatenate((target_scores_loc, decoy_scores_loc))) - - ax[i].hist(loc_scores, bins, alpha=0.5, label="All Scores") - ax[i].hist(np.log1p(target_scores_loc), bins, alpha=0.5, label="Target Scores") - ax[i].hist(np.log1p(decoy_scores_loc), bins, alpha=0.5, label="Decoy Scores") - ax[i].set_xlabel(f"Main Score (log1p); charge={charge}") - ax[i].set_ylabel("Count") - ax[i].legend(loc="upper right") - ax[i].set_yscale("log") - - target_file = output_dir / "plot_mainscores_by_charge.png" - pprint(f"Saving plot to {target_file}") - # plt.title("Histogram of 'main_score' scores.") - plt.savefig(target_file) - plt.close() - - -def main_score_hist(df: pl.LazyFrame, output_dir: Path): - pprint("Plotting main scores") - scores_df = df.select(["is_target", "main_score", "precursor_charge"]).collect() - target_scores = scores_df.filter(pl.col("is_target") == "true")[ - "main_score" - ].to_numpy() - target_charges = scores_df.filter(pl.col("is_target") == "true")["precursor_charge"] - decoy_scores = scores_df.filter(pl.col("is_target") == "false")[ - "main_score" - ].to_numpy() - decoy_charges = 
scores_df.filter(pl.col("is_target") == "false")["precursor_charge"] - - pprint("Number of targets: {}".format(len(target_scores))) - pprint("Number of decoys: {}".format(len(decoy_scores))) - if (len(target_scores) + len(decoy_scores)) != len(scores_df): - raise ValueError("Error filtering targets and decoys") - - scores = np.log1p(np.concatenate((target_scores, decoy_scores))) - bins = np.histogram_bin_edges(scores[~np.isnan(scores)], bins=50) - - fig, ax = plt.subplots(nrows=2, ncols=1, figsize=(10, 8), sharex=True) - - ax[0].hist(scores, bins, alpha=0.5, label="All Scores") - ax[0].hist(np.log1p(target_scores), bins, alpha=0.5, label="Target Scores") - ax[0].hist(np.log1p(decoy_scores), bins, alpha=0.5, label="Decoy Scores") - ax[0].set_xlabel("Main Score (log1p)") - ax[0].set_ylabel("Count") - ax[0].legend(loc="upper right") - ax[0].set_yscale("log") - - ax[1].hist(scores, bins, alpha=0.5, label="All Scores") - ax[1].hist(np.log1p(target_scores), bins, alpha=0.5, label="Target Scores") - ax[1].hist(np.log1p(decoy_scores), bins, alpha=0.5, label="Decoy Scores") - ax[1].set_xlabel("Main Score (log1p)") - ax[1].set_ylabel("Count") - ax[1].legend(loc="upper right") - - target_file = output_dir / "plot_mainscores.png" - pprint(f"Saving plot to {target_file}") - plt.title("Histogram of 'main_score' scores.") - plt.savefig(target_file) - plt.close() - - ## Re-make the same plot but filtering for charge states - uniq_charges = np.unique(np.concatenate((target_charges, decoy_charges))) - pprint(uniq_charges) - - fig, ax = plt.subplots( - nrows=len(uniq_charges), ncols=1, figsize=(10, 8), sharex=True - ) - - for i, charge in enumerate(uniq_charges): - target_scores_loc = target_scores[target_charges == charge] - decoy_scores_loc = decoy_scores[decoy_charges == charge] - loc_scores = np.log1p(np.concatenate((target_scores_loc, decoy_scores_loc))) - - ax[i].hist(loc_scores, bins, alpha=0.5, label="All Scores") - ax[i].hist(np.log1p(target_scores_loc), bins, alpha=0.5, 
label="Target Scores") - ax[i].hist(np.log1p(decoy_scores_loc), bins, alpha=0.5, label="Decoy Scores") - ax[i].set_xlabel(f"Main Score (log1p); charge={charge}") - ax[i].set_ylabel("Count") - ax[i].legend(loc="upper right") - ax[i].set_yscale("log") - - target_file = output_dir / "plot_mainscores_by_charge.png" - pprint(f"Saving plot to {target_file}") - # plt.title("Histogram of 'main_score' scores.") - plt.savefig(target_file) - plt.close() - - -def plot_scores_hist(df: pl.LazyFrame, columns: list[str], output_dir: Path): - targets_df = df.filter(pl.col("is_target") == "true") - decoys_df = df.filter(pl.col("is_target") == "false") - - pprint(f"Plotting scores for columns: {columns}") - for score in columns: - pprint(f"Plotting histogram for score: {score}") - target_scores = targets_df[score].to_numpy() - decoy_scores = decoys_df[score].to_numpy() - - scores = np.concatenate((target_scores, decoy_scores)) - bins = np.histogram_bin_edges(scores[~np.isnan(scores)], bins=50) - - fig, ax = plt.subplots(nrows=2, ncols=1, figsize=(10, 8), sharex=True) - - ax[0].hist(scores, bins, alpha=0.5, label="All Scores") - ax[0].hist(target_scores, bins, alpha=0.5, label="Target Scores") - ax[0].hist(decoy_scores, bins, alpha=0.5, label="Decoy Scores") - ax[0].set_xlabel(f"Score: {score}") - ax[0].set_ylabel("Count") - ax[0].legend(loc="upper right") - ax[0].set_yscale("log") - - ax[1].hist(scores, bins, alpha=0.5, label="All Scores") - ax[1].hist(target_scores, bins, alpha=0.5, label="Target Scores") - ax[1].hist(decoy_scores, bins, alpha=0.5, label="Decoy Scores") - ax[1].set_xlabel(f"Score: {score}") - ax[1].set_ylabel("Count") - ax[1].legend(loc="upper right") - - target_file = output_dir / f"score_plot_{score}.png" - pprint(f"Saving plot to {target_file}") - plt.title(f"Histogram of '{score}' scores.") - plt.savefig(target_file) - plt.close() diff --git a/python/timsseek_rts_receiver/app.py b/python/timsseek_rts_receiver/app.py deleted file mode 100644 index 
d9b97c8..0000000 --- a/python/timsseek_rts_receiver/app.py +++ /dev/null @@ -1,81 +0,0 @@ -import time - -import streamlit as st -from speclib_builder.builder import DummyAnnotator, EntryBuilder -from speclib_builder.onxx_predictor import OnnxPeptideTransformerAnnotator -from timsseek_rts_receiver.io import query_server -from timsseek_rts_receiver.receiver import input_component, show_results - -st.set_page_config(layout="wide") - - -def main(): - st.title("TimsSeek RTs") - st.markdown("This is a demo of how to use the timsseek rts server.") - - host = st.text_input("Host", "localhost") - port = st.text_input("Port", "3724") - - entry_builder = EntryBuilder( - min_mz=100, - max_mz=2000, - max_ions_keep=10, - min_ion_mz=250, - max_ion_mz=2000, - min_ions=3, - ) - - annotator = DummyAnnotator() - ml_annotator = OnnxPeptideTransformerAnnotator.get_default() - peptide = input_component() - - if ml_annotator is not None: - st.info("Using ML annotator") - annotator = ml_annotator - else: - st.warning("Using dummy annotator") - - query_data_target = entry_builder.build_entry(annotator.model(peptide.target)) - query_data_decoy = entry_builder.build_entry(annotator.model(peptide.decoy)) - with st.expander("Query data - target"): - st.code(query_data_target.model_dump_json(indent=2), language="json") - with st.expander("Query data - decoy"): - st.code(query_data_decoy.model_dump_json(indent=2), language="json") - - stime = time.monotonic() - - try: - data_target = query_server(host, int(port), query_data_target.model_dump()) - data_decoy = query_server(host, int(port), query_data_decoy.model_dump()) - except Exception as e: - st.error(f"Query failed: {e}") - st.stop() - - if isinstance(data_target, str): - st.error(f"Query failed: {data_target}") - st.write(data_target) - - if isinstance(data_decoy, str): - st.error(f"Query failed: {data_decoy}") - st.write(data_decoy) - - etime = time.monotonic() - # st.write(f"Query took {etime - stime} seconds") - - cols = st.columns(2) - 
show_results( - data_target, - subtitle="Target results", - key_prefix="target_", - column=cols[0], - ) - show_results( - data_decoy, - subtitle="Decoy results", - key_prefix="decoy_", - column=cols[1], - ) - - -if __name__ == "__main__": - main() diff --git a/python/timsseek_rts_receiver/pyproject.toml b/python/timsseek_rts_receiver/pyproject.toml deleted file mode 100644 index 7163a87..0000000 --- a/python/timsseek_rts_receiver/pyproject.toml +++ /dev/null @@ -1,21 +0,0 @@ -[project] -name = "timsseek_rts_receiver" -version = "0.26.0" -requires-python = ">=3.11,<3.13" -description = "Add your description here" -dependencies = [ - "streamlit", - "pandas", - "numpy", - "matplotlib", - "requests", - "speclib_builder[ml]", - "pydantic>=2.0.0", -] - -[build-system] -requires = ["hatchling"] -build-backend = "hatchling.build" - -[tool.uv.sources] -speclib_builder = { workspace = true } diff --git a/python/timsseek_rts_receiver/timsseek_rts_receiver/__init__.py b/python/timsseek_rts_receiver/timsseek_rts_receiver/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/python/timsseek_rts_receiver/timsseek_rts_receiver/constants.py b/python/timsseek_rts_receiver/timsseek_rts_receiver/constants.py deleted file mode 100644 index 48cc43c..0000000 --- a/python/timsseek_rts_receiver/timsseek_rts_receiver/constants.py +++ /dev/null @@ -1,13 +0,0 @@ -DEFAULT_PORT = 3724 -BSA_FASTA_ENTRY = """>sp|P02769|ALBU_BOVIN Albumin OS=Bos taurus OX=9913 GN=ALB PE=1 SV=4 -MKWVTFISLLLLFSSAYSRGVFRRDTHKSEIAHRFKDLGEEHFKGLVLIAFSQYLQQCPF -DEHVKLVNELTEFAKTCVADESHAGCEKSLHTLFGDELCKVASLRETYGDMADCCEKQEP -ERNECFLSHKDDSPDLPKLKPDPNTLCDEFKADEKKFWGKYLYEIARRHPYFYAPELLYY -ANKYNGVFQECCQAEDKGACLLPKIETMREKVLASSARQRLRCASIQKFGERALKAWSVA -RLSQKFPKAEFVEVTKLVTDLTKVHKECCHGDLLECADDRADLAKYICDNQDTISSKLKE -CCDKPLLEKSHCIAEVEKDAIPENLPPLTADFAEDKDVCKNYQEAKDAFLGSFLYEYSRR -HPEYAVSVLLRLAKEYEATLEECCAKDDPHACYSTVFDKLKHLVDEPQNLIKQNCDQFEK -LGEYGFQNALIVRYTRKVPQVSTPTLVEVSRSLGKVGTRCCTKPESERMPCTEDYLSLIL 
-NRLCVLHEKTPVSEKVTKCCTESLVNRRPCFSALTPDETYVPKAFDEKLFTFHADICTLP -DTEKQIKKQTALVELLKHKPKATEEQLKTVMENFVAFVDKCCAADDKEACFAVEGPKLVV -STQTALA""" diff --git a/python/timsseek_rts_receiver/timsseek_rts_receiver/io.py b/python/timsseek_rts_receiver/timsseek_rts_receiver/io.py deleted file mode 100644 index fbd2fd4..0000000 --- a/python/timsseek_rts_receiver/timsseek_rts_receiver/io.py +++ /dev/null @@ -1,35 +0,0 @@ -import json -import socket - -from .constants import DEFAULT_PORT - - -def query_server(host="localhost", port: int = DEFAULT_PORT, query_data: dict = {}): - with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s: - s.connect((host, port)) - - # Send query - query = json.dumps(query_data).encode("utf-8") - s.sendall(query) - s.shutdown(socket.SHUT_WR) - - # Read response with timeout - s.settimeout(2.0) # 2 second timeout - chunks = [] - - try: - while True: - chunk = s.recv(2048) - if not chunk: - break - chunks.append(chunk) - except socket.timeout: - pass # It's okay if we timeout after receiving data - - # print(chunks) - response = b"".join(chunks).decode("utf-8") - - try: - return json.loads(response) - except json.JSONDecodeError: - return response diff --git a/python/timsseek_rts_receiver/timsseek_rts_receiver/models.py b/python/timsseek_rts_receiver/timsseek_rts_receiver/models.py deleted file mode 100644 index a87e88f..0000000 --- a/python/timsseek_rts_receiver/timsseek_rts_receiver/models.py +++ /dev/null @@ -1,248 +0,0 @@ -from typing import Any, Dict, List, Optional, Tuple, Union - -import numpy as np -import pandas as pd -from matplotlib import pyplot as plt -from pydantic import BaseModel, Field, ConfigDict -from speclib_builder.base import ElutionGroup - - -class ArrayResponse(BaseModel): - model_config = ConfigDict(extra="forbid") - - arr: List[List[float]] - rts_ms: List[int] - mz_order: Union[List[Tuple[int, float]], list[Tuple[str, float]]] - - def plot_transition(self, min_rt, max_rt): - # In the same figure/axes show every intensity 
across retention time - rt_use = np.array(self.rts_ms) - ranges = np.searchsorted(rt_use, [min_rt, max_rt]) - rt_plot = (rt_use[ranges[0] : ranges[1]] / 1000) / 60 - - fig, ax = plt.subplots() - for k, v in zip(self.mz_order, self.arr, strict=True): - ax.plot(rt_plot, v[ranges[0] : ranges[1]], label=k) - - ax.set_xlabel("Retention Time (min)") - ax.set_ylabel("Intensity") - ax.legend() - fig.tight_layout() - return fig - - -class Extractions(BaseModel): - model_config = ConfigDict(extra="forbid") - - eg: ElutionGroup - fragments: ArrayResponse - precursors: ArrayResponse - - def min_rt(self): - return min(self.fragments.rts_ms + self.precursors.rts_ms) - - def max_rt(self): - return max(self.fragments.rts_ms + self.precursors.rts_ms) - - -class MainScoreElements(BaseModel): - model_config = ConfigDict(extra="forbid") - - ms1_coelution_score: List[float] - ms1_cosine_ref_sim: List[float] - ms2_coelution_score: List[float] - ms2_cosine_ref_sim: List[float] - ms2_lazyscore: List[float] - # Should Nones be allowed?? - ms2_lazyscore_vs_baseline: List[float | None] - ms2_corr_v_gauss: List[float] - ms1_corr_v_gauss: List[float] - - ms2_lazyscore_vs_baseline_std: float - - def plot( - self, - min_rt_ms, - max_rt_ms, - rt_use: np.array, - vlines_ms: Optional[List[int]] = None, - ): - # TODO: fix the piping of the retention times ... 
- - # Make a plot grid, where each row is a different score element - # but all share the same retention time axis - ncol = 3 - nrow = 3 - fig, ax = plt.subplots(nrows=nrow, ncols=ncol, figsize=(10, 12)) - - ranges = np.searchsorted(rt_use, [min_rt_ms, max_rt_ms]) - rt_plot = (rt_use[ranges[0] : ranges[1]] / 1000) / 60 - - mask_label = np.zeros_like(rt_use, dtype=bool) - if vlines_ms is not None: - for vline in vlines_ms: - local_range = np.searchsorted(rt_use, [vline - 5_000, vline + 5_000]) - mask_label[local_range[0] : local_range[1]] = True - - score_name_pairs = [ - ("MS1 Coelution Score", self.ms1_coelution_score), - ("MS1 Cosine Ref Sim", self.ms1_cosine_ref_sim), - ("MS2 Coelution Score", self.ms2_coelution_score), - ("MS2 Cosine Ref Sim", self.ms2_cosine_ref_sim), - ("MS2 LazyScore", self.ms2_lazyscore), - ("MS2 LazyScore Baseline", self.ms2_lazyscore_vs_baseline), - ("MS2 Corr v Gauss", self.ms2_corr_v_gauss), - ("MS1 Corr v Gauss", self.ms1_corr_v_gauss), - ] - - for i, (name, score) in enumerate(score_name_pairs): - local_score = np.array(score).astype(float) - score_ir = local_score[mask_label] - score_or = local_score[~mask_label] - - # Signal to noise ratio would be (max(ir) - max(or))/std(or) - max_ir = np.nanmax(score_ir) if len(score_ir) > 0 else 0 - max_or = np.nanmax(score_or) if len(score_or) > 0 else 0 - std_or = np.nanstd(score_or) if len(score_or) > 0 else 1e-3 - snr = (max_ir - max_or) / std_or - - ax[i // ncol, i % ncol].plot(rt_plot, score[ranges[0] : ranges[1]]) - ax[i // ncol, i % ncol].set_title(name) - - ax[i // ncol, i % ncol].text( - 0.95, - 0.95, - f"SNR: {np.round(snr, 2)}", - transform=ax[i // ncol, i % ncol].transAxes, - ha="right", - va="top", - ) - - if vlines_ms is not None: - vlines_minutes = np.array(vlines_ms) / 1000 / 60 - for li in range(len(vlines_minutes)): - for r in range(nrow): - for c in range(ncol): - ax[r, c].axvline( - x=vlines_minutes[li], color="k", linestyle="--", alpha=0.5 - ) - - for i in range(nrow): - for 
j in range(ncol): - ax[i, j].set_xlabel("Retention Time (min)") - - fig.tight_layout() - return fig - - -class SearchResults(BaseModel): - model_config = ConfigDict(extra="forbid") - - is_target: bool - apex_lazyerscore: float - apex_norm_lazyerscore_vs_baseline: float - apex_lazyerscore_vs_baseline: float - ms2_lazyerscore: float - ms2_isotope_lazyerscore: float - ms2_isotope_lazyerscore_ratio: float - ms2_corr_v_gauss: float - ms1_corr_v_gauss: float - main_score: float - delta_next: float - delta_second_next: float - nqueries: int - falling_cycles: float - raising_cycles: float - ms1_coelution_score: float - ms1_cosine_ref_similarity: float - ms1_mobility_error_0: float - ms1_mobility_error_1: float - ms1_mobility_error_2: float - ms1_mz_error_0: float - ms1_mz_error_1: float - ms1_mz_error_2: float - ms1_summed_precursor_intensity: float - ms2_coelution_score: float - ms2_cosine_ref_similarity: float - ms2_mobility_error_0: None | float - ms2_mobility_error_1: None | float - ms2_mobility_error_2: None | float - ms2_mobility_error_3: None | float - ms2_mobility_error_4: None | float - ms2_mobility_error_5: None | float - ms2_mobility_error_6: None | float - ms2_mz_error_0: None | float - ms2_mz_error_1: None | float - ms2_mz_error_2: None | float - ms2_mz_error_3: None | float - ms2_mz_error_4: None | float - ms2_mz_error_5: None | float - ms2_mz_error_6: None | float - - ms1_inten_ratio_0: float - ms1_inten_ratio_1: float - ms1_inten_ratio_2: float - ms2_inten_ratio_0: float - ms2_inten_ratio_1: float - ms2_inten_ratio_2: float - ms2_inten_ratio_3: float - ms2_inten_ratio_4: float - ms2_inten_ratio_5: float - ms2_inten_ratio_6: float - - delta_ms1_ms2_mobility: None | float - sq_delta_ms1_ms2_mobility: None | float - - delta_theo_rt: float - sq_delta_theo_rt: float - - ms2_summed_transition_intensity: float - npeaks: int - obs_mobility: float - obs_rt_seconds: float - precursor_charge: int - precursor_mobility_query: float - precursor_mz: float - 
precursor_rt_query_seconds: float - sequence: str - - def as_table(self): - return pd.DataFrame({ - "key": self.model_dump().keys(), - "value": [str(x) for x in self.model_dump().values()], - }) - - -class ResponseData(BaseModel): - model_config = ConfigDict(extra="forbid") - - extractions: Extractions - main_score_elements: MainScoreElements - longitudinal_main_score: List[float] - search_results: SearchResults - - def plot_main_score(self, min_rt, max_rt): - fig, ax = plt.subplots() - peptide = self.search_results.sequence - charge = self.search_results.precursor_charge - - rts = np.array(self.extractions.precursors.rts_ms) - ranges = np.searchsorted(rts, [min_rt, max_rt]) - rt_plot = (rts[ranges[0] : ranges[1]] / 1000) / 60 - ax.plot(rt_plot, self.longitudinal_main_score[ranges[0] : ranges[1]]) - ax.set_xlabel("Retention Time (min)") - ax.set_ylabel("Main Score") - # Title - ax.set_title( - f"Main Score for {peptide} (z={charge})\n" - f"Best RT: {self.search_results.obs_rt_seconds / 60:.2f} min" - ) - fig.tight_layout() - return fig - - -class Response(BaseModel): - model_config = ConfigDict(extra="forbid") - - status: str - data: ResponseData diff --git a/python/timsseek_rts_receiver/timsseek_rts_receiver/receiver.py b/python/timsseek_rts_receiver/timsseek_rts_receiver/receiver.py deleted file mode 100644 index fb2f36d..0000000 --- a/python/timsseek_rts_receiver/timsseek_rts_receiver/receiver.py +++ /dev/null @@ -1,201 +0,0 @@ -import json -import time -import numpy as np - -import streamlit as st -from matplotlib import pyplot as plt -from pydantic import BaseModel -from pyteomics import parser as pyteomics_parser -from speclib_builder.base import PeptideElement - -from .constants import BSA_FASTA_ENTRY -from .models import ( - Extractions, - MainScoreElements, - Response, - ResponseData, - SearchResults, -) - - -def infinite_colour_loop(): - options = ["#ff0000", "#00ff00", "#0000ff"] - i = 0 - while True: - if i >= len(options): - i = 0 - yield options[i] - 
i += 1 - - -def digest(sequence: str) -> set[str]: - unique_peptides = set() - new_peptides = pyteomics_parser.cleave(sequence, "trypsin") - unique_peptides.update(new_peptides) - return unique_peptides - - -def digest_maybe_fasta(sequence: str) -> list[str]: - splits = sequence.split("\n") - splits = [x.strip() for x in splits] - if splits[0].startswith(">"): - splits = splits[1:] - digests = digest("".join(splits)) - return [x for x in digests if len(x) > 5 and len(x) < 30] - - -class TargetDecoyPair(BaseModel): - target: PeptideElement - decoy: PeptideElement - - -def input_component() -> TargetDecoyPair: - options = ["Sequence", "Examples", "Digest"] - option = st.selectbox("Input", options) - if option == "Sequence": - st.subheader("Sequence") - cols = st.columns(2) - target = PeptideElement( - peptide=cols[0].text_input("Peptide", "TLSDYNIQK"), - charge=cols[0].slider("Charge", 2, 5, 2), - nce=cols[0].slider("NCE", 10, 50, 35), - decoy=cols[0].checkbox("Decoy"), - ) - decoy = PeptideElement( - peptide=cols[1].text_input("Peptide", "TQINYDSLK"), - charge=cols[1].slider("Charge", 2, 5, 2, key="decoy_charge"), - nce=cols[1].slider("NCE", 10, 50, 35, key="decoy_nce"), - decoy=cols[1].checkbox("Decoy", key="decoy_decoy"), - ) - return TargetDecoyPair(target=target, decoy=decoy) - if option == "Digest": - st.subheader("Digest") - fasta = st.text_area("Fasta", BSA_FASTA_ENTRY) - elems = list(digest_maybe_fasta(fasta)) - target_seq = st.selectbox("Peptide", elems, key="target_peptide_dg") - decoy_seq = target_seq[0] + target_seq[::-1][1:-1] + target_seq[-1] - target_charge = st.slider("Charge", 2, 5, 2, key="target_charge_dg") - target_nce = st.slider("NCE", 10, 50, 35, key="target_nce_dg") - target = PeptideElement( - peptide=target_seq, - charge=target_charge, - nce=target_nce, - decoy=False, - ) - decoy = PeptideElement( - peptide=decoy_seq, - charge=target_charge, - nce=target_nce, - decoy=True, - ) - return TargetDecoyPair(target=target, decoy=decoy) - - if 
option == "Examples": - st.subheader("Examples") - examples = [ - ("TLSDYNIQK", 2, "TLSDYNIQK"), - ("ESTLHLVLR", 2, "ELVLHLTSR"), - ("DIKPENLLLGSAGELK", 3, "DLEGASGLLLNEPKIK"), - ("VTEGLTDVILYHQPDDK", 3, "KFEEFQTDMAAHEER"), - ("IAQDLEMYGVNYFSIK", 2, "STGNFLTLTQAIDK"), - ("TFEMSDFIVDTR", 2, "MTGLVDEAIDTK"), - ("VIVDFSSPNIAK", 2, "ELLGQGLLLR"), - ] - edict = {f"{k} - {w}": (k, v, w) for k, v, w in examples} - picked = st.selectbox("Example", list(edict.keys())) - - picked, charge, decoy = edict[picked] - target = PeptideElement( - peptide=picked, - charge=charge, - nce=35, - decoy=False, - ) - decoy = PeptideElement( - peptide=decoy, - charge=charge, - nce=35, - decoy=True, - ) - return TargetDecoyPair(target=target, decoy=decoy) - - raise NotImplementedError() - - -def show_results(data, column, subtitle=None, key_prefix=""): - if subtitle is not None: - column.subheader(subtitle) - - if isinstance(data, str): - column.error(data) - st.stop() - - if data["status"] != "success": - column.write(str(data)) - st.stop() - - # extractions = Extractions(**data["data"]["extractions"]) - # main_score_elements = MainScoreElements(**data["data"]["main_score_elements"]) - # search_results = SearchResults(**data["data"]["search_results"]) - # tmp = ResponseData(**data["data"]) - - res: Response = Response(**data) - main_score = res.data.search_results.main_score - column.subheader("Main Score: " + str(main_score)) - - column.dataframe(res.data.search_results.as_table(), use_container_width=True) - - best_rt = res.data.search_results.obs_rt_seconds / 60 - min_rt = (res.data.extractions.min_rt() / 1000) / 60 - max_rt = (res.data.extractions.max_rt() / 1000) / 60 - default_min = best_rt - 0.5 - default_max = best_rt + 0.5 - min_rt_show = column.slider( - "Minimum retention time (minutes)", - min_value=min_rt, - max_value=max_rt, - value=default_min, - key=key_prefix + "min_rt", - ) - max_rt_show = column.slider( - "Maximum retention time (minutes)", - max_value=max_rt, - 
min_value=min_rt, - value=default_max, - key=key_prefix + "max_rt", - ) - - fig = res.data.plot_main_score(min_rt_show * 1000 * 60, max_rt_show * 1000 * 60) - plt.axvline(x=best_rt, color="red", alpha=0.5) - column.pyplot(fig, clear_figure=True, use_container_width=True) - - fig = res.data.main_score_elements.plot( - min_rt_show * 1000 * 60, - max_rt_show * 1000 * 60, - rt_use=np.array(res.data.extractions.precursors.rts_ms), - vlines_ms=[best_rt * 1000 * 60], - ) - column.pyplot(fig, clear_figure=True, use_container_width=True) - - fig = res.data.extractions.precursors.plot_transition( - min_rt_show * 1000 * 60, max_rt_show * 1000 * 60 - ) - plt.axvline(x=best_rt, color="red", alpha=0.5) - column.pyplot(fig, clear_figure=True, use_container_width=True) - - fig = res.data.extractions.fragments.plot_transition( - min_rt_show * 1000 * 60, max_rt_show * 1000 * 60 - ) - plt.axvline(x=best_rt, color="red", alpha=0.5) - column.pyplot(fig, clear_figure=True, use_container_width=True) - - # Button to download data - data_dict = res.data.model_dump() - data_json = json.dumps(data_dict) - column.download_button( - label="Download data", - data=data_json, - file_name="data.json", - mime="application/json", - key=key_prefix + "download_button", - ) diff --git a/run.bash b/run.bash index 755bdd8..c0ed38d 100644 --- a/run.bash +++ b/run.bash @@ -77,6 +77,3 @@ cargo run --release --bin timsseek -- \ --speclib-file $SPECLIB_NAME \ --output-dir $RESULTS_DIR \ --dotd-file $DOTD_FILE - -# Technically does T/D competition -uv run python -m timsseek_rescore --results_dir $RESULTS_DIR --output_dir $SUMMARY_DIR diff --git a/serve.bash b/serve.bash deleted file mode 100644 index e66169f..0000000 --- a/serve.bash +++ /dev/null @@ -1,16 +0,0 @@ -#!/bin/bash - -RAW_FILE=$1 -if [ -z "$RAW_FILE" ]; then - echo "Please provide a raw file" - exit 1 -fi - -cargo run --bin timsseek_rts --release -- \ - --config ./tolconfig.json \ - --dotd-file $RAW_FILE & -SERVER_PID=$! 
- -uv run --verbose python -m streamlit run python/timsseek_rts_receiver/app.py -kill $SERVER_PID -wait From eb6605ec9ee3431388e6a79d0f8055b1363d74ff Mon Sep 17 00:00:00 2001 From: "J. Sebastian Paez" Date: Thu, 9 Apr 2026 21:14:07 -0700 Subject: [PATCH 15/64] refactor: rename process_query_full to score_for_viewer, FullQueryResult to ViewerResult MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Update viewer-facing API to use clearer names: FullQueryResult → ViewerResult with fields traces/longitudinal_apex_profile/chromatograms/scored, and the method process_query_full → score_for_viewer on Scorer. --- rust/timsseek/src/scoring/full_results.rs | 14 +++++++------- rust/timsseek/src/scoring/pipeline.rs | 22 +++++++++++----------- 2 files changed, 18 insertions(+), 18 deletions(-) diff --git a/rust/timsseek/src/scoring/full_results.rs b/rust/timsseek/src/scoring/full_results.rs index 4d3c8f5..5cf205e 100644 --- a/rust/timsseek/src/scoring/full_results.rs +++ b/rust/timsseek/src/scoring/full_results.rs @@ -1,13 +1,13 @@ use crate::IonAnnot; -use crate::scoring::apex_finding::ElutionTraces; -use crate::scoring::results::ScoredCandidate; +use super::apex_finding::ElutionTraces; +use super::results::ScoredCandidate; use serde::Serialize; use timsquery::models::aggregators::ChromatogramCollector; #[derive(Debug, Clone, Serialize)] -pub struct FullQueryResult { - pub main_score_elements: ElutionTraces, - pub longitudinal_main_score: Vec, - pub extractions: ChromatogramCollector, - pub search_results: ScoredCandidate, +pub struct ViewerResult { + pub traces: ElutionTraces, + pub longitudinal_apex_profile: Vec, + pub chromatograms: ChromatogramCollector, + pub scored: ScoredCandidate, } diff --git a/rust/timsseek/src/scoring/pipeline.rs b/rust/timsseek/src/scoring/pipeline.rs index 6c9582a..6987373 100644 --- a/rust/timsseek/src/scoring/pipeline.rs +++ b/rust/timsseek/src/scoring/pipeline.rs @@ -50,7 +50,7 @@ use 
super::apex_finding::{ PeptideMetadata, RelativeIntensities, }; -use super::full_results::FullQueryResult; +use super::full_results::ViewerResult; use super::hyperscore::single_lazyscore; use super::offsets::MzMobilityOffsets; use super::results::{ @@ -510,15 +510,15 @@ impl Scorer { } impl Scorer { - pub fn process_query_full( + pub fn score_for_viewer( &self, item: QueryItemToScore, calibration: &CalibrationResult, - ) -> Result { + ) -> Result { let mut buffer = ApexFinder::new(self.num_cycles()); // Re-implementing logic here because process_query consumes `item` and returns `Option`. - // We want intermediate results for `FullQueryResult`. + // We want intermediate results for `ViewerResult`. let (metadata, scoring_ctx) = self.build_broad_extraction(&item).map_err(|_| { DataProcessingError::ExpectedNonEmptyData { @@ -533,7 +533,7 @@ impl Scorer { self.execute_secondary_query(&item, &apex_score, &spectral_tol, &isotope_tol); let nqueries = scoring_ctx.chromatograms.fragments.num_ions() as u8; - let search_results = self.finalize_results( + let scored = self.finalize_results( &metadata, nqueries, &apex_score, @@ -542,13 +542,13 @@ impl Scorer { )?; // Extract chromatograms before it's consumed - let extractions = scoring_ctx.chromatograms; + let chromatograms = scoring_ctx.chromatograms; - Ok(FullQueryResult { - main_score_elements: buffer.traces.clone(), - longitudinal_main_score: buffer.traces.apex_profile.clone(), - extractions, - search_results, + Ok(ViewerResult { + traces: buffer.traces.clone(), + longitudinal_apex_profile: buffer.traces.apex_profile.clone(), + chromatograms, + scored, }) } From 991c182bc429f437c5b3dd5f95bdad70c622d0c6 Mon Sep 17 00:00:00 2001 From: "J. 
Sebastian Paez" Date: Thu, 9 Apr 2026 22:13:05 -0700 Subject: [PATCH 16/64] feat: structured logging -- file + stderr warn/error + stdout milestones Separate CLI output into three streams for clearer user experience: - stdout: brief phase milestones + 1% FDR result summary - log file: full tracing record at configured level (default: {output_dir}/timsseek.log) - stderr: progress bars (TTY only) + warn/error tracing messages Replace -v/-q verbosity flags with --log-path and --log-level options. Progress bars auto-hide when stderr is not a terminal. --- rust/timsseek_cli/src/cli.rs | 16 +-- rust/timsseek_cli/src/main.rs | 165 ++++++++++++++++------------ rust/timsseek_cli/src/processing.rs | 77 ++++++++++--- 3 files changed, 166 insertions(+), 92 deletions(-) diff --git a/rust/timsseek_cli/src/cli.rs b/rust/timsseek_cli/src/cli.rs index 9a5bf1e..ba8a717 100644 --- a/rust/timsseek_cli/src/cli.rs +++ b/rust/timsseek_cli/src/cli.rs @@ -5,13 +5,15 @@ use timsseek::DecoyStrategy; #[derive(Parser, Debug)] #[command(author, version, about, long_about = None)] pub struct Cli { - /// Increase logging verbosity (can be repeated: -v for debug, -vv for trace) - #[arg(short, long, action = clap::ArgAction::Count, global = true)] - pub verbose: u8, - - /// Decrease logging verbosity (can be repeated: -q for warn, -qq for error) - #[arg(short, long, action = clap::ArgAction::Count, global = true)] - pub quiet: u8, + /// Path to the log file. + /// Defaults to {output_dir}/timsseek.log. + /// Use "-" to send logs to stderr instead of a file. 
+ #[arg(long, value_name = "PATH")] + pub log_path: Option, + + /// Log level for the log file (default: info) + #[arg(long, value_name = "LEVEL", default_value = "info")] + pub log_level: String, /// Path to the JSON configuration file (optional, uses defaults if not provided) #[arg(short, long)] diff --git a/rust/timsseek_cli/src/main.rs b/rust/timsseek_cli/src/main.rs index 8e87d6f..64fbf5f 100644 --- a/rust/timsseek_cli/src/main.rs +++ b/rust/timsseek_cli/src/main.rs @@ -13,7 +13,6 @@ use tracing::{ info, }; use tracing_subscriber::filter::EnvFilter; -use tracing_subscriber::fmt::format::FmtSpan; use tracing_subscriber::fmt::{ self, }; @@ -314,77 +313,10 @@ fn process_single_file( Ok(()) } -/// Converts verbosity flags to a log level string. -/// Returns the log level based on verbose/quiet counts. -/// If RUST_LOG is set, it takes precedence. -fn get_log_level(verbose: u8, quiet: u8) -> String { - // RUST_LOG environment variable takes precedence - if std::env::var("RUST_LOG").is_ok() { - return std::env::var("RUST_LOG").unwrap(); - } - - // Calculate effective verbosity: positive = more verbose, negative = more quiet - let effective = verbose as i8 - quiet as i8; - - match effective { - 2.. 
=> "trace".to_string(), - 1 => "debug".to_string(), - 0 => "info".to_string(), - -1 => "warn".to_string(), - _ => "error".to_string(), - } -} - fn main() -> std::result::Result<(), errors::CliError> { - // Parse command line arguments first to get verbosity flags + // Parse command line arguments first let args = Cli::parse(); - let log_level = get_log_level(args.verbose, args.quiet); - let fmt_filter = EnvFilter::builder() - .with_default_directive(log_level.parse().unwrap()) - .from_env_lossy(); - - #[cfg(feature = "instrumentation")] - let perf_filter = EnvFilter::builder() - .with_default_directive("trace".parse().unwrap()) - .with_env_var("RUST_PERF_LOG") - .from_env_lossy() - .add_directive("forust_ml::gradientbooster=warn".parse().unwrap()); - - // Filter out events but keep spans - #[cfg(feature = "instrumentation")] - let events_filter = tracing_subscriber::filter::filter_fn(|metadata| !metadata.is_event()); - - // I am aware that this conditional compilation is ugly ... - #[cfg(feature = "instrumentation")] - let (tree_layer, _guard) = PrintTreeLayer::new(PrintTreeConfig { - attention_above_percent: 25.0, - relevant_above_percent: 2.5, - hide_below_percent: 0.0, - display_unaccounted: true, - no_color: false, - accumulate_spans_count: false, - accumulate_events: false, - aggregate_similar_siblings: true, - }); - #[cfg(feature = "instrumentation")] - let tree_layer = tree_layer - .with_filter(perf_filter) - .with_filter(events_filter); - - // let (pf_layer, pf_guard) = PerfettoLayer::new_from_env().unwrap(); - - let fmt_layer = fmt::layer() - .with_span_events(FmtSpan::CLOSE) - .with_filter(fmt_filter); - - let reg = tracing_subscriber::registry().with(fmt_layer); - - #[cfg(feature = "instrumentation")] - let reg = reg.with(tree_layer); - - reg.init(); - // Load and parse configuration, or use defaults let mut config = match args.config { Some(ref config_path) => { @@ -436,6 +368,101 @@ fn main() -> std::result::Result<(), errors::CliError> { 
config.analysis.decoy_strategy = strategy; } + // === Set up tracing subscriber === + // We defer this until after config/validation so we know the output directory for the log file. + + // Determine log file path + let log_file_path = match args.log_path { + Some(ref p) if p.to_str() == Some("-") => None, // stderr-only mode + Some(ref p) => Some(p.clone()), + None => args + .output_dir + .as_ref() + .or(config.output.as_ref().map(|o| &o.directory)) + .map(|d| d.join("timsseek.log")), + }; + + // Build the env filter for the main logging layer + let env_filter = EnvFilter::builder() + .with_default_directive( + args.log_level + .parse() + .unwrap_or_else(|_| "info".parse().unwrap()), + ) + .from_env_lossy() + .add_directive("forust_ml=warn".parse().unwrap()) + .add_directive("timscentroid::storage=warn".parse().unwrap()); + + // Use Option layers so we can build a single subscriber type regardless + // of whether we're writing to a log file or to stderr. + let (file_layer, stderr_warn_layer, stderr_all_layer) = if let Some(ref log_path) = + log_file_path + { + // File mode: log file gets env_filter, stderr gets WARN+ only + if let Some(parent) = log_path.parent() { + let _ = std::fs::create_dir_all(parent); + } + let log_file = + std::fs::File::create(log_path).expect("Failed to create log file"); + let fl = fmt::layer() + .with_writer(std::sync::Mutex::new(log_file)) + .with_filter(env_filter); + let sl = fmt::layer() + .with_writer(std::io::stderr) + .without_time() + .with_filter(tracing_subscriber::filter::LevelFilter::WARN); + (Some(fl), Some(sl), None) + } else { + // stderr-only mode (--log-path -) + let sl = fmt::layer() + .with_writer(std::io::stderr) + .with_filter(env_filter); + (None, None, Some(sl)) + }; + + #[cfg(feature = "instrumentation")] + let perf_filter = EnvFilter::builder() + .with_default_directive("trace".parse().unwrap()) + .with_env_var("RUST_PERF_LOG") + .from_env_lossy() + 
.add_directive("forust_ml::gradientbooster=warn".parse().unwrap()); + + #[cfg(feature = "instrumentation")] + let events_filter = tracing_subscriber::filter::filter_fn(|metadata| !metadata.is_event()); + + #[cfg(feature = "instrumentation")] + let (tree_layer, _guard) = PrintTreeLayer::new(PrintTreeConfig { + attention_above_percent: 25.0, + relevant_above_percent: 2.5, + hide_below_percent: 0.0, + display_unaccounted: true, + no_color: false, + accumulate_spans_count: false, + accumulate_events: false, + aggregate_similar_siblings: true, + }); + #[cfg(feature = "instrumentation")] + let tree_layer = tree_layer + .with_filter(perf_filter) + .with_filter(events_filter); + + let reg = tracing_subscriber::registry() + .with(file_layer) + .with(stderr_warn_layer) + .with(stderr_all_layer); + + #[cfg(feature = "instrumentation")] + let reg = reg.with(tree_layer); + + reg.init(); + + // Print version and log path to stdout + if let Some(ref log_path) = log_file_path { + println!("timsseek v{}", env!("CARGO_PKG_VERSION")); + println!("Log: {}", log_path.display()); + println!(); + } + info!("Parsed configuration: {:#?}", config.clone()); let validated = validate_inputs(&config, &args)?; diff --git a/rust/timsseek_cli/src/processing.rs b/rust/timsseek_cli/src/processing.rs index 471326a..f1df8fc 100644 --- a/rust/timsseek_cli/src/processing.rs +++ b/rust/timsseek_cli/src/processing.rs @@ -1,8 +1,10 @@ use super::config::OutputConfig; use indicatif::{ + ProgressBar, ProgressIterator, ProgressStyle, }; +use std::io::IsTerminal; use std::path::Path; use std::time::Instant; use timsquery::IndexedTimstofPeaks; @@ -46,6 +48,20 @@ use tracing::{ warn, }; +/// Create a progress bar that writes to stderr when it is a TTY, or a hidden +/// (no-op) bar when stderr is not a terminal (e.g. piped / redirected). 
+fn make_progress_bar(len: u64, label: &str) -> ProgressBar { + if !std::io::stderr().is_terminal() { + return ProgressBar::hidden(); + } + let style = ProgressStyle::with_template(&format!( + "{{spinner:.green}} {} [{{elapsed_precise}}] [{{wide_bar:.cyan/blue}}] {{pos}}/{{len}} ({{eta}})", + label + )) + .unwrap(); + ProgressBar::new(len).with_style(style) +} + /// Check that two speclibs are on a compatible RT scale. /// Warns loudly if the RT ranges don't overlap, which would produce a useless calibration. fn check_rt_scale_compatibility(main_lib: &Speclib, calib_lib: &Speclib) { @@ -135,6 +151,11 @@ pub fn execute_pipeline( calibrants.len(), phase1_ms ); + println!( + "Phase 1: Prescore ........ {:.1}s ({} calibrants)", + phase1_ms as f64 / 1000.0, + calibrants.len() + ); // === PHASE 2: Calibration (fit RT + measure errors + derive tolerances) === // Build lookup from main speclib when using a separate calib lib. @@ -186,6 +207,10 @@ pub fn execute_pipeline( } }; let phase2_ms = phase2_start.elapsed().as_millis() as u64; + println!( + "Phase 2: Calibrate ....... {:.1}s", + phase2_ms as f64 / 1000.0 + ); // === PHASE 3: Narrow scoring with calibrated tolerances === info!("Phase 3: Scoring with calibrated extraction..."); @@ -198,11 +223,17 @@ pub fn execute_pipeline( chunk_size, &mut phase3_timings, ); + let phase3_ms = phase3_start.elapsed().as_millis() as u64; info!( "Phase 3 complete: {} scored peptides in {:?}", results.len(), phase3_start.elapsed() ); + println!( + "Phase 3: Score ........... {:.1}s ({} peptides)", + phase3_ms as f64 / 1000.0, + results.len() + ); let total_scored = results.len(); @@ -214,21 +245,30 @@ pub fn execute_pipeline( }); let phase4_ms = phase4_start.elapsed().as_millis() as u64; let total_after_competition = competed.len(); + println!( + "Phase 4: Compete ......... 
{:.1}s ({} candidates)", + phase4_ms as f64 / 1000.0, + total_after_competition + ); // === PHASE 5: Rescore (GBM cross-validated discriminant) === let phase5_start = Instant::now(); let data = rescore(competed); let phase5_ms = phase5_start.elapsed().as_millis() as u64; + println!( + "Phase 5: Rescore ......... {:.1}s", + phase5_ms as f64 / 1000.0 + ); - // Collect q-value threshold counts and print summary + // Collect q-value threshold counts — full report to log, key result to stdout let qval_report = report_qvalues_at_thresholds(&data, &[0.01, 0.05, 0.1, 0.5, 1.0]); let mut targets_at_1pct_qval = 0usize; let mut targets_at_5pct_qval = 0usize; let mut targets_at_10pct_qval = 0usize; for &(thresh, n_below_thresh, n_targets, n_decoys) in &qval_report { - println!( - "Found {} targets and {} decoys at q-value threshold {:.2} ({} total)", - n_targets, n_decoys, thresh, n_below_thresh + info!( + "q-value threshold {:.2}: {} targets, {} decoys ({} total)", + thresh, n_targets, n_decoys, n_below_thresh ); if (thresh - 0.01).abs() < 1e-6 { targets_at_1pct_qval = n_targets; @@ -256,6 +296,15 @@ pub fn execute_pipeline( pq_writer.close(); let phase6_ms = phase6_start.elapsed().as_millis() as u64; info!("Wrote final results to {:?}", out_path_pq); + println!( + "Phase 6: Write output .... 
{:.1}s", + phase6_ms as f64 / 1000.0 + ); + + // Key result to stdout + println!(); + println!("{} targets at 1% FDR", targets_at_1pct_qval); + println!("Output: {}", out_path_pq.display()); Ok(PipelineReport { phase1_prescore_ms: phase1_ms, @@ -285,15 +334,13 @@ fn phase1_prescore( chunk_size: usize, config: &CalibrationConfig, ) -> Vec { - let style = ProgressStyle::with_template( - "{spinner:.green} Phase 1 [{elapsed_precise}] [{wide_bar:.cyan/blue}] {pos}/{len} ({eta})", - ) - .unwrap(); + let n_chunks = (speclib.as_slice().len() + chunk_size - 1) / chunk_size; + let pb = make_progress_bar(n_chunks as u64, "Phase 1"); let mut global_heap = CalibrantHeap::new(config.n_calibrants); let mut offset = 0usize; - for chunk in speclib.as_slice().chunks(chunk_size).progress_with_style(style) { + for chunk in speclib.as_slice().chunks(chunk_size).progress_with(pb) { let chunk_heap = pipeline.prescore_batch(chunk, offset, config); global_heap = global_heap.merge(chunk_heap); offset += chunk.len(); @@ -501,18 +548,16 @@ fn phase3_score( chunk_size: usize, timings: &mut ScoreTimings, ) -> Vec { - let style = ProgressStyle::with_template( - "{spinner:.green} Phase 3 [{elapsed_precise}] [{wide_bar:.cyan/blue}] {pos}/{len} ({eta})", - ) - .unwrap(); - let total_peptides = speclib.as_slice().len(); + let n_chunks = (total_peptides + chunk_size - 1) / chunk_size; + let pb = make_progress_bar(n_chunks as u64, "Phase 3"); + let mut results = Vec::new(); for chunk in speclib .as_slice() .chunks(chunk_size) - .progress_with_style(style) + .progress_with(pb) { let (batch_results, batch_timings) = pipeline.score_calibrated_batch(chunk, calibration); @@ -588,7 +633,7 @@ fn target_decoy_compete(mut results: Vec) -> Vec std::cmp::Ordering::Equal, } } else { - seq_ord + ord } }); // As debug lets print the first and last results after deduplication From ba3cae3bc60e511d5e4254721ef4d171dbde71d4 Mon Sep 17 00:00:00 2001 From: "J. 
Sebastian Paez" Date: Thu, 9 Apr 2026 22:32:51 -0700 Subject: [PATCH 17/64] =?UTF-8?q?fix:=20quiet=20index=20detection=20loggin?= =?UTF-8?q?g=20=E2=80=94=20use=20filesystem=20checks=20for=20local,=20debu?= =?UTF-8?q?g-level=20for=20expected=20failures?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Replace StorageProvider probe in sniff_cached_index with a direct Path::exists() check for local paths, eliminating spurious ERROR/INFO lines on the normal "not a cached index" code path. Cloud paths keep the probe but log at debug! instead of error!. Also demote cache-miss error! to debug! in try_load_from_cache, simplify the load_index_auto detection log, and drop noisy info! calls in timscentroid storage.rs to trace!/debug!. --- rust/timscentroid/src/storage.rs | 11 ++--- rust/timsquery/src/serde/index_serde.rs | 58 ++++++++++++------------- 2 files changed, 34 insertions(+), 35 deletions(-) diff --git a/rust/timscentroid/src/storage.rs b/rust/timscentroid/src/storage.rs index 2f45bee..0f58d25 100644 --- a/rust/timscentroid/src/storage.rs +++ b/rust/timscentroid/src/storage.rs @@ -63,8 +63,9 @@ use crate::instrumentation::{ }; use crate::serialization::SerializationError; use tracing::{ - info, + debug, instrument, + trace, }; /// Global tokio runtime for all async operations (created lazily) @@ -262,15 +263,15 @@ impl StorageProvider { #[instrument(skip(self), fields(path = %path))] pub async fn read_bytes_async(&self, path: &str) -> Result, SerializationError> { let full_path = self.build_path(path); - info!("Reading from full path: {}", full_path); + trace!("Reading from full path: {}", full_path); let object_path = ObjectPath::from(full_path.as_str()); - info!("Object path: {:?}", object_path); + trace!("Object path: {:?}", object_path); let result = match self.store.get(&object_path).await { Ok(res) => res, Err(e) => { // Categorize the error properly based on what it actually is let error_str = e.to_string(); - 
info!("Error getting object: {:?}", e); + debug!("Error getting object: {:?}", e); // Check if it's an authentication/permission error let error_kind = if error_str.contains("ExpiredToken") @@ -406,7 +407,7 @@ async fn parse_url(url: &url::Url) -> Result, Serialization "Missing bucket in S3 URL", )) })?; - info!("Creating S3 ObjectStore for bucket: {}", bucket); + debug!("Creating S3 ObjectStore for bucket: {}", bucket); // 1. Load the AWS configuration from the environment (handles Profile, MFA, SSO, etc.) let sdk_config = aws_config::load_defaults(BehaviorVersion::latest()).await; diff --git a/rust/timsquery/src/serde/index_serde.rs b/rust/timsquery/src/serde/index_serde.rs index 8171654..67ec526 100644 --- a/rust/timsquery/src/serde/index_serde.rs +++ b/rust/timsquery/src/serde/index_serde.rs @@ -125,6 +125,7 @@ use timscentroid::StorageLocation; use timscentroid::lazy::LazyIndexedTimstofPeaks; use timscentroid::serialization::SerializationConfig; use tracing::{ + debug, error, info, }; @@ -522,7 +523,7 @@ impl TimsIndexReader { other => match other.to_storage_location() { Some(Ok(loc)) => Some(loc), Some(Err(e)) => { - error!("Invalid cache location: {}", e); + debug!("Invalid cache location: {}", e); None } None => None, @@ -568,10 +569,7 @@ impl TimsIndexReader { Some(idx) } Err(e) => { - error!( - "Failed to load index from cache at {}: {:?}", - location_desc, e - ); + debug!("Cache miss at {}: {:?}", location_desc, e); None } } @@ -631,44 +629,42 @@ impl Default for TimsIndexReader { fn sniff_cached_index(location: &str) -> Result { let is_cloud = location.contains("://"); - // Try to create storage location and check for metadata.json - let storage_result = if is_cloud { - StorageLocation::from_url(location) - } else { - Ok(StorageLocation::from_path(location)) - }; + if !is_cloud { + // Local: check filesystem directly — no StorageProvider, no logging noise + let metadata_path = std::path::Path::new(location).join("metadata.json"); + let is_cached = 
metadata_path.exists(); + debug!("Local index sniff: {} -> cached={}", location, is_cached); + return Ok(is_cached); + } - let storage_location = match storage_result { + // Cloud: must probe via StorageProvider (can't stat files on S3) + let storage_location = match StorageLocation::from_url(location) { Ok(loc) => loc, Err(e) => { - error!("Failed to parse storage location for sniffing: {:?}", e); - return Ok(false); // Treat parse errors as "not cached" + debug!("Failed to parse cloud URL for sniffing: {:?}", e); + return Ok(false); } }; - // Try to read metadata.json as a quick check match timscentroid::storage::StorageProvider::new(storage_location) { Ok(provider) => { - // Just try to read a few bytes - if metadata.json exists, it's likely a cached index match provider.read_bytes("metadata.json") { Ok(_) => Ok(true), Err(e) => { - // Check if it's a permission error - propagate it! - if let timscentroid::serialization::SerializationError::Io(io_err) = &e - && io_err.kind() == std::io::ErrorKind::PermissionDenied - { - error!("Permission denied while checking for cached index: {:?}", e); - return Err(crate::errors::DataReadingError::SerializationError(e)); + if let timscentroid::serialization::SerializationError::Io(io_err) = &e { + if io_err.kind() == std::io::ErrorKind::PermissionDenied { + error!("Permission denied checking for cached index at {}: {:?}", location, e); + return Err(crate::errors::DataReadingError::SerializationError(e)); + } } - // For other errors (like NotFound), treat as not cached - error!("metadata.json not found or unreadable: {:?}", e); + debug!("Cloud index sniff: {} -> not cached ({:?})", location, e); Ok(false) } } } Err(e) => { - error!("Failed to access storage location for sniffing: {:?}", e); - Ok(false) // Treat provider creation errors as "not cached" + debug!("Cloud index sniff: {} -> provider error ({:?})", location, e); + Ok(false) } } } @@ -736,10 +732,12 @@ pub fn load_index_auto( let is_cached = 
sniff_cached_index(input)?; let is_cloud = input.contains("://"); - info!( - "Detected: cached={}, cloud={}, prefer_lazy={}", - is_cached, is_cloud, config.prefer_lazy - ); + info!("Index type: {}", match (is_cached, is_cloud) { + (true, true) => "cloud cached index", + (true, false) => "local cached index", + (false, true) => "cloud raw (unsupported)", + (false, false) => "local raw .d file", + }); // Early validation: reject cloud raw .d files with helpful error if is_cloud && !is_cached { From 0ef96650f6e8e427978df5de2100693a1e67904b Mon Sep 17 00:00:00 2001 From: "J. Sebastian Paez" Date: Fri, 10 Apr 2026 09:08:56 -0700 Subject: [PATCH 18/64] =?UTF-8?q?refactor:=20pipeline=20cleanup=20?= =?UTF-8?q?=E2=80=94=20speclib=20by=20ref,=20RunReport,=20early=20stopping?= =?UTF-8?q?,=20max-qvalue?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit - Upgrade forust-ml 0.4.8 → 0.5.0 - GBM early stopping at 100 rounds (PrecomputedFeatures row-major matrix) - Load speclib/calib_lib once in main(), pass by &Speclib reference - Add RunReport (per-invocation) with speclib/index loading timings - Add --max-qvalue CLI arg (default 0.5, filters Parquet output) - Fix duplicate NUM_MS2_IONS/NUM_MS1_IONS constants (import from mod.rs) - Re-export FileReport, RunReport from scoring mod --- .gitignore | 1 + Cargo.lock | 4 +- rust/timsseek/Cargo.toml | 5 +- rust/timsseek/src/ml/cv.rs | 70 +++++++++++++++----------- rust/timsseek/src/scoring/mod.rs | 2 +- rust/timsseek/src/scoring/results.rs | 3 +- rust/timsseek/src/scoring/timings.rs | 23 ++++++++- rust/timsseek_cli/src/cli.rs | 6 +++ rust/timsseek_cli/src/main.rs | 73 +++++++++++++++++++++++----- rust/timsseek_cli/src/processing.rs | 62 ++++++++--------------- 10 files changed, 157 insertions(+), 92 deletions(-) diff --git a/.gitignore b/.gitignore index 64baf74..bc42522 100644 --- a/.gitignore +++ b/.gitignore @@ -44,3 +44,4 @@ docs/superpowers/* sh*/ *plan*.md +docs/superpowers/ diff --git 
a/Cargo.lock b/Cargo.lock index f6d4dbd..c6613c1 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2386,9 +2386,9 @@ dependencies = [ [[package]] name = "forust-ml" -version = "0.4.8" +version = "0.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "36e2b0aa48a6ad52ae5ed92f6f41b46bb9b8e80922fb6c23067bab7022f1b114" +checksum = "51128a6700cc6e8561877f999a69b48ec9536e8fd3c1db3e570d2805d69f92a5" dependencies = [ "log", "rand 0.8.5", diff --git a/rust/timsseek/Cargo.toml b/rust/timsseek/Cargo.toml index 547401c..a4a1be8 100644 --- a/rust/timsseek/Cargo.toml +++ b/rust/timsseek/Cargo.toml @@ -10,7 +10,7 @@ rmp-serde = "1.1" zstd = "0.13" # Gradient boosted tree -forust-ml = "0.4.8" +forust-ml = "0.5.0" rand = "0.9.2" # Workspace member deps @@ -31,9 +31,6 @@ parquet = { workspace = true } arrow = { workspace = true } [features] -# This enables adding instrumentation to a lot of the scoring -# internals, meant only for profiling use cases and generate -# VERY verbose output. 
instrumentation = [] serial_scoring = [] diff --git a/rust/timsseek/src/ml/cv.rs b/rust/timsseek/src/ml/cv.rs index d07614c..155519b 100644 --- a/rust/timsseek/src/ml/cv.rs +++ b/rust/timsseek/src/ml/cv.rs @@ -159,7 +159,7 @@ impl Default for GBMConfig { grow_policy: GrowPolicy::DepthWise, evaluation_metric: Some(Metric::LogLoss), // evaluation_metric: None, - early_stopping_rounds: None, + early_stopping_rounds: Some(100), initialize_base_score: true, terminate_missing_features: HashSet::new(), missing_node_treatment: MissingNodeTreatment::AssignToParent, @@ -271,40 +271,28 @@ pub enum DataBufferError { } impl DataBuffer { - fn fill_buffer( + fn fill_buffer_precomputed( &mut self, assigned_fold: &[u8], - data: &[impl FeatureLike], + precomputed: &PrecomputedFeatures, fold: u8, ) -> Result<(), DataBufferError> { self.fold_buffer.clear(); self.response_buffer.clear(); self.nrows = assigned_fold.iter().filter(|&&x| x == fold).count(); + self.ncols = precomputed.ncols; - let mut dumb_buffer = Vec::new(); - dumb_buffer.extend(data.first().unwrap().as_feature()); - - self.ncols = dumb_buffer.len(); - - // now we resize to 0s, since the matrix is feature-major - // so we need to insert stuff in essentually the transposed order + // Feature-major layout: fold_buffer[feature_idx * nrows + sample_idx] self.fold_buffer.resize(self.ncols * self.nrows, 0.0); let mut sample_idx = 0; - for (elem_fold, elem) in assigned_fold.iter().zip(data.iter()) { - if fold == *elem_fold { - let mut local_added = 0; - for (feature_idx, val) in elem.as_feature().into_iter().enumerate() { - let idx = feature_idx * self.nrows + sample_idx; - assert!(self.fold_buffer[idx] == 0.0); - self.fold_buffer[idx] = val; - local_added += 1; - } - - if local_added != self.ncols { - return Err(DataBufferError::UnequalLengths(local_added, self.ncols)); + for (elem_idx, &elem_fold) in assigned_fold.iter().enumerate() { + if fold == elem_fold { + let row = precomputed.row(elem_idx); + for (feature_idx, &val) in 
row.iter().enumerate() { + self.fold_buffer[feature_idx * self.nrows + sample_idx] = val; } - self.response_buffer.push(elem.get_y()); + self.response_buffer.push(precomputed.responses[elem_idx]); sample_idx += 1; } } @@ -333,15 +321,40 @@ impl DataBuffer { /// So the score for any point in the data is the average of /// the results for all classifiers that didint use it /// for either training or early_stopping_rounds. +/// Row-major precomputed feature matrix: features[sample_idx * ncols + feature_idx] +struct PrecomputedFeatures { + features: Vec, + responses: Vec, + ncols: usize, +} + +impl PrecomputedFeatures { + fn from_data(data: &[impl FeatureLike]) -> Self { + let ncols = data.first().map_or(0, |d| { + d.as_feature().into_iter().count() + }); + let mut features = Vec::with_capacity(data.len() * ncols); + let mut responses = Vec::with_capacity(data.len()); + for elem in data { + features.extend(elem.as_feature()); + responses.push(elem.get_y()); + } + Self { features, responses, ncols } + } + + fn row(&self, idx: usize) -> &[f64] { + &self.features[idx * self.ncols..(idx + 1) * self.ncols] + } +} + pub struct CrossValidatedScorer { n_folds: u8, data: Vec, weights: Vec, assigned_fold: Vec, fold_classifiers: Vec>, - // I tried this but makes no difference ... 
- // fold_classifiers: Vec>, config: GBMConfig, + precomputed: PrecomputedFeatures, } impl CrossValidatedScorer { @@ -354,13 +367,13 @@ impl CrossValidatedScorer { let assigned_fold: Vec = (0..data.len()) .map(|x| (x % n_folds as usize).try_into().unwrap()) .collect(); - // let weights = vec![1.0; data.len()]; - // default to a weight of 0.5 to all targets and 1.0 for decoys let weights: Vec = data .iter() .map(|x| if x.get_y() > 0.5 { 0.5 } else { 1.0 }) .collect(); + let precomputed = PrecomputedFeatures::from_data(&data); + Self { n_folds, data, @@ -368,6 +381,7 @@ impl CrossValidatedScorer { fold_classifiers: Vec::new(), weights, config, + precomputed, } } @@ -473,7 +487,7 @@ impl CrossValidatedScorer { buffer: &'a mut DataBuffer, ) -> (Matrix<'a, f64>, &'a [f64]) { buffer - .fill_buffer(self.assigned_fold.as_slice(), self.data.as_slice(), fold) + .fill_buffer_precomputed(self.assigned_fold.as_slice(), &self.precomputed, fold) .unwrap(); buffer.as_matrix() } diff --git a/rust/timsseek/src/scoring/mod.rs b/rust/timsseek/src/scoring/mod.rs index 85d68dc..1a166b6 100644 --- a/rust/timsseek/src/scoring/mod.rs +++ b/rust/timsseek/src/scoring/mod.rs @@ -17,7 +17,7 @@ pub use pipeline::{ Scorer, }; pub use results::{ScoredCandidate, CompetedCandidate, FinalResult, ScoringFields}; -pub use timings::{PipelineReport, ScoreTimings}; +pub use timings::{FileReport, PipelineReport, RunReport, ScoreTimings}; pub const NUM_MS2_IONS: usize = 7; pub const NUM_MS1_IONS: usize = 3; diff --git a/rust/timsseek/src/scoring/results.rs b/rust/timsseek/src/scoring/results.rs index 22c0d96..6d63a56 100644 --- a/rust/timsseek/src/scoring/results.rs +++ b/rust/timsseek/src/scoring/results.rs @@ -9,8 +9,7 @@ use super::offsets::MzMobilityOffsets; use super::pipeline::SecondaryLazyScores; use crate::errors::DataProcessingError; -pub const NUM_MS2_IONS: usize = 7; -pub const NUM_MS1_IONS: usize = 3; +use super::{NUM_MS2_IONS, NUM_MS1_IONS}; /// Shared scoring fields produced by Phase 3. 
Every field is guaranteed populated. #[derive(Debug, Clone, Serialize)] diff --git a/rust/timsseek/src/scoring/timings.rs b/rust/timsseek/src/scoring/timings.rs index d795a2a..0c7b0dd 100644 --- a/rust/timsseek/src/scoring/timings.rs +++ b/rust/timsseek/src/scoring/timings.rs @@ -63,7 +63,10 @@ impl std::ops::AddAssign for ScoreTimings { /// All timing fields are in milliseconds. #[derive(Debug, Default, Serialize)] pub struct PipelineReport { - // Timings (all in ms) + // Per-file: index loading (ms) + pub load_index_ms: u64, + + // Phase timings (all in ms) pub phase1_prescore_ms: u64, pub phase2_calibration_ms: u64, pub phase3_extraction_ms: u64, @@ -81,3 +84,21 @@ pub struct PipelineReport { pub targets_at_5pct_qval: usize, pub targets_at_10pct_qval: usize, } + +/// Top-level report for an entire CLI invocation. +/// Contains shared loading costs and per-file pipeline reports. +#[derive(Debug, Default, Serialize)] +pub struct RunReport { + pub load_speclib_ms: u64, + pub load_calib_lib_ms: u64, + pub speclib_entries: usize, + pub calib_lib_entries: usize, + pub files: Vec, +} + +/// Per-file report: file name + pipeline report. +#[derive(Debug, Serialize)] +pub struct FileReport { + pub file_name: String, + pub pipeline: PipelineReport, +} diff --git a/rust/timsseek_cli/src/cli.rs b/rust/timsseek_cli/src/cli.rs index ba8a717..b3863f1 100644 --- a/rust/timsseek_cli/src/cli.rs +++ b/rust/timsseek_cli/src/cli.rs @@ -43,6 +43,12 @@ pub struct Cli { #[arg(short = 'O', long)] pub overwrite: bool, + /// Maximum q-value for output. Only results at or below this + /// threshold are written to the Parquet file. 
+ /// Default: 0.5 + #[arg(long, default_value = "0.5")] + pub max_qvalue: f32, + /// Decoy generation strategy /// Options: if-missing (default), force, never /// - if-missing: Generate mass-shift decoys only if library has none diff --git a/rust/timsseek_cli/src/main.rs b/rust/timsseek_cli/src/main.rs index 64fbf5f..2c4395e 100644 --- a/rust/timsseek_cli/src/main.rs +++ b/rust/timsseek_cli/src/main.rs @@ -230,12 +230,13 @@ fn get_frag_range(file: &TimsTofPath) -> TupleRange { fn process_single_file( dotd_file: &std::path::Path, - speclib_path: &std::path::Path, - calib_lib_path: Option<&std::path::Path>, + speclib: &timsseek::data_sources::speclib::Speclib, + calib_lib: Option<&timsseek::data_sources::speclib::Speclib>, config: &Config, base_output_dir: &std::path::Path, overwrite: bool, -) -> std::result::Result<(), errors::CliError> { + max_qvalue: f32, +) -> std::result::Result { info!("Processing file: {:?}", dotd_file); let timstofpath = @@ -244,6 +245,7 @@ fn process_single_file( path: Some(dotd_file.to_string_lossy().to_string()), })?; + let index_start = std::time::Instant::now(); let index = load_index_auto( dotd_file.to_str().ok_or_else(|| errors::CliError::Io { source: "Invalid path encoding".to_string(), @@ -252,6 +254,8 @@ fn process_single_file( None, )? .into_eager()?; + let load_index_ms = index_start.elapsed().as_millis() as u64; + println!("Loading index ........... 
{:.1}s", load_index_ms as f64 / 1000.0); let fragmented_range = get_frag_range(&timstofpath); @@ -298,19 +302,19 @@ fn process_single_file( directory: file_output_dir, }; - // Process speclib - processing::run_pipeline( - speclib_path, - calib_lib_path, + let report = processing::run_pipeline( + speclib, + calib_lib, &pipeline, config.analysis.chunk_size, &file_output_config, - config.analysis.decoy_strategy, + max_qvalue, + load_index_ms, ) .unwrap(); info!("Successfully processed {:?}", dotd_file); - Ok(()) + Ok(report) } fn main() -> std::result::Result<(), errors::CliError> { @@ -487,9 +491,42 @@ fn main() -> std::result::Result<(), errors::CliError> { })?; info!("Wrote final configuration to {:?}", config_output_path); + let mut run_report = timsseek::scoring::RunReport::default(); let mut failed_files: Vec<(std::path::PathBuf, errors::CliError)> = Vec::new(); let mut successful_files: Vec = Vec::new(); + // Load speclib once (shared across all files) + let speclib_start = std::time::Instant::now(); + info!("Building database from speclib file {:?}", validated.speclib_path); + info!("Decoy generation strategy: {}", config.analysis.decoy_strategy); + let speclib = timsseek::data_sources::speclib::Speclib::from_file( + &validated.speclib_path, + config.analysis.decoy_strategy, + ).map_err(|e| errors::CliError::Config { source: format!("Failed to load speclib: {:?}", e) })?; + let load_speclib_ms = speclib_start.elapsed().as_millis() as u64; + println!("Loading speclib ......... 
{:.1}s ({} entries)", load_speclib_ms as f64 / 1000.0, speclib.len()); + + // Load calibration library once (if provided) + let calib_start = std::time::Instant::now(); + let calib_lib = match &validated.calib_lib_path { + Some(p) => { + info!("Loading calibration library from {:?}", p); + let lib = timsseek::data_sources::speclib::Speclib::from_file( + p, + config.analysis.decoy_strategy, + ).map_err(|e| errors::CliError::Config { source: format!("Failed to load calib lib: {:?}", e) })?; + println!("Loading calib lib ....... {:.1}s ({} entries)", calib_start.elapsed().as_secs_f64(), lib.len()); + Some(lib) + } + None => None, + }; + let load_calib_lib_ms = calib_start.elapsed().as_millis() as u64; + + run_report.load_speclib_ms = load_speclib_ms; + run_report.load_calib_lib_ms = load_calib_lib_ms; + run_report.speclib_entries = speclib.len(); + run_report.calib_lib_entries = calib_lib.as_ref().map_or(0, |l| l.len()); + let total_files = validated.dotd_files.len(); info!("Processing {} raw file(s)", total_files); @@ -503,14 +540,19 @@ fn main() -> std::result::Result<(), errors::CliError> { match process_single_file( dotd_file, - &validated.speclib_path, - validated.calib_lib_path.as_deref(), + &speclib, + calib_lib.as_ref(), &config, &validated.output_directory, validated.overwrite, + args.max_qvalue, ) { - Ok(_) => { + Ok(report) => { successful_files.push(dotd_file.clone()); + run_report.files.push(timsseek::scoring::FileReport { + file_name: dotd_file.to_string_lossy().to_string(), + pipeline: report, + }); } Err(e) => { error!("Failed to process {:?}: {}", dotd_file, e); @@ -519,6 +561,13 @@ fn main() -> std::result::Result<(), errors::CliError> { } } + // Write run-level report + let run_report_path = validated.output_directory.join("run_report.json"); + if let Ok(json) = serde_json::to_string_pretty(&run_report) { + let _ = std::fs::write(&run_report_path, json); + info!("Wrote run report to {:?}", run_report_path); + } + info!("Successfully processed {} 
file(s)", successful_files.len()); if !failed_files.is_empty() { error!("Failed to process {} file(s):", failed_files.len()); diff --git a/rust/timsseek_cli/src/processing.rs b/rust/timsseek_cli/src/processing.rs index f1df8fc..911f949 100644 --- a/rust/timsseek_cli/src/processing.rs +++ b/rust/timsseek_cli/src/processing.rs @@ -5,7 +5,6 @@ use indicatif::{ ProgressStyle, }; use std::io::IsTerminal; -use std::path::Path; use std::time::Instant; use timsquery::IndexedTimstofPeaks; use timsquery::MzMobilityStatsCollector; @@ -38,7 +37,6 @@ use timsseek::scoring::{ }; use timsseek::scoring::pipeline::Scorer; use timsseek::{ - DecoyStrategy, IonAnnot, ScorerQueriable, }; @@ -123,17 +121,18 @@ fn check_rt_scale_compatibility(main_lib: &Speclib, calib_lib: &Speclib) { tracing::instrument(skip_all, level = "trace") )] pub fn execute_pipeline( - speclib: Speclib, - calib_lib: Option, + speclib: &Speclib, + calib_lib: Option<&Speclib>, pipeline: &Scorer, chunk_size: usize, - _out_path: &OutputConfig, + out_path: &OutputConfig, + max_qvalue: f32, ) -> std::result::Result { let calib_config = CalibrationConfig::default(); // === PHASE 1: Broad prescore -> collect top calibrants === // Use calibration library if provided, otherwise fall back to main speclib - let phase1_lib = calib_lib.as_ref().unwrap_or(&speclib); + let phase1_lib = calib_lib.unwrap_or(speclib); if let Some(ref clib) = calib_lib { info!( "Phase 1: Broad prescore using calibration library ({} entries)...", @@ -251,7 +250,7 @@ pub fn execute_pipeline( total_after_competition ); - // === PHASE 5: Rescore (GBM cross-validated discriminant) === + // === PHASE 5: Rescore === let phase5_start = Instant::now(); let data = rescore(competed); let phase5_ms = phase5_start.elapsed().as_millis() as u64; @@ -281,7 +280,7 @@ pub fn execute_pipeline( // === PHASE 6: Write Parquet output === let phase6_start = Instant::now(); - let out_path_pq = _out_path.directory.join("results.parquet"); + let out_path_pq = 
out_path.directory.join("results.parquet"); let mut pq_writer = timsseek::scoring::parquet_writer::ResultParquetWriter::new( &out_path_pq, 20_000, @@ -291,7 +290,9 @@ pub fn execute_pipeline( source: e, })?; for res in data.into_iter() { - pq_writer.add(res); + if res.qvalue <= max_qvalue { + pq_writer.add(res); + } } pq_writer.close(); let phase6_ms = phase6_start.elapsed().as_millis() as u64; @@ -307,6 +308,7 @@ pub fn execute_pipeline( println!("Output: {}", out_path_pq.display()); Ok(PipelineReport { + load_index_ms: 0, // set by caller after return phase1_prescore_ms: phase1_ms, phase2_calibration_ms: phase2_ms, phase3_extraction_ms: phase3_timings.extraction.as_millis() as u64, @@ -733,43 +735,19 @@ fn target_decoy_compete(mut results: Vec) -> Vec, + speclib: &Speclib, + calib_lib: Option<&Speclib>, pipeline: &Scorer, chunk_size: usize, output: &OutputConfig, - decoy_strategy: DecoyStrategy, -) -> std::result::Result<(), TimsSeekError> { - info!("Building database from speclib file {:?}", path); - info!("Decoy generation strategy: {}", decoy_strategy); - - let st = std::time::Instant::now(); + max_qvalue: f32, + load_index_ms: u64, +) -> std::result::Result { let performance_report_path = output.directory.join("performance_report.json"); - let speclib = Speclib::from_file(path, decoy_strategy)?; - let elap_time = st.elapsed(); - info!( - "Loading speclib of length {} took: {:?} for {}", - speclib.len(), - elap_time, - path.display() - ); - - let calib_lib = match calib_lib_path { - Some(p) => { - info!("Loading calibration library from {:?}", p); - let st = std::time::Instant::now(); - let lib = Speclib::from_file(p, decoy_strategy)?; - info!( - "Loaded calibration library of length {} in {:?}", - lib.len(), - st.elapsed() - ); - Some(lib) - } - None => None, - }; - let timings = execute_pipeline(speclib, calib_lib, pipeline, chunk_size, output)?; + let mut timings = execute_pipeline(speclib, calib_lib, pipeline, chunk_size, output, max_qvalue)?; + 
timings.load_index_ms = load_index_ms; + // Write per-file report let perf_report = serde_json::to_string_pretty(&timings).map_err(|e| TimsSeekError::ParseError { msg: format!("Error serializing performance report to JSON: {}", e), @@ -778,5 +756,5 @@ pub fn run_pipeline( path: performance_report_path.into(), source: e, })?; - Ok(()) + Ok(timings) } From 840d466f7d31f44a2e9941ba218f1ce29563a9e6 Mon Sep 17 00:00:00 2001 From: "J. Sebastian Paez" Date: Fri, 10 Apr 2026 09:12:20 -0700 Subject: [PATCH 19/64] feat(calibrt): make Node pub, add Grid::reset() and grid_cells() --- rust/calibrt/src/grid.rs | 46 ++++++++++++++++++++++++++++++++++------ 1 file changed, 39 insertions(+), 7 deletions(-) diff --git a/rust/calibrt/src/grid.rs b/rust/calibrt/src/grid.rs index c2321ed..13b43aa 100644 --- a/rust/calibrt/src/grid.rs +++ b/rust/calibrt/src/grid.rs @@ -162,22 +162,54 @@ impl Grid { Ok(()) } + + /// Zero all node weights and suppression flags. Keeps allocation. + pub fn reset(&mut self) { + for node in &mut self.nodes { + node.center = Point::default(); + node.suppressed = false; + node.sum_wx = 0.0; + node.sum_wy = 0.0; + node.sum_w = 0.0; + } + } + + /// Read access to all grid cells. + pub fn grid_cells(&self) -> &[Node] { + &self.nodes + } } /// Represents a node (cell) in the grid. 
#[derive(Debug, Clone, Copy, Default)] -pub(crate) struct Node { - pub(crate) center: Point, - pub(crate) suppressed: bool, - // Weighted centroid accumulators - sum_wx: f64, - sum_wy: f64, - sum_w: f64, +pub struct Node { + pub center: Point, + pub suppressed: bool, + // Internal accumulators — not exposed beyond crate + pub(crate) sum_wx: f64, + pub(crate) sum_wy: f64, + pub(crate) sum_w: f64, } #[cfg(test)] mod tests { use super::*; + use crate::Point; + + #[test] + fn test_grid_reset_preserves_allocation() { + let mut grid = Grid::new(10, (0.0, 100.0), (0.0, 100.0)).unwrap(); + grid.add_point(&Point { x: 50.0, y: 50.0, weight: 1.0 }).unwrap(); + + let capacity_before = grid.nodes.capacity(); + assert!(grid.grid_cells().iter().any(|n| n.sum_w > 0.0)); + + grid.reset(); + + assert_eq!(grid.nodes.capacity(), capacity_before); + assert!(grid.grid_cells().iter().all(|n| n.sum_w == 0.0)); + assert!(grid.grid_cells().iter().all(|n| !n.suppressed)); + } /// Helper function to print grid state and return non-suppressed nodes fn print_grid_state(grid: &Grid) -> Vec<(usize, usize, f64)> { From de37d6a3a8a840d0a47e1aebcd1ee32c377d9c1b Mon Sep 17 00:00:00 2001 From: "J. Sebastian Paez" Date: Fri, 10 Apr 2026 09:17:44 -0700 Subject: [PATCH 20/64] refactor(calibrt): pathfinding accepts external DP buffers Change find_optimal_path signature to accept &mut Vec and &mut Vec> buffers instead of allocating them internally. Caller in calibrate_with_ranges creates temporary buffers for now; Task 3 will move ownership to CalibrationState for reuse across calls. 
--- rust/calibrt/src/lib.rs | 6 +++++- rust/calibrt/src/pathfinding.rs | 15 +++++++++++---- 2 files changed, 16 insertions(+), 5 deletions(-) diff --git a/rust/calibrt/src/lib.rs b/rust/calibrt/src/lib.rs index b3173b8..1806bad 100644 --- a/rust/calibrt/src/lib.rs +++ b/rust/calibrt/src/lib.rs @@ -204,7 +204,11 @@ pub fn calibrate_with_ranges( .collect(); // Module 2: Find the optimal ascending path - let optimal_path_points = pathfinding::find_optimal_path(&mut filtered_nodes, lookback); + let mut max_weights = Vec::new(); + let mut prev_indices = Vec::new(); + let optimal_path_points = pathfinding::find_optimal_path( + &mut filtered_nodes, lookback, &mut max_weights, &mut prev_indices, + ); // Module 3: Fit the final points and prepare for extrapolation let calcurve = CalibrationCurve::new(optimal_path_points); match &calcurve { diff --git a/rust/calibrt/src/pathfinding.rs b/rust/calibrt/src/pathfinding.rs index c448e8d..8687428 100644 --- a/rust/calibrt/src/pathfinding.rs +++ b/rust/calibrt/src/pathfinding.rs @@ -12,7 +12,12 @@ const DISTANCE_THRESHOLD: f64 = 1e-6; /// - Nodes are sorted by (x, y) to ensure topological order /// - Edges exist only between nodes where both x and y increase (monotonic constraint) /// - Edge weights favor high-confidence nodes that are geometrically close -pub(crate) fn find_optimal_path(nodes: &mut [crate::grid::Node], lookback: usize) -> Vec { +pub(crate) fn find_optimal_path( + nodes: &mut [crate::grid::Node], + lookback: usize, + max_weights: &mut Vec, + prev_node_indices: &mut Vec>, +) -> Vec { if nodes.is_empty() { return Vec::new(); } @@ -29,8 +34,10 @@ pub(crate) fn find_optimal_path(nodes: &mut [crate::grid::Node], lookback: usize }); let n = nodes.len(); - let mut max_weights = vec![0.0; n]; - let mut prev_node_indices = vec![None; n]; + max_weights.clear(); + max_weights.resize(n, 0.0); + prev_node_indices.clear(); + prev_node_indices.resize(n, None); for i in 0..n { max_weights[i] = nodes[i].center.weight; // Path can 
start at any node @@ -64,7 +71,7 @@ pub(crate) fn find_optimal_path(nodes: &mut [crate::grid::Node], lookback: usize let mut max_path_weight = 0.0; let mut end_of_path_idx = 0; - for (i, max_w) in max_weights.into_iter().enumerate() { + for (i, &max_w) in max_weights.iter().enumerate() { if max_w > max_path_weight { max_path_weight = max_w; end_of_path_idx = i; From 366b9c81b80ed2754847c99492f4081772f00170 Mon Sep 17 00:00:00 2001 From: "J. Sebastian Paez" Date: Fri, 10 Apr 2026 09:20:21 -0700 Subject: [PATCH 21/64] feat(calibrt): add CalibrationState with update/fit/reset cycle Introduces CalibrationState, a reusable struct that owns Grid, DP buffers, and path indices to enable incremental calibration without repeated allocation. Makes CalibrationCurve::new pub(crate) so fit() can construct it within the crate. --- rust/calibrt/src/lib.rs | 160 +++++++++++++++++++++++++++++++++++++++- 1 file changed, 159 insertions(+), 1 deletion(-) diff --git a/rust/calibrt/src/lib.rs b/rust/calibrt/src/lib.rs index 1806bad..4e66a50 100644 --- a/rust/calibrt/src/lib.rs +++ b/rust/calibrt/src/lib.rs @@ -55,7 +55,7 @@ pub struct CalibrationCurve { impl CalibrationCurve { /// Creates a new CalibrationCurve from a slice of points. /// Precomputes slopes for faster prediction. - fn new(mut points: Vec) -> Result { + pub(crate) fn new(mut points: Vec) -> Result { if points.is_empty() { return Err(CalibRtError::NoPoints); } @@ -145,6 +145,164 @@ impl CalibrationCurve { } } +/// Reusable calibration state for incremental fitting. Owns all allocations. 
+pub struct CalibrationState { + grid: grid::Grid, + path_indices: Vec, + dp_max_weights: Vec, + dp_prev_indices: Vec>, + curve: Option, + stale: bool, + lookback: usize, +} + +impl CalibrationState { + pub fn new( + grid_size: usize, + x_range: (f64, f64), + y_range: (f64, f64), + lookback: usize, + ) -> Result { + Ok(Self { + grid: grid::Grid::new(grid_size, x_range, y_range)?, + path_indices: Vec::new(), + dp_max_weights: Vec::new(), + dp_prev_indices: Vec::new(), + curve: None, + stale: false, + lookback, + }) + } + + pub fn update(&mut self, points: impl Iterator) { + for (x, y, w) in points { + let _ = self.grid.add_point(&Point { x, y, weight: w }); + } + self.stale = true; + } + + pub fn fit(&mut self) { + if self.grid.suppress_nonmax().is_err() { + self.curve = None; + self.path_indices.clear(); + self.stale = false; + return; + } + + // Collect non-suppressed nodes for pathfinding + let mut filtered: Vec = self.grid.grid_cells() + .iter() + .filter(|n| !n.suppressed && n.center.weight > 0.0) + .copied() + .collect(); + + // Pathfinding with reused buffers + let path_points = pathfinding::find_optimal_path( + &mut filtered, + self.lookback, + &mut self.dp_max_weights, + &mut self.dp_prev_indices, + ); + + // Store path indices by matching path points back to grid cells + self.path_indices.clear(); + for pp in &path_points { + if let Some(idx) = self.grid.grid_cells().iter().position(|n| { + (n.center.x - pp.x).abs() < 1e-9 && (n.center.y - pp.y).abs() < 1e-9 + }) { + self.path_indices.push(idx); + } + } + + self.curve = CalibrationCurve::new(path_points).ok(); + self.stale = false; + } + + pub fn reset(&mut self) { + self.grid.reset(); + self.curve = None; + self.path_indices.clear(); + self.stale = false; + } + + pub fn grid_cells(&self) -> &[grid::Node] { + self.grid.grid_cells() + } + + pub fn path_indices(&self) -> &[usize] { + &self.path_indices + } + + pub fn curve(&self) -> Option<&CalibrationCurve> { + self.curve.as_ref() + } + + pub fn 
is_stale(&self) -> bool { + self.stale + } +} + +#[cfg(test)] +mod calibration_state_tests { + use super::*; + + #[test] + fn test_update_fit_cycle() { + let mut state = CalibrationState::new(10, (0.0, 100.0), (0.0, 100.0), 30).unwrap(); + let points: Vec<(f64, f64, f64)> = (0..10) + .map(|i| { + let v = (i as f64) * 10.0 + 5.0; + (v, v, 1.0) + }) + .collect(); + + state.update(points.into_iter()); + assert!(state.is_stale()); + + state.fit(); + assert!(!state.is_stale()); + assert!(state.curve().is_some()); + + let curve = state.curve().unwrap(); + let pred = curve.predict(50.0).unwrap(); + assert!((pred - 50.0).abs() < 5.0, "predicted {} expected ~50.0", pred); + } + + #[test] + fn test_reset_clears_state() { + let mut state = CalibrationState::new(10, (0.0, 100.0), (0.0, 100.0), 30).unwrap(); + let points = vec![(25.0, 25.0, 1.0), (75.0, 75.0, 1.0)]; + state.update(points.into_iter()); + state.fit(); + assert!(state.curve().is_some()); + + state.reset(); + assert!(state.curve().is_none()); + assert!(state.path_indices().is_empty()); + assert!(!state.is_stale()); + } + + #[test] + fn test_refit_after_reset_update() { + let mut state = CalibrationState::new(10, (0.0, 100.0), (0.0, 100.0), 30).unwrap(); + + // First fit: y = x + let points1: Vec<_> = (0..10).map(|i| ((i as f64) * 10.0 + 5.0, (i as f64) * 10.0 + 5.0, 1.0)).collect(); + state.update(points1.into_iter()); + state.fit(); + let curve1_pred = state.curve().unwrap().predict(50.0).unwrap(); + + // Reset and refit: y = 2x + state.reset(); + let points2: Vec<_> = (0..10).map(|i| ((i as f64) * 10.0 + 5.0, (i as f64) * 20.0 + 5.0, 1.0)).collect(); + state.update(points2.into_iter()); + state.fit(); + let curve2_pred = state.curve().unwrap().predict(50.0).unwrap(); + + assert!((curve2_pred - curve1_pred).abs() > 10.0); + } +} + /// Computes the min and max values from an iterator of f64 values. /// /// # Returns From 66d3aca7c6a1291dd9d3dd2603075da9b2b02e36 Mon Sep 17 00:00:00 2001 From: "J. 
Sebastian Paez" Date: Fri, 10 Apr 2026 09:21:47 -0700 Subject: [PATCH 22/64] feat(timsseek): add library_rt_seconds to CalibrantCandidate, add CalibrantHeap::iter() --- rust/timsseek/src/scoring/pipeline.rs | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/rust/timsseek/src/scoring/pipeline.rs b/rust/timsseek/src/scoring/pipeline.rs index 6987373..ffbc4cc 100644 --- a/rust/timsseek/src/scoring/pipeline.rs +++ b/rust/timsseek/src/scoring/pipeline.rs @@ -68,6 +68,7 @@ pub struct CalibrantCandidate { pub score: f32, pub apex_rt_seconds: f32, pub speclib_index: usize, + pub library_rt_seconds: f32, } impl PartialEq for CalibrantCandidate { @@ -133,6 +134,11 @@ impl CalibrantHeap { pub fn len(&self) -> usize { self.heap.len() } + + /// Iterate over heap contents. Order is arbitrary (not sorted by score). + pub fn iter(&self) -> impl Iterator { + self.heap.iter().map(|r| &r.0) + } } /// Calibration configuration — all tunable parameters with defaults. @@ -811,6 +817,7 @@ impl Scorer { score: loc.score, apex_rt_seconds: loc.retention_time_ms as f32 / 1000.0, speclib_index: speclib_offset + chunk_idx, + library_rt_seconds: item.query.rt_seconds(), }); } (scorer, heap) @@ -833,6 +840,7 @@ impl Scorer { score: loc.score, apex_rt_seconds: loc.retention_time_ms as f32 / 1000.0, speclib_index: speclib_offset + chunk_idx, + library_rt_seconds: item.query.rt_seconds(), }); } } From ba8cf65a57444c1185171f025f387629cb75f99f Mon Sep 17 00:00:00 2001 From: "J. 
Sebastian Paez" Date: Fri, 10 Apr 2026 09:36:18 -0700 Subject: [PATCH 23/64] refactor(timsseek): rename ApexFinder -> TraceScorer, add compute_traces + cached profiles --- rust/timsseek/src/scoring/apex_finding.rs | 69 +++++++++++++++++++---- 1 file changed, 58 insertions(+), 11 deletions(-) diff --git a/rust/timsseek/src/scoring/apex_finding.rs b/rust/timsseek/src/scoring/apex_finding.rs index 674530b..91537d9 100644 --- a/rust/timsseek/src/scoring/apex_finding.rs +++ b/rust/timsseek/src/scoring/apex_finding.rs @@ -7,10 +7,10 @@ //! # Usage //! //! ```ignore -//! use timsseek::scoring::apex_finding::{ApexFinder, CandidateContext}; +//! use timsseek::scoring::apex_finding::{TraceScorer, CandidateContext}; //! -//! // 1. Create a reusable finder (one per thread) -//! let mut finder = ApexFinder::new(chromatogram_collector.num_cycles()); +//! // 1. Create a reusable scorer (one per thread) +//! let mut scorer = TraceScorer::new(chromatogram_collector.num_cycles()); //! //! // 2. Create the context for a specific query //! let context = CandidateContext { @@ -20,8 +20,8 @@ //! chromatograms: chromatogram_collector, //! }; //! -//! // 3. Score (reusing the finder's internal buffers) -//! let score = finder.find_apex(&context, rt_mapping_fn).unwrap(); +//! // 3. Score (reusing the scorer's internal buffers) +//! let score = scorer.find_apex(&context, rt_mapping_fn).unwrap(); //! println!("Found apex at RT: {} ms with score {}", score.retention_time_ms, score.score); //! ``` @@ -219,15 +219,20 @@ impl ElutionTraces { } } +/// Backward compatibility alias. Migrate callers to `TraceScorer`. +pub type ApexFinder = TraceScorer; + /// The core engine for finding peptide apexes. 
#[derive(Debug)] -pub struct ApexFinder { +pub struct TraceScorer { pub traces: ElutionTraces, - buffers: ApexFinderBuffers, + buffers: TraceScorerBuffers, + cosine_profile: Vec, + scribe_profile: Vec, } #[derive(Debug)] -struct ApexFinderBuffers { +struct TraceScorerBuffers { /// Cosine numerator: sum(obs * sqrt(exp)) per cycle. temp_ms2_dot_prod: Vec, /// Cosine denominator: sum(obs^2) per cycle. @@ -238,7 +243,7 @@ struct ApexFinderBuffers { temp_raw_intensity_sum: Vec, } -impl ApexFinderBuffers { +impl TraceScorerBuffers { fn new(size: usize) -> Self { Self { temp_ms2_dot_prod: vec![0.0f32; size], @@ -263,14 +268,20 @@ impl ApexFinderBuffers { } } -impl ApexFinder { +impl TraceScorer { pub fn new(capacity: usize) -> Self { Self { traces: ElutionTraces::new_with_capacity(capacity), - buffers: ApexFinderBuffers::new(capacity), + buffers: TraceScorerBuffers::new(capacity), + cosine_profile: Vec::with_capacity(capacity), + scribe_profile: Vec::with_capacity(capacity), } } + pub fn traces(&self) -> &ElutionTraces { + &self.traces + } + /// Build cosine and scribe profiles from traces. /// cosine_profile[i] = cos^3 * intensity, scribe_profile[i] = scribe * intensity. fn build_profiles(&self) -> (Vec, Vec) { @@ -286,6 +297,42 @@ impl ApexFinder { (cosine_profile, scribe_profile) } + /// Build cosine and scribe profiles into cached fields, avoiding allocation. + fn build_profiles_cached(&mut self) { + let n = self.traces.cosine_trace.len(); + self.cosine_profile.clear(); + self.cosine_profile.reserve(n); + self.scribe_profile.clear(); + self.scribe_profile.reserve(n); + + for i in 0..n { + let cos = self.traces.cosine_trace[i]; + let intensity = self.traces.ms2_log_intensity[i]; + self.cosine_profile.push(cos * cos * cos * intensity); + self.scribe_profile.push(self.traces.ms2_scribe[i] * intensity); + } + } + + /// Stage A: Compute all per-cycle traces and cached profiles. + /// O(fragments x cycles). Call once per extraction. 
+ pub fn compute_traces( + &mut self, + scoring_ctx: &Extraction, + ) -> Result<(), DataProcessingError> { + let n_cycles = scoring_ctx.chromatograms.num_cycles(); + + self.traces.clear(); + self.traces.resize(n_cycles); + self.buffers.clear(); + self.buffers.resize(n_cycles); + + self.compute_pass_1(scoring_ctx)?; + self.compute_main_score_trace(); + self.build_profiles_cached(); + + Ok(()) + } + /// Phase 1: Find apex location using broad extraction. /// /// Returns a lightweight `ApexLocation` with just the peak location and From 739df173788833a1508bd9d1eaed1e4f3247e42e Mon Sep 17 00:00:00 2001 From: "J. Sebastian Paez" Date: Fri, 10 Apr 2026 09:51:22 -0700 Subject: [PATCH 24/64] feat(timsseek): add suggest_apex and score_at, rewrite find_apex/find_apex_location as wrappers --- rust/timsseek/src/scoring/apex_finding.rs | 303 +++++++++------------- 1 file changed, 128 insertions(+), 175 deletions(-) diff --git a/rust/timsseek/src/scoring/apex_finding.rs b/rust/timsseek/src/scoring/apex_finding.rs index 91537d9..4feff45 100644 --- a/rust/timsseek/src/scoring/apex_finding.rs +++ b/rust/timsseek/src/scoring/apex_finding.rs @@ -282,21 +282,6 @@ impl TraceScorer { &self.traces } - /// Build cosine and scribe profiles from traces. - /// cosine_profile[i] = cos^3 * intensity, scribe_profile[i] = scribe * intensity. - fn build_profiles(&self) -> (Vec, Vec) { - let n = self.traces.cosine_trace.len(); - let mut cosine_profile = Vec::with_capacity(n); - let mut scribe_profile = Vec::with_capacity(n); - for i in 0..n { - let cos = self.traces.cosine_trace[i]; - let intensity = self.traces.ms2_log_intensity[i]; - cosine_profile.push(cos * cos * cos * intensity); - scribe_profile.push(self.traces.ms2_scribe[i] * intensity); - } - (cosine_profile, scribe_profile) - } - /// Build cosine and scribe profiles into cached fields, avoiding allocation. 
fn build_profiles_cached(&mut self) { let n = self.traces.cosine_trace.len(); @@ -333,31 +318,14 @@ impl TraceScorer { Ok(()) } - /// Phase 1: Find apex location using broad extraction. - /// - /// Returns a lightweight `ApexLocation` with just the peak location and - /// a basic score (apex profile value). Sufficient for calibrant ranking. - #[cfg_attr( - feature = "instrumentation", - tracing::instrument(skip(self, scoring_ctx, rt_mapper), level = "trace") - )] - pub fn find_apex_location( - &mut self, - scoring_ctx: &Extraction, + /// Stage B: Suggest best apex from precomputed traces. + /// Pure peak-pick on apex_profile — O(cycles). Truly cheap. + /// Returns apex_profile[max_loc] as the ranking score. + pub fn suggest_apex( + &self, rt_mapper: &dyn Fn(usize) -> u32, + cycle_offset: usize, ) -> Result { - let collector = &scoring_ctx.chromatograms; - let n_cycles = collector.num_cycles(); - - self.traces.clear(); - self.traces.resize(n_cycles); - self.buffers.clear(); - self.buffers.resize(n_cycles); - - self.compute_pass_1(scoring_ctx)?; - self.compute_main_score_trace(); - - // Peak-pick on apex profile let peak_picker = PeakPicker::new(&self.traces.apex_profile); let (max_val, max_loc) = match peak_picker.next_peak() { Some(p) => p, @@ -374,31 +342,136 @@ impl TraceScorer { } let (rising_cycles, falling_cycles) = self.calculate_rise_and_fall_cycles(max_loc); - let cycle_offset = scoring_ctx.chromatograms.cycle_offset(); let retention_time_ms = rt_mapper(max_loc + cycle_offset); - // Compute split product score for calibrant ranking - let (cosine_profile, scribe_profile) = self.build_profiles(); + Ok(ApexLocation { + score: max_val, // apex_profile peak value, NOT split_product + retention_time_ms, + apex_cycle: max_loc, + rising_cycles, + falling_cycles, + }) + } + + /// Stage C: Compute full score at a given cycle index. + /// Uses precomputed traces and cached profiles. + /// `suggested` provides peak context for delta/baseline calculations. 
+ pub fn score_at( + &mut self, // needs &mut for PeakPicker on traces + scoring_ctx: &Extraction, + cycle: usize, + suggested: &ApexLocation, + rt_mapper: &dyn Fn(usize) -> u32, + ) -> Result { + let cycle_offset = scoring_ctx.chromatograms.cycle_offset(); + // Split product using cached profiles let split_product = compute_split_product( - &cosine_profile, - &scribe_profile, + &self.cosine_profile, + &self.scribe_profile, &scoring_ctx.chromatograms.fragments, &scoring_ctx.expected_intensities.fragment_intensities, ); - Ok(ApexLocation { - score: split_product.base_score, + // Delta scores: compare apex_profile[cycle] against global 2nd/3rd peaks + let mut peak_picker = PeakPicker::new(&self.traces.apex_profile); + let cycle_val = self.traces.apex_profile[cycle]; + + // Mask the suggested apex region (stable reference for deltas) + peak_picker.mask_peak( + suggested.apex_cycle, + suggested.rising_cycles as usize, + suggested.falling_cycles as usize, + 2, + ); + let (next_val, next_loc) = peak_picker.next_peak().unwrap_or((0.0, cycle)); + let (next_raise, next_fall) = self.calculate_rise_and_fall_cycles(next_loc); + peak_picker.mask_peak(next_loc, next_raise as usize, next_fall as usize, 1); + let (second_next_val, _) = peak_picker.next_peak().unwrap_or((0.0, cycle)); + + let delta_next = cycle_val - next_val; + let delta_second_next = cycle_val - second_next_val; + + // Joint apex: find precursor-fragment agreement, but use clicked cycle if far from it + let joint_apex = find_joint_apex(&self.cosine_profile, &self.traces.ms1_precursor_trace); + let effective_apex = if (joint_apex as i64 - cycle as i64).unsigned_abs() as usize <= 3 { + joint_apex + } else { + cycle + }; + + // 11 features at effective apex + let n_cycles = self.cosine_profile.len(); + let features = compute_apex_features( + &scoring_ctx.chromatograms.fragments, + &scoring_ctx.chromatograms.precursors, + &scoring_ctx.expected_intensities, + &self.cosine_profile, + &self.traces.ms1_precursor_trace, + 
effective_apex, + n_cycles, + ); + + let score = compute_weighted_score(split_product.base_score, &features); + let retention_time_ms = rt_mapper(effective_apex + cycle_offset); + + // Intensity sums at effective apex + let (ms1_summed_intensity, _) = + self.sum_intensities_at(&scoring_ctx.chromatograms.precursors, effective_apex); + let (ms2_summed_intensity, ms2_npeaks) = + self.sum_intensities_at(&scoring_ctx.chromatograms.fragments, effective_apex); + + // Baseline lambda uses suggested apex's rise/fall (stable window) + let lambda = self.calculate_baseline_lambda( + suggested.apex_cycle, + suggested.rising_cycles, + suggested.falling_cycles, + ); + let norm_lazy_std = lambda.sqrt().max(1.0) as f32; + let lazyscore_at = self.traces.ms2_lazyscore[effective_apex]; + let lazyscore_z = lazyscore_at / norm_lazy_std; + + if lazyscore_z.is_nan() { + return Err(DataProcessingError::ExpectedFiniteNonNanData { + context: format!("Lazy score is NaN {} and {}", lazyscore_at, norm_lazy_std), + }); + } + + Ok(ApexScore { + score, retention_time_ms, - apex_cycle: max_loc, - rising_cycles, - falling_cycles, + joint_apex_cycle: effective_apex, + split_product, + features, + delta_next, + delta_second_next, + lazyscore: lazyscore_at, + lazyscore_vs_baseline: (lazyscore_at as f64 / lambda) as f32, + lazyscore_z, + npeaks: ms2_npeaks as u8, + ms2_summed_intensity, + ms1_summed_intensity, + rising_cycles: suggested.rising_cycles, + falling_cycles: suggested.falling_cycles, }) } - /// Phase 3: Full scoring on a (narrow) extraction. - /// - /// Computes traces, apex profile, split product, 11 features, and weighted score. + /// Convenience: compute_traces + suggest_apex. Migration aid. 
+ #[cfg_attr( + feature = "instrumentation", + tracing::instrument(skip(self, scoring_ctx, rt_mapper), level = "trace") + )] + pub fn find_apex_location( + &mut self, + scoring_ctx: &Extraction, + rt_mapper: &dyn Fn(usize) -> u32, + ) -> Result { + self.compute_traces(scoring_ctx)?; + let cycle_offset = scoring_ctx.chromatograms.cycle_offset(); + self.suggest_apex(rt_mapper, cycle_offset) + } + + /// Convenience: compute_traces + suggest_apex + score_at. Migration aid. #[cfg_attr( feature = "instrumentation", tracing::instrument(skip(self, scoring_ctx, rt_mapper), level = "trace") @@ -408,23 +481,10 @@ impl TraceScorer { scoring_ctx: &Extraction, rt_mapper: &dyn Fn(usize) -> u32, ) -> Result { - let collector = &scoring_ctx.chromatograms; - let n_cycles = collector.num_cycles(); - - // 1. Reset buffers - self.traces.clear(); - self.traces.resize(n_cycles); - self.buffers.clear(); - self.buffers.resize(n_cycles); - - // 2. Compute per-cycle scores (single pass) - self.compute_pass_1(scoring_ctx)?; - - // 3. Compute apex profile (cos^3 * I combined with scribe * I) - self.compute_main_score_trace(); - - // 4. 
Find apex and extract features - self.extract_apex_score(scoring_ctx, &rt_mapper) + self.compute_traces(scoring_ctx)?; + let cycle_offset = scoring_ctx.chromatograms.cycle_offset(); + let loc = self.suggest_apex(rt_mapper, cycle_offset)?; + self.score_at(scoring_ctx, loc.apex_cycle, &loc, rt_mapper) } /// Single-pass scoring: cosine (sqrt-transformed), scribe, lazyscore, @@ -604,113 +664,6 @@ impl TraceScorer { } } - fn extract_apex_score( - &self, - scoring_ctx: &Extraction, - rt_mapper: &dyn Fn(usize) -> u32, - ) -> Result { - let mut peak_picker = PeakPicker::new(&self.traces.apex_profile); - - // Find best peak - let (max_val, max_loc) = match peak_picker.next_peak() { - Some(p) => p, - None => { - return Err(DataProcessingError::ExpectedNonEmptyData { - context: Some("No main score found".into()), - }); - } - }; - - if max_val == 0.0 { - return Err(DataProcessingError::ExpectedNonEmptyData { - context: Some("No non-0 main score".into()), - }); - } - - // Peak shape (rise/fall) for delta computation - let (rising_cycles, falling_cycles) = self.calculate_rise_and_fall_cycles(max_loc); - - // Mask and find next peaks for delta scores - peak_picker.mask_peak(max_loc, rising_cycles as usize, falling_cycles as usize, 2); - let (next_val, next_loc) = peak_picker.next_peak().unwrap_or((0.0, max_loc)); - let (next_raise, next_fall) = self.calculate_rise_and_fall_cycles(next_loc); - peak_picker.mask_peak(next_loc, next_raise as usize, next_fall as usize, 1); - let (second_next_val, _) = peak_picker.next_peak().unwrap_or((0.0, max_loc)); - - let delta_next = max_val - next_val; - let delta_second_next = max_val - second_next_val; - - // Build intermediate profiles for split product and features - let (cosine_profile, scribe_profile) = self.build_profiles(); - - // Split product score (independent cosine/scribe apexes) - let split_product = compute_split_product( - &cosine_profile, - &scribe_profile, - &scoring_ctx.chromatograms.fragments, - 
&scoring_ctx.expected_intensities.fragment_intensities, - ); - - // Joint precursor-fragment apex - let joint_apex = find_joint_apex(&cosine_profile, &self.traces.ms1_precursor_trace); - - // 11 features at joint apex - let n_cycles = cosine_profile.len(); - let features = compute_apex_features( - &scoring_ctx.chromatograms.fragments, - &scoring_ctx.chromatograms.precursors, - &scoring_ctx.expected_intensities, - &cosine_profile, - &self.traces.ms1_precursor_trace, - joint_apex, - n_cycles, - ); - - // Weighted final score - let score = compute_weighted_score(split_product.base_score, &features); - - // RT at joint apex - let cycle_offset = scoring_ctx.chromatograms.cycle_offset(); - let global_loc = joint_apex + cycle_offset; - let retention_time_ms = rt_mapper(global_loc); - - // Intensity counts at joint apex - let (ms1_summed_intensity, _) = - self.sum_intensities_at(&scoring_ctx.chromatograms.precursors, joint_apex); - let (ms2_summed_intensity, ms2_npeaks) = - self.sum_intensities_at(&scoring_ctx.chromatograms.fragments, joint_apex); - - // Lazyscore baseline stats - let lambda = self.calculate_baseline_lambda(max_loc, rising_cycles, falling_cycles); - let k = self.traces.ms2_lazyscore[joint_apex] as f64; - let norm_lazy_std = lambda.sqrt().max(1.0) as f32; - let lazyscore_z = self.traces.ms2_lazyscore[joint_apex] / norm_lazy_std; - - if lazyscore_z.is_nan() { - return Err(DataProcessingError::ExpectedFiniteNonNanData { - context: format!("Lazy score is NaN {} and {}", k, norm_lazy_std), - }); - } - - Ok(ApexScore { - score, - retention_time_ms, - joint_apex_cycle: joint_apex, - split_product, - features, - delta_next, - delta_second_next, - lazyscore: self.traces.ms2_lazyscore[joint_apex], - lazyscore_vs_baseline: (k / lambda) as f32, - lazyscore_z, - npeaks: ms2_npeaks as u8, - ms2_summed_intensity, - ms1_summed_intensity, - rising_cycles, - falling_cycles, - }) - } - fn calculate_rise_and_fall_cycles(&self, max_loc: usize) -> (u8, u8) { let score_slice = 
&self.traces.ms2_lazyscore; let raising = count_falling_steps(max_loc, -1, score_slice); From 39cbcf3ce5cbce7373a80a011029647964655910 Mon Sep 17 00:00:00 2001 From: "J. Sebastian Paez" Date: Fri, 10 Apr 2026 09:55:58 -0700 Subject: [PATCH 25/64] refactor(timsseek): extract shared build_extraction() function Move the core extraction logic from Scorer::build_broad_extraction into a standalone generic function that works with any KeyLike type (IonAnnot or String), enabling reuse by both the CLI scorer and the viewer. --- rust/timsseek/src/scoring/extraction.rs | 68 +++++++++++++++++++++++++ rust/timsseek/src/scoring/mod.rs | 1 + rust/timsseek/src/scoring/pipeline.rs | 68 +++++-------------------- rust/timsseek/src/traits.rs | 8 +++ 4 files changed, 89 insertions(+), 56 deletions(-) create mode 100644 rust/timsseek/src/scoring/extraction.rs diff --git a/rust/timsseek/src/scoring/extraction.rs b/rust/timsseek/src/scoring/extraction.rs new file mode 100644 index 0000000..cfb45e3 --- /dev/null +++ b/rust/timsseek/src/scoring/extraction.rs @@ -0,0 +1,68 @@ +//! Shared extraction builder -- used by both CLI (Scorer) and viewer. + +use crate::models::ExpectedIntensities; +use crate::scoring::apex_finding::Extraction; +use crate::traits::MappableRTCycles; +use timsquery::utils::TupleRange; +use timsquery::{ + ChromatogramCollector, + KeyLike, + OptionallyRestricted, + QueriableData, + Tolerance, +}; + +use super::pipeline::SkippingReason; + +/// Build an Extraction from an elution group + index query. +/// +/// RT range derived internally from elution_group.rt_seconds() + tolerance, +/// clamped to the index's cycle_mapping range. 
+/// +/// top_n_fragments: +/// Some(n) -> filter_zero_intensity_ions + select_top_n_fragments(n) +/// None -> no filtering, all ions kept +pub(crate) fn build_extraction( + elution_group: &timsquery::TimsElutionGroup, + mut expected_intensities: ExpectedIntensities, + index: &I, + tolerance: &Tolerance, + top_n_fragments: Option, +) -> Result, SkippingReason> +where + T: KeyLike, + I: QueriableData> + MappableRTCycles, +{ + let cycle_mapping = index.ms1_cycle_mapping(); + let max_range = cycle_mapping.range_milis(); + let max_range = TupleRange::try_new(max_range.0, max_range.1) + .expect("Reference RTs should be sorted and valid"); + + let rt_range = match tolerance.rt_range_as_milis(elution_group.rt_seconds()) { + OptionallyRestricted::Unrestricted => max_range, + OptionallyRestricted::Restricted(r) => r, + }; + + if !max_range.intersects(rt_range) { + return Err(SkippingReason::RetentionTimeOutOfBounds); + } + + let mut agg = ChromatogramCollector::new( + elution_group.clone(), + rt_range, + cycle_mapping, + ) + .map_err(|_| SkippingReason::RetentionTimeOutOfBounds)?; + + index.add_query(&mut agg, tolerance); + + if let Some(n) = top_n_fragments { + super::pipeline::filter_zero_intensity_ions(&mut agg, &mut expected_intensities); + super::pipeline::select_top_n_fragments(&mut agg, &mut expected_intensities, n); + } + + Ok(Extraction { + expected_intensities, + chromatograms: agg, + }) +} diff --git a/rust/timsseek/src/scoring/mod.rs b/rust/timsseek/src/scoring/mod.rs index 1a166b6..05f269b 100644 --- a/rust/timsseek/src/scoring/mod.rs +++ b/rust/timsseek/src/scoring/mod.rs @@ -1,5 +1,6 @@ mod accumulator; pub mod apex_finding; +pub mod extraction; pub mod full_results; mod offsets; pub mod parquet_writer; diff --git a/rust/timsseek/src/scoring/pipeline.rs b/rust/timsseek/src/scoring/pipeline.rs index ffbc4cc..40bd644 100644 --- a/rust/timsseek/src/scoring/pipeline.rs +++ b/rust/timsseek/src/scoring/pipeline.rs @@ -176,7 +176,7 @@ const TOP_N_FRAGMENTS: usize = 
8; /// /// Removes lower-ranked fragments from the chromatogram collector (fragments array + eg) /// and from expected intensities, maintaining the invariant that all three agree on count. -fn select_top_n_fragments( +pub(crate) fn select_top_n_fragments( agg: &mut ChromatogramCollector, expected: &mut crate::ExpectedIntensities, n: usize, @@ -239,7 +239,7 @@ fn select_top_n_fragments( feature = "instrumentation", tracing::instrument(skip_all, level = "trace") )] -fn filter_zero_intensity_ions( +pub(crate) fn filter_zero_intensity_ions( agg: &mut ChromatogramCollector, expected: &mut crate::ExpectedIntensities, ) { @@ -316,7 +316,7 @@ pub struct Scorer { pub fragmented_range: TupleRange, } -enum SkippingReason { +pub enum SkippingReason { // TODO: Implement more options and a counter ... RetentionTimeOutOfBounds, } @@ -336,68 +336,24 @@ impl Scorer { ), SkippingReason, > { - let max_range = self.index.ms1_cycle_mapping().range_milis(); - let max_range = TupleRange::try_new(max_range.0, max_range.1) - .expect("Reference RTs should be sorted and valid"); - let rt_range = match self - .broad_tolerance - .rt_range_as_milis(item.query.rt_seconds()) - { - OptionallyRestricted::Unrestricted => max_range, - OptionallyRestricted::Restricted(r) => r, - }; - - if !max_range.intersects(rt_range) { - return Err(SkippingReason::RetentionTimeOutOfBounds); - } - let mut agg = tracing::span!( - tracing::Level::TRACE, - "build_broad_extraction::new_collector" - ).in_scope(|| { - match ChromatogramCollector::new( - item.query.clone(), - rt_range, - self.index.ms1_cycle_mapping(), - ) { - Ok(collector) => collector, - Err(e) => { - let tol_range = self.broad_tolerance.rt_range_as_milis(item.query.rt_seconds()); - panic!( - "Failed to create ChromatogramCollector for query id {:#?}: {:?} with RT tolerance {:#?}", - item.query, e, tol_range, - ) - } - } - }); - - tracing::span!(tracing::Level::TRACE, "build_broad_extraction::add_query").in_scope( - || { - self.index.add_query(&mut agg, 
&self.broad_tolerance); - }, - ); - - // Filter out zero-intensity ions and update expected intensities in one pass - let mut expected_intensities = item.expected_intensity.clone(); - filter_zero_intensity_ions(&mut agg, &mut expected_intensities); - - // Retain only top-N fragments by predicted intensity for scoring - select_top_n_fragments(&mut agg, &mut expected_intensities, TOP_N_FRAGMENTS); + let extraction = super::extraction::build_extraction( + &item.query, + item.expected_intensity.clone(), + &self.index, + &self.broad_tolerance, + Some(TOP_N_FRAGMENTS), + )?; let metadata = super::apex_finding::PeptideMetadata { digest: item.digest.clone(), charge: item.query.precursor_charge(), - library_id: agg.eg.id() as u32, + library_id: extraction.chromatograms.eg.id() as u32, query_rt_seconds: item.query.rt_seconds(), ref_mobility_ook0: item.query.mobility_ook0(), ref_precursor_mz: item.query.mono_precursor_mz(), }; - let scoring_ctx = super::apex_finding::Extraction { - expected_intensities, - chromatograms: agg, - }; - - Ok((metadata, scoring_ctx)) + Ok((metadata, extraction)) } /// Calculates the weighted mean ion mobility across fragments and precursors. diff --git a/rust/timsseek/src/traits.rs b/rust/timsseek/src/traits.rs index ca6065d..04d9dda 100644 --- a/rust/timsseek/src/traits.rs +++ b/rust/timsseek/src/traits.rs @@ -86,3 +86,11 @@ impl MappableRTCycles for timscentroid::IndexedTimstofPeaks { self.ms1_cycle_mapping() } } + +impl MappableRTCycles for timsquery::serde::IndexedPeaksHandle { + fn ms1_cycle_mapping( + &self, + ) -> &timscentroid::rt_mapping::CycleToRTMapping { + self.ms1_cycle_mapping() + } +} From f2ca852fc1cd20f1799e242f2a8d961cfb98e3dc Mon Sep 17 00:00:00 2001 From: "J. 
Sebastian Paez" Date: Fri, 10 Apr 2026 10:18:48 -0700 Subject: [PATCH 26/64] refactor(viewer): migrate to TraceScorer (rename only, same behavior) --- rust/timsquery_viewer/src/computed_state.rs | 18 +++++++++--------- 1 file changed, 9 insertions(+), 9 deletions(-) diff --git a/rust/timsquery_viewer/src/computed_state.rs b/rust/timsquery_viewer/src/computed_state.rs index 7e442db..3e35a56 100644 --- a/rust/timsquery_viewer/src/computed_state.rs +++ b/rust/timsquery_viewer/src/computed_state.rs @@ -35,7 +35,7 @@ use timscentroid::rt_mapping::{ use timscentroid::utils::OptionallyRestricted; use timsquery::serde::IndexedPeaksHandle; use timsseek::scoring::apex_finding::{ - ApexFinder, + TraceScorer, ApexScore, Extraction, }; @@ -108,7 +108,7 @@ struct MobilityState { #[derive(Debug, Default)] struct ScratchBuffers { - apex_finder: Option, + trace_scorer: Option, } /// State machine for chromatogram computation lifecycle. @@ -320,11 +320,11 @@ impl ComputedState { #[instrument(skip_all, fields(eg_id = %context.chromatograms.eg.id()))] fn find_apex( - apex_finder: &mut ApexFinder, + trace_scorer: &mut TraceScorer, context: &Extraction, index: &IndexedPeaksHandle, ) -> Result { - apex_finder.find_apex(context, &|idx| { + trace_scorer.find_apex(context, &|idx| { index .ms1_cycle_mapping() .rt_milis_for_index(&MS1CycleIndex::new(idx as u32)) @@ -417,16 +417,16 @@ impl ComputedState { collector.fragments.mz_order.len(), ); - let apex_finder = scratch - .apex_finder - .get_or_insert_with(|| ApexFinder::new(num_cycles)); + let trace_scorer = scratch + .trace_scorer + .get_or_insert_with(|| TraceScorer::new(num_cycles)); let scoring_ctx = Extraction { expected_intensities: expected_intensities.clone(), chromatograms: collector.clone(), }; - let apex_score = match Self::find_apex(apex_finder, &scoring_ctx, index) { + let apex_score = match Self::find_apex(trace_scorer, &scoring_ctx, index) { Ok(score) => { tracing::info!("Apex found at RT={:.2}ms", score.retention_time_ms); 
score @@ -444,7 +444,7 @@ impl ComputedState { let score_lines = ScoreLines::from_scores( apex_score, - &apex_finder.traces, + &trace_scorer.traces, index.ms1_cycle_mapping(), collector.cycle_offset(), ); From 3affdc2a4d957eaf5d991dbe64cbaf75b52beac9 Mon Sep 17 00:00:00 2001 From: "J. Sebastian Paez" Date: Fri, 10 Apr 2026 10:20:20 -0700 Subject: [PATCH 27/64] refactor: remove ApexFinder alias, all code uses TraceScorer --- rust/timsseek/src/scoring/apex_finding.rs | 3 --- rust/timsseek/src/scoring/pipeline.rs | 18 +++++++++--------- 2 files changed, 9 insertions(+), 12 deletions(-) diff --git a/rust/timsseek/src/scoring/apex_finding.rs b/rust/timsseek/src/scoring/apex_finding.rs index 4feff45..4c6bd10 100644 --- a/rust/timsseek/src/scoring/apex_finding.rs +++ b/rust/timsseek/src/scoring/apex_finding.rs @@ -219,9 +219,6 @@ impl ElutionTraces { } } -/// Backward compatibility alias. Migrate callers to `TraceScorer`. -pub type ApexFinder = TraceScorer; - /// The core engine for finding peptide apexes. #[derive(Debug)] pub struct TraceScorer { diff --git a/rust/timsseek/src/scoring/pipeline.rs b/rust/timsseek/src/scoring/pipeline.rs index 40bd644..9b7748e 100644 --- a/rust/timsseek/src/scoring/pipeline.rs +++ b/rust/timsseek/src/scoring/pipeline.rs @@ -6,11 +6,11 @@ //! throughput, it's critical to minimize allocations in the hot path. //! //! Each scoring operation requires several buffers: -//! - `ApexFinder` holds time-series feature buffers (size varies with query) +//! - `TraceScorer` holds time-series feature buffers (size varies with query) //! - Chromatogram collectors (size varies with query complexity) //! //! `prescore_batch` and `score_calibrated_batch` use Rayon's `map_init()` / `fold` to create -//! one `ApexFinder` buffer per thread, which is reused across thousands of queries. +//! one `TraceScorer` buffer per thread, which is reused across thousands of queries. //! //! ## Scoring Pipeline //! 
@@ -44,7 +44,7 @@ use timsquery::{ use super::accumulator::IonSearchAccumulator; use super::apex_finding::{ - ApexFinder, + TraceScorer, ApexLocation, ApexScore, PeptideMetadata, @@ -477,7 +477,7 @@ impl Scorer { item: QueryItemToScore, calibration: &CalibrationResult, ) -> Result { - let mut buffer = ApexFinder::new(self.num_cycles()); + let mut buffer = TraceScorer::new(self.num_cycles()); // Re-implementing logic here because process_query consumes `item` and returns `Option`. // We want intermediate results for `ViewerResult`. @@ -593,7 +593,7 @@ impl Scorer { &self, item: &QueryItemToScore, calibration: &CalibrationResult, - buffer: &mut ApexFinder, + buffer: &mut TraceScorer, timings: &mut ScoreTimings, ) -> Option { let st = Instant::now(); @@ -667,7 +667,7 @@ impl Scorer { items_to_score: &[QueryItemToScore], calibration: &CalibrationResult, ) -> (Vec, ScoreTimings) { - let init_fn = || ApexFinder::new(self.num_cycles()); + let init_fn = || TraceScorer::new(self.num_cycles()); let filter_fn = |x: &&QueryItemToScore| { let tmp = x.query.get_precursor_mz_limits(); let lims = TupleRange::try_new(tmp.0, tmp.1).expect("Should already be ordered"); @@ -715,7 +715,7 @@ impl Scorer { pub fn prescore( &self, item: &QueryItemToScore, - buffer: &mut ApexFinder, + buffer: &mut TraceScorer, ) -> Option<(ApexLocation, PeptideMetadata)> { let (metadata, scoring_ctx) = tracing::span!(tracing::Level::TRACE, "prescore::extraction") .in_scope(|| match self.build_broad_extraction(item) { @@ -761,7 +761,7 @@ impl Scorer { #[cfg(not(feature = "serial_scoring"))] let heap: CalibrantHeap = { let init_fn = - || (ApexFinder::new(self.num_cycles()), CalibrantHeap::new(config.n_calibrants)); + || (TraceScorer::new(self.num_cycles()), CalibrantHeap::new(config.n_calibrants)); items_to_score .par_iter() @@ -787,7 +787,7 @@ impl Scorer { #[cfg(feature = "serial_scoring")] let heap: CalibrantHeap = { - let mut scorer = ApexFinder::new(self.num_cycles()); + let mut scorer = 
TraceScorer::new(self.num_cycles()); let mut heap = CalibrantHeap::new(config.n_calibrants); for (chunk_idx, item) in items_to_score.iter().enumerate().filter(|(_, x)| filter_fn(x)) { From c3429621c887589d2aa5aa5f00556c1361210e39 Mon Sep 17 00:00:00 2001 From: "J. Sebastian Paez" Date: Fri, 10 Apr 2026 10:40:14 -0700 Subject: [PATCH 28/64] feat(viewer): add ViewerCalibrationState with background thread Introduce the calibration state machine and background scoring thread infrastructure for the viewer's live RT calibration panel: - Create calibration.rs with ViewerCalibrationState (Idle/Running/Paused/Done), background thread using AtomicU8 control + thread::park for pause, bounded sync_channel for CalibrationMessage snapshots, and CalibrantHeap accumulation with periodic snapshot sends. - Add Pane::Calibration variant to the dock layout. - Wrap ElutionGroupData in Arc<> for sharing with the background thread. - Add calibration.poll() to the update loop with request_repaint. - Make build_extraction() pub (was pub(crate)) so the viewer can call it. - Add calibrt dependency to the viewer. 
--- Cargo.lock | 1 + rust/timsquery_viewer/Cargo.toml | 1 + rust/timsquery_viewer/src/app.rs | 31 +- rust/timsquery_viewer/src/calibration.rs | 455 +++++++++++++++++++++++ rust/timsquery_viewer/src/main.rs | 1 + rust/timsseek/src/scoring/extraction.rs | 2 +- 6 files changed, 488 insertions(+), 3 deletions(-) create mode 100644 rust/timsquery_viewer/src/calibration.rs diff --git a/Cargo.lock b/Cargo.lock index c6613c1..032190a 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -6062,6 +6062,7 @@ dependencies = [ name = "timsquery_viewer" version = "0.26.0" dependencies = [ + "calibrt", "clap", "eframe", "egui", diff --git a/rust/timsquery_viewer/Cargo.toml b/rust/timsquery_viewer/Cargo.toml index a519025..405edd6 100644 --- a/rust/timsquery_viewer/Cargo.toml +++ b/rust/timsquery_viewer/Cargo.toml @@ -13,6 +13,7 @@ path = "src/main.rs" timsquery = { path = "../timsquery" } timscentroid = { path = "../timscentroid" } timsseek = { path = "../timsseek" } +calibrt = { path = "../calibrt" } # GUI dependencies egui = {version = "0.33", features = ["persistence", "rayon"]} diff --git a/rust/timsquery_viewer/src/app.rs b/rust/timsquery_viewer/src/app.rs index d7b1026..10bc015 100644 --- a/rust/timsquery_viewer/src/app.rs +++ b/rust/timsquery_viewer/src/app.rs @@ -12,6 +12,7 @@ use std::time::Instant; use timsquery::models::tolerance::Tolerance; use timsquery::serde::IndexedPeaksHandle; +use crate::calibration::ViewerCalibrationState; use crate::chromatogram_processor::SmoothingMethod; use crate::cli::Cli; use crate::computed_state::{ @@ -96,6 +97,7 @@ enum Pane { FragmentPlot, ScoresPlot, Mobility, + Calibration, } /// All pane variants that should exist in the dock. 
@@ -108,6 +110,7 @@ const ALL_PANES: &[Pane] = &[ Pane::FragmentPlot, Pane::ScoresPlot, Pane::Mobility, + Pane::Calibration, ]; /// State to be persisted across restarts @@ -157,7 +160,7 @@ pub enum ElutionGroupState { Failed(PathBuf, String), /// Library successfully loaded Loaded { - data: ElutionGroupData, + data: Arc, source: PathBuf, }, } @@ -170,6 +173,14 @@ impl ElutionGroupState { } } + /// Get a cloned Arc handle to the loaded data (for sharing with background threads). + pub fn arc_clone(&self) -> Option> { + match self { + ElutionGroupState::Loaded { data, .. } => Some(Arc::clone(data)), + _ => None, + } + } + pub fn is_none(&self) -> bool { matches!(self, ElutionGroupState::None) } @@ -264,6 +275,9 @@ pub struct ViewerApp { screenshot_state: ScreenshotState, /// Countdown duration in seconds before capture screenshot_delay_secs: f32, + + /// Calibration state machine (background scoring + calibrt) + calibration: ViewerCalibrationState, } fn apply_theme(ctx: &egui::Context, dark_mode: bool) { @@ -336,6 +350,7 @@ impl ViewerApp { cancellation_token: None, screenshot_state: ScreenshotState::default(), screenshot_delay_secs: 3.0, + calibration: ViewerCalibrationState::default(), }; } } else { @@ -352,6 +367,7 @@ impl ViewerApp { Pane::MS2Spectrum, Pane::ScoresPlot, Pane::Mobility, + Pane::Calibration, ]; let dock_state = DockState::new(tabs); @@ -371,6 +387,7 @@ impl ViewerApp { cancellation_token: None, screenshot_state: ScreenshotState::default(), screenshot_delay_secs: 3.0, + calibration: ViewerCalibrationState::default(), } } @@ -954,7 +971,7 @@ impl ViewerApp { let count = egs.len(); tracing::info!("Loaded {} elution groups", count); data.elution_groups = ElutionGroupState::Loaded { - data: egs, + data: Arc::new(egs), source: path, }; // Validate selected_index is within bounds of new library @@ -1361,6 +1378,11 @@ impl eframe::App for ViewerApp { // Check if background computation completed self.check_chromatogram_completion(); + // Poll calibration 
background thread + if self.calibration.poll() { + ctx.request_repaint(); + } + // Generate MS2 spectrum and mobility data if RT was clicked self.handle_rt_click(); @@ -1489,6 +1511,7 @@ impl<'a> TabViewer for AppTabViewer<'a> { Pane::FragmentPlot => "Fragments".into(), Pane::ScoresPlot => "Scores".into(), Pane::Mobility => self.mobility_panel.title().into(), + Pane::Calibration => "Calibration".into(), } } @@ -1642,6 +1665,10 @@ impl<'a> TabViewer for AppTabViewer<'a> { self.mobility_panel .render(ui, self.computed.mobility_data()); } + Pane::Calibration => { + // Placeholder: Task 11 will render the full calibration panel UI. + ui.label("Calibration panel (UI wired in Task 11)"); + } } } diff --git a/rust/timsquery_viewer/src/calibration.rs b/rust/timsquery_viewer/src/calibration.rs new file mode 100644 index 0000000..a95e783 --- /dev/null +++ b/rust/timsquery_viewer/src/calibration.rs @@ -0,0 +1,455 @@ +//! Viewer calibration state machine and background scoring thread. +//! +//! Drives the live RT calibration panel: shuffles elution groups, +//! scores them on a background thread, feeds calibrant candidates to +//! `calibrt::CalibrationState`, and exposes snapshots to the UI via +//! an MPSC channel. 
+ +use std::sync::atomic::{AtomicU8, Ordering}; +use std::sync::mpsc::{self, Receiver, SyncSender}; +use std::sync::Arc; +use std::thread::JoinHandle; + +use calibrt::CalibrationState; +use timscentroid::rt_mapping::{MS1CycleIndex, RTIndex}; +use timsquery::models::tolerance::{ + MobilityTolerance, MzTolerance, QuadTolerance, RtTolerance, Tolerance, +}; +use timsquery::serde::IndexedPeaksHandle; +use timsseek::scoring::apex_finding::TraceScorer; +use timsseek::scoring::extraction::build_extraction; +use timsseek::scoring::pipeline::{CalibrantCandidate, CalibrantHeap}; + +use crate::file_loader::ElutionGroupData; + +// --------------------------------------------------------------------------- +// Thread-control constants (stored in Arc) +// --------------------------------------------------------------------------- + +const CONTROL_RUNNING: u8 = 0; +const CONTROL_PAUSED: u8 = 1; +const CONTROL_STOP_REQUESTED: u8 = 2; + +// --------------------------------------------------------------------------- +// Configuration +// --------------------------------------------------------------------------- + +/// Number of top fragments to keep per elution group during calibration scoring. +const CALIBRATION_TOP_N_FRAGMENTS: usize = 8; + +/// How many scored elution groups between channel snapshots. +const SNAPSHOT_INTERVAL: usize = 100; + +/// Default CalibrantHeap capacity. +const DEFAULT_HEAP_CAPACITY: usize = 2000; + +/// Default calibrt grid size. +const DEFAULT_GRID_SIZE: usize = 100; + +/// Default DP lookback for calibrt pathfinding. +const DEFAULT_LOOKBACK: usize = 30; + +// --------------------------------------------------------------------------- +// Public types +// --------------------------------------------------------------------------- + +/// Phase of the calibration state machine. +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +pub enum CalibrationPhase { + Idle, + Running, + Paused, + Done, +} + +/// Derived tolerance windows (placeholder for downstream use). 
+#[derive(Debug, Clone)] +pub struct DerivedTolerances { + pub rt_tolerance_minutes: f32, +} + +/// Messages sent from the background thread to the UI. +#[derive(Debug)] +pub enum CalibrationMessage { + /// Periodic progress snapshot. + Snapshot { + n_scored: usize, + heap_len: usize, + /// (library_rt_seconds, apex_rt_seconds, score) + points: Vec<(f64, f64, f64)>, + }, + /// Thread completed (all elution groups scored or stopped). + Done { n_scored: usize }, +} + +/// Broad tolerance used for calibration extraction queries. +/// Wider than a typical search to ensure calibrants are found even +/// before accurate RT calibration is available. +fn broad_calibration_tolerance() -> Tolerance { + Tolerance { + ms: MzTolerance::Ppm((15.0, 15.0)), + rt: RtTolerance::Unrestricted, + mobility: MobilityTolerance::Pct((5.0, 5.0)), + quad: QuadTolerance::Absolute((0.1, 0.1)), + } +} + +// --------------------------------------------------------------------------- +// ViewerCalibrationState +// --------------------------------------------------------------------------- + +/// Owns the calibration background thread and collects results. +pub struct ViewerCalibrationState { + pub phase: CalibrationPhase, + pub calibration_state: Option, + pub generation: u64, + pub n_scored: usize, + pub n_calibrants: usize, + pub heap_capacity: usize, + pub elution_group_count: usize, + pub derived_tolerances: Option, + + thread_handle: Option>, + thread_control: Arc, + receiver: Option>, + + /// Latest calibrant points: (library_rt, apex_rt, score). 
+ pub snapshot_points: Vec<(f64, f64, f64)>, +} + +impl Default for ViewerCalibrationState { + fn default() -> Self { + Self { + phase: CalibrationPhase::Idle, + calibration_state: None, + generation: 0, + n_scored: 0, + n_calibrants: 0, + heap_capacity: DEFAULT_HEAP_CAPACITY, + elution_group_count: 0, + derived_tolerances: None, + thread_handle: None, + thread_control: Arc::new(AtomicU8::new(CONTROL_STOP_REQUESTED)), + receiver: None, + snapshot_points: Vec::new(), + } + } +} + +impl ViewerCalibrationState { + /// Start the calibration background thread. + /// + /// Requires both raw data and elution groups to be loaded. + /// If already running, this is a no-op. + pub fn start( + &mut self, + index: Arc, + elution_groups: Arc, + ) { + if self.phase == CalibrationPhase::Running { + return; + } + + // Increment generation to invalidate stale data. + self.generation += 1; + self.n_scored = 0; + self.n_calibrants = 0; + self.snapshot_points.clear(); + self.elution_group_count = elution_groups.len(); + + // Build calibration state with RT range from the data. + let cycle_mapping = index.ms1_cycle_mapping(); + let (rt_min_ms, rt_max_ms) = cycle_mapping.range_milis(); + let rt_min_sec = rt_min_ms as f64 / 1000.0; + let rt_max_sec = rt_max_ms as f64 / 1000.0; + + self.calibration_state = CalibrationState::new( + DEFAULT_GRID_SIZE, + (rt_min_sec, rt_max_sec), + (rt_min_sec, rt_max_sec), + DEFAULT_LOOKBACK, + ) + .ok(); + + // Set up channel and control flag. 
+ let (tx, rx) = mpsc::sync_channel::(1); + self.receiver = Some(rx); + + let control = Arc::new(AtomicU8::new(CONTROL_RUNNING)); + self.thread_control = control.clone(); + + let heap_capacity = self.heap_capacity; + + let handle = std::thread::Builder::new() + .name("calibration-bg".into()) + .spawn(move || { + Self::background_loop(index, elution_groups, tx, control, heap_capacity); + }) + .expect("Failed to spawn calibration thread"); + + self.thread_handle = Some(handle); + self.phase = CalibrationPhase::Running; + } + + /// Pause the background thread (it will park until resumed). + pub fn pause(&mut self) { + if self.phase == CalibrationPhase::Running { + self.thread_control + .store(CONTROL_PAUSED, Ordering::Release); + self.phase = CalibrationPhase::Paused; + } + } + + /// Resume a paused background thread. + pub fn resume(&mut self) { + if self.phase == CalibrationPhase::Paused { + self.thread_control + .store(CONTROL_RUNNING, Ordering::Release); + if let Some(handle) = &self.thread_handle { + handle.thread().unpark(); + } + self.phase = CalibrationPhase::Running; + } + } + + /// Request the background thread to stop. + pub fn stop(&mut self) { + self.thread_control + .store(CONTROL_STOP_REQUESTED, Ordering::Release); + // Unpark in case the thread is parked. + if let Some(handle) = &self.thread_handle { + handle.thread().unpark(); + } + } + + /// Stop and reset all state. Returns to Idle. + pub fn reset(&mut self) { + self.stop(); + // Wait for the thread to finish (non-blocking check; if it takes + // too long the Drop impl will also try). + if let Some(handle) = self.thread_handle.take() { + let _ = handle.join(); + } + self.receiver = None; + self.phase = CalibrationPhase::Idle; + self.n_scored = 0; + self.n_calibrants = 0; + self.snapshot_points.clear(); + self.generation += 1; + if let Some(cs) = &mut self.calibration_state { + cs.reset(); + } + } + + /// Drain the channel and update internal state. 
+ /// + /// Returns `true` if any new data was received (caller should + /// `request_repaint`). + pub fn poll(&mut self) -> bool { + let Some(rx) = &self.receiver else { + return false; + }; + + let mut changed = false; + + loop { + match rx.try_recv() { + Ok(CalibrationMessage::Snapshot { + n_scored, + heap_len, + points, + }) => { + self.n_scored = n_scored; + self.n_calibrants = heap_len; + self.snapshot_points = points; + + // Feed points into CalibrationState for curve fitting. + if let Some(cs) = &mut self.calibration_state { + cs.update( + self.snapshot_points + .iter() + .map(|&(lib_rt, apex_rt, score)| (lib_rt, apex_rt, score)), + ); + cs.fit(); + } + changed = true; + } + Ok(CalibrationMessage::Done { n_scored }) => { + self.n_scored = n_scored; + self.phase = CalibrationPhase::Done; + // Clean up thread handle. + if let Some(handle) = self.thread_handle.take() { + let _ = handle.join(); + } + self.receiver = None; + changed = true; + break; + } + Err(mpsc::TryRecvError::Empty) => break, + Err(mpsc::TryRecvError::Disconnected) => { + // Thread exited unexpectedly. + self.phase = CalibrationPhase::Done; + self.thread_handle = None; + self.receiver = None; + changed = true; + break; + } + } + } + + changed + } + + // ----------------------------------------------------------------------- + // Background thread + // ----------------------------------------------------------------------- + + fn background_loop( + index: Arc, + elution_groups: Arc, + tx: SyncSender, + control: Arc, + heap_capacity: usize, + ) { + let n_elution_groups = elution_groups.len(); + if n_elution_groups == 0 { + let _ = tx.send(CalibrationMessage::Done { n_scored: 0 }); + return; + } + + // Deterministic shuffle via simple LCG (no rand dependency). + let mut indices: Vec = (0..n_elution_groups).collect(); + simple_shuffle(&mut indices); + + let tolerance = broad_calibration_tolerance(); + let cycle_mapping = index.ms1_cycle_mapping(); + + // Thread-local scorer. 
+ let n_cycles = cycle_mapping.len(); + let mut scorer = TraceScorer::new(n_cycles); + let mut heap = CalibrantHeap::new(heap_capacity); + + let mut n_scored: usize = 0; + let mut last_snapshot_heap_len: usize = 0; + + for &eg_idx in &indices { + // Check control flag. + loop { + let flag = control.load(Ordering::Acquire); + match flag { + CONTROL_RUNNING => break, + CONTROL_PAUSED => { + std::thread::park(); + // Re-check after unpark. + continue; + } + _ => { + // STOP_REQUESTED or unknown. + let _ = tx.send(CalibrationMessage::Done { n_scored }); + return; + } + } + } + + // Get elution group data. Skip on error. + let Ok((elution_group, expected_intensities)) = elution_groups.get_elem(eg_idx) else { + continue; + }; + + // Build extraction. + let extraction = match build_extraction( + &elution_group, + expected_intensities, + index.as_ref(), + &tolerance, + Some(CALIBRATION_TOP_N_FRAGMENTS), + ) { + Ok(ext) => ext, + Err(_) => continue, + }; + + // Compute traces. + if scorer.compute_traces(&extraction).is_err() { + continue; + } + + // Build RT mapper closure. + let cycle_offset = extraction.chromatograms.cycle_offset(); + let rt_mapper = |idx: usize| -> u32 { + cycle_mapping + .rt_milis_for_index(&MS1CycleIndex::new((idx + cycle_offset) as u32)) + .unwrap_or(0) + }; + + // Suggest apex. + let apex = match scorer.suggest_apex(&rt_mapper, 0) { + Ok(a) => a, + Err(_) => continue, + }; + + let candidate = CalibrantCandidate { + score: apex.score, + apex_rt_seconds: apex.retention_time_ms as f32 / 1000.0, + speclib_index: eg_idx, + library_rt_seconds: elution_group.rt_seconds(), + }; + heap.push(candidate); + n_scored += 1; + + // Periodic snapshot. 
+ if n_scored % SNAPSHOT_INTERVAL == 0 && heap.len() != last_snapshot_heap_len { + last_snapshot_heap_len = heap.len(); + let points: Vec<(f64, f64, f64)> = heap + .iter() + .map(|c| { + ( + c.library_rt_seconds as f64, + c.apex_rt_seconds as f64, + c.score as f64, + ) + }) + .collect(); + + let msg = CalibrationMessage::Snapshot { + n_scored, + heap_len: heap.len(), + points, + }; + // Use try_send: if the channel is full, skip this snapshot. + let _ = tx.try_send(msg); + } + } + + // Final snapshot with Done marker. + let _ = tx.send(CalibrationMessage::Done { n_scored }); + } +} + +impl Drop for ViewerCalibrationState { + fn drop(&mut self) { + self.stop(); + if let Some(handle) = self.thread_handle.take() { + let _ = handle.join(); + } + } +} + +// --------------------------------------------------------------------------- +// Helpers +// --------------------------------------------------------------------------- + +/// Simple deterministic shuffle using a linear congruential generator. +/// Avoids pulling in the `rand` crate just for this. +fn simple_shuffle(indices: &mut [usize]) { + let len = indices.len(); + if len <= 1 { + return; + } + // LCG parameters (Numerical Recipes). 
+ let mut state: u64 = 0xDEAD_BEEF_CAFE_BABE; + for i in (1..len).rev() { + state = state.wrapping_mul(6_364_136_223_846_793_005).wrapping_add(1); + let j = (state >> 33) as usize % (i + 1); + indices.swap(i, j); + } +} diff --git a/rust/timsquery_viewer/src/main.rs b/rust/timsquery_viewer/src/main.rs index aff59f5..4475a26 100644 --- a/rust/timsquery_viewer/src/main.rs +++ b/rust/timsquery_viewer/src/main.rs @@ -1,4 +1,5 @@ mod app; +mod calibration; mod chromatogram_processor; mod cli; mod computed_state; diff --git a/rust/timsseek/src/scoring/extraction.rs b/rust/timsseek/src/scoring/extraction.rs index cfb45e3..4647c43 100644 --- a/rust/timsseek/src/scoring/extraction.rs +++ b/rust/timsseek/src/scoring/extraction.rs @@ -22,7 +22,7 @@ use super::pipeline::SkippingReason; /// top_n_fragments: /// Some(n) -> filter_zero_intensity_ions + select_top_n_fragments(n) /// None -> no filtering, all ions kept -pub(crate) fn build_extraction( +pub fn build_extraction( elution_group: &timsquery::TimsElutionGroup, mut expected_intensities: ExpectedIntensities, index: &I, From 74a5d7bcbe9282357faea98b19e02f108eaef31e Mon Sep 17 00:00:00 2001 From: "J. Sebastian Paez" Date: Fri, 10 Apr 2026 10:46:04 -0700 Subject: [PATCH 29/64] feat(viewer): wire calibration panel UI with grid+curve plot Add render_panel() to ViewerCalibrationState with: - Context-sensitive control buttons (Start/Pause/Resume/Stop/Reset) - Progress counters (scored / total, calibrants / capacity) - egui_plot scatter showing suppressed, retained, and path grid cells - Fitted calibration curve as a cyan line sampled at 200 points - WRMSE display and RT tolerance suggestion with Apply button Also adds CalibrationCurve::points() public accessor to calibrt. 
--- rust/calibrt/src/lib.rs | 5 + rust/timsquery_viewer/src/app.rs | 10 +- rust/timsquery_viewer/src/calibration.rs | 282 +++++++++++++++++++++++ 3 files changed, 295 insertions(+), 2 deletions(-) diff --git a/rust/calibrt/src/lib.rs b/rust/calibrt/src/lib.rs index 4e66a50..f73e68f 100644 --- a/rust/calibrt/src/lib.rs +++ b/rust/calibrt/src/lib.rs @@ -73,6 +73,11 @@ impl CalibrationCurve { Ok(Self { points, slopes }) } + /// Read access to the sorted calibration points. + pub fn points(&self) -> &[Point] { + &self.points + } + pub fn wrmse<'a>(&self, test_points: impl Iterator + 'a) -> f64 { let mut total_error = 0.0; let mut weight: f64 = 0.0; diff --git a/rust/timsquery_viewer/src/app.rs b/rust/timsquery_viewer/src/app.rs index 10bc015..93fe178 100644 --- a/rust/timsquery_viewer/src/app.rs +++ b/rust/timsquery_viewer/src/app.rs @@ -1400,6 +1400,7 @@ impl eframe::App for ViewerApp { mobility_panel: &mut self.mobility_panel, screenshot_delay_secs: &mut self.screenshot_delay_secs, screenshot_state: &mut self.screenshot_state, + calibration: &mut self.calibration, }; egui::CentralPanel::default().show(ctx, |ui| { @@ -1424,6 +1425,7 @@ struct AppTabViewer<'a> { mobility_panel: &'a mut MobilityPanel, screenshot_delay_secs: &'a mut f32, screenshot_state: &'a mut ScreenshotState, + calibration: &'a mut ViewerCalibrationState, } impl<'a> AppTabViewer<'a> { @@ -1666,8 +1668,12 @@ impl<'a> TabViewer for AppTabViewer<'a> { .render(ui, self.computed.mobility_data()); } Pane::Calibration => { - // Placeholder: Task 11 will render the full calibration panel UI. 
- ui.label("Calibration panel (UI wired in Task 11)"); + self.calibration.render_panel( + ui, + &self.data.indexed_data, + &self.data.elution_groups, + &mut self.data.tolerance, + ); } } } diff --git a/rust/timsquery_viewer/src/calibration.rs b/rust/timsquery_viewer/src/calibration.rs index a95e783..a503a3f 100644 --- a/rust/timsquery_viewer/src/calibration.rs +++ b/rust/timsquery_viewer/src/calibration.rs @@ -10,6 +10,8 @@ use std::sync::mpsc::{self, Receiver, SyncSender}; use std::sync::Arc; use std::thread::JoinHandle; +use eframe::egui; + use calibrt::CalibrationState; use timscentroid::rt_mapping::{MS1CycleIndex, RTIndex}; use timsquery::models::tolerance::{ @@ -423,6 +425,286 @@ impl ViewerCalibrationState { // Final snapshot with Done marker. let _ = tx.send(CalibrationMessage::Done { n_scored }); } + + // ----------------------------------------------------------------------- + // UI rendering + // ----------------------------------------------------------------------- + + /// Render the calibration panel inside an egui `Ui`. + /// + /// `indexed_data` and `elution_groups` are needed to enable the Start + /// button (we need both loaded). `tolerance` is written when the user + /// clicks [Apply]. + pub fn render_panel( + &mut self, + ui: &mut egui::Ui, + indexed_data: &crate::app::IndexedDataState, + elution_groups: &crate::app::ElutionGroupState, + tolerance: &mut Tolerance, + ) { + // -- Control buttons -------------------------------------------------- + ui.horizontal(|ui| { + match self.phase { + CalibrationPhase::Idle => { + let both_loaded = matches!( + indexed_data, + crate::app::IndexedDataState::Loaded { .. } + ) && matches!( + elution_groups, + crate::app::ElutionGroupState::Loaded { .. } + ); + if ui + .add_enabled(both_loaded, egui::Button::new("\u{25B6} Start")) + .clicked() + { + // Extract Arc handles from the loaded states. + if let ( + crate::app::IndexedDataState::Loaded { index, .. }, + crate::app::ElutionGroupState::Loaded { data, .. 
}, + ) = (indexed_data, elution_groups) + { + self.start(Arc::clone(index), Arc::clone(data)); + } + } + } + CalibrationPhase::Running => { + if ui.button("\u{23F8} Pause").clicked() { + self.pause(); + } + if ui.button("\u{23F9} Stop").clicked() { + self.stop(); + } + } + CalibrationPhase::Paused => { + if ui.button("\u{25B6} Resume").clicked() { + self.resume(); + } + if ui.button("\u{23F9} Stop").clicked() { + self.stop(); + } + } + CalibrationPhase::Done => { + if ui.button("\u{21BA} Reset").clicked() { + self.reset(); + } + } + } + }); + + ui.add_space(4.0); + + // -- Progress counters ------------------------------------------------ + ui.horizontal(|ui| { + let total = if self.elution_group_count > 0 { + self.elution_group_count + } else { + // Fallback: show "?" until we know the total + 0 + }; + ui.label(format!( + "Scored: {} / {}", + self.n_scored, total + )); + ui.separator(); + ui.label(format!( + "Calibrants: {} / {}", + self.n_calibrants, self.heap_capacity + )); + }); + + ui.add_space(4.0); + ui.separator(); + ui.add_space(4.0); + + // -- Grid + curve plot ------------------------------------------------ + self.render_calibration_plot(ui); + + ui.add_space(4.0); + ui.separator(); + ui.add_space(4.0); + + // -- Tolerance suggestion --------------------------------------------- + self.render_tolerance_suggestion(ui, tolerance); + } + + /// Render the scatter + curve calibration plot. + fn render_calibration_plot(&self, ui: &mut egui::Ui) { + use egui_plot::{Line, Plot, PlotPoints, Points}; + + let plot_id = format!("calibration_plot_{}", self.generation); + let plot = Plot::new(plot_id) + .height(ui.available_height().min(400.0)) + .x_axis_label("Library RT (s)") + .y_axis_label("Measured RT (s)") + .allow_zoom(true) + .allow_drag(true); + + let cal_state = self.calibration_state.as_ref(); + + plot.show(ui, |plot_ui| { + // Grid cells from CalibrationState (if available). 
+ if let Some(cs) = cal_state { + let cells = cs.grid_cells(); + let path_indices = cs.path_indices(); + + // Suppressed cells with any weight: small gray dots + let suppressed_pts: Vec<[f64; 2]> = cells + .iter() + .filter(|n| n.suppressed && n.center.weight > 0.0) + .map(|n| [n.center.x, n.center.y]) + .collect(); + + if !suppressed_pts.is_empty() { + plot_ui.points( + Points::new( + "suppressed", + PlotPoints::new(suppressed_pts), + ) + .color(egui::Color32::from_rgb(140, 140, 140)) + .radius(2.0), + ); + } + + // Retained (non-suppressed) cells with weight: larger blue dots + let retained_pts: Vec<[f64; 2]> = cells + .iter() + .filter(|n| !n.suppressed && n.center.weight > 0.0) + .map(|n| [n.center.x, n.center.y]) + .collect(); + + if !retained_pts.is_empty() { + plot_ui.points( + Points::new( + "retained", + PlotPoints::new(retained_pts), + ) + .color(egui::Color32::from_rgb(70, 130, 230)) + .radius(4.0), + ); + } + + // Path nodes: green dots + let path_pts: Vec<[f64; 2]> = path_indices + .iter() + .filter_map(|&idx| cells.get(idx)) + .map(|n| [n.center.x, n.center.y]) + .collect(); + + if !path_pts.is_empty() { + plot_ui.points( + Points::new( + "path", + PlotPoints::new(path_pts), + ) + .color(egui::Color32::from_rgb(50, 205, 50)) + .radius(5.0), + ); + } + + // Fitted curve: cyan line sampled at 200 points + if let Some(curve) = cs.curve() { + let curve_points = curve.points(); + if curve_points.len() >= 2 { + let x_min = curve_points.first().unwrap().x; + let x_max = curve_points.last().unwrap().x; + let n_samples = 200; + let step = (x_max - x_min) / n_samples as f64; + + let line_pts: Vec<[f64; 2]> = (0..=n_samples) + .filter_map(|i| { + let x = x_min + i as f64 * step; + // predict returns Err for out-of-bounds but we stay in range + let y = match curve.predict(x) { + Ok(y) => y, + Err(calibrt::CalibRtError::OutOfBounds(y)) => y, + Err(_) => return None, + }; + Some([x, y]) + }) + .collect(); + + if !line_pts.is_empty() { + plot_ui.line( + Line::new( + 
"fitted curve", + PlotPoints::new(line_pts), + ) + .color(egui::Color32::from_rgb(0, 220, 220)) + .width(2.0), + ); + } + } + } + } else if !self.snapshot_points.is_empty() { + // Fallback: show raw calibrant points if CalibrationState + // hasn't been built yet (shouldn't normally happen, but + // keeps the plot populated). + let raw_pts: Vec<[f64; 2]> = self + .snapshot_points + .iter() + .map(|&(lib_rt, apex_rt, _)| [lib_rt, apex_rt]) + .collect(); + + plot_ui.points( + Points::new( + "calibrants", + PlotPoints::new(raw_pts), + ) + .color(egui::Color32::from_rgb(70, 130, 230)) + .radius(3.0), + ); + } + }); + } + + /// Render tolerance suggestion and Apply button. + fn render_tolerance_suggestion(&mut self, ui: &mut egui::Ui, tolerance: &mut Tolerance) { + // Compute WRMSE if we have a curve and snapshot points. + let wrmse = self + .calibration_state + .as_ref() + .and_then(|cs| { + let curve = cs.curve()?; + let points: Vec = self + .snapshot_points + .iter() + .map(|&(lib_rt, apex_rt, score)| calibrt::Point { + x: lib_rt, + y: apex_rt, + weight: score, + }) + .collect(); + let val = curve.wrmse(points.iter()); + if val.is_finite() { Some(val) } else { None } + }); + + // Derive a suggested RT tolerance: 3x WRMSE in minutes. 
+ let suggested_rt_min = wrmse.map(|w| (w / 60.0) * 3.0); + + if let Some(derived) = &suggested_rt_min { + self.derived_tolerances = Some(DerivedTolerances { + rt_tolerance_minutes: *derived as f32, + }); + } + + ui.horizontal(|ui| { + if let (Some(rt_min), Some(w)) = (suggested_rt_min, wrmse) { + ui.label(format!( + "Suggested RT: \u{00B1}{:.2} min WRMSE: {:.2} s", + rt_min, w + )); + if ui.button("Apply").clicked() { + let rt_tol = rt_min as f32; + tolerance.rt = RtTolerance::Minutes((rt_tol, rt_tol)); + } + } else if self.phase == CalibrationPhase::Idle { + ui.label("Start calibration to compute RT tolerance suggestion."); + } else { + ui.label("Collecting data..."); + ui.spinner(); + } + }); + } } impl Drop for ViewerCalibrationState { From b000f40b1b11a0306ca3b364d4b0581d95b2d7bf Mon Sep 17 00:00:00 2001 From: "J. Sebastian Paez" Date: Fri, 10 Apr 2026 10:54:57 -0700 Subject: [PATCH 30/64] feat(viewer): add save/load calibration as JSON v1 --- rust/timsquery_viewer/src/calibration.rs | 199 ++++++++++++++++++++++- 1 file changed, 197 insertions(+), 2 deletions(-) diff --git a/rust/timsquery_viewer/src/calibration.rs b/rust/timsquery_viewer/src/calibration.rs index a503a3f..73966dc 100644 --- a/rust/timsquery_viewer/src/calibration.rs +++ b/rust/timsquery_viewer/src/calibration.rs @@ -11,6 +11,7 @@ use std::sync::Arc; use std::thread::JoinHandle; use eframe::egui; +use serde::{Deserialize, Serialize}; use calibrt::CalibrationState; use timscentroid::rt_mapping::{MS1CycleIndex, RTIndex}; @@ -68,6 +69,28 @@ pub enum CalibrationPhase { #[derive(Debug, Clone)] pub struct DerivedTolerances { pub rt_tolerance_minutes: f32, + pub wrmse: f64, +} + +// --------------------------------------------------------------------------- +// Serde types for save/load +// --------------------------------------------------------------------------- + +#[derive(Serialize, Deserialize)] +struct SavedCalibration { + version: String, + rt_range_seconds: [f64; 2], + calibrant_points: 
Vec<[f64; 3]>, // [lib_rt, measured_rt, weight] + tolerances: SavedTolerances, + wrmse: f64, + n_calibrants: usize, + n_scored: usize, + grid_size: usize, +} + +#[derive(Serialize, Deserialize)] +struct SavedTolerances { + rt_minutes: f32, } /// Messages sent from the background thread to the UI. @@ -426,6 +449,108 @@ impl ViewerCalibrationState { let _ = tx.send(CalibrationMessage::Done { n_scored }); } + // ----------------------------------------------------------------------- + // Save / Load + // ----------------------------------------------------------------------- + + /// Serialize the current calibration state to a JSON v1 file. + pub fn save_to_file( + &self, + path: &std::path::Path, + rt_range_seconds: [f64; 2], + ) -> Result<(), String> { + let tol = self.derived_tolerances.as_ref(); + let saved = SavedCalibration { + version: "v1".to_string(), + rt_range_seconds, + calibrant_points: self + .snapshot_points + .iter() + .map(|(x, y, w)| [*x, *y, *w]) + .collect(), + tolerances: SavedTolerances { + rt_minutes: tol.map_or(0.0, |t| t.rt_tolerance_minutes), + }, + wrmse: tol.map_or(0.0, |t| t.wrmse), + n_calibrants: self.n_calibrants, + n_scored: self.n_scored, + grid_size: DEFAULT_GRID_SIZE, + }; + let json = serde_json::to_string_pretty(&saved).map_err(|e| e.to_string())?; + std::fs::write(path, json).map_err(|e| e.to_string()) + } + + /// Deserialize calibration state from a JSON v1 file. + /// + /// Returns an optional warning string (e.g. RT range mismatch). + pub fn load_from_file( + &mut self, + path: &std::path::Path, + raw_rt_range: Option<[f64; 2]>, + ) -> Result, String> { + let json = std::fs::read_to_string(path).map_err(|e| e.to_string())?; + let saved: SavedCalibration = + serde_json::from_str(&json).map_err(|e| e.to_string())?; + + if saved.version != "v1" { + return Err(format!("Unsupported version: {}", saved.version)); + } + + // RT range compatibility check. 
+ let mut warning = None; + if let Some(raw_range) = raw_rt_range { + let overlap_lo = saved.rt_range_seconds[0].max(raw_range[0]); + let overlap_hi = saved.rt_range_seconds[1].min(raw_range[1]); + let overlap = (overlap_hi - overlap_lo).max(0.0); + let saved_span = saved.rt_range_seconds[1] - saved.rt_range_seconds[0]; + if saved_span > 0.0 && overlap / saved_span < 0.5 { + warning = Some( + "RT range mismatch — calibration may not be valid for this file".to_string(), + ); + } + } else { + warning = Some( + "No raw file loaded — cannot verify RT range compatibility".to_string(), + ); + } + + // Rebuild calibration state from saved points. + let points: Vec<(f64, f64, f64)> = saved + .calibrant_points + .iter() + .map(|p| (p[0], p[1], p[2])) + .collect(); + + if !points.is_empty() { + let x_min = points.iter().map(|p| p.0).fold(f64::MAX, f64::min); + let x_max = points.iter().map(|p| p.0).fold(f64::MIN, f64::max); + let y_min = points.iter().map(|p| p.1).fold(f64::MAX, f64::min); + let y_max = points.iter().map(|p| p.1).fold(f64::MIN, f64::max); + + if let Ok(mut cal) = calibrt::CalibrationState::new( + saved.grid_size, + (x_min, x_max), + (y_min, y_max), + DEFAULT_LOOKBACK, + ) { + cal.update(points.iter().copied()); + cal.fit(); + self.calibration_state = Some(cal); + } + } + + self.snapshot_points = points; + self.n_calibrants = saved.n_calibrants; + self.n_scored = saved.n_scored; + self.derived_tolerances = Some(DerivedTolerances { + rt_tolerance_minutes: saved.tolerances.rt_minutes, + wrmse: saved.wrmse, + }); + self.phase = CalibrationPhase::Done; + + Ok(warning) + } + // ----------------------------------------------------------------------- // UI rendering // ----------------------------------------------------------------------- @@ -466,6 +591,33 @@ impl ViewerCalibrationState { self.start(Arc::clone(index), Arc::clone(data)); } } + if ui.button("Load").clicked() { + if let Some(path) = rfd::FileDialog::new() + .add_filter("JSON", &["json"]) + 
.pick_file() + { + let raw_rt_range = if let crate::app::IndexedDataState::Loaded { + index, .. + } = indexed_data + { + let cycle_mapping = index.ms1_cycle_mapping(); + let (rt_min_ms, rt_max_ms) = cycle_mapping.range_milis(); + Some([ + rt_min_ms as f64 / 1000.0, + rt_max_ms as f64 / 1000.0, + ]) + } else { + None + }; + match self.load_from_file(&path, raw_rt_range) { + Ok(Some(warning)) => tracing::warn!("{}", warning), + Ok(None) => { + tracing::info!("Calibration loaded from {:?}", path) + } + Err(e) => tracing::error!("Failed to load calibration: {}", e), + } + } + } } CalibrationPhase::Running => { if ui.button("\u{23F8} Pause").clicked() { @@ -482,11 +634,53 @@ impl ViewerCalibrationState { if ui.button("\u{23F9} Stop").clicked() { self.stop(); } + if ui.button("Save").clicked() { + if let Some(path) = rfd::FileDialog::new() + .set_file_name("calibration.json") + .add_filter("JSON", &["json"]) + .save_file() + { + let rt_range = if let crate::app::IndexedDataState::Loaded { + index, .. + } = indexed_data + { + let cycle_mapping = index.ms1_cycle_mapping(); + let (rt_min_ms, rt_max_ms) = cycle_mapping.range_milis(); + [rt_min_ms as f64 / 1000.0, rt_max_ms as f64 / 1000.0] + } else { + [0.0, 0.0] + }; + if let Err(e) = self.save_to_file(&path, rt_range) { + tracing::error!("Failed to save calibration: {}", e); + } + } + } } CalibrationPhase::Done => { if ui.button("\u{21BA} Reset").clicked() { self.reset(); } + if ui.button("Save").clicked() { + if let Some(path) = rfd::FileDialog::new() + .set_file_name("calibration.json") + .add_filter("JSON", &["json"]) + .save_file() + { + let rt_range = if let crate::app::IndexedDataState::Loaded { + index, .. 
+ } = indexed_data + { + let cycle_mapping = index.ms1_cycle_mapping(); + let (rt_min_ms, rt_max_ms) = cycle_mapping.range_milis(); + [rt_min_ms as f64 / 1000.0, rt_max_ms as f64 / 1000.0] + } else { + [0.0, 0.0] + }; + if let Err(e) = self.save_to_file(&path, rt_range) { + tracing::error!("Failed to save calibration: {}", e); + } + } + } } } }); @@ -681,9 +875,10 @@ impl ViewerCalibrationState { // Derive a suggested RT tolerance: 3x WRMSE in minutes. let suggested_rt_min = wrmse.map(|w| (w / 60.0) * 3.0); - if let Some(derived) = &suggested_rt_min { + if let (Some(rt_min), Some(w)) = (suggested_rt_min, wrmse) { self.derived_tolerances = Some(DerivedTolerances { - rt_tolerance_minutes: *derived as f32, + rt_tolerance_minutes: rt_min as f32, + wrmse: w, }); } From 84177a49710f9209753f83d7e32ff5555b371de7 Mon Sep 17 00:00:00 2001 From: "J. Sebastian Paez" Date: Fri, 10 Apr 2026 10:59:03 -0700 Subject: [PATCH 31/64] feat(cli): save calibration.json after Phase 2 (viewer-compatible v1 format) --- rust/timsseek/src/rt_calibration.rs | 5 +++++ rust/timsseek_cli/src/processing.rs | 29 +++++++++++++++++++++++++++++ 2 files changed, 34 insertions(+) diff --git a/rust/timsseek/src/rt_calibration.rs b/rust/timsseek/src/rt_calibration.rs index ce9a0b9..25e362d 100644 --- a/rust/timsseek/src/rt_calibration.rs +++ b/rust/timsseek/src/rt_calibration.rs @@ -47,6 +47,11 @@ impl CalibrationResult { } } + /// Derived RT tolerance in minutes. + pub fn rt_tolerance_minutes(&self) -> f32 { + self.rt_tolerance_minutes + } + /// Get per-query tolerance. Initially uniform; future: position-dependent. 
pub fn get_tolerance(&self, _mz: f64, _mobility: f32, _rt: f32) -> Tolerance { Tolerance { diff --git a/rust/timsseek_cli/src/processing.rs b/rust/timsseek_cli/src/processing.rs index 911f949..a663fa0 100644 --- a/rust/timsseek_cli/src/processing.rs +++ b/rust/timsseek_cli/src/processing.rs @@ -187,6 +187,12 @@ pub fn execute_pipeline( None }; + // Snapshot calibrant points before calibration consumes them (for saving) + let calibrant_points: Vec<[f64; 3]> = calibrants + .iter() + .map(|c| [c.library_rt_seconds as f64, c.apex_rt_seconds as f64, 1.0]) + .collect(); + info!("Phase 2: Calibration..."); let phase2_start = Instant::now(); let calibration = match calibrate_from_phase1( @@ -211,6 +217,29 @@ pub fn execute_pipeline( phase2_ms as f64 / 1000.0 ); + // Save calibration as JSON v1 (compatible with viewer load) + if !calibrant_points.is_empty() { + let rt_lo = calibrant_points.iter().map(|p| p[1]).fold(f64::MAX, f64::min); + let rt_hi = calibrant_points.iter().map(|p| p[1]).fold(f64::MIN, f64::max); + let cal_save = serde_json::json!({ + "version": "v1", + "rt_range_seconds": [rt_lo, rt_hi], + "calibrant_points": calibrant_points, + "tolerances": { + "rt_minutes": calibration.rt_tolerance_minutes(), + }, + "wrmse": 0.0, + "n_calibrants": calibrant_points.len(), + "n_scored": calibrant_points.len(), + "grid_size": calib_config.grid_size, + }); + let cal_json_path = out_path.directory.join("calibration.json"); + if let Ok(json) = serde_json::to_string_pretty(&cal_save) { + let _ = std::fs::write(&cal_json_path, json); + info!("Saved calibration to {:?}", cal_json_path); + } + } + // === PHASE 3: Narrow scoring with calibrated tolerances === info!("Phase 3: Scoring with calibrated extraction..."); let phase3_start = Instant::now(); From 8544a6b923552859b4ffcbb723f7c1e52737261c Mon Sep 17 00:00:00 2001 From: "J. 
Sebastian Paez" Date: Fri, 10 Apr 2026 11:04:04 -0700 Subject: [PATCH 32/64] =?UTF-8?q?refactor:=20shared=20calibration=20JSON?= =?UTF-8?q?=20format=20=E2=80=94=20CalibrationSnapshot=20in=20calibrt,=20S?= =?UTF-8?q?avedCalibration=20in=20timsseek?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit - calibrt: add CalibrationSnapshot (serde) + CalibrationState::save_snapshot/from_snapshot - timsseek: add SavedCalibration/SavedTolerances/LoadedCalibration types - timsseek: add CalibrationResult::save_json/load_json - CLI: delegates to CalibrationResult::save_json (replaces inline serde_json::json!) - Viewer: delegates to CalibrationResult::load_json + CalibrationState::from_snapshot --- rust/calibrt/src/lib.rs | 32 +++++++ rust/timsquery_viewer/src/calibration.rs | 111 ++++++----------------- rust/timsseek/src/rt_calibration.rs | 94 +++++++++++++++++++ rust/timsseek_cli/src/processing.rs | 28 +++--- 4 files changed, 168 insertions(+), 97 deletions(-) diff --git a/rust/calibrt/src/lib.rs b/rust/calibrt/src/lib.rs index f73e68f..809b354 100644 --- a/rust/calibrt/src/lib.rs +++ b/rust/calibrt/src/lib.rs @@ -150,6 +150,15 @@ impl CalibrationCurve { } } +/// Serializable snapshot of calibration data — points + config. +/// Used for save/load. Does not include the fitted curve (reconstructed on load). +#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)] +pub struct CalibrationSnapshot { + pub points: Vec<[f64; 3]>, // [x, y, weight] + pub grid_size: usize, + pub lookback: usize, +} + /// Reusable calibration state for incremental fitting. Owns all allocations. pub struct CalibrationState { grid: grid::Grid, @@ -242,6 +251,29 @@ impl CalibrationState { self.curve.as_ref() } + /// Bundle current config into a snapshot (caller provides the points). 
+ pub fn save_snapshot(&self, points: &[(f64, f64, f64)]) -> CalibrationSnapshot { + CalibrationSnapshot { + points: points.iter().map(|&(x, y, w)| [x, y, w]).collect(), + grid_size: self.grid.bins, + lookback: self.lookback, + } + } + + /// Reconstruct a CalibrationState from a snapshot. + pub fn from_snapshot(snapshot: &CalibrationSnapshot) -> Result { + if snapshot.points.is_empty() { + return Err(CalibRtError::NoPoints); + } + let x_range = compute_range(snapshot.points.iter().map(|p| p[0]))?; + let y_range = compute_range(snapshot.points.iter().map(|p| p[1]))?; + + let mut state = Self::new(snapshot.grid_size, x_range, y_range, snapshot.lookback)?; + state.update(snapshot.points.iter().map(|p| (p[0], p[1], p[2]))); + state.fit(); + Ok(state) + } + pub fn is_stale(&self) -> bool { self.stale } diff --git a/rust/timsquery_viewer/src/calibration.rs b/rust/timsquery_viewer/src/calibration.rs index 73966dc..7613142 100644 --- a/rust/timsquery_viewer/src/calibration.rs +++ b/rust/timsquery_viewer/src/calibration.rs @@ -11,7 +11,6 @@ use std::sync::Arc; use std::thread::JoinHandle; use eframe::egui; -use serde::{Deserialize, Serialize}; use calibrt::CalibrationState; use timscentroid::rt_mapping::{MS1CycleIndex, RTIndex}; @@ -72,26 +71,8 @@ pub struct DerivedTolerances { pub wrmse: f64, } -// --------------------------------------------------------------------------- -// Serde types for save/load -// --------------------------------------------------------------------------- - -#[derive(Serialize, Deserialize)] -struct SavedCalibration { - version: String, - rt_range_seconds: [f64; 2], - calibrant_points: Vec<[f64; 3]>, // [lib_rt, measured_rt, weight] - tolerances: SavedTolerances, - wrmse: f64, - n_calibrants: usize, - n_scored: usize, - grid_size: usize, -} - -#[derive(Serialize, Deserialize)] -struct SavedTolerances { - rt_minutes: f32, -} +// Save/load uses shared types from timsseek::rt_calibration: +// SavedCalibration, SavedTolerances, LoadedCalibration, 
CalibrationSnapshot /// Messages sent from the background thread to the UI. #[derive(Debug)] @@ -454,101 +435,65 @@ impl ViewerCalibrationState { // ----------------------------------------------------------------------- /// Serialize the current calibration state to a JSON v1 file. + /// Delegates to `CalibrationResult::save_json` format via shared serde types. pub fn save_to_file( &self, path: &std::path::Path, rt_range_seconds: [f64; 2], ) -> Result<(), String> { + use timsseek::rt_calibration::{SavedCalibration, SavedTolerances}; + use calibrt::CalibrationSnapshot; + let tol = self.derived_tolerances.as_ref(); let saved = SavedCalibration { version: "v1".to_string(), rt_range_seconds, - calibrant_points: self - .snapshot_points - .iter() - .map(|(x, y, w)| [*x, *y, *w]) - .collect(), + calibration: CalibrationSnapshot { + points: self.snapshot_points.iter().map(|&(x, y, w)| [x, y, w]).collect(), + grid_size: DEFAULT_GRID_SIZE, + lookback: DEFAULT_LOOKBACK, + }, tolerances: SavedTolerances { rt_minutes: tol.map_or(0.0, |t| t.rt_tolerance_minutes), + mz_ppm: [0.0, 0.0], + mobility_pct: [0.0, 0.0], }, - wrmse: tol.map_or(0.0, |t| t.wrmse), n_calibrants: self.n_calibrants, n_scored: self.n_scored, - grid_size: DEFAULT_GRID_SIZE, }; let json = serde_json::to_string_pretty(&saved).map_err(|e| e.to_string())?; std::fs::write(path, json).map_err(|e| e.to_string()) } /// Deserialize calibration state from a JSON v1 file. - /// - /// Returns an optional warning string (e.g. RT range mismatch). + /// Delegates to `CalibrationResult::load_json` + `CalibrationState::from_snapshot`. 
pub fn load_from_file( &mut self, path: &std::path::Path, raw_rt_range: Option<[f64; 2]>, ) -> Result, String> { - let json = std::fs::read_to_string(path).map_err(|e| e.to_string())?; - let saved: SavedCalibration = - serde_json::from_str(&json).map_err(|e| e.to_string())?; + use timsseek::rt_calibration::CalibrationResult; - if saved.version != "v1" { - return Err(format!("Unsupported version: {}", saved.version)); - } + let loaded = CalibrationResult::load_json(path, raw_rt_range)?; - // RT range compatibility check. - let mut warning = None; - if let Some(raw_range) = raw_rt_range { - let overlap_lo = saved.rt_range_seconds[0].max(raw_range[0]); - let overlap_hi = saved.rt_range_seconds[1].min(raw_range[1]); - let overlap = (overlap_hi - overlap_lo).max(0.0); - let saved_span = saved.rt_range_seconds[1] - saved.rt_range_seconds[0]; - if saved_span > 0.0 && overlap / saved_span < 0.5 { - warning = Some( - "RT range mismatch — calibration may not be valid for this file".to_string(), - ); - } - } else { - warning = Some( - "No raw file loaded — cannot verify RT range compatibility".to_string(), - ); - } - - // Rebuild calibration state from saved points. 
- let points: Vec<(f64, f64, f64)> = saved - .calibrant_points - .iter() - .map(|p| (p[0], p[1], p[2])) - .collect(); - - if !points.is_empty() { - let x_min = points.iter().map(|p| p.0).fold(f64::MAX, f64::min); - let x_max = points.iter().map(|p| p.0).fold(f64::MIN, f64::max); - let y_min = points.iter().map(|p| p.1).fold(f64::MAX, f64::min); - let y_max = points.iter().map(|p| p.1).fold(f64::MIN, f64::max); - - if let Ok(mut cal) = calibrt::CalibrationState::new( - saved.grid_size, - (x_min, x_max), - (y_min, y_max), - DEFAULT_LOOKBACK, - ) { - cal.update(points.iter().copied()); - cal.fit(); - self.calibration_state = Some(cal); - } + // Reconstruct CalibrationState from the snapshot + if let Ok(cal) = calibrt::CalibrationState::from_snapshot(&loaded.snapshot) { + self.snapshot_points = loaded.snapshot.points + .iter() + .map(|p| (p[0], p[1], p[2])) + .collect(); + self.calibration_state = Some(cal); } - self.snapshot_points = points; - self.n_calibrants = saved.n_calibrants; - self.n_scored = saved.n_scored; + self.n_calibrants = loaded.n_calibrants; + self.n_scored = loaded.n_scored; self.derived_tolerances = Some(DerivedTolerances { - rt_tolerance_minutes: saved.tolerances.rt_minutes, - wrmse: saved.wrmse, + rt_tolerance_minutes: loaded.tolerances.rt_minutes, + wrmse: 0.0, // WRMSE will be recomputed from snapshot on next render }); self.phase = CalibrationPhase::Done; - Ok(warning) + Ok(loaded.warning) } // ----------------------------------------------------------------------- diff --git a/rust/timsseek/src/rt_calibration.rs b/rust/timsseek/src/rt_calibration.rs index 25e362d..ed6d9f9 100644 --- a/rust/timsseek/src/rt_calibration.rs +++ b/rust/timsseek/src/rt_calibration.rs @@ -3,9 +3,11 @@ use crate::scoring::pipeline::Scorer; pub use calibrt::{ CalibRtError, CalibrationCurve as RTCalibration, + CalibrationSnapshot, Point, calibrate_with_ranges, }; +use serde::{Serialize, Deserialize}; use timsquery::Tolerance; use timsquery::models::tolerance::{ 
MobilityTolerance, @@ -78,6 +80,71 @@ impl CalibrationResult { .with_mobility_tolerance(MobilityTolerance::Pct((3.0, 3.0))) } + /// Save calibration to JSON v1 format. + /// `calibrant_points` are the (library_rt, measured_rt, weight) triples. + /// `rt_range_seconds` is the raw file's observed RT range. + pub fn save_json( + &self, + calibrant_points: &[(f64, f64, f64)], + rt_range_seconds: [f64; 2], + grid_size: usize, + lookback: usize, + n_scored: usize, + path: &std::path::Path, + ) -> Result<(), String> { + let saved = SavedCalibration { + version: "v1".to_string(), + rt_range_seconds, + calibration: CalibrationSnapshot { + points: calibrant_points.iter().map(|&(x, y, w)| [x, y, w]).collect(), + grid_size, + lookback, + }, + tolerances: SavedTolerances { + rt_minutes: self.rt_tolerance_minutes, + mz_ppm: [self.mz_tolerance_ppm.0, self.mz_tolerance_ppm.1], + mobility_pct: [self.mobility_tolerance_pct.0 as f64, self.mobility_tolerance_pct.1 as f64], + }, + n_calibrants: calibrant_points.len(), + n_scored, + }; + let json = serde_json::to_string_pretty(&saved).map_err(|e| e.to_string())?; + std::fs::write(path, json).map_err(|e| e.to_string()) + } + + /// Load calibration from JSON v1 format. + /// Returns (CalibrationResult-like data, optional RT range warning). 
+ pub fn load_json(path: &std::path::Path, raw_rt_range: Option<[f64; 2]>) -> Result { + let json = std::fs::read_to_string(path).map_err(|e| e.to_string())?; + let saved: SavedCalibration = serde_json::from_str(&json).map_err(|e| e.to_string())?; + if saved.version != "v1" { + return Err(format!("Unsupported calibration version: {}", saved.version)); + } + + let warning = match raw_rt_range { + Some(raw) => { + let overlap_lo = saved.rt_range_seconds[0].max(raw[0]); + let overlap_hi = saved.rt_range_seconds[1].min(raw[1]); + let overlap = (overlap_hi - overlap_lo).max(0.0); + let span = saved.rt_range_seconds[1] - saved.rt_range_seconds[0]; + if span > 0.0 && overlap / span < 0.5 { + Some("RT range mismatch — calibration may not be valid for this file".to_string()) + } else { + None + } + } + None => Some("No raw file loaded — cannot verify RT range compatibility".to_string()), + }; + + Ok(LoadedCalibration { + snapshot: saved.calibration, + tolerances: saved.tolerances, + n_calibrants: saved.n_calibrants, + n_scored: saved.n_scored, + warning, + }) + } + /// Fallback when calibration fails: identity RT mapping, secondary tolerance. pub fn fallback(pipeline: &Scorer) -> Self { let range = pipeline.index.ms1_cycle_mapping().range_milis(); @@ -107,3 +174,30 @@ impl CalibrationResult { } } +/// JSON v1 calibration file format — shared between CLI and viewer. +#[derive(Debug, Serialize, Deserialize)] +pub struct SavedCalibration { + pub version: String, + pub rt_range_seconds: [f64; 2], + pub calibration: CalibrationSnapshot, + pub tolerances: SavedTolerances, + pub n_calibrants: usize, + pub n_scored: usize, +} + +#[derive(Debug, Serialize, Deserialize)] +pub struct SavedTolerances { + pub rt_minutes: f32, + pub mz_ppm: [f64; 2], + pub mobility_pct: [f64; 2], +} + +/// Result of loading a calibration JSON file. 
+pub struct LoadedCalibration { + pub snapshot: CalibrationSnapshot, + pub tolerances: SavedTolerances, + pub n_calibrants: usize, + pub n_scored: usize, + pub warning: Option, +} + diff --git a/rust/timsseek_cli/src/processing.rs b/rust/timsseek_cli/src/processing.rs index a663fa0..e5952d2 100644 --- a/rust/timsseek_cli/src/processing.rs +++ b/rust/timsseek_cli/src/processing.rs @@ -219,23 +219,23 @@ pub fn execute_pipeline( // Save calibration as JSON v1 (compatible with viewer load) if !calibrant_points.is_empty() { + let cal_points_tuples: Vec<(f64, f64, f64)> = calibrant_points + .iter() + .map(|p| (p[0], p[1], p[2])) + .collect(); let rt_lo = calibrant_points.iter().map(|p| p[1]).fold(f64::MAX, f64::min); let rt_hi = calibrant_points.iter().map(|p| p[1]).fold(f64::MIN, f64::max); - let cal_save = serde_json::json!({ - "version": "v1", - "rt_range_seconds": [rt_lo, rt_hi], - "calibrant_points": calibrant_points, - "tolerances": { - "rt_minutes": calibration.rt_tolerance_minutes(), - }, - "wrmse": 0.0, - "n_calibrants": calibrant_points.len(), - "n_scored": calibrant_points.len(), - "grid_size": calib_config.grid_size, - }); let cal_json_path = out_path.directory.join("calibration.json"); - if let Ok(json) = serde_json::to_string_pretty(&cal_save) { - let _ = std::fs::write(&cal_json_path, json); + if let Err(e) = calibration.save_json( + &cal_points_tuples, + [rt_lo, rt_hi], + calib_config.grid_size, + calib_config.dp_lookback, + calibrant_points.len(), + &cal_json_path, + ) { + tracing::warn!("Failed to save calibration: {}", e); + } else { info!("Saved calibration to {:?}", cal_json_path); } } From b6ab4048b9e183e8acbdeca2e3b741c5bfc0e6af Mon Sep 17 00:00:00 2001 From: "J. 
Sebastian Paez" Date: Fri, 10 Apr 2026 11:09:34 -0700 Subject: [PATCH 33/64] chore(viewer): remove unused arc_clone method and wrmse field from DerivedTolerances --- rust/timsquery_viewer/src/app.rs | 8 -------- rust/timsquery_viewer/src/calibration.rs | 5 +---- 2 files changed, 1 insertion(+), 12 deletions(-) diff --git a/rust/timsquery_viewer/src/app.rs b/rust/timsquery_viewer/src/app.rs index 93fe178..762b7e6 100644 --- a/rust/timsquery_viewer/src/app.rs +++ b/rust/timsquery_viewer/src/app.rs @@ -173,14 +173,6 @@ impl ElutionGroupState { } } - /// Get a cloned Arc handle to the loaded data (for sharing with background threads). - pub fn arc_clone(&self) -> Option> { - match self { - ElutionGroupState::Loaded { data, .. } => Some(Arc::clone(data)), - _ => None, - } - } - pub fn is_none(&self) -> bool { matches!(self, ElutionGroupState::None) } diff --git a/rust/timsquery_viewer/src/calibration.rs b/rust/timsquery_viewer/src/calibration.rs index 7613142..9533c0b 100644 --- a/rust/timsquery_viewer/src/calibration.rs +++ b/rust/timsquery_viewer/src/calibration.rs @@ -68,7 +68,6 @@ pub enum CalibrationPhase { #[derive(Debug, Clone)] pub struct DerivedTolerances { pub rt_tolerance_minutes: f32, - pub wrmse: f64, } // Save/load uses shared types from timsseek::rt_calibration: @@ -489,7 +488,6 @@ impl ViewerCalibrationState { self.n_scored = loaded.n_scored; self.derived_tolerances = Some(DerivedTolerances { rt_tolerance_minutes: loaded.tolerances.rt_minutes, - wrmse: 0.0, // WRMSE will be recomputed from snapshot on next render }); self.phase = CalibrationPhase::Done; @@ -820,10 +818,9 @@ impl ViewerCalibrationState { // Derive a suggested RT tolerance: 3x WRMSE in minutes. 
let suggested_rt_min = wrmse.map(|w| (w / 60.0) * 3.0); - if let (Some(rt_min), Some(w)) = (suggested_rt_min, wrmse) { + if let Some(rt_min) = suggested_rt_min { self.derived_tolerances = Some(DerivedTolerances { rt_tolerance_minutes: rt_min as f32, - wrmse: w, }); } From fc093e8eb8f9b64b790e8a0c57444c8e2cfc0382 Mon Sep 17 00:00:00 2001 From: "J. Sebastian Paez" Date: Fri, 10 Apr 2026 11:20:14 -0700 Subject: [PATCH 34/64] feat(viewer): render calibration grid as heatmap with per-cell colored rectangles --- rust/calibrt/src/lib.rs | 12 +++ rust/timsquery_viewer/src/calibration.rs | 98 ++++++++++++++---------- 2 files changed, 69 insertions(+), 41 deletions(-) diff --git a/rust/calibrt/src/lib.rs b/rust/calibrt/src/lib.rs index 809b354..ac4e1d5 100644 --- a/rust/calibrt/src/lib.rs +++ b/rust/calibrt/src/lib.rs @@ -243,6 +243,18 @@ impl CalibrationState { self.grid.grid_cells() } + pub fn grid_bins(&self) -> usize { + self.grid.bins + } + + pub fn grid_x_range(&self) -> (f64, f64) { + self.grid.x_range + } + + pub fn grid_y_range(&self) -> (f64, f64) { + self.grid.y_range + } + pub fn path_indices(&self) -> &[usize] { &self.path_indices } diff --git a/rust/timsquery_viewer/src/calibration.rs b/rust/timsquery_viewer/src/calibration.rs index 9533c0b..172776e 100644 --- a/rust/timsquery_viewer/src/calibration.rs +++ b/rust/timsquery_viewer/src/calibration.rs @@ -269,11 +269,13 @@ impl ViewerCalibrationState { self.snapshot_points = points; // Feed points into CalibrationState for curve fitting. + // Reset first — each snapshot is the full heap, not a delta. if let Some(cs) = &mut self.calibration_state { + cs.reset(); cs.update( self.snapshot_points .iter() - .map(|&(lib_rt, apex_rt, score)| (lib_rt, apex_rt, score)), + .map(|&(lib_rt, apex_rt, _weight)| (lib_rt, apex_rt, 1.0)), ); cs.fit(); } @@ -666,7 +668,7 @@ impl ViewerCalibrationState { /// Render the scatter + curve calibration plot. 
fn render_calibration_plot(&self, ui: &mut egui::Ui) { - use egui_plot::{Line, Plot, PlotPoints, Points}; + use egui_plot::{Line, Plot, PlotPoints, Points, Polygon}; let plot_id = format!("calibration_plot_{}", self.generation); let plot = Plot::new(plot_id) @@ -679,48 +681,64 @@ impl ViewerCalibrationState { let cal_state = self.calibration_state.as_ref(); plot.show(ui, |plot_ui| { - // Grid cells from CalibrationState (if available). + // Grid heatmap from CalibrationState (if available). if let Some(cs) = cal_state { let cells = cs.grid_cells(); let path_indices = cs.path_indices(); + let bins = cs.grid_bins(); + let (x_lo, x_hi) = cs.grid_x_range(); + let (y_lo, y_hi) = cs.grid_y_range(); + let cell_w = (x_hi - x_lo) / bins as f64; + let cell_h = (y_hi - y_lo) / bins as f64; + + // Find max weight for color normalization (log scale) + let max_weight = cells.iter() + .map(|n| n.center.weight) + .fold(0.0f64, f64::max) + .max(1.0); + let log_max = max_weight.ln_1p(); + + // Draw each non-zero cell as a colored rectangle + for (i, node) in cells.iter().enumerate() { + if node.center.weight <= 0.0 { + continue; + } - // Suppressed cells with any weight: small gray dots - let suppressed_pts: Vec<[f64; 2]> = cells - .iter() - .filter(|n| n.suppressed && n.center.weight > 0.0) - .map(|n| [n.center.x, n.center.y]) - .collect(); - - if !suppressed_pts.is_empty() { - plot_ui.points( - Points::new( - "suppressed", - PlotPoints::new(suppressed_pts), - ) - .color(egui::Color32::from_rgb(140, 140, 140)) - .radius(2.0), - ); - } - - // Retained (non-suppressed) cells with weight: larger blue dots - let retained_pts: Vec<[f64; 2]> = cells - .iter() - .filter(|n| !n.suppressed && n.center.weight > 0.0) - .map(|n| [n.center.x, n.center.y]) - .collect(); - - if !retained_pts.is_empty() { - plot_ui.points( - Points::new( - "retained", - PlotPoints::new(retained_pts), - ) - .color(egui::Color32::from_rgb(70, 130, 230)) - .radius(4.0), + let gx = i % bins; + let gy = i / bins; + let 
cx = x_lo + (gx as f64 + 0.5) * cell_w; + let cy = y_lo + (gy as f64 + 0.5) * cell_h; + let hw = cell_w * 0.5; + let hh = cell_h * 0.5; + + // Log-scale color: dark blue → bright yellow + let t = (node.center.weight.ln_1p() / log_max) as f32; + let color = if node.suppressed { + // Suppressed: gray tones + let v = (40.0 + t * 80.0) as u8; + egui::Color32::from_rgba_unmultiplied(v, v, v, 180) + } else { + // Retained: blue → cyan → yellow heat + let r = (t * 255.0) as u8; + let g = (t * 200.0 + 55.0) as u8; + let b = ((1.0 - t) * 200.0) as u8; + egui::Color32::from_rgba_unmultiplied(r, g, b, 200) + }; + + let rect = vec![ + [cx - hw, cy - hh], + [cx + hw, cy - hh], + [cx + hw, cy + hh], + [cx - hw, cy + hh], + ]; + plot_ui.polygon( + Polygon::new(format!("cell_{i}"), PlotPoints::new(rect)) + .fill_color(color) + .stroke(egui::Stroke::new(0.0, egui::Color32::TRANSPARENT)) ); } - // Path nodes: green dots + // Path nodes: bright green dots on top of heatmap let path_pts: Vec<[f64; 2]> = path_indices .iter() .filter_map(|&idx| cells.get(idx)) @@ -733,7 +751,7 @@ impl ViewerCalibrationState { "path", PlotPoints::new(path_pts), ) - .color(egui::Color32::from_rgb(50, 205, 50)) + .color(egui::Color32::from_rgb(50, 255, 50)) .radius(5.0), ); } @@ -750,7 +768,6 @@ impl ViewerCalibrationState { let line_pts: Vec<[f64; 2]> = (0..=n_samples) .filter_map(|i| { let x = x_min + i as f64 * step; - // predict returns Err for out-of-bounds but we stay in range let y = match curve.predict(x) { Ok(y) => y, Err(calibrt::CalibRtError::OutOfBounds(y)) => y, @@ -774,8 +791,7 @@ impl ViewerCalibrationState { } } else if !self.snapshot_points.is_empty() { // Fallback: show raw calibrant points if CalibrationState - // hasn't been built yet (shouldn't normally happen, but - // keeps the plot populated). + // hasn't been built yet. let raw_pts: Vec<[f64; 2]> = self .snapshot_points .iter() From 4469f20417feaab2e9c081aeb87cfe8e27722ac9 Mon Sep 17 00:00:00 2001 From: "J. 
Sebastian Paez" Date: Fri, 10 Apr 2026 12:12:06 -0700 Subject: [PATCH 35/64] =?UTF-8?q?feat(viewer):=20calibration=20panel=20pol?= =?UTF-8?q?ish=20=E2=80=94=20heatmap,=20projected=20RT,=20tolerance=20fixe?= =?UTF-8?q?s?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit - Render grid as heatmap (colored rectangles per cell, log-scale weight) - Show selected peptide crosshair on calibration plot (library RT + predicted RT) - Project library RT through calibration curve for chromatogram queries - Fix snapshot sending (always send at interval, not just when heap.len changes) - Fix tolerance: use non-suppressed cell residuals (MAD + Q75) instead of all calibrants - Pin tolerance bar to bottom via TopBottomPanel (no magic pixel offsets) - Add is_active() + request_repaint_after(500ms) for live updates - Log calibration refit stats (scored, retained cells, path nodes, curve) --- rust/timsquery_viewer/src/app.rs | 29 ++++- rust/timsquery_viewer/src/calibration.rs | 134 +++++++++++++++++------ 2 files changed, 131 insertions(+), 32 deletions(-) diff --git a/rust/timsquery_viewer/src/app.rs b/rust/timsquery_viewer/src/app.rs index 762b7e6..b515841 100644 --- a/rust/timsquery_viewer/src/app.rs +++ b/rust/timsquery_viewer/src/app.rs @@ -517,7 +517,23 @@ impl ViewerApp { self.chromatogram_receiver = Some(rx); let index_owned = index.clone(); - let elution_group_owned = elution_group.clone(); + // If calibration is available, project the library RT to measured RT + let mut elution_group_owned = elution_group.clone(); + if let Some(cs) = &self.calibration.calibration_state { + if let Some(curve) = cs.curve() { + let lib_rt = elution_group_owned.rt_seconds(); + let calibrated_rt = match curve.predict(lib_rt as f64) { + Ok(y) => y as f32, + Err(calibrt::CalibRtError::OutOfBounds(y)) => y as f32, + Err(_) => lib_rt, + }; + tracing::debug!( + "RT calibration: {:.1}s (library) → {:.1}s (projected)", + lib_rt, calibrated_rt, + ); + 
elution_group_owned.set_rt_seconds(calibrated_rt); + } + } let expected_intensities_owned = expected_intensities.clone(); let tolerance_owned = self.data.tolerance.clone(); let smoothing_owned = self.data.smoothing; @@ -1374,6 +1390,10 @@ impl eframe::App for ViewerApp { if self.calibration.poll() { ctx.request_repaint(); } + // Keep polling while calibration is running (egui won't repaint without input otherwise) + if self.calibration.is_active() { + ctx.request_repaint_after(std::time::Duration::from_millis(500)); + } // Generate MS2 spectrum and mobility data if RT was clicked self.handle_rt_click(); @@ -1660,11 +1680,18 @@ impl<'a> TabViewer for AppTabViewer<'a> { .render(ui, self.computed.mobility_data()); } Pane::Calibration => { + // Get selected peptide's library RT for overlay on heatmap + let selected_library_rt = self.ui.selected_index.and_then(|idx| { + let eg_data = self.data.elution_groups.as_ref()?; + let (eg, _) = eg_data.get_elem(idx).ok()?; + Some(eg.rt_seconds() as f64) + }); self.calibration.render_panel( ui, &self.data.indexed_data, &self.data.elution_groups, &mut self.data.tolerance, + selected_library_rt, ); } } diff --git a/rust/timsquery_viewer/src/calibration.rs b/rust/timsquery_viewer/src/calibration.rs index 172776e..ec8b865 100644 --- a/rust/timsquery_viewer/src/calibration.rs +++ b/rust/timsquery_viewer/src/calibration.rs @@ -248,6 +248,11 @@ impl ViewerCalibrationState { /// Drain the channel and update internal state. /// + /// Whether the background thread is still active (Running or Paused). + pub fn is_active(&self) -> bool { + matches!(self.phase, CalibrationPhase::Running | CalibrationPhase::Paused) + } + /// Returns `true` if any new data was received (caller should /// `request_repaint`). 
pub fn poll(&mut self) -> bool { @@ -278,6 +283,15 @@ impl ViewerCalibrationState { .map(|&(lib_rt, apex_rt, _weight)| (lib_rt, apex_rt, 1.0)), ); cs.fit(); + let has_curve = cs.curve().is_some(); + let n_path = cs.path_indices().len(); + let n_retained = cs.grid_cells().iter() + .filter(|n| !n.suppressed && n.center.weight > 0.0) + .count(); + tracing::info!( + "Calibration refit: scored={} calibrants={} retained_cells={} path_nodes={} curve={}", + n_scored, heap_len, n_retained, n_path, has_curve, + ); } changed = true; } @@ -337,7 +351,6 @@ impl ViewerCalibrationState { let mut heap = CalibrantHeap::new(heap_capacity); let mut n_scored: usize = 0; - let mut last_snapshot_heap_len: usize = 0; for &eg_idx in &indices { // Check control flag. @@ -403,9 +416,10 @@ impl ViewerCalibrationState { heap.push(candidate); n_scored += 1; - // Periodic snapshot. - if n_scored % SNAPSHOT_INTERVAL == 0 && heap.len() != last_snapshot_heap_len { - last_snapshot_heap_len = heap.len(); + // Periodic snapshot — always send at interval. + // Heap content changes even when len() stays at capacity + // (better candidates evict worse ones). 
+ if n_scored % SNAPSHOT_INTERVAL == 0 { let points: Vec<(f64, f64, f64)> = heap .iter() .map(|c| { @@ -511,6 +525,7 @@ impl ViewerCalibrationState { indexed_data: &crate::app::IndexedDataState, elution_groups: &crate::app::ElutionGroupState, tolerance: &mut Tolerance, + selected_library_rt: Option, ) { // -- Control buttons -------------------------------------------------- ui.horizontal(|ui| { @@ -655,24 +670,23 @@ impl ViewerCalibrationState { ui.separator(); ui.add_space(4.0); - // -- Grid + curve plot ------------------------------------------------ - self.render_calibration_plot(ui); - - ui.add_space(4.0); - ui.separator(); - ui.add_space(4.0); + // -- Tolerance suggestion (pinned to bottom, reserves its natural height) -- + egui::TopBottomPanel::bottom("calibration_footer") + .show_inside(ui, |ui| { + self.render_tolerance_suggestion(ui, tolerance); + }); - // -- Tolerance suggestion --------------------------------------------- - self.render_tolerance_suggestion(ui, tolerance); + // -- Grid + curve plot (fills remaining space) ------------------------- + self.render_calibration_plot(ui, selected_library_rt); } /// Render the scatter + curve calibration plot. 
- fn render_calibration_plot(&self, ui: &mut egui::Ui) { - use egui_plot::{Line, Plot, PlotPoints, Points, Polygon}; + fn render_calibration_plot(&self, ui: &mut egui::Ui, selected_library_rt: Option) { + use egui_plot::{Line, Plot, PlotPoints, Points, Polygon, VLine, HLine}; let plot_id = format!("calibration_plot_{}", self.generation); let plot = Plot::new(plot_id) - .height(ui.available_height().min(400.0)) + .height(ui.available_height().max(100.0)) .x_axis_label("Library RT (s)") .y_axis_label("Measured RT (s)") .allow_zoom(true) @@ -789,6 +803,43 @@ impl ViewerCalibrationState { } } } + + // Selected peptide overlay: vertical line at library RT, + // horizontal line at predicted measured RT, tolerance band + if let Some(lib_rt) = selected_library_rt { + // Vertical line: library RT (x-axis) + plot_ui.vline( + VLine::new("library RT", lib_rt) + .color(egui::Color32::from_rgba_unmultiplied(255, 100, 100, 160)) + .width(1.5), + ); + + // If curve is fitted, show predicted RT + tolerance band + if let Some(curve) = cs.curve() { + let predicted_rt = match curve.predict(lib_rt) { + Ok(y) => y, + Err(calibrt::CalibRtError::OutOfBounds(y)) => y, + Err(_) => lib_rt, + }; + + // Horizontal line: predicted measured RT + plot_ui.hline( + HLine::new("predicted RT", predicted_rt) + .color(egui::Color32::from_rgba_unmultiplied(255, 100, 100, 160)) + .width(1.5), + ); + + // Crosshair point at (lib_rt, predicted_rt) + plot_ui.points( + Points::new( + "query", + PlotPoints::new(vec![[lib_rt, predicted_rt]]), + ) + .color(egui::Color32::from_rgb(255, 80, 80)) + .radius(6.0), + ); + } + } } else if !self.snapshot_points.is_empty() { // Fallback: show raw calibrant points if CalibrationState // hasn't been built yet. @@ -812,39 +863,60 @@ impl ViewerCalibrationState { /// Render tolerance suggestion and Apply button. fn render_tolerance_suggestion(&mut self, ui: &mut egui::Ui, tolerance: &mut Tolerance) { - // Compute WRMSE if we have a curve and snapshot points. 
- let wrmse = self + // Compute tolerance from NON-SUPPRESSED GRID CELL centroids. + // These ~90 cells survived NMS — they represent the grid's consensus evidence. + // Not all 2000 raw calibrants (many false positives with huge residuals). + // Not the ~20 path points (near-zero residuals by construction). + let stats = self .calibration_state .as_ref() .and_then(|cs| { let curve = cs.curve()?; - let points: Vec = self - .snapshot_points + let cells = cs.grid_cells(); + + // Absolute residuals from non-suppressed cell centroids + let mut abs_residuals: Vec = cells .iter() - .map(|&(lib_rt, apex_rt, score)| calibrt::Point { - x: lib_rt, - y: apex_rt, - weight: score, + .filter(|n| !n.suppressed && n.center.weight > 0.0) + .filter_map(|n| { + let pred = match curve.predict(n.center.x) { + Ok(y) => y, + Err(calibrt::CalibRtError::OutOfBounds(y)) => y, + Err(_) => return None, + }; + Some((pred - n.center.y).abs()) }) .collect(); - let val = curve.wrmse(points.iter()); - if val.is_finite() { Some(val) } else { None } + + if abs_residuals.len() < 4 { + return None; + } + + abs_residuals.sort_by(|a, b| a.partial_cmp(b).unwrap_or(std::cmp::Ordering::Equal)); + let n = abs_residuals.len(); + let mad_seconds = abs_residuals[n / 2]; + let q75_seconds = abs_residuals[3 * n / 4]; + + Some((mad_seconds, q75_seconds, n)) }); - // Derive a suggested RT tolerance: 3x WRMSE in minutes. - let suggested_rt_min = wrmse.map(|w| (w / 60.0) * 3.0); + // Suggested RT tolerance: 3x MAD of retained cell residuals, floored at 0.5 min. 
+ let suggested = stats.map(|(mad_s, q75_s, n_cells)| { + let rt_min = (mad_s / 60.0 * 3.0).max(0.5); + (rt_min, mad_s, q75_s, n_cells) + }); - if let Some(rt_min) = suggested_rt_min { + if let Some((rt_min, _, _, _)) = suggested { self.derived_tolerances = Some(DerivedTolerances { rt_tolerance_minutes: rt_min as f32, }); } ui.horizontal(|ui| { - if let (Some(rt_min), Some(w)) = (suggested_rt_min, wrmse) { + if let Some((rt_min, mad_s, q75_s, n_cells)) = suggested { ui.label(format!( - "Suggested RT: \u{00B1}{:.2} min WRMSE: {:.2} s", - rt_min, w + "Suggested RT: \u{00B1}{:.2} min MAD: {:.1} s Q75: {:.1} s ({} cells)", + rt_min, mad_s, q75_s, n_cells, )); if ui.button("Apply").clicked() { let rt_tol = rt_min as f32; From fb22e3e87aeac94574074b11efaae4f7a330f70e Mon Sep 17 00:00:00 2001 From: "J. Sebastian Paez" Date: Fri, 10 Apr 2026 12:15:56 -0700 Subject: [PATCH 36/64] feat(viewer): show separate Library RT (blue) and Calibrated RT (orange) reference lines --- rust/timsquery_viewer/src/app.rs | 37 ++++++++++++++++++++++++++------ 1 file changed, 31 insertions(+), 6 deletions(-) diff --git a/rust/timsquery_viewer/src/app.rs b/rust/timsquery_viewer/src/app.rs index b515841..11f01ff 100644 --- a/rust/timsquery_viewer/src/app.rs +++ b/rust/timsquery_viewer/src/app.rs @@ -670,16 +670,41 @@ impl ViewerApp { ); } - // Add library RT reference line + // Add RT reference lines if let Some(elution_groups) = self.data.elution_groups.as_ref() && let Ok((elution_group, _)) = elution_groups.get_elem(selected_idx as usize) { - self.computed.insert_reference_line( - "Library RT".into(), - elution_group.rt_seconds() as f64, - Color32::BLUE, - ); + let lib_rt = elution_group.rt_seconds() as f64; + + // Check if calibration projects to a different RT + let calibrated_rt = self.calibration.calibration_state.as_ref() + .and_then(|cs| cs.curve()) + .and_then(|curve| match curve.predict(lib_rt) { + Ok(y) => Some(y), + Err(calibrt::CalibRtError::OutOfBounds(y)) => Some(y), + Err(_) => 
None, + }); + + if let Some(cal_rt) = calibrated_rt { + // Show both: dashed library RT + solid calibrated RT + self.computed.insert_reference_line( + "Library RT".into(), + lib_rt, + Color32::from_rgba_unmultiplied(100, 100, 255, 120), // dim blue + ); + self.computed.insert_reference_line( + "Calibrated RT".into(), + cal_rt, + Color32::from_rgb(255, 165, 0), // orange + ); + } else { + self.computed.insert_reference_line( + "Library RT".into(), + lib_rt, + Color32::BLUE, + ); + } } } Err(e) => { From 392c1353089bfff701ad76d42b944071964093bc Mon Sep 17 00:00:00 2001 From: "J. Sebastian Paez" Date: Fri, 10 Apr 2026 12:26:45 -0700 Subject: [PATCH 37/64] feat(viewer): persist calibration across restarts via app state snapshot --- rust/timsquery_viewer/src/app.rs | 7 ++- rust/timsquery_viewer/src/calibration.rs | 55 ++++++++++++++++++++++++ 2 files changed, 61 insertions(+), 1 deletion(-) diff --git a/rust/timsquery_viewer/src/app.rs b/rust/timsquery_viewer/src/app.rs index 11f01ff..eed8207 100644 --- a/rust/timsquery_viewer/src/app.rs +++ b/rust/timsquery_viewer/src/app.rs @@ -121,6 +121,8 @@ struct PersistentState { tolerance: Tolerance, smoothing: SmoothingMethod, dock_state: DockState, + #[serde(default)] + calibration_snapshot: Option, } fn default_true() -> bool { @@ -342,7 +344,9 @@ impl ViewerApp { cancellation_token: None, screenshot_state: ScreenshotState::default(), screenshot_delay_secs: 3.0, - calibration: ViewerCalibrationState::default(), + calibration: ViewerCalibrationState::from_snapshot( + state.calibration_snapshot, + ), }; } } else { @@ -1389,6 +1393,7 @@ impl eframe::App for ViewerApp { tolerance: self.data.tolerance.clone(), smoothing: self.data.smoothing, dock_state: self.dock_state.clone(), + calibration_snapshot: self.calibration.snapshot_for_persistence(), }; if let Ok(value) = ron::to_string(&state) { diff --git a/rust/timsquery_viewer/src/calibration.rs b/rust/timsquery_viewer/src/calibration.rs index ec8b865..e280b16 100644 --- 
a/rust/timsquery_viewer/src/calibration.rs +++ b/rust/timsquery_viewer/src/calibration.rs @@ -142,6 +142,61 @@ impl Default for ViewerCalibrationState { } impl ViewerCalibrationState { + /// Reconstruct from a persisted snapshot (app state restore). + pub fn from_snapshot(snapshot: Option) -> Self { + let Some(snapshot) = snapshot else { + return Self::default(); + }; + if snapshot.points.is_empty() { + return Self::default(); + } + + let snapshot_points: Vec<(f64, f64, f64)> = snapshot + .points + .iter() + .map(|p| (p[0], p[1], p[2])) + .collect(); + let n_calibrants = snapshot_points.len(); + + let calibration_state = calibrt::CalibrationState::from_snapshot(&snapshot).ok(); + + Self { + phase: if calibration_state.is_some() { + CalibrationPhase::Done + } else { + CalibrationPhase::Idle + }, + calibration_state, + generation: 0, + n_scored: n_calibrants, + n_calibrants, + heap_capacity: DEFAULT_HEAP_CAPACITY, + elution_group_count: 0, + derived_tolerances: None, + thread_handle: None, + thread_control: Arc::new(AtomicU8::new(CONTROL_STOP_REQUESTED)), + receiver: None, + snapshot_points, + } + } + + /// Extract snapshot for persistence (returns None if no calibration data). + pub fn snapshot_for_persistence(&self) -> Option { + if self.snapshot_points.is_empty() { + return None; + } + // Use CalibrationState's save_snapshot if available, otherwise build from raw points + if let Some(cs) = &self.calibration_state { + Some(cs.save_snapshot(&self.snapshot_points)) + } else { + Some(calibrt::CalibrationSnapshot { + points: self.snapshot_points.iter().map(|&(x, y, w)| [x, y, w]).collect(), + grid_size: DEFAULT_GRID_SIZE, + lookback: DEFAULT_LOOKBACK, + }) + } + } + /// Start the calibration background thread. /// /// Requires both raw data and elution groups to be loaded. From cf5b456a7b9be24960bec55e29a8c6bd82d398ba Mon Sep 17 00:00:00 2001 From: "J. 
Sebastian Paez" Date: Fri, 10 Apr 2026 12:29:13 -0700 Subject: [PATCH 38/64] feat(viewer): parallelize calibration scoring via Rayon chunked par_iter --- rust/timsquery_viewer/src/calibration.rs | 145 ++++++++++++----------- 1 file changed, 73 insertions(+), 72 deletions(-) diff --git a/rust/timsquery_viewer/src/calibration.rs b/rust/timsquery_viewer/src/calibration.rs index e280b16..febbee7 100644 --- a/rust/timsquery_viewer/src/calibration.rs +++ b/rust/timsquery_viewer/src/calibration.rs @@ -387,6 +387,8 @@ impl ViewerCalibrationState { control: Arc, heap_capacity: usize, ) { + use rayon::prelude::*; + let n_elution_groups = elution_groups.len(); if n_elution_groups == 0 { let _ = tx.send(CalibrationMessage::Done { n_scored: 0 }); @@ -399,104 +401,103 @@ impl ViewerCalibrationState { let tolerance = broad_calibration_tolerance(); let cycle_mapping = index.ms1_cycle_mapping(); - - // Thread-local scorer. let n_cycles = cycle_mapping.len(); - let mut scorer = TraceScorer::new(n_cycles); - let mut heap = CalibrantHeap::new(heap_capacity); + let mut heap = CalibrantHeap::new(heap_capacity); let mut n_scored: usize = 0; - for &eg_idx in &indices { - // Check control flag. + // Process in chunks — each chunk is parallelized via Rayon. + // Between chunks: merge heaps, send snapshot, check pause/stop. + for chunk in indices.chunks(SNAPSHOT_INTERVAL) { + // Check control flag between chunks. loop { let flag = control.load(Ordering::Acquire); match flag { CONTROL_RUNNING => break, CONTROL_PAUSED => { std::thread::park(); - // Re-check after unpark. continue; } _ => { - // STOP_REQUESTED or unknown. let _ = tx.send(CalibrationMessage::Done { n_scored }); return; } } } - // Get elution group data. Skip on error. - let Ok((elution_group, expected_intensities)) = elution_groups.get_elem(eg_idx) else { - continue; - }; - - // Build extraction. 
- let extraction = match build_extraction( - &elution_group, - expected_intensities, - index.as_ref(), - &tolerance, - Some(CALIBRATION_TOP_N_FRAGMENTS), - ) { - Ok(ext) => ext, - Err(_) => continue, - }; + // Score chunk in parallel — per-thread TraceScorer + CalibrantHeap. + let chunk_heap: CalibrantHeap = chunk + .par_iter() + .fold( + || (TraceScorer::new(n_cycles), CalibrantHeap::new(heap_capacity)), + |(mut scorer, mut local_heap), &eg_idx| { + let Ok((elution_group, expected_intensities)) = + elution_groups.get_elem(eg_idx) + else { + return (scorer, local_heap); + }; - // Compute traces. - if scorer.compute_traces(&extraction).is_err() { - continue; - } + let extraction = match build_extraction( + &elution_group, + expected_intensities, + index.as_ref(), + &tolerance, + Some(CALIBRATION_TOP_N_FRAGMENTS), + ) { + Ok(ext) => ext, + Err(_) => return (scorer, local_heap), + }; - // Build RT mapper closure. - let cycle_offset = extraction.chromatograms.cycle_offset(); - let rt_mapper = |idx: usize| -> u32 { - cycle_mapping - .rt_milis_for_index(&MS1CycleIndex::new((idx + cycle_offset) as u32)) - .unwrap_or(0) - }; + if scorer.compute_traces(&extraction).is_err() { + return (scorer, local_heap); + } - // Suggest apex. - let apex = match scorer.suggest_apex(&rt_mapper, 0) { - Ok(a) => a, - Err(_) => continue, - }; + let cycle_offset = extraction.chromatograms.cycle_offset(); + let rt_mapper = |idx: usize| -> u32 { + cycle_mapping + .rt_milis_for_index(&MS1CycleIndex::new( + (idx + cycle_offset) as u32, + )) + .unwrap_or(0) + }; - let candidate = CalibrantCandidate { - score: apex.score, - apex_rt_seconds: apex.retention_time_ms as f32 / 1000.0, - speclib_index: eg_idx, - library_rt_seconds: elution_group.rt_seconds(), - }; - heap.push(candidate); - n_scored += 1; - - // Periodic snapshot — always send at interval. - // Heap content changes even when len() stays at capacity - // (better candidates evict worse ones). 
- if n_scored % SNAPSHOT_INTERVAL == 0 { - let points: Vec<(f64, f64, f64)> = heap - .iter() - .map(|c| { - ( - c.library_rt_seconds as f64, - c.apex_rt_seconds as f64, - c.score as f64, - ) - }) - .collect(); + if let Ok(apex) = scorer.suggest_apex(&rt_mapper, 0) { + local_heap.push(CalibrantCandidate { + score: apex.score, + apex_rt_seconds: apex.retention_time_ms as f32 / 1000.0, + speclib_index: eg_idx, + library_rt_seconds: elution_group.rt_seconds(), + }); + } + (scorer, local_heap) + }, + ) + .map(|(_, local_heap)| local_heap) + .reduce(|| CalibrantHeap::new(heap_capacity), CalibrantHeap::merge); + + // Merge chunk results into main heap. + heap = heap.merge(chunk_heap); + n_scored += chunk.len(); + + // Send snapshot. + let points: Vec<(f64, f64, f64)> = heap + .iter() + .map(|c| { + ( + c.library_rt_seconds as f64, + c.apex_rt_seconds as f64, + c.score as f64, + ) + }) + .collect(); - let msg = CalibrationMessage::Snapshot { - n_scored, - heap_len: heap.len(), - points, - }; - // Use try_send: if the channel is full, skip this snapshot. - let _ = tx.try_send(msg); - } + let _ = tx.try_send(CalibrationMessage::Snapshot { + n_scored, + heap_len: heap.len(), + points, + }); } - // Final snapshot with Done marker. let _ = tx.send(CalibrationMessage::Done { n_scored }); } From de0f41d74e312249ee651557595f03a6c4e866e8 Mon Sep 17 00:00:00 2001 From: "J. 
Sebastian Paez" Date: Fri, 10 Apr 2026 12:49:49 -0700 Subject: [PATCH 39/64] feat(calibrt): ridge width measurement for tolerance estimation - CalibrationState::measure_ridge_width() expands from path cells into adjacent cells above a weight threshold fraction - Returns RidgeMeasurement { x, half_width, total_weight } per column - Viewer: weighted-average half-width as global tolerance (heavy columns count more), replaces non-suppressed cell residual approach --- rust/calibrt/src/lib.rs | 82 ++++++++++++++++++++++++ rust/timsquery_viewer/src/calibration.rs | 64 +++++++++--------- 2 files changed, 111 insertions(+), 35 deletions(-) diff --git a/rust/calibrt/src/lib.rs b/rust/calibrt/src/lib.rs index ac4e1d5..e9ef2dd 100644 --- a/rust/calibrt/src/lib.rs +++ b/rust/calibrt/src/lib.rs @@ -150,6 +150,17 @@ impl CalibrationCurve { } } +/// Measurement of the evidence ridge width at one grid column. +#[derive(Debug, Clone)] +pub struct RidgeMeasurement { + /// Center x position (library RT, seconds). + pub x: f64, + /// Half-width of the ridge in y-units (seconds). + pub half_width: f64, + /// Total accumulated weight in the expanded range — more weight = more trustworthy. + pub total_weight: f64, +} + /// Serializable snapshot of calibration data — points + config. /// Used for save/load. Does not include the fitted curve (reconstructed on load). #[derive(Debug, Clone, serde::Serialize, serde::Deserialize)] @@ -263,6 +274,77 @@ impl CalibrationState { self.curve.as_ref() } + /// Measure the width of the evidence "mountain" around the fitted path. + /// + /// For each grid column that contains a path cell, expands up and down + /// from the path cell until cell weight drops below `fraction` of the + /// path cell's weight. Returns `(column_center_x, half_width_y, total_weight)`. + /// + /// `fraction`: weight threshold as a fraction of the path cell's weight + /// (e.g., 0.1 = expand until weight < 10% of path cell). 
+ /// `total_weight`: sum of all cell weights in the expanded range — heavier + /// columns should carry more authority in tolerance estimation. + pub fn measure_ridge_width(&self, fraction: f64) -> Vec { + let bins = self.grid.bins; + let cells = self.grid.grid_cells(); + let y_span = self.grid.y_range.1 - self.grid.y_range.0; + let cell_h = y_span / bins as f64; + + let mut widths = Vec::new(); + + for &path_idx in &self.path_indices { + let path_node = &cells[path_idx]; + if path_node.center.weight <= 0.0 { + continue; + } + + let gx = path_idx % bins; + let gy = path_idx / bins; + let threshold = path_node.center.weight * fraction; + + // Expand upward (increasing gy) from path cell + let mut upper_gy = gy; + let mut total_weight = path_node.center.weight; + for dy in 1..bins { + let check_gy = gy + dy; + if check_gy >= bins { + break; + } + let idx = check_gy * bins + gx; + if cells[idx].center.weight < threshold { + break; + } + total_weight += cells[idx].center.weight; + upper_gy = check_gy; + } + + // Expand downward (decreasing gy) from path cell + let mut lower_gy = gy; + for dy in 1..bins { + if dy > gy { + break; + } + let check_gy = gy - dy; + let idx = check_gy * bins + gx; + if cells[idx].center.weight < threshold { + break; + } + total_weight += cells[idx].center.weight; + lower_gy = check_gy; + } + + let half_width = ((upper_gy - lower_gy) as f64 + 1.0) * cell_h * 0.5; + + widths.push(RidgeMeasurement { + x: path_node.center.x, + half_width, + total_weight, + }); + } + + widths + } + /// Bundle current config into a snapshot (caller provides the points). 
pub fn save_snapshot(&self, points: &[(f64, f64, f64)]) -> CalibrationSnapshot { CalibrationSnapshot { diff --git a/rust/timsquery_viewer/src/calibration.rs b/rust/timsquery_viewer/src/calibration.rs index febbee7..e138e3f 100644 --- a/rust/timsquery_viewer/src/calibration.rs +++ b/rust/timsquery_viewer/src/calibration.rs @@ -919,60 +919,54 @@ impl ViewerCalibrationState { /// Render tolerance suggestion and Apply button. fn render_tolerance_suggestion(&mut self, ui: &mut egui::Ui, tolerance: &mut Tolerance) { - // Compute tolerance from NON-SUPPRESSED GRID CELL centroids. - // These ~90 cells survived NMS — they represent the grid's consensus evidence. - // Not all 2000 raw calibrants (many false positives with huge residuals). - // Not the ~20 path points (near-zero residuals by construction). - let stats = self + // Measure ridge width: expand from path cells into adjacent cells + // with weight above 10% of the path cell's weight. Weight-averaged + // half-width gives the global tolerance — heavy columns count more. 
+ let ridge_stats = self .calibration_state .as_ref() .and_then(|cs| { - let curve = cs.curve()?; - let cells = cs.grid_cells(); - - // Absolute residuals from non-suppressed cell centroids - let mut abs_residuals: Vec = cells - .iter() - .filter(|n| !n.suppressed && n.center.weight > 0.0) - .filter_map(|n| { - let pred = match curve.predict(n.center.x) { - Ok(y) => y, - Err(calibrt::CalibRtError::OutOfBounds(y)) => y, - Err(_) => return None, - }; - Some((pred - n.center.y).abs()) - }) - .collect(); + cs.curve()?; // ensure curve is fitted + let measurements = cs.measure_ridge_width(0.1); + if measurements.is_empty() { + return None; + } - if abs_residuals.len() < 4 { + // Weighted average half-width (seconds) + let total_weight: f64 = measurements.iter().map(|m| m.total_weight).sum(); + if total_weight <= 0.0 { return None; } + let weighted_hw: f64 = measurements + .iter() + .map(|m| m.half_width * m.total_weight) + .sum::() + / total_weight; - abs_residuals.sort_by(|a, b| a.partial_cmp(b).unwrap_or(std::cmp::Ordering::Equal)); - let n = abs_residuals.len(); - let mad_seconds = abs_residuals[n / 2]; - let q75_seconds = abs_residuals[3 * n / 4]; + // Also report min/max for context + let min_hw = measurements.iter().map(|m| m.half_width).fold(f64::MAX, f64::min); + let max_hw = measurements.iter().map(|m| m.half_width).fold(0.0f64, f64::max); - Some((mad_seconds, q75_seconds, n)) + Some((weighted_hw, min_hw, max_hw, measurements.len())) }); - // Suggested RT tolerance: 3x MAD of retained cell residuals, floored at 0.5 min. - let suggested = stats.map(|(mad_s, q75_s, n_cells)| { - let rt_min = (mad_s / 60.0 * 3.0).max(0.5); - (rt_min, mad_s, q75_s, n_cells) + // Suggested RT tolerance from weighted ridge half-width, floored at 0.5 min. 
+ let suggested = ridge_stats.map(|(hw_s, min_s, max_s, n_cols)| { + let rt_min = (hw_s / 60.0).max(0.5); + (rt_min, hw_s, min_s, max_s, n_cols) }); - if let Some((rt_min, _, _, _)) = suggested { + if let Some((rt_min, _, _, _, _)) = suggested { self.derived_tolerances = Some(DerivedTolerances { rt_tolerance_minutes: rt_min as f32, }); } ui.horizontal(|ui| { - if let Some((rt_min, mad_s, q75_s, n_cells)) = suggested { + if let Some((rt_min, hw_s, min_s, max_s, n_cols)) = suggested { ui.label(format!( - "Suggested RT: \u{00B1}{:.2} min MAD: {:.1} s Q75: {:.1} s ({} cells)", - rt_min, mad_s, q75_s, n_cells, + "Suggested RT: \u{00B1}{:.2} min Ridge: {:.0} s (min {:.0}, max {:.0}) ({} cols)", + rt_min, hw_s, min_s, max_s, n_cols, )); if ui.button("Apply").clicked() { let rt_tol = rt_min as f32; From 0710cb28207ea1e84794b0760687a237f63f12b9 Mon Sep 17 00:00:00 2001 From: "J. Sebastian Paez" Date: Fri, 10 Apr 2026 12:58:51 -0700 Subject: [PATCH 40/64] feat: position-dependent RT tolerance from ridge width measurement - CalibrationResult stores ridge widths and interpolates at query RT - get_tolerance() now returns position-dependent RT tolerance (wider at edges, tighter in middle) based on the calibration grid ridge width - CLI's calibrate_from_phase1 switched to CalibrationState API for ridge measurement after curve fitting - get_tolerance receives library RT (ridge widths indexed by library RT) - Fallback to uniform tolerance when no ridge data available - RIDGE_WIDTH_MULTIPLIER (1.0) and MIN_RT_TOLERANCE_MINUTES (0.5) tunable --- rust/timsseek/src/rt_calibration.rs | 64 +++++++++++++++++++++++++-- rust/timsseek/src/scoring/pipeline.rs | 2 +- rust/timsseek_cli/src/processing.rs | 31 +++++++++++-- 3 files changed, 89 insertions(+), 8 deletions(-) diff --git a/rust/timsseek/src/rt_calibration.rs b/rust/timsseek/src/rt_calibration.rs index ed6d9f9..aa696fe 100644 --- a/rust/timsseek/src/rt_calibration.rs +++ b/rust/timsseek/src/rt_calibration.rs @@ -4,7 +4,9 @@ pub use 
calibrt::{ CalibRtError, CalibrationCurve as RTCalibration, CalibrationSnapshot, + CalibrationState as CalibratedGrid, Point, + RidgeMeasurement, calibrate_with_ranges, }; use serde::{Serialize, Deserialize}; @@ -16,13 +18,24 @@ use timsquery::models::tolerance::{ RtTolerance, }; +/// Multiplier applied to the ridge half-width to get the query tolerance. +/// 1.0 = use the FW@10%max directly (already generous). +/// Increase for more conservative searches. +const RIDGE_WIDTH_MULTIPLIER: f64 = 1.0; + +/// Minimum RT tolerance in minutes (prevents pathologically tight windows). +const MIN_RT_TOLERANCE_MINUTES: f32 = 0.5; + /// Immutable calibration result. Provides RT conversion and per-query tolerance /// without mutating the speclib. pub struct CalibrationResult { cal_curve: RTCalibration, + /// Fallback uniform RT tolerance (used when no ridge data available). rt_tolerance_minutes: f32, mz_tolerance_ppm: (f64, f64), mobility_tolerance_pct: (f32, f32), + /// Position-dependent ridge widths (sorted by x). Empty = use uniform fallback. + ridge_widths: Vec, } impl CalibrationResult { @@ -37,7 +50,43 @@ impl CalibrationResult { rt_tolerance_minutes, mz_tolerance_ppm, mobility_tolerance_pct, + ridge_widths: Vec::new(), + } + } + + pub fn with_ridge_widths(mut self, mut widths: Vec) -> Self { + widths.sort_by(|a, b| a.x.partial_cmp(&b.x).unwrap_or(std::cmp::Ordering::Equal)); + self.ridge_widths = widths; + self + } + + /// Interpolate ridge half-width at a given library RT (seconds). + /// Returns the half-width in seconds, or None if no ridge data. 
+ fn ridge_half_width_at(&self, library_rt_seconds: f64) -> Option { + if self.ridge_widths.is_empty() { + return None; } + let widths = &self.ridge_widths; + + // Clamp to endpoints + if library_rt_seconds <= widths[0].x { + return Some(widths[0].half_width); + } + if library_rt_seconds >= widths[widths.len() - 1].x { + return Some(widths[widths.len() - 1].half_width); + } + + // Binary search for the bracketing pair + let pos = widths.partition_point(|m| m.x < library_rt_seconds); + if pos == 0 { + return Some(widths[0].half_width); + } + let left = &widths[pos - 1]; + let right = &widths[pos]; + + // Linear interpolation + let t = (library_rt_seconds - left.x) / (right.x - left.x).max(1e-9); + Some(left.half_width + t * (right.half_width - left.half_width)) } /// Convert indexed RT to calibrated absolute RT (seconds). @@ -54,11 +103,19 @@ impl CalibrationResult { self.rt_tolerance_minutes } - /// Get per-query tolerance. Initially uniform; future: position-dependent. - pub fn get_tolerance(&self, _mz: f64, _mobility: f32, _rt: f32) -> Tolerance { + /// Get per-query tolerance. Uses position-dependent ridge width when available, + /// falls back to uniform `rt_tolerance_minutes` otherwise. + /// `rt` is the library RT in seconds (pre-calibration). 
+ pub fn get_tolerance(&self, _mz: f64, _mobility: f32, rt: f32) -> Tolerance { + let rt_tol_minutes = self + .ridge_half_width_at(rt as f64) + .map(|hw| (hw * RIDGE_WIDTH_MULTIPLIER / 60.0) as f32) + .unwrap_or(self.rt_tolerance_minutes) + .max(MIN_RT_TOLERANCE_MINUTES); + Tolerance { ms: MzTolerance::Ppm(self.mz_tolerance_ppm), - rt: RtTolerance::Minutes((self.rt_tolerance_minutes, self.rt_tolerance_minutes)), + rt: RtTolerance::Minutes((rt_tol_minutes, rt_tol_minutes)), mobility: MobilityTolerance::Pct(self.mobility_tolerance_pct), quad: QuadTolerance::Absolute((0.1, 0.1)), } @@ -170,6 +227,7 @@ impl CalibrationResult { rt_tolerance_minutes: 1.0, mz_tolerance_ppm: (10.0, 10.0), mobility_tolerance_pct: (5.0, 5.0), + ridge_widths: Vec::new(), } } } diff --git a/rust/timsseek/src/scoring/pipeline.rs b/rust/timsseek/src/scoring/pipeline.rs index 9b7748e..a037916 100644 --- a/rust/timsseek/src/scoring/pipeline.rs +++ b/rust/timsseek/src/scoring/pipeline.rs @@ -536,7 +536,7 @@ impl Scorer { let tolerance = calibration.get_tolerance( item.query.mono_precursor_mz(), item.query.mobility_ook0(), - calibrated_rt, + original_irt, // library RT — ridge widths are indexed by library RT ); let calibrated_query = item.query.clone().with_rt_seconds(calibrated_rt); diff --git a/rust/timsseek_cli/src/processing.rs b/rust/timsseek_cli/src/processing.rs index e5952d2..d138908 100644 --- a/rust/timsseek_cli/src/processing.rs +++ b/rust/timsseek_cli/src/processing.rs @@ -23,8 +23,8 @@ use timsseek::ml::rescore; use timsseek::rt_calibration::{ CalibRtError, CalibrationResult, + CalibratedGrid, Point, - calibrate_with_ranges, }; use timsseek::scoring::{ CalibrantCandidate, @@ -482,8 +482,31 @@ fn calibrate_from_phase1( |(mnx, mxx, mny, mxy), p| (mnx.min(p.x), mxx.max(p.x), mny.min(p.y), mxy.max(p.y)), ); - let cal_curve = - calibrate_with_ranges(&points, (min_x, max_x), (min_y, max_y), config.grid_size, config.dp_lookback)?; + // Use CalibrationState for fitting + ridge width 
measurement + let mut cal_state = CalibratedGrid::new( + config.grid_size, (min_x, max_x), (min_y, max_y), config.dp_lookback, + )?; + cal_state.update(points.iter().map(|p| (p.x, p.y, p.weight))); + cal_state.fit(); + let cal_curve = cal_state.curve() + .ok_or(CalibRtError::NoPoints)? + .clone(); + + // Measure ridge width for position-dependent RT tolerance + let ridge_widths = cal_state.measure_ridge_width(0.1); + if !ridge_widths.is_empty() { + let total_weight: f64 = ridge_widths.iter().map(|m| m.total_weight).sum(); + let weighted_hw: f64 = ridge_widths.iter() + .map(|m| m.half_width * m.total_weight) + .sum::() / total_weight.max(1.0); + info!( + "Ridge width: weighted avg {:.1}s across {} columns (min {:.1}s, max {:.1}s)", + weighted_hw, + ridge_widths.len(), + ridge_widths.iter().map(|m| m.half_width).fold(f64::MAX, f64::min), + ridge_widths.iter().map(|m| m.half_width).fold(0.0f64, f64::max), + ); + } // === Step B: Measure m/z and mobility errors at calibrant apexes === let query_tolerance = Tolerance { @@ -565,7 +588,7 @@ fn calibrate_from_phase1( rt_tolerance_minutes, mz_tolerance_ppm, mobility_tolerance_pct, - )) + ).with_ridge_widths(ridge_widths)) } #[cfg_attr( From 4b0cf62c044ec81ce027bbb935fd6d58026fd514 Mon Sep 17 00:00:00 2001 From: "J. 
Sebastian Paez" Date: Fri, 10 Apr 2026 13:00:12 -0700 Subject: [PATCH 41/64] feat(viewer): display ridge tolerance envelope on calibration heatmap --- rust/timsquery_viewer/src/calibration.rs | 36 +++++++++++++++++++++++- 1 file changed, 35 insertions(+), 1 deletion(-) diff --git a/rust/timsquery_viewer/src/calibration.rs b/rust/timsquery_viewer/src/calibration.rs index e138e3f..84a3033 100644 --- a/rust/timsquery_viewer/src/calibration.rs +++ b/rust/timsquery_viewer/src/calibration.rs @@ -826,7 +826,7 @@ impl ViewerCalibrationState { ); } - // Fitted curve: cyan line sampled at 200 points + // Fitted curve + ridge envelope if let Some(curve) = cs.curve() { let curve_points = curve.points(); if curve_points.len() >= 2 { @@ -857,6 +857,40 @@ impl ViewerCalibrationState { .width(2.0), ); } + + // Ridge envelope: semi-transparent band showing tolerance width + let ridge = cs.measure_ridge_width(0.1); + if ridge.len() >= 2 { + // Build a closed polygon: upper edge left-to-right, then lower edge right-to-left + let mut envelope: Vec<[f64; 2]> = Vec::with_capacity(ridge.len() * 2); + + // Upper edge (left to right) + for m in &ridge { + let y = match curve.predict(m.x) { + Ok(y) => y, + Err(calibrt::CalibRtError::OutOfBounds(y)) => y, + Err(_) => continue, + }; + envelope.push([m.x, y + m.half_width]); + } + // Lower edge (right to left) + for m in ridge.iter().rev() { + let y = match curve.predict(m.x) { + Ok(y) => y, + Err(calibrt::CalibRtError::OutOfBounds(y)) => y, + Err(_) => continue, + }; + envelope.push([m.x, y - m.half_width]); + } + + if envelope.len() >= 3 { + plot_ui.polygon( + Polygon::new("ridge", PlotPoints::new(envelope)) + .fill_color(egui::Color32::from_rgba_unmultiplied(0, 220, 220, 30)) + .stroke(egui::Stroke::new(1.0, egui::Color32::from_rgba_unmultiplied(0, 220, 220, 80))) + ); + } + } } } From e4a5ce0af2d4a69d9cc51a4e3524f81eb555d6dc Mon Sep 17 00:00:00 2001 From: "J. 
Sebastian Paez" Date: Fri, 10 Apr 2026 13:05:22 -0700 Subject: [PATCH 42/64] fix(viewer): ridge envelope as dashed boundary lines instead of filled polygon --- rust/timsquery_viewer/src/calibration.rs | 64 ++++++++++++++---------- 1 file changed, 37 insertions(+), 27 deletions(-) diff --git a/rust/timsquery_viewer/src/calibration.rs b/rust/timsquery_viewer/src/calibration.rs index 84a3033..0afaf5b 100644 --- a/rust/timsquery_viewer/src/calibration.rs +++ b/rust/timsquery_viewer/src/calibration.rs @@ -858,36 +858,46 @@ impl ViewerCalibrationState { ); } - // Ridge envelope: semi-transparent band showing tolerance width + // Ridge envelope: upper and lower boundary lines showing tolerance width let ridge = cs.measure_ridge_width(0.1); if ridge.len() >= 2 { - // Build a closed polygon: upper edge left-to-right, then lower edge right-to-left - let mut envelope: Vec<[f64; 2]> = Vec::with_capacity(ridge.len() * 2); - - // Upper edge (left to right) - for m in &ridge { - let y = match curve.predict(m.x) { - Ok(y) => y, - Err(calibrt::CalibRtError::OutOfBounds(y)) => y, - Err(_) => continue, - }; - envelope.push([m.x, y + m.half_width]); - } - // Lower edge (right to left) - for m in ridge.iter().rev() { - let y = match curve.predict(m.x) { - Ok(y) => y, - Err(calibrt::CalibRtError::OutOfBounds(y)) => y, - Err(_) => continue, - }; - envelope.push([m.x, y - m.half_width]); + let ridge_color = egui::Color32::from_rgba_unmultiplied(0, 220, 220, 100); + + let upper: Vec<[f64; 2]> = ridge.iter() + .filter_map(|m| { + let y = match curve.predict(m.x) { + Ok(y) => y, + Err(calibrt::CalibRtError::OutOfBounds(y)) => y, + Err(_) => return None, + }; + Some([m.x, y + m.half_width]) + }) + .collect(); + let lower: Vec<[f64; 2]> = ridge.iter() + .filter_map(|m| { + let y = match curve.predict(m.x) { + Ok(y) => y, + Err(calibrt::CalibRtError::OutOfBounds(y)) => y, + Err(_) => return None, + }; + Some([m.x, y - m.half_width]) + }) + .collect(); + + if upper.len() >= 2 { + plot_ui.line( + 
Line::new("ridge upper", PlotPoints::new(upper)) + .color(ridge_color) + .width(1.5) + .style(egui_plot::LineStyle::dashed_dense()), + ); } - - if envelope.len() >= 3 { - plot_ui.polygon( - Polygon::new("ridge", PlotPoints::new(envelope)) - .fill_color(egui::Color32::from_rgba_unmultiplied(0, 220, 220, 30)) - .stroke(egui::Stroke::new(1.0, egui::Color32::from_rgba_unmultiplied(0, 220, 220, 80))) + if lower.len() >= 2 { + plot_ui.line( + Line::new("ridge lower", PlotPoints::new(lower)) + .color(ridge_color) + .width(1.5) + .style(egui_plot::LineStyle::dashed_dense()), ); } } From 050559ce66828d5534f942b5623ffd272606fe5a Mon Sep 17 00:00:00 2001 From: "J. Sebastian Paez" Date: Fri, 10 Apr 2026 13:37:29 -0700 Subject: [PATCH 43/64] feat(cli): print calibration summary after Phase 2 (ridge width, tolerances) --- rust/timsseek/src/rt_calibration.rs | 35 +++++++++++++++++++++++++++++ rust/timsseek_cli/src/processing.rs | 19 ++++++++++++++-- 2 files changed, 52 insertions(+), 2 deletions(-) diff --git a/rust/timsseek/src/rt_calibration.rs b/rust/timsseek/src/rt_calibration.rs index aa696fe..92fdf16 100644 --- a/rust/timsseek/src/rt_calibration.rs +++ b/rust/timsseek/src/rt_calibration.rs @@ -121,6 +121,33 @@ impl CalibrationResult { } } + pub fn mz_tolerance(&self) -> (f64, f64) { + self.mz_tolerance_ppm + } + + pub fn mobility_tolerance(&self) -> (f32, f32) { + self.mobility_tolerance_pct + } + + /// Summary of ridge width measurements for reporting. 
+ pub fn ridge_width_summary(&self) -> Option<RidgeWidthSummary> {
+ if self.ridge_widths.is_empty() {
+ return None;
+ }
+ let total_weight: f64 = self.ridge_widths.iter().map(|m| m.total_weight).sum();
+ let weighted_avg = self.ridge_widths.iter()
+ .map(|m| m.half_width * m.total_weight)
+ .sum::<f64>() / total_weight.max(1.0);
+ let min = self.ridge_widths.iter().map(|m| m.half_width).fold(f64::MAX, f64::min);
+ let max = self.ridge_widths.iter().map(|m| m.half_width).fold(0.0f64, f64::max);
+ Some(RidgeWidthSummary {
+ weighted_avg,
+ min,
+ max,
+ n_columns: self.ridge_widths.len(),
+ })
+ }
+
 /// Tolerance for the secondary spectral query at a detected apex.
 pub fn get_spectral_tolerance(&self) -> Tolerance {
 Tolerance {
@@ -232,6 +259,14 @@ impl CalibrationResult {
 }
 }
 
+/// Summary of ridge width measurements for reporting.
+pub struct RidgeWidthSummary {
+ pub weighted_avg: f64,
+ pub min: f64,
+ pub max: f64,
+ pub n_columns: usize,
+}
+
 /// JSON v1 calibration file format — shared between CLI and viewer.
 #[derive(Debug, Serialize, Deserialize)]
 pub struct SavedCalibration {
diff --git a/rust/timsseek_cli/src/processing.rs b/rust/timsseek_cli/src/processing.rs
index d138908..16468a7 100644
--- a/rust/timsseek_cli/src/processing.rs
+++ b/rust/timsseek_cli/src/processing.rs
@@ -213,8 +213,23 @@ pub fn execute_pipeline(
 };
 let phase2_ms = phase2_start.elapsed().as_millis() as u64;
 println!(
- "Phase 2: Calibrate ....... {:.1}s",
- phase2_ms as f64 / 1000.0
+ "Phase 2: Calibrate ....... 
{:.1}s ({} calibrants → {} path nodes)", + phase2_ms as f64 / 1000.0, + calibrant_points.len(), + // Path nodes info from the calibration log (already printed by calibrt) + calibration.ridge_width_summary().map_or(0, |s| s.n_columns), + ); + // Print tolerance summary + if let Some(summary) = calibration.ridge_width_summary() { + println!( + " RT tolerance (ridge): avg {:.0}s, min {:.0}s, max {:.0}s ({} cols)", + summary.weighted_avg, summary.min, summary.max, summary.n_columns, + ); + } + println!( + " m/z: ({:.1}, {:.1}) ppm mobility: ({:.1}, {:.1}) %", + calibration.mz_tolerance().0, calibration.mz_tolerance().1, + calibration.mobility_tolerance().0, calibration.mobility_tolerance().1, ); // Save calibration as JSON v1 (compatible with viewer load) From 564acc94685accf37f3b07676994a65b578cbbdd Mon Sep 17 00:00:00 2001 From: "J. Sebastian Paez" Date: Fri, 10 Apr 2026 15:52:13 -0700 Subject: [PATCH 44/64] refactor: extract Array2D into standalone crate for shared use Move Array2D and ArrayElement from timsquery into a new rust/array2d workspace crate with its own Array2DError type. timsquery re-exports from array2d and bridges errors via From for DataProcessingError. calibrt gains array2d as a direct dependency. 
--- Cargo.lock | 9 + Cargo.toml | 2 + rust/array2d/Cargo.toml | 9 + rust/array2d/src/lib.rs | 683 +++++++++++++++++ rust/calibrt/Cargo.toml | 1 + rust/timsquery/Cargo.toml | 1 + rust/timsquery/src/errors.rs | 11 + rust/timsquery/src/models/base/arrays.rs | 693 +----------------- .../src/serde/chromatogram_output.rs | 4 +- 9 files changed, 723 insertions(+), 690 deletions(-) create mode 100644 rust/array2d/Cargo.toml create mode 100644 rust/array2d/src/lib.rs diff --git a/Cargo.lock b/Cargo.lock index 032190a..a777320 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -278,6 +278,13 @@ dependencies = [ "x11rb", ] +[[package]] +name = "array2d" +version = "0.26.0" +dependencies = [ + "serde", +] + [[package]] name = "arrayref" version = "0.3.9" @@ -1360,6 +1367,7 @@ dependencies = [ name = "calibrt" version = "0.26.0" dependencies = [ + "array2d", "insta", "rand 0.8.5", "serde", @@ -6010,6 +6018,7 @@ dependencies = [ name = "timsquery" version = "0.26.0" dependencies = [ + "array2d", "arrow", "bincode 1.3.3", "bon", diff --git a/Cargo.toml b/Cargo.toml index 1377d99..1e0f71d 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -1,6 +1,7 @@ [workspace] resolver = "2" members = [ + "rust/array2d", "rust/calibrt", "rust/micromzpaf", "rust/timscentroid", @@ -12,6 +13,7 @@ members = [ "python/timsquery_pyo3" ] default-members = [ + "rust/array2d", "rust/calibrt", "rust/micromzpaf", "rust/timscentroid", diff --git a/rust/array2d/Cargo.toml b/rust/array2d/Cargo.toml new file mode 100644 index 0000000..cfa35f3 --- /dev/null +++ b/rust/array2d/Cargo.toml @@ -0,0 +1,9 @@ +[package] +name = "array2d" +version.workspace = true +edition.workspace = true +authors.workspace = true +license.workspace = true + +[dependencies] +serde = { workspace = true } diff --git a/rust/array2d/src/lib.rs b/rust/array2d/src/lib.rs new file mode 100644 index 0000000..f800823 --- /dev/null +++ b/rust/array2d/src/lib.rs @@ -0,0 +1,683 @@ +use std::fmt::Debug; + +use serde::Serialize; +use serde::ser::SerializeSeq; + 
+use std::ops::Range; + +#[derive(Debug, Clone)] +pub enum Array2DError { + EmptyData, + DimensionMismatch, + IndexOutOfBounds(usize), +} + +pub trait ArrayElement: + Clone + + Copy + + Default + + std::ops::Mul + + std::fmt::Display + + std::fmt::Debug + + std::ops::Add + + std::ops::AddAssign +{ +} + +/// Blanket trait implementation on elements that +/// can be used as a value in the array. +impl< + T: Clone + + Copy + + Default + + std::ops::Mul + + std::fmt::Display + + std::fmt::Debug + + std::ops::Add + + std::ops::AddAssign, +> ArrayElement for T +{ +} + +/// Implements a way to represent an array of +/// dimensions x-y that will be later used to +/// implement an mz-major and a rt-major array +/// representation. +/// +/// Simple 2D array +/// +/// `values` is a flattened array of values +/// `n_col` is the number of values in each row +/// `n_row` is the number of rows +/// +/// Note on memory layout: +/// +/// Values that belong to the same row are adjacent +/// in memory. +#[derive(Debug, Clone, PartialEq)] +pub struct Array2D { + pub values: Vec, + pub n_col: usize, + pub n_row: usize, +} + +impl Serialize for Array2D { + fn serialize(&self, serializer: S) -> Result + where + S: serde::Serializer, + { + let mut seq = serializer.serialize_seq(Some(self.n_row))?; + for row in self.iter_rows() { + seq.serialize_element(row)?; + } + seq.end() + } +} + +impl Array2D { + pub fn new, C: AsRef<[S]>>(values: C) -> Result, Array2DError> { + let nrows = values.as_ref().len(); + if nrows == 0 { + return Err(Array2DError::EmptyData); + } + let ncols = values.as_ref()[0].as_ref().len(); + if ncols == 0 { + return Err(Array2DError::EmptyData); + } + + let expected_size = nrows * ncols; + let values: Vec = values + .as_ref() + .iter() + .flat_map(|x| x.as_ref()) + .cloned() + .collect(); + + if values.len() != expected_size { + return Err(Array2DError::DimensionMismatch); + } + + Ok(Array2D { + values, + n_col: ncols, + n_row: nrows, + }) + } + + pub fn 
new_transposed, C: AsRef<[S]>>( + values: C, + ) -> Result, Array2DError> { + let ncols = values.as_ref().len(); + if ncols == 0 { + return Err(Array2DError::EmptyData); + } + let nrows = values.as_ref()[0].as_ref().len(); + if nrows == 0 { + return Err(Array2DError::EmptyData); + } + + let expected_size = nrows * ncols; + let mut out_values = vec![None; expected_size]; + + for (ci, col) in values.as_ref().iter().enumerate() { + if col.as_ref().len() != nrows { + return Err(Array2DError::DimensionMismatch); + } + for (ri, val) in col.as_ref().iter().enumerate() { + let idx = ri * ncols + ci; // Changed indexing for row-major order + out_values[idx] = Some(*val); + } + } + + let unwrapped_values: Vec = out_values.into_iter().flatten().collect(); + + Ok(Array2D { + values: unwrapped_values, + n_col: ncols, + n_row: nrows, + }) + } + + pub fn from_flat_vector( + values: Vec, + nrows: usize, + ncols: usize, + ) -> Result, Array2DError> { + if values.len() != nrows * ncols { + return Err(Array2DError::DimensionMismatch); + } + Ok(Array2D { + values, + n_col: ncols, + n_row: nrows, + }) + } + + /// Apply a function to each row of the array + /// + /// Example: + /// ``` + /// use array2d::Array2D; + /// let array = Array2D::new(vec![vec![1, 2, 3], vec![4, 5, 6]]).unwrap(); + /// let result: Vec = array.row_apply(|x| x.iter().sum()).collect(); + /// assert_eq!(result, vec![6, 15]); + /// + /// let array = Array2D::new_transposed(vec![vec![1, 2, 3], vec![4, 5, 6]]).unwrap(); + /// let result: Vec = array.row_apply(|x| x.iter().sum()).collect(); + /// assert_eq!(result, vec![5, 7, 9]); + /// ``` + pub fn row_apply<'a: 'b, 'b, W, F: FnMut(&[T]) -> W + 'b>( + &'a self, + f: F, + ) -> impl Iterator + 'b { + self.iter_rows().map(f) + } + + pub fn iter_mut_rows(&mut self) -> impl Iterator { + self.values.chunks_mut(self.n_col) + } + + pub fn iter_rows(&self) -> impl Iterator { + self.values.chunks(self.n_col) + } + + /// Apply a function to each pair of rows of the array + /// + 
/// For example if I want to calculate the MAE between each row of the array + /// I can do: + /// ``` + /// use array2d::Array2D; + /// let array: Array2D = Array2D::new(vec![vec![1., 2., 3.], vec![4., 5., 6.]]).unwrap(); + /// let result: Vec = array.outer_row_apply(|x, y| (x.iter().zip(y.iter()).map(|(a, b)| (a - b).abs() as f64).sum::()) / x.len() as f64); + /// assert_eq!(result, vec![3.0]); + /// + /// let array: Array2D = Array2D::new_transposed(vec![vec![1.,2., 3.], vec![4., 5., 6.]]).unwrap(); + /// let result: Vec = array.outer_row_apply(|x, y| (x.iter().zip(y.iter()).map(|(a, b)| (a - b).abs() as f64).sum::()) / x.len() as f64); + /// assert_eq!(result, vec![1.0, 2.0, 1.0]); + /// ``` + pub fn outer_row_apply W>(&self, mut f: F) -> Vec { + let mut result = Vec::new(); + for i in 0..self.n_row { + let row = self.get_row(i).expect("Failed to get row, malformed array"); + for j in 0..self.n_row { + if j >= i { + continue; + } + let other_row = self.get_row(j).expect("Failed to get row, malformed array"); + result.push(f(row, other_row)); + } + } + result + } + + pub fn insert(&mut self, row_idx: usize, col_idx: usize, value: T) { + let idx = row_idx * self.n_col + col_idx; + match self.values.get_mut(idx) { + Some(v) => *v = value, + None => panic!( + "Index out of bounds ({}/{}): row {}/{}, col {}/{}", + idx, + self.values.len(), + row_idx, + self.n_row, + col_idx, + self.n_col, + ), + }; + } + + fn get_row_limits(&self, index: usize) -> Option> { + let start = index * self.n_col; + let end = start + self.n_col; + if end > self.values.len() || start >= self.values.len() { + return None; + } + Some(start..end) + } + + pub fn get_row(&self, index: usize) -> Option<&[T]> { + Some(&self.values[self.get_row_limits(index)?]) + } + + pub fn get_row_mut(&mut self, index: usize) -> Result<&mut [T], Array2DError> { + let range = self + .get_row_limits(index) + .ok_or(Array2DError::IndexOutOfBounds(index))?; + Ok(&mut self.values[range]) + } + + pub fn 
iter_column(&self, index: usize) -> impl '_ + Iterator { + self.iter_rows().map(move |row| row[index]) + } + + pub fn try_swap_rows(&mut self, row1: usize, row2: usize) -> Result<(), Array2DError> { + let range_1 = self + .get_row_limits(row1) + .ok_or(Array2DError::IndexOutOfBounds(row1))?; + let range_2 = self + .get_row_limits(row2) + .ok_or(Array2DError::IndexOutOfBounds(row2))?; + for (i, j) in range_1.zip(range_2) { + self.values.swap(i, j); + } + + Ok(()) + } + + pub fn try_replace_row_with( + &mut self, + row_idx: usize, + row: &[T], + ) -> Result<(), Array2DError> { + let range = self + .get_row_limits(row_idx) + .ok_or(Array2DError::IndexOutOfBounds(row_idx))?; + self.values[range].copy_from_slice(row); + Ok(()) + } + + pub fn as_flat_slice(&self) -> &[T] { + &self.values + } + + pub fn nrows(&self) -> usize { + self.n_row + } + + pub fn ncols(&self) -> usize { + self.n_col + } + + pub fn transpose_clone(&self) -> Array2D { + // Swap major_dim and minor_dim + let col_dim = self.n_col; + let row_dim = self.n_row; + + let mut result = vec![self.values[0]; row_dim * col_dim]; + + for (i, crow) in self.iter_rows().enumerate() { + for (j, v) in crow.iter().enumerate() { + let idx = (j * row_dim) + i; + result[idx] = *v; + } + } + + Array2D { + values: result, + n_col: row_dim, + n_row: col_dim, + } + } + + pub fn reset_with_value(&mut self, ncols: usize, nrows: usize, value: T) { + self.values.clear(); + self.values.resize(ncols * nrows, value); + self.n_col = ncols; + self.n_row = nrows; + } + + /// Remove a row by rotating it to the end and truncating. + /// + /// This is an O(n) operation that maintains the order of remaining rows. + /// Uses slice rotation for better performance than multiple swaps. + /// Panics if the index is out of bounds. 
+ pub fn drop_row(&mut self, idx: usize) {
+ assert!(
+ idx < self.n_row,
+ "Index {} out of bounds (nrows: {})",
+ idx,
+ self.n_row
+ );
+
+ // Calculate flat element-index ranges for the rotation
+ let start_byte = idx * self.n_col;
+ let end_byte = self.n_row * self.n_col;
+
+ // Rotate the slice [idx..nrows) right by (nrows - idx - 1) rows
+ // This moves row[idx] to the end while maintaining order of other rows
+ // Example: [a, b, c, d, e] with idx=1 -> [a, c, d, e, b]
+ let num_rows_to_rotate = self.n_row - idx;
+ if num_rows_to_rotate > 1 {
+ let rotation_amount = (num_rows_to_rotate - 1) * self.n_col;
+ self.values[start_byte..end_byte].rotate_right(rotation_amount);
+ }
+
+ // Truncate the last row
+ let new_len = (self.n_row - 1) * self.n_col;
+ self.values.truncate(new_len);
+ self.n_row -= 1;
+ }
+}
+
+impl<T: ArrayElement + std::ops::Mul<Output = T>> Array2D<T> {
+ /// RowConvolve
+ /// Apply a convolution on each row of the array separately.
+ /// It is equivalent to applying the passed kernel on each row of the array
+ /// and padding with zeros.
+ pub fn row_convolve(&self, kernel: &[T], default_value: T) -> Array2D { + let mut result = vec![default_value; self.n_row * self.n_col]; + let offset_size = (kernel.len() - 1) / 2; + + for i in 0..self.n_row { + let row_offset = i * (self.n_col); + let row = self.get_row(i).expect("Failed to get row, malformed array"); + row.windows(kernel.len()) + .enumerate() + .for_each(|(ii, window)| { + window + .iter() + .zip(kernel.iter()) + .map(|(&a, &b)| a * b) + .for_each(|prod| { + result[ii + offset_size + row_offset] += prod; + }); + }) + } + + Array2D::from_flat_vector(result, self.nrows(), self.ncols()).unwrap() + } + + pub fn convolve_fold( + &self, + kernel: &[T], + default_value: T, + fold_func: impl Fn(T, T) -> T, + ) -> Vec { + let mut result = vec![default_value; self.n_col]; + let offset_size = (kernel.len() - 1) / 2; + + for i in 0..self.n_row { + let row = self.get_row(i).expect("Failed to get row, malformed array"); + row.windows(kernel.len()) + .enumerate() + .for_each(|(ii, window)| { + window + .iter() + .zip(kernel.iter()) + .map(|(&a, &b)| a * b) + .for_each(|prod| { + result[ii + offset_size] = fold_func(result[ii + offset_size], prod); + }); + }) + } + + result + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_array2d_new() { + // Test creating a 2x3 array + let values = vec![vec![1, 2, 3], vec![4, 5, 6]]; + let array = Array2D::new(&values).unwrap(); + + // Check dimensions + assert_eq!(array.n_col, 3); // columns + assert_eq!(array.n_row, 2); // rows + + // Check memory layout - values in same row should be adjacent + assert_eq!(array.values, vec![1, 2, 3, 4, 5, 6]); + } + + #[test] + fn test_array2d_new_transposed() { + // Test creating a 3x2 array from columns + let columns = vec![ + vec![1, 4], // first column + vec![2, 5], // second column + vec![3, 6], // third column + ]; + let array = Array2D::new_transposed(&columns).unwrap(); + + // Check dimensions + assert_eq!(array.n_col, 3); // columns + 
assert_eq!(array.n_row, 2); // rows + + // Check memory layout - values should be arranged row-major + assert_eq!(array.values, vec![1, 2, 3, 4, 5, 6]); + } + + #[test] + fn test_array2d_error_handling() { + // Test with inconsistent row lengths + let invalid_values = vec![ + vec![1, 2, 3], + vec![4, 5], // Missing one value + ]; + assert!(Array2D::new(&invalid_values).is_err()); + + // Test with empty array + let empty_values: Vec> = vec![]; + assert!(Array2D::new(&empty_values).is_err()); + + // Test transposed with inconsistent column lengths + let invalid_columns = vec![ + vec![1, 4], + vec![2], // Missing one value + vec![3, 6], + ]; + assert!(Array2D::new_transposed(&invalid_columns).is_err()); + } + + #[test] + fn test_array2d_large() { + // Test with a larger array to verify memory efficiency + let size = 1000; + let values: Vec> = (0..size) + .map(|i| (0..size).map(|j| (i * size + j) as f64).collect()) + .collect(); + + let array = Array2D::new(&values).unwrap(); + + // Verify dimensions + assert_eq!(array.n_col, size); + assert_eq!(array.n_row, size); + + // Verify first and last values + assert_eq!(array.values[0], 0.0); + assert_eq!(array.values[size * size - 1], (size * size - 1) as f64); + } + + #[test] + fn test_array2d_transpose() { + let values = vec![vec![1, 2, 3], vec![4, 5, 6]]; + let array = Array2D::new(&values).unwrap(); + + // Check dimensions + assert_eq!(array.n_col, 3); // columns + assert_eq!(array.n_row, 2); // rows + + // Check memory layout - values in same row should be adjacent + assert_eq!(array.values, vec![1, 2, 3, 4, 5, 6]); + + let transposed = array.transpose_clone(); + + // Check dimensions + assert_eq!(transposed.n_col, 2); // columns + assert_eq!(transposed.n_row, 3); // rows + + // Check memory layout - values should be arranged row-major + assert_eq!(transposed.values, vec![1, 4, 2, 5, 3, 6]); + } + + #[test] + fn test_convolve() { + let values = vec![vec![1, 2, 3, 4, 5, 6], vec![4, 5, 6, 7, 8, 9]]; + let array = 
Array2D::new(&values).unwrap(); + let array2 = Array2D { + values: vec![1, 2, 3, 4, 5, 6, 4, 5, 6, 7, 8, 9], + n_col: 6, + n_row: 2, + }; + assert_eq!(array, array2); + + let convolved = array.row_convolve(&[1, 1, 1], 0); + + // 6 = 1 + 2 + 3 + // 9 = 2 + 3 + 4 + let expect = Array2D { + values: vec![0, 6, 9, 12, 15, 0, 0, 15, 18, 21, 24, 0], + n_col: 6, + n_row: 2, + }; + assert_eq!(convolved, expect); + } + + #[test] + fn test_convolve_reduce() { + let values = vec![vec![1, 2, 3, 4, 5, 6], vec![4, 5, 6, 7, 8, 9]]; + let array = Array2D::new(&values).unwrap(); + let array2 = Array2D { + values: vec![1, 2, 3, 4, 5, 6, 4, 5, 6, 7, 8, 9], + n_col: 6, + n_row: 2, + }; + assert_eq!(array, array2); + + let convolved = array.convolve_fold(&[1, 1, 1], 0, |a, b| a + b); + + // 21 = 1 + 2 + 3 + 4 + 5 + 6 + let expect = vec![0, 21, 27, 33, 39, 0]; + assert_eq!(convolved, expect); + } + + #[test] + fn test_reset_with_default() { + let mut array = Array2D::new(vec![vec![1, 2, 3], vec![4, 5, 6]]).unwrap(); + assert_eq!(array.ncols(), 3); + assert_eq!(array.nrows(), 2); + + array.reset_with_value(2, 3, 0i32); + assert_eq!(array.values, vec![0, 0, 0, 0, 0, 0]); + assert_eq!(array.ncols(), 2); + assert_eq!(array.nrows(), 3); + assert_eq!(array.get_row(0), Some(vec![0i32, 0i32].as_ref())); + assert_eq!(array.get_row(1), Some(vec![0i32, 0i32].as_ref())); + assert_eq!(array.get_row(2), Some(vec![0i32, 0i32].as_ref())); + } + + #[test] + fn test_insertion() { + let mut array = Array2D::new(vec![vec![1, 2, 3], vec![4, 5, 6]]).unwrap(); + array.insert(0, 0, 7); + assert_eq!(array.values, vec![7, 2, 3, 4, 5, 6]); + array.insert(1, 2, 8); + assert_eq!(array.values, vec![7, 2, 3, 4, 5, 8,]); + } + + #[test] + fn test_swap_rows() { + let mut array = Array2D::new(vec![vec![1, 2, 3], vec![4, 5, 6]]).unwrap(); + assert_eq!(array.values, vec![1, 2, 3, 4, 5, 6]); + array.try_swap_rows(0, 1).unwrap(); + assert_eq!(array.values, vec![4, 5, 6, 1, 2, 3]); + if array.try_swap_rows(1, 2).is_ok() { + 
panic!("Should not have succeeded") + }; + } + + // ===== TDD Tests for drop_row() ===== + + #[test] + fn test_drop_row_removes_correct_row() { + // Create a 3x3 array with distinct values in each row + let mut array = Array2D::new(vec![vec![1, 2, 3], vec![4, 5, 6], vec![7, 8, 9]]).unwrap(); + + // Remove the middle row + array.drop_row(1); + + // Check dimensions updated + assert_eq!(array.nrows(), 2); + assert_eq!(array.ncols(), 3); + + // Check remaining rows are correct + assert_eq!(array.get_row(0), Some([1, 2, 3].as_ref())); + assert_eq!(array.get_row(1), Some([7, 8, 9].as_ref())); + + // Check flat values + assert_eq!(array.values, vec![1, 2, 3, 7, 8, 9]); + } + + #[test] + fn test_drop_row_maintains_order() { + // Create a 5x2 array + let mut array = Array2D::new(vec![ + vec![10, 20], + vec![30, 40], + vec![50, 60], + vec![70, 80], + vec![90, 100], + ]) + .unwrap(); + + // Remove row at index 2 (50, 60) + array.drop_row(2); + + // Verify rows maintain their order + assert_eq!(array.nrows(), 4); + assert_eq!(array.get_row(0), Some([10, 20].as_ref())); + assert_eq!(array.get_row(1), Some([30, 40].as_ref())); + assert_eq!(array.get_row(2), Some([70, 80].as_ref())); // Was row 3 + assert_eq!(array.get_row(3), Some([90, 100].as_ref())); // Was row 4 + } + + #[test] + fn test_drop_row_first() { + // Create a 3x2 array + let mut array = Array2D::new(vec![vec![1, 2], vec![3, 4], vec![5, 6]]).unwrap(); + + // Remove first row + array.drop_row(0); + + // Check dimensions + assert_eq!(array.nrows(), 2); + assert_eq!(array.ncols(), 2); + + // Check remaining rows + assert_eq!(array.get_row(0), Some([3, 4].as_ref())); + assert_eq!(array.get_row(1), Some([5, 6].as_ref())); + assert_eq!(array.values, vec![3, 4, 5, 6]); + } + + #[test] + fn test_drop_row_last() { + // Create a 3x2 array + let mut array = Array2D::new(vec![vec![1, 2], vec![3, 4], vec![5, 6]]).unwrap(); + + // Remove last row + array.drop_row(2); + + // Check dimensions + assert_eq!(array.nrows(), 2); + 
assert_eq!(array.ncols(), 2); + + // Check remaining rows + assert_eq!(array.get_row(0), Some([1, 2].as_ref())); + assert_eq!(array.get_row(1), Some([3, 4].as_ref())); + assert_eq!(array.values, vec![1, 2, 3, 4]); + } + + #[test] + fn test_drop_row_only() { + // Create a 1x3 array (single row) + let mut array = Array2D::new(vec![vec![10, 20, 30]]).unwrap(); + + assert_eq!(array.nrows(), 1); + assert_eq!(array.ncols(), 3); + + // Remove the only row + array.drop_row(0); + + // Check dimensions - should be 0 rows + assert_eq!(array.nrows(), 0); + assert_eq!(array.ncols(), 3); // cols unchanged + assert_eq!(array.values.len(), 0); + + // get_row should return None for index 0 + assert_eq!(array.get_row(0), None); + } +} diff --git a/rust/calibrt/Cargo.toml b/rust/calibrt/Cargo.toml index e20bdc4..ee3ce5a 100644 --- a/rust/calibrt/Cargo.toml +++ b/rust/calibrt/Cargo.toml @@ -5,6 +5,7 @@ edition.workspace = true license.workspace = true [dependencies] +array2d = { path = "../array2d" } rand = "0.8.5" # Used for generating sample data in the example serde = { workspace = true } tracing = { workspace = true } diff --git a/rust/timsquery/Cargo.toml b/rust/timsquery/Cargo.toml index a868277..9908590 100644 --- a/rust/timsquery/Cargo.toml +++ b/rust/timsquery/Cargo.toml @@ -11,6 +11,7 @@ zstd = "0.13" # Workspace member deps +array2d = { path = "../array2d" } timscentroid = { path = "../timscentroid" } micromzpaf = { path = "../micromzpaf" } diff --git a/rust/timsquery/src/errors.rs b/rust/timsquery/src/errors.rs index 2df6064..04f8f7b 100644 --- a/rust/timsquery/src/errors.rs +++ b/rust/timsquery/src/errors.rs @@ -1,4 +1,5 @@ use std::fmt::Display; +use array2d::Array2DError; use timscentroid::serialization::SerializationError; use timsrust::{ TimsRustError, @@ -80,6 +81,16 @@ impl From for TimsqueryError { } } +impl From for DataProcessingError { + fn from(e: Array2DError) -> Self { + match e { + Array2DError::EmptyData => DataProcessingError::ExpectedNonEmptyData, + 
Array2DError::DimensionMismatch => DataProcessingError::ExpectedVectorSameLength, + Array2DError::IndexOutOfBounds(n) => DataProcessingError::IndexOutOfBoundsError(n), + } + } +} + impl> From for TimsqueryError { fn from(e: T) -> Self { TimsqueryError::DataReadingError(e.into()) diff --git a/rust/timsquery/src/models/base/arrays.rs b/rust/timsquery/src/models/base/arrays.rs index 80a136d..c82b17d 100644 --- a/rust/timsquery/src/models/base/arrays.rs +++ b/rust/timsquery/src/models/base/arrays.rs @@ -1,688 +1,5 @@ -use std::fmt::Debug; - -use serde::Serialize; -use serde::ser::SerializeSeq; - -use crate::errors::DataProcessingError; - -use std::ops::Range; - -pub trait ArrayElement: - Clone - + Copy - + Default - + std::ops::Mul - + std::fmt::Display - + std::fmt::Debug - + std::ops::Add - + std::ops::AddAssign -{ -} - -/// Blanket trait implementation on elements that -/// can be used as a value in the array. -impl< - T: Clone - + Copy - + Default - + std::ops::Mul - + std::fmt::Display - + std::fmt::Debug - + std::ops::Add - + std::ops::AddAssign, -> ArrayElement for T -{ -} - -/// Implements a way to represent an array of -/// dimensions x-y that will be later used to -/// implement an mz-major and a rt-major array -/// representation. -/// -/// Simple 2D array -/// -/// `values` is a flattened array of values -/// `n_col` is the number of values in each row -/// `n_row` is the number of rows -/// -/// Note on memory layout: -/// -/// Values that belong to the same row are adjacent -/// in memory. 
-#[derive(Debug, Clone, PartialEq)] -pub struct Array2D { - pub(super) values: Vec, - pub(super) n_col: usize, - pub(super) n_row: usize, -} - -impl Serialize for Array2D { - fn serialize(&self, serializer: S) -> Result - where - S: serde::Serializer, - { - let mut seq = serializer.serialize_seq(Some(self.n_row))?; - for row in self.iter_rows() { - seq.serialize_element(row)?; - } - seq.end() - } -} - -impl Array2D { - pub fn new, C: AsRef<[S]>>(values: C) -> Result, DataProcessingError> { - let nrows = values.as_ref().len(); - if nrows == 0 { - return Err(DataProcessingError::ExpectedNonEmptyData); - } - let ncols = values.as_ref()[0].as_ref().len(); - if ncols == 0 { - return Err(DataProcessingError::ExpectedNonEmptyData); - } - - let expected_size = nrows * ncols; - let values: Vec = values - .as_ref() - .iter() - .flat_map(|x| x.as_ref()) - .cloned() - .collect(); - - if values.len() != expected_size { - return Err(DataProcessingError::ExpectedVectorSameLength); - } - - Ok(Array2D { - values, - n_col: ncols, - n_row: nrows, - }) - } - - pub fn new_transposed, C: AsRef<[S]>>( - values: C, - ) -> Result, DataProcessingError> { - let ncols = values.as_ref().len(); - if ncols == 0 { - return Err(DataProcessingError::ExpectedNonEmptyData); - } - let nrows = values.as_ref()[0].as_ref().len(); - if nrows == 0 { - return Err(DataProcessingError::ExpectedNonEmptyData); - } - - let expected_size = nrows * ncols; - let mut out_values = vec![None; expected_size]; - - for (ci, col) in values.as_ref().iter().enumerate() { - if col.as_ref().len() != nrows { - return Err(DataProcessingError::ExpectedVectorSameLength); - } - for (ri, val) in col.as_ref().iter().enumerate() { - let idx = ri * ncols + ci; // Changed indexing for row-major order - out_values[idx] = Some(*val); - } - } - - let unwrapped_values: Vec = out_values.into_iter().flatten().collect(); - - Ok(Array2D { - values: unwrapped_values, - n_col: ncols, - n_row: nrows, - }) - } - - pub fn from_flat_vector( - 
values: Vec, - nrows: usize, - ncols: usize, - ) -> Result, DataProcessingError> { - if values.len() != nrows * ncols { - return Err(DataProcessingError::ExpectedVectorSameLength); - } - Ok(Array2D { - values, - n_col: ncols, - n_row: nrows, - }) - } - - /// Apply a function to each row of the array - /// - /// Example: - /// ``` - /// use timsquery::Array2D; - /// let array = Array2D::new(vec![vec![1, 2, 3], vec![4, 5, 6]]).unwrap(); - /// let result: Vec = array.row_apply(|x| x.iter().sum()).collect(); - /// assert_eq!(result, vec![6, 15]); - /// - /// let array = Array2D::new_transposed(vec![vec![1, 2, 3], vec![4, 5, 6]]).unwrap(); - /// let result: Vec = array.row_apply(|x| x.iter().sum()).collect(); - /// assert_eq!(result, vec![5, 7, 9]); - /// ``` - pub fn row_apply<'a: 'b, 'b, W, F: FnMut(&[T]) -> W + 'b>( - &'a self, - f: F, - ) -> impl Iterator + 'b { - self.iter_rows().map(f) - } - - pub fn iter_mut_rows(&mut self) -> impl Iterator { - self.values.chunks_mut(self.n_col) - } - - pub fn iter_rows(&self) -> impl Iterator { - self.values.chunks(self.n_col) - } - - /// Apply a function to each pair of rows of the array - /// - /// For example if I want to calculate the MAE between each row of the array - /// I can do: - /// ``` - /// use timsquery::Array2D; - /// let array: Array2D = Array2D::new(vec![vec![1., 2., 3.], vec![4., 5., 6.]]).unwrap(); - /// let result: Vec = array.outer_row_apply(|x, y| (x.iter().zip(y.iter()).map(|(a, b)| (a - b).abs() as f64).sum::()) / x.len() as f64); - /// assert_eq!(result, vec![3.0]); - /// - /// let array: Array2D = Array2D::new_transposed(vec![vec![1.,2., 3.], vec![4., 5., 6.]]).unwrap(); - /// let result: Vec = array.outer_row_apply(|x, y| (x.iter().zip(y.iter()).map(|(a, b)| (a - b).abs() as f64).sum::()) / x.len() as f64); - /// assert_eq!(result, vec![1.0, 2.0, 1.0]); - /// ``` - pub fn outer_row_apply W>(&self, mut f: F) -> Vec { - let mut result = Vec::new(); - for i in 0..self.n_row { - let row = 
self.get_row(i).expect("Failed to get row, malformed array"); - for j in 0..self.n_row { - if j >= i { - continue; - } - let other_row = self.get_row(j).expect("Failed to get row, malformed array"); - result.push(f(row, other_row)); - } - } - result - } - - pub fn insert(&mut self, row_idx: usize, col_idx: usize, value: T) { - let idx = row_idx * self.n_col + col_idx; - match self.values.get_mut(idx) { - Some(v) => *v = value, - None => panic!( - "Index out of bounds ({}/{}): row {}/{}, col {}/{}", - idx, - self.values.len(), - row_idx, - self.n_row, - col_idx, - self.n_col, - ), - }; - } - - fn get_row_limits(&self, index: usize) -> Option> { - let start = index * self.n_col; - let end = start + self.n_col; - if end > self.values.len() || start >= self.values.len() { - return None; - } - Some(start..end) - } - - pub fn get_row(&self, index: usize) -> Option<&[T]> { - Some(&self.values[self.get_row_limits(index)?]) - } - - pub fn get_row_mut(&mut self, index: usize) -> Result<&mut [T], DataProcessingError> { - let range = self - .get_row_limits(index) - .ok_or(DataProcessingError::IndexOutOfBoundsError(index))?; - Ok(&mut self.values[range]) - } - - pub fn iter_column(&self, index: usize) -> impl '_ + Iterator { - self.iter_rows().map(move |row| row[index]) - } - - pub fn try_swap_rows(&mut self, row1: usize, row2: usize) -> Result<(), DataProcessingError> { - let range_1 = self - .get_row_limits(row1) - .ok_or(DataProcessingError::IndexOutOfBoundsError(row1))?; - let range_2 = self - .get_row_limits(row2) - .ok_or(DataProcessingError::IndexOutOfBoundsError(row2))?; - for (i, j) in range_1.zip(range_2) { - self.values.swap(i, j); - } - - // Maybe using swap_with_slice would be faster? - // My inclination is to believe that the compiler would optimize - // to the same code, but we can benchmark it later. 
- // let mut slice = [1, 2, 3, 4, 5]; - // { - // let (left, right) = slice.split_at_mut(2); - // left.swap_with_slice(&mut right[1..]); - // } - // assert_eq!(slice, [4, 5, 3, 1, 2]); - - Ok(()) - } - - pub fn try_replace_row_with( - &mut self, - row_idx: usize, - row: &[T], - ) -> Result<(), DataProcessingError> { - let range = self - .get_row_limits(row_idx) - .ok_or(DataProcessingError::IndexOutOfBoundsError(row_idx))?; - self.values[range].copy_from_slice(row); - Ok(()) - } - - pub fn as_flat_slice(&self) -> &[T] { - &self.values - } - - pub fn nrows(&self) -> usize { - self.n_row - } - - pub fn ncols(&self) -> usize { - self.n_col - } - - pub fn transpose_clone(&self) -> Array2D { - // Swap major_dim and minor_dim - let col_dim = self.n_col; - let row_dim = self.n_row; - - let mut result = vec![self.values[0]; row_dim * col_dim]; - - for (i, crow) in self.iter_rows().enumerate() { - for (j, v) in crow.iter().enumerate() { - let idx = (j * row_dim) + i; - result[idx] = *v; - } - } - - Array2D { - values: result, - n_col: row_dim, - n_row: col_dim, - } - } - - pub fn reset_with_value(&mut self, ncols: usize, nrows: usize, value: T) { - self.values.clear(); - self.values.resize(ncols * nrows, value); - self.n_col = ncols; - self.n_row = nrows; - } - - /// Remove a row by rotating it to the end and truncating. - /// - /// This is an O(n) operation that maintains the order of remaining rows. - /// Uses slice rotation for better performance than multiple swaps. - /// Panics if the index is out of bounds. 
- pub fn drop_row(&mut self, idx: usize) { - assert!( - idx < self.n_row, - "Index {} out of bounds (nrows: {})", - idx, - self.n_row - ); - - // Calculate byte ranges for the rotation - let start_byte = idx * self.n_col; - let end_byte = self.n_row * self.n_col; - - // Rotate the slice [idx..nrows) right by (nrows - idx - 1) rows - // This moves row[idx] to the end while maintaining order of other rows - // Example: [a, b, c, d, e] with idx=1 -> [a, c, d, e, b] - let num_rows_to_rotate = self.n_row - idx; - if num_rows_to_rotate > 1 { - let rotation_amount = (num_rows_to_rotate - 1) * self.n_col; - self.values[start_byte..end_byte].rotate_right(rotation_amount); - } - - // Truncate the last row - let new_len = (self.n_row - 1) * self.n_col; - self.values.truncate(new_len); - self.n_row -= 1; - } -} - -impl> Array2D { - /// RowConvolve - /// Apply a a convolution on each row of the array separately. - /// It is equivalent to applying the passed kernel on each row of the array. - /// and padding with zeros. 
- pub fn row_convolve(&self, kernel: &[T], default_value: T) -> Array2D { - let mut result = vec![default_value; self.n_row * self.n_col]; - let offset_size = (kernel.len() - 1) / 2; - - for i in 0..self.n_row { - let row_offset = i * (self.n_col); - let row = self.get_row(i).expect("Failed to get row, malformed array"); - row.windows(kernel.len()) - .enumerate() - .for_each(|(ii, window)| { - window - .iter() - .zip(kernel.iter()) - .map(|(&a, &b)| a * b) - .for_each(|prod| { - result[ii + offset_size + row_offset] += prod; - }); - }) - } - - Array2D::from_flat_vector(result, self.nrows(), self.ncols()).unwrap() - } - - pub fn convolve_fold( - &self, - kernel: &[T], - default_value: T, - fold_func: impl Fn(T, T) -> T, - ) -> Vec { - let mut result = vec![default_value; self.n_col]; - let offset_size = (kernel.len() - 1) / 2; - - for i in 0..self.n_row { - let row = self.get_row(i).expect("Failed to get row, malformed array"); - row.windows(kernel.len()) - .enumerate() - .for_each(|(ii, window)| { - window - .iter() - .zip(kernel.iter()) - .map(|(&a, &b)| a * b) - .for_each(|prod| { - result[ii + offset_size] = fold_func(result[ii + offset_size], prod); - }); - }) - } - - result - } -} - -#[cfg(test)] -mod tests { - use super::*; - - #[test] - fn test_array2d_new() { - // Test creating a 2x3 array - let values = vec![vec![1, 2, 3], vec![4, 5, 6]]; - let array = Array2D::new(&values).unwrap(); - - // Check dimensions - assert_eq!(array.n_col, 3); // columns - assert_eq!(array.n_row, 2); // rows - - // Check memory layout - values in same row should be adjacent - assert_eq!(array.values, vec![1, 2, 3, 4, 5, 6]); - } - - #[test] - fn test_array2d_new_transposed() { - // Test creating a 3x2 array from columns - let columns = vec![ - vec![1, 4], // first column - vec![2, 5], // second column - vec![3, 6], // third column - ]; - let array = Array2D::new_transposed(&columns).unwrap(); - - // Check dimensions - assert_eq!(array.n_col, 3); // columns - 
assert_eq!(array.n_row, 2); // rows - - // Check memory layout - values should be arranged row-major - assert_eq!(array.values, vec![1, 2, 3, 4, 5, 6]); - } - - #[test] - fn test_array2d_error_handling() { - // Test with inconsistent row lengths - let invalid_values = vec![ - vec![1, 2, 3], - vec![4, 5], // Missing one value - ]; - assert!(Array2D::new(&invalid_values).is_err()); - - // Test with empty array - let empty_values: Vec> = vec![]; - assert!(Array2D::new(&empty_values).is_err()); - - // Test transposed with inconsistent column lengths - let invalid_columns = vec![ - vec![1, 4], - vec![2], // Missing one value - vec![3, 6], - ]; - assert!(Array2D::new_transposed(&invalid_columns).is_err()); - } - - #[test] - fn test_array2d_large() { - // Test with a larger array to verify memory efficiency - let size = 1000; - let values: Vec> = (0..size) - .map(|i| (0..size).map(|j| (i * size + j) as f64).collect()) - .collect(); - - let array = Array2D::new(&values).unwrap(); - - // Verify dimensions - assert_eq!(array.n_col, size); - assert_eq!(array.n_row, size); - - // Verify first and last values - assert_eq!(array.values[0], 0.0); - assert_eq!(array.values[size * size - 1], (size * size - 1) as f64); - } - - #[test] - fn test_array2d_transpose() { - let values = vec![vec![1, 2, 3], vec![4, 5, 6]]; - let array = Array2D::new(&values).unwrap(); - - // Check dimensions - assert_eq!(array.n_col, 3); // columns - assert_eq!(array.n_row, 2); // rows - - // Check memory layout - values in same row should be adjacent - assert_eq!(array.values, vec![1, 2, 3, 4, 5, 6]); - - let transposed = array.transpose_clone(); - - // Check dimensions - assert_eq!(transposed.n_col, 2); // columns - assert_eq!(transposed.n_row, 3); // rows - - // Check memory layout - values should be arranged row-major - assert_eq!(transposed.values, vec![1, 4, 2, 5, 3, 6]); - } - - #[test] - fn test_convolve() { - let values = vec![vec![1, 2, 3, 4, 5, 6], vec![4, 5, 6, 7, 8, 9]]; - let array = 
Array2D::new(&values).unwrap(); - let array2 = Array2D { - values: vec![1, 2, 3, 4, 5, 6, 4, 5, 6, 7, 8, 9], - n_col: 6, - n_row: 2, - }; - assert_eq!(array, array2); - - let convolved = array.row_convolve(&[1, 1, 1], 0); - - // 6 = 1 + 2 + 3 - // 9 = 2 + 3 + 4 - let expect = Array2D { - values: vec![0, 6, 9, 12, 15, 0, 0, 15, 18, 21, 24, 0], - n_col: 6, - n_row: 2, - }; - assert_eq!(convolved, expect); - } - - #[test] - fn test_convolve_reduce() { - let values = vec![vec![1, 2, 3, 4, 5, 6], vec![4, 5, 6, 7, 8, 9]]; - let array = Array2D::new(&values).unwrap(); - let array2 = Array2D { - values: vec![1, 2, 3, 4, 5, 6, 4, 5, 6, 7, 8, 9], - n_col: 6, - n_row: 2, - }; - assert_eq!(array, array2); - - let convolved = array.convolve_fold(&[1, 1, 1], 0, |a, b| a + b); - - // 21 = 1 + 2 + 3 + 4 + 5 + 6 - let expect = vec![0, 21, 27, 33, 39, 0]; - assert_eq!(convolved, expect); - } - - #[test] - fn test_reset_with_default() { - let mut array = Array2D::new(vec![vec![1, 2, 3], vec![4, 5, 6]]).unwrap(); - assert_eq!(array.ncols(), 3); - assert_eq!(array.nrows(), 2); - - array.reset_with_value(2, 3, 0i32); - assert_eq!(array.values, vec![0, 0, 0, 0, 0, 0]); - assert_eq!(array.ncols(), 2); - assert_eq!(array.nrows(), 3); - assert_eq!(array.get_row(0), Some(vec![0i32, 0i32].as_ref())); - assert_eq!(array.get_row(1), Some(vec![0i32, 0i32].as_ref())); - assert_eq!(array.get_row(2), Some(vec![0i32, 0i32].as_ref())); - } - - #[test] - fn test_insertion() { - let mut array = Array2D::new(vec![vec![1, 2, 3], vec![4, 5, 6]]).unwrap(); - array.insert(0, 0, 7); - assert_eq!(array.values, vec![7, 2, 3, 4, 5, 6]); - array.insert(1, 2, 8); - assert_eq!(array.values, vec![7, 2, 3, 4, 5, 8,]); - } - - #[test] - fn test_swap_rows() { - let mut array = Array2D::new(vec![vec![1, 2, 3], vec![4, 5, 6]]).unwrap(); - assert_eq!(array.values, vec![1, 2, 3, 4, 5, 6]); - array.try_swap_rows(0, 1).unwrap(); - assert_eq!(array.values, vec![4, 5, 6, 1, 2, 3]); - if array.try_swap_rows(1, 2).is_ok() { - 
panic!("Should not have succeeded") - }; - } - - // ===== TDD Tests for drop_row() ===== - - #[test] - fn test_drop_row_removes_correct_row() { - // Create a 3x3 array with distinct values in each row - let mut array = Array2D::new(vec![vec![1, 2, 3], vec![4, 5, 6], vec![7, 8, 9]]).unwrap(); - - // Remove the middle row - array.drop_row(1); - - // Check dimensions updated - assert_eq!(array.nrows(), 2); - assert_eq!(array.ncols(), 3); - - // Check remaining rows are correct - assert_eq!(array.get_row(0), Some([1, 2, 3].as_ref())); - assert_eq!(array.get_row(1), Some([7, 8, 9].as_ref())); - - // Check flat values - assert_eq!(array.values, vec![1, 2, 3, 7, 8, 9]); - } - - #[test] - fn test_drop_row_maintains_order() { - // Create a 5x2 array - let mut array = Array2D::new(vec![ - vec![10, 20], - vec![30, 40], - vec![50, 60], - vec![70, 80], - vec![90, 100], - ]) - .unwrap(); - - // Remove row at index 2 (50, 60) - array.drop_row(2); - - // Verify rows maintain their order - assert_eq!(array.nrows(), 4); - assert_eq!(array.get_row(0), Some([10, 20].as_ref())); - assert_eq!(array.get_row(1), Some([30, 40].as_ref())); - assert_eq!(array.get_row(2), Some([70, 80].as_ref())); // Was row 3 - assert_eq!(array.get_row(3), Some([90, 100].as_ref())); // Was row 4 - } - - #[test] - fn test_drop_row_first() { - // Create a 3x2 array - let mut array = Array2D::new(vec![vec![1, 2], vec![3, 4], vec![5, 6]]).unwrap(); - - // Remove first row - array.drop_row(0); - - // Check dimensions - assert_eq!(array.nrows(), 2); - assert_eq!(array.ncols(), 2); - - // Check remaining rows - assert_eq!(array.get_row(0), Some([3, 4].as_ref())); - assert_eq!(array.get_row(1), Some([5, 6].as_ref())); - assert_eq!(array.values, vec![3, 4, 5, 6]); - } - - #[test] - fn test_drop_row_last() { - // Create a 3x2 array - let mut array = Array2D::new(vec![vec![1, 2], vec![3, 4], vec![5, 6]]).unwrap(); - - // Remove last row - array.drop_row(2); - - // Check dimensions - assert_eq!(array.nrows(), 2); - 
assert_eq!(array.ncols(), 2); - - // Check remaining rows - assert_eq!(array.get_row(0), Some([1, 2].as_ref())); - assert_eq!(array.get_row(1), Some([3, 4].as_ref())); - assert_eq!(array.values, vec![1, 2, 3, 4]); - } - - #[test] - fn test_drop_row_only() { - // Create a 1x3 array (single row) - let mut array = Array2D::new(vec![vec![10, 20, 30]]).unwrap(); - - assert_eq!(array.nrows(), 1); - assert_eq!(array.ncols(), 3); - - // Remove the only row - array.drop_row(0); - - // Check dimensions - should be 0 rows - assert_eq!(array.nrows(), 0); - assert_eq!(array.ncols(), 3); // cols unchanged - assert_eq!(array.values.len(), 0); - - // get_row should return None for index 0 - assert_eq!(array.get_row(0), None); - } -} +// Re-export Array2D and ArrayElement from the standalone array2d crate. +pub use array2d::{ + Array2D, + ArrayElement, +}; diff --git a/rust/timsquery/src/serde/chromatogram_output.rs b/rust/timsquery/src/serde/chromatogram_output.rs index 03c8609..966ea97 100644 --- a/rust/timsquery/src/serde/chromatogram_output.rs +++ b/rust/timsquery/src/serde/chromatogram_output.rs @@ -113,9 +113,9 @@ impl ChromatogramOutput { return None; } let out_vec = out_slc.to_vec(); - Some(Ok(((mz, out_vec), format!("{}", idx)))) + Some(Ok::<_, crate::errors::DataProcessingError>(((mz, out_vec), format!("{}", idx)))) }) - .collect::, _>>()? + .collect::, crate::errors::DataProcessingError>>()? .into_iter() .unzip(); From f795c710ebc2c756dfde716f59074982eff48c1a Mon Sep 17 00:00:00 2001 From: "J. 
Sebastian Paez" Date: Fri, 10 Apr 2026 15:54:46 -0700 Subject: [PATCH 45/64] feat(calibrt): add LibraryRT and ObservedRTSeconds newtypes --- rust/calibrt/Cargo.toml | 1 + rust/calibrt/src/lib.rs | 2 ++ rust/calibrt/src/types.rs | 71 +++++++++++++++++++++++++++++++++++++++ 3 files changed, 74 insertions(+) create mode 100644 rust/calibrt/src/types.rs diff --git a/rust/calibrt/Cargo.toml b/rust/calibrt/Cargo.toml index ee3ce5a..d6870bd 100644 --- a/rust/calibrt/Cargo.toml +++ b/rust/calibrt/Cargo.toml @@ -12,6 +12,7 @@ tracing = { workspace = true } [dev-dependencies] insta = { workspace = true } +serde_json = { workspace = true } tracing-subscriber = { workspace = true } [lib] diff --git a/rust/calibrt/src/lib.rs b/rust/calibrt/src/lib.rs index e9ef2dd..b846c2a 100644 --- a/rust/calibrt/src/lib.rs +++ b/rust/calibrt/src/lib.rs @@ -5,7 +5,9 @@ pub mod grid; mod pathfinding; pub mod plotting; +pub mod types; pub use grid::Grid; +pub use types::{LibraryRT, ObservedRTSeconds}; use tracing::{ info, warn, diff --git a/rust/calibrt/src/types.rs b/rust/calibrt/src/types.rs new file mode 100644 index 0000000..0bb13a4 --- /dev/null +++ b/rust/calibrt/src/types.rs @@ -0,0 +1,71 @@ +use serde::{Serialize, Deserialize}; +use std::fmt; + +/// Library reference retention time. Unit-agnostic — could be iRT, minutes, +/// or arbitrary units depending on the spectral library. +#[derive(Debug, Clone, Copy, PartialEq, PartialOrd, Serialize, Deserialize)] +#[serde(transparent)] +pub struct LibraryRT(pub T); + +/// Observed retention time from raw instrument data, always in seconds. 
+#[derive(Debug, Clone, Copy, PartialEq, PartialOrd, Serialize, Deserialize)] +#[serde(transparent)] +pub struct ObservedRTSeconds(pub T); + +impl From> for LibraryRT { + fn from(v: LibraryRT) -> Self { + LibraryRT(v.0 as f64) + } +} + +impl From> for ObservedRTSeconds { + fn from(v: ObservedRTSeconds) -> Self { + ObservedRTSeconds(v.0 as f64) + } +} + +impl fmt::Display for LibraryRT { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!(f, "{}", self.0) + } +} + +impl fmt::Display for ObservedRTSeconds { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!(f, "{}s", self.0) + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_library_rt_widening() { + let narrow = LibraryRT(42.5f32); + let wide: LibraryRT = narrow.into(); + assert!((wide.0 - 42.5).abs() < 1e-5); + } + + #[test] + fn test_observed_rt_widening() { + let narrow = ObservedRTSeconds(123.4f32); + let wide: ObservedRTSeconds = narrow.into(); + assert!((wide.0 - 123.4).abs() < 1e-3); + } + + #[test] + fn test_display() { + assert_eq!(format!("{}", LibraryRT(42.5f64)), "42.5"); + assert_eq!(format!("{}", ObservedRTSeconds(42.5f64)), "42.5s"); + } + + #[test] + fn test_serde_transparent() { + let lib = LibraryRT(42.5f64); + let json = serde_json::to_string(&lib).unwrap(); + assert_eq!(json, "42.5"); + let back: LibraryRT = serde_json::from_str(&json).unwrap(); + assert_eq!(back, lib); + } +} From ac47f11f133d8d30101ca176a8d0906db6e09fbb Mon Sep 17 00:00:00 2001 From: "J. Sebastian Paez" Date: Fri, 10 Apr 2026 15:59:06 -0700 Subject: [PATCH 46/64] refactor(calibrt): rename Point fields x/y to library/observed Makes the semantic meaning explicit: library RT on the x-axis and observed RT on the y-axis. Also renames RidgeMeasurement.x to .library. 
--- rust/calibrt/examples/calibrt.rs | 6 ++-- rust/calibrt/src/grid.rs | 32 +++++++++--------- rust/calibrt/src/lib.rs | 42 ++++++++++++------------ rust/calibrt/src/pathfinding.rs | 12 +++---- rust/calibrt/tests/tests.rs | 36 ++++++++++---------- rust/timsquery_viewer/src/calibration.rs | 14 ++++---- rust/timsseek/src/rt_calibration.rs | 18 +++++----- rust/timsseek_cli/src/processing.rs | 12 +++---- 8 files changed, 86 insertions(+), 86 deletions(-) diff --git a/rust/calibrt/examples/calibrt.rs b/rust/calibrt/examples/calibrt.rs index edd51bf..ed152cc 100644 --- a/rust/calibrt/examples/calibrt.rs +++ b/rust/calibrt/examples/calibrt.rs @@ -25,14 +25,14 @@ fn main() { for i in 0..500 { let x = i as f64; let y = real_x_to_y(x); - points.push(Point { x, y, weight: 1.0 }); + points.push(Point { library: x, observed: y, weight: 1.0 }); } // Add some random noise points let mut rng = rand::thread_rng(); for _ in 0..50 { points.push(Point { - x: rng.gen_range(0.0..100.0), - y: rng.gen_range(0.0..150.0), + library: rng.gen_range(0.0..100.0), + observed: rng.gen_range(0.0..150.0), weight: 1.0, }); } diff --git a/rust/calibrt/src/grid.rs b/rust/calibrt/src/grid.rs index 13b43aa..7af5f9c 100644 --- a/rust/calibrt/src/grid.rs +++ b/rust/calibrt/src/grid.rs @@ -39,8 +39,8 @@ impl Grid { let center_y = y_range.0 + (r as f64 + 0.5) * (y_span / bins as f64); nodes.push(Node { center: Point { - x: center_x, - y: center_y, + library: center_x, + observed: center_y, weight: 0.0, }, suppressed: false, @@ -70,15 +70,15 @@ impl Grid { /// Adds a single point to the grid, incrementing the frequency of the corresponding cell. pub fn add_point(&mut self, point: &Point) -> Result<(), CalibRtError> { - let Point { x, y, weight } = point; + let Point { library, observed, weight } = point; // If the weight is infinite or NaN, we yell ... 
if weight.is_infinite() || weight.is_nan() { return Err(CalibRtError::UnsupportedWeight(*weight)); } - let gx = (((x - self.x_range.0) / self.x_span) * self.bins as f64) as usize; - let gy = (((y - self.y_range.0) / self.y_span) * self.bins as f64) as usize; + let gx = (((library - self.x_range.0) / self.x_span) * self.bins as f64) as usize; + let gy = (((observed - self.y_range.0) / self.y_span) * self.bins as f64) as usize; let gx = gx.min(self.bins - 1); let gy = gy.min(self.bins - 1); @@ -86,8 +86,8 @@ impl Grid { let index = gy * self.bins + gx; if let Some(node) = self.nodes.get_mut(index) { node.center.weight += weight; - node.sum_wx += x * weight; - node.sum_wy += y * weight; + node.sum_wx += library * weight; + node.sum_wy += observed * weight; node.sum_w += weight; } @@ -155,8 +155,8 @@ impl Grid { if !node.suppressed && node.sum_w > 0.0 { let cx = (node.sum_wx / node.sum_w).clamp(self.x_range.0, self.x_range.1); let cy = (node.sum_wy / node.sum_w).clamp(self.y_range.0, self.y_range.1); - node.center.x = cx; - node.center.y = cy; + node.center.library = cx; + node.center.observed = cy; } } @@ -199,7 +199,7 @@ mod tests { #[test] fn test_grid_reset_preserves_allocation() { let mut grid = Grid::new(10, (0.0, 100.0), (0.0, 100.0)).unwrap(); - grid.add_point(&Point { x: 50.0, y: 50.0, weight: 1.0 }).unwrap(); + grid.add_point(&Point { library: 50.0, observed: 50.0, weight: 1.0 }).unwrap(); let capacity_before = grid.nodes.capacity(); assert!(grid.grid_cells().iter().any(|n| n.sum_w > 0.0)); @@ -271,8 +271,8 @@ mod tests { for (x, y, weight) in test_data.iter() { grid.add_point(&Point { - x: *x, - y: *y, + library: *x, + observed: *y, weight: *weight, }) .unwrap(); @@ -316,8 +316,8 @@ mod tests { for (x, y, weight) in test_data.iter() { grid.add_point(&Point { - x: *x, - y: *y, + library: *x, + observed: *y, weight: *weight, }) .unwrap(); @@ -358,8 +358,8 @@ mod tests { for (x, y, weight) in test_data.iter() { grid.add_point(&Point { - x: *x, - y: *y, + 
library: *x, + observed: *y, weight: *weight, }) .unwrap(); diff --git a/rust/calibrt/src/lib.rs b/rust/calibrt/src/lib.rs index b846c2a..4fbff4d 100644 --- a/rust/calibrt/src/lib.rs +++ b/rust/calibrt/src/lib.rs @@ -40,8 +40,8 @@ pub enum CalibRtError { /// Represents a single data point on the library-measured-RT plane. #[derive(Debug, Clone, Copy, PartialEq, Default, serde::Serialize, serde::Deserialize)] pub struct Point { - pub x: f64, - pub y: f64, + pub library: f64, + pub observed: f64, pub weight: f64, } @@ -65,11 +65,11 @@ impl CalibrationCurve { return Err(CalibRtError::InsufficientPoints); } - points.sort_by(|a, b| a.x.partial_cmp(&b.x).unwrap()); + points.sort_by(|a, b| a.library.partial_cmp(&b.library).unwrap()); let slopes = points .windows(2) - .map(|p| (p[1].y - p[0].y) / (p[1].x - p[0].x).max(MIN_SLOPE_DENOMINATOR)) + .map(|p| (p[1].observed - p[0].observed) / (p[1].library - p[0].library).max(MIN_SLOPE_DENOMINATOR)) .collect(); Ok(Self { points, slopes }) @@ -85,9 +85,9 @@ impl CalibrationCurve { let mut weight: f64 = 0.0; for p in test_points { - match self.predict(p.x) { + match self.predict(p.library) { Ok(predicted_y) => { - let error = predicted_y - p.y; + let error = predicted_y - p.observed; total_error += (error * error) * p.weight; weight += p.weight; } @@ -107,8 +107,8 @@ impl CalibrationCurve { /// Predicts a calibrated measured RT (Y) for a given library RT (X). /// Returns an error if the value is outside the bounds of the calibration curve. pub fn predict(&self, x_val: f64) -> Result { - let first_x = self.points.first().unwrap().x; - let last_x = self.points.last().unwrap().x; + let first_x = self.points.first().unwrap().library; + let last_x = self.points.last().unwrap().library; if x_val < first_x { return Err(CalibRtError::OutOfBounds(self.predict_with_index(x_val, 1))); } @@ -120,7 +120,7 @@ impl CalibrationCurve { } // Find the partition point; first element >= x_val. 
- let i = self.points.partition_point(|p| p.x < x_val); + let i = self.points.partition_point(|p| p.library < x_val); // Clamp to [1, slopes.len()] — partition_point can return 0 when x_val == first_x let i = i.max(1).min(self.slopes.len()); Ok(self.predict_with_index(x_val, i)) @@ -148,15 +148,15 @@ impl CalibrationCurve { ); let p1 = self.points[i - 1]; let slope = self.slopes[i - 1]; - p1.y + (x_val - p1.x) * slope + p1.observed + (x_val - p1.library) * slope } } /// Measurement of the evidence ridge width at one grid column. #[derive(Debug, Clone)] pub struct RidgeMeasurement { - /// Center x position (library RT, seconds). - pub x: f64, + /// Center library RT position (seconds). + pub library: f64, /// Half-width of the ridge in y-units (seconds). pub half_width: f64, /// Total accumulated weight in the expanded range — more weight = more trustworthy. @@ -167,7 +167,7 @@ pub struct RidgeMeasurement { /// Used for save/load. Does not include the fitted curve (reconstructed on load). 
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)] pub struct CalibrationSnapshot { - pub points: Vec<[f64; 3]>, // [x, y, weight] + pub points: Vec<[f64; 3]>, // [library, observed, weight] pub grid_size: usize, pub lookback: usize, } @@ -203,7 +203,7 @@ impl CalibrationState { pub fn update(&mut self, points: impl Iterator) { for (x, y, w) in points { - let _ = self.grid.add_point(&Point { x, y, weight: w }); + let _ = self.grid.add_point(&Point { library: x, observed: y, weight: w }); } self.stale = true; } @@ -235,7 +235,7 @@ impl CalibrationState { self.path_indices.clear(); for pp in &path_points { if let Some(idx) = self.grid.grid_cells().iter().position(|n| { - (n.center.x - pp.x).abs() < 1e-9 && (n.center.y - pp.y).abs() < 1e-9 + (n.center.library - pp.library).abs() < 1e-9 && (n.center.observed - pp.observed).abs() < 1e-9 }) { self.path_indices.push(idx); } @@ -338,7 +338,7 @@ impl CalibrationState { let half_width = ((upper_gy - lower_gy) as f64 + 1.0) * cell_h * 0.5; widths.push(RidgeMeasurement { - x: path_node.center.x, + library: path_node.center.library, half_width, total_weight, }); @@ -543,9 +543,9 @@ pub fn calibrate_with_ranges( /// use calibrt::{Point, calibrate}; /// /// let points = vec![ -/// Point { x: 1.0, y: 1.5, weight: 1.0 }, -/// Point { x: 2.0, y: 2.5, weight: 1.0 }, -/// Point { x: 3.0, y: 3.5, weight: 1.0 }, +/// Point { library: 1.0, observed: 1.5, weight: 1.0 }, +/// Point { library: 2.0, observed: 2.5, weight: 1.0 }, +/// Point { library: 3.0, observed: 3.5, weight: 1.0 }, /// ]; /// /// let curve = calibrate(&points, 100).expect("Calibration failed"); @@ -555,8 +555,8 @@ pub fn calibrate(points: &[Point], grid_size: usize) -> Result lookback { i - lookback } else { 0 }; for j in start..i { // Only create edges where both dimensions increase (monotonic constraint) - if nodes[i].center.x > nodes[j].center.x && nodes[i].center.y > nodes[j].center.y { - let dx = nodes[i].center.x - nodes[j].center.x; - let dy = 
nodes[i].center.y - nodes[j].center.y; + if nodes[i].center.library > nodes[j].center.library && nodes[i].center.observed > nodes[j].center.observed { + let dx = nodes[i].center.library - nodes[j].center.library; + let dy = nodes[i].center.observed - nodes[j].center.observed; let dist = (dx * dx + dy * dy).sqrt(); if dist > DISTANCE_THRESHOLD { diff --git a/rust/calibrt/tests/tests.rs b/rust/calibrt/tests/tests.rs index f92b5e8..d648734 100644 --- a/rust/calibrt/tests/tests.rs +++ b/rust/calibrt/tests/tests.rs @@ -9,8 +9,8 @@ fn test_calibrate_with_linear_data() { // Test: Linear relationship y = x + 10 with slight noise let points: Vec = (0..100) .map(|i| Point { - x: i as f64, - y: i as f64 + 10.0, + library: i as f64, + observed: i as f64 + 10.0, weight: 1.0, }) .collect(); @@ -35,8 +35,8 @@ fn test_calibrate_empty_points() { fn test_calibrate_zero_x_range() { // Test: Zero x range should return error let points = vec![Point { - x: 50.0, - y: 60.0, + library: 50.0, + observed: 60.0, weight: 1.0, }]; let result = calibrate_with_ranges(&points, (50.0, 50.0), (0.0, 100.0), 50, 30); @@ -47,8 +47,8 @@ fn test_calibrate_zero_x_range() { fn test_calibrate_zero_y_range() { // Test: Zero y range should return error let points = vec![Point { - x: 50.0, - y: 60.0, + library: 50.0, + observed: 60.0, weight: 1.0, }]; let result = calibrate_with_ranges(&points, (0.0, 100.0), (60.0, 60.0), 50, 30); @@ -60,8 +60,8 @@ fn test_predict_within_range() { // Test: Prediction within calibration range let points: Vec = (0..50) .map(|i| Point { - x: i as f64, - y: i as f64 * 2.0, + library: i as f64, + observed: i as f64 * 2.0, weight: 1.0, }) .collect(); @@ -76,8 +76,8 @@ fn test_predict_outside_range() { // Test: Prediction outside calibration range let points: Vec = (0..50) .map(|i| Point { - x: i as f64, - y: i as f64, + library: i as f64, + observed: i as f64, weight: 1.0, }) .collect(); @@ -91,8 +91,8 @@ fn test_predict_outside_range() { fn test_calibrate_single_point() { // Test: 
Single point should succeed or fail gracefully let points = vec![Point { - x: 5.0, - y: 10.0, + library: 5.0, + observed: 10.0, weight: 1.0, }]; let result = calibrate(&points, 10); @@ -105,20 +105,20 @@ fn test_calibrate_weighted_points() { // Test: Different weights affect calibration let mut points = vec![ Point { - x: 10.0, - y: 20.0, + library: 10.0, + observed: 20.0, weight: 10.0, }, Point { - x: 10.0, - y: 30.0, + library: 10.0, + observed: 30.0, weight: 1.0, }, ]; for i in 0..20 { points.push(Point { - x: i as f64, - y: i as f64 * 2.0, + library: i as f64, + observed: i as f64 * 2.0, weight: 1.0, }); } diff --git a/rust/timsquery_viewer/src/calibration.rs b/rust/timsquery_viewer/src/calibration.rs index 0afaf5b..3fff94f 100644 --- a/rust/timsquery_viewer/src/calibration.rs +++ b/rust/timsquery_viewer/src/calibration.rs @@ -812,7 +812,7 @@ impl ViewerCalibrationState { let path_pts: Vec<[f64; 2]> = path_indices .iter() .filter_map(|&idx| cells.get(idx)) - .map(|n| [n.center.x, n.center.y]) + .map(|n| [n.center.library, n.center.observed]) .collect(); if !path_pts.is_empty() { @@ -830,8 +830,8 @@ impl ViewerCalibrationState { if let Some(curve) = cs.curve() { let curve_points = curve.points(); if curve_points.len() >= 2 { - let x_min = curve_points.first().unwrap().x; - let x_max = curve_points.last().unwrap().x; + let x_min = curve_points.first().unwrap().library; + let x_max = curve_points.last().unwrap().library; let n_samples = 200; let step = (x_max - x_min) / n_samples as f64; @@ -865,22 +865,22 @@ impl ViewerCalibrationState { let upper: Vec<[f64; 2]> = ridge.iter() .filter_map(|m| { - let y = match curve.predict(m.x) { + let y = match curve.predict(m.library) { Ok(y) => y, Err(calibrt::CalibRtError::OutOfBounds(y)) => y, Err(_) => return None, }; - Some([m.x, y + m.half_width]) + Some([m.library, y + m.half_width]) }) .collect(); let lower: Vec<[f64; 2]> = ridge.iter() .filter_map(|m| { - let y = match curve.predict(m.x) { + let y = match 
curve.predict(m.library) { Ok(y) => y, Err(calibrt::CalibRtError::OutOfBounds(y)) => y, Err(_) => return None, }; - Some([m.x, y - m.half_width]) + Some([m.library, y - m.half_width]) }) .collect(); diff --git a/rust/timsseek/src/rt_calibration.rs b/rust/timsseek/src/rt_calibration.rs index 92fdf16..de2ca43 100644 --- a/rust/timsseek/src/rt_calibration.rs +++ b/rust/timsseek/src/rt_calibration.rs @@ -55,7 +55,7 @@ impl CalibrationResult { } pub fn with_ridge_widths(mut self, mut widths: Vec) -> Self { - widths.sort_by(|a, b| a.x.partial_cmp(&b.x).unwrap_or(std::cmp::Ordering::Equal)); + widths.sort_by(|a, b| a.library.partial_cmp(&b.library).unwrap_or(std::cmp::Ordering::Equal)); self.ridge_widths = widths; self } @@ -69,15 +69,15 @@ impl CalibrationResult { let widths = &self.ridge_widths; // Clamp to endpoints - if library_rt_seconds <= widths[0].x { + if library_rt_seconds <= widths[0].library { return Some(widths[0].half_width); } - if library_rt_seconds >= widths[widths.len() - 1].x { + if library_rt_seconds >= widths[widths.len() - 1].library { return Some(widths[widths.len() - 1].half_width); } // Binary search for the bracketing pair - let pos = widths.partition_point(|m| m.x < library_rt_seconds); + let pos = widths.partition_point(|m| m.library < library_rt_seconds); if pos == 0 { return Some(widths[0].half_width); } @@ -85,7 +85,7 @@ impl CalibrationResult { let right = &widths[pos]; // Linear interpolation - let t = (library_rt_seconds - left.x) / (right.x - left.x).max(1e-9); + let t = (library_rt_seconds - left.library) / (right.library - left.library).max(1e-9); Some(left.half_width + t * (right.half_width - left.half_width)) } @@ -236,13 +236,13 @@ impl CalibrationResult { let end = range.1 as f64 / 1000.0; let points = vec![ Point { - x: start, - y: start, + library: start, + observed: start, weight: 1.0, }, Point { - x: end, - y: end, + library: end, + observed: end, weight: 1.0, }, ]; diff --git a/rust/timsseek_cli/src/processing.rs 
b/rust/timsseek_cli/src/processing.rs index 16468a7..c887445 100644 --- a/rust/timsseek_cli/src/processing.rs +++ b/rust/timsseek_cli/src/processing.rs @@ -471,8 +471,8 @@ fn calibrate_from_phase1( }; Some(Point { - x: irt_for_curve as f64, - y: c.apex_rt_seconds as f64, + library: irt_for_curve as f64, + observed: c.apex_rt_seconds as f64, weight: 1.0, }) }) @@ -494,14 +494,14 @@ fn calibrate_from_phase1( f64::INFINITY, f64::NEG_INFINITY, ), - |(mnx, mxx, mny, mxy), p| (mnx.min(p.x), mxx.max(p.x), mny.min(p.y), mxy.max(p.y)), + |(mnx, mxx, mny, mxy), p| (mnx.min(p.library), mxx.max(p.library), mny.min(p.observed), mxy.max(p.observed)), ); // Use CalibrationState for fitting + ridge width measurement let mut cal_state = CalibratedGrid::new( config.grid_size, (min_x, max_x), (min_y, max_y), config.dp_lookback, )?; - cal_state.update(points.iter().map(|p| (p.x, p.y, p.weight))); + cal_state.update(points.iter().map(|p| (p.library, p.observed, p.weight))); cal_state.fit(); let cal_curve = cal_state.curve() .ok_or(CalibRtError::NoPoints)? @@ -565,8 +565,8 @@ fn calibrate_from_phase1( let mut abs_residuals: Vec = points .iter() .map(|p| { - let predicted = cal_curve.predict(p.x).unwrap_or(p.y); - (p.y - predicted).abs() + let predicted = cal_curve.predict(p.library).unwrap_or(p.observed); + (p.observed - predicted).abs() }) .collect(); abs_residuals.sort_by(|a, b| a.partial_cmp(b).unwrap()); From 971700eff8ce6ebca6169109fc6f2f9ea79dbd38 Mon Sep 17 00:00:00 2001 From: "J. 
Sebastian Paez" Date: Fri, 10 Apr 2026 16:02:56 -0700 Subject: [PATCH 47/64] feat(calibrt): type API boundary with LibraryRT/ObservedRTSeconds newtypes --- Cargo.lock | 1 + rust/calibrt/examples/calibrt.rs | 5 +-- rust/calibrt/src/lib.rs | 59 ++++++++++++++++---------------- rust/calibrt/tests/tests.rs | 9 ++--- 4 files changed, 39 insertions(+), 35 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index a777320..eefe347 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1371,6 +1371,7 @@ dependencies = [ "insta", "rand 0.8.5", "serde", + "serde_json", "tracing", "tracing-subscriber", ] diff --git a/rust/calibrt/examples/calibrt.rs b/rust/calibrt/examples/calibrt.rs index ed152cc..a28cab9 100644 --- a/rust/calibrt/examples/calibrt.rs +++ b/rust/calibrt/examples/calibrt.rs @@ -1,5 +1,6 @@ use calibrt::{ Point, + LibraryRT, calibrate, }; use rand::Rng; @@ -52,10 +53,10 @@ fn main() { println!("\n--- Predictions ---"); for &x in &test_x_vals { let real_expect = real_x_to_y(x); - match calibration_curve.predict(x) { + match calibration_curve.predict(LibraryRT(x)) { Ok(predicted_y) => println!( "- For library RT {:.2}, predicted measured RT is {:.2}; expect {}", - x, predicted_y, real_expect + x, predicted_y.0, real_expect ), Err(e) => eprintln!( "- For library RT {:.2}, prediction failed: {:?} expected: {}", diff --git a/rust/calibrt/src/lib.rs b/rust/calibrt/src/lib.rs index 4fbff4d..13090a4 100644 --- a/rust/calibrt/src/lib.rs +++ b/rust/calibrt/src/lib.rs @@ -80,16 +80,16 @@ impl CalibrationCurve { &self.points } - pub fn wrmse<'a>(&self, test_points: impl Iterator + 'a) -> f64 { + pub fn wrmse(&self, test_points: impl Iterator, ObservedRTSeconds, f64)>) -> f64 { let mut total_error = 0.0; let mut weight: f64 = 0.0; - for p in test_points { - match self.predict(p.library) { + for (lib_rt, obs_rt, w) in test_points { + match self.predict(lib_rt) { Ok(predicted_y) => { - let error = predicted_y - p.observed; - total_error += (error * error) * p.weight; - weight += p.weight; 
+ let error = predicted_y.0 - obs_rt.0; + total_error += (error * error) * w; + weight += w; } Err(_) => { // Ignore out-of-bounds points for MSE calculation @@ -106,7 +106,8 @@ impl CalibrationCurve { /// Predicts a calibrated measured RT (Y) for a given library RT (X). /// Returns an error if the value is outside the bounds of the calibration curve. - pub fn predict(&self, x_val: f64) -> Result { + pub fn predict(&self, lib_rt: LibraryRT) -> Result, CalibRtError> { + let x_val = lib_rt.0; let first_x = self.points.first().unwrap().library; let last_x = self.points.last().unwrap().library; if x_val < first_x { @@ -123,7 +124,7 @@ impl CalibrationCurve { let i = self.points.partition_point(|p| p.library < x_val); // Clamp to [1, slopes.len()] — partition_point can return 0 when x_val == first_x let i = i.max(1).min(self.slopes.len()); - Ok(self.predict_with_index(x_val, i)) + Ok(ObservedRTSeconds(self.predict_with_index(x_val, i))) } /// Internal prediction function that performs linear interpolation using a precomputed slope. @@ -155,8 +156,8 @@ impl CalibrationCurve { /// Measurement of the evidence ridge width at one grid column. #[derive(Debug, Clone)] pub struct RidgeMeasurement { - /// Center library RT position (seconds). - pub library: f64, + /// Center library RT position. + pub library: LibraryRT, /// Half-width of the ridge in y-units (seconds). pub half_width: f64, /// Total accumulated weight in the expanded range — more weight = more trustworthy. 
@@ -201,9 +202,9 @@ impl CalibrationState { }) } - pub fn update(&mut self, points: impl Iterator) { - for (x, y, w) in points { - let _ = self.grid.add_point(&Point { library: x, observed: y, weight: w }); + pub fn update(&mut self, points: impl Iterator, ObservedRTSeconds, f64)>) { + for (lib_rt, obs_rt, w) in points { + let _ = self.grid.add_point(&Point { library: lib_rt.0, observed: obs_rt.0, weight: w }); } self.stale = true; } @@ -338,7 +339,7 @@ impl CalibrationState { let half_width = ((upper_gy - lower_gy) as f64 + 1.0) * cell_h * 0.5; widths.push(RidgeMeasurement { - library: path_node.center.library, + library: LibraryRT(path_node.center.library), half_width, total_weight, }); @@ -348,9 +349,9 @@ impl CalibrationState { } /// Bundle current config into a snapshot (caller provides the points). - pub fn save_snapshot(&self, points: &[(f64, f64, f64)]) -> CalibrationSnapshot { + pub fn save_snapshot(&self, points: &[(LibraryRT, ObservedRTSeconds, f64)]) -> CalibrationSnapshot { CalibrationSnapshot { - points: points.iter().map(|&(x, y, w)| [x, y, w]).collect(), + points: points.iter().map(|&(lib, obs, w)| [lib.0, obs.0, w]).collect(), grid_size: self.grid.bins, lookback: self.lookback, } @@ -365,7 +366,7 @@ impl CalibrationState { let y_range = compute_range(snapshot.points.iter().map(|p| p[1]))?; let mut state = Self::new(snapshot.grid_size, x_range, y_range, snapshot.lookback)?; - state.update(snapshot.points.iter().map(|p| (p[0], p[1], p[2]))); + state.update(snapshot.points.iter().map(|p| (LibraryRT(p[0]), ObservedRTSeconds(p[1]), p[2]))); state.fit(); Ok(state) } @@ -382,10 +383,10 @@ mod calibration_state_tests { #[test] fn test_update_fit_cycle() { let mut state = CalibrationState::new(10, (0.0, 100.0), (0.0, 100.0), 30).unwrap(); - let points: Vec<(f64, f64, f64)> = (0..10) + let points: Vec<(LibraryRT, ObservedRTSeconds, f64)> = (0..10) .map(|i| { let v = (i as f64) * 10.0 + 5.0; - (v, v, 1.0) + (LibraryRT(v), ObservedRTSeconds(v), 1.0) }) 
.collect(); @@ -397,14 +398,14 @@ mod calibration_state_tests { assert!(state.curve().is_some()); let curve = state.curve().unwrap(); - let pred = curve.predict(50.0).unwrap(); - assert!((pred - 50.0).abs() < 5.0, "predicted {} expected ~50.0", pred); + let pred = curve.predict(LibraryRT(50.0)).unwrap(); + assert!((pred.0 - 50.0).abs() < 5.0, "predicted {} expected ~50.0", pred.0); } #[test] fn test_reset_clears_state() { let mut state = CalibrationState::new(10, (0.0, 100.0), (0.0, 100.0), 30).unwrap(); - let points = vec![(25.0, 25.0, 1.0), (75.0, 75.0, 1.0)]; + let points = vec![(LibraryRT(25.0), ObservedRTSeconds(25.0), 1.0), (LibraryRT(75.0), ObservedRTSeconds(75.0), 1.0)]; state.update(points.into_iter()); state.fit(); assert!(state.curve().is_some()); @@ -420,19 +421,19 @@ mod calibration_state_tests { let mut state = CalibrationState::new(10, (0.0, 100.0), (0.0, 100.0), 30).unwrap(); // First fit: y = x - let points1: Vec<_> = (0..10).map(|i| ((i as f64) * 10.0 + 5.0, (i as f64) * 10.0 + 5.0, 1.0)).collect(); + let points1: Vec<_> = (0..10).map(|i| (LibraryRT((i as f64) * 10.0 + 5.0), ObservedRTSeconds((i as f64) * 10.0 + 5.0), 1.0)).collect(); state.update(points1.into_iter()); state.fit(); - let curve1_pred = state.curve().unwrap().predict(50.0).unwrap(); + let curve1_pred = state.curve().unwrap().predict(LibraryRT(50.0)).unwrap(); // Reset and refit: y = 2x state.reset(); - let points2: Vec<_> = (0..10).map(|i| ((i as f64) * 10.0 + 5.0, (i as f64) * 20.0 + 5.0, 1.0)).collect(); + let points2: Vec<_> = (0..10).map(|i| (LibraryRT((i as f64) * 10.0 + 5.0), ObservedRTSeconds((i as f64) * 20.0 + 5.0), 1.0)).collect(); state.update(points2.into_iter()); state.fit(); - let curve2_pred = state.curve().unwrap().predict(50.0).unwrap(); + let curve2_pred = state.curve().unwrap().predict(LibraryRT(50.0)).unwrap(); - assert!((curve2_pred - curve1_pred).abs() > 10.0); + assert!((curve2_pred.0 - curve1_pred.0).abs() > 10.0); } } @@ -504,11 +505,11 @@ pub fn 
calibrate_with_ranges( let calcurve = CalibrationCurve::new(optimal_path_points); match &calcurve { Ok(c) => { - let wrmse = c.wrmse(points.iter()); + let wrmse = c.wrmse(points.iter().map(|p| (LibraryRT(p.library), ObservedRTSeconds(p.observed), p.weight))); info!("Calibration successful, WRMSE: {}", wrmse); plotting::plot_function( |x| { - c.predict(x).map_err(|e| match e { + c.predict(LibraryRT(x)).map(|obs| obs.0).map_err(|e| match e { CalibRtError::OutOfBounds(y) => y, _ => panic!("Unexpected error during plotting"), }) diff --git a/rust/calibrt/tests/tests.rs b/rust/calibrt/tests/tests.rs index d648734..ea2c8d6 100644 --- a/rust/calibrt/tests/tests.rs +++ b/rust/calibrt/tests/tests.rs @@ -1,5 +1,6 @@ use calibrt::{ Point, + LibraryRT, calibrate, calibrate_with_ranges, }; @@ -19,8 +20,8 @@ fn test_calibrate_with_linear_data() { assert!(result.is_ok()); let curve = result.unwrap(); - let predicted = curve.predict(50.0).unwrap(); - assert!((predicted - 60.0).abs() < 5.0); // Allow small error + let predicted = curve.predict(LibraryRT(50.0)).unwrap(); + assert!((predicted.0 - 60.0).abs() < 5.0); // Allow small error } #[test] @@ -67,7 +68,7 @@ fn test_predict_within_range() { .collect(); let curve = calibrate(&points, 30).unwrap(); - let result = curve.predict(25.0); + let result = curve.predict(LibraryRT(25.0)); assert!(result.is_ok()); } @@ -83,7 +84,7 @@ fn test_predict_outside_range() { .collect(); let curve = calibrate(&points, 30).unwrap(); - let result = curve.predict(100.0); + let result = curve.predict(LibraryRT(100.0)); assert!(result.is_err()); } From 5079c23bdc2bf95f2ccec954b0f3ceb269058d79 Mon Sep 17 00:00:00 2001 From: "J. 
Sebastian Paez" Date: Fri, 10 Apr 2026 16:09:02 -0700 Subject: [PATCH 48/64] feat(calibrt): ping-pong weight buffers with 3x3 gaussian blur for ridge measurement --- rust/array2d/src/lib.rs | 52 ++++++++++++++++++++++++++++++++++++++++ rust/calibrt/src/grid.rs | 37 ++++++++++++++++++++++++++++ rust/calibrt/src/lib.rs | 33 +++++++++++++------------ 3 files changed, 107 insertions(+), 15 deletions(-) diff --git a/rust/array2d/src/lib.rs b/rust/array2d/src/lib.rs index f800823..5a9e275 100644 --- a/rust/array2d/src/lib.rs +++ b/rust/array2d/src/lib.rs @@ -377,6 +377,32 @@ impl> Array2D { Array2D::from_flat_vector(result, self.nrows(), self.ncols()).unwrap() } + /// Apply a 3x3 kernel to the 2D array, writing results into `output`. + /// Both arrays must have the same dimensions. + /// Boundary cells use clamped indexing (repeat edge values). + /// Kernel layout: [top-left, top-center, top-right, mid-left, mid-center, mid-right, bot-left, bot-center, bot-right] + pub fn convolve_2d_into(&self, kernel: &[T; 9], output: &mut Array2D) { + assert_eq!(self.n_row, output.n_row); + assert_eq!(self.n_col, output.n_col); + + let nrows = self.n_row; + let ncols = self.n_col; + + for r in 0..nrows { + for c in 0..ncols { + let mut acc = T::default(); + for (ki, &kval) in kernel.iter().enumerate() { + let dr = (ki / 3) as isize - 1; + let dc = (ki % 3) as isize - 1; + let sr = (r as isize + dr).clamp(0, (nrows - 1) as isize) as usize; + let sc = (c as isize + dc).clamp(0, (ncols - 1) as isize) as usize; + acc += self.values[sr * ncols + sc] * kval; + } + output.values[r * ncols + c] = acc; + } + } + } + pub fn convolve_fold( &self, kernel: &[T], @@ -680,4 +706,30 @@ mod tests { // get_row should return None for index 0 assert_eq!(array.get_row(0), None); } + + #[test] + fn test_convolve_2d_identity() { + let array = Array2D::from_flat_vector(vec![1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 9.0], 3, 3).unwrap(); + let kernel = [0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0]; + let mut 
output = Array2D::from_flat_vector(vec![0.0; 9], 3, 3).unwrap(); + array.convolve_2d_into(&kernel, &mut output); + assert_eq!(output.as_flat_slice(), array.as_flat_slice()); + } + + #[test] + fn test_convolve_2d_blur() { + let array = Array2D::from_flat_vector(vec![0.0, 0.0, 0.0, 0.0, 10.0, 0.0, 0.0, 0.0, 0.0], 3, 3).unwrap(); + let kernel: [f64; 9] = [ + 1.0/10.0, 1.0/10.0, 1.0/10.0, + 1.0/10.0, 2.0/10.0, 1.0/10.0, + 1.0/10.0, 1.0/10.0, 1.0/10.0, + ]; + let mut output = Array2D::from_flat_vector(vec![0.0; 9], 3, 3).unwrap(); + array.convolve_2d_into(&kernel, &mut output); + // Center should get 10 * 2/10 = 2.0 + assert!((output.as_flat_slice()[4] - 2.0).abs() < 1e-9); + // Each neighbor should get 10 * 1/10 = 1.0 + assert!((output.as_flat_slice()[0] - 1.0).abs() < 1e-9); + assert!((output.as_flat_slice()[1] - 1.0).abs() < 1e-9); + } } diff --git a/rust/calibrt/src/grid.rs b/rust/calibrt/src/grid.rs index 7af5f9c..b156034 100644 --- a/rust/calibrt/src/grid.rs +++ b/rust/calibrt/src/grid.rs @@ -1,3 +1,4 @@ +use array2d::Array2D; use crate::{ CalibRtError, Point, @@ -11,6 +12,9 @@ pub struct Grid { x_span: f64, y_span: f64, pub(crate) bins: usize, + /// Ping-pong weight buffers. A = raw weights, B = blurred output. + weights_a: Array2D, + weights_b: Array2D, } impl Grid { @@ -51,6 +55,11 @@ impl Grid { } } + let weights_a = Array2D::from_flat_vector(vec![0.0; bins * bins], bins, bins) + .expect("Grid dimensions are valid"); + let weights_b = Array2D::from_flat_vector(vec![0.0; bins * bins], bins, bins) + .expect("Grid dimensions are valid"); + Ok(Self { nodes, x_range, @@ -58,6 +67,8 @@ impl Grid { x_span, y_span, bins, + weights_a, + weights_b, }) } @@ -172,12 +183,38 @@ impl Grid { node.sum_wy = 0.0; node.sum_w = 0.0; } + self.weights_a.reset_with_value(self.bins, self.bins, 0.0); + self.weights_b.reset_with_value(self.bins, self.bins, 0.0); } /// Read access to all grid cells. 
pub fn grid_cells(&self) -> &[Node] { &self.nodes } + + /// Copy node weights into buffer A for blur processing. + pub(crate) fn sync_weights(&mut self) { + for (i, node) in self.nodes.iter().enumerate() { + let r = i / self.bins; + let c = i % self.bins; + self.weights_a.insert(r, c, node.center.weight); + } + } + + /// Apply 3x3 approximate gaussian blur: A -> B. + pub(crate) fn blur_weights(&mut self) { + const KERNEL: [f64; 9] = [ + 1.0/10.0, 1.0/10.0, 1.0/10.0, + 1.0/10.0, 2.0/10.0, 1.0/10.0, + 1.0/10.0, 1.0/10.0, 1.0/10.0, + ]; + self.weights_a.convolve_2d_into(&KERNEL, &mut self.weights_b); + } + + /// Read access to blurred weight at grid position (row, col). + pub(crate) fn blurred_weight(&self, row: usize, col: usize) -> f64 { + self.weights_b.as_flat_slice()[row * self.bins + col] + } } /// Represents a node (cell) in the grid. diff --git a/rust/calibrt/src/lib.rs b/rust/calibrt/src/lib.rs index 13090a4..0cd3f66 100644 --- a/rust/calibrt/src/lib.rs +++ b/rust/calibrt/src/lib.rs @@ -287,37 +287,40 @@ impl CalibrationState { /// (e.g., 0.1 = expand until weight < 10% of path cell). /// `total_weight`: sum of all cell weights in the expanded range — heavier /// columns should carry more authority in tolerance estimation. 
- pub fn measure_ridge_width(&self, fraction: f64) -> Vec { + pub fn measure_ridge_width(&mut self, fraction: f64) -> Vec { let bins = self.grid.bins; - let cells = self.grid.grid_cells(); let y_span = self.grid.y_range.1 - self.grid.y_range.0; let cell_h = y_span / bins as f64; + // Sync node weights into buffer A, then blur into buffer B + self.grid.sync_weights(); + self.grid.blur_weights(); + let mut widths = Vec::new(); for &path_idx in &self.path_indices { - let path_node = &cells[path_idx]; - if path_node.center.weight <= 0.0 { + let gx = path_idx % bins; + let gy = path_idx / bins; + let path_weight = self.grid.blurred_weight(gy, gx); + if path_weight <= 0.0 { continue; } - let gx = path_idx % bins; - let gy = path_idx / bins; - let threshold = path_node.center.weight * fraction; + let threshold = path_weight * fraction; // Expand upward (increasing gy) from path cell let mut upper_gy = gy; - let mut total_weight = path_node.center.weight; + let mut total_weight = path_weight; for dy in 1..bins { let check_gy = gy + dy; if check_gy >= bins { break; } - let idx = check_gy * bins + gx; - if cells[idx].center.weight < threshold { + let w = self.grid.blurred_weight(check_gy, gx); + if w < threshold { break; } - total_weight += cells[idx].center.weight; + total_weight += w; upper_gy = check_gy; } @@ -328,18 +331,18 @@ impl CalibrationState { break; } let check_gy = gy - dy; - let idx = check_gy * bins + gx; - if cells[idx].center.weight < threshold { + let w = self.grid.blurred_weight(check_gy, gx); + if w < threshold { break; } - total_weight += cells[idx].center.weight; + total_weight += w; lower_gy = check_gy; } let half_width = ((upper_gy - lower_gy) as f64 + 1.0) * cell_h * 0.5; widths.push(RidgeMeasurement { - library: LibraryRT(path_node.center.library), + library: LibraryRT(self.grid.grid_cells()[path_idx].center.library), half_width, total_weight, }); From fedcb6001218cb4e6a50668b7144100089362c30 Mon Sep 17 00:00:00 2001 From: "J. 
Sebastian Paez" Date: Fri, 10 Apr 2026 16:12:15 -0700 Subject: [PATCH 49/64] feat(timsseek): use RT newtypes in CalibrationResult API --- rust/timsseek/src/rt_calibration.rs | 29 ++++++++++++++++------------- 1 file changed, 16 insertions(+), 13 deletions(-) diff --git a/rust/timsseek/src/rt_calibration.rs b/rust/timsseek/src/rt_calibration.rs index de2ca43..26e392c 100644 --- a/rust/timsseek/src/rt_calibration.rs +++ b/rust/timsseek/src/rt_calibration.rs @@ -5,6 +5,8 @@ pub use calibrt::{ CalibrationCurve as RTCalibration, CalibrationSnapshot, CalibrationState as CalibratedGrid, + LibraryRT, + ObservedRTSeconds, Point, RidgeMeasurement, calibrate_with_ranges, @@ -55,29 +57,30 @@ impl CalibrationResult { } pub fn with_ridge_widths(mut self, mut widths: Vec) -> Self { - widths.sort_by(|a, b| a.library.partial_cmp(&b.library).unwrap_or(std::cmp::Ordering::Equal)); + widths.sort_by(|a, b| a.library.0.partial_cmp(&b.library.0).unwrap_or(std::cmp::Ordering::Equal)); self.ridge_widths = widths; self } /// Interpolate ridge half-width at a given library RT (seconds). /// Returns the half-width in seconds, or None if no ridge data. 
- fn ridge_half_width_at(&self, library_rt_seconds: f64) -> Option { + fn ridge_half_width_at(&self, library_rt: LibraryRT) -> Option { if self.ridge_widths.is_empty() { return None; } let widths = &self.ridge_widths; + let rt = library_rt.0; // Clamp to endpoints - if library_rt_seconds <= widths[0].library { + if rt <= widths[0].library.0 { return Some(widths[0].half_width); } - if library_rt_seconds >= widths[widths.len() - 1].library { + if rt >= widths[widths.len() - 1].library.0 { return Some(widths[widths.len() - 1].half_width); } // Binary search for the bracketing pair - let pos = widths.partition_point(|m| m.library < library_rt_seconds); + let pos = widths.partition_point(|m| m.library.0 < rt); if pos == 0 { return Some(widths[0].half_width); } @@ -85,16 +88,16 @@ impl CalibrationResult { let right = &widths[pos]; // Linear interpolation - let t = (library_rt_seconds - left.library) / (right.library - left.library).max(1e-9); + let t = (rt - left.library.0) / (right.library.0 - left.library.0).max(1e-9); Some(left.half_width + t * (right.half_width - left.half_width)) } /// Convert indexed RT to calibrated absolute RT (seconds). - pub fn convert_irt(&self, irt_seconds: f32) -> f32 { - match self.cal_curve.predict(irt_seconds as f64) { - Ok(rt) => rt as f32, - Err(CalibRtError::OutOfBounds(rt)) => rt as f32, - Err(_) => irt_seconds, + pub fn convert_irt(&self, irt: LibraryRT) -> ObservedRTSeconds { + match self.cal_curve.predict(LibraryRT(irt.0 as f64)) { + Ok(rt) => ObservedRTSeconds(rt.0 as f32), + Err(CalibRtError::OutOfBounds(rt)) => ObservedRTSeconds(rt as f32), + Err(_) => ObservedRTSeconds(irt.0), } } @@ -106,9 +109,9 @@ impl CalibrationResult { /// Get per-query tolerance. Uses position-dependent ridge width when available, /// falls back to uniform `rt_tolerance_minutes` otherwise. /// `rt` is the library RT in seconds (pre-calibration). 
- pub fn get_tolerance(&self, _mz: f64, _mobility: f32, rt: f32) -> Tolerance { + pub fn get_tolerance(&self, _mz: f64, _mobility: f32, rt: LibraryRT) -> Tolerance { let rt_tol_minutes = self - .ridge_half_width_at(rt as f64) + .ridge_half_width_at(LibraryRT(rt.0 as f64)) .map(|hw| (hw * RIDGE_WIDTH_MULTIPLIER / 60.0) as f32) .unwrap_or(self.rt_tolerance_minutes) .max(MIN_RT_TOLERANCE_MINUTES); From c04a218d15f3e6174dbdf1b9c6509d830ee1bd7b Mon Sep 17 00:00:00 2001 From: "J. Sebastian Paez" Date: Fri, 10 Apr 2026 17:05:56 -0700 Subject: [PATCH 50/64] feat: propagate RT newtypes to CalibrantCandidate and all callers --- rust/timsquery_viewer/src/app.rs | 9 ++-- rust/timsquery_viewer/src/calibration.rs | 56 ++++++++++++------------ rust/timsseek/src/scoring/pipeline.rs | 22 +++++----- rust/timsseek_cli/src/processing.rs | 12 ++--- 4 files changed, 51 insertions(+), 48 deletions(-) diff --git a/rust/timsquery_viewer/src/app.rs b/rust/timsquery_viewer/src/app.rs index eed8207..42b4f95 100644 --- a/rust/timsquery_viewer/src/app.rs +++ b/rust/timsquery_viewer/src/app.rs @@ -9,6 +9,7 @@ use egui_dock::{ use std::path::PathBuf; use std::sync::Arc; use std::time::Instant; +use calibrt::LibraryRT; use timsquery::models::tolerance::Tolerance; use timsquery::serde::IndexedPeaksHandle; @@ -526,8 +527,8 @@ impl ViewerApp { if let Some(cs) = &self.calibration.calibration_state { if let Some(curve) = cs.curve() { let lib_rt = elution_group_owned.rt_seconds(); - let calibrated_rt = match curve.predict(lib_rt as f64) { - Ok(y) => y as f32, + let calibrated_rt = match curve.predict(LibraryRT(lib_rt as f64)) { + Ok(y) => y.0 as f32, Err(calibrt::CalibRtError::OutOfBounds(y)) => y as f32, Err(_) => lib_rt, }; @@ -684,8 +685,8 @@ impl ViewerApp { // Check if calibration projects to a different RT let calibrated_rt = self.calibration.calibration_state.as_ref() .and_then(|cs| cs.curve()) - .and_then(|curve| match curve.predict(lib_rt) { - Ok(y) => Some(y), + .and_then(|curve| match 
curve.predict(LibraryRT(lib_rt)) { + Ok(y) => Some(y.0), Err(calibrt::CalibRtError::OutOfBounds(y)) => Some(y), Err(_) => None, }); diff --git a/rust/timsquery_viewer/src/calibration.rs b/rust/timsquery_viewer/src/calibration.rs index 3fff94f..eee3bb8 100644 --- a/rust/timsquery_viewer/src/calibration.rs +++ b/rust/timsquery_viewer/src/calibration.rs @@ -12,7 +12,7 @@ use std::thread::JoinHandle; use eframe::egui; -use calibrt::CalibrationState; +use calibrt::{CalibrationState, LibraryRT, ObservedRTSeconds}; use timscentroid::rt_mapping::{MS1CycleIndex, RTIndex}; use timsquery::models::tolerance::{ MobilityTolerance, MzTolerance, QuadTolerance, RtTolerance, Tolerance, @@ -80,8 +80,8 @@ pub enum CalibrationMessage { Snapshot { n_scored: usize, heap_len: usize, - /// (library_rt_seconds, apex_rt_seconds, score) - points: Vec<(f64, f64, f64)>, + /// (library_rt, apex_rt, score) + points: Vec<(LibraryRT, ObservedRTSeconds, f64)>, }, /// Thread completed (all elution groups scored or stopped). Done { n_scored: usize }, @@ -119,7 +119,7 @@ pub struct ViewerCalibrationState { receiver: Option>, /// Latest calibrant points: (library_rt, apex_rt, score). 
- pub snapshot_points: Vec<(f64, f64, f64)>, + pub snapshot_points: Vec<(LibraryRT, ObservedRTSeconds, f64)>, } impl Default for ViewerCalibrationState { @@ -151,10 +151,10 @@ impl ViewerCalibrationState { return Self::default(); } - let snapshot_points: Vec<(f64, f64, f64)> = snapshot + let snapshot_points: Vec<(LibraryRT, ObservedRTSeconds, f64)> = snapshot .points .iter() - .map(|p| (p[0], p[1], p[2])) + .map(|p| (LibraryRT(p[0]), ObservedRTSeconds(p[1]), p[2])) .collect(); let n_calibrants = snapshot_points.len(); @@ -190,7 +190,7 @@ impl ViewerCalibrationState { Some(cs.save_snapshot(&self.snapshot_points)) } else { Some(calibrt::CalibrationSnapshot { - points: self.snapshot_points.iter().map(|&(x, y, w)| [x, y, w]).collect(), + points: self.snapshot_points.iter().map(|&(lib, obs, w)| [lib.0, obs.0, w]).collect(), grid_size: DEFAULT_GRID_SIZE, lookback: DEFAULT_LOOKBACK, }) @@ -336,7 +336,7 @@ impl ViewerCalibrationState { self.snapshot_points .iter() .map(|&(lib_rt, apex_rt, _weight)| (lib_rt, apex_rt, 1.0)), - ); + ); // snapshot_points already typed cs.fit(); let has_curve = cs.curve().is_some(); let n_path = cs.path_indices().len(); @@ -464,9 +464,9 @@ impl ViewerCalibrationState { if let Ok(apex) = scorer.suggest_apex(&rt_mapper, 0) { local_heap.push(CalibrantCandidate { score: apex.score, - apex_rt_seconds: apex.retention_time_ms as f32 / 1000.0, + apex_rt: ObservedRTSeconds(apex.retention_time_ms as f32 / 1000.0), speclib_index: eg_idx, - library_rt_seconds: elution_group.rt_seconds(), + library_rt: LibraryRT(elution_group.rt_seconds()), }); } (scorer, local_heap) @@ -480,12 +480,12 @@ impl ViewerCalibrationState { n_scored += chunk.len(); // Send snapshot. 
- let points: Vec<(f64, f64, f64)> = heap + let points: Vec<(LibraryRT, ObservedRTSeconds, f64)> = heap .iter() .map(|c| { ( - c.library_rt_seconds as f64, - c.apex_rt_seconds as f64, + LibraryRT(c.library_rt.0 as f64), + ObservedRTSeconds(c.apex_rt.0 as f64), c.score as f64, ) }) @@ -520,7 +520,7 @@ impl ViewerCalibrationState { version: "v1".to_string(), rt_range_seconds, calibration: CalibrationSnapshot { - points: self.snapshot_points.iter().map(|&(x, y, w)| [x, y, w]).collect(), + points: self.snapshot_points.iter().map(|&(lib, obs, w)| [lib.0, obs.0, w]).collect(), grid_size: DEFAULT_GRID_SIZE, lookback: DEFAULT_LOOKBACK, }, @@ -551,7 +551,7 @@ impl ViewerCalibrationState { if let Ok(cal) = calibrt::CalibrationState::from_snapshot(&loaded.snapshot) { self.snapshot_points = loaded.snapshot.points .iter() - .map(|p| (p[0], p[1], p[2])) + .map(|p| (LibraryRT(p[0]), ObservedRTSeconds(p[1]), p[2])) .collect(); self.calibration_state = Some(cal); } @@ -737,7 +737,7 @@ impl ViewerCalibrationState { } /// Render the scatter + curve calibration plot. - fn render_calibration_plot(&self, ui: &mut egui::Ui, selected_library_rt: Option) { + fn render_calibration_plot(&mut self, ui: &mut egui::Ui, selected_library_rt: Option) { use egui_plot::{Line, Plot, PlotPoints, Points, Polygon, VLine, HLine}; let plot_id = format!("calibration_plot_{}", self.generation); @@ -748,7 +748,7 @@ impl ViewerCalibrationState { .allow_zoom(true) .allow_drag(true); - let cal_state = self.calibration_state.as_ref(); + let cal_state = self.calibration_state.as_mut(); plot.show(ui, |plot_ui| { // Grid heatmap from CalibrationState (if available). 
@@ -827,7 +827,7 @@ impl ViewerCalibrationState { } // Fitted curve + ridge envelope - if let Some(curve) = cs.curve() { + if let Some(curve) = cs.curve().cloned() { let curve_points = curve.points(); if curve_points.len() >= 2 { let x_min = curve_points.first().unwrap().library; @@ -838,8 +838,8 @@ impl ViewerCalibrationState { let line_pts: Vec<[f64; 2]> = (0..=n_samples) .filter_map(|i| { let x = x_min + i as f64 * step; - let y = match curve.predict(x) { - Ok(y) => y, + let y = match curve.predict(LibraryRT(x)) { + Ok(y) => y.0, Err(calibrt::CalibRtError::OutOfBounds(y)) => y, Err(_) => return None, }; @@ -866,21 +866,21 @@ impl ViewerCalibrationState { let upper: Vec<[f64; 2]> = ridge.iter() .filter_map(|m| { let y = match curve.predict(m.library) { - Ok(y) => y, + Ok(y) => y.0, Err(calibrt::CalibRtError::OutOfBounds(y)) => y, Err(_) => return None, }; - Some([m.library, y + m.half_width]) + Some([m.library.0, y + m.half_width]) }) .collect(); let lower: Vec<[f64; 2]> = ridge.iter() .filter_map(|m| { let y = match curve.predict(m.library) { - Ok(y) => y, + Ok(y) => y.0, Err(calibrt::CalibRtError::OutOfBounds(y)) => y, Err(_) => return None, }; - Some([m.library, y - m.half_width]) + Some([m.library.0, y - m.half_width]) }) .collect(); @@ -916,8 +916,8 @@ impl ViewerCalibrationState { // If curve is fitted, show predicted RT + tolerance band if let Some(curve) = cs.curve() { - let predicted_rt = match curve.predict(lib_rt) { - Ok(y) => y, + let predicted_rt = match curve.predict(LibraryRT(lib_rt)) { + Ok(y) => y.0, Err(calibrt::CalibRtError::OutOfBounds(y)) => y, Err(_) => lib_rt, }; @@ -946,7 +946,7 @@ impl ViewerCalibrationState { let raw_pts: Vec<[f64; 2]> = self .snapshot_points .iter() - .map(|&(lib_rt, apex_rt, _)| [lib_rt, apex_rt]) + .map(|&(lib_rt, apex_rt, _)| [lib_rt.0, apex_rt.0]) .collect(); plot_ui.points( @@ -968,7 +968,7 @@ impl ViewerCalibrationState { // half-width gives the global tolerance — heavy columns count more. 
let ridge_stats = self .calibration_state - .as_ref() + .as_mut() .and_then(|cs| { cs.curve()?; // ensure curve is fitted let measurements = cs.measure_ridge_width(0.1); diff --git a/rust/timsseek/src/scoring/pipeline.rs b/rust/timsseek/src/scoring/pipeline.rs index a037916..d96694f 100644 --- a/rust/timsseek/src/scoring/pipeline.rs +++ b/rust/timsseek/src/scoring/pipeline.rs @@ -58,7 +58,7 @@ use super::results::{ ScoredCandidateBuilder, }; use super::timings::ScoreTimings; -use crate::rt_calibration::CalibrationResult; +use crate::rt_calibration::{CalibrationResult, LibraryRT, ObservedRTSeconds}; use tracing::warn; /// Lightweight calibrant candidate — just enough to re-query in Phase 2. @@ -66,9 +66,9 @@ use tracing::warn; #[derive(Debug, Clone)] pub struct CalibrantCandidate { pub score: f32, - pub apex_rt_seconds: f32, + pub apex_rt: ObservedRTSeconds, pub speclib_index: usize, - pub library_rt_seconds: f32, + pub library_rt: LibraryRT, } impl PartialEq for CalibrantCandidate { @@ -531,7 +531,7 @@ impl Scorer { ), SkippingReason, > { - let original_irt = item.query.rt_seconds(); + let original_irt = LibraryRT(item.query.rt_seconds()); let calibrated_rt = calibration.convert_irt(original_irt); let tolerance = calibration.get_tolerance( item.query.mono_precursor_mz(), @@ -539,12 +539,12 @@ impl Scorer { original_irt, // library RT — ridge widths are indexed by library RT ); - let calibrated_query = item.query.clone().with_rt_seconds(calibrated_rt); + let calibrated_query = item.query.clone().with_rt_seconds(calibrated_rt.0); let max_range = self.index.ms1_cycle_mapping().range_milis(); let max_range = TupleRange::try_new(max_range.0, max_range.1) .expect("Reference RTs should be sorted and valid"); - let rt_range = match tolerance.rt_range_as_milis(calibrated_rt) { + let rt_range = match tolerance.rt_range_as_milis(calibrated_rt.0) { OptionallyRestricted::Unrestricted => max_range, OptionallyRestricted::Restricted(r) => r, }; @@ -570,7 +570,7 @@ impl Scorer { 
digest: item.digest.clone(), charge: item.query.precursor_charge(), library_id: agg.eg.id() as u32, - query_rt_seconds: calibrated_rt, + query_rt_seconds: calibrated_rt.0, ref_mobility_ook0: item.query.mobility_ook0(), ref_precursor_mz: item.query.mono_precursor_mz(), }; @@ -771,9 +771,9 @@ impl Scorer { if let Some((loc, _meta)) = self.prescore(item, &mut scorer) { heap.push(CalibrantCandidate { score: loc.score, - apex_rt_seconds: loc.retention_time_ms as f32 / 1000.0, + apex_rt: ObservedRTSeconds(loc.retention_time_ms as f32 / 1000.0), speclib_index: speclib_offset + chunk_idx, - library_rt_seconds: item.query.rt_seconds(), + library_rt: LibraryRT(item.query.rt_seconds()), }); } (scorer, heap) @@ -794,9 +794,9 @@ impl Scorer { if let Some((loc, _meta)) = self.prescore(item, &mut scorer) { heap.push(CalibrantCandidate { score: loc.score, - apex_rt_seconds: loc.retention_time_ms as f32 / 1000.0, + apex_rt: ObservedRTSeconds(loc.retention_time_ms as f32 / 1000.0), speclib_index: speclib_offset + chunk_idx, - library_rt_seconds: item.query.rt_seconds(), + library_rt: LibraryRT(item.query.rt_seconds()), }); } } diff --git a/rust/timsseek_cli/src/processing.rs b/rust/timsseek_cli/src/processing.rs index c887445..1d97573 100644 --- a/rust/timsseek_cli/src/processing.rs +++ b/rust/timsseek_cli/src/processing.rs @@ -24,6 +24,8 @@ use timsseek::rt_calibration::{ CalibRtError, CalibrationResult, CalibratedGrid, + LibraryRT, + ObservedRTSeconds, Point, }; use timsseek::scoring::{ @@ -190,7 +192,7 @@ pub fn execute_pipeline( // Snapshot calibrant points before calibration consumes them (for saving) let calibrant_points: Vec<[f64; 3]> = calibrants .iter() - .map(|c| [c.library_rt_seconds as f64, c.apex_rt_seconds as f64, 1.0]) + .map(|c| [c.library_rt.0 as f64, c.apex_rt.0 as f64, 1.0]) .collect(); info!("Phase 2: Calibration..."); @@ -472,7 +474,7 @@ fn calibrate_from_phase1( Some(Point { library: irt_for_curve as f64, - observed: c.apex_rt_seconds as f64, + observed: 
c.apex_rt.0 as f64, weight: 1.0, }) }) @@ -501,7 +503,7 @@ fn calibrate_from_phase1( let mut cal_state = CalibratedGrid::new( config.grid_size, (min_x, max_x), (min_y, max_y), config.dp_lookback, )?; - cal_state.update(points.iter().map(|p| (p.library, p.observed, p.weight))); + cal_state.update(points.iter().map(|p| (LibraryRT(p.library), ObservedRTSeconds(p.observed), p.weight))); cal_state.fit(); let cal_curve = cal_state.curve() .ok_or(CalibRtError::NoPoints)? @@ -542,7 +544,7 @@ fn calibrate_from_phase1( let query_at_apex = item .query .clone() - .with_rt_seconds(candidate.apex_rt_seconds); + .with_rt_seconds(candidate.apex_rt.0); let mut agg: SpectralCollector = SpectralCollector::new(query_at_apex); pipeline.index.add_query(&mut agg, &query_tolerance); @@ -565,7 +567,7 @@ fn calibrate_from_phase1( let mut abs_residuals: Vec = points .iter() .map(|p| { - let predicted = cal_curve.predict(p.library).unwrap_or(p.observed); + let predicted = cal_curve.predict(LibraryRT(p.library)).map(|v| v.0).unwrap_or(p.observed); (p.observed - predicted).abs() }) .collect(); From e87fdb0467d807ddd91a7b43d532bdf4c386f64c Mon Sep 17 00:00:00 2001 From: "J. Sebastian Paez" Date: Fri, 10 Apr 2026 19:43:16 -0700 Subject: [PATCH 51/64] feat(calibrt): greedy path extension, sqrt edge weights, and extrapolation viz Pathfinding: use geometric mean (sqrt weights) for edge weight formula to reduce bias against sparse regions. After DP pass, greedily extend path backward/forward through monotonic non-suppressed cells to cover full RT range. Viewer: draw extrapolated prediction as dashed red line beyond curve bounds, clamped to grid y-range to prevent runaway extrapolation. 
--- rust/calibrt/src/pathfinding.rs | 71 ++++++++++++++++++++++-- rust/timsquery_viewer/src/calibration.rs | 69 ++++++++++++++++++----- 2 files changed, 121 insertions(+), 19 deletions(-) diff --git a/rust/calibrt/src/pathfinding.rs b/rust/calibrt/src/pathfinding.rs index 1b02da9..22ea7bb 100644 --- a/rust/calibrt/src/pathfinding.rs +++ b/rust/calibrt/src/pathfinding.rs @@ -51,11 +51,11 @@ pub(crate) fn find_optimal_path( let dist = (dx * dx + dy * dy).sqrt(); if dist > DISTANCE_THRESHOLD { - // Edge weight formula: (weight_i * weight_j) / distance - // - Product of weights: Prioritizes paths through high-confidence nodes + // Edge weight formula: sqrt(weight_i) * sqrt(weight_j) / distance + // - Geometric mean of weights: Prefers high-confidence nodes but doesn't + // annihilate edges to sparse-but-real cells (sqrt compresses the scale) // - Division by distance: Penalizes long jumps, encouraging smooth curves - // This balances data fidelity (high weights) with geometric smoothness (short edges) - let edge_weight = (nodes[i].center.weight * nodes[j].center.weight) / dist; + let edge_weight = (nodes[i].center.weight.sqrt() * nodes[j].center.weight.sqrt()) / dist; let new_weight = max_weights[j] + edge_weight; if new_weight > max_weights[i] { @@ -78,7 +78,7 @@ pub(crate) fn find_optimal_path( } } - // Reconstruct the path + // Reconstruct the DP path let mut path = Vec::new(); let mut current_idx_opt = Some(end_of_path_idx); while let Some(current_idx) = current_idx_opt { @@ -87,5 +87,64 @@ pub(crate) fn find_optimal_path( } path.reverse(); - path + if path.is_empty() { + return path; + } + + // Pass 2: Greedily extend the path beyond the DP endpoints. + // The DP optimizes total weight and may skip sparse-but-valid regions at + // the edges. We extend by walking through remaining non-suppressed nodes + // that satisfy monotonicity, picking the nearest one at each step. + + // Extend backward: find nodes before the path start that satisfy monotonicity. 
+ // Nodes are sorted by (library, observed), so candidates are before the path's + // first node in the sorted order. + let first = path[0]; + let mut prefix = Vec::new(); + // Walk backward through sorted nodes, greedily picking the nearest monotonic predecessor + let first_sorted_idx = nodes.iter().position(|n| { + (n.center.library - first.library).abs() < 1e-9 + && (n.center.observed - first.observed).abs() < 1e-9 + }); + if let Some(start_idx) = first_sorted_idx { + let mut cursor = first; + for j in (0..start_idx).rev() { + let candidate = nodes[j].center; + if candidate.weight > 0.0 + && candidate.library < cursor.library + && candidate.observed < cursor.observed + { + prefix.push(candidate); + cursor = candidate; + } + } + prefix.reverse(); + } + + // Extend forward: find nodes after the path end that satisfy monotonicity. + let last = *path.last().unwrap(); + let last_sorted_idx = nodes.iter().rposition(|n| { + (n.center.library - last.library).abs() < 1e-9 + && (n.center.observed - last.observed).abs() < 1e-9 + }); + let mut suffix = Vec::new(); + if let Some(end_idx) = last_sorted_idx { + let mut cursor = last; + for j in (end_idx + 1)..n { + let candidate = nodes[j].center; + if candidate.weight > 0.0 + && candidate.library > cursor.library + && candidate.observed > cursor.observed + { + suffix.push(candidate); + cursor = candidate; + } + } + } + + // Assemble: prefix + DP path + suffix + let mut full_path = prefix; + full_path.append(&mut path); + full_path.append(&mut suffix); + full_path } diff --git a/rust/timsquery_viewer/src/calibration.rs b/rust/timsquery_viewer/src/calibration.rs index eee3bb8..c504701 100644 --- a/rust/timsquery_viewer/src/calibration.rs +++ b/rust/timsquery_viewer/src/calibration.rs @@ -826,38 +826,81 @@ impl ViewerCalibrationState { ); } - // Fitted curve + ridge envelope + // Fitted curve + extrapolation over full grid range if let Some(curve) = cs.curve().cloned() { let curve_points = curve.points(); if curve_points.len() 
>= 2 { - let x_min = curve_points.first().unwrap().library; - let x_max = curve_points.last().unwrap().library; + let curve_x_min = curve_points.first().unwrap().library; + let curve_x_max = curve_points.last().unwrap().library; + let (grid_x_min, grid_x_max) = cs.grid_x_range(); let n_samples = 200; - let step = (x_max - x_min) / n_samples as f64; - let line_pts: Vec<[f64; 2]> = (0..=n_samples) + // Interpolated region (solid cyan) — within curve bounds + let interp_step = (curve_x_max - curve_x_min) / n_samples as f64; + let interp_pts: Vec<[f64; 2]> = (0..=n_samples) .filter_map(|i| { - let x = x_min + i as f64 * step; - let y = match curve.predict(LibraryRT(x)) { - Ok(y) => y.0, - Err(calibrt::CalibRtError::OutOfBounds(y)) => y, - Err(_) => return None, - }; + let x = curve_x_min + i as f64 * interp_step; + let y = curve.predict(LibraryRT(x)).ok()?.0; Some([x, y]) }) .collect(); - if !line_pts.is_empty() { + if !interp_pts.is_empty() { plot_ui.line( Line::new( "fitted curve", - PlotPoints::new(line_pts), + PlotPoints::new(interp_pts), ) .color(egui::Color32::from_rgb(0, 220, 220)) .width(2.0), ); } + // Extrapolated regions (dashed red) — beyond curve bounds + // Clamp Y to grid range so extrapolation doesn't fly off + let (grid_y_min, grid_y_max) = cs.grid_y_range(); + let extrap_color = egui::Color32::from_rgb(255, 100, 100); + let extrap_predict = |x: f64| -> f64 { + let y = match curve.predict(LibraryRT(x)) { + Ok(y) => y.0, + Err(calibrt::CalibRtError::OutOfBounds(y)) => y, + Err(_) => 0.0, + }; + y.clamp(grid_y_min, grid_y_max) + }; + // Left extrapolation + if grid_x_min < curve_x_min { + let left_step = (curve_x_min - grid_x_min) / 50.0_f64; + let left_pts: Vec<[f64; 2]> = (0..=50) + .map(|i| { + let x = grid_x_min + i as f64 * left_step; + [x, extrap_predict(x)] + }) + .collect(); + plot_ui.line( + Line::new("extrapolation (left)", PlotPoints::new(left_pts)) + .color(extrap_color) + .width(1.5) + .style(egui_plot::LineStyle::dashed_dense()), + ); + } + 
// Right extrapolation + if grid_x_max > curve_x_max { + let right_step = (grid_x_max - curve_x_max) / 50.0_f64; + let right_pts: Vec<[f64; 2]> = (0..=50) + .map(|i| { + let x = curve_x_max + i as f64 * right_step; + [x, extrap_predict(x)] + }) + .collect(); + plot_ui.line( + Line::new("extrapolation (right)", PlotPoints::new(right_pts)) + .color(extrap_color) + .width(1.5) + .style(egui_plot::LineStyle::dashed_dense()), + ); + } + // Ridge envelope: upper and lower boundary lines showing tolerance width let ridge = cs.measure_ridge_width(0.1); if ridge.len() >= 2 { From 49eff9bc67208eb5b4530955dda2c790b5e3f7f6 Mon Sep 17 00:00:00 2001 From: "J. Sebastian Paez" Date: Fri, 10 Apr 2026 19:52:38 -0700 Subject: [PATCH 52/64] feat(rescore): per-fold progress and remove redundant scoring pass MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Print training fold progress and scoring time to stderr so users can tell the rescore phase isn't frozen. Uses Duration debug format for automatic unit selection. Also fix: score() was calling assign_scores() redundantly after fit() already assigned them — removed the double scoring pass. 
--- rust/timsseek/src/ml/cv.rs | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/rust/timsseek/src/ml/cv.rs b/rust/timsseek/src/ml/cv.rs index 155519b..a02a35a 100644 --- a/rust/timsseek/src/ml/cv.rs +++ b/rust/timsseek/src/ml/cv.rs @@ -404,7 +404,10 @@ impl CrossValidatedScorer { self.fold_classifiers.clear(); (0..self.n_folds).for_each(|_| self.fold_classifiers.push(None)); for fold in 0..self.n_folds { + eprint!(" Training fold {}/{} ...", fold + 1, self.n_folds); + let start = std::time::Instant::now(); self.fit_fold(fold, train_buffer, val_buffer)?; + eprintln!(" {:?}", start.elapsed()); } Ok(()) @@ -414,6 +417,8 @@ impl CrossValidatedScorer { let mut scores = vec![0.0; self.data.len()]; let mut buffer = DataBuffer::default(); + eprint!(" Scoring folds ..."); + let score_start = std::time::Instant::now(); for train_i in 0..self.n_folds { let early_stop_i = self.next_fold(train_i); @@ -438,6 +443,7 @@ impl CrossValidatedScorer { } } } + eprintln!(" {:?}", score_start.elapsed()); let div_factor = (self.n_folds - 2) as f64; scores.iter_mut().for_each(|x| { @@ -454,8 +460,7 @@ impl CrossValidatedScorer { } } - pub fn score(mut self) -> Vec { - self.assign_scores(); + pub fn score(self) -> Vec { self.data } From ed75232554376d7f8157bd2a724e090599bee21f Mon Sep 17 00:00:00 2001 From: "J. Sebastian Paez" Date: Fri, 10 Apr 2026 20:21:48 -0700 Subject: [PATCH 53/64] feat(calibrt): report in-ridge weight ratio as calibration quality metric Add column_weight to RidgeMeasurement (total weight in full column) alongside ridge_weight (weight inside ridge bounds). Compute in-ridge ratio in RidgeWidthSummary and display as percentage in both CLI calibration summary and viewer tolerance panel. 
--- rust/calibrt/src/lib.rs | 14 ++++++++++--- rust/timsquery_viewer/src/calibration.rs | 25 +++++++++++++++--------- rust/timsseek/src/rt_calibration.rs | 14 +++++++++++-- rust/timsseek_cli/src/processing.rs | 7 ++++--- 4 files changed, 43 insertions(+), 17 deletions(-) diff --git a/rust/calibrt/src/lib.rs b/rust/calibrt/src/lib.rs index 0cd3f66..7642006 100644 --- a/rust/calibrt/src/lib.rs +++ b/rust/calibrt/src/lib.rs @@ -160,8 +160,10 @@ pub struct RidgeMeasurement { pub library: LibraryRT, /// Half-width of the ridge in y-units (seconds). pub half_width: f64, - /// Total accumulated weight in the expanded range — more weight = more trustworthy. - pub total_weight: f64, + /// Weight inside the ridge bounds. + pub ridge_weight: f64, + /// Total weight in the full column (all rows at this x). + pub column_weight: f64, } /// Serializable snapshot of calibration data — points + config. @@ -339,12 +341,18 @@ impl CalibrationState { lower_gy = check_gy; } + // Sum all weights in this column for the in-ridge ratio + let column_weight: f64 = (0..bins) + .map(|row| self.grid.blurred_weight(row, gx)) + .sum(); + let half_width = ((upper_gy - lower_gy) as f64 + 1.0) * cell_h * 0.5; widths.push(RidgeMeasurement { library: LibraryRT(self.grid.grid_cells()[path_idx].center.library), half_width, - total_weight, + ridge_weight: total_weight, + column_weight, }); } diff --git a/rust/timsquery_viewer/src/calibration.rs b/rust/timsquery_viewer/src/calibration.rs index c504701..abf6b01 100644 --- a/rust/timsquery_viewer/src/calibration.rs +++ b/rust/timsquery_viewer/src/calibration.rs @@ -1020,13 +1020,13 @@ impl ViewerCalibrationState { } // Weighted average half-width (seconds) - let total_weight: f64 = measurements.iter().map(|m| m.total_weight).sum(); + let total_weight: f64 = measurements.iter().map(|m| m.ridge_weight).sum(); if total_weight <= 0.0 { return None; } let weighted_hw: f64 = measurements .iter() - .map(|m| m.half_width * m.total_weight) + .map(|m| m.half_width * 
m.ridge_weight) .sum::() / total_weight; @@ -1034,26 +1034,33 @@ impl ViewerCalibrationState { let min_hw = measurements.iter().map(|m| m.half_width).fold(f64::MAX, f64::min); let max_hw = measurements.iter().map(|m| m.half_width).fold(0.0f64, f64::max); - Some((weighted_hw, min_hw, max_hw, measurements.len())) + let total_column_weight: f64 = measurements.iter().map(|m| m.column_weight).sum(); + let in_ridge_pct = if total_column_weight > 0.0 { + total_weight / total_column_weight * 100.0 + } else { + 0.0 + }; + + Some((weighted_hw, min_hw, max_hw, measurements.len(), in_ridge_pct)) }); // Suggested RT tolerance from weighted ridge half-width, floored at 0.5 min. - let suggested = ridge_stats.map(|(hw_s, min_s, max_s, n_cols)| { + let suggested = ridge_stats.map(|(hw_s, min_s, max_s, n_cols, in_ridge_pct)| { let rt_min = (hw_s / 60.0).max(0.5); - (rt_min, hw_s, min_s, max_s, n_cols) + (rt_min, hw_s, min_s, max_s, n_cols, in_ridge_pct) }); - if let Some((rt_min, _, _, _, _)) = suggested { + if let Some((rt_min, _, _, _, _, _)) = suggested { self.derived_tolerances = Some(DerivedTolerances { rt_tolerance_minutes: rt_min as f32, }); } ui.horizontal(|ui| { - if let Some((rt_min, hw_s, min_s, max_s, n_cols)) = suggested { + if let Some((rt_min, hw_s, min_s, max_s, n_cols, in_ridge_pct)) = suggested { ui.label(format!( - "Suggested RT: \u{00B1}{:.2} min Ridge: {:.0} s (min {:.0}, max {:.0}) ({} cols)", - rt_min, hw_s, min_s, max_s, n_cols, + "Suggested RT: \u{00B1}{:.2} min Ridge: {:.0} s (min {:.0}, max {:.0}) ({} cols) {:.0}% in-ridge", + rt_min, hw_s, min_s, max_s, n_cols, in_ridge_pct, )); if ui.button("Apply").clicked() { let rt_tol = rt_min as f32; diff --git a/rust/timsseek/src/rt_calibration.rs b/rust/timsseek/src/rt_calibration.rs index 26e392c..b64e5a7 100644 --- a/rust/timsseek/src/rt_calibration.rs +++ b/rust/timsseek/src/rt_calibration.rs @@ -137,17 +137,24 @@ impl CalibrationResult { if self.ridge_widths.is_empty() { return None; } - let total_weight: f64 
= self.ridge_widths.iter().map(|m| m.total_weight).sum(); + let total_weight: f64 = self.ridge_widths.iter().map(|m| m.ridge_weight).sum(); let weighted_avg = self.ridge_widths.iter() - .map(|m| m.half_width * m.total_weight) + .map(|m| m.half_width * m.ridge_weight) .sum::() / total_weight.max(1.0); let min = self.ridge_widths.iter().map(|m| m.half_width).fold(f64::MAX, f64::min); let max = self.ridge_widths.iter().map(|m| m.half_width).fold(0.0f64, f64::max); + let total_column_weight: f64 = self.ridge_widths.iter().map(|m| m.column_weight).sum(); + let in_ridge_ratio = if total_column_weight > 0.0 { + total_weight / total_column_weight + } else { + 0.0 + }; Some(RidgeWidthSummary { weighted_avg, min, max, n_columns: self.ridge_widths.len(), + in_ridge_ratio, }) } @@ -268,6 +275,9 @@ pub struct RidgeWidthSummary { pub min: f64, pub max: f64, pub n_columns: usize, + /// Fraction of total column weight that falls inside the ridge bounds (0.0–1.0). + /// Higher = better fit between spectral library and raw file. + pub in_ridge_ratio: f64, } /// JSON v1 calibration file format — shared between CLI and viewer. 
diff --git a/rust/timsseek_cli/src/processing.rs b/rust/timsseek_cli/src/processing.rs index 1d97573..f7431a9 100644 --- a/rust/timsseek_cli/src/processing.rs +++ b/rust/timsseek_cli/src/processing.rs @@ -224,8 +224,9 @@ pub fn execute_pipeline( // Print tolerance summary if let Some(summary) = calibration.ridge_width_summary() { println!( - " RT tolerance (ridge): avg {:.0}s, min {:.0}s, max {:.0}s ({} cols)", + " RT tolerance (ridge): avg {:.0}s, min {:.0}s, max {:.0}s ({} cols, {:.0}% in-ridge)", summary.weighted_avg, summary.min, summary.max, summary.n_columns, + summary.in_ridge_ratio * 100.0, ); } println!( @@ -512,9 +513,9 @@ fn calibrate_from_phase1( // Measure ridge width for position-dependent RT tolerance let ridge_widths = cal_state.measure_ridge_width(0.1); if !ridge_widths.is_empty() { - let total_weight: f64 = ridge_widths.iter().map(|m| m.total_weight).sum(); + let total_weight: f64 = ridge_widths.iter().map(|m| m.ridge_weight).sum(); let weighted_hw: f64 = ridge_widths.iter() - .map(|m| m.half_width * m.total_weight) + .map(|m| m.half_width * m.ridge_weight) .sum::() / total_weight.max(1.0); info!( "Ridge width: weighted avg {:.1}s across {} columns (min {:.1}s, max {:.1}s)", From 446793a18b2b0b54508081de37b11b98a6699e37 Mon Sep 17 00:00:00 2001 From: "J. 
Sebastian Paez" Date: Sat, 11 Apr 2026 14:03:37 -0700 Subject: [PATCH 54/64] chore: update deps --- Cargo.lock | 1735 ++++++++++++++++++++++++++++------------------------ 1 file changed, 922 insertions(+), 813 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index eefe347..aabacde 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -165,23 +165,21 @@ checksum = "683d7910e743518b0e34f1186f92494becacb047c7b6bf616c96772180fef923" [[package]] name = "android-activity" -version = "0.6.0" +version = "0.6.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ef6978589202a00cd7e118380c448a08b6ed394c3a8df3a430d0898e3a42d046" +checksum = "0f2a1bb052857d5dd49572219344a7332b31b76405648eabac5bc68978251bcd" dependencies = [ "android-properties", - "bitflags 2.10.0", + "bitflags 2.11.0", "cc", - "cesu8", "jni", - "jni-sys", "libc", "log", "ndk", "ndk-context", "ndk-sys", "num_enum", - "thiserror 1.0.69", + "thiserror 2.0.18", ] [[package]] @@ -201,9 +199,9 @@ dependencies = [ [[package]] name = "anstream" -version = "0.6.21" +version = "1.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "43d5b281e737544384e969a5ccad3f1cdd24b48086a0fc1b2a5262a26b8f4f4a" +checksum = "824a212faf96e9acacdbd09febd34438f8f711fb84e09a8916013cd7815ca28d" dependencies = [ "anstyle", "anstyle-parse", @@ -216,15 +214,15 @@ dependencies = [ [[package]] name = "anstyle" -version = "1.0.13" +version = "1.0.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5192cca8006f1fd4f7237516f40fa183bb07f8fbdfedaa0036de5ea9b0b45e78" +checksum = "940b3a0ca603d1eade50a4846a2afffd5ef57a9feac2c0e2ec2e14f9ead76000" [[package]] name = "anstyle-parse" -version = "0.2.7" +version = "1.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4e7644824f0aa2c7b9384579234ef10eb7efb6a0deb83f9630a49594dd9c15c2" +checksum = "52ce7f38b242319f7cabaa6813055467063ecdc9d355bbb4ce0c68908cd8130e" dependencies = [ "utf8parse", ] @@ 
-249,6 +247,12 @@ dependencies = [ "windows-sys 0.61.2", ] +[[package]] +name = "anyhow" +version = "1.0.102" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7f202df86484c868dbad7eaa557ef785d5c66295e41b460ef922eca0723b842c" + [[package]] name = "approx" version = "0.5.1" @@ -267,7 +271,7 @@ dependencies = [ "clipboard-win", "image", "log", - "objc2 0.6.3", + "objc2 0.6.4", "objc2-app-kit 0.3.2", "objc2-core-foundation", "objc2-core-graphics", @@ -299,9 +303,9 @@ checksum = "7c02d123df017efcdfbd739ef81735b36c5ba83ec3c59c80a9d7ecc718f92e50" [[package]] name = "arrow" -version = "57.1.0" +version = "57.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cb372a7cbcac02a35d3fb7b3fc1f969ec078e871f9bb899bf00a2e1809bec8a3" +checksum = "e4754a624e5ae42081f464514be454b39711daae0458906dacde5f4c632f33a8" dependencies = [ "arrow-arith", "arrow-array", @@ -320,9 +324,9 @@ dependencies = [ [[package]] name = "arrow-arith" -version = "57.1.0" +version = "57.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0f377dcd19e440174596d83deb49cd724886d91060c07fec4f67014ef9d54049" +checksum = "f7b3141e0ec5145a22d8694ea8b6d6f69305971c4fa1c1a13ef0195aef2d678b" dependencies = [ "arrow-array", "arrow-buffer", @@ -334,9 +338,9 @@ dependencies = [ [[package]] name = "arrow-array" -version = "57.1.0" +version = "57.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a23eaff85a44e9fa914660fb0d0bb00b79c4a3d888b5334adb3ea4330c84f002" +checksum = "4c8955af33b25f3b175ee10af580577280b4bd01f7e823d94c7cdef7cf8c9aef" dependencies = [ "ahash", "arrow-buffer", @@ -352,9 +356,9 @@ dependencies = [ [[package]] name = "arrow-buffer" -version = "57.1.0" +version = "57.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a2819d893750cb3380ab31ebdc8c68874dd4429f90fd09180f3c93538bd21626" +checksum = 
"c697ddca96183182f35b3a18e50b9110b11e916d7b7799cbfd4d34662f2c56c2" dependencies = [ "bytes", "half", @@ -364,9 +368,9 @@ dependencies = [ [[package]] name = "arrow-cast" -version = "57.1.0" +version = "57.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e3d131abb183f80c450d4591dc784f8d7750c50c6e2bc3fcaad148afc8361271" +checksum = "646bbb821e86fd57189c10b4fcdaa941deaf4181924917b0daa92735baa6ada5" dependencies = [ "arrow-array", "arrow-buffer", @@ -385,9 +389,9 @@ dependencies = [ [[package]] name = "arrow-csv" -version = "57.1.0" +version = "57.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2275877a0e5e7e7c76954669366c2aa1a829e340ab1f612e647507860906fb6b" +checksum = "8da746f4180004e3ce7b83c977daf6394d768332349d3d913998b10a120b790a" dependencies = [ "arrow-array", "arrow-cast", @@ -400,9 +404,9 @@ dependencies = [ [[package]] name = "arrow-data" -version = "57.1.0" +version = "57.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "05738f3d42cb922b9096f7786f606fcb8669260c2640df8490533bb2fa38c9d3" +checksum = "1fdd994a9d28e6365aa78e15da3f3950c0fdcea6b963a12fa1c391afb637b304" dependencies = [ "arrow-buffer", "arrow-schema", @@ -413,9 +417,9 @@ dependencies = [ [[package]] name = "arrow-ipc" -version = "57.1.0" +version = "57.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3d09446e8076c4b3f235603d9ea7c5494e73d441b01cd61fb33d7254c11964b3" +checksum = "abf7df950701ab528bf7c0cf7eeadc0445d03ef5d6ffc151eaae6b38a58feff1" dependencies = [ "arrow-array", "arrow-buffer", @@ -427,9 +431,9 @@ dependencies = [ [[package]] name = "arrow-json" -version = "57.1.0" +version = "57.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "371ffd66fa77f71d7628c63f209c9ca5341081051aa32f9c8020feb0def787c0" +checksum = "0ff8357658bedc49792b13e2e862b80df908171275f8e6e075c460da5ee4bf86" dependencies = [ "arrow-array", "arrow-buffer", 
@@ -451,9 +455,9 @@ dependencies = [ [[package]] name = "arrow-ord" -version = "57.1.0" +version = "57.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cbc94fc7adec5d1ba9e8cd1b1e8d6f72423b33fe978bf1f46d970fafab787521" +checksum = "f7d8f1870e03d4cbed632959498bcc84083b5a24bded52905ae1695bd29da45b" dependencies = [ "arrow-array", "arrow-buffer", @@ -464,9 +468,9 @@ dependencies = [ [[package]] name = "arrow-row" -version = "57.1.0" +version = "57.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "169676f317157dc079cc5def6354d16db63d8861d61046d2f3883268ced6f99f" +checksum = "18228633bad92bff92a95746bbeb16e5fc318e8382b75619dec26db79e4de4c0" dependencies = [ "arrow-array", "arrow-buffer", @@ -477,15 +481,15 @@ dependencies = [ [[package]] name = "arrow-schema" -version = "57.1.0" +version = "57.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d27609cd7dd45f006abae27995c2729ef6f4b9361cde1ddd019dc31a5aa017e0" +checksum = "8c872d36b7bf2a6a6a2b40de9156265f0242910791db366a2c17476ba8330d68" [[package]] name = "arrow-select" -version = "57.1.0" +version = "57.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ae980d021879ea119dd6e2a13912d81e64abed372d53163e804dfe84639d8010" +checksum = "68bf3e3efbd1278f770d67e5dc410257300b161b93baedb3aae836144edcaf4b" dependencies = [ "ahash", "arrow-array", @@ -497,9 +501,9 @@ dependencies = [ [[package]] name = "arrow-string" -version = "57.1.0" +version = "57.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cf35e8ef49dcf0c5f6d175edee6b8af7b45611805333129c541a8b89a0fc0534" +checksum = "85e968097061b3c0e9fe3079cf2e703e487890700546b5b0647f60fca1b5a8d8" dependencies = [ "arrow-array", "arrow-buffer", @@ -541,16 +545,16 @@ dependencies = [ [[package]] name = "ashpd" -version = "0.11.0" +version = "0.11.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"6cbdf310d77fd3aaee6ea2093db7011dc2d35d2eb3481e5607f1f8d942ed99df" +checksum = "d2f3f79755c74fd155000314eb349864caa787c6592eace6c6882dad873d9c39" dependencies = [ "async-fs", "async-net", "enumflags2", "futures-channel", "futures-util", - "rand 0.9.2", + "rand 0.9.3", "raw-window-handle", "serde", "serde_repr", @@ -587,9 +591,9 @@ dependencies = [ [[package]] name = "async-executor" -version = "1.13.3" +version = "1.14.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "497c00e0fd83a72a79a39fcbd8e3e2f055d6f6c7e025f3b3d91f4f8e76527fb8" +checksum = "c96bf972d85afc50bf5ab8fe2d54d1586b4e0b46c97c50a0c9e71e2f7bcd812a" dependencies = [ "async-task", "concurrent-queue", @@ -623,7 +627,7 @@ dependencies = [ "futures-lite", "parking", "polling", - "rustix 1.1.3", + "rustix 1.1.4", "slab", "windows-sys 0.61.2", ] @@ -665,7 +669,7 @@ dependencies = [ "cfg-if", "event-listener", "futures-lite", - "rustix 1.1.3", + "rustix 1.1.4", ] [[package]] @@ -676,14 +680,14 @@ checksum = "3b43422f69d8ff38f95f1b2bb76517c91589a924d1559a0e935d7c8ce0274c11" dependencies = [ "proc-macro2", "quote", - "syn 2.0.111", + "syn 2.0.117", ] [[package]] name = "async-signal" -version = "0.2.13" +version = "0.2.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "43c070bbf59cd3570b6b2dd54cd772527c7c3620fce8be898406dd3ed6adc64c" +checksum = "52b5aaafa020cf5053a01f2a60e8ff5dccf550f0f77ec54a4e47285ac2bab485" dependencies = [ "async-io", "async-lock", @@ -691,7 +695,7 @@ dependencies = [ "cfg-if", "futures-core", "futures-io", - "rustix 1.1.3", + "rustix 1.1.4", "signal-hook-registry", "slab", "windows-sys 0.61.2", @@ -711,7 +715,7 @@ checksum = "9035ad2d096bed7955a320ee7e2230574d28fd3c3a0f186cbea1ff3c7eed5dbb" dependencies = [ "proc-macro2", "quote", - "syn 2.0.111", + "syn 2.0.117", ] [[package]] @@ -796,9 +800,9 @@ checksum = "c08606f8c3cbf4ce6ec8e28fb0014a2c086708fe954eaa885384a6165172e7e8" [[package]] name = "aws-config" -version = "1.8.12" 
+version = "1.8.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "96571e6996817bf3d58f6b569e4b9fd2e9d2fcf9f7424eed07b2ce9bb87535e5" +checksum = "11493b0bad143270fb8ad284a096dd529ba91924c5409adeac856cc1bf047dbc" dependencies = [ "aws-credential-types", "aws-runtime", @@ -816,7 +820,7 @@ dependencies = [ "fastrand", "hex", "http 1.4.0", - "ring", + "sha1", "time", "tokio", "tracing", @@ -826,9 +830,9 @@ dependencies = [ [[package]] name = "aws-credential-types" -version = "1.2.11" +version = "1.2.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3cd362783681b15d136480ad555a099e82ecd8e2d10a841e14dfd0078d67fee3" +checksum = "8f20799b373a1be121fe3005fba0c2090af9411573878f224df44b42727fcaf7" dependencies = [ "aws-smithy-async", "aws-smithy-runtime-api", @@ -838,9 +842,9 @@ dependencies = [ [[package]] name = "aws-lc-rs" -version = "1.15.2" +version = "1.16.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6a88aab2464f1f25453baa7a07c84c5b7684e274054ba06817f382357f77a288" +checksum = "a054912289d18629dc78375ba2c3726a3afe3ff71b4edba9dedfca0e3446d1fc" dependencies = [ "aws-lc-sys", "zeroize", @@ -848,9 +852,9 @@ dependencies = [ [[package]] name = "aws-lc-sys" -version = "0.35.0" +version = "0.39.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b45afffdee1e7c9126814751f88dddc747f41d91da16c9551a0f1e8a11e788a1" +checksum = "83a25cf98105baa966497416dbd42565ce3a8cf8dbfd59803ec9ad46f3126399" dependencies = [ "cc", "cmake", @@ -860,9 +864,9 @@ dependencies = [ [[package]] name = "aws-runtime" -version = "1.5.17" +version = "1.7.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d81b5b2898f6798ad58f484856768bca817e3cd9de0974c24ae0f1113fe88f1b" +checksum = "5fc0651c57e384202e47153c1260b84a9936e19803d747615edf199dc3b98d17" dependencies = [ "aws-credential-types", "aws-sigv4", @@ -873,9 +877,10 @@ dependencies = [ 
"aws-smithy-types", "aws-types", "bytes", + "bytes-utils", "fastrand", - "http 0.2.12", - "http-body 0.4.6", + "http 1.4.0", + "http-body 1.0.1", "percent-encoding", "pin-project-lite", "tracing", @@ -884,15 +889,16 @@ dependencies = [ [[package]] name = "aws-sdk-sso" -version = "1.91.0" +version = "1.97.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8ee6402a36f27b52fe67661c6732d684b2635152b676aa2babbfb5204f99115d" +checksum = "9aadc669e184501caaa6beafb28c6267fc1baef0810fb58f9b205485ca3f2567" dependencies = [ "aws-credential-types", "aws-runtime", "aws-smithy-async", "aws-smithy-http", "aws-smithy-json", + "aws-smithy-observability", "aws-smithy-runtime", "aws-smithy-runtime-api", "aws-smithy-types", @@ -900,21 +906,23 @@ dependencies = [ "bytes", "fastrand", "http 0.2.12", + "http 1.4.0", "regex-lite", "tracing", ] [[package]] name = "aws-sdk-ssooidc" -version = "1.93.0" +version = "1.99.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a45a7f750bbd170ee3677671ad782d90b894548f4e4ae168302c57ec9de5cb3e" +checksum = "1342a7db8f358d3de0aed2007a0b54e875458e39848d54cc1d46700b2bfcb0a8" dependencies = [ "aws-credential-types", "aws-runtime", "aws-smithy-async", "aws-smithy-http", "aws-smithy-json", + "aws-smithy-observability", "aws-smithy-runtime", "aws-smithy-runtime-api", "aws-smithy-types", @@ -922,21 +930,23 @@ dependencies = [ "bytes", "fastrand", "http 0.2.12", + "http 1.4.0", "regex-lite", "tracing", ] [[package]] name = "aws-sdk-sts" -version = "1.95.0" +version = "1.101.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "55542378e419558e6b1f398ca70adb0b2088077e79ad9f14eb09441f2f7b2164" +checksum = "ab41ad64e4051ecabeea802d6a17845a91e83287e1dd249e6963ea1ba78c428a" dependencies = [ "aws-credential-types", "aws-runtime", "aws-smithy-async", "aws-smithy-http", "aws-smithy-json", + "aws-smithy-observability", "aws-smithy-query", "aws-smithy-runtime", 
"aws-smithy-runtime-api", @@ -945,15 +955,16 @@ dependencies = [ "aws-types", "fastrand", "http 0.2.12", + "http 1.4.0", "regex-lite", "tracing", ] [[package]] name = "aws-sigv4" -version = "1.3.7" +version = "1.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "69e523e1c4e8e7e8ff219d732988e22bfeae8a1cafdbe6d9eca1546fa080be7c" +checksum = "b0b660013a6683ab23797778e21f1f854744fdf05f68204b4cca4c8c04b5d1f4" dependencies = [ "aws-credential-types", "aws-smithy-http", @@ -973,9 +984,9 @@ dependencies = [ [[package]] name = "aws-smithy-async" -version = "1.2.7" +version = "1.2.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9ee19095c7c4dda59f1697d028ce704c24b2d33c6718790c7f1d5a3015b4107c" +checksum = "2ffcaf626bdda484571968400c326a244598634dc75fd451325a54ad1a59acfc" dependencies = [ "futures-util", "pin-project-lite", @@ -984,9 +995,9 @@ dependencies = [ [[package]] name = "aws-smithy-http" -version = "0.62.6" +version = "0.63.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "826141069295752372f8203c17f28e30c464d22899a43a0c9fd9c458d469c88b" +checksum = "ba1ab2dc1c2c3749ead27180d333c42f11be8b0e934058fb4b2258ee8dbe5231" dependencies = [ "aws-smithy-runtime-api", "aws-smithy-types", @@ -994,9 +1005,9 @@ dependencies = [ "bytes-utils", "futures-core", "futures-util", - "http 0.2.12", "http 1.4.0", - "http-body 0.4.6", + "http-body 1.0.1", + "http-body-util", "percent-encoding", "pin-project-lite", "pin-utils", @@ -1005,9 +1016,9 @@ dependencies = [ [[package]] name = "aws-smithy-http-client" -version = "1.1.5" +version = "1.1.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "59e62db736db19c488966c8d787f52e6270be565727236fd5579eaa301e7bc4a" +checksum = "6a2f165a7feee6f263028b899d0a181987f4fa7179a6411a32a439fba7c5f769" dependencies = [ "aws-smithy-async", "aws-smithy-runtime-api", @@ -1029,27 +1040,27 @@ dependencies = [ [[package]] name = 
"aws-smithy-json" -version = "0.61.9" +version = "0.62.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "49fa1213db31ac95288d981476f78d05d9cbb0353d22cdf3472cc05bb02f6551" +checksum = "9648b0bb82a2eedd844052c6ad2a1a822d1f8e3adee5fbf668366717e428856a" dependencies = [ "aws-smithy-types", ] [[package]] name = "aws-smithy-observability" -version = "0.1.5" +version = "0.2.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "17f616c3f2260612fe44cede278bafa18e73e6479c4e393e2c4518cf2a9a228a" +checksum = "a06c2315d173edbf1920da8ba3a7189695827002e4c0fc961973ab1c54abca9c" dependencies = [ "aws-smithy-runtime-api", ] [[package]] name = "aws-smithy-query" -version = "0.60.9" +version = "0.60.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ae5d689cf437eae90460e944a58b5668530d433b4ff85789e69d2f2a556e057d" +checksum = "1a56d79744fb3edb5d722ef79d86081e121d3b9422cb209eb03aea6aa4f21ebd" dependencies = [ "aws-smithy-types", "urlencoding", @@ -1057,9 +1068,9 @@ dependencies = [ [[package]] name = "aws-smithy-runtime" -version = "1.9.5" +version = "1.10.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a392db6c583ea4a912538afb86b7be7c5d8887d91604f50eb55c262ee1b4a5f5" +checksum = "028999056d2d2fd58a697232f9eec4a643cf73a71cf327690a7edad1d2af2110" dependencies = [ "aws-smithy-async", "aws-smithy-http", @@ -1073,6 +1084,7 @@ dependencies = [ "http 1.4.0", "http-body 0.4.6", "http-body 1.0.1", + "http-body-util", "pin-project-lite", "pin-utils", "tokio", @@ -1081,9 +1093,9 @@ dependencies = [ [[package]] name = "aws-smithy-runtime-api" -version = "1.9.3" +version = "1.11.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ab0d43d899f9e508300e587bf582ba54c27a452dd0a9ea294690669138ae14a2" +checksum = "876ab3c9c29791ba4ba02b780a3049e21ec63dabda09268b175272c3733a79e6" dependencies = [ "aws-smithy-async", "aws-smithy-types", @@ -1098,9 +1110,9 @@ 
dependencies = [ [[package]] name = "aws-smithy-types" -version = "1.3.5" +version = "1.4.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "905cb13a9895626d49cf2ced759b062d913834c7482c38e49557eac4e6193f01" +checksum = "9d73dbfbaa8e4bc57b9045137680b958d274823509a360abfd8e1d514d40c95c" dependencies = [ "base64-simd", "bytes", @@ -1121,18 +1133,18 @@ dependencies = [ [[package]] name = "aws-smithy-xml" -version = "0.60.13" +version = "0.60.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "11b2f670422ff42bf7065031e72b45bc52a3508bd089f743ea90731ca2b6ea57" +checksum = "0ce02add1aa3677d022f8adf81dcbe3046a95f17a1b1e8979c145cd21d3d22b3" dependencies = [ "xmlparser", ] [[package]] name = "aws-types" -version = "1.3.11" +version = "1.3.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1d980627d2dd7bfc32a3c025685a033eeab8d365cc840c631ef59d1b8f428164" +checksum = "47c8323699dd9b3c8d5b3c13051ae9cdef58fd179957c882f8374dd8725962d9" dependencies = [ "aws-credential-types", "aws-smithy-async", @@ -1210,9 +1222,9 @@ checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a" [[package]] name = "bitflags" -version = "2.10.0" +version = "2.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "812e12b5285cc515a9c72a5c1d3b6d46a19dac5acfef5265968c166106e31dd3" +checksum = "843867be96c8daad0d758b57df9392b6d8d271134fce549de6ce169ff98a92af" dependencies = [ "serde_core", ] @@ -1247,7 +1259,7 @@ version = "0.6.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "cdeb9d870516001442e364c5220d3574d2da8dc765554b4a617230d33fa58ef5" dependencies = [ - "objc2 0.6.3", + "objc2 0.6.4", ] [[package]] @@ -1265,9 +1277,9 @@ dependencies = [ [[package]] name = "bon" -version = "3.8.1" +version = "3.9.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"ebeb9aaf9329dff6ceb65c689ca3db33dbf15f324909c60e4e5eef5701ce31b1" +checksum = "f47dbe92550676ee653353c310dfb9cf6ba17ee70396e1f7cf0a2020ad49b2fe" dependencies = [ "bon-macros", "rustversion", @@ -1275,9 +1287,9 @@ dependencies = [ [[package]] name = "bon-macros" -version = "3.8.1" +version = "3.9.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "77e9d642a7e3a318e37c2c9427b5a6a48aa1ad55dcd986f3034ab2239045a645" +checksum = "519bd3116aeeb42d5372c29d982d16d0170d3d4a5ed85fc7dd91642ffff3c67c" dependencies = [ "darling", "ident_case", @@ -1285,7 +1297,7 @@ dependencies = [ "proc-macro2", "quote", "rustversion", - "syn 2.0.111", + "syn 2.0.117", ] [[package]] @@ -1311,15 +1323,15 @@ dependencies = [ [[package]] name = "bumpalo" -version = "3.19.1" +version = "3.20.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5dd9dc738b7a8311c7ade152424974d8115f2cdad61e8dab8dac9f2362298510" +checksum = "5d20789868f4b01b2f2caec9f5c4e0213b41e3e5702a50157d699ae31ced2fcb" [[package]] name = "bytemuck" -version = "1.24.0" +version = "1.25.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1fbdf580320f38b612e485521afda1ee26d10cc9884efaaa750d383e13e3c5f4" +checksum = "c8efb64bd706a16a1bdde310ae86b351e4d21550d98d056f22f8a7f7a2183fec" dependencies = [ "bytemuck_derive", ] @@ -1332,7 +1344,7 @@ checksum = "f9abbd1bc6865053c427f7198e6af43bfdedc55ab791faed4fbd361d789575ff" dependencies = [ "proc-macro2", "quote", - "syn 2.0.111", + "syn 2.0.117", ] [[package]] @@ -1349,9 +1361,9 @@ checksum = "8f1fe948ff07f4bd06c30984e69f5b4899c516a3ef74f34df92a2df2ab535495" [[package]] name = "bytes" -version = "1.11.0" +version = "1.11.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b35204fbdc0b3f4446b89fc1ac2cf84a8a68971995d0bf2e925ec7cd960f9cb3" +checksum = "1e748733b7cbc798e1434b6ac524f0c1ff2ab456fe201501e6497c8417a4fc33" [[package]] name = "bytes-utils" @@ -1382,7 +1394,7 @@ version = 
"0.13.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b99da2f8558ca23c71f4fd15dc57c906239752dd27ff3c00a1d56b685b7cbfec" dependencies = [ - "bitflags 2.10.0", + "bitflags 2.11.0", "log", "polling", "rustix 0.38.44", @@ -1392,13 +1404,13 @@ dependencies = [ [[package]] name = "calloop" -version = "0.14.3" +version = "0.14.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cb9f6e1368bd4621d2c86baa7e37de77a938adf5221e5dd3d6133340101b309e" +checksum = "4dbf9978365bac10f54d1d4b04f7ce4427e51f71d61f2fe15e3fed5166474df7" dependencies = [ - "bitflags 2.10.0", + "bitflags 2.11.0", "polling", - "rustix 1.1.3", + "rustix 1.1.4", "slab", "tracing", ] @@ -1421,17 +1433,17 @@ version = "0.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "138efcf0940a02ebf0cc8d1eff41a1682a46b431630f4c52450d6265876021fa" dependencies = [ - "calloop 0.14.3", - "rustix 1.1.3", + "calloop 0.14.4", + "rustix 1.1.4", "wayland-backend", "wayland-client", ] [[package]] name = "cc" -version = "1.2.50" +version = "1.2.60" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9f50d563227a1c37cc0a263f64eca3334388c01c5e4c4861a9def205c614383c" +checksum = "43c5703da9466b66a946814e1adf53ea2c90f10063b86290cc9eb67ce3478a20" dependencies = [ "find-msvc-tools", "jobserver", @@ -1439,12 +1451,6 @@ dependencies = [ "shlex", ] -[[package]] -name = "cesu8" -version = "1.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6d43a04d8753f35258c91f8ec639f792891f748a1edbd759cf1dcea3382ad83c" - [[package]] name = "cfg-if" version = "1.0.4" @@ -1468,9 +1474,9 @@ dependencies = [ [[package]] name = "chrono" -version = "0.4.42" +version = "0.4.44" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "145052bdd345b87320e369255277e3fb5152762ad123a901ef5c262dd38fe8d2" +checksum = "c673075a2e0e5f4a1dde27ce9dee1ea4558c7ffe648f576438a20ca1d2acc4b0" dependencies = [ 
"iana-time-zone", "js-sys", @@ -1482,9 +1488,9 @@ dependencies = [ [[package]] name = "clap" -version = "4.5.53" +version = "4.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c9e340e012a1bf4935f5282ed1436d1489548e8f72308207ea5df0e23d2d03f8" +checksum = "b193af5b67834b676abd72466a96c1024e6a6ad978a1f484bd90b85c94041351" dependencies = [ "clap_builder", "clap_derive", @@ -1492,9 +1498,9 @@ dependencies = [ [[package]] name = "clap_builder" -version = "4.5.53" +version = "4.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d76b5d13eaa18c901fd2f7fca939fefe3a0727a953561fefdf3b2922b8569d00" +checksum = "714a53001bf66416adb0e2ef5ac857140e7dc3a0c48fb28b2f10762fc4b5069f" dependencies = [ "anstream", "anstyle", @@ -1504,21 +1510,21 @@ dependencies = [ [[package]] name = "clap_derive" -version = "4.5.49" +version = "4.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2a0b5487afeab2deb2ff4e03a807ad1a03ac532ff5a2cee5d86884440c7f7671" +checksum = "1110bd8a634a1ab8cb04345d8d878267d57c3cf1b38d91b71af6686408bbca6a" dependencies = [ "heck", "proc-macro2", "quote", - "syn 2.0.111", + "syn 2.0.117", ] [[package]] name = "clap_lex" -version = "0.7.6" +version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a1d728cc89cf3aee9ff92b05e62b19ee65a02b5702cff7d5a377e32c6ae29d8d" +checksum = "c8d4a3bb8b1e0c1050499d1815f5ab16d04f0959b233085fb31653fbfc9d98f9" [[package]] name = "clipboard-win" @@ -1531,9 +1537,9 @@ dependencies = [ [[package]] name = "cmake" -version = "0.1.57" +version = "0.1.58" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "75443c44cd6b379beb8c5b45d85d0773baf31cce901fe7bb252f4eff3008ef7d" +checksum = "c0f78a02292a74a88ac736019ab962ece0bc380e3f977bf72e376c5d78ff0678" dependencies = [ "cc", ] @@ -1551,9 +1557,9 @@ dependencies = [ [[package]] name = "colorchoice" -version = "1.0.4" +version = "1.0.5" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "b05b61dc5112cbb17e4b6cd61790d9845d13888356391624cbe7e41efeac1e75" +checksum = "1d07550c9036bf2ae0c684c4297d503f838287c83c53686d05370d0e139ae570" [[package]] name = "combine" @@ -1576,25 +1582,12 @@ dependencies = [ [[package]] name = "console" -version = "0.15.11" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "054ccb5b10f9f2cbf51eb355ca1d05c2d279ce1804688d0db74b4733a5aeafd8" -dependencies = [ - "encode_unicode", - "libc", - "once_cell", - "windows-sys 0.59.0", -] - -[[package]] -name = "console" -version = "0.16.2" +version = "0.16.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "03e45a4a8926227e4197636ba97a9fc9b00477e9f4bd711395687c5f0734bec4" +checksum = "d64e8af5551369d19cf50138de61f1c42074ab970f74e99be916646777f8fc87" dependencies = [ "encode_unicode", "libc", - "once_cell", "unicode-width", "windows-sys 0.61.2", ] @@ -1614,7 +1607,7 @@ version = "0.1.16" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f9d839f2a20b0aee515dc581a6172f2321f96cab76c1a38a4c584a194955390e" dependencies = [ - "getrandom 0.2.16", + "getrandom 0.2.17", "once_cell", "tiny-keccak", ] @@ -1684,7 +1677,7 @@ version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3d44a101f213f6c4cdc1853d4b78aef6db6bdfa3468798cc1d9912f4735013eb" dependencies = [ - "bitflags 2.10.0", + "bitflags 2.11.0", "core-foundation 0.10.1", "libc", ] @@ -1783,9 +1776,9 @@ checksum = "f27ae1dd37df86211c42e150270f82743308803d90a6f6e6651cd730d5e1732f" [[package]] name = "darling" -version = "0.21.3" +version = "0.23.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9cdf337090841a411e2a7f3deb9187445851f91b309c0c0a29e05f74a00a48c0" +checksum = "25ae13da2f202d56bd7f91c25fba009e7717a1e4a1cc98a76d844b65ae912e9d" dependencies = [ "darling_core", "darling_macro", @@ -1793,34 +1786,33 @@ dependencies = [ 
[[package]] name = "darling_core" -version = "0.21.3" +version = "0.23.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1247195ecd7e3c85f83c8d2a366e4210d588e802133e1e355180a9870b517ea4" +checksum = "9865a50f7c335f53564bb694ef660825eb8610e0a53d3e11bf1b0d3df31e03b0" dependencies = [ - "fnv", "ident_case", "proc-macro2", "quote", "strsim", - "syn 2.0.111", + "syn 2.0.117", ] [[package]] name = "darling_macro" -version = "0.21.3" +version = "0.23.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d38308df82d1080de0afee5d069fa14b0326a88c14f15c5ccda35b4a6c414c81" +checksum = "ac3984ec7bd6cfa798e62b4a642426a5be0e68f9401cfc2a01e3fa9ea2fcdb8d" dependencies = [ "darling_core", "quote", - "syn 2.0.111", + "syn 2.0.117", ] [[package]] name = "deranged" -version = "0.5.5" +version = "0.5.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ececcb659e7ba858fb4f10388c250a7252eb0a27373f1a72b8748afdd248e587" +checksum = "7cd812cc2bc1d69d4764bd80df88b4317eaef9e773c75226407d9bc0876b211c" dependencies = [ "powerfmt", ] @@ -1844,14 +1836,14 @@ checksum = "bd0c93bb4b0c6d9b77f4435b0ae98c24d17f1c45b2ff844c6151a07256ca923b" [[package]] name = "dispatch2" -version = "0.3.0" +version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "89a09f22a6c6069a18470eb92d2298acf25463f14256d24778e1230d789a2aec" +checksum = "1e0e367e4e7da84520dedcac1901e4da967309406d1e51017ae1abfb97adbd38" dependencies = [ - "bitflags 2.10.0", + "bitflags 2.11.0", "block2 0.6.2", "libc", - "objc2 0.6.3", + "objc2 0.6.4", ] [[package]] @@ -1873,14 +1865,14 @@ checksum = "97369cbbc041bc366949bc74d34658d6cda5621039731c6310521892a3a20ae0" dependencies = [ "proc-macro2", "quote", - "syn 2.0.111", + "syn 2.0.117", ] [[package]] name = "dlib" -version = "0.5.2" +version = "0.5.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"330c60081dcc4c72131f8eb70510f1ac07223e5d4163db481a04a0befcffa412" +checksum = "ab8ecd87370524b461f8557c119c405552c396ed91fc0a8eec68679eab26f94a" dependencies = [ "libloading", ] @@ -1990,7 +1982,7 @@ checksum = "6a9b567d356674e9a5121ed3fedfb0a7c31e059fe71f6972b691bcd0bfc284e3" dependencies = [ "accesskit", "ahash", - "bitflags 2.10.0", + "bitflags 2.11.0", "emath", "epaint", "log", @@ -2015,7 +2007,7 @@ dependencies = [ "epaint", "log", "profiling", - "thiserror 2.0.17", + "thiserror 2.0.18", "type-map", "web-time", "wgpu", @@ -2091,9 +2083,9 @@ dependencies = [ [[package]] name = "egui_plot" -version = "0.34.0" +version = "0.34.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "33233ffc010fd450381805bbbebecbbb82f077de7712ddc439f0b20effd42db7" +checksum = "67fc9b427a837264e55381a5cade6e28fe83ac5b165a61b9c888548c732a9c95" dependencies = [ "ahash", "egui", @@ -2146,7 +2138,7 @@ checksum = "f282cfdfe92516eb26c2af8589c274c7c17681f5ecc03c18255fe741c6aa64eb" dependencies = [ "proc-macro2", "quote", - "syn 2.0.111", + "syn 2.0.117", ] [[package]] @@ -2167,7 +2159,7 @@ checksum = "67c78a4d8fdf9953a5c9d458f9efe940fd97a0cab0941c075a813ac594733827" dependencies = [ "proc-macro2", "quote", - "syn 2.0.111", + "syn 2.0.117", ] [[package]] @@ -2178,7 +2170,7 @@ checksum = "2f9ed6b3789237c8a0c1c505af1c7eb2c560df6186f01b098c3a1064ea532f38" dependencies = [ "proc-macro2", "quote", - "syn 2.0.111", + "syn 2.0.117", ] [[package]] @@ -2264,9 +2256,9 @@ checksum = "7360491ce676a36bf9bb3c56c1aa791658183a54d2744120f27285738d90465a" [[package]] name = "fastrand" -version = "2.3.0" +version = "2.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "37909eebbb50d72f9059c3b6d82c0463f2ff062c9e95845c43a6c9c0355411be" +checksum = "9f1f227452a390804cdb637b74a86990f2a7d7ba4b7d5693aac9b4dd6defd8d6" [[package]] name = "fax" @@ -2285,7 +2277,7 @@ checksum = "a0aca10fb742cb43f9e7bb8467c91aa9bcb8e3ffbc6a6f7389bb93ffc920577d" dependencies = [ 
"proc-macro2", "quote", - "syn 2.0.111", + "syn 2.0.117", ] [[package]] @@ -2299,9 +2291,9 @@ dependencies = [ [[package]] name = "find-msvc-tools" -version = "0.1.5" +version = "0.1.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3a3076410a55c90011c298b04d0cfa770b00fa04e1e3c97d3f6c9de105a03844" +checksum = "5baebc0774151f905a1a2cc41989300b1e6fbb29aff0ceffa1064fdd3088d582" [[package]] name = "flatbuffers" @@ -2309,19 +2301,19 @@ version = "25.12.19" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "35f6839d7b3b98adde531effaf34f0c2badc6f4735d26fe74709d8e513a96ef3" dependencies = [ - "bitflags 2.10.0", + "bitflags 2.11.0", "rustc_version", ] [[package]] name = "flate2" -version = "1.1.5" +version = "1.1.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bfe33edd8e85a12a67454e37f8c75e730830d83e313556ab9ebf9ee7fbeb3bfb" +checksum = "843fba2746e448b37e26a819579957415c8cef339bf08564fe8b7ddbd959573c" dependencies = [ "crc32fast", - "libz-rs-sys", "miniz_oxide", + "zlib-rs", ] [[package]] @@ -2350,9 +2342,9 @@ checksum = "77ce24cb58228fbb8aa041425bb1050850ac19177686ea6e0f41a70416f56fdb" [[package]] name = "font-types" -version = "0.10.1" +version = "0.11.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "39a654f404bbcbd48ea58c617c2993ee91d1cb63727a37bf2323a4edeed1b8c5" +checksum = "2d9237c6d82152100c691fb77ea18037b402bcc7257d2c876a4ffac81bc22a1c" dependencies = [ "bytemuck", ] @@ -2375,7 +2367,7 @@ checksum = "1a5c6c585bc94aaf2c7b51dd4c2ba22680844aba4c687be581871a6f518c5742" dependencies = [ "proc-macro2", "quote", - "syn 2.0.111", + "syn 2.0.117", ] [[package]] @@ -2415,9 +2407,9 @@ checksum = "42703706b716c37f96a77aea830392ad231f44c9e9a67872fa5548707e11b11c" [[package]] name = "futures" -version = "0.3.31" +version = "0.3.32" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"65bc07b1a8bc7c85c5f2e110c476c7389b4554ba72af57d8445ea63a576b0876" +checksum = "8b147ee9d1f6d097cef9ce628cd2ee62288d963e16fb287bd9286455b241382d" dependencies = [ "futures-channel", "futures-core", @@ -2430,9 +2422,9 @@ dependencies = [ [[package]] name = "futures-channel" -version = "0.3.31" +version = "0.3.32" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2dff15bf788c671c1934e366d07e30c1814a8ef514e1af724a602e8a2fbe1b10" +checksum = "07bbe89c50d7a535e539b8c17bc0b49bdb77747034daa8087407d655f3f7cc1d" dependencies = [ "futures-core", "futures-sink", @@ -2440,15 +2432,15 @@ dependencies = [ [[package]] name = "futures-core" -version = "0.3.31" +version = "0.3.32" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "05f29059c0c2090612e8d742178b0580d2dc940c837851ad723096f87af6663e" +checksum = "7e3450815272ef58cec6d564423f6e755e25379b217b0bc688e295ba24df6b1d" [[package]] name = "futures-executor" -version = "0.3.31" +version = "0.3.32" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1e28d1d997f585e54aebc3f97d39e72338912123a67330d723fdbb564d646c9f" +checksum = "baf29c38818342a3b26b5b923639e7b1f4a61fc5e76102d4b1981c6dc7a7579d" dependencies = [ "futures-core", "futures-task", @@ -2457,9 +2449,9 @@ dependencies = [ [[package]] name = "futures-io" -version = "0.3.31" +version = "0.3.32" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9e5c1b78ca4aae1ac06c48a526a655760685149f0d465d21f37abfe57ce075c6" +checksum = "cecba35d7ad927e23624b22ad55235f2239cfa44fd10428eecbeba6d6a717718" [[package]] name = "futures-lite" @@ -2476,32 +2468,32 @@ dependencies = [ [[package]] name = "futures-macro" -version = "0.3.31" +version = "0.3.32" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "162ee34ebcb7c64a8abebc059ce0fee27c2262618d7b60ed8faf72fef13c3650" +checksum = "e835b70203e41293343137df5c0664546da5745f82ec9b84d40be8336958447b" dependencies = [ 
"proc-macro2", "quote", - "syn 2.0.111", + "syn 2.0.117", ] [[package]] name = "futures-sink" -version = "0.3.31" +version = "0.3.32" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e575fab7d1e0dcb8d0c7bcf9a63ee213816ab51902e6d244a95819acacf1d4f7" +checksum = "c39754e157331b013978ec91992bde1ac089843443c49cbc7f46150b0fad0893" [[package]] name = "futures-task" -version = "0.3.31" +version = "0.3.32" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f90f7dce0722e95104fcb095585910c0977252f286e354b5e3bd38902cd99988" +checksum = "037711b3d59c33004d3856fbdc83b99d4ff37a24768fa1be9ce3538a1cde4393" [[package]] name = "futures-util" -version = "0.3.31" +version = "0.3.32" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9fa08315bb612088cc391249efdc3bc77536f16c91f6cf495e6fbe85b20a4a81" +checksum = "389ca41296e6190b48053de0321d02a77f32f8a5d2461dd38762c0593805c6d6" dependencies = [ "futures-channel", "futures-core", @@ -2511,7 +2503,6 @@ dependencies = [ "futures-task", "memchr", "pin-project-lite", - "pin-utils", "slab", ] @@ -2581,9 +2572,9 @@ dependencies = [ [[package]] name = "geographiclib-rs" -version = "0.2.5" +version = "0.2.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f611040a2bb37eaa29a78a128d1e92a378a03e0b6e66ae27398d42b1ba9a7841" +checksum = "c5a7f08910fd98737a6eda7568e7c5e645093e073328eeef49758cfe8b0489c7" dependencies = [ "libm", ] @@ -2594,15 +2585,15 @@ version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1bd49230192a3797a9a4d6abe9b3eed6f7fa4c8a8a4947977c6f80025f92cbd8" dependencies = [ - "rustix 1.1.3", + "rustix 1.1.4", "windows-link 0.2.1", ] [[package]] name = "getrandom" -version = "0.2.16" +version = "0.2.17" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "335ff9f135e4384c8150d6f27c6daed433577f86b4750418338c01a1a2528592" +checksum = 
"ff2abc00be7fca6ebc474524697ae276ad847ad0a6b3faa4bcb027e9a4614ad0" dependencies = [ "cfg-if", "js-sys", @@ -2620,11 +2611,24 @@ dependencies = [ "cfg-if", "js-sys", "libc", - "r-efi", + "r-efi 5.3.0", "wasip2", "wasm-bindgen", ] +[[package]] +name = "getrandom" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0de51e6874e94e7bf76d726fc5d13ba782deca734ff60d5bb2fb2607c7406555" +dependencies = [ + "cfg-if", + "libc", + "r-efi 6.0.0", + "wasip2", + "wasip3", +] + [[package]] name = "gl_generator" version = "0.14.0" @@ -2654,7 +2658,7 @@ version = "0.32.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "12124de845cacfebedff80e877bb37b5b75c34c5a4c89e47e1cdd67fb6041325" dependencies = [ - "bitflags 2.10.0", + "bitflags 2.11.0", "cfg_aliases", "cgl", "dispatch2", @@ -2662,7 +2666,7 @@ dependencies = [ "glutin_glx_sys", "glutin_wgl_sys", "libloading", - "objc2 0.6.3", + "objc2 0.6.4", "objc2-app-kit 0.3.2", "objc2-core-foundation", "objc2-foundation 0.3.2", @@ -2720,7 +2724,7 @@ version = "0.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fbcd2dba93594b227a1f57ee09b8b9da8892c34d55aa332e034a228d0fe6a171" dependencies = [ - "bitflags 2.10.0", + "bitflags 2.11.0", "gpu-alloc-types", ] @@ -2730,7 +2734,7 @@ version = "0.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "98ff03b468aa837d70984d55f5d3f846f6ec31fe34bbb97c4f85219caeee1ca4" dependencies = [ - "bitflags 2.10.0", + "bitflags 2.11.0", ] [[package]] @@ -2751,7 +2755,7 @@ version = "0.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b89c83349105e3732062a895becfc71a8f921bb71ecbbdd8ff99263e3b53a0ca" dependencies = [ - "bitflags 2.10.0", + "bitflags 2.11.0", "gpu-descriptor-types", "hashbrown 0.15.5", ] @@ -2762,14 +2766,14 @@ version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"fdf242682df893b86f33a73828fb09ca4b2d3bb6cc95249707fc684d27484b91" dependencies = [ - "bitflags 2.10.0", + "bitflags 2.11.0", ] [[package]] name = "h2" -version = "0.4.12" +version = "0.4.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f3c0b69cfcb4e1b9f1bf2f53f95f766e4661169728ec61cd3fe5a0166f2d1386" +checksum = "2f44da3a8150a6703ed5d34e164b875fd14c2cdab9af1252a9a1020bde2bdc54" dependencies = [ "atomic-waker", "bytes", @@ -2830,8 +2834,6 @@ version = "0.15.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9229cfe53dfd69f0609a49f65461bd93001ea1ef889cd5529dd176593f5338a1" dependencies = [ - "allocator-api2", - "equivalent", "foldhash 0.1.5", ] @@ -2841,9 +2843,17 @@ version = "0.16.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "841d1cc9bed7f9236f321df977030373f4a4163ae1a7dbfe1a51a2c1a51d9100" dependencies = [ + "allocator-api2", + "equivalent", "foldhash 0.2.0", ] +[[package]] +name = "hashbrown" +version = "0.17.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4f467dd6dccf739c208452f8014c75c18bb8301b050ad1cfb27153803edb0f51" + [[package]] name = "hashlink" version = "0.10.0" @@ -2999,9 +3009,9 @@ checksum = "135b12329e5e3ce057a9f972339ea52bc954fe1e9358ef27f95e89716fbc5424" [[package]] name = "hyper" -version = "1.8.1" +version = "1.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2ab2d4f250c3d7b1c9fcdff1cece94ea4e2dfbec68614f7b87cb205f24ca9d11" +checksum = "6299f016b246a94207e63da54dbe807655bf9e00044f73ded42c3ac5305fbcca" dependencies = [ "atomic-waker", "bytes", @@ -3013,7 +3023,6 @@ dependencies = [ "httparse", "itoa", "pin-project-lite", - "pin-utils", "smallvec", "tokio", "want", @@ -3038,14 +3047,13 @@ dependencies = [ [[package]] name = "hyper-util" -version = "0.1.19" +version = "0.1.20" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"727805d60e7938b76b826a6ef209eb70eaa1812794f9424d4a4e2d740662df5f" +checksum = "96547c2556ec9d12fb1578c4eaf448b04993e7fb79cbaad930a656880a6bdfa0" dependencies = [ "base64", "bytes", "futures-channel", - "futures-core", "futures-util", "http 1.4.0", "http-body 1.0.1", @@ -3077,9 +3085,9 @@ checksum = "9190f86706ca38ac8add223b2aed8b1330002b5cdbbce28fb58b10914d38fc27" [[package]] name = "i_overlay" -version = "4.0.6" +version = "4.0.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0fcccbd4e4274e0f80697f5fbc6540fdac533cce02f2081b328e68629cce24f9" +checksum = "413183068e6e0289e18d7d0a1f661b81546e6918d5453a44570b9ab30cbed1b3" dependencies = [ "i_float", "i_key_sort", @@ -3105,9 +3113,9 @@ checksum = "35e6d558e6d4c7b82bc51d9c771e7a927862a161a7d87bf2b0541450e0e20915" [[package]] name = "iana-time-zone" -version = "0.1.64" +version = "0.1.65" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "33e57f83510bb73707521ebaffa789ec8caf86f9657cad665b092b581d40e9fb" +checksum = "e31bc9ad994ba00e440a8aa5c9ef0ec67d5cb5e5cb0cc7f8b744a35b389cc470" dependencies = [ "android_system_properties", "core-foundation-sys", @@ -3129,12 +3137,13 @@ dependencies = [ [[package]] name = "icu_collections" -version = "2.1.1" +version = "2.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4c6b649701667bbe825c3b7e6388cb521c23d88644678e83c0c4d0a621a34b43" +checksum = "2984d1cd16c883d7935b9e07e44071dca8d917fd52ecc02c04d5fa0b5a3f191c" dependencies = [ "displaydoc 0.2.5", "potential_utf", + "utf8_iter", "yoke", "zerofrom", "zerovec", @@ -3142,9 +3151,9 @@ dependencies = [ [[package]] name = "icu_locale_core" -version = "2.1.1" +version = "2.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "edba7861004dd3714265b4db54a3c390e880ab658fec5f7db895fae2046b5bb6" +checksum = "92219b62b3e2b4d88ac5119f8904c10f8f61bf7e95b640d25ba3075e6cac2c29" dependencies = [ "displaydoc 0.2.5", "litemap", @@ 
-3155,9 +3164,9 @@ dependencies = [ [[package]] name = "icu_normalizer" -version = "2.1.1" +version = "2.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5f6c8828b67bf8908d82127b2054ea1b4427ff0230ee9141c54251934ab1b599" +checksum = "c56e5ee99d6e3d33bd91c5d85458b6005a22140021cc324cea84dd0e72cff3b4" dependencies = [ "icu_collections", "icu_normalizer_data", @@ -3169,15 +3178,15 @@ dependencies = [ [[package]] name = "icu_normalizer_data" -version = "2.1.1" +version = "2.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7aedcccd01fc5fe81e6b489c15b247b8b0690feb23304303a9e560f37efc560a" +checksum = "da3be0ae77ea334f4da67c12f149704f19f81d1adf7c51cf482943e84a2bad38" [[package]] name = "icu_properties" -version = "2.1.2" +version = "2.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "020bfc02fe870ec3a66d93e677ccca0562506e5872c650f893269e08615d74ec" +checksum = "bee3b67d0ea5c2cca5003417989af8996f8604e34fb9ddf96208a033901e70de" dependencies = [ "icu_collections", "icu_locale_core", @@ -3189,15 +3198,15 @@ dependencies = [ [[package]] name = "icu_properties_data" -version = "2.1.2" +version = "2.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "616c294cf8d725c6afcd8f55abc17c56464ef6211f9ed59cccffe534129c77af" +checksum = "8e2bbb201e0c04f7b4b3e14382af113e17ba4f63e2c9d2ee626b720cbce54a14" [[package]] name = "icu_provider" -version = "2.1.1" +version = "2.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "85962cf0ce02e1e0a629cc34e7ca3e373ce20dda4c4d7294bbd0bf1fdb59e614" +checksum = "139c4cf31c8b5f33d7e199446eff9c1e02decfc2f0eec2c8d71f65befa45b421" dependencies = [ "displaydoc 0.2.5", "icu_locale_core", @@ -3208,6 +3217,12 @@ dependencies = [ "zerovec", ] +[[package]] +name = "id-arena" +version = "2.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"3d3067d79b975e8844ca9eb072e16b31c3c1c36928edf9c6789548c524d0d954" + [[package]] name = "ident_case" version = "1.0.1" @@ -3243,9 +3258,9 @@ dependencies = [ [[package]] name = "image" -version = "0.25.9" +version = "0.25.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e6506c6c10786659413faa717ceebcb8f70731c0a60cbae39795fdf114519c1a" +checksum = "85ab80394333c02fe689eaf900ab500fbd0c2213da414687ebf995a65d5a6104" dependencies = [ "bytemuck", "byteorder-lite", @@ -3257,23 +3272,23 @@ dependencies = [ [[package]] name = "indexmap" -version = "2.12.1" +version = "2.14.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0ad4bb2b565bca0645f4d68c5c9af97fba094e9791da685bf83cb5f3ce74acf2" +checksum = "d466e9454f08e4a911e14806c24e16fba1b4c121d1ea474396f396069cf949d9" dependencies = [ "equivalent", - "hashbrown 0.16.1", + "hashbrown 0.17.0", "serde", "serde_core", ] [[package]] name = "indicatif" -version = "0.18.3" +version = "0.18.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9375e112e4b463ec1b1c6c011953545c65a30164fbab5b581df32b3abf0dcb88" +checksum = "25470f23803092da7d239834776d653104d551bc4d7eacaf31e6837854b8e9eb" dependencies = [ - "console 0.16.2", + "console", "portable-atomic", "rayon", "unicode-width", @@ -3292,11 +3307,11 @@ dependencies = [ [[package]] name = "insta" -version = "1.45.0" +version = "1.47.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b76866be74d68b1595eb8060cb9191dca9c021db2316558e52ddc5d55d41b66c" +checksum = "7b4a6248eb93a4401ed2f37dfe8ea592d3cf05b7cf4f8efa867b6895af7e094e" dependencies = [ - "console 0.15.11", + "console", "once_cell", "similar", "tempfile", @@ -3310,15 +3325,15 @@ checksum = "8bb03732005da905c88227371639bf1ad885cc712789c011c31c5fb3ab3ccf02" [[package]] name = "ipnet" -version = "2.11.0" +version = "2.12.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"469fb0b9cefa57e3ef31275ee7cacb78f2fdca44e4765491884a2b119d4eb130" +checksum = "d98f6fed1fde3f8c21bc40a1abb88dd75e67924f9cffc3ef95607bad8017f8e2" [[package]] name = "iri-string" -version = "0.7.9" +version = "0.7.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4f867b9d1d896b67beb18518eda36fdb77a32ea590de864f1325b294a6d14397" +checksum = "25e659a4bb38e810ebc252e53b5814ff908a8c58c2a9ce2fae1bbec24cbf4e20" dependencies = [ "memchr", "serde", @@ -3350,31 +3365,67 @@ dependencies = [ [[package]] name = "itoa" -version = "1.0.16" +version = "1.0.18" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7ee5b5339afb4c41626dde77b7a611bd4f2c202b897852b4bcf5d03eddc61010" +checksum = "8f42a60cbdf9a97f5d2305f08a87dc4e09308d1276d28c869c684d7777685682" [[package]] name = "jni" -version = "0.21.1" +version = "0.22.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1a87aa2bb7d2af34197c04845522473242e1aa17c12f4935d5856491a7fb8c97" +checksum = "5efd9a482cf3a427f00d6b35f14332adc7902ce91efb778580e180ff90fa3498" dependencies = [ - "cesu8", "cfg-if", "combine", - "jni-sys", + "jni-macros", + "jni-sys 0.4.1", "log", - "thiserror 1.0.69", + "simd_cesu8", + "thiserror 2.0.18", "walkdir", - "windows-sys 0.45.0", + "windows-link 0.2.1", +] + +[[package]] +name = "jni-macros" +version = "0.22.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a00109accc170f0bdb141fed3e393c565b6f5e072365c3bd58f5b062591560a3" +dependencies = [ + "proc-macro2", + "quote", + "rustc_version", + "simd_cesu8", + "syn 2.0.117", ] [[package]] name = "jni-sys" -version = "0.3.0" +version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8eaf4bc02d17cbdd7ff4c7438cafcdf7fb9a4613313ad11b4f8fefe7d3fa0130" +checksum = "41a652e1f9b6e0275df1f15b32661cf0d4b78d4d87ddec5e0c3c20f097433258" +dependencies = [ + "jni-sys 0.4.1", +] + +[[package]] +name = "jni-sys" +version = 
"0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c6377a88cb3910bee9b0fa88d4f42e1d2da8e79915598f65fb0c7ee14c878af2" +dependencies = [ + "jni-sys-macros", +] + +[[package]] +name = "jni-sys-macros" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "38c0b942f458fe50cdac086d2f946512305e5631e720728f2a61aabcd47a6264" +dependencies = [ + "quote", + "syn 2.0.117", +] [[package]] name = "jobserver" @@ -3388,10 +3439,12 @@ dependencies = [ [[package]] name = "js-sys" -version = "0.3.83" +version = "0.3.95" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "464a3709c7f55f1f721e5389aa6ea4e3bc6aba669353300af094b29ffbdde1d8" +checksum = "2964e92d1d9dc3364cae4d718d93f227e3abb088e747d92e0395bfdedf1c12ca" dependencies = [ + "cfg-if", + "futures-util", "once_cell", "wasm-bindgen", ] @@ -3419,6 +3472,12 @@ version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bbd2bcb4c963f2ddae06a2efc7e9f3591312473c50c6685e1f298068316e66fe" +[[package]] +name = "leb128fmt" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "09edd9e8b54e49e587e4f6295a7d29c3ea94d469cb40ab8ca70b288248a81db2" + [[package]] name = "lexical-core" version = "1.0.6" @@ -3478,9 +3537,9 @@ dependencies = [ [[package]] name = "libc" -version = "0.2.182" +version = "0.2.184" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6800badb6cb2082ffd7b6a67e6125bb39f18782f793520caee8cb8846be06112" +checksum = "48f5d2a454e16a5ea0f4ced81bd44e4cfc7bd3a507b61887c99fd3538b28e4af" [[package]] name = "libloading" @@ -3494,9 +3553,9 @@ dependencies = [ [[package]] name = "libm" -version = "0.2.15" +version = "0.2.16" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f9fbbcab51052fe104eb5e5d351cf728d30a5be1fe14d9be8a3b097481fb97de" +checksum = 
"b6d2cec3eae94f9f509c767b45932f1ada8350c4bdb85af2fcab4a3c14807981" [[package]] name = "libmimalloc-sys" @@ -3510,13 +3569,14 @@ dependencies = [ [[package]] name = "libredox" -version = "0.1.11" +version = "0.1.16" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "df15f6eac291ed1cf25865b1ee60399f57e7c227e7f51bdbd4c5270396a9ed50" +checksum = "e02f3bb43d335493c96bf3fd3a321600bf6bd07ed34bc64118e9293bdffea46c" dependencies = [ - "bitflags 2.10.0", + "bitflags 2.11.0", "libc", - "redox_syscall 0.6.0", + "plain", + "redox_syscall 0.7.4", ] [[package]] @@ -3530,15 +3590,6 @@ dependencies = [ "vcpkg", ] -[[package]] -name = "libz-rs-sys" -version = "0.5.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c10501e7805cee23da17c7790e59df2870c0d4043ec6d03f67d31e2b53e77415" -dependencies = [ - "zlib-rs", -] - [[package]] name = "linear-map" version = "1.2.0" @@ -3563,15 +3614,15 @@ checksum = "d26c52dbd32dccf2d10cac7725f8eae5296885fb5703b261f7d0a0739ec807ab" [[package]] name = "linux-raw-sys" -version = "0.11.0" +version = "0.12.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "df1d3c3b53da64cf5760482273a98e575c651a67eec7f77df96b5b642de8f039" +checksum = "32a66949e030da00e8c7d4434b251670a91556f4144941d37452769c25d58a53" [[package]] name = "litemap" -version = "0.8.1" +version = "0.8.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6373607a59f0be73a39b6fe456b8192fcc3585f602af20751600e974dd455e77" +checksum = "92daf443525c4cce67b150400bc2316076100ce0b3686209eb8cf3c31612e6f0" [[package]] name = "litrs" @@ -3602,9 +3653,9 @@ checksum = "112b39cec0b298b6c1999fee3e31427f74f676e4cb9879ed1a121b43661a4154" [[package]] name = "lz4_flex" -version = "0.12.0" +version = "0.12.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ab6473172471198271ff72e9379150e9dfd70d8e533e0752a27e515b48dd375e" +checksum = 
"98c23545df7ecf1b16c303910a69b079e8e251d60f7dd2cc9b4177f2afaf1746" dependencies = [ "twox-hash", ] @@ -3649,15 +3700,15 @@ dependencies = [ [[package]] name = "memchr" -version = "2.7.6" +version = "2.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f52b00d39961fc5b2736ea853c9cc86238e165017a493d1d5c8eac6bdc4cc273" +checksum = "f8ca58f447f06ed17d5fc4043ce1b10dd205e060fb3ce5b979b8ed8e59ff3f79" [[package]] name = "memmap2" -version = "0.9.9" +version = "0.9.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "744133e4a0e0a658e1374cf3bf8e415c4052a15a111acd372764c55b4177d490" +checksum = "714098028fe011992e1c3962653c96b2d578c4b4bce9036e15ff220319b1e0e3" dependencies = [ "libc", ] @@ -3677,7 +3728,7 @@ version = "0.32.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "00c15a6f673ff72ddcc22394663290f870fb224c1bfce55734a75c414150e605" dependencies = [ - "bitflags 2.10.0", + "bitflags 2.11.0", "block", "core-graphics-types 0.2.0", "foreign-types", @@ -3692,7 +3743,7 @@ version = "0.26.0" dependencies = [ "rustyms", "serde", - "thiserror 2.0.17", + "thiserror 2.0.18", ] [[package]] @@ -3734,9 +3785,9 @@ dependencies = [ [[package]] name = "mio" -version = "1.1.1" +version = "1.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a69bcab0ad47271a0234d9422b131806bf3968021e5dc9328caf2d4cd58557fc" +checksum = "50b7e5b27aa02a74bac8c3f23f448f8d87ff11f92d3aac1a6ed369ee08cc56c1" dependencies = [ "libc", "wasi", @@ -3745,9 +3796,9 @@ dependencies = [ [[package]] name = "moxcms" -version = "0.7.11" +version = "0.8.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ac9557c559cd6fc9867e122e20d2cbefc9ca29d80d027a8e39310920ed2f0a97" +checksum = "bb85c154ba489f01b25c0d36ae69a87e4a1c73a72631fc6c0eb6dde34a73e44b" dependencies = [ "num-traits", "pxfm", @@ -3760,7 +3811,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" 
checksum = "7e6c319f4111abe43bbcc33433b727968e30163f0a90e440386978656df8685c" dependencies = [ "base64-simd", - "bitflags 2.10.0", + "bitflags 2.11.0", "bytemuck", "chrono", "flate2", @@ -3770,7 +3821,7 @@ dependencies = [ "mzpeaks", "num-traits", "regex", - "thiserror 2.0.17", + "thiserror 2.0.18", ] [[package]] @@ -3790,7 +3841,7 @@ checksum = "066cf25f0e8b11ee0df221219010f213ad429855f57c494f995590c861a9a7d8" dependencies = [ "arrayvec", "bit-set", - "bitflags 2.10.0", + "bitflags 2.11.0", "cfg-if", "cfg_aliases", "codespan-reporting", @@ -3804,7 +3855,7 @@ dependencies = [ "once_cell", "rustc-hash 1.1.0", "spirv", - "thiserror 2.0.17", + "thiserror 2.0.18", "unicode-ident", ] @@ -3829,8 +3880,8 @@ version = "0.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c3f42e7bbe13d351b6bead8286a43aac9534b82bd3cc43e47037f012ebfd62d4" dependencies = [ - "bitflags 2.10.0", - "jni-sys", + "bitflags 2.11.0", + "jni-sys 0.3.1", "log", "ndk-sys", "num_enum", @@ -3850,7 +3901,7 @@ version = "0.6.0+11769913" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ee6cda3051665f1fb8d9e08fc35c96d5a244fb1be711a03b71118828afc9a873" dependencies = [ - "jni-sys", + "jni-sys 0.3.1", ] [[package]] @@ -3859,25 +3910,12 @@ version = "0.29.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "71e2746dc3a24dd78b3cfcb7be93368c6de9963d30f43a6a73998a9cf4b17b46" dependencies = [ - "bitflags 2.10.0", + "bitflags 2.11.0", "cfg-if", "cfg_aliases", "libc", ] -[[package]] -name = "nix" -version = "0.30.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "74523f3a35e05aba87a1d978330aef40f67b0304ac79c1c00b294c9830543db6" -dependencies = [ - "bitflags 2.10.0", - "cfg-if", - "cfg_aliases", - "libc", - "memoffset", -] - [[package]] name = "nohash-hasher" version = "0.2.0" @@ -3916,9 +3954,9 @@ dependencies = [ [[package]] name = "num-conv" -version = "0.1.0" +version = "0.2.1" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "51d515d32fb182ee37cda2ccdcb92950d6a3c2893aa280e540671c2cd0f3b1d9" +checksum = "c6673768db2d862beb9b39a78fdcb1a69439615d5794a1be50caa9bc92c81967" [[package]] name = "num-integer" @@ -3952,9 +3990,9 @@ dependencies = [ [[package]] name = "num_enum" -version = "0.7.5" +version = "0.7.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b1207a7e20ad57b847bbddc6776b968420d38292bbfe2089accff5e19e82454c" +checksum = "5d0bca838442ec211fa11de3a8b0e0e8f3a4522575b5c4c06ed722e005036f26" dependencies = [ "num_enum_derive", "rustversion", @@ -3962,14 +4000,14 @@ dependencies = [ [[package]] name = "num_enum_derive" -version = "0.7.5" +version = "0.7.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ff32365de1b6743cb203b710788263c44a03de03802daf96092f2da4fe6ba4d7" +checksum = "680998035259dcfcafe653688bf2aa6d3e2dc05e98be6ab46afb089dc84f1df8" dependencies = [ "proc-macro-crate", "proc-macro2", "quote", - "syn 2.0.111", + "syn 2.0.117", ] [[package]] @@ -3985,7 +4023,7 @@ dependencies = [ "num-traits", "pyo3", "pyo3-build-config", - "rustc-hash 2.1.1", + "rustc-hash 2.1.2", ] [[package]] @@ -4015,9 +4053,9 @@ dependencies = [ [[package]] name = "objc2" -version = "0.6.3" +version = "0.6.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b7c2599ce0ec54857b29ce62166b0ed9b4f6f1a70ccc9a71165b6154caca8c05" +checksum = "3a12a8ed07aefc768292f076dc3ac8c48f3781c8f2d5851dd3d98950e8c5a89f" dependencies = [ "objc2-encode", ] @@ -4028,7 +4066,7 @@ version = "0.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e4e89ad9e3d7d297152b17d39ed92cd50ca8063a89a9fa569046d41568891eff" dependencies = [ - "bitflags 2.10.0", + "bitflags 2.11.0", "block2 0.5.1", "libc", "objc2 0.5.2", @@ -4044,9 +4082,9 @@ version = "0.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"d49e936b501e5c5bf01fda3a9452ff86dc3ea98ad5f283e1455153142d97518c" dependencies = [ - "bitflags 2.10.0", + "bitflags 2.11.0", "block2 0.6.2", - "objc2 0.6.3", + "objc2 0.6.4", "objc2-core-foundation", "objc2-core-graphics", "objc2-foundation 0.3.2", @@ -4058,7 +4096,7 @@ version = "0.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "74dd3b56391c7a0596a295029734d3c1c5e7e510a4cb30245f8221ccea96b009" dependencies = [ - "bitflags 2.10.0", + "bitflags 2.11.0", "block2 0.5.1", "objc2 0.5.2", "objc2-core-location", @@ -4082,7 +4120,7 @@ version = "0.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "617fbf49e071c178c0b24c080767db52958f716d9eabdf0890523aeae54773ef" dependencies = [ - "bitflags 2.10.0", + "bitflags 2.11.0", "block2 0.5.1", "objc2 0.5.2", "objc2-foundation 0.2.2", @@ -4094,9 +4132,9 @@ version = "0.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2a180dd8642fa45cdb7dd721cd4c11b1cadd4929ce112ebd8b9f5803cc79d536" dependencies = [ - "bitflags 2.10.0", + "bitflags 2.11.0", "dispatch2", - "objc2 0.6.3", + "objc2 0.6.4", ] [[package]] @@ -4105,9 +4143,9 @@ version = "0.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e022c9d066895efa1345f8e33e584b9f958da2fd4cd116792e15e07e4720a807" dependencies = [ - "bitflags 2.10.0", + "bitflags 2.11.0", "dispatch2", - "objc2 0.6.3", + "objc2 0.6.4", "objc2-core-foundation", "objc2-io-surface", ] @@ -4148,7 +4186,7 @@ version = "0.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0ee638a5da3799329310ad4cfa62fbf045d5f56e3ef5ba4149e7452dcf89d5a8" dependencies = [ - "bitflags 2.10.0", + "bitflags 2.11.0", "block2 0.5.1", "dispatch", "libc", @@ -4161,8 +4199,8 @@ version = "0.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e3e0adef53c21f888deb4fa59fc59f7eb17404926ee8a6f59f5df0fd7f9f3272" dependencies = [ - "bitflags 2.10.0", - "objc2 0.6.3", 
+ "bitflags 2.11.0", + "objc2 0.6.4", "objc2-core-foundation", ] @@ -4172,8 +4210,8 @@ version = "0.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "180788110936d59bab6bd83b6060ffdfffb3b922ba1396b312ae795e1de9d81d" dependencies = [ - "bitflags 2.10.0", - "objc2 0.6.3", + "bitflags 2.11.0", + "objc2 0.6.4", "objc2-core-foundation", ] @@ -4195,7 +4233,7 @@ version = "0.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "dd0cba1276f6023976a406a14ffa85e1fdd19df6b0f737b063b95f6c8c7aadd6" dependencies = [ - "bitflags 2.10.0", + "bitflags 2.11.0", "block2 0.5.1", "objc2 0.5.2", "objc2-foundation 0.2.2", @@ -4207,7 +4245,7 @@ version = "0.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e42bee7bff906b14b167da2bac5efe6b6a07e6f7c0a21a7308d40c960242dc7a" dependencies = [ - "bitflags 2.10.0", + "bitflags 2.11.0", "block2 0.5.1", "objc2 0.5.2", "objc2-foundation 0.2.2", @@ -4230,7 +4268,7 @@ version = "0.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b8bb46798b20cd6b91cbd113524c490f1686f4c4e8f49502431415f3512e2b6f" dependencies = [ - "bitflags 2.10.0", + "bitflags 2.11.0", "block2 0.5.1", "objc2 0.5.2", "objc2-cloud-kit", @@ -4262,7 +4300,7 @@ version = "0.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "76cfcbf642358e8689af64cee815d139339f3ed8ad05103ed5eaf73db8d84cb3" dependencies = [ - "bitflags 2.10.0", + "bitflags 2.11.0", "block2 0.5.1", "objc2 0.5.2", "objc2-core-location", @@ -4271,9 +4309,9 @@ dependencies = [ [[package]] name = "object_store" -version = "0.12.4" +version = "0.12.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4c1be0c6c22ec0817cdc77d3842f721a17fd30ab6965001415b5402a74e6b740" +checksum = "fbfbfff40aeccab00ec8a910b57ca8ecf4319b335c542f2edcd19dd25a1e2a00" dependencies = [ "async-trait", "base64", @@ -4291,14 +4329,14 @@ dependencies = [ "parking_lot", 
"percent-encoding", "quick-xml 0.38.4", - "rand 0.9.2", + "rand 0.9.3", "reqwest", "ring", "rustls-pemfile", "serde", "serde_json", "serde_urlencoded", - "thiserror 2.0.17", + "thiserror 2.0.18", "tokio", "tracing", "url", @@ -4309,9 +4347,9 @@ dependencies = [ [[package]] name = "once_cell" -version = "1.21.3" +version = "1.21.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "42f5e15c9953c5e4ccceeb2e7382a716482c34515315f7b03532b8b4e8393d2d" +checksum = "9f7c3e4beb33f85d45ae3e3a1792185706c8e16d043238c593331cc7cd313b50" [[package]] name = "once_cell_polyfill" @@ -4321,16 +4359,17 @@ checksum = "384b8ab6d37215f3c5301a95a4accb5d64aa607f1fcb26a11b5303878451b4fe" [[package]] name = "openssl-probe" -version = "0.1.6" +version = "0.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d05e27ee213611ffe7d6348b942e8f942b37114c00cc03cec254295a4a17852e" +checksum = "7c87def4c32ab89d880effc9e097653c8da5d6ef28e6b539d313baaacfbafcbe" [[package]] name = "orbclient" -version = "0.3.49" +version = "0.3.51" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "247ad146e19b9437f8604c21f8652423595cf710ad108af40e77d3ae6e96b827" +checksum = "59aed3b33578edcfa1bc96a321d590d31832b6ad55a26f0313362ce687e9abd6" dependencies = [ + "libc", "libredox", ] @@ -4345,9 +4384,9 @@ dependencies = [ [[package]] name = "ordered-float" -version = "5.1.0" +version = "5.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7f4779c6901a562440c3786d08192c6fbda7c1c2060edd10006b05ee35d10f2d" +checksum = "b7d950ca161dc355eaf28f82b11345ed76c6e1f6eb1f4f4479e0323b9e2fbd0e" dependencies = [ "num-traits", "rand 0.8.5", @@ -4410,9 +4449,9 @@ dependencies = [ [[package]] name = "parquet" -version = "57.1.0" +version = "57.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "be3e4f6d320dd92bfa7d612e265d7d08bba0a240bab86af3425e1d255a511d89" +checksum = 
"6ee96b29972a257b855ff2341b37e61af5f12d6af1158b6dcdb5b31ea07bb3cb" dependencies = [ "ahash", "arrow-array", @@ -4493,7 +4532,7 @@ dependencies = [ "phf_shared", "proc-macro2", "quote", - "syn 2.0.111", + "syn 2.0.117", "unicase", ] @@ -4509,29 +4548,29 @@ dependencies = [ [[package]] name = "pin-project" -version = "1.1.10" +version = "1.1.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "677f1add503faace112b9f1373e43e9e054bfdd22ff1a63c1bc485eaec6a6a8a" +checksum = "f1749c7ed4bcaf4c3d0a3efc28538844fb29bcdd7d2b67b2be7e20ba861ff517" dependencies = [ "pin-project-internal", ] [[package]] name = "pin-project-internal" -version = "1.1.10" +version = "1.1.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6e918e4ff8c4549eb882f14b3a4bc8c8bc93de829416eacf579f1207a8fbf861" +checksum = "d9b20ed30f105399776b9c883e68e536ef602a16ae6f596d2c473591d6ad64c6" dependencies = [ "proc-macro2", "quote", - "syn 2.0.111", + "syn 2.0.117", ] [[package]] name = "pin-project-lite" -version = "0.2.16" +version = "0.2.17" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3b3cff922bd51709b605d9ead9aa71031d81447142d828eb4a6eba76fe619f9b" +checksum = "a89322df9ebe1c1578d689c92318e070967d1042b512afbe49518723f4e6d5cd" [[package]] name = "pin-utils" @@ -4541,9 +4580,9 @@ checksum = "8b870d8c151b6f2fb93e84a13146138f05d02ed11c7e7c54f8826aaaf7c9f184" [[package]] name = "piper" -version = "0.2.4" +version = "0.2.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "96c8c490f422ef9a4efd2cb5b42b76c8613d7e7dfc1caf667b8a3350a5acc066" +checksum = "c835479a4443ded371d6c535cbfd8d31ad92c5d23ae9770a61bc155e4992a3c1" dependencies = [ "atomic-waker", "fastrand", @@ -4556,13 +4595,19 @@ version = "0.3.32" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7edddbd0b52d732b21ad9a5fab5c704c14cd949e5e9a1ec5929a24fded1b904c" +[[package]] +name = "plain" +version = "0.2.3" 
+source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b4596b6d070b27117e987119b4dac604f3c58cfb0b191112e24771b2faeac1a6" + [[package]] name = "png" -version = "0.18.0" +version = "0.18.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "97baced388464909d42d89643fe4361939af9b7ce7a31ee32a168f832a70f2a0" +checksum = "60769b8b31b2a9f263dae2776c37b1b28ae246943cf719eb6946a1db05128a61" dependencies = [ - "bitflags 2.10.0", + "bitflags 2.11.0", "crc32fast", "fdeflate", "flate2", @@ -4579,7 +4624,7 @@ dependencies = [ "concurrent-queue", "hermit-abi", "pin-project-lite", - "rustix 1.1.3", + "rustix 1.1.4", "windows-sys 0.61.2", ] @@ -4591,24 +4636,24 @@ checksum = "2f3a9f18d041e6d0e102a0a46750538147e5e8992d3b4873aaafee2520b00ce3" [[package]] name = "portable-atomic" -version = "1.12.0" +version = "1.13.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f59e70c4aef1e55797c2e8fd94a4f2a973fc972cfde0e0b05f683667b0cd39dd" +checksum = "c33a9471896f1c69cecef8d20cbe2f7accd12527ce60845ff44c153bb2a21b49" [[package]] name = "portable-atomic-util" -version = "0.2.4" +version = "0.2.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d8a2f0d8d040d7848a709caf78912debcc3f33ee4b3cac47d73d1e1069e83507" +checksum = "091397be61a01d4be58e7841595bd4bfedb15f1cd54977d79b8271e94ed799a3" dependencies = [ "portable-atomic", ] [[package]] name = "potential_utf" -version = "0.1.4" +version = "0.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b73949432f5e2a09657003c25bca5e19a0e9c84f8058ca374f49e0ebe605af77" +checksum = "0103b1cef7ec0cf76490e969665504990193874ea05c85ff9bab8b911d0a0564" dependencies = [ "zerovec", ] @@ -4641,7 +4686,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "479ca8adacdd7ce8f1fb39ce9ecccbfe93a3f1344b3d0d97f20bc0196208f62b" dependencies = [ "proc-macro2", - "syn 2.0.111", + "syn 2.0.117", ] [[package]] @@ 
-4656,18 +4701,18 @@ dependencies = [ [[package]] name = "proc-macro-crate" -version = "3.4.0" +version = "3.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "219cb19e96be00ab2e37d6e299658a0cfa83e52429179969b0f0121b4ac46983" +checksum = "e67ba7e9b2b56446f1d419b1d807906278ffa1a658a8a5d8a39dcb1f5a78614f" dependencies = [ "toml_edit", ] [[package]] name = "proc-macro2" -version = "1.0.103" +version = "1.0.106" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5ee95bc4ef87b8d5ba32e8b7714ccc834865276eab0aed5c9958d00ec45f49e8" +checksum = "8fd00f0bb2e90d81d1044c2b32617f68fcb9fa3bb7640c23e9c748e53fb30934" dependencies = [ "unicode-ident", ] @@ -4680,7 +4725,7 @@ checksum = "af066a9c399a26e020ada66a034357a868728e72cd426f3adcd35f80d88d88c8" dependencies = [ "proc-macro2", "quote", - "syn 2.0.111", + "syn 2.0.117", "version_check", ] @@ -4692,12 +4737,9 @@ checksum = "3eb8486b569e12e2c32ad3e204dbaba5e4b5b216e9367044f25f1dba42341773" [[package]] name = "pxfm" -version = "0.1.27" +version = "0.1.28" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7186d3822593aa4393561d186d1393b3923e9d6163d3fbfd6e825e3e6cf3e6a8" -dependencies = [ - "num-traits", -] +checksum = "b5a041e753da8b807c9255f28de81879c78c876392ff2469cde94799b2896b9d" [[package]] name = "pyo3" @@ -4745,7 +4787,7 @@ dependencies = [ "proc-macro2", "pyo3-macros-backend", "quote", - "syn 2.0.111", + "syn 2.0.117", ] [[package]] @@ -4758,7 +4800,7 @@ dependencies = [ "proc-macro2", "pyo3-build-config", "quote", - "syn 2.0.111", + "syn 2.0.117", ] [[package]] @@ -4769,9 +4811,9 @@ checksum = "a993555f31e5a609f617c12db6250dedcac1b0a85076912c436e6fc9b2c8e6a3" [[package]] name = "quick-xml" -version = "0.36.2" +version = "0.38.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f7649a7b4df05aed9ea7ec6f628c67c9953a43869b8bc50929569b2999d443fe" +checksum = 
"b66c2058c55a409d601666cffe35f04333cf1013010882cec174a7467cd4e21c" dependencies = [ "memchr", "serde", @@ -4779,21 +4821,11 @@ dependencies = [ [[package]] name = "quick-xml" -version = "0.37.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "331e97a1af0bf59823e6eadffe373d7b27f485be8748f71471c662c1f269b7fb" -dependencies = [ - "memchr", -] - -[[package]] -name = "quick-xml" -version = "0.38.4" +version = "0.39.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b66c2058c55a409d601666cffe35f04333cf1013010882cec174a7467cd4e21c" +checksum = "958f21e8e7ceb5a1aa7fa87fab28e7c75976e0bfe7e23ff069e0a260f894067d" dependencies = [ "memchr", - "serde", ] [[package]] @@ -4807,10 +4839,10 @@ dependencies = [ "pin-project-lite", "quinn-proto", "quinn-udp", - "rustc-hash 2.1.1", + "rustc-hash 2.1.2", "rustls", "socket2", - "thiserror 2.0.17", + "thiserror 2.0.18", "tokio", "tracing", "web-time", @@ -4818,20 +4850,20 @@ dependencies = [ [[package]] name = "quinn-proto" -version = "0.11.13" +version = "0.11.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f1906b49b0c3bc04b5fe5d86a77925ae6524a19b816ae38ce1e426255f1d8a31" +checksum = "434b42fec591c96ef50e21e886936e66d3cc3f737104fdb9b737c40ffb94c098" dependencies = [ "bytes", "getrandom 0.3.4", "lru-slab", - "rand 0.9.2", + "rand 0.9.3", "ring", - "rustc-hash 2.1.1", + "rustc-hash 2.1.2", "rustls", "rustls-pki-types", "slab", - "thiserror 2.0.17", + "thiserror 2.0.18", "tinyvec", "tracing", "web-time", @@ -4853,9 +4885,9 @@ dependencies = [ [[package]] name = "quote" -version = "1.0.42" +version = "1.0.45" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a338cc41d27e6cc6dce6cefc13a0729dfbb81c262b1f519331575dd80ef3067f" +checksum = "41f2619966050689382d2b44f664f4bc593e129785a36d6ee376ddf37259b924" dependencies = [ "proc-macro2", ] @@ -4866,6 +4898,12 @@ version = "5.3.0" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "69cdb34c158ceb288df11e18b4bd39de994f6657d83847bdffdbd7f346754b0f" +[[package]] +name = "r-efi" +version = "6.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f8dcc9c7d52a811697d2151c701e0d08956f92b0e24136cf4cf27b57a6a0d9bf" + [[package]] name = "rand" version = "0.8.5" @@ -4880,12 +4918,12 @@ dependencies = [ [[package]] name = "rand" -version = "0.9.2" +version = "0.9.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6db2770f06117d490610c7488547d543617b21bfa07796d7a12f6f1bd53850d1" +checksum = "7ec095654a25171c2124e9e3393a930bddbffdc939556c914957a4c3e0a87166" dependencies = [ "rand_chacha 0.9.0", - "rand_core 0.9.3", + "rand_core 0.9.5", ] [[package]] @@ -4905,7 +4943,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d3022b5f1df60f26e1ffddd6c66e8aa15de382ae63b3a0c1bfc0e4d3e3f325cb" dependencies = [ "ppv-lite86", - "rand_core 0.9.3", + "rand_core 0.9.5", ] [[package]] @@ -4914,15 +4952,15 @@ version = "0.6.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c" dependencies = [ - "getrandom 0.2.16", + "getrandom 0.2.17", "serde", ] [[package]] name = "rand_core" -version = "0.9.3" +version = "0.9.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "99d9a13982dcf210057a8a78572b2217b667c3beacbf3a0d8b454f6f82837d38" +checksum = "76afc826de14238e6e8c374ddcc1fa19e374fd8dd986b0d2af0d02377261d83c" dependencies = [ "getrandom 0.3.4", ] @@ -4935,9 +4973,9 @@ checksum = "474c42c904f04dfe2a595a02f71e1a0e5e92ffb5761cc9a4c02140b93b8dd504" [[package]] name = "range-alloc" -version = "0.1.4" +version = "0.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c3d6831663a5098ea164f89cff59c6284e95f4e3c76ce9848d4529f5ccca9bde" +checksum = 
"ca45419789ae5a7899559e9512e58ca889e41f04f1f2445e9f4b290ceccd1d08" [[package]] name = "raw-window-handle" @@ -4973,9 +5011,9 @@ dependencies = [ [[package]] name = "read-fonts" -version = "0.35.0" +version = "0.37.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6717cf23b488adf64b9d711329542ba34de147df262370221940dfabc2c91358" +checksum = "7b634fabf032fab15307ffd272149b622260f55974d9fad689292a5d33df02e5" dependencies = [ "bytemuck", "font-types", @@ -4996,23 +5034,23 @@ version = "0.5.18" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ed2bf2547551a7053d6fdfafda3f938979645c44812fbfcda098faae3f1a362d" dependencies = [ - "bitflags 2.10.0", + "bitflags 2.11.0", ] [[package]] name = "redox_syscall" -version = "0.6.0" +version = "0.7.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ec96166dafa0886eb81fe1c0a388bece180fbef2135f97c1e2cf8302e74b43b5" +checksum = "f450ad9c3b1da563fb6948a8e0fb0fb9269711c9c73d9ea1de5058c79c8d643a" dependencies = [ - "bitflags 2.10.0", + "bitflags 2.11.0", ] [[package]] name = "regex" -version = "1.12.2" +version = "1.12.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "843bc0191f75f3e22651ae5f1e72939ab2f72a4bc30fa80a066bd66edefc24d4" +checksum = "e10754a14b9137dd7b1e3e5b0493cc9171fdd105e0ab477f51b72e7f3ac0e276" dependencies = [ "aho-corasick", "memchr", @@ -5022,9 +5060,9 @@ dependencies = [ [[package]] name = "regex-automata" -version = "0.4.13" +version = "0.4.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5276caf25ac86c8d810222b3dbb938e512c55c6831a10f3e6ed1c93b84041f1c" +checksum = "6e1dd4122fc1595e8162618945476892eefca7b88c52820e74af6262213cae8f" dependencies = [ "aho-corasick", "memchr", @@ -5033,15 +5071,15 @@ dependencies = [ [[package]] name = "regex-lite" -version = "0.1.8" +version = "0.1.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"8d942b98df5e658f56f20d592c7f868833fe38115e65c33003d8cd224b0155da" +checksum = "cab834c73d247e67f4fae452806d17d3c7501756d98c8808d7c9c7aa7d18f973" [[package]] name = "regex-syntax" -version = "0.8.8" +version = "0.8.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7a2d987857b319362043e95f5353c0535c1f58eec5336fdfcf626430af7def58" +checksum = "dc897dd8d9e8bd1ed8cdad82b5966c3e0ecae09fb1907d58efaa013543185d0a" [[package]] name = "renderdoc-sys" @@ -5102,7 +5140,7 @@ dependencies = [ "dispatch2", "js-sys", "log", - "objc2 0.6.3", + "objc2 0.6.4", "objc2-app-kit 0.3.2", "objc2-core-foundation", "objc2-foundation 0.3.2", @@ -5123,7 +5161,7 @@ checksum = "a4689e6c2294d81e88dc6261c768b63bc4fcdb852be6d1352498b114f61383b7" dependencies = [ "cc", "cfg-if", - "getrandom 0.2.16", + "getrandom 0.2.17", "libc", "untrusted", "windows-sys 0.52.0", @@ -5161,7 +5199,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "db09040cc89e461f1a265139777a2bde7f8d8c67c4936f700c63ce3e2904d468" dependencies = [ "base64", - "bitflags 2.10.0", + "bitflags 2.11.0", "serde", "serde_derive", "unicode-ident", @@ -5169,11 +5207,11 @@ dependencies = [ [[package]] name = "ron" -version = "0.12.0" +version = "0.12.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fd490c5b18261893f14449cbd28cb9c0b637aebf161cd77900bfdedaff21ec32" +checksum = "4147b952f3f819eca0e99527022f7d6a8d05f111aeb0a62960c74eb283bec8fc" dependencies = [ - "bitflags 2.10.0", + "bitflags 2.11.0", "once_cell", "serde", "serde_derive", @@ -5248,7 +5286,7 @@ version = "0.37.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "165ca6e57b20e1351573e3729b958bc62f0e48025386970b6e4d29e7a7e71f3f" dependencies = [ - "bitflags 2.10.0", + "bitflags 2.11.0", "fallible-iterator", "fallible-streaming-iterator", "hashlink", @@ -5264,9 +5302,9 @@ checksum = "08d43f7aa6b08d49f382cde6a7982047c3426db949b1424bc4b7ec9ae12c6ce2" [[package]] name 
= "rustc-hash" -version = "2.1.1" +version = "2.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "357703d41365b4b27c590e3ed91eabb1b663f07c4c084095e60cbed4362dff0d" +checksum = "94300abf3f1ae2e2b8ffb7b58043de3d399c73fa6f4b73826402a5c457614dbe" [[package]] name = "rustc_version" @@ -5283,7 +5321,7 @@ version = "0.38.44" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fdb5bc1ae2baa591800df16c9ca78619bf65c0488b41b96ccec5d11220d8c154" dependencies = [ - "bitflags 2.10.0", + "bitflags 2.11.0", "errno", "libc", "linux-raw-sys 0.4.15", @@ -5292,22 +5330,22 @@ dependencies = [ [[package]] name = "rustix" -version = "1.1.3" +version = "1.1.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "146c9e247ccc180c1f61615433868c99f3de3ae256a30a43b49f67c2d9171f34" +checksum = "b6fe4565b9518b83ef4f91bb47ce29620ca828bd32cb7e408f0062e9930ba190" dependencies = [ - "bitflags 2.10.0", + "bitflags 2.11.0", "errno", "libc", - "linux-raw-sys 0.11.0", + "linux-raw-sys 0.12.1", "windows-sys 0.61.2", ] [[package]] name = "rustls" -version = "0.23.35" +version = "0.23.37" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "533f54bc6a7d4f647e46ad909549eda97bf5afc1585190ef692b4286b198bd8f" +checksum = "758025cb5fccfd3bc2fd74708fd4682be41d99e5dff73c377c0646c6012c73a4" dependencies = [ "aws-lc-rs", "once_cell", @@ -5320,9 +5358,9 @@ dependencies = [ [[package]] name = "rustls-native-certs" -version = "0.8.2" +version = "0.8.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9980d917ebb0c0536119ba501e90834767bffc3d60641457fd84a1f3fd337923" +checksum = "612460d5f7bea540c490b2b6395d8e34a953e52b491accd6c86c8164c5932a63" dependencies = [ "openssl-probe", "rustls-pki-types", @@ -5341,9 +5379,9 @@ dependencies = [ [[package]] name = "rustls-pki-types" -version = "1.13.2" +version = "1.14.0" source = "registry+https://github.com/rust-lang/crates.io-index" 
-checksum = "21e6f2ab2928ca4291b86736a8bd920a277a399bba1589409d72154ff87c1282" +checksum = "be040f8b0a225e40375822a563fa9524378b9d63112f53e19ffff34df5d33fdd" dependencies = [ "web-time", "zeroize", @@ -5351,9 +5389,9 @@ dependencies = [ [[package]] name = "rustls-webpki" -version = "0.103.8" +version = "0.103.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2ffdfa2f5286e2247234e03f680868ac2815974dc39e00ea15adc445d0aafe52" +checksum = "20a6af516fea4b20eccceaf166e8aa666ac996208e8a644ce3ef5aa783bc7cd4" dependencies = [ "aws-lc-rs", "ring", @@ -5379,10 +5417,10 @@ dependencies = [ "itertools 0.14.0", "mzdata", "ndarray", - "ordered-float 5.1.0", + "ordered-float 5.3.0", "paste", "probability", - "rand 0.9.2", + "rand 0.9.3", "rayon", "regex", "serde", @@ -5396,9 +5434,9 @@ dependencies = [ [[package]] name = "ryu" -version = "1.0.21" +version = "1.0.23" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "62049b2877bf12821e8f9ad256ee38fdc31db7387ec2d3b3f403024de2034aea" +checksum = "9774ba4a74de5f7b1c1451ed6cd5285a32eddb5cccb8cc655a4e50009e06477f" [[package]] name = "same-file" @@ -5411,9 +5449,9 @@ dependencies = [ [[package]] name = "schannel" -version = "0.1.28" +version = "0.1.29" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "891d81b926048e76efe18581bf793546b4c0eaf8448d72be8de2bbee5fd166e1" +checksum = "91c1b7e4904c873ef0710c1f407dde2e6287de2bebc1bbbf7d430bb7cbffd939" dependencies = [ "windows-sys 0.61.2", ] @@ -5445,11 +5483,11 @@ dependencies = [ [[package]] name = "security-framework" -version = "3.5.1" +version = "3.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b3297343eaf830f66ede390ea39da1d462b6b0c1b000f420d0a83f898bbbe6ef" +checksum = "b7f4bc775c73d9a02cde8bf7b2ec4c9d12743edf609006c7facc23998404cd1d" dependencies = [ - "bitflags 2.10.0", + "bitflags 2.11.0", "core-foundation 0.10.1", "core-foundation-sys", "libc", @@ -5458,9 +5496,9 
@@ dependencies = [ [[package]] name = "security-framework-sys" -version = "2.15.0" +version = "2.17.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cc1f0cbffaac4852523ce30d8bd3c5cdc873501d96ff467ca09b6767bb8cd5c0" +checksum = "6ce2691df843ecc5d231c0b14ece2acc3efb62c0a398c7e1d875f3983ce020e3" dependencies = [ "core-foundation-sys", "libc", @@ -5468,9 +5506,9 @@ dependencies = [ [[package]] name = "semver" -version = "1.0.27" +version = "1.0.28" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d767eb0aabc880b29956c35734170f26ed551a859dbd361d140cdbeca61ab1e2" +checksum = "8a7852d02fc848982e0c167ef163aaff9cd91dc640ba85e263cb1ce46fae51cd" [[package]] name = "seq-macro" @@ -5505,14 +5543,14 @@ checksum = "d540f220d3187173da220f885ab66608367b6574e925011a9353e4badda91d79" dependencies = [ "proc-macro2", "quote", - "syn 2.0.111", + "syn 2.0.117", ] [[package]] name = "serde_json" -version = "1.0.147" +version = "1.0.149" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6af14725505314343e673e9ecb7cd7e8a36aa9791eb936235a3567cc31447ae4" +checksum = "83fc039473c5595ace860d8c4fafa220ff474b3fc6bfdb4293327f1a37e94d86" dependencies = [ "itoa", "memchr", @@ -5529,7 +5567,7 @@ checksum = "175ee3e80ae9982737ca543e96133087cbd9a485eecc3bc4de9c1a37b47ea59c" dependencies = [ "proc-macro2", "quote", - "syn 2.0.111", + "syn 2.0.117", ] [[package]] @@ -5544,6 +5582,17 @@ dependencies = [ "serde", ] +[[package]] +name = "sha1" +version = "0.10.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e3bf829a2d51ab4a5ddf1352d8470c140cadc8301b2ae1789db023f01cedd6ba" +dependencies = [ + "cfg-if", + "cpufeatures", + "digest", +] + [[package]] name = "sha2" version = "0.10.9" @@ -5572,18 +5621,29 @@ checksum = "0fda2ff0d084019ba4d7c6f371c95d8fd75ce3524c3cb8fb653a3023f6323e64" [[package]] name = "signal-hook-registry" -version = "1.4.7" +version = "1.4.8" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "7664a098b8e616bdfcc2dc0e9ac44eb231eedf41db4e9fe95d8d32ec728dedad" +checksum = "c4db69cba1110affc0e9f7bcd48bbf87b3f4fc7c61fc9155afd4c469eb3d6c1b" dependencies = [ + "errno", "libc", ] [[package]] name = "simd-adler32" -version = "0.3.8" +version = "0.3.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e320a6c5ad31d271ad523dcf3ad13e2767ad8b1cb8f047f75a8aeaf8da139da2" +checksum = "703d5c7ef118737c72f1af64ad2f6f8c5e1921f818cdcb97b8fe6fc69bf66214" + +[[package]] +name = "simd_cesu8" +version = "1.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "94f90157bb87cddf702797c5dadfa0be7d266cdf49e22da2fcaa32eff75b2c33" +dependencies = [ + "rustc_version", + "simdutf8", +] [[package]] name = "simdutf8" @@ -5599,15 +5659,15 @@ checksum = "bbbb5d9659141646ae647b42fe094daf6c6192d1620870b449d9557f748b2daa" [[package]] name = "siphasher" -version = "1.0.1" +version = "1.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "56199f7ddabf13fe5074ce809e7d3f42b42ae711800501b5b16ea82ad029c39d" +checksum = "b2aa850e253778c88a04c3d7323b043aeda9d3e30d5971937c1855769763678e" [[package]] name = "skrifa" -version = "0.37.0" +version = "0.40.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8c31071dedf532758ecf3fed987cdb4bd9509f900e026ab684b4ecb81ea49841" +checksum = "7fbdfe3d2475fbd7ddd1f3e5cf8288a30eb3e5f95832829570cd88115a7434ac" dependencies = [ "bytemuck", "read-fonts", @@ -5615,9 +5675,9 @@ dependencies = [ [[package]] name = "slab" -version = "0.4.11" +version = "0.4.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7a2ae44ef20feb57a68b23d846850f861394c2e02dc425a50098ae8c90267589" +checksum = "0c790de23124f9ab44544d7ac05d60440adc586479ce501c1d6d7da3cd8c9cf5" [[package]] name = "slotmap" @@ -5640,7 +5700,7 @@ version = "0.19.2" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "3457dea1f0eb631b4034d61d4d8c32074caa6cd1ab2d59f2327bd8461e2c0016" dependencies = [ - "bitflags 2.10.0", + "bitflags 2.11.0", "calloop 0.13.0", "calloop-wayland-source 0.3.0", "cursor-icon", @@ -5665,15 +5725,15 @@ version = "0.20.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0512da38f5e2b31201a93524adb8d3136276fa4fe4aafab4e1f727a82b534cc0" dependencies = [ - "bitflags 2.10.0", - "calloop 0.14.3", + "bitflags 2.11.0", + "calloop 0.14.4", "calloop-wayland-source 0.4.1", "cursor-icon", "libc", "log", "memmap2", - "rustix 1.1.3", - "thiserror 2.0.17", + "rustix 1.1.4", + "thiserror 2.0.18", "wayland-backend", "wayland-client", "wayland-csd-frame", @@ -5714,21 +5774,21 @@ checksum = "1b6b67fb9a61334225b5b790716f609cd58395f895b3fe8b328786812a40bc3b" [[package]] name = "socket2" -version = "0.6.1" +version = "0.6.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "17129e116933cf371d018bb80ae557e889637989d8638274fb25622827b03881" +checksum = "3a766e1110788c36f4fa1c2b71b387a7815aa65f88ce0229841826633d93723e" dependencies = [ "libc", - "windows-sys 0.60.2", + "windows-sys 0.61.2", ] [[package]] name = "spade" -version = "2.15.0" +version = "2.15.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fb313e1c8afee5b5647e00ee0fe6855e3d529eb863a0fdae1d60006c4d1e9990" +checksum = "9699399fd9349b00b184f5635b074f9ec93afffef30c853f8c875b32c0f8c7fa" dependencies = [ - "hashbrown 0.15.5", + "hashbrown 0.16.1", "num-traits", "robust", "smallvec", @@ -5758,7 +5818,7 @@ version = "0.3.0+sdk-1.3.268.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "eda41003dc44290527a59b13432d4a0379379fa074b70174882adfbdfd917844" dependencies = [ - "bitflags 2.10.0", + "bitflags 2.11.0", ] [[package]] @@ -5793,9 +5853,9 @@ checksum = "13c2bddecc57b384dee18652358fb23172facb8a2c51ccc10d74c157bdea3292" [[package]] name = 
"swash" -version = "0.2.6" +version = "0.2.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "47846491253e976bdd07d0f9cc24b7daf24720d11309302ccbbc6e6b6e53550a" +checksum = "842f3cd369c2ba38966204f983eaa5e54a8e84a7d7159ed36ade2b6c335aae64" dependencies = [ "skrifa", "yazi", @@ -5815,9 +5875,9 @@ dependencies = [ [[package]] name = "syn" -version = "2.0.111" +version = "2.0.117" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "390cc9a294ab71bdb1aa2e99d13be9c753cd2d7bd6560c77118597410c4d2e87" +checksum = "e665b8803e7b1d2a727f4023456bbbbe74da67099c585258af0ad9c5013b9b99" dependencies = [ "proc-macro2", "quote", @@ -5841,7 +5901,7 @@ checksum = "728a70f3dbaf5bab7f0c4b1ac8d7ae5ea60a4b5549c8a5914361c99147a709d2" dependencies = [ "proc-macro2", "quote", - "syn 2.0.111", + "syn 2.0.117", ] [[package]] @@ -5852,14 +5912,14 @@ checksum = "adb6935a6f5c20170eeceb1a3835a49e12e19d792f6dd344ccc76a985ca5a6ca" [[package]] name = "tempfile" -version = "3.24.0" +version = "3.27.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "655da9c7eb6305c55742045d5a8d2037996d61d8de95806335c7c86ce0f82e9c" +checksum = "32497e9a4c7b38532efcdebeef879707aa9f794296a4f0244f6f69e9bc8574bd" dependencies = [ "fastrand", - "getrandom 0.3.4", + "getrandom 0.4.2", "once_cell", - "rustix 1.1.3", + "rustix 1.1.4", "windows-sys 0.61.2", ] @@ -5874,9 +5934,9 @@ dependencies = [ [[package]] name = "thin-vec" -version = "0.2.14" +version = "0.2.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "144f754d318415ac792f9d69fc87abbbfc043ce2ef041c60f16ad828f638717d" +checksum = "da322882471314edc77fa5232c587bcb87c9df52bfd0d7d4826f8868ead61899" dependencies = [ "serde", ] @@ -5892,11 +5952,11 @@ dependencies = [ [[package]] name = "thiserror" -version = "2.0.17" +version = "2.0.18" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"f63587ca0f12b72a0600bcba1d40081f830876000bb46dd2337a3051618f4fc8" +checksum = "4288b5bcbc7920c07a1149a35cf9590a2aa808e0bc1eafaade0b80947865fbc4" dependencies = [ - "thiserror-impl 2.0.17", + "thiserror-impl 2.0.18", ] [[package]] @@ -5907,18 +5967,18 @@ checksum = "4fee6c4efc90059e10f81e6d42c60a18f76588c3d74cb83a0b242a2b6c7504c1" dependencies = [ "proc-macro2", "quote", - "syn 2.0.111", + "syn 2.0.117", ] [[package]] name = "thiserror-impl" -version = "2.0.17" +version = "2.0.18" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3ff15c8ecd7de3849db632e14d18d2571fa09dfc5ed93479bc4485c7a517c913" +checksum = "ebc4ee7f67670e9b64d05fa4253e753e016c6c95ff35b89b7941d6b856dec1d5" dependencies = [ "proc-macro2", "quote", - "syn 2.0.111", + "syn 2.0.117", ] [[package]] @@ -5943,9 +6003,9 @@ dependencies = [ [[package]] name = "tiff" -version = "0.10.3" +version = "0.11.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "af9605de7fee8d9551863fd692cce7637f548dbd9db9180fcc07ccc6d26c336f" +checksum = "b63feaf3343d35b6ca4d50483f94843803b0f51634937cc2ec519fc32232bc52" dependencies = [ "fax", "flate2", @@ -5957,29 +6017,29 @@ dependencies = [ [[package]] name = "time" -version = "0.3.44" +version = "0.3.47" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "91e7d9e3bb61134e77bde20dd4825b97c010155709965fedf0f49bb138e52a9d" +checksum = "743bd48c283afc0388f9b8827b976905fb217ad9e647fae3a379a9283c4def2c" dependencies = [ "deranged", "num-conv", "powerfmt", - "serde", + "serde_core", "time-core", "time-macros", ] [[package]] name = "time-core" -version = "0.1.6" +version = "0.1.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "40868e7c1d2f0b8d73e4a8c7f0ff63af4f6d19be117e90bd73eb1d62cf831c6b" +checksum = "7694e1cfe791f8d31026952abf09c69ca6f6fa4e1a1229e18988f06a04a12dca" [[package]] name = "time-macros" -version = "0.2.24" +version = "0.2.27" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "30cfb0125f12d9c277f35663a0a33f8c30190f4e4574868a330595412d34ebf3" +checksum = "2e70e4c5a0e0a8a4823ad65dfe1a6930e4f4d756dcd9dd7939022b5e8c501215" dependencies = [ "num-conv", "time-core", @@ -6002,12 +6062,12 @@ dependencies = [ "object_store", "once_cell", "parquet", - "rand 0.9.2", + "rand 0.9.3", "rayon", "serde", "serde_json", "tempfile", - "thiserror 2.0.17", + "thiserror 2.0.18", "timsrust", "tokio", "tracing", @@ -6049,7 +6109,7 @@ dependencies = [ "serde", "serde_json", "tempfile", - "thiserror 2.0.17", + "thiserror 2.0.18", "timscentroid", "timsquery", "timsrust", @@ -6083,10 +6143,10 @@ dependencies = [ "mimalloc", "rayon", "rfd", - "ron 0.12.0", + "ron 0.12.1", "serde", "serde_json", - "thiserror 2.0.17", + "thiserror 2.0.18", "timscentroid", "timsquery", "timsrust", @@ -6108,7 +6168,7 @@ dependencies = [ "rusqlite", "serde", "serde_json", - "thiserror 2.0.17", + "thiserror 2.0.18", "zstd", ] @@ -6121,7 +6181,7 @@ dependencies = [ "forust-ml", "micromzpaf", "parquet", - "rand 0.9.2", + "rand 0.9.3", "rayon", "regex", "rmp-serde", @@ -6191,9 +6251,9 @@ dependencies = [ [[package]] name = "tinystr" -version = "0.8.2" +version = "0.8.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "42d3e9c45c09de15d06dd8acf5f4e0e399e85927b7f00711024eb7ae10fa4869" +checksum = "c8323304221c2a851516f22236c5722a72eaa19749016521d6dff0824447d96d" dependencies = [ "displaydoc 0.2.5", "zerovec", @@ -6201,11 +6261,11 @@ dependencies = [ [[package]] name = "tinyvec" -version = "1.10.0" +version = "1.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bfa5fdc3bce6191a1dbc8c02d5c8bffcf557bafa17c124c5264a458f1b0613fa" +checksum = "3e61e67053d25a4e82c844e8424039d9745781b3fc4f32b8d55ed50f5f667ef3" dependencies = [ - "serde", + "serde_core", "tinyvec_macros", ] @@ -6217,9 +6277,9 @@ checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20" 
[[package]] name = "tokio" -version = "1.48.0" +version = "1.51.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ff360e02eab121e0bc37a2d3b4d4dc622e6eda3a8e5253d5435ecf5bd4c68408" +checksum = "f66bf9585cda4b724d3e78ab34b73fb2bbaba9011b9bfdf69dc836382ea13b8c" dependencies = [ "bytes", "libc", @@ -6233,13 +6293,13 @@ dependencies = [ [[package]] name = "tokio-macros" -version = "2.6.0" +version = "2.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "af407857209536a95c8e56f8231ef2c2e2aff839b22e07a1ffcbc617e9db9fa5" +checksum = "385a6cb71ab9ab790c5fe8d67f1645e6c450a7ce006a33de03daa956cf70a496" dependencies = [ "proc-macro2", "quote", - "syn 2.0.111", + "syn 2.0.117", ] [[package]] @@ -6254,9 +6314,9 @@ dependencies = [ [[package]] name = "tokio-util" -version = "0.7.17" +version = "0.7.18" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2efa149fe76073d6e8fd97ef4f4eca7b67f599660115591483572e406e165594" +checksum = "9ae9cec805b01e8fc3fd2fe289f89149a9b66dd16786abd8b19cfa7b48cb0098" dependencies = [ "bytes", "futures-core", @@ -6267,39 +6327,39 @@ dependencies = [ [[package]] name = "toml_datetime" -version = "0.7.5+spec-1.1.0" +version = "1.1.1+spec-1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "92e1cfed4a3038bc5a127e35a2d360f145e1f4b971b551a2ba5fd7aedf7e1347" +checksum = "3165f65f62e28e0115a00b2ebdd37eb6f3b641855f9d636d3cd4103767159ad7" dependencies = [ "serde_core", ] [[package]] name = "toml_edit" -version = "0.23.10+spec-1.0.0" +version = "0.25.11+spec-1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "84c8b9f757e028cee9fa244aea147aab2a9ec09d5325a9b01e0a49730c2b5269" +checksum = "0b59c4d22ed448339746c59b905d24568fcbb3ab65a500494f7b8c3e97739f2b" dependencies = [ "indexmap", "toml_datetime", "toml_parser", - "winnow", + "winnow 1.0.1", ] [[package]] name = "toml_parser" -version = "1.0.6+spec-1.1.0" +version 
= "1.1.2+spec-1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a3198b4b0a8e11f09dd03e133c0280504d0801269e9afa46362ffde1cbeebf44" +checksum = "a2abe9b86193656635d2411dc43050282ca48aa31c2451210f4202550afb7526" dependencies = [ - "winnow", + "winnow 1.0.1", ] [[package]] name = "tower" -version = "0.5.2" +version = "0.5.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d039ad9159c98b70ecfd540b2573b97f7f52c3e8d9f8ad57a24b916a536975f9" +checksum = "ebe5ef63511595f1344e2d5cfa636d973292adc0eec1f0ad45fae9f0851ab1d4" dependencies = [ "futures-core", "futures-util", @@ -6316,7 +6376,7 @@ version = "0.6.8" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d4e6559d53cc268e5031cd8429d05415bc4cb4aefc4aa5d6cc35fbf5b924a1f8" dependencies = [ - "bitflags 2.10.0", + "bitflags 2.11.0", "bytes", "futures-util", "http 1.4.0", @@ -6360,7 +6420,7 @@ checksum = "7490cfa5ec963746568740651ac6781f701c9c5ea257c58e057f3ba8cf69e8da" dependencies = [ "proc-macro2", "quote", - "syn 2.0.111", + "syn 2.0.117", ] [[package]] @@ -6391,17 +6451,17 @@ source = "git+https://github.com/jspaezp/tracing-profile.git?branch=feat%2Faggre dependencies = [ "cfg-if", "linear-map", - "nix 0.29.0", - "thiserror 2.0.17", + "nix", + "thiserror 2.0.18", "tracing", "tracing-subscriber", ] [[package]] name = "tracing-subscriber" -version = "0.3.22" +version = "0.3.23" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2f30143827ddab0d256fd843b7a66d164e9f271cfa0dde49142c5ca0ca291f1e" +checksum = "cb7f578e5945fb242538965c2d0b04418d38ec25c79d160cd279bf0731c8d319" dependencies = [ "matchers", "nu-ansi-term", @@ -6439,7 +6499,7 @@ version = "0.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "cb30dbbd9036155e74adad6812e9898d03ec374946234fbcebd5dfc7b9187b90" dependencies = [ - "rustc-hash 2.1.1", + "rustc-hash 2.1.2", ] [[package]] @@ -6456,32 +6516,32 @@ checksum = 
"562d481066bde0658276a35467c4af00bdc6ee726305698a55b86e61d7ad82bb" [[package]] name = "uds_windows" -version = "1.1.0" +version = "1.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "89daebc3e6fd160ac4aa9fc8b3bf71e1f74fbf92367ae71fb83a037e8bf164b9" +checksum = "f2f6fb2847f6742cd76af783a2a2c49e9375d0a111c7bef6f71cd9e738c72d6e" dependencies = [ "memoffset", "tempfile", - "winapi", + "windows-sys 0.61.2", ] [[package]] name = "unicase" -version = "2.8.1" +version = "2.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "75b844d17643ee918803943289730bec8aac480150456169e647ed0b576ba539" +checksum = "dbc4bc3a9f746d862c45cb89d705aa10f187bb96c76001afab07a0d35ce60142" [[package]] name = "unicode-ident" -version = "1.0.22" +version = "1.0.24" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9312f7c4f6ff9069b165498234ce8be658059c6728633667c526e27dc2cf1df5" +checksum = "e6e4313cd5fcd3dad5cafa179702e2b244f760991f45397d14d4ebf38247da75" [[package]] name = "unicode-segmentation" -version = "1.12.0" +version = "1.13.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f6ccf251212114b54433ec949fd6a7841275f9ada20dddd2f29e9ceea4501493" +checksum = "9629274872b2bfaf8d66f5f15725007f635594914870f65218920345aa11aa8c" [[package]] name = "unicode-width" @@ -6489,6 +6549,12 @@ version = "0.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b4ac048d71ede7ee76d585517add45da530660ef4390e49b098733c6e897f254" +[[package]] +name = "unicode-xid" +version = "0.2.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ebc1c04c71510c7f702b52b7c350734c9ff1295c464a03335b00bb84fc54f853" + [[package]] name = "unindent" version = "0.2.4" @@ -6529,14 +6595,15 @@ dependencies = [ [[package]] name = "url" -version = "2.5.7" +version = "2.5.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"08bc136a29a3d1758e07a9cca267be308aeebf5cfd5a10f3f67ab2097683ef5b" +checksum = "ff67a8a4397373c3ef660812acab3268222035010ab8680ec4215f38ba3d0eed" dependencies = [ "form_urlencoded", "idna", "percent-encoding", "serde", + "serde_derive", ] [[package]] @@ -6559,9 +6626,9 @@ checksum = "06abde3611657adf66d383f00b093d7faecc7fa57071cce2578660c9f1010821" [[package]] name = "uuid" -version = "1.19.0" +version = "1.23.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e2e054861b4bd027cd373e18e8d8d8e6548085000e41290d95ce0c373a654b4a" +checksum = "5ac8b6f42ead25368cf5b098aeb3dc8a1a2c05a3eee8a9a1a68c640edbfc79d9" dependencies = [ "js-sys", "serde_core", @@ -6625,18 +6692,27 @@ checksum = "ccf3ec651a847eb01de73ccad15eb7d99f80485de043efb2f370cd654f4ea44b" [[package]] name = "wasip2" -version = "1.0.1+wasi-0.2.4" +version = "1.0.2+wasi-0.2.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9517f9239f02c069db75e65f174b3da828fe5f5b945c4dd26bd25d89c03ebcf5" +dependencies = [ + "wit-bindgen", +] + +[[package]] +name = "wasip3" +version = "0.4.0+wasi-0.3.0-rc-2026-01-06" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0562428422c63773dad2c345a1882263bbf4d65cf3f42e90921f787ef5ad58e7" +checksum = "5428f8bf88ea5ddc08faddef2ac4a67e390b88186c703ce6dbd955e1c145aca5" dependencies = [ "wit-bindgen", ] [[package]] name = "wasm-bindgen" -version = "0.2.106" +version = "0.2.118" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0d759f433fa64a2d763d1340820e46e111a7a5ab75f993d1852d70b03dbb80fd" +checksum = "0bf938a0bacb0469e83c1e148908bd7d5a6010354cf4fb73279b7447422e3a89" dependencies = [ "cfg-if", "once_cell", @@ -6647,22 +6723,19 @@ dependencies = [ [[package]] name = "wasm-bindgen-futures" -version = "0.4.56" +version = "0.4.68" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "836d9622d604feee9e5de25ac10e3ea5f2d65b41eac0d9ce72eb5deae707ce7c" 
+checksum = "f371d383f2fb139252e0bfac3b81b265689bf45b6874af544ffa4c975ac1ebf8" dependencies = [ - "cfg-if", "js-sys", - "once_cell", "wasm-bindgen", - "web-sys", ] [[package]] name = "wasm-bindgen-macro" -version = "0.2.106" +version = "0.2.118" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "48cb0d2638f8baedbc542ed444afc0644a29166f1595371af4fecf8ce1e7eeb3" +checksum = "eeff24f84126c0ec2db7a449f0c2ec963c6a49efe0698c4242929da037ca28ed" dependencies = [ "quote", "wasm-bindgen-macro-support", @@ -6670,26 +6743,48 @@ dependencies = [ [[package]] name = "wasm-bindgen-macro-support" -version = "0.2.106" +version = "0.2.118" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cefb59d5cd5f92d9dcf80e4683949f15ca4b511f4ac0a6e14d4e1ac60c6ecd40" +checksum = "9d08065faf983b2b80a79fd87d8254c409281cf7de75fc4b773019824196c904" dependencies = [ "bumpalo", "proc-macro2", "quote", - "syn 2.0.111", + "syn 2.0.117", "wasm-bindgen-shared", ] [[package]] name = "wasm-bindgen-shared" -version = "0.2.106" +version = "0.2.118" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cbc538057e648b67f72a982e708d485b2efa771e1ac05fec311f9f63e5800db4" +checksum = "5fd04d9e306f1907bd13c6361b5c6bfc7b3b3c095ed3f8a9246390f8dbdee129" dependencies = [ "unicode-ident", ] +[[package]] +name = "wasm-encoder" +version = "0.244.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "990065f2fe63003fe337b932cfb5e3b80e0b4d0f5ff650e6985b1048f62c8319" +dependencies = [ + "leb128fmt", + "wasmparser", +] + +[[package]] +name = "wasm-metadata" +version = "0.244.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bb0e353e6a2fbdc176932bbaab493762eb1255a7900fe0fea1a2f96c296cc909" +dependencies = [ + "anyhow", + "indexmap", + "wasm-encoder", + "wasmparser", +] + [[package]] name = "wasm-streams" version = "0.4.2" @@ -6703,15 +6798,27 @@ dependencies = [ "web-sys", ] +[[package]] +name 
= "wasmparser" +version = "0.244.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "47b807c72e1bac69382b3a6fb3dbe8ea4c0ed87ff5629b8685ae6b9a611028fe" +dependencies = [ + "bitflags 2.11.0", + "hashbrown 0.15.5", + "indexmap", + "semver", +] + [[package]] name = "wayland-backend" -version = "0.3.11" +version = "0.3.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "673a33c33048a5ade91a6b139580fa174e19fb0d23f396dca9fa15f2e1e49b35" +checksum = "2857dd20b54e916ec7253b3d6b4d5c4d7d4ca2c33c2e11c6c76a99bd8744755d" dependencies = [ "cc", "downcast-rs", - "rustix 1.1.3", + "rustix 1.1.4", "scoped-tls", "smallvec", "wayland-sys", @@ -6719,12 +6826,12 @@ dependencies = [ [[package]] name = "wayland-client" -version = "0.31.11" +version = "0.31.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c66a47e840dc20793f2264eb4b3e4ecb4b75d91c0dd4af04b456128e0bdd449d" +checksum = "645c7c96bb74690c3189b5c9cb4ca1627062bb23693a4fad9d8c3de958260144" dependencies = [ - "bitflags 2.10.0", - "rustix 1.1.3", + "bitflags 2.11.0", + "rustix 1.1.4", "wayland-backend", "wayland-scanner", ] @@ -6735,29 +6842,29 @@ version = "0.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "625c5029dbd43d25e6aa9615e88b829a5cad13b2819c4ae129fdbb7c31ab4c7e" dependencies = [ - "bitflags 2.10.0", + "bitflags 2.11.0", "cursor-icon", "wayland-backend", ] [[package]] name = "wayland-cursor" -version = "0.31.11" +version = "0.31.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "447ccc440a881271b19e9989f75726d60faa09b95b0200a9b7eb5cc47c3eeb29" +checksum = "4a52d18780be9b1314328a3de5f930b73d2200112e3849ca6cb11822793fb34d" dependencies = [ - "rustix 1.1.3", + "rustix 1.1.4", "wayland-client", "xcursor", ] [[package]] name = "wayland-protocols" -version = "0.32.9" +version = "0.32.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"efa790ed75fbfd71283bd2521a1cfdc022aabcc28bdcff00851f9e4ae88d9901" +checksum = "563a85523cade2429938e790815fd7319062103b9f4a2dc806e9b53b95982d8f" dependencies = [ - "bitflags 2.10.0", + "bitflags 2.11.0", "wayland-backend", "wayland-client", "wayland-scanner", @@ -6769,7 +6876,7 @@ version = "20250721.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "40a1f863128dcaaec790d7b4b396cc9b9a7a079e878e18c47e6c2d2c5a8dcbb1" dependencies = [ - "bitflags 2.10.0", + "bitflags 2.11.0", "wayland-backend", "wayland-client", "wayland-protocols", @@ -6778,11 +6885,11 @@ dependencies = [ [[package]] name = "wayland-protocols-misc" -version = "0.3.9" +version = "0.3.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2dfe33d551eb8bffd03ff067a8b44bb963919157841a99957151299a6307d19c" +checksum = "6e9567599ef23e09b8dad6e429e5738d4509dfc46b3b21f32841a304d16b29c8" dependencies = [ - "bitflags 2.10.0", + "bitflags 2.11.0", "wayland-backend", "wayland-client", "wayland-protocols", @@ -6791,11 +6898,11 @@ dependencies = [ [[package]] name = "wayland-protocols-plasma" -version = "0.3.9" +version = "0.3.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a07a14257c077ab3279987c4f8bb987851bf57081b93710381daea94f2c2c032" +checksum = "2b6d8cf1eb2c1c31ed1f5643c88a6e53538129d4af80030c8cabd1f9fa884d91" dependencies = [ - "bitflags 2.10.0", + "bitflags 2.11.0", "wayland-backend", "wayland-client", "wayland-protocols", @@ -6804,11 +6911,11 @@ dependencies = [ [[package]] name = "wayland-protocols-wlr" -version = "0.3.9" +version = "0.3.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "efd94963ed43cf9938a090ca4f7da58eb55325ec8200c3848963e98dc25b78ec" +checksum = "eb04e52f7836d7c7976c78ca0250d61e33873c34156a2a1fc9474828ec268234" dependencies = [ - "bitflags 2.10.0", + "bitflags 2.11.0", "wayland-backend", "wayland-client", "wayland-protocols", @@ -6817,20 +6924,20 @@ dependencies = 
[ [[package]] name = "wayland-scanner" -version = "0.31.7" +version = "0.31.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "54cb1e9dc49da91950bdfd8b848c49330536d9d1fb03d4bfec8cae50caa50ae3" +checksum = "9c324a910fd86ebdc364a3e61ec1f11737d3b1d6c273c0239ee8ff4bc0d24b4a" dependencies = [ "proc-macro2", - "quick-xml 0.37.5", + "quick-xml 0.39.2", "quote", ] [[package]] name = "wayland-sys" -version = "0.31.7" +version = "0.31.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "34949b42822155826b41db8e5d0c1be3a2bd296c747577a43a3e6daefc296142" +checksum = "d8eab23fefc9e41f8e841df4a9c707e8a8c4ed26e944ef69297184de2785e3be" dependencies = [ "dlib", "log", @@ -6840,9 +6947,9 @@ dependencies = [ [[package]] name = "web-sys" -version = "0.3.83" +version = "0.3.95" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9b32828d774c412041098d182a8b38b16ea816958e07cf40eec2bc080ae137ac" +checksum = "4f2dfbb17949fa2088e5d39408c48368947b86f7834484e87b73de55bc14d97d" dependencies = [ "js-sys", "wasm-bindgen", @@ -6860,15 +6967,15 @@ dependencies = [ [[package]] name = "webbrowser" -version = "1.0.6" +version = "1.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "00f1243ef785213e3a32fa0396093424a3a6ea566f9948497e5a2309261a4c97" +checksum = "fe985f41e291eecef5e5c0770a18d28390addb03331c043964d9e916453d6f16" dependencies = [ "core-foundation 0.10.1", "jni", "log", "ndk-context", - "objc2 0.6.3", + "objc2 0.6.4", "objc2-foundation 0.3.2", "url", "web-sys", @@ -6887,7 +6994,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bfe68bac7cde125de7a731c3400723cadaaf1703795ad3f4805f187459cd7a77" dependencies = [ "arrayvec", - "bitflags 2.10.0", + "bitflags 2.11.0", "cfg-if", "cfg_aliases", "document-features", @@ -6918,7 +7025,7 @@ dependencies = [ "arrayvec", "bit-set", "bit-vec", - "bitflags 2.10.0", + "bitflags 2.11.0", "bytemuck", 
"cfg_aliases", "document-features", @@ -6933,7 +7040,7 @@ dependencies = [ "raw-window-handle", "rustc-hash 1.1.0", "smallvec", - "thiserror 2.0.17", + "thiserror 2.0.18", "wgpu-core-deps-apple", "wgpu-core-deps-emscripten", "wgpu-core-deps-windows-linux-android", @@ -6978,7 +7085,7 @@ dependencies = [ "arrayvec", "ash", "bit-set", - "bitflags 2.10.0", + "bitflags 2.11.0", "block", "bytemuck", "cfg-if", @@ -7000,7 +7107,7 @@ dependencies = [ "ndk-sys", "objc", "once_cell", - "ordered-float 5.1.0", + "ordered-float 5.3.0", "parking_lot", "portable-atomic", "portable-atomic-util", @@ -7009,7 +7116,7 @@ dependencies = [ "raw-window-handle", "renderdoc-sys", "smallvec", - "thiserror 2.0.17", + "thiserror 2.0.18", "wasm-bindgen", "web-sys", "wgpu-types", @@ -7023,30 +7130,14 @@ version = "27.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "afdcf84c395990db737f2dd91628706cb31e86d72e53482320d368e52b5da5eb" dependencies = [ - "bitflags 2.10.0", + "bitflags 2.11.0", "bytemuck", "js-sys", "log", - "thiserror 2.0.17", + "thiserror 2.0.18", "web-sys", ] -[[package]] -name = "winapi" -version = "0.3.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419" -dependencies = [ - "winapi-i686-pc-windows-gnu", - "winapi-x86_64-pc-windows-gnu", -] - -[[package]] -name = "winapi-i686-pc-windows-gnu" -version = "0.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6" - [[package]] name = "winapi-util" version = "0.1.11" @@ -7056,12 +7147,6 @@ dependencies = [ "windows-sys 0.61.2", ] -[[package]] -name = "winapi-x86_64-pc-windows-gnu" -version = "0.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" - [[package]] name = "windows" version = "0.58.0" @@ -7152,7 +7237,7 @@ 
checksum = "2bbd5b46c938e506ecbce286b6628a02171d56153ba733b6c741fc627ec9579b" dependencies = [ "proc-macro2", "quote", - "syn 2.0.111", + "syn 2.0.117", ] [[package]] @@ -7163,7 +7248,7 @@ checksum = "053e2e040ab57b9dc951b72c264860db7eb3b0200ba345b4e4c3b14f67855ddf" dependencies = [ "proc-macro2", "quote", - "syn 2.0.111", + "syn 2.0.117", ] [[package]] @@ -7174,7 +7259,7 @@ checksum = "053c4c462dc91d3b1504c6fe5a726dd15e216ba718e84a0e46a88fbe5ded3515" dependencies = [ "proc-macro2", "quote", - "syn 2.0.111", + "syn 2.0.117", ] [[package]] @@ -7185,7 +7270,7 @@ checksum = "3f316c4a2570ba26bbec722032c4099d8c8bc095efccdc15688708623367e358" dependencies = [ "proc-macro2", "quote", - "syn 2.0.111", + "syn 2.0.117", ] [[package]] @@ -7265,15 +7350,6 @@ dependencies = [ "windows-link 0.2.1", ] -[[package]] -name = "windows-sys" -version = "0.45.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "75283be5efb2831d37ea142365f009c02ec203cd29a3ebecbc093d52315b66d0" -dependencies = [ - "windows-targets 0.42.2", -] - [[package]] name = "windows-sys" version = "0.52.0" @@ -7310,21 +7386,6 @@ dependencies = [ "windows-link 0.2.1", ] -[[package]] -name = "windows-targets" -version = "0.42.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8e5180c00cd44c9b1c88adb3693291f1cd93605ded80c250a75d472756b4d071" -dependencies = [ - "windows_aarch64_gnullvm 0.42.2", - "windows_aarch64_msvc 0.42.2", - "windows_i686_gnu 0.42.2", - "windows_i686_msvc 0.42.2", - "windows_x86_64_gnu 0.42.2", - "windows_x86_64_gnullvm 0.42.2", - "windows_x86_64_msvc 0.42.2", -] - [[package]] name = "windows-targets" version = "0.52.6" @@ -7367,12 +7428,6 @@ dependencies = [ "windows-link 0.1.3", ] -[[package]] -name = "windows_aarch64_gnullvm" -version = "0.42.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "597a5118570b68bc08d8d59125332c54f1ba9d9adeedeef5b99b02ba2b0698f8" - [[package]] name = "windows_aarch64_gnullvm" 
version = "0.52.6" @@ -7385,12 +7440,6 @@ version = "0.53.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a9d8416fa8b42f5c947f8482c43e7d89e73a173cead56d044f6a56104a6d1b53" -[[package]] -name = "windows_aarch64_msvc" -version = "0.42.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e08e8864a60f06ef0d0ff4ba04124db8b0fb3be5776a5cd47641e942e58c4d43" - [[package]] name = "windows_aarch64_msvc" version = "0.52.6" @@ -7403,12 +7452,6 @@ version = "0.53.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b9d782e804c2f632e395708e99a94275910eb9100b2114651e04744e9b125006" -[[package]] -name = "windows_i686_gnu" -version = "0.42.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c61d927d8da41da96a81f029489353e68739737d3beca43145c8afec9a31a84f" - [[package]] name = "windows_i686_gnu" version = "0.52.6" @@ -7433,12 +7476,6 @@ version = "0.53.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fa7359d10048f68ab8b09fa71c3daccfb0e9b559aed648a8f95469c27057180c" -[[package]] -name = "windows_i686_msvc" -version = "0.42.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "44d840b6ec649f480a41c8d80f9c65108b92d89345dd94027bfe06ac444d1060" - [[package]] name = "windows_i686_msvc" version = "0.52.6" @@ -7451,12 +7488,6 @@ version = "0.53.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1e7ac75179f18232fe9c285163565a57ef8d3c89254a30685b57d83a38d326c2" -[[package]] -name = "windows_x86_64_gnu" -version = "0.42.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8de912b8b8feb55c064867cf047dda097f92d51efad5b491dfb98f6bbb70cb36" - [[package]] name = "windows_x86_64_gnu" version = "0.52.6" @@ -7469,12 +7500,6 @@ version = "0.53.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"9c3842cdd74a865a8066ab39c8a7a473c0778a3f29370b5fd6b4b9aa7df4a499" -[[package]] -name = "windows_x86_64_gnullvm" -version = "0.42.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "26d41b46a36d453748aedef1486d5c7a85db22e56aff34643984ea85514e94a3" - [[package]] name = "windows_x86_64_gnullvm" version = "0.52.6" @@ -7487,12 +7512,6 @@ version = "0.53.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0ffa179e2d07eee8ad8f57493436566c7cc30ac536a3379fdf008f47f6bb7ae1" -[[package]] -name = "windows_x86_64_msvc" -version = "0.42.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9aec5da331524158c6d1a4ac0ab1541149c0b9505fde06423b02f5ef0106b9f0" - [[package]] name = "windows_x86_64_msvc" version = "0.52.6" @@ -7507,14 +7526,14 @@ checksum = "d6bbff5f0aada427a1e5a6da5f1f98158182f26556f345ac9e04d36d0ebed650" [[package]] name = "winit" -version = "0.30.12" +version = "0.30.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c66d4b9ed69c4009f6321f762d6e61ad8a2389cd431b97cb1e146812e9e6c732" +checksum = "a6755fa58a9f8350bd1e472d4c3fcc25f824ec358933bba33306d0b63df5978d" dependencies = [ "ahash", "android-activity", "atomic-waker", - "bitflags 2.10.0", + "bitflags 2.11.0", "block2 0.5.1", "bytemuck", "calloop 0.13.0", @@ -7559,24 +7578,115 @@ dependencies = [ [[package]] name = "winnow" -version = "0.7.14" +version = "0.7.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "df79d97927682d2fd8adb29682d1140b343be4ac0f08fd68b7765d9c059d3945" +dependencies = [ + "memchr", +] + +[[package]] +name = "winnow" +version = "1.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5a5364e9d77fcdeeaa6062ced926ee3381faa2ee02d3eb83a5c27a8825540829" +checksum = "09dac053f1cd375980747450bfc7250c264eaae0583872e845c0c7cd578872b5" dependencies = [ "memchr", ] [[package]] name = "wit-bindgen" -version = "0.46.0" +version = 
"0.51.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d7249219f66ced02969388cf2bb044a09756a083d0fab1e566056b04d9fbcaa5" +dependencies = [ + "wit-bindgen-rust-macro", +] + +[[package]] +name = "wit-bindgen-core" +version = "0.51.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ea61de684c3ea68cb082b7a88508a8b27fcc8b797d738bfc99a82facf1d752dc" +dependencies = [ + "anyhow", + "heck", + "wit-parser", +] + +[[package]] +name = "wit-bindgen-rust" +version = "0.51.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b7c566e0f4b284dd6561c786d9cb0142da491f46a9fbed79ea69cdad5db17f21" +dependencies = [ + "anyhow", + "heck", + "indexmap", + "prettyplease", + "syn 2.0.117", + "wasm-metadata", + "wit-bindgen-core", + "wit-component", +] + +[[package]] +name = "wit-bindgen-rust-macro" +version = "0.51.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0c0f9bfd77e6a48eccf51359e3ae77140a7f50b1e2ebfe62422d8afdaffab17a" +dependencies = [ + "anyhow", + "prettyplease", + "proc-macro2", + "quote", + "syn 2.0.117", + "wit-bindgen-core", + "wit-bindgen-rust", +] + +[[package]] +name = "wit-component" +version = "0.244.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f17a85883d4e6d00e8a97c586de764dabcc06133f7f1d55dce5cdc070ad7fe59" +checksum = "9d66ea20e9553b30172b5e831994e35fbde2d165325bec84fc43dbf6f4eb9cb2" +dependencies = [ + "anyhow", + "bitflags 2.11.0", + "indexmap", + "log", + "serde", + "serde_derive", + "serde_json", + "wasm-encoder", + "wasm-metadata", + "wasmparser", + "wit-parser", +] + +[[package]] +name = "wit-parser" +version = "0.244.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ecc8ac4bc1dc3381b7f59c34f00b67e18f910c2c0f50015669dde7def656a736" +dependencies = [ + "anyhow", + "id-arena", + "indexmap", + "log", + "semver", + "serde", + "serde_derive", + "serde_json", + "unicode-xid", + 
"wasmparser", +] [[package]] name = "writeable" -version = "0.6.2" +version = "0.6.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9edde0db4769d2dc68579893f2306b26c6ecfbe0ef499b013d731b7b9247e0b9" +checksum = "1ffae5123b2d3fc086436f8834ae3ab053a283cfac8fe0a0b8eaae044768a4c4" [[package]] name = "x11-dl" @@ -7600,7 +7710,7 @@ dependencies = [ "libc", "libloading", "once_cell", - "rustix 1.1.3", + "rustix 1.1.4", "x11rb-protocol", ] @@ -7622,7 +7732,7 @@ version = "0.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d039de8032a9a8856a6be89cea3e5d12fdd82306ab7c94d74e6deab2460651c5" dependencies = [ - "bitflags 2.10.0", + "bitflags 2.11.0", "dlib", "log", "once_cell", @@ -7655,9 +7765,9 @@ checksum = "e01738255b5a16e78bbb83e7fbba0a1e7dd506905cfc53f4622d89015a03fbb5" [[package]] name = "yoke" -version = "0.8.1" +version = "0.8.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "72d6e5c6afb84d73944e5cedb052c4680d5657337201555f9f2a16b7406d4954" +checksum = "abe8c5fda708d9ca3df187cae8bfb9ceda00dd96231bed36e445a1a48e66f9ca" dependencies = [ "stable_deref_trait", "yoke-derive", @@ -7666,21 +7776,21 @@ dependencies = [ [[package]] name = "yoke-derive" -version = "0.8.1" +version = "0.8.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b659052874eb698efe5b9e8cf382204678a0086ebf46982b79d6ca3182927e5d" +checksum = "de844c262c8848816172cef550288e7dc6c7b7814b4ee56b3e1553f275f1858e" dependencies = [ "proc-macro2", "quote", - "syn 2.0.111", + "syn 2.0.117", "synstructure", ] [[package]] name = "zbus" -version = "5.12.0" +version = "5.14.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b622b18155f7a93d1cd2dc8c01d2d6a44e08fb9ebb7b3f9e6ed101488bad6c91" +checksum = "ca82f95dbd3943a40a53cfded6c2d0a2ca26192011846a1810c4256ef92c60bc" dependencies = [ "async-broadcast", "async-executor", @@ -7696,15 +7806,16 @@ dependencies = [ 
"futures-core", "futures-lite", "hex", - "nix 0.30.1", + "libc", "ordered-stream", + "rustix 1.1.4", "serde", "serde_repr", "tracing", "uds_windows", "uuid", "windows-sys 0.61.2", - "winnow", + "winnow 0.7.15", "zbus_macros", "zbus_names", "zvariant", @@ -7728,7 +7839,7 @@ checksum = "10da05367f3a7b7553c8cdf8fa91aee6b64afebe32b51c95177957efc47ca3a0" dependencies = [ "proc-macro2", "quote", - "syn 2.0.111", + "syn 2.0.117", "zbus-lockstep", "zbus_xml", "zvariant", @@ -7736,14 +7847,14 @@ dependencies = [ [[package]] name = "zbus_macros" -version = "5.12.0" +version = "5.14.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1cdb94821ca8a87ca9c298b5d1cbd80e2a8b67115d99f6e4551ac49e42b6a314" +checksum = "897e79616e84aac4b2c46e9132a4f63b93105d54fe8c0e8f6bffc21fa8d49222" dependencies = [ "proc-macro-crate", "proc-macro2", "quote", - "syn 2.0.111", + "syn 2.0.117", "zbus_names", "zvariant", "zvariant_utils", @@ -7751,25 +7862,23 @@ dependencies = [ [[package]] name = "zbus_names" -version = "4.2.0" +version = "4.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7be68e64bf6ce8db94f63e72f0c7eb9a60d733f7e0499e628dfab0f84d6bcb97" +checksum = "ffd8af6d5b78619bab301ff3c560a5bd22426150253db278f164d6cf3b72c50f" dependencies = [ "serde", - "static_assertions", - "winnow", + "winnow 0.7.15", "zvariant", ] [[package]] name = "zbus_xml" -version = "5.0.2" +version = "5.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "589e9a02bfafb9754bb2340a9e3b38f389772684c63d9637e76b1870377bec29" +checksum = "441a0064125265655bccc3a6af6bef56814d9277ac83fce48b1cd7e160b80eac" dependencies = [ - "quick-xml 0.36.2", + "quick-xml 0.38.4", "serde", - "static_assertions", "zbus_names", "zvariant", ] @@ -7782,42 +7891,42 @@ checksum = "6df3dc4292935e51816d896edcd52aa30bc297907c26167fec31e2b0c6a32524" [[package]] name = "zerocopy" -version = "0.8.31" +version = "0.8.48" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "fd74ec98b9250adb3ca554bdde269adf631549f51d8a8f8f0a10b50f1cb298c3" +checksum = "eed437bf9d6692032087e337407a86f04cd8d6a16a37199ed57949d415bd68e9" dependencies = [ "zerocopy-derive", ] [[package]] name = "zerocopy-derive" -version = "0.8.31" +version = "0.8.48" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d8a8d209fdf45cf5138cbb5a506f6b52522a25afccc534d1475dad8e31105c6a" +checksum = "70e3cd084b1788766f53af483dd21f93881ff30d7320490ec3ef7526d203bad4" dependencies = [ "proc-macro2", "quote", - "syn 2.0.111", + "syn 2.0.117", ] [[package]] name = "zerofrom" -version = "0.1.6" +version = "0.1.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "50cc42e0333e05660c3587f3bf9d0478688e15d870fab3346451ce7f8c9fbea5" +checksum = "69faa1f2a1ea75661980b013019ed6687ed0e83d069bc1114e2cc74c6c04c4df" dependencies = [ "zerofrom-derive", ] [[package]] name = "zerofrom-derive" -version = "0.1.6" +version = "0.1.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d71e5d6e06ab090c67b5e44993ec16b72dcbaabc526db883a360057678b48502" +checksum = "11532158c46691caf0f2593ea8358fed6bbf68a0315e80aae9bd41fbade684a1" dependencies = [ "proc-macro2", "quote", - "syn 2.0.111", + "syn 2.0.117", "synstructure", ] @@ -7829,9 +7938,9 @@ checksum = "b97154e67e32c85465826e8bcc1c59429aaaf107c1e4a9e53c8d8ccd5eff88d0" [[package]] name = "zerotrie" -version = "0.2.3" +version = "0.2.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2a59c17a5562d507e4b54960e8569ebee33bee890c70aa3fe7b97e85a9fd7851" +checksum = "0f9152d31db0792fa83f70fb2f83148effb5c1f5b8c7686c3459e361d9bc20bf" dependencies = [ "displaydoc 0.2.5", "yoke", @@ -7840,9 +7949,9 @@ dependencies = [ [[package]] name = "zerovec" -version = "0.11.5" +version = "0.11.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"6c28719294829477f525be0186d13efa9a3c602f7ec202ca9e353d310fb9a002" +checksum = "90f911cbc359ab6af17377d242225f4d75119aec87ea711a880987b18cd7b239" dependencies = [ "yoke", "zerofrom", @@ -7851,26 +7960,26 @@ dependencies = [ [[package]] name = "zerovec-derive" -version = "0.11.2" +version = "0.11.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eadce39539ca5cb3985590102671f2567e659fca9666581ad3411d59207951f3" +checksum = "625dc425cab0dca6dc3c3319506e6593dcb08a9f387ea3b284dbd52a92c40555" dependencies = [ "proc-macro2", "quote", - "syn 2.0.111", + "syn 2.0.117", ] [[package]] name = "zlib-rs" -version = "0.5.5" +version = "0.6.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "40990edd51aae2c2b6907af74ffb635029d5788228222c4bb811e9351c0caad3" +checksum = "3be3d40e40a133f9c916ee3f9f4fa2d9d63435b5fbe1bfc6d9dae0aa0ada1513" [[package]] name = "zmij" -version = "0.1.7" +version = "1.0.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9e404bcd8afdaf006e529269d3e85a743f9480c3cef60034d77860d02964f3ba" +checksum = "b8848ee67ecc8aedbaf3e4122217aff892639231befc6a1b58d29fff4c2cabaa" [[package]] name = "zstd" @@ -7902,56 +8011,56 @@ dependencies = [ [[package]] name = "zune-core" -version = "0.4.12" +version = "0.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3f423a2c17029964870cfaabb1f13dfab7d092a62a29a89264f4d36990ca414a" +checksum = "cb8a0807f7c01457d0379ba880ba6322660448ddebc890ce29bb64da71fb40f9" [[package]] name = "zune-jpeg" -version = "0.4.21" +version = "0.5.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "29ce2c8a9384ad323cf564b67da86e21d3cfdff87908bc1223ed5c99bc792713" +checksum = "27bc9d5b815bc103f142aa054f561d9187d191692ec7c2d1e2b4737f8dbd7296" dependencies = [ "zune-core", ] [[package]] name = "zvariant" -version = "5.8.0" +version = "5.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" 
-checksum = "2be61892e4f2b1772727be11630a62664a1826b62efa43a6fe7449521cb8744c" +checksum = "5708299b21903bbe348e94729f22c49c55d04720a004aa350f1f9c122fd2540b" dependencies = [ "endi", "enumflags2", "serde", "url", - "winnow", + "winnow 0.7.15", "zvariant_derive", "zvariant_utils", ] [[package]] name = "zvariant_derive" -version = "5.8.0" +version = "5.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "da58575a1b2b20766513b1ec59d8e2e68db2745379f961f86650655e862d2006" +checksum = "5b59b012ebe9c46656f9cc08d8da8b4c726510aef12559da3e5f1bf72780752c" dependencies = [ "proc-macro-crate", "proc-macro2", "quote", - "syn 2.0.111", + "syn 2.0.117", "zvariant_utils", ] [[package]] name = "zvariant_utils" -version = "3.2.1" +version = "3.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c6949d142f89f6916deca2232cf26a8afacf2b9fdc35ce766105e104478be599" +checksum = "f75c23a64ef8f40f13a6989991e643554d9bef1d682a281160cf0c1bc389c5e9" dependencies = [ "proc-macro2", "quote", "serde", - "syn 2.0.111", - "winnow", + "syn 2.0.117", + "winnow 0.7.15", ] From 8c7a1f934a47ce70ae4a15baf4c0a41ce18f831d Mon Sep 17 00:00:00 2001 From: "J. 
Sebastian Paez" Date: Sat, 11 Apr 2026 14:05:18 -0700 Subject: [PATCH 55/64] chore: update uv lock --- uv.lock | 571 -------------------------------------------------------- 1 file changed, 571 deletions(-) diff --git a/uv.lock b/uv.lock index 20e3174..9c061d2 100644 --- a/uv.lock +++ b/uv.lock @@ -9,27 +9,9 @@ resolution-markers = [ [manifest] members = [ "speclib-builder", - "timsseek-rescore", - "timsseek-rts-receiver", "timsseek-workspace", ] -[[package]] -name = "altair" -version = "6.0.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "jinja2" }, - { name = "jsonschema" }, - { name = "narwhals" }, - { name = "packaging" }, - { name = "typing-extensions" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/f7/c0/184a89bd5feba14ff3c41cfaf1dd8a82c05f5ceedbc92145e17042eb08a4/altair-6.0.0.tar.gz", hash = "sha256:614bf5ecbe2337347b590afb111929aa9c16c9527c4887d96c9bc7f6640756b4", size = 763834, upload-time = "2025-11-12T08:59:11.519Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/db/33/ef2f2409450ef6daa61459d5de5c08128e7d3edb773fefd0a324d1310238/altair-6.0.0-py3-none-any.whl", hash = "sha256:09ae95b53d5fe5b16987dccc785a7af8588f2dca50de1e7a156efa8a461515f8", size = 795410, upload-time = "2025-11-12T08:59:09.804Z" }, -] - [[package]] name = "annotated-types" version = "0.7.0" @@ -173,15 +155,6 @@ css = [ { name = "tinycss2" }, ] -[[package]] -name = "blinker" -version = "1.9.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/21/28/9b3f50ce0e048515135495f198351908d99540d69bfdc8c1d15b73dc55ce/blinker-1.9.0.tar.gz", hash = "sha256:b4ce2265a7abece45e7cc896e98dbebe6cead56bcf805a3d23136d145f5445bf", size = 22460, upload-time = "2024-11-08T17:25:47.436Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/10/cb/f2ad4230dc2eb1a74edf38f1a38b9b52277f75bef262d8908e60d957e13c/blinker-1.9.0-py3-none-any.whl", hash = 
"sha256:ba0efaa9080b619ff2f3459d1d500c57bddea4a6b424b60a91141db6fd2f08bc", size = 8458, upload-time = "2024-11-08T17:25:46.184Z" }, -] - [[package]] name = "boto3" version = "1.42.30" @@ -225,15 +198,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/1d/5b/2d5ea6802495ee4506721977be522804314aa66ad629d9356e3c7e5af4a6/bumpver-2025.1131-py2.py3-none-any.whl", hash = "sha256:c02527f6ed7887afbc06c07630047b24a9f9d02d544a65639e99bf8b92aaa674", size = 65361, upload-time = "2025-07-02T20:36:10.103Z" }, ] -[[package]] -name = "cachetools" -version = "6.2.4" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/bc/1d/ede8680603f6016887c062a2cf4fc8fdba905866a3ab8831aa8aa651320c/cachetools-6.2.4.tar.gz", hash = "sha256:82c5c05585e70b6ba2d3ae09ea60b79548872185d2f24ae1f2709d37299fd607", size = 31731, upload-time = "2025-12-15T18:24:53.744Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/2c/fc/1d7b80d0eb7b714984ce40efc78859c022cd930e402f599d8ca9e39c78a4/cachetools-6.2.4-py3-none-any.whl", hash = "sha256:69a7a52634fed8b8bf6e24a050fb60bff1c9bd8f6d24572b99c32d4e71e62a51", size = 11551, upload-time = "2025-12-15T18:24:52.332Z" }, -] - [[package]] name = "certifi" version = "2026.1.4" @@ -593,39 +557,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/cf/58/8acf1b3e91c58313ce5cb67df61001fc9dcd21be4fadb76c1a2d540e09ed/fqdn-1.5.1-py3-none-any.whl", hash = "sha256:3a179af3761e4df6eb2e026ff9e1a3033d3587bf980a0b1b2e1e5d08d7358014", size = 9121, upload-time = "2021-03-11T07:16:28.351Z" }, ] -[[package]] -name = "fsspec" -version = "2026.1.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/d5/7d/5df2650c57d47c57232af5ef4b4fdbff182070421e405e0d62c6cdbfaa87/fsspec-2026.1.0.tar.gz", hash = "sha256:e987cb0496a0d81bba3a9d1cee62922fb395e7d4c3b575e57f547953334fe07b", size = 310496, upload-time = "2026-01-09T15:21:35.562Z" } -wheels = [ - { 
url = "https://files.pythonhosted.org/packages/01/c9/97cc5aae1648dcb851958a3ddf73ccd7dbe5650d95203ecb4d7720b4cdbf/fsspec-2026.1.0-py3-none-any.whl", hash = "sha256:cb76aa913c2285a3b49bdd5fc55b1d7c708d7208126b60f2eb8194fe1b4cbdcc", size = 201838, upload-time = "2026-01-09T15:21:34.041Z" }, -] - -[[package]] -name = "gitdb" -version = "4.0.12" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "smmap" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/72/94/63b0fc47eb32792c7ba1fe1b694daec9a63620db1e313033d18140c2320a/gitdb-4.0.12.tar.gz", hash = "sha256:5ef71f855d191a3326fcfbc0d5da835f26b13fbcba60c32c21091c349ffdb571", size = 394684, upload-time = "2025-01-02T07:20:46.413Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/a0/61/5c78b91c3143ed5c14207f463aecfc8f9dbb5092fb2869baf37c273b2705/gitdb-4.0.12-py3-none-any.whl", hash = "sha256:67073e15955400952c6565cc3e707c554a4eea2e428946f7a4c162fab9bd9bcf", size = 62794, upload-time = "2025-01-02T07:20:43.624Z" }, -] - -[[package]] -name = "gitpython" -version = "3.1.46" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "gitdb" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/df/b5/59d16470a1f0dfe8c793f9ef56fd3826093fc52b3bd96d6b9d6c26c7e27b/gitpython-3.1.46.tar.gz", hash = "sha256:400124c7d0ef4ea03f7310ac2fbf7151e09ff97f2a3288d64a440c584a29c37f", size = 215371, upload-time = "2026-01-01T15:37:32.073Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/6a/09/e21df6aef1e1ffc0c816f0522ddc3f6dcded766c3261813131c78a704470/gitpython-3.1.46-py3-none-any.whl", hash = "sha256:79812ed143d9d25b6d176a10bb511de0f9c67b1fa641d82097b0ab90398a2058", size = 208620, upload-time = "2026-01-01T15:37:30.574Z" }, -] - [[package]] name = "h11" version = "0.16.0" @@ -693,18 +624,6 @@ wheels = [ { url = 
"https://files.pythonhosted.org/packages/0e/61/66938bbb5fc52dbdf84594873d5b51fb1f7c7794e9c0f5bd885f30bc507b/idna-3.11-py3-none-any.whl", hash = "sha256:771a87f49d9defaf64091e6e6fe9c18d4833f140bd19464795bc32d966ca37ea", size = 71008, upload-time = "2025-10-12T14:55:18.883Z" }, ] -[[package]] -name = "importlib-metadata" -version = "8.7.1" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "zipp" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/f3/49/3b30cad09e7771a4982d9975a8cbf64f00d4a1ececb53297f1d9a7be1b10/importlib_metadata-8.7.1.tar.gz", hash = "sha256:49fef1ae6440c182052f407c8d34a68f72efc36db9ca90dc0113398f2fdde8bb", size = 57107, upload-time = "2025-12-21T10:00:19.278Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/fa/5e/f8e9a1d23b9c20a551a8a02ea3637b4642e22c2626e3a13a9a29cdea99eb/importlib_metadata-8.7.1-py3-none-any.whl", hash = "sha256:5a1f80bf1daa489495071efbb095d75a634cf28a8bc299581244063b53176151", size = 27865, upload-time = "2025-12-21T10:00:18.329Z" }, -] - [[package]] name = "iniconfig" version = "2.3.0" @@ -833,28 +752,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/31/b4/b9b800c45527aadd64d5b442f9b932b00648617eb5d63d2c7a6587b7cafc/jmespath-1.0.1-py3-none-any.whl", hash = "sha256:02e2e4cc71b5bcab88332eebf907519190dd9e6e82107fa7f83b1003a6252980", size = 20256, upload-time = "2022-06-17T18:00:10.251Z" }, ] -[[package]] -name = "job-pool" -version = "0.3.4" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "threadpoolctl" }, - { name = "tqdm" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/d9/ab/d6e9b1639e02276932da473a80b6d21ef524edc3743cc76ed44010b88b48/job_pool-0.3.4.tar.gz", hash = "sha256:7f88bffe74b4ed771ae67c855b1df482597c8e29057f6e7ac2fa6bc447b235ee", size = 11625, upload-time = "2025-04-04T12:30:19.169Z" } -wheels = [ - { url = 
"https://files.pythonhosted.org/packages/a4/d8/06223bf9af44df259c5b38eea3f36712aab0d9f2f80ede73d9e890835ccb/job_pool-0.3.4-py3-none-any.whl", hash = "sha256:9f00e000a2521c7570c67a72d67902ec2e973801020fbf1e4b0e3744429684b0", size = 9922, upload-time = "2025-04-04T12:30:17.896Z" }, -] - -[[package]] -name = "joblib" -version = "1.5.3" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/41/f2/d34e8b3a08a9cc79a50b2208a93dce981fe615b64d5a4d4abee421d898df/joblib-1.5.3.tar.gz", hash = "sha256:8561a3269e6801106863fd0d6d84bb737be9e7631e33aaed3fb9ce5953688da3", size = 331603, upload-time = "2025-12-15T08:41:46.427Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/7b/91/984aca2ec129e2757d1e4e3c81c3fcda9d0f85b74670a094cc443d9ee949/joblib-1.5.3-py3-none-any.whl", hash = "sha256:5fc3c5039fc5ca8c0276333a188bbd59d6b7ab37fe6632daa76bc7f9ec18e713", size = 309071, upload-time = "2025-12-15T08:41:44.973Z" }, -] - [[package]] name = "json5" version = "0.13.0" @@ -1326,23 +1223,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/16/2e/9acc86985bfad8f2c2d30291b27cd2bb4c74cea08695bd540906ed744249/ml_dtypes-0.5.4-cp312-cp312-win_arm64.whl", hash = "sha256:9bad06436568442575beb2d03389aa7456c690a5b05892c471215bfd8cf39460", size = 160793, upload-time = "2025-11-17T22:31:55.358Z" }, ] -[[package]] -name = "mokapot" -version = "0.7.2.dev58+gc04c148" -source = { git = "https://github.com/jspaezp/mokapot.git?branch=feat%2Fre_add_confidence_api#c04c14856e66c92f416d7f0304173d8c077ccc11" } -dependencies = [ - { name = "importlib-metadata" }, - { name = "joblib" }, - { name = "numpy" }, - { name = "pandas" }, - { name = "pyarrow" }, - { name = "scikit-learn" }, - { name = "scipy" }, - { name = "six" }, - { name = "triqler" }, - { name = "typeguard" }, -] - [[package]] name = "mpmath" version = "1.3.0" @@ -1378,15 +1258,6 @@ wheels = [ { url = 
"https://files.pythonhosted.org/packages/c5/31/5b1a1f70eb0e87d1678e9624908f86317787b536060641d6798e3cf70ace/msgpack-1.1.2-cp312-cp312-win_arm64.whl", hash = "sha256:be5980f3ee0e6bd44f3a9e9dea01054f175b50c3e6cdb692bc9424c0bbb8bf69", size = 64119, upload-time = "2025-10-08T09:15:13.589Z" }, ] -[[package]] -name = "narwhals" -version = "2.15.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/47/6d/b57c64e5038a8cf071bce391bb11551657a74558877ac961e7fa905ece27/narwhals-2.15.0.tar.gz", hash = "sha256:a9585975b99d95084268445a1fdd881311fa26ef1caa18020d959d5b2ff9a965", size = 603479, upload-time = "2026-01-06T08:10:13.27Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/3d/2e/cf2ffeb386ac3763526151163ad7da9f1b586aac96d2b4f7de1eaebf0c61/narwhals-2.15.0-py3-none-any.whl", hash = "sha256:cbfe21ca19d260d9fd67f995ec75c44592d1f106933b03ddd375df7ac841f9d6", size = 432856, upload-time = "2026-01-06T08:10:11.511Z" }, -] - [[package]] name = "nbclient" version = "0.10.4" @@ -1451,15 +1322,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/a0/c4/c2971a3ba4c6103a3d10c4b0f24f461ddc027f0f09763220cf35ca1401b3/nest_asyncio-1.6.0-py3-none-any.whl", hash = "sha256:87af6efd6b5e897c81050477ef65c62e2b2f35d51703cae01aff2905b1852e1c", size = 5195, upload-time = "2024-01-21T14:25:17.223Z" }, ] -[[package]] -name = "networkx" -version = "3.6.1" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/6a/51/63fe664f3908c97be9d2e4f1158eb633317598cfa6e1fc14af5383f17512/networkx-3.6.1.tar.gz", hash = "sha256:26b7c357accc0c8cde558ad486283728b65b6a95d85ee1cd66bafab4c8168509", size = 2517025, upload-time = "2025-12-08T17:02:39.908Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/9e/c9/b2622292ea83fbb4ec318f5b9ab867d0a28ab43c5717bb85b0a5f6b3b0a4/networkx-3.6.1-py3-none-any.whl", hash = 
"sha256:d47fbf302e7d9cbbb9e2555a0d267983d2aa476bac30e90dfbe5669bd57f3762", size = 2068504, upload-time = "2025-12-08T17:02:38.159Z" }, -] - [[package]] name = "nodeenv" version = "1.10.0" @@ -1534,141 +1396,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/5b/c7/b801bf98514b6ae6475e941ac05c58e6411dd863ea92916bfd6d510b08c1/numpy-2.4.1-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:4f1b68ff47680c2925f8063402a693ede215f0257f02596b1318ecdfb1d79e33", size = 12492579, upload-time = "2026-01-10T06:44:57.094Z" }, ] -[[package]] -name = "nvidia-cublas-cu12" -version = "12.8.4.1" -source = { registry = "https://pypi.org/simple" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/dc/61/e24b560ab2e2eaeb3c839129175fb330dfcfc29e5203196e5541a4c44682/nvidia_cublas_cu12-12.8.4.1-py3-none-manylinux_2_27_x86_64.whl", hash = "sha256:8ac4e771d5a348c551b2a426eda6193c19aa630236b418086020df5ba9667142", size = 594346921, upload-time = "2025-03-07T01:44:31.254Z" }, -] - -[[package]] -name = "nvidia-cuda-cupti-cu12" -version = "12.8.90" -source = { registry = "https://pypi.org/simple" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/f8/02/2adcaa145158bf1a8295d83591d22e4103dbfd821bcaf6f3f53151ca4ffa/nvidia_cuda_cupti_cu12-12.8.90-py3-none-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:ea0cb07ebda26bb9b29ba82cda34849e73c166c18162d3913575b0c9db9a6182", size = 10248621, upload-time = "2025-03-07T01:40:21.213Z" }, -] - -[[package]] -name = "nvidia-cuda-nvrtc-cu12" -version = "12.8.93" -source = { registry = "https://pypi.org/simple" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/05/6b/32f747947df2da6994e999492ab306a903659555dddc0fbdeb9d71f75e52/nvidia_cuda_nvrtc_cu12-12.8.93-py3-none-manylinux2010_x86_64.manylinux_2_12_x86_64.whl", hash = "sha256:a7756528852ef889772a84c6cd89d41dfa74667e24cca16bb31f8f061e3e9994", size = 88040029, upload-time = "2025-03-07T01:42:13.562Z" }, -] - -[[package]] -name = 
"nvidia-cuda-runtime-cu12" -version = "12.8.90" -source = { registry = "https://pypi.org/simple" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/0d/9b/a997b638fcd068ad6e4d53b8551a7d30fe8b404d6f1804abf1df69838932/nvidia_cuda_runtime_cu12-12.8.90-py3-none-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:adade8dcbd0edf427b7204d480d6066d33902cab2a4707dcfc48a2d0fd44ab90", size = 954765, upload-time = "2025-03-07T01:40:01.615Z" }, -] - -[[package]] -name = "nvidia-cudnn-cu12" -version = "9.10.2.21" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "nvidia-cublas-cu12" }, -] -wheels = [ - { url = "https://files.pythonhosted.org/packages/ba/51/e123d997aa098c61d029f76663dedbfb9bc8dcf8c60cbd6adbe42f76d049/nvidia_cudnn_cu12-9.10.2.21-py3-none-manylinux_2_27_x86_64.whl", hash = "sha256:949452be657fa16687d0930933f032835951ef0892b37d2d53824d1a84dc97a8", size = 706758467, upload-time = "2025-06-06T21:54:08.597Z" }, -] - -[[package]] -name = "nvidia-cufft-cu12" -version = "11.3.3.83" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "nvidia-nvjitlink-cu12" }, -] -wheels = [ - { url = "https://files.pythonhosted.org/packages/1f/13/ee4e00f30e676b66ae65b4f08cb5bcbb8392c03f54f2d5413ea99a5d1c80/nvidia_cufft_cu12-11.3.3.83-py3-none-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:4d2dd21ec0b88cf61b62e6b43564355e5222e4a3fb394cac0db101f2dd0d4f74", size = 193118695, upload-time = "2025-03-07T01:45:27.821Z" }, -] - -[[package]] -name = "nvidia-cufile-cu12" -version = "1.13.1.3" -source = { registry = "https://pypi.org/simple" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/bb/fe/1bcba1dfbfb8d01be8d93f07bfc502c93fa23afa6fd5ab3fc7c1df71038a/nvidia_cufile_cu12-1.13.1.3-py3-none-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:1d069003be650e131b21c932ec3d8969c1715379251f8d23a1860554b1cb24fc", size = 1197834, upload-time = "2025-03-07T01:45:50.723Z" }, -] - 
-[[package]] -name = "nvidia-curand-cu12" -version = "10.3.9.90" -source = { registry = "https://pypi.org/simple" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/fb/aa/6584b56dc84ebe9cf93226a5cde4d99080c8e90ab40f0c27bda7a0f29aa1/nvidia_curand_cu12-10.3.9.90-py3-none-manylinux_2_27_x86_64.whl", hash = "sha256:b32331d4f4df5d6eefa0554c565b626c7216f87a06a4f56fab27c3b68a830ec9", size = 63619976, upload-time = "2025-03-07T01:46:23.323Z" }, -] - -[[package]] -name = "nvidia-cusolver-cu12" -version = "11.7.3.90" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "nvidia-cublas-cu12" }, - { name = "nvidia-cusparse-cu12" }, - { name = "nvidia-nvjitlink-cu12" }, -] -wheels = [ - { url = "https://files.pythonhosted.org/packages/85/48/9a13d2975803e8cf2777d5ed57b87a0b6ca2cc795f9a4f59796a910bfb80/nvidia_cusolver_cu12-11.7.3.90-py3-none-manylinux_2_27_x86_64.whl", hash = "sha256:4376c11ad263152bd50ea295c05370360776f8c3427b30991df774f9fb26c450", size = 267506905, upload-time = "2025-03-07T01:47:16.273Z" }, -] - -[[package]] -name = "nvidia-cusparse-cu12" -version = "12.5.8.93" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "nvidia-nvjitlink-cu12" }, -] -wheels = [ - { url = "https://files.pythonhosted.org/packages/c2/f5/e1854cb2f2bcd4280c44736c93550cc300ff4b8c95ebe370d0aa7d2b473d/nvidia_cusparse_cu12-12.5.8.93-py3-none-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:1ec05d76bbbd8b61b06a80e1eaf8cf4959c3d4ce8e711b65ebd0443bb0ebb13b", size = 288216466, upload-time = "2025-03-07T01:48:13.779Z" }, -] - -[[package]] -name = "nvidia-cusparselt-cu12" -version = "0.7.1" -source = { registry = "https://pypi.org/simple" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/56/79/12978b96bd44274fe38b5dde5cfb660b1d114f70a65ef962bcbbed99b549/nvidia_cusparselt_cu12-0.7.1-py3-none-manylinux2014_x86_64.whl", hash = "sha256:f1bb701d6b930d5a7cea44c19ceb973311500847f81b634d802b7b539dc55623", size 
= 287193691, upload-time = "2025-02-26T00:15:44.104Z" }, -] - -[[package]] -name = "nvidia-nccl-cu12" -version = "2.27.5" -source = { registry = "https://pypi.org/simple" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/bb/1c/857979db0ef194ca5e21478a0612bcdbbe59458d7694361882279947b349/nvidia_nccl_cu12-2.27.5-py3-none-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:31432ad4d1fb1004eb0c56203dc9bc2178a1ba69d1d9e02d64a6938ab5e40e7a", size = 322400625, upload-time = "2025-06-26T04:11:04.496Z" }, - { url = "https://files.pythonhosted.org/packages/6e/89/f7a07dc961b60645dbbf42e80f2bc85ade7feb9a491b11a1e973aa00071f/nvidia_nccl_cu12-2.27.5-py3-none-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:ad730cf15cb5d25fe849c6e6ca9eb5b76db16a80f13f425ac68d8e2e55624457", size = 322348229, upload-time = "2025-06-26T04:11:28.385Z" }, -] - -[[package]] -name = "nvidia-nvjitlink-cu12" -version = "12.8.93" -source = { registry = "https://pypi.org/simple" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/f6/74/86a07f1d0f42998ca31312f998bd3b9a7eff7f52378f4f270c8679c77fb9/nvidia_nvjitlink_cu12-12.8.93-py3-none-manylinux2010_x86_64.manylinux_2_12_x86_64.whl", hash = "sha256:81ff63371a7ebd6e6451970684f916be2eab07321b73c9d244dc2b4da7f73b88", size = 39254836, upload-time = "2025-03-07T01:49:55.661Z" }, -] - -[[package]] -name = "nvidia-nvshmem-cu12" -version = "3.3.20" -source = { registry = "https://pypi.org/simple" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/3b/6c/99acb2f9eb85c29fc6f3a7ac4dccfd992e22666dd08a642b303311326a97/nvidia_nvshmem_cu12-3.3.20-py3-none-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:d00f26d3f9b2e3c3065be895e3059d6479ea5c638a3f38c9fec49b1b9dd7c1e5", size = 124657145, upload-time = "2025-08-04T20:25:19.995Z" }, -] - -[[package]] -name = "nvidia-nvtx-cu12" -version = "12.8.90" -source = { registry = "https://pypi.org/simple" } -wheels = [ - { url = 
"https://files.pythonhosted.org/packages/a2/eb/86626c1bbc2edb86323022371c39aa48df6fd8b0a1647bc274577f72e90b/nvidia_nvtx_cu12-12.8.90-py3-none-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:5b17e2001cc0d751a5bc2c6ec6d26ad95913324a4adb86788c944f8ce9ba441f", size = 89954, upload-time = "2025-03-07T01:42:44.131Z" }, -] - [[package]] name = "onnx" version = "1.20.1" @@ -2026,28 +1753,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/8e/37/efad0257dc6e593a18957422533ff0f87ede7c9c6ea010a2177d738fb82f/pure_eval-0.2.3-py3-none-any.whl", hash = "sha256:1db8e35b67b3d218d818ae653e27f06c3aa420901fa7b081ca98cbedc874e0d0", size = 11842, upload-time = "2024-07-21T12:58:20.04Z" }, ] -[[package]] -name = "pyarrow" -version = "22.0.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/30/53/04a7fdc63e6056116c9ddc8b43bc28c12cdd181b85cbeadb79278475f3ae/pyarrow-22.0.0.tar.gz", hash = "sha256:3d600dc583260d845c7d8a6db540339dd883081925da2bd1c5cb808f720b3cd9", size = 1151151, upload-time = "2025-10-24T12:30:00.762Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/2e/b7/18f611a8cdc43417f9394a3ccd3eace2f32183c08b9eddc3d17681819f37/pyarrow-22.0.0-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:3e294c5eadfb93d78b0763e859a0c16d4051fc1c5231ae8956d61cb0b5666f5a", size = 34272022, upload-time = "2025-10-24T10:04:28.973Z" }, - { url = "https://files.pythonhosted.org/packages/26/5c/f259e2526c67eb4b9e511741b19870a02363a47a35edbebc55c3178db22d/pyarrow-22.0.0-cp311-cp311-macosx_12_0_x86_64.whl", hash = "sha256:69763ab2445f632d90b504a815a2a033f74332997052b721002298ed6de40f2e", size = 35995834, upload-time = "2025-10-24T10:04:35.467Z" }, - { url = "https://files.pythonhosted.org/packages/50/8d/281f0f9b9376d4b7f146913b26fac0aa2829cd1ee7e997f53a27411bbb92/pyarrow-22.0.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:b41f37cabfe2463232684de44bad753d6be08a7a072f6a83447eeaf0e4d2a215", size = 
45030348, upload-time = "2025-10-24T10:04:43.366Z" }, - { url = "https://files.pythonhosted.org/packages/f5/e5/53c0a1c428f0976bf22f513d79c73000926cb00b9c138d8e02daf2102e18/pyarrow-22.0.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:35ad0f0378c9359b3f297299c3309778bb03b8612f987399a0333a560b43862d", size = 47699480, upload-time = "2025-10-24T10:04:51.486Z" }, - { url = "https://files.pythonhosted.org/packages/95/e1/9dbe4c465c3365959d183e6345d0a8d1dc5b02ca3f8db4760b3bc834cf25/pyarrow-22.0.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:8382ad21458075c2e66a82a29d650f963ce51c7708c7c0ff313a8c206c4fd5e8", size = 48011148, upload-time = "2025-10-24T10:04:59.585Z" }, - { url = "https://files.pythonhosted.org/packages/c5/b4/7caf5d21930061444c3cf4fa7535c82faf5263e22ce43af7c2759ceb5b8b/pyarrow-22.0.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:1a812a5b727bc09c3d7ea072c4eebf657c2f7066155506ba31ebf4792f88f016", size = 50276964, upload-time = "2025-10-24T10:05:08.175Z" }, - { url = "https://files.pythonhosted.org/packages/ae/f3/cec89bd99fa3abf826f14d4e53d3d11340ce6f6af4d14bdcd54cd83b6576/pyarrow-22.0.0-cp311-cp311-win_amd64.whl", hash = "sha256:ec5d40dd494882704fb876c16fa7261a69791e784ae34e6b5992e977bd2e238c", size = 28106517, upload-time = "2025-10-24T10:05:14.314Z" }, - { url = "https://files.pythonhosted.org/packages/af/63/ba23862d69652f85b615ca14ad14f3bcfc5bf1b99ef3f0cd04ff93fdad5a/pyarrow-22.0.0-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:bea79263d55c24a32b0d79c00a1c58bb2ee5f0757ed95656b01c0fb310c5af3d", size = 34211578, upload-time = "2025-10-24T10:05:21.583Z" }, - { url = "https://files.pythonhosted.org/packages/b1/d0/f9ad86fe809efd2bcc8be32032fa72e8b0d112b01ae56a053006376c5930/pyarrow-22.0.0-cp312-cp312-macosx_12_0_x86_64.whl", hash = "sha256:12fe549c9b10ac98c91cf791d2945e878875d95508e1a5d14091a7aaa66d9cf8", size = 35989906, upload-time = "2025-10-24T10:05:29.485Z" }, - { url = 
"https://files.pythonhosted.org/packages/b4/a8/f910afcb14630e64d673f15904ec27dd31f1e009b77033c365c84e8c1e1d/pyarrow-22.0.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:334f900ff08ce0423407af97e6c26ad5d4e3b0763645559ece6fbf3747d6a8f5", size = 45021677, upload-time = "2025-10-24T10:05:38.274Z" }, - { url = "https://files.pythonhosted.org/packages/13/95/aec81f781c75cd10554dc17a25849c720d54feafb6f7847690478dcf5ef8/pyarrow-22.0.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:c6c791b09c57ed76a18b03f2631753a4960eefbbca80f846da8baefc6491fcfe", size = 47726315, upload-time = "2025-10-24T10:05:47.314Z" }, - { url = "https://files.pythonhosted.org/packages/bb/d4/74ac9f7a54cfde12ee42734ea25d5a3c9a45db78f9def949307a92720d37/pyarrow-22.0.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:c3200cb41cdbc65156e5f8c908d739b0dfed57e890329413da2748d1a2cd1a4e", size = 47990906, upload-time = "2025-10-24T10:05:58.254Z" }, - { url = "https://files.pythonhosted.org/packages/2e/71/fedf2499bf7a95062eafc989ace56572f3343432570e1c54e6599d5b88da/pyarrow-22.0.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ac93252226cf288753d8b46280f4edf3433bf9508b6977f8dd8526b521a1bbb9", size = 50306783, upload-time = "2025-10-24T10:06:08.08Z" }, - { url = "https://files.pythonhosted.org/packages/68/ed/b202abd5a5b78f519722f3d29063dda03c114711093c1995a33b8e2e0f4b/pyarrow-22.0.0-cp312-cp312-win_amd64.whl", hash = "sha256:44729980b6c50a5f2bfcc2668d36c569ce17f8b17bccaf470c4313dcbbf13c9d", size = 27972883, upload-time = "2025-10-24T10:06:14.204Z" }, -] - [[package]] name = "pycparser" version = "2.23" @@ -2127,19 +1832,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/36/c7/cfc8e811f061c841d7990b0201912c3556bfeb99cdcb7ed24adc8d6f8704/pydantic_core-2.41.5-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:56121965f7a4dc965bff783d70b907ddf3d57f6eba29b6d2e5dabfaf07799c51", size = 2145302, upload-time = "2025-11-04T13:43:46.64Z" }, ] -[[package]] -name = "pydeck" -version = 
"0.9.1" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "jinja2" }, - { name = "numpy" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/a1/ca/40e14e196864a0f61a92abb14d09b3d3da98f94ccb03b49cf51688140dab/pydeck-0.9.1.tar.gz", hash = "sha256:f74475ae637951d63f2ee58326757f8d4f9cd9f2a457cf42950715003e2cb605", size = 3832240, upload-time = "2024-05-10T15:36:21.153Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/ab/4c/b888e6cf58bd9db9c93f40d1c6be8283ff49d88919231afe93a6bcf61626/pydeck-0.9.1-py2.py3-none-any.whl", hash = "sha256:b3f75ba0d273fc917094fa61224f3f6076ca8752b93d46faf3bcfd9f9d59b038", size = 6900403, upload-time = "2024-05-10T15:36:17.36Z" }, -] - [[package]] name = "pygments" version = "2.19.2" @@ -2490,32 +2182,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/fc/51/727abb13f44c1fcf6d145979e1535a35794db0f6e450a0cb46aa24732fe2/s3transfer-0.16.0-py3-none-any.whl", hash = "sha256:18e25d66fed509e3868dc1572b3f427ff947dd2c56f844a5bf09481ad3f3b2fe", size = 86830, upload-time = "2025-12-01T02:30:57.729Z" }, ] -[[package]] -name = "scikit-learn" -version = "1.8.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "joblib" }, - { name = "numpy" }, - { name = "scipy" }, - { name = "threadpoolctl" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/0e/d4/40988bf3b8e34feec1d0e6a051446b1f66225f8529b9309becaeef62b6c4/scikit_learn-1.8.0.tar.gz", hash = "sha256:9bccbb3b40e3de10351f8f5068e105d0f4083b1a65fa07b6634fbc401a6287fd", size = 7335585, upload-time = "2025-12-10T07:08:53.618Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/c9/92/53ea2181da8ac6bf27170191028aee7251f8f841f8d3edbfdcaf2008fde9/scikit_learn-1.8.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:146b4d36f800c013d267b29168813f7a03a43ecd2895d04861f1240b564421da", size = 8595835, upload-time = "2025-12-10T07:07:39.385Z" }, - { url = 
"https://files.pythonhosted.org/packages/01/18/d154dc1638803adf987910cdd07097d9c526663a55666a97c124d09fb96a/scikit_learn-1.8.0-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:f984ca4b14914e6b4094c5d52a32ea16b49832c03bd17a110f004db3c223e8e1", size = 8080381, upload-time = "2025-12-10T07:07:41.93Z" }, - { url = "https://files.pythonhosted.org/packages/8a/44/226142fcb7b7101e64fdee5f49dbe6288d4c7af8abf593237b70fca080a4/scikit_learn-1.8.0-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5e30adb87f0cc81c7690a84f7932dd66be5bac57cfe16b91cb9151683a4a2d3b", size = 8799632, upload-time = "2025-12-10T07:07:43.899Z" }, - { url = "https://files.pythonhosted.org/packages/36/4d/4a67f30778a45d542bbea5db2dbfa1e9e100bf9ba64aefe34215ba9f11f6/scikit_learn-1.8.0-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ada8121bcb4dac28d930febc791a69f7cb1673c8495e5eee274190b73a4559c1", size = 9103788, upload-time = "2025-12-10T07:07:45.982Z" }, - { url = "https://files.pythonhosted.org/packages/89/3c/45c352094cfa60050bcbb967b1faf246b22e93cb459f2f907b600f2ceda5/scikit_learn-1.8.0-cp311-cp311-win_amd64.whl", hash = "sha256:c57b1b610bd1f40ba43970e11ce62821c2e6569e4d74023db19c6b26f246cb3b", size = 8081706, upload-time = "2025-12-10T07:07:48.111Z" }, - { url = "https://files.pythonhosted.org/packages/3d/46/5416595bb395757f754feb20c3d776553a386b661658fb21b7c814e89efe/scikit_learn-1.8.0-cp311-cp311-win_arm64.whl", hash = "sha256:2838551e011a64e3053ad7618dda9310175f7515f1742fa2d756f7c874c05961", size = 7688451, upload-time = "2025-12-10T07:07:49.873Z" }, - { url = "https://files.pythonhosted.org/packages/90/74/e6a7cc4b820e95cc38cf36cd74d5aa2b42e8ffc2d21fe5a9a9c45c1c7630/scikit_learn-1.8.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:5fb63362b5a7ddab88e52b6dbb47dac3fd7dafeee740dc6c8d8a446ddedade8e", size = 8548242, upload-time = "2025-12-10T07:07:51.568Z" }, - { url = 
"https://files.pythonhosted.org/packages/49/d8/9be608c6024d021041c7f0b3928d4749a706f4e2c3832bbede4fb4f58c95/scikit_learn-1.8.0-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:5025ce924beccb28298246e589c691fe1b8c1c96507e6d27d12c5fadd85bfd76", size = 8079075, upload-time = "2025-12-10T07:07:53.697Z" }, - { url = "https://files.pythonhosted.org/packages/dd/47/f187b4636ff80cc63f21cd40b7b2d177134acaa10f6bb73746130ee8c2e5/scikit_learn-1.8.0-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4496bb2cf7a43ce1a2d7524a79e40bc5da45cf598dbf9545b7e8316ccba47bb4", size = 8660492, upload-time = "2025-12-10T07:07:55.574Z" }, - { url = "https://files.pythonhosted.org/packages/97/74/b7a304feb2b49df9fafa9382d4d09061a96ee9a9449a7cbea7988dda0828/scikit_learn-1.8.0-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a0bcfe4d0d14aec44921545fd2af2338c7471de9cb701f1da4c9d85906ab847a", size = 8931904, upload-time = "2025-12-10T07:07:57.666Z" }, - { url = "https://files.pythonhosted.org/packages/9f/c4/0ab22726a04ede56f689476b760f98f8f46607caecff993017ac1b64aa5d/scikit_learn-1.8.0-cp312-cp312-win_amd64.whl", hash = "sha256:35c007dedb2ffe38fe3ee7d201ebac4a2deccd2408e8621d53067733e3c74809", size = 8019359, upload-time = "2025-12-10T07:07:59.838Z" }, - { url = "https://files.pythonhosted.org/packages/24/90/344a67811cfd561d7335c1b96ca21455e7e472d281c3c279c4d3f2300236/scikit_learn-1.8.0-cp312-cp312-win_arm64.whl", hash = "sha256:8c497fff237d7b4e07e9ef1a640887fa4fb765647f86fbe00f969ff6280ce2bb", size = 7641898, upload-time = "2025-12-10T07:08:01.36Z" }, -] - [[package]] name = "scipy" version = "1.17.0" @@ -2588,15 +2254,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/b7/ce/149a00dd41f10bc29e5921b496af8b574d8413afcd5e30dfa0ed46c2cc5e/six-1.17.0-py2.py3-none-any.whl", hash = "sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274", size = 11050, upload-time = "2024-12-04T17:35:26.475Z" }, ] -[[package]] -name = 
"smmap" -version = "5.0.2" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/44/cd/a040c4b3119bbe532e5b0732286f805445375489fceaec1f48306068ee3b/smmap-5.0.2.tar.gz", hash = "sha256:26ea65a03958fa0c8a1c7e8c7a58fdc77221b8910f6be2131affade476898ad5", size = 22329, upload-time = "2025-01-02T07:14:40.909Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/04/be/d09147ad1ec7934636ad912901c5fd7667e1c858e19d355237db0d0cd5e4/smmap-5.0.2-py3-none-any.whl", hash = "sha256:b30115f0def7d7531d22a0fb6502488d879e75b260a9db4d0819cfb25403af5e", size = 24303, upload-time = "2025-01-02T07:14:38.724Z" }, -] - [[package]] name = "soupsieve" version = "2.8.2" @@ -2693,35 +2350,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/60/15/3daba2df40be8b8a9a027d7f54c8dedf24f0d81b96e54b52293f5f7e3418/statsmodels-0.14.6-cp312-cp312-win_amd64.whl", hash = "sha256:b5eb07acd115aa6208b4058211138393a7e6c2cf12b6f213ede10f658f6a714f", size = 9543991, upload-time = "2025-12-05T23:10:58.536Z" }, ] -[[package]] -name = "streamlit" -version = "1.53.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "altair" }, - { name = "blinker" }, - { name = "cachetools" }, - { name = "click" }, - { name = "gitpython" }, - { name = "numpy" }, - { name = "packaging" }, - { name = "pandas" }, - { name = "pillow" }, - { name = "protobuf" }, - { name = "pyarrow" }, - { name = "pydeck" }, - { name = "requests" }, - { name = "tenacity" }, - { name = "toml" }, - { name = "tornado" }, - { name = "typing-extensions" }, - { name = "watchdog", marker = "sys_platform != 'darwin'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/2a/b1/5e5fd38d4a5f97163ff071d76e8d6b3aa43e03f86bf94fd0265c43e43fa3/streamlit-1.53.0.tar.gz", hash = "sha256:0114116d34589f2e652bf4ac735a3aca69807e659f92f99c98e7b620d000838f", size = 8650270, upload-time = "2026-01-14T19:52:24.94Z" } -wheels = [ - { url = 
"https://files.pythonhosted.org/packages/a8/54/47ed40f34403205b2c9aab04472e864d1b496b4381b9bf408cf2c20e144c/streamlit-1.53.0-py3-none-any.whl", hash = "sha256:e8b65210bd1a785d121340b794a47c7c912d8da401af9e4403e16c84e3bc4410", size = 9110100, upload-time = "2026-01-14T19:52:22.589Z" }, -] - [[package]] name = "sympy" version = "1.14.0" @@ -2734,15 +2362,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/a2/09/77d55d46fd61b4a135c444fc97158ef34a095e5681d0a6c10b75bf356191/sympy-1.14.0-py3-none-any.whl", hash = "sha256:e091cc3e99d2141a0ba2847328f5479b05d94a6635cb96148ccb3f34671bd8f5", size = 6299353, upload-time = "2025-04-27T18:04:59.103Z" }, ] -[[package]] -name = "tenacity" -version = "9.1.2" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/0a/d4/2b0cd0fe285e14b36db076e78c93766ff1d529d70408bd1d2a5a84f1d929/tenacity-9.1.2.tar.gz", hash = "sha256:1169d376c297e7de388d18b4481760d478b0e99a777cad3a9c86e556f4b697cb", size = 48036, upload-time = "2025-04-02T08:25:09.966Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/e5/30/643397144bfbfec6f6ef821f36f33e57d35946c44a2352d3c9f0ae847619/tenacity-9.1.2-py3-none-any.whl", hash = "sha256:f77bf36710d8b73a50b2dd155c97b870017ad21afe6ab300326b0371b3b05138", size = 28248, upload-time = "2025-04-02T08:25:07.678Z" }, -] - [[package]] name = "terminado" version = "0.18.1" @@ -2757,69 +2376,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/6a/9e/2064975477fdc887e47ad42157e214526dcad8f317a948dee17e1659a62f/terminado-0.18.1-py3-none-any.whl", hash = "sha256:a4468e1b37bb318f8a86514f65814e1afc977cf29b3992a4500d9dd305dcceb0", size = 14154, upload-time = "2024-03-12T14:34:36.569Z" }, ] -[[package]] -name = "threadpoolctl" -version = "3.6.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = 
"https://files.pythonhosted.org/packages/b7/4d/08c89e34946fce2aec4fbb45c9016efd5f4d7f24af8e5d93296e935631d8/threadpoolctl-3.6.0.tar.gz", hash = "sha256:8ab8b4aa3491d812b623328249fab5302a68d2d71745c8a4c719a2fcaba9f44e", size = 21274, upload-time = "2025-03-13T13:49:23.031Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/32/d5/f9a850d79b0851d1d4ef6456097579a9005b31fea68726a4ae5f2d82ddd9/threadpoolctl-3.6.0-py3-none-any.whl", hash = "sha256:43a0b8fd5a2928500110039e43a5eed8480b918967083ea48dc3ab9f13c4a7fb", size = 18638, upload-time = "2025-03-13T13:49:21.846Z" }, -] - -[[package]] -name = "timsseek-rescore" -version = "0.26.0" -source = { editable = "python/timsseek_rescore" } -dependencies = [ - { name = "matplotlib" }, - { name = "mokapot" }, - { name = "numpy" }, - { name = "polars" }, - { name = "rich" }, - { name = "torch" }, - { name = "tqdm" }, - { name = "uniplot" }, - { name = "xgboost" }, -] - -[package.metadata] -requires-dist = [ - { name = "matplotlib" }, - { name = "mokapot", git = "https://github.com/jspaezp/mokapot.git?branch=feat%2Fre_add_confidence_api" }, - { name = "numpy" }, - { name = "polars" }, - { name = "rich" }, - { name = "torch" }, - { name = "tqdm" }, - { name = "uniplot" }, - { name = "xgboost" }, -] - -[[package]] -name = "timsseek-rts-receiver" -version = "0.26.0" -source = { editable = "python/timsseek_rts_receiver" } -dependencies = [ - { name = "matplotlib" }, - { name = "numpy" }, - { name = "pandas" }, - { name = "pydantic" }, - { name = "requests" }, - { name = "speclib-builder", extra = ["ml"] }, - { name = "streamlit" }, -] - -[package.metadata] -requires-dist = [ - { name = "matplotlib" }, - { name = "numpy" }, - { name = "pandas" }, - { name = "pydantic", specifier = ">=2.0.0" }, - { name = "requests" }, - { name = "speclib-builder", extras = ["ml"], editable = "python/speclib_builder" }, - { name = "streamlit" }, -] - [[package]] name = "timsseek-workspace" version = "0.26.0" @@ -2827,8 +2383,6 @@ source = 
{ virtual = "." } dependencies = [ { name = "jupyter" }, { name = "speclib-builder", extra = ["ml"] }, - { name = "timsseek-rescore" }, - { name = "timsseek-rts-receiver" }, ] [package.dev-dependencies] @@ -2853,8 +2407,6 @@ interactive = [ requires-dist = [ { name = "jupyter", extras = ["python"], specifier = ">=1.1.1" }, { name = "speclib-builder", extras = ["ml"], editable = "python/speclib_builder" }, - { name = "timsseek-rescore", editable = "python/timsseek_rescore" }, - { name = "timsseek-rts-receiver", editable = "python/timsseek_rts_receiver" }, ] [package.metadata.requires-dev] @@ -2895,46 +2447,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/44/6f/7120676b6d73228c96e17f1f794d8ab046fc910d781c8d151120c3f1569e/toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b", size = 16588, upload-time = "2020-11-01T01:40:20.672Z" }, ] -[[package]] -name = "torch" -version = "2.9.1" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "filelock" }, - { name = "fsspec" }, - { name = "jinja2" }, - { name = "networkx" }, - { name = "nvidia-cublas-cu12", marker = "platform_machine == 'x86_64' and sys_platform == 'linux'" }, - { name = "nvidia-cuda-cupti-cu12", marker = "platform_machine == 'x86_64' and sys_platform == 'linux'" }, - { name = "nvidia-cuda-nvrtc-cu12", marker = "platform_machine == 'x86_64' and sys_platform == 'linux'" }, - { name = "nvidia-cuda-runtime-cu12", marker = "platform_machine == 'x86_64' and sys_platform == 'linux'" }, - { name = "nvidia-cudnn-cu12", marker = "platform_machine == 'x86_64' and sys_platform == 'linux'" }, - { name = "nvidia-cufft-cu12", marker = "platform_machine == 'x86_64' and sys_platform == 'linux'" }, - { name = "nvidia-cufile-cu12", marker = "platform_machine == 'x86_64' and sys_platform == 'linux'" }, - { name = "nvidia-curand-cu12", marker = "platform_machine == 'x86_64' and sys_platform == 'linux'" }, - { name = 
"nvidia-cusolver-cu12", marker = "platform_machine == 'x86_64' and sys_platform == 'linux'" }, - { name = "nvidia-cusparse-cu12", marker = "platform_machine == 'x86_64' and sys_platform == 'linux'" }, - { name = "nvidia-cusparselt-cu12", marker = "platform_machine == 'x86_64' and sys_platform == 'linux'" }, - { name = "nvidia-nccl-cu12", marker = "platform_machine == 'x86_64' and sys_platform == 'linux'" }, - { name = "nvidia-nvjitlink-cu12", marker = "platform_machine == 'x86_64' and sys_platform == 'linux'" }, - { name = "nvidia-nvshmem-cu12", marker = "platform_machine == 'x86_64' and sys_platform == 'linux'" }, - { name = "nvidia-nvtx-cu12", marker = "platform_machine == 'x86_64' and sys_platform == 'linux'" }, - { name = "setuptools", marker = "python_full_version >= '3.12'" }, - { name = "sympy" }, - { name = "triton", marker = "platform_machine == 'x86_64' and sys_platform == 'linux'" }, - { name = "typing-extensions" }, -] -wheels = [ - { url = "https://files.pythonhosted.org/packages/15/db/c064112ac0089af3d2f7a2b5bfbabf4aa407a78b74f87889e524b91c5402/torch-2.9.1-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:62b3fd888277946918cba4478cf849303da5359f0fb4e3bfb86b0533ba2eaf8d", size = 104220430, upload-time = "2025-11-12T15:20:31.705Z" }, - { url = "https://files.pythonhosted.org/packages/56/be/76eaa36c9cd032d3b01b001e2c5a05943df75f26211f68fae79e62f87734/torch-2.9.1-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:d033ff0ac3f5400df862a51bdde9bad83561f3739ea0046e68f5401ebfa67c1b", size = 899821446, upload-time = "2025-11-12T15:20:15.544Z" }, - { url = "https://files.pythonhosted.org/packages/47/cc/7a2949e38dfe3244c4df21f0e1c27bce8aedd6c604a587dd44fc21017cb4/torch-2.9.1-cp311-cp311-win_amd64.whl", hash = "sha256:0d06b30a9207b7c3516a9e0102114024755a07045f0c1d2f2a56b1819ac06bcb", size = 110973074, upload-time = "2025-11-12T15:21:39.958Z" }, - { url = 
"https://files.pythonhosted.org/packages/1e/ce/7d251155a783fb2c1bb6837b2b7023c622a2070a0a72726ca1df47e7ea34/torch-2.9.1-cp311-none-macosx_11_0_arm64.whl", hash = "sha256:52347912d868653e1528b47cafaf79b285b98be3f4f35d5955389b1b95224475", size = 74463887, upload-time = "2025-11-12T15:20:36.611Z" }, - { url = "https://files.pythonhosted.org/packages/0f/27/07c645c7673e73e53ded71705045d6cb5bae94c4b021b03aa8d03eee90ab/torch-2.9.1-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:da5f6f4d7f4940a173e5572791af238cb0b9e21b1aab592bd8b26da4c99f1cd6", size = 104126592, upload-time = "2025-11-12T15:20:41.62Z" }, - { url = "https://files.pythonhosted.org/packages/19/17/e377a460603132b00760511299fceba4102bd95db1a0ee788da21298ccff/torch-2.9.1-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:27331cd902fb4322252657f3902adf1c4f6acad9dcad81d8df3ae14c7c4f07c4", size = 899742281, upload-time = "2025-11-12T15:22:17.602Z" }, - { url = "https://files.pythonhosted.org/packages/b1/1a/64f5769025db846a82567fa5b7d21dba4558a7234ee631712ee4771c436c/torch-2.9.1-cp312-cp312-win_amd64.whl", hash = "sha256:81a285002d7b8cfd3fdf1b98aa8df138d41f1a8334fd9ea37511517cedf43083", size = 110940568, upload-time = "2025-11-12T15:21:18.689Z" }, - { url = "https://files.pythonhosted.org/packages/6e/ab/07739fd776618e5882661d04c43f5b5586323e2f6a2d7d84aac20d8f20bd/torch-2.9.1-cp312-none-macosx_11_0_arm64.whl", hash = "sha256:c0d25d1d8e531b8343bea0ed811d5d528958f1dcbd37e7245bc686273177ad7e", size = 74479191, upload-time = "2025-11-12T15:21:25.816Z" }, -] - [[package]] name = "tornado" version = "6.5.4" @@ -2975,44 +2487,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/00/c0/8f5d070730d7836adc9c9b6408dec68c6ced86b304a9b26a14df072a6e8c/traitlets-5.14.3-py3-none-any.whl", hash = "sha256:b74e89e397b1ed28cc831db7aea759ba6640cb3de13090ca145426688ff1ac4f", size = 85359, upload-time = "2024-04-19T11:11:46.763Z" }, ] -[[package]] -name = "triqler" -version = "0.9.1" -source = { registry = 
"https://pypi.org/simple" } -dependencies = [ - { name = "job-pool" }, - { name = "numpy" }, - { name = "pandas" }, - { name = "pyarrow" }, - { name = "scipy" }, - { name = "threadpoolctl" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/53/98/186d0f0380f6a5b9b4b51c4ae75c920d7aaf41d833f416bbf15714fb76c6/triqler-0.9.1.tar.gz", hash = "sha256:0fb6752f84471af275d9c6d24393c1a87e548089e77fc7ae97736e50772fa0e2", size = 58182, upload-time = "2025-10-02T13:30:38.931Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/9d/a4/e427c9e1717c53befb5d44edc906ecec47b93c501d9ac8e5bb5c6899bc1f/triqler-0.9.1-py3-none-any.whl", hash = "sha256:5abebb2899795cd75a2c879d1acf7a9b41e5e039b32ae09f9fd4995d20f78f95", size = 64899, upload-time = "2025-10-02T13:30:38.047Z" }, -] - -[[package]] -name = "triton" -version = "3.5.1" -source = { registry = "https://pypi.org/simple" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/b0/72/ec90c3519eaf168f22cb1757ad412f3a2add4782ad3a92861c9ad135d886/triton-3.5.1-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:61413522a48add32302353fdbaaf92daaaab06f6b5e3229940d21b5207f47579", size = 170425802, upload-time = "2025-11-11T17:40:53.209Z" }, - { url = "https://files.pythonhosted.org/packages/f2/50/9a8358d3ef58162c0a415d173cfb45b67de60176e1024f71fbc4d24c0b6d/triton-3.5.1-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d2c6b915a03888ab931a9fd3e55ba36785e1fe70cbea0b40c6ef93b20fc85232", size = 170470207, upload-time = "2025-11-11T17:41:00.253Z" }, -] - -[[package]] -name = "typeguard" -version = "4.4.4" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "typing-extensions" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/c7/68/71c1a15b5f65f40e91b65da23b8224dad41349894535a97f63a52e462196/typeguard-4.4.4.tar.gz", hash = "sha256:3a7fd2dffb705d4d0efaed4306a704c89b9dee850b688f060a8b1615a79e5f74", size = 75203, upload-time = 
"2025-06-18T09:56:07.624Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/1b/a9/e3aee762739c1d7528da1c3e06d518503f8b6c439c35549b53735ba52ead/typeguard-4.4.4-py3-none-any.whl", hash = "sha256:b5f562281b6bfa1f5492470464730ef001646128b180769880468bd84b68b09e", size = 34874, upload-time = "2025-06-18T09:56:05.999Z" }, -] - [[package]] name = "typing-extensions" version = "4.15.0" @@ -3146,24 +2620,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/48/ee/b1dcfa25f18e6964cf73906c5c39a15654d0a702670ff89e9ed3ebab3e05/vizta-1.1.2-py3-none-any.whl", hash = "sha256:39d66bc7c30256d47a5cd2ca0a0924bd8dc65b5f63ea686da8216b606b95ee3c", size = 8864, upload-time = "2025-09-01T21:08:57.577Z" }, ] -[[package]] -name = "watchdog" -version = "6.0.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/db/7d/7f3d619e951c88ed75c6037b246ddcf2d322812ee8ea189be89511721d54/watchdog-6.0.0.tar.gz", hash = "sha256:9ddf7c82fda3ae8e24decda1338ede66e1c99883db93711d8fb941eaa2d8c282", size = 131220, upload-time = "2024-11-01T14:07:13.037Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/a9/c7/ca4bf3e518cb57a686b2feb4f55a1892fd9a3dd13f470fca14e00f80ea36/watchdog-6.0.0-py3-none-manylinux2014_aarch64.whl", hash = "sha256:7607498efa04a3542ae3e05e64da8202e58159aa1fa4acddf7678d34a35d4f13", size = 79079, upload-time = "2024-11-01T14:06:59.472Z" }, - { url = "https://files.pythonhosted.org/packages/5c/51/d46dc9332f9a647593c947b4b88e2381c8dfc0942d15b8edc0310fa4abb1/watchdog-6.0.0-py3-none-manylinux2014_armv7l.whl", hash = "sha256:9041567ee8953024c83343288ccc458fd0a2d811d6a0fd68c4c22609e3490379", size = 79078, upload-time = "2024-11-01T14:07:01.431Z" }, - { url = "https://files.pythonhosted.org/packages/d4/57/04edbf5e169cd318d5f07b4766fee38e825d64b6913ca157ca32d1a42267/watchdog-6.0.0-py3-none-manylinux2014_i686.whl", hash = "sha256:82dc3e3143c7e38ec49d61af98d6558288c415eac98486a5c581726e0737c00e", size = 
79076, upload-time = "2024-11-01T14:07:02.568Z" }, - { url = "https://files.pythonhosted.org/packages/ab/cc/da8422b300e13cb187d2203f20b9253e91058aaf7db65b74142013478e66/watchdog-6.0.0-py3-none-manylinux2014_ppc64.whl", hash = "sha256:212ac9b8bf1161dc91bd09c048048a95ca3a4c4f5e5d4a7d1b1a7d5752a7f96f", size = 79077, upload-time = "2024-11-01T14:07:03.893Z" }, - { url = "https://files.pythonhosted.org/packages/2c/3b/b8964e04ae1a025c44ba8e4291f86e97fac443bca31de8bd98d3263d2fcf/watchdog-6.0.0-py3-none-manylinux2014_ppc64le.whl", hash = "sha256:e3df4cbb9a450c6d49318f6d14f4bbc80d763fa587ba46ec86f99f9e6876bb26", size = 79078, upload-time = "2024-11-01T14:07:05.189Z" }, - { url = "https://files.pythonhosted.org/packages/62/ae/a696eb424bedff7407801c257d4b1afda455fe40821a2be430e173660e81/watchdog-6.0.0-py3-none-manylinux2014_s390x.whl", hash = "sha256:2cce7cfc2008eb51feb6aab51251fd79b85d9894e98ba847408f662b3395ca3c", size = 79077, upload-time = "2024-11-01T14:07:06.376Z" }, - { url = "https://files.pythonhosted.org/packages/b5/e8/dbf020b4d98251a9860752a094d09a65e1b436ad181faf929983f697048f/watchdog-6.0.0-py3-none-manylinux2014_x86_64.whl", hash = "sha256:20ffe5b202af80ab4266dcd3e91aae72bf2da48c0d33bdb15c66658e685e94e2", size = 79078, upload-time = "2024-11-01T14:07:07.547Z" }, - { url = "https://files.pythonhosted.org/packages/07/f6/d0e5b343768e8bcb4cda79f0f2f55051bf26177ecd5651f84c07567461cf/watchdog-6.0.0-py3-none-win32.whl", hash = "sha256:07df1fdd701c5d4c8e55ef6cf55b8f0120fe1aef7ef39a1c6fc6bc2e606d517a", size = 79065, upload-time = "2024-11-01T14:07:09.525Z" }, - { url = "https://files.pythonhosted.org/packages/db/d9/c495884c6e548fce18a8f40568ff120bc3a4b7b99813081c8ac0c936fa64/watchdog-6.0.0-py3-none-win_amd64.whl", hash = "sha256:cbafb470cf848d93b5d013e2ecb245d4aa1c8fd0504e863ccefa32445359d680", size = 79070, upload-time = "2024-11-01T14:07:10.686Z" }, - { url = 
"https://files.pythonhosted.org/packages/33/e8/e40370e6d74ddba47f002a32919d91310d6074130fe4e17dabcafc15cbf1/watchdog-6.0.0-py3-none-win_ia64.whl", hash = "sha256:a1914259fa9e1454315171103c6a30961236f508b9b623eae470268bbcc6a22f", size = 79067, upload-time = "2024-11-01T14:07:11.845Z" }, -] - [[package]] name = "wcwidth" version = "0.2.14" @@ -3218,33 +2674,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/e1/07/c6fe3ad3e685340704d314d765b7912993bcb8dc198f0e7a89382d37974b/win32_setctime-1.2.0-py3-none-any.whl", hash = "sha256:95d644c4e708aba81dc3704a116d8cbc974d70b3bdb8be1d150e36be6e9d1390", size = 4083, upload-time = "2024-12-07T15:28:26.465Z" }, ] -[[package]] -name = "xgboost" -version = "3.1.3" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "numpy" }, - { name = "nvidia-nccl-cu12", marker = "sys_platform == 'linux'" }, - { name = "scipy" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/42/db/ff3eb8ff8cdf87a57cbb0f484234b4353178587236c4c84c1d307165c1f8/xgboost-3.1.3.tar.gz", hash = "sha256:0aeaa59d7ba09221a6fa75f70406751cfafdf3f149d0a91b197a1360404a28f3", size = 1237662, upload-time = "2026-01-10T00:20:13.458Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/1b/a9/8668a5662c497c32ab127b7ca57d91153f499b31c725969a1e4147782e64/xgboost-3.1.3-py3-none-macosx_10_15_x86_64.whl", hash = "sha256:e16a6c352ee1a4c19372a7b2bb75129e10e63adeeabd3d11f21b7787378e5a50", size = 2378032, upload-time = "2026-01-10T00:18:14.103Z" }, - { url = "https://files.pythonhosted.org/packages/52/39/ec5c53228b091387e934d3d419e8e3a5ce98c1650d458987d6e254a15304/xgboost-3.1.3-py3-none-macosx_12_0_arm64.whl", hash = "sha256:a7a1d59f3529de0ad9089c59b6cc595cd7b4424feabcc06463c4bde41f202f74", size = 2211477, upload-time = "2026-01-10T00:18:34.409Z" }, - { url = "https://files.pythonhosted.org/packages/99/f7/ceb06e6b959e5a8b303883482ecad346495641947679e3f735ae8ac1caa7/xgboost-3.1.3-py3-none-manylinux_2_28_aarch64.whl", hash = 
"sha256:2e31482633883b2e95fda6055db654bbfac82e10d91ad3d9929086ebd28eb1c4", size = 115346575, upload-time = "2026-01-10T00:19:11.44Z" }, - { url = "https://files.pythonhosted.org/packages/6c/9c/9d4ad7f586698bad52a570d2bf81138e500a5d9f32723c2b4ed1dd9252d8/xgboost-3.1.3-py3-none-manylinux_2_28_x86_64.whl", hash = "sha256:687504d1d76dc797df08b0dbe8b83d58629cdc06df52378f617164d16142bf2c", size = 115926894, upload-time = "2026-01-10T00:19:49.123Z" }, - { url = "https://files.pythonhosted.org/packages/3a/d8/4d4ae25452577f2dfabc66b60e712e7c01f9fe6c389fa88c546c2f427c4d/xgboost-3.1.3-py3-none-win_amd64.whl", hash = "sha256:3fe349b4c6030f0d66e166a3a6b7d470e776d530ea240d77335e36144cbe132a", size = 72011993, upload-time = "2026-01-10T00:17:42.98Z" }, -] - -[[package]] -name = "zipp" -version = "3.23.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/e3/02/0f2892c661036d50ede074e376733dca2ae7c6eb617489437771209d4180/zipp-3.23.0.tar.gz", hash = "sha256:a07157588a12518c9d4034df3fbbee09c814741a33ff63c05fa29d26a2404166", size = 25547, upload-time = "2025-06-08T17:06:39.4Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/2e/54/647ade08bf0db230bfea292f893923872fd20be6ac6f53b2b936ba839d75/zipp-3.23.0-py3-none-any.whl", hash = "sha256:071652d6115ed432f5ce1d34c336c0adfd6a884660d1e9712a256d3d3bd4b14e", size = 10276, upload-time = "2025-06-08T17:06:38.034Z" }, -] - [[package]] name = "zstandard" version = "0.25.0" From a822c50fd4c5fa7fa60d25e8aaaa1585d0da8ffb Mon Sep 17 00:00:00 2001 From: "J. 
Sebastian Paez" Date: Sat, 11 Apr 2026 14:08:55 -0700 Subject: [PATCH 56/64] chore: update uv lock --- uv.lock | 1060 +++++++++++++++++++++++++++++-------------------------- 1 file changed, 550 insertions(+), 510 deletions(-) diff --git a/uv.lock b/uv.lock index 9c061d2..3b3c50b 100644 --- a/uv.lock +++ b/uv.lock @@ -2,8 +2,18 @@ version = 1 revision = 3 requires-python = ">=3.11, <3.13" resolution-markers = [ - "python_full_version >= '3.12'", - "python_full_version < '3.12'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'win32'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'win32'", + "python_full_version < '3.12' and platform_machine != 's390x' and sys_platform == 'win32'", + "python_full_version < '3.12' and platform_machine == 's390x' and sys_platform == 'win32'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'emscripten'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'emscripten'", + "python_full_version < '3.12' and platform_machine != 's390x' and sys_platform == 'emscripten'", + "python_full_version < '3.12' and platform_machine == 's390x' and sys_platform == 'emscripten'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'emscripten' and sys_platform != 'win32'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'emscripten' and sys_platform != 'win32'", + "python_full_version < '3.12' and platform_machine != 's390x' and sys_platform != 'emscripten' and sys_platform != 'win32'", + "python_full_version < '3.12' and platform_machine == 's390x' and sys_platform != 'emscripten' and sys_platform != 'win32'", ] [manifest] @@ -23,15 +33,15 @@ wheels = [ [[package]] name = "anyio" -version = "4.12.1" +version = "4.13.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "idna" }, { name = "typing-extensions" }, ] 
-sdist = { url = "https://files.pythonhosted.org/packages/96/f0/5eb65b2bb0d09ac6776f2eb54adee6abe8228ea05b20a5ad0e4945de8aac/anyio-4.12.1.tar.gz", hash = "sha256:41cfcc3a4c85d3f05c932da7c26d0201ac36f72abd4435ba90d0464a3ffed703", size = 228685, upload-time = "2026-01-06T11:45:21.246Z" } +sdist = { url = "https://files.pythonhosted.org/packages/19/14/2c5dd9f512b66549ae92767a9c7b330ae88e1932ca57876909410251fe13/anyio-4.13.0.tar.gz", hash = "sha256:334b70e641fd2221c1505b3890c69882fe4a2df910cba14d97019b90b24439dc", size = 231622, upload-time = "2026-03-24T12:59:09.671Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/38/0e/27be9fdef66e72d64c0cdc3cc2823101b80585f8119b5c112c2e8f5f7dab/anyio-4.12.1-py3-none-any.whl", hash = "sha256:d405828884fc140aa80a3c667b8beed277f1dfedec42ba031bd6ac3db606ab6c", size = 113592, upload-time = "2026-01-06T11:45:19.497Z" }, + { url = "https://files.pythonhosted.org/packages/da/42/e921fccf5015463e32a3cf6ee7f980a6ed0f395ceeaa45060b61d86486c2/anyio-4.13.0-py3-none-any.whl", hash = "sha256:08b310f9e24a9594186fd75b4f73f4a4152069e3853f1ed8bfbf58369f4ad708", size = 114353, upload-time = "2026-03-24T12:59:08.246Z" }, ] [[package]] @@ -100,29 +110,29 @@ wheels = [ [[package]] name = "async-lru" -version = "2.1.0" +version = "2.3.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/ef/c3/bbf34f15ea88dfb649ab2c40f9d75081784a50573a9ea431563cab64adb8/async_lru-2.1.0.tar.gz", hash = "sha256:9eeb2fecd3fe42cc8a787fc32ead53a3a7158cc43d039c3c55ab3e4e5b2a80ed", size = 12041, upload-time = "2026-01-17T22:52:18.931Z" } +sdist = { url = "https://files.pythonhosted.org/packages/e8/1f/989ecfef8e64109a489fff357450cb73fa73a865a92bd8c272170a6922c2/async_lru-2.3.0.tar.gz", hash = "sha256:89bdb258a0140d7313cf8f4031d816a042202faa61d0ab310a0a538baa1c24b6", size = 16332, upload-time = "2026-03-19T01:04:32.413Z" } wheels = [ - { url = 
"https://files.pythonhosted.org/packages/2e/e9/eb6a5db5ac505d5d45715388e92bced7a5bb556facc4d0865d192823f2d2/async_lru-2.1.0-py3-none-any.whl", hash = "sha256:fa12dcf99a42ac1280bc16c634bbaf06883809790f6304d85cdab3f666f33a7e", size = 6933, upload-time = "2026-01-17T22:52:17.389Z" }, + { url = "https://files.pythonhosted.org/packages/e5/e2/c2e3abf398f80732e58b03be77bde9022550d221dd8781bf586bd4d97cc1/async_lru-2.3.0-py3-none-any.whl", hash = "sha256:eea27b01841909316f2cc739807acea1c623df2be8c5cfad7583286397bb8315", size = 8403, upload-time = "2026-03-19T01:04:30.883Z" }, ] [[package]] name = "attrs" -version = "25.4.0" +version = "26.1.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/6b/5c/685e6633917e101e5dcb62b9dd76946cbb57c26e133bae9e0cd36033c0a9/attrs-25.4.0.tar.gz", hash = "sha256:16d5969b87f0859ef33a48b35d55ac1be6e42ae49d5e853b597db70c35c57e11", size = 934251, upload-time = "2025-10-06T13:54:44.725Z" } +sdist = { url = "https://files.pythonhosted.org/packages/9a/8e/82a0fe20a541c03148528be8cac2408564a6c9a0cc7e9171802bc1d26985/attrs-26.1.0.tar.gz", hash = "sha256:d03ceb89cb322a8fd706d4fb91940737b6642aa36998fe130a9bc96c985eff32", size = 952055, upload-time = "2026-03-19T14:22:25.026Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/3a/2a/7cc015f5b9f5db42b7d48157e23356022889fc354a2813c15934b7cb5c0e/attrs-25.4.0-py3-none-any.whl", hash = "sha256:adcf7e2a1fb3b36ac48d97835bb6d8ade15b8dcce26aba8bf1d14847b57a3373", size = 67615, upload-time = "2025-10-06T13:54:43.17Z" }, + { url = "https://files.pythonhosted.org/packages/64/b4/17d4b0b2a2dc85a6df63d1157e028ed19f90d4cd97c36717afef2bc2f395/attrs-26.1.0-py3-none-any.whl", hash = "sha256:c647aa4a12dfbad9333ca4e71fe62ddc36f4e63b2d260a37a8b83d2f043ac309", size = 67548, upload-time = "2026-03-19T14:22:23.645Z" }, ] [[package]] name = "babel" -version = "2.17.0" +version = "2.18.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = 
"https://files.pythonhosted.org/packages/7d/6b/d52e42361e1aa00709585ecc30b3f9684b3ab62530771402248b1b1d6240/babel-2.17.0.tar.gz", hash = "sha256:0c54cffb19f690cdcc52a3b50bcbf71e07a808d1c80d549f2459b9d2cf0afb9d", size = 9951852, upload-time = "2025-02-01T15:17:41.026Z" } +sdist = { url = "https://files.pythonhosted.org/packages/7d/b2/51899539b6ceeeb420d40ed3cd4b7a40519404f9baf3d4ac99dc413a834b/babel-2.18.0.tar.gz", hash = "sha256:b80b99a14bd085fcacfa15c9165f651fbb3406e66cc603abf11c5750937c992d", size = 9959554, upload-time = "2026-02-01T12:30:56.078Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/b7/b8/3fe70c75fe32afc4bb507f75563d39bc5642255d1d94f1f23604725780bf/babel-2.17.0-py3-none-any.whl", hash = "sha256:4d0b53093fdfb4b21c92b5213dba5a1b23885afa8383709427046b21c366e5f2", size = 10182537, upload-time = "2025-02-01T15:17:37.39Z" }, + { url = "https://files.pythonhosted.org/packages/77/f5/21d2de20e8b8b0408f0681956ca2c69f1320a3848ac50e6e7f39c6159675/babel-2.18.0-py3-none-any.whl", hash = "sha256:e2b422b277c2b9a9630c1d7903c2a00d0830c409c59ac8cae9081c92f1aeba35", size = 10196845, upload-time = "2026-02-01T12:30:53.445Z" }, ] [[package]] @@ -157,30 +167,30 @@ css = [ [[package]] name = "boto3" -version = "1.42.30" +version = "1.42.88" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "botocore" }, { name = "jmespath" }, { name = "s3transfer" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/42/79/2dac8b7cb075cfa43908ee9af3f8ee06880d84b86013854c5cca8945afac/boto3-1.42.30.tar.gz", hash = "sha256:ba9cd2f7819637d15bfbeb63af4c567fcc8a7dcd7b93dd12734ec58601169538", size = 112809, upload-time = "2026-01-16T20:37:23.636Z" } +sdist = { url = "https://files.pythonhosted.org/packages/da/bb/7d4435cca6fccf235dd40c891c731bcb9078e815917b57ebadd1e0ffabaf/boto3-1.42.88.tar.gz", hash = "sha256:2d22c70de5726918676a06f1a03acfb4d5d9ea92fc759354800b67b22aaeef19", size = 113238, upload-time = "2026-04-10T19:41:06.912Z" } wheels = [ - { 
url = "https://files.pythonhosted.org/packages/52/b3/2c0d828c9f668292e277ca5232e6160dd5b4b660a3f076f20dd5378baa1e/boto3-1.42.30-py3-none-any.whl", hash = "sha256:d7e548bea65e0ae2c465c77de937bc686b591aee6a352d5a19a16bc751e591c1", size = 140573, upload-time = "2026-01-16T20:37:22.089Z" }, + { url = "https://files.pythonhosted.org/packages/0a/2b/8bfddb39a19f5fbc16a869f1a394771e6223f07160dbc0ff6b38e05ea0ae/boto3-1.42.88-py3-none-any.whl", hash = "sha256:2d0f52c971503377e4370d2a83edee6f077ddb8e684366ff38df4f13581d9cfc", size = 140557, upload-time = "2026-04-10T19:41:05.309Z" }, ] [[package]] name = "botocore" -version = "1.42.30" +version = "1.42.88" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "jmespath" }, { name = "python-dateutil" }, { name = "urllib3" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/44/38/23862628a0eb044c8b8b3d7a9ad1920b3bfd6bce6d746d5a871e8382c7e4/botocore-1.42.30.tar.gz", hash = "sha256:9bf1662b8273d5cc3828a49f71ca85abf4e021011c1f0a71f41a2ea5769a5116", size = 14891439, upload-time = "2026-01-16T20:37:13.77Z" } +sdist = { url = "https://files.pythonhosted.org/packages/93/50/87966238f7aa3f7e5f87081185d5a407a95ede8b551e11bbe134ca3306dc/botocore-1.42.88.tar.gz", hash = "sha256:cbb59ee464662039b0c2c95a520cdf85b1e8ce00b72375ab9cd9f842cc001301", size = 15195331, upload-time = "2026-04-10T19:40:57.012Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/3d/8d/6d7b016383b1f74dd93611b1c5078bbaddaca901553ab886dcda87cae365/botocore-1.42.30-py3-none-any.whl", hash = "sha256:97070a438cac92430bb7b65f8ebd7075224f4a289719da4ee293d22d1e98db02", size = 14566340, upload-time = "2026-01-16T20:37:10.94Z" }, + { url = "https://files.pythonhosted.org/packages/2a/46/ad14e41245adb8b0c83663ba13e822b68a0df08999dd250e75b0750fdf6c/botocore-1.42.88-py3-none-any.whl", hash = "sha256:032375b213305b6b81eedb269eaeefdf96f674620799bbf96117dca86052cc1a", size = 14876640, upload-time = "2026-04-10T19:40:53.663Z" }, ] 
[[package]] @@ -200,11 +210,11 @@ wheels = [ [[package]] name = "certifi" -version = "2026.1.4" +version = "2026.2.25" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/e0/2d/a891ca51311197f6ad14a7ef42e2399f36cf2f9bd44752b3dc4eab60fdc5/certifi-2026.1.4.tar.gz", hash = "sha256:ac726dd470482006e014ad384921ed6438c457018f4b3d204aea4281258b2120", size = 154268, upload-time = "2026-01-04T02:42:41.825Z" } +sdist = { url = "https://files.pythonhosted.org/packages/af/2d/7bf41579a8986e348fa033a31cdd0e4121114f6bce2457e8876010b092dd/certifi-2026.2.25.tar.gz", hash = "sha256:e887ab5cee78ea814d3472169153c2d12cd43b14bd03329a39a9c6e2e80bfba7", size = 155029, upload-time = "2026-02-25T02:54:17.342Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/e6/ad/3cc14f097111b4de0040c83a525973216457bbeeb63739ef1ed275c1c021/certifi-2026.1.4-py3-none-any.whl", hash = "sha256:9943707519e4add1115f44c2bc244f782c0249876bf51b6599fee1ffbedd685c", size = 152900, upload-time = "2026-01-04T02:42:40.15Z" }, + { url = "https://files.pythonhosted.org/packages/9a/3c/c17fb3ca2d9c3acff52e30b309f538586f9f5b9c9cf454f3845fc9af4881/certifi-2026.2.25-py3-none-any.whl", hash = "sha256:027692e4402ad994f1c42e52a4997a9763c646b73e4096e4d5d6db8af1d6f0fa", size = 153684, upload-time = "2026-02-25T02:54:15.766Z" }, ] [[package]] @@ -254,55 +264,55 @@ wheels = [ [[package]] name = "charset-normalizer" -version = "3.4.4" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/13/69/33ddede1939fdd074bce5434295f38fae7136463422fe4fd3e0e89b98062/charset_normalizer-3.4.4.tar.gz", hash = "sha256:94537985111c35f28720e43603b8e7b43a6ecfb2ce1d3058bbe955b73404e21a", size = 129418, upload-time = "2025-10-14T04:42:32.879Z" } -wheels = [ - { url = 
"https://files.pythonhosted.org/packages/ed/27/c6491ff4954e58a10f69ad90aca8a1b6fe9c5d3c6f380907af3c37435b59/charset_normalizer-3.4.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:6e1fcf0720908f200cd21aa4e6750a48ff6ce4afe7ff5a79a90d5ed8a08296f8", size = 206988, upload-time = "2025-10-14T04:40:33.79Z" }, - { url = "https://files.pythonhosted.org/packages/94/59/2e87300fe67ab820b5428580a53cad894272dbb97f38a7a814a2a1ac1011/charset_normalizer-3.4.4-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5f819d5fe9234f9f82d75bdfa9aef3a3d72c4d24a6e57aeaebba32a704553aa0", size = 147324, upload-time = "2025-10-14T04:40:34.961Z" }, - { url = "https://files.pythonhosted.org/packages/07/fb/0cf61dc84b2b088391830f6274cb57c82e4da8bbc2efeac8c025edb88772/charset_normalizer-3.4.4-cp311-cp311-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:a59cb51917aa591b1c4e6a43c132f0cdc3c76dbad6155df4e28ee626cc77a0a3", size = 142742, upload-time = "2025-10-14T04:40:36.105Z" }, - { url = "https://files.pythonhosted.org/packages/62/8b/171935adf2312cd745d290ed93cf16cf0dfe320863ab7cbeeae1dcd6535f/charset_normalizer-3.4.4-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:8ef3c867360f88ac904fd3f5e1f902f13307af9052646963ee08ff4f131adafc", size = 160863, upload-time = "2025-10-14T04:40:37.188Z" }, - { url = "https://files.pythonhosted.org/packages/09/73/ad875b192bda14f2173bfc1bc9a55e009808484a4b256748d931b6948442/charset_normalizer-3.4.4-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:d9e45d7faa48ee908174d8fe84854479ef838fc6a705c9315372eacbc2f02897", size = 157837, upload-time = "2025-10-14T04:40:38.435Z" }, - { url = 
"https://files.pythonhosted.org/packages/6d/fc/de9cce525b2c5b94b47c70a4b4fb19f871b24995c728e957ee68ab1671ea/charset_normalizer-3.4.4-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:840c25fb618a231545cbab0564a799f101b63b9901f2569faecd6b222ac72381", size = 151550, upload-time = "2025-10-14T04:40:40.053Z" }, - { url = "https://files.pythonhosted.org/packages/55/c2/43edd615fdfba8c6f2dfbd459b25a6b3b551f24ea21981e23fb768503ce1/charset_normalizer-3.4.4-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:ca5862d5b3928c4940729dacc329aa9102900382fea192fc5e52eb69d6093815", size = 149162, upload-time = "2025-10-14T04:40:41.163Z" }, - { url = "https://files.pythonhosted.org/packages/03/86/bde4ad8b4d0e9429a4e82c1e8f5c659993a9a863ad62c7df05cf7b678d75/charset_normalizer-3.4.4-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d9c7f57c3d666a53421049053eaacdd14bbd0a528e2186fcb2e672effd053bb0", size = 150019, upload-time = "2025-10-14T04:40:42.276Z" }, - { url = "https://files.pythonhosted.org/packages/1f/86/a151eb2af293a7e7bac3a739b81072585ce36ccfb4493039f49f1d3cae8c/charset_normalizer-3.4.4-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:277e970e750505ed74c832b4bf75dac7476262ee2a013f5574dd49075879e161", size = 143310, upload-time = "2025-10-14T04:40:43.439Z" }, - { url = "https://files.pythonhosted.org/packages/b5/fe/43dae6144a7e07b87478fdfc4dbe9efd5defb0e7ec29f5f58a55aeef7bf7/charset_normalizer-3.4.4-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:31fd66405eaf47bb62e8cd575dc621c56c668f27d46a61d975a249930dd5e2a4", size = 162022, upload-time = "2025-10-14T04:40:44.547Z" }, - { url = "https://files.pythonhosted.org/packages/80/e6/7aab83774f5d2bca81f42ac58d04caf44f0cc2b65fc6db2b3b2e8a05f3b3/charset_normalizer-3.4.4-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:0d3d8f15c07f86e9ff82319b3d9ef6f4bf907608f53fe9d92b28ea9ae3d1fd89", size = 149383, upload-time = "2025-10-14T04:40:46.018Z" }, 
- { url = "https://files.pythonhosted.org/packages/4f/e8/b289173b4edae05c0dde07f69f8db476a0b511eac556dfe0d6bda3c43384/charset_normalizer-3.4.4-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:9f7fcd74d410a36883701fafa2482a6af2ff5ba96b9a620e9e0721e28ead5569", size = 159098, upload-time = "2025-10-14T04:40:47.081Z" }, - { url = "https://files.pythonhosted.org/packages/d8/df/fe699727754cae3f8478493c7f45f777b17c3ef0600e28abfec8619eb49c/charset_normalizer-3.4.4-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:ebf3e58c7ec8a8bed6d66a75d7fb37b55e5015b03ceae72a8e7c74495551e224", size = 152991, upload-time = "2025-10-14T04:40:48.246Z" }, - { url = "https://files.pythonhosted.org/packages/1a/86/584869fe4ddb6ffa3bd9f491b87a01568797fb9bd8933f557dba9771beaf/charset_normalizer-3.4.4-cp311-cp311-win32.whl", hash = "sha256:eecbc200c7fd5ddb9a7f16c7decb07b566c29fa2161a16cf67b8d068bd21690a", size = 99456, upload-time = "2025-10-14T04:40:49.376Z" }, - { url = "https://files.pythonhosted.org/packages/65/f6/62fdd5feb60530f50f7e38b4f6a1d5203f4d16ff4f9f0952962c044e919a/charset_normalizer-3.4.4-cp311-cp311-win_amd64.whl", hash = "sha256:5ae497466c7901d54b639cf42d5b8c1b6a4fead55215500d2f486d34db48d016", size = 106978, upload-time = "2025-10-14T04:40:50.844Z" }, - { url = "https://files.pythonhosted.org/packages/7a/9d/0710916e6c82948b3be62d9d398cb4fcf4e97b56d6a6aeccd66c4b2f2bd5/charset_normalizer-3.4.4-cp311-cp311-win_arm64.whl", hash = "sha256:65e2befcd84bc6f37095f5961e68a6f077bf44946771354a28ad434c2cce0ae1", size = 99969, upload-time = "2025-10-14T04:40:52.272Z" }, - { url = "https://files.pythonhosted.org/packages/f3/85/1637cd4af66fa687396e757dec650f28025f2a2f5a5531a3208dc0ec43f2/charset_normalizer-3.4.4-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:0a98e6759f854bd25a58a73fa88833fba3b7c491169f86ce1180c948ab3fd394", size = 208425, upload-time = "2025-10-14T04:40:53.353Z" }, - { url = 
"https://files.pythonhosted.org/packages/9d/6a/04130023fef2a0d9c62d0bae2649b69f7b7d8d24ea5536feef50551029df/charset_normalizer-3.4.4-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b5b290ccc2a263e8d185130284f8501e3e36c5e02750fc6b6bdeb2e9e96f1e25", size = 148162, upload-time = "2025-10-14T04:40:54.558Z" }, - { url = "https://files.pythonhosted.org/packages/78/29/62328d79aa60da22c9e0b9a66539feae06ca0f5a4171ac4f7dc285b83688/charset_normalizer-3.4.4-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:74bb723680f9f7a6234dcf67aea57e708ec1fbdf5699fb91dfd6f511b0a320ef", size = 144558, upload-time = "2025-10-14T04:40:55.677Z" }, - { url = "https://files.pythonhosted.org/packages/86/bb/b32194a4bf15b88403537c2e120b817c61cd4ecffa9b6876e941c3ee38fe/charset_normalizer-3.4.4-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:f1e34719c6ed0b92f418c7c780480b26b5d9c50349e9a9af7d76bf757530350d", size = 161497, upload-time = "2025-10-14T04:40:57.217Z" }, - { url = "https://files.pythonhosted.org/packages/19/89/a54c82b253d5b9b111dc74aca196ba5ccfcca8242d0fb64146d4d3183ff1/charset_normalizer-3.4.4-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:2437418e20515acec67d86e12bf70056a33abdacb5cb1655042f6538d6b085a8", size = 159240, upload-time = "2025-10-14T04:40:58.358Z" }, - { url = "https://files.pythonhosted.org/packages/c0/10/d20b513afe03acc89ec33948320a5544d31f21b05368436d580dec4e234d/charset_normalizer-3.4.4-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:11d694519d7f29d6cd09f6ac70028dba10f92f6cdd059096db198c283794ac86", size = 153471, upload-time = "2025-10-14T04:40:59.468Z" }, - { url = 
"https://files.pythonhosted.org/packages/61/fa/fbf177b55bdd727010f9c0a3c49eefa1d10f960e5f09d1d887bf93c2e698/charset_normalizer-3.4.4-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:ac1c4a689edcc530fc9d9aa11f5774b9e2f33f9a0c6a57864e90908f5208d30a", size = 150864, upload-time = "2025-10-14T04:41:00.623Z" }, - { url = "https://files.pythonhosted.org/packages/05/12/9fbc6a4d39c0198adeebbde20b619790e9236557ca59fc40e0e3cebe6f40/charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:21d142cc6c0ec30d2efee5068ca36c128a30b0f2c53c1c07bd78cb6bc1d3be5f", size = 150647, upload-time = "2025-10-14T04:41:01.754Z" }, - { url = "https://files.pythonhosted.org/packages/ad/1f/6a9a593d52e3e8c5d2b167daf8c6b968808efb57ef4c210acb907c365bc4/charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:5dbe56a36425d26d6cfb40ce79c314a2e4dd6211d51d6d2191c00bed34f354cc", size = 145110, upload-time = "2025-10-14T04:41:03.231Z" }, - { url = "https://files.pythonhosted.org/packages/30/42/9a52c609e72471b0fc54386dc63c3781a387bb4fe61c20231a4ebcd58bdd/charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:5bfbb1b9acf3334612667b61bd3002196fe2a1eb4dd74d247e0f2a4d50ec9bbf", size = 162839, upload-time = "2025-10-14T04:41:04.715Z" }, - { url = "https://files.pythonhosted.org/packages/c4/5b/c0682bbf9f11597073052628ddd38344a3d673fda35a36773f7d19344b23/charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:d055ec1e26e441f6187acf818b73564e6e6282709e9bcb5b63f5b23068356a15", size = 150667, upload-time = "2025-10-14T04:41:05.827Z" }, - { url = "https://files.pythonhosted.org/packages/e4/24/a41afeab6f990cf2daf6cb8c67419b63b48cf518e4f56022230840c9bfb2/charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:af2d8c67d8e573d6de5bc30cdb27e9b95e49115cd9baad5ddbd1a6207aaa82a9", size = 160535, upload-time = "2025-10-14T04:41:06.938Z" }, - { url = 
"https://files.pythonhosted.org/packages/2a/e5/6a4ce77ed243c4a50a1fecca6aaaab419628c818a49434be428fe24c9957/charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:780236ac706e66881f3b7f2f32dfe90507a09e67d1d454c762cf642e6e1586e0", size = 154816, upload-time = "2025-10-14T04:41:08.101Z" }, - { url = "https://files.pythonhosted.org/packages/a8/ef/89297262b8092b312d29cdb2517cb1237e51db8ecef2e9af5edbe7b683b1/charset_normalizer-3.4.4-cp312-cp312-win32.whl", hash = "sha256:5833d2c39d8896e4e19b689ffc198f08ea58116bee26dea51e362ecc7cd3ed26", size = 99694, upload-time = "2025-10-14T04:41:09.23Z" }, - { url = "https://files.pythonhosted.org/packages/3d/2d/1e5ed9dd3b3803994c155cd9aacb60c82c331bad84daf75bcb9c91b3295e/charset_normalizer-3.4.4-cp312-cp312-win_amd64.whl", hash = "sha256:a79cfe37875f822425b89a82333404539ae63dbdddf97f84dcbc3d339aae9525", size = 107131, upload-time = "2025-10-14T04:41:10.467Z" }, - { url = "https://files.pythonhosted.org/packages/d0/d9/0ed4c7098a861482a7b6a95603edce4c0d9db2311af23da1fb2b75ec26fc/charset_normalizer-3.4.4-cp312-cp312-win_arm64.whl", hash = "sha256:376bec83a63b8021bb5c8ea75e21c4ccb86e7e45ca4eb81146091b56599b80c3", size = 100390, upload-time = "2025-10-14T04:41:11.915Z" }, - { url = "https://files.pythonhosted.org/packages/0a/4c/925909008ed5a988ccbb72dcc897407e5d6d3bd72410d69e051fc0c14647/charset_normalizer-3.4.4-py3-none-any.whl", hash = "sha256:7a32c560861a02ff789ad905a2fe94e3f840803362c84fecf1851cb4cf3dc37f", size = 53402, upload-time = "2025-10-14T04:42:31.76Z" }, +version = "3.4.7" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/e7/a1/67fe25fac3c7642725500a3f6cfe5821ad557c3abb11c9d20d12c7008d3e/charset_normalizer-3.4.7.tar.gz", hash = "sha256:ae89db9e5f98a11a4bf50407d4363e7b09b31e55bc117b4f7d80aab97ba009e5", size = 144271, upload-time = "2026-04-02T09:28:39.342Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/c2/d7/b5b7020a0565c2e9fa8c09f4b5fa6232feb326b8c20081ccded47ea368fd/charset_normalizer-3.4.7-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:7641bb8895e77f921102f72833904dcd9901df5d6d72a2ab8f31d04b7e51e4e7", size = 309705, upload-time = "2026-04-02T09:26:02.191Z" }, + { url = "https://files.pythonhosted.org/packages/5a/53/58c29116c340e5456724ecd2fff4196d236b98f3da97b404bc5e51ac3493/charset_normalizer-3.4.7-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:202389074300232baeb53ae2569a60901f7efadd4245cf3a3bf0617d60b439d7", size = 206419, upload-time = "2026-04-02T09:26:03.583Z" }, + { url = "https://files.pythonhosted.org/packages/b2/02/e8146dc6591a37a00e5144c63f29fb7c97a734ea8a111190783c0e60ab63/charset_normalizer-3.4.7-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:30b8d1d8c52a48c2c5690e152c169b673487a2a58de1ec7393196753063fcd5e", size = 227901, upload-time = "2026-04-02T09:26:04.738Z" }, + { url = "https://files.pythonhosted.org/packages/fb/73/77486c4cd58f1267bf17db420e930c9afa1b3be3fe8c8b8ebbebc9624359/charset_normalizer-3.4.7-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:532bc9bf33a68613fd7d65e4b1c71a6a38d7d42604ecf239c77392e9b4e8998c", size = 222742, upload-time = "2026-04-02T09:26:06.36Z" }, + { url = "https://files.pythonhosted.org/packages/a1/fa/f74eb381a7d94ded44739e9d94de18dc5edc9c17fb8c11f0a6890696c0a9/charset_normalizer-3.4.7-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2fe249cb4651fd12605b7288b24751d8bfd46d35f12a20b1ba33dea122e690df", size = 214061, upload-time = "2026-04-02T09:26:08.347Z" }, + { url = "https://files.pythonhosted.org/packages/dc/92/42bd3cefcf7687253fb86694b45f37b733c97f59af3724f356fa92b8c344/charset_normalizer-3.4.7-cp311-cp311-manylinux_2_31_armv7l.whl", hash = 
"sha256:65bcd23054beab4d166035cabbc868a09c1a49d1efe458fe8e4361215df40265", size = 199239, upload-time = "2026-04-02T09:26:09.823Z" }, + { url = "https://files.pythonhosted.org/packages/4c/3d/069e7184e2aa3b3cddc700e3dd267413dc259854adc3380421c805c6a17d/charset_normalizer-3.4.7-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:08e721811161356f97b4059a9ba7bafb23ea5ee2255402c42881c214e173c6b4", size = 210173, upload-time = "2026-04-02T09:26:10.953Z" }, + { url = "https://files.pythonhosted.org/packages/62/51/9d56feb5f2e7074c46f93e0ebdbe61f0848ee246e2f0d89f8e20b89ebb8f/charset_normalizer-3.4.7-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:e060d01aec0a910bdccb8be71faf34e7799ce36950f8294c8bf612cba65a2c9e", size = 209841, upload-time = "2026-04-02T09:26:12.142Z" }, + { url = "https://files.pythonhosted.org/packages/d2/59/893d8f99cc4c837dda1fe2f1139079703deb9f321aabcb032355de13b6c7/charset_normalizer-3.4.7-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:38c0109396c4cfc574d502df99742a45c72c08eff0a36158b6f04000043dbf38", size = 200304, upload-time = "2026-04-02T09:26:13.711Z" }, + { url = "https://files.pythonhosted.org/packages/7d/1d/ee6f3be3464247578d1ed5c46de545ccc3d3ff933695395c402c21fa6b77/charset_normalizer-3.4.7-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:1c2a768fdd44ee4a9339a9b0b130049139b8ce3c01d2ce09f67f5a68048d477c", size = 229455, upload-time = "2026-04-02T09:26:14.941Z" }, + { url = "https://files.pythonhosted.org/packages/54/bb/8fb0a946296ea96a488928bdce8ef99023998c48e4713af533e9bb98ef07/charset_normalizer-3.4.7-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:1a87ca9d5df6fe460483d9a5bbf2b18f620cbed41b432e2bddb686228282d10b", size = 210036, upload-time = "2026-04-02T09:26:16.478Z" }, + { url = "https://files.pythonhosted.org/packages/9a/bc/015b2387f913749f82afd4fcba07846d05b6d784dd16123cb66860e0237d/charset_normalizer-3.4.7-cp311-cp311-musllinux_1_2_s390x.whl", hash = 
"sha256:d635aab80466bc95771bb78d5370e74d36d1fe31467b6b29b8b57b2a3cd7d22c", size = 224739, upload-time = "2026-04-02T09:26:17.751Z" }, + { url = "https://files.pythonhosted.org/packages/17/ab/63133691f56baae417493cba6b7c641571a2130eb7bceba6773367ab9ec5/charset_normalizer-3.4.7-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:ae196f021b5e7c78e918242d217db021ed2a6ace2bc6ae94c0fc596221c7f58d", size = 216277, upload-time = "2026-04-02T09:26:18.981Z" }, + { url = "https://files.pythonhosted.org/packages/06/6d/3be70e827977f20db77c12a97e6a9f973631a45b8d186c084527e53e77a4/charset_normalizer-3.4.7-cp311-cp311-win32.whl", hash = "sha256:adb2597b428735679446b46c8badf467b4ca5f5056aae4d51a19f9570301b1ad", size = 147819, upload-time = "2026-04-02T09:26:20.295Z" }, + { url = "https://files.pythonhosted.org/packages/20/d9/5f67790f06b735d7c7637171bbfd89882ad67201891b7275e51116ed8207/charset_normalizer-3.4.7-cp311-cp311-win_amd64.whl", hash = "sha256:8e385e4267ab76874ae30db04c627faaaf0b509e1ccc11a95b3fc3e83f855c00", size = 159281, upload-time = "2026-04-02T09:26:21.74Z" }, + { url = "https://files.pythonhosted.org/packages/ca/83/6413f36c5a34afead88ce6f66684d943d91f233d76dd083798f9602b75ae/charset_normalizer-3.4.7-cp311-cp311-win_arm64.whl", hash = "sha256:d4a48e5b3c2a489fae013b7589308a40146ee081f6f509e047e0e096084ceca1", size = 147843, upload-time = "2026-04-02T09:26:22.901Z" }, + { url = "https://files.pythonhosted.org/packages/0c/eb/4fc8d0a7110eb5fc9cc161723a34a8a6c200ce3b4fbf681bc86feee22308/charset_normalizer-3.4.7-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:eca9705049ad3c7345d574e3510665cb2cf844c2f2dcfe675332677f081cbd46", size = 311328, upload-time = "2026-04-02T09:26:24.331Z" }, + { url = "https://files.pythonhosted.org/packages/f8/e3/0fadc706008ac9d7b9b5be6dc767c05f9d3e5df51744ce4cc9605de7b9f4/charset_normalizer-3.4.7-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = 
"sha256:6178f72c5508bfc5fd446a5905e698c6212932f25bcdd4b47a757a50605a90e2", size = 208061, upload-time = "2026-04-02T09:26:25.568Z" }, + { url = "https://files.pythonhosted.org/packages/42/f0/3dd1045c47f4a4604df85ec18ad093912ae1344ac706993aff91d38773a2/charset_normalizer-3.4.7-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:e1421b502d83040e6d7fb2fb18dff63957f720da3d77b2fbd3187ceb63755d7b", size = 229031, upload-time = "2026-04-02T09:26:26.865Z" }, + { url = "https://files.pythonhosted.org/packages/dc/67/675a46eb016118a2fbde5a277a5d15f4f69d5f3f5f338e5ee2f8948fcf43/charset_normalizer-3.4.7-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:edac0f1ab77644605be2cbba52e6b7f630731fc42b34cb0f634be1a6eface56a", size = 225239, upload-time = "2026-04-02T09:26:28.044Z" }, + { url = "https://files.pythonhosted.org/packages/4b/f8/d0118a2f5f23b02cd166fa385c60f9b0d4f9194f574e2b31cef350ad7223/charset_normalizer-3.4.7-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:5649fd1c7bade02f320a462fdefd0b4bd3ce036065836d4f42e0de958038e116", size = 216589, upload-time = "2026-04-02T09:26:29.239Z" }, + { url = "https://files.pythonhosted.org/packages/b1/f1/6d2b0b261b6c4ceef0fcb0d17a01cc5bc53586c2d4796fa04b5c540bc13d/charset_normalizer-3.4.7-cp312-cp312-manylinux_2_31_armv7l.whl", hash = "sha256:203104ed3e428044fd943bc4bf45fa73c0730391f9621e37fe39ecf477b128cb", size = 202733, upload-time = "2026-04-02T09:26:30.5Z" }, + { url = "https://files.pythonhosted.org/packages/6f/c0/7b1f943f7e87cc3db9626ba17807d042c38645f0a1d4415c7a14afb5591f/charset_normalizer-3.4.7-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:298930cec56029e05497a76988377cbd7457ba864beeea92ad7e844fe74cd1f1", size = 212652, upload-time = "2026-04-02T09:26:31.709Z" }, + { url = 
"https://files.pythonhosted.org/packages/38/dd/5a9ab159fe45c6e72079398f277b7d2b523e7f716acc489726115a910097/charset_normalizer-3.4.7-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:708838739abf24b2ceb208d0e22403dd018faeef86ddac04319a62ae884c4f15", size = 211229, upload-time = "2026-04-02T09:26:33.282Z" }, + { url = "https://files.pythonhosted.org/packages/d5/ff/531a1cad5ca855d1c1a8b69cb71abfd6d85c0291580146fda7c82857caa1/charset_normalizer-3.4.7-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:0f7eb884681e3938906ed0434f20c63046eacd0111c4ba96f27b76084cd679f5", size = 203552, upload-time = "2026-04-02T09:26:34.845Z" }, + { url = "https://files.pythonhosted.org/packages/c1/4c/a5fb52d528a8ca41f7598cb619409ece30a169fbdf9cdce592e53b46c3a6/charset_normalizer-3.4.7-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:4dc1e73c36828f982bfe79fadf5919923f8a6f4df2860804db9a98c48824ce8d", size = 230806, upload-time = "2026-04-02T09:26:36.152Z" }, + { url = "https://files.pythonhosted.org/packages/59/7a/071feed8124111a32b316b33ae4de83d36923039ef8cf48120266844285b/charset_normalizer-3.4.7-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:aed52fea0513bac0ccde438c188c8a471c4e0f457c2dd20cdbf6ea7a450046c7", size = 212316, upload-time = "2026-04-02T09:26:37.672Z" }, + { url = "https://files.pythonhosted.org/packages/fd/35/f7dba3994312d7ba508e041eaac39a36b120f32d4c8662b8814dab876431/charset_normalizer-3.4.7-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:fea24543955a6a729c45a73fe90e08c743f0b3334bbf3201e6c4bc1b0c7fa464", size = 227274, upload-time = "2026-04-02T09:26:38.93Z" }, + { url = "https://files.pythonhosted.org/packages/8a/2d/a572df5c9204ab7688ec1edc895a73ebded3b023bb07364710b05dd1c9be/charset_normalizer-3.4.7-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:bb6d88045545b26da47aa879dd4a89a71d1dce0f0e549b1abcb31dfe4a8eac49", size = 218468, upload-time = "2026-04-02T09:26:40.17Z" }, + { url = 
"https://files.pythonhosted.org/packages/86/eb/890922a8b03a568ca2f336c36585a4713c55d4d67bf0f0c78924be6315ca/charset_normalizer-3.4.7-cp312-cp312-win32.whl", hash = "sha256:2257141f39fe65a3fdf38aeccae4b953e5f3b3324f4ff0daf9f15b8518666a2c", size = 148460, upload-time = "2026-04-02T09:26:41.416Z" }, + { url = "https://files.pythonhosted.org/packages/35/d9/0e7dffa06c5ab081f75b1b786f0aefc88365825dfcd0ac544bdb7b2b6853/charset_normalizer-3.4.7-cp312-cp312-win_amd64.whl", hash = "sha256:5ed6ab538499c8644b8a3e18debabcd7ce684f3fa91cf867521a7a0279cab2d6", size = 159330, upload-time = "2026-04-02T09:26:42.554Z" }, + { url = "https://files.pythonhosted.org/packages/9e/5d/481bcc2a7c88ea6b0878c299547843b2521ccbc40980cb406267088bc701/charset_normalizer-3.4.7-cp312-cp312-win_arm64.whl", hash = "sha256:56be790f86bfb2c98fb742ce566dfb4816e5a83384616ab59c49e0604d49c51d", size = 147828, upload-time = "2026-04-02T09:26:44.075Z" }, + { url = "https://files.pythonhosted.org/packages/db/8f/61959034484a4a7c527811f4721e75d02d653a35afb0b6054474d8185d4c/charset_normalizer-3.4.7-py3-none-any.whl", hash = "sha256:3dce51d0f5e7951f8bb4900c257dad282f49190fdbebecd4ba99bcc41fef404d", size = 61958, upload-time = "2026-04-02T09:28:37.794Z" }, ] [[package]] name = "click" -version = "8.3.1" +version = "8.3.2" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "colorama", marker = "sys_platform == 'win32'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/3d/fa/656b739db8587d7b5dfa22e22ed02566950fbfbcdc20311993483657a5c0/click-8.3.1.tar.gz", hash = "sha256:12ff4785d337a1bb490bb7e9c2b1ee5da3112e94a8622f26a6c77f5d2fc6842a", size = 295065, upload-time = "2025-11-15T20:45:42.706Z" } +sdist = { url = "https://files.pythonhosted.org/packages/57/75/31212c6bf2503fdf920d87fee5d7a86a2e3bcf444984126f13d8e4016804/click-8.3.2.tar.gz", hash = "sha256:14162b8b3b3550a7d479eafa77dfd3c38d9dc8951f6f69c78913a8f9a7540fd5", size = 302856, upload-time = "2026-04-03T19:14:45.118Z" } wheels = 
[ - { url = "https://files.pythonhosted.org/packages/98/78/01c019cdb5d6498122777c1a43056ebb3ebfeef2076d9d026bfe15583b2b/click-8.3.1-py3-none-any.whl", hash = "sha256:981153a64e25f12d547d3426c367a4857371575ee7ad18df2a6183ab0545b2a6", size = 108274, upload-time = "2025-11-15T20:45:41.139Z" }, + { url = "https://files.pythonhosted.org/packages/e4/20/71885d8b97d4f3dde17b1fdb92dbd4908b00541c5a3379787137285f602e/click-8.3.2-py3-none-any.whl", hash = "sha256:1924d2c27c5653561cd2cae4548d1406039cb79b858b747cfea24924bbc1616d", size = 108379, upload-time = "2026-04-03T19:14:43.505Z" }, ] [[package]] @@ -328,18 +338,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/d1/d6/3965ed04c63042e047cb6a3e6ed1a63a35087b6a609aa3a15ed8ac56c221/colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6", size = 25335, upload-time = "2022-10-25T02:36:20.889Z" }, ] -[[package]] -name = "coloredlogs" -version = "15.0.1" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "humanfriendly" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/cc/c7/eed8f27100517e8c0e6b923d5f0845d0cb99763da6fdee00478f91db7325/coloredlogs-15.0.1.tar.gz", hash = "sha256:7c991aa71a4577af2f82600d8f8f3a89f936baeaf9b50a9c197da014e5bf16b0", size = 278520, upload-time = "2021-06-11T10:22:45.202Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/a7/06/3d6badcf13db419e25b07041d9c7b4a2c331d3f4e7134445ec5df57714cd/coloredlogs-15.0.1-py2.py3-none-any.whl", hash = "sha256:612ee75c546f53e92e70049c9dbfcc18c935a2b9a53b66085ce9ef6a6e5c0934", size = 46018, upload-time = "2021-06-11T10:22:42.561Z" }, -] - [[package]] name = "comm" version = "0.2.3" @@ -398,19 +396,19 @@ wheels = [ [[package]] name = "debugpy" -version = "1.8.19" +version = "1.8.20" source = { registry = "https://pypi.org/simple" } -sdist = { url = 
"https://files.pythonhosted.org/packages/73/75/9e12d4d42349b817cd545b89247696c67917aab907012ae5b64bbfea3199/debugpy-1.8.19.tar.gz", hash = "sha256:eea7e5987445ab0b5ed258093722d5ecb8bb72217c5c9b1e21f64efe23ddebdb", size = 1644590, upload-time = "2025-12-15T21:53:28.044Z" } +sdist = { url = "https://files.pythonhosted.org/packages/e0/b7/cd8080344452e4874aae67c40d8940e2b4d47b01601a8fd9f44786c757c7/debugpy-1.8.20.tar.gz", hash = "sha256:55bc8701714969f1ab89a6d5f2f3d40c36f91b2cbe2f65d98bf8196f6a6a2c33", size = 1645207, upload-time = "2026-01-29T23:03:28.199Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/80/e2/48531a609b5a2aa94c6b6853afdfec8da05630ab9aaa96f1349e772119e9/debugpy-1.8.19-cp311-cp311-macosx_15_0_universal2.whl", hash = "sha256:c5dcfa21de1f735a4f7ced4556339a109aa0f618d366ede9da0a3600f2516d8b", size = 2207620, upload-time = "2025-12-15T21:53:37.1Z" }, - { url = "https://files.pythonhosted.org/packages/1b/d4/97775c01d56071969f57d93928899e5616a4cfbbf4c8cc75390d3a51c4a4/debugpy-1.8.19-cp311-cp311-manylinux_2_34_x86_64.whl", hash = "sha256:806d6800246244004625d5222d7765874ab2d22f3ba5f615416cf1342d61c488", size = 3170796, upload-time = "2025-12-15T21:53:38.513Z" }, - { url = "https://files.pythonhosted.org/packages/8d/7e/8c7681bdb05be9ec972bbb1245eb7c4c7b0679bb6a9e6408d808bc876d3d/debugpy-1.8.19-cp311-cp311-win32.whl", hash = "sha256:783a519e6dfb1f3cd773a9bda592f4887a65040cb0c7bd38dde410f4e53c40d4", size = 5164287, upload-time = "2025-12-15T21:53:40.857Z" }, - { url = "https://files.pythonhosted.org/packages/f2/a8/aaac7ff12ddf5d68a39e13a423a8490426f5f661384f5ad8d9062761bd8e/debugpy-1.8.19-cp311-cp311-win_amd64.whl", hash = "sha256:14035cbdbb1fe4b642babcdcb5935c2da3b1067ac211c5c5a8fdc0bb31adbcaa", size = 5188269, upload-time = "2025-12-15T21:53:42.359Z" }, - { url = "https://files.pythonhosted.org/packages/4a/15/d762e5263d9e25b763b78be72dc084c7a32113a0bac119e2f7acae7700ed/debugpy-1.8.19-cp312-cp312-macosx_15_0_universal2.whl", hash = 
"sha256:bccb1540a49cde77edc7ce7d9d075c1dbeb2414751bc0048c7a11e1b597a4c2e", size = 2549995, upload-time = "2025-12-15T21:53:43.773Z" }, - { url = "https://files.pythonhosted.org/packages/a7/88/f7d25c68b18873b7c53d7c156ca7a7ffd8e77073aa0eac170a9b679cf786/debugpy-1.8.19-cp312-cp312-manylinux_2_34_x86_64.whl", hash = "sha256:e9c68d9a382ec754dc05ed1d1b4ed5bd824b9f7c1a8cd1083adb84b3c93501de", size = 4309891, upload-time = "2025-12-15T21:53:45.26Z" }, - { url = "https://files.pythonhosted.org/packages/c5/4f/a65e973aba3865794da65f71971dca01ae66666132c7b2647182d5be0c5f/debugpy-1.8.19-cp312-cp312-win32.whl", hash = "sha256:6599cab8a783d1496ae9984c52cb13b7c4a3bd06a8e6c33446832a5d97ce0bee", size = 5286355, upload-time = "2025-12-15T21:53:46.763Z" }, - { url = "https://files.pythonhosted.org/packages/d8/3a/d3d8b48fec96e3d824e404bf428276fb8419dfa766f78f10b08da1cb2986/debugpy-1.8.19-cp312-cp312-win_amd64.whl", hash = "sha256:66e3d2fd8f2035a8f111eb127fa508469dfa40928a89b460b41fd988684dc83d", size = 5328239, upload-time = "2025-12-15T21:53:48.868Z" }, - { url = "https://files.pythonhosted.org/packages/25/3e/e27078370414ef35fafad2c06d182110073daaeb5d3bf734b0b1eeefe452/debugpy-1.8.19-py2.py3-none-any.whl", hash = "sha256:360ffd231a780abbc414ba0f005dad409e71c78637efe8f2bd75837132a41d38", size = 5292321, upload-time = "2025-12-15T21:54:16.024Z" }, + { url = "https://files.pythonhosted.org/packages/51/56/c3baf5cbe4dd77427fd9aef99fcdade259ad128feeb8a786c246adb838e5/debugpy-1.8.20-cp311-cp311-macosx_15_0_universal2.whl", hash = "sha256:eada6042ad88fa1571b74bd5402ee8b86eded7a8f7b827849761700aff171f1b", size = 2208318, upload-time = "2026-01-29T23:03:36.481Z" }, + { url = "https://files.pythonhosted.org/packages/9a/7d/4fa79a57a8e69fe0d9763e98d1110320f9ecd7f1f362572e3aafd7417c9d/debugpy-1.8.20-cp311-cp311-manylinux_2_34_x86_64.whl", hash = "sha256:7de0b7dfeedc504421032afba845ae2a7bcc32ddfb07dae2c3ca5442f821c344", size = 3171493, upload-time = "2026-01-29T23:03:37.775Z" }, + { url = 
"https://files.pythonhosted.org/packages/7d/f2/1e8f8affe51e12a26f3a8a8a4277d6e60aa89d0a66512f63b1e799d424a4/debugpy-1.8.20-cp311-cp311-win32.whl", hash = "sha256:773e839380cf459caf73cc533ea45ec2737a5cc184cf1b3b796cd4fd98504fec", size = 5209240, upload-time = "2026-01-29T23:03:39.109Z" }, + { url = "https://files.pythonhosted.org/packages/d5/92/1cb532e88560cbee973396254b21bece8c5d7c2ece958a67afa08c9f10dc/debugpy-1.8.20-cp311-cp311-win_amd64.whl", hash = "sha256:1f7650546e0eded1902d0f6af28f787fa1f1dbdbc97ddabaf1cd963a405930cb", size = 5233481, upload-time = "2026-01-29T23:03:40.659Z" }, + { url = "https://files.pythonhosted.org/packages/14/57/7f34f4736bfb6e00f2e4c96351b07805d83c9a7b33d28580ae01374430f7/debugpy-1.8.20-cp312-cp312-macosx_15_0_universal2.whl", hash = "sha256:4ae3135e2089905a916909ef31922b2d733d756f66d87345b3e5e52b7a55f13d", size = 2550686, upload-time = "2026-01-29T23:03:42.023Z" }, + { url = "https://files.pythonhosted.org/packages/ab/78/b193a3975ca34458f6f0e24aaf5c3e3da72f5401f6054c0dfd004b41726f/debugpy-1.8.20-cp312-cp312-manylinux_2_34_x86_64.whl", hash = "sha256:88f47850a4284b88bd2bfee1f26132147d5d504e4e86c22485dfa44b97e19b4b", size = 4310588, upload-time = "2026-01-29T23:03:43.314Z" }, + { url = "https://files.pythonhosted.org/packages/c1/55/f14deb95eaf4f30f07ef4b90a8590fc05d9e04df85ee379712f6fb6736d7/debugpy-1.8.20-cp312-cp312-win32.whl", hash = "sha256:4057ac68f892064e5f98209ab582abfee3b543fb55d2e87610ddc133a954d390", size = 5331372, upload-time = "2026-01-29T23:03:45.526Z" }, + { url = "https://files.pythonhosted.org/packages/a1/39/2bef246368bd42f9bd7cba99844542b74b84dacbdbea0833e610f384fee8/debugpy-1.8.20-cp312-cp312-win_amd64.whl", hash = "sha256:a1a8f851e7cf171330679ef6997e9c579ef6dd33c9098458bd9986a0f4ca52e3", size = 5372835, upload-time = "2026-01-29T23:03:47.245Z" }, + { url = "https://files.pythonhosted.org/packages/e0/c3/7f67dea8ccf8fdcb9c99033bbe3e90b9e7395415843accb81428c441be2d/debugpy-1.8.20-py2.py3-none-any.whl", hash = 
"sha256:5be9bed9ae3be00665a06acaa48f8329d2b9632f15fd09f6a9a8c8d9907e54d7", size = 5337658, upload-time = "2026-01-29T23:04:17.404Z" }, ] [[package]] @@ -508,11 +506,11 @@ wheels = [ [[package]] name = "filelock" -version = "3.20.3" +version = "3.25.2" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/1d/65/ce7f1b70157833bf3cb851b556a37d4547ceafc158aa9b34b36782f23696/filelock-3.20.3.tar.gz", hash = "sha256:18c57ee915c7ec61cff0ecf7f0f869936c7c30191bb0cf406f1341778d0834e1", size = 19485, upload-time = "2026-01-09T17:55:05.421Z" } +sdist = { url = "https://files.pythonhosted.org/packages/94/b8/00651a0f559862f3bb7d6f7477b192afe3f583cc5e26403b44e59a55ab34/filelock-3.25.2.tar.gz", hash = "sha256:b64ece2b38f4ca29dd3e810287aa8c48182bbecd1ae6e9ae126c9b35f1382694", size = 40480, upload-time = "2026-03-11T20:45:38.487Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/b5/36/7fb70f04bf00bc646cd5bb45aa9eddb15e19437a28b8fb2b4a5249fac770/filelock-3.20.3-py3-none-any.whl", hash = "sha256:4b0dda527ee31078689fc205ec4f1c1bf7d56cf88b6dc9426c4f230e46c2dce1", size = 16701, upload-time = "2026-01-09T17:55:04.334Z" }, + { url = "https://files.pythonhosted.org/packages/a4/a5/842ae8f0c08b61d6484b52f99a03510a3a72d23141942d216ebe81fefbce/filelock-3.25.2-py3-none-any.whl", hash = "sha256:ca8afb0da15f229774c9ad1b455ed96e85a81373065fb10446672f64444ddf70", size = 26759, upload-time = "2026-03-11T20:45:37.437Z" }, ] [[package]] @@ -525,27 +523,27 @@ wheels = [ [[package]] name = "fonttools" -version = "4.61.1" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/ec/ca/cf17b88a8df95691275a3d77dc0a5ad9907f328ae53acbe6795da1b2f5ed/fonttools-4.61.1.tar.gz", hash = "sha256:6675329885c44657f826ef01d9e4fb33b9158e9d93c537d84ad8399539bc6f69", size = 3565756, upload-time = "2025-12-12T17:31:24.246Z" } -wheels = [ - { url = 
"https://files.pythonhosted.org/packages/69/12/bf9f4eaa2fad039356cc627587e30ed008c03f1cebd3034376b5ee8d1d44/fonttools-4.61.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:c6604b735bb12fef8e0efd5578c9fb5d3d8532d5001ea13a19cddf295673ee09", size = 2852213, upload-time = "2025-12-12T17:29:46.675Z" }, - { url = "https://files.pythonhosted.org/packages/ac/49/4138d1acb6261499bedde1c07f8c2605d1d8f9d77a151e5507fd3ef084b6/fonttools-4.61.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5ce02f38a754f207f2f06557523cd39a06438ba3aafc0639c477ac409fc64e37", size = 2401689, upload-time = "2025-12-12T17:29:48.769Z" }, - { url = "https://files.pythonhosted.org/packages/e5/fe/e6ce0fe20a40e03aef906af60aa87668696f9e4802fa283627d0b5ed777f/fonttools-4.61.1-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:77efb033d8d7ff233385f30c62c7c79271c8885d5c9657d967ede124671bbdfb", size = 5058809, upload-time = "2025-12-12T17:29:51.701Z" }, - { url = "https://files.pythonhosted.org/packages/79/61/1ca198af22f7dd22c17ab86e9024ed3c06299cfdb08170640e9996d501a0/fonttools-4.61.1-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:75c1a6dfac6abd407634420c93864a1e274ebc1c7531346d9254c0d8f6ca00f9", size = 5036039, upload-time = "2025-12-12T17:29:53.659Z" }, - { url = "https://files.pythonhosted.org/packages/99/cc/fa1801e408586b5fce4da9f5455af8d770f4fc57391cd5da7256bb364d38/fonttools-4.61.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:0de30bfe7745c0d1ffa2b0b7048fb7123ad0d71107e10ee090fa0b16b9452e87", size = 5034714, upload-time = "2025-12-12T17:29:55.592Z" }, - { url = "https://files.pythonhosted.org/packages/bf/aa/b7aeafe65adb1b0a925f8f25725e09f078c635bc22754f3fecb7456955b0/fonttools-4.61.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:58b0ee0ab5b1fc9921eccfe11d1435added19d6494dde14e323f25ad2bc30c56", size = 5158648, upload-time = "2025-12-12T17:29:57.861Z" }, - { url = 
"https://files.pythonhosted.org/packages/99/f9/08ea7a38663328881384c6e7777bbefc46fd7d282adfd87a7d2b84ec9d50/fonttools-4.61.1-cp311-cp311-win32.whl", hash = "sha256:f79b168428351d11e10c5aeb61a74e1851ec221081299f4cf56036a95431c43a", size = 2280681, upload-time = "2025-12-12T17:29:59.943Z" }, - { url = "https://files.pythonhosted.org/packages/07/ad/37dd1ae5fa6e01612a1fbb954f0927681f282925a86e86198ccd7b15d515/fonttools-4.61.1-cp311-cp311-win_amd64.whl", hash = "sha256:fe2efccb324948a11dd09d22136fe2ac8a97d6c1347cf0b58a911dcd529f66b7", size = 2331951, upload-time = "2025-12-12T17:30:02.254Z" }, - { url = "https://files.pythonhosted.org/packages/6f/16/7decaa24a1bd3a70c607b2e29f0adc6159f36a7e40eaba59846414765fd4/fonttools-4.61.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:f3cb4a569029b9f291f88aafc927dd53683757e640081ca8c412781ea144565e", size = 2851593, upload-time = "2025-12-12T17:30:04.225Z" }, - { url = "https://files.pythonhosted.org/packages/94/98/3c4cb97c64713a8cf499b3245c3bf9a2b8fd16a3e375feff2aed78f96259/fonttools-4.61.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:41a7170d042e8c0024703ed13b71893519a1a6d6e18e933e3ec7507a2c26a4b2", size = 2400231, upload-time = "2025-12-12T17:30:06.47Z" }, - { url = "https://files.pythonhosted.org/packages/b7/37/82dbef0f6342eb01f54bca073ac1498433d6ce71e50c3c3282b655733b31/fonttools-4.61.1-cp312-cp312-manylinux1_x86_64.manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:10d88e55330e092940584774ee5e8a6971b01fc2f4d3466a1d6c158230880796", size = 4954103, upload-time = "2025-12-12T17:30:08.432Z" }, - { url = "https://files.pythonhosted.org/packages/6c/44/f3aeac0fa98e7ad527f479e161aca6c3a1e47bb6996b053d45226fe37bf2/fonttools-4.61.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:15acc09befd16a0fb8a8f62bc147e1a82817542d72184acca9ce6e0aeda9fa6d", size = 5004295, upload-time = "2025-12-12T17:30:10.56Z" }, - { url = 
"https://files.pythonhosted.org/packages/14/e8/7424ced75473983b964d09f6747fa09f054a6d656f60e9ac9324cf40c743/fonttools-4.61.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:e6bcdf33aec38d16508ce61fd81838f24c83c90a1d1b8c68982857038673d6b8", size = 4944109, upload-time = "2025-12-12T17:30:12.874Z" }, - { url = "https://files.pythonhosted.org/packages/c8/8b/6391b257fa3d0b553d73e778f953a2f0154292a7a7a085e2374b111e5410/fonttools-4.61.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:5fade934607a523614726119164ff621e8c30e8fa1ffffbbd358662056ba69f0", size = 5093598, upload-time = "2025-12-12T17:30:15.79Z" }, - { url = "https://files.pythonhosted.org/packages/d9/71/fd2ea96cdc512d92da5678a1c98c267ddd4d8c5130b76d0f7a80f9a9fde8/fonttools-4.61.1-cp312-cp312-win32.whl", hash = "sha256:75da8f28eff26defba42c52986de97b22106cb8f26515b7c22443ebc9c2d3261", size = 2269060, upload-time = "2025-12-12T17:30:18.058Z" }, - { url = "https://files.pythonhosted.org/packages/80/3b/a3e81b71aed5a688e89dfe0e2694b26b78c7d7f39a5ffd8a7d75f54a12a8/fonttools-4.61.1-cp312-cp312-win_amd64.whl", hash = "sha256:497c31ce314219888c0e2fce5ad9178ca83fe5230b01a5006726cdf3ac9f24d9", size = 2319078, upload-time = "2025-12-12T17:30:22.862Z" }, - { url = "https://files.pythonhosted.org/packages/c7/4e/ce75a57ff3aebf6fc1f4e9d508b8e5810618a33d900ad6c19eb30b290b97/fonttools-4.61.1-py3-none-any.whl", hash = "sha256:17d2bf5d541add43822bcf0c43d7d847b160c9bb01d15d5007d84e2217aaa371", size = 1148996, upload-time = "2025-12-12T17:31:21.03Z" }, +version = "4.62.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/9a/08/7012b00a9a5874311b639c3920270c36ee0c445b69d9989a85e5c92ebcb0/fonttools-4.62.1.tar.gz", hash = "sha256:e54c75fd6041f1122476776880f7c3c3295ffa31962dc6ebe2543c00dca58b5d", size = 3580737, upload-time = "2026-03-13T13:54:25.52Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/88/39/23ff32561ec8d45a4d48578b4d241369d9270dc50926c017570e60893701/fonttools-4.62.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:40975849bac44fb0b9253d77420c6d8b523ac4dcdcefeff6e4d706838a5b80f7", size = 2871039, upload-time = "2026-03-13T13:52:33.127Z" }, + { url = "https://files.pythonhosted.org/packages/24/7f/66d3f8a9338a9b67fe6e1739f47e1cd5cee78bd3bc1206ef9b0b982289a5/fonttools-4.62.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:9dde91633f77fa576879a0c76b1d89de373cae751a98ddf0109d54e173b40f14", size = 2416346, upload-time = "2026-03-13T13:52:35.676Z" }, + { url = "https://files.pythonhosted.org/packages/aa/53/5276ceba7bff95da7793a07c5284e1da901cf00341ce5e2f3273056c0cca/fonttools-4.62.1-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6acb4109f8bee00fec985c8c7afb02299e35e9c94b57287f3ea542f28bd0b0a7", size = 5100897, upload-time = "2026-03-13T13:52:38.102Z" }, + { url = "https://files.pythonhosted.org/packages/cc/a1/40a5c4d8e28b0851d53a8eeeb46fbd73c325a2a9a165f290a5ed90e6c597/fonttools-4.62.1-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:1c5c25671ce8805e0d080e2ffdeca7f1e86778c5cbfbeae86d7f866d8830517b", size = 5071078, upload-time = "2026-03-13T13:52:41.305Z" }, + { url = "https://files.pythonhosted.org/packages/e3/be/d378fca4c65ea1956fee6d90ace6e861776809cbbc5af22388a090c3c092/fonttools-4.62.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:a5d8825e1140f04e6c99bb7d37a9e31c172f3bc208afbe02175339e699c710e1", size = 5076908, upload-time = "2026-03-13T13:52:44.122Z" }, + { url = "https://files.pythonhosted.org/packages/f8/d9/ae6a1d0693a4185a84605679c8a1f719a55df87b9c6e8e817bfdd9ef5936/fonttools-4.62.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:268abb1cb221e66c014acc234e872b7870d8b5d4657a83a8f4205094c32d2416", size = 5202275, upload-time = "2026-03-13T13:52:46.591Z" }, + { url = 
"https://files.pythonhosted.org/packages/54/6c/af95d9c4efb15cabff22642b608342f2bd67137eea6107202d91b5b03184/fonttools-4.62.1-cp311-cp311-win32.whl", hash = "sha256:942b03094d7edbb99bdf1ae7e9090898cad7bf9030b3d21f33d7072dbcb51a53", size = 2293075, upload-time = "2026-03-13T13:52:48.711Z" }, + { url = "https://files.pythonhosted.org/packages/d3/97/bf54c5b3f2be34e1f143e6db838dfdc54f2ffa3e68c738934c82f3b2a08d/fonttools-4.62.1-cp311-cp311-win_amd64.whl", hash = "sha256:e8514f4924375f77084e81467e63238b095abda5107620f49421c368a6017ed2", size = 2344593, upload-time = "2026-03-13T13:52:50.725Z" }, + { url = "https://files.pythonhosted.org/packages/47/d4/dbacced3953544b9a93088cc10ef2b596d348c983d5c67a404fa41ec51ba/fonttools-4.62.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:90365821debbd7db678809c7491ca4acd1e0779b9624cdc6ddaf1f31992bf974", size = 2870219, upload-time = "2026-03-13T13:52:53.664Z" }, + { url = "https://files.pythonhosted.org/packages/66/9e/a769c8e99b81e5a87ab7e5e7236684de4e96246aae17274e5347d11ebd78/fonttools-4.62.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:12859ff0b47dd20f110804c3e0d0970f7b832f561630cd879969011541a464a9", size = 2414891, upload-time = "2026-03-13T13:52:56.493Z" }, + { url = "https://files.pythonhosted.org/packages/69/64/f19a9e3911968c37e1e620e14dfc5778299e1474f72f4e57c5ec771d9489/fonttools-4.62.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9c125ffa00c3d9003cdaaf7f2c79e6e535628093e14b5de1dccb08859b680936", size = 5033197, upload-time = "2026-03-13T13:52:59.179Z" }, + { url = "https://files.pythonhosted.org/packages/9b/8a/99c8b3c3888c5c474c08dbfd7c8899786de9604b727fcefb055b42c84bba/fonttools-4.62.1-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:149f7d84afca659d1a97e39a4778794a2f83bf344c5ee5134e09995086cc2392", size = 4988768, upload-time = "2026-03-13T13:53:02.761Z" }, + { url = 
"https://files.pythonhosted.org/packages/d1/c6/0f904540d3e6ab463c1243a0d803504826a11604c72dd58c2949796a1762/fonttools-4.62.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:0aa72c43a601cfa9273bb1ae0518f1acadc01ee181a6fc60cd758d7fdadffc04", size = 4971512, upload-time = "2026-03-13T13:53:05.678Z" }, + { url = "https://files.pythonhosted.org/packages/29/0b/5cbef6588dc9bd6b5c9ad6a4d5a8ca384d0cea089da31711bbeb4f9654a6/fonttools-4.62.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:19177c8d96c7c36359266e571c5173bcee9157b59cfc8cb0153c5673dc5a3a7d", size = 5122723, upload-time = "2026-03-13T13:53:08.662Z" }, + { url = "https://files.pythonhosted.org/packages/4a/47/b3a5342d381595ef439adec67848bed561ab7fdb1019fa522e82101b7d9c/fonttools-4.62.1-cp312-cp312-win32.whl", hash = "sha256:a24decd24d60744ee8b4679d38e88b8303d86772053afc29b19d23bb8207803c", size = 2281278, upload-time = "2026-03-13T13:53:10.998Z" }, + { url = "https://files.pythonhosted.org/packages/28/b1/0c2ab56a16f409c6c8a68816e6af707827ad5d629634691ff60a52879792/fonttools-4.62.1-cp312-cp312-win_amd64.whl", hash = "sha256:9e7863e10b3de72376280b515d35b14f5eeed639d1aa7824f4cf06779ec65e42", size = 2331414, upload-time = "2026-03-13T13:53:13.992Z" }, + { url = "https://files.pythonhosted.org/packages/fd/ba/56147c165442cc5ba7e82ecf301c9a68353cede498185869e6e02b4c264f/fonttools-4.62.1-py3-none-any.whl", hash = "sha256:7487782e2113861f4ddcc07c3436450659e3caa5e470b27dc2177cade2d8e7fd", size = 1152647, upload-time = "2026-03-13T13:54:22.735Z" }, ] [[package]] @@ -594,25 +592,13 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/2a/39/e50c7c3a983047577ee07d2a9e53faf5a69493943ec3f6a384bdc792deb2/httpx-0.28.1-py3-none-any.whl", hash = "sha256:d909fcccc110f8c7faf814ca82a9a4d816bc5a6dbfea25d6591d6985b8ba59ad", size = 73517, upload-time = "2024-12-06T15:37:21.509Z" }, ] -[[package]] -name = "humanfriendly" -version = "10.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = 
"pyreadline3", marker = "sys_platform == 'win32'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/cc/3f/2c29224acb2e2df4d2046e4c73ee2662023c58ff5b113c4c1adac0886c43/humanfriendly-10.0.tar.gz", hash = "sha256:6b0b831ce8f15f7300721aa49829fc4e83921a9a301cc7f606be6686a2288ddc", size = 360702, upload-time = "2021-09-17T21:40:43.31Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/f0/0f/310fb31e39e2d734ccaa2c0fb981ee41f7bd5056ce9bc29b2248bd569169/humanfriendly-10.0-py2.py3-none-any.whl", hash = "sha256:1697e1a8a8f550fd43c2865cd84542fc175a61dcb779b6fee18cf6b6ccba1477", size = 86794, upload-time = "2021-09-17T21:40:39.897Z" }, -] - [[package]] name = "identify" -version = "2.6.16" +version = "2.6.18" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/5b/8d/e8b97e6bd3fb6fb271346f7981362f1e04d6a7463abd0de79e1fda17c067/identify-2.6.16.tar.gz", hash = "sha256:846857203b5511bbe94d5a352a48ef2359532bc8f6727b5544077a0dcfb24980", size = 99360, upload-time = "2026-01-12T18:58:58.201Z" } +sdist = { url = "https://files.pythonhosted.org/packages/46/c4/7fb4db12296cdb11893d61c92048fe617ee853f8523b9b296ac03b43757e/identify-2.6.18.tar.gz", hash = "sha256:873ac56a5e3fd63e7438a7ecbc4d91aca692eb3fefa4534db2b7913f3fc352fd", size = 99580, upload-time = "2026-03-15T18:39:50.319Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/b8/58/40fbbcefeda82364720eba5cf2270f98496bdfa19ea75b4cccae79c698e6/identify-2.6.16-py2.py3-none-any.whl", hash = "sha256:391ee4d77741d994189522896270b787aed8670389bfd60f326d677d64a6dfb0", size = 99202, upload-time = "2026-01-12T18:58:56.627Z" }, + { url = "https://files.pythonhosted.org/packages/46/33/92ef41c6fad0233e41d3d84ba8e8ad18d1780f1e5d99b3c683e6d7f98b63/identify-2.6.18-py2.py3-none-any.whl", hash = "sha256:8db9d3c8ea9079db92cafb0ebf97abdc09d52e97f4dcf773a2e694048b7cd737", size = 99394, upload-time = "2026-03-15T18:39:48.915Z" }, ] [[package]] @@ -635,13 
+621,14 @@ wheels = [ [[package]] name = "ipykernel" -version = "7.1.0" +version = "7.2.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "appnope", marker = "sys_platform == 'darwin'" }, { name = "comm" }, { name = "debugpy" }, - { name = "ipython" }, + { name = "ipython", version = "9.10.1", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.12'" }, + { name = "ipython", version = "9.12.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.12'" }, { name = "jupyter-client" }, { name = "jupyter-core" }, { name = "matplotlib-inline" }, @@ -652,31 +639,68 @@ dependencies = [ { name = "tornado" }, { name = "traitlets" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/b9/a4/4948be6eb88628505b83a1f2f40d90254cab66abf2043b3c40fa07dfce0f/ipykernel-7.1.0.tar.gz", hash = "sha256:58a3fc88533d5930c3546dc7eac66c6d288acde4f801e2001e65edc5dc9cf0db", size = 174579, upload-time = "2025-10-27T09:46:39.471Z" } +sdist = { url = "https://files.pythonhosted.org/packages/ca/8d/b68b728e2d06b9e0051019640a40a9eb7a88fcd82c2e1b5ce70bef5ff044/ipykernel-7.2.0.tar.gz", hash = "sha256:18ed160b6dee2cbb16e5f3575858bc19d8f1fe6046a9a680c708494ce31d909e", size = 176046, upload-time = "2026-02-06T16:43:27.403Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/a3/17/20c2552266728ceba271967b87919664ecc0e33efca29c3efc6baf88c5f9/ipykernel-7.1.0-py3-none-any.whl", hash = "sha256:763b5ec6c5b7776f6a8d7ce09b267693b4e5ce75cb50ae696aaefb3c85e1ea4c", size = 117968, upload-time = "2025-10-27T09:46:37.805Z" }, + { url = "https://files.pythonhosted.org/packages/82/b9/e73d5d9f405cba7706c539aa8b311b49d4c2f3d698d9c12f815231169c71/ipykernel-7.2.0-py3-none-any.whl", hash = "sha256:3bbd4420d2b3cc105cbdf3756bfc04500b1e52f090a90716851f3916c62e1661", size = 118788, upload-time = "2026-02-06T16:43:25.149Z" }, ] [[package]] name = "ipython" -version = "9.9.0" +version = "9.10.1" source = { registry 
= "https://pypi.org/simple" } -dependencies = [ - { name = "colorama", marker = "sys_platform == 'win32'" }, - { name = "decorator" }, - { name = "ipython-pygments-lexers" }, - { name = "jedi" }, - { name = "matplotlib-inline" }, - { name = "pexpect", marker = "sys_platform != 'emscripten' and sys_platform != 'win32'" }, - { name = "prompt-toolkit" }, - { name = "pygments" }, - { name = "stack-data" }, - { name = "traitlets" }, +resolution-markers = [ + "python_full_version < '3.12' and platform_machine != 's390x' and sys_platform == 'win32'", + "python_full_version < '3.12' and platform_machine == 's390x' and sys_platform == 'win32'", + "python_full_version < '3.12' and platform_machine != 's390x' and sys_platform == 'emscripten'", + "python_full_version < '3.12' and platform_machine == 's390x' and sys_platform == 'emscripten'", + "python_full_version < '3.12' and platform_machine != 's390x' and sys_platform != 'emscripten' and sys_platform != 'win32'", + "python_full_version < '3.12' and platform_machine == 's390x' and sys_platform != 'emscripten' and sys_platform != 'win32'", +] +dependencies = [ + { name = "colorama", marker = "python_full_version < '3.12' and sys_platform == 'win32'" }, + { name = "decorator", marker = "python_full_version < '3.12'" }, + { name = "ipython-pygments-lexers", marker = "python_full_version < '3.12'" }, + { name = "jedi", marker = "python_full_version < '3.12'" }, + { name = "matplotlib-inline", marker = "python_full_version < '3.12'" }, + { name = "pexpect", marker = "python_full_version < '3.12' and sys_platform != 'emscripten' and sys_platform != 'win32'" }, + { name = "prompt-toolkit", marker = "python_full_version < '3.12'" }, + { name = "pygments", marker = "python_full_version < '3.12'" }, + { name = "stack-data", marker = "python_full_version < '3.12'" }, + { name = "traitlets", marker = "python_full_version < '3.12'" }, { name = "typing-extensions", marker = "python_full_version < '3.12'" }, ] -sdist = { url = 
"https://files.pythonhosted.org/packages/46/dd/fb08d22ec0c27e73c8bc8f71810709870d51cadaf27b7ddd3f011236c100/ipython-9.9.0.tar.gz", hash = "sha256:48fbed1b2de5e2c7177eefa144aba7fcb82dac514f09b57e2ac9da34ddb54220", size = 4425043, upload-time = "2026-01-05T12:36:46.233Z" } +sdist = { url = "https://files.pythonhosted.org/packages/c5/25/daae0e764047b0a2480c7bbb25d48f4f509b5818636562eeac145d06dfee/ipython-9.10.1.tar.gz", hash = "sha256:e170e9b2a44312484415bdb750492699bf329233b03f2557a9692cce6466ada4", size = 4426663, upload-time = "2026-03-27T09:53:26.244Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/86/92/162cfaee4ccf370465c5af1ce36a9eacec1becb552f2033bb3584e6f640a/ipython-9.9.0-py3-none-any.whl", hash = "sha256:b457fe9165df2b84e8ec909a97abcf2ed88f565970efba16b1f7229c283d252b", size = 621431, upload-time = "2026-01-05T12:36:44.669Z" }, + { url = "https://files.pythonhosted.org/packages/01/09/ba70f8d662d5671687da55ad2cc0064cf795b15e1eea70907532202e7c97/ipython-9.10.1-py3-none-any.whl", hash = "sha256:82d18ae9fb9164ded080c71ef92a182ee35ee7db2395f67616034bebb020a232", size = 622827, upload-time = "2026-03-27T09:53:24.566Z" }, +] + +[[package]] +name = "ipython" +version = "9.12.0" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'win32'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'win32'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'emscripten'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'emscripten'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'emscripten' and sys_platform != 'win32'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'emscripten' and sys_platform != 'win32'", +] +dependencies = [ + { name = "colorama", marker = 
"python_full_version >= '3.12' and sys_platform == 'win32'" }, + { name = "decorator", marker = "python_full_version >= '3.12'" }, + { name = "ipython-pygments-lexers", marker = "python_full_version >= '3.12'" }, + { name = "jedi", marker = "python_full_version >= '3.12'" }, + { name = "matplotlib-inline", marker = "python_full_version >= '3.12'" }, + { name = "pexpect", marker = "python_full_version >= '3.12' and sys_platform != 'emscripten' and sys_platform != 'win32'" }, + { name = "prompt-toolkit", marker = "python_full_version >= '3.12'" }, + { name = "pygments", marker = "python_full_version >= '3.12'" }, + { name = "stack-data", marker = "python_full_version >= '3.12'" }, + { name = "traitlets", marker = "python_full_version >= '3.12'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/3a/73/7114f80a8f9cabdb13c27732dce24af945b2923dcab80723602f7c8bc2d8/ipython-9.12.0.tar.gz", hash = "sha256:01daa83f504b693ba523b5a407246cabde4eb4513285a3c6acaff11a66735ee4", size = 4428879, upload-time = "2026-03-27T09:42:45.312Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/59/22/906c8108974c673ebef6356c506cebb6870d48cedea3c41e949e2dd556bb/ipython-9.12.0-py3-none-any.whl", hash = "sha256:0f2701e8ee86e117e37f50563205d36feaa259d2e08d4a6bc6b6d74b18ce128d", size = 625661, upload-time = "2026-03-27T09:42:42.831Z" }, ] [[package]] @@ -697,7 +721,8 @@ version = "8.1.8" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "comm" }, - { name = "ipython" }, + { name = "ipython", version = "9.10.1", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.12'" }, + { name = "ipython", version = "9.12.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.12'" }, { name = "jupyterlab-widgets" }, { name = "traitlets" }, { name = "widgetsnbextension" }, @@ -745,29 +770,29 @@ wheels = [ [[package]] name = "jmespath" -version = "1.0.1" +version = "1.1.0" source = { registry 
= "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/00/2a/e867e8531cf3e36b41201936b7fa7ba7b5702dbef42922193f05c8976cd6/jmespath-1.0.1.tar.gz", hash = "sha256:90261b206d6defd58fdd5e85f478bf633a2901798906be2ad389150c5c60edbe", size = 25843, upload-time = "2022-06-17T18:00:12.224Z" } +sdist = { url = "https://files.pythonhosted.org/packages/d3/59/322338183ecda247fb5d1763a6cbe46eff7222eaeebafd9fa65d4bf5cb11/jmespath-1.1.0.tar.gz", hash = "sha256:472c87d80f36026ae83c6ddd0f1d05d4e510134ed462851fd5f754c8c3cbb88d", size = 27377, upload-time = "2026-01-22T16:35:26.279Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/31/b4/b9b800c45527aadd64d5b442f9b932b00648617eb5d63d2c7a6587b7cafc/jmespath-1.0.1-py3-none-any.whl", hash = "sha256:02e2e4cc71b5bcab88332eebf907519190dd9e6e82107fa7f83b1003a6252980", size = 20256, upload-time = "2022-06-17T18:00:10.251Z" }, + { url = "https://files.pythonhosted.org/packages/14/2f/967ba146e6d58cf6a652da73885f52fc68001525b4197effc174321d70b4/jmespath-1.1.0-py3-none-any.whl", hash = "sha256:a5663118de4908c91729bea0acadca56526eb2698e83de10cd116ae0f4e97c64", size = 20419, upload-time = "2026-01-22T16:35:24.919Z" }, ] [[package]] name = "json5" -version = "0.13.0" +version = "0.14.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/77/e8/a3f261a66e4663f22700bc8a17c08cb83e91fbf086726e7a228398968981/json5-0.13.0.tar.gz", hash = "sha256:b1edf8d487721c0bf64d83c28e91280781f6e21f4a797d3261c7c828d4c165bf", size = 52441, upload-time = "2026-01-01T19:42:14.99Z" } +sdist = { url = "https://files.pythonhosted.org/packages/9c/4b/6f8906aaf67d501e259b0adab4d312945bb7211e8b8d4dcc77c92320edaa/json5-0.14.0.tar.gz", hash = "sha256:b3f492fad9f6cdbced8b7d40b28b9b1c9701c5f561bef0d33b81c2ff433fefcb", size = 52656, upload-time = "2026-03-27T22:50:48.108Z" } wheels = [ - { url = 
"https://files.pythonhosted.org/packages/d7/9e/038522f50ceb7e74f1f991bf1b699f24b0c2bbe7c390dd36ad69f4582258/json5-0.13.0-py3-none-any.whl", hash = "sha256:9a08e1dd65f6a4d4c6fa82d216cf2477349ec2346a38fd70cc11d2557499fbcc", size = 36163, upload-time = "2026-01-01T19:42:13.962Z" }, + { url = "https://files.pythonhosted.org/packages/b8/42/cf027b4ac873b076189d935b135397675dac80cb29acb13e1ab86ad6c631/json5-0.14.0-py3-none-any.whl", hash = "sha256:56cf861bab076b1178eb8c92e1311d273a9b9acea2ccc82c276abf839ebaef3a", size = 36271, upload-time = "2026-03-27T22:50:47.073Z" }, ] [[package]] name = "jsonpointer" -version = "3.0.0" +version = "3.1.1" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/6a/0a/eebeb1fa92507ea94016a2a790b93c2ae41a7e18778f85471dc54475ed25/jsonpointer-3.0.0.tar.gz", hash = "sha256:2b2d729f2091522d61c3b31f82e11870f60b68f43fbc705cb76bf4b832af59ef", size = 9114, upload-time = "2024-06-10T19:24:42.462Z" } +sdist = { url = "https://files.pythonhosted.org/packages/18/c7/af399a2e7a67fd18d63c40c5e62d3af4e67b836a2107468b6a5ea24c4304/jsonpointer-3.1.1.tar.gz", hash = "sha256:0b801c7db33a904024f6004d526dcc53bbb8a4a0f4e32bfd10beadf60adf1900", size = 9068, upload-time = "2026-03-23T22:32:32.458Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/71/92/5e77f98553e9e75130c78900d000368476aed74276eb8ae8796f65f00918/jsonpointer-3.0.0-py2.py3-none-any.whl", hash = "sha256:13e088adc14fca8b6aa8177c044e12701e6ad4b28ff10e65f2267a90109c9942", size = 7595, upload-time = "2024-06-10T19:24:40.698Z" }, + { url = "https://files.pythonhosted.org/packages/9e/6a/a83720e953b1682d2d109d3c2dbb0bc9bf28cc1cbc205be4ef4be5da709d/jsonpointer-3.1.1-py3-none-any.whl", hash = "sha256:8ff8b95779d071ba472cf5bc913028df06031797532f08a7d5b602d8b2a488ca", size = 7659, upload-time = "2026-03-23T22:32:31.568Z" }, ] [[package]] @@ -849,7 +874,8 @@ version = "6.6.3" source = { registry = "https://pypi.org/simple" } dependencies = [ { 
name = "ipykernel" }, - { name = "ipython" }, + { name = "ipython", version = "9.10.1", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.12'" }, + { name = "ipython", version = "9.12.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.12'" }, { name = "jupyter-client" }, { name = "jupyter-core" }, { name = "prompt-toolkit" }, @@ -896,14 +922,14 @@ wheels = [ [[package]] name = "jupyter-lsp" -version = "2.3.0" +version = "2.3.1" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "jupyter-server" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/eb/5a/9066c9f8e94ee517133cd98dba393459a16cd48bba71a82f16a65415206c/jupyter_lsp-2.3.0.tar.gz", hash = "sha256:458aa59339dc868fb784d73364f17dbce8836e906cd75fd471a325cba02e0245", size = 54823, upload-time = "2025-08-27T17:47:34.671Z" } +sdist = { url = "https://files.pythonhosted.org/packages/36/ff/1e4a61f5170a9a1d978f3ac3872449de6c01fc71eaf89657824c878b1549/jupyter_lsp-2.3.1.tar.gz", hash = "sha256:fdf8a4aa7d85813976d6e29e95e6a2c8f752701f926f2715305249a3829805a6", size = 55677, upload-time = "2026-04-02T08:10:06.749Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/1a/60/1f6cee0c46263de1173894f0fafcb3475ded276c472c14d25e0280c18d6d/jupyter_lsp-2.3.0-py3-none-any.whl", hash = "sha256:e914a3cb2addf48b1c7710914771aaf1819d46b2e5a79b0f917b5478ec93f34f", size = 76687, upload-time = "2025-08-27T17:47:33.15Z" }, + { url = "https://files.pythonhosted.org/packages/23/e8/9d61dcbd1dce8ef418f06befd4ac084b4720429c26b0b1222bc218685eff/jupyter_lsp-2.3.1-py3-none-any.whl", hash = "sha256:71b954d834e85ff3096400554f2eefaf7fe37053036f9a782b0f7c5e42dadb81", size = 77513, upload-time = "2026-04-02T08:10:01.753Z" }, ] [[package]] @@ -951,7 +977,7 @@ wheels = [ [[package]] name = "jupyterlab" -version = "4.5.2" +version = "4.5.6" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "async-lru" }, @@ 
-968,9 +994,9 @@ dependencies = [ { name = "tornado" }, { name = "traitlets" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/93/dc/2c8c4ff1aee27ac999ba04c373c5d0d7c6c181b391640d7b916b884d5985/jupyterlab-4.5.2.tar.gz", hash = "sha256:c80a6b9f6dace96a566d590c65ee2785f61e7cd4aac5b4d453dcc7d0d5e069b7", size = 23990371, upload-time = "2026-01-12T12:27:08.493Z" } +sdist = { url = "https://files.pythonhosted.org/packages/ac/d5/730628e03fff2e8a8e8ccdaedde1489ab1309f9a4fa2536248884e30b7c7/jupyterlab-4.5.6.tar.gz", hash = "sha256:642fe2cfe7f0f5922a8a558ba7a0d246c7bc133b708dfe43f7b3a826d163cf42", size = 23970670, upload-time = "2026-03-11T14:17:04.531Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/a4/78/7e455920f104ef2aa94a4c0d2b40e5b44334ee7057eae1aa1fb97b9631ad/jupyterlab-4.5.2-py3-none-any.whl", hash = "sha256:76466ebcfdb7a9bb7e2fbd6459c0e2c032ccf75be673634a84bee4b3e6b13ab6", size = 12385807, upload-time = "2026-01-12T12:27:03.923Z" }, + { url = "https://files.pythonhosted.org/packages/e1/1b/dad6fdcc658ed7af26fdf3841e7394072c9549a8b896c381ab49dd11e2d9/jupyterlab-4.5.6-py3-none-any.whl", hash = "sha256:d6b3dac883aa4d9993348e0f8e95b24624f75099aed64eab6a4351a9cdd1e580", size = 12447124, upload-time = "2026-03-11T14:17:00.229Z" }, ] [[package]] @@ -1011,41 +1037,49 @@ wheels = [ [[package]] name = "kiwisolver" -version = "1.4.9" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/5c/3c/85844f1b0feb11ee581ac23fe5fce65cd049a200c1446708cc1b7f922875/kiwisolver-1.4.9.tar.gz", hash = "sha256:c3b22c26c6fd6811b0ae8363b95ca8ce4ea3c202d3d0975b2914310ceb1bcc4d", size = 97564, upload-time = "2025-08-10T21:27:49.279Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/6f/ab/c80b0d5a9d8a1a65f4f815f2afff9798b12c3b9f31f1d304dd233dd920e2/kiwisolver-1.4.9-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:eb14a5da6dc7642b0f3a18f13654847cd8b7a2550e2645a5bda677862b03ba16", size = 124167, 
upload-time = "2025-08-10T21:25:53.403Z" }, - { url = "https://files.pythonhosted.org/packages/a0/c0/27fe1a68a39cf62472a300e2879ffc13c0538546c359b86f149cc19f6ac3/kiwisolver-1.4.9-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:39a219e1c81ae3b103643d2aedb90f1ef22650deb266ff12a19e7773f3e5f089", size = 66579, upload-time = "2025-08-10T21:25:54.79Z" }, - { url = "https://files.pythonhosted.org/packages/31/a2/a12a503ac1fd4943c50f9822678e8015a790a13b5490354c68afb8489814/kiwisolver-1.4.9-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2405a7d98604b87f3fc28b1716783534b1b4b8510d8142adca34ee0bc3c87543", size = 65309, upload-time = "2025-08-10T21:25:55.76Z" }, - { url = "https://files.pythonhosted.org/packages/66/e1/e533435c0be77c3f64040d68d7a657771194a63c279f55573188161e81ca/kiwisolver-1.4.9-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:dc1ae486f9abcef254b5618dfb4113dd49f94c68e3e027d03cf0143f3f772b61", size = 1435596, upload-time = "2025-08-10T21:25:56.861Z" }, - { url = "https://files.pythonhosted.org/packages/67/1e/51b73c7347f9aabdc7215aa79e8b15299097dc2f8e67dee2b095faca9cb0/kiwisolver-1.4.9-cp311-cp311-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8a1f570ce4d62d718dce3f179ee78dac3b545ac16c0c04bb363b7607a949c0d1", size = 1246548, upload-time = "2025-08-10T21:25:58.246Z" }, - { url = "https://files.pythonhosted.org/packages/21/aa/72a1c5d1e430294f2d32adb9542719cfb441b5da368d09d268c7757af46c/kiwisolver-1.4.9-cp311-cp311-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:cb27e7b78d716c591e88e0a09a2139c6577865d7f2e152488c2cc6257f460872", size = 1263618, upload-time = "2025-08-10T21:25:59.857Z" }, - { url = "https://files.pythonhosted.org/packages/a3/af/db1509a9e79dbf4c260ce0cfa3903ea8945f6240e9e59d1e4deb731b1a40/kiwisolver-1.4.9-cp311-cp311-manylinux_2_24_s390x.manylinux_2_28_s390x.whl", hash = "sha256:15163165efc2f627eb9687ea5f3a28137217d217ac4024893d753f46bce9de26", size = 1317437, upload-time = 
"2025-08-10T21:26:01.105Z" }, - { url = "https://files.pythonhosted.org/packages/e0/f2/3ea5ee5d52abacdd12013a94130436e19969fa183faa1e7c7fbc89e9a42f/kiwisolver-1.4.9-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:bdee92c56a71d2b24c33a7d4c2856bd6419d017e08caa7802d2963870e315028", size = 2195742, upload-time = "2025-08-10T21:26:02.675Z" }, - { url = "https://files.pythonhosted.org/packages/6f/9b/1efdd3013c2d9a2566aa6a337e9923a00590c516add9a1e89a768a3eb2fc/kiwisolver-1.4.9-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:412f287c55a6f54b0650bd9b6dce5aceddb95864a1a90c87af16979d37c89771", size = 2290810, upload-time = "2025-08-10T21:26:04.009Z" }, - { url = "https://files.pythonhosted.org/packages/fb/e5/cfdc36109ae4e67361f9bc5b41323648cb24a01b9ade18784657e022e65f/kiwisolver-1.4.9-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:2c93f00dcba2eea70af2be5f11a830a742fe6b579a1d4e00f47760ef13be247a", size = 2461579, upload-time = "2025-08-10T21:26:05.317Z" }, - { url = "https://files.pythonhosted.org/packages/62/86/b589e5e86c7610842213994cdea5add00960076bef4ae290c5fa68589cac/kiwisolver-1.4.9-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:f117e1a089d9411663a3207ba874f31be9ac8eaa5b533787024dc07aeb74f464", size = 2268071, upload-time = "2025-08-10T21:26:06.686Z" }, - { url = "https://files.pythonhosted.org/packages/3b/c6/f8df8509fd1eee6c622febe54384a96cfaf4d43bf2ccec7a0cc17e4715c9/kiwisolver-1.4.9-cp311-cp311-win_amd64.whl", hash = "sha256:be6a04e6c79819c9a8c2373317d19a96048e5a3f90bec587787e86a1153883c2", size = 73840, upload-time = "2025-08-10T21:26:07.94Z" }, - { url = "https://files.pythonhosted.org/packages/e2/2d/16e0581daafd147bc11ac53f032a2b45eabac897f42a338d0a13c1e5c436/kiwisolver-1.4.9-cp311-cp311-win_arm64.whl", hash = "sha256:0ae37737256ba2de764ddc12aed4956460277f00c4996d51a197e72f62f5eec7", size = 65159, upload-time = "2025-08-10T21:26:09.048Z" }, - { url = 
"https://files.pythonhosted.org/packages/86/c9/13573a747838aeb1c76e3267620daa054f4152444d1f3d1a2324b78255b5/kiwisolver-1.4.9-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:ac5a486ac389dddcc5bef4f365b6ae3ffff2c433324fb38dd35e3fab7c957999", size = 123686, upload-time = "2025-08-10T21:26:10.034Z" }, - { url = "https://files.pythonhosted.org/packages/51/ea/2ecf727927f103ffd1739271ca19c424d0e65ea473fbaeea1c014aea93f6/kiwisolver-1.4.9-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:f2ba92255faa7309d06fe44c3a4a97efe1c8d640c2a79a5ef728b685762a6fd2", size = 66460, upload-time = "2025-08-10T21:26:11.083Z" }, - { url = "https://files.pythonhosted.org/packages/5b/5a/51f5464373ce2aeb5194508298a508b6f21d3867f499556263c64c621914/kiwisolver-1.4.9-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:4a2899935e724dd1074cb568ce7ac0dce28b2cd6ab539c8e001a8578eb106d14", size = 64952, upload-time = "2025-08-10T21:26:12.058Z" }, - { url = "https://files.pythonhosted.org/packages/70/90/6d240beb0f24b74371762873e9b7f499f1e02166a2d9c5801f4dbf8fa12e/kiwisolver-1.4.9-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:f6008a4919fdbc0b0097089f67a1eb55d950ed7e90ce2cc3e640abadd2757a04", size = 1474756, upload-time = "2025-08-10T21:26:13.096Z" }, - { url = "https://files.pythonhosted.org/packages/12/42/f36816eaf465220f683fb711efdd1bbf7a7005a2473d0e4ed421389bd26c/kiwisolver-1.4.9-cp312-cp312-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:67bb8b474b4181770f926f7b7d2f8c0248cbcb78b660fdd41a47054b28d2a752", size = 1276404, upload-time = "2025-08-10T21:26:14.457Z" }, - { url = "https://files.pythonhosted.org/packages/2e/64/bc2de94800adc830c476dce44e9b40fd0809cddeef1fde9fcf0f73da301f/kiwisolver-1.4.9-cp312-cp312-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:2327a4a30d3ee07d2fbe2e7933e8a37c591663b96ce42a00bc67461a87d7df77", size = 1294410, upload-time = "2025-08-10T21:26:15.73Z" }, - { url = 
"https://files.pythonhosted.org/packages/5f/42/2dc82330a70aa8e55b6d395b11018045e58d0bb00834502bf11509f79091/kiwisolver-1.4.9-cp312-cp312-manylinux_2_24_s390x.manylinux_2_28_s390x.whl", hash = "sha256:7a08b491ec91b1d5053ac177afe5290adacf1f0f6307d771ccac5de30592d198", size = 1343631, upload-time = "2025-08-10T21:26:17.045Z" }, - { url = "https://files.pythonhosted.org/packages/22/fd/f4c67a6ed1aab149ec5a8a401c323cee7a1cbe364381bb6c9c0d564e0e20/kiwisolver-1.4.9-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:d8fc5c867c22b828001b6a38d2eaeb88160bf5783c6cb4a5e440efc981ce286d", size = 2224963, upload-time = "2025-08-10T21:26:18.737Z" }, - { url = "https://files.pythonhosted.org/packages/45/aa/76720bd4cb3713314677d9ec94dcc21ced3f1baf4830adde5bb9b2430a5f/kiwisolver-1.4.9-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:3b3115b2581ea35bb6d1f24a4c90af37e5d9b49dcff267eeed14c3893c5b86ab", size = 2321295, upload-time = "2025-08-10T21:26:20.11Z" }, - { url = "https://files.pythonhosted.org/packages/80/19/d3ec0d9ab711242f56ae0dc2fc5d70e298bb4a1f9dfab44c027668c673a1/kiwisolver-1.4.9-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:858e4c22fb075920b96a291928cb7dea5644e94c0ee4fcd5af7e865655e4ccf2", size = 2487987, upload-time = "2025-08-10T21:26:21.49Z" }, - { url = "https://files.pythonhosted.org/packages/39/e9/61e4813b2c97e86b6fdbd4dd824bf72d28bcd8d4849b8084a357bc0dd64d/kiwisolver-1.4.9-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ed0fecd28cc62c54b262e3736f8bb2512d8dcfdc2bcf08be5f47f96bf405b145", size = 2291817, upload-time = "2025-08-10T21:26:22.812Z" }, - { url = "https://files.pythonhosted.org/packages/a0/41/85d82b0291db7504da3c2defe35c9a8a5c9803a730f297bd823d11d5fb77/kiwisolver-1.4.9-cp312-cp312-win_amd64.whl", hash = "sha256:f68208a520c3d86ea51acf688a3e3002615a7f0238002cccc17affecc86a8a54", size = 73895, upload-time = "2025-08-10T21:26:24.37Z" }, - { url = 
"https://files.pythonhosted.org/packages/e2/92/5f3068cf15ee5cb624a0c7596e67e2a0bb2adee33f71c379054a491d07da/kiwisolver-1.4.9-cp312-cp312-win_arm64.whl", hash = "sha256:2c1a4f57df73965f3f14df20b80ee29e6a7930a57d2d9e8491a25f676e197c60", size = 64992, upload-time = "2025-08-10T21:26:25.732Z" }, - { url = "https://files.pythonhosted.org/packages/a3/0f/36d89194b5a32c054ce93e586d4049b6c2c22887b0eb229c61c68afd3078/kiwisolver-1.4.9-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:720e05574713db64c356e86732c0f3c5252818d05f9df320f0ad8380641acea5", size = 60104, upload-time = "2025-08-10T21:27:43.287Z" }, - { url = "https://files.pythonhosted.org/packages/52/ba/4ed75f59e4658fd21fe7dde1fee0ac397c678ec3befba3fe6482d987af87/kiwisolver-1.4.9-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:17680d737d5335b552994a2008fab4c851bcd7de33094a82067ef3a576ff02fa", size = 58592, upload-time = "2025-08-10T21:27:44.314Z" }, - { url = "https://files.pythonhosted.org/packages/33/01/a8ea7c5ea32a9b45ceeaee051a04c8ed4320f5add3c51bfa20879b765b70/kiwisolver-1.4.9-pp311-pypy311_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:85b5352f94e490c028926ea567fc569c52ec79ce131dadb968d3853e809518c2", size = 80281, upload-time = "2025-08-10T21:27:45.369Z" }, - { url = "https://files.pythonhosted.org/packages/da/e3/dbd2ecdce306f1d07a1aaf324817ee993aab7aee9db47ceac757deabafbe/kiwisolver-1.4.9-pp311-pypy311_pp73-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:464415881e4801295659462c49461a24fb107c140de781d55518c4b80cb6790f", size = 78009, upload-time = "2025-08-10T21:27:46.376Z" }, - { url = "https://files.pythonhosted.org/packages/da/e9/0d4add7873a73e462aeb45c036a2dead2562b825aa46ba326727b3f31016/kiwisolver-1.4.9-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:fb940820c63a9590d31d88b815e7a3aa5915cad3ce735ab45f0c730b39547de1", size = 73929, upload-time = "2025-08-10T21:27:48.236Z" }, +version = "1.5.0" +source = { registry = "https://pypi.org/simple" } 
+sdist = { url = "https://files.pythonhosted.org/packages/d0/67/9c61eccb13f0bdca9307614e782fec49ffdde0f7a2314935d489fa93cd9c/kiwisolver-1.5.0.tar.gz", hash = "sha256:d4193f3d9dc3f6f79aaed0e5637f45d98850ebf01f7ca20e69457f3e8946b66a", size = 103482, upload-time = "2026-03-09T13:15:53.382Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/12/dd/a495a9c104be1c476f0386e714252caf2b7eca883915422a64c50b88c6f5/kiwisolver-1.5.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:9eed0f7edbb274413b6ee781cca50541c8c0facd3d6fd289779e494340a2b85c", size = 122798, upload-time = "2026-03-09T13:12:58.963Z" }, + { url = "https://files.pythonhosted.org/packages/11/60/37b4047a2af0cf5ef6d8b4b26e91829ae6fc6a2d1f74524bcb0e7cd28a32/kiwisolver-1.5.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3c4923e404d6bcd91b6779c009542e5647fef32e4a5d75e115e3bbac6f2335eb", size = 66216, upload-time = "2026-03-09T13:13:00.155Z" }, + { url = "https://files.pythonhosted.org/packages/0a/aa/510dc933d87767584abfe03efa445889996c70c2990f6f87c3ebaa0a18c5/kiwisolver-1.5.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:0df54df7e686afa55e6f21fb86195224a6d9beb71d637e8d7920c95cf0f89aac", size = 63911, upload-time = "2026-03-09T13:13:01.671Z" }, + { url = "https://files.pythonhosted.org/packages/80/46/bddc13df6c2a40741e0cc7865bb1c9ed4796b6760bd04ce5fae3928ef917/kiwisolver-1.5.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:2517e24d7315eb51c10664cdb865195df38ab74456c677df67bb47f12d088a27", size = 1438209, upload-time = "2026-03-09T13:13:03.385Z" }, + { url = "https://files.pythonhosted.org/packages/fd/d6/76621246f5165e5372f02f5e6f3f48ea336a8f9e96e43997d45b240ed8cd/kiwisolver-1.5.0-cp311-cp311-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ff710414307fefa903e0d9bdf300972f892c23477829f49504e59834f4195398", size = 1248888, upload-time = "2026-03-09T13:13:05.231Z" }, + { url = 
"https://files.pythonhosted.org/packages/b2/c1/31559ec6fb39a5b48035ce29bb63ade628f321785f38c384dee3e2c08bc1/kiwisolver-1.5.0-cp311-cp311-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:6176c1811d9d5a04fa391c490cc44f451e240697a16977f11c6f722efb9041db", size = 1266304, upload-time = "2026-03-09T13:13:06.743Z" }, + { url = "https://files.pythonhosted.org/packages/5e/ef/1cb8276f2d29cc6a41e0a042f27946ca347d3a4a75acf85d0a16aa6dcc82/kiwisolver-1.5.0-cp311-cp311-manylinux_2_24_s390x.manylinux_2_28_s390x.whl", hash = "sha256:50847dca5d197fcbd389c805aa1a1cf32f25d2e7273dc47ab181a517666b68cc", size = 1319650, upload-time = "2026-03-09T13:13:08.607Z" }, + { url = "https://files.pythonhosted.org/packages/4c/e4/5ba3cecd7ce6236ae4a80f67e5d5531287337d0e1f076ca87a5abe4cd5d0/kiwisolver-1.5.0-cp311-cp311-manylinux_2_39_riscv64.whl", hash = "sha256:01808c6d15f4c3e8559595d6d1fe6411c68e4a3822b4b9972b44473b24f4e679", size = 970949, upload-time = "2026-03-09T13:13:10.299Z" }, + { url = "https://files.pythonhosted.org/packages/5a/69/dc61f7ae9a2f071f26004ced87f078235b5507ab6e5acd78f40365655034/kiwisolver-1.5.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:f1f9f4121ec58628c96baa3de1a55a4e3a333c5102c8e94b64e23bf7b2083309", size = 2199125, upload-time = "2026-03-09T13:13:11.841Z" }, + { url = "https://files.pythonhosted.org/packages/e5/7b/abbe0f1b5afa85f8d084b73e90e5f801c0939eba16ac2e49af7c61a6c28d/kiwisolver-1.5.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:b7d335370ae48a780c6e6a6bbfa97342f563744c39c35562f3f367665f5c1de2", size = 2293783, upload-time = "2026-03-09T13:13:14.399Z" }, + { url = "https://files.pythonhosted.org/packages/8a/80/5908ae149d96d81580d604c7f8aefd0e98f4fd728cf172f477e9f2a81744/kiwisolver-1.5.0-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:800ee55980c18545af444d93fdd60c56b580db5cc54867d8cbf8a1dc0829938c", size = 1960726, upload-time = "2026-03-09T13:13:16.047Z" }, + { url = 
"https://files.pythonhosted.org/packages/84/08/a78cb776f8c085b7143142ce479859cfec086bd09ee638a317040b6ef420/kiwisolver-1.5.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:c438f6ca858697c9ab67eb28246c92508af972e114cac34e57a6d4ba17a3ac08", size = 2464738, upload-time = "2026-03-09T13:13:17.897Z" }, + { url = "https://files.pythonhosted.org/packages/b1/e1/65584da5356ed6cb12c63791a10b208860ac40a83de165cb6a6751a686e3/kiwisolver-1.5.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:8c63c91f95173f9c2a67c7c526b2cea976828a0e7fced9cdcead2802dc10f8a4", size = 2270718, upload-time = "2026-03-09T13:13:19.421Z" }, + { url = "https://files.pythonhosted.org/packages/be/6c/28f17390b62b8f2f520e2915095b3c94d88681ecf0041e75389d9667f202/kiwisolver-1.5.0-cp311-cp311-win_amd64.whl", hash = "sha256:beb7f344487cdcb9e1efe4b7a29681b74d34c08f0043a327a74da852a6749e7b", size = 73480, upload-time = "2026-03-09T13:13:20.818Z" }, + { url = "https://files.pythonhosted.org/packages/d8/0e/2ee5debc4f77a625778fec5501ff3e8036fe361b7ee28ae402a485bb9694/kiwisolver-1.5.0-cp311-cp311-win_arm64.whl", hash = "sha256:ad4ae4ffd1ee9cd11357b4c66b612da9888f4f4daf2f36995eda64bd45370cac", size = 64930, upload-time = "2026-03-09T13:13:21.997Z" }, + { url = "https://files.pythonhosted.org/packages/4d/b2/818b74ebea34dabe6d0c51cb1c572e046730e64844da6ed646d5298c40ce/kiwisolver-1.5.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:4e9750bc21b886308024f8a54ccb9a2cc38ac9fa813bf4348434e3d54f337ff9", size = 123158, upload-time = "2026-03-09T13:13:23.127Z" }, + { url = "https://files.pythonhosted.org/packages/bf/d9/405320f8077e8e1c5c4bd6adc45e1e6edf6d727b6da7f2e2533cf58bff71/kiwisolver-1.5.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:72ec46b7eba5b395e0a7b63025490d3214c11013f4aacb4f5e8d6c3041829588", size = 66388, upload-time = "2026-03-09T13:13:24.765Z" }, + { url = 
"https://files.pythonhosted.org/packages/99/9f/795fedf35634f746151ca8839d05681ceb6287fbed6cc1c9bf235f7887c2/kiwisolver-1.5.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ed3a984b31da7481b103f68776f7128a89ef26ed40f4dc41a2223cda7fb24819", size = 64068, upload-time = "2026-03-09T13:13:25.878Z" }, + { url = "https://files.pythonhosted.org/packages/c4/13/680c54afe3e65767bed7ec1a15571e1a2f1257128733851ade24abcefbcc/kiwisolver-1.5.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:bb5136fb5352d3f422df33f0c879a1b0c204004324150cc3b5e3c4f310c9049f", size = 1477934, upload-time = "2026-03-09T13:13:27.166Z" }, + { url = "https://files.pythonhosted.org/packages/c8/2f/cebfcdb60fd6a9b0f6b47a9337198bcbad6fbe15e68189b7011fd914911f/kiwisolver-1.5.0-cp312-cp312-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b2af221f268f5af85e776a73d62b0845fc8baf8ef0abfae79d29c77d0e776aaf", size = 1278537, upload-time = "2026-03-09T13:13:28.707Z" }, + { url = "https://files.pythonhosted.org/packages/f2/0d/9b782923aada3fafb1d6b84e13121954515c669b18af0c26e7d21f579855/kiwisolver-1.5.0-cp312-cp312-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:b0f172dc8ffaccb8522d7c5d899de00133f2f1ca7b0a49b7da98e901de87bf2d", size = 1296685, upload-time = "2026-03-09T13:13:30.528Z" }, + { url = "https://files.pythonhosted.org/packages/27/70/83241b6634b04fe44e892688d5208332bde130f38e610c0418f9ede47ded/kiwisolver-1.5.0-cp312-cp312-manylinux_2_24_s390x.manylinux_2_28_s390x.whl", hash = "sha256:6ab8ba9152203feec73758dad83af9a0bbe05001eb4639e547207c40cfb52083", size = 1346024, upload-time = "2026-03-09T13:13:32.818Z" }, + { url = "https://files.pythonhosted.org/packages/e4/db/30ed226fb271ae1a6431fc0fe0edffb2efe23cadb01e798caeb9f2ceae8f/kiwisolver-1.5.0-cp312-cp312-manylinux_2_39_riscv64.whl", hash = "sha256:cdee07c4d7f6d72008d3f73b9bf027f4e11550224c7c50d8df1ae4a37c1402a6", size = 987241, upload-time = "2026-03-09T13:13:34.435Z" }, + { url = 
"https://files.pythonhosted.org/packages/ec/bd/c314595208e4c9587652d50959ead9e461995389664e490f4dce7ff0f782/kiwisolver-1.5.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:7c60d3c9b06fb23bd9c6139281ccbdc384297579ae037f08ae90c69f6845c0b1", size = 2227742, upload-time = "2026-03-09T13:13:36.4Z" }, + { url = "https://files.pythonhosted.org/packages/c1/43/0499cec932d935229b5543d073c2b87c9c22846aab48881e9d8d6e742a2d/kiwisolver-1.5.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:e315e5ec90d88e140f57696ff85b484ff68bb311e36f2c414aa4286293e6dee0", size = 2323966, upload-time = "2026-03-09T13:13:38.204Z" }, + { url = "https://files.pythonhosted.org/packages/3d/6f/79b0d760907965acfd9d61826a3d41f8f093c538f55cd2633d3f0db269f6/kiwisolver-1.5.0-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:1465387ac63576c3e125e5337a6892b9e99e0627d52317f3ca79e6930d889d15", size = 1977417, upload-time = "2026-03-09T13:13:39.966Z" }, + { url = "https://files.pythonhosted.org/packages/ab/31/01d0537c41cb75a551a438c3c7a80d0c60d60b81f694dac83dd436aec0d0/kiwisolver-1.5.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:530a3fd64c87cffa844d4b6b9768774763d9caa299e9b75d8eca6a4423b31314", size = 2491238, upload-time = "2026-03-09T13:13:41.698Z" }, + { url = "https://files.pythonhosted.org/packages/e4/34/8aefdd0be9cfd00a44509251ba864f5caf2991e36772e61c408007e7f417/kiwisolver-1.5.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:1d9daea4ea6b9be74fe2f01f7fbade8d6ffab263e781274cffca0dba9be9eec9", size = 2294947, upload-time = "2026-03-09T13:13:43.343Z" }, + { url = "https://files.pythonhosted.org/packages/ad/cf/0348374369ca588f8fe9c338fae49fa4e16eeb10ffb3d012f23a54578a9e/kiwisolver-1.5.0-cp312-cp312-win_amd64.whl", hash = "sha256:f18c2d9782259a6dc132fdc7a63c168cbc74b35284b6d75c673958982a378384", size = 73569, upload-time = "2026-03-09T13:13:45.792Z" }, + { url = 
"https://files.pythonhosted.org/packages/28/26/192b26196e2316e2bd29deef67e37cdf9870d9af8e085e521afff0fed526/kiwisolver-1.5.0-cp312-cp312-win_arm64.whl", hash = "sha256:f7c7553b13f69c1b29a5bde08ddc6d9d0c8bfb84f9ed01c30db25944aeb852a7", size = 64997, upload-time = "2026-03-09T13:13:46.878Z" }, + { url = "https://files.pythonhosted.org/packages/1c/fa/2910df836372d8761bb6eff7d8bdcb1613b5c2e03f260efe7abe34d388a7/kiwisolver-1.5.0-graalpy312-graalpy250_312_native-macosx_10_13_x86_64.whl", hash = "sha256:5ae8e62c147495b01a0f4765c878e9bfdf843412446a247e28df59936e99e797", size = 130262, upload-time = "2026-03-09T13:15:35.629Z" }, + { url = "https://files.pythonhosted.org/packages/0f/41/c5f71f9f00aabcc71fee8b7475e3f64747282580c2fe748961ba29b18385/kiwisolver-1.5.0-graalpy312-graalpy250_312_native-macosx_11_0_arm64.whl", hash = "sha256:f6764a4ccab3078db14a632420930f6186058750df066b8ea2a7106df91d3203", size = 138036, upload-time = "2026-03-09T13:15:36.894Z" }, + { url = "https://files.pythonhosted.org/packages/fa/06/7399a607f434119c6e1fdc8ec89a8d51ccccadf3341dee4ead6bd14caaf5/kiwisolver-1.5.0-graalpy312-graalpy250_312_native-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c31c13da98624f957b0fb1b5bae5383b2333c2c3f6793d9825dd5ce79b525cb7", size = 194295, upload-time = "2026-03-09T13:15:38.22Z" }, + { url = "https://files.pythonhosted.org/packages/b5/91/53255615acd2a1eaca307ede3c90eb550bae9c94581f8c00081b6b1c8f44/kiwisolver-1.5.0-graalpy312-graalpy250_312_native-win_amd64.whl", hash = "sha256:1f1489f769582498610e015a8ef2d36f28f505ab3096d0e16b4858a9ec214f57", size = 75987, upload-time = "2026-03-09T13:15:39.65Z" }, + { url = "https://files.pythonhosted.org/packages/e9/eb/5fcbbbf9a0e2c3a35effb88831a483345326bbc3a030a3b5b69aee647f84/kiwisolver-1.5.0-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:ec4c85dc4b687c7f7f15f553ff26a98bfe8c58f5f7f0ac8905f0ba4c7be60232", size = 59532, upload-time = "2026-03-09T13:15:47.047Z" }, + { url = 
"https://files.pythonhosted.org/packages/c3/9b/e17104555bb4db148fd52327feea1e96be4b88e8e008b029002c281a21ab/kiwisolver-1.5.0-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:12e91c215a96e39f57989c8912ae761286ac5a9584d04030ceb3368a357f017a", size = 57420, upload-time = "2026-03-09T13:15:48.199Z" }, + { url = "https://files.pythonhosted.org/packages/48/44/2b5b95b7aa39fb2d8d9d956e0f3d5d45aef2ae1d942d4c3ffac2f9cfed1a/kiwisolver-1.5.0-pp311-pypy311_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:be4a51a55833dc29ab5d7503e7bcb3b3af3402d266018137127450005cdfe737", size = 79892, upload-time = "2026-03-09T13:15:49.694Z" }, + { url = "https://files.pythonhosted.org/packages/52/7d/7157f9bba6b455cfb4632ed411e199fc8b8977642c2b12082e1bd9e6d173/kiwisolver-1.5.0-pp311-pypy311_pp73-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:daae526907e262de627d8f70058a0f64acc9e2641c164c99c8f594b34a799a16", size = 77603, upload-time = "2026-03-09T13:15:50.945Z" }, + { url = "https://files.pythonhosted.org/packages/0a/dd/8050c947d435c8d4bc94e3252f4d8bb8a76cfb424f043a8680be637a57f1/kiwisolver-1.5.0-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:59cd8683f575d96df5bb48f6add94afc055012c29e28124fcae2b63661b9efb1", size = 73558, upload-time = "2026-03-09T13:15:52.112Z" }, ] [[package]] @@ -1189,7 +1223,7 @@ wheels = [ [[package]] name = "mizani" -version = "0.14.3" +version = "0.14.4" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "numpy" }, @@ -1197,9 +1231,9 @@ dependencies = [ { name = "scipy" }, { name = "tzdata", marker = "sys_platform == 'emscripten' or sys_platform == 'win32'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/6d/19/98f2bd61e5441b687e0a5d3b36041981cc032451f2d11472021b040d27fd/mizani-0.14.3.tar.gz", hash = "sha256:c2fb886b3c9e8109be5b8fd21e1130fba1f0a20230a987146240221209fc0ddd", size = 772470, upload-time = "2025-10-30T20:16:53.268Z" } +sdist = { url = 
"https://files.pythonhosted.org/packages/a7/02/43fcf763c70e8aa8edc28ac65713daca2c18d3bc2b998af4647966b5bafb/mizani-0.14.4.tar.gz", hash = "sha256:28934d91516d922d7cb0382c82a6c513692abc0174c42a50294ae571520633f9", size = 772490, upload-time = "2026-01-28T14:42:18.108Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/5c/d2/4ffcaa27c8a4b4f9ad456da4821c76dfbdfada23e8210cd4d80e1eb3236a/mizani-0.14.3-py3-none-any.whl", hash = "sha256:6d2ca9b1b8366ff85668f0cc1b6095f1e702e26e66f132c4f02a949efa32a688", size = 133433, upload-time = "2025-10-30T20:16:51.218Z" }, + { url = "https://files.pythonhosted.org/packages/bd/30/b6617c74a8234ff60265373ef730eb6378ccdda74042f51f9ac936191664/mizani-0.14.4-py3-none-any.whl", hash = "sha256:ed72bf249e2a18b5dcc65cd54c7eaa5444b2cb09c7e18aafa2ab6f05f1b78620", size = 133471, upload-time = "2026-01-28T14:42:16.328Z" }, ] [[package]] @@ -1275,7 +1309,7 @@ wheels = [ [[package]] name = "nbconvert" -version = "7.16.6" +version = "7.17.1" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "beautifulsoup4" }, @@ -1293,9 +1327,9 @@ dependencies = [ { name = "pygments" }, { name = "traitlets" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/a3/59/f28e15fc47ffb73af68a8d9b47367a8630d76e97ae85ad18271b9db96fdf/nbconvert-7.16.6.tar.gz", hash = "sha256:576a7e37c6480da7b8465eefa66c17844243816ce1ccc372633c6b71c3c0f582", size = 857715, upload-time = "2025-01-28T09:29:14.724Z" } +sdist = { url = "https://files.pythonhosted.org/packages/01/b1/708e53fe2e429c103c6e6e159106bcf0357ac41aa4c28772bd8402339051/nbconvert-7.17.1.tar.gz", hash = "sha256:34d0d0a7e73ce3cbab6c5aae8f4f468797280b01fd8bd2ca746da8569eddd7d2", size = 865311, upload-time = "2026-04-08T00:44:14.914Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/cc/9a/cd673b2f773a12c992f41309ef81b99da1690426bd2f96957a7ade0d3ed7/nbconvert-7.16.6-py3-none-any.whl", hash = "sha256:1375a7b67e0c2883678c48e506dc320febb57685e5ee67faa51b18a90f3a712b", 
size = 258525, upload-time = "2025-01-28T09:29:12.551Z" }, + { url = "https://files.pythonhosted.org/packages/67/f8/bb0a9d5f46819c821dc1f004aa2cc29b1d91453297dbf5ff20470f00f193/nbconvert-7.17.1-py3-none-any.whl", hash = "sha256:aa85c087b435e7bf1ffd03319f658e285f2b89eccab33bc1ba7025495ab3e7c8", size = 261927, upload-time = "2026-04-08T00:44:12.845Z" }, ] [[package]] @@ -1333,7 +1367,7 @@ wheels = [ [[package]] name = "notebook" -version = "7.5.2" +version = "7.5.5" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "jupyter-server" }, @@ -1342,9 +1376,9 @@ dependencies = [ { name = "notebook-shim" }, { name = "tornado" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/3b/b6/6b2c653570b02e4ec2a94c0646a4a25132be0749617776d0b72a2bcedb9b/notebook-7.5.2.tar.gz", hash = "sha256:83e82f93c199ca730313bea1bb24bc279ea96f74816d038a92d26b6b9d5f3e4a", size = 14059605, upload-time = "2026-01-12T14:56:53.483Z" } +sdist = { url = "https://files.pythonhosted.org/packages/1f/6d/41052c48d6f6349ca0a7c4d1f6a78464de135e6d18f5829ba2510e62184c/notebook-7.5.5.tar.gz", hash = "sha256:dc0bfab0f2372c8278c457423d3256c34154ac2cc76bf20e9925260c461013c3", size = 14169167, upload-time = "2026-03-11T16:32:51.922Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/32/55/b754cd51c6011d90ef03e3f06136f1ebd44658b9529dbcf0c15fc0d6a0b7/notebook-7.5.2-py3-none-any.whl", hash = "sha256:17d078a98603d70d62b6b4b3fcb67e87d7a68c398a7ae9b447eb2d7d9aec9979", size = 14468915, upload-time = "2026-01-12T14:56:47.87Z" }, + { url = "https://files.pythonhosted.org/packages/f8/aa/cbd1deb9f07446241e88f8d5fecccd95b249bca0b4e5482214a4d1714c49/notebook-7.5.5-py3-none-any.whl", hash = "sha256:a7c14dbeefa6592e87f72290ca982e0c10f5bbf3786be2a600fda9da2764a2b8", size = 14578929, upload-time = "2026-03-11T16:32:48.021Z" }, ] [[package]] @@ -1361,44 +1395,44 @@ wheels = [ [[package]] name = "numpy" -version = "2.4.1" -source = { registry = "https://pypi.org/simple" } -sdist = { url 
= "https://files.pythonhosted.org/packages/24/62/ae72ff66c0f1fd959925b4c11f8c2dea61f47f6acaea75a08512cdfe3fed/numpy-2.4.1.tar.gz", hash = "sha256:a1ceafc5042451a858231588a104093474c6a5c57dcc724841f5c888d237d690", size = 20721320, upload-time = "2026-01-10T06:44:59.619Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/a5/34/2b1bc18424f3ad9af577f6ce23600319968a70575bd7db31ce66731bbef9/numpy-2.4.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:0cce2a669e3c8ba02ee563c7835f92c153cf02edff1ae05e1823f1dde21b16a5", size = 16944563, upload-time = "2026-01-10T06:42:14.615Z" }, - { url = "https://files.pythonhosted.org/packages/2c/57/26e5f97d075aef3794045a6ca9eada6a4ed70eb9a40e7a4a93f9ac80d704/numpy-2.4.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:899d2c18024984814ac7e83f8f49d8e8180e2fbe1b2e252f2e7f1d06bea92425", size = 12645658, upload-time = "2026-01-10T06:42:17.298Z" }, - { url = "https://files.pythonhosted.org/packages/8e/ba/80fc0b1e3cb2fd5c6143f00f42eb67762aa043eaa05ca924ecc3222a7849/numpy-2.4.1-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:09aa8a87e45b55a1c2c205d42e2808849ece5c484b2aab11fecabec3841cafba", size = 5474132, upload-time = "2026-01-10T06:42:19.637Z" }, - { url = "https://files.pythonhosted.org/packages/40/ae/0a5b9a397f0e865ec171187c78d9b57e5588afc439a04ba9cab1ebb2c945/numpy-2.4.1-cp311-cp311-macosx_14_0_x86_64.whl", hash = "sha256:edee228f76ee2dab4579fad6f51f6a305de09d444280109e0f75df247ff21501", size = 6804159, upload-time = "2026-01-10T06:42:21.44Z" }, - { url = "https://files.pythonhosted.org/packages/86/9c/841c15e691c7085caa6fd162f063eff494099c8327aeccd509d1ab1e36ab/numpy-2.4.1-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a92f227dbcdc9e4c3e193add1a189a9909947d4f8504c576f4a732fd0b54240a", size = 14708058, upload-time = "2026-01-10T06:42:23.546Z" }, - { url = 
"https://files.pythonhosted.org/packages/5d/9d/7862db06743f489e6a502a3b93136d73aea27d97b2cf91504f70a27501d6/numpy-2.4.1-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:538bf4ec353709c765ff75ae616c34d3c3dca1a68312727e8f2676ea644f8509", size = 16651501, upload-time = "2026-01-10T06:42:25.909Z" }, - { url = "https://files.pythonhosted.org/packages/a6/9c/6fc34ebcbd4015c6e5f0c0ce38264010ce8a546cb6beacb457b84a75dfc8/numpy-2.4.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:ac08c63cb7779b85e9d5318e6c3518b424bc1f364ac4cb2c6136f12e5ff2dccc", size = 16492627, upload-time = "2026-01-10T06:42:28.938Z" }, - { url = "https://files.pythonhosted.org/packages/aa/63/2494a8597502dacda439f61b3c0db4da59928150e62be0e99395c3ad23c5/numpy-2.4.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:4f9c360ecef085e5841c539a9a12b883dff005fbd7ce46722f5e9cef52634d82", size = 18585052, upload-time = "2026-01-10T06:42:31.312Z" }, - { url = "https://files.pythonhosted.org/packages/6a/93/098e1162ae7522fc9b618d6272b77404c4656c72432ecee3abc029aa3de0/numpy-2.4.1-cp311-cp311-win32.whl", hash = "sha256:0f118ce6b972080ba0758c6087c3617b5ba243d806268623dc34216d69099ba0", size = 6236575, upload-time = "2026-01-10T06:42:33.872Z" }, - { url = "https://files.pythonhosted.org/packages/8c/de/f5e79650d23d9e12f38a7bc6b03ea0835b9575494f8ec94c11c6e773b1b1/numpy-2.4.1-cp311-cp311-win_amd64.whl", hash = "sha256:18e14c4d09d55eef39a6ab5b08406e84bc6869c1e34eef45564804f90b7e0574", size = 12604479, upload-time = "2026-01-10T06:42:35.778Z" }, - { url = "https://files.pythonhosted.org/packages/dd/65/e1097a7047cff12ce3369bd003811516b20ba1078dbdec135e1cd7c16c56/numpy-2.4.1-cp311-cp311-win_arm64.whl", hash = "sha256:6461de5113088b399d655d45c3897fa188766415d0f568f175ab071c8873bd73", size = 10578325, upload-time = "2026-01-10T06:42:38.518Z" }, - { url = 
"https://files.pythonhosted.org/packages/78/7f/ec53e32bf10c813604edf07a3682616bd931d026fcde7b6d13195dfb684a/numpy-2.4.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:d3703409aac693fa82c0aee023a1ae06a6e9d065dba10f5e8e80f642f1e9d0a2", size = 16656888, upload-time = "2026-01-10T06:42:40.913Z" }, - { url = "https://files.pythonhosted.org/packages/b8/e0/1f9585d7dae8f14864e948fd7fa86c6cb72dee2676ca2748e63b1c5acfe0/numpy-2.4.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:7211b95ca365519d3596a1d8688a95874cc94219d417504d9ecb2df99fa7bfa8", size = 12373956, upload-time = "2026-01-10T06:42:43.091Z" }, - { url = "https://files.pythonhosted.org/packages/8e/43/9762e88909ff2326f5e7536fa8cb3c49fb03a7d92705f23e6e7f553d9cb3/numpy-2.4.1-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:5adf01965456a664fc727ed69cc71848f28d063217c63e1a0e200a118d5eec9a", size = 5202567, upload-time = "2026-01-10T06:42:45.107Z" }, - { url = "https://files.pythonhosted.org/packages/4b/ee/34b7930eb61e79feb4478800a4b95b46566969d837546aa7c034c742ef98/numpy-2.4.1-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:26f0bcd9c79a00e339565b303badc74d3ea2bd6d52191eeca5f95936cad107d0", size = 6549459, upload-time = "2026-01-10T06:42:48.152Z" }, - { url = "https://files.pythonhosted.org/packages/79/e3/5f115fae982565771be994867c89bcd8d7208dbfe9469185497d70de5ddf/numpy-2.4.1-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0093e85df2960d7e4049664b26afc58b03236e967fb942354deef3208857a04c", size = 14404859, upload-time = "2026-01-10T06:42:49.947Z" }, - { url = "https://files.pythonhosted.org/packages/d9/7d/9c8a781c88933725445a859cac5d01b5871588a15969ee6aeb618ba99eee/numpy-2.4.1-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:7ad270f438cbdd402c364980317fb6b117d9ec5e226fff5b4148dd9aa9fc6e02", size = 16371419, upload-time = "2026-01-10T06:42:52.409Z" }, - { url = 
"https://files.pythonhosted.org/packages/a6/d2/8aa084818554543f17cf4162c42f162acbd3bb42688aefdba6628a859f77/numpy-2.4.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:297c72b1b98100c2e8f873d5d35fb551fce7040ade83d67dd51d38c8d42a2162", size = 16182131, upload-time = "2026-01-10T06:42:54.694Z" }, - { url = "https://files.pythonhosted.org/packages/60/db/0425216684297c58a8df35f3284ef56ec4a043e6d283f8a59c53562caf1b/numpy-2.4.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:cf6470d91d34bf669f61d515499859fa7a4c2f7c36434afb70e82df7217933f9", size = 18295342, upload-time = "2026-01-10T06:42:56.991Z" }, - { url = "https://files.pythonhosted.org/packages/31/4c/14cb9d86240bd8c386c881bafbe43f001284b7cce3bc01623ac9475da163/numpy-2.4.1-cp312-cp312-win32.whl", hash = "sha256:b6bcf39112e956594b3331316d90c90c90fb961e39696bda97b89462f5f3943f", size = 5959015, upload-time = "2026-01-10T06:42:59.631Z" }, - { url = "https://files.pythonhosted.org/packages/51/cf/52a703dbeb0c65807540d29699fef5fda073434ff61846a564d5c296420f/numpy-2.4.1-cp312-cp312-win_amd64.whl", hash = "sha256:e1a27bb1b2dee45a2a53f5ca6ff2d1a7f135287883a1689e930d44d1ff296c87", size = 12310730, upload-time = "2026-01-10T06:43:01.627Z" }, - { url = "https://files.pythonhosted.org/packages/69/80/a828b2d0ade5e74a9fe0f4e0a17c30fdc26232ad2bc8c9f8b3197cf7cf18/numpy-2.4.1-cp312-cp312-win_arm64.whl", hash = "sha256:0e6e8f9d9ecf95399982019c01223dc130542960a12edfa8edd1122dfa66a8a8", size = 10312166, upload-time = "2026-01-10T06:43:03.673Z" }, - { url = "https://files.pythonhosted.org/packages/1e/48/d86f97919e79314a1cdee4c832178763e6e98e623e123d0bada19e92c15a/numpy-2.4.1-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:8ad35f20be147a204e28b6a0575fbf3540c5e5f802634d4258d55b1ff5facce1", size = 16822202, upload-time = "2026-01-10T06:44:43.738Z" }, - { url = 
"https://files.pythonhosted.org/packages/51/e9/1e62a7f77e0f37dcfb0ad6a9744e65df00242b6ea37dfafb55debcbf5b55/numpy-2.4.1-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:8097529164c0f3e32bb89412a0905d9100bf434d9692d9fc275e18dcf53c9344", size = 12569985, upload-time = "2026-01-10T06:44:45.945Z" }, - { url = "https://files.pythonhosted.org/packages/c7/7e/914d54f0c801342306fdcdce3e994a56476f1b818c46c47fc21ae968088c/numpy-2.4.1-pp311-pypy311_pp73-macosx_14_0_arm64.whl", hash = "sha256:ea66d2b41ca4a1630aae5507ee0a71647d3124d1741980138aa8f28f44dac36e", size = 5398484, upload-time = "2026-01-10T06:44:48.012Z" }, - { url = "https://files.pythonhosted.org/packages/1c/d8/9570b68584e293a33474e7b5a77ca404f1dcc655e40050a600dee81d27fb/numpy-2.4.1-pp311-pypy311_pp73-macosx_14_0_x86_64.whl", hash = "sha256:d3f8f0df9f4b8be57b3bf74a1d087fec68f927a2fab68231fdb442bf2c12e426", size = 6713216, upload-time = "2026-01-10T06:44:49.725Z" }, - { url = "https://files.pythonhosted.org/packages/33/9b/9dd6e2db8d49eb24f86acaaa5258e5f4c8ed38209a4ee9de2d1a0ca25045/numpy-2.4.1-pp311-pypy311_pp73-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:2023ef86243690c2791fd6353e5b4848eedaa88ca8a2d129f462049f6d484696", size = 14538937, upload-time = "2026-01-10T06:44:51.498Z" }, - { url = "https://files.pythonhosted.org/packages/53/87/d5bd995b0f798a37105b876350d346eea5838bd8f77ea3d7a48392f3812b/numpy-2.4.1-pp311-pypy311_pp73-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8361ea4220d763e54cff2fbe7d8c93526b744f7cd9ddab47afeff7e14e8503be", size = 16479830, upload-time = "2026-01-10T06:44:53.931Z" }, - { url = "https://files.pythonhosted.org/packages/5b/c7/b801bf98514b6ae6475e941ac05c58e6411dd863ea92916bfd6d510b08c1/numpy-2.4.1-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:4f1b68ff47680c2925f8063402a693ede215f0257f02596b1318ecdfb1d79e33", size = 12492579, upload-time = "2026-01-10T06:44:57.094Z" }, +version = "2.4.4" +source = { registry = "https://pypi.org/simple" 
} +sdist = { url = "https://files.pythonhosted.org/packages/d7/9f/b8cef5bffa569759033adda9481211426f12f53299629b410340795c2514/numpy-2.4.4.tar.gz", hash = "sha256:2d390634c5182175533585cc89f3608a4682ccb173cc9bb940b2881c8d6f8fa0", size = 20731587, upload-time = "2026-03-29T13:22:01.298Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ef/c6/4218570d8c8ecc9704b5157a3348e486e84ef4be0ed3e38218ab473c83d2/numpy-2.4.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:f983334aea213c99992053ede6168500e5f086ce74fbc4acc3f2b00f5762e9db", size = 16976799, upload-time = "2026-03-29T13:18:15.438Z" }, + { url = "https://files.pythonhosted.org/packages/dd/92/b4d922c4a5f5dab9ed44e6153908a5c665b71acf183a83b93b690996e39b/numpy-2.4.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:72944b19f2324114e9dc86a159787333b77874143efcf89a5167ef83cfee8af0", size = 14971552, upload-time = "2026-03-29T13:18:18.606Z" }, + { url = "https://files.pythonhosted.org/packages/8a/dc/df98c095978fa6ee7b9a9387d1d58cbb3d232d0e69ad169a4ce784bde4fd/numpy-2.4.4-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:86b6f55f5a352b48d7fbfd2dbc3d5b780b2d79f4d3c121f33eb6efb22e9a2015", size = 5476566, upload-time = "2026-03-29T13:18:21.532Z" }, + { url = "https://files.pythonhosted.org/packages/28/34/b3fdcec6e725409223dd27356bdf5a3c2cc2282e428218ecc9cb7acc9763/numpy-2.4.4-cp311-cp311-macosx_14_0_x86_64.whl", hash = "sha256:ba1f4fc670ed79f876f70082eff4f9583c15fb9a4b89d6188412de4d18ae2f40", size = 6806482, upload-time = "2026-03-29T13:18:23.634Z" }, + { url = "https://files.pythonhosted.org/packages/68/62/63417c13aa35d57bee1337c67446761dc25ea6543130cf868eace6e8157b/numpy-2.4.4-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8a87ec22c87be071b6bdbd27920b129b94f2fc964358ce38f3822635a3e2e03d", size = 15973376, upload-time = "2026-03-29T13:18:26.677Z" }, + { url = 
"https://files.pythonhosted.org/packages/cf/c5/9fcb7e0e69cef59cf10c746b84f7d58b08bc66a6b7d459783c5a4f6101a6/numpy-2.4.4-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:df3775294accfdd75f32c74ae39fcba920c9a378a2fc18a12b6820aa8c1fb502", size = 16925137, upload-time = "2026-03-29T13:18:30.14Z" }, + { url = "https://files.pythonhosted.org/packages/7e/43/80020edacb3f84b9efdd1591120a4296462c23fd8db0dde1666f6ef66f13/numpy-2.4.4-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:0d4e437e295f18ec29bc79daf55e8a47a9113df44d66f702f02a293d93a2d6dd", size = 17329414, upload-time = "2026-03-29T13:18:33.733Z" }, + { url = "https://files.pythonhosted.org/packages/fd/06/af0658593b18a5f73532d377188b964f239eb0894e664a6c12f484472f97/numpy-2.4.4-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:6aa3236c78803afbcb255045fbef97a9e25a1f6c9888357d205ddc42f4d6eba5", size = 18658397, upload-time = "2026-03-29T13:18:37.511Z" }, + { url = "https://files.pythonhosted.org/packages/e6/ce/13a09ed65f5d0ce5c7dd0669250374c6e379910f97af2c08c57b0608eee4/numpy-2.4.4-cp311-cp311-win32.whl", hash = "sha256:30caa73029a225b2d40d9fae193e008e24b2026b7ee1a867b7ee8d96ca1a448e", size = 6239499, upload-time = "2026-03-29T13:18:40.372Z" }, + { url = "https://files.pythonhosted.org/packages/bd/63/05d193dbb4b5eec1eca73822d80da98b511f8328ad4ae3ca4caf0f4db91d/numpy-2.4.4-cp311-cp311-win_amd64.whl", hash = "sha256:6bbe4eb67390b0a0265a2c25458f6b90a409d5d069f1041e6aff1e27e3d9a79e", size = 12614257, upload-time = "2026-03-29T13:18:42.95Z" }, + { url = "https://files.pythonhosted.org/packages/87/c5/8168052f080c26fa984c413305012be54741c9d0d74abd7fbeeccae3889f/numpy-2.4.4-cp311-cp311-win_arm64.whl", hash = "sha256:fcfe2045fd2e8f3cb0ce9d4ba6dba6333b8fa05bb8a4939c908cd43322d14c7e", size = 10486775, upload-time = "2026-03-29T13:18:45.835Z" }, + { url = 
"https://files.pythonhosted.org/packages/28/05/32396bec30fb2263770ee910142f49c1476d08e8ad41abf8403806b520ce/numpy-2.4.4-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:15716cfef24d3a9762e3acdf87e27f58dc823d1348f765bbea6bef8c639bfa1b", size = 16689272, upload-time = "2026-03-29T13:18:49.223Z" }, + { url = "https://files.pythonhosted.org/packages/c5/f3/a983d28637bfcd763a9c7aafdb6d5c0ebf3d487d1e1459ffdb57e2f01117/numpy-2.4.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:23cbfd4c17357c81021f21540da84ee282b9c8fba38a03b7b9d09ba6b951421e", size = 14699573, upload-time = "2026-03-29T13:18:52.629Z" }, + { url = "https://files.pythonhosted.org/packages/9b/fd/e5ecca1e78c05106d98028114f5c00d3eddb41207686b2b7de3e477b0e22/numpy-2.4.4-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:8b3b60bb7cba2c8c81837661c488637eee696f59a877788a396d33150c35d842", size = 5204782, upload-time = "2026-03-29T13:18:55.579Z" }, + { url = "https://files.pythonhosted.org/packages/de/2f/702a4594413c1a8632092beae8aba00f1d67947389369b3777aed783fdca/numpy-2.4.4-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:e4a010c27ff6f210ff4c6ef34394cd61470d01014439b192ec22552ee867f2a8", size = 6552038, upload-time = "2026-03-29T13:18:57.769Z" }, + { url = "https://files.pythonhosted.org/packages/7f/37/eed308a8f56cba4d1fdf467a4fc67ef4ff4bf1c888f5fc980481890104b1/numpy-2.4.4-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f9e75681b59ddaa5e659898085ae0eaea229d054f2ac0c7e563a62205a700121", size = 15670666, upload-time = "2026-03-29T13:19:00.341Z" }, + { url = "https://files.pythonhosted.org/packages/0a/0d/0e3ecece05b7a7e87ab9fb587855548da437a061326fff64a223b6dcb78a/numpy-2.4.4-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:81f4a14bee47aec54f883e0cad2d73986640c1590eb9bfaaba7ad17394481e6e", size = 16645480, upload-time = "2026-03-29T13:19:03.63Z" }, + { url = 
"https://files.pythonhosted.org/packages/34/49/f2312c154b82a286758ee2f1743336d50651f8b5195db18cdb63675ff649/numpy-2.4.4-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:62d6b0f03b694173f9fcb1fb317f7222fd0b0b103e784c6549f5e53a27718c44", size = 17020036, upload-time = "2026-03-29T13:19:07.428Z" }, + { url = "https://files.pythonhosted.org/packages/7b/e9/736d17bd77f1b0ec4f9901aaec129c00d59f5d84d5e79bba540ef12c2330/numpy-2.4.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:fbc356aae7adf9e6336d336b9c8111d390a05df88f1805573ebb0807bd06fd1d", size = 18368643, upload-time = "2026-03-29T13:19:10.775Z" }, + { url = "https://files.pythonhosted.org/packages/63/f6/d417977c5f519b17c8a5c3bc9e8304b0908b0e21136fe43bf628a1343914/numpy-2.4.4-cp312-cp312-win32.whl", hash = "sha256:0d35aea54ad1d420c812bfa0385c71cd7cc5bcf7c65fed95fc2cd02fe8c79827", size = 5961117, upload-time = "2026-03-29T13:19:13.464Z" }, + { url = "https://files.pythonhosted.org/packages/2d/5b/e1deebf88ff431b01b7406ca3583ab2bbb90972bbe1c568732e49c844f7e/numpy-2.4.4-cp312-cp312-win_amd64.whl", hash = "sha256:b5f0362dc928a6ecd9db58868fca5e48485205e3855957bdedea308f8672ea4a", size = 12320584, upload-time = "2026-03-29T13:19:16.155Z" }, + { url = "https://files.pythonhosted.org/packages/58/89/e4e856ac82a68c3ed64486a544977d0e7bdd18b8da75b78a577ca31c4395/numpy-2.4.4-cp312-cp312-win_arm64.whl", hash = "sha256:846300f379b5b12cc769334464656bc882e0735d27d9726568bc932fdc49d5ec", size = 10221450, upload-time = "2026-03-29T13:19:18.994Z" }, + { url = "https://files.pythonhosted.org/packages/6b/33/8fae8f964a4f63ed528264ddf25d2b683d0b663e3cba26961eb838a7c1bd/numpy-2.4.4-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:58c8b5929fcb8287cbd6f0a3fae19c6e03a5c48402ae792962ac465224a629a4", size = 16854491, upload-time = "2026-03-29T13:21:38.03Z" }, + { url = 
"https://files.pythonhosted.org/packages/bc/d0/1aabee441380b981cf8cdda3ae7a46aa827d1b5a8cce84d14598bc94d6d9/numpy-2.4.4-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:eea7ac5d2dce4189771cedb559c738a71512768210dc4e4753b107a2048b3d0e", size = 14895830, upload-time = "2026-03-29T13:21:41.509Z" }, + { url = "https://files.pythonhosted.org/packages/a5/b8/aafb0d1065416894fccf4df6b49ef22b8db045187949545bced89c034b8e/numpy-2.4.4-pp311-pypy311_pp73-macosx_14_0_arm64.whl", hash = "sha256:51fc224f7ca4d92656d5a5eb315f12eb5fe2c97a66249aa7b5f562528a3be38c", size = 5400927, upload-time = "2026-03-29T13:21:44.747Z" }, + { url = "https://files.pythonhosted.org/packages/d6/77/063baa20b08b431038c7f9ff5435540c7b7265c78cf56012a483019ca72d/numpy-2.4.4-pp311-pypy311_pp73-macosx_14_0_x86_64.whl", hash = "sha256:28a650663f7314afc3e6ec620f44f333c386aad9f6fc472030865dc0ebb26ee3", size = 6715557, upload-time = "2026-03-29T13:21:47.406Z" }, + { url = "https://files.pythonhosted.org/packages/c7/a8/379542d45a14f149444c5c4c4e7714707239ce9cc1de8c2803958889da14/numpy-2.4.4-pp311-pypy311_pp73-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:19710a9ca9992d7174e9c52f643d4272dcd1558c5f7af7f6f8190f633bd651a7", size = 15804253, upload-time = "2026-03-29T13:21:50.753Z" }, + { url = "https://files.pythonhosted.org/packages/a2/c8/f0a45426d6d21e7ea3310a15cf90c43a14d9232c31a837702dba437f3373/numpy-2.4.4-pp311-pypy311_pp73-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9b2aec6af35c113b05695ebb5749a787acd63cafc83086a05771d1e1cd1e555f", size = 16753552, upload-time = "2026-03-29T13:21:54.344Z" }, + { url = "https://files.pythonhosted.org/packages/04/74/f4c001f4714c3ad9ce037e18cf2b9c64871a84951eaa0baf683a9ca9301c/numpy-2.4.4-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:f2cf083b324a467e1ab358c105f6cad5ea950f50524668a80c486ff1db24e119", size = 12509075, upload-time = "2026-03-29T13:21:57.644Z" }, ] [[package]] name = "onnx" -version = "1.20.1" +version = "1.21.0" 
source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "ml-dtypes" }, @@ -1406,43 +1440,43 @@ dependencies = [ { name = "protobuf" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/3b/8a/335c03a8683a88a32f9a6bb98899ea6df241a41df64b37b9696772414794/onnx-1.20.1.tar.gz", hash = "sha256:ded16de1df563d51fbc1ad885f2a426f814039d8b5f4feb77febe09c0295ad67", size = 12048980, upload-time = "2026-01-10T01:40:03.043Z" } +sdist = { url = "https://files.pythonhosted.org/packages/c5/93/942d2a0f6a70538eea042ce0445c8aefd46559ad153469986f29a743c01c/onnx-1.21.0.tar.gz", hash = "sha256:4d8b67d0aaec5864c87633188b91cc520877477ec0254eda122bef8be43cd764", size = 12074608, upload-time = "2026-03-27T21:33:36.118Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/0c/38/1a0e74d586c08833404100f5c052f92732fb5be417c0b2d7cb0838443bfe/onnx-1.20.1-cp311-cp311-macosx_12_0_universal2.whl", hash = "sha256:53426e1b458641e7a537e9f176330012ff59d90206cac1c1a9d03cdd73ed3095", size = 17904965, upload-time = "2026-01-10T01:39:13.532Z" }, - { url = "https://files.pythonhosted.org/packages/96/25/64b076e9684d17335f80b15b3bf502f7a8e1a89f08a6b208d4f2861b3011/onnx-1.20.1-cp311-cp311-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ca7281f8c576adf396c338cf43fff26faee8d4d2e2577b8e73738f37ceccf945", size = 17415179, upload-time = "2026-01-10T01:39:16.516Z" }, - { url = "https://files.pythonhosted.org/packages/ac/d5/6743b409421ced20ad5af1b3a7b4c4e568689ffaca86db431692fca409a6/onnx-1.20.1-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2297f428c51c7fc6d8fad0cf34384284dfeff3f86799f8e83ef905451348ade0", size = 17513672, upload-time = "2026-01-10T01:39:19.35Z" }, - { url = "https://files.pythonhosted.org/packages/9a/6b/dae82e6fdb2043302f29adca37522312ea2be55b75907b59be06fbdffe87/onnx-1.20.1-cp311-cp311-win32.whl", hash = "sha256:63d9cbcab8c96841eadeb7c930e07bfab4dde8081eb76fb68e0dfb222706b81e", 
size = 16239336, upload-time = "2026-01-10T01:39:22.506Z" }, - { url = "https://files.pythonhosted.org/packages/8e/17/a0d7863390c1f2067d7c02dcc1477034965c32aaa1407bfcf775305ffee4/onnx-1.20.1-cp311-cp311-win_amd64.whl", hash = "sha256:d78cde72d7ca8356a2d99c5dc0dbf67264254828cae2c5780184486c0cd7b3bf", size = 16392120, upload-time = "2026-01-10T01:39:25.106Z" }, - { url = "https://files.pythonhosted.org/packages/aa/72/9b879a46eb7a3322223791f36bf9c25d95da9ed93779eabb75a560f22e5b/onnx-1.20.1-cp311-cp311-win_arm64.whl", hash = "sha256:0104bb2d4394c179bcea3df7599a45a2932b80f4633840896fcf0d7d8daecea2", size = 16346923, upload-time = "2026-01-10T01:39:27.782Z" }, - { url = "https://files.pythonhosted.org/packages/7c/4c/4b17e82f91ab9aa07ff595771e935ca73547b035030dc5f5a76e63fbfea9/onnx-1.20.1-cp312-abi3-macosx_12_0_universal2.whl", hash = "sha256:1d923bb4f0ce1b24c6859222a7e6b2f123e7bfe7623683662805f2e7b9e95af2", size = 17903547, upload-time = "2026-01-10T01:39:31.015Z" }, - { url = "https://files.pythonhosted.org/packages/64/5e/1bfa100a9cb3f2d3d5f2f05f52f7e60323b0e20bb0abace1ae64dbc88f25/onnx-1.20.1-cp312-abi3-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ddc0b7d8b5a94627dc86c533d5e415af94cbfd103019a582669dad1f56d30281", size = 17412021, upload-time = "2026-01-10T01:39:33.885Z" }, - { url = "https://files.pythonhosted.org/packages/fb/71/d3fec0dcf9a7a99e7368112d9c765154e81da70fcba1e3121131a45c245b/onnx-1.20.1-cp312-abi3-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9336b6b8e6efcf5c490a845f6afd7e041c89a56199aeda384ed7d58fb953b080", size = 17510450, upload-time = "2026-01-10T01:39:36.589Z" }, - { url = "https://files.pythonhosted.org/packages/74/a7/edce1403e05a46e59b502fae8e3350ceeac5841f8e8f1561e98562ed9b09/onnx-1.20.1-cp312-abi3-win32.whl", hash = "sha256:564c35a94811979808ab5800d9eb4f3f32c12daedba7e33ed0845f7c61ef2431", size = 16238216, upload-time = "2026-01-10T01:39:39.46Z" }, - { url = 
"https://files.pythonhosted.org/packages/8b/c7/8690c81200ae652ac550c1df52f89d7795e6cc941f3cb38c9ef821419e80/onnx-1.20.1-cp312-abi3-win_amd64.whl", hash = "sha256:9fe7f9a633979d50984b94bda8ceb7807403f59a341d09d19342dc544d0ca1d5", size = 16389207, upload-time = "2026-01-10T01:39:41.955Z" }, - { url = "https://files.pythonhosted.org/packages/01/a0/4fb0e6d36eaf079af366b2c1f68bafe92df6db963e2295da84388af64abc/onnx-1.20.1-cp312-abi3-win_arm64.whl", hash = "sha256:21d747348b1c8207406fa2f3e12b82f53e0d5bb3958bcd0288bd27d3cb6ebb00", size = 16344155, upload-time = "2026-01-10T01:39:45.536Z" }, + { url = "https://files.pythonhosted.org/packages/45/48/32e383aa6bc40b72a9fd419937aaa647078190c9bfccdc97b316d2dee687/onnx-1.21.0-cp311-cp311-macosx_12_0_universal2.whl", hash = "sha256:2aca19949260875c14866fc77ea0bc37e4e809b24976108762843d328c92d3ce", size = 17968053, upload-time = "2026-03-27T21:32:29.558Z" }, + { url = "https://files.pythonhosted.org/packages/e2/26/5726e8df7d36e96bb3c679912d1a86af42f393d77aa17d6b98a97d4289ce/onnx-1.21.0-cp311-cp311-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:82aa6ab51144df07c58c4850cb78d4f1ae969d8c0bf657b28041796d49ba6974", size = 17534821, upload-time = "2026-03-27T21:32:32.351Z" }, + { url = "https://files.pythonhosted.org/packages/d6/2b/021dcd2dd50c3c71b7959d7368526da384a295c162fb4863f36057973f78/onnx-1.21.0-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:10c3185a232089335581fabb98fba4e86d3e8246b8140f2e406082438100ebda", size = 17616664, upload-time = "2026-03-27T21:32:34.921Z" }, + { url = "https://files.pythonhosted.org/packages/12/00/afa32a46fa122a7ed42df1cfe8796922156a3725ba8fc581c4779c96e2fc/onnx-1.21.0-cp311-cp311-win32.whl", hash = "sha256:f53b3c15a3b539c16b99655c43c365622046d68c49b680c48eba4da2a4fb6f27", size = 16289035, upload-time = "2026-03-27T21:32:37.783Z" }, + { url = 
"https://files.pythonhosted.org/packages/73/8d/483cc980a24d4c0131d0af06d0ff6a37fb08ae90a7848ece8cef645194f1/onnx-1.21.0-cp311-cp311-win_amd64.whl", hash = "sha256:5f78c411743db317a76e5d009f84f7e3d5380411a1567a868e82461a1e5c775d", size = 16443748, upload-time = "2026-03-27T21:32:40.337Z" }, + { url = "https://files.pythonhosted.org/packages/38/78/9d06fd5aaaed1ec9cb8a3b70fbbf00c1bdc18db610771e96379f0ed58112/onnx-1.21.0-cp311-cp311-win_arm64.whl", hash = "sha256:ab6a488dabbb172eebc9f3b3e7ac68763f32b0c571626d4a5004608f866cc83d", size = 16406123, upload-time = "2026-03-27T21:32:45.159Z" }, + { url = "https://files.pythonhosted.org/packages/7d/ae/cb644ec84c25e63575d9d8790fdcc5d1a11d67d3f62f872edb35fa38d158/onnx-1.21.0-cp312-abi3-macosx_12_0_universal2.whl", hash = "sha256:fc2635400fe39ff37ebc4e75342cc54450eadadf39c540ff132c319bf4960095", size = 17965930, upload-time = "2026-03-27T21:32:48.089Z" }, + { url = "https://files.pythonhosted.org/packages/6f/b6/eeb5903586645ef8a49b4b7892580438741acc3df91d7a5bd0f3a59ea9cb/onnx-1.21.0-cp312-abi3-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9003d5206c01fa2ff4b46311566865d8e493e1a6998d4009ec6de39843f1b59b", size = 17531344, upload-time = "2026-03-27T21:32:50.837Z" }, + { url = "https://files.pythonhosted.org/packages/a7/00/4823f06357892d1e60d6f34e7299d2ba4ed2108c487cc394f7ce85a3ff14/onnx-1.21.0-cp312-abi3-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a9261bd580fb8548c9c37b3c6750387eb8f21ea43c63880d37b2c622e1684285", size = 17613697, upload-time = "2026-03-27T21:32:54.222Z" }, + { url = "https://files.pythonhosted.org/packages/23/1d/391f3c567ae068c8ac4f1d1316bae97c9eb45e702f05975fe0e17ad441f0/onnx-1.21.0-cp312-abi3-win32.whl", hash = "sha256:9ea4e824964082811938a9250451d89c4ec474fe42dd36c038bfa5df31993d1e", size = 16287200, upload-time = "2026-03-27T21:32:57.277Z" }, + { url = 
"https://files.pythonhosted.org/packages/9c/a6/5eefbe5b40ea96de95a766bd2e0e751f35bdea2d4b951991ec9afaa69531/onnx-1.21.0-cp312-abi3-win_amd64.whl", hash = "sha256:458d91948ad9a7729a347550553b49ab6939f9af2cddf334e2116e45467dc61f", size = 16441045, upload-time = "2026-03-27T21:33:00.081Z" }, + { url = "https://files.pythonhosted.org/packages/63/c4/0ed8dc037a39113d2a4d66e0005e07751c299c46b993f1ad5c2c35664c20/onnx-1.21.0-cp312-abi3-win_arm64.whl", hash = "sha256:ca14bc4842fccc3187eb538f07eabeb25a779b39388b006db4356c07403a7bbb", size = 16403134, upload-time = "2026-03-27T21:33:03.987Z" }, ] [[package]] name = "onnx-ir" -version = "0.1.14" +version = "0.2.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "ml-dtypes" }, { name = "numpy" }, { name = "onnx" }, + { name = "sympy" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/a5/5b/ebd083a5c3d25ce9f95b34a11b3a492cdcf7831bf127c0f64429a4e83961/onnx_ir-0.1.14.tar.gz", hash = "sha256:bd69e3b5821046d5d7c9d0fdd023f8e1d0cc9a62cbee986fa0e5ab2b1602d7ae", size = 120732, upload-time = "2026-01-07T01:19:47.777Z" } +sdist = { url = "https://files.pythonhosted.org/packages/b2/a5/acc43c8fa6edbc584d127fb6bbd13ae9ebfc01b9675c74e0da2de15fa4a6/onnx_ir-0.2.0.tar.gz", hash = "sha256:8bad3906691987290789b26d05e0dbff467029a0b1e411e12e4cae02e43503e4", size = 141693, upload-time = "2026-02-24T02:31:10.998Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/53/d1/bd9a5007448b4599a80143b0b5ccc78e9c46176e5e1bee81f6d3da68d217/onnx_ir-0.1.14-py3-none-any.whl", hash = "sha256:89b212fa7840981c5db5dc478190f1b7369536297c3c6eae68fb1c2237dd2554", size = 139128, upload-time = "2026-01-07T01:19:46.403Z" }, + { url = "https://files.pythonhosted.org/packages/4a/df/a99736bcca6b16e36c687ce4996abcf4ce73c514fddd9e730cfcb6a334f2/onnx_ir-0.2.0-py3-none-any.whl", hash = "sha256:eb14d1399c2442bd1ff702719e70074e9cedfa3af5729416a32752c9e0f82591", size = 164100, upload-time = 
"2026-02-24T02:31:09.454Z" }, ] [[package]] name = "onnxruntime" -version = "1.23.2" +version = "1.24.4" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "coloredlogs" }, { name = "flatbuffers" }, { name = "numpy" }, { name = "packaging" }, @@ -1450,21 +1484,21 @@ dependencies = [ { name = "sympy" }, ] wheels = [ - { url = "https://files.pythonhosted.org/packages/44/be/467b00f09061572f022ffd17e49e49e5a7a789056bad95b54dfd3bee73ff/onnxruntime-1.23.2-cp311-cp311-macosx_13_0_arm64.whl", hash = "sha256:6f91d2c9b0965e86827a5ba01531d5b669770b01775b23199565d6c1f136616c", size = 17196113, upload-time = "2025-10-22T03:47:33.526Z" }, - { url = "https://files.pythonhosted.org/packages/9f/a8/3c23a8f75f93122d2b3410bfb74d06d0f8da4ac663185f91866b03f7da1b/onnxruntime-1.23.2-cp311-cp311-macosx_13_0_x86_64.whl", hash = "sha256:87d8b6eaf0fbeb6835a60a4265fde7a3b60157cf1b2764773ac47237b4d48612", size = 19153857, upload-time = "2025-10-22T03:46:37.578Z" }, - { url = "https://files.pythonhosted.org/packages/3f/d8/506eed9af03d86f8db4880a4c47cd0dffee973ef7e4f4cff9f1d4bcf7d22/onnxruntime-1.23.2-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:bbfd2fca76c855317568c1b36a885ddea2272c13cb0e395002c402f2360429a6", size = 15220095, upload-time = "2025-10-22T03:46:24.769Z" }, - { url = "https://files.pythonhosted.org/packages/e9/80/113381ba832d5e777accedc6cb41d10f9eca82321ae31ebb6bcede530cea/onnxruntime-1.23.2-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:da44b99206e77734c5819aa2142c69e64f3b46edc3bd314f6a45a932defc0b3e", size = 17372080, upload-time = "2025-10-22T03:47:00.265Z" }, - { url = "https://files.pythonhosted.org/packages/3a/db/1b4a62e23183a0c3fe441782462c0ede9a2a65c6bbffb9582fab7c7a0d38/onnxruntime-1.23.2-cp311-cp311-win_amd64.whl", hash = "sha256:902c756d8b633ce0dedd889b7c08459433fbcf35e9c38d1c03ddc020f0648c6e", size = 13468349, upload-time = "2025-10-22T03:47:25.783Z" }, - { url = 
"https://files.pythonhosted.org/packages/1b/9e/f748cd64161213adeef83d0cb16cb8ace1e62fa501033acdd9f9341fff57/onnxruntime-1.23.2-cp312-cp312-macosx_13_0_arm64.whl", hash = "sha256:b8f029a6b98d3cf5be564d52802bb50a8489ab73409fa9db0bf583eabb7c2321", size = 17195929, upload-time = "2025-10-22T03:47:36.24Z" }, - { url = "https://files.pythonhosted.org/packages/91/9d/a81aafd899b900101988ead7fb14974c8a58695338ab6a0f3d6b0100f30b/onnxruntime-1.23.2-cp312-cp312-macosx_13_0_x86_64.whl", hash = "sha256:218295a8acae83905f6f1aed8cacb8e3eb3bd7513a13fe4ba3b2664a19fc4a6b", size = 19157705, upload-time = "2025-10-22T03:46:40.415Z" }, - { url = "https://files.pythonhosted.org/packages/3c/35/4e40f2fba272a6698d62be2cd21ddc3675edfc1a4b9ddefcc4648f115315/onnxruntime-1.23.2-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:76ff670550dc23e58ea9bc53b5149b99a44e63b34b524f7b8547469aaa0dcb8c", size = 15226915, upload-time = "2025-10-22T03:46:27.773Z" }, - { url = "https://files.pythonhosted.org/packages/ef/88/9cc25d2bafe6bc0d4d3c1db3ade98196d5b355c0b273e6a5dc09c5d5d0d5/onnxruntime-1.23.2-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0f9b4ae77f8e3c9bee50c27bc1beede83f786fe1d52e99ac85aa8d65a01e9b77", size = 17382649, upload-time = "2025-10-22T03:47:02.782Z" }, - { url = "https://files.pythonhosted.org/packages/c0/b4/569d298f9fc4d286c11c45e85d9ffa9e877af12ace98af8cab52396e8f46/onnxruntime-1.23.2-cp312-cp312-win_amd64.whl", hash = "sha256:25de5214923ce941a3523739d34a520aac30f21e631de53bba9174dc9c004435", size = 13470528, upload-time = "2025-10-22T03:47:28.106Z" }, + { url = "https://files.pythonhosted.org/packages/60/69/6c40720201012c6af9aa7d4ecdd620e521bd806dc6269d636fdd5c5aeebe/onnxruntime-1.24.4-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:0bdfce8e9a6497cec584aab407b71bf697dac5e1b7b7974adc50bf7533bdb3a2", size = 17332131, upload-time = "2026-03-17T22:05:49.005Z" }, + { url = 
"https://files.pythonhosted.org/packages/38/e9/8c901c150ce0c368da38638f44152fb411059c0c7364b497c9e5c957321a/onnxruntime-1.24.4-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:046ff290045a387676941a02a8ae5c3ebec6b4f551ae228711968c4a69d8f6b7", size = 15152472, upload-time = "2026-03-17T22:03:26.176Z" }, + { url = "https://files.pythonhosted.org/packages/d5/b6/7a4df417cdd01e8f067a509e123ac8b31af450a719fa7ed81787dd6057ec/onnxruntime-1.24.4-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e54ad52e61d2d4618dcff8fa1480ac66b24ee2eab73331322db1049f11ccf330", size = 17222993, upload-time = "2026-03-17T22:04:34.485Z" }, + { url = "https://files.pythonhosted.org/packages/dd/59/8febe015f391aa1757fa5ba82c759ea4b6c14ef970132efb5e316665ba61/onnxruntime-1.24.4-cp311-cp311-win_amd64.whl", hash = "sha256:b43b63eb24a2bc8fc77a09be67587a570967a412cccb837b6245ccb546691153", size = 12594863, upload-time = "2026-03-17T22:05:38.749Z" }, + { url = "https://files.pythonhosted.org/packages/32/84/4155fcd362e8873eb6ce305acfeeadacd9e0e59415adac474bea3d9281bb/onnxruntime-1.24.4-cp311-cp311-win_arm64.whl", hash = "sha256:e26478356dba25631fb3f20112e345f8e8bf62c499bb497e8a559f7d69cf7e7b", size = 12259895, upload-time = "2026-03-17T22:05:28.812Z" }, + { url = "https://files.pythonhosted.org/packages/d7/38/31db1b232b4ba960065a90c1506ad7a56995cd8482033184e97fadca17cc/onnxruntime-1.24.4-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:cad1c2b3f455c55678ab2a8caa51fb420c25e6e3cf10f4c23653cdabedc8de78", size = 17341875, upload-time = "2026-03-17T22:05:51.669Z" }, + { url = "https://files.pythonhosted.org/packages/aa/60/c4d1c8043eb42f8a9aa9e931c8c293d289c48ff463267130eca97d13357f/onnxruntime-1.24.4-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1a5c5a544b22f90859c88617ecb30e161ee3349fcc73878854f43d77f00558b5", size = 15172485, upload-time = "2026-03-17T22:03:32.182Z" }, + { url = 
"https://files.pythonhosted.org/packages/6d/ab/5b68110e0460d73fad814d5bd11c7b1ddcce5c37b10177eb264d6a36e331/onnxruntime-1.24.4-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0d640eb9f3782689b55cfa715094474cd5662f2f137be6a6f847a594b6e9705c", size = 17244912, upload-time = "2026-03-17T22:04:37.251Z" }, + { url = "https://files.pythonhosted.org/packages/8b/f4/6b89e297b93704345f0f3f8c62229bee323ef25682a3f9b4f89a39324950/onnxruntime-1.24.4-cp312-cp312-win_amd64.whl", hash = "sha256:535b29475ca42b593c45fbb2152fbf1cdf3f287315bf650e6a724a0a1d065cdb", size = 12596856, upload-time = "2026-03-17T22:05:41.224Z" }, + { url = "https://files.pythonhosted.org/packages/43/06/8b8ec6e9e6a474fcd5d772453f627ad4549dfe3ab8c0bf70af5afcde551b/onnxruntime-1.24.4-cp312-cp312-win_arm64.whl", hash = "sha256:e6214096e14b7b52e3bee1903dc12dc7ca09cb65e26664668a4620cc5e6f9a90", size = 12270275, upload-time = "2026-03-17T22:05:31.132Z" }, ] [[package]] name = "onnxscript" -version = "0.6.0.dev20260116" +version = "0.6.3.dev20260411" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "ml-dtypes" }, @@ -1474,9 +1508,9 @@ dependencies = [ { name = "packaging" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/9c/6e/09c2f4317e661150d99ffe4816452438a9a4d93b97b54dc302a75f8cc4c7/onnxscript-0.6.0.dev20260116.tar.gz", hash = "sha256:6ec595f138f181028112b6803a1c15f9cd109d435b261a513f6e37d57439508a", size = 592926, upload-time = "2026-01-16T08:15:28.935Z" } +sdist = { url = "https://files.pythonhosted.org/packages/77/86/7d1ad52e8c1439b3455b1451fb237b2689aaa9df592d05d9788cee9f3f99/onnxscript-0.6.3.dev20260411.tar.gz", hash = "sha256:f0350d529df820d463b5b50a97bda05bebb7e4fa766bde75362309ea90a772a8", size = 609333, upload-time = "2026-04-11T07:23:50.8Z" } wheels = [ - { url = 
"https://files.pythonhosted.org/packages/0b/f3/5aca6afe74b83e7dd06156bd3b74dc014d90df9b6b7ed4c6d47953cc06bf/onnxscript-0.6.0.dev20260116-py3-none-any.whl", hash = "sha256:11cc94ce38b4d089383507af1420f70f874b0c1bbabf966d61218bfc9b856162", size = 690830, upload-time = "2026-01-16T08:15:31.069Z" }, + { url = "https://files.pythonhosted.org/packages/d2/00/8cc5d4c5fbdd0691bc5a0d578fa5fc818170eb6cef407224aa57cc7d29c3/onnxscript-0.6.3.dev20260411-py3-none-any.whl", hash = "sha256:af8b6074d3f886f5e0dbbca83436988e744c0732431b899133695a599d99bb78", size = 712093, upload-time = "2026-04-11T07:23:53.018Z" }, ] [[package]] @@ -1490,39 +1524,40 @@ wheels = [ [[package]] name = "packaging" -version = "25.0" +version = "26.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/a1/d4/1fc4078c65507b51b96ca8f8c3ba19e6a61c8253c72794544580a7b6c24d/packaging-25.0.tar.gz", hash = "sha256:d443872c98d677bf60f6a1f2f8c1cb748e8fe762d2bf9d3148b5599295b0fc4f", size = 165727, upload-time = "2025-04-19T11:48:59.673Z" } +sdist = { url = "https://files.pythonhosted.org/packages/65/ee/299d360cdc32edc7d2cf530f3accf79c4fca01e96ffc950d8a52213bd8e4/packaging-26.0.tar.gz", hash = "sha256:00243ae351a257117b6a241061796684b084ed1c516a08c48a3f7e147a9d80b4", size = 143416, upload-time = "2026-01-21T20:50:39.064Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/20/12/38679034af332785aac8774540895e234f4d07f7545804097de4b666afd8/packaging-25.0-py3-none-any.whl", hash = "sha256:29572ef2b1f17581046b3a2227d5c611fb25ec70ca1ba8554b24b0e69331a484", size = 66469, upload-time = "2025-04-19T11:48:57.875Z" }, + { url = "https://files.pythonhosted.org/packages/b7/b9/c538f279a4e237a006a2c98387d081e9eb060d203d8ed34467cc0f0b9b53/packaging-26.0-py3-none-any.whl", hash = "sha256:b36f1fef9334a5588b4166f8bcd26a14e521f2b55e6b9de3aaa80d3ff7a37529", size = 74366, upload-time = "2026-01-21T20:50:37.788Z" }, ] [[package]] name = "pandas" -version = "2.3.3" 
+version = "3.0.2" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "numpy" }, { name = "python-dateutil" }, - { name = "pytz" }, - { name = "tzdata" }, + { name = "tzdata", marker = "sys_platform == 'emscripten' or sys_platform == 'win32'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/33/01/d40b85317f86cf08d853a4f495195c73815fdf205eef3993821720274518/pandas-2.3.3.tar.gz", hash = "sha256:e05e1af93b977f7eafa636d043f9f94c7ee3ac81af99c13508215942e64c993b", size = 4495223, upload-time = "2025-09-29T23:34:51.853Z" } +sdist = { url = "https://files.pythonhosted.org/packages/da/99/b342345300f13440fe9fe385c3c481e2d9a595ee3bab4d3219247ac94e9a/pandas-3.0.2.tar.gz", hash = "sha256:f4753e73e34c8d83221ba58f232433fca2748be8b18dbca02d242ed153945043", size = 4645855, upload-time = "2026-03-31T06:48:30.816Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/c1/fa/7ac648108144a095b4fb6aa3de1954689f7af60a14cf25583f4960ecb878/pandas-2.3.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:602b8615ebcc4a0c1751e71840428ddebeb142ec02c786e8ad6b1ce3c8dec523", size = 11578790, upload-time = "2025-09-29T23:18:30.065Z" }, - { url = "https://files.pythonhosted.org/packages/9b/35/74442388c6cf008882d4d4bdfc4109be87e9b8b7ccd097ad1e7f006e2e95/pandas-2.3.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:8fe25fc7b623b0ef6b5009149627e34d2a4657e880948ec3c840e9402e5c1b45", size = 10833831, upload-time = "2025-09-29T23:38:56.071Z" }, - { url = "https://files.pythonhosted.org/packages/fe/e4/de154cbfeee13383ad58d23017da99390b91d73f8c11856f2095e813201b/pandas-2.3.3-cp311-cp311-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b468d3dad6ff947df92dcb32ede5b7bd41a9b3cceef0a30ed925f6d01fb8fa66", size = 12199267, upload-time = "2025-09-29T23:18:41.627Z" }, - { url = 
"https://files.pythonhosted.org/packages/bf/c9/63f8d545568d9ab91476b1818b4741f521646cbdd151c6efebf40d6de6f7/pandas-2.3.3-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b98560e98cb334799c0b07ca7967ac361a47326e9b4e5a7dfb5ab2b1c9d35a1b", size = 12789281, upload-time = "2025-09-29T23:18:56.834Z" }, - { url = "https://files.pythonhosted.org/packages/f2/00/a5ac8c7a0e67fd1a6059e40aa08fa1c52cc00709077d2300e210c3ce0322/pandas-2.3.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:1d37b5848ba49824e5c30bedb9c830ab9b7751fd049bc7914533e01c65f79791", size = 13240453, upload-time = "2025-09-29T23:19:09.247Z" }, - { url = "https://files.pythonhosted.org/packages/27/4d/5c23a5bc7bd209231618dd9e606ce076272c9bc4f12023a70e03a86b4067/pandas-2.3.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:db4301b2d1f926ae677a751eb2bd0e8c5f5319c9cb3f88b0becbbb0b07b34151", size = 13890361, upload-time = "2025-09-29T23:19:25.342Z" }, - { url = "https://files.pythonhosted.org/packages/8e/59/712db1d7040520de7a4965df15b774348980e6df45c129b8c64d0dbe74ef/pandas-2.3.3-cp311-cp311-win_amd64.whl", hash = "sha256:f086f6fe114e19d92014a1966f43a3e62285109afe874f067f5abbdcbb10e59c", size = 11348702, upload-time = "2025-09-29T23:19:38.296Z" }, - { url = "https://files.pythonhosted.org/packages/9c/fb/231d89e8637c808b997d172b18e9d4a4bc7bf31296196c260526055d1ea0/pandas-2.3.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:6d21f6d74eb1725c2efaa71a2bfc661a0689579b58e9c0ca58a739ff0b002b53", size = 11597846, upload-time = "2025-09-29T23:19:48.856Z" }, - { url = "https://files.pythonhosted.org/packages/5c/bd/bf8064d9cfa214294356c2d6702b716d3cf3bb24be59287a6a21e24cae6b/pandas-2.3.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:3fd2f887589c7aa868e02632612ba39acb0b8948faf5cc58f0850e165bd46f35", size = 10729618, upload-time = "2025-09-29T23:39:08.659Z" }, - { url = 
"https://files.pythonhosted.org/packages/57/56/cf2dbe1a3f5271370669475ead12ce77c61726ffd19a35546e31aa8edf4e/pandas-2.3.3-cp312-cp312-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ecaf1e12bdc03c86ad4a7ea848d66c685cb6851d807a26aa245ca3d2017a1908", size = 11737212, upload-time = "2025-09-29T23:19:59.765Z" }, - { url = "https://files.pythonhosted.org/packages/e5/63/cd7d615331b328e287d8233ba9fdf191a9c2d11b6af0c7a59cfcec23de68/pandas-2.3.3-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b3d11d2fda7eb164ef27ffc14b4fcab16a80e1ce67e9f57e19ec0afaf715ba89", size = 12362693, upload-time = "2025-09-29T23:20:14.098Z" }, - { url = "https://files.pythonhosted.org/packages/a6/de/8b1895b107277d52f2b42d3a6806e69cfef0d5cf1d0ba343470b9d8e0a04/pandas-2.3.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:a68e15f780eddf2b07d242e17a04aa187a7ee12b40b930bfdd78070556550e98", size = 12771002, upload-time = "2025-09-29T23:20:26.76Z" }, - { url = "https://files.pythonhosted.org/packages/87/21/84072af3187a677c5893b170ba2c8fbe450a6ff911234916da889b698220/pandas-2.3.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:371a4ab48e950033bcf52b6527eccb564f52dc826c02afd9a1bc0ab731bba084", size = 13450971, upload-time = "2025-09-29T23:20:41.344Z" }, - { url = "https://files.pythonhosted.org/packages/86/41/585a168330ff063014880a80d744219dbf1dd7a1c706e75ab3425a987384/pandas-2.3.3-cp312-cp312-win_amd64.whl", hash = "sha256:a16dcec078a01eeef8ee61bf64074b4e524a2a3f4b3be9326420cabe59c4778b", size = 10992722, upload-time = "2025-09-29T23:20:54.139Z" }, + { url = "https://files.pythonhosted.org/packages/97/35/6411db530c618e0e0005187e35aa02ce60ae4c4c4d206964a2f978217c27/pandas-3.0.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a727a73cbdba2f7458dc82449e2315899d5140b449015d822f515749a46cbbe0", size = 10326926, upload-time = "2026-03-31T06:46:08.29Z" }, + { url = 
"https://files.pythonhosted.org/packages/c4/d3/b7da1d5d7dbdc5ef52ed7debd2b484313b832982266905315dad5a0bf0b1/pandas-3.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:dbbd4aa20ca51e63b53bbde6a0fa4254b1aaabb74d2f542df7a7959feb1d760c", size = 9926987, upload-time = "2026-03-31T06:46:11.724Z" }, + { url = "https://files.pythonhosted.org/packages/52/77/9b1c2d6070b5dbe239a7bc889e21bfa58720793fb902d1e070695d87c6d0/pandas-3.0.2-cp311-cp311-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:339dda302bd8369dedeae979cb750e484d549b563c3f54f3922cb8ff4978c5eb", size = 10757067, upload-time = "2026-03-31T06:46:14.903Z" }, + { url = "https://files.pythonhosted.org/packages/20/17/ec40d981705654853726e7ac9aea9ddbb4a5d9cf54d8472222f4f3de06c2/pandas-3.0.2-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:61c2fd96d72b983a9891b2598f286befd4ad262161a609c92dc1652544b46b76", size = 11258787, upload-time = "2026-03-31T06:46:17.683Z" }, + { url = "https://files.pythonhosted.org/packages/90/e3/3f1126d43d3702ca8773871a81c9f15122a1f412342cc56284ffda5b1f70/pandas-3.0.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:c934008c733b8bbea273ea308b73b3156f0181e5b72960790b09c18a2794fe1e", size = 11771616, upload-time = "2026-03-31T06:46:20.532Z" }, + { url = "https://files.pythonhosted.org/packages/2e/cf/0f4e268e1f5062e44a6bda9f925806721cd4c95c2b808a4c82ebe914f96b/pandas-3.0.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:60a80bb4feacbef5e1447a3f82c33209c8b7e07f28d805cfd1fb951e5cb443aa", size = 12337623, upload-time = "2026-03-31T06:46:23.754Z" }, + { url = "https://files.pythonhosted.org/packages/44/a0/97a6339859d4acb2536efb24feb6708e82f7d33b2ed7e036f2983fcced82/pandas-3.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:ed72cb3f45190874eb579c64fa92d9df74e98fd63e2be7f62bce5ace0ade61df", size = 9897372, upload-time = "2026-03-31T06:46:26.703Z" }, + { url = 
"https://files.pythonhosted.org/packages/8f/eb/781516b808a99ddf288143cec46b342b3016c3414d137da1fdc3290d8860/pandas-3.0.2-cp311-cp311-win_arm64.whl", hash = "sha256:f12b1a9e332c01e09510586f8ca9b108fd631fd656af82e452d7315ef6df5f9f", size = 9154922, upload-time = "2026-03-31T06:46:30.284Z" }, + { url = "https://files.pythonhosted.org/packages/f3/b0/c20bd4d6d3f736e6bd6b55794e9cd0a617b858eaad27c8f410ea05d953b7/pandas-3.0.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:232a70ebb568c0c4d2db4584f338c1577d81e3af63292208d615907b698a0f18", size = 10347921, upload-time = "2026-03-31T06:46:33.36Z" }, + { url = "https://files.pythonhosted.org/packages/35/d0/4831af68ce30cc2d03c697bea8450e3225a835ef497d0d70f31b8cdde965/pandas-3.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:970762605cff1ca0d3f71ed4f3a769ea8f85fc8e6348f6e110b8fea7e6eb5a14", size = 9888127, upload-time = "2026-03-31T06:46:36.253Z" }, + { url = "https://files.pythonhosted.org/packages/61/a9/16ea9346e1fc4a96e2896242d9bc674764fb9049b0044c0132502f7a771e/pandas-3.0.2-cp312-cp312-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:aff4e6f4d722e0652707d7bcb190c445fe58428500c6d16005b02401764b1b3d", size = 10399577, upload-time = "2026-03-31T06:46:39.224Z" }, + { url = "https://files.pythonhosted.org/packages/c4/a8/3a61a721472959ab0ce865ef05d10b0d6bfe27ce8801c99f33d4fa996e65/pandas-3.0.2-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ef8b27695c3d3dc78403c9a7d5e59a62d5464a7e1123b4e0042763f7104dc74f", size = 10880030, upload-time = "2026-03-31T06:46:42.412Z" }, + { url = "https://files.pythonhosted.org/packages/da/65/7225c0ea4d6ce9cb2160a7fb7f39804871049f016e74782e5dade4d14109/pandas-3.0.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:f8d68083e49e16b84734eb1a4dcae4259a75c90fb6e2251ab9a00b61120c06ab", size = 11409468, upload-time = "2026-03-31T06:46:45.2Z" }, + { url = 
"https://files.pythonhosted.org/packages/fa/5b/46e7c76032639f2132359b5cf4c785dd8cf9aea5ea64699eac752f02b9db/pandas-3.0.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:32cc41f310ebd4a296d93515fcac312216adfedb1894e879303987b8f1e2b97d", size = 11936381, upload-time = "2026-03-31T06:46:48.293Z" }, + { url = "https://files.pythonhosted.org/packages/7b/8b/721a9cff6fa6a91b162eb51019c6243b82b3226c71bb6c8ef4a9bd65cbc6/pandas-3.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:a4785e1d6547d8427c5208b748ae2efb64659a21bd82bf440d4262d02bfa02a4", size = 9744993, upload-time = "2026-03-31T06:46:51.488Z" }, + { url = "https://files.pythonhosted.org/packages/d5/18/7f0bd34ae27b28159aa80f2a6799f47fda34f7fb938a76e20c7b7fe3b200/pandas-3.0.2-cp312-cp312-win_arm64.whl", hash = "sha256:08504503f7101300107ecdc8df73658e4347586db5cfdadabc1592e9d7e7a0fd", size = 9056118, upload-time = "2026-03-31T06:46:54.548Z" }, ] [[package]] @@ -1536,11 +1571,11 @@ wheels = [ [[package]] name = "parso" -version = "0.8.5" +version = "0.8.6" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/d4/de/53e0bcf53d13e005bd8c92e7855142494f41171b34c2536b86187474184d/parso-0.8.5.tar.gz", hash = "sha256:034d7354a9a018bdce352f48b2a8a450f05e9d6ee85db84764e9b6bd96dafe5a", size = 401205, upload-time = "2025-08-23T15:15:28.028Z" } +sdist = { url = "https://files.pythonhosted.org/packages/81/76/a1e769043c0c0c9fe391b702539d594731a4362334cdf4dc25d0c09761e7/parso-0.8.6.tar.gz", hash = "sha256:2b9a0332696df97d454fa67b81618fd69c35a7b90327cbe6ba5c92d2c68a7bfd", size = 401621, upload-time = "2026-02-09T15:45:24.425Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/16/32/f8e3c85d1d5250232a5d3477a2a28cc291968ff175caeadaf3cc19ce0e4a/parso-0.8.5-py2.py3-none-any.whl", hash = "sha256:646204b5ee239c396d040b90f9e272e9a8017c630092bf59980beb62fd033887", size = 106668, upload-time = "2025-08-23T15:15:25.663Z" }, + { url = 
"https://files.pythonhosted.org/packages/b6/61/fae042894f4296ec49e3f193aff5d7c18440da9e48102c3315e1bc4519a7/parso-0.8.6-py2.py3-none-any.whl", hash = "sha256:2c549f800b70a5c4952197248825584cb00f033b29c692671d3bf08bf380baff", size = 106894, upload-time = "2026-02-09T15:45:21.391Z" }, ] [[package]] @@ -1560,7 +1595,7 @@ name = "pexpect" version = "4.9.0" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "ptyprocess" }, + { name = "ptyprocess", marker = "sys_platform != 'emscripten' and sys_platform != 'win32'" }, ] sdist = { url = "https://files.pythonhosted.org/packages/42/92/cc564bf6381ff43ce1f4d06852fc19a2f11d180f23dc32d9588bee2f149d/pexpect-4.9.0.tar.gz", hash = "sha256:ee7d41123f3c9911050ea2c2dac107568dc43b2d3b0c7557a33212c398ead30f", size = 166450, upload-time = "2023-11-25T09:07:26.339Z" } wheels = [ @@ -1569,53 +1604,53 @@ wheels = [ [[package]] name = "pillow" -version = "12.1.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/d0/02/d52c733a2452ef1ffcc123b68e6606d07276b0e358db70eabad7e40042b7/pillow-12.1.0.tar.gz", hash = "sha256:5c5ae0a06e9ea030ab786b0251b32c7e4ce10e58d983c0d5c56029455180b5b9", size = 46977283, upload-time = "2026-01-02T09:13:29.892Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/43/c4/bf8328039de6cc22182c3ef007a2abfbbdab153661c0a9aa78af8d706391/pillow-12.1.0-cp311-cp311-macosx_10_10_x86_64.whl", hash = "sha256:a83e0850cb8f5ac975291ebfc4170ba481f41a28065277f7f735c202cd8e0af3", size = 5304057, upload-time = "2026-01-02T09:10:46.627Z" }, - { url = "https://files.pythonhosted.org/packages/43/06/7264c0597e676104cc22ca73ee48f752767cd4b1fe084662620b17e10120/pillow-12.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:b6e53e82ec2db0717eabb276aa56cf4e500c9a7cec2c2e189b55c24f65a3e8c0", size = 4657811, upload-time = "2026-01-02T09:10:49.548Z" }, - { url = 
"https://files.pythonhosted.org/packages/72/64/f9189e44474610daf83da31145fa56710b627b5c4c0b9c235e34058f6b31/pillow-12.1.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:40a8e3b9e8773876d6e30daed22f016509e3987bab61b3b7fe309d7019a87451", size = 6232243, upload-time = "2026-01-02T09:10:51.62Z" }, - { url = "https://files.pythonhosted.org/packages/ef/30/0df458009be6a4caca4ca2c52975e6275c387d4e5c95544e34138b41dc86/pillow-12.1.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:800429ac32c9b72909c671aaf17ecd13110f823ddb7db4dfef412a5587c2c24e", size = 8037872, upload-time = "2026-01-02T09:10:53.446Z" }, - { url = "https://files.pythonhosted.org/packages/e4/86/95845d4eda4f4f9557e25381d70876aa213560243ac1a6d619c46caaedd9/pillow-12.1.0-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0b022eaaf709541b391ee069f0022ee5b36c709df71986e3f7be312e46f42c84", size = 6345398, upload-time = "2026-01-02T09:10:55.426Z" }, - { url = "https://files.pythonhosted.org/packages/5c/1f/8e66ab9be3aaf1435bc03edd1ebdf58ffcd17f7349c1d970cafe87af27d9/pillow-12.1.0-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1f345e7bc9d7f368887c712aa5054558bad44d2a301ddf9248599f4161abc7c0", size = 7034667, upload-time = "2026-01-02T09:10:57.11Z" }, - { url = "https://files.pythonhosted.org/packages/f9/f6/683b83cb9b1db1fb52b87951b1c0b99bdcfceaa75febf11406c19f82cb5e/pillow-12.1.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d70347c8a5b7ccd803ec0c85c8709f036e6348f1e6a5bf048ecd9c64d3550b8b", size = 6458743, upload-time = "2026-01-02T09:10:59.331Z" }, - { url = "https://files.pythonhosted.org/packages/9a/7d/de833d63622538c1d58ce5395e7c6cb7e7dce80decdd8bde4a484e095d9f/pillow-12.1.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:1fcc52d86ce7a34fd17cb04e87cfdb164648a3662a6f20565910a99653d66c18", size = 7159342, upload-time = "2026-01-02T09:11:01.82Z" }, - { url = 
"https://files.pythonhosted.org/packages/8c/40/50d86571c9e5868c42b81fe7da0c76ca26373f3b95a8dd675425f4a92ec1/pillow-12.1.0-cp311-cp311-win32.whl", hash = "sha256:3ffaa2f0659e2f740473bcf03c702c39a8d4b2b7ffc629052028764324842c64", size = 6328655, upload-time = "2026-01-02T09:11:04.556Z" }, - { url = "https://files.pythonhosted.org/packages/6c/af/b1d7e301c4cd26cd45d4af884d9ee9b6fab893b0ad2450d4746d74a6968c/pillow-12.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:806f3987ffe10e867bab0ddad45df1148a2b98221798457fa097ad85d6e8bc75", size = 7031469, upload-time = "2026-01-02T09:11:06.538Z" }, - { url = "https://files.pythonhosted.org/packages/48/36/d5716586d887fb2a810a4a61518a327a1e21c8b7134c89283af272efe84b/pillow-12.1.0-cp311-cp311-win_arm64.whl", hash = "sha256:9f5fefaca968e700ad1a4a9de98bf0869a94e397fe3524c4c9450c1445252304", size = 2452515, upload-time = "2026-01-02T09:11:08.226Z" }, - { url = "https://files.pythonhosted.org/packages/20/31/dc53fe21a2f2996e1b7d92bf671cdb157079385183ef7c1ae08b485db510/pillow-12.1.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:a332ac4ccb84b6dde65dbace8431f3af08874bf9770719d32a635c4ef411b18b", size = 5262642, upload-time = "2026-01-02T09:11:10.138Z" }, - { url = "https://files.pythonhosted.org/packages/ab/c1/10e45ac9cc79419cedf5121b42dcca5a50ad2b601fa080f58c22fb27626e/pillow-12.1.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:907bfa8a9cb790748a9aa4513e37c88c59660da3bcfffbd24a7d9e6abf224551", size = 4657464, upload-time = "2026-01-02T09:11:12.319Z" }, - { url = "https://files.pythonhosted.org/packages/ad/26/7b82c0ab7ef40ebede7a97c72d473bda5950f609f8e0c77b04af574a0ddb/pillow-12.1.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:efdc140e7b63b8f739d09a99033aa430accce485ff78e6d311973a67b6bf3208", size = 6234878, upload-time = "2026-01-02T09:11:14.096Z" }, - { url = 
"https://files.pythonhosted.org/packages/76/25/27abc9792615b5e886ca9411ba6637b675f1b77af3104710ac7353fe5605/pillow-12.1.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:bef9768cab184e7ae6e559c032e95ba8d07b3023c289f79a2bd36e8bf85605a5", size = 8044868, upload-time = "2026-01-02T09:11:15.903Z" }, - { url = "https://files.pythonhosted.org/packages/0a/ea/f200a4c36d836100e7bc738fc48cd963d3ba6372ebc8298a889e0cfc3359/pillow-12.1.0-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:742aea052cf5ab5034a53c3846165bc3ce88d7c38e954120db0ab867ca242661", size = 6349468, upload-time = "2026-01-02T09:11:17.631Z" }, - { url = "https://files.pythonhosted.org/packages/11/8f/48d0b77ab2200374c66d344459b8958c86693be99526450e7aee714e03e4/pillow-12.1.0-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a6dfc2af5b082b635af6e08e0d1f9f1c4e04d17d4e2ca0ef96131e85eda6eb17", size = 7041518, upload-time = "2026-01-02T09:11:19.389Z" }, - { url = "https://files.pythonhosted.org/packages/1d/23/c281182eb986b5d31f0a76d2a2c8cd41722d6fb8ed07521e802f9bba52de/pillow-12.1.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:609e89d9f90b581c8d16358c9087df76024cf058fa693dd3e1e1620823f39670", size = 6462829, upload-time = "2026-01-02T09:11:21.28Z" }, - { url = "https://files.pythonhosted.org/packages/25/ef/7018273e0faac099d7b00982abdcc39142ae6f3bd9ceb06de09779c4a9d6/pillow-12.1.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:43b4899cfd091a9693a1278c4982f3e50f7fb7cff5153b05174b4afc9593b616", size = 7166756, upload-time = "2026-01-02T09:11:23.559Z" }, - { url = "https://files.pythonhosted.org/packages/8f/c8/993d4b7ab2e341fe02ceef9576afcf5830cdec640be2ac5bee1820d693d4/pillow-12.1.0-cp312-cp312-win32.whl", hash = "sha256:aa0c9cc0b82b14766a99fbe6084409972266e82f459821cd26997a488a7261a7", size = 6328770, upload-time = "2026-01-02T09:11:25.661Z" }, - { url = 
"https://files.pythonhosted.org/packages/a7/87/90b358775a3f02765d87655237229ba64a997b87efa8ccaca7dd3e36e7a7/pillow-12.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:d70534cea9e7966169ad29a903b99fc507e932069a881d0965a1a84bb57f6c6d", size = 7033406, upload-time = "2026-01-02T09:11:27.474Z" }, - { url = "https://files.pythonhosted.org/packages/5d/cf/881b457eccacac9e5b2ddd97d5071fb6d668307c57cbf4e3b5278e06e536/pillow-12.1.0-cp312-cp312-win_arm64.whl", hash = "sha256:65b80c1ee7e14a87d6a068dd3b0aea268ffcabfe0498d38661b00c5b4b22e74c", size = 2452612, upload-time = "2026-01-02T09:11:29.309Z" }, - { url = "https://files.pythonhosted.org/packages/8b/bc/224b1d98cffd7164b14707c91aac83c07b047fbd8f58eba4066a3e53746a/pillow-12.1.0-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:ca94b6aac0d7af2a10ba08c0f888b3d5114439b6b3ef39968378723622fed377", size = 5228605, upload-time = "2026-01-02T09:13:14.084Z" }, - { url = "https://files.pythonhosted.org/packages/0c/ca/49ca7769c4550107de049ed85208240ba0f330b3f2e316f24534795702ce/pillow-12.1.0-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:351889afef0f485b84078ea40fe33727a0492b9af3904661b0abbafee0355b72", size = 4622245, upload-time = "2026-01-02T09:13:15.964Z" }, - { url = "https://files.pythonhosted.org/packages/73/48/fac807ce82e5955bcc2718642b94b1bd22a82a6d452aea31cbb678cddf12/pillow-12.1.0-pp311-pypy311_pp73-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:bb0984b30e973f7e2884362b7d23d0a348c7143ee559f38ef3eaab640144204c", size = 5247593, upload-time = "2026-01-02T09:13:17.913Z" }, - { url = "https://files.pythonhosted.org/packages/d2/95/3e0742fe358c4664aed4fd05d5f5373dcdad0b27af52aa0972568541e3f4/pillow-12.1.0-pp311-pypy311_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:84cabc7095dd535ca934d57e9ce2a72ffd216e435a84acb06b2277b1de2689bd", size = 6989008, upload-time = "2026-01-02T09:13:20.083Z" }, - { url = 
"https://files.pythonhosted.org/packages/5a/74/fe2ac378e4e202e56d50540d92e1ef4ff34ed687f3c60f6a121bcf99437e/pillow-12.1.0-pp311-pypy311_pp73-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:53d8b764726d3af1a138dd353116f774e3862ec7e3794e0c8781e30db0f35dfc", size = 5313824, upload-time = "2026-01-02T09:13:22.405Z" }, - { url = "https://files.pythonhosted.org/packages/f3/77/2a60dee1adee4e2655ac328dd05c02a955c1cd683b9f1b82ec3feb44727c/pillow-12.1.0-pp311-pypy311_pp73-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:5da841d81b1a05ef940a8567da92decaa15bc4d7dedb540a8c219ad83d91808a", size = 5963278, upload-time = "2026-01-02T09:13:24.706Z" }, - { url = "https://files.pythonhosted.org/packages/2d/71/64e9b1c7f04ae0027f788a248e6297d7fcc29571371fe7d45495a78172c0/pillow-12.1.0-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:75af0b4c229ac519b155028fa1be632d812a519abba9b46b20e50c6caa184f19", size = 7029809, upload-time = "2026-01-02T09:13:26.541Z" }, +version = "12.2.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/8c/21/c2bcdd5906101a30244eaffc1b6e6ce71a31bd0742a01eb89e660ebfac2d/pillow-12.2.0.tar.gz", hash = "sha256:a830b1a40919539d07806aa58e1b114df53ddd43213d9c8b75847eee6c0182b5", size = 46987819, upload-time = "2026-04-01T14:46:17.687Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/68/e1/748f5663efe6edcfc4e74b2b93edfb9b8b99b67f21a854c3ae416500a2d9/pillow-12.2.0-cp311-cp311-macosx_10_10_x86_64.whl", hash = "sha256:8be29e59487a79f173507c30ddf57e733a357f67881430449bb32614075a40ab", size = 5354347, upload-time = "2026-04-01T14:42:44.255Z" }, + { url = "https://files.pythonhosted.org/packages/47/a1/d5ff69e747374c33a3b53b9f98cca7889fce1fd03d79cdc4e1bccc6c5a87/pillow-12.2.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:71cde9a1e1551df7d34a25462fc60325e8a11a82cc2e2f54578e5e9a1e153d65", size = 4695873, upload-time = "2026-04-01T14:42:46.452Z" }, + { url = 
"https://files.pythonhosted.org/packages/df/21/e3fbdf54408a973c7f7f89a23b2cb97a7ef30c61ab4142af31eee6aebc88/pillow-12.2.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:f490f9368b6fc026f021db16d7ec2fbf7d89e2edb42e8ec09d2c60505f5729c7", size = 6280168, upload-time = "2026-04-01T14:42:49.228Z" }, + { url = "https://files.pythonhosted.org/packages/d3/f1/00b7278c7dd52b17ad4329153748f87b6756ec195ff786c2bdf12518337d/pillow-12.2.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:8bd7903a5f2a4545f6fd5935c90058b89d30045568985a71c79f5fd6edf9b91e", size = 8088188, upload-time = "2026-04-01T14:42:51.735Z" }, + { url = "https://files.pythonhosted.org/packages/ad/cf/220a5994ef1b10e70e85748b75649d77d506499352be135a4989c957b701/pillow-12.2.0-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3997232e10d2920a68d25191392e3a4487d8183039e1c74c2297f00ed1c50705", size = 6394401, upload-time = "2026-04-01T14:42:54.343Z" }, + { url = "https://files.pythonhosted.org/packages/e9/bd/e51a61b1054f09437acfbc2ff9106c30d1eb76bc1453d428399946781253/pillow-12.2.0-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e74473c875d78b8e9d5da2a70f7099549f9eb37ded4e2f6a463e60125bccd176", size = 7079655, upload-time = "2026-04-01T14:42:56.954Z" }, + { url = "https://files.pythonhosted.org/packages/6b/3d/45132c57d5fb4b5744567c3817026480ac7fc3ce5d4c47902bc0e7f6f853/pillow-12.2.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:56a3f9c60a13133a98ecff6197af34d7824de9b7b38c3654861a725c970c197b", size = 6503105, upload-time = "2026-04-01T14:42:59.847Z" }, + { url = "https://files.pythonhosted.org/packages/7d/2e/9df2fc1e82097b1df3dce58dc43286aa01068e918c07574711fcc53e6fb4/pillow-12.2.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:90e6f81de50ad6b534cab6e5aef77ff6e37722b2f5d908686f4a5c9eba17a909", size = 7203402, upload-time = "2026-04-01T14:43:02.664Z" }, + { url = 
"https://files.pythonhosted.org/packages/bd/2e/2941e42858ebb67e50ae741473de81c2984e6eff7b397017623c676e2e8d/pillow-12.2.0-cp311-cp311-win32.whl", hash = "sha256:8c984051042858021a54926eb597d6ee3012393ce9c181814115df4c60b9a808", size = 6378149, upload-time = "2026-04-01T14:43:05.274Z" }, + { url = "https://files.pythonhosted.org/packages/69/42/836b6f3cd7f3e5fa10a1f1a5420447c17966044c8fbf589cc0452d5502db/pillow-12.2.0-cp311-cp311-win_amd64.whl", hash = "sha256:6e6b2a0c538fc200b38ff9eb6628228b77908c319a005815f2dde585a0664b60", size = 7082626, upload-time = "2026-04-01T14:43:08.557Z" }, + { url = "https://files.pythonhosted.org/packages/c2/88/549194b5d6f1f494b485e493edc6693c0a16f4ada488e5bd974ed1f42fad/pillow-12.2.0-cp311-cp311-win_arm64.whl", hash = "sha256:9a8a34cc89c67a65ea7437ce257cea81a9dad65b29805f3ecee8c8fe8ff25ffe", size = 2463531, upload-time = "2026-04-01T14:43:10.743Z" }, + { url = "https://files.pythonhosted.org/packages/58/be/7482c8a5ebebbc6470b3eb791812fff7d5e0216c2be3827b30b8bb6603ed/pillow-12.2.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:2d192a155bbcec180f8564f693e6fd9bccff5a7af9b32e2e4bf8c9c69dbad6b5", size = 5308279, upload-time = "2026-04-01T14:43:13.246Z" }, + { url = "https://files.pythonhosted.org/packages/d8/95/0a351b9289c2b5cbde0bacd4a83ebc44023e835490a727b2a3bd60ddc0f4/pillow-12.2.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f3f40b3c5a968281fd507d519e444c35f0ff171237f4fdde090dd60699458421", size = 4695490, upload-time = "2026-04-01T14:43:15.584Z" }, + { url = "https://files.pythonhosted.org/packages/de/af/4e8e6869cbed569d43c416fad3dc4ecb944cb5d9492defaed89ddd6fe871/pillow-12.2.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:03e7e372d5240cc23e9f07deca4d775c0817bffc641b01e9c3af208dbd300987", size = 6284462, upload-time = "2026-04-01T14:43:18.268Z" }, + { url = 
"https://files.pythonhosted.org/packages/e9/9e/c05e19657fd57841e476be1ab46c4d501bffbadbafdc31a6d665f8b737b6/pillow-12.2.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:b86024e52a1b269467a802258c25521e6d742349d760728092e1bc2d135b4d76", size = 8094744, upload-time = "2026-04-01T14:43:20.716Z" }, + { url = "https://files.pythonhosted.org/packages/2b/54/1789c455ed10176066b6e7e6da1b01e50e36f94ba584dc68d9eebfe9156d/pillow-12.2.0-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:7371b48c4fa448d20d2714c9a1f775a81155050d383333e0a6c15b1123dda005", size = 6398371, upload-time = "2026-04-01T14:43:23.443Z" }, + { url = "https://files.pythonhosted.org/packages/43/e3/fdc657359e919462369869f1c9f0e973f353f9a9ee295a39b1fea8ee1a77/pillow-12.2.0-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:62f5409336adb0663b7caa0da5c7d9e7bdbaae9ce761d34669420c2a801b2780", size = 7087215, upload-time = "2026-04-01T14:43:26.758Z" }, + { url = "https://files.pythonhosted.org/packages/8b/f8/2f6825e441d5b1959d2ca5adec984210f1ec086435b0ed5f52c19b3b8a6e/pillow-12.2.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:01afa7cf67f74f09523699b4e88c73fb55c13346d212a59a2db1f86b0a63e8c5", size = 6509783, upload-time = "2026-04-01T14:43:29.56Z" }, + { url = "https://files.pythonhosted.org/packages/67/f9/029a27095ad20f854f9dba026b3ea6428548316e057e6fc3545409e86651/pillow-12.2.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:fc3d34d4a8fbec3e88a79b92e5465e0f9b842b628675850d860b8bd300b159f5", size = 7212112, upload-time = "2026-04-01T14:43:32.091Z" }, + { url = "https://files.pythonhosted.org/packages/be/42/025cfe05d1be22dbfdb4f264fe9de1ccda83f66e4fc3aac94748e784af04/pillow-12.2.0-cp312-cp312-win32.whl", hash = "sha256:58f62cc0f00fd29e64b29f4fd923ffdb3859c9f9e6105bfc37ba1d08994e8940", size = 6378489, upload-time = "2026-04-01T14:43:34.601Z" }, + { url = 
"https://files.pythonhosted.org/packages/5d/7b/25a221d2c761c6a8ae21bfa3874988ff2583e19cf8a27bf2fee358df7942/pillow-12.2.0-cp312-cp312-win_amd64.whl", hash = "sha256:7f84204dee22a783350679a0333981df803dac21a0190d706a50475e361c93f5", size = 7084129, upload-time = "2026-04-01T14:43:37.213Z" }, + { url = "https://files.pythonhosted.org/packages/10/e1/542a474affab20fd4a0f1836cb234e8493519da6b76899e30bcc5d990b8b/pillow-12.2.0-cp312-cp312-win_arm64.whl", hash = "sha256:af73337013e0b3b46f175e79492d96845b16126ddf79c438d7ea7ff27783a414", size = 2463612, upload-time = "2026-04-01T14:43:39.421Z" }, + { url = "https://files.pythonhosted.org/packages/4e/b7/2437044fb910f499610356d1352e3423753c98e34f915252aafecc64889f/pillow-12.2.0-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:0538bd5e05efec03ae613fd89c4ce0368ecd2ba239cc25b9f9be7ed426b0af1f", size = 5273969, upload-time = "2026-04-01T14:45:55.538Z" }, + { url = "https://files.pythonhosted.org/packages/f6/f4/8316e31de11b780f4ac08ef3654a75555e624a98db1056ecb2122d008d5a/pillow-12.2.0-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:394167b21da716608eac917c60aa9b969421b5dcbbe02ae7f013e7b85811c69d", size = 4659674, upload-time = "2026-04-01T14:45:58.093Z" }, + { url = "https://files.pythonhosted.org/packages/d4/37/664fca7201f8bb2aa1d20e2c3d5564a62e6ae5111741966c8319ca802361/pillow-12.2.0-pp311-pypy311_pp73-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:5d04bfa02cc2d23b497d1e90a0f927070043f6cbf303e738300532379a4b4e0f", size = 5288479, upload-time = "2026-04-01T14:46:01.141Z" }, + { url = "https://files.pythonhosted.org/packages/49/62/5b0ed78fce87346be7a5cfcfaaad91f6a1f98c26f86bdbafa2066c647ef6/pillow-12.2.0-pp311-pypy311_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:0c838a5125cee37e68edec915651521191cef1e6aa336b855f495766e77a366e", size = 7032230, upload-time = "2026-04-01T14:46:03.874Z" }, + { url = 
"https://files.pythonhosted.org/packages/c3/28/ec0fc38107fc32536908034e990c47914c57cd7c5a3ece4d8d8f7ffd7e27/pillow-12.2.0-pp311-pypy311_pp73-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4a6c9fa44005fa37a91ebfc95d081e8079757d2e904b27103f4f5fa6f0bf78c0", size = 5355404, upload-time = "2026-04-01T14:46:06.33Z" }, + { url = "https://files.pythonhosted.org/packages/5e/8b/51b0eddcfa2180d60e41f06bd6d0a62202b20b59c68f5a132e615b75aecf/pillow-12.2.0-pp311-pypy311_pp73-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:25373b66e0dd5905ed63fa3cae13c82fbddf3079f2c8bf15c6fb6a35586324c1", size = 6002215, upload-time = "2026-04-01T14:46:08.83Z" }, + { url = "https://files.pythonhosted.org/packages/bc/60/5382c03e1970de634027cee8e1b7d39776b778b81812aaf45b694dfe9e28/pillow-12.2.0-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:bfa9c230d2fe991bed5318a5f119bd6780cda2915cca595393649fc118ab895e", size = 7080946, upload-time = "2026-04-01T14:46:11.734Z" }, ] [[package]] name = "platformdirs" -version = "4.5.1" +version = "4.9.6" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/cf/86/0248f086a84f01b37aaec0fa567b397df1a119f73c16f6c7a9aac73ea309/platformdirs-4.5.1.tar.gz", hash = "sha256:61d5cdcc6065745cdd94f0f878977f8de9437be93de97c1c12f853c9c0cdcbda", size = 21715, upload-time = "2025-12-05T13:52:58.638Z" } +sdist = { url = "https://files.pythonhosted.org/packages/9f/4a/0883b8e3802965322523f0b200ecf33d31f10991d0401162f4b23c698b42/platformdirs-4.9.6.tar.gz", hash = "sha256:3bfa75b0ad0db84096ae777218481852c0ebc6c727b3168c1b9e0118e458cf0a", size = 29400, upload-time = "2026-04-09T00:04:10.812Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/cb/28/3bfe2fa5a7b9c46fe7e13c97bda14c895fb10fa2ebf1d0abb90e0cea7ee1/platformdirs-4.5.1-py3-none-any.whl", hash = "sha256:d03afa3963c806a9bed9d5125c8f4cb2fdaf74a55ab60e5d59b3fde758104d31", size = 18731, upload-time = "2025-12-05T13:52:56.823Z" }, 
+ { url = "https://files.pythonhosted.org/packages/75/a6/a0a304dc33b49145b21f4808d763822111e67d1c3a32b524a1baf947b6e1/platformdirs-4.9.6-py3-none-any.whl", hash = "sha256:e61adb1d5e5cb3441b4b7710bea7e4c12250ca49439228cc1021c00dcfac0917", size = 21348, upload-time = "2026-04-09T00:04:09.463Z" }, ] [[package]] name = "plotnine" -version = "0.15.2" +version = "0.15.3" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "matplotlib" }, @@ -1625,9 +1660,9 @@ dependencies = [ { name = "scipy" }, { name = "statsmodels" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/11/14/3adedabe6b8710caee34e4ac9f4edc48218a381594ee1980c323b8866577/plotnine-0.15.2.tar.gz", hash = "sha256:ec2e4cdf2d022eb0dab63ef4aa0017ce0d84c60bd99d55093e72637fddf757e6", size = 6787690, upload-time = "2025-12-12T10:41:37.249Z" } +sdist = { url = "https://files.pythonhosted.org/packages/63/d5/4eeed62b101aa879b87e9bbc9d0650700b5ebd1ffa743902286b039135a2/plotnine-0.15.3.tar.gz", hash = "sha256:2e8130db4673e0daccb1fd1dfc9f2a6cd2e7843b14f861f4ab861dde1639045f", size = 6788365, upload-time = "2026-01-28T16:35:34.708Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/58/27/4e6ffe2f095fbfd6285343aa6114903a4cf011564b4f1f2bb706341472df/plotnine-0.15.2-py3-none-any.whl", hash = "sha256:7dc508bc51625b9b9f945e274d8ee4463cf30b280749190a5b707e6828003fa6", size = 1332822, upload-time = "2025-12-12T10:41:34.203Z" }, + { url = "https://files.pythonhosted.org/packages/d8/79/eb59d10e13b151a514a8af3b1dcefb5e998b6c8c26bf0cf82d2b98f02c23/plotnine-0.15.3-py3-none-any.whl", hash = "sha256:39fd2ab8b6465275c8a283ce20d4b743dd865e94c74ae7d7d6f21a7eb31f62c1", size = 1332831, upload-time = "2026-01-28T16:35:32.241Z" }, ] [[package]] @@ -1641,30 +1676,30 @@ wheels = [ [[package]] name = "polars" -version = "1.37.1" +version = "1.39.3" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "polars-runtime-32" }, ] -sdist = { url = 
"https://files.pythonhosted.org/packages/84/ae/dfebf31b9988c20998140b54d5b521f64ce08879f2c13d9b4d44d7c87e32/polars-1.37.1.tar.gz", hash = "sha256:0309e2a4633e712513401964b4d95452f124ceabf7aec6db50affb9ced4a274e", size = 715572, upload-time = "2026-01-12T23:27:03.267Z" } +sdist = { url = "https://files.pythonhosted.org/packages/93/ab/f19e592fce9e000da49c96bf35e77cef67f9cb4b040bfa538a2764c0263e/polars-1.39.3.tar.gz", hash = "sha256:2e016c7f3e8d14fa777ef86fe0477cec6c67023a20ba4c94d6e8431eefe4a63c", size = 728987, upload-time = "2026-03-20T11:16:24.836Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/08/75/ec73e38812bca7c2240aff481b9ddff20d1ad2f10dee4b3353f5eeaacdab/polars-1.37.1-py3-none-any.whl", hash = "sha256:377fed8939a2f1223c1563cfabdc7b4a3d6ff846efa1f2ddeb8644fafd9b1aff", size = 805749, upload-time = "2026-01-12T23:25:48.595Z" }, + { url = "https://files.pythonhosted.org/packages/b4/db/08f4ca10c5018813e7e0b59e4472302328b3d2ab1512f5a2157a814540e0/polars-1.39.3-py3-none-any.whl", hash = "sha256:c2b955ccc0a08a2bc9259785decf3d5c007b489b523bf2390cf21cec2bb82a56", size = 823985, upload-time = "2026-03-20T11:14:23.619Z" }, ] [[package]] name = "polars-runtime-32" -version = "1.37.1" +version = "1.39.3" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/40/0b/addabe5e8d28a5a4c9887a08907be7ddc3fce892dc38f37d14b055438a57/polars_runtime_32-1.37.1.tar.gz", hash = "sha256:68779d4a691da20a5eb767d74165a8f80a2bdfbde4b54acf59af43f7fa028d8f", size = 2818945, upload-time = "2026-01-12T23:27:04.653Z" } +sdist = { url = "https://files.pythonhosted.org/packages/17/39/c8688696bc22b6c501e3b82ef3be10e543c07a785af5660f30997cd22dd2/polars_runtime_32-1.39.3.tar.gz", hash = "sha256:c728e4f469cafab501947585f36311b8fb222d3e934c6209e83791e0df20b29d", size = 2872335, upload-time = "2026-03-20T11:16:26.581Z" } wheels = [ - { url = 
"https://files.pythonhosted.org/packages/2a/a2/e828ea9f845796de02d923edb790e408ca0b560cd68dbd74bb99a1b3c461/polars_runtime_32-1.37.1-cp310-abi3-macosx_10_12_x86_64.whl", hash = "sha256:0b8d4d73ea9977d3731927740e59d814647c5198bdbe359bcf6a8bfce2e79771", size = 43499912, upload-time = "2026-01-12T23:25:51.182Z" }, - { url = "https://files.pythonhosted.org/packages/7e/46/81b71b7aa9e3703ee6e4ef1f69a87e40f58ea7c99212bf49a95071e99c8c/polars_runtime_32-1.37.1-cp310-abi3-macosx_11_0_arm64.whl", hash = "sha256:c682bf83f5f352e5e02f5c16c652c48ca40442f07b236f30662b22217320ce76", size = 39695707, upload-time = "2026-01-12T23:25:54.289Z" }, - { url = "https://files.pythonhosted.org/packages/81/2e/20009d1fde7ee919e24040f5c87cb9d0e4f8e3f109b74ba06bc10c02459c/polars_runtime_32-1.37.1-cp310-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fc82b5bbe70ca1a4b764eed1419f6336752d6ba9fc1245388d7f8b12438afa2c", size = 41467034, upload-time = "2026-01-12T23:25:56.925Z" }, - { url = "https://files.pythonhosted.org/packages/eb/21/9b55bea940524324625b1e8fd96233290303eb1bf2c23b54573487bbbc25/polars_runtime_32-1.37.1-cp310-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a8362d11ac5193b994c7e9048ffe22ccfb976699cfbf6e128ce0302e06728894", size = 45142711, upload-time = "2026-01-12T23:26:00.817Z" }, - { url = "https://files.pythonhosted.org/packages/8c/25/c5f64461aeccdac6834a89f826d051ccd3b4ce204075e562c87a06ed2619/polars_runtime_32-1.37.1-cp310-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:04f5d5a2f013dca7391b7d8e7672fa6d37573a87f1d45d3dd5f0d9b5565a4b0f", size = 41638564, upload-time = "2026-01-12T23:26:04.186Z" }, - { url = "https://files.pythonhosted.org/packages/35/af/509d3cf6c45e764ccf856beaae26fc34352f16f10f94a7839b1042920a73/polars_runtime_32-1.37.1-cp310-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:fbfde7c0ca8209eeaed546e4a32cca1319189aa61c5f0f9a2b4494262bd0c689", size = 44721136, upload-time = "2026-01-12T23:26:07.088Z" }, - { url = 
"https://files.pythonhosted.org/packages/af/d1/5c0a83a625f72beef59394bebc57d12637997632a4f9d3ab2ffc2cc62bbf/polars_runtime_32-1.37.1-cp310-abi3-win_amd64.whl", hash = "sha256:da3d3642ae944e18dd17109d2a3036cb94ce50e5495c5023c77b1599d4c861bc", size = 44948288, upload-time = "2026-01-12T23:26:10.214Z" }, - { url = "https://files.pythonhosted.org/packages/10/f3/061bb702465904b6502f7c9081daee34b09ccbaa4f8c94cf43a2a3b6dd6f/polars_runtime_32-1.37.1-cp310-abi3-win_arm64.whl", hash = "sha256:55f2c4847a8d2e267612f564de7b753a4bde3902eaabe7b436a0a4abf75949a0", size = 41001914, upload-time = "2026-01-12T23:26:12.997Z" }, + { url = "https://files.pythonhosted.org/packages/3b/74/1b41205f7368c9375ab1dea91178eaa20435fe3eff036390a53a7660b416/polars_runtime_32-1.39.3-cp310-abi3-macosx_10_12_x86_64.whl", hash = "sha256:425c0b220b573fa097b4042edff73114cc6d23432a21dfd2dc41adf329d7d2e9", size = 45273243, upload-time = "2026-03-20T11:14:26.691Z" }, + { url = "https://files.pythonhosted.org/packages/90/bf/297716b3095fe719be20fcf7af1d2b6ab069c38199bbace2469608a69b3a/polars_runtime_32-1.39.3-cp310-abi3-macosx_11_0_arm64.whl", hash = "sha256:ef5884711e3c617d7dc93519a7d038e242f5741cfe5fe9afd32d58845d86c562", size = 40842924, upload-time = "2026-03-20T11:14:31.154Z" }, + { url = "https://files.pythonhosted.org/packages/3d/3e/e65236d9d0d9babfa0ecba593413c06530fca60a8feb8f66243aa5dba92e/polars_runtime_32-1.39.3-cp310-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:06b47f535eb1f97a9a1e5b0053ef50db3a4276e241178e37bbb1a38b1fa53b14", size = 43220650, upload-time = "2026-03-20T11:14:35.458Z" }, + { url = "https://files.pythonhosted.org/packages/b0/15/fc3e43f3fdf3f20b7dfb5abe871ab6162cf8fb4aeabf4cfad822d5dc4c79/polars_runtime_32-1.39.3-cp310-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8bc9e13dc1d2e828331f2fe8ccbc9757554dc4933a8d3e85e906b988178f95ed", size = 46877498, upload-time = "2026-03-20T11:14:40.14Z" }, + { url = 
"https://files.pythonhosted.org/packages/3c/81/bd5f895919e32c6ab0a7786cd0c0ca961cb03152c47c3645808b54383f31/polars_runtime_32-1.39.3-cp310-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:363d49e3a3e638fc943e2b9887940300a7d06789930855a178a4727949259dc2", size = 43380176, upload-time = "2026-03-20T11:14:45.566Z" }, + { url = "https://files.pythonhosted.org/packages/7a/3e/c86433c3b5ec0315bdfc7640d0c15d41f1216c0103a0eab9a9b5147d6c4c/polars_runtime_32-1.39.3-cp310-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:7c206bdcc7bc62ea038d6adea8e44b02f0e675e0191a54c810703b4895208ea4", size = 46485933, upload-time = "2026-03-20T11:14:51.155Z" }, + { url = "https://files.pythonhosted.org/packages/54/ce/200b310cf91f98e652eb6ea09fdb3a9718aa0293ebf113dce325797c8572/polars_runtime_32-1.39.3-cp310-abi3-win_amd64.whl", hash = "sha256:d66ca522517554a883446957539c40dc7b75eb0c2220357fb28bc8940d305339", size = 46995458, upload-time = "2026-03-20T11:14:56.074Z" }, + { url = "https://files.pythonhosted.org/packages/da/76/2d48927e0aa2abbdde08cbf4a2536883b73277d47fbeca95e952de86df34/polars_runtime_32-1.39.3-cp310-abi3-win_arm64.whl", hash = "sha256:f49f51461de63f13e5dd4eb080421c8f23f856945f3f8bd5b2b1f59da52c2860", size = 41857648, upload-time = "2026-03-20T11:15:01.142Z" }, ] [[package]] @@ -1685,11 +1720,11 @@ wheels = [ [[package]] name = "prometheus-client" -version = "0.24.1" +version = "0.25.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/f0/58/a794d23feb6b00fc0c72787d7e87d872a6730dd9ed7c7b3e954637d8f280/prometheus_client-0.24.1.tar.gz", hash = "sha256:7e0ced7fbbd40f7b84962d5d2ab6f17ef88a72504dcf7c0b40737b43b2a461f9", size = 85616, upload-time = "2026-01-14T15:26:26.965Z" } +sdist = { url = "https://files.pythonhosted.org/packages/1b/fb/d9aa83ffe43ce1f19e557c0971d04b90561b0cfd50762aafb01968285553/prometheus_client-0.25.0.tar.gz", hash = "sha256:5e373b75c31afb3c86f1a52fa1ad470c9aace18082d39ec0d2f918d11cc9ba28", size = 86035, 
upload-time = "2026-04-09T19:53:42.359Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/74/c3/24a2f845e3917201628ecaba4f18bab4d18a337834c1df2a159ee9d22a42/prometheus_client-0.24.1-py3-none-any.whl", hash = "sha256:150db128af71a5c2482b36e588fc8a6b95e498750da4b17065947c16070f4055", size = 64057, upload-time = "2026-01-14T15:26:24.42Z" }, + { url = "https://files.pythonhosted.org/packages/8d/9b/d4b1e644385499c8346fa9b622a3f030dce14cd6ef8a1871c221a17a67e7/prometheus_client-0.25.0-py3-none-any.whl", hash = "sha256:d5aec89e349a6ec230805d0df882f3807f74fd6c1a2fa86864e3c2279059fed1", size = 64154, upload-time = "2026-04-09T19:53:41.324Z" }, ] [[package]] @@ -1706,33 +1741,33 @@ wheels = [ [[package]] name = "protobuf" -version = "6.33.4" +version = "7.34.1" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/53/b8/cda15d9d46d03d4aa3a67cb6bffe05173440ccf86a9541afaf7ac59a1b6b/protobuf-6.33.4.tar.gz", hash = "sha256:dc2e61bca3b10470c1912d166fe0af67bfc20eb55971dcef8dfa48ce14f0ed91", size = 444346, upload-time = "2026-01-12T18:33:40.109Z" } +sdist = { url = "https://files.pythonhosted.org/packages/6b/6b/a0e95cad1ad7cc3f2c6821fcab91671bd5b78bd42afb357bb4765f29bc41/protobuf-7.34.1.tar.gz", hash = "sha256:9ce42245e704cc5027be797c1db1eb93184d44d1cdd71811fb2d9b25ad541280", size = 454708, upload-time = "2026-03-20T17:34:47.036Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/e0/be/24ef9f3095bacdf95b458543334d0c4908ccdaee5130420bf064492c325f/protobuf-6.33.4-cp310-abi3-win32.whl", hash = "sha256:918966612c8232fc6c24c78e1cd89784307f5814ad7506c308ee3cf86662850d", size = 425612, upload-time = "2026-01-12T18:33:29.656Z" }, - { url = "https://files.pythonhosted.org/packages/31/ad/e5693e1974a28869e7cd244302911955c1cebc0161eb32dfa2b25b6e96f0/protobuf-6.33.4-cp310-abi3-win_amd64.whl", hash = "sha256:8f11ffae31ec67fc2554c2ef891dcb561dae9a2a3ed941f9e134c2db06657dbc", size = 436962, upload-time = 
"2026-01-12T18:33:31.345Z" }, - { url = "https://files.pythonhosted.org/packages/66/15/6ee23553b6bfd82670207ead921f4d8ef14c107e5e11443b04caeb5ab5ec/protobuf-6.33.4-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:2fe67f6c014c84f655ee06f6f66213f9254b3a8b6bda6cda0ccd4232c73c06f0", size = 427612, upload-time = "2026-01-12T18:33:32.646Z" }, - { url = "https://files.pythonhosted.org/packages/2b/48/d301907ce6d0db75f959ca74f44b475a9caa8fcba102d098d3c3dd0f2d3f/protobuf-6.33.4-cp39-abi3-manylinux2014_aarch64.whl", hash = "sha256:757c978f82e74d75cba88eddec479df9b99a42b31193313b75e492c06a51764e", size = 324484, upload-time = "2026-01-12T18:33:33.789Z" }, - { url = "https://files.pythonhosted.org/packages/92/1c/e53078d3f7fe710572ab2dcffd993e1e3b438ae71cfc031b71bae44fcb2d/protobuf-6.33.4-cp39-abi3-manylinux2014_s390x.whl", hash = "sha256:c7c64f259c618f0bef7bee042075e390debbf9682334be2b67408ec7c1c09ee6", size = 339256, upload-time = "2026-01-12T18:33:35.231Z" }, - { url = "https://files.pythonhosted.org/packages/e8/8e/971c0edd084914f7ee7c23aa70ba89e8903918adca179319ee94403701d5/protobuf-6.33.4-cp39-abi3-manylinux2014_x86_64.whl", hash = "sha256:3df850c2f8db9934de4cf8f9152f8dc2558f49f298f37f90c517e8e5c84c30e9", size = 323311, upload-time = "2026-01-12T18:33:36.305Z" }, - { url = "https://files.pythonhosted.org/packages/75/b1/1dc83c2c661b4c62d56cc081706ee33a4fc2835bd90f965baa2663ef7676/protobuf-6.33.4-py3-none-any.whl", hash = "sha256:1fe3730068fcf2e595816a6c34fe66eeedd37d51d0400b72fabc848811fdc1bc", size = 170532, upload-time = "2026-01-12T18:33:39.199Z" }, + { url = "https://files.pythonhosted.org/packages/ec/11/3325d41e6ee15bf1125654301211247b042563bcc898784351252549a8ad/protobuf-7.34.1-cp310-abi3-macosx_10_9_universal2.whl", hash = "sha256:d8b2cc79c4d8f62b293ad9b11ec3aebce9af481fa73e64556969f7345ebf9fc7", size = 429247, upload-time = "2026-03-20T17:34:37.024Z" }, + { url = 
"https://files.pythonhosted.org/packages/eb/9d/aa69df2724ff63efa6f72307b483ce0827f4347cc6d6df24b59e26659fef/protobuf-7.34.1-cp310-abi3-manylinux2014_aarch64.whl", hash = "sha256:5185e0e948d07abe94bb76ec9b8416b604cfe5da6f871d67aad30cbf24c3110b", size = 325753, upload-time = "2026-03-20T17:34:38.751Z" }, + { url = "https://files.pythonhosted.org/packages/92/e8/d174c91fd48e50101943f042b09af9029064810b734e4160bbe282fa1caa/protobuf-7.34.1-cp310-abi3-manylinux2014_s390x.whl", hash = "sha256:403b093a6e28a960372b44e5eb081775c9b056e816a8029c61231743d63f881a", size = 340198, upload-time = "2026-03-20T17:34:39.871Z" }, + { url = "https://files.pythonhosted.org/packages/53/1b/3b431694a4dc6d37b9f653f0c64b0a0d9ec074ee810710c0c3da21d67ba7/protobuf-7.34.1-cp310-abi3-manylinux2014_x86_64.whl", hash = "sha256:8ff40ce8cd688f7265326b38d5a1bed9bfdf5e6723d49961432f83e21d5713e4", size = 324267, upload-time = "2026-03-20T17:34:41.1Z" }, + { url = "https://files.pythonhosted.org/packages/85/29/64de04a0ac142fb685fd09999bc3d337943fb386f3a0ec57f92fd8203f97/protobuf-7.34.1-cp310-abi3-win32.whl", hash = "sha256:34b84ce27680df7cca9f231043ada0daa55d0c44a2ddfaa58ec1d0d89d8bf60a", size = 426628, upload-time = "2026-03-20T17:34:42.536Z" }, + { url = "https://files.pythonhosted.org/packages/4d/87/cb5e585192a22b8bd457df5a2c16a75ea0db9674c3a0a39fc9347d84e075/protobuf-7.34.1-cp310-abi3-win_amd64.whl", hash = "sha256:e97b55646e6ce5cbb0954a8c28cd39a5869b59090dfaa7df4598a7fba869468c", size = 437901, upload-time = "2026-03-20T17:34:44.112Z" }, + { url = "https://files.pythonhosted.org/packages/88/95/608f665226bca68b736b79e457fded9a2a38c4f4379a4a7614303d9db3bc/protobuf-7.34.1-py3-none-any.whl", hash = "sha256:bb3812cd53aefea2b028ef42bd780f5b96407247f20c6ef7c679807e9d188f11", size = 170715, upload-time = "2026-03-20T17:34:45.384Z" }, ] [[package]] name = "psutil" -version = "7.2.1" +version = "7.2.2" source = { registry = "https://pypi.org/simple" } -sdist = { url = 
"https://files.pythonhosted.org/packages/73/cb/09e5184fb5fc0358d110fc3ca7f6b1d033800734d34cac10f4136cfac10e/psutil-7.2.1.tar.gz", hash = "sha256:f7583aec590485b43ca601dd9cea0dcd65bd7bb21d30ef4ddbf4ea6b5ed1bdd3", size = 490253, upload-time = "2025-12-29T08:26:00.169Z" } +sdist = { url = "https://files.pythonhosted.org/packages/aa/c6/d1ddf4abb55e93cebc4f2ed8b5d6dbad109ecb8d63748dd2b20ab5e57ebe/psutil-7.2.2.tar.gz", hash = "sha256:0746f5f8d406af344fd547f1c8daa5f5c33dbc293bb8d6a16d80b4bb88f59372", size = 493740, upload-time = "2026-01-28T18:14:54.428Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/c5/cf/5180eb8c8bdf6a503c6919f1da28328bd1e6b3b1b5b9d5b01ae64f019616/psutil-7.2.1-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:b2e953fcfaedcfbc952b44744f22d16575d3aa78eb4f51ae74165b4e96e55f42", size = 128137, upload-time = "2025-12-29T08:26:27.759Z" }, - { url = "https://files.pythonhosted.org/packages/c5/2c/78e4a789306a92ade5000da4f5de3255202c534acdadc3aac7b5458fadef/psutil-7.2.1-cp36-abi3-macosx_11_0_arm64.whl", hash = "sha256:05cc68dbb8c174828624062e73078e7e35406f4ca2d0866c272c2410d8ef06d1", size = 128947, upload-time = "2025-12-29T08:26:29.548Z" }, - { url = "https://files.pythonhosted.org/packages/29/f8/40e01c350ad9a2b3cb4e6adbcc8a83b17ee50dd5792102b6142385937db5/psutil-7.2.1-cp36-abi3-manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:5e38404ca2bb30ed7267a46c02f06ff842e92da3bb8c5bfdadbd35a5722314d8", size = 154694, upload-time = "2025-12-29T08:26:32.147Z" }, - { url = "https://files.pythonhosted.org/packages/06/e4/b751cdf839c011a9714a783f120e6a86b7494eb70044d7d81a25a5cd295f/psutil-7.2.1-cp36-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ab2b98c9fc19f13f59628d94df5cc4cc4844bc572467d113a8b517d634e362c6", size = 156136, upload-time = "2025-12-29T08:26:34.079Z" }, - { url = 
"https://files.pythonhosted.org/packages/44/ad/bbf6595a8134ee1e94a4487af3f132cef7fce43aef4a93b49912a48c3af7/psutil-7.2.1-cp36-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:f78baafb38436d5a128f837fab2d92c276dfb48af01a240b861ae02b2413ada8", size = 148108, upload-time = "2025-12-29T08:26:36.225Z" }, - { url = "https://files.pythonhosted.org/packages/1c/15/dd6fd869753ce82ff64dcbc18356093471a5a5adf4f77ed1f805d473d859/psutil-7.2.1-cp36-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:99a4cd17a5fdd1f3d014396502daa70b5ec21bf4ffe38393e152f8e449757d67", size = 147402, upload-time = "2025-12-29T08:26:39.21Z" }, - { url = "https://files.pythonhosted.org/packages/34/68/d9317542e3f2b180c4306e3f45d3c922d7e86d8ce39f941bb9e2e9d8599e/psutil-7.2.1-cp37-abi3-win_amd64.whl", hash = "sha256:b1b0671619343aa71c20ff9767eced0483e4fc9e1f489d50923738caf6a03c17", size = 136938, upload-time = "2025-12-29T08:26:41.036Z" }, - { url = "https://files.pythonhosted.org/packages/3e/73/2ce007f4198c80fcf2cb24c169884f833fe93fbc03d55d302627b094ee91/psutil-7.2.1-cp37-abi3-win_arm64.whl", hash = "sha256:0d67c1822c355aa6f7314d92018fb4268a76668a536f133599b91edd48759442", size = 133836, upload-time = "2025-12-29T08:26:43.086Z" }, + { url = "https://files.pythonhosted.org/packages/e7/36/5ee6e05c9bd427237b11b3937ad82bb8ad2752d72c6969314590dd0c2f6e/psutil-7.2.2-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:ed0cace939114f62738d808fdcecd4c869222507e266e574799e9c0faa17d486", size = 129090, upload-time = "2026-01-28T18:15:22.168Z" }, + { url = "https://files.pythonhosted.org/packages/80/c4/f5af4c1ca8c1eeb2e92ccca14ce8effdeec651d5ab6053c589b074eda6e1/psutil-7.2.2-cp36-abi3-macosx_11_0_arm64.whl", hash = "sha256:1a7b04c10f32cc88ab39cbf606e117fd74721c831c98a27dc04578deb0c16979", size = 129859, upload-time = "2026-01-28T18:15:23.795Z" }, + { url = 
"https://files.pythonhosted.org/packages/b5/70/5d8df3b09e25bce090399cf48e452d25c935ab72dad19406c77f4e828045/psutil-7.2.2-cp36-abi3-manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:076a2d2f923fd4821644f5ba89f059523da90dc9014e85f8e45a5774ca5bc6f9", size = 155560, upload-time = "2026-01-28T18:15:25.976Z" }, + { url = "https://files.pythonhosted.org/packages/63/65/37648c0c158dc222aba51c089eb3bdfa238e621674dc42d48706e639204f/psutil-7.2.2-cp36-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b0726cecd84f9474419d67252add4ac0cd9811b04d61123054b9fb6f57df6e9e", size = 156997, upload-time = "2026-01-28T18:15:27.794Z" }, + { url = "https://files.pythonhosted.org/packages/8e/13/125093eadae863ce03c6ffdbae9929430d116a246ef69866dad94da3bfbc/psutil-7.2.2-cp36-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:fd04ef36b4a6d599bbdb225dd1d3f51e00105f6d48a28f006da7f9822f2606d8", size = 148972, upload-time = "2026-01-28T18:15:29.342Z" }, + { url = "https://files.pythonhosted.org/packages/04/78/0acd37ca84ce3ddffaa92ef0f571e073faa6d8ff1f0559ab1272188ea2be/psutil-7.2.2-cp36-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:b58fabe35e80b264a4e3bb23e6b96f9e45a3df7fb7eed419ac0e5947c61e47cc", size = 148266, upload-time = "2026-01-28T18:15:31.597Z" }, + { url = "https://files.pythonhosted.org/packages/b4/90/e2159492b5426be0c1fef7acba807a03511f97c5f86b3caeda6ad92351a7/psutil-7.2.2-cp37-abi3-win_amd64.whl", hash = "sha256:eb7e81434c8d223ec4a219b5fc1c47d0417b12be7ea866e24fb5ad6e84b3d988", size = 137737, upload-time = "2026-01-28T18:15:33.849Z" }, + { url = "https://files.pythonhosted.org/packages/8c/c7/7bb2e321574b10df20cbde462a94e2b71d05f9bbda251ef27d104668306a/psutil-7.2.2-cp37-abi3-win_arm64.whl", hash = "sha256:8c233660f575a5a89e6d4cb65d9f938126312bca76d8fe087b947b3a1aaac9ee", size = 134617, upload-time = "2026-01-28T18:15:36.514Z" }, ] [[package]] @@ -1755,11 +1790,11 @@ wheels = [ [[package]] name = "pycparser" 
-version = "2.23" +version = "3.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/fe/cf/d2d3b9f5699fb1e4615c8e32ff220203e43b248e1dfcc6736ad9057731ca/pycparser-2.23.tar.gz", hash = "sha256:78816d4f24add8f10a06d6f05b4d424ad9e96cfebf68a4ddc99c65c0720d00c2", size = 173734, upload-time = "2025-09-09T13:23:47.91Z" } +sdist = { url = "https://files.pythonhosted.org/packages/1b/7d/92392ff7815c21062bea51aa7b87d45576f649f16458d78b7cf94b9ab2e6/pycparser-3.0.tar.gz", hash = "sha256:600f49d217304a5902ac3c37e1281c9fe94e4d0489de643a9504c5cdfdfc6b29", size = 103492, upload-time = "2026-01-21T14:26:51.89Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/a0/e3/59cd50310fc9b59512193629e1984c1f95e5c8ae6e5d8c69532ccc65a7fe/pycparser-2.23-py3-none-any.whl", hash = "sha256:e5c6e8d3fbad53479cab09ac03729e0a9faf2bee3db8208a550daf5af81a5934", size = 118140, upload-time = "2025-09-09T13:23:46.651Z" }, + { url = "https://files.pythonhosted.org/packages/0c/c3/44f3fbbfa403ea2a7c779186dc20772604442dde72947e7d01069cbe98e3/pycparser-3.0-py3-none-any.whl", hash = "sha256:b727414169a36b7d524c1c3e31839a521725078d7b2ff038656844266160a992", size = 48172, upload-time = "2026-01-21T14:26:50.693Z" }, ] [[package]] @@ -1834,29 +1869,20 @@ wheels = [ [[package]] name = "pygments" -version = "2.19.2" +version = "2.20.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/b0/77/a5b8c569bf593b0140bde72ea885a803b82086995367bf2037de0159d924/pygments-2.19.2.tar.gz", hash = "sha256:636cb2477cec7f8952536970bc533bc43743542f70392ae026374600add5b887", size = 4968631, upload-time = "2025-06-21T13:39:12.283Z" } +sdist = { url = "https://files.pythonhosted.org/packages/c3/b2/bc9c9196916376152d655522fdcebac55e66de6603a76a02bca1b6414f6c/pygments-2.20.0.tar.gz", hash = "sha256:6757cd03768053ff99f3039c1a36d6c0aa0b263438fcab17520b30a303a82b5f", size = 4955991, upload-time = 
"2026-03-29T13:29:33.898Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/c7/21/705964c7812476f378728bdf590ca4b771ec72385c533964653c68e86bdc/pygments-2.19.2-py3-none-any.whl", hash = "sha256:86540386c03d588bb81d44bc3928634ff26449851e99741617ecb9037ee5ec0b", size = 1225217, upload-time = "2025-06-21T13:39:07.939Z" }, + { url = "https://files.pythonhosted.org/packages/f4/7e/a72dd26f3b0f4f2bf1dd8923c85f7ceb43172af56d63c7383eb62b332364/pygments-2.20.0-py3-none-any.whl", hash = "sha256:81a9e26dd42fd28a23a2d169d86d7ac03b46e2f8b59ed4698fb4785f946d0176", size = 1231151, upload-time = "2026-03-29T13:29:30.038Z" }, ] [[package]] name = "pyparsing" -version = "3.3.1" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/33/c1/1d9de9aeaa1b89b0186e5fe23294ff6517fce1bc69149185577cd31016b2/pyparsing-3.3.1.tar.gz", hash = "sha256:47fad0f17ac1e2cad3de3b458570fbc9b03560aa029ed5e16ee5554da9a2251c", size = 1550512, upload-time = "2025-12-23T03:14:04.391Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/8b/40/2614036cdd416452f5bf98ec037f38a1afb17f327cb8e6b652d4729e0af8/pyparsing-3.3.1-py3-none-any.whl", hash = "sha256:023b5e7e5520ad96642e2c6db4cb683d3970bd640cdf7115049a6e9c3682df82", size = 121793, upload-time = "2025-12-23T03:14:02.103Z" }, -] - -[[package]] -name = "pyreadline3" -version = "3.5.4" +version = "3.3.2" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/0f/49/4cea918a08f02817aabae639e3d0ac046fef9f9180518a3ad394e22da148/pyreadline3-3.5.4.tar.gz", hash = "sha256:8d57d53039a1c75adba8e50dd3d992b28143480816187ea5efbd5c78e6c885b7", size = 99839, upload-time = "2024-09-19T02:40:10.062Z" } +sdist = { url = "https://files.pythonhosted.org/packages/f3/91/9c6ee907786a473bf81c5f53cf703ba0957b23ab84c264080fb5a450416f/pyparsing-3.3.2.tar.gz", hash = "sha256:c777f4d763f140633dcb6d8a3eda953bf7a214dc4eff598413c070bcdc117cbc", size = 6851574, 
upload-time = "2026-01-21T03:57:59.36Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/5a/dc/491b7661614ab97483abf2056be1deee4dc2490ecbf7bff9ab5cdbac86e1/pyreadline3-3.5.4-py3-none-any.whl", hash = "sha256:eaf8e6cc3c49bcccf145fc6067ba8643d1df34d604a1ec0eccbf7a18e6d3fae6", size = 83178, upload-time = "2024-09-19T02:40:08.598Z" }, + { url = "https://files.pythonhosted.org/packages/10/bd/c038d7cc38edc1aa5bf91ab8068b63d4308c66c4c8bb3cbba7dfbc049f9c/pyparsing-3.3.2-py3-none-any.whl", hash = "sha256:850ba148bd908d7e2411587e247a1e4f0327839c40e2e5e6d05a007ecc69911d", size = 122781, upload-time = "2026-01-21T03:57:55.912Z" }, ] [[package]] @@ -1871,7 +1897,7 @@ wheels = [ [[package]] name = "pytest" -version = "9.0.2" +version = "9.0.3" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "colorama", marker = "sys_platform == 'win32'" }, @@ -1880,9 +1906,9 @@ dependencies = [ { name = "pluggy" }, { name = "pygments" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/d1/db/7ef3487e0fb0049ddb5ce41d3a49c235bf9ad299b6a25d5780a89f19230f/pytest-9.0.2.tar.gz", hash = "sha256:75186651a92bd89611d1d9fc20f0b4345fd827c41ccd5c299a868a05d70edf11", size = 1568901, upload-time = "2025-12-06T21:30:51.014Z" } +sdist = { url = "https://files.pythonhosted.org/packages/7d/0d/549bd94f1a0a402dc8cf64563a117c0f3765662e2e668477624baeec44d5/pytest-9.0.3.tar.gz", hash = "sha256:b86ada508af81d19edeb213c681b1d48246c1a91d304c6c81a427674c17eb91c", size = 1572165, upload-time = "2026-04-07T17:16:18.027Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/3b/ab/b3226f0bd7cdcf710fbede2b3548584366da3b19b5021e74f5bde2a8fa3f/pytest-9.0.2-py3-none-any.whl", hash = "sha256:711ffd45bf766d5264d487b917733b453d917afd2b0ad65223959f59089f875b", size = 374801, upload-time = "2025-12-06T21:30:49.154Z" }, + { url = "https://files.pythonhosted.org/packages/d4/24/a372aaf5c9b7208e7112038812994107bc65a84cd00e0354a88c2c77a617/pytest-9.0.3-py3-none-any.whl", hash 
= "sha256:2c5efc453d45394fdd706ade797c0a81091eccd1d6e4bccfcd476e2b8e0ab5d9", size = 375249, upload-time = "2026-04-07T17:16:16.13Z" }, ] [[package]] @@ -1898,31 +1924,37 @@ wheels = [ ] [[package]] -name = "python-json-logger" -version = "4.0.0" +name = "python-discovery" +version = "1.2.2" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/29/bf/eca6a3d43db1dae7070f70e160ab20b807627ba953663ba07928cdd3dc58/python_json_logger-4.0.0.tar.gz", hash = "sha256:f58e68eb46e1faed27e0f574a55a0455eecd7b8a5b88b85a784519ba3cff047f", size = 17683, upload-time = "2025-10-06T04:15:18.984Z" } +dependencies = [ + { name = "filelock" }, + { name = "platformdirs" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/de/ef/3bae0e537cfe91e8431efcba4434463d2c5a65f5a89edd47c6cf2f03c55f/python_discovery-1.2.2.tar.gz", hash = "sha256:876e9c57139eb757cb5878cbdd9ae5379e5d96266c99ef731119e04fffe533bb", size = 58872, upload-time = "2026-04-07T17:28:49.249Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/51/e5/fecf13f06e5e5f67e8837d777d1bc43fac0ed2b77a676804df5c34744727/python_json_logger-4.0.0-py3-none-any.whl", hash = "sha256:af09c9daf6a813aa4cc7180395f50f2a9e5fa056034c9953aec92e381c5ba1e2", size = 15548, upload-time = "2025-10-06T04:15:17.553Z" }, + { url = "https://files.pythonhosted.org/packages/d8/db/795879cc3ddfe338599bddea6388cc5100b088db0a4caf6e6c1af1c27e04/python_discovery-1.2.2-py3-none-any.whl", hash = "sha256:e1ae95d9af875e78f15e19aed0c6137ab1bb49c200f21f5061786490c9585c7a", size = 31894, upload-time = "2026-04-07T17:28:48.09Z" }, ] [[package]] -name = "pytz" -version = "2025.2" +name = "python-json-logger" +version = "4.1.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/f8/bf/abbd3cdfb8fbc7fb3d4d38d320f2441b1e7cbe29be4f23797b4a2b5d8aac/pytz-2025.2.tar.gz", hash = "sha256:360b9e3dbb49a209c21ad61809c7fb453643e048b38924c765813546746e81c3", 
size = 320884, upload-time = "2025-03-25T02:25:00.538Z" } +sdist = { url = "https://files.pythonhosted.org/packages/f7/ff/3cc9165fd44106973cd7ac9facb674a65ed853494592541d339bdc9a30eb/python_json_logger-4.1.0.tar.gz", hash = "sha256:b396b9e3ed782b09ff9d6e4f1683d46c83ad0d35d2e407c09a9ebbf038f88195", size = 17573, upload-time = "2026-03-29T04:39:56.805Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/81/c4/34e93fe5f5429d7570ec1fa436f1986fb1f00c3e0f43a589fe2bbcd22c3f/pytz-2025.2-py2.py3-none-any.whl", hash = "sha256:5ddf76296dd8c44c26eb8f4b6f35488f3ccbf6fbbd7adee0b7262d43f0ec2f00", size = 509225, upload-time = "2025-03-25T02:24:58.468Z" }, + { url = "https://files.pythonhosted.org/packages/27/be/0631a861af4d1c875f096c07d34e9a63639560a717130e7a87cbc82b7e3f/python_json_logger-4.1.0-py3-none-any.whl", hash = "sha256:132994765cf75bf44554be9aa49b06ef2345d23661a96720262716438141b6b2", size = 15021, upload-time = "2026-03-29T04:39:55.266Z" }, ] [[package]] name = "pywinpty" -version = "3.0.2" +version = "3.0.3" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/f3/bb/a7cc2967c5c4eceb6cc49cfe39447d4bfc56e6c865e7c2249b6eb978935f/pywinpty-3.0.2.tar.gz", hash = "sha256:1505cc4cb248af42cb6285a65c9c2086ee9e7e574078ee60933d5d7fa86fb004", size = 30669, upload-time = "2025-10-03T21:16:29.205Z" } +sdist = { url = "https://files.pythonhosted.org/packages/f7/54/37c7370ba91f579235049dc26cd2c5e657d2a943e01820844ffc81f32176/pywinpty-3.0.3.tar.gz", hash = "sha256:523441dc34d231fb361b4b00f8c99d3f16de02f5005fd544a0183112bcc22412", size = 31309, upload-time = "2026-02-04T21:51:09.524Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/a6/a1/409c1651c9f874d598c10f51ff586c416625601df4bca315d08baec4c3e3/pywinpty-3.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:327790d70e4c841ebd9d0f295a780177149aeb405bca44c7115a3de5c2054b23", size = 2050304, upload-time = "2025-10-03T21:19:29.466Z" }, - { url = 
"https://files.pythonhosted.org/packages/02/4e/1098484e042c9485f56f16eb2b69b43b874bd526044ee401512234cf9e04/pywinpty-3.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:99fdd9b455f0ad6419aba6731a7a0d2f88ced83c3c94a80ff9533d95fa8d8a9e", size = 2050391, upload-time = "2025-10-03T21:19:01.642Z" }, + { url = "https://files.pythonhosted.org/packages/79/c3/3e75075c7f71735f22b66fab0481f2c98e3a4d58cba55cb50ba29114bcf6/pywinpty-3.0.3-cp311-cp311-win_amd64.whl", hash = "sha256:dff25a9a6435f527d7c65608a7e62783fc12076e7d44487a4911ee91be5a8ac8", size = 2114430, upload-time = "2026-02-04T21:54:19.485Z" }, + { url = "https://files.pythonhosted.org/packages/8d/1e/8a54166a8c5e4f5cb516514bdf4090be4d51a71e8d9f6d98c0aa00fe45d4/pywinpty-3.0.3-cp311-cp311-win_arm64.whl", hash = "sha256:fbc1e230e5b193eef4431cba3f39996a288f9958f9c9f092c8a961d930ee8f68", size = 236191, upload-time = "2026-02-04T21:50:36.239Z" }, + { url = "https://files.pythonhosted.org/packages/7c/d4/aeb5e1784d2c5bff6e189138a9ca91a090117459cea0c30378e1f2db3d54/pywinpty-3.0.3-cp312-cp312-win_amd64.whl", hash = "sha256:c9081df0e49ffa86d15db4a6ba61530630e48707f987df42c9d3313537e81fc0", size = 2113098, upload-time = "2026-02-04T21:54:37.711Z" }, + { url = "https://files.pythonhosted.org/packages/b9/53/7278223c493ccfe4883239cf06c823c56460a8010e0fc778eef67858dc14/pywinpty-3.0.3-cp312-cp312-win_arm64.whl", hash = "sha256:15e79d870e18b678fb8a5a6105fd38496b55697c66e6fc0378236026bc4d59e9", size = 234901, upload-time = "2026-02-04T21:53:31.35Z" }, ] [[package]] @@ -1990,11 +2022,11 @@ wheels = [ [[package]] name = "readchar" -version = "4.2.1" +version = "4.2.2" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/dd/f8/8657b8cbb4ebeabfbdf991ac40eca8a1d1bd012011bd44ad1ed10f5cb494/readchar-4.2.1.tar.gz", hash = "sha256:91ce3faf07688de14d800592951e5575e9c7a3213738ed01d394dcc949b79adb", size = 9685, upload-time = "2024-11-04T18:28:07.757Z" } +sdist = { url = 
"https://files.pythonhosted.org/packages/ed/49/a10341024c45bed95d13197ec9ef0f4e2fd10b5ca6e7f8d7684d18082398/readchar-4.2.2.tar.gz", hash = "sha256:e3b270fe16fc90c50ac79107700330a133dd4c63d22939f5b03b4f24564d5dd8", size = 9762, upload-time = "2026-04-06T19:45:54.226Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/a9/10/e4b1e0e5b6b6745c8098c275b69bc9d73e9542d5c7da4f137542b499ed44/readchar-4.2.1-py3-none-any.whl", hash = "sha256:a769305cd3994bb5fa2764aa4073452dc105a4ec39068ffe6efd3c20c60acc77", size = 9350, upload-time = "2024-11-04T18:28:02.859Z" }, + { url = "https://files.pythonhosted.org/packages/3d/ca/36133653e00939922dd1416f4c56177361289172a30563fcb9552c9ccde4/readchar-4.2.2-py3-none-any.whl", hash = "sha256:92daf7e42c52b0787e6c75d01ecfb9a94f4ceff3764958b570c1dddedd47b200", size = 9401, upload-time = "2026-04-06T19:45:52.993Z" }, ] [[package]] @@ -2013,7 +2045,7 @@ wheels = [ [[package]] name = "requests" -version = "2.32.5" +version = "2.33.1" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "certifi" }, @@ -2021,9 +2053,9 @@ dependencies = [ { name = "idna" }, { name = "urllib3" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/c9/74/b3ff8e6c8446842c3f5c837e9c3dfcfe2018ea6ecef224c710c85ef728f4/requests-2.32.5.tar.gz", hash = "sha256:dbba0bac56e100853db0ea71b82b4dfd5fe2bf6d3754a8893c3af500cec7d7cf", size = 134517, upload-time = "2025-08-18T20:46:02.573Z" } +sdist = { url = "https://files.pythonhosted.org/packages/5f/a4/98b9c7c6428a668bf7e42ebb7c79d576a1c3c1e3ae2d47e674b468388871/requests-2.33.1.tar.gz", hash = "sha256:18817f8c57c6263968bc123d237e3b8b08ac046f5456bd1e307ee8f4250d3517", size = 134120, upload-time = "2026-03-30T16:09:15.531Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/1e/db/4254e3eabe8020b458f1a747140d32277ec7a271daf1d235b70dc0b4e6e3/requests-2.32.5-py3-none-any.whl", hash = "sha256:2462f94637a34fd532264295e186976db0f5d453d1cdd31473c85a6a161affb6", size = 64738, 
upload-time = "2025-08-18T20:46:00.542Z" }, + { url = "https://files.pythonhosted.org/packages/d7/8e/7540e8a2036f79a125c1d2ebadf69ed7901608859186c856fa0388ef4197/requests-2.33.1-py3-none-any.whl", hash = "sha256:4e6d1ef462f3626a1f0a0a9c42dd93c63bad33f9f1c1937509b8c5c8718ab56a", size = 64947, upload-time = "2026-03-30T16:09:13.83Z" }, ] [[package]] @@ -2061,15 +2093,15 @@ wheels = [ [[package]] name = "rich" -version = "14.2.0" +version = "14.3.4" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "markdown-it-py" }, { name = "pygments" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/fb/d2/8920e102050a0de7bfabeb4c4614a49248cf8d5d7a8d01885fbb24dc767a/rich-14.2.0.tar.gz", hash = "sha256:73ff50c7c0c1c77c8243079283f4edb376f0f6442433aecb8ce7e6d0b92d1fe4", size = 219990, upload-time = "2025-10-09T14:16:53.064Z" } +sdist = { url = "https://files.pythonhosted.org/packages/e9/67/cae617f1351490c25a4b8ac3b8b63a4dda609295d8222bad12242dfdc629/rich-14.3.4.tar.gz", hash = "sha256:817e02727f2b25b40ef56f5aa2217f400c8489f79ca8f46ea2b70dd5e14558a9", size = 230524, upload-time = "2026-04-11T02:57:45.419Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/25/7a/b0178788f8dc6cafce37a212c99565fa1fe7872c70c6c9c1e1a372d9d88f/rich-14.2.0-py3-none-any.whl", hash = "sha256:76bc51fe2e57d2b1be1f96c524b890b816e334ab4c1e45888799bfaab0021edd", size = 243393, upload-time = "2025-10-09T14:16:51.245Z" }, + { url = "https://files.pythonhosted.org/packages/b3/76/6d163cfac87b632216f71879e6b2cf17163f773ff59c00b5ff4900a80fa3/rich-14.3.4-py3-none-any.whl", hash = "sha256:07e7adb4690f68864777b1450859253bed81a99a31ac321ac1817b2313558952", size = 310480, upload-time = "2026-04-11T02:57:47.484Z" }, ] [[package]] @@ -2124,28 +2156,27 @@ wheels = [ [[package]] name = "ruff" -version = "0.14.13" -source = { registry = "https://pypi.org/simple" } -sdist = { url = 
"https://files.pythonhosted.org/packages/50/0a/1914efb7903174b381ee2ffeebb4253e729de57f114e63595114c8ca451f/ruff-0.14.13.tar.gz", hash = "sha256:83cd6c0763190784b99650a20fec7633c59f6ebe41c5cc9d45ee42749563ad47", size = 6059504, upload-time = "2026-01-15T20:15:16.918Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/c3/ae/0deefbc65ca74b0ab1fd3917f94dc3b398233346a74b8bbb0a916a1a6bf6/ruff-0.14.13-py3-none-linux_armv6l.whl", hash = "sha256:76f62c62cd37c276cb03a275b198c7c15bd1d60c989f944db08a8c1c2dbec18b", size = 13062418, upload-time = "2026-01-15T20:14:50.779Z" }, - { url = "https://files.pythonhosted.org/packages/47/df/5916604faa530a97a3c154c62a81cb6b735c0cb05d1e26d5ad0f0c8ac48a/ruff-0.14.13-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:914a8023ece0528d5cc33f5a684f5f38199bbb566a04815c2c211d8f40b5d0ed", size = 13442344, upload-time = "2026-01-15T20:15:07.94Z" }, - { url = "https://files.pythonhosted.org/packages/4c/f3/e0e694dd69163c3a1671e102aa574a50357536f18a33375050334d5cd517/ruff-0.14.13-py3-none-macosx_11_0_arm64.whl", hash = "sha256:d24899478c35ebfa730597a4a775d430ad0d5631b8647a3ab368c29b7e7bd063", size = 12354720, upload-time = "2026-01-15T20:15:09.854Z" }, - { url = "https://files.pythonhosted.org/packages/c3/e8/67f5fcbbaee25e8fc3b56cc33e9892eca7ffe09f773c8e5907757a7e3bdb/ruff-0.14.13-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9aaf3870f14d925bbaf18b8a2347ee0ae7d95a2e490e4d4aea6813ed15ebc80e", size = 12774493, upload-time = "2026-01-15T20:15:20.908Z" }, - { url = "https://files.pythonhosted.org/packages/6b/ce/d2e9cb510870b52a9565d885c0d7668cc050e30fa2c8ac3fb1fda15c083d/ruff-0.14.13-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ac5b7f63dd3b27cc811850f5ffd8fff845b00ad70e60b043aabf8d6ecc304e09", size = 12815174, upload-time = "2026-01-15T20:15:05.74Z" }, - { url = 
"https://files.pythonhosted.org/packages/88/00/c38e5da58beebcf4fa32d0ddd993b63dfacefd02ab7922614231330845bf/ruff-0.14.13-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:78d2b1097750d90ba82ce4ba676e85230a0ed694178ca5e61aa9b459970b3eb9", size = 13680909, upload-time = "2026-01-15T20:15:14.537Z" }, - { url = "https://files.pythonhosted.org/packages/61/61/cd37c9dd5bd0a3099ba79b2a5899ad417d8f3b04038810b0501a80814fd7/ruff-0.14.13-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:7d0bf87705acbbcb8d4c24b2d77fbb73d40210a95c3903b443cd9e30824a5032", size = 15144215, upload-time = "2026-01-15T20:15:22.886Z" }, - { url = "https://files.pythonhosted.org/packages/56/8a/85502d7edbf98c2df7b8876f316c0157359165e16cdf98507c65c8d07d3d/ruff-0.14.13-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a3eb5da8e2c9e9f13431032fdcbe7681de9ceda5835efee3269417c13f1fed5c", size = 14706067, upload-time = "2026-01-15T20:14:48.271Z" }, - { url = "https://files.pythonhosted.org/packages/7e/2f/de0df127feb2ee8c1e54354dc1179b4a23798f0866019528c938ba439aca/ruff-0.14.13-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:642442b42957093811cd8d2140dfadd19c7417030a7a68cf8d51fcdd5f217427", size = 14133916, upload-time = "2026-01-15T20:14:57.357Z" }, - { url = "https://files.pythonhosted.org/packages/0d/77/9b99686bb9fe07a757c82f6f95e555c7a47801a9305576a9c67e0a31d280/ruff-0.14.13-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4acdf009f32b46f6e8864af19cbf6841eaaed8638e65c8dac845aea0d703c841", size = 13859207, upload-time = "2026-01-15T20:14:55.111Z" }, - { url = "https://files.pythonhosted.org/packages/7d/46/2bdcb34a87a179a4d23022d818c1c236cb40e477faf0d7c9afb6813e5876/ruff-0.14.13-py3-none-manylinux_2_31_riscv64.whl", hash = "sha256:591a7f68860ea4e003917d19b5c4f5ac39ff558f162dc753a2c5de897fd5502c", size = 14043686, upload-time = "2026-01-15T20:14:52.841Z" }, - { url = 
"https://files.pythonhosted.org/packages/1a/a9/5c6a4f56a0512c691cf143371bcf60505ed0f0860f24a85da8bd123b2bf1/ruff-0.14.13-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:774c77e841cc6e046fc3e91623ce0903d1cd07e3a36b1a9fe79b81dab3de506b", size = 12663837, upload-time = "2026-01-15T20:15:18.921Z" }, - { url = "https://files.pythonhosted.org/packages/fe/bb/b920016ece7651fa7fcd335d9d199306665486694d4361547ccb19394c44/ruff-0.14.13-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:61f4e40077a1248436772bb6512db5fc4457fe4c49e7a94ea7c5088655dd21ae", size = 12805867, upload-time = "2026-01-15T20:14:59.272Z" }, - { url = "https://files.pythonhosted.org/packages/7d/b3/0bd909851e5696cd21e32a8fc25727e5f58f1934b3596975503e6e85415c/ruff-0.14.13-py3-none-musllinux_1_2_i686.whl", hash = "sha256:6d02f1428357fae9e98ac7aa94b7e966fd24151088510d32cf6f902d6c09235e", size = 13208528, upload-time = "2026-01-15T20:15:03.732Z" }, - { url = "https://files.pythonhosted.org/packages/3b/3b/e2d94cb613f6bbd5155a75cbe072813756363eba46a3f2177a1fcd0cd670/ruff-0.14.13-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:e399341472ce15237be0c0ae5fbceca4b04cd9bebab1a2b2c979e015455d8f0c", size = 13929242, upload-time = "2026-01-15T20:15:11.918Z" }, - { url = "https://files.pythonhosted.org/packages/6a/c5/abd840d4132fd51a12f594934af5eba1d5d27298a6f5b5d6c3be45301caf/ruff-0.14.13-py3-none-win32.whl", hash = "sha256:ef720f529aec113968b45dfdb838ac8934e519711da53a0456038a0efecbd680", size = 12919024, upload-time = "2026-01-15T20:14:43.647Z" }, - { url = "https://files.pythonhosted.org/packages/c2/55/6384b0b8ce731b6e2ade2b5449bf07c0e4c31e8a2e68ea65b3bafadcecc5/ruff-0.14.13-py3-none-win_amd64.whl", hash = "sha256:6070bd026e409734b9257e03e3ef18c6e1a216f0435c6751d7a8ec69cb59abef", size = 14097887, upload-time = "2026-01-15T20:15:01.48Z" }, - { url = "https://files.pythonhosted.org/packages/4d/e1/7348090988095e4e39560cfc2f7555b1b2a7357deba19167b600fdf5215d/ruff-0.14.13-py3-none-win_arm64.whl", hash = 
"sha256:7ab819e14f1ad9fe39f246cfcc435880ef7a9390d81a2b6ac7e01039083dd247", size = 13080224, upload-time = "2026-01-15T20:14:45.853Z" }, +version = "0.15.10" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/e7/d9/aa3f7d59a10ef6b14fe3431706f854dbf03c5976be614a9796d36326810c/ruff-0.15.10.tar.gz", hash = "sha256:d1f86e67ebfdef88e00faefa1552b5e510e1d35f3be7d423dc7e84e63788c94e", size = 4631728, upload-time = "2026-04-09T14:06:09.884Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/eb/00/a1c2fdc9939b2c03691edbda290afcd297f1f389196172826b03d6b6a595/ruff-0.15.10-py3-none-linux_armv6l.whl", hash = "sha256:0744e31482f8f7d0d10a11fcbf897af272fefdfcb10f5af907b18c2813ff4d5f", size = 10563362, upload-time = "2026-04-09T14:06:21.189Z" }, + { url = "https://files.pythonhosted.org/packages/5c/15/006990029aea0bebe9d33c73c3e28c80c391ebdba408d1b08496f00d422d/ruff-0.15.10-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:b1e7c16ea0ff5a53b7c2df52d947e685973049be1cdfe2b59a9c43601897b22e", size = 10951122, upload-time = "2026-04-09T14:06:02.236Z" }, + { url = "https://files.pythonhosted.org/packages/f2/c0/4ac978fe874d0618c7da647862afe697b281c2806f13ce904ad652fa87e4/ruff-0.15.10-py3-none-macosx_11_0_arm64.whl", hash = "sha256:93cc06a19e5155b4441dd72808fdf84290d84ad8a39ca3b0f994363ade4cebb1", size = 10314005, upload-time = "2026-04-09T14:06:00.026Z" }, + { url = "https://files.pythonhosted.org/packages/da/73/c209138a5c98c0d321266372fc4e33ad43d506d7e5dd817dd89b60a8548f/ruff-0.15.10-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:83e1dd04312997c99ea6965df66a14fb4f03ba978564574ffc68b0d61fd3989e", size = 10643450, upload-time = "2026-04-09T14:05:42.137Z" }, + { url = "https://files.pythonhosted.org/packages/ec/76/0deec355d8ec10709653635b1f90856735302cb8e149acfdf6f82a5feb70/ruff-0.15.10-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = 
"sha256:8154d43684e4333360fedd11aaa40b1b08a4e37d8ffa9d95fee6fa5b37b6fab1", size = 10379597, upload-time = "2026-04-09T14:05:49.984Z" }, + { url = "https://files.pythonhosted.org/packages/dc/be/86bba8fc8798c081e28a4b3bb6d143ccad3fd5f6f024f02002b8f08a9fa3/ruff-0.15.10-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8ab88715f3a6deb6bde6c227f3a123410bec7b855c3ae331b4c006189e895cef", size = 11146645, upload-time = "2026-04-09T14:06:12.246Z" }, + { url = "https://files.pythonhosted.org/packages/a8/89/140025e65911b281c57be1d385ba1d932c2366ca88ae6663685aed8d4881/ruff-0.15.10-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a768ff5969b4f44c349d48edf4ab4f91eddb27fd9d77799598e130fb628aa158", size = 12030289, upload-time = "2026-04-09T14:06:04.776Z" }, + { url = "https://files.pythonhosted.org/packages/88/de/ddacca9545a5e01332567db01d44bd8cf725f2db3b3d61a80550b48308ea/ruff-0.15.10-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0ee3ef42dab7078bda5ff6a1bcba8539e9857deb447132ad5566a038674540d0", size = 11496266, upload-time = "2026-04-09T14:05:55.485Z" }, + { url = "https://files.pythonhosted.org/packages/bc/bb/7ddb00a83760ff4a83c4e2fc231fd63937cc7317c10c82f583302e0f6586/ruff-0.15.10-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:51cb8cc943e891ba99989dd92d61e29b1d231e14811db9be6440ecf25d5c1609", size = 11256418, upload-time = "2026-04-09T14:05:57.69Z" }, + { url = "https://files.pythonhosted.org/packages/dc/8d/55de0d35aacf6cd50b6ee91ee0f291672080021896543776f4170fc5c454/ruff-0.15.10-py3-none-manylinux_2_31_riscv64.whl", hash = "sha256:e59c9bdc056a320fb9ea1700a8d591718b8faf78af065484e801258d3a76bc3f", size = 11288416, upload-time = "2026-04-09T14:05:44.695Z" }, + { url = "https://files.pythonhosted.org/packages/68/cf/9438b1a27426ec46a80e0a718093c7f958ef72f43eb3111862949ead3cc1/ruff-0.15.10-py3-none-musllinux_1_2_aarch64.whl", hash = 
"sha256:136c00ca2f47b0018b073f28cb5c1506642a830ea941a60354b0e8bc8076b151", size = 10621053, upload-time = "2026-04-09T14:05:52.782Z" }, + { url = "https://files.pythonhosted.org/packages/4c/50/e29be6e2c135e9cd4cb15fbade49d6a2717e009dff3766dd080fcb82e251/ruff-0.15.10-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:8b80a2f3c9c8a950d6237f2ca12b206bccff626139be9fa005f14feb881a1ae8", size = 10378302, upload-time = "2026-04-09T14:06:14.361Z" }, + { url = "https://files.pythonhosted.org/packages/18/2f/e0b36a6f99c51bb89f3a30239bc7bf97e87a37ae80aa2d6542d6e5150364/ruff-0.15.10-py3-none-musllinux_1_2_i686.whl", hash = "sha256:e3e53c588164dc025b671c9df2462429d60357ea91af7e92e9d56c565a9f1b07", size = 10850074, upload-time = "2026-04-09T14:06:16.581Z" }, + { url = "https://files.pythonhosted.org/packages/11/08/874da392558ce087a0f9b709dc6ec0d60cbc694c1c772dab8d5f31efe8cb/ruff-0.15.10-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:b0c52744cf9f143a393e284125d2576140b68264a93c6716464e129a3e9adb48", size = 11358051, upload-time = "2026-04-09T14:06:18.948Z" }, + { url = "https://files.pythonhosted.org/packages/e4/46/602938f030adfa043e67112b73821024dc79f3ab4df5474c25fa4c1d2d14/ruff-0.15.10-py3-none-win32.whl", hash = "sha256:d4272e87e801e9a27a2e8df7b21011c909d9ddd82f4f3281d269b6ba19789ca5", size = 10588964, upload-time = "2026-04-09T14:06:07.14Z" }, + { url = "https://files.pythonhosted.org/packages/25/b6/261225b875d7a13b33a6d02508c39c28450b2041bb01d0f7f1a83d569512/ruff-0.15.10-py3-none-win_amd64.whl", hash = "sha256:28cb32d53203242d403d819fd6983152489b12e4a3ae44993543d6fe62ab42ed", size = 11745044, upload-time = "2026-04-09T14:05:39.473Z" }, + { url = "https://files.pythonhosted.org/packages/58/ed/dea90a65b7d9e69888890fb14c90d7f51bf0c1e82ad800aeb0160e4bacfd/ruff-0.15.10-py3-none-win_arm64.whl", hash = "sha256:601d1610a9e1f1c2165a4f561eeaa2e2ea1e97f3287c5aa258d3dab8b57c6188", size = 11035607, upload-time = "2026-04-09T14:05:47.593Z" }, ] [[package]] @@ -2184,33 +2215,33 @@ 
wheels = [ [[package]] name = "scipy" -version = "1.17.0" +version = "1.17.1" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "numpy" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/56/3e/9cca699f3486ce6bc12ff46dc2031f1ec8eb9ccc9a320fdaf925f1417426/scipy-1.17.0.tar.gz", hash = "sha256:2591060c8e648d8b96439e111ac41fd8342fdeff1876be2e19dea3fe8930454e", size = 30396830, upload-time = "2026-01-10T21:34:23.009Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/1e/4b/c89c131aa87cad2b77a54eb0fb94d633a842420fa7e919dc2f922037c3d8/scipy-1.17.0-cp311-cp311-macosx_10_14_x86_64.whl", hash = "sha256:2abd71643797bd8a106dff97894ff7869eeeb0af0f7a5ce02e4227c6a2e9d6fd", size = 31381316, upload-time = "2026-01-10T21:24:33.42Z" }, - { url = "https://files.pythonhosted.org/packages/5e/5f/a6b38f79a07d74989224d5f11b55267714707582908a5f1ae854cf9a9b84/scipy-1.17.0-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:ef28d815f4d2686503e5f4f00edc387ae58dfd7a2f42e348bb53359538f01558", size = 27966760, upload-time = "2026-01-10T21:24:38.911Z" }, - { url = "https://files.pythonhosted.org/packages/c1/20/095ad24e031ee8ed3c5975954d816b8e7e2abd731e04f8be573de8740885/scipy-1.17.0-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:272a9f16d6bb4667e8b50d25d71eddcc2158a214df1b566319298de0939d2ab7", size = 20138701, upload-time = "2026-01-10T21:24:43.249Z" }, - { url = "https://files.pythonhosted.org/packages/89/11/4aad2b3858d0337756f3323f8960755704e530b27eb2a94386c970c32cbe/scipy-1.17.0-cp311-cp311-macosx_14_0_x86_64.whl", hash = "sha256:7204fddcbec2fe6598f1c5fdf027e9f259106d05202a959a9f1aecf036adc9f6", size = 22480574, upload-time = "2026-01-10T21:24:47.266Z" }, - { url = "https://files.pythonhosted.org/packages/85/bd/f5af70c28c6da2227e510875cadf64879855193a687fb19951f0f44cfd6b/scipy-1.17.0-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:fc02c37a5639ee67d8fb646ffded6d793c06c5622d36b35cfa8fe5ececb8f042", size = 
32862414, upload-time = "2026-01-10T21:24:52.566Z" }, - { url = "https://files.pythonhosted.org/packages/ef/df/df1457c4df3826e908879fe3d76bc5b6e60aae45f4ee42539512438cfd5d/scipy-1.17.0-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:dac97a27520d66c12a34fd90a4fe65f43766c18c0d6e1c0a80f114d2260080e4", size = 35112380, upload-time = "2026-01-10T21:24:58.433Z" }, - { url = "https://files.pythonhosted.org/packages/5f/bb/88e2c16bd1dd4de19d80d7c5e238387182993c2fb13b4b8111e3927ad422/scipy-1.17.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:ebb7446a39b3ae0fe8f416a9a3fdc6fba3f11c634f680f16a239c5187bc487c0", size = 34922676, upload-time = "2026-01-10T21:25:04.287Z" }, - { url = "https://files.pythonhosted.org/packages/02/ba/5120242cc735f71fc002cff0303d536af4405eb265f7c60742851e7ccfe9/scipy-1.17.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:474da16199f6af66601a01546144922ce402cb17362e07d82f5a6cf8f963e449", size = 37507599, upload-time = "2026-01-10T21:25:09.851Z" }, - { url = "https://files.pythonhosted.org/packages/52/c8/08629657ac6c0da198487ce8cd3de78e02cfde42b7f34117d56a3fe249dc/scipy-1.17.0-cp311-cp311-win_amd64.whl", hash = "sha256:255c0da161bd7b32a6c898e7891509e8a9289f0b1c6c7d96142ee0d2b114c2ea", size = 36380284, upload-time = "2026-01-10T21:25:15.632Z" }, - { url = "https://files.pythonhosted.org/packages/6c/4a/465f96d42c6f33ad324a40049dfd63269891db9324aa66c4a1c108c6f994/scipy-1.17.0-cp311-cp311-win_arm64.whl", hash = "sha256:85b0ac3ad17fa3be50abd7e69d583d98792d7edc08367e01445a1e2076005379", size = 24370427, upload-time = "2026-01-10T21:25:20.514Z" }, - { url = "https://files.pythonhosted.org/packages/0b/11/7241a63e73ba5a516f1930ac8d5b44cbbfabd35ac73a2d08ca206df007c4/scipy-1.17.0-cp312-cp312-macosx_10_14_x86_64.whl", hash = "sha256:0d5018a57c24cb1dd828bcf51d7b10e65986d549f52ef5adb6b4d1ded3e32a57", size = 31364580, upload-time = "2026-01-10T21:25:25.717Z" }, - { url = 
"https://files.pythonhosted.org/packages/ed/1d/5057f812d4f6adc91a20a2d6f2ebcdb517fdbc87ae3acc5633c9b97c8ba5/scipy-1.17.0-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:88c22af9e5d5a4f9e027e26772cc7b5922fab8bcc839edb3ae33de404feebd9e", size = 27969012, upload-time = "2026-01-10T21:25:30.921Z" }, - { url = "https://files.pythonhosted.org/packages/e3/21/f6ec556c1e3b6ec4e088da667d9987bb77cc3ab3026511f427dc8451187d/scipy-1.17.0-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:f3cd947f20fe17013d401b64e857c6b2da83cae567adbb75b9dcba865abc66d8", size = 20140691, upload-time = "2026-01-10T21:25:34.802Z" }, - { url = "https://files.pythonhosted.org/packages/7a/fe/5e5ad04784964ba964a96f16c8d4676aa1b51357199014dce58ab7ec5670/scipy-1.17.0-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:e8c0b331c2c1f531eb51f1b4fc9ba709521a712cce58f1aa627bc007421a5306", size = 22463015, upload-time = "2026-01-10T21:25:39.277Z" }, - { url = "https://files.pythonhosted.org/packages/4a/69/7c347e857224fcaf32a34a05183b9d8a7aca25f8f2d10b8a698b8388561a/scipy-1.17.0-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5194c445d0a1c7a6c1a4a4681b6b7c71baad98ff66d96b949097e7513c9d6742", size = 32724197, upload-time = "2026-01-10T21:25:44.084Z" }, - { url = "https://files.pythonhosted.org/packages/d1/fe/66d73b76d378ba8cc2fe605920c0c75092e3a65ae746e1e767d9d020a75a/scipy-1.17.0-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9eeb9b5f5997f75507814ed9d298ab23f62cf79f5a3ef90031b1ee2506abdb5b", size = 35009148, upload-time = "2026-01-10T21:25:50.591Z" }, - { url = "https://files.pythonhosted.org/packages/af/07/07dec27d9dc41c18d8c43c69e9e413431d20c53a0339c388bcf72f353c4b/scipy-1.17.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:40052543f7bbe921df4408f46003d6f01c6af109b9e2c8a66dd1cf6cf57f7d5d", size = 34798766, upload-time = "2026-01-10T21:25:59.41Z" }, - { url = 
"https://files.pythonhosted.org/packages/81/61/0470810c8a093cdacd4ba7504b8a218fd49ca070d79eca23a615f5d9a0b0/scipy-1.17.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:0cf46c8013fec9d3694dc572f0b54100c28405d55d3e2cb15e2895b25057996e", size = 37405953, upload-time = "2026-01-10T21:26:07.75Z" }, - { url = "https://files.pythonhosted.org/packages/92/ce/672ed546f96d5d41ae78c4b9b02006cedd0b3d6f2bf5bb76ea455c320c28/scipy-1.17.0-cp312-cp312-win_amd64.whl", hash = "sha256:0937a0b0d8d593a198cededd4c439a0ea216a3f36653901ea1f3e4be949056f8", size = 36328121, upload-time = "2026-01-10T21:26:16.509Z" }, - { url = "https://files.pythonhosted.org/packages/9d/21/38165845392cae67b61843a52c6455d47d0cc2a40dd495c89f4362944654/scipy-1.17.0-cp312-cp312-win_arm64.whl", hash = "sha256:f603d8a5518c7426414d1d8f82e253e454471de682ce5e39c29adb0df1efb86b", size = 24314368, upload-time = "2026-01-10T21:26:23.087Z" }, +sdist = { url = "https://files.pythonhosted.org/packages/7a/97/5a3609c4f8d58b039179648e62dd220f89864f56f7357f5d4f45c29eb2cc/scipy-1.17.1.tar.gz", hash = "sha256:95d8e012d8cb8816c226aef832200b1d45109ed4464303e997c5b13122b297c0", size = 30573822, upload-time = "2026-02-23T00:26:24.851Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/df/75/b4ce781849931fef6fd529afa6b63711d5a733065722d0c3e2724af9e40a/scipy-1.17.1-cp311-cp311-macosx_10_14_x86_64.whl", hash = "sha256:1f95b894f13729334fb990162e911c9e5dc1ab390c58aa6cbecb389c5b5e28ec", size = 31613675, upload-time = "2026-02-23T00:16:00.13Z" }, + { url = "https://files.pythonhosted.org/packages/f7/58/bccc2861b305abdd1b8663d6130c0b3d7cc22e8d86663edbc8401bfd40d4/scipy-1.17.1-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:e18f12c6b0bc5a592ed23d3f7b891f68fd7f8241d69b7883769eb5d5dfb52696", size = 28162057, upload-time = "2026-02-23T00:16:09.456Z" }, + { url = "https://files.pythonhosted.org/packages/6d/ee/18146b7757ed4976276b9c9819108adbc73c5aad636e5353e20746b73069/scipy-1.17.1-cp311-cp311-macosx_14_0_arm64.whl", hash 
= "sha256:a3472cfbca0a54177d0faa68f697d8ba4c80bbdc19908c3465556d9f7efce9ee", size = 20334032, upload-time = "2026-02-23T00:16:17.358Z" }, + { url = "https://files.pythonhosted.org/packages/ec/e6/cef1cf3557f0c54954198554a10016b6a03b2ec9e22a4e1df734936bd99c/scipy-1.17.1-cp311-cp311-macosx_14_0_x86_64.whl", hash = "sha256:766e0dc5a616d026a3a1cffa379af959671729083882f50307e18175797b3dfd", size = 22709533, upload-time = "2026-02-23T00:16:25.791Z" }, + { url = "https://files.pythonhosted.org/packages/4d/60/8804678875fc59362b0fb759ab3ecce1f09c10a735680318ac30da8cd76b/scipy-1.17.1-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:744b2bf3640d907b79f3fd7874efe432d1cf171ee721243e350f55234b4cec4c", size = 33062057, upload-time = "2026-02-23T00:16:36.931Z" }, + { url = "https://files.pythonhosted.org/packages/09/7d/af933f0f6e0767995b4e2d705a0665e454d1c19402aa7e895de3951ebb04/scipy-1.17.1-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:43af8d1f3bea642559019edfe64e9b11192a8978efbd1539d7bc2aaa23d92de4", size = 35349300, upload-time = "2026-02-23T00:16:49.108Z" }, + { url = "https://files.pythonhosted.org/packages/b4/3d/7ccbbdcbb54c8fdc20d3b6930137c782a163fa626f0aef920349873421ba/scipy-1.17.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:cd96a1898c0a47be4520327e01f874acfd61fb48a9420f8aa9f6483412ffa444", size = 35127333, upload-time = "2026-02-23T00:17:01.293Z" }, + { url = "https://files.pythonhosted.org/packages/e8/19/f926cb11c42b15ba08e3a71e376d816ac08614f769b4f47e06c3580c836a/scipy-1.17.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:4eb6c25dd62ee8d5edf68a8e1c171dd71c292fdae95d8aeb3dd7d7de4c364082", size = 37741314, upload-time = "2026-02-23T00:17:12.576Z" }, + { url = "https://files.pythonhosted.org/packages/95/da/0d1df507cf574b3f224ccc3d45244c9a1d732c81dcb26b1e8a766ae271a8/scipy-1.17.1-cp311-cp311-win_amd64.whl", hash = "sha256:d30e57c72013c2a4fe441c2fcb8e77b14e152ad48b5464858e07e2ad9fbfceff", size = 
36607512, upload-time = "2026-02-23T00:17:23.424Z" }, + { url = "https://files.pythonhosted.org/packages/68/7f/bdd79ceaad24b671543ffe0ef61ed8e659440eb683b66f033454dcee90eb/scipy-1.17.1-cp311-cp311-win_arm64.whl", hash = "sha256:9ecb4efb1cd6e8c4afea0daa91a87fbddbce1b99d2895d151596716c0b2e859d", size = 24599248, upload-time = "2026-02-23T00:17:34.561Z" }, + { url = "https://files.pythonhosted.org/packages/35/48/b992b488d6f299dbe3f11a20b24d3dda3d46f1a635ede1c46b5b17a7b163/scipy-1.17.1-cp312-cp312-macosx_10_14_x86_64.whl", hash = "sha256:35c3a56d2ef83efc372eaec584314bd0ef2e2f0d2adb21c55e6ad5b344c0dcb8", size = 31610954, upload-time = "2026-02-23T00:17:49.855Z" }, + { url = "https://files.pythonhosted.org/packages/b2/02/cf107b01494c19dc100f1d0b7ac3cc08666e96ba2d64db7626066cee895e/scipy-1.17.1-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:fcb310ddb270a06114bb64bbe53c94926b943f5b7f0842194d585c65eb4edd76", size = 28172662, upload-time = "2026-02-23T00:18:01.64Z" }, + { url = "https://files.pythonhosted.org/packages/cf/a9/599c28631bad314d219cf9ffd40e985b24d603fc8a2f4ccc5ae8419a535b/scipy-1.17.1-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:cc90d2e9c7e5c7f1a482c9875007c095c3194b1cfedca3c2f3291cdc2bc7c086", size = 20344366, upload-time = "2026-02-23T00:18:12.015Z" }, + { url = "https://files.pythonhosted.org/packages/35/f5/906eda513271c8deb5af284e5ef0206d17a96239af79f9fa0aebfe0e36b4/scipy-1.17.1-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:c80be5ede8f3f8eded4eff73cc99a25c388ce98e555b17d31da05287015ffa5b", size = 22704017, upload-time = "2026-02-23T00:18:21.502Z" }, + { url = "https://files.pythonhosted.org/packages/da/34/16f10e3042d2f1d6b66e0428308ab52224b6a23049cb2f5c1756f713815f/scipy-1.17.1-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e19ebea31758fac5893a2ac360fedd00116cbb7628e650842a6691ba7ca28a21", size = 32927842, upload-time = "2026-02-23T00:18:35.367Z" }, + { url = 
"https://files.pythonhosted.org/packages/01/8e/1e35281b8ab6d5d72ebe9911edcdffa3f36b04ed9d51dec6dd140396e220/scipy-1.17.1-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:02ae3b274fde71c5e92ac4d54bc06c42d80e399fec704383dcd99b301df37458", size = 35235890, upload-time = "2026-02-23T00:18:49.188Z" }, + { url = "https://files.pythonhosted.org/packages/c5/5c/9d7f4c88bea6e0d5a4f1bc0506a53a00e9fcb198de372bfe4d3652cef482/scipy-1.17.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:8a604bae87c6195d8b1045eddece0514d041604b14f2727bbc2b3020172045eb", size = 35003557, upload-time = "2026-02-23T00:18:54.74Z" }, + { url = "https://files.pythonhosted.org/packages/65/94/7698add8f276dbab7a9de9fb6b0e02fc13ee61d51c7c3f85ac28b65e1239/scipy-1.17.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:f590cd684941912d10becc07325a3eeb77886fe981415660d9265c4c418d0bea", size = 37625856, upload-time = "2026-02-23T00:19:00.307Z" }, + { url = "https://files.pythonhosted.org/packages/a2/84/dc08d77fbf3d87d3ee27f6a0c6dcce1de5829a64f2eae85a0ecc1f0daa73/scipy-1.17.1-cp312-cp312-win_amd64.whl", hash = "sha256:41b71f4a3a4cab9d366cd9065b288efc4d4f3c0b37a91a8e0947fb5bd7f31d87", size = 36549682, upload-time = "2026-02-23T00:19:07.67Z" }, + { url = "https://files.pythonhosted.org/packages/bc/98/fe9ae9ffb3b54b62559f52dedaebe204b408db8109a8c66fdd04869e6424/scipy-1.17.1-cp312-cp312-win_arm64.whl", hash = "sha256:f4115102802df98b2b0db3cce5cb9b92572633a1197c77b7553e5203f284a5b3", size = 24547340, upload-time = "2026-02-23T00:19:12.024Z" }, ] [[package]] @@ -2238,11 +2269,11 @@ wheels = [ [[package]] name = "setuptools" -version = "80.9.0" +version = "82.0.1" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/18/5d/3bf57dcd21979b887f014ea83c24ae194cfcd12b9e0fda66b957c69d1fca/setuptools-80.9.0.tar.gz", hash = "sha256:f36b47402ecde768dbfafc46e8e4207b4360c654f1f3bb84475f0a28628fb19c", size = 1319958, upload-time = 
"2025-05-27T00:56:51.443Z" } +sdist = { url = "https://files.pythonhosted.org/packages/4f/db/cfac1baf10650ab4d1c111714410d2fbb77ac5a616db26775db562c8fab2/setuptools-82.0.1.tar.gz", hash = "sha256:7d872682c5d01cfde07da7bccc7b65469d3dca203318515ada1de5eda35efbf9", size = 1152316, upload-time = "2026-03-09T12:47:17.221Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/a3/dc/17031897dae0efacfea57dfd3a82fdd2a2aeb58e0ff71b77b87e44edc772/setuptools-80.9.0-py3-none-any.whl", hash = "sha256:062d34222ad13e0cc312a4c02d73f059e86a4acbfbdea8f8f76b28c99f306922", size = 1201486, upload-time = "2025-05-27T00:56:49.664Z" }, + { url = "https://files.pythonhosted.org/packages/9d/76/f789f7a86709c6b087c5a2f52f911838cad707cc613162401badc665acfe/setuptools-82.0.1-py3-none-any.whl", hash = "sha256:a59e362652f08dcd477c78bb6e7bd9d80a7995bc73ce773050228a348ce2e5bb", size = 1006223, upload-time = "2026-03-09T12:47:15.026Z" }, ] [[package]] @@ -2256,11 +2287,11 @@ wheels = [ [[package]] name = "soupsieve" -version = "2.8.2" +version = "2.8.3" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/93/f2/21d6ca70c3cf35d01ae9e01be534bf6b6b103c157a728082a5028350c310/soupsieve-2.8.2.tar.gz", hash = "sha256:78a66b0fdee2ab40b7199dc3e747ee6c6e231899feeaae0b9b98a353afd48fd8", size = 118601, upload-time = "2026-01-18T16:21:31.09Z" } +sdist = { url = "https://files.pythonhosted.org/packages/7b/ae/2d9c981590ed9999a0d91755b47fc74f74de286b0f5cee14c9269041e6c4/soupsieve-2.8.3.tar.gz", hash = "sha256:3267f1eeea4251fb42728b6dfb746edc9acaffc4a45b27e19450b676586e8349", size = 118627, upload-time = "2026-01-20T04:27:02.457Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/a6/9a/b4450ccce353e2430621b3bb571899ffe1033d5cd72c9e065110f95b1a63/soupsieve-2.8.2-py3-none-any.whl", hash = "sha256:0f4c2f6b5a5fb97a641cf69c0bd163670a0e45e6d6c01a2107f93a6a6f93c51a", size = 37016, upload-time = "2026-01-18T16:21:29.7Z" }, + { url = 
"https://files.pythonhosted.org/packages/46/2c/1462b1d0a634697ae9e55b3cecdcb64788e8b7d63f54d923fcd0bb140aed/soupsieve-2.8.3-py3-none-any.whl", hash = "sha256:ed64f2ba4eebeab06cc4962affce381647455978ffc1e36bb79a545b91f45a95", size = 37016, upload-time = "2026-01-20T04:27:01.012Z" }, ] [[package]] @@ -2449,33 +2480,31 @@ wheels = [ [[package]] name = "tornado" -version = "6.5.4" +version = "6.5.5" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/37/1d/0a336abf618272d53f62ebe274f712e213f5a03c0b2339575430b8362ef2/tornado-6.5.4.tar.gz", hash = "sha256:a22fa9047405d03260b483980635f0b041989d8bcc9a313f8fe18b411d84b1d7", size = 513632, upload-time = "2025-12-15T19:21:03.836Z" } +sdist = { url = "https://files.pythonhosted.org/packages/f8/f1/3173dfa4a18db4a9b03e5d55325559dab51ee653763bb8745a75af491286/tornado-6.5.5.tar.gz", hash = "sha256:192b8f3ea91bd7f1f50c06955416ed76c6b72f96779b962f07f911b91e8d30e9", size = 516006, upload-time = "2026-03-10T21:31:02.067Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/ab/a9/e94a9d5224107d7ce3cc1fab8d5dc97f5ea351ccc6322ee4fb661da94e35/tornado-6.5.4-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:d6241c1a16b1c9e4cc28148b1cda97dd1c6cb4fb7068ac1bedc610768dff0ba9", size = 443909, upload-time = "2025-12-15T19:20:48.382Z" }, - { url = "https://files.pythonhosted.org/packages/db/7e/f7b8d8c4453f305a51f80dbb49014257bb7d28ccb4bbb8dd328ea995ecad/tornado-6.5.4-cp39-abi3-macosx_10_9_x86_64.whl", hash = "sha256:2d50f63dda1d2cac3ae1fa23d254e16b5e38153758470e9956cbc3d813d40843", size = 442163, upload-time = "2025-12-15T19:20:49.791Z" }, - { url = "https://files.pythonhosted.org/packages/ba/b5/206f82d51e1bfa940ba366a8d2f83904b15942c45a78dd978b599870ab44/tornado-6.5.4-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d1cf66105dc6acb5af613c054955b8137e34a03698aa53272dbda4afe252be17", size = 445746, upload-time = "2025-12-15T19:20:51.491Z" }, - { url = 
"https://files.pythonhosted.org/packages/8e/9d/1a3338e0bd30ada6ad4356c13a0a6c35fbc859063fa7eddb309183364ac1/tornado-6.5.4-cp39-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:50ff0a58b0dc97939d29da29cd624da010e7f804746621c78d14b80238669335", size = 445083, upload-time = "2025-12-15T19:20:52.778Z" }, - { url = "https://files.pythonhosted.org/packages/50/d4/e51d52047e7eb9a582da59f32125d17c0482d065afd5d3bc435ff2120dc5/tornado-6.5.4-cp39-abi3-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e5fb5e04efa54cf0baabdd10061eb4148e0be137166146fff835745f59ab9f7f", size = 445315, upload-time = "2025-12-15T19:20:53.996Z" }, - { url = "https://files.pythonhosted.org/packages/27/07/2273972f69ca63dbc139694a3fc4684edec3ea3f9efabf77ed32483b875c/tornado-6.5.4-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:9c86b1643b33a4cd415f8d0fe53045f913bf07b4a3ef646b735a6a86047dda84", size = 446003, upload-time = "2025-12-15T19:20:56.101Z" }, - { url = "https://files.pythonhosted.org/packages/d1/83/41c52e47502bf7260044413b6770d1a48dda2f0246f95ee1384a3cd9c44a/tornado-6.5.4-cp39-abi3-musllinux_1_2_i686.whl", hash = "sha256:6eb82872335a53dd063a4f10917b3efd28270b56a33db69009606a0312660a6f", size = 445412, upload-time = "2025-12-15T19:20:57.398Z" }, - { url = "https://files.pythonhosted.org/packages/10/c7/bc96917f06cbee182d44735d4ecde9c432e25b84f4c2086143013e7b9e52/tornado-6.5.4-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:6076d5dda368c9328ff41ab5d9dd3608e695e8225d1cd0fd1e006f05da3635a8", size = 445392, upload-time = "2025-12-15T19:20:58.692Z" }, - { url = "https://files.pythonhosted.org/packages/0c/1a/d7592328d037d36f2d2462f4bc1fbb383eec9278bc786c1b111cbbd44cfa/tornado-6.5.4-cp39-abi3-win32.whl", hash = "sha256:1768110f2411d5cd281bac0a090f707223ce77fd110424361092859e089b38d1", size = 446481, upload-time = "2025-12-15T19:21:00.008Z" }, - { url = 
"https://files.pythonhosted.org/packages/d6/6d/c69be695a0a64fd37a97db12355a035a6d90f79067a3cf936ec2b1dc38cd/tornado-6.5.4-cp39-abi3-win_amd64.whl", hash = "sha256:fa07d31e0cd85c60713f2b995da613588aa03e1303d75705dca6af8babc18ddc", size = 446886, upload-time = "2025-12-15T19:21:01.287Z" }, - { url = "https://files.pythonhosted.org/packages/50/49/8dc3fd90902f70084bd2cd059d576ddb4f8bb44c2c7c0e33a11422acb17e/tornado-6.5.4-cp39-abi3-win_arm64.whl", hash = "sha256:053e6e16701eb6cbe641f308f4c1a9541f91b6261991160391bfc342e8a551a1", size = 445910, upload-time = "2025-12-15T19:21:02.571Z" }, + { url = "https://files.pythonhosted.org/packages/59/8c/77f5097695f4dd8255ecbd08b2a1ed8ba8b953d337804dd7080f199e12bf/tornado-6.5.5-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:487dc9cc380e29f58c7ab88f9e27cdeef04b2140862e5076a66fb6bb68bb1bfa", size = 445983, upload-time = "2026-03-10T21:30:44.28Z" }, + { url = "https://files.pythonhosted.org/packages/ab/5e/7625b76cd10f98f1516c36ce0346de62061156352353ef2da44e5c21523c/tornado-6.5.5-cp39-abi3-macosx_10_9_x86_64.whl", hash = "sha256:65a7f1d46d4bb41df1ac99f5fcb685fb25c7e61613742d5108b010975a9a6521", size = 444246, upload-time = "2026-03-10T21:30:46.571Z" }, + { url = "https://files.pythonhosted.org/packages/b2/04/7b5705d5b3c0fab088f434f9c83edac1573830ca49ccf29fb83bf7178eec/tornado-6.5.5-cp39-abi3-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:e74c92e8e65086b338fd56333fb9a68b9f6f2fe7ad532645a290a464bcf46be5", size = 447229, upload-time = "2026-03-10T21:30:48.273Z" }, + { url = "https://files.pythonhosted.org/packages/34/01/74e034a30ef59afb4097ef8659515e96a39d910b712a89af76f5e4e1f93c/tornado-6.5.5-cp39-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:435319e9e340276428bbdb4e7fa732c2d399386d1de5686cb331ec8eee754f07", size = 448192, upload-time = "2026-03-10T21:30:51.22Z" }, + { url = 
"https://files.pythonhosted.org/packages/be/00/fe9e02c5a96429fce1a1d15a517f5d8444f9c412e0bb9eadfbe3b0fc55bf/tornado-6.5.5-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:3f54aa540bdbfee7b9eb268ead60e7d199de5021facd276819c193c0fb28ea4e", size = 448039, upload-time = "2026-03-10T21:30:53.52Z" }, + { url = "https://files.pythonhosted.org/packages/82/9e/656ee4cec0398b1d18d0f1eb6372c41c6b889722641d84948351ae19556d/tornado-6.5.5-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:36abed1754faeb80fbd6e64db2758091e1320f6bba74a4cf8c09cd18ccce8aca", size = 447445, upload-time = "2026-03-10T21:30:55.541Z" }, + { url = "https://files.pythonhosted.org/packages/5a/76/4921c00511f88af86a33de770d64141170f1cfd9c00311aea689949e274e/tornado-6.5.5-cp39-abi3-win32.whl", hash = "sha256:dd3eafaaeec1c7f2f8fdcd5f964e8907ad788fe8a5a32c4426fbbdda621223b7", size = 448582, upload-time = "2026-03-10T21:30:57.142Z" }, + { url = "https://files.pythonhosted.org/packages/2c/23/f6c6112a04d28eed765e374435fb1a9198f73e1ec4b4024184f21faeb1ad/tornado-6.5.5-cp39-abi3-win_amd64.whl", hash = "sha256:6443a794ba961a9f619b1ae926a2e900ac20c34483eea67be4ed8f1e58d3ef7b", size = 448990, upload-time = "2026-03-10T21:30:58.857Z" }, + { url = "https://files.pythonhosted.org/packages/b7/c8/876602cbc96469911f0939f703453c1157b0c826ecb05bdd32e023397d4e/tornado-6.5.5-cp39-abi3-win_arm64.whl", hash = "sha256:2c9a876e094109333f888539ddb2de4361743e5d21eece20688e3e351e4990a6", size = 448016, upload-time = "2026-03-10T21:31:00.43Z" }, ] [[package]] name = "tqdm" -version = "4.67.1" +version = "4.67.3" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "colorama", marker = "sys_platform == 'win32'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/a8/4b/29b4ef32e036bb34e4ab51796dd745cdba7ed47ad142a9f4a1eb8e0c744d/tqdm-4.67.1.tar.gz", hash = "sha256:f8aef9c52c08c13a65f30ea34f4e5aac3fd1a34959879d7e59e63027286627f2", size = 169737, upload-time = "2024-11-24T20:12:22.481Z" } +sdist = { url 
= "https://files.pythonhosted.org/packages/09/a9/6ba95a270c6f1fbcd8dac228323f2777d886cb206987444e4bce66338dd4/tqdm-4.67.3.tar.gz", hash = "sha256:7d825f03f89244ef73f1d4ce193cb1774a8179fd96f31d7e1dcde62092b960bb", size = 169598, upload-time = "2026-02-03T17:35:53.048Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/d0/30/dc54f88dd4a2b5dc8a0279bdd7270e735851848b762aeb1c1184ed1f6b14/tqdm-4.67.1-py3-none-any.whl", hash = "sha256:26445eca388f82e72884e0d580d5464cd801a3ea01e63e5601bdff9ba6a48de2", size = 78540, upload-time = "2024-11-24T20:12:19.698Z" }, + { url = "https://files.pythonhosted.org/packages/16/e1/3079a9ff9b8e11b846c6ac5c8b5bfb7ff225eee721825310c91b3b50304f/tqdm-4.67.3-py3-none-any.whl", hash = "sha256:ee1e4c0e59148062281c49d80b25b67771a127c85fc9676d3be5f243206826bf", size = 78374, upload-time = "2026-02-03T17:35:50.982Z" }, ] [[package]] @@ -2510,11 +2539,11 @@ wheels = [ [[package]] name = "tzdata" -version = "2025.3" +version = "2026.1" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/5e/a7/c202b344c5ca7daf398f3b8a477eeb205cf3b6f32e7ec3a6bac0629ca975/tzdata-2025.3.tar.gz", hash = "sha256:de39c2ca5dc7b0344f2eba86f49d614019d29f060fc4ebc8a417896a620b56a7", size = 196772, upload-time = "2025-12-13T17:45:35.667Z" } +sdist = { url = "https://files.pythonhosted.org/packages/19/f5/cd531b2d15a671a40c0f66cf06bc3570a12cd56eef98960068ebbad1bf5a/tzdata-2026.1.tar.gz", hash = "sha256:67658a1903c75917309e753fdc349ac0efd8c27db7a0cb406a25be4840f87f98", size = 197639, upload-time = "2026-04-03T11:25:22.002Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/c7/b0/003792df09decd6849a5e39c28b513c06e84436a54440380862b5aeff25d/tzdata-2025.3-py2.py3-none-any.whl", hash = "sha256:06a47e5700f3081aab02b2e513160914ff0694bce9947d6b76ebd6bf57cfc5d1", size = 348521, upload-time = "2025-12-13T17:45:33.889Z" }, + { url = 
"https://files.pythonhosted.org/packages/b0/70/d460bd685a170790ec89317e9bd33047988e4bce507b831f5db771e142de/tzdata-2026.1-py2.py3-none-any.whl", hash = "sha256:4b1d2be7ac37ceafd7327b961aa3a54e467efbdb563a23655fbfe0d39cfc42a9", size = 348952, upload-time = "2026-04-03T11:25:20.313Z" }, ] [[package]] @@ -2550,42 +2579,43 @@ wheels = [ [[package]] name = "uv" -version = "0.9.26" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/ff/6a/ef4ea19097ecdfd7df6e608f93874536af045c68fd70aa628c667815c458/uv-0.9.26.tar.gz", hash = "sha256:8b7017a01cc48847a7ae26733383a2456dd060fc50d21d58de5ee14f6b6984d7", size = 3790483, upload-time = "2026-01-15T20:51:33.582Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/fe/e1/5c0b17833d5e3b51a897957348ff8d937a3cdfc5eea5c4a7075d8d7b9870/uv-0.9.26-py3-none-linux_armv6l.whl", hash = "sha256:7dba609e32b7bd13ef81788d580970c6ff3a8874d942755b442cffa8f25dba57", size = 22638031, upload-time = "2026-01-15T20:51:44.187Z" }, - { url = "https://files.pythonhosted.org/packages/a5/8b/68ac5825a615a8697e324f52ac0b92feb47a0ec36a63759c5f2931f0c3a0/uv-0.9.26-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:b815e3b26eeed00e00f831343daba7a9d99c1506883c189453bb4d215f54faac", size = 21507805, upload-time = "2026-01-15T20:50:42.574Z" }, - { url = "https://files.pythonhosted.org/packages/0d/a2/664a338aefe009f6e38e47455ee2f64a21da7ad431dbcaf8b45d8b1a2b7a/uv-0.9.26-py3-none-macosx_11_0_arm64.whl", hash = "sha256:1b012e6c4dfe767f818cbb6f47d02c207c9b0c82fee69a5de6d26ffb26a3ef3c", size = 20249791, upload-time = "2026-01-15T20:50:49.835Z" }, - { url = "https://files.pythonhosted.org/packages/ba/3d/b8186a7dec1346ca4630c674b760517d28bffa813a01965f4b57596bacf3/uv-0.9.26-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.musllinux_1_1_aarch64.whl", hash = "sha256:ea296b700d7c4c27acdfd23ffaef2b0ecdd0aa1b58d942c62ee87df3b30f06ac", size = 22039108, upload-time = "2026-01-15T20:51:00.675Z" }, - { url = 
"https://files.pythonhosted.org/packages/aa/a9/687fd587e7a3c2c826afe72214fb24b7f07b0d8b0b0300e6a53b554180ea/uv-0.9.26-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.musllinux_1_1_armv7l.whl", hash = "sha256:1ba860d2988efc27e9c19f8537a2f9fa499a8b7ebe4afbe2d3d323d72f9aee61", size = 22174763, upload-time = "2026-01-15T20:50:46.471Z" }, - { url = "https://files.pythonhosted.org/packages/38/69/7fa03ee7d59e562fca1426436f15a8c107447d41b34e0899e25ee69abfad/uv-0.9.26-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8610bdfc282a681a0a40b90495a478599aa3484c12503ef79ef42cd271fd80fe", size = 22189861, upload-time = "2026-01-15T20:51:15.618Z" }, - { url = "https://files.pythonhosted.org/packages/10/2d/4be446a2ec09f3c428632b00a138750af47c76b0b9f987e9a5b52fef0405/uv-0.9.26-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c4bf700bd071bd595084b9ee0a8d77c6a0a10ca3773d3771346a2599f306bd9c", size = 23005589, upload-time = "2026-01-15T20:50:57.185Z" }, - { url = "https://files.pythonhosted.org/packages/c3/16/860990b812136695a63a8da9fb5f819c3cf18ea37dcf5852e0e1b795ca0d/uv-0.9.26-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:89a7beea1c692f76a6f8da13beff3cbb43f7123609e48e03517cc0db5c5de87c", size = 24713505, upload-time = "2026-01-15T20:51:04.366Z" }, - { url = "https://files.pythonhosted.org/packages/01/43/5d7f360d551e62d8f8bf6624b8fca9895cea49ebe5fce8891232d7ed2321/uv-0.9.26-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:182f5c086c7d03ad447e522b70fa29a0302a70bcfefad4b8cd08496828a0e179", size = 24342500, upload-time = "2026-01-15T20:51:47.863Z" }, - { url = "https://files.pythonhosted.org/packages/9b/9c/2bae010a189e7d8e5dc555edcfd053b11ce96fad2301b919ba0d9dd23659/uv-0.9.26-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5d8c62a501f13425b4b0ce1dd4c6b82f3ce5a5179e2549c55f4bb27cc0eb8ef8", size = 23222578, upload-time = "2026-01-15T20:51:36.85Z" }, - { url = 
"https://files.pythonhosted.org/packages/38/16/a07593a040fe6403c36f3b0a99b309f295cbfe19a1074dbadb671d5d4ef7/uv-0.9.26-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b7e89798bd3df7dcc4b2b4ac4e2fc11d6b3ff4fe7d764aa3012d664c635e2922", size = 23250201, upload-time = "2026-01-15T20:51:19.117Z" }, - { url = "https://files.pythonhosted.org/packages/23/a0/45893e15ad3ab842db27c1eb3b8605b9b4023baa5d414e67cfa559a0bff0/uv-0.9.26-py3-none-manylinux_2_28_aarch64.whl", hash = "sha256:60a66f1783ec4efc87b7e1f9bd66e8fd2de3e3b30d122b31cb1487f63a3ea8b7", size = 22229160, upload-time = "2026-01-15T20:51:22.931Z" }, - { url = "https://files.pythonhosted.org/packages/fc/c0/20a597a5c253702a223b5e745cf8c16cd5dd053080f896bb10717b3bedec/uv-0.9.26-py3-none-manylinux_2_31_riscv64.whl", hash = "sha256:63c6a1f1187facba1fb45a2fa45396980631a3427ac11b0e3d9aa3ebcf2c73cf", size = 23090730, upload-time = "2026-01-15T20:51:26.611Z" }, - { url = "https://files.pythonhosted.org/packages/40/c9/744537867d9ab593fea108638b57cca1165a0889cfd989981c942b6de9a5/uv-0.9.26-py3-none-musllinux_1_1_i686.whl", hash = "sha256:c6d8650fbc980ccb348b168266143a9bd4deebc86437537caaf8ff2a39b6ea50", size = 22436632, upload-time = "2026-01-15T20:51:12.045Z" }, - { url = "https://files.pythonhosted.org/packages/6b/e2/be683e30262f2cf02dcb41b6c32910a6939517d50ec45f502614d239feb7/uv-0.9.26-py3-none-musllinux_1_1_x86_64.whl", hash = "sha256:25278f9298aa4dade38241a93d036739b0c87278dcfad1ec1f57e803536bfc49", size = 23480064, upload-time = "2026-01-15T20:50:53.333Z" }, - { url = "https://files.pythonhosted.org/packages/50/3e/4a7e6bc5db2beac9c4966f212805f1903d37d233f2e160737f0b24780ada/uv-0.9.26-py3-none-win32.whl", hash = "sha256:10d075e0193e3a0e6c54f830731c4cb965d6f4e11956e84a7bed7ed61d42aa27", size = 21000052, upload-time = "2026-01-15T20:51:40.753Z" }, - { url = "https://files.pythonhosted.org/packages/07/5d/eb80c6eff2a9f7d5cf35ec84fda323b74aa0054145db28baf72d35a7a301/uv-0.9.26-py3-none-win_amd64.whl", hash 
= "sha256:0315fc321f5644b12118f9928086513363ed9b29d74d99f1539fda1b6b5478ab", size = 23684930, upload-time = "2026-01-15T20:51:08.448Z" }, - { url = "https://files.pythonhosted.org/packages/ed/9d/3b2631931649b1783f5024796ca8ad2b42a01a829b9ce1202d973cc7bce5/uv-0.9.26-py3-none-win_arm64.whl", hash = "sha256:344ff38749b6cd7b7dfdfb382536f168cafe917ae3a5aa78b7a63746ba2a905b", size = 22158123, upload-time = "2026-01-15T20:51:30.939Z" }, +version = "0.11.6" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/dd/f3/8aceeab67ea69805293ab290e7ca8cc1b61a064d28b8a35c76d8eba063dd/uv-0.11.6.tar.gz", hash = "sha256:e3b21b7e80024c95ff339fcd147ac6fc3dd98d3613c9d45d3a1f4fd1057f127b", size = 4073298, upload-time = "2026-04-09T12:09:01.738Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/1f/fe/4b61a3d5ad9d02e8a4405026ccd43593d7044598e0fa47d892d4dafe44c9/uv-0.11.6-py3-none-linux_armv6l.whl", hash = "sha256:ada04dcf89ddea5b69d27ac9cdc5ef575a82f90a209a1392e930de504b2321d6", size = 23780079, upload-time = "2026-04-09T12:08:56.609Z" }, + { url = "https://files.pythonhosted.org/packages/52/db/d27519a9e1a5ffee9d71af1a811ad0e19ce7ab9ae815453bef39dd479389/uv-0.11.6-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:5be013888420f96879c6e0d3081e7bcf51b539b034a01777041934457dfbedf3", size = 23214721, upload-time = "2026-04-09T12:09:32.228Z" }, + { url = "https://files.pythonhosted.org/packages/a6/8f/4399fa8b882bd7e0efffc829f73ab24d117d490a93e6bc7104a50282b854/uv-0.11.6-py3-none-macosx_11_0_arm64.whl", hash = "sha256:ffa5dc1cbb52bdce3b8447e83d1601a57ad4da6b523d77d4b47366db8b1ceb18", size = 21750109, upload-time = "2026-04-09T12:09:24.357Z" }, + { url = "https://files.pythonhosted.org/packages/32/07/5a12944c31c3dda253632da7a363edddb869ed47839d4d92a2dc5f546c93/uv-0.11.6-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.musllinux_1_1_aarch64.whl", hash = 
"sha256:bfb107b4dade1d2c9e572992b06992d51dd5f2136eb8ceee9e62dd124289e825", size = 23551146, upload-time = "2026-04-09T12:09:10.439Z" }, + { url = "https://files.pythonhosted.org/packages/79/5b/2ec8b0af80acd1016ed596baf205ddc77b19ece288473b01926c4a9cf6db/uv-0.11.6-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.musllinux_1_1_armv7l.whl", hash = "sha256:9e2fe7ce12161d8016b7deb1eaad7905a76ff7afec13383333ca75e0c4b5425d", size = 23331192, upload-time = "2026-04-09T12:09:34.792Z" }, + { url = "https://files.pythonhosted.org/packages/62/7d/eea35935f2112b21c296a3e42645f3e4b1aa8bcd34dcf13345fbd55134b7/uv-0.11.6-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:7ed9c6f70c25e8dfeedddf4eddaf14d353f5e6b0eb43da9a14d3a1033d51d915", size = 23337686, upload-time = "2026-04-09T12:09:18.522Z" }, + { url = "https://files.pythonhosted.org/packages/21/47/2584f5ab618f6ebe9bdefb2f765f2ca8540e9d739667606a916b35449eec/uv-0.11.6-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d68a013e609cebf82077cbeeb0809ed5e205257814273bfd31e02fc0353bbfc2", size = 25008139, upload-time = "2026-04-09T12:09:03.983Z" }, + { url = "https://files.pythonhosted.org/packages/95/81/497ae5c1d36355b56b97dc59f550c7e89d0291c163a3f203c6f341dff195/uv-0.11.6-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:93f736dddca03dae732c6fdea177328d3bc4bf137c75248f3d433c57416a4311", size = 25712458, upload-time = "2026-04-09T12:09:07.598Z" }, + { url = "https://files.pythonhosted.org/packages/3c/1c/74083238e4fab2672b63575b9008f1ea418b02a714bcfcf017f4f6a309b6/uv-0.11.6-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e96a66abe53fced0e3389008b8d2eff8278cfa8bb545d75631ae8ceb9c929aba", size = 24915507, upload-time = "2026-04-09T12:08:50.892Z" }, + { url = "https://files.pythonhosted.org/packages/5a/ee/e14fe10ba455a823ed18233f12de6699a601890905420b5c504abf115116/uv-0.11.6-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:0b096311b2743b228df911a19532b3f18fa420bf9530547aecd6a8e04bbfaccd", size = 24971011, upload-time = "2026-04-09T12:08:54.016Z" }, + { url = "https://files.pythonhosted.org/packages/3c/a1/7b9c83eaadf98e343317ff6384a7227a4855afd02cdaf9696bcc71ee6155/uv-0.11.6-py3-none-manylinux_2_28_aarch64.whl", hash = "sha256:904d537b4a6e798015b4a64ff5622023bd4601b43b6cd1e5f423d63471f5e948", size = 23640234, upload-time = "2026-04-09T12:09:15.735Z" }, + { url = "https://files.pythonhosted.org/packages/d6/51/75ccdd23e76ff1703b70eb82881cd5b4d2a954c9679f8ef7e0136ef2cfab/uv-0.11.6-py3-none-manylinux_2_31_riscv64.musllinux_1_1_riscv64.whl", hash = "sha256:4ed8150c26b5e319381d75ae2ce6aba1e9c65888f4850f4e3b3fa839953c90a5", size = 24452664, upload-time = "2026-04-09T12:09:26.875Z" }, + { url = "https://files.pythonhosted.org/packages/4d/86/ace80fe47d8d48b5e3b5aee0b6eb1a49deaacc2313782870250b3faa36f5/uv-0.11.6-py3-none-manylinux_2_31_riscv64.whl", hash = "sha256:1c9218c8d4ac35ca6e617fb0951cc0ab2d907c91a6aea2617de0a5494cf162c0", size = 24494599, upload-time = "2026-04-09T12:09:37.368Z" }, + { url = "https://files.pythonhosted.org/packages/05/2d/4b642669b56648194f026de79bc992cbfc3ac2318b0a8d435f3c284934e8/uv-0.11.6-py3-none-musllinux_1_1_i686.whl", hash = "sha256:9e211c83cc890c569b86a4183fcf5f8b6f0c7adc33a839b699a98d30f1310d3a", size = 24159150, upload-time = "2026-04-09T12:09:13.17Z" }, + { url = "https://files.pythonhosted.org/packages/ae/24/7eecd76fe983a74fed1fc700a14882e70c4e857f1d562a9f2303d4286c12/uv-0.11.6-py3-none-musllinux_1_1_x86_64.whl", hash = "sha256:d2a1d2089afdf117ad19a4c1dd36b8189c00ae1ad4135d3bfbfced82342595cf", size = 25164324, upload-time = "2026-04-09T12:08:59.56Z" }, + { url = "https://files.pythonhosted.org/packages/27/e0/bbd4ba7c2e5067bbba617d87d306ec146889edaeeaa2081d3e122178ca08/uv-0.11.6-py3-none-win32.whl", hash = "sha256:6e8344f38fa29f85dcfd3e62dc35a700d2448f8e90381077ef393438dcd5012e", size = 22865693, upload-time = "2026-04-09T12:09:21.415Z" }, + { url = 
"https://files.pythonhosted.org/packages/a5/33/1983ce113c538a856f2d620d16e39691962ecceef091a84086c5785e32e5/uv-0.11.6-py3-none-win_amd64.whl", hash = "sha256:a28bea69c1186303d1200f155c7a28c449f8a4431e458fcf89360cc7ef546e40", size = 25371258, upload-time = "2026-04-09T12:09:40.52Z" }, + { url = "https://files.pythonhosted.org/packages/35/01/be0873f44b9c9bc250fcbf263367fcfc1f59feab996355bcb6b52fff080d/uv-0.11.6-py3-none-win_arm64.whl", hash = "sha256:a78f6d64b9950e24061bc7ec7f15ff8089ad7f5a976e7b65fcadce58fe02f613", size = 23869585, upload-time = "2026-04-09T12:09:29.425Z" }, ] [[package]] name = "virtualenv" -version = "20.36.1" +version = "21.2.1" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "distlib" }, { name = "filelock" }, { name = "platformdirs" }, + { name = "python-discovery" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/aa/a3/4d310fa5f00863544e1d0f4de93bddec248499ccf97d4791bc3122c9d4f3/virtualenv-20.36.1.tar.gz", hash = "sha256:8befb5c81842c641f8ee658481e42641c68b5eab3521d8e092d18320902466ba", size = 6032239, upload-time = "2026-01-09T18:21:01.296Z" } +sdist = { url = "https://files.pythonhosted.org/packages/97/c5/aff062c66b42e2183201a7ace10c6b2e959a9a16525c8e8ca8e59410d27a/virtualenv-21.2.1.tar.gz", hash = "sha256:b66ffe81301766c0d5e2208fc3576652c59d44e7b731fc5f5ed701c9b537fa78", size = 5844770, upload-time = "2026-04-09T18:47:11.482Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/6a/2a/dc2228b2888f51192c7dc766106cd475f1b768c10caaf9727659726f7391/virtualenv-20.36.1-py3-none-any.whl", hash = "sha256:575a8d6b124ef88f6f51d56d656132389f961062a9177016a50e4f507bbcc19f", size = 6008258, upload-time = "2026-01-09T18:20:59.425Z" }, + { url = "https://files.pythonhosted.org/packages/20/0e/f083a76cb590e60dff3868779558eefefb8dfb7c9ed020babc7aa014ccbf/virtualenv-21.2.1-py3-none-any.whl", hash = "sha256:bd16b49c53562b28cf1a3ad2f36edb805ad71301dee70ddc449e5c88a9f919a2", size = 5828326, upload-time = 
"2026-04-09T18:47:09.331Z" }, ] [[package]] @@ -2593,7 +2623,12 @@ name = "vizta" version = "0.0.1" source = { registry = "https://pypi.org/simple" } resolution-markers = [ - "python_full_version < '3.12'", + "python_full_version < '3.12' and platform_machine != 's390x' and sys_platform == 'win32'", + "python_full_version < '3.12' and platform_machine == 's390x' and sys_platform == 'win32'", + "python_full_version < '3.12' and platform_machine != 's390x' and sys_platform == 'emscripten'", + "python_full_version < '3.12' and platform_machine == 's390x' and sys_platform == 'emscripten'", + "python_full_version < '3.12' and platform_machine != 's390x' and sys_platform != 'emscripten' and sys_platform != 'win32'", + "python_full_version < '3.12' and platform_machine == 's390x' and sys_platform != 'emscripten' and sys_platform != 'win32'", ] dependencies = [ { name = "matplotlib", marker = "python_full_version < '3.12'" }, @@ -2609,7 +2644,12 @@ name = "vizta" version = "1.1.2" source = { registry = "https://pypi.org/simple" } resolution-markers = [ - "python_full_version >= '3.12'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'win32'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'win32'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform == 'emscripten'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform == 'emscripten'", + "python_full_version >= '3.12' and platform_machine != 's390x' and sys_platform != 'emscripten' and sys_platform != 'win32'", + "python_full_version >= '3.12' and platform_machine == 's390x' and sys_platform != 'emscripten' and sys_platform != 'win32'", ] dependencies = [ { name = "matplotlib", marker = "python_full_version >= '3.12'" }, @@ -2622,11 +2662,11 @@ wheels = [ [[package]] name = "wcwidth" -version = "0.2.14" +version = "0.6.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = 
"https://files.pythonhosted.org/packages/24/30/6b0809f4510673dc723187aeaf24c7f5459922d01e2f794277a3dfb90345/wcwidth-0.2.14.tar.gz", hash = "sha256:4d478375d31bc5395a3c55c40ccdf3354688364cd61c4f6adacaa9215d0b3605", size = 102293, upload-time = "2025-09-22T16:29:53.023Z" } +sdist = { url = "https://files.pythonhosted.org/packages/35/a2/8e3becb46433538a38726c948d3399905a4c7cabd0df578ede5dc51f0ec2/wcwidth-0.6.0.tar.gz", hash = "sha256:cdc4e4262d6ef9a1a57e018384cbeb1208d8abbc64176027e2c2455c81313159", size = 159684, upload-time = "2026-02-06T19:19:40.919Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/af/b5/123f13c975e9f27ab9c0770f514345bd406d0e8d3b7a0723af9d43f710af/wcwidth-0.2.14-py2.py3-none-any.whl", hash = "sha256:a7bb560c8aee30f9957e5f9895805edd20602f2d7f720186dfd906e82b4982e1", size = 37286, upload-time = "2025-09-22T16:29:51.641Z" }, + { url = "https://files.pythonhosted.org/packages/68/5a/199c59e0a824a3db2b89c5d2dade7ab5f9624dbf6448dc291b46d5ec94d3/wcwidth-0.6.0-py3-none-any.whl", hash = "sha256:1a3a1e510b553315f8e146c54764f4fb6264ffad731b3d78088cdb1478ffbdad", size = 94189, upload-time = "2026-02-06T19:19:39.646Z" }, ] [[package]] From 8954ee0354ac57f69d63ecc0651fa5c476318d5f Mon Sep 17 00:00:00 2001 From: "J. 
Sebastian Paez" Date: Sat, 11 Apr 2026 14:18:29 -0700 Subject: [PATCH 57/64] chore: bump versions --- Cargo.lock | 20 ++++++++++---------- Cargo.toml | 2 +- pyproject.toml | 4 ++-- python/speclib_builder/pyproject.toml | 4 ++-- python/timsquery_pyo3/pyproject.toml | 3 ++- uv.lock | 4 ++-- 6 files changed, 19 insertions(+), 18 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index aabacde..9c9c900 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -284,7 +284,7 @@ dependencies = [ [[package]] name = "array2d" -version = "0.26.0" +version = "0.27.0" dependencies = [ "serde", ] @@ -1377,7 +1377,7 @@ dependencies = [ [[package]] name = "calibrt" -version = "0.26.0" +version = "0.27.0" dependencies = [ "array2d", "insta", @@ -3739,7 +3739,7 @@ dependencies = [ [[package]] name = "micromzpaf" -version = "0.26.0" +version = "0.27.0" dependencies = [ "rustyms", "serde", @@ -6047,7 +6047,7 @@ dependencies = [ [[package]] name = "timscentroid" -version = "0.26.0" +version = "0.27.0" dependencies = [ "arrow", "async-trait", @@ -6077,7 +6077,7 @@ dependencies = [ [[package]] name = "timsquery" -version = "0.26.0" +version = "0.27.0" dependencies = [ "array2d", "arrow", @@ -6100,7 +6100,7 @@ dependencies = [ [[package]] name = "timsquery_cli" -version = "0.26.0" +version = "0.27.0" dependencies = [ "clap", "half", @@ -6119,7 +6119,7 @@ dependencies = [ [[package]] name = "timsquery_pyo3" -version = "0.26.0" +version = "0.27.0" dependencies = [ "numpy", "pyo3", @@ -6130,7 +6130,7 @@ dependencies = [ [[package]] name = "timsquery_viewer" -version = "0.26.0" +version = "0.27.0" dependencies = [ "calibrt", "clap", @@ -6174,7 +6174,7 @@ dependencies = [ [[package]] name = "timsseek" -version = "0.26.0" +version = "0.27.0" dependencies = [ "arrow", "calibrt", @@ -6198,7 +6198,7 @@ dependencies = [ [[package]] name = "timsseek_cli" -version = "0.26.0" +version = "0.27.0" dependencies = [ "clap", "indicatif", diff --git a/Cargo.toml b/Cargo.toml index 1e0f71d..545df67 100644 --- a/Cargo.toml 
+++ b/Cargo.toml @@ -25,7 +25,7 @@ default-members = [ ] [workspace.package] -version = "0.26.0" +version = "0.27.0" edition = "2024" authors = ["Sebastian Paez"] license = "Apache-2.0" diff --git a/pyproject.toml b/pyproject.toml index ab6488a..8eb46a6 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [project] name = "timsseek-workspace" -version = "0.26.0" +version = "0.27.0" requires-python = ">=3.11,<3.13" dependencies = [ "jupyter[python]>=1.1.1", @@ -48,7 +48,7 @@ packages = [ ] [tool.bumpver] -current_version = "0.26.0" +current_version = "0.27.0" version_pattern = "MAJOR.MINOR.PATCH[-PYTAGNUM]" tag_message = "v{new_version}" commit_message = "chore: bump version to {new_version}" diff --git a/python/speclib_builder/pyproject.toml b/python/speclib_builder/pyproject.toml index be61fa8..f244058 100644 --- a/python/speclib_builder/pyproject.toml +++ b/python/speclib_builder/pyproject.toml @@ -4,8 +4,8 @@ build-backend = "hatchling.build" [project] name = "speclib_builder" -version = "0.26.0" -requires-python = ">=3.11,<3.13" +version = "0.27.0" +requires-python = ">=3.11,<3.14" dependencies = [ "rich", "tqdm", diff --git a/python/timsquery_pyo3/pyproject.toml b/python/timsquery_pyo3/pyproject.toml index 6577d5f..5b51b13 100644 --- a/python/timsquery_pyo3/pyproject.toml +++ b/python/timsquery_pyo3/pyproject.toml @@ -4,7 +4,8 @@ build-backend = "maturin" [project] name = "timsquery_pyo3" -requires-python = ">=3.9" +version = "0.27.0" +requires-python = ">=3.11,<3.14" classifiers = [ "Programming Language :: Rust", "Programming Language :: Python :: Implementation :: CPython", diff --git a/uv.lock b/uv.lock index 3b3c50b..9b0cff1 100644 --- a/uv.lock +++ b/uv.lock @@ -2296,7 +2296,7 @@ wheels = [ [[package]] name = "speclib-builder" -version = "0.26.0" +version = "0.27.0" source = { editable = "python/speclib_builder" } dependencies = [ { name = "loguru" }, @@ -2409,7 +2409,7 @@ wheels = [ [[package]] name = "timsseek-workspace" -version = "0.26.0" 
+version = "0.27.0" source = { virtual = "." } dependencies = [ { name = "jupyter" }, From 844b242ace7cd26bb9bd321f15ab047b96036307 Mon Sep 17 00:00:00 2001 From: "J. Sebastian Paez" Date: Sat, 11 Apr 2026 15:16:48 -0700 Subject: [PATCH 58/64] fix: address 3 critical review findings - Viewer deadlock: drop channel receiver before joining background thread in reset() and Drop, use try_send for Done messages so the background thread never blocks on a full channel. - RT fields: replace ambiguous query_rt_seconds/delta_rt/sq_delta_rt/ recalibrated_rt with explicit library_rt, calibrated_rt_seconds, obs_rt_seconds, and calibrated_sq_delta_rt computed from calibrated residuals. ML features updated accordingly. - Batch error handling: replace .unwrap() on run_pipeline with proper error propagation; abort batch on I/O errors (disk full, permissions) instead of retrying every remaining file. --- rust/timsquery_viewer/src/calibration.rs | 17 ++++++++---- rust/timsseek/src/ml/qvalues.rs | 8 +++--- rust/timsseek/src/scoring/apex_finding.rs | 8 ++++-- rust/timsseek/src/scoring/parquet_writer.rs | 12 +++------ rust/timsseek/src/scoring/pipeline.rs | 7 +++-- rust/timsseek/src/scoring/results.rs | 29 ++++++++++----------- rust/timsseek_cli/src/main.rs | 11 +++++++- 7 files changed, 55 insertions(+), 37 deletions(-) diff --git a/rust/timsquery_viewer/src/calibration.rs b/rust/timsquery_viewer/src/calibration.rs index abf6b01..2107367 100644 --- a/rust/timsquery_viewer/src/calibration.rs +++ b/rust/timsquery_viewer/src/calibration.rs @@ -285,12 +285,12 @@ impl ViewerCalibrationState { /// Stop and reset all state. Returns to Idle. pub fn reset(&mut self) { self.stop(); - // Wait for the thread to finish (non-blocking check; if it takes - // too long the Drop impl will also try). + // Drop the receiver BEFORE joining — if the background thread is + // blocked on tx.send(Done), dropping the receiver unblocks it. 
+ self.receiver = None; if let Some(handle) = self.thread_handle.take() { let _ = handle.join(); } - self.receiver = None; self.phase = CalibrationPhase::Idle; self.n_scored = 0; self.n_calibrants = 0; @@ -419,7 +419,9 @@ impl ViewerCalibrationState { continue; } _ => { - let _ = tx.send(CalibrationMessage::Done { n_scored }); + if let Err(e) = tx.try_send(CalibrationMessage::Done { n_scored }) { + tracing::warn!("Calibration thread: failed to send Done on stop: {e}"); + } return; } } @@ -498,7 +500,9 @@ impl ViewerCalibrationState { }); } - let _ = tx.send(CalibrationMessage::Done { n_scored }); + if let Err(e) = tx.try_send(CalibrationMessage::Done { n_scored }) { + tracing::warn!("Calibration thread: failed to send Done on completion: {e}"); + } } // ----------------------------------------------------------------------- @@ -1079,6 +1083,9 @@ impl ViewerCalibrationState { impl Drop for ViewerCalibrationState { fn drop(&mut self) { self.stop(); + // Drop the receiver BEFORE joining — unblocks any tx.send(Done) + // in the background thread that would otherwise deadlock. 
+ self.receiver = None; if let Some(handle) = self.thread_handle.take() { let _ = handle.join(); } diff --git a/rust/timsseek/src/ml/qvalues.rs b/rust/timsseek/src/ml/qvalues.rs index 1f56e1d..045beac 100644 --- a/rust/timsseek/src/ml/qvalues.rs +++ b/rust/timsseek/src/ml/qvalues.rs @@ -122,7 +122,7 @@ impl FeatureLike for CompetedCandidate { (s.precursor_mz / 5.0).round(), s.precursor_charge as f64, s.precursor_mobility as f64, - s.query_rt_seconds.round() as f64, + s.calibrated_rt_seconds.round() as f64, s.n_scored_fragments as f64, // Combined s.main_score as f64, @@ -131,8 +131,8 @@ impl FeatureLike for CompetedCandidate { s.delta_second_next as f64, s.obs_rt_seconds as f64, s.obs_mobility as f64, - s.delta_rt as f64, - s.sq_delta_rt as f64, + (s.obs_rt_seconds - s.calibrated_rt_seconds) as f64, + s.calibrated_sq_delta_rt as f64, s.delta_ms1_ms2_mobility as f64, s.sq_delta_ms1_ms2_mobility as f64, s.rising_cycles as f64, @@ -204,7 +204,7 @@ impl FeatureLike for CompetedCandidate { s.ms2_intensity_ratios[6] as f64, self.delta_group as f64, self.delta_group_ratio as f64, - s.recalibrated_rt as f64, + s.calibrated_rt_seconds as f64, s.calibrated_sq_delta_rt as f64, // Derived intensity features { diff --git a/rust/timsseek/src/scoring/apex_finding.rs b/rust/timsseek/src/scoring/apex_finding.rs index 4c6bd10..4ee66f3 100644 --- a/rust/timsseek/src/scoring/apex_finding.rs +++ b/rust/timsseek/src/scoring/apex_finding.rs @@ -89,8 +89,12 @@ pub struct PeptideMetadata { /// Library identifier for this peptide. pub library_id: u32, - /// Reference retention time (seconds) from library. - pub query_rt_seconds: f32, + /// Retention time from spectral library (unit-agnostic: iRT, minutes, or seconds). + pub library_rt: f32, + + /// Calibrated retention time in seconds (predicted observed RT for this run). + /// Equals library_rt when no calibration has been applied. + pub calibrated_rt_seconds: f32, /// Reference ion mobility (ook0) from library. 
pub ref_mobility_ook0: f32, diff --git a/rust/timsseek/src/scoring/parquet_writer.rs b/rust/timsseek/src/scoring/parquet_writer.rs index 38d2028..7269992 100644 --- a/rust/timsseek/src/scoring/parquet_writer.rs +++ b/rust/timsseek/src/scoring/parquet_writer.rs @@ -79,12 +79,10 @@ pub fn build_record_batch(results: &[FinalResult]) -> RecordBatch { precursor_charge: _, precursor_mobility: _, is_target: _, - query_rt_seconds: _, + library_rt: _, + calibrated_rt_seconds: _, obs_rt_seconds: _, - delta_rt: _, - sq_delta_rt: _, calibrated_sq_delta_rt: _, - recalibrated_rt: _, obs_mobility: _, delta_ms1_ms2_mobility: _, sq_delta_ms1_ms2_mobility: _, @@ -151,12 +149,10 @@ pub fn build_record_batch(results: &[FinalResult]) -> RecordBatch { "is_target" => DataType::Boolean, BooleanArray(|r: &FinalResult| Some(r.scoring.is_target)); // RT - "query_rt_seconds" => DataType::Float32, Float32Array(|r: &FinalResult| Some(r.scoring.query_rt_seconds)); + "library_rt" => DataType::Float32, Float32Array(|r: &FinalResult| Some(r.scoring.library_rt)); + "calibrated_rt_seconds" => DataType::Float32, Float32Array(|r: &FinalResult| Some(r.scoring.calibrated_rt_seconds)); "obs_rt_seconds" => DataType::Float32, Float32Array(|r: &FinalResult| Some(r.scoring.obs_rt_seconds)); - "delta_rt" => DataType::Float32, Float32Array(|r: &FinalResult| Some(r.scoring.delta_rt)); - "sq_delta_rt" => DataType::Float32, Float32Array(|r: &FinalResult| Some(r.scoring.sq_delta_rt)); "calibrated_sq_delta_rt" => DataType::Float32, Float32Array(|r: &FinalResult| Some(r.scoring.calibrated_sq_delta_rt)); - "recalibrated_rt" => DataType::Float32, Float32Array(|r: &FinalResult| Some(r.scoring.recalibrated_rt)); // Mobility "obs_mobility" => DataType::Float32, Float32Array(|r: &FinalResult| Some(r.scoring.obs_mobility)); diff --git a/rust/timsseek/src/scoring/pipeline.rs b/rust/timsseek/src/scoring/pipeline.rs index d96694f..160e790 100644 --- a/rust/timsseek/src/scoring/pipeline.rs +++ 
b/rust/timsseek/src/scoring/pipeline.rs @@ -344,11 +344,13 @@ impl Scorer { Some(TOP_N_FRAGMENTS), )?; + let library_rt = item.query.rt_seconds(); let metadata = super::apex_finding::PeptideMetadata { digest: item.digest.clone(), charge: item.query.precursor_charge(), library_id: extraction.chromatograms.eg.id() as u32, - query_rt_seconds: item.query.rt_seconds(), + library_rt, + calibrated_rt_seconds: library_rt, // no calibration in broad path ref_mobility_ook0: item.query.mobility_ook0(), ref_precursor_mz: item.query.mono_precursor_mz(), }; @@ -570,7 +572,8 @@ impl Scorer { digest: item.digest.clone(), charge: item.query.precursor_charge(), library_id: agg.eg.id() as u32, - query_rt_seconds: calibrated_rt.0, + library_rt: original_irt.0 as f32, + calibrated_rt_seconds: calibrated_rt.0, ref_mobility_ook0: item.query.mobility_ook0(), ref_precursor_mz: item.query.mono_precursor_mz(), }; diff --git a/rust/timsseek/src/scoring/results.rs b/rust/timsseek/src/scoring/results.rs index 6d63a56..ddb3625 100644 --- a/rust/timsseek/src/scoring/results.rs +++ b/rust/timsseek/src/scoring/results.rs @@ -24,12 +24,10 @@ pub struct ScoringFields { pub is_target: bool, // RT - pub query_rt_seconds: f32, + pub library_rt: f32, + pub calibrated_rt_seconds: f32, pub obs_rt_seconds: f32, - pub delta_rt: f32, - pub sq_delta_rt: f32, pub calibrated_sq_delta_rt: f32, - pub recalibrated_rt: f32, // Mobility pub obs_mobility: f32, @@ -201,8 +199,9 @@ pub struct ScoredCandidateBuilder { precursor_mobility: SetField, is_target: SetField, - // --- Reference RT / mobility (used to compute deltas) --- - query_rt_seconds: SetField, + // --- RT --- + library_rt: SetField, + calibrated_rt_seconds: SetField, // --- Observed RT / mobility --- obs_rt_seconds: SetField, @@ -278,7 +277,8 @@ impl ScoredCandidateBuilder { self.precursor_charge = SetField::Some(metadata.charge); self.precursor_mz = SetField::Some(metadata.ref_precursor_mz); self.precursor_mobility = 
SetField::Some(metadata.ref_mobility_ook0); - self.query_rt_seconds = SetField::Some(metadata.query_rt_seconds); + self.library_rt = SetField::Some(metadata.library_rt); + self.calibrated_rt_seconds = SetField::Some(metadata.calibrated_rt_seconds); self } @@ -394,9 +394,10 @@ impl ScoredCandidateBuilder { } let obs_rt_seconds = expect_some!(obs_rt_seconds); - let ref_rt = expect_some!(query_rt_seconds); - let delta_rt = obs_rt_seconds - ref_rt; - let sq_delta_rt = delta_rt * delta_rt; + let library_rt = expect_some!(library_rt); + let calibrated_rt_seconds = expect_some!(calibrated_rt_seconds); + let calibrated_delta_rt = obs_rt_seconds - calibrated_rt_seconds; + let calibrated_sq_delta_rt = calibrated_delta_rt * calibrated_delta_rt; let delta_ms1_ms2_mobility = expect_some!(delta_ms1_ms2_mobility); let sq_delta_ms1_ms2_mobility = delta_ms1_ms2_mobility * delta_ms1_ms2_mobility; @@ -416,12 +417,10 @@ impl ScoredCandidateBuilder { is_target: expect_some!(is_target), // RT - query_rt_seconds: ref_rt, + library_rt, + calibrated_rt_seconds, obs_rt_seconds, - delta_rt, - sq_delta_rt, - calibrated_sq_delta_rt: sq_delta_rt, - recalibrated_rt: ref_rt, + calibrated_sq_delta_rt, // Mobility obs_mobility: expect_some!(obs_mobility), diff --git a/rust/timsseek_cli/src/main.rs b/rust/timsseek_cli/src/main.rs index 2c4395e..0b0a159 100644 --- a/rust/timsseek_cli/src/main.rs +++ b/rust/timsseek_cli/src/main.rs @@ -311,7 +311,9 @@ fn process_single_file( max_qvalue, load_index_ms, ) - .unwrap(); + .map_err(|e| errors::CliError::DataReading { + source: format!("{}", e), + })?; info!("Successfully processed {:?}", dotd_file); Ok(report) @@ -556,6 +558,13 @@ fn main() -> std::result::Result<(), errors::CliError> { } Err(e) => { error!("Failed to process {:?}: {}", dotd_file, e); + // I/O errors are likely systemic (disk full, permissions) — + // abort the batch instead of failing every remaining file. + if matches!(e, errors::CliError::Io { .. 
}) { + failed_files.push((dotd_file.clone(), e)); + error!("Aborting batch due to I/O error"); + break; + } failed_files.push((dotd_file.clone(), e)); } } From f930b2156d2f9594f809ccd9cd7101c740124e35 Mon Sep 17 00:00:00 2001 From: "J. Sebastian Paez" Date: Sat, 11 Apr 2026 15:30:32 -0700 Subject: [PATCH 59/64] fix: address important review findings (I1-I3) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit - Grid::reset() preserves bin center geometry instead of zeroing nodes; add Grid::reconfigure() for changing dimensions. - CalibrationState::update returns Result — rejects NaN/Inf coordinates and weights at the grid boundary instead of silently accumulating them. Propagated as error in CLI, logged as warning in viewer. - Replace bare .unwrap() on partial_cmp with descriptive .expect() messages documenting the invariant that NaN scores should not reach the sort phase. --- rust/calibrt/src/grid.rs | 28 +++++++++++++++++++----- rust/calibrt/src/lib.rs | 17 ++++++++------ rust/timsquery_viewer/src/calibration.rs | 6 +++-- rust/timsseek_cli/src/processing.rs | 17 +++++++++----- 4 files changed, 48 insertions(+), 20 deletions(-) diff --git a/rust/calibrt/src/grid.rs b/rust/calibrt/src/grid.rs index b156034..6e817db 100644 --- a/rust/calibrt/src/grid.rs +++ b/rust/calibrt/src/grid.rs @@ -83,8 +83,7 @@ impl Grid { pub fn add_point(&mut self, point: &Point) -> Result<(), CalibRtError> { let Point { library, observed, weight } = point; - // If the weight is infinite or NaN, we yell ... - if weight.is_infinite() || weight.is_nan() { + if !weight.is_finite() || !library.is_finite() || !observed.is_finite() { return Err(CalibRtError::UnsupportedWeight(*weight)); } @@ -174,10 +173,17 @@ impl Grid { Ok(()) } - /// Zero all node weights and suppression flags. Keeps allocation. + /// Zero all node weights and suppression flags, preserving bin geometry. + /// Restores each node center to the midpoint of its bin. 
pub fn reset(&mut self) { - for node in &mut self.nodes { - node.center = Point::default(); + for (i, node) in self.nodes.iter_mut().enumerate() { + let r = i / self.bins; + let c = i % self.bins; + node.center = Point { + library: self.x_range.0 + (c as f64 + 0.5) * (self.x_span / self.bins as f64), + observed: self.y_range.0 + (r as f64 + 0.5) * (self.y_span / self.bins as f64), + weight: 0.0, + }; node.suppressed = false; node.sum_wx = 0.0; node.sum_wy = 0.0; @@ -187,6 +193,18 @@ impl Grid { self.weights_b.reset_with_value(self.bins, self.bins, 0.0); } + /// Reset the grid with new dimensions and ranges. Reallocates if the + /// bin count changes. + pub fn reconfigure( + &mut self, + bins: usize, + x_range: (f64, f64), + y_range: (f64, f64), + ) -> Result<(), CalibRtError> { + *self = Self::new(bins, x_range, y_range)?; + Ok(()) + } + /// Read access to all grid cells. pub fn grid_cells(&self) -> &[Node] { &self.nodes diff --git a/rust/calibrt/src/lib.rs b/rust/calibrt/src/lib.rs index 7642006..6b03af7 100644 --- a/rust/calibrt/src/lib.rs +++ b/rust/calibrt/src/lib.rs @@ -204,11 +204,14 @@ impl CalibrationState { }) } - pub fn update(&mut self, points: impl Iterator, ObservedRTSeconds, f64)>) { + /// Feed points into the grid. Returns an error if any point has + /// non-finite coordinates or weight (NaN/Inf), indicating a bug upstream. 
+ pub fn update(&mut self, points: impl Iterator, ObservedRTSeconds, f64)>) -> Result<(), CalibRtError> { for (lib_rt, obs_rt, w) in points { - let _ = self.grid.add_point(&Point { library: lib_rt.0, observed: obs_rt.0, weight: w }); + self.grid.add_point(&Point { library: lib_rt.0, observed: obs_rt.0, weight: w })?; } self.stale = true; + Ok(()) } pub fn fit(&mut self) { @@ -377,7 +380,7 @@ impl CalibrationState { let y_range = compute_range(snapshot.points.iter().map(|p| p[1]))?; let mut state = Self::new(snapshot.grid_size, x_range, y_range, snapshot.lookback)?; - state.update(snapshot.points.iter().map(|p| (LibraryRT(p[0]), ObservedRTSeconds(p[1]), p[2]))); + state.update(snapshot.points.iter().map(|p| (LibraryRT(p[0]), ObservedRTSeconds(p[1]), p[2])))?; state.fit(); Ok(state) } @@ -401,7 +404,7 @@ mod calibration_state_tests { }) .collect(); - state.update(points.into_iter()); + state.update(points.into_iter()).unwrap(); assert!(state.is_stale()); state.fit(); @@ -417,7 +420,7 @@ mod calibration_state_tests { fn test_reset_clears_state() { let mut state = CalibrationState::new(10, (0.0, 100.0), (0.0, 100.0), 30).unwrap(); let points = vec![(LibraryRT(25.0), ObservedRTSeconds(25.0), 1.0), (LibraryRT(75.0), ObservedRTSeconds(75.0), 1.0)]; - state.update(points.into_iter()); + state.update(points.into_iter()).unwrap(); state.fit(); assert!(state.curve().is_some()); @@ -433,14 +436,14 @@ mod calibration_state_tests { // First fit: y = x let points1: Vec<_> = (0..10).map(|i| (LibraryRT((i as f64) * 10.0 + 5.0), ObservedRTSeconds((i as f64) * 10.0 + 5.0), 1.0)).collect(); - state.update(points1.into_iter()); + state.update(points1.into_iter()).unwrap(); state.fit(); let curve1_pred = state.curve().unwrap().predict(LibraryRT(50.0)).unwrap(); // Reset and refit: y = 2x state.reset(); let points2: Vec<_> = (0..10).map(|i| (LibraryRT((i as f64) * 10.0 + 5.0), ObservedRTSeconds((i as f64) * 20.0 + 5.0), 1.0)).collect(); - state.update(points2.into_iter()); + 
state.update(points2.into_iter()).unwrap(); state.fit(); let curve2_pred = state.curve().unwrap().predict(LibraryRT(50.0)).unwrap(); diff --git a/rust/timsquery_viewer/src/calibration.rs b/rust/timsquery_viewer/src/calibration.rs index 2107367..f7e6f12 100644 --- a/rust/timsquery_viewer/src/calibration.rs +++ b/rust/timsquery_viewer/src/calibration.rs @@ -332,11 +332,13 @@ impl ViewerCalibrationState { // Reset first — each snapshot is the full heap, not a delta. if let Some(cs) = &mut self.calibration_state { cs.reset(); - cs.update( + if let Err(e) = cs.update( self.snapshot_points .iter() .map(|&(lib_rt, apex_rt, _weight)| (lib_rt, apex_rt, 1.0)), - ); // snapshot_points already typed + ) { + tracing::warn!("Calibration update rejected points: {:?}", e); + } cs.fit(); let has_curve = cs.curve().is_some(); let n_path = cs.path_indices().len(); diff --git a/rust/timsseek_cli/src/processing.rs b/rust/timsseek_cli/src/processing.rs index f7431a9..d2d0222 100644 --- a/rust/timsseek_cli/src/processing.rs +++ b/rust/timsseek_cli/src/processing.rs @@ -287,7 +287,8 @@ pub fn execute_pipeline( let phase4_start = Instant::now(); let mut competed = target_decoy_compete(results); competed.sort_unstable_by(|x, y| { - y.scoring.main_score.partial_cmp(&x.scoring.main_score).unwrap() + y.scoring.main_score.partial_cmp(&x.scoring.main_score) + .expect("NaN main_score should have been filtered during Phase 3 scoring") }); let phase4_ms = phase4_start.elapsed().as_millis() as u64; let total_after_competition = competed.len(); @@ -467,7 +468,8 @@ fn calibrate_from_phase1( } }) // Best = most shared fragments, then closest RT - .min_by(|a, b| b.0.cmp(&a.0).then(a.1.partial_cmp(&b.1).unwrap())) + .min_by(|a, b| b.0.cmp(&a.0).then(a.1.partial_cmp(&b.1) + .expect("NaN RT residual in calibrant matching"))) .map(|(_, _, rt)| rt)? 
} None => calib_item.query.rt_seconds(), @@ -504,7 +506,7 @@ fn calibrate_from_phase1( let mut cal_state = CalibratedGrid::new( config.grid_size, (min_x, max_x), (min_y, max_y), config.dp_lookback, )?; - cal_state.update(points.iter().map(|p| (LibraryRT(p.library), ObservedRTSeconds(p.observed), p.weight))); + cal_state.update(points.iter().map(|p| (LibraryRT(p.library), ObservedRTSeconds(p.observed), p.weight)))?; cal_state.fit(); let cal_curve = cal_state.curve() .ok_or(CalibRtError::NoPoints)? @@ -572,7 +574,8 @@ fn calibrate_from_phase1( (p.observed - predicted).abs() }) .collect(); - abs_residuals.sort_by(|a, b| a.partial_cmp(b).unwrap()); + abs_residuals.sort_by(|a, b| a.partial_cmp(b) + .expect("NaN calibration residual in tolerance estimation")); let mad_seconds = abs_residuals .get(abs_residuals.len() / 2) .copied() @@ -694,7 +697,8 @@ fn target_decoy_compete(mut results: Vec) -> Vec) -> Vec Date: Sat, 11 Apr 2026 15:36:59 -0700 Subject: [PATCH 60/64] fix: address remaining important review findings (I4-I10) - I4: Document count_falling_steps convention (apex counts as 1) - I5: get_frag_range returns Result instead of panicking on non-DIA files - I6: n_scored in calibration JSON now reflects Phase 1 library size, not calibrant count (which was redundant with n_calibrants) - I7: rt_range_seconds in calibration JSON now uses raw file RT range from the cycle mapping, not the calibrant subset - I8: Fold progress uses atomic eprintln! lines instead of split eprint!/eprintln! 
to avoid interleaving with progress bars - I9: CalibrantCandidate Ord uses f32::total_cmp for sound ordering - I10: Parquet writer returns Result from add/flush/close instead of panicking on write errors; propagated as TimsSeekError::Io --- rust/timsseek/src/ml/cv.rs | 6 ++--- rust/timsseek/src/scoring/apex_finding.rs | 2 ++ rust/timsseek/src/scoring/parquet_writer.rs | 30 ++++++++++++--------- rust/timsseek/src/scoring/pipeline.rs | 2 +- rust/timsseek_cli/src/main.rs | 25 ++++++++++------- rust/timsseek_cli/src/processing.rs | 17 ++++++++---- 6 files changed, 51 insertions(+), 31 deletions(-) diff --git a/rust/timsseek/src/ml/cv.rs b/rust/timsseek/src/ml/cv.rs index a02a35a..4929ab1 100644 --- a/rust/timsseek/src/ml/cv.rs +++ b/rust/timsseek/src/ml/cv.rs @@ -404,10 +404,9 @@ impl CrossValidatedScorer { self.fold_classifiers.clear(); (0..self.n_folds).for_each(|_| self.fold_classifiers.push(None)); for fold in 0..self.n_folds { - eprint!(" Training fold {}/{} ...", fold + 1, self.n_folds); let start = std::time::Instant::now(); self.fit_fold(fold, train_buffer, val_buffer)?; - eprintln!(" {:?}", start.elapsed()); + eprintln!(" Training fold {}/{} ... {:?}", fold + 1, self.n_folds, start.elapsed()); } Ok(()) @@ -417,7 +416,6 @@ impl CrossValidatedScorer { let mut scores = vec![0.0; self.data.len()]; let mut buffer = DataBuffer::default(); - eprint!(" Scoring folds ..."); let score_start = std::time::Instant::now(); for train_i in 0..self.n_folds { let early_stop_i = self.next_fold(train_i); @@ -443,7 +441,7 @@ impl CrossValidatedScorer { } } } - eprintln!(" {:?}", score_start.elapsed()); + eprintln!(" Scoring folds ... 
{:?}", score_start.elapsed()); let div_factor = (self.n_folds - 2) as f64; scores.iter_mut().for_each(|x| { diff --git a/rust/timsseek/src/scoring/apex_finding.rs b/rust/timsseek/src/scoring/apex_finding.rs index 4ee66f3..7535f35 100644 --- a/rust/timsseek/src/scoring/apex_finding.rs +++ b/rust/timsseek/src/scoring/apex_finding.rs @@ -816,6 +816,8 @@ impl<'a> PeakPicker<'a> { } } +/// Count non-increasing steps from `start` in direction `step`. +/// Returns at least 1 (the apex itself counts), capped at MAX_WIDTH. fn count_falling_steps(start: usize, step: i32, slc: &[f32]) -> u8 { const MAX_WIDTH: u8 = 10; let mut count = 0; diff --git a/rust/timsseek/src/scoring/parquet_writer.rs b/rust/timsseek/src/scoring/parquet_writer.rs index 7269992..7531478 100644 --- a/rust/timsseek/src/scoring/parquet_writer.rs +++ b/rust/timsseek/src/scoring/parquet_writer.rs @@ -61,7 +61,7 @@ fn expand_f32_array_columns( /// **COMPILE-TIME SAFETY:** The exhaustive destructure at the top ensures that /// adding a field to `ScoringFields` or `FinalResult` without updating this /// function causes a compile error. -pub fn build_record_batch(results: &[FinalResult]) -> RecordBatch { +pub fn build_record_batch(results: &[FinalResult]) -> std::io::Result { // ----------------------------------------------------------------------- // Exhaustive destructure for compile-time completeness check. // Every field must be listed -- no `..` allowed. 
@@ -229,7 +229,8 @@ pub fn build_record_batch(results: &[FinalResult]) -> RecordBatch { // Build the RecordBatch // ----------------------------------------------------------------------- let schema = Arc::new(Schema::new(fields)); - RecordBatch::try_new(schema, arrays).expect("schema/array mismatch in build_record_batch") + RecordBatch::try_new(schema, arrays) + .map_err(|e| std::io::Error::new(std::io::ErrorKind::InvalidData, e)) } // --------------------------------------------------------------------------- @@ -257,7 +258,7 @@ impl ResultParquetWriter { }; // Build schema from a zero-row batch - let empty_batch = build_record_batch(&[]); + let empty_batch = build_record_batch(&[])?; let schema = empty_batch.schema(); let props = WriterProperties::builder() @@ -274,25 +275,30 @@ impl ResultParquetWriter { }) } - pub fn add(&mut self, result: FinalResult) { + pub fn add(&mut self, result: FinalResult) -> std::io::Result<()> { self.buffer.push(result); if self.buffer.len() >= self.row_group_size { - self.flush(); + self.flush()?; } + Ok(()) } - fn flush(&mut self) { + fn flush(&mut self) -> std::io::Result<()> { if self.buffer.is_empty() { - return; + return Ok(()); } debug!("Flushing {} results to parquet", self.buffer.len()); - let batch = build_record_batch(&self.buffer); - self.writer.write(&batch).expect("parquet write failed"); + let batch = build_record_batch(&self.buffer)?; + self.writer.write(&batch) + .map_err(|e| std::io::Error::new(std::io::ErrorKind::Other, e))?; self.buffer.clear(); + Ok(()) } - pub fn close(mut self) { - self.flush(); - self.writer.close().expect("parquet close failed"); + pub fn close(mut self) -> std::io::Result<()> { + self.flush()?; + self.writer.close() + .map_err(|e| std::io::Error::new(std::io::ErrorKind::Other, e))?; + Ok(()) } } diff --git a/rust/timsseek/src/scoring/pipeline.rs b/rust/timsseek/src/scoring/pipeline.rs index 160e790..df64532 100644 --- a/rust/timsseek/src/scoring/pipeline.rs +++ 
b/rust/timsseek/src/scoring/pipeline.rs @@ -87,7 +87,7 @@ impl PartialOrd for CalibrantCandidate { impl Ord for CalibrantCandidate { fn cmp(&self, other: &Self) -> std::cmp::Ordering { - self.partial_cmp(other).unwrap_or(std::cmp::Ordering::Equal) + self.score.total_cmp(&other.score) } } diff --git a/rust/timsseek_cli/src/main.rs b/rust/timsseek_cli/src/main.rs index 0b0a159..1e7b19f 100644 --- a/rust/timsseek_cli/src/main.rs +++ b/rust/timsseek_cli/src/main.rs @@ -197,12 +197,18 @@ fn validate_inputs( }) } -fn get_frag_range(file: &TimsTofPath) -> TupleRange { - let reader = file.load_frame_reader().unwrap(); - let upper_mz = reader +fn get_frag_range(file: &TimsTofPath) -> Result, errors::CliError> { + let reader = file.load_frame_reader().map_err(|e| errors::CliError::DataReading { + source: format!("Failed to load frame reader: {:?}", e), + })?; + let dia_windows = reader .dia_windows .as_ref() - .expect("DIA windows should be present for a dia run") + .ok_or_else(|| errors::CliError::DataReading { + source: "File does not contain DIA windows — is this a DIA run?".to_string(), + })?; + + let upper_mz = dia_windows .iter() .map(|w| { w.isolation_mz @@ -213,9 +219,7 @@ fn get_frag_range(file: &TimsTofPath) -> TupleRange { }) .fold(0.0, f64::max); - let lower_mz = reader - .dia_windows - .expect("DIA windows should be present for a dia run") + let lower_mz = dia_windows .iter() .map(|w| { w.isolation_mz @@ -225,7 +229,10 @@ fn get_frag_range(file: &TimsTofPath) -> TupleRange { .fold(f64::MAX, f64::min) }) .fold(f64::MAX, f64::min); - TupleRange::try_new(lower_mz, upper_mz).unwrap() + + TupleRange::try_new(lower_mz, upper_mz).map_err(|e| errors::CliError::DataReading { + source: format!("Invalid DIA m/z range: {:?}", e), + }) } fn process_single_file( @@ -257,7 +264,7 @@ fn process_single_file( let load_index_ms = index_start.elapsed().as_millis() as u64; println!("Loading index ........... 
{:.1}s", load_index_ms as f64 / 1000.0); - let fragmented_range = get_frag_range(&timstofpath); + let fragmented_range = get_frag_range(&timstofpath)?; let pipeline = Scorer { index, diff --git a/rust/timsseek_cli/src/processing.rs b/rust/timsseek_cli/src/processing.rs index d2d0222..3555af0 100644 --- a/rust/timsseek_cli/src/processing.rs +++ b/rust/timsseek_cli/src/processing.rs @@ -241,15 +241,16 @@ pub fn execute_pipeline( .iter() .map(|p| (p[0], p[1], p[2])) .collect(); - let rt_lo = calibrant_points.iter().map(|p| p[1]).fold(f64::MAX, f64::min); - let rt_hi = calibrant_points.iter().map(|p| p[1]).fold(f64::MIN, f64::max); + let (rt_lo_ms, rt_hi_ms) = pipeline.index.ms1_cycle_mapping().range_milis(); + let rt_lo = rt_lo_ms as f64 / 1000.0; + let rt_hi = rt_hi_ms as f64 / 1000.0; let cal_json_path = out_path.directory.join("calibration.json"); if let Err(e) = calibration.save_json( &cal_points_tuples, [rt_lo, rt_hi], calib_config.grid_size, calib_config.dp_lookback, - calibrant_points.len(), + phase1_lib.len(), &cal_json_path, ) { tracing::warn!("Failed to save calibration: {}", e); @@ -339,10 +340,16 @@ pub fn execute_pipeline( })?; for res in data.into_iter() { if res.qvalue <= max_qvalue { - pq_writer.add(res); + pq_writer.add(res).map_err(|e| TimsSeekError::Io { + path: out_path_pq.clone().into(), + source: e, + })?; } } - pq_writer.close(); + pq_writer.close().map_err(|e| TimsSeekError::Io { + path: out_path_pq.clone().into(), + source: e, + })?; let phase6_ms = phase6_start.elapsed().as_millis() as u64; info!("Wrote final results to {:?}", out_path_pq); println!( From 1e908b98b4cd927f7b7aa956c228db5e7ca19275 Mon Sep 17 00:00:00 2001 From: "J. 
Sebastian Paez" Date: Sat, 11 Apr 2026 15:40:26 -0700 Subject: [PATCH 61/64] chore: minor review suggestions - Extract save_calibration_dialog helper to deduplicate Paused/Done save button logic in the viewer - Fix redundant sqrt() call in apex_finding compute_pass_1 - Mark Parquet columns as non-nullable (no data is ever null, saves validity bitmap overhead) --- rust/timsquery_viewer/src/calibration.rs | 62 ++++++++------------- rust/timsseek/src/scoring/apex_finding.rs | 4 +- rust/timsseek/src/scoring/parquet_writer.rs | 2 +- 3 files changed, 27 insertions(+), 41 deletions(-) diff --git a/rust/timsquery_viewer/src/calibration.rs b/rust/timsquery_viewer/src/calibration.rs index f7e6f12..e807e5b 100644 --- a/rust/timsquery_viewer/src/calibration.rs +++ b/rust/timsquery_viewer/src/calibration.rs @@ -657,25 +657,7 @@ impl ViewerCalibrationState { self.stop(); } if ui.button("Save").clicked() { - if let Some(path) = rfd::FileDialog::new() - .set_file_name("calibration.json") - .add_filter("JSON", &["json"]) - .save_file() - { - let rt_range = if let crate::app::IndexedDataState::Loaded { - index, .. - } = indexed_data - { - let cycle_mapping = index.ms1_cycle_mapping(); - let (rt_min_ms, rt_max_ms) = cycle_mapping.range_milis(); - [rt_min_ms as f64 / 1000.0, rt_max_ms as f64 / 1000.0] - } else { - [0.0, 0.0] - }; - if let Err(e) = self.save_to_file(&path, rt_range) { - tracing::error!("Failed to save calibration: {}", e); - } - } + save_calibration_dialog(self, indexed_data); } } CalibrationPhase::Done => { @@ -683,25 +665,7 @@ impl ViewerCalibrationState { self.reset(); } if ui.button("Save").clicked() { - if let Some(path) = rfd::FileDialog::new() - .set_file_name("calibration.json") - .add_filter("JSON", &["json"]) - .save_file() - { - let rt_range = if let crate::app::IndexedDataState::Loaded { - index, .. 
- } = indexed_data - { - let cycle_mapping = index.ms1_cycle_mapping(); - let (rt_min_ms, rt_max_ms) = cycle_mapping.range_milis(); - [rt_min_ms as f64 / 1000.0, rt_max_ms as f64 / 1000.0] - } else { - [0.0, 0.0] - }; - if let Err(e) = self.save_to_file(&path, rt_range) { - tracing::error!("Failed to save calibration: {}", e); - } - } + save_calibration_dialog(self, indexed_data); } } } @@ -1098,6 +1062,28 @@ impl Drop for ViewerCalibrationState { // Helpers // --------------------------------------------------------------------------- +/// Show a file-save dialog and write the current calibration to JSON. +fn save_calibration_dialog( + state: &mut ViewerCalibrationState, + indexed_data: &crate::app::IndexedDataState, +) { + if let Some(path) = rfd::FileDialog::new() + .set_file_name("calibration.json") + .add_filter("JSON", &["json"]) + .save_file() + { + let rt_range = if let crate::app::IndexedDataState::Loaded { index, .. } = indexed_data { + let (rt_min_ms, rt_max_ms) = index.ms1_cycle_mapping().range_milis(); + [rt_min_ms as f64 / 1000.0, rt_max_ms as f64 / 1000.0] + } else { + [0.0, 0.0] + }; + if let Err(e) = state.save_to_file(&path, rt_range) { + tracing::error!("Failed to save calibration: {}", e); + } + } +} + /// Simple deterministic shuffle using a linear congruential generator. /// Avoids pulling in the `rand` crate just for this. 
fn simple_shuffle(indices: &mut [usize]) { diff --git a/rust/timsseek/src/scoring/apex_finding.rs b/rust/timsseek/src/scoring/apex_finding.rs index 7535f35..833227d 100644 --- a/rust/timsseek/src/scoring/apex_finding.rs +++ b/rust/timsseek/src/scoring/apex_finding.rs @@ -525,8 +525,8 @@ impl TraceScorer { } let sqrt_exp = expected.sqrt(); - ms2_sum_exp += expected; // sqrt_exp * sqrt_exp = expected - pred_norms.push((row_idx, expected.sqrt())); + ms2_sum_exp += expected; + pred_norms.push((row_idx, sqrt_exp)); pred_sqrt_sum += sqrt_exp; for (i, &intensity) in chrom.iter().enumerate() { diff --git a/rust/timsseek/src/scoring/parquet_writer.rs b/rust/timsseek/src/scoring/parquet_writer.rs index 7531478..f5c509d 100644 --- a/rust/timsseek/src/scoring/parquet_writer.rs +++ b/rust/timsseek/src/scoring/parquet_writer.rs @@ -20,7 +20,7 @@ macro_rules! columns { let mut fields: Vec = Vec::new(); let mut arrays: Vec> = Vec::new(); $( - fields.push(Field::new($name, $dtype, true)); + fields.push(Field::new($name, $dtype, false)); arrays.push(Arc::new(<$array_type>::from_iter( $results.iter().map($accessor) ))); From a84462c75bd3724a46e0f227e42177ede41d0429 Mon Sep 17 00:00:00 2001 From: "J. Sebastian Paez" Date: Sat, 11 Apr 2026 19:22:23 -0700 Subject: [PATCH 62/64] perf: prescore skips PeptideMetadata construction MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit prescore() only needs ApexLocation — the metadata (including a cloned digest String per peptide) was built and immediately discarded. Call build_extraction directly instead of build_broad_extraction to avoid the unnecessary allocation in the Phase 1 hot loop. 
--- rust/timsseek/src/scoring/pipeline.rs | 34 ++++++++++++++------------- 1 file changed, 18 insertions(+), 16 deletions(-) diff --git a/rust/timsseek/src/scoring/pipeline.rs b/rust/timsseek/src/scoring/pipeline.rs index df64532..921e5ea 100644 --- a/rust/timsseek/src/scoring/pipeline.rs +++ b/rust/timsseek/src/scoring/pipeline.rs @@ -47,7 +47,6 @@ use super::apex_finding::{ TraceScorer, ApexLocation, ApexScore, - PeptideMetadata, RelativeIntensities, }; use super::full_results::ViewerResult; @@ -719,11 +718,17 @@ impl Scorer { &self, item: &QueryItemToScore, buffer: &mut TraceScorer, - ) -> Option<(ApexLocation, PeptideMetadata)> { - let (metadata, scoring_ctx) = tracing::span!(tracing::Level::TRACE, "prescore::extraction") - .in_scope(|| match self.build_broad_extraction(item) { - Ok(result) => Some(result), - Err(SkippingReason::RetentionTimeOutOfBounds) => None, + ) -> Option { + let scoring_ctx = tracing::span!(tracing::Level::TRACE, "prescore::extraction") + .in_scope(|| { + super::extraction::build_extraction( + &item.query, + item.expected_intensity.clone(), + &self.index, + &self.broad_tolerance, + Some(TOP_N_FRAGMENTS), + ) + .ok() })?; if scoring_ctx @@ -734,14 +739,11 @@ impl Scorer { return None; } - let apex_location = - tracing::span!(tracing::Level::TRACE, "prescore::scoring").in_scope(|| { - buffer - .find_apex_location(&scoring_ctx, &|idx| self.map_rt_index_to_milis(idx)) - .ok() - })?; - - Some((apex_location, metadata)) + tracing::span!(tracing::Level::TRACE, "prescore::scoring").in_scope(|| { + buffer + .find_apex_location(&scoring_ctx, &|idx| self.map_rt_index_to_milis(idx)) + .ok() + }) } /// Phase 1 batch: Prescore all peptides, collecting top-N calibrant candidates via bounded heaps. 
@@ -771,7 +773,7 @@ impl Scorer { .enumerate() .filter(|(_, x)| filter_fn(x)) .fold(init_fn, |(mut scorer, mut heap), (chunk_idx, item)| { - if let Some((loc, _meta)) = self.prescore(item, &mut scorer) { + if let Some(loc) = self.prescore(item, &mut scorer) { heap.push(CalibrantCandidate { score: loc.score, apex_rt: ObservedRTSeconds(loc.retention_time_ms as f32 / 1000.0), @@ -794,7 +796,7 @@ impl Scorer { let mut heap = CalibrantHeap::new(config.n_calibrants); for (chunk_idx, item) in items_to_score.iter().enumerate().filter(|(_, x)| filter_fn(x)) { - if let Some((loc, _meta)) = self.prescore(item, &mut scorer) { + if let Some(loc) = self.prescore(item, &mut scorer) { heap.push(CalibrantCandidate { score: loc.score, apex_rt: ObservedRTSeconds(loc.retention_time_ms as f32 / 1000.0), From 791c896090a9b420c1b21f40f26adea91d94c1a3 Mon Sep 17 00:00:00 2001 From: "J. Sebastian Paez" Date: Sat, 11 Apr 2026 21:51:34 -0700 Subject: [PATCH 63/64] feat: optional rayon, Phase 1 timing breakdown, cleanup - Make rayon an optional dependency (default on). Serial mode is now --no-default-features instead of the dead serial_scoring feature. - Gate parallel code behind #[cfg(feature = "rayon")], serial behind #[cfg(not(feature = "rayon"))]. - Add PrescoreTimings struct with extraction/scoring breakdown and n_passed_filter/n_scored counters. Aggregated via fold/reduce in both parallel and serial paths. - Rename thread-summed timing fields to *_thread_ms to distinguish from wall-clock phase timings. - Remove dead serial_scoring feature, dead rayon re-export in cv.rs. - Remove per-item filter timing (measured clock overhead, not work). 
--- rust/timsseek/Cargo.toml | 4 +- rust/timsseek/src/ml/cv.rs | 1 - rust/timsseek/src/ml/qvalues.rs | 4 ++ rust/timsseek/src/scoring/accumulator.rs | 2 + rust/timsseek/src/scoring/mod.rs | 2 +- rust/timsseek/src/scoring/pipeline.rs | 62 ++++++++++++++++-------- rust/timsseek/src/scoring/timings.rs | 46 ++++++++++++++++-- rust/timsseek_cli/Cargo.toml | 4 +- rust/timsseek_cli/src/processing.rs | 26 ++++++---- 9 files changed, 113 insertions(+), 38 deletions(-) diff --git a/rust/timsseek/Cargo.toml b/rust/timsseek/Cargo.toml index a4a1be8..98abdbf 100644 --- a/rust/timsseek/Cargo.toml +++ b/rust/timsseek/Cargo.toml @@ -26,12 +26,12 @@ rusqlite = {workspace = true } serde = { workspace = true } serde_json = { workspace = true } tracing = { workspace = true } -rayon = { workspace = true } +rayon = { workspace = true, optional = true } parquet = { workspace = true } arrow = { workspace = true } [features] +default = ["rayon"] instrumentation = [] -serial_scoring = [] [lib] diff --git a/rust/timsseek/src/ml/cv.rs b/rust/timsseek/src/ml/cv.rs index 4929ab1..5dd45ca 100644 --- a/rust/timsseek/src/ml/cv.rs +++ b/rust/timsseek/src/ml/cv.rs @@ -18,7 +18,6 @@ pub use forust_ml::{ GradientBooster, Matrix, }; -pub use rayon::prelude::*; pub use std::collections::HashSet; pub struct GBMConfig { diff --git a/rust/timsseek/src/ml/qvalues.rs b/rust/timsseek/src/ml/qvalues.rs index 045beac..a57e77c 100644 --- a/rust/timsseek/src/ml/qvalues.rs +++ b/rust/timsseek/src/ml/qvalues.rs @@ -9,6 +9,7 @@ use super::{ TargetDecoy, }; use rand::prelude::*; +#[cfg(feature = "rayon")] use rayon::prelude::*; use tracing::debug; @@ -94,7 +95,10 @@ pub fn rescore(mut data: Vec) -> Vec { let mut scored = scorer.score(); // Sort by score descending + #[cfg(feature = "rayon")] scored.par_sort_unstable_by(|a, b| b.get_score().total_cmp(&a.get_score())); + #[cfg(not(feature = "rayon"))] + scored.sort_unstable_by(|a, b| b.get_score().total_cmp(&a.get_score())); assign_qval(&mut scored, |x| 
CompetedCandidate::get_score(x) as f32); debug!("Best:\n{:#?}", scored.first()); debug!("Worst:\n{:#?}", scored.last()); diff --git a/rust/timsseek/src/scoring/accumulator.rs b/rust/timsseek/src/scoring/accumulator.rs index ea1f1fa..e0a5df5 100644 --- a/rust/timsseek/src/scoring/accumulator.rs +++ b/rust/timsseek/src/scoring/accumulator.rs @@ -5,6 +5,7 @@ use super::results::ScoredCandidate; use super::timings::ScoreTimings; +#[cfg(feature = "rayon")] use rayon::iter::{ FromParallelIterator, IntoParallelIterator, @@ -63,6 +64,7 @@ impl FromIterator<(Option, ScoreTimings)> for IonSearchAccumula } } +#[cfg(feature = "rayon")] impl FromParallelIterator<(Option, ScoreTimings)> for IonSearchAccumulator { fn from_par_iter(par_iter: I) -> Self where diff --git a/rust/timsseek/src/scoring/mod.rs b/rust/timsseek/src/scoring/mod.rs index 05f269b..94adfde 100644 --- a/rust/timsseek/src/scoring/mod.rs +++ b/rust/timsseek/src/scoring/mod.rs @@ -18,7 +18,7 @@ pub use pipeline::{ Scorer, }; pub use results::{ScoredCandidate, CompetedCandidate, FinalResult, ScoringFields}; -pub use timings::{FileReport, PipelineReport, RunReport, ScoreTimings}; +pub use timings::{FileReport, PipelineReport, PrescoreTimings, RunReport, ScoreTimings}; pub const NUM_MS2_IONS: usize = 7; pub const NUM_MS1_IONS: usize = 3; diff --git a/rust/timsseek/src/scoring/pipeline.rs b/rust/timsseek/src/scoring/pipeline.rs index 921e5ea..62accd5 100644 --- a/rust/timsseek/src/scoring/pipeline.rs +++ b/rust/timsseek/src/scoring/pipeline.rs @@ -26,6 +26,7 @@ use crate::{ QueryItemToScore, ScorerQueriable, }; +#[cfg(feature = "rayon")] use rayon::prelude::*; use std::time::Instant; use timscentroid::rt_mapping::{ @@ -676,7 +677,7 @@ impl Scorer { self.fragmented_range.intersects(lims) }; - #[cfg(not(feature = "serial_scoring"))] + #[cfg(feature = "rayon")] let results: IonSearchAccumulator = { items_to_score .par_iter() @@ -690,13 +691,14 @@ impl Scorer { .collect() }; - #[cfg(feature = "serial_scoring")] + 
#[cfg(not(feature = "rayon"))] let results: IonSearchAccumulator = { let mut scorer = init_fn(); items_to_score .iter() .filter(filter_fn) .map(|item| { + let _span = tracing::span!(tracing::Level::TRACE, "score_calibrated_item").entered(); let mut t = ScoreTimings::default(); let result = self.score_calibrated_extraction(item, calibration, &mut scorer, &mut t); @@ -718,7 +720,9 @@ impl Scorer { &self, item: &QueryItemToScore, buffer: &mut TraceScorer, + timings: &mut super::timings::PrescoreTimings, ) -> Option { + let st = Instant::now(); let scoring_ctx = tracing::span!(tracing::Level::TRACE, "prescore::extraction") .in_scope(|| { super::extraction::build_extraction( @@ -729,8 +733,10 @@ impl Scorer { Some(TOP_N_FRAGMENTS), ) .ok() - })?; + }); + timings.extraction += st.elapsed(); + let scoring_ctx = scoring_ctx?; if scoring_ctx .expected_intensities .fragment_intensities @@ -739,11 +745,18 @@ impl Scorer { return None; } - tracing::span!(tracing::Level::TRACE, "prescore::scoring").in_scope(|| { + let st = Instant::now(); + let result = tracing::span!(tracing::Level::TRACE, "prescore::scoring").in_scope(|| { buffer .find_apex_location(&scoring_ctx, &|idx| self.map_rt_index_to_milis(idx)) .ok() - }) + }); + timings.scoring += st.elapsed(); + + if result.is_some() { + timings.n_scored += 1; + } + result } /// Phase 1 batch: Prescore all peptides, collecting top-N calibrant candidates via bounded heaps. 
@@ -756,6 +769,7 @@ impl Scorer { items_to_score: &[QueryItemToScore], speclib_offset: usize, config: &CalibrationConfig, + timings: &mut super::timings::PrescoreTimings, ) -> CalibrantHeap { let filter_fn = |x: &&QueryItemToScore| { let tmp = x.query.get_precursor_mz_limits(); @@ -763,17 +777,22 @@ impl Scorer { self.fragmented_range.intersects(lims) }; - #[cfg(not(feature = "serial_scoring"))] - let heap: CalibrantHeap = { - let init_fn = - || (TraceScorer::new(self.num_cycles()), CalibrantHeap::new(config.n_calibrants)); + #[cfg(feature = "rayon")] + let (heap, par_timings): (CalibrantHeap, super::timings::PrescoreTimings) = { + use super::timings::PrescoreTimings; + let init_fn = || ( + TraceScorer::new(self.num_cycles()), + CalibrantHeap::new(config.n_calibrants), + PrescoreTimings::default(), + ); items_to_score .par_iter() .enumerate() .filter(|(_, x)| filter_fn(x)) - .fold(init_fn, |(mut scorer, mut heap), (chunk_idx, item)| { - if let Some(loc) = self.prescore(item, &mut scorer) { + .fold(init_fn, |(mut scorer, mut heap, mut t), (chunk_idx, item)| { + t.n_passed_filter += 1; + if let Some(loc) = self.prescore(item, &mut scorer, &mut t) { heap.push(CalibrantCandidate { score: loc.score, apex_rt: ObservedRTSeconds(loc.retention_time_ms as f32 / 1000.0), @@ -781,22 +800,27 @@ impl Scorer { library_rt: LibraryRT(item.query.rt_seconds()), }); } - (scorer, heap) + (scorer, heap, t) }) - .map(|(_, heap)| heap) + .map(|(_, heap, t)| (heap, t)) .reduce( - || CalibrantHeap::new(config.n_calibrants), - |a, b| a.merge(b), + || (CalibrantHeap::new(config.n_calibrants), PrescoreTimings::default()), + |(a_heap, mut a_t), (b_heap, b_t)| { a_t += b_t; (a_heap.merge(b_heap), a_t) }, ) }; + #[cfg(feature = "rayon")] + { *timings += par_timings; } - #[cfg(feature = "serial_scoring")] + #[cfg(not(feature = "rayon"))] let heap: CalibrantHeap = { let mut scorer = TraceScorer::new(self.num_cycles()); let mut heap = CalibrantHeap::new(config.n_calibrants); - for (chunk_idx, item) 
in items_to_score.iter().enumerate().filter(|(_, x)| filter_fn(x)) - { - if let Some(loc) = self.prescore(item, &mut scorer) { + for (chunk_idx, item) in items_to_score.iter().enumerate() { + if !filter_fn(&item) { + continue; + } + timings.n_passed_filter += 1; + if let Some(loc) = self.prescore(item, &mut scorer, timings) { heap.push(CalibrantCandidate { score: loc.score, apex_rt: ObservedRTSeconds(loc.retention_time_ms as f32 / 1000.0), diff --git a/rust/timsseek/src/scoring/timings.rs b/rust/timsseek/src/scoring/timings.rs index 0c7b0dd..364908d 100644 --- a/rust/timsseek/src/scoring/timings.rs +++ b/rust/timsseek/src/scoring/timings.rs @@ -59,6 +59,43 @@ impl std::ops::AddAssign for ScoreTimings { } } +/// Timing breakdown for Phase 1 prescore. +#[derive(Debug, Default)] +pub struct PrescoreTimings { + /// Time spent building extractions (chromatogram collection). + pub extraction: Duration, + /// Time spent in apex scoring (find_apex_location). + pub scoring: Duration, + /// Number of items that passed the fragmented_range filter. + pub n_passed_filter: usize, + /// Number of items where prescore returned Some (successful apex). 
+ pub n_scored: usize, +} + +impl Serialize for PrescoreTimings { + fn serialize(&self, serializer: S) -> Result + where + S: serde::Serializer, + { + use serde::ser::SerializeStruct; + let mut state = serializer.serialize_struct("PrescoreTimings", 4)?; + state.serialize_field("extraction_thread_ms", &self.extraction.as_millis())?; + state.serialize_field("scoring_thread_ms", &self.scoring.as_millis())?; + state.serialize_field("n_passed_filter", &self.n_passed_filter)?; + state.serialize_field("n_scored", &self.n_scored)?; + state.end() + } +} + +impl std::ops::AddAssign for PrescoreTimings { + fn add_assign(&mut self, rhs: Self) { + self.extraction += rhs.extraction; + self.scoring += rhs.scoring; + self.n_passed_filter += rhs.n_passed_filter; + self.n_scored += rhs.n_scored; + } +} + /// Full pipeline report: per-phase timings and result-quality metrics. /// All timing fields are in milliseconds. #[derive(Debug, Default, Serialize)] @@ -68,11 +105,12 @@ pub struct PipelineReport { // Phase timings (all in ms) pub phase1_prescore_ms: u64, + pub phase1_detail: PrescoreTimings, pub phase2_calibration_ms: u64, - pub phase3_extraction_ms: u64, - pub phase3_scoring_ms: u64, - pub phase3_spectral_query_ms: u64, - pub phase3_assembly_ms: u64, + pub phase3_extraction_thread_ms: u64, + pub phase3_scoring_thread_ms: u64, + pub phase3_spectral_query_thread_ms: u64, + pub phase3_assembly_thread_ms: u64, pub phase4_competition_ms: u64, pub phase5_rescore_ms: u64, pub phase6_output_ms: u64, diff --git a/rust/timsseek_cli/Cargo.toml b/rust/timsseek_cli/Cargo.toml index 7e6dc99..35b07e2 100644 --- a/rust/timsseek_cli/Cargo.toml +++ b/rust/timsseek_cli/Cargo.toml @@ -5,7 +5,7 @@ edition.workspace = true license.workspace = true [dependencies] -timsseek = { path = "../timsseek" } +timsseek = { path = "../timsseek", default-features = false } timsquery = { path = "../timsquery" } regex = "1.11.1" @@ -34,4 +34,6 @@ path = "src/sample_speclib.rs" mimalloc = { workspace = true, 
features = ["secure"] } [features] +default = ["parallel"] +parallel = ["timsseek/rayon"] instrumentation = ["timsseek/instrumentation", "tracing-profile"] diff --git a/rust/timsseek_cli/src/processing.rs b/rust/timsseek_cli/src/processing.rs index 3555af0..61e7675 100644 --- a/rust/timsseek_cli/src/processing.rs +++ b/rust/timsseek_cli/src/processing.rs @@ -145,12 +145,16 @@ pub fn execute_pipeline( info!("Phase 1: Broad prescore (unrestricted RT)..."); } let phase1_start = Instant::now(); - let calibrants = phase1_prescore(phase1_lib, pipeline, chunk_size, &calib_config); + let (calibrants, phase1_timings) = phase1_prescore(phase1_lib, pipeline, chunk_size, &calib_config); let phase1_ms = phase1_start.elapsed().as_millis() as u64; info!( - "Phase 1 complete: {} calibrant candidates in {}ms", + "Phase 1 complete: {} calibrant candidates in {}ms (extraction: {}ms, scoring: {}ms, {} passed filter, {} scored)", calibrants.len(), - phase1_ms + phase1_ms, + phase1_timings.extraction.as_millis(), + phase1_timings.scoring.as_millis(), + phase1_timings.n_passed_filter, + phase1_timings.n_scored, ); println!( "Phase 1: Prescore ........ 
{:.1}s ({} calibrants)", @@ -365,11 +369,12 @@ pub fn execute_pipeline( Ok(PipelineReport { load_index_ms: 0, // set by caller after return phase1_prescore_ms: phase1_ms, + phase1_detail: phase1_timings, phase2_calibration_ms: phase2_ms, - phase3_extraction_ms: phase3_timings.extraction.as_millis() as u64, - phase3_scoring_ms: phase3_timings.scoring.as_millis() as u64, - phase3_spectral_query_ms: phase3_timings.spectral_query.as_millis() as u64, - phase3_assembly_ms: phase3_timings.assembly.as_millis() as u64, + phase3_extraction_thread_ms: phase3_timings.extraction.as_millis() as u64, + phase3_scoring_thread_ms: phase3_timings.scoring.as_millis() as u64, + phase3_spectral_query_thread_ms: phase3_timings.spectral_query.as_millis() as u64, + phase3_assembly_thread_ms: phase3_timings.assembly.as_millis() as u64, phase4_competition_ms: phase4_ms, phase5_rescore_ms: phase5_ms, phase6_output_ms: phase6_ms, @@ -390,20 +395,21 @@ fn phase1_prescore( pipeline: &Scorer, chunk_size: usize, config: &CalibrationConfig, -) -> Vec { +) -> (Vec, timsseek::scoring::PrescoreTimings) { let n_chunks = (speclib.as_slice().len() + chunk_size - 1) / chunk_size; let pb = make_progress_bar(n_chunks as u64, "Phase 1"); let mut global_heap = CalibrantHeap::new(config.n_calibrants); + let mut timings = timsseek::scoring::PrescoreTimings::default(); let mut offset = 0usize; for chunk in speclib.as_slice().chunks(chunk_size).progress_with(pb) { - let chunk_heap = pipeline.prescore_batch(chunk, offset, config); + let chunk_heap = pipeline.prescore_batch(chunk, offset, config, &mut timings); global_heap = global_heap.merge(chunk_heap); offset += chunk.len(); } - global_heap.into_vec() + (global_heap.into_vec(), timings) } #[cfg_attr( From cdfabd009ef3330b275e320105bb494e5ac41896 Mon Sep 17 00:00:00 2001 From: "J. Sebastian Paez" Date: Sat, 11 Apr 2026 23:12:25 -0700 Subject: [PATCH 64/64] refactor: add timed! 
macro and TimedStep for timing infrastructure Replace manual Instant::now()/elapsed()/println!() boilerplate with two primitives: timed! for hot-path Duration accumulation (pipeline.rs) and TimedStep for progressive CLI output with auto dot-padding and tracing spans (main.rs, processing.rs, cv.rs). Duration display uses {:?} for automatic unit selection everywhere. --- rust/timsseek/src/ml/cv.rs | 9 +-- rust/timsseek/src/scoring/pipeline.rs | 92 +++++++++++++-------------- rust/timsseek/src/scoring/timings.rs | 91 +++++++++++++++++++++++++- rust/timsseek_cli/src/main.rs | 22 +++---- rust/timsseek_cli/src/processing.rs | 69 ++++++-------------- 5 files changed, 167 insertions(+), 116 deletions(-) diff --git a/rust/timsseek/src/ml/cv.rs b/rust/timsseek/src/ml/cv.rs index 5dd45ca..d298a3b 100644 --- a/rust/timsseek/src/ml/cv.rs +++ b/rust/timsseek/src/ml/cv.rs @@ -1,4 +1,5 @@ pub use super::TargetDecoy; +use crate::scoring::timings::TimedStep; pub use forust_ml::constraints::{ Constraint, ConstraintMap, @@ -403,9 +404,9 @@ impl CrossValidatedScorer { self.fold_classifiers.clear(); (0..self.n_folds).for_each(|_| self.fold_classifiers.push(None)); for fold in 0..self.n_folds { - let start = std::time::Instant::now(); + let step = TimedStep::begin_stderr(format_args!(" Training fold {}/{}...", fold + 1, self.n_folds)); self.fit_fold(fold, train_buffer, val_buffer)?; - eprintln!(" Training fold {}/{} ... {:?}", fold + 1, self.n_folds, start.elapsed()); + step.finish(); } Ok(()) @@ -415,7 +416,7 @@ impl CrossValidatedScorer { let mut scores = vec![0.0; self.data.len()]; let mut buffer = DataBuffer::default(); - let score_start = std::time::Instant::now(); + let step = TimedStep::begin_stderr(" Scoring folds..."); for train_i in 0..self.n_folds { let early_stop_i = self.next_fold(train_i); @@ -440,7 +441,7 @@ impl CrossValidatedScorer { } } } - eprintln!(" Scoring folds ... 
{:?}", score_start.elapsed()); + step.finish(); let div_factor = (self.n_folds - 2) as f64; scores.iter_mut().for_each(|x| { diff --git a/rust/timsseek/src/scoring/pipeline.rs b/rust/timsseek/src/scoring/pipeline.rs index 62accd5..1b384d2 100644 --- a/rust/timsseek/src/scoring/pipeline.rs +++ b/rust/timsseek/src/scoring/pipeline.rs @@ -28,7 +28,7 @@ use crate::{ }; #[cfg(feature = "rayon")] use rayon::prelude::*; -use std::time::Instant; +use crate::timed; use timscentroid::rt_mapping::{ MS1CycleIndex, RTIndex, @@ -599,15 +599,14 @@ impl Scorer { buffer: &mut TraceScorer, timings: &mut ScoreTimings, ) -> Option { - let st = Instant::now(); - let (metadata, scoring_ctx) = + let (metadata, scoring_ctx) = timed!(timings.extraction, tracing::span!(tracing::Level::TRACE, "score_calibrated::extraction").in_scope( || match self.build_calibrated_extraction(item, calibration) { Ok(result) => Some(result), Err(_) => None, }, - )?; - timings.extraction += st.elapsed(); + ) + )?; if scoring_ctx .expected_intensities @@ -617,39 +616,37 @@ impl Scorer { return None; } - let st = Instant::now(); - let apex_score = + let apex_score = timed!(timings.scoring, tracing::span!(tracing::Level::TRACE, "score_calibrated::apex_scoring").in_scope( || { buffer .find_apex(&scoring_ctx, &|idx| self.map_rt_index_to_milis(idx)) .ok() }, - )?; - timings.scoring += st.elapsed(); + ) + )?; - let st = Instant::now(); - let spectral_tol = calibration.get_spectral_tolerance(); - let isotope_tol = calibration.get_isotope_tolerance(); - let (inner_collector, isotope_collector) = + let (inner_collector, isotope_collector) = timed!(timings.spectral_query, { + let spectral_tol = calibration.get_spectral_tolerance(); + let isotope_tol = calibration.get_isotope_tolerance(); tracing::span!(tracing::Level::TRACE, "score_calibrated::secondary_query") - .in_scope(|| self.execute_secondary_query(item, &apex_score, &spectral_tol, &isotope_tol)); - timings.spectral_query += st.elapsed(); + .in_scope(|| 
self.execute_secondary_query(item, &apex_score, &spectral_tol, &isotope_tol)) + }); let nqueries = scoring_ctx.chromatograms.fragments.num_ions() as u8; - let st = Instant::now(); - let out = tracing::span!(tracing::Level::TRACE, "score_calibrated::finalize").in_scope( - || { - self.finalize_results( - &metadata, - nqueries, - &apex_score, - &inner_collector, - &isotope_collector, - ) - }, + let out = timed!(timings.assembly, + tracing::span!(tracing::Level::TRACE, "score_calibrated::finalize").in_scope( + || { + self.finalize_results( + &metadata, + nqueries, + &apex_score, + &inner_collector, + &isotope_collector, + ) + }, + ) ); - timings.assembly += st.elapsed(); match out { Ok(res) => Some(res), @@ -722,21 +719,20 @@ impl Scorer { buffer: &mut TraceScorer, timings: &mut super::timings::PrescoreTimings, ) -> Option { - let st = Instant::now(); - let scoring_ctx = tracing::span!(tracing::Level::TRACE, "prescore::extraction") - .in_scope(|| { - super::extraction::build_extraction( - &item.query, - item.expected_intensity.clone(), - &self.index, - &self.broad_tolerance, - Some(TOP_N_FRAGMENTS), - ) - .ok() - }); - timings.extraction += st.elapsed(); + let scoring_ctx = timed!(timings.extraction, + tracing::span!(tracing::Level::TRACE, "prescore::extraction") + .in_scope(|| { + super::extraction::build_extraction( + &item.query, + item.expected_intensity.clone(), + &self.index, + &self.broad_tolerance, + Some(TOP_N_FRAGMENTS), + ) + .ok() + }) + )?; - let scoring_ctx = scoring_ctx?; if scoring_ctx .expected_intensities .fragment_intensities @@ -745,13 +741,13 @@ impl Scorer { return None; } - let st = Instant::now(); - let result = tracing::span!(tracing::Level::TRACE, "prescore::scoring").in_scope(|| { - buffer - .find_apex_location(&scoring_ctx, &|idx| self.map_rt_index_to_milis(idx)) - .ok() - }); - timings.scoring += st.elapsed(); + let result = timed!(timings.scoring, + tracing::span!(tracing::Level::TRACE, "prescore::scoring").in_scope(|| { + buffer + 
.find_apex_location(&scoring_ctx, &|idx| self.map_rt_index_to_milis(idx)) + .ok() + }) + ); if result.is_some() { timings.n_scored += 1; diff --git a/rust/timsseek/src/scoring/timings.rs b/rust/timsseek/src/scoring/timings.rs index 364908d..26359df 100644 --- a/rust/timsseek/src/scoring/timings.rs +++ b/rust/timsseek/src/scoring/timings.rs @@ -3,9 +3,98 @@ //! This module provides timing measurement for each stage of the scoring process. //! The timings are aggregated across parallel scoring operations to provide overall //! performance metrics. +//! +//! Two helpers reduce timing boilerplate: +//! +//! - [`timed!`] — times a block and accumulates elapsed into a `Duration` field. +//! - [`TimedStep`] — progressive CLI output: prints a label immediately, then +//! appends elapsed time when the work finishes. use serde::Serialize; -use std::time::Duration; +use std::fmt; +use std::time::{Duration, Instant}; + +/// Time a block, accumulate elapsed into `$target`, return the block's value. +/// +/// ```ignore +/// let ctx = timed!(timings.extraction, { +/// build_extraction(&query, &index, &tolerance) +/// })?; +/// ``` +#[doc(hidden)] +#[macro_export] +macro_rules! timed { + ($target:expr, $body:expr) => {{ + let __start = std::time::Instant::now(); + let __result = $body; + $target += __start.elapsed(); + __result + }}; +} + +/// A timed step that prints a dot-padded label immediately, opens a tracing +/// span, and appends elapsed time on finish. +/// +/// ```ignore +/// let step = TimedStep::begin("Loading speclib"); +/// let speclib = load_speclib()?; +/// let elapsed = step.finish_with(format_args!("{} entries", speclib.len())); +/// // terminal: "Loading speclib .......... 834.567ms (225178 entries)" +/// ``` +pub struct TimedStep { + start: Instant, + stderr: bool, + _span: tracing::span::EnteredSpan, +} + +/// Column width for dot-padded labels on stdout. 
+const LABEL_WIDTH: usize = 26; + +impl TimedStep { + /// Dot-pad `label` to stdout, open a tracing span, flush, start clock. + pub fn begin(label: impl fmt::Display) -> Self { + let label = label.to_string(); + let span = tracing::info_span!("step", label = label.as_str()); + let dots = LABEL_WIDTH.saturating_sub(label.len() + 1); + if dots > 0 { + print!("{label} {:. Self { + let label = label.to_string(); + let span = tracing::info_span!("step", label = label.as_str()); + eprint!("{label}"); + Self { start: Instant::now(), stderr: true, _span: span.entered() } + } + + /// Print ` {elapsed:?}\n`, return Duration. + pub fn finish(self) -> Duration { + let d = self.start.elapsed(); + self.emit(format_args!(" {:?}", d)); + d + } + + /// Print ` {elapsed:?} ({detail})\n`, return Duration. + pub fn finish_with(self, detail: impl fmt::Display) -> Duration { + let d = self.start.elapsed(); + self.emit(format_args!(" {:?} ({})", d, detail)); + d + } + + fn emit(&self, msg: fmt::Arguments<'_>) { + if self.stderr { + eprintln!("{msg}"); + } else { + println!("{msg}"); + } + } +} /// Accumulated timing measurements for the four scoring stages. /// diff --git a/rust/timsseek_cli/src/main.rs b/rust/timsseek_cli/src/main.rs index 1e7b19f..c1ba424 100644 --- a/rust/timsseek_cli/src/main.rs +++ b/rust/timsseek_cli/src/main.rs @@ -8,6 +8,7 @@ use timsquery::TimsTofPath; use timsquery::serde::load_index_auto; use timsquery::utils::TupleRange; use timsseek::scoring::Scorer; +use timsseek::scoring::timings::TimedStep; use tracing::{ error, info, @@ -252,7 +253,7 @@ fn process_single_file( path: Some(dotd_file.to_string_lossy().to_string()), })?; - let index_start = std::time::Instant::now(); + let step = TimedStep::begin("Loading index"); let index = load_index_auto( dotd_file.to_str().ok_or_else(|| errors::CliError::Io { source: "Invalid path encoding".to_string(), @@ -261,8 +262,7 @@ fn process_single_file( None, )? 
.into_eager()?; - let load_index_ms = index_start.elapsed().as_millis() as u64; - println!("Loading index ........... {:.1}s", load_index_ms as f64 / 1000.0); + let load_index_ms = step.finish().as_millis() as u64; let fragmented_range = get_frag_range(&timstofpath)?; @@ -505,31 +505,29 @@ fn main() -> std::result::Result<(), errors::CliError> { let mut successful_files: Vec = Vec::new(); // Load speclib once (shared across all files) - let speclib_start = std::time::Instant::now(); + let step = TimedStep::begin("Loading speclib"); info!("Building database from speclib file {:?}", validated.speclib_path); info!("Decoy generation strategy: {}", config.analysis.decoy_strategy); let speclib = timsseek::data_sources::speclib::Speclib::from_file( &validated.speclib_path, config.analysis.decoy_strategy, ).map_err(|e| errors::CliError::Config { source: format!("Failed to load speclib: {:?}", e) })?; - let load_speclib_ms = speclib_start.elapsed().as_millis() as u64; - println!("Loading speclib ......... {:.1}s ({} entries)", load_speclib_ms as f64 / 1000.0, speclib.len()); + let load_speclib_ms = step.finish_with(format_args!("{} entries", speclib.len())).as_millis() as u64; // Load calibration library once (if provided) - let calib_start = std::time::Instant::now(); - let calib_lib = match &validated.calib_lib_path { + let (calib_lib, load_calib_lib_ms) = match &validated.calib_lib_path { Some(p) => { + let step = TimedStep::begin("Loading calib lib"); info!("Loading calibration library from {:?}", p); let lib = timsseek::data_sources::speclib::Speclib::from_file( p, config.analysis.decoy_strategy, ).map_err(|e| errors::CliError::Config { source: format!("Failed to load calib lib: {:?}", e) })?; - println!("Loading calib lib ....... 
{:.1}s ({} entries)", calib_start.elapsed().as_secs_f64(), lib.len()); - Some(lib) + let ms = step.finish_with(format_args!("{} entries", lib.len())).as_millis() as u64; + (Some(lib), ms) } - None => None, + None => (None, 0), }; - let load_calib_lib_ms = calib_start.elapsed().as_millis() as u64; run_report.load_speclib_ms = load_speclib_ms; run_report.load_calib_lib_ms = load_calib_lib_ms; diff --git a/rust/timsseek_cli/src/processing.rs b/rust/timsseek_cli/src/processing.rs index 61e7675..4b71d1a 100644 --- a/rust/timsseek_cli/src/processing.rs +++ b/rust/timsseek_cli/src/processing.rs @@ -5,7 +5,7 @@ use indicatif::{ ProgressStyle, }; use std::io::IsTerminal; -use std::time::Instant; +use timsseek::scoring::timings::TimedStep; use timsquery::IndexedTimstofPeaks; use timsquery::MzMobilityStatsCollector; use timsquery::SpectralCollector; @@ -144,23 +144,16 @@ pub fn execute_pipeline( } else { info!("Phase 1: Broad prescore (unrestricted RT)..."); } - let phase1_start = Instant::now(); + let step = TimedStep::begin("Phase 1: Prescore"); let (calibrants, phase1_timings) = phase1_prescore(phase1_lib, pipeline, chunk_size, &calib_config); - let phase1_ms = phase1_start.elapsed().as_millis() as u64; + let phase1_ms = step.finish_with(format_args!("{} calibrants", calibrants.len())).as_millis() as u64; info!( - "Phase 1 complete: {} calibrant candidates in {}ms (extraction: {}ms, scoring: {}ms, {} passed filter, {} scored)", - calibrants.len(), - phase1_ms, - phase1_timings.extraction.as_millis(), - phase1_timings.scoring.as_millis(), + "Phase 1 detail: extraction {:?}, scoring {:?}, {} passed filter, {} scored", + phase1_timings.extraction, + phase1_timings.scoring, phase1_timings.n_passed_filter, phase1_timings.n_scored, ); - println!( - "Phase 1: Prescore ........ 
{:.1}s ({} calibrants)", - phase1_ms as f64 / 1000.0, - calibrants.len() - ); // === PHASE 2: Calibration (fit RT + measure errors + derive tolerances) === // Build lookup from main speclib when using a separate calib lib. @@ -200,7 +193,7 @@ pub fn execute_pipeline( .collect(); info!("Phase 2: Calibration..."); - let phase2_start = Instant::now(); + let step = TimedStep::begin("Phase 2: Calibrate"); let calibration = match calibrate_from_phase1( calibrants, phase1_lib, @@ -217,14 +210,11 @@ pub fn execute_pipeline( CalibrationResult::fallback(pipeline) } }; - let phase2_ms = phase2_start.elapsed().as_millis() as u64; - println!( - "Phase 2: Calibrate ....... {:.1}s ({} calibrants → {} path nodes)", - phase2_ms as f64 / 1000.0, + let phase2_ms = step.finish_with(format_args!( + "{} calibrants → {} path nodes", calibrant_points.len(), - // Path nodes info from the calibration log (already printed by calibrt) calibration.ridge_width_summary().map_or(0, |s| s.n_columns), - ); + )).as_millis() as u64; // Print tolerance summary if let Some(summary) = calibration.ridge_width_summary() { println!( @@ -265,7 +255,7 @@ pub fn execute_pipeline( // === PHASE 3: Narrow scoring with calibrated tolerances === info!("Phase 3: Scoring with calibrated extraction..."); - let phase3_start = Instant::now(); + let step = TimedStep::begin("Phase 3: Score"); let mut phase3_timings = ScoreTimings::default(); let results = phase3_score( &speclib, @@ -274,43 +264,24 @@ pub fn execute_pipeline( chunk_size, &mut phase3_timings, ); - let phase3_ms = phase3_start.elapsed().as_millis() as u64; - info!( - "Phase 3 complete: {} scored peptides in {:?}", - results.len(), - phase3_start.elapsed() - ); - println!( - "Phase 3: Score ........... 
{:.1}s ({} peptides)", - phase3_ms as f64 / 1000.0, - results.len() - ); + step.finish_with(format_args!("{} peptides", results.len())); let total_scored = results.len(); // === PHASE 4: Target-decoy competition === - let phase4_start = Instant::now(); + let step = TimedStep::begin("Phase 4: Compete"); let mut competed = target_decoy_compete(results); competed.sort_unstable_by(|x, y| { y.scoring.main_score.partial_cmp(&x.scoring.main_score) .expect("NaN main_score should have been filtered during Phase 3 scoring") }); - let phase4_ms = phase4_start.elapsed().as_millis() as u64; let total_after_competition = competed.len(); - println!( - "Phase 4: Compete ......... {:.1}s ({} candidates)", - phase4_ms as f64 / 1000.0, - total_after_competition - ); + let phase4_ms = step.finish_with(format_args!("{} candidates", total_after_competition)).as_millis() as u64; // === PHASE 5: Rescore === - let phase5_start = Instant::now(); + let step = TimedStep::begin("Phase 5: Rescore"); let data = rescore(competed); - let phase5_ms = phase5_start.elapsed().as_millis() as u64; - println!( - "Phase 5: Rescore ......... 
{:.1}s", - phase5_ms as f64 / 1000.0 - ); + let phase5_ms = step.finish().as_millis() as u64; // Collect q-value threshold counts — full report to log, key result to stdout let qval_report = report_qvalues_at_thresholds(&data, &[0.01, 0.05, 0.1, 0.5, 1.0]); @@ -332,7 +303,7 @@ pub fn execute_pipeline( } // === PHASE 6: Write Parquet output === - let phase6_start = Instant::now(); + let step = TimedStep::begin("Phase 6: Write output"); let out_path_pq = out_path.directory.join("results.parquet"); let mut pq_writer = timsseek::scoring::parquet_writer::ResultParquetWriter::new( &out_path_pq, @@ -354,12 +325,8 @@ pub fn execute_pipeline( path: out_path_pq.clone().into(), source: e, })?; - let phase6_ms = phase6_start.elapsed().as_millis() as u64; + let phase6_ms = step.finish().as_millis() as u64; info!("Wrote final results to {:?}", out_path_pq); - println!( - "Phase 6: Write output .... {:.1}s", - phase6_ms as f64 / 1000.0 - ); // Key result to stdout println!();