Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
60 commits
Select commit Hold shift + click to select a range
53cb1d5
Final cleaning
yger Dec 4, 2025
1781060
[pre-commit.ci] auto fixes from pre-commit.com hooks
pre-commit-ci[bot] Dec 4, 2025
45341a3
WIP
yger Dec 4, 2025
5bacdae
[pre-commit.ci] auto fixes from pre-commit.com hooks
pre-commit-ci[bot] Dec 4, 2025
3c1157e
WIP
yger Dec 5, 2025
bb0df41
[pre-commit.ci] auto fixes from pre-commit.com hooks
pre-commit-ci[bot] Dec 5, 2025
1b90a46
Cosmetic
yger Dec 5, 2025
cffc2d2
cosmetic
yger Dec 5, 2025
268c7a3
[pre-commit.ci] auto fixes from pre-commit.com hooks
pre-commit-ci[bot] Dec 5, 2025
9730050
WIP
yger Dec 5, 2025
99eb15c
Merge branch 'final_cleaning' of github.com:yger/spikeinterface into …
yger Dec 5, 2025
73f9371
WIP
yger Dec 5, 2025
d2e3ba5
[pre-commit.ci] auto fixes from pre-commit.com hooks
pre-commit-ci[bot] Dec 5, 2025
fc50c1b
Fixes
yger Dec 5, 2025
1188d11
Merge branch 'main' of https://github.com/SpikeInterface/spikeinterfa…
yger Dec 5, 2025
bb84390
Merge branch 'main' of github.com:spikeinterface/spikeinterface into …
yger Dec 8, 2025
984cb7f
clean template before merging
samuelgarcia Dec 8, 2025
2725d5b
[pre-commit.ci] auto fixes from pre-commit.com hooks
pre-commit-ci[bot] Dec 8, 2025
748e376
Merge branch 'final_cleaning' of github.com:yger/spikeinterface into …
yger Dec 8, 2025
856ceff
Merge branch 'SpikeInterface:main' into final_cleaning
yger Dec 8, 2025
a2a5e01
Merge branch 'final_cleaning' of github.com:yger/spikeinterface into …
yger Dec 8, 2025
be13d1b
Merge branch 'final_cleaning' of github.com:yger/spikeinterface into …
yger Dec 8, 2025
b48f9ec
WIP
yger Dec 8, 2025
b22cbaa
Merge branch 'final_cleaning' of github.com:yger/spikeinterface into …
yger Dec 9, 2025
b7006eb
[pre-commit.ci] auto fixes from pre-commit.com hooks
pre-commit-ci[bot] Dec 9, 2025
9001054
Cleaning with max std per channel
yger Dec 9, 2025
2c86b83
Cleaning with max std per channel
yger Dec 9, 2025
7201a89
[pre-commit.ci] auto fixes from pre-commit.com hooks
pre-commit-ci[bot] Dec 9, 2025
2f5dd0d
WIP
yger Dec 9, 2025
39450c7
Merge branch 'final_cleaning' of github.com:yger/spikeinterface into …
yger Dec 9, 2025
c8b49d9
[pre-commit.ci] auto fixes from pre-commit.com hooks
pre-commit-ci[bot] Dec 9, 2025
131366b
Cleaning
yger Dec 9, 2025
3f691d9
Cleaning
yger Dec 9, 2025
7619e92
[pre-commit.ci] auto fixes from pre-commit.com hooks
pre-commit-ci[bot] Dec 9, 2025
d790ad1
more pre clean clustering
samuelgarcia Dec 9, 2025
0736f55
[pre-commit.ci] auto fixes from pre-commit.com hooks
pre-commit-ci[bot] Dec 9, 2025
f9bca31
Broken SC2 left by Sam
yger Dec 9, 2025
ca5a077
Verbose
yger Dec 9, 2025
0b80955
trying to get old behavior
yger Dec 9, 2025
99410ff
Merge branch 'final_cleaning' of github.com:yger/spikeinterface into …
yger Dec 9, 2025
d16aa2c
Merge branch 'SpikeInterface:main' into final_cleaning
yger Dec 10, 2025
a869f89
more clean after clustering
samuelgarcia Dec 10, 2025
fb20f7f
[pre-commit.ci] auto fixes from pre-commit.com hooks
pre-commit-ci[bot] Dec 10, 2025
0b84b48
Merge branch 'final_cleaning' of github.com:yger/spikeinterface into …
samuelgarcia Dec 10, 2025
8b6f3d9
oups
samuelgarcia Dec 10, 2025
ed9bf21
Flatten tridesclous2 params
samuelgarcia Dec 10, 2025
1a61613
Merge branch 'final_cleaning' of github.com:yger/spikeinterface into …
samuelgarcia Dec 10, 2025
31b3655
Merge branch 'final_cleaning' of github.com:yger/spikeinterface into …
samuelgarcia Dec 10, 2025
a4224c3
Better variable naming
samuelgarcia Dec 11, 2025
31572ec
[pre-commit.ci] auto fixes from pre-commit.com hooks
pre-commit-ci[bot] Dec 11, 2025
c218faa
oups
samuelgarcia Dec 11, 2025
c2af1b2
update params for clean firing
samuelgarcia Dec 11, 2025
acf9c3c
Merge branch 'main' of github.com:SpikeInterface/spikeinterface into …
samuelgarcia Dec 12, 2025
fefce5e
oups
samuelgarcia Dec 12, 2025
309bdeb
more clean
samuelgarcia Dec 12, 2025
db5db24
Merge branch 'main' of github.com:SpikeInterface/spikeinterface into …
samuelgarcia Dec 12, 2025
76d5b15
[pre-commit.ci] auto fixes from pre-commit.com hooks
pre-commit-ci[bot] Dec 12, 2025
80dfb04
update lupin and sc2 version
samuelgarcia Dec 12, 2025
7255464
Merge branch 'flatten_tdc_params' of github.com:samuelgarcia/spikeint…
samuelgarcia Dec 12, 2025
d5520a6
yep
samuelgarcia Dec 12, 2025
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion src/spikeinterface/sorters/internal/lupin.py
Original file line number Diff line number Diff line change
Expand Up @@ -97,7 +97,7 @@ class LupinSorter(ComponentsBasedSorter):

@classmethod
def get_sorter_version(cls):
return "2025.11"
return "2025.12"

@classmethod
def _run_from_folder(cls, sorter_output_folder, params, verbose):
Expand Down
2 changes: 1 addition & 1 deletion src/spikeinterface/sorters/internal/spyking_circus2.py
Original file line number Diff line number Diff line change
Expand Up @@ -89,7 +89,7 @@ class Spykingcircus2Sorter(ComponentsBasedSorter):

@classmethod
def get_sorter_version(cls):
return "2025.10"
return "2025.12"

@classmethod
def _run_from_folder(cls, sorter_output_folder, params, verbose):
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,7 @@ def test_with_numpy_gather(self):
output_folder = self.cache_folder / sorter_name
sorter_params = self.SorterClass.default_params()

sorter_params["matching"]["gather_mode"] = "npy"
sorter_params["gather_mode"] = "npy"

sorting = run_sorter(
sorter_name,
Expand Down
179 changes: 99 additions & 80 deletions src/spikeinterface/sorters/internal/tridesclous2.py
Original file line number Diff line number Diff line change
Expand Up @@ -32,61 +32,67 @@ class Tridesclous2Sorter(ComponentsBasedSorter):
_default_params = {
"apply_preprocessing": True,
"apply_motion_correction": False,
"motion_correction": {"preset": "dredge_fast"},
"motion_correction_preset": "dredge_fast",
"clustering_ms_before": 0.5,
"clustering_ms_after": 1.5,
"detection_radius_um": 150.0,
"features_radius_um": 75.0,
"template_radius_um": 100.0,
"freq_min": 150.0,
"freq_max": 6000.0,
"cache_preprocessing_mode": "auto",
"waveforms": {
"ms_before": 0.5,
"ms_after": 1.5,
"radius_um": 120.0,
},
"filtering": {
"freq_min": 150.0,
"freq_max": 6000.0,
"ftype": "bessel",
"filter_order": 2,
},
"detection": {"peak_sign": "neg", "detect_threshold": 5, "exclude_sweep_ms": 1.5, "radius_um": 150.0},
"selection": {"n_peaks_per_channel": 5000, "min_n_peaks": 20000},
"svd": {"n_components": 5},
"clustering": {
"recursive_depth": 3,
},
"peak_sign": "neg",
"detect_threshold": 5.0,
"n_peaks_per_channel": 5000,
"n_svd_components_per_channel": 5,
"n_pca_features": 6,
"clustering_recursive_depth": 3,
"ms_before": 2.0,
"ms_after": 3.0,
"template_sparsify_threshold": 1.5,
"template_min_snr_ptp": 3.5,
"template_max_jitter_ms": 0.2,
"min_firing_rate": 0.1,
"templates": {
"ms_before": 2.0,
"ms_after": 3.0,
"max_spikes_per_unit": 400,
"sparsity_threshold": 1.5,
"min_snr": 3.5,
"radius_um": 100.0,
"max_jitter_ms": 0.2,
},
"matching": {"method": "tdc-peeler", "method_kwargs": {}, "gather_mode": "memory"},
"gather_mode": "memory",
"job_kwargs": {},
"seed": None,
"save_array": True,
"debug": False,
}

_params_description = {
"apply_preprocessing": "Apply internal preprocessing or not",
"cache_preprocessing": "A dict contaning how to cache the preprocessed recording. mode='memory' | 'folder | 'zarr' ",
"waveforms": "A dictonary containing waveforms params: ms_before, ms_after, radius_um",
"filtering": "A dictonary containing filtering params: freq_min, freq_max",
"detection": "A dictonary containing detection params: peak_sign, detect_threshold, exclude_sweep_ms, radius_um",
"selection": "A dictonary containing selection params: n_peaks_per_channel, min_n_peaks",
"svd": "A dictonary containing svd params: n_components",
"clustering": "A dictonary containing clustering params: split_radius_um, merge_radius_um",
"templates": "A dictonary containing waveforms params for peeler: ms_before, ms_after",
"matching": "A dictonary containing matching params for matching: peak_shift_ms, radius_um",
"job_kwargs": "A dictionary containing job kwargs",
"save_array": "Save or not intermediate arrays",
"apply_motion_correction": "Apply motion correction or not",
"motion_correction_preset": "Motion correction preset",
"clustering_ms_before": "Milliseconds before the spike peak for clustering",
"clustering_ms_after": "Milliseconds after the spike peak for clustering",
"radius_um": "Radius for sparsity",
"freq_min": "Low frequency",
"freq_max": "High frequency",
"peak_sign": "Sign of peaks neg/pos/both",
"detect_threshold": "Treshold for peak detection",
"n_peaks_per_channel": "Number of spike per channel for clustering",
"n_svd_components_per_channel": "Number of SVD components per channel for clustering",
"n_pca_features": "Secondary PCA features reducation before local isosplit",
"clustering_recursive_depth": "Clustering recussivity",
"ms_before": "Milliseconds before the spike peak for template matching",
"ms_after": "Milliseconds after the spike peak for template matching",
"template_sparsify_threshold": "Threshold to sparsify templates before template matching",
"template_min_snr_ptp": "Threshold to remove templates before template matching",
"template_max_jitter_ms": "Threshold on jitters to remove templates before template matching",
"min_firing_rate": "To remove small cluster in size before template matching",
"gather_mode": "How to accumalte spike in matching : memory/npy",
"job_kwargs": "The famous and fabulous job_kwargs",
"seed": "Seed for random number",
"save_array": "Save or not intermediate arrays in the folder",
"debug": "Save debug files",
}

handle_multi_segment = True

@classmethod
def get_sorter_version(cls):
return "2025.11"
return "2025.12"

@classmethod
def _run_from_folder(cls, sorter_output_folder, params, verbose):
Expand All @@ -103,6 +109,8 @@ def _run_from_folder(cls, sorter_output_folder, params, verbose):
job_kwargs = fix_job_kwargs(job_kwargs)
job_kwargs["progress_bar"] = verbose

seed = params["seed"]

recording_raw = cls.load_recording_from_folder(sorter_output_folder.parent, with_warnings=False)

num_chans = recording_raw.get_num_channels()
Expand All @@ -126,12 +134,22 @@ def _run_from_folder(cls, sorter_output_folder, params, verbose):
rec_for_motion,
folder=sorter_output_folder / "motion",
output_motion_info=True,
**params["motion_correction"],
preset=params["motion_correction_preset"],
# **params["motion_correction"],
)
if verbose:
print("Done correct_motion()")

recording = bandpass_filter(recording_raw, **params["filtering"], margin_ms=20.0, dtype="float32")
# recording = bandpass_filter(recording_raw, **params["filtering"], margin_ms=20.0, dtype="float32")
recording = bandpass_filter(
recording_raw,
freq_min=params["freq_min"],
freq_max=params["freq_max"],
ftype="bessel",
filter_order=2,
margin_ms=20.0,
dtype="float32",
)

if apply_cmr:
recording = common_reference(recording)
Expand Down Expand Up @@ -170,8 +188,13 @@ def _run_from_folder(cls, sorter_output_folder, params, verbose):
cache_info = None

# detection
detection_params = params["detection"].copy()
detection_params["noise_levels"] = noise_levels
detection_params = dict(
peak_sign=params["peak_sign"],
detect_threshold=params["detect_threshold"],
exclude_sweep_ms=1.5,
radius_um=params["detection_radius_um"],
)

all_peaks = detect_peaks(
recording, method="locally_exclusive", method_kwargs=detection_params, job_kwargs=job_kwargs
)
Expand All @@ -180,32 +203,37 @@ def _run_from_folder(cls, sorter_output_folder, params, verbose):
print(f"detect_peaks(): {len(all_peaks)} peaks found")

# selection
selection_params = params["selection"].copy()
n_peaks = params["selection"]["n_peaks_per_channel"] * num_chans
n_peaks = max(selection_params["min_n_peaks"], n_peaks)
n_peaks = max(params["n_peaks_per_channel"] * num_chans, 20_000)
peaks = select_peaks(all_peaks, method="uniform", n_peaks=n_peaks)

if verbose:
print(f"select_peaks(): {len(peaks)} peaks kept for clustering")

# routing clustering params into the big IterativeISOSPLITClustering params tree
clustering_kwargs = deepcopy(clustering_methods["iterative-isosplit"]._default_params)
clustering_kwargs["peaks_svd"].update(params["waveforms"])
clustering_kwargs["peaks_svd"].update(params["svd"])
clustering_kwargs["split"].update(params["clustering"])
if params["debug"]:
clustering_kwargs["debug_folder"] = sorter_output_folder
clustering_kwargs["noise_levels"] = noise_levels
clustering_kwargs["clean_low_firing"]["min_firing_rate"] = params["min_firing_rate"]
clustering_kwargs["clean_low_firing"]["subsampling_factor"] = all_peaks.size / peaks.size

# if clustering_kwargs["clustering"]["clusterer"] == "isosplit6":
# have_sisosplit6 = importlib.util.find_spec("isosplit6") is not None
# if not have_sisosplit6:
# raise ValueError(
# "You want to run tridesclous2 with the isosplit6 (the C++) implementation, but this is not installed, please `pip install isosplit6`"
# )

# Clustering
clustering_kwargs = deepcopy(clustering_methods["iterative-isosplit"]._default_params)
clustering_kwargs["peaks_svd"]["ms_before"] = params["clustering_ms_before"]
clustering_kwargs["peaks_svd"]["ms_after"] = params["clustering_ms_after"]
clustering_kwargs["peaks_svd"]["radius_um"] = params["features_radius_um"]
clustering_kwargs["peaks_svd"]["n_components"] = params["n_svd_components_per_channel"]
clustering_kwargs["split"]["recursive_depth"] = params["clustering_recursive_depth"]
clustering_kwargs["split"]["method_kwargs"]["n_pca_features"] = params["n_pca_features"]
clustering_kwargs["clean_templates"]["sparsify_threshold"] = params["template_sparsify_threshold"]
clustering_kwargs["clean_templates"]["min_snr"] = params["template_min_snr_ptp"]
clustering_kwargs["clean_templates"]["max_jitter_ms"] = params["template_max_jitter_ms"]
clustering_kwargs["noise_levels"] = noise_levels
clustering_kwargs["clean_low_firing"]["min_firing_rate"] = params["min_firing_rate"]
clustering_kwargs["clean_low_firing"]["subsampling_factor"] = all_peaks.size / peaks.size

if params["debug"]:
clustering_kwargs["debug_folder"] = sorter_output_folder

unit_ids, clustering_label, more_outs = find_clusters_from_peaks(
recording,
peaks,
Expand Down Expand Up @@ -233,16 +261,14 @@ def _run_from_folder(cls, sorter_output_folder, params, verbose):
# pre-estimate the sparsity using the peaks' channel
spike_vector = sorting_pre_peeler.to_spike_vector(concatenated=True)
sparsity, unit_locations = compute_sparsity_from_peaks_and_label(
kept_peaks,
spike_vector["unit_index"],
sorting_pre_peeler.unit_ids,
recording,
params["templates"]["radius_um"],
kept_peaks, spike_vector["unit_index"], sorting_pre_peeler.unit_ids, recording, params["template_radius_um"]
)

# we recompute the templates even if the clustering already provides them, because we use a different ms_before/ms_after
nbefore = int(params["templates"]["ms_before"] * sampling_frequency / 1000.0)
nafter = int(params["templates"]["ms_after"] * sampling_frequency / 1000.0)
ms_before = params["ms_before"]
ms_after = params["ms_after"]
nbefore = int(ms_before * sampling_frequency / 1000.0)
nafter = int(ms_after * sampling_frequency / 1000.0)

templates_array = estimate_templates_with_accumulator(
recording_for_peeler,
Expand All @@ -266,31 +292,27 @@ def _run_from_folder(cls, sorter_output_folder, params, verbose):
is_in_uV=False,
)

# this clean and spasify more
# this cleans and sparsifies the templates further
templates = clean_templates(
templates,
sparsify_threshold=params["templates"]["sparsity_threshold"],
sparsify_threshold=params["template_sparsify_threshold"],
noise_levels=noise_levels,
min_snr=params["templates"]["min_snr"],
max_jitter_ms=params["templates"]["max_jitter_ms"],
min_snr=params["template_min_snr_ptp"],
max_jitter_ms=params["template_max_jitter_ms"],
remove_empty=True,
)

## peeler
matching_method = params["matching"].pop("method")
gather_mode = params["matching"].pop("gather_mode", "memory")
matching_params = params["matching"].get("matching_kwargs", {}).copy()
if matching_method in ("tdc-peeler",):
matching_params["noise_levels"] = noise_levels

gather_mode = params["gather_mode"]
pipeline_kwargs = dict(gather_mode=gather_mode)
if gather_mode == "npy":
pipeline_kwargs["folder"] = sorter_output_folder / "matching"
method_kwargs = dict(noise_levels=noise_levels)
spikes = find_spikes_from_templates(
recording_for_peeler,
templates,
method=matching_method,
method_kwargs=matching_params,
method="tdc-peeler",
method_kwargs=method_kwargs,
pipeline_kwargs=pipeline_kwargs,
job_kwargs=job_kwargs,
)
Expand All @@ -306,9 +328,6 @@ def _run_from_folder(cls, sorter_output_folder, params, verbose):
if auto_merge:
from spikeinterface.sorters.internal.spyking_circus2 import final_cleaning_circus

# max_distance_um = merging_params.get("max_distance_um", 50)
# merging_params["max_distance_um"] = max(max_distance_um, 2 * max_motion)

analyzer_final = final_cleaning_circus(
recording_for_peeler,
sorting,
Expand Down