Merged
Changes from all commits (48 commits)
53cb1d5  Final cleaning (yger, Dec 4, 2025)
1781060  [pre-commit.ci] auto fixes from pre-commit.com hooks (pre-commit-ci[bot], Dec 4, 2025)
45341a3  WIP (yger, Dec 4, 2025)
5bacdae  [pre-commit.ci] auto fixes from pre-commit.com hooks (pre-commit-ci[bot], Dec 4, 2025)
3c1157e  WIP (yger, Dec 5, 2025)
bb0df41  [pre-commit.ci] auto fixes from pre-commit.com hooks (pre-commit-ci[bot], Dec 5, 2025)
1b90a46  Cosmetic (yger, Dec 5, 2025)
cffc2d2  cosmetic (yger, Dec 5, 2025)
268c7a3  [pre-commit.ci] auto fixes from pre-commit.com hooks (pre-commit-ci[bot], Dec 5, 2025)
9730050  WIP (yger, Dec 5, 2025)
99eb15c  Merge branch 'final_cleaning' of github.com:yger/spikeinterface into … (yger, Dec 5, 2025)
73f9371  WIP (yger, Dec 5, 2025)
d2e3ba5  [pre-commit.ci] auto fixes from pre-commit.com hooks (pre-commit-ci[bot], Dec 5, 2025)
fc50c1b  Fixes (yger, Dec 5, 2025)
1188d11  Merge branch 'main' of https://github.com/SpikeInterface/spikeinterfa… (yger, Dec 5, 2025)
bb84390  Merge branch 'main' of github.com:spikeinterface/spikeinterface into … (yger, Dec 8, 2025)
984cb7f  clean template before merging (samuelgarcia, Dec 8, 2025)
2725d5b  [pre-commit.ci] auto fixes from pre-commit.com hooks (pre-commit-ci[bot], Dec 8, 2025)
748e376  Merge branch 'final_cleaning' of github.com:yger/spikeinterface into … (yger, Dec 8, 2025)
856ceff  Merge branch 'SpikeInterface:main' into final_cleaning (yger, Dec 8, 2025)
a2a5e01  Merge branch 'final_cleaning' of github.com:yger/spikeinterface into … (yger, Dec 8, 2025)
be13d1b  Merge branch 'final_cleaning' of github.com:yger/spikeinterface into … (yger, Dec 8, 2025)
b48f9ec  WIP (yger, Dec 8, 2025)
b22cbaa  Merge branch 'final_cleaning' of github.com:yger/spikeinterface into … (yger, Dec 9, 2025)
b7006eb  [pre-commit.ci] auto fixes from pre-commit.com hooks (pre-commit-ci[bot], Dec 9, 2025)
9001054  Cleaning with max std per channel (yger, Dec 9, 2025)
2c86b83  Cleaning with max std per channel (yger, Dec 9, 2025)
7201a89  [pre-commit.ci] auto fixes from pre-commit.com hooks (pre-commit-ci[bot], Dec 9, 2025)
2f5dd0d  WIP (yger, Dec 9, 2025)
39450c7  Merge branch 'final_cleaning' of github.com:yger/spikeinterface into … (yger, Dec 9, 2025)
c8b49d9  [pre-commit.ci] auto fixes from pre-commit.com hooks (pre-commit-ci[bot], Dec 9, 2025)
131366b  Cleaning (yger, Dec 9, 2025)
3f691d9  Cleaning (yger, Dec 9, 2025)
7619e92  [pre-commit.ci] auto fixes from pre-commit.com hooks (pre-commit-ci[bot], Dec 9, 2025)
d790ad1  more pre clean clustering (samuelgarcia, Dec 9, 2025)
0736f55  [pre-commit.ci] auto fixes from pre-commit.com hooks (pre-commit-ci[bot], Dec 9, 2025)
f9bca31  Broken SC2 left by Sam (yger, Dec 9, 2025)
ca5a077  Verbose (yger, Dec 9, 2025)
0b80955  trying to get old behavior (yger, Dec 9, 2025)
99410ff  Merge branch 'final_cleaning' of github.com:yger/spikeinterface into … (yger, Dec 9, 2025)
d16aa2c  Merge branch 'SpikeInterface:main' into final_cleaning (yger, Dec 10, 2025)
a869f89  more clean after clustering (samuelgarcia, Dec 10, 2025)
fb20f7f  [pre-commit.ci] auto fixes from pre-commit.com hooks (pre-commit-ci[bot], Dec 10, 2025)
8b6f3d9  oups (samuelgarcia, Dec 10, 2025)
1a61613  Merge branch 'final_cleaning' of github.com:yger/spikeinterface into … (samuelgarcia, Dec 10, 2025)
a4224c3  Better variable naming (samuelgarcia, Dec 11, 2025)
31572ec  [pre-commit.ci] auto fixes from pre-commit.com hooks (pre-commit-ci[bot], Dec 11, 2025)
5b3a27c  Merge branch 'main' into final_cleaning (samuelgarcia, Dec 12, 2025)
9 changes: 7 additions & 2 deletions src/spikeinterface/benchmark/benchmark_clustering.py
@@ -29,9 +29,14 @@ def __init__(self, recording, gt_sorting, params, indices, peaks, exhaustive_gt=
         self.method_kwargs = params["method_kwargs"]
         self.result = {}

-    def run(self, **job_kwargs):
+    def run(self, verbose=True, **job_kwargs):
         labels, peak_labels = find_clusters_from_peaks(
-            self.recording, self.peaks, method=self.method, method_kwargs=self.method_kwargs, job_kwargs=job_kwargs
+            self.recording,
+            self.peaks,
+            method=self.method,
+            method_kwargs=self.method_kwargs,
+            verbose=verbose,
+            job_kwargs=job_kwargs,
         )
         self.result["peak_labels"] = peak_labels

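The hunk above makes `verbose` an explicit keyword on the benchmark `run()` and forwards it to `find_clusters_from_peaks`. A minimal sketch of the resulting call shape, assuming a `recording` and a `peaks` array are already in hand (the method name and job_kwargs values are illustrative only):

```python
from spikeinterface.sortingcomponents.clustering import find_clusters_from_peaks

# Mirrors what the patched run() now does: verbose is an explicit keyword
# and job_kwargs travel as a single dict rather than **-expanded kwargs.
labels, peak_labels = find_clusters_from_peaks(
    recording,                   # assumed: preprocessed recording
    peaks,                       # assumed: detected-peaks structured array
    method="iterative-hdbscan",  # illustrative choice
    method_kwargs={},
    verbose=False,
    job_kwargs={"n_jobs": 4},
)
```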
9 changes: 7 additions & 2 deletions src/spikeinterface/benchmark/benchmark_matching.py
@@ -26,9 +26,14 @@ def __init__(self, recording, gt_sorting, params):
         self.method_kwargs = params["method_kwargs"]
         self.result = {}

-    def run(self, **job_kwargs):
+    def run(self, verbose=True, **job_kwargs):
         spikes = find_spikes_from_templates(
-            self.recording, self.templates, method=self.method, method_kwargs=self.method_kwargs, job_kwargs=job_kwargs
+            self.recording,
+            self.templates,
+            method=self.method,
+            method_kwargs=self.method_kwargs,
+            verbose=verbose,
+            job_kwargs=job_kwargs,
         )
         unit_ids = self.templates.unit_ids
         sorting = np.zeros(spikes.size, dtype=minimum_spike_dtype)
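Same pattern on the matching side. A hedged sketch of the updated call, assuming `recording` and a `Templates` object exist (method choice illustrative):

```python
from spikeinterface.sortingcomponents.matching import find_spikes_from_templates

# verbose now threads through to the matching engine as well.
spikes = find_spikes_from_templates(
    recording,
    templates,             # assumed: Templates object
    method="circus-omp",   # illustrative choice
    method_kwargs={},
    verbose=False,
    job_kwargs={"n_jobs": 4},
)
```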
24 changes: 17 additions & 7 deletions src/spikeinterface/sorters/internal/lupin.py
@@ -54,8 +54,10 @@ class LupinSorter(ComponentsBasedSorter):
         "clustering_recursive_depth": 3,
         "ms_before": 1.0,
         "ms_after": 2.5,
-        "sparsity_threshold": 1.5,
-        "template_min_snr": 2.5,
+        "template_sparsify_threshold": 1.5,
+        "template_min_snr_ptp": 4.0,
+        "template_max_jitter_ms": 0.2,
+        "min_firing_rate": 0.1,
         "gather_mode": "memory",
         "job_kwargs": {},
         "seed": None,
@@ -80,8 +82,10 @@ class LupinSorter(ComponentsBasedSorter):
         "clustering_recursive_depth": "Clustering recursivity",
         "ms_before": "Milliseconds before the spike peak for template matching",
         "ms_after": "Milliseconds after the spike peak for template matching",
-        "sparsity_threshold": "Threshold to sparsify templates before template matching",
-        "template_min_snr": "Threshold to remove templates before template matching",
+        "template_sparsify_threshold": "Threshold to sparsify templates before template matching",
+        "template_min_snr_ptp": "Peak-to-peak SNR threshold to remove weak templates before template matching",
+        "template_max_jitter_ms": "Maximum jitter (in ms) tolerated before a template is removed prior to template matching",
+        "min_firing_rate": "Minimum firing rate (Hz); smaller clusters are removed before template matching",
         "gather_mode": "How to accumulate spikes in matching: memory/npy",
         "job_kwargs": "The famous and fabulous job_kwargs",
         "seed": "Seed for random number",
@@ -232,6 +236,12 @@ def _run_from_folder(cls, sorter_output_folder, params, verbose):
         clustering_kwargs["peaks_svd"]["n_components"] = params["n_svd_components_per_channel"]
         clustering_kwargs["split"]["recursive_depth"] = params["clustering_recursive_depth"]
         clustering_kwargs["split"]["method_kwargs"]["n_pca_features"] = params["n_pca_features"]
+        clustering_kwargs["clean_templates"]["sparsify_threshold"] = params["template_sparsify_threshold"]
+        clustering_kwargs["clean_templates"]["min_snr"] = params["template_min_snr_ptp"]
+        clustering_kwargs["clean_templates"]["max_jitter_ms"] = params["template_max_jitter_ms"]
+        clustering_kwargs["noise_levels"] = noise_levels
+        clustering_kwargs["clean_low_firing"]["min_firing_rate"] = params["min_firing_rate"]
+        clustering_kwargs["clean_low_firing"]["subsampling_factor"] = all_peaks.size / peaks.size

         if params["debug"]:
             clustering_kwargs["debug_folder"] = sorter_output_folder
@@ -290,10 +300,10 @@ def _run_from_folder(cls, sorter_output_folder, params, verbose):
         # this sparsifies more
         templates = clean_templates(
             templates,
-            sparsify_threshold=params["sparsity_threshold"],
+            sparsify_threshold=params["template_sparsify_threshold"],
             noise_levels=noise_levels,
-            min_snr=params["template_min_snr"],
-            max_jitter_ms=None,
+            min_snr=params["template_min_snr_ptp"],
+            max_jitter_ms=params["template_max_jitter_ms"],
             remove_empty=True,
         )

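For anyone calling the sorter directly, the renamed and new parameters surface through `run_sorter`. A sketch with illustrative values; only the parameter names and defaults come from this diff, everything else is an assumption:

```python
from spikeinterface.sorters import run_sorter

sorting = run_sorter(
    "lupin",
    recording,                        # assumed: preprocessed recording
    template_sparsify_threshold=1.5,  # was: sparsity_threshold
    template_min_snr_ptp=4.0,         # was: template_min_snr (now peak-to-peak based)
    template_max_jitter_ms=0.2,       # new: jitter-based template rejection
    min_firing_rate=0.1,              # new: drop low-firing clusters before matching (Hz)
)
```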
39 changes: 30 additions & 9 deletions src/spikeinterface/sorters/internal/spyking_circus2.py
@@ -39,7 +39,8 @@ class Spykingcircus2Sorter(ComponentsBasedSorter):
         "motion_correction": {"preset": "dredge_fast"},
         "merging": {"max_distance_um": 50},
         "clustering": {"method": "iterative-hdbscan", "method_kwargs": dict()},
-        "cleaning": {"min_snr": 5, "max_jitter_ms": 0.1, "sparsify_threshold": None},
+        "cleaning": {"min_snr": 5, "max_jitter_ms": 0.2, "sparsify_threshold": 1, "mean_sd_ratio_threshold": 3},
+        "min_firing_rate": 0.1,
         "matching": {"method": "circus-omp", "method_kwargs": dict(), "pipeline_kwargs": dict()},
         "apply_preprocessing": True,
         "apply_whitening": True,
@@ -103,6 +104,7 @@ def _run_from_folder(cls, sorter_output_folder, params, verbose):
         from spikeinterface.sortingcomponents.peak_detection import detect_peaks
         from spikeinterface.sortingcomponents.peak_selection import select_peaks
         from spikeinterface.sortingcomponents.clustering import find_clusters_from_peaks
+        from spikeinterface.sortingcomponents.clustering.tools import remove_small_cluster
         from spikeinterface.sortingcomponents.matching import find_spikes_from_templates
         from spikeinterface.sortingcomponents.tools import check_probe_for_drift_correction
         from spikeinterface.sortingcomponents.tools import clean_templates
@@ -118,8 +120,6 @@ def _run_from_folder(cls, sorter_output_folder, params, verbose):
         ms_before = params["general"].get("ms_before", 0.5)
         ms_after = params["general"].get("ms_after", 1.5)
         radius_um = params["general"].get("radius_um", 100.0)
-        detect_threshold = params["detection"]["method_kwargs"].get("detect_threshold", 5)
-        peak_sign = params["detection"].get("peak_sign", "neg")
         deterministic = params["deterministic_peaks_detection"]
         debug = params["debug"]
         seed = params["seed"]
@@ -310,6 +310,9 @@ def _run_from_folder(cls, sorter_output_folder, params, verbose):
         if verbose:
             print("Kept %d peaks for clustering" % len(selected_peaks))

+        cleaning_kwargs = params.get("cleaning", {}).copy()
+        cleaning_kwargs["remove_empty"] = True
+
         if clustering_method in [
             "iterative-hdbscan",
             "iterative-isosplit",
@@ -319,6 +322,10 @@
         clustering_params.update(verbose=verbose)
         clustering_params.update(seed=seed)
         clustering_params.update(peaks_svd=params["general"])
+        if clustering_method in ["iterative-hdbscan", "iterative-isosplit"]:
+            clustering_params.update(clean_templates=cleaning_kwargs)
+            clustering_params["noise_levels"] = noise_levels
+
         if debug:
             clustering_params["debug_folder"] = sorter_output_folder / "clustering"

@@ -328,6 +335,7 @@
             method=clustering_method,
             method_kwargs=clustering_params,
             extra_outputs=True,
+            verbose=verbose,
             job_kwargs=job_kwargs,
         )

@@ -365,7 +373,7 @@ def _run_from_folder(cls, sorter_output_folder, params, verbose):
         else:
             from spikeinterface.sortingcomponents.clustering.tools import get_templates_from_peaks_and_svd

-            dense_templates, new_sparse_mask = get_templates_from_peaks_and_svd(
+            dense_templates, new_sparse_mask, max_std_per_channel = get_templates_from_peaks_and_svd(
                 recording_w,
                 selected_peaks,
                 peak_labels,
@@ -375,16 +383,30 @@ def _run_from_folder(cls, sorter_output_folder, params, verbose):
                 more_outs["peaks_svd"],
                 more_outs["peak_svd_sparse_mask"],
                 operator="median",
+                return_max_std_per_channel=True,
             )
             # this releases the peak_svd memmap file
             templates = dense_templates.to_sparse(new_sparse_mask)

             del more_outs

-            cleaning_kwargs = params.get("cleaning", {}).copy()
-            cleaning_kwargs["noise_levels"] = noise_levels
-            cleaning_kwargs["remove_empty"] = True
-            templates = clean_templates(templates, **cleaning_kwargs)
+            before_clean_ids = templates.unit_ids.copy()
+            cleaning_kwargs["max_std_per_channel"] = max_std_per_channel
+            cleaning_kwargs["verbose"] = verbose
+            templates = clean_templates(templates, noise_levels=noise_levels, **cleaning_kwargs)
+            remove_peak_mask = ~np.isin(peak_labels, templates.unit_ids)
+            peak_labels[remove_peak_mask] = -1
+
+        if params["min_firing_rate"] is not None:
+            peak_labels, to_keep = remove_small_cluster(
+                recording_w,
+                selected_peaks,
+                peak_labels,
+                min_firing_rate=params["min_firing_rate"],
+                subsampling_factor=peaks.size / selected_peaks.size,
+                verbose=verbose,
+            )
+            templates = templates.select_units(to_keep)

         if verbose:
             print("Kept %d clean clusters" % len(templates.unit_ids))
@@ -508,5 +530,4 @@ def final_cleaning_circus(
         sparsity_overlap=sparsity_overlap,
         **job_kwargs,
     )
-
     return final_sa
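The `subsampling_factor=peaks.size / selected_peaks.size` argument corrects for the fact that clustering runs on a subset of peaks: rates counted on the subset must be scaled back up before comparing against `min_firing_rate`. A hedged sketch of that arithmetic; the helper below is illustrative, not the actual `remove_small_cluster` implementation:

```python
import numpy as np

def estimated_firing_rates(peak_labels, unit_ids, duration_s, subsampling_factor):
    # Counts on the selected subset, scaled back to the full recording.
    counts = np.array([np.sum(peak_labels == u) for u in unit_ids], dtype=float)
    return counts * subsampling_factor / duration_s

# Units whose corrected rate falls below min_firing_rate (0.1 Hz by default
# in this PR) would be dropped and their peaks relabeled as noise (-1).
```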
12 changes: 8 additions & 4 deletions src/spikeinterface/sorters/internal/tridesclous2.py
@@ -51,13 +51,15 @@ class Tridesclous2Sorter(ComponentsBasedSorter):
         "clustering": {
             "recursive_depth": 3,
         },
+        "min_firing_rate": 0.1,
         "templates": {
             "ms_before": 2.0,
             "ms_after": 3.0,
             "max_spikes_per_unit": 400,
             "sparsity_threshold": 1.5,
-            "min_snr": 2.5,
+            "min_snr": 3.5,
             "radius_um": 100.0,
+            "max_jitter_ms": 0.2,
         },
         "matching": {"method": "tdc-peeler", "method_kwargs": {}, "gather_mode": "memory"},
         "job_kwargs": {},
@@ -93,7 +95,6 @@ def _run_from_folder(cls, sorter_output_folder, params, verbose):
         from spikeinterface.sortingcomponents.peak_detection import detect_peaks
         from spikeinterface.sortingcomponents.peak_selection import select_peaks
         from spikeinterface.sortingcomponents.clustering.main import find_clusters_from_peaks, clustering_methods
-        from spikeinterface.sortingcomponents.tools import remove_empty_templates
         from spikeinterface.preprocessing import correct_motion
         from spikeinterface.sortingcomponents.motion import InterpolateMotionRecording
         from spikeinterface.sortingcomponents.tools import clean_templates, compute_sparsity_from_peaks_and_label
@@ -194,6 +195,9 @@ def _run_from_folder(cls, sorter_output_folder, params, verbose):
         clustering_kwargs["split"].update(params["clustering"])
         if params["debug"]:
             clustering_kwargs["debug_folder"] = sorter_output_folder
+        clustering_kwargs["noise_levels"] = noise_levels
+        clustering_kwargs["clean_low_firing"]["min_firing_rate"] = params["min_firing_rate"]
+        clustering_kwargs["clean_low_firing"]["subsampling_factor"] = all_peaks.size / peaks.size

         # if clustering_kwargs["clustering"]["clusterer"] == "isosplit6":
         #     have_sisosplit6 = importlib.util.find_spec("isosplit6") is not None
@@ -262,13 +266,13 @@ def _run_from_folder(cls, sorter_output_folder, params, verbose):
             is_in_uV=False,
         )

-        # this spasify more
+        # this cleans and sparsifies more
         templates = clean_templates(
             templates,
             sparsify_threshold=params["templates"]["sparsity_threshold"],
             noise_levels=noise_levels,
             min_snr=params["templates"]["min_snr"],
-            max_jitter_ms=None,
+            max_jitter_ms=params["templates"]["max_jitter_ms"],
             remove_empty=True,
         )

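`max_jitter_ms` was previously disabled here (`None`); it now defaults to 0.2 ms. An illustrative sketch of what a jitter criterion of this kind computes; this is an assumption about the idea behind `clean_templates`, not a copy of its internals:

```python
import numpy as np

def within_jitter(template, nbefore, sampling_frequency, max_jitter_ms=0.2):
    # template: (num_samples, num_channels). Find the extremum on the channel
    # with the largest peak-to-peak amplitude, then require it to sit close to
    # the nominal peak sample nbefore.
    best_chan = np.argmax(np.ptp(template, axis=0))
    peak_sample = np.argmax(np.abs(template[:, best_chan]))
    jitter_ms = abs(peak_sample - nbefore) / sampling_frequency * 1000.0
    return jitter_ms <= max_jitter_ms
```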
@@ -9,7 +9,9 @@
 from spikeinterface.sortingcomponents.waveforms.peak_svd import extract_peaks_svd
 from spikeinterface.sortingcomponents.clustering.merging_tools import merge_peak_labels_from_templates
 from spikeinterface.sortingcomponents.clustering.itersplit_tools import split_clusters
-from spikeinterface.sortingcomponents.clustering.tools import get_templates_from_peaks_and_svd
+from spikeinterface.sortingcomponents.clustering.tools import get_templates_from_peaks_and_svd, remove_small_cluster
+from spikeinterface.sortingcomponents.tools import clean_templates
+from spikeinterface.core.recording_tools import get_noise_levels


class IterativeHDBSCANClustering:
@@ -30,6 +32,7 @@ class IterativeHDBSCANClustering:
     _default_params = {
         "peaks_svd": {"n_components": 5, "ms_before": 0.5, "ms_after": 1.5, "radius_um": 100.0},
         "seed": None,
+        "noise_levels": None,
         "split": {
             "split_radius_um": 75.0,
             "recursive": True,
@@ -43,8 +46,18 @@
                 "n_pca_features": 3,
             },
         },
+        "clean_templates": {
+            "sparsify_threshold": 1.0,
+            "min_snr": 2.5,
+            "remove_empty": True,
+            "max_jitter_ms": 0.2,
+        },
         "merge_from_templates": dict(similarity_thresh=0.8, num_shifts=3, use_lags=True),
         "merge_from_features": None,
+        "clean_low_firing": {
+            "min_firing_rate": 0.1,
+            "subsampling_factor": None,
+        },
         "debug_folder": None,
         "verbose": True,
     }
@@ -116,7 +129,7 @@ def main_function(cls, recording, peaks, params, job_kwargs=dict()):
            **split,
        )

-        templates, new_sparse_mask = get_templates_from_peaks_and_svd(
+        templates, new_sparse_mask, max_std_per_channel = get_templates_from_peaks_and_svd(
            recording,
            peaks,
            peak_labels,
@@ -126,8 +139,27 @@ def main_function(cls, recording, peaks, params, job_kwargs=dict()):
            peaks_svd,
            sparse_mask,
            operator="median",
+            return_max_std_per_channel=True,
        )

+        ## Pre clean using templates (jitter, sparsify_threshold)
+        templates = templates.to_sparse(new_sparse_mask)
+        cleaning_kwargs = params["clean_templates"].copy()
+        cleaning_kwargs["verbose"] = verbose
+        cleaning_kwargs["max_std_per_channel"] = max_std_per_channel
+        if params["noise_levels"] is not None:
+            noise_levels = params["noise_levels"]
+        else:
+            noise_levels = get_noise_levels(recording, return_in_uV=False, **job_kwargs)
+        cleaning_kwargs["noise_levels"] = noise_levels
+        cleaned_templates = clean_templates(templates, **cleaning_kwargs)
+        mask_keep_ids = np.isin(templates.unit_ids, cleaned_templates.unit_ids)
+        to_remove_ids = templates.unit_ids[~mask_keep_ids]
+        to_remove_label_mask = np.isin(peak_labels, to_remove_ids)
+        peak_labels[to_remove_label_mask] = -1
+        templates = cleaned_templates
+        new_sparse_mask = templates.sparsity.mask.copy()
+        templates = templates.to_dense()
         labels = templates.unit_ids

         if verbose:
@@ -154,6 +186,21 @@ def main_function(cls, recording, peaks, params, job_kwargs=dict()):
            is_in_uV=False,
        )

+        # clean very small clusters before the peeler
+        if (
+            params["clean_low_firing"]["subsampling_factor"] is not None
+            and params["clean_low_firing"]["min_firing_rate"] is not None
+        ):
+            peak_labels, to_keep = remove_small_cluster(
+                recording,
+                peaks,
+                peak_labels,
+                min_firing_rate=params["clean_low_firing"]["min_firing_rate"],
+                subsampling_factor=params["clean_low_firing"]["subsampling_factor"],
+                verbose=verbose,
+            )
+            templates = templates.select_units(to_keep)
+
         labels = templates.unit_ids

         if debug_folder is not None:
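The cleaning step above relies on a small relabeling pattern: units rejected by `clean_templates` have their peaks sent to the noise label -1 via `np.isin`. A self-contained demonstration of that pattern, with made-up toy data:

```python
import numpy as np

unit_ids = np.array([0, 1, 2, 3])
kept_ids = np.array([0, 2])                 # e.g. survivors of clean_templates
peak_labels = np.array([0, 1, 2, 3, 1, 0])

to_remove_ids = unit_ids[~np.isin(unit_ids, kept_ids)]   # -> [1, 3]
peak_labels[np.isin(peak_labels, to_remove_ids)] = -1
print(peak_labels)                          # [ 0 -1  2 -1 -1  0]
```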