From 1d55c4a5f5646c4ee21f91235338afc8d7fda3c6 Mon Sep 17 00:00:00 2001 From: cfuselli Date: Thu, 16 Feb 2023 12:37:16 +0000 Subject: [PATCH 01/56] software veto plugins --- straxen/plugins/__init__.py | 6 + straxen/plugins/raw_records_sv/__init__.py | 2 + .../plugins/raw_records_sv/raw_records_sv.py | 112 ++++++++++++++++++ 3 files changed, 120 insertions(+) create mode 100644 straxen/plugins/raw_records_sv/__init__.py create mode 100644 straxen/plugins/raw_records_sv/raw_records_sv.py diff --git a/straxen/plugins/__init__.py b/straxen/plugins/__init__.py index 5a15d7c18..425fa9413 100644 --- a/straxen/plugins/__init__.py +++ b/straxen/plugins/__init__.py @@ -83,3 +83,9 @@ from . import led_cal from .led_cal import * + + +#Software veto +from .raw_records_sv import * +from . import raw_records_sv + diff --git a/straxen/plugins/raw_records_sv/__init__.py b/straxen/plugins/raw_records_sv/__init__.py new file mode 100644 index 000000000..d58a78a27 --- /dev/null +++ b/straxen/plugins/raw_records_sv/__init__.py @@ -0,0 +1,2 @@ +from . import raw_records_sv +from .raw_records_sv import * \ No newline at end of file diff --git a/straxen/plugins/raw_records_sv/raw_records_sv.py b/straxen/plugins/raw_records_sv/raw_records_sv.py new file mode 100644 index 000000000..f26d24d15 --- /dev/null +++ b/straxen/plugins/raw_records_sv/raw_records_sv.py @@ -0,0 +1,112 @@ +import numpy as np +import strax +import straxen + +export, __all__ = strax.exporter() + +@export +class RawRecordsSoftwareVeto(strax.Plugin): + + """ + Software veto for raw records + Depends on event_info + + """ + + __version__ = '0.0.5' + + depends_on = ('raw_records', 'event_info') + provides = 'raw_records_sv' + data_kind = 'raw_records_sv' + + window = 0 # ns? should pass as options + + + def infer_dtype(self): + + d = 'raw_records' + return self.deps[d].dtype_for(d) + + def software_veto_mask(self, e): + + m = (e['x']**2 + e['y']**2) > 50**2 + + return m + + def compute(self, raw_records, events): + + ee = events[self.software_veto_mask(events)] + + return self.get_touching(raw_records, ee) + + + def get_touching(self, things, containers): + + # start with keep everything + mask = np.full(len(things), True) + + # throw away things inside every container + for i0, i1 in strax.touching_windows(things, containers, window=self.window): + mask[i0:i1] = False + + # return only the things outside the containers + return things[mask] + + + +@export +class RawRecordsDownSample(strax.Plugin): + + """ + Software data manipulation + The raw_records of big selected S2s are 'downsampled' + The lenght of the array stays the same, but with averaged values + so the data reduction is only at compressed level + """ + + __version__ = '0.0.0' + + depends_on = ('raw_records', 'peak_basics') + provides = 'raw_records_down_sample' + data_kind = 'raw_records_down_sample' + + window = 0 # ns? 
should pass as options + downsampling_factor = 5 # must be able to divide 110 with + + def infer_dtype(self): + + d = 'raw_records' + return self.deps[d].dtype_for(d) + + def peaks_mask(self, p): + + mask = p['type'] == 2 + mask &= p['area'] > 500000 # PE + mask &= p['range_50p_area'] > 5000 # ns + + return mask + + def compute(self, raw_records, peaks): + + pp = peaks[self.peaks_mask(peaks)] + mask = self.get_touching_mask(raw_records, pp) + + rr = raw_records.copy() + + rr['data'][mask] = raw_records['data'][mask].reshape(len(raw_records[mask]), + -1, + self.downsampling_factor, ).mean(axis=2).repeat(self.downsampling_factor, + axis=1) + + return rr + + + def get_touching_mask(self, things, containers): + + mask = np.full(len(things), False) + + for i0, i1 in strax.touching_windows(things, containers, window=self.window): + mask[i0:i1] = True + + return mask + From f1079399493c9e9aaff96f34e8e516802b8f17af Mon Sep 17 00:00:00 2001 From: cfuselli Date: Thu, 16 Feb 2023 13:56:44 +0000 Subject: [PATCH 02/56] adding software veto aqmon intervals --- straxen/plugins/aqmon_hits/aqmon_hits.py | 9 +++- straxen/plugins/events/veto_proximity.py | 2 +- straxen/plugins/raw_records/daqreader.py | 2 + .../plugins/raw_records_sv/raw_records_sv.py | 54 +++++++++++++++---- .../plugins/veto_intervals/veto_intervals.py | 20 +++++-- 5 files changed, 72 insertions(+), 15 deletions(-) diff --git a/straxen/plugins/aqmon_hits/aqmon_hits.py b/straxen/plugins/aqmon_hits/aqmon_hits.py index 10dd4a116..69c7fa04b 100644 --- a/straxen/plugins/aqmon_hits/aqmon_hits.py +++ b/straxen/plugins/aqmon_hits/aqmon_hits.py @@ -3,7 +3,7 @@ import strax import straxen -from straxen.plugins.raw_records.daqreader import ARTIFICIAL_DEADTIME_CHANNEL +from straxen.plugins.raw_records.daqreader import ARTIFICIAL_DEADTIME_CHANNEL, SOFTWARE_VETO_CHANNEL export, __all__ = strax.exporter() @@ -17,6 +17,7 @@ class AqmonChannels(IntEnum): MV_TRIGGER = 797 GPS_SYNC = 798 ARTIFICIAL_DEADTIME = ARTIFICIAL_DEADTIME_CHANNEL + SOFTWARE_VETO = SOFTWARE_VETO_CHANNEL # Analogue sum waveform SUM_WF = 800 # GPS sync acquisition monitor @@ -63,6 +64,7 @@ class AqmonHits(strax.Plugin): # Fake signals, 0 meaning that we won't find hits using # strax but just look for starts and stops (0, (int(AqmonChannels.ARTIFICIAL_DEADTIME),)), + (0, (int(AqmonChannels.SOFTWARE_VETO),)), ), track=True, help='Minimum hit threshold in ADC*counts above baseline. Specified ' @@ -117,7 +119,10 @@ def find_aqmon_hits_per_channel(self, records): aqmon_thresholds[np.array(channels)] = hit_threshold # Split the artificial deadtime ones and do those separately if there are any - is_artificial = records['channel'] == AqmonChannels.ARTIFICIAL_DEADTIME + # here we also add the software veto because it's treated in the exact same way + is_artificial = (records['channel'] == AqmonChannels.ARTIFICIAL_DEADTIME) + is_artificial |= (records['channel'] == AqmonChannels.SOFTWARE_VETO) + aqmon_hits = strax.find_hits(records[~is_artificial], min_amplitude=aqmon_thresholds) diff --git a/straxen/plugins/events/veto_proximity.py b/straxen/plugins/events/veto_proximity.py index ce194a7a6..77411a66b 100644 --- a/straxen/plugins/events/veto_proximity.py +++ b/straxen/plugins/events/veto_proximity.py @@ -40,7 +40,7 @@ class VetoProximity(strax.OverlapWindowPlugin): 'such that one will never cut events that are < YY ns.' 
) - veto_names = ['busy', 'busy_he', 'hev', 'straxen_deadtime'] + veto_names = ['busy', 'busy_he', 'hev', 'straxen_deadtime', 'software_veto'] def infer_dtype(self): dtype = [] diff --git a/straxen/plugins/raw_records/daqreader.py b/straxen/plugins/raw_records/daqreader.py index 3f34e78c7..4a6deff94 100644 --- a/straxen/plugins/raw_records/daqreader.py +++ b/straxen/plugins/raw_records/daqreader.py @@ -10,10 +10,12 @@ export, __all__ = strax.exporter() __all__ += ['ARTIFICIAL_DEADTIME_CHANNEL'] +__all__ += ['SOFTWARE_VETO_CHANNEL'] # Just below the TPC acquisition monitor, see # https://xe1t-wiki.lngs.infn.it/doku.php?id=xenon:xenonnt:dsg:daq:channel_groups ARTIFICIAL_DEADTIME_CHANNEL = 799 +SOFTWARE_VETO_CHANNEL = 798 class ArtificialDeadtimeInserted(UserWarning): diff --git a/straxen/plugins/raw_records_sv/raw_records_sv.py b/straxen/plugins/raw_records_sv/raw_records_sv.py index f26d24d15..dd3d765df 100644 --- a/straxen/plugins/raw_records_sv/raw_records_sv.py +++ b/straxen/plugins/raw_records_sv/raw_records_sv.py @@ -4,6 +4,9 @@ export, __all__ = strax.exporter() +from straxen.plugins.raw_records.daqreader import SOFTWARE_VETO_CHANNEL + + @export class RawRecordsSoftwareVeto(strax.Plugin): @@ -15,9 +18,23 @@ class RawRecordsSoftwareVeto(strax.Plugin): __version__ = '0.0.5' - depends_on = ('raw_records', 'event_info') - provides = 'raw_records_sv' - data_kind = 'raw_records_sv' + depends_on = ('raw_records', 'raw_records_aqmon', 'event_info') + + provides = ( + 'raw_records_sv', + 'raw_records_aqmon_sv', + ) + + data_kind = immutabledict(zip(provides, provides)) + parallel = 'process' + chunk_target_size_mb = 50 + rechunk_on_save = immutabledict( + raw_records_sv=False, + raw_records_aqmon_sv=True, + ) + compressor = 'lz4' + input_timeout = 300 + window = 0 # ns? 
should pass as options @@ -35,12 +52,25 @@ def software_veto_mask(self, e): def compute(self, raw_records, events): - ee = events[self.software_veto_mask(events)] - - return self.get_touching(raw_records, ee) + result = dict() + + events_to_delete = events[self.software_veto_mask(events)] + + veto_mask = self.get_touching_mask(raw_records, events_to_delete) + result['raw_records_sv'] = raw_records[~veto_mask] + + dt = raw_records[0]['dt'] + + result['raw_records_aqmon_sv'] = _software_veto_time( + start=events_to_delete['time'], + end=events_to_delete['endtime'], + dt=dt + ) + + return result - def get_touching(self, things, containers): + def get_touching_mask(self, things, containers): # start with keep everything mask = np.full(len(things), True) @@ -50,9 +80,15 @@ def get_touching(self, things, containers): mask[i0:i1] = False # return only the things outside the containers - return things[mask] - + return mask + def _software_veto_time(self, start, end, dt): + return strax.dict_to_rec( + dict(time=[start], + length=[(end - start) // dt], + dt=np.repeat(dt, len(start)), + channel=np.repeat(SOFTWARE_VETO_CHANNEL, len(start)), + self.dtype_for('raw_records'))) @export class RawRecordsDownSample(strax.Plugin): diff --git a/straxen/plugins/veto_intervals/veto_intervals.py b/straxen/plugins/veto_intervals/veto_intervals.py index f8827d456..0f01e3099 100644 --- a/straxen/plugins/veto_intervals/veto_intervals.py +++ b/straxen/plugins/veto_intervals/veto_intervals.py @@ -29,8 +29,10 @@ class VetoIntervals(strax.OverlapWindowPlugin): - hev_* <= DDC10 hardware high energy veto - straxen_deadtime <= special case of deadtime introduced by the DAQReader-plugin + - software_veto <= raw_records are removed by software veto at event level + """ - __version__ = '1.1.1' + __version__ = '1.1.2' depends_on = 'aqmon_hits' provides = 'veto_intervals' data_kind = 'veto_intervals' @@ -90,8 +92,8 @@ def compute(self, aqmon_hits, start, end): # Straxen deadtime is special, it's a start and stop with no data # but already an interval so easily used here - artificial_deadtime = aqmon_hits[(aqmon_hits['channel'] == - AqmonChannels.ARTIFICIAL_DEADTIME)] + is_artificial = (aqmon_hits['channel'] == AqmonChannels.ARTIFICIAL_DEADTIME) + artificial_deadtime = aqmon_hits[is_artificial] n_artificial = len(artificial_deadtime) if n_artificial: @@ -100,6 +102,18 @@ def compute(self, aqmon_hits, start, end): result[vetos_seen:n_artificial]['veto_type'] = 'straxen_deadtime_veto' vetos_seen += n_artificial + # Software veto is also special, same as artificial deadtime + # so with no data but just time interval. 
Treated in the same way + is_software_veto = (aqmon_hits['channel'] == AqmonChannels.SOFTWARE_VETO) + software_veto = aqmon_hits[is_software_veto] + n_software_veto = len(software_veto) + + if n_software_veto: + result[vetos_seen:n_software_veto]['time'] = software_veto['time'] + result[vetos_seen:n_software_veto]['endtime'] = strax.endtime(software_veto) + result[vetos_seen:n_software_veto]['veto_type'] = 'software_veto' + vetos_seen += n_software_veto + result = result[:vetos_seen] result['veto_interval'] = result['endtime'] - result['time'] sort = np.argsort(result['time']) From 6a54996b4409e2dbc46e3f9e5650fa0b16e18b5c Mon Sep 17 00:00:00 2001 From: cfuselli Date: Thu, 16 Feb 2023 14:29:10 +0000 Subject: [PATCH 03/56] fixed, now works --- .../plugins/raw_records_sv/raw_records_sv.py | 30 ++++++++++++------- 1 file changed, 20 insertions(+), 10 deletions(-) diff --git a/straxen/plugins/raw_records_sv/raw_records_sv.py b/straxen/plugins/raw_records_sv/raw_records_sv.py index dd3d765df..67fc0b08b 100644 --- a/straxen/plugins/raw_records_sv/raw_records_sv.py +++ b/straxen/plugins/raw_records_sv/raw_records_sv.py @@ -1,6 +1,7 @@ import numpy as np import strax import straxen +from immutabledict import immutabledict export, __all__ = strax.exporter() @@ -8,6 +9,14 @@ @export +@strax.takes_config( + + # All these must have track=False, so the raw_records hash never changes! + + # DAQ settings -- should match settings given to redax + strax.Option('record_length', default=110, track=False, type=int, + help="Number of samples per raw_record"), + ) class RawRecordsSoftwareVeto(strax.Plugin): """ @@ -20,7 +29,7 @@ class RawRecordsSoftwareVeto(strax.Plugin): depends_on = ('raw_records', 'raw_records_aqmon', 'event_info') - provides = ( + provides = ( 'raw_records_sv', 'raw_records_aqmon_sv', ) @@ -40,9 +49,10 @@ class RawRecordsSoftwareVeto(strax.Plugin): def infer_dtype(self): - - d = 'raw_records' - return self.deps[d].dtype_for(d) + return { + d: strax.raw_record_dtype( + samples_per_record=self.config["record_length"]) + for d in self.provides} def software_veto_mask(self, e): @@ -50,7 +60,7 @@ def software_veto_mask(self, e): return m - def compute(self, raw_records, events): + def compute(self, raw_records, raw_records_aqmon, events): result = dict() @@ -61,7 +71,7 @@ def compute(self, raw_records, events): dt = raw_records[0]['dt'] - result['raw_records_aqmon_sv'] = _software_veto_time( + result['raw_records_aqmon_sv'] = self._software_veto_time( start=events_to_delete['time'], end=events_to_delete['endtime'], dt=dt @@ -84,11 +94,11 @@ def get_touching_mask(self, things, containers): def _software_veto_time(self, start, end, dt): return strax.dict_to_rec( - dict(time=[start], - length=[(end - start) // dt], + dict(time=start, + length=(end - start) // dt, dt=np.repeat(dt, len(start)), - channel=np.repeat(SOFTWARE_VETO_CHANNEL, len(start)), - self.dtype_for('raw_records'))) + channel=np.repeat(SOFTWARE_VETO_CHANNEL, len(start))), + self.dtype_for('raw_records_sv')) @export class RawRecordsDownSample(strax.Plugin): From 54d48e9645dede57e32b17a587919a4cfbcba1cc Mon Sep 17 00:00:00 2001 From: cfuselli Date: Thu, 16 Feb 2023 14:44:15 +0000 Subject: [PATCH 04/56] aah, it was me --- straxen/plugins/raw_records_sv/raw_records_sv.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/straxen/plugins/raw_records_sv/raw_records_sv.py b/straxen/plugins/raw_records_sv/raw_records_sv.py index 67fc0b08b..7cff047b4 100644 --- a/straxen/plugins/raw_records_sv/raw_records_sv.py +++ 
b/straxen/plugins/raw_records_sv/raw_records_sv.py @@ -20,9 +20,11 @@ class RawRecordsSoftwareVeto(strax.Plugin): """ + Software veto for raw records Depends on event_info + contact: Carlo Fuselli (cfuselli@nikhef.nl) """ __version__ = '0.0.5' From 4001eaeb99888c35ec40b2c6003ef2f02e230a4f Mon Sep 17 00:00:00 2001 From: cfuselli Date: Thu, 16 Feb 2023 14:51:39 +0000 Subject: [PATCH 05/56] inverted mask.. --- straxen/plugins/raw_records_sv/raw_records_sv.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/straxen/plugins/raw_records_sv/raw_records_sv.py b/straxen/plugins/raw_records_sv/raw_records_sv.py index 7cff047b4..7419b6b1a 100644 --- a/straxen/plugins/raw_records_sv/raw_records_sv.py +++ b/straxen/plugins/raw_records_sv/raw_records_sv.py @@ -69,7 +69,7 @@ def compute(self, raw_records, raw_records_aqmon, events): events_to_delete = events[self.software_veto_mask(events)] veto_mask = self.get_touching_mask(raw_records, events_to_delete) - result['raw_records_sv'] = raw_records[~veto_mask] + result['raw_records_sv'] = raw_records[veto_mask] dt = raw_records[0]['dt'] From d7bce26fc091ce22c2cd4ad87697238679aa2295 Mon Sep 17 00:00:00 2001 From: cfuselli Date: Thu, 16 Feb 2023 15:51:01 +0000 Subject: [PATCH 06/56] fixed veto proximity --- straxen/plugins/events/veto_proximity.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/straxen/plugins/events/veto_proximity.py b/straxen/plugins/events/veto_proximity.py index 77411a66b..5dc5e6aed 100644 --- a/straxen/plugins/events/veto_proximity.py +++ b/straxen/plugins/events/veto_proximity.py @@ -40,7 +40,7 @@ class VetoProximity(strax.OverlapWindowPlugin): 'such that one will never cut events that are < YY ns.' ) - veto_names = ['busy', 'busy_he', 'hev', 'straxen_deadtime', 'software_veto'] + veto_names = ['busy', 'busy_he', 'hev', 'straxen_deadtime', 'software'] def infer_dtype(self): dtype = [] @@ -88,16 +88,19 @@ def set_result_for_veto(self, result_buffer[f'time_to_previous_{veto_name}'] = self.time_no_aqmon_veto_found result_buffer[f'time_to_next_{veto_name}'] = self.time_no_aqmon_veto_found + selected_intervals = veto_intervals[veto_intervals['veto_type'] == f'{veto_name}_veto'] if not len(selected_intervals): return vetos_during_event = strax.touching_windows(selected_intervals, event_window) + # Figure out the vetos *during* an event for event_i, veto_window in enumerate(vetos_during_event): if veto_window[1] - veto_window[0]: + vetos_in_window = selected_intervals[veto_window[0]: veto_window[1]].copy() starts = np.clip(vetos_in_window['time'], From 200ab762557a2249723a3c28a76799e9ad0fac11 Mon Sep 17 00:00:00 2001 From: cfuselli Date: Fri, 17 Feb 2023 12:56:00 +0000 Subject: [PATCH 07/56] base plugin --- straxen/plugins/raw_records_sv/__init__.py | 7 +- .../raw_records_sv/_software_veto_base.py | 88 ++++++++++ straxen/plugins/raw_records_sv/radial_veto.py | 31 ++++ .../plugins/raw_records_sv/raw_records_sv.py | 160 ------------------ 4 files changed, 124 insertions(+), 162 deletions(-) create mode 100644 straxen/plugins/raw_records_sv/_software_veto_base.py create mode 100644 straxen/plugins/raw_records_sv/radial_veto.py delete mode 100644 straxen/plugins/raw_records_sv/raw_records_sv.py diff --git a/straxen/plugins/raw_records_sv/__init__.py b/straxen/plugins/raw_records_sv/__init__.py index d58a78a27..442fea920 100644 --- a/straxen/plugins/raw_records_sv/__init__.py +++ b/straxen/plugins/raw_records_sv/__init__.py @@ -1,2 +1,5 @@ -from . 
import raw_records_sv -from .raw_records_sv import * \ No newline at end of file +# from . import raw_records_sv +# from .raw_records_sv import * + +from . import radial_veto +from .radial_veto import * \ No newline at end of file diff --git a/straxen/plugins/raw_records_sv/_software_veto_base.py b/straxen/plugins/raw_records_sv/_software_veto_base.py new file mode 100644 index 000000000..87e635b2d --- /dev/null +++ b/straxen/plugins/raw_records_sv/_software_veto_base.py @@ -0,0 +1,88 @@ +import numpy as np +import strax +import straxen + +export, __all__ = strax.exporter() + +from straxen.plugins.raw_records.daqreader import SOFTWARE_VETO_CHANNEL + +@export +@strax.takes_config( + # DAQ settings -- should match settings given to redax + strax.Option('record_length', default=110, track=False, type=int, + help="Number of samples per raw_record"), + ) +class RawRecordsSoftwareVetoBase(strax.Plugin): + + """ + Software veto for raw records - yes, we throw them away forever! + + contact: Carlo Fuselli (cfuselli@nikhef.nl) + """ + + __version__ = '0.0.5' + + depends_on = ('raw_records', 'raw_records_aqmon', 'event_info') + + parallel = 'process' + chunk_target_size_mb = 50 + compressor = 'lz4' + input_timeout = 300 + + window = 0 # ns (should pass as option) + + + def infer_dtype(self): + return { + d: strax.raw_record_dtype( + samples_per_record=self.config["record_length"]) + for d in self.provides} + + def software_veto_mask(self, e): + + return NotImplementedError(""" + This is a base plugin, + please build a plugin with this function""") + + def compute(self, raw_records, raw_records_aqmon, events): + + result = dict() + dt = raw_records[0]['dt'] + events_to_delete = events[self.software_veto_mask(events)] + + veto_mask = self.get_touching_mask(raw_records, events_to_delete) + + # Result: raw_records to keep + result[self.provides[0]] = raw_records[veto_mask] + + # Result: aqmon to add + result[self.provides[1]] = strax.sort_by_time( + np.concatenate([ + raw_records_aqmon, + self._software_veto_time( + start=events_to_delete['time'], + end=events_to_delete['endtime'], + dt=dt + )])) + + return result + + def get_touching_mask(self, things, containers): + + # start with keep everything + mask = np.full(len(things), True) + + # throw away things inside every container + for i0, i1 in strax.touching_windows(things, containers, window=self.window): + mask[i0:i1] = False + + # return only the things outside the containers + return mask + + def _software_veto_time(self, start, end, dt): + return strax.dict_to_rec( + dict(time=start, + length=(end - start) // dt, + dt=np.repeat(dt, len(start)), + channel=np.repeat(SOFTWARE_VETO_CHANNEL, len(start))), + self.dtype_for(self.provides[0])) diff --git a/straxen/plugins/raw_records_sv/radial_veto.py b/straxen/plugins/raw_records_sv/radial_veto.py new file mode 100644 index 000000000..24ca9095e --- /dev/null +++ b/straxen/plugins/raw_records_sv/radial_veto.py @@ -0,0 +1,31 @@ +import strax +import straxen +from immutabledict import immutabledict +from straxen.plugins.raw_records_sv._software_veto_base import RawRecordsSoftwareVetoBase + +export, __all__ = strax.exporter() + +@export +class RawRecordsRadialVeto(RawRecordsSoftwareVetoBase): + """ + Radial sofrtare veto + Deletes raw records of events outside certain r + """ + + provides = ( + 'raw_records_sv', + 'raw_records_aqmon_sv', + ) + + data_kind = immutabledict(zip(provides, provides)) + + rechunk_on_save = immutabledict( + raw_records_sv=False, + raw_records_aqmon_sv=True, + ) + + def 
software_veto_mask(self, e): + + m = (e['x']**2 + e['y']**2) > 50**2 + + return m \ No newline at end of file diff --git a/straxen/plugins/raw_records_sv/raw_records_sv.py b/straxen/plugins/raw_records_sv/raw_records_sv.py deleted file mode 100644 index 7419b6b1a..000000000 --- a/straxen/plugins/raw_records_sv/raw_records_sv.py +++ /dev/null @@ -1,160 +0,0 @@ -import numpy as np -import strax -import straxen -from immutabledict import immutabledict - -export, __all__ = strax.exporter() - -from straxen.plugins.raw_records.daqreader import SOFTWARE_VETO_CHANNEL - - -@export -@strax.takes_config( - - # All these must have track=False, so the raw_records hash never changes! - - # DAQ settings -- should match settings given to redax - strax.Option('record_length', default=110, track=False, type=int, - help="Number of samples per raw_record"), - ) -class RawRecordsSoftwareVeto(strax.Plugin): - - """ - - Software veto for raw records - Depends on event_info - - contact: Carlo Fuselli (cfuselli@nikhef.nl) - """ - - __version__ = '0.0.5' - - depends_on = ('raw_records', 'raw_records_aqmon', 'event_info') - - provides = ( - 'raw_records_sv', - 'raw_records_aqmon_sv', - ) - - data_kind = immutabledict(zip(provides, provides)) - parallel = 'process' - chunk_target_size_mb = 50 - rechunk_on_save = immutabledict( - raw_records_sv=False, - raw_records_aqmon_sv=True, - ) - compressor = 'lz4' - input_timeout = 300 - - - window = 0 # ns? should pass as options - - - def infer_dtype(self): - return { - d: strax.raw_record_dtype( - samples_per_record=self.config["record_length"]) - for d in self.provides} - - def software_veto_mask(self, e): - - m = (e['x']**2 + e['y']**2) > 50**2 - - return m - - def compute(self, raw_records, raw_records_aqmon, events): - - result = dict() - - events_to_delete = events[self.software_veto_mask(events)] - - veto_mask = self.get_touching_mask(raw_records, events_to_delete) - result['raw_records_sv'] = raw_records[veto_mask] - - dt = raw_records[0]['dt'] - - result['raw_records_aqmon_sv'] = self._software_veto_time( - start=events_to_delete['time'], - end=events_to_delete['endtime'], - dt=dt - ) - - return result - - - def get_touching_mask(self, things, containers): - - # start with keep everything - mask = np.full(len(things), True) - - # throw away things inside every container - for i0, i1 in strax.touching_windows(things, containers, window=self.window): - mask[i0:i1] = False - - # return only the things outside the containers - return mask - - def _software_veto_time(self, start, end, dt): - return strax.dict_to_rec( - dict(time=start, - length=(end - start) // dt, - dt=np.repeat(dt, len(start)), - channel=np.repeat(SOFTWARE_VETO_CHANNEL, len(start))), - self.dtype_for('raw_records_sv')) - -@export -class RawRecordsDownSample(strax.Plugin): - - """ - Software data manipulation - The raw_records of big selected S2s are 'downsampled' - The lenght of the array stays the same, but with averaged values - so the data reduction is only at compressed level - """ - - __version__ = '0.0.0' - - depends_on = ('raw_records', 'peak_basics') - provides = 'raw_records_down_sample' - data_kind = 'raw_records_down_sample' - - window = 0 # ns? 
should pass as options - downsampling_factor = 5 # must be able to divide 110 with - - def infer_dtype(self): - - d = 'raw_records' - return self.deps[d].dtype_for(d) - - def peaks_mask(self, p): - - mask = p['type'] == 2 - mask &= p['area'] > 500000 # PE - mask &= p['range_50p_area'] > 5000 # ns - - return mask - - def compute(self, raw_records, peaks): - - pp = peaks[self.peaks_mask(peaks)] - mask = self.get_touching_mask(raw_records, pp) - - rr = raw_records.copy() - - rr['data'][mask] = raw_records['data'][mask].reshape(len(raw_records[mask]), - -1, - self.downsampling_factor, ).mean(axis=2).repeat(self.downsampling_factor, - axis=1) - - return rr - - - def get_touching_mask(self, things, containers): - - mask = np.full(len(things), False) - - for i0, i1 in strax.touching_windows(things, containers, window=self.window): - mask[i0:i1] = True - - return mask - From dd92b53eadc894cbc617806493ac22a69c9de36f Mon Sep 17 00:00:00 2001 From: cfuselli Date: Fri, 17 Feb 2023 13:00:14 +0000 Subject: [PATCH 08/56] edit init --- straxen/plugins/raw_records_sv/__init__.py | 3 --- 1 file changed, 3 deletions(-) diff --git a/straxen/plugins/raw_records_sv/__init__.py b/straxen/plugins/raw_records_sv/__init__.py index 442fea920..8253e574a 100644 --- a/straxen/plugins/raw_records_sv/__init__.py +++ b/straxen/plugins/raw_records_sv/__init__.py @@ -1,5 +1,2 @@ -# from . import raw_records_sv -# from .raw_records_sv import * - from . import radial_veto from .radial_veto import * \ No newline at end of file From cfbb972322ce0cdbaff69ca6edb8dcd94e548c73 Mon Sep 17 00:00:00 2001 From: cfuselli Date: Wed, 22 Feb 2023 14:51:12 +0000 Subject: [PATCH 09/56] modified plugin structure --- straxen/plugins/raw_records_sv/__init__.py | 3 +-- .../raw_records_sv/_software_veto_base.py | 15 ++++++++++- .../{radial_veto.py => software_veto.py} | 27 +++++++++++-------- 3 files changed, 31 insertions(+), 14 deletions(-) rename straxen/plugins/raw_records_sv/{radial_veto.py => software_veto.py} (51%) diff --git a/straxen/plugins/raw_records_sv/__init__.py b/straxen/plugins/raw_records_sv/__init__.py index 8253e574a..c7026a248 100644 --- a/straxen/plugins/raw_records_sv/__init__.py +++ b/straxen/plugins/raw_records_sv/__init__.py @@ -1,2 +1 @@ -from . 
import radial_veto -from .radial_veto import * \ No newline at end of file +# we don't want to import software veto plugins by default \ No newline at end of file diff --git a/straxen/plugins/raw_records_sv/_software_veto_base.py b/straxen/plugins/raw_records_sv/_software_veto_base.py index 87e635b2d..5044815af 100644 --- a/straxen/plugins/raw_records_sv/_software_veto_base.py +++ b/straxen/plugins/raw_records_sv/_software_veto_base.py @@ -1,6 +1,7 @@ import numpy as np import strax import straxen +from immutabledict import immutabledict export, __all__ = strax.exporter() @@ -24,14 +25,26 @@ class RawRecordsSoftwareVetoBase(strax.Plugin): depends_on = ('raw_records', 'raw_records_aqmon', 'event_info') + provides = ( + 'raw_records_sv', + 'raw_records_aqmon_sv', + ) + + data_kind = immutabledict(zip(provides, provides)) + + rechunk_on_save = immutabledict( + raw_records_sv=False, + raw_records_aqmon_sv=True, + ) + parallel = 'process' chunk_target_size_mb = 50 compressor = 'lz4' input_timeout = 300 + # TODO test with window > 0 window = 0 # ns (should pass as option) - def infer_dtype(self): return { d: strax.raw_record_dtype( diff --git a/straxen/plugins/raw_records_sv/radial_veto.py b/straxen/plugins/raw_records_sv/software_veto.py similarity index 51% rename from straxen/plugins/raw_records_sv/radial_veto.py rename to straxen/plugins/raw_records_sv/software_veto.py index 24ca9095e..f20637e96 100644 --- a/straxen/plugins/raw_records_sv/radial_veto.py +++ b/straxen/plugins/raw_records_sv/software_veto.py @@ -6,26 +6,31 @@ export, __all__ = strax.exporter() @export -class RawRecordsRadialVeto(RawRecordsSoftwareVetoBase): +class RadialVeto(RawRecordsSoftwareVetoBase): """ Radial sofrtare veto Deletes raw records of events outside certain r """ - provides = ( - 'raw_records_sv', - 'raw_records_aqmon_sv', - ) + __version__ = '0.0.1' - data_kind = immutabledict(zip(provides, provides)) + def software_veto_mask(self, e): + + m = (e['x']**2 + e['y']**2) > 50**2 + + return m - rechunk_on_save = immutabledict( - raw_records_sv=False, - raw_records_aqmon_sv=True, - ) +@export +class HighEnergyVeto(RawRecordsSoftwareVetoBase): + """ + High energy sofrtare veto + Deletes raw records for events with high s1 and s2 area + """ + + __version__ = '0.0.1' def software_veto_mask(self, e): - m = (e['x']**2 + e['y']**2) > 50**2 + m = (e['s1_area'] > 1000) & (e['s2_area'] > 100000) return m \ No newline at end of file From aa48e4292787260f06cb058689498e1e366bd9c1 Mon Sep 17 00:00:00 2001 From: cfuselli Date: Wed, 22 Feb 2023 16:10:12 +0000 Subject: [PATCH 10/56] modified plugin structures --- straxen/plugins/raw_records_sv/software_veto.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/straxen/plugins/raw_records_sv/software_veto.py b/straxen/plugins/raw_records_sv/software_veto.py index f20637e96..46ab42d8d 100644 --- a/straxen/plugins/raw_records_sv/software_veto.py +++ b/straxen/plugins/raw_records_sv/software_veto.py @@ -12,7 +12,7 @@ class RadialVeto(RawRecordsSoftwareVetoBase): Deletes raw records of events outside certain r """ - __version__ = '0.0.1' + __version__ = 'radial-veto-0.0.1' def software_veto_mask(self, e): @@ -27,7 +27,7 @@ class HighEnergyVeto(RawRecordsSoftwareVetoBase): Deletes raw records for events with high s1 and s2 area """ - __version__ = '0.0.1' + __version__ = 'high-energy-veto-0.0.1' def software_veto_mask(self, e): From 0c0ffc0eecbdb28b42616d854359f185f298e58d Mon Sep 17 00:00:00 2001 From: cfuselli Date: Thu, 23 Feb 2023 12:07:44 +0000 Subject: [PATCH 
11/56] refactor --- straxen/plugins/raw_records_sv/_software_veto_base.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/straxen/plugins/raw_records_sv/_software_veto_base.py b/straxen/plugins/raw_records_sv/_software_veto_base.py index 5044815af..60b87dc58 100644 --- a/straxen/plugins/raw_records_sv/_software_veto_base.py +++ b/straxen/plugins/raw_records_sv/_software_veto_base.py @@ -51,7 +51,7 @@ def infer_dtype(self): samples_per_record=self.config["record_length"]) for d in self.provides} - def software_veto_mask(self, e): + def software_veto_mask(self, events): return NotImplementedError(""" This is a base plugin, From f35f576342528e2859baa7a555832c3449ebfdd9 Mon Sep 17 00:00:00 2001 From: cfuselli Date: Thu, 23 Feb 2023 12:15:46 +0000 Subject: [PATCH 12/56] add prescaling option --- .../plugins/raw_records_sv/_software_veto_base.py | 15 +++++++++++++++ 1 file changed, 15 insertions(+) diff --git a/straxen/plugins/raw_records_sv/_software_veto_base.py b/straxen/plugins/raw_records_sv/_software_veto_base.py index 60b87dc58..e744fd687 100644 --- a/straxen/plugins/raw_records_sv/_software_veto_base.py +++ b/straxen/plugins/raw_records_sv/_software_veto_base.py @@ -45,6 +45,13 @@ class RawRecordsSoftwareVetoBase(strax.Plugin): # TODO test with window > 0 window = 0 # ns (should pass as option) + # this is for pre_scaling (keep a fracion of the events we want to delete) + # 0 to delete all non-wanted raw_records + # 0.5 to keep half of the non-wanted raw_records + # 1 the software veto is basically deactivated + pre_scaling_factor = 0 + + def infer_dtype(self): return { d: strax.raw_record_dtype( @@ -61,8 +68,16 @@ def compute(self, raw_records, raw_records_aqmon, events): result = dict() dt = raw_records[0]['dt'] + + # define events of which to delete raw_records events_to_delete = events[self.software_veto_mask(events)] + # apply pre-scaling + r = np.random.random(len(events_to_delete)) + pre_scaling_mask = (r Date: Thu, 23 Feb 2023 12:26:22 +0000 Subject: [PATCH 13/56] url config --- .../raw_records_sv/_software_veto_base.py | 22 ++++++++++--------- 1 file changed, 12 insertions(+), 10 deletions(-) diff --git a/straxen/plugins/raw_records_sv/_software_veto_base.py b/straxen/plugins/raw_records_sv/_software_veto_base.py index e744fd687..4577fa23f 100644 --- a/straxen/plugins/raw_records_sv/_software_veto_base.py +++ b/straxen/plugins/raw_records_sv/_software_veto_base.py @@ -42,15 +42,17 @@ class RawRecordsSoftwareVetoBase(strax.Plugin): compressor = 'lz4' input_timeout = 300 - # TODO test with window > 0 - window = 0 # ns (should pass as option) - - # this is for pre_scaling (keep a fracion of the events we want to delete) - # 0 to delete all non-wanted raw_records - # 0.5 to keep half of the non-wanted raw_records - # 1 the software veto is basically deactivated - pre_scaling_factor = 0 + software_veto_touching_window = straxen.URLConfig( + default=int(0), infer_type=False, + help='Strax touching window for container and thing (raw_records and events).') + + software_veto_pre_scaling = straxen.URLConfig( + default=int(0), infer_type=False, + help='This sets the pre_scaling factor (keep a fracion of the events we want to delete)' + ' 0 to delete all non-wanted raw_records' + ' 0.5 to keep half of the non-wanted raw_records' + ' 1 the software veto is basically deactivated') def infer_dtype(self): return { @@ -74,7 +76,7 @@ def compute(self, raw_records, raw_records_aqmon, events): # apply pre-scaling r = np.random.random(len(events_to_delete)) - pre_scaling_mask = (r 
Date: Thu, 23 Feb 2023 13:37:26 +0000 Subject: [PATCH 14/56] allow for both peaks and event cuts --- .../raw_records_sv/_software_veto_base.py | 41 ++++++++++++------- .../plugins/raw_records_sv/software_veto.py | 16 ++++++++ 2 files changed, 43 insertions(+), 14 deletions(-) diff --git a/straxen/plugins/raw_records_sv/_software_veto_base.py b/straxen/plugins/raw_records_sv/_software_veto_base.py index 4577fa23f..f87e55bf3 100644 --- a/straxen/plugins/raw_records_sv/_software_veto_base.py +++ b/straxen/plugins/raw_records_sv/_software_veto_base.py @@ -23,7 +23,10 @@ class RawRecordsSoftwareVetoBase(strax.Plugin): __version__ = '0.0.5' - depends_on = ('raw_records', 'raw_records_aqmon', 'event_info') + # if extra dependency on i.e. peaks_proximity is needed, + # redefine the depends_on in the software_vet0.py plugin (see ExamplePeakLevel) + # keeping the order raw_records, raw_records_aqmon, peaks, events + depends_on = ('raw_records', 'raw_records_aqmon', 'peak_basics', 'event_info') provides = ( 'raw_records_sv', @@ -42,6 +45,8 @@ class RawRecordsSoftwareVetoBase(strax.Plugin): compressor = 'lz4' input_timeout = 300 + # can be either 'events' or 'peaks' + veto_mask_on = 'events' software_veto_touching_window = straxen.URLConfig( default=int(0), infer_type=False, @@ -60,27 +65,31 @@ def infer_dtype(self): samples_per_record=self.config["record_length"]) for d in self.provides} - def software_veto_mask(self, events): + def software_veto_mask(self, objects): return NotImplementedError(""" This is a base plugin, please build a plugin with this function""") - def compute(self, raw_records, raw_records_aqmon, events): + def compute(self, raw_records, raw_records_aqmon, peaks, events): result = dict() - dt = raw_records[0]['dt'] # define events of which to delete raw_records - events_to_delete = events[self.software_veto_mask(events)] - - # apply pre-scaling - r = np.random.random(len(events_to_delete)) - pre_scaling_mask = (rself.software_veto_pre_scaling) + objects_to_delete = objects_to_delete[pre_scaling_mask] # get mask of raw_records to delete - veto_mask = self.get_touching_mask(raw_records, events_to_delete) + veto_mask = self.get_touching_mask(raw_records, objects_to_delete) # Result: raw_records to keep result[self.provides[0]] = raw_records[veto_mask] @@ -90,15 +99,18 @@ def compute(self, raw_records, raw_records_aqmon, events): np.concatenate([ raw_records_aqmon, self._software_veto_time( - start=events_to_delete['time'], - end=events_to_delete['endtime'], - dt=dt + start=objects_to_delete['time'], + end=strax.endtime(objects_to_delete), + dt=raw_records[0]['dt'] )])) return result def get_touching_mask(self, things, containers): + # things = raw_records + # containers = i.e. 
events + # start with keep everything mask = np.full(len(things), True) @@ -110,6 +122,7 @@ def get_touching_mask(self, things, containers): return mask def _software_veto_time(self, start, end, dt): + return strax.dict_to_rec( dict(time=start, length=(end - start) // dt, diff --git a/straxen/plugins/raw_records_sv/software_veto.py b/straxen/plugins/raw_records_sv/software_veto.py index 46ab42d8d..7dd59cd27 100644 --- a/straxen/plugins/raw_records_sv/software_veto.py +++ b/straxen/plugins/raw_records_sv/software_veto.py @@ -33,4 +33,20 @@ def software_veto_mask(self, e): m = (e['s1_area'] > 1000) & (e['s2_area'] > 100000) + return m + +@export +class ExamplePeakLevel(RawRecordsSoftwareVetoBase): + """ + High energy sofrtare veto + Deletes raw records for events with high s1 and s2 area + """ + + __version__ = 'example-peak-level-0.0.1' + veto_mask_on = 'peaks' + + def software_veto_mask(self, p): + + m = (p['type'] == 2) & (p['area'] > 100000) + return m \ No newline at end of file From 47c1545e78fffccecd4d4df4548d60b115f246fc Mon Sep 17 00:00:00 2001 From: cfuselli Date: Thu, 23 Feb 2023 16:25:35 +0000 Subject: [PATCH 15/56] change deps, thx joran --- .../plugins/raw_records_sv/_software_veto_base.py | 14 +++----------- straxen/plugins/raw_records_sv/software_veto.py | 11 +++++++---- 2 files changed, 10 insertions(+), 15 deletions(-) diff --git a/straxen/plugins/raw_records_sv/_software_veto_base.py b/straxen/plugins/raw_records_sv/_software_veto_base.py index f87e55bf3..bdf56d4dd 100644 --- a/straxen/plugins/raw_records_sv/_software_veto_base.py +++ b/straxen/plugins/raw_records_sv/_software_veto_base.py @@ -26,7 +26,7 @@ class RawRecordsSoftwareVetoBase(strax.Plugin): # if extra dependency on i.e. peaks_proximity is needed, # redefine the depends_on in the software_vet0.py plugin (see ExamplePeakLevel) # keeping the order raw_records, raw_records_aqmon, peaks, events - depends_on = ('raw_records', 'raw_records_aqmon', 'peak_basics', 'event_info') + depends_on = ('raw_records', 'raw_records_aqmon', 'event_info') provides = ( 'raw_records_sv', @@ -45,9 +45,6 @@ class RawRecordsSoftwareVetoBase(strax.Plugin): compressor = 'lz4' input_timeout = 300 - # can be either 'events' or 'peaks' - veto_mask_on = 'events' - software_veto_touching_window = straxen.URLConfig( default=int(0), infer_type=False, help='Strax touching window for container and thing (raw_records and events).') @@ -71,17 +68,12 @@ def software_veto_mask(self, objects): This is a base plugin, please build a plugin with this function""") - def compute(self, raw_records, raw_records_aqmon, peaks, events): + def compute(self, raw_records, raw_records_aqmon, events): result = dict() # define events of which to delete raw_records - if self.veto_mask_on == 'events': - objects_to_delete = events[self.software_veto_mask(events)] - elif self.veto_mask_on == 'peaks': - objects_to_delete = peaks[self.software_veto_mask(peaks)] - else: - return NotImplementedError("Currently possible to cut on events or peaks data-kinds only") + objects_to_delete = events[self.software_veto_mask(events)] # apply pre-scaling and update objects to delete r = np.random.random(len(objects_to_delete)) diff --git a/straxen/plugins/raw_records_sv/software_veto.py b/straxen/plugins/raw_records_sv/software_veto.py index 7dd59cd27..a65131400 100644 --- a/straxen/plugins/raw_records_sv/software_veto.py +++ b/straxen/plugins/raw_records_sv/software_veto.py @@ -38,13 +38,16 @@ def software_veto_mask(self, e): @export class ExamplePeakLevel(RawRecordsSoftwareVetoBase): 
""" - High energy sofrtare veto - Deletes raw records for events with high s1 and s2 area + Example veto on peak level, needs to specify veto_mask_on """ - __version__ = 'example-peak-level-0.0.1' - veto_mask_on = 'peaks' + __version__ = 'example-peak-level-0.0.2' + depends_on = ('raw_records', 'raw_records_aqmon', 'peak_basics') + def compute(self, raw_records, raw_records_aqmon, peaks): + # base class written to work on events, but we just care about the time intervals + return super().compute(raw_records, raw_records_aqmon, events=peaks) + def software_veto_mask(self, p): m = (p['type'] == 2) & (p['area'] > 100000) From e81f6a1babb7a064a5270eac9a2667018838eac5 Mon Sep 17 00:00:00 2001 From: cfuselli Date: Sun, 5 Mar 2023 17:24:32 +0000 Subject: [PATCH 16/56] start work on bootstrax --- bin/bootstrax | 39 +++++++++++++++++++++++++++++++++++++++ 1 file changed, 39 insertions(+) diff --git a/bin/bootstrax b/bin/bootstrax index 0b0715806..0e9930845 100755 --- a/bin/bootstrax +++ b/bin/bootstrax @@ -86,6 +86,9 @@ parser.add_argument( parser.add_argument( '--max_messages', type=int, default=10, help="number of max mailbox messages") +parser.add_argument( + '--software_veto', type=int, default=None, + help="Class name of veto plugin to apply (i.e.: RadialVeto)") actions = parser.add_mutually_exclusive_group() @@ -295,6 +298,11 @@ def new_context(cores=args.cores, strax.DataDirectory(output_folder)] context.storage[0].readonly = True context.storage[0].local_only = True + + if software_veto_is_on(): + + register_software_veto_plugins(context) + return context @@ -1642,6 +1650,37 @@ def cleanup_db(): abandon(mongo_id=rd['_id']) + + +########################### +# Software veto functions # +########################### + + + +def software_veto_is_on(): + """Decide if software veto is on based on arguments or rundoc""" + + # TODO + return True + +def register_software_veto_plugins(context): + """Based on the selection that we want to apply, + register the correct veto plugin. + Probably we want to pass it as argument and if possible overwrite it from rundoc. + Then register a copy of all the plugins. 
+ """ + + # TODO + plugin_name = 'RadialVeto' + + import straxen.plugins.raw_records_sv.software_veto as software_veto + plugin = getattr(straxen.plugins.raw_records_sv.software_veto, plugin_name) + context.register(plugin) + + + + if __name__ == '__main__': if not args.undying: main() From 797312073429679b7da8b7ca9446f8d7aff9427d Mon Sep 17 00:00:00 2001 From: cfuselli Date: Sun, 5 Mar 2023 17:35:43 +0000 Subject: [PATCH 17/56] hack you --- straxen/plugins/events/event_area_per_channel.py | 3 ++- straxen/plugins/events/event_basics.py | 4 +++- straxen/plugins/merged_s2s/merged_s2s.py | 3 ++- straxen/plugins/merged_s2s_he/merged_s2s_he.py | 4 +++- straxen/plugins/peaklets/peaklets.py | 2 +- straxen/plugins/peaks/peaks.py | 3 ++- straxen/plugins/peaks_he/peaks_he.py | 3 ++- straxen/plugins/raw_records/daqreader.py | 11 +++++++++-- straxen/plugins/raw_records_coin_nv/nveto_recorder.py | 2 +- straxen/plugins/records/records.py | 2 +- straxen/plugins/records_nv/records_nv.py | 2 +- 11 files changed, 27 insertions(+), 12 deletions(-) diff --git a/straxen/plugins/events/event_area_per_channel.py b/straxen/plugins/events/event_area_per_channel.py index fd1593127..56c1a9e66 100644 --- a/straxen/plugins/events/event_area_per_channel.py +++ b/straxen/plugins/events/event_area_per_channel.py @@ -22,7 +22,8 @@ class EventAreaPerChannel(strax.LoopPlugin): def infer_dtype(self): # setting data type from peak dtype - pfields_=self.deps['peaks'].dtype_for('peaks').fields + _dtype_for = self.depends_on[1] + pfields_=self.deps[_dtype_for].dtype_for(_dtype_for).fields ## Populating data type infoline = {'s1': 'main S1', 's2': 'main S2', diff --git a/straxen/plugins/events/event_basics.py b/straxen/plugins/events/event_basics.py index 179d104c1..16ee3e7cf 100644 --- a/straxen/plugins/events/event_basics.py +++ b/straxen/plugins/events/event_basics.py @@ -146,7 +146,9 @@ def _set_posrec_save(self): parse x_mlp et cetera if needed to get the algorithms used and set required class attributes """ - posrec_fields = self.deps['peak_positions'].dtype_for('peak_positions').names + _dtype_for = self.depends_on[2] + + posrec_fields = self.deps[_dtype_for].dtype_for(_dtype_for).names posrec_names = [d.split('_')[-1] for d in posrec_fields if 'x_' in d] # Preserve order. 
"set" is not ordered and dtypes should always be ordered diff --git a/straxen/plugins/merged_s2s/merged_s2s.py b/straxen/plugins/merged_s2s/merged_s2s.py index 1e35460d8..3df4aff25 100644 --- a/straxen/plugins/merged_s2s/merged_s2s.py +++ b/straxen/plugins/merged_s2s/merged_s2s.py @@ -64,7 +64,8 @@ def setup(self): self.to_pe = self.gain_model def infer_dtype(self): - return strax.unpack_dtype(self.deps['peaklets'].dtype_for('peaklets')) + _dtype_for = self.depends_on[0] + return strax.unpack_dtype(self.deps[_dtype_for].dtype_for(_dtype_for)) def get_window_size(self): return 5 * (int(self.s2_merge_gap_thresholds[0][1]) diff --git a/straxen/plugins/merged_s2s_he/merged_s2s_he.py b/straxen/plugins/merged_s2s_he/merged_s2s_he.py index 99d4b8966..3bc12393a 100644 --- a/straxen/plugins/merged_s2s_he/merged_s2s_he.py +++ b/straxen/plugins/merged_s2s_he/merged_s2s_he.py @@ -25,7 +25,9 @@ def n_tpc_pmts(self): return self.n_he_pmts def infer_dtype(self): - return strax.unpack_dtype(self.deps['peaklets_he'].dtype_for('peaklets_he')) + _dtype_for = self.depends_on[0] # raw_records + + return strax.unpack_dtype(self.deps[_dtype_for].dtype_for(_dtype_for)) def compute(self, peaklets_he): # There are not any lone hits for the high energy channel, diff --git a/straxen/plugins/peaklets/peaklets.py b/straxen/plugins/peaklets/peaklets.py index 639b0e8d6..dd3ff9f33 100644 --- a/straxen/plugins/peaklets/peaklets.py +++ b/straxen/plugins/peaklets/peaklets.py @@ -311,7 +311,7 @@ def compute(self, records, start, end): # Drop the data_top field if n_top_pmts_if_digitize_top <= 0: - peaklets = drop_data_top_field(peaklets, self.dtype_for('peaklets')) + peaklets = drop_data_top_field(peaklets, self.dtype_for(self.provides[0])) return dict(peaklets=peaklets, lone_hits=lone_hits) diff --git a/straxen/plugins/peaks/peaks.py b/straxen/plugins/peaks/peaks.py index e544a701b..a2a084ae7 100644 --- a/straxen/plugins/peaks/peaks.py +++ b/straxen/plugins/peaks/peaks.py @@ -32,7 +32,8 @@ class Peaks(strax.Plugin): "It's now possible for a S1 to be inside a S2 post merging") def infer_dtype(self): - return self.deps['peaklets'].dtype_for('peaklets') + _dtype = self.depends_on[0] # peaklets + return self.deps[_dtype].dtype_for(_dtype) def compute(self, peaklets, merged_s2s): # Remove fake merged S2s from dirty hack, see above diff --git a/straxen/plugins/peaks_he/peaks_he.py b/straxen/plugins/peaks_he/peaks_he.py index 67bf25d44..af9f918f5 100644 --- a/straxen/plugins/peaks_he/peaks_he.py +++ b/straxen/plugins/peaks_he/peaks_he.py @@ -21,7 +21,8 @@ class PeaksHighEnergy(Peaks): child_ends_with = '_he' def infer_dtype(self): - return self.deps['peaklets_he'].dtype_for('peaklets') + _dtype_for = self.depends_on[0] + return self.deps[_dtype_for].dtype_for(_dtype_for) def compute(self, peaklets_he, merged_s2s_he): return super().compute(peaklets_he, merged_s2s_he) diff --git a/straxen/plugins/raw_records/daqreader.py b/straxen/plugins/raw_records/daqreader.py index 4a6deff94..5b664401c 100644 --- a/straxen/plugins/raw_records/daqreader.py +++ b/straxen/plugins/raw_records/daqreader.py @@ -190,11 +190,14 @@ def is_ready(self, chunk_i): return False def _load_chunk(self, path, start, end, kind='central'): + + _dtype_for = self.depends_on[0] # raw_records + records = [ strax.load_file( fn, compressor=self.config["daq_compressor"], - dtype=self.dtype_for('raw_records')) + dtype=self.dtype_for(_dtype_for)) for fn in sorted(glob.glob(f'{path}/*'))] records = np.concatenate(records) records = strax.sort_by_time(records) @@ -271,12 
+274,16 @@ def _load_chunk(self, path, start, end, kind='central'): return result, break_time def _artificial_dead_time(self, start, end, dt): + + _dtype_for = self.depends_on[0] # raw_records + + return strax.dict_to_rec( dict(time=[start], length=[(end - start) // dt], dt=[dt], channel=[ARTIFICIAL_DEADTIME_CHANNEL]), - self.dtype_for('raw_records')) + self.dtype_for(_dtype_for)) def compute(self, chunk_i): dt_central = self.config['daq_chunk_duration'] diff --git a/straxen/plugins/raw_records_coin_nv/nveto_recorder.py b/straxen/plugins/raw_records_coin_nv/nveto_recorder.py index 3bc54e075..45c098a91 100644 --- a/straxen/plugins/raw_records_coin_nv/nveto_recorder.py +++ b/straxen/plugins/raw_records_coin_nv/nveto_recorder.py @@ -90,7 +90,7 @@ def setup(self): def infer_dtype(self): self.record_length = strax.record_length_from_dtype( - self.deps['raw_records_nv'].dtype_for('raw_records_nv')) + self.deps[self.depends_on].dtype_for(self.depends_on)) channel_range = self.channel_map['nveto'] n_channel = (channel_range[1] - channel_range[0]) + 1 diff --git a/straxen/plugins/records/records.py b/straxen/plugins/records/records.py index b1359421d..91ff81e07 100644 --- a/straxen/plugins/records/records.py +++ b/straxen/plugins/records/records.py @@ -121,7 +121,7 @@ class PulseProcessing(strax.Plugin): def infer_dtype(self): # Get record_length from the plugin making raw_records self.record_length = strax.record_length_from_dtype( - self.deps['raw_records'].dtype_for('raw_records')) + self.deps[self.depends_on].dtype_for(self.depends_on)) dtype = dict() for p in self.provides: diff --git a/straxen/plugins/records_nv/records_nv.py b/straxen/plugins/records_nv/records_nv.py index 8676acba0..d88c72771 100644 --- a/straxen/plugins/records_nv/records_nv.py +++ b/straxen/plugins/records_nv/records_nv.py @@ -53,7 +53,7 @@ def setup(self): def infer_dtype(self): record_length = strax.record_length_from_dtype( - self.deps['raw_records_coin_nv'].dtype_for('raw_records_coin_nv')) + self.deps[self.depends_on].dtype_for(self.depends_on)) dtype = strax.record_dtype(record_length) return dtype From 0cdf70df9eddc46d7a0da57fc7403c8a0f83896f Mon Sep 17 00:00:00 2001 From: cfuselli Date: Sun, 5 Mar 2023 18:36:38 +0000 Subject: [PATCH 18/56] fix indeces --- straxen/plugins/raw_records_coin_nv/nveto_recorder.py | 2 +- straxen/plugins/records/records.py | 2 +- straxen/plugins/records_nv/records_nv.py | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/straxen/plugins/raw_records_coin_nv/nveto_recorder.py b/straxen/plugins/raw_records_coin_nv/nveto_recorder.py index 45c098a91..27a3c3dbd 100644 --- a/straxen/plugins/raw_records_coin_nv/nveto_recorder.py +++ b/straxen/plugins/raw_records_coin_nv/nveto_recorder.py @@ -90,7 +90,7 @@ def setup(self): def infer_dtype(self): self.record_length = strax.record_length_from_dtype( - self.deps[self.depends_on].dtype_for(self.depends_on)) + self.deps[self.depends_on[0]].dtype_for(self.depends_on[0])) channel_range = self.channel_map['nveto'] n_channel = (channel_range[1] - channel_range[0]) + 1 diff --git a/straxen/plugins/records/records.py b/straxen/plugins/records/records.py index 91ff81e07..815f6c4db 100644 --- a/straxen/plugins/records/records.py +++ b/straxen/plugins/records/records.py @@ -121,7 +121,7 @@ class PulseProcessing(strax.Plugin): def infer_dtype(self): # Get record_length from the plugin making raw_records self.record_length = strax.record_length_from_dtype( - self.deps[self.depends_on].dtype_for(self.depends_on)) + 
self.deps[self.depends_on[0]].dtype_for(self.depends_on[0])) dtype = dict() for p in self.provides: diff --git a/straxen/plugins/records_nv/records_nv.py b/straxen/plugins/records_nv/records_nv.py index d88c72771..629a69f9c 100644 --- a/straxen/plugins/records_nv/records_nv.py +++ b/straxen/plugins/records_nv/records_nv.py @@ -53,7 +53,7 @@ def setup(self): def infer_dtype(self): record_length = strax.record_length_from_dtype( - self.deps[self.depends_on].dtype_for(self.depends_on)) + self.deps[self.depends_on[0]].dtype_for(self.depends_on[0])) dtype = strax.record_dtype(record_length) return dtype From ce9af246c8ec9b37989ca97039273ad48c9728ae Mon Sep 17 00:00:00 2001 From: cfuselli Date: Sun, 5 Mar 2023 22:34:01 +0000 Subject: [PATCH 19/56] start working on bootstrax --- bin/bootstrax | 72 +++++++++++++++++++++++++++++++++++++++++++++------ 1 file changed, 64 insertions(+), 8 deletions(-) diff --git a/bin/bootstrax b/bin/bootstrax index 0e9930845..c185439b7 100755 --- a/bin/bootstrax +++ b/bin/bootstrax @@ -299,10 +299,6 @@ def new_context(cores=args.cores, context.storage[0].readonly = True context.storage[0].local_only = True - if software_veto_is_on(): - - register_software_veto_plugins(context) - return context @@ -1201,6 +1197,17 @@ def run_strax(run_id, input_dir, targets, readout_threads, compressor, timeout=timeout, ) + if software_veto_is_on(): + + software_veto_register_raw_records(st) + software_veto_register_fly(st, run_id) + for t in targets: + post_processing.append(t+'_sv') + _post_processing = post_processing.copy() + for t in _post_processing: + post_processing.append(t+'_sv') + + for t in ('raw_records', 'records', 'records_nv', 'hitlets_nv'): # Set the (raw)records processor to the inferred one st._plugin_class_registry[t].compressor = records_compressor @@ -1664,7 +1671,7 @@ def software_veto_is_on(): # TODO return True -def register_software_veto_plugins(context): +def software_veto_register_raw_records(st): """Based on the selection that we want to apply, register the correct veto plugin. Probably we want to pass it as argument and if possible overwrite it from rundoc. @@ -1672,11 +1679,60 @@ def register_software_veto_plugins(context): """ # TODO - plugin_name = 'RadialVeto' + veto_name = 'RadialVeto' import straxen.plugins.raw_records_sv.software_veto as software_veto - plugin = getattr(straxen.plugins.raw_records_sv.software_veto, plugin_name) - context.register(plugin) + veto_plugin = getattr(straxen.plugins.raw_records_sv.software_veto, veto_name) + st.register(veto_plugin) + +def software_veto_register_fly(st, run_id): + """We make a copy of every original plugin + for the software veto adding a _sv to the provides. + Computation is tricky because requires initialisation + of plugin to copy. Also requires no hardcoded data_types + in plugins (see https://github.com/cfuselli/straxen/pull/3) + """ + + registry = st._plugin_class_registry.copy().items() + + _plugins = [] + for name, pl in registry: + if pl == straxen.DAQReader: + pass + elif pl == straxen.Fake1TDAQReader: + pass + elif pl == software_veto.RadialVeto: + pass + elif name.endswith('_sv'): # ext_timings_nv_sv_sv ?? 
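+            # presumably: a provides/data name that already ends in '_sv' is a
+            # software-veto copy registered in an earlier pass, so it is skipped
+            # here to avoid doubled suffixes such as 'ext_timings_nv_sv_sv'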
+ pass + elif pl in _plugins: + pass + else: + + # I need initialisation to save myself + _pl = st.get_single_plugin(run_id, name) + + provides = [prov.replace('_sv','')+'_sv' for prov in strax.to_str_tuple(pl.provides)] + depends_on = [deps.replace('_sv','')+'_sv' for deps in strax.to_str_tuple(pl.depends_on)] + + class TempPlugin(pl): + provides = provides + depends_on = depends_on + data_kind = _pl.data_kind + + if _pl.multi_output: + dtype = {prov+'_sv': _pl.dtype_for(prov) + for prov in pl.provides} + else: + dtype = _pl.dtype_for(name) + + def infer_dtype(self): + return self.dtype + + log.info(f"Registering software veto copy of {pl}") + st.register(TempPlugin) + + _plugins.append(pl) From a81a92b78e6235d624325cde6d752887092d78dd Mon Sep 17 00:00:00 2001 From: cfuselli Date: Mon, 6 Mar 2023 00:46:26 +0000 Subject: [PATCH 20/56] lot of fixing stuff --- bin/bootstrax | 95 +++++++++++++------ straxen/plugins/raw_records/daqreader.py | 2 +- .../plugins/veto_intervals/veto_intervals.py | 7 +- 3 files changed, 70 insertions(+), 34 deletions(-) diff --git a/bin/bootstrax b/bin/bootstrax index c185439b7..3d26e4cef 100755 --- a/bin/bootstrax +++ b/bin/bootstrax @@ -15,7 +15,10 @@ How to use For more info, see the documentation: https://straxen.readthedocs.io/en/latest/bootstrax.html """ -__version__ = '2.0.0' +__version__ = '3.0.0' + +import sys +sys.path.insert(0, '/daq_common/carlo/test_software/straxen_software_veto') import argparse import typing @@ -39,6 +42,7 @@ import straxen import threading import pandas as pd import typing as ty +from immutabledict import immutabledict import daqnt import fnmatch from glob import glob @@ -87,9 +91,8 @@ parser.add_argument( '--max_messages', type=int, default=10, help="number of max mailbox messages") parser.add_argument( - '--software_veto', type=int, default=None, - help="Class name of veto plugin to apply (i.e.: RadialVeto)") - + '--software_veto', default=False, + help="Class name of veto plugin to apply (i.e.: RadialVeto). 
It's overwritten by rundoc value.") actions = parser.add_mutually_exclusive_group() actions.add_argument( @@ -1197,21 +1200,26 @@ def run_strax(run_id, input_dir, targets, readout_threads, compressor, timeout=timeout, ) - if software_veto_is_on(): - - software_veto_register_raw_records(st) - software_veto_register_fly(st, run_id) - for t in targets: - post_processing.append(t+'_sv') - _post_processing = post_processing.copy() - for t in _post_processing: - post_processing.append(t+'_sv') - - for t in ('raw_records', 'records', 'records_nv', 'hitlets_nv'): # Set the (raw)records processor to the inferred one st._plugin_class_registry[t].compressor = records_compressor + if software_veto_is_on(args): + + software_veto_register_raw_records(st, args) + software_veto_register_fly(st, run_id) + + _post_processing = list(post_processing) + for t in targets: + _post_processing.append(t+'_sv') + for t in post_processing: + _post_processing.append(t+'_sv') + post_processing = tuple(_post_processing) + + for t in ('raw_records', 'records', 'records_nv', 'hitlets_nv'): + # Set the (raw)records processor to the inferred one + st._plugin_class_registry[t+'_sv'].compressor = records_compressor + # Make a function for running strax, call the function to process the run # This way, it can also be run inside a wrapper to profile strax def st_make(): @@ -1665,13 +1673,31 @@ def cleanup_db(): -def software_veto_is_on(): +def software_veto_is_on(args, rundoc=None): """Decide if software veto is on based on arguments or rundoc""" - # TODO - return True + # TODO add rundoc option + if (args.software_veto == None) | (args.software_veto is False): + return False + else: + return True + +def software_veto_get_name(args, rundoc=None): + """Get the needed raw_records plugin""" + + plugin_name = args.software_veto + + # TODO overwrite from rundoc + # TODO add option if you want to overwrite from argument + + + return plugin_name + + + -def software_veto_register_raw_records(st): + +def software_veto_register_raw_records(st, args, rundoc=None): """Based on the selection that we want to apply, register the correct veto plugin. Probably we want to pass it as argument and if possible overwrite it from rundoc. @@ -1679,7 +1705,7 @@ def software_veto_register_raw_records(st): """ # TODO - veto_name = 'RadialVeto' + veto_name = software_veto_get_name(args, rundoc=None) import straxen.plugins.raw_records_sv.software_veto as software_veto veto_plugin = getattr(straxen.plugins.raw_records_sv.software_veto, veto_name) @@ -1701,7 +1727,7 @@ def software_veto_register_fly(st, run_id): pass elif pl == straxen.Fake1TDAQReader: pass - elif pl == software_veto.RadialVeto: + elif pl == straxen.plugins.raw_records_sv.software_veto.RadialVeto: pass elif name.endswith('_sv'): # ext_timings_nv_sv_sv ?? 
pass @@ -1712,17 +1738,24 @@ def software_veto_register_fly(st, run_id): # I need initialisation to save myself _pl = st.get_single_plugin(run_id, name) - provides = [prov.replace('_sv','')+'_sv' for prov in strax.to_str_tuple(pl.provides)] - depends_on = [deps.replace('_sv','')+'_sv' for deps in strax.to_str_tuple(pl.depends_on)] - class TempPlugin(pl): - provides = provides - depends_on = depends_on - data_kind = _pl.data_kind + provides = [prov.replace('_sv','')+'_sv' for prov in strax.to_str_tuple(pl.provides)] + depends_on = [deps.replace('_sv','')+'_sv' for deps in strax.to_str_tuple(pl.depends_on)] + + if isinstance(pl.save_when, dict): + data_kind = _pl.data_kind + data_kind = {t.replace('_sv','')+'_sv':_pl.save_when[t] for t in _pl.data_kind} + else: + data_kind = _pl.data_kind+'_sv' + + if isinstance(pl.save_when, immutabledict): + save_when = immutabledict({t.replace('_sv','')+'_sv':_pl.save_when[t] for t in pl.save_when}) + + if isinstance(pl.rechunk_on_save, immutabledict): + rechunk_on_save = immutabledict({t.replace('_sv','')+'_sv':_pl.save_when[t] for t in pl.rechunk_on_save}) if _pl.multi_output: - dtype = {prov+'_sv': _pl.dtype_for(prov) - for prov in pl.provides} + dtype = {prov+'_sv': _pl.dtype_for(prov) for prov in pl.provides} else: dtype = _pl.dtype_for(name) @@ -1735,7 +1768,9 @@ def software_veto_register_fly(st, run_id): _plugins.append(pl) - +########################### +# __MAIN__ # +########################### if __name__ == '__main__': if not args.undying: diff --git a/straxen/plugins/raw_records/daqreader.py b/straxen/plugins/raw_records/daqreader.py index 5b664401c..db6648cff 100644 --- a/straxen/plugins/raw_records/daqreader.py +++ b/straxen/plugins/raw_records/daqreader.py @@ -191,7 +191,7 @@ def is_ready(self, chunk_i): def _load_chunk(self, path, start, end, kind='central'): - _dtype_for = self.depends_on[0] # raw_records + _dtype_for = self.provides[0] # raw_records records = [ strax.load_file( diff --git a/straxen/plugins/veto_intervals/veto_intervals.py b/straxen/plugins/veto_intervals/veto_intervals.py index 0f01e3099..28494a35c 100644 --- a/straxen/plugins/veto_intervals/veto_intervals.py +++ b/straxen/plugins/veto_intervals/veto_intervals.py @@ -109,9 +109,10 @@ def compute(self, aqmon_hits, start, end): n_software_veto = len(software_veto) if n_software_veto: - result[vetos_seen:n_software_veto]['time'] = software_veto['time'] - result[vetos_seen:n_software_veto]['endtime'] = strax.endtime(software_veto) - result[vetos_seen:n_software_veto]['veto_type'] = 'software_veto' + print(n_software_veto, vetos_seen, software_veto['time']) + result[vetos_seen:vetos_seen+n_software_veto]['time'] = software_veto['time'] + result[vetos_seen:vetos_seen+n_software_veto]['endtime'] = strax.endtime(software_veto) + result[vetos_seen:vetos_seen+n_software_veto]['veto_type'] = 'software_veto' vetos_seen += n_software_veto result = result[:vetos_seen] From b6171311a14731db19cc5f02e121a81fa0d87968 Mon Sep 17 00:00:00 2001 From: cfuselli Date: Mon, 6 Mar 2023 00:49:15 +0000 Subject: [PATCH 21/56] fix temp fly plugin --- bin/bootstrax | 19 +++++++++---------- 1 file changed, 9 insertions(+), 10 deletions(-) diff --git a/bin/bootstrax b/bin/bootstrax index 3d26e4cef..6d707b9f9 100755 --- a/bin/bootstrax +++ b/bin/bootstrax @@ -1736,28 +1736,27 @@ def software_veto_register_fly(st, run_id): else: # I need initialisation to save myself - _pl = st.get_single_plugin(run_id, name) + init_pl = st.get_single_plugin(run_id, name) class TempPlugin(pl): provides = 
[prov.replace('_sv','')+'_sv' for prov in strax.to_str_tuple(pl.provides)] depends_on = [deps.replace('_sv','')+'_sv' for deps in strax.to_str_tuple(pl.depends_on)] - if isinstance(pl.save_when, dict): - data_kind = _pl.data_kind - data_kind = {t.replace('_sv','')+'_sv':_pl.save_when[t] for t in _pl.data_kind} + if isinstance(init_pl.data_kind, dict): + data_kind = {t.replace('_sv','')+'_sv':init_pl.save_when[t] for t in init_pl.data_kind} else: - data_kind = _pl.data_kind+'_sv' + data_kind = init_pl.data_kind+'_sv' if isinstance(pl.save_when, immutabledict): - save_when = immutabledict({t.replace('_sv','')+'_sv':_pl.save_when[t] for t in pl.save_when}) + save_when = immutabledict({t.replace('_sv','')+'_sv':init_pl.save_when[t] for t in pl.save_when}) if isinstance(pl.rechunk_on_save, immutabledict): - rechunk_on_save = immutabledict({t.replace('_sv','')+'_sv':_pl.save_when[t] for t in pl.rechunk_on_save}) + rechunk_on_save = immutabledict({t.replace('_sv','')+'_sv':init_pl.save_when[t] for t in pl.rechunk_on_save}) - if _pl.multi_output: - dtype = {prov+'_sv': _pl.dtype_for(prov) for prov in pl.provides} + if init_pl.multi_output: + dtype = {prov+'_sv': init_pl.dtype_for(prov) for prov in pl.provides} else: - dtype = _pl.dtype_for(name) + dtype = init_pl.dtype_for(name) def infer_dtype(self): return self.dtype From 0fb2d174287e0b1ae4881ee85026e63f68997c59 Mon Sep 17 00:00:00 2001 From: cfuselli Date: Mon, 6 Mar 2023 00:51:46 +0000 Subject: [PATCH 22/56] forgot remove print --- straxen/plugins/veto_intervals/veto_intervals.py | 1 - 1 file changed, 1 deletion(-) diff --git a/straxen/plugins/veto_intervals/veto_intervals.py b/straxen/plugins/veto_intervals/veto_intervals.py index 28494a35c..b812ef1da 100644 --- a/straxen/plugins/veto_intervals/veto_intervals.py +++ b/straxen/plugins/veto_intervals/veto_intervals.py @@ -109,7 +109,6 @@ def compute(self, aqmon_hits, start, end): n_software_veto = len(software_veto) if n_software_veto: - print(n_software_veto, vetos_seen, software_veto['time']) result[vetos_seen:vetos_seen+n_software_veto]['time'] = software_veto['time'] result[vetos_seen:vetos_seen+n_software_veto]['endtime'] = strax.endtime(software_veto) result[vetos_seen:vetos_seen+n_software_veto]['veto_type'] = 'software_veto' From 5170710093705c2676616d721eb6b4d0c8f360a9 Mon Sep 17 00:00:00 2001 From: cfuselli Date: Mon, 6 Mar 2023 10:40:04 +0000 Subject: [PATCH 23/56] workaround for picklable classes, works --- bin/bootstrax | 61 ++++++++++++++++++++++++++++----------------------- 1 file changed, 34 insertions(+), 27 deletions(-) diff --git a/bin/bootstrax b/bin/bootstrax index 6d707b9f9..da292095e 100755 --- a/bin/bootstrax +++ b/bin/bootstrax @@ -17,9 +17,6 @@ https://straxen.readthedocs.io/en/latest/bootstrax.html """ __version__ = '3.0.0' -import sys -sys.path.insert(0, '/daq_common/carlo/test_software/straxen_software_veto') - import argparse import typing from datetime import datetime, timedelta, timezone @@ -276,6 +273,9 @@ if os.access(output_folder, os.W_OK) is not True: raise IOError(message) + + + def new_context(cores=args.cores, max_messages=args.max_messages, timeout=500, @@ -1216,10 +1216,6 @@ def run_strax(run_id, input_dir, targets, readout_threads, compressor, _post_processing.append(t+'_sv') post_processing = tuple(_post_processing) - for t in ('raw_records', 'records', 'records_nv', 'hitlets_nv'): - # Set the (raw)records processor to the inferred one - st._plugin_class_registry[t+'_sv'].compressor = records_compressor - # Make a function for running strax, call the 
function to process the run # This way, it can also be run inside a wrapper to profile strax def st_make(): @@ -1240,6 +1236,9 @@ def run_strax(run_id, input_dir, targets, readout_threads, compressor, config=strax_config, max_workers=cores) + for temp_plugin in software_veto_plugin_classes: + st.register(temp_plugin) + if len(post_processing): for post_target in post_processing: if post_target not in st._plugin_class_registry: @@ -1395,6 +1394,8 @@ def process_run(rd, send_heartbeats=args.production): run_strax_config.update(infer_target(rd)) run_strax_config.update(infer_mode(rd)) run_strax_config['debug'] = args.debug + + log.info(f"We'll try to process the run with pathos + dill") strax_proc = multiprocessing.Process( target=run_strax, kwargs=run_strax_config) @@ -1711,6 +1712,10 @@ def software_veto_register_raw_records(st, args, rundoc=None): veto_plugin = getattr(straxen.plugins.raw_records_sv.software_veto, veto_name) st.register(veto_plugin) + + +software_veto_plugin_classes = [] + def software_veto_register_fly(st, run_id): """We make a copy of every original plugin for the software veto adding a _sv to the provides. @@ -1735,34 +1740,36 @@ def software_veto_register_fly(st, run_id): pass else: + new_plugin_class = type(name+'_sv', (pl,), {}) + # I need initialisation to save myself init_pl = st.get_single_plugin(run_id, name) - class TempPlugin(pl): - provides = [prov.replace('_sv','')+'_sv' for prov in strax.to_str_tuple(pl.provides)] - depends_on = [deps.replace('_sv','')+'_sv' for deps in strax.to_str_tuple(pl.depends_on)] + new_plugin_class.provides = [prov.replace('_sv','')+'_sv' for prov in strax.to_str_tuple(pl.provides)] + new_plugin_classdepends_on = [deps.replace('_sv','')+'_sv' for deps in strax.to_str_tuple(pl.depends_on)] - if isinstance(init_pl.data_kind, dict): - data_kind = {t.replace('_sv','')+'_sv':init_pl.save_when[t] for t in init_pl.data_kind} - else: - data_kind = init_pl.data_kind+'_sv' + if isinstance(init_pl.data_kind, dict): + new_plugin_class.data_kind = {t.replace('_sv','')+'_sv':init_pl.save_when[t] for t in init_pl.data_kind} + else: + new_plugin_class.data_kind = init_pl.data_kind+'_sv' - if isinstance(pl.save_when, immutabledict): - save_when = immutabledict({t.replace('_sv','')+'_sv':init_pl.save_when[t] for t in pl.save_when}) + if isinstance(pl.save_when, immutabledict): + new_plugin_class.save_when = immutabledict({t.replace('_sv','')+'_sv':init_pl.save_when[t] for t in pl.save_when}) - if isinstance(pl.rechunk_on_save, immutabledict): - rechunk_on_save = immutabledict({t.replace('_sv','')+'_sv':init_pl.save_when[t] for t in pl.rechunk_on_save}) + if isinstance(pl.rechunk_on_save, immutabledict): + new_plugin_class.rechunk_on_save = immutabledict({t.replace('_sv','')+'_sv':init_pl.save_when[t] for t in pl.rechunk_on_save}) - if init_pl.multi_output: - dtype = {prov+'_sv': init_pl.dtype_for(prov) for prov in pl.provides} - else: - dtype = init_pl.dtype_for(name) - - def infer_dtype(self): - return self.dtype + if init_pl.multi_output: + new_plugin_class.dtype = {prov+'_sv': init_pl.dtype_for(prov) for prov in pl.provides} + else: + new_plugin_class.dtype = init_pl.dtype_for(name) - log.info(f"Registering software veto copy of {pl}") - st.register(TempPlugin) + def new_infer_dtype(self): + return self.dtype + + new_plugin_class.infer_dtype = new_infer_dtype + + software_veto_plugin_classes.append(new_plugin_class) _plugins.append(pl) From 47027484f557559f097646124e5c5ab14ef2f651 Mon Sep 17 00:00:00 2001 From: cfuselli Date: Mon, 6 Mar 2023 
16:01:58 +0000 Subject: [PATCH 24/56] gave up, classes in a file --- bin/bootstrax | 109 +-- .../plugins/raw_records_sv/_build_copies.py | 129 ++++ .../raw_records_sv/_software_veto_copies.py | 626 ++++++++++++++++++ 3 files changed, 814 insertions(+), 50 deletions(-) create mode 100644 straxen/plugins/raw_records_sv/_build_copies.py create mode 100644 straxen/plugins/raw_records_sv/_software_veto_copies.py diff --git a/bin/bootstrax b/bin/bootstrax index da292095e..27c8a9267 100755 --- a/bin/bootstrax +++ b/bin/bootstrax @@ -16,12 +16,15 @@ For more info, see the documentation: https://straxen.readthedocs.io/en/latest/bootstrax.html """ __version__ = '3.0.0' +import sys +sys.path.insert(0, '/daq_common/carlo/test_software/straxen_software_veto') import argparse import typing from datetime import datetime, timedelta, timezone import logging import multiprocessing +import multiprocess import npshmex import os import os.path as osp @@ -1207,7 +1210,7 @@ def run_strax(run_id, input_dir, targets, readout_threads, compressor, if software_veto_is_on(args): software_veto_register_raw_records(st, args) - software_veto_register_fly(st, run_id) + # software_veto_register_fly(st, run_id) _post_processing = list(post_processing) for t in targets: @@ -1236,8 +1239,9 @@ def run_strax(run_id, input_dir, targets, readout_threads, compressor, config=strax_config, max_workers=cores) - for temp_plugin in software_veto_plugin_classes: - st.register(temp_plugin) + if software_veto_is_on(args): + import straxen.plugins.raw_records_sv._software_veto_copies as _software_veto_copies + st.register_all(_software_veto_copies) if len(post_processing): for post_target in post_processing: @@ -1396,7 +1400,7 @@ def process_run(rd, send_heartbeats=args.production): run_strax_config['debug'] = args.debug log.info(f"We'll try to process the run with pathos + dill") - strax_proc = multiprocessing.Process( + strax_proc = multiprocess.Process( target=run_strax, kwargs=run_strax_config) @@ -1714,64 +1718,69 @@ def software_veto_register_raw_records(st, args, rundoc=None): -software_veto_plugin_classes = [] +# software_veto_plugin_classes = [] -def software_veto_register_fly(st, run_id): - """We make a copy of every original plugin - for the software veto adding a _sv to the provides. - Computation is tricky because requires initialisation - of plugin to copy. Also requires no hardcoded data_types - in plugins (see https://github.com/cfuselli/straxen/pull/3) - """ +# def software_veto_register_fly(st, run_id): +# """We make a copy of every original plugin +# for the software veto adding a _sv to the provides. +# Computation is tricky because requires initialisation +# of plugin to copy. Also requires no hardcoded data_types +# in plugins (see https://github.com/cfuselli/straxen/pull/3) +# """ + +# registry = st._plugin_class_registry.copy().items() + +# _plugins = [] +# for name, pl in registry: +# if pl == straxen.DAQReader: +# pass +# elif pl == straxen.Fake1TDAQReader: +# pass +# elif pl == straxen.plugins.raw_records_sv.software_veto.RadialVeto: +# pass +# elif name.endswith('_sv'): # ext_timings_nv_sv_sv ?? 
+# pass +# elif pl in _plugins: +# pass +# else: + +# # I need initialisation to save myself +# init_pl = st.get_single_plugin(run_id, name) + +# new_plugin_class = type(name+'_sv', (pl,), {}) + +# new_plugin_class.__module__ = None - registry = st._plugin_class_registry.copy().items() +# new_plugin_class.provides = [prov.replace('_sv','')+'_sv' for prov in strax.to_str_tuple(pl.provides)] +# new_plugin_class.depends_on = [deps.replace('_sv','')+'_sv' for deps in strax.to_str_tuple(pl.depends_on)] - _plugins = [] - for name, pl in registry: - if pl == straxen.DAQReader: - pass - elif pl == straxen.Fake1TDAQReader: - pass - elif pl == straxen.plugins.raw_records_sv.software_veto.RadialVeto: - pass - elif name.endswith('_sv'): # ext_timings_nv_sv_sv ?? - pass - elif pl in _plugins: - pass - else: - - new_plugin_class = type(name+'_sv', (pl,), {}) +# if isinstance(init_pl.data_kind, dict): +# new_plugin_class.data_kind = {t.replace('_sv','')+'_sv':init_pl.save_when[t] for t in init_pl.data_kind} +# else: +# new_plugin_class.data_kind = init_pl.data_kind+'_sv' - # I need initialisation to save myself - init_pl = st.get_single_plugin(run_id, name) +# if isinstance(pl.save_when, immutabledict): +# new_plugin_class.save_when = immutabledict({t.replace('_sv','')+'_sv':init_pl.save_when[t] for t in pl.save_when}) - new_plugin_class.provides = [prov.replace('_sv','')+'_sv' for prov in strax.to_str_tuple(pl.provides)] - new_plugin_classdepends_on = [deps.replace('_sv','')+'_sv' for deps in strax.to_str_tuple(pl.depends_on)] +# if isinstance(pl.rechunk_on_save, immutabledict): +# new_plugin_class.rechunk_on_save = immutabledict({t.replace('_sv','')+'_sv':init_pl.save_when[t] for t in pl.rechunk_on_save}) - if isinstance(init_pl.data_kind, dict): - new_plugin_class.data_kind = {t.replace('_sv','')+'_sv':init_pl.save_when[t] for t in init_pl.data_kind} - else: - new_plugin_class.data_kind = init_pl.data_kind+'_sv' +# if init_pl.multi_output: +# new_plugin_class.dtype = {prov+'_sv': init_pl.dtype_for(prov) for prov in pl.provides} +# else: +# new_plugin_class.dtype = init_pl.dtype_for(name) + +# def new_infer_dtype(self): +# return self.dtype - if isinstance(pl.save_when, immutabledict): - new_plugin_class.save_when = immutabledict({t.replace('_sv','')+'_sv':init_pl.save_when[t] for t in pl.save_when}) +# new_plugin_class.infer_dtype = new_infer_dtype - if isinstance(pl.rechunk_on_save, immutabledict): - new_plugin_class.rechunk_on_save = immutabledict({t.replace('_sv','')+'_sv':init_pl.save_when[t] for t in pl.rechunk_on_save}) - if init_pl.multi_output: - new_plugin_class.dtype = {prov+'_sv': init_pl.dtype_for(prov) for prov in pl.provides} - else: - new_plugin_class.dtype = init_pl.dtype_for(name) - - def new_infer_dtype(self): - return self.dtype +# software_veto_plugin_classes.append(new_plugin_class) - new_plugin_class.infer_dtype = new_infer_dtype +# _plugins.append(pl) - software_veto_plugin_classes.append(new_plugin_class) - _plugins.append(pl) ########################### diff --git a/straxen/plugins/raw_records_sv/_build_copies.py b/straxen/plugins/raw_records_sv/_build_copies.py new file mode 100644 index 000000000..033f99f33 --- /dev/null +++ b/straxen/plugins/raw_records_sv/_build_copies.py @@ -0,0 +1,129 @@ +import sys +sys.path.insert(0, '/daq_common/carlo/test_software/straxen_software_veto') + + +import strax +import straxen +from straxen.plugins.raw_records_sv import software_veto +from immutabledict import immutabledict +import sys, os + +_dir = 
os.path.dirname(os.path.abspath(__file__)) +print(_dir) + +st = straxen.contexts.xenonnt_online(output_folder='') +registry = st._plugin_class_registry.copy().items() + +save_when_replacers = {"'":'', + '<':'', + '>':'', + '{':'', + '}':'', + '0':'', + '1':'', + '2':'', + '3':'', + '4':'', + ': ':'', + 'Save':': strax.Save', + } + +rechunk_replacers = {"'":'', + '<':'', + '>':'', + '{':'', + '}':'' + } + + +tofile = """ +from immutabledict import immutabledict +import numba +import numpy as np + +import strax +import straxen + +""" + + +_plugins = [] + +for name, pl in registry: + if pl == straxen.DAQReader: + pass + elif pl == straxen.Fake1TDAQReader: + pass + elif pl.__base__ == straxen.plugins.raw_records_sv._software_veto_base.RawRecordsSoftwareVetoBase: + pass + elif name.endswith('_sv'): # ext_timings_nv_sv_sv ?? + pass + elif pl in _plugins: + pass + else: + + print(name) + + # I need initialisation to save myself + init_pl = st.get_single_plugin('000000', name) + + provides = [prov.replace('_sv','')+'_sv' for prov in strax.to_str_tuple(pl.provides)] + depends_on = [deps.replace('_sv','')+'_sv' for deps in strax.to_str_tuple(pl.depends_on)] + + if isinstance(init_pl.data_kind, dict): + data_kind = {t.replace('_sv','')+'_sv':init_pl.save_when[t] for t in init_pl.data_kind} + else: + data_kind = init_pl.data_kind+'_sv' + + if isinstance(pl.save_when, immutabledict): + save_when = str(immutabledict({t.replace('_sv','')+'_sv':init_pl.save_when[t] for t in pl.save_when})) + for k, v in save_when_replacers.items(): + save_when = save_when.replace(k, v) + + if isinstance(pl.rechunk_on_save, immutabledict): + rechunk_on_save = str(immutabledict({t.replace('_sv','')+'_sv':init_pl.rechunk_on_save[t] for t in pl.rechunk_on_save})) + for k, v in rechunk_replacers.items(): + rechunk_on_save = rechunk_on_save.replace(k, v) + + if init_pl.multi_output: + dtype = str({prov+'_sv': init_pl.dtype_for(prov) for prov in pl.provides}) + dtype = dtype.replace('dtype(', '').replace(')])', ')]') + else: + dtype = init_pl.dtype_for(name) + + def new_infer_dtype(self): + return self.dtype + + + + classtofile = f""" + +class {pl.__name__}SV(straxen.{pl.__name__}): +depends_on = {depends_on} +provides = {provides} +dtype = {dtype} + +def new_infer_dtype(self): + return self.dtype + +""" + tofile += classtofile + + if isinstance(pl.save_when, immutabledict): + tofile += f""" +save_when = {save_when} +""" + + if isinstance(pl.rechunk_on_save, immutabledict): + tofile += f""" +rechunk_on_save = {rechunk_on_save} +""" + + _plugins.append(pl) + + +with open(os.path.join(_dir, '_software_veto_copies.py'), "w") as text_file: + text_file.write(tofile) + + +print('Finished') \ No newline at end of file diff --git a/straxen/plugins/raw_records_sv/_software_veto_copies.py b/straxen/plugins/raw_records_sv/_software_veto_copies.py new file mode 100644 index 000000000..5f2d07c7d --- /dev/null +++ b/straxen/plugins/raw_records_sv/_software_veto_copies.py @@ -0,0 +1,626 @@ + +from immutabledict import immutabledict +import numba +import numpy as np + +import strax +import straxen + + + +class AqmonHitsSV(straxen.AqmonHits): +depends_on = ['raw_records_aqmon_sv'] +provides = ['aqmon_hits_sv'] +dtype = [(('Start time since unix epoch [ns]', 'time'), ' Date: Mon, 6 Mar 2023 16:07:55 +0000 Subject: [PATCH 25/56] fix copies --- .gitignore | 1 + .../plugins/raw_records_sv/_build_copies.py | 14 +- .../raw_records_sv/_software_veto_copies.py | 618 +++++++++--------- 3 files changed, 317 insertions(+), 316 deletions(-) diff --git 
a/.gitignore b/.gitignore index 8ff9bb7ec..9b3ccce32 100644 --- a/.gitignore +++ b/.gitignore @@ -29,6 +29,7 @@ custom_data test_input_data *.zip last_bootstrax_exception.txt +bootstrax_exceptions # cProfile output *.prof diff --git a/straxen/plugins/raw_records_sv/_build_copies.py b/straxen/plugins/raw_records_sv/_build_copies.py index 033f99f33..9944cf56f 100644 --- a/straxen/plugins/raw_records_sv/_build_copies.py +++ b/straxen/plugins/raw_records_sv/_build_copies.py @@ -99,24 +99,24 @@ def new_infer_dtype(self): classtofile = f""" class {pl.__name__}SV(straxen.{pl.__name__}): -depends_on = {depends_on} -provides = {provides} -dtype = {dtype} + depends_on = {depends_on} + provides = {provides} + dtype = {dtype} -def new_infer_dtype(self): - return self.dtype + def new_infer_dtype(self): + return self.dtype """ tofile += classtofile if isinstance(pl.save_when, immutabledict): tofile += f""" -save_when = {save_when} + save_when = {save_when} """ if isinstance(pl.rechunk_on_save, immutabledict): tofile += f""" -rechunk_on_save = {rechunk_on_save} + rechunk_on_save = {rechunk_on_save} """ _plugins.append(pl) diff --git a/straxen/plugins/raw_records_sv/_software_veto_copies.py b/straxen/plugins/raw_records_sv/_software_veto_copies.py index 5f2d07c7d..c82b10397 100644 --- a/straxen/plugins/raw_records_sv/_software_veto_copies.py +++ b/straxen/plugins/raw_records_sv/_software_veto_copies.py @@ -9,618 +9,618 @@ class AqmonHitsSV(straxen.AqmonHits): -depends_on = ['raw_records_aqmon_sv'] -provides = ['aqmon_hits_sv'] -dtype = [(('Start time since unix epoch [ns]', 'time'), ' Date: Mon, 6 Mar 2023 16:14:01 +0000 Subject: [PATCH 26/56] fix copies --- straxen/plugins/raw_records_sv/_build_copies.py | 5 +++-- straxen/plugins/raw_records_sv/_software_veto_copies.py | 8 ++++---- 2 files changed, 7 insertions(+), 6 deletions(-) diff --git a/straxen/plugins/raw_records_sv/_build_copies.py b/straxen/plugins/raw_records_sv/_build_copies.py index 9944cf56f..e5a58d3bf 100644 --- a/straxen/plugins/raw_records_sv/_build_copies.py +++ b/straxen/plugins/raw_records_sv/_build_copies.py @@ -25,14 +25,15 @@ '3':'', '4':'', ': ':'', - 'Save':': strax.Save', + 'Save':'=strax.Save', } rechunk_replacers = {"'":'', '<':'', '>':'', '{':'', - '}':'' + '}':'', + ':':'=' } diff --git a/straxen/plugins/raw_records_sv/_software_veto_copies.py b/straxen/plugins/raw_records_sv/_software_veto_copies.py index c82b10397..5228a11c9 100644 --- a/straxen/plugins/raw_records_sv/_software_veto_copies.py +++ b/straxen/plugins/raw_records_sv/_software_veto_copies.py @@ -467,9 +467,9 @@ def new_infer_dtype(self): return self.dtype - save_when = immutabledict(records_sv: strax.SaveWhen.TARGET, veto_regions_sv: strax.SaveWhen.TARGET, pulse_counts_sv: strax.SaveWhen.ALWAYS) + save_when = immutabledict(records_sv=strax.SaveWhen.TARGET, veto_regions_sv=strax.SaveWhen.TARGET, pulse_counts_sv=strax.SaveWhen.ALWAYS) - rechunk_on_save = immutabledict(records_sv: False, veto_regions_sv: True, pulse_counts_sv: True) + rechunk_on_save = immutabledict(records_sv= False, veto_regions_sv= True, pulse_counts_sv= True) class PulseProcessingHighEnergySV(straxen.PulseProcessingHighEnergy): @@ -481,7 +481,7 @@ def new_infer_dtype(self): return self.dtype - rechunk_on_save = immutabledict(records_he_sv: False, pulse_counts_he_sv: True) + rechunk_on_save = immutabledict(records_he_sv= False, pulse_counts_he_sv= True) class S2ReconPosDiffSV(straxen.S2ReconPosDiff): @@ -613,7 +613,7 @@ def new_infer_dtype(self): return self.dtype - save_when = 
immutabledict(raw_records_coin_nv_sv: strax.SaveWhen.TARGET, lone_raw_records_nv_sv: strax.SaveWhen.TARGET, lone_raw_record_statistics_nv_sv: strax.SaveWhen.ALWAYS) + save_when = immutabledict(raw_records_coin_nv_sv=strax.SaveWhen.TARGET, lone_raw_records_nv_sv=strax.SaveWhen.TARGET, lone_raw_record_statistics_nv_sv=strax.SaveWhen.ALWAYS) class nVetoExtTimingsSV(straxen.nVetoExtTimings): From bc239aae02a604b4b3c6a0504222a0cd9903bf62 Mon Sep 17 00:00:00 2001 From: cfuselli Date: Mon, 6 Mar 2023 16:46:37 +0000 Subject: [PATCH 27/56] fix fix fix --- bin/bootstrax | 7 + .../plugins/raw_records_sv/_build_copies.py | 7 +- .../raw_records_sv/_software_veto_copies.py | 183 ++++++++++++------ 3 files changed, 133 insertions(+), 64 deletions(-) diff --git a/bin/bootstrax b/bin/bootstrax index 27c8a9267..eb9ea34f0 100755 --- a/bin/bootstrax +++ b/bin/bootstrax @@ -326,6 +326,7 @@ if not args.undying: def main(): + if args.cores == -1: # Use all of the available cores on this machine args.cores = multiprocessing.cpu_count() @@ -1788,6 +1789,12 @@ def software_veto_register_raw_records(st, args, rundoc=None): ########################### if __name__ == '__main__': + + # to avoid warnings pymongo fork + multiprocessing.set_start_method('spawn') + multiprocess.set_start_method('spawn') + + if not args.undying: main() else: diff --git a/straxen/plugins/raw_records_sv/_build_copies.py b/straxen/plugins/raw_records_sv/_build_copies.py index e5a58d3bf..c13b7f187 100644 --- a/straxen/plugins/raw_records_sv/_build_copies.py +++ b/straxen/plugins/raw_records_sv/_build_copies.py @@ -72,9 +72,9 @@ depends_on = [deps.replace('_sv','')+'_sv' for deps in strax.to_str_tuple(pl.depends_on)] if isinstance(init_pl.data_kind, dict): - data_kind = {t.replace('_sv','')+'_sv':init_pl.save_when[t] for t in init_pl.data_kind} + data_kind = {t.replace('_sv','')+'_sv':init_pl.data_kind[t] for t in init_pl.data_kind} else: - data_kind = init_pl.data_kind+'_sv' + data_kind = "'"+str(init_pl.data_kind+'_sv')+"'" if isinstance(pl.save_when, immutabledict): save_when = str(immutabledict({t.replace('_sv','')+'_sv':init_pl.save_when[t] for t in pl.save_when})) @@ -103,8 +103,9 @@ class {pl.__name__}SV(straxen.{pl.__name__}): depends_on = {depends_on} provides = {provides} dtype = {dtype} + data_kind = {data_kind} - def new_infer_dtype(self): + def infer_dtype(self): return self.dtype """ diff --git a/straxen/plugins/raw_records_sv/_software_veto_copies.py b/straxen/plugins/raw_records_sv/_software_veto_copies.py index 5228a11c9..5e216bd8e 100644 --- a/straxen/plugins/raw_records_sv/_software_veto_copies.py +++ b/straxen/plugins/raw_records_sv/_software_veto_copies.py @@ -12,8 +12,9 @@ class AqmonHitsSV(straxen.AqmonHits): depends_on = ['raw_records_aqmon_sv'] provides = ['aqmon_hits_sv'] dtype = [(('Start time since unix epoch [ns]', 'time'), ' Date: Tue, 7 Mar 2023 12:34:14 +0000 Subject: [PATCH 28/56] dirty but maybe works --- bin/bootstrax | 2 +- .../plugins/raw_records_sv/_build_copies.py | 61 +- .../raw_records_sv/_software_veto_copies.py | 679 ------------------ 3 files changed, 47 insertions(+), 695 deletions(-) diff --git a/bin/bootstrax b/bin/bootstrax index eb9ea34f0..e6e9833d5 100755 --- a/bin/bootstrax +++ b/bin/bootstrax @@ -1400,7 +1400,7 @@ def process_run(rd, send_heartbeats=args.production): run_strax_config.update(infer_mode(rd)) run_strax_config['debug'] = args.debug - log.info(f"We'll try to process the run with pathos + dill") + log.info(f"We'll try to process the run with multiprocess instead of multiprocessing 
(so we use dill)") strax_proc = multiprocess.Process( target=run_strax, kwargs=run_strax_config) diff --git a/straxen/plugins/raw_records_sv/_build_copies.py b/straxen/plugins/raw_records_sv/_build_copies.py index c13b7f187..1cae101b1 100644 --- a/straxen/plugins/raw_records_sv/_build_copies.py +++ b/straxen/plugins/raw_records_sv/_build_copies.py @@ -79,21 +79,31 @@ if isinstance(pl.save_when, immutabledict): save_when = str(immutabledict({t.replace('_sv','')+'_sv':init_pl.save_when[t] for t in pl.save_when})) for k, v in save_when_replacers.items(): - save_when = save_when.replace(k, v) + save_when = save_when.replace(k, v) + save_when = f""" + save_when = {save_when} + """ + else: + save_when = '' if isinstance(pl.rechunk_on_save, immutabledict): rechunk_on_save = str(immutabledict({t.replace('_sv','')+'_sv':init_pl.rechunk_on_save[t] for t in pl.rechunk_on_save})) for k, v in rechunk_replacers.items(): rechunk_on_save = rechunk_on_save.replace(k, v) - + rechunk_on_save = f""" + rechunk_on_save = {rechunk_on_save} + """ + else: + rechunk_on_save = '' + if init_pl.multi_output: dtype = str({prov+'_sv': init_pl.dtype_for(prov) for prov in pl.provides}) dtype = dtype.replace('dtype(', '').replace(')])', ')]') else: dtype = init_pl.dtype_for(name) - - def new_infer_dtype(self): - return self.dtype + + compute_takes_chunk_i = init_pl.compute_takes_chunk_i + compute_takes_start_end = init_pl.compute_takes_start_end @@ -104,21 +114,42 @@ class {pl.__name__}SV(straxen.{pl.__name__}): provides = {provides} dtype = {dtype} data_kind = {data_kind} + {save_when} + {rechunk_on_save} + + def __init__(self): + super().__init__() + self.compute_takes_chunk_i = {compute_takes_chunk_i} + self.compute_takes_start_end = {compute_takes_start_end} def infer_dtype(self): return self.dtype -""" - tofile += classtofile - - if isinstance(pl.save_when, immutabledict): - tofile += f""" - save_when = {save_when} -""" + @property + def dep_mapping(self): + return {{k: v for k, v in zip(strax.to_str_tuple(self.depends_on), + strax.to_str_tuple(super().depends_on))}} - if isinstance(pl.rechunk_on_save, immutabledict): - tofile += f""" - rechunk_on_save = {rechunk_on_save} + @property + def prov_mapping(self): + return {{v: k for k, v in zip(strax.to_str_tuple(self.provides), + strax.to_str_tuple(super().provides))}} + + def compute(self, **kwargs): + mapping = self.dep_mapping + p_mapping = self.prov_mapping + + _kwargs = {{}} + for k,v in kwargs.items(): + if k not in ['chunk_i', 'end', 'start']: + _kwargs[mapping[k]] = v + else: + _kwargs[k] = v + + result = super().compute(**_kwargs) + + return {{p_mapping[k]: v for k,v in result.items()}} + """ _plugins.append(pl) diff --git a/straxen/plugins/raw_records_sv/_software_veto_copies.py b/straxen/plugins/raw_records_sv/_software_veto_copies.py index 5e216bd8e..c5791b396 100644 --- a/straxen/plugins/raw_records_sv/_software_veto_copies.py +++ b/straxen/plugins/raw_records_sv/_software_veto_copies.py @@ -6,682 +6,3 @@ import strax import straxen - - -class AqmonHitsSV(straxen.AqmonHits): - depends_on = ['raw_records_aqmon_sv'] - provides = ['aqmon_hits_sv'] - dtype = [(('Start time since unix epoch [ns]', 'time'), ' Date: Tue, 7 Mar 2023 12:37:53 +0000 Subject: [PATCH 29/56] dirty but maybe works --- .../plugins/raw_records_sv/_build_copies.py | 1 + .../raw_records_sv/_software_veto_copies.py | 3247 +++++++++++++++++ 2 files changed, 3248 insertions(+) diff --git a/straxen/plugins/raw_records_sv/_build_copies.py b/straxen/plugins/raw_records_sv/_build_copies.py index 
1cae101b1..3479dbdb5 100644 --- a/straxen/plugins/raw_records_sv/_build_copies.py +++ b/straxen/plugins/raw_records_sv/_build_copies.py @@ -152,6 +152,7 @@ def compute(self, **kwargs): """ + tofile += classtofile _plugins.append(pl) diff --git a/straxen/plugins/raw_records_sv/_software_veto_copies.py b/straxen/plugins/raw_records_sv/_software_veto_copies.py index c5791b396..4e400491e 100644 --- a/straxen/plugins/raw_records_sv/_software_veto_copies.py +++ b/straxen/plugins/raw_records_sv/_software_veto_copies.py @@ -6,3 +6,3250 @@ import strax import straxen + + +class AqmonHitsSV(straxen.AqmonHits): + depends_on = ['raw_records_aqmon_sv'] + provides = ['aqmon_hits_sv'] + dtype = [(('Start time since unix epoch [ns]', 'time'), ' Date: Tue, 7 Mar 2023 13:10:53 +0000 Subject: [PATCH 30/56] fix data kind --- .../plugins/raw_records_sv/_build_copies.py | 2 +- .../raw_records_sv/_software_veto_copies.py | 20 +++++++++---------- 2 files changed, 11 insertions(+), 11 deletions(-) diff --git a/straxen/plugins/raw_records_sv/_build_copies.py b/straxen/plugins/raw_records_sv/_build_copies.py index 3479dbdb5..c41b7881b 100644 --- a/straxen/plugins/raw_records_sv/_build_copies.py +++ b/straxen/plugins/raw_records_sv/_build_copies.py @@ -72,7 +72,7 @@ depends_on = [deps.replace('_sv','')+'_sv' for deps in strax.to_str_tuple(pl.depends_on)] if isinstance(init_pl.data_kind, dict): - data_kind = {t.replace('_sv','')+'_sv':init_pl.data_kind[t] for t in init_pl.data_kind} + data_kind = {t.replace('_sv','')+'_sv':init_pl.data_kind[t]+'_sv' for t in init_pl.data_kind} else: data_kind = "'"+str(init_pl.data_kind+'_sv')+"'" diff --git a/straxen/plugins/raw_records_sv/_software_veto_copies.py b/straxen/plugins/raw_records_sv/_software_veto_copies.py index 4e400491e..42af6a04f 100644 --- a/straxen/plugins/raw_records_sv/_software_veto_copies.py +++ b/straxen/plugins/raw_records_sv/_software_veto_copies.py @@ -2119,7 +2119,7 @@ class PeakletsSV(straxen.Peaklets): depends_on = ['records_sv'] provides = ['peaklets_sv', 'lone_hits_sv'] dtype = {'peaklets_sv': [(('Start time since unix epoch [ns]', 'time'), ' Date: Tue, 7 Mar 2023 13:43:22 +0000 Subject: [PATCH 31/56] fix infer_dtype, it worksgit add . ! 
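
This patch replaces the dep_mapping/prov_mapping properties in the generated
*SV classes with a plain suffix strip: compute() renames every '_sv' keyword
back to the parent plugin's name before calling super().compute(), and
infer_dtype() first lets the parent run its own infer_dtype() and then returns
the frozen dtype. A minimal standalone sketch of the renaming step (the helper
name is illustrative and does not appear in the generated file):

    def strip_sv_suffix(kwargs):
        # 'peaks_sv' -> 'peaks'; chunk bookkeeping keys are passed through untouched
        keep = {'chunk_i', 'start', 'end'}
        return {(k if k in keep else k.replace('_sv', '')): v
                for k, v in kwargs.items()}

    # strip_sv_suffix({'peaks_sv': peaks, 'chunk_i': 0})
    # -> {'peaks': peaks, 'chunk_i': 0}
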
--- .../plugins/raw_records_sv/_build_copies.py | 33 +- .../raw_records_sv/_software_veto_copies.py | 1457 +++++------------ 2 files changed, 424 insertions(+), 1066 deletions(-) diff --git a/straxen/plugins/raw_records_sv/_build_copies.py b/straxen/plugins/raw_records_sv/_build_copies.py index c41b7881b..fa6bd2d57 100644 --- a/straxen/plugins/raw_records_sv/_build_copies.py +++ b/straxen/plugins/raw_records_sv/_build_copies.py @@ -41,7 +41,7 @@ from immutabledict import immutabledict import numba import numpy as np - +import inspect import strax import straxen @@ -105,6 +105,20 @@ compute_takes_chunk_i = init_pl.compute_takes_chunk_i compute_takes_start_end = init_pl.compute_takes_start_end + if init_pl.multi_output: + output = """ + + p_mapping = {{v: k for k, v in zip(strax.to_str_tuple(self.provides), + strax.to_str_tuple(super().provides))}} + return {{p_mapping[k]: v for k,v in result.items()}} +""" + else: + output = """ + return result +""" + + + classtofile = f""" @@ -123,32 +137,21 @@ def __init__(self): self.compute_takes_start_end = {compute_takes_start_end} def infer_dtype(self): + super().infer_dtype() return self.dtype - @property - def dep_mapping(self): - return {{k: v for k, v in zip(strax.to_str_tuple(self.depends_on), - strax.to_str_tuple(super().depends_on))}} - - @property - def prov_mapping(self): - return {{v: k for k, v in zip(strax.to_str_tuple(self.provides), - strax.to_str_tuple(super().provides))}} - def compute(self, **kwargs): - mapping = self.dep_mapping - p_mapping = self.prov_mapping _kwargs = {{}} for k,v in kwargs.items(): if k not in ['chunk_i', 'end', 'start']: - _kwargs[mapping[k]] = v + _kwargs[k.replace('_sv', '')] = v else: _kwargs[k] = v result = super().compute(**_kwargs) - return {{p_mapping[k]: v for k,v in result.items()}} + {output} """ diff --git a/straxen/plugins/raw_records_sv/_software_veto_copies.py b/straxen/plugins/raw_records_sv/_software_veto_copies.py index 42af6a04f..ff0c96c52 100644 --- a/straxen/plugins/raw_records_sv/_software_veto_copies.py +++ b/straxen/plugins/raw_records_sv/_software_veto_copies.py @@ -2,7 +2,7 @@ from immutabledict import immutabledict import numba import numpy as np - +import inspect import strax import straxen @@ -22,32 +22,23 @@ def __init__(self): self.compute_takes_start_end = False def infer_dtype(self): + super().infer_dtype() return self.dtype - @property - def dep_mapping(self): - return {k: v for k, v in zip(strax.to_str_tuple(self.depends_on), - strax.to_str_tuple(super().depends_on))} - - @property - def prov_mapping(self): - return {v: k for k, v in zip(strax.to_str_tuple(self.provides), - strax.to_str_tuple(super().provides))} - def compute(self, **kwargs): - mapping = self.dep_mapping - p_mapping = self.prov_mapping _kwargs = {} for k,v in kwargs.items(): if k not in ['chunk_i', 'end', 'start']: - _kwargs[mapping[k]] = v + _kwargs[k.replace('_sv', '')] = v else: _kwargs[k] = v result = super().compute(**_kwargs) - return {p_mapping[k]: v for k,v in result.items()} + + return result + @@ -65,32 +56,23 @@ def __init__(self): self.compute_takes_start_end = False def infer_dtype(self): + super().infer_dtype() return self.dtype - @property - def dep_mapping(self): - return {k: v for k, v in zip(strax.to_str_tuple(self.depends_on), - strax.to_str_tuple(super().depends_on))} - - @property - def prov_mapping(self): - return {v: k for k, v in zip(strax.to_str_tuple(self.provides), - strax.to_str_tuple(super().provides))} - def compute(self, **kwargs): - mapping = self.dep_mapping - p_mapping = 
self.prov_mapping _kwargs = {} for k,v in kwargs.items(): if k not in ['chunk_i', 'end', 'start']: - _kwargs[mapping[k]] = v + _kwargs[k.replace('_sv', '')] = v else: _kwargs[k] = v result = super().compute(**_kwargs) - return {p_mapping[k]: v for k,v in result.items()} + + return result + @@ -108,32 +90,23 @@ def __init__(self): self.compute_takes_start_end = False def infer_dtype(self): + super().infer_dtype() return self.dtype - @property - def dep_mapping(self): - return {k: v for k, v in zip(strax.to_str_tuple(self.depends_on), - strax.to_str_tuple(super().depends_on))} - - @property - def prov_mapping(self): - return {v: k for k, v in zip(strax.to_str_tuple(self.provides), - strax.to_str_tuple(super().provides))} - def compute(self, **kwargs): - mapping = self.dep_mapping - p_mapping = self.prov_mapping _kwargs = {} for k,v in kwargs.items(): if k not in ['chunk_i', 'end', 'start']: - _kwargs[mapping[k]] = v + _kwargs[k.replace('_sv', '')] = v else: _kwargs[k] = v result = super().compute(**_kwargs) - return {p_mapping[k]: v for k,v in result.items()} + + return result + @@ -151,32 +124,23 @@ def __init__(self): self.compute_takes_start_end = False def infer_dtype(self): + super().infer_dtype() return self.dtype - @property - def dep_mapping(self): - return {k: v for k, v in zip(strax.to_str_tuple(self.depends_on), - strax.to_str_tuple(super().depends_on))} - - @property - def prov_mapping(self): - return {v: k for k, v in zip(strax.to_str_tuple(self.provides), - strax.to_str_tuple(super().provides))} - def compute(self, **kwargs): - mapping = self.dep_mapping - p_mapping = self.prov_mapping _kwargs = {} for k,v in kwargs.items(): if k not in ['chunk_i', 'end', 'start']: - _kwargs[mapping[k]] = v + _kwargs[k.replace('_sv', '')] = v else: _kwargs[k] = v result = super().compute(**_kwargs) - return {p_mapping[k]: v for k,v in result.items()} + + return result + @@ -194,32 +158,23 @@ def __init__(self): self.compute_takes_start_end = False def infer_dtype(self): + super().infer_dtype() return self.dtype - @property - def dep_mapping(self): - return {k: v for k, v in zip(strax.to_str_tuple(self.depends_on), - strax.to_str_tuple(super().depends_on))} - - @property - def prov_mapping(self): - return {v: k for k, v in zip(strax.to_str_tuple(self.provides), - strax.to_str_tuple(super().provides))} - def compute(self, **kwargs): - mapping = self.dep_mapping - p_mapping = self.prov_mapping _kwargs = {} for k,v in kwargs.items(): if k not in ['chunk_i', 'end', 'start']: - _kwargs[mapping[k]] = v + _kwargs[k.replace('_sv', '')] = v else: _kwargs[k] = v result = super().compute(**_kwargs) - return {p_mapping[k]: v for k,v in result.items()} + + return result + @@ -237,32 +192,23 @@ def __init__(self): self.compute_takes_start_end = False def infer_dtype(self): + super().infer_dtype() return self.dtype - @property - def dep_mapping(self): - return {k: v for k, v in zip(strax.to_str_tuple(self.depends_on), - strax.to_str_tuple(super().depends_on))} - - @property - def prov_mapping(self): - return {v: k for k, v in zip(strax.to_str_tuple(self.provides), - strax.to_str_tuple(super().provides))} - def compute(self, **kwargs): - mapping = self.dep_mapping - p_mapping = self.prov_mapping _kwargs = {} for k,v in kwargs.items(): if k not in ['chunk_i', 'end', 'start']: - _kwargs[mapping[k]] = v + _kwargs[k.replace('_sv', '')] = v else: _kwargs[k] = v result = super().compute(**_kwargs) - return {p_mapping[k]: v for k,v in result.items()} + + return result + @@ -280,32 +226,23 @@ def __init__(self): 
self.compute_takes_start_end = False def infer_dtype(self): + super().infer_dtype() return self.dtype - @property - def dep_mapping(self): - return {k: v for k, v in zip(strax.to_str_tuple(self.depends_on), - strax.to_str_tuple(super().depends_on))} - - @property - def prov_mapping(self): - return {v: k for k, v in zip(strax.to_str_tuple(self.provides), - strax.to_str_tuple(super().provides))} - def compute(self, **kwargs): - mapping = self.dep_mapping - p_mapping = self.prov_mapping _kwargs = {} for k,v in kwargs.items(): if k not in ['chunk_i', 'end', 'start']: - _kwargs[mapping[k]] = v + _kwargs[k.replace('_sv', '')] = v else: _kwargs[k] = v result = super().compute(**_kwargs) - return {p_mapping[k]: v for k,v in result.items()} + + return result + @@ -323,32 +260,23 @@ def __init__(self): self.compute_takes_start_end = False def infer_dtype(self): + super().infer_dtype() return self.dtype - @property - def dep_mapping(self): - return {k: v for k, v in zip(strax.to_str_tuple(self.depends_on), - strax.to_str_tuple(super().depends_on))} - - @property - def prov_mapping(self): - return {v: k for k, v in zip(strax.to_str_tuple(self.provides), - strax.to_str_tuple(super().provides))} - def compute(self, **kwargs): - mapping = self.dep_mapping - p_mapping = self.prov_mapping _kwargs = {} for k,v in kwargs.items(): if k not in ['chunk_i', 'end', 'start']: - _kwargs[mapping[k]] = v + _kwargs[k.replace('_sv', '')] = v else: _kwargs[k] = v result = super().compute(**_kwargs) - return {p_mapping[k]: v for k,v in result.items()} + + return result + @@ -366,32 +294,23 @@ def __init__(self): self.compute_takes_start_end = False def infer_dtype(self): + super().infer_dtype() return self.dtype - @property - def dep_mapping(self): - return {k: v for k, v in zip(strax.to_str_tuple(self.depends_on), - strax.to_str_tuple(super().depends_on))} - - @property - def prov_mapping(self): - return {v: k for k, v in zip(strax.to_str_tuple(self.provides), - strax.to_str_tuple(super().provides))} - def compute(self, **kwargs): - mapping = self.dep_mapping - p_mapping = self.prov_mapping _kwargs = {} for k,v in kwargs.items(): if k not in ['chunk_i', 'end', 'start']: - _kwargs[mapping[k]] = v + _kwargs[k.replace('_sv', '')] = v else: _kwargs[k] = v result = super().compute(**_kwargs) - return {p_mapping[k]: v for k,v in result.items()} + + return result + @@ -409,32 +328,23 @@ def __init__(self): self.compute_takes_start_end = False def infer_dtype(self): + super().infer_dtype() return self.dtype - @property - def dep_mapping(self): - return {k: v for k, v in zip(strax.to_str_tuple(self.depends_on), - strax.to_str_tuple(super().depends_on))} - - @property - def prov_mapping(self): - return {v: k for k, v in zip(strax.to_str_tuple(self.provides), - strax.to_str_tuple(super().provides))} - def compute(self, **kwargs): - mapping = self.dep_mapping - p_mapping = self.prov_mapping _kwargs = {} for k,v in kwargs.items(): if k not in ['chunk_i', 'end', 'start']: - _kwargs[mapping[k]] = v + _kwargs[k.replace('_sv', '')] = v else: _kwargs[k] = v result = super().compute(**_kwargs) - return {p_mapping[k]: v for k,v in result.items()} + + return result + @@ -452,32 +362,23 @@ def __init__(self): self.compute_takes_start_end = True def infer_dtype(self): + super().infer_dtype() return self.dtype - @property - def dep_mapping(self): - return {k: v for k, v in zip(strax.to_str_tuple(self.depends_on), - strax.to_str_tuple(super().depends_on))} - - @property - def prov_mapping(self): - return {v: k for k, v in 
zip(strax.to_str_tuple(self.provides), - strax.to_str_tuple(super().provides))} - def compute(self, **kwargs): - mapping = self.dep_mapping - p_mapping = self.prov_mapping _kwargs = {} for k,v in kwargs.items(): if k not in ['chunk_i', 'end', 'start']: - _kwargs[mapping[k]] = v + _kwargs[k.replace('_sv', '')] = v else: _kwargs[k] = v result = super().compute(**_kwargs) - return {p_mapping[k]: v for k,v in result.items()} + + return result + @@ -495,32 +396,23 @@ def __init__(self): self.compute_takes_start_end = False def infer_dtype(self): + super().infer_dtype() return self.dtype - @property - def dep_mapping(self): - return {k: v for k, v in zip(strax.to_str_tuple(self.depends_on), - strax.to_str_tuple(super().depends_on))} - - @property - def prov_mapping(self): - return {v: k for k, v in zip(strax.to_str_tuple(self.provides), - strax.to_str_tuple(super().provides))} - def compute(self, **kwargs): - mapping = self.dep_mapping - p_mapping = self.prov_mapping _kwargs = {} for k,v in kwargs.items(): if k not in ['chunk_i', 'end', 'start']: - _kwargs[mapping[k]] = v + _kwargs[k.replace('_sv', '')] = v else: _kwargs[k] = v result = super().compute(**_kwargs) - return {p_mapping[k]: v for k,v in result.items()} + + return result + @@ -538,32 +430,23 @@ def __init__(self): self.compute_takes_start_end = False def infer_dtype(self): + super().infer_dtype() return self.dtype - @property - def dep_mapping(self): - return {k: v for k, v in zip(strax.to_str_tuple(self.depends_on), - strax.to_str_tuple(super().depends_on))} - - @property - def prov_mapping(self): - return {v: k for k, v in zip(strax.to_str_tuple(self.provides), - strax.to_str_tuple(super().provides))} - def compute(self, **kwargs): - mapping = self.dep_mapping - p_mapping = self.prov_mapping _kwargs = {} for k,v in kwargs.items(): if k not in ['chunk_i', 'end', 'start']: - _kwargs[mapping[k]] = v + _kwargs[k.replace('_sv', '')] = v else: _kwargs[k] = v result = super().compute(**_kwargs) - return {p_mapping[k]: v for k,v in result.items()} + + return result + @@ -581,32 +464,23 @@ def __init__(self): self.compute_takes_start_end = False def infer_dtype(self): + super().infer_dtype() return self.dtype - @property - def dep_mapping(self): - return {k: v for k, v in zip(strax.to_str_tuple(self.depends_on), - strax.to_str_tuple(super().depends_on))} - - @property - def prov_mapping(self): - return {v: k for k, v in zip(strax.to_str_tuple(self.provides), - strax.to_str_tuple(super().provides))} - def compute(self, **kwargs): - mapping = self.dep_mapping - p_mapping = self.prov_mapping _kwargs = {} for k,v in kwargs.items(): if k not in ['chunk_i', 'end', 'start']: - _kwargs[mapping[k]] = v + _kwargs[k.replace('_sv', '')] = v else: _kwargs[k] = v result = super().compute(**_kwargs) - return {p_mapping[k]: v for k,v in result.items()} + + return result + @@ -624,32 +498,23 @@ def __init__(self): self.compute_takes_start_end = False def infer_dtype(self): + super().infer_dtype() return self.dtype - @property - def dep_mapping(self): - return {k: v for k, v in zip(strax.to_str_tuple(self.depends_on), - strax.to_str_tuple(super().depends_on))} - - @property - def prov_mapping(self): - return {v: k for k, v in zip(strax.to_str_tuple(self.provides), - strax.to_str_tuple(super().provides))} - def compute(self, **kwargs): - mapping = self.dep_mapping - p_mapping = self.prov_mapping _kwargs = {} for k,v in kwargs.items(): if k not in ['chunk_i', 'end', 'start']: - _kwargs[mapping[k]] = v + _kwargs[k.replace('_sv', '')] = v else: _kwargs[k] = v 
result = super().compute(**_kwargs) - return {p_mapping[k]: v for k,v in result.items()} + + return result + @@ -667,32 +532,23 @@ def __init__(self): self.compute_takes_start_end = False def infer_dtype(self): + super().infer_dtype() return self.dtype - @property - def dep_mapping(self): - return {k: v for k, v in zip(strax.to_str_tuple(self.depends_on), - strax.to_str_tuple(super().depends_on))} - - @property - def prov_mapping(self): - return {v: k for k, v in zip(strax.to_str_tuple(self.provides), - strax.to_str_tuple(super().provides))} - def compute(self, **kwargs): - mapping = self.dep_mapping - p_mapping = self.prov_mapping _kwargs = {} for k,v in kwargs.items(): if k not in ['chunk_i', 'end', 'start']: - _kwargs[mapping[k]] = v + _kwargs[k.replace('_sv', '')] = v else: _kwargs[k] = v result = super().compute(**_kwargs) - return {p_mapping[k]: v for k,v in result.items()} + + return result + @@ -710,32 +566,23 @@ def __init__(self): self.compute_takes_start_end = False def infer_dtype(self): + super().infer_dtype() return self.dtype - @property - def dep_mapping(self): - return {k: v for k, v in zip(strax.to_str_tuple(self.depends_on), - strax.to_str_tuple(super().depends_on))} - - @property - def prov_mapping(self): - return {v: k for k, v in zip(strax.to_str_tuple(self.provides), - strax.to_str_tuple(super().provides))} - def compute(self, **kwargs): - mapping = self.dep_mapping - p_mapping = self.prov_mapping _kwargs = {} for k,v in kwargs.items(): if k not in ['chunk_i', 'end', 'start']: - _kwargs[mapping[k]] = v + _kwargs[k.replace('_sv', '')] = v else: _kwargs[k] = v result = super().compute(**_kwargs) - return {p_mapping[k]: v for k,v in result.items()} + + return result + @@ -753,32 +600,23 @@ def __init__(self): self.compute_takes_start_end = False def infer_dtype(self): + super().infer_dtype() return self.dtype - @property - def dep_mapping(self): - return {k: v for k, v in zip(strax.to_str_tuple(self.depends_on), - strax.to_str_tuple(super().depends_on))} - - @property - def prov_mapping(self): - return {v: k for k, v in zip(strax.to_str_tuple(self.provides), - strax.to_str_tuple(super().provides))} - def compute(self, **kwargs): - mapping = self.dep_mapping - p_mapping = self.prov_mapping _kwargs = {} for k,v in kwargs.items(): if k not in ['chunk_i', 'end', 'start']: - _kwargs[mapping[k]] = v + _kwargs[k.replace('_sv', '')] = v else: _kwargs[k] = v result = super().compute(**_kwargs) - return {p_mapping[k]: v for k,v in result.items()} + + return result + @@ -796,32 +634,23 @@ def __init__(self): self.compute_takes_start_end = False def infer_dtype(self): + super().infer_dtype() return self.dtype - @property - def dep_mapping(self): - return {k: v for k, v in zip(strax.to_str_tuple(self.depends_on), - strax.to_str_tuple(super().depends_on))} - - @property - def prov_mapping(self): - return {v: k for k, v in zip(strax.to_str_tuple(self.provides), - strax.to_str_tuple(super().provides))} - def compute(self, **kwargs): - mapping = self.dep_mapping - p_mapping = self.prov_mapping _kwargs = {} for k,v in kwargs.items(): if k not in ['chunk_i', 'end', 'start']: - _kwargs[mapping[k]] = v + _kwargs[k.replace('_sv', '')] = v else: _kwargs[k] = v result = super().compute(**_kwargs) - return {p_mapping[k]: v for k,v in result.items()} + + return result + @@ -839,32 +668,23 @@ def __init__(self): self.compute_takes_start_end = False def infer_dtype(self): + super().infer_dtype() return self.dtype - @property - def dep_mapping(self): - return {k: v for k, v in 
zip(strax.to_str_tuple(self.depends_on), - strax.to_str_tuple(super().depends_on))} - - @property - def prov_mapping(self): - return {v: k for k, v in zip(strax.to_str_tuple(self.provides), - strax.to_str_tuple(super().provides))} - def compute(self, **kwargs): - mapping = self.dep_mapping - p_mapping = self.prov_mapping _kwargs = {} for k,v in kwargs.items(): if k not in ['chunk_i', 'end', 'start']: - _kwargs[mapping[k]] = v + _kwargs[k.replace('_sv', '')] = v else: _kwargs[k] = v result = super().compute(**_kwargs) - return {p_mapping[k]: v for k,v in result.items()} + + return result + @@ -882,32 +702,23 @@ def __init__(self): self.compute_takes_start_end = False def infer_dtype(self): + super().infer_dtype() return self.dtype - @property - def dep_mapping(self): - return {k: v for k, v in zip(strax.to_str_tuple(self.depends_on), - strax.to_str_tuple(super().depends_on))} - - @property - def prov_mapping(self): - return {v: k for k, v in zip(strax.to_str_tuple(self.provides), - strax.to_str_tuple(super().provides))} - def compute(self, **kwargs): - mapping = self.dep_mapping - p_mapping = self.prov_mapping _kwargs = {} for k,v in kwargs.items(): if k not in ['chunk_i', 'end', 'start']: - _kwargs[mapping[k]] = v + _kwargs[k.replace('_sv', '')] = v else: _kwargs[k] = v result = super().compute(**_kwargs) - return {p_mapping[k]: v for k,v in result.items()} + + return result + @@ -925,32 +736,23 @@ def __init__(self): self.compute_takes_start_end = False def infer_dtype(self): + super().infer_dtype() return self.dtype - @property - def dep_mapping(self): - return {k: v for k, v in zip(strax.to_str_tuple(self.depends_on), - strax.to_str_tuple(super().depends_on))} - - @property - def prov_mapping(self): - return {v: k for k, v in zip(strax.to_str_tuple(self.provides), - strax.to_str_tuple(super().provides))} - def compute(self, **kwargs): - mapping = self.dep_mapping - p_mapping = self.prov_mapping _kwargs = {} for k,v in kwargs.items(): if k not in ['chunk_i', 'end', 'start']: - _kwargs[mapping[k]] = v + _kwargs[k.replace('_sv', '')] = v else: _kwargs[k] = v result = super().compute(**_kwargs) - return {p_mapping[k]: v for k,v in result.items()} + + return result + @@ -968,32 +770,23 @@ def __init__(self): self.compute_takes_start_end = False def infer_dtype(self): + super().infer_dtype() return self.dtype - @property - def dep_mapping(self): - return {k: v for k, v in zip(strax.to_str_tuple(self.depends_on), - strax.to_str_tuple(super().depends_on))} - - @property - def prov_mapping(self): - return {v: k for k, v in zip(strax.to_str_tuple(self.provides), - strax.to_str_tuple(super().provides))} - def compute(self, **kwargs): - mapping = self.dep_mapping - p_mapping = self.prov_mapping _kwargs = {} for k,v in kwargs.items(): if k not in ['chunk_i', 'end', 'start']: - _kwargs[mapping[k]] = v + _kwargs[k.replace('_sv', '')] = v else: _kwargs[k] = v result = super().compute(**_kwargs) - return {p_mapping[k]: v for k,v in result.items()} + + return result + @@ -1011,32 +804,23 @@ def __init__(self): self.compute_takes_start_end = False def infer_dtype(self): + super().infer_dtype() return self.dtype - @property - def dep_mapping(self): - return {k: v for k, v in zip(strax.to_str_tuple(self.depends_on), - strax.to_str_tuple(super().depends_on))} - - @property - def prov_mapping(self): - return {v: k for k, v in zip(strax.to_str_tuple(self.provides), - strax.to_str_tuple(super().provides))} - def compute(self, **kwargs): - mapping = self.dep_mapping - p_mapping = self.prov_mapping _kwargs = {} 
for k,v in kwargs.items(): if k not in ['chunk_i', 'end', 'start']: - _kwargs[mapping[k]] = v + _kwargs[k.replace('_sv', '')] = v else: _kwargs[k] = v result = super().compute(**_kwargs) - return {p_mapping[k]: v for k,v in result.items()} + + return result + @@ -1054,32 +838,23 @@ def __init__(self): self.compute_takes_start_end = False def infer_dtype(self): + super().infer_dtype() return self.dtype - @property - def dep_mapping(self): - return {k: v for k, v in zip(strax.to_str_tuple(self.depends_on), - strax.to_str_tuple(super().depends_on))} - - @property - def prov_mapping(self): - return {v: k for k, v in zip(strax.to_str_tuple(self.provides), - strax.to_str_tuple(super().provides))} - def compute(self, **kwargs): - mapping = self.dep_mapping - p_mapping = self.prov_mapping _kwargs = {} for k,v in kwargs.items(): if k not in ['chunk_i', 'end', 'start']: - _kwargs[mapping[k]] = v + _kwargs[k.replace('_sv', '')] = v else: _kwargs[k] = v result = super().compute(**_kwargs) - return {p_mapping[k]: v for k,v in result.items()} + + return result + @@ -1097,32 +872,23 @@ def __init__(self): self.compute_takes_start_end = True def infer_dtype(self): + super().infer_dtype() return self.dtype - @property - def dep_mapping(self): - return {k: v for k, v in zip(strax.to_str_tuple(self.depends_on), - strax.to_str_tuple(super().depends_on))} - - @property - def prov_mapping(self): - return {v: k for k, v in zip(strax.to_str_tuple(self.provides), - strax.to_str_tuple(super().provides))} - def compute(self, **kwargs): - mapping = self.dep_mapping - p_mapping = self.prov_mapping _kwargs = {} for k,v in kwargs.items(): if k not in ['chunk_i', 'end', 'start']: - _kwargs[mapping[k]] = v + _kwargs[k.replace('_sv', '')] = v else: _kwargs[k] = v result = super().compute(**_kwargs) - return {p_mapping[k]: v for k,v in result.items()} + + return result + @@ -1140,32 +906,23 @@ def __init__(self): self.compute_takes_start_end = False def infer_dtype(self): + super().infer_dtype() return self.dtype - @property - def dep_mapping(self): - return {k: v for k, v in zip(strax.to_str_tuple(self.depends_on), - strax.to_str_tuple(super().depends_on))} - - @property - def prov_mapping(self): - return {v: k for k, v in zip(strax.to_str_tuple(self.provides), - strax.to_str_tuple(super().provides))} - def compute(self, **kwargs): - mapping = self.dep_mapping - p_mapping = self.prov_mapping _kwargs = {} for k,v in kwargs.items(): if k not in ['chunk_i', 'end', 'start']: - _kwargs[mapping[k]] = v + _kwargs[k.replace('_sv', '')] = v else: _kwargs[k] = v result = super().compute(**_kwargs) - return {p_mapping[k]: v for k,v in result.items()} + + return result + @@ -1183,32 +940,23 @@ def __init__(self): self.compute_takes_start_end = False def infer_dtype(self): + super().infer_dtype() return self.dtype - @property - def dep_mapping(self): - return {k: v for k, v in zip(strax.to_str_tuple(self.depends_on), - strax.to_str_tuple(super().depends_on))} - - @property - def prov_mapping(self): - return {v: k for k, v in zip(strax.to_str_tuple(self.provides), - strax.to_str_tuple(super().provides))} - def compute(self, **kwargs): - mapping = self.dep_mapping - p_mapping = self.prov_mapping _kwargs = {} for k,v in kwargs.items(): if k not in ['chunk_i', 'end', 'start']: - _kwargs[mapping[k]] = v + _kwargs[k.replace('_sv', '')] = v else: _kwargs[k] = v result = super().compute(**_kwargs) - return {p_mapping[k]: v for k,v in result.items()} + + return result + @@ -1226,32 +974,23 @@ def __init__(self): self.compute_takes_start_end = 
False def infer_dtype(self): + super().infer_dtype() return self.dtype - @property - def dep_mapping(self): - return {k: v for k, v in zip(strax.to_str_tuple(self.depends_on), - strax.to_str_tuple(super().depends_on))} - - @property - def prov_mapping(self): - return {v: k for k, v in zip(strax.to_str_tuple(self.provides), - strax.to_str_tuple(super().provides))} - def compute(self, **kwargs): - mapping = self.dep_mapping - p_mapping = self.prov_mapping _kwargs = {} for k,v in kwargs.items(): if k not in ['chunk_i', 'end', 'start']: - _kwargs[mapping[k]] = v + _kwargs[k.replace('_sv', '')] = v else: _kwargs[k] = v result = super().compute(**_kwargs) - return {p_mapping[k]: v for k,v in result.items()} + + return result + @@ -1269,32 +1008,23 @@ def __init__(self): self.compute_takes_start_end = False def infer_dtype(self): + super().infer_dtype() return self.dtype - @property - def dep_mapping(self): - return {k: v for k, v in zip(strax.to_str_tuple(self.depends_on), - strax.to_str_tuple(super().depends_on))} - - @property - def prov_mapping(self): - return {v: k for k, v in zip(strax.to_str_tuple(self.provides), - strax.to_str_tuple(super().provides))} - def compute(self, **kwargs): - mapping = self.dep_mapping - p_mapping = self.prov_mapping _kwargs = {} for k,v in kwargs.items(): if k not in ['chunk_i', 'end', 'start']: - _kwargs[mapping[k]] = v + _kwargs[k.replace('_sv', '')] = v else: _kwargs[k] = v result = super().compute(**_kwargs) - return {p_mapping[k]: v for k,v in result.items()} + + return result + @@ -1312,32 +1042,23 @@ def __init__(self): self.compute_takes_start_end = False def infer_dtype(self): + super().infer_dtype() return self.dtype - @property - def dep_mapping(self): - return {k: v for k, v in zip(strax.to_str_tuple(self.depends_on), - strax.to_str_tuple(super().depends_on))} - - @property - def prov_mapping(self): - return {v: k for k, v in zip(strax.to_str_tuple(self.provides), - strax.to_str_tuple(super().provides))} - def compute(self, **kwargs): - mapping = self.dep_mapping - p_mapping = self.prov_mapping _kwargs = {} for k,v in kwargs.items(): if k not in ['chunk_i', 'end', 'start']: - _kwargs[mapping[k]] = v + _kwargs[k.replace('_sv', '')] = v else: _kwargs[k] = v result = super().compute(**_kwargs) - return {p_mapping[k]: v for k,v in result.items()} + + return result + @@ -1355,32 +1076,23 @@ def __init__(self): self.compute_takes_start_end = False def infer_dtype(self): + super().infer_dtype() return self.dtype - @property - def dep_mapping(self): - return {k: v for k, v in zip(strax.to_str_tuple(self.depends_on), - strax.to_str_tuple(super().depends_on))} - - @property - def prov_mapping(self): - return {v: k for k, v in zip(strax.to_str_tuple(self.provides), - strax.to_str_tuple(super().provides))} - def compute(self, **kwargs): - mapping = self.dep_mapping - p_mapping = self.prov_mapping _kwargs = {} for k,v in kwargs.items(): if k not in ['chunk_i', 'end', 'start']: - _kwargs[mapping[k]] = v + _kwargs[k.replace('_sv', '')] = v else: _kwargs[k] = v result = super().compute(**_kwargs) - return {p_mapping[k]: v for k,v in result.items()} + + return result + @@ -1398,32 +1110,23 @@ def __init__(self): self.compute_takes_start_end = False def infer_dtype(self): + super().infer_dtype() return self.dtype - @property - def dep_mapping(self): - return {k: v for k, v in zip(strax.to_str_tuple(self.depends_on), - strax.to_str_tuple(super().depends_on))} - - @property - def prov_mapping(self): - return {v: k for k, v in zip(strax.to_str_tuple(self.provides), - 
strax.to_str_tuple(super().provides))} - def compute(self, **kwargs): - mapping = self.dep_mapping - p_mapping = self.prov_mapping _kwargs = {} for k,v in kwargs.items(): if k not in ['chunk_i', 'end', 'start']: - _kwargs[mapping[k]] = v + _kwargs[k.replace('_sv', '')] = v else: _kwargs[k] = v result = super().compute(**_kwargs) - return {p_mapping[k]: v for k,v in result.items()} + + return result + @@ -1441,32 +1144,23 @@ def __init__(self): self.compute_takes_start_end = False def infer_dtype(self): + super().infer_dtype() return self.dtype - @property - def dep_mapping(self): - return {k: v for k, v in zip(strax.to_str_tuple(self.depends_on), - strax.to_str_tuple(super().depends_on))} - - @property - def prov_mapping(self): - return {v: k for k, v in zip(strax.to_str_tuple(self.provides), - strax.to_str_tuple(super().provides))} - def compute(self, **kwargs): - mapping = self.dep_mapping - p_mapping = self.prov_mapping _kwargs = {} for k,v in kwargs.items(): if k not in ['chunk_i', 'end', 'start']: - _kwargs[mapping[k]] = v + _kwargs[k.replace('_sv', '')] = v else: _kwargs[k] = v result = super().compute(**_kwargs) - return {p_mapping[k]: v for k,v in result.items()} + + return result + @@ -1484,32 +1178,23 @@ def __init__(self): self.compute_takes_start_end = True def infer_dtype(self): + super().infer_dtype() return self.dtype - @property - def dep_mapping(self): - return {k: v for k, v in zip(strax.to_str_tuple(self.depends_on), - strax.to_str_tuple(super().depends_on))} - - @property - def prov_mapping(self): - return {v: k for k, v in zip(strax.to_str_tuple(self.provides), - strax.to_str_tuple(super().provides))} - def compute(self, **kwargs): - mapping = self.dep_mapping - p_mapping = self.prov_mapping _kwargs = {} for k,v in kwargs.items(): if k not in ['chunk_i', 'end', 'start']: - _kwargs[mapping[k]] = v + _kwargs[k.replace('_sv', '')] = v else: _kwargs[k] = v result = super().compute(**_kwargs) - return {p_mapping[k]: v for k,v in result.items()} + + return result + @@ -1527,32 +1212,23 @@ def __init__(self): self.compute_takes_start_end = True def infer_dtype(self): + super().infer_dtype() return self.dtype - @property - def dep_mapping(self): - return {k: v for k, v in zip(strax.to_str_tuple(self.depends_on), - strax.to_str_tuple(super().depends_on))} - - @property - def prov_mapping(self): - return {v: k for k, v in zip(strax.to_str_tuple(self.provides), - strax.to_str_tuple(super().provides))} - def compute(self, **kwargs): - mapping = self.dep_mapping - p_mapping = self.prov_mapping _kwargs = {} for k,v in kwargs.items(): if k not in ['chunk_i', 'end', 'start']: - _kwargs[mapping[k]] = v + _kwargs[k.replace('_sv', '')] = v else: _kwargs[k] = v result = super().compute(**_kwargs) - return {p_mapping[k]: v for k,v in result.items()} + + return result + @@ -1570,32 +1246,23 @@ def __init__(self): self.compute_takes_start_end = True def infer_dtype(self): + super().infer_dtype() return self.dtype - @property - def dep_mapping(self): - return {k: v for k, v in zip(strax.to_str_tuple(self.depends_on), - strax.to_str_tuple(super().depends_on))} - - @property - def prov_mapping(self): - return {v: k for k, v in zip(strax.to_str_tuple(self.provides), - strax.to_str_tuple(super().provides))} - def compute(self, **kwargs): - mapping = self.dep_mapping - p_mapping = self.prov_mapping _kwargs = {} for k,v in kwargs.items(): if k not in ['chunk_i', 'end', 'start']: - _kwargs[mapping[k]] = v + _kwargs[k.replace('_sv', '')] = v else: _kwargs[k] = v result = super().compute(**_kwargs) - 
return {p_mapping[k]: v for k,v in result.items()} + + return result + @@ -1613,32 +1280,23 @@ def __init__(self): self.compute_takes_start_end = False def infer_dtype(self): + super().infer_dtype() return self.dtype - @property - def dep_mapping(self): - return {k: v for k, v in zip(strax.to_str_tuple(self.depends_on), - strax.to_str_tuple(super().depends_on))} - - @property - def prov_mapping(self): - return {v: k for k, v in zip(strax.to_str_tuple(self.provides), - strax.to_str_tuple(super().provides))} - def compute(self, **kwargs): - mapping = self.dep_mapping - p_mapping = self.prov_mapping _kwargs = {} for k,v in kwargs.items(): if k not in ['chunk_i', 'end', 'start']: - _kwargs[mapping[k]] = v + _kwargs[k.replace('_sv', '')] = v else: _kwargs[k] = v result = super().compute(**_kwargs) - return {p_mapping[k]: v for k,v in result.items()} + + return result + @@ -1656,32 +1314,23 @@ def __init__(self): self.compute_takes_start_end = False def infer_dtype(self): + super().infer_dtype() return self.dtype - @property - def dep_mapping(self): - return {k: v for k, v in zip(strax.to_str_tuple(self.depends_on), - strax.to_str_tuple(super().depends_on))} - - @property - def prov_mapping(self): - return {v: k for k, v in zip(strax.to_str_tuple(self.provides), - strax.to_str_tuple(super().provides))} - def compute(self, **kwargs): - mapping = self.dep_mapping - p_mapping = self.prov_mapping _kwargs = {} for k,v in kwargs.items(): if k not in ['chunk_i', 'end', 'start']: - _kwargs[mapping[k]] = v + _kwargs[k.replace('_sv', '')] = v else: _kwargs[k] = v result = super().compute(**_kwargs) - return {p_mapping[k]: v for k,v in result.items()} + + return result + @@ -1699,32 +1348,23 @@ def __init__(self): self.compute_takes_start_end = False def infer_dtype(self): + super().infer_dtype() return self.dtype - @property - def dep_mapping(self): - return {k: v for k, v in zip(strax.to_str_tuple(self.depends_on), - strax.to_str_tuple(super().depends_on))} - - @property - def prov_mapping(self): - return {v: k for k, v in zip(strax.to_str_tuple(self.provides), - strax.to_str_tuple(super().provides))} - def compute(self, **kwargs): - mapping = self.dep_mapping - p_mapping = self.prov_mapping _kwargs = {} for k,v in kwargs.items(): if k not in ['chunk_i', 'end', 'start']: - _kwargs[mapping[k]] = v + _kwargs[k.replace('_sv', '')] = v else: _kwargs[k] = v result = super().compute(**_kwargs) - return {p_mapping[k]: v for k,v in result.items()} + + return result + @@ -1742,32 +1382,23 @@ def __init__(self): self.compute_takes_start_end = False def infer_dtype(self): + super().infer_dtype() return self.dtype - @property - def dep_mapping(self): - return {k: v for k, v in zip(strax.to_str_tuple(self.depends_on), - strax.to_str_tuple(super().depends_on))} - - @property - def prov_mapping(self): - return {v: k for k, v in zip(strax.to_str_tuple(self.provides), - strax.to_str_tuple(super().provides))} - def compute(self, **kwargs): - mapping = self.dep_mapping - p_mapping = self.prov_mapping _kwargs = {} for k,v in kwargs.items(): if k not in ['chunk_i', 'end', 'start']: - _kwargs[mapping[k]] = v + _kwargs[k.replace('_sv', '')] = v else: _kwargs[k] = v result = super().compute(**_kwargs) - return {p_mapping[k]: v for k,v in result.items()} + + return result + @@ -1785,32 +1416,23 @@ def __init__(self): self.compute_takes_start_end = False def infer_dtype(self): + super().infer_dtype() return self.dtype - @property - def dep_mapping(self): - return {k: v for k, v in zip(strax.to_str_tuple(self.depends_on), - 
strax.to_str_tuple(super().depends_on))} - - @property - def prov_mapping(self): - return {v: k for k, v in zip(strax.to_str_tuple(self.provides), - strax.to_str_tuple(super().provides))} - def compute(self, **kwargs): - mapping = self.dep_mapping - p_mapping = self.prov_mapping _kwargs = {} for k,v in kwargs.items(): if k not in ['chunk_i', 'end', 'start']: - _kwargs[mapping[k]] = v + _kwargs[k.replace('_sv', '')] = v else: _kwargs[k] = v result = super().compute(**_kwargs) - return {p_mapping[k]: v for k,v in result.items()} + + return result + @@ -1828,32 +1450,23 @@ def __init__(self): self.compute_takes_start_end = False def infer_dtype(self): + super().infer_dtype() return self.dtype - @property - def dep_mapping(self): - return {k: v for k, v in zip(strax.to_str_tuple(self.depends_on), - strax.to_str_tuple(super().depends_on))} - - @property - def prov_mapping(self): - return {v: k for k, v in zip(strax.to_str_tuple(self.provides), - strax.to_str_tuple(super().provides))} - def compute(self, **kwargs): - mapping = self.dep_mapping - p_mapping = self.prov_mapping _kwargs = {} for k,v in kwargs.items(): if k not in ['chunk_i', 'end', 'start']: - _kwargs[mapping[k]] = v + _kwargs[k.replace('_sv', '')] = v else: _kwargs[k] = v result = super().compute(**_kwargs) - return {p_mapping[k]: v for k,v in result.items()} + + return result + @@ -1871,32 +1484,23 @@ def __init__(self): self.compute_takes_start_end = False def infer_dtype(self): + super().infer_dtype() return self.dtype - @property - def dep_mapping(self): - return {k: v for k, v in zip(strax.to_str_tuple(self.depends_on), - strax.to_str_tuple(super().depends_on))} - - @property - def prov_mapping(self): - return {v: k for k, v in zip(strax.to_str_tuple(self.provides), - strax.to_str_tuple(super().provides))} - def compute(self, **kwargs): - mapping = self.dep_mapping - p_mapping = self.prov_mapping _kwargs = {} for k,v in kwargs.items(): if k not in ['chunk_i', 'end', 'start']: - _kwargs[mapping[k]] = v + _kwargs[k.replace('_sv', '')] = v else: _kwargs[k] = v result = super().compute(**_kwargs) - return {p_mapping[k]: v for k,v in result.items()} + + return result + @@ -1914,32 +1518,23 @@ def __init__(self): self.compute_takes_start_end = False def infer_dtype(self): + super().infer_dtype() return self.dtype - @property - def dep_mapping(self): - return {k: v for k, v in zip(strax.to_str_tuple(self.depends_on), - strax.to_str_tuple(super().depends_on))} - - @property - def prov_mapping(self): - return {v: k for k, v in zip(strax.to_str_tuple(self.provides), - strax.to_str_tuple(super().provides))} - def compute(self, **kwargs): - mapping = self.dep_mapping - p_mapping = self.prov_mapping _kwargs = {} for k,v in kwargs.items(): if k not in ['chunk_i', 'end', 'start']: - _kwargs[mapping[k]] = v + _kwargs[k.replace('_sv', '')] = v else: _kwargs[k] = v result = super().compute(**_kwargs) - return {p_mapping[k]: v for k,v in result.items()} + + return result + @@ -1957,32 +1552,23 @@ def __init__(self): self.compute_takes_start_end = False def infer_dtype(self): + super().infer_dtype() return self.dtype - @property - def dep_mapping(self): - return {k: v for k, v in zip(strax.to_str_tuple(self.depends_on), - strax.to_str_tuple(super().depends_on))} - - @property - def prov_mapping(self): - return {v: k for k, v in zip(strax.to_str_tuple(self.provides), - strax.to_str_tuple(super().provides))} - def compute(self, **kwargs): - mapping = self.dep_mapping - p_mapping = self.prov_mapping _kwargs = {} for k,v in kwargs.items(): if k not 
in ['chunk_i', 'end', 'start']: - _kwargs[mapping[k]] = v + _kwargs[k.replace('_sv', '')] = v else: _kwargs[k] = v result = super().compute(**_kwargs) - return {p_mapping[k]: v for k,v in result.items()} + + return result + @@ -2000,32 +1586,23 @@ def __init__(self): self.compute_takes_start_end = False def infer_dtype(self): + super().infer_dtype() return self.dtype - @property - def dep_mapping(self): - return {k: v for k, v in zip(strax.to_str_tuple(self.depends_on), - strax.to_str_tuple(super().depends_on))} - - @property - def prov_mapping(self): - return {v: k for k, v in zip(strax.to_str_tuple(self.provides), - strax.to_str_tuple(super().provides))} - def compute(self, **kwargs): - mapping = self.dep_mapping - p_mapping = self.prov_mapping _kwargs = {} for k,v in kwargs.items(): if k not in ['chunk_i', 'end', 'start']: - _kwargs[mapping[k]] = v + _kwargs[k.replace('_sv', '')] = v else: _kwargs[k] = v result = super().compute(**_kwargs) - return {p_mapping[k]: v for k,v in result.items()} + + return result + @@ -2043,32 +1620,23 @@ def __init__(self): self.compute_takes_start_end = False def infer_dtype(self): + super().infer_dtype() return self.dtype - @property - def dep_mapping(self): - return {k: v for k, v in zip(strax.to_str_tuple(self.depends_on), - strax.to_str_tuple(super().depends_on))} - - @property - def prov_mapping(self): - return {v: k for k, v in zip(strax.to_str_tuple(self.provides), - strax.to_str_tuple(super().provides))} - def compute(self, **kwargs): - mapping = self.dep_mapping - p_mapping = self.prov_mapping _kwargs = {} for k,v in kwargs.items(): if k not in ['chunk_i', 'end', 'start']: - _kwargs[mapping[k]] = v + _kwargs[k.replace('_sv', '')] = v else: _kwargs[k] = v result = super().compute(**_kwargs) - return {p_mapping[k]: v for k,v in result.items()} + + return result + @@ -2086,32 +1654,23 @@ def __init__(self): self.compute_takes_start_end = False def infer_dtype(self): + super().infer_dtype() return self.dtype - @property - def dep_mapping(self): - return {k: v for k, v in zip(strax.to_str_tuple(self.depends_on), - strax.to_str_tuple(super().depends_on))} - - @property - def prov_mapping(self): - return {v: k for k, v in zip(strax.to_str_tuple(self.provides), - strax.to_str_tuple(super().provides))} - def compute(self, **kwargs): - mapping = self.dep_mapping - p_mapping = self.prov_mapping _kwargs = {} for k,v in kwargs.items(): if k not in ['chunk_i', 'end', 'start']: - _kwargs[mapping[k]] = v + _kwargs[k.replace('_sv', '')] = v else: _kwargs[k] = v result = super().compute(**_kwargs) - return {p_mapping[k]: v for k,v in result.items()} + + return result + @@ -2129,32 +1688,26 @@ def __init__(self): self.compute_takes_start_end = True def infer_dtype(self): + super().infer_dtype() return self.dtype - @property - def dep_mapping(self): - return {k: v for k, v in zip(strax.to_str_tuple(self.depends_on), - strax.to_str_tuple(super().depends_on))} - - @property - def prov_mapping(self): - return {v: k for k, v in zip(strax.to_str_tuple(self.provides), - strax.to_str_tuple(super().provides))} - def compute(self, **kwargs): - mapping = self.dep_mapping - p_mapping = self.prov_mapping _kwargs = {} for k,v in kwargs.items(): if k not in ['chunk_i', 'end', 'start']: - _kwargs[mapping[k]] = v + _kwargs[k.replace('_sv', '')] = v else: _kwargs[k] = v result = super().compute(**_kwargs) - return {p_mapping[k]: v for k,v in result.items()} + + + p_mapping = {{v: k for k, v in zip(strax.to_str_tuple(self.provides), + strax.to_str_tuple(super().provides))}} + return 
{{p_mapping[k]: v for k,v in result.items()}} + @@ -2172,32 +1725,26 @@ def __init__(self): self.compute_takes_start_end = True def infer_dtype(self): + super().infer_dtype() return self.dtype - @property - def dep_mapping(self): - return {k: v for k, v in zip(strax.to_str_tuple(self.depends_on), - strax.to_str_tuple(super().depends_on))} - - @property - def prov_mapping(self): - return {v: k for k, v in zip(strax.to_str_tuple(self.provides), - strax.to_str_tuple(super().provides))} - def compute(self, **kwargs): - mapping = self.dep_mapping - p_mapping = self.prov_mapping _kwargs = {} for k,v in kwargs.items(): if k not in ['chunk_i', 'end', 'start']: - _kwargs[mapping[k]] = v + _kwargs[k.replace('_sv', '')] = v else: _kwargs[k] = v result = super().compute(**_kwargs) - return {p_mapping[k]: v for k,v in result.items()} + + + p_mapping = {{v: k for k, v in zip(strax.to_str_tuple(self.provides), + strax.to_str_tuple(super().provides))}} + return {{p_mapping[k]: v for k,v in result.items()}} + @@ -2215,32 +1762,23 @@ def __init__(self): self.compute_takes_start_end = True def infer_dtype(self): + super().infer_dtype() return self.dtype - @property - def dep_mapping(self): - return {k: v for k, v in zip(strax.to_str_tuple(self.depends_on), - strax.to_str_tuple(super().depends_on))} - - @property - def prov_mapping(self): - return {v: k for k, v in zip(strax.to_str_tuple(self.provides), - strax.to_str_tuple(super().provides))} - def compute(self, **kwargs): - mapping = self.dep_mapping - p_mapping = self.prov_mapping _kwargs = {} for k,v in kwargs.items(): if k not in ['chunk_i', 'end', 'start']: - _kwargs[mapping[k]] = v + _kwargs[k.replace('_sv', '')] = v else: _kwargs[k] = v result = super().compute(**_kwargs) - return {p_mapping[k]: v for k,v in result.items()} + + return result + @@ -2258,32 +1796,23 @@ def __init__(self): self.compute_takes_start_end = False def infer_dtype(self): + super().infer_dtype() return self.dtype - @property - def dep_mapping(self): - return {k: v for k, v in zip(strax.to_str_tuple(self.depends_on), - strax.to_str_tuple(super().depends_on))} - - @property - def prov_mapping(self): - return {v: k for k, v in zip(strax.to_str_tuple(self.provides), - strax.to_str_tuple(super().provides))} - def compute(self, **kwargs): - mapping = self.dep_mapping - p_mapping = self.prov_mapping _kwargs = {} for k,v in kwargs.items(): if k not in ['chunk_i', 'end', 'start']: - _kwargs[mapping[k]] = v + _kwargs[k.replace('_sv', '')] = v else: _kwargs[k] = v result = super().compute(**_kwargs) - return {p_mapping[k]: v for k,v in result.items()} + + return result + @@ -2301,32 +1830,23 @@ def __init__(self): self.compute_takes_start_end = False def infer_dtype(self): + super().infer_dtype() return self.dtype - @property - def dep_mapping(self): - return {k: v for k, v in zip(strax.to_str_tuple(self.depends_on), - strax.to_str_tuple(super().depends_on))} - - @property - def prov_mapping(self): - return {v: k for k, v in zip(strax.to_str_tuple(self.provides), - strax.to_str_tuple(super().provides))} - def compute(self, **kwargs): - mapping = self.dep_mapping - p_mapping = self.prov_mapping _kwargs = {} for k,v in kwargs.items(): if k not in ['chunk_i', 'end', 'start']: - _kwargs[mapping[k]] = v + _kwargs[k.replace('_sv', '')] = v else: _kwargs[k] = v result = super().compute(**_kwargs) - return {p_mapping[k]: v for k,v in result.items()} + + return result + @@ -2348,32 +1868,26 @@ def __init__(self): self.compute_takes_start_end = True def infer_dtype(self): + super().infer_dtype() 
return self.dtype - @property - def dep_mapping(self): - return {k: v for k, v in zip(strax.to_str_tuple(self.depends_on), - strax.to_str_tuple(super().depends_on))} - - @property - def prov_mapping(self): - return {v: k for k, v in zip(strax.to_str_tuple(self.provides), - strax.to_str_tuple(super().provides))} - def compute(self, **kwargs): - mapping = self.dep_mapping - p_mapping = self.prov_mapping _kwargs = {} for k,v in kwargs.items(): if k not in ['chunk_i', 'end', 'start']: - _kwargs[mapping[k]] = v + _kwargs[k.replace('_sv', '')] = v else: _kwargs[k] = v result = super().compute(**_kwargs) - return {p_mapping[k]: v for k,v in result.items()} + + + p_mapping = {{v: k for k, v in zip(strax.to_str_tuple(self.provides), + strax.to_str_tuple(super().provides))}} + return {{p_mapping[k]: v for k,v in result.items()}} + @@ -2395,32 +1909,26 @@ def __init__(self): self.compute_takes_start_end = True def infer_dtype(self): + super().infer_dtype() return self.dtype - @property - def dep_mapping(self): - return {k: v for k, v in zip(strax.to_str_tuple(self.depends_on), - strax.to_str_tuple(super().depends_on))} - - @property - def prov_mapping(self): - return {v: k for k, v in zip(strax.to_str_tuple(self.provides), - strax.to_str_tuple(super().provides))} - def compute(self, **kwargs): - mapping = self.dep_mapping - p_mapping = self.prov_mapping _kwargs = {} for k,v in kwargs.items(): if k not in ['chunk_i', 'end', 'start']: - _kwargs[mapping[k]] = v + _kwargs[k.replace('_sv', '')] = v else: _kwargs[k] = v result = super().compute(**_kwargs) - return {p_mapping[k]: v for k,v in result.items()} + + + p_mapping = {{v: k for k, v in zip(strax.to_str_tuple(self.provides), + strax.to_str_tuple(super().provides))}} + return {{p_mapping[k]: v for k,v in result.items()}} + @@ -2442,32 +1950,26 @@ def __init__(self): self.compute_takes_start_end = True def infer_dtype(self): + super().infer_dtype() return self.dtype - @property - def dep_mapping(self): - return {k: v for k, v in zip(strax.to_str_tuple(self.depends_on), - strax.to_str_tuple(super().depends_on))} - - @property - def prov_mapping(self): - return {v: k for k, v in zip(strax.to_str_tuple(self.provides), - strax.to_str_tuple(super().provides))} - def compute(self, **kwargs): - mapping = self.dep_mapping - p_mapping = self.prov_mapping _kwargs = {} for k,v in kwargs.items(): if k not in ['chunk_i', 'end', 'start']: - _kwargs[mapping[k]] = v + _kwargs[k.replace('_sv', '')] = v else: _kwargs[k] = v result = super().compute(**_kwargs) - return {p_mapping[k]: v for k,v in result.items()} + + + p_mapping = {{v: k for k, v in zip(strax.to_str_tuple(self.provides), + strax.to_str_tuple(super().provides))}} + return {{p_mapping[k]: v for k,v in result.items()}} + @@ -2487,32 +1989,26 @@ def __init__(self): self.compute_takes_start_end = True def infer_dtype(self): + super().infer_dtype() return self.dtype - @property - def dep_mapping(self): - return {k: v for k, v in zip(strax.to_str_tuple(self.depends_on), - strax.to_str_tuple(super().depends_on))} - - @property - def prov_mapping(self): - return {v: k for k, v in zip(strax.to_str_tuple(self.provides), - strax.to_str_tuple(super().provides))} - def compute(self, **kwargs): - mapping = self.dep_mapping - p_mapping = self.prov_mapping _kwargs = {} for k,v in kwargs.items(): if k not in ['chunk_i', 'end', 'start']: - _kwargs[mapping[k]] = v + _kwargs[k.replace('_sv', '')] = v else: _kwargs[k] = v result = super().compute(**_kwargs) - return {p_mapping[k]: v for k,v in result.items()} + + + p_mapping 
= {{v: k for k, v in zip(strax.to_str_tuple(self.provides), + strax.to_str_tuple(super().provides))}} + return {{p_mapping[k]: v for k,v in result.items()}} + @@ -2532,32 +2028,26 @@ def __init__(self): self.compute_takes_start_end = True def infer_dtype(self): + super().infer_dtype() return self.dtype - @property - def dep_mapping(self): - return {k: v for k, v in zip(strax.to_str_tuple(self.depends_on), - strax.to_str_tuple(super().depends_on))} - - @property - def prov_mapping(self): - return {v: k for k, v in zip(strax.to_str_tuple(self.provides), - strax.to_str_tuple(super().provides))} - def compute(self, **kwargs): - mapping = self.dep_mapping - p_mapping = self.prov_mapping _kwargs = {} for k,v in kwargs.items(): if k not in ['chunk_i', 'end', 'start']: - _kwargs[mapping[k]] = v + _kwargs[k.replace('_sv', '')] = v else: _kwargs[k] = v result = super().compute(**_kwargs) - return {p_mapping[k]: v for k,v in result.items()} + + + p_mapping = {{v: k for k, v in zip(strax.to_str_tuple(self.provides), + strax.to_str_tuple(super().provides))}} + return {{p_mapping[k]: v for k,v in result.items()}} + @@ -2575,32 +2065,23 @@ def __init__(self): self.compute_takes_start_end = False def infer_dtype(self): + super().infer_dtype() return self.dtype - @property - def dep_mapping(self): - return {k: v for k, v in zip(strax.to_str_tuple(self.depends_on), - strax.to_str_tuple(super().depends_on))} - - @property - def prov_mapping(self): - return {v: k for k, v in zip(strax.to_str_tuple(self.provides), - strax.to_str_tuple(super().provides))} - def compute(self, **kwargs): - mapping = self.dep_mapping - p_mapping = self.prov_mapping _kwargs = {} for k,v in kwargs.items(): if k not in ['chunk_i', 'end', 'start']: - _kwargs[mapping[k]] = v + _kwargs[k.replace('_sv', '')] = v else: _kwargs[k] = v result = super().compute(**_kwargs) - return {p_mapping[k]: v for k,v in result.items()} + + return result + @@ -2618,32 +2099,23 @@ def __init__(self): self.compute_takes_start_end = True def infer_dtype(self): + super().infer_dtype() return self.dtype - @property - def dep_mapping(self): - return {k: v for k, v in zip(strax.to_str_tuple(self.depends_on), - strax.to_str_tuple(super().depends_on))} - - @property - def prov_mapping(self): - return {v: k for k, v in zip(strax.to_str_tuple(self.provides), - strax.to_str_tuple(super().provides))} - def compute(self, **kwargs): - mapping = self.dep_mapping - p_mapping = self.prov_mapping _kwargs = {} for k,v in kwargs.items(): if k not in ['chunk_i', 'end', 'start']: - _kwargs[mapping[k]] = v + _kwargs[k.replace('_sv', '')] = v else: _kwargs[k] = v result = super().compute(**_kwargs) - return {p_mapping[k]: v for k,v in result.items()} + + return result + @@ -2661,32 +2133,23 @@ def __init__(self): self.compute_takes_start_end = False def infer_dtype(self): + super().infer_dtype() return self.dtype - @property - def dep_mapping(self): - return {k: v for k, v in zip(strax.to_str_tuple(self.depends_on), - strax.to_str_tuple(super().depends_on))} - - @property - def prov_mapping(self): - return {v: k for k, v in zip(strax.to_str_tuple(self.provides), - strax.to_str_tuple(super().provides))} - def compute(self, **kwargs): - mapping = self.dep_mapping - p_mapping = self.prov_mapping _kwargs = {} for k,v in kwargs.items(): if k not in ['chunk_i', 'end', 'start']: - _kwargs[mapping[k]] = v + _kwargs[k.replace('_sv', '')] = v else: _kwargs[k] = v result = super().compute(**_kwargs) - return {p_mapping[k]: v for k,v in result.items()} + + return result + @@ -2704,32 
+2167,23 @@ def __init__(self): self.compute_takes_start_end = False def infer_dtype(self): + super().infer_dtype() return self.dtype - @property - def dep_mapping(self): - return {k: v for k, v in zip(strax.to_str_tuple(self.depends_on), - strax.to_str_tuple(super().depends_on))} - - @property - def prov_mapping(self): - return {v: k for k, v in zip(strax.to_str_tuple(self.provides), - strax.to_str_tuple(super().provides))} - def compute(self, **kwargs): - mapping = self.dep_mapping - p_mapping = self.prov_mapping _kwargs = {} for k,v in kwargs.items(): if k not in ['chunk_i', 'end', 'start']: - _kwargs[mapping[k]] = v + _kwargs[k.replace('_sv', '')] = v else: _kwargs[k] = v result = super().compute(**_kwargs) - return {p_mapping[k]: v for k,v in result.items()} + + return result + @@ -2747,32 +2201,23 @@ def __init__(self): self.compute_takes_start_end = True def infer_dtype(self): + super().infer_dtype() return self.dtype - @property - def dep_mapping(self): - return {k: v for k, v in zip(strax.to_str_tuple(self.depends_on), - strax.to_str_tuple(super().depends_on))} - - @property - def prov_mapping(self): - return {v: k for k, v in zip(strax.to_str_tuple(self.provides), - strax.to_str_tuple(super().provides))} - def compute(self, **kwargs): - mapping = self.dep_mapping - p_mapping = self.prov_mapping _kwargs = {} for k,v in kwargs.items(): if k not in ['chunk_i', 'end', 'start']: - _kwargs[mapping[k]] = v + _kwargs[k.replace('_sv', '')] = v else: _kwargs[k] = v result = super().compute(**_kwargs) - return {p_mapping[k]: v for k,v in result.items()} + + return result + @@ -2790,32 +2235,23 @@ def __init__(self): self.compute_takes_start_end = True def infer_dtype(self): + super().infer_dtype() return self.dtype - @property - def dep_mapping(self): - return {k: v for k, v in zip(strax.to_str_tuple(self.depends_on), - strax.to_str_tuple(super().depends_on))} - - @property - def prov_mapping(self): - return {v: k for k, v in zip(strax.to_str_tuple(self.provides), - strax.to_str_tuple(super().provides))} - def compute(self, **kwargs): - mapping = self.dep_mapping - p_mapping = self.prov_mapping _kwargs = {} for k,v in kwargs.items(): if k not in ['chunk_i', 'end', 'start']: - _kwargs[mapping[k]] = v + _kwargs[k.replace('_sv', '')] = v else: _kwargs[k] = v result = super().compute(**_kwargs) - return {p_mapping[k]: v for k,v in result.items()} + + return result + @@ -2833,32 +2269,23 @@ def __init__(self): self.compute_takes_start_end = False def infer_dtype(self): + super().infer_dtype() return self.dtype - @property - def dep_mapping(self): - return {k: v for k, v in zip(strax.to_str_tuple(self.depends_on), - strax.to_str_tuple(super().depends_on))} - - @property - def prov_mapping(self): - return {v: k for k, v in zip(strax.to_str_tuple(self.provides), - strax.to_str_tuple(super().provides))} - def compute(self, **kwargs): - mapping = self.dep_mapping - p_mapping = self.prov_mapping _kwargs = {} for k,v in kwargs.items(): if k not in ['chunk_i', 'end', 'start']: - _kwargs[mapping[k]] = v + _kwargs[k.replace('_sv', '')] = v else: _kwargs[k] = v result = super().compute(**_kwargs) - return {p_mapping[k]: v for k,v in result.items()} + + return result + @@ -2876,32 +2303,23 @@ def __init__(self): self.compute_takes_start_end = False def infer_dtype(self): + super().infer_dtype() return self.dtype - @property - def dep_mapping(self): - return {k: v for k, v in zip(strax.to_str_tuple(self.depends_on), - strax.to_str_tuple(super().depends_on))} - - @property - def prov_mapping(self): - return 
{v: k for k, v in zip(strax.to_str_tuple(self.provides), - strax.to_str_tuple(super().provides))} - def compute(self, **kwargs): - mapping = self.dep_mapping - p_mapping = self.prov_mapping _kwargs = {} for k,v in kwargs.items(): if k not in ['chunk_i', 'end', 'start']: - _kwargs[mapping[k]] = v + _kwargs[k.replace('_sv', '')] = v else: _kwargs[k] = v result = super().compute(**_kwargs) - return {p_mapping[k]: v for k,v in result.items()} + + return result + @@ -2919,32 +2337,23 @@ def __init__(self): self.compute_takes_start_end = True def infer_dtype(self): + super().infer_dtype() return self.dtype - @property - def dep_mapping(self): - return {k: v for k, v in zip(strax.to_str_tuple(self.depends_on), - strax.to_str_tuple(super().depends_on))} - - @property - def prov_mapping(self): - return {v: k for k, v in zip(strax.to_str_tuple(self.provides), - strax.to_str_tuple(super().provides))} - def compute(self, **kwargs): - mapping = self.dep_mapping - p_mapping = self.prov_mapping _kwargs = {} for k,v in kwargs.items(): if k not in ['chunk_i', 'end', 'start']: - _kwargs[mapping[k]] = v + _kwargs[k.replace('_sv', '')] = v else: _kwargs[k] = v result = super().compute(**_kwargs) - return {p_mapping[k]: v for k,v in result.items()} + + return result + @@ -2962,32 +2371,23 @@ def __init__(self): self.compute_takes_start_end = False def infer_dtype(self): + super().infer_dtype() return self.dtype - @property - def dep_mapping(self): - return {k: v for k, v in zip(strax.to_str_tuple(self.depends_on), - strax.to_str_tuple(super().depends_on))} - - @property - def prov_mapping(self): - return {v: k for k, v in zip(strax.to_str_tuple(self.provides), - strax.to_str_tuple(super().provides))} - def compute(self, **kwargs): - mapping = self.dep_mapping - p_mapping = self.prov_mapping _kwargs = {} for k,v in kwargs.items(): if k not in ['chunk_i', 'end', 'start']: - _kwargs[mapping[k]] = v + _kwargs[k.replace('_sv', '')] = v else: _kwargs[k] = v result = super().compute(**_kwargs) - return {p_mapping[k]: v for k,v in result.items()} + + return result + @@ -3005,32 +2405,23 @@ def __init__(self): self.compute_takes_start_end = True def infer_dtype(self): + super().infer_dtype() return self.dtype - @property - def dep_mapping(self): - return {k: v for k, v in zip(strax.to_str_tuple(self.depends_on), - strax.to_str_tuple(super().depends_on))} - - @property - def prov_mapping(self): - return {v: k for k, v in zip(strax.to_str_tuple(self.provides), - strax.to_str_tuple(super().provides))} - def compute(self, **kwargs): - mapping = self.dep_mapping - p_mapping = self.prov_mapping _kwargs = {} for k,v in kwargs.items(): if k not in ['chunk_i', 'end', 'start']: - _kwargs[mapping[k]] = v + _kwargs[k.replace('_sv', '')] = v else: _kwargs[k] = v result = super().compute(**_kwargs) - return {p_mapping[k]: v for k,v in result.items()} + + return result + @@ -3048,32 +2439,23 @@ def __init__(self): self.compute_takes_start_end = False def infer_dtype(self): + super().infer_dtype() return self.dtype - @property - def dep_mapping(self): - return {k: v for k, v in zip(strax.to_str_tuple(self.depends_on), - strax.to_str_tuple(super().depends_on))} - - @property - def prov_mapping(self): - return {v: k for k, v in zip(strax.to_str_tuple(self.provides), - strax.to_str_tuple(super().provides))} - def compute(self, **kwargs): - mapping = self.dep_mapping - p_mapping = self.prov_mapping _kwargs = {} for k,v in kwargs.items(): if k not in ['chunk_i', 'end', 'start']: - _kwargs[mapping[k]] = v + _kwargs[k.replace('_sv', '')] = v 
else: _kwargs[k] = v result = super().compute(**_kwargs) - return {p_mapping[k]: v for k,v in result.items()} + + return result + @@ -3093,32 +2475,26 @@ def __init__(self): self.compute_takes_start_end = True def infer_dtype(self): + super().infer_dtype() return self.dtype - @property - def dep_mapping(self): - return {k: v for k, v in zip(strax.to_str_tuple(self.depends_on), - strax.to_str_tuple(super().depends_on))} - - @property - def prov_mapping(self): - return {v: k for k, v in zip(strax.to_str_tuple(self.provides), - strax.to_str_tuple(super().provides))} - def compute(self, **kwargs): - mapping = self.dep_mapping - p_mapping = self.prov_mapping _kwargs = {} for k,v in kwargs.items(): if k not in ['chunk_i', 'end', 'start']: - _kwargs[mapping[k]] = v + _kwargs[k.replace('_sv', '')] = v else: _kwargs[k] = v result = super().compute(**_kwargs) - return {p_mapping[k]: v for k,v in result.items()} + + + p_mapping = {{v: k for k, v in zip(strax.to_str_tuple(self.provides), + strax.to_str_tuple(super().provides))}} + return {{p_mapping[k]: v for k,v in result.items()}} + @@ -3138,32 +2514,26 @@ def __init__(self): self.compute_takes_start_end = True def infer_dtype(self): + super().infer_dtype() return self.dtype - @property - def dep_mapping(self): - return {k: v for k, v in zip(strax.to_str_tuple(self.depends_on), - strax.to_str_tuple(super().depends_on))} - - @property - def prov_mapping(self): - return {v: k for k, v in zip(strax.to_str_tuple(self.provides), - strax.to_str_tuple(super().provides))} - def compute(self, **kwargs): - mapping = self.dep_mapping - p_mapping = self.prov_mapping _kwargs = {} for k,v in kwargs.items(): if k not in ['chunk_i', 'end', 'start']: - _kwargs[mapping[k]] = v + _kwargs[k.replace('_sv', '')] = v else: _kwargs[k] = v result = super().compute(**_kwargs) - return {p_mapping[k]: v for k,v in result.items()} + + + p_mapping = {{v: k for k, v in zip(strax.to_str_tuple(self.provides), + strax.to_str_tuple(super().provides))}} + return {{p_mapping[k]: v for k,v in result.items()}} + @@ -3183,32 +2553,26 @@ def __init__(self): self.compute_takes_start_end = True def infer_dtype(self): + super().infer_dtype() return self.dtype - @property - def dep_mapping(self): - return {k: v for k, v in zip(strax.to_str_tuple(self.depends_on), - strax.to_str_tuple(super().depends_on))} - - @property - def prov_mapping(self): - return {v: k for k, v in zip(strax.to_str_tuple(self.provides), - strax.to_str_tuple(super().provides))} - def compute(self, **kwargs): - mapping = self.dep_mapping - p_mapping = self.prov_mapping _kwargs = {} for k,v in kwargs.items(): if k not in ['chunk_i', 'end', 'start']: - _kwargs[mapping[k]] = v + _kwargs[k.replace('_sv', '')] = v else: _kwargs[k] = v result = super().compute(**_kwargs) - return {p_mapping[k]: v for k,v in result.items()} + + + p_mapping = {{v: k for k, v in zip(strax.to_str_tuple(self.provides), + strax.to_str_tuple(super().provides))}} + return {{p_mapping[k]: v for k,v in result.items()}} + @@ -3226,30 +2590,21 @@ def __init__(self): self.compute_takes_start_end = False def infer_dtype(self): + super().infer_dtype() return self.dtype - @property - def dep_mapping(self): - return {k: v for k, v in zip(strax.to_str_tuple(self.depends_on), - strax.to_str_tuple(super().depends_on))} - - @property - def prov_mapping(self): - return {v: k for k, v in zip(strax.to_str_tuple(self.provides), - strax.to_str_tuple(super().provides))} - def compute(self, **kwargs): - mapping = self.dep_mapping - p_mapping = self.prov_mapping _kwargs = 
{} for k,v in kwargs.items(): if k not in ['chunk_i', 'end', 'start']: - _kwargs[mapping[k]] = v + _kwargs[k.replace('_sv', '')] = v else: _kwargs[k] = v result = super().compute(**_kwargs) - return {p_mapping[k]: v for k,v in result.items()} + + return result + From 94efe07a8724af14943836121799f25f29c8086a Mon Sep 17 00:00:00 2001 From: cfuselli Date: Tue, 7 Mar 2023 13:45:25 +0000 Subject: [PATCH 32/56] remove extra lines --- straxen/plugins/raw_records_sv/_build_copies.py | 7 ------- 1 file changed, 7 deletions(-) diff --git a/straxen/plugins/raw_records_sv/_build_copies.py b/straxen/plugins/raw_records_sv/_build_copies.py index fa6bd2d57..c4579bcbb 100644 --- a/straxen/plugins/raw_records_sv/_build_copies.py +++ b/straxen/plugins/raw_records_sv/_build_copies.py @@ -1,10 +1,5 @@ -import sys -sys.path.insert(0, '/daq_common/carlo/test_software/straxen_software_veto') - - import strax import straxen -from straxen.plugins.raw_records_sv import software_veto from immutabledict import immutabledict import sys, os @@ -55,8 +50,6 @@ pass elif pl == straxen.Fake1TDAQReader: pass - elif pl.__base__ == straxen.plugins.raw_records_sv._software_veto_base.RawRecordsSoftwareVetoBase: - pass elif name.endswith('_sv'): # ext_timings_nv_sv_sv ?? pass elif pl in _plugins: From 12e4566f644952ee21185fba4c4a7e7bb05fadc4 Mon Sep 17 00:00:00 2001 From: cfuselli Date: Tue, 7 Mar 2023 14:27:34 +0000 Subject: [PATCH 33/56] fix and add straxer compatibility --- bin/bootstrax | 69 ------------------ bin/straxer | 12 ++++ .../plugins/raw_records_sv/_build_copies.py | 11 +-- .../raw_records_sv/_software_veto_copies.py | 70 ++++++++----------- 4 files changed, 48 insertions(+), 114 deletions(-) diff --git a/bin/bootstrax b/bin/bootstrax index e6e9833d5..a3c46a0fe 100755 --- a/bin/bootstrax +++ b/bin/bootstrax @@ -1699,10 +1699,6 @@ def software_veto_get_name(args, rundoc=None): return plugin_name - - - - def software_veto_register_raw_records(st, args, rundoc=None): """Based on the selection that we want to apply, register the correct veto plugin. @@ -1719,71 +1715,6 @@ def software_veto_register_raw_records(st, args, rundoc=None): -# software_veto_plugin_classes = [] - -# def software_veto_register_fly(st, run_id): -# """We make a copy of every original plugin -# for the software veto adding a _sv to the provides. -# Computation is tricky because requires initialisation -# of plugin to copy. Also requires no hardcoded data_types -# in plugins (see https://github.com/cfuselli/straxen/pull/3) -# """ - -# registry = st._plugin_class_registry.copy().items() - -# _plugins = [] -# for name, pl in registry: -# if pl == straxen.DAQReader: -# pass -# elif pl == straxen.Fake1TDAQReader: -# pass -# elif pl == straxen.plugins.raw_records_sv.software_veto.RadialVeto: -# pass -# elif name.endswith('_sv'): # ext_timings_nv_sv_sv ?? 
-# pass -# elif pl in _plugins: -# pass -# else: - -# # I need initialisation to save myself -# init_pl = st.get_single_plugin(run_id, name) - -# new_plugin_class = type(name+'_sv', (pl,), {}) - -# new_plugin_class.__module__ = None - -# new_plugin_class.provides = [prov.replace('_sv','')+'_sv' for prov in strax.to_str_tuple(pl.provides)] -# new_plugin_class.depends_on = [deps.replace('_sv','')+'_sv' for deps in strax.to_str_tuple(pl.depends_on)] - -# if isinstance(init_pl.data_kind, dict): -# new_plugin_class.data_kind = {t.replace('_sv','')+'_sv':init_pl.save_when[t] for t in init_pl.data_kind} -# else: -# new_plugin_class.data_kind = init_pl.data_kind+'_sv' - -# if isinstance(pl.save_when, immutabledict): -# new_plugin_class.save_when = immutabledict({t.replace('_sv','')+'_sv':init_pl.save_when[t] for t in pl.save_when}) - -# if isinstance(pl.rechunk_on_save, immutabledict): -# new_plugin_class.rechunk_on_save = immutabledict({t.replace('_sv','')+'_sv':init_pl.save_when[t] for t in pl.rechunk_on_save}) - -# if init_pl.multi_output: -# new_plugin_class.dtype = {prov+'_sv': init_pl.dtype_for(prov) for prov in pl.provides} -# else: -# new_plugin_class.dtype = init_pl.dtype_for(name) - -# def new_infer_dtype(self): -# return self.dtype - -# new_plugin_class.infer_dtype = new_infer_dtype - - -# software_veto_plugin_classes.append(new_plugin_class) - -# _plugins.append(pl) - - - - ########################### # __MAIN__ # ########################### diff --git a/bin/straxer b/bin/straxer index f796b17fd..3c783b443 100644 --- a/bin/straxer +++ b/bin/straxer @@ -118,6 +118,10 @@ def parse_args(): '--print_alive', default=300, help='Print that straxer is still running every this many [seconds]') + parser.add_argument( + '--software_veto', default=False, + help="Class name of veto plugin to apply (i.e.: RadialVeto). 
It's overwritten by rundoc value.") + return parser.parse_args() @@ -143,6 +147,14 @@ def setup_context(args): if args.diagnose_sorting: st.set_config(dict(diagnose_sorting=True)) + if args.software_veto: + import straxen.plugins.raw_records_sv.software_veto as software_veto + veto_plugin = getattr(straxen.plugins.raw_records_sv.software_veto, args.software_veto) + st.register(veto_plugin) + + import straxen.plugins.raw_records_sv._software_veto_copies as _software_veto_copies + st.register_all(_software_veto_copies) + st.context_config['allow_multiprocess'] = args.multiprocess st.context_config['allow_shm'] = args.shm st.context_config['allow_lazy'] = not (args.notlazy is True) diff --git a/straxen/plugins/raw_records_sv/_build_copies.py b/straxen/plugins/raw_records_sv/_build_copies.py index c4579bcbb..b3e1ff522 100644 --- a/straxen/plugins/raw_records_sv/_build_copies.py +++ b/straxen/plugins/raw_records_sv/_build_copies.py @@ -1,7 +1,9 @@ +import sys, os +sys.path.insert(0, '/daq_common/carlo/test_software/straxen_software_veto') + import strax import straxen from immutabledict import immutabledict -import sys, os _dir = os.path.dirname(os.path.abspath(__file__)) print(_dir) @@ -100,10 +102,9 @@ if init_pl.multi_output: output = """ - - p_mapping = {{v: k for k, v in zip(strax.to_str_tuple(self.provides), - strax.to_str_tuple(super().provides))}} - return {{p_mapping[k]: v for k,v in result.items()}} + p_mapping = {v: k for k, v in zip(strax.to_str_tuple(self.provides), + strax.to_str_tuple(super().provides))} + return {p_mapping[k]: v for k,v in result.items()} """ else: output = """ diff --git a/straxen/plugins/raw_records_sv/_software_veto_copies.py b/straxen/plugins/raw_records_sv/_software_veto_copies.py index ff0c96c52..af60d3b67 100644 --- a/straxen/plugins/raw_records_sv/_software_veto_copies.py +++ b/straxen/plugins/raw_records_sv/_software_veto_copies.py @@ -1703,10 +1703,9 @@ def compute(self, **kwargs): result = super().compute(**_kwargs) - - p_mapping = {{v: k for k, v in zip(strax.to_str_tuple(self.provides), - strax.to_str_tuple(super().provides))}} - return {{p_mapping[k]: v for k,v in result.items()}} + p_mapping = {v: k for k, v in zip(strax.to_str_tuple(self.provides), + strax.to_str_tuple(super().provides))} + return {p_mapping[k]: v for k,v in result.items()} @@ -1740,10 +1739,9 @@ def compute(self, **kwargs): result = super().compute(**_kwargs) - - p_mapping = {{v: k for k, v in zip(strax.to_str_tuple(self.provides), - strax.to_str_tuple(super().provides))}} - return {{p_mapping[k]: v for k,v in result.items()}} + p_mapping = {v: k for k, v in zip(strax.to_str_tuple(self.provides), + strax.to_str_tuple(super().provides))} + return {p_mapping[k]: v for k,v in result.items()} @@ -1883,10 +1881,9 @@ def compute(self, **kwargs): result = super().compute(**_kwargs) - - p_mapping = {{v: k for k, v in zip(strax.to_str_tuple(self.provides), - strax.to_str_tuple(super().provides))}} - return {{p_mapping[k]: v for k,v in result.items()}} + p_mapping = {v: k for k, v in zip(strax.to_str_tuple(self.provides), + strax.to_str_tuple(super().provides))} + return {p_mapping[k]: v for k,v in result.items()} @@ -1924,10 +1921,9 @@ def compute(self, **kwargs): result = super().compute(**_kwargs) - - p_mapping = {{v: k for k, v in zip(strax.to_str_tuple(self.provides), - strax.to_str_tuple(super().provides))}} - return {{p_mapping[k]: v for k,v in result.items()}} + p_mapping = {v: k for k, v in zip(strax.to_str_tuple(self.provides), + strax.to_str_tuple(super().provides))} + return 
{p_mapping[k]: v for k,v in result.items()} @@ -1965,10 +1961,9 @@ def compute(self, **kwargs): result = super().compute(**_kwargs) - - p_mapping = {{v: k for k, v in zip(strax.to_str_tuple(self.provides), - strax.to_str_tuple(super().provides))}} - return {{p_mapping[k]: v for k,v in result.items()}} + p_mapping = {v: k for k, v in zip(strax.to_str_tuple(self.provides), + strax.to_str_tuple(super().provides))} + return {p_mapping[k]: v for k,v in result.items()} @@ -2004,10 +1999,9 @@ def compute(self, **kwargs): result = super().compute(**_kwargs) - - p_mapping = {{v: k for k, v in zip(strax.to_str_tuple(self.provides), - strax.to_str_tuple(super().provides))}} - return {{p_mapping[k]: v for k,v in result.items()}} + p_mapping = {v: k for k, v in zip(strax.to_str_tuple(self.provides), + strax.to_str_tuple(super().provides))} + return {p_mapping[k]: v for k,v in result.items()} @@ -2043,10 +2037,9 @@ def compute(self, **kwargs): result = super().compute(**_kwargs) - - p_mapping = {{v: k for k, v in zip(strax.to_str_tuple(self.provides), - strax.to_str_tuple(super().provides))}} - return {{p_mapping[k]: v for k,v in result.items()}} + p_mapping = {v: k for k, v in zip(strax.to_str_tuple(self.provides), + strax.to_str_tuple(super().provides))} + return {p_mapping[k]: v for k,v in result.items()} @@ -2490,10 +2483,9 @@ def compute(self, **kwargs): result = super().compute(**_kwargs) - - p_mapping = {{v: k for k, v in zip(strax.to_str_tuple(self.provides), - strax.to_str_tuple(super().provides))}} - return {{p_mapping[k]: v for k,v in result.items()}} + p_mapping = {v: k for k, v in zip(strax.to_str_tuple(self.provides), + strax.to_str_tuple(super().provides))} + return {p_mapping[k]: v for k,v in result.items()} @@ -2529,10 +2521,9 @@ def compute(self, **kwargs): result = super().compute(**_kwargs) - - p_mapping = {{v: k for k, v in zip(strax.to_str_tuple(self.provides), - strax.to_str_tuple(super().provides))}} - return {{p_mapping[k]: v for k,v in result.items()}} + p_mapping = {v: k for k, v in zip(strax.to_str_tuple(self.provides), + strax.to_str_tuple(super().provides))} + return {p_mapping[k]: v for k,v in result.items()} @@ -2568,10 +2559,9 @@ def compute(self, **kwargs): result = super().compute(**_kwargs) - - p_mapping = {{v: k for k, v in zip(strax.to_str_tuple(self.provides), - strax.to_str_tuple(super().provides))}} - return {{p_mapping[k]: v for k,v in result.items()}} + p_mapping = {v: k for k, v in zip(strax.to_str_tuple(self.provides), + strax.to_str_tuple(super().provides))} + return {p_mapping[k]: v for k,v in result.items()} From 756a6889f16dd5c70924dc301779c0d142c1c827 Mon Sep 17 00:00:00 2001 From: cfuselli Date: Tue, 7 Mar 2023 15:20:20 +0000 Subject: [PATCH 34/56] fix plugin repetition --- bin/straxer | 2 + .../plugins/raw_records_sv/_build_copies.py | 2 +- .../raw_records_sv/_software_veto_copies.py | 502 ------------------ 3 files changed, 3 insertions(+), 503 deletions(-) diff --git a/bin/straxer b/bin/straxer index 3c783b443..0cc91800b 100644 --- a/bin/straxer +++ b/bin/straxer @@ -11,6 +11,8 @@ import json import importlib import sys +sys.path.insert(0, '/daq_common/carlo/test_software/straxen_software_veto') + def parse_args(): parser = argparse.ArgumentParser( diff --git a/straxen/plugins/raw_records_sv/_build_copies.py b/straxen/plugins/raw_records_sv/_build_copies.py index b3e1ff522..47aa599f9 100644 --- a/straxen/plugins/raw_records_sv/_build_copies.py +++ b/straxen/plugins/raw_records_sv/_build_copies.py @@ -149,7 +149,7 @@ def compute(self, **kwargs): """ - 
tofile += classtofile + tofile += classtofile _plugins.append(pl) diff --git a/straxen/plugins/raw_records_sv/_software_veto_copies.py b/straxen/plugins/raw_records_sv/_software_veto_copies.py index af60d3b67..5d5dbe00a 100644 --- a/straxen/plugins/raw_records_sv/_software_veto_copies.py +++ b/straxen/plugins/raw_records_sv/_software_veto_copies.py @@ -110,244 +110,6 @@ def compute(self, **kwargs): -class CorrectedAreasSV(straxen.CorrectedAreas): - depends_on = ['event_basics_sv', 'event_positions_sv'] - provides = ['corrected_areas_sv'] - dtype = [(('Start time since unix epoch [ns]', 'time'), ' Date: Wed, 8 Mar 2023 12:51:56 +0000 Subject: [PATCH 35/56] final adjustments plugins --- straxen/plugins/raw_records_sv/_build_copies.py | 9 +++++++++ straxen/plugins/raw_records_sv/_software_veto_copies.py | 1 + 2 files changed, 10 insertions(+) diff --git a/straxen/plugins/raw_records_sv/_build_copies.py b/straxen/plugins/raw_records_sv/_build_copies.py index 47aa599f9..ef337fb05 100644 --- a/straxen/plugins/raw_records_sv/_build_copies.py +++ b/straxen/plugins/raw_records_sv/_build_copies.py @@ -147,9 +147,18 @@ def compute(self, **kwargs): {output} +""" + + if pl == straxen.PulseProcessing: + classtofile += """ + allow_sloppy_chunking = True """ tofile += classtofile + + + + _plugins.append(pl) diff --git a/straxen/plugins/raw_records_sv/_software_veto_copies.py b/straxen/plugins/raw_records_sv/_software_veto_copies.py index 5d5dbe00a..c1a7c50a6 100644 --- a/straxen/plugins/raw_records_sv/_software_veto_copies.py +++ b/straxen/plugins/raw_records_sv/_software_veto_copies.py @@ -1551,6 +1551,7 @@ class PulseProcessingSV(straxen.PulseProcessing): rechunk_on_save = immutabledict(records_sv= False, veto_regions_sv= True, pulse_counts_sv= True) + allow_sloppy_chunking = True def __init__(self): super().__init__() From 8601ec57b2225399302a157c2a12ed1ce3d5fd8d Mon Sep 17 00:00:00 2001 From: cfuselli Date: Wed, 8 Mar 2023 12:52:39 +0000 Subject: [PATCH 36/56] daq worflow in progress --- bin/bootstrax | 9 ++-- bin/restrax | 135 ++++++++++++++++++++++++++++++++++++++++++++++++-- bin/straxer | 20 ++++---- 3 files changed, 146 insertions(+), 18 deletions(-) diff --git a/bin/bootstrax b/bin/bootstrax index a3c46a0fe..2f317e1b6 100755 --- a/bin/bootstrax +++ b/bin/bootstrax @@ -115,8 +115,8 @@ args = parser.parse_args() # The folder that can be used for testing bootstrax (i.e. non production # mode). 
It will be written to: -test_data_folder = ('/data/test_processed/' if - os.path.exists('/data/test_processed/') +test_data_folder = ('/data/test_pre_processed/' if + os.path.exists('/data/test_pre_processed/') else './bootstrax/') # Timeouts in seconds @@ -1210,8 +1210,6 @@ def run_strax(run_id, input_dir, targets, readout_threads, compressor, if software_veto_is_on(args): - software_veto_register_raw_records(st, args) - # software_veto_register_fly(st, run_id) _post_processing = list(post_processing) for t in targets: @@ -1241,6 +1239,9 @@ def run_strax(run_id, input_dir, targets, readout_threads, compressor, max_workers=cores) if software_veto_is_on(args): + + software_veto_register_raw_records(st, args) + import straxen.plugins.raw_records_sv._software_veto_copies as _software_veto_copies st.register_all(_software_veto_copies) diff --git a/bin/restrax b/bin/restrax index fb482d077..3564915f8 100644 --- a/bin/restrax +++ b/bin/restrax @@ -359,7 +359,12 @@ class ReStrax(daq_core.DataBases): self.log.debug('start handle_run') data_docs = self._get_data_docs(run_doc) self.log.info(f'{run_doc["number"]} -> doing {len(data_docs)}') - # self.run_software_veto(run_doc) + + software_veto_is_on = self.software_veto_is_on(run_doc) + + if software_veto_is_on: + self.log.info(f'Software veto is on, considering only software_veto docs') + data_docs, original_docs, no_sv_version_docs = self.software_veto_docs(data_docs) # Split the work in files that we will compress and files that will skip compression compress_docs = [ @@ -382,11 +387,25 @@ class ReStrax(daq_core.DataBases): for move_doc in skip_docs: self._bypass_for_data_doc(move_doc) + if software_veto_is_on: + self.log.info(f'Software veto is on, now hacking metadata, dirnames and rundoc and rundb') + data_docs = self.software_veto_hack_copies_to_originals(run_doc, data_docs) + + # Move doc if we didn't find its software veto twin + for move_doc in no_sv_version_docs: + self.log.debug(f"Moving doc {move_doc['type']} because no SV copy was found") + self._bypass_for_data_doc(move_doc) + self.finalize_execute(data_docs) # Only remove the data that we rechunked (there are now two copies), # the moved data is always just a single copy self.remove_old_docs(compress_docs) + + if software_veto_is_on: + self.log.info(f'Software veto is on, removing also original docs') + self.remove_old_docs(original_docs) + self.log.info(f'{run_doc["number"]} succes') def _get_data_docs(self, run_doc: dict): @@ -405,9 +424,117 @@ class ReStrax(daq_core.DataBases): data_docs = sorted(data_docs, key=size, reverse=True) return data_docs - def run_software_veto(self, run_doc: dict): - """This is where we can add a software veto for specific runs""" - raise NotImplementedError + def software_veto_is_on(self, run_doc: dict): + """Get if software veto is on from the rundoc""" + + # TODO now true carlo testing + return True + + def software_veto_docs(self, data_docs: ty.List[dict]) -> (ty.List[dict], ty.List[dict], ty.List[dict]): + """Replace the entire docs with only the docs of software veto data""" + + #TODO find better way to define software veto docs? + # or just make it _software_veto instead of _sv? 
+ # AND put the suffix somewhere common not hardcoded everywhere + + #TODO if no counterpart, keep also that + consider_sv_docs = [d for d in data_docs if d['type'].endswith('_sv')] + no_sv_version_docs = #TODO + original_docs = #TODO [d for d in data_docs if not d['type'].endswith('_sv')] + + return (consider_sv_docs, original_docs, no_sv_version_docs) + + + def software_veto_hack_copies_to_originals(self, run_doc: dict, data_docs: ty.List[dict], original_docs: ty.List[dict]) -> ty.List[dict]: + """ + Here we do difficult software veto stuff. We need to fake that the now recompressed + data is the same lineage as the original data. + We need to update the metadata, the directory names and the run database. + """ + + for d in data_docs: + + type_in = d['type'] + type_orig = d['type'].replace('_sv', '') + + # TODO check if they are more than one + d_orig = [d for d in original_docs if d['type'] == type_orig][0] + + lineage_in = d['meta']['lineage_hash'] + lineage_orig = d_orig['meta']['lineage_hash'] + + dir_in = d['location'] + dir_orig = d_orig['location'] + dir_out = self.renamed_path(dir_in) + + + split = dir_out.split('-') + dir_out_hack = '-'.join(split[:-1]) + '-' + lineage_orig + os.rename(dir_out, dir_out_hack) + dir_out = dir_out_hack + + backend = strax.FileSytemBackend() + md_orig = backend.get_metadata(dir_orig) + md_in = backend.get_metadata(dir_in) + md_out = backend.get_metadata(dir_out) + + # Keep track of plugins used for veto + md_out['software_veto_lineage'] = md_out['lineage'] + + # Change filenames in chunk info + # TODO get correct number of chunks + for i_chunk in range(len(md_out['chunks'])): + + chunk_name = md_out['chunks'][i_chunk]['filename'] + run_id, lineage, chunk_number = chunk_name.split('-') + new_chunk_name = '-'.join([run_id, lineage_orig, chunk_number]) + md_out['chunks'][i_chunk]['filename'] = new_chunk_name + os.rename(chunk_name, new_chunk_name) + + # Change lineage, kind and type to original raw_records + for key in ['data_kind', 'data_type', 'lineage', 'lineage_hash']: + md_out[key] = md_orig[key] + + # TODO source from FileSaver + json_options = dict(sort_keys=True, indent=4) + new_md_json_name = '-'.join([type_orig, lineage_orig, 'metadata.json']) + with open(os.path.join(dir_out, new_md_json_name), mode='w') as f: + f.write(json.dumps(md_out, **json_options)) + + # TODO remove the old metadata file + old_md_json_name = '-'.join([type_in, lineage_in, 'metadata.json']) + os.remove(os.path.join(dir_out, old_md_json_name)) + + # TODO remove originals from rundb + # find example in ajax + self._delete_data_from_rundb(run_doc, original_docs) + + def _delete_data_from_rundb(self, run_doc, data_docs, reason='Software veto applied'): + """ + Delete data and update the rundoc + :param run_doc: rundoc + :param data_docs: docs of data to be deleted + """ + # TODO + # Remove the data location from the rundoc and append it to the 'deleted_data' entries + + self.log.info('Deleting original data from rundb') + for ddoc in data_docs: + self.log.info(f"Deleting {ddoc['location']}") + for k in ddoc.copy().keys(): + if k in ['location', 'meta', 'protocol']: + ddoc.pop(k) + + ddoc.update({'at': now(), 'by': f'restrax.{self.hostname}', 'reason': reason}) + if args.production: + run_coll.update_one({'_id': run_doc['_id']}, + {"$addToSet": {'deleted_data': ddoc}, + "$pull": {"data": + {"type": run_doc['type'], + "host": {'$in': ['daq', self.hostname]}}}}) + else: + log.info(f'Update ddoc with : {ddoc}') + def rechunk_docs(self, run_doc: dict, data_docs: ty.List[dict]) -> None: """ diff --git a/bin/straxer b/bin/straxer
index 0cc91800b..c4f4e21ea 100644 --- a/bin/straxer +++ b/bin/straxer @@ -13,7 +13,6 @@ import sys sys.path.insert(0, '/daq_common/carlo/test_software/straxen_software_veto') - def parse_args(): parser = argparse.ArgumentParser( description='Process a single run with straxen', @@ -139,6 +138,14 @@ def setup_context(args): logging.info(f'set context kwargs {args.context_kwargs}') st = getattr(context_module, args.context)(**args.context_kwargs) + if args.software_veto is not False: + import straxen.plugins.raw_records_sv.software_veto as software_veto + veto_plugin = getattr(straxen.plugins.raw_records_sv.software_veto, args.software_veto) + st.register(veto_plugin) + + import straxen.plugins.raw_records_sv._software_veto_copies as _software_veto_copies + st.register_all(_software_veto_copies) + if args.config_kwargs: logging.info(f'set context options to {args.config_kwargs}') st.set_config(to_dict_tuple(args.config_kwargs)) @@ -149,14 +156,6 @@ def setup_context(args): if args.diagnose_sorting: st.set_config(dict(diagnose_sorting=True)) - if args.software_veto: - import straxen.plugins.raw_records_sv.software_veto as software_veto - veto_plugin = getattr(straxen.plugins.raw_records_sv.software_veto, args.software_veto) - st.register(veto_plugin) - - import straxen.plugins.raw_records_sv._software_veto_copies as _software_veto_copies - st.register_all(_software_veto_copies) - st.context_config['allow_multiprocess'] = args.multiprocess st.context_config['allow_shm'] = args.shm st.context_config['allow_lazy'] = not (args.notlazy is True) @@ -228,7 +227,8 @@ def main(args): # Reactivate after https://github.com/XENONnT/straxen/issues/586 logging.info(f'Checking availabilty') - logging.info(f'Available\n{str(st.available_for_run(args.run_id))}') + # Deactivate temporarily for software_veto + # logging.info(f'Available\n{str(st.available_for_run(args.run_id))}') logging.info('Infer start/end') try: From f84923ce8f65afc99565d2bf051156c22645db39 Mon Sep 17 00:00:00 2001 From: cfuselli Date: Wed, 8 Mar 2023 13:41:48 +0000 Subject: [PATCH 37/56] pre test commit --- bin/restrax | 57 +++++++++++++++++++++++++++++++++++------------------ 1 file changed, 38 insertions(+), 19 deletions(-) diff --git a/bin/restrax b/bin/restrax index 3564915f8..b8c5fa6f7 100644 --- a/bin/restrax +++ b/bin/restrax @@ -57,6 +57,8 @@ def parse_args(): help='Only bother with doing the recompression if there are more than this many chunks') parser.add_argument('--bypass_mode', action='store_true', help='Stop recompression and just rename folders. 
Use with care!') + parser.add_argument('--test_software_veto', action='store_true', + help='Test software veto overwriting info from rundoc') actions = parser.add_mutually_exclusive_group() actions.add_argument('--undying', action='store_true', help="Except any error and ignore it") @@ -234,8 +236,10 @@ class ReStrax(daq_core.DataBases): data_docs = [] for folder in folders: if os.path.exists(os.path.join(self.write_to, folder)): - # Don't do work twice - continue + self.log.info(f'Data exists, removing {os.path.join(self.write_to, folder)} to do it again') + # We are testing, delete everything and do it again + shutil.rmtree(os.path.join(self.write_to, folder)) + if len(split := folder.split('-')) and len(split[0]) == 6: run_id, data_type, lineage = split @@ -427,8 +431,9 @@ class ReStrax(daq_core.DataBases): def software_veto_is_on(self, run_doc: dict): """Get if software veto is on from the rundoc""" - # TODO now true carlo testing - return True + if args.test_software_veto and not args.production: + + return True def software_veto_docs(self, data_docs: ty.List[dict]) -> (ty.List[dict], ty.List[dict], ty.List[dict]): """Replace the entire docs with only the docs of software veto data""" @@ -437,10 +442,17 @@ class ReStrax(daq_core.DataBases): # or just make it _software_veto instead of _sv? # AND put the suffix somewhere common not hardcoded everywhere - #TODO if no counterpart, keep also that - consider_sv_docs = [d for d in data_docs if d['type'].endswith('_sv')] - no_sv_version_docs = #TODO - original_docs = #TODO [d for d in data_docs if not d['type'].endswith('_sv')] + #TODO find better variable names here + consider_sv_docs = [d for d in data_docs if d['type'].endswith('_sv')] + all_original_docs = [d for d in data_docs if not d['type'].endswith('_sv')] + + consider_sv_docs_types = [d['type'][:-3] for d in consider_sv_docs] + all_original_docs_types = [d['type'] for d in all_original_docs] + + no_sv_version_types = [t for t in original_docs_types if t not in consider_sv_docs_types] + + original_docs = [d for d in all_original_docs if d['type'] not in no_sv_version_types] + no_sv_version_docs = [d for d in all_original_docs if d['type'] in no_sv_version_types] return (consider_sv_docs, original_docs, no_sv_version_docs) @@ -452,13 +464,16 @@ class ReStrax(daq_core.DataBases): We need to update the """ + # TODO style of this function is terrible for d in data_docs: type_in = d['type'] type_orig = d['type'].replace('_sv', '') # TODO check if they are more than one - d_orig = [d for d in original_docs if d['type'] == type_orig][0] + d_orig = [d for d in original_docs if d['type'] == type_orig] + assert len(d_orig) == 1, " two copies of the same data?! 
" + d_orig = d_orig[0] lineage_in = d['meta']['lineage_hash'] lineage_orig = d_orig['meta']['lineage_hash'] @@ -467,7 +482,6 @@ class ReStrax(daq_core.DataBases): dir_orig = d_orig['location'] dir_out = self.renamed_path(dir_in) - split = dir_out.split('-') dir_out_hack = '-'.join(split[:-1]) + '-' + lineage_orig os.rename(dir_out, dir_out_hack) @@ -482,7 +496,6 @@ class ReStrax(daq_core.DataBases): md_out['software_veto_lineage'] = md_out['lineage'] # Change filenames in chunk info - # TODO get correct number of chunks for i_chunk in range(len(md_out['chunks'])): chunk_name = md_out['chunks'][i_chunk]['filename'] @@ -501,17 +514,18 @@ class ReStrax(daq_core.DataBases): with open(os.path.join(dir_out, new_md_json_name), mode='w') as f: f.write(json.dumps(md_out, **json_options)) - # TODO remove the old metadata file + # remove the old metadata file old_md_json_name = '-'.join([type_in, lineage_in, 'metadata.json']) os.remove(os.path.join(dir_out, old_md_json_name)) - # TODO remove originals from rundb - # find example in ajax + # remove originals from rundb self._delete_data_from_rundb(run_doc, original_docs) + + def _delete_data_from_rundb(self, run_doc, data_docs, reason='Software veto applied'): """ - Delete data and update the rundoc + Delete data and update the rundoc. Used for software veto. :param run_doc: rundoc :param data_docs: docs of data to be deleted """ @@ -840,10 +854,12 @@ class ReStrax(daq_core.DataBases): self.log.info('Rundoc updated') def remove_old_docs(self, done_data_docs: ty.List[dict]): - for data_doc in done_data_docs: - loc = data_doc.get('location', '??') - assert 'pre_processed' in loc - self._remove_dir(loc) + """Remove old docs only if in production mode""" + if args.production: + for data_doc in done_data_docs: + loc = data_doc.get('location', '??') + assert 'pre_processed' in loc + self._remove_dir(loc) def take_a_nap(self, dt: ty.Optional[int] = None): time.sleep(dt if dt is not None else self.nap_time) @@ -870,6 +886,9 @@ class ReStrax(daq_core.DataBases): self.log.info(f'Move {source} -> {dest}') if self.production: os.rename(source, dest) + else: + # if we are testing just copy the files + shutil.copytree(source, dest, dirs_exist_ok=True) def _remove_dir(self, directory: str) -> None: """Remove directory (when in production mode)""" From 497812948271963916de6fc7661bade3cdafd41e Mon Sep 17 00:00:00 2001 From: cfuselli Date: Wed, 8 Mar 2023 14:05:11 +0000 Subject: [PATCH 38/56] soome bugs, now works --- bin/restrax | 46 +++++++++++++++++++++++++++------------------- 1 file changed, 27 insertions(+), 19 deletions(-) diff --git a/bin/restrax b/bin/restrax index b8c5fa6f7..4ec3f8f92 100644 --- a/bin/restrax +++ b/bin/restrax @@ -33,6 +33,7 @@ import typing as ty from straxen import daq_core from memory_profiler import memory_usage import glob +import json from ast import literal_eval from straxen.daq_core import now @@ -186,6 +187,7 @@ class ReStrax(daq_core.DataBases): self.recompress_min_chunks = args.recompress_min_chunks self.bypass_mode = args.bypass_mode self.overwrite_settings() + self.test_software_veto = args.test_software_veto def infinite_loop(self, close=False) -> None: """Core of restrax, recompress the data followed by several validation steps""" @@ -251,7 +253,7 @@ class ReStrax(daq_core.DataBases): data_docs.append({'host': self.hostname, 'location': os.path.join(self.read_from, folder), 'type': data_type, - 'linage_hash': lineage, + 'meta': {'lineage_hash': lineage}, }) if not len(data_docs): return None @@ -393,12 +395,14 @@ class 
ReStrax(daq_core.DataBases): if software_veto_is_on: self.log.info(f'Software veto is on, now hacking metadata, dirnames and rundoc and rundb') - data_docs = self.software_veto_hack_copies_to_originals(run_doc, data_docs) - + data_docs = self.software_veto_hack_copies_to_originals(run_doc, data_docs, original_docs) + # Move doc if we didn't find its software veto twin for move_doc in no_sv_version_docs: self.log.debug(f"Moving doc {move_doc['type']} because no SV copy was found") self._bypass_for_data_doc(move_doc) + + self._delete_data_from_rundb(run_doc, original_docs) self.finalize_execute(data_docs) @@ -431,9 +435,10 @@ class ReStrax(daq_core.DataBases): def software_veto_is_on(self, run_doc: dict): """Get if software veto is on from the rundoc""" - if args.test_software_veto and not args.production: + if self.test_software_veto and not self.production: - return True + return True + def software_veto_docs(self, data_docs: ty.List[dict]) -> (ty.List[dict], ty.List[dict], ty.List[dict]): """Replace the entire docs with only the docs of software veto data""" @@ -449,7 +454,7 @@ class ReStrax(daq_core.DataBases): consider_sv_docs_types = [d['type'][:-3] for d in consider_sv_docs] all_original_docs_types = [d['type'] for d in all_original_docs] - no_sv_version_types = [t for t in original_docs_types if t not in consider_sv_docs_types] + no_sv_version_types = [t for t in all_original_docs_types if t not in consider_sv_docs_types] original_docs = [d for d in all_original_docs if d['type'] not in no_sv_version_types] no_sv_version_docs = [d for d in all_original_docs if d['type'] in no_sv_version_types] @@ -476,22 +481,25 @@ class ReStrax(daq_core.DataBases): d_orig = d_orig[0] lineage_in = d['meta']['lineage_hash'] + self.log.debug(f'carlo print d_orig {d_orig}') lineage_orig = d_orig['meta']['lineage_hash'] dir_in = d['location'] dir_orig = d_orig['location'] dir_out = self.renamed_path(dir_in) - split = dir_out.split('-') - dir_out_hack = '-'.join(split[:-1]) + '-' + lineage_orig - os.rename(dir_out, dir_out_hack) - dir_out = dir_out_hack - backend = strax.FileSytemBackend() md_orig = backend.get_metadata(dir_orig) md_in = backend.get_metadata(dir_in) md_out = backend.get_metadata(dir_out) + split = dir_out.split('-') + split[-2] = split[-2].replace('_sv', '') + dir_out_hack = '-'.join(split[:-1]) + '-' + lineage_orig + + os.rename(dir_out, dir_out_hack) + dir_out = dir_out_hack + # Keep track of plugins used for veto md_out['software_veto_lineage'] = md_out['lineage'] @@ -499,10 +507,11 @@ class ReStrax(daq_core.DataBases): for i_chunk in range(len(md_out['chunks'])): chunk_name = md_out['chunks'][i_chunk]['filename'] - run_id, lineage, chunk_number = chunk_name.split('-') - new_chunk_name = '-'.join([run_id, lineage_orig, chunk_number]) + data_type, lineage, chunk_number = chunk_name.split('-') + data_type = data_type.replace('_sv', '') + new_chunk_name = '-'.join([data_type, lineage_orig, chunk_number]) md_out['chunks'][i_chunk]['filename'] = new_chunk_name - os.rename(chunk_name, new_chunk_name) + os.rename(os.path.join(dir_out, chunk_name), os.path.join(dir_out, new_chunk_name)) # Change lineage, kind and type to original raw_records for key in ['data_kind', 'data_type', 'lineage', 'lineage_hash']: @@ -519,7 +528,6 @@ class ReStrax(daq_core.DataBases): os.remove(os.path.join(dir_out, old_md_json_name)) # remove originals from rundb - self._delete_data_from_rundb(run_doc, original_docs) @@ -534,20 +542,20 @@ class ReStrax(daq_core.DataBases): self.log.info('Deleting original data from 
rundb') for ddoc in data_docs: - self.log.info(f'Deleting {ddoc['location']}') + self.log.info(f"Deleting {ddoc['location']}") for k in ddoc.copy().keys(): if k in ['location', 'meta', 'protocol']: ddoc.pop(k) ddoc.update({'at': now(), 'by': f'restrax.{self.hostname}', 'reason': reason}) - if args.production: + if self.production: run_coll.update_one({'_id': run_doc['_id']}, {"$addToSet": {'deleted_data': ddoc}, "$pull": {"data": {"type": run_doc['type'], "host": {'$in': ['daq', self.hostname]}}}}) else: - log.info(f'Update ddoc with : {ddoc}') + self.log.info(f'Would update ddoc. But we are testing, so no db stuff!') def rechunk_docs(self, run_doc: dict, data_docs: ty.List[dict]) -> None: @@ -855,7 +863,7 @@ class ReStrax(daq_core.DataBases): def remove_old_docs(self, done_data_docs: ty.List[dict]): """Remove old docs only if in production mode""" - if args.production: + if self.production: for data_doc in done_data_docs: loc = data_doc.get('location', '??') assert 'pre_processed' in loc From cd2db903ab317e3c682d79489b837cd8a991a49a Mon Sep 17 00:00:00 2001 From: cfuselli Date: Wed, 8 Mar 2023 15:31:47 +0000 Subject: [PATCH 39/56] clean functions thx joran --- bin/restrax | 37 ++++++++++++++----------------------- 1 file changed, 14 insertions(+), 23 deletions(-) diff --git a/bin/restrax b/bin/restrax index 4ec3f8f92..93683dfb2 100644 --- a/bin/restrax +++ b/bin/restrax @@ -443,23 +443,15 @@ class ReStrax(daq_core.DataBases): def software_veto_docs(self, data_docs: ty.List[dict]) -> (ty.List[dict], ty.List[dict], ty.List[dict]): """Replace the entire docs with only the docs of software veto data""" - #TODO find better way to define software veto docs? - # or just make it _software_veto instead of _sv? - # AND put the suffix somewhere common not hardcoded everywhere + #TODO put the suffix _sv somewhere common not hardcoded everywhere + def _filter_docs(ts, docs): + return [d for d in docs if d['type'] in ts] - #TODO find better variable names here - consider_sv_docs = [d for d in data_docs if d['type'].endswith('_sv')] - all_original_docs = [d for d in data_docs if not d['type'].endswith('_sv')] - - consider_sv_docs_types = [d['type'][:-3] for d in consider_sv_docs] - all_original_docs_types = [d['type'] for d in all_original_docs] - - no_sv_version_types = [t for t in all_original_docs_types if t not in consider_sv_docs_types] - - original_docs = [d for d in all_original_docs if d['type'] not in no_sv_version_types] - no_sv_version_docs = [d for d in all_original_docs if d['type'] in no_sv_version_types] - - return (consider_sv_docs, original_docs, no_sv_version_docs) + all_targets = set([d['type'] for d in docs]) + no_sv = {t for t in all_targets if f'{t}_sv' not in all_targets} + sv = {t for t in all_targets if t.endswith('_sv')} + originals = all_targets - sv - no_sv + return _filter_docs(docs, sv), _filter_docs(docs, originals), _filter_docs(docs, no_sv) def software_veto_hack_copies_to_originals(self, run_doc: dict, data_docs: ty.List[dict], original_docs: ty.List[dict]) -> ty.List[dict]: @@ -862,12 +854,11 @@ class ReStrax(daq_core.DataBases): self.log.info('Rundoc updated') def remove_old_docs(self, done_data_docs: ty.List[dict]): - """Remove old docs only if in production mode""" - if self.production: - for data_doc in done_data_docs: - loc = data_doc.get('location', '??') - assert 'pre_processed' in loc - self._remove_dir(loc) + """Remove old docs, only if in production mode""" + for data_doc in done_data_docs: + loc = data_doc.get('location', '??') + assert 'pre_processed' in 
loc + self._remove_dir(loc) def take_a_nap(self, dt: ty.Optional[int] = None): time.sleep(dt if dt is not None else self.nap_time) @@ -900,7 +891,7 @@ class ReStrax(daq_core.DataBases): def _remove_dir(self, directory: str) -> None: """Remove directory (when in production mode)""" - self.log.info(f'Remove {directory}') + self.log.info(f'Remove {directory} (not if testing)') if self.production: shutil.rmtree(directory) From aa497e4906ff6a4fa430fb208abe342ea6911d61 Mon Sep 17 00:00:00 2001 From: cfuselli Date: Wed, 8 Mar 2023 17:15:10 +0000 Subject: [PATCH 40/56] change channel --- straxen/plugins/raw_records/daqreader.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/straxen/plugins/raw_records/daqreader.py b/straxen/plugins/raw_records/daqreader.py index db6648cff..c97e5b3cb 100644 --- a/straxen/plugins/raw_records/daqreader.py +++ b/straxen/plugins/raw_records/daqreader.py @@ -15,7 +15,7 @@ # Just below the TPC acquisition monitor, see # https://xe1t-wiki.lngs.infn.it/doku.php?id=xenon:xenonnt:dsg:daq:channel_groups ARTIFICIAL_DEADTIME_CHANNEL = 799 -SOFTWARE_VETO_CHANNEL = 798 +SOFTWARE_VETO_CHANNEL = 797 class ArtificialDeadtimeInserted(UserWarning): From 6ecb059a1235e6d4600925209f6d5b5e93c3853d Mon Sep 17 00:00:00 2001 From: cfuselli Date: Wed, 8 Mar 2023 17:19:17 +0000 Subject: [PATCH 41/56] revert temporary --- bin/restrax | 32 +++++++++++++++++++++++--------- 1 file changed, 23 insertions(+), 9 deletions(-) diff --git a/bin/restrax b/bin/restrax index 93683dfb2..145c8713d 100644 --- a/bin/restrax +++ b/bin/restrax @@ -443,15 +443,29 @@ class ReStrax(daq_core.DataBases): def software_veto_docs(self, data_docs: ty.List[dict]) -> (ty.List[dict], ty.List[dict], ty.List[dict]): """Replace the entire docs with only the docs of software veto data""" - #TODO put the suffix _sv somewhere common not hardcoded everywhere - def _filter_docs(ts, docs): - return [d for d in docs if d['type'] in ts] - - all_targets = set([d['type'] for d in docs]) - no_sv = {t for t in all_targets if f'{t}_sv' not in all_targets} - sv = {t for t in all_targets if t.endswith('_sv')} - originals = all_targets - sv - no_sv - return _filter_docs(docs, sv), _filter_docs(docs, originals), _filter_docs(docs, no_sv) + # #TODO put the suffix _sv somewhere common not hardcoded everywhere + # def _filter_docs(ts, docs): + # return [d for d in docs if d['type'] in ts] + + # all_targets = set([d['type'] for d in data_docs]) + # no_sv = {t for t in all_targets if f'{t}_sv' not in all_targets} + # sv = {t for t in all_targets if t.endswith('_sv')} + # originals = all_targets - sv - no_sv + # return _filter_docs(data_docs, sv), _filter_docs(data_docs, originals), _filter_docs(data_docs, no_sv) + + #TODO find better variable names here + consider_sv_docs = [d for d in data_docs if d['type'].endswith('_sv')] + all_original_docs = [d for d in data_docs if not d['type'].endswith('_sv')] + + consider_sv_docs_types = [d['type'][:-3] for d in consider_sv_docs] + all_original_docs_types = [d['type'] for d in all_original_docs] + + no_sv_version_types = [t for t in all_original_docs_types if t not in consider_sv_docs_types] + + original_docs = [d for d in all_original_docs if d['type'] not in no_sv_version_types] + no_sv_version_docs = [d for d in all_original_docs if d['type'] in no_sv_version_types] + + return (consider_sv_docs, original_docs, no_sv_version_docs) def software_veto_hack_copies_to_originals(self, run_doc: dict, data_docs: ty.List[dict], original_docs: ty.List[dict]) -> ty.List[dict]: From 
7e79a14b295603e5447b91e9d323e696e8785b72 Mon Sep 17 00:00:00 2001 From: cfuselli Date: Sun, 12 Mar 2023 12:45:22 +0000 Subject: [PATCH 42/56] change channel --- straxen/plugins/raw_records/daqreader.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/straxen/plugins/raw_records/daqreader.py b/straxen/plugins/raw_records/daqreader.py index c97e5b3cb..c813f7334 100644 --- a/straxen/plugins/raw_records/daqreader.py +++ b/straxen/plugins/raw_records/daqreader.py @@ -15,7 +15,7 @@ # Just below the TPC acquisition monitor, see # https://xe1t-wiki.lngs.infn.it/doku.php?id=xenon:xenonnt:dsg:daq:channel_groups ARTIFICIAL_DEADTIME_CHANNEL = 799 -SOFTWARE_VETO_CHANNEL = 797 +SOFTWARE_VETO_CHANNEL = 792 class ArtificialDeadtimeInserted(UserWarning): From eabc99b7b2d31e4dba958318bf16ec8c4a6ca01b Mon Sep 17 00:00:00 2001 From: cfuselli Date: Mon, 13 Mar 2023 16:04:41 +0000 Subject: [PATCH 43/56] testing workflow --- bin/restrax | 15 ++++++++++++--- 1 file changed, 12 insertions(+), 3 deletions(-) diff --git a/bin/restrax b/bin/restrax index 145c8713d..f3240d849 100644 --- a/bin/restrax +++ b/bin/restrax @@ -60,6 +60,9 @@ def parse_args(): help='Stop recompression and just rename folders. Use with care!') parser.add_argument('--test_software_veto', action='store_true', help='Test software veto overwriting info from rundoc') + parser.add_argument('--test_remove_existing', action='store_true', + help='Test software veto overwriting info from rundoc') + actions = parser.add_mutually_exclusive_group() actions.add_argument('--undying', action='store_true', help="Except any error and ignore it") @@ -188,6 +191,7 @@ class ReStrax(daq_core.DataBases): self.bypass_mode = args.bypass_mode self.overwrite_settings() self.test_software_veto = args.test_software_veto + self.test_remove_existing = args.test_remove_existing def infinite_loop(self, close=False) -> None: """Core of restrax, recompress the data followed by several validation steps""" @@ -237,10 +241,15 @@ class ReStrax(daq_core.DataBases): first_run = f'{int(self.process):06}' data_docs = [] for folder in folders: + + if os.path.exists(os.path.join(self.write_to, folder)): - self.log.info(f'Data exists, removing {os.path.join(self.write_to, folder)} to do it again') - # We are testing, delete everything and do it again - shutil.rmtree(os.path.join(self.write_to, folder)) + if self.test_remove_existing: + self.log.info(f'Data exists, removing {os.path.join(self.write_to, folder)} to do it again') + # We are testing, delete everything and do it again + shutil.rmtree(os.path.join(self.write_to, folder)) + else: + continue if len(split := folder.split('-')) and len(split[0]) == 6: run_id, data_type, lineage = split From 4b4722f2e0e195f0dba58a8cdc4161cb11fa0d59 Mon Sep 17 00:00:00 2001 From: cfuselli Date: Tue, 14 Mar 2023 16:29:17 +0000 Subject: [PATCH 44/56] add dummyveto, refactor restrax --- bin/restrax | 89 +++++++++++-------- .../plugins/raw_records_sv/software_veto.py | 14 +++ 2 files changed, 66 insertions(+), 37 deletions(-) diff --git a/bin/restrax b/bin/restrax index f3240d849..a89e7511c 100644 --- a/bin/restrax +++ b/bin/restrax @@ -241,14 +241,9 @@ class ReStrax(daq_core.DataBases): first_run = f'{int(self.process):06}' data_docs = [] for folder in folders: - - if os.path.exists(os.path.join(self.write_to, folder)): - if self.test_remove_existing: - self.log.info(f'Data exists, removing {os.path.join(self.write_to, folder)} to do it again') - # We are testing, delete everything and do it again - 
shutil.rmtree(os.path.join(self.write_to, folder)) - else: + if not self.test_remove_existing: + # Don't do the work again continue if len(split := folder.split('-')) and len(split[0]) == 6: @@ -258,7 +253,14 @@ class ReStrax(daq_core.DataBases): first_run = run_id if run_id != first_run: continue + self.log.info(f'Do {folder}') + if self.test_remove_existing: + if os.path.exists(os.path.join(self.write_to, folder)): + # We want to test again, we do the work again + self.log.info(f'Data exists, removing {os.path.join(self.write_to, folder)} to do it again') + #shutil.rmtree(os.path.join(self.write_to, folder)) + data_docs.append({'host': self.hostname, 'location': os.path.join(self.read_from, folder), 'type': data_type, @@ -268,6 +270,7 @@ class ReStrax(daq_core.DataBases): return None run_doc = self.run_coll.find_one({'number': int(first_run)}, projection=projection) run_doc['data'] = data_docs + return run_doc def _find_production_work(self, projection) -> ty.Optional[dict]: @@ -351,6 +354,10 @@ class ReStrax(daq_core.DataBases): def _bypass_for_data_doc(self, data_doc: dict) -> None: source = data_doc['location'] dest = self.renamed_path(source) + if _check_before_moving(source, dest): + self._move_dir(source, dest) + + def _check_before_moving(source, dest) -> bool: if os.path.exists(dest): # Honestly don't know how this could happen, but we have to be carefull here # We are dealing with single copies, so this is a tricky operation. @@ -366,8 +373,8 @@ class ReStrax(daq_core.DataBases): message = f'Trying to move {source}->{dest} but {source} does not exist?!' self.log.error(message) self.log_warning(message) - return - self._move_dir(source, dest) + return False + return True def handle_run(self, run_doc: dict) -> None: """For a given batch of data_docs of a given run, do all the rechunking steps""" @@ -445,36 +452,44 @@ class ReStrax(daq_core.DataBases): """Get if software veto is on from the rundoc""" if self.test_software_veto and not self.production: - return True + + elif self.production: + + + + + def software_veto_docs(self, data_docs: ty.List[dict]) -> (ty.List[dict], ty.List[dict], ty.List[dict]): """Replace the entire docs with only the docs of software veto data""" # #TODO put the suffix _sv somewhere common not hardcoded everywhere - # def _filter_docs(ts, docs): - # return [d for d in docs if d['type'] in ts] + def _filter_docs(docs, ts): + return [d for d in docs if d['type'] in ts] + + all_targets = set([d['type'] for d in data_docs]) + sv = {t for t in all_targets if t.endswith('_sv')} + all_targets -= sv + no_sv = {t for t in all_targets if f'{t}_sv' not in sv} + original_copies = all_targets - no_sv + return _filter_docs(data_docs, sv), _filter_docs(data_docs, original_copies), _filter_docs(data_docs, no_sv) - # all_targets = set([d['type'] for d in data_docs]) - # no_sv = {t for t in all_targets if f'{t}_sv' not in all_targets} - # sv = {t for t in all_targets if t.endswith('_sv')} - # originals = all_targets - sv - no_sv - # return _filter_docs(data_docs, sv), _filter_docs(data_docs, originals), _filter_docs(data_docs, no_sv) - #TODO find better variable names here - consider_sv_docs = [d for d in data_docs if d['type'].endswith('_sv')] - all_original_docs = [d for d in data_docs if not d['type'].endswith('_sv')] + # #TODO find better variable names here + # consider_sv_docs = [d for d in data_docs if d['type'].endswith('_sv')] + # all_original_docs = [d for d in data_docs if not d['type'].endswith('_sv')] - consider_sv_docs_types = [d['type'][:-3] for d in 
consider_sv_docs] - all_original_docs_types = [d['type'] for d in all_original_docs] + # consider_sv_docs_types = [d['type'][:-3] for d in consider_sv_docs] + # all_original_docs_types = [d['type'] for d in all_original_docs] - no_sv_version_types = [t for t in all_original_docs_types if t not in consider_sv_docs_types] + # no_sv_version_types = [t for t in all_original_docs_types if t not in consider_sv_docs_types] - original_docs = [d for d in all_original_docs if d['type'] not in no_sv_version_types] - no_sv_version_docs = [d for d in all_original_docs if d['type'] in no_sv_version_types] + # original_docs = [d for d in all_original_docs if d['type'] not in no_sv_version_types] + # no_sv_version_docs = [d for d in all_original_docs if d['type'] in no_sv_version_types] - return (consider_sv_docs, original_docs, no_sv_version_docs) + # return (consider_sv_docs, original_docs, no_sv_version_docs) def software_veto_hack_copies_to_originals(self, run_doc: dict, data_docs: ty.List[dict], original_docs: ty.List[dict]) -> ty.List[dict]: @@ -492,11 +507,10 @@ class ReStrax(daq_core.DataBases): # TODO check if they are more than one d_orig = [d for d in original_docs if d['type'] == type_orig] - assert len(d_orig) == 1, " two copies of the same data?! " + assert len(d_orig) == 1, f" two copies of the same data?! {type_orig}" d_orig = d_orig[0] lineage_in = d['meta']['lineage_hash'] - self.log.debug(f'carlo print d_orig {d_orig}') lineage_orig = d_orig['meta']['lineage_hash'] dir_in = d['location'] @@ -512,7 +526,10 @@ class ReStrax(daq_core.DataBases): split[-2] = split[-2].replace('_sv', '') dir_out_hack = '-'.join(split[:-1]) + '-' + lineage_orig - os.rename(dir_out, dir_out_hack) + if _check_before_moving(source=dir_out, dest=dir_out_hack): + os.rename(dir_out, dir_out_hack) + + dir_out = dir_out_hack # Keep track of plugins used for veto @@ -520,7 +537,6 @@ class ReStrax(daq_core.DataBases): # Change filenames in chunk info for i_chunk in range(len(md_out['chunks'])): - chunk_name = md_out['chunks'][i_chunk]['filename'] data_type, lineage, chunk_number = chunk_name.split('-') data_type = data_type.replace('_sv', '') @@ -533,6 +549,7 @@ class ReStrax(daq_core.DataBases): md_out[key] = md_orig[key] # TODO source from FileSaver + # write the new metadata file json_options = dict(sort_keys=True, indent=4) new_md_json_name = '-'.join([type_orig, lineage_orig, 'metadata.json']) with open(os.path.join(dir_out, new_md_json_name), mode='w') as f: @@ -542,8 +559,6 @@ class ReStrax(daq_core.DataBases): old_md_json_name = '-'.join([type_in, lineage_in, 'metadata.json']) os.remove(os.path.join(dir_out, old_md_json_name)) - # remove originals from rundb - def _delete_data_from_rundb(self, run_doc, data_docs, reason='Software veto applied'): @@ -880,8 +895,9 @@ class ReStrax(daq_core.DataBases): """Remove old docs, only if in production mode""" for data_doc in done_data_docs: loc = data_doc.get('location', '??') - assert 'pre_processed' in loc - self._remove_dir(loc) + if self.production: + assert 'pre_processed' in loc + self._remove_dir(loc) def take_a_nap(self, dt: ty.Optional[int] = None): time.sleep(dt if dt is not None else self.nap_time) @@ -914,9 +930,8 @@ class ReStrax(daq_core.DataBases): def _remove_dir(self, directory: str) -> None: """Remove directory (when in production mode)""" - self.log.info(f'Remove {directory} (not if testing)') - if self.production: - shutil.rmtree(directory) + self.log.info(f'Remove {directory}') + shutil.rmtree(directory) def log_warning(self, message: str, **kw) -> 
None: self.log.warning(message) diff --git a/straxen/plugins/raw_records_sv/software_veto.py b/straxen/plugins/raw_records_sv/software_veto.py index a65131400..ccb47484e 100644 --- a/straxen/plugins/raw_records_sv/software_veto.py +++ b/straxen/plugins/raw_records_sv/software_veto.py @@ -5,6 +5,20 @@ export, __all__ = strax.exporter() +@export +class DummyVeto(RawRecordsSoftwareVetoBase): + """ + Test software veto framework: keep eveything + """ + + __version__ = 'dummy-veto-0.0.1' + + def software_veto_mask(self, e): + + m = e['time'] > 0 + + return m + @export class RadialVeto(RawRecordsSoftwareVetoBase): """ From dfb34cef2fdbdd088d8f2a7fd6c77a982c006b8a Mon Sep 17 00:00:00 2001 From: cfuselli Date: Thu, 16 Mar 2023 08:33:49 +0000 Subject: [PATCH 45/56] refactor restrax sv --- bin/restrax | 195 +++++++++++++++++++++++++--------------------------- 1 file changed, 92 insertions(+), 103 deletions(-) diff --git a/bin/restrax b/bin/restrax index a89e7511c..e234ddcdf 100644 --- a/bin/restrax +++ b/bin/restrax @@ -135,6 +135,13 @@ class ReStrax(daq_core.DataBases): 'raw_records', 'raw_records_nv', 'raw_records_mv', 'raw_records_he', 'records', 'records_nv', 'records_mv') + # TODO do better here + _raw_record_types = raw_record_types + for t in raw_record_types: + _raw_record_types += (t+'_sv',) + raw_record_types = _raw_record_types + + exclude_modes: ty.Iterable = ( 'pmtgain', 'pmtap', 'exttrig', 'noise', 'nVeto_LASER_calibration', 'mv_diffuserballs', 'mv_fibres', 'mv_darkrate') @@ -259,7 +266,7 @@ class ReStrax(daq_core.DataBases): if os.path.exists(os.path.join(self.write_to, folder)): # We want to test again, we do the work again self.log.info(f'Data exists, removing {os.path.join(self.write_to, folder)} to do it again') - #shutil.rmtree(os.path.join(self.write_to, folder)) + shutil.rmtree(os.path.join(self.write_to, folder)) data_docs.append({'host': self.hostname, 'location': os.path.join(self.read_from, folder), @@ -354,10 +361,10 @@ class ReStrax(daq_core.DataBases): def _bypass_for_data_doc(self, data_doc: dict) -> None: source = data_doc['location'] dest = self.renamed_path(source) - if _check_before_moving(source, dest): + if self._check_before_moving(source, dest): self._move_dir(source, dest) - def _check_before_moving(source, dest) -> bool: + def _check_before_moving(self, source, dest) -> bool: if os.path.exists(dest): # Honestly don't know how this could happen, but we have to be carefull here # We are dealing with single copies, so this is a tricky operation. @@ -382,11 +389,8 @@ class ReStrax(daq_core.DataBases): data_docs = self._get_data_docs(run_doc) self.log.info(f'{run_doc["number"]} -> doing {len(data_docs)}') - software_veto_is_on = self.software_veto_is_on(run_doc) + data_docs, original_docs = self.software_veto_docs(data_docs) - if software_veto_is_on: - self.log.info(f'Software veto is on, considering only software_veto docs') - data_docs, original_docs, no_sv_version_docs = self.software_veto_docs(data_docs) # Split the work in files that we will compress and files that will skip compression compress_docs = [ @@ -396,6 +400,7 @@ class ReStrax(daq_core.DataBases): d for d in data_docs if self.should_skip_compression(run_doc, d) ] + self.log.debug(f'Compressing {len(compress_docs)} docs and skipping (i.e. move) {len(skip_docs)} docs') assert len(compress_docs) + len(skip_docs) == len(data_docs), "one and one is three?! 
" @@ -409,26 +414,17 @@ class ReStrax(daq_core.DataBases): for move_doc in skip_docs: self._bypass_for_data_doc(move_doc) - if software_veto_is_on: - self.log.info(f'Software veto is on, now hacking metadata, dirnames and rundoc and rundb') - data_docs = self.software_veto_hack_copies_to_originals(run_doc, data_docs, original_docs) + data_docs = self.software_veto_hack_copies_to_originals(run_doc, data_docs, original_docs) - # Move doc if we didn't find its software veto twin - for move_doc in no_sv_version_docs: - self.log.debug(f"Moving doc {move_doc['type']} because no SV copy was found") - self._bypass_for_data_doc(move_doc) - - self._delete_data_from_rundb(run_doc, original_docs) self.finalize_execute(data_docs) # Only remove the data that we rechunked (there are now two copies), # the moved data is always just a single copy self.remove_old_docs(compress_docs) - - if software_veto_is_on: - self.log.info(f'Software veto is on, removing also original docs') - self.remove_old_docs(original_docs) + + self.remove_old_docs(original_docs) + self._delete_data_from_rundb(run_doc, original_docs) self.log.info(f'{run_doc["number"]} succes') @@ -448,21 +444,8 @@ class ReStrax(daq_core.DataBases): data_docs = sorted(data_docs, key=size, reverse=True) return data_docs - def software_veto_is_on(self, run_doc: dict): - """Get if software veto is on from the rundoc""" - - if self.test_software_veto and not self.production: - return True - elif self.production: - - - - - - - - def software_veto_docs(self, data_docs: ty.List[dict]) -> (ty.List[dict], ty.List[dict], ty.List[dict]): + def software_veto_docs(self, data_docs: ty.List[dict]) -> (ty.List[dict], ty.List[dict]): """Replace the entire docs with only the docs of software veto data""" # #TODO put the suffix _sv somewhere common not hardcoded everywhere @@ -470,26 +453,15 @@ class ReStrax(daq_core.DataBases): return [d for d in docs if d['type'] in ts] all_targets = set([d['type'] for d in data_docs]) - sv = {t for t in all_targets if t.endswith('_sv')} - all_targets -= sv - no_sv = {t for t in all_targets if f'{t}_sv' not in sv} - original_copies = all_targets - no_sv - return _filter_docs(data_docs, sv), _filter_docs(data_docs, original_copies), _filter_docs(data_docs, no_sv) - - - # #TODO find better variable names here - # consider_sv_docs = [d for d in data_docs if d['type'].endswith('_sv')] - # all_original_docs = [d for d in data_docs if not d['type'].endswith('_sv')] - # consider_sv_docs_types = [d['type'][:-3] for d in consider_sv_docs] - # all_original_docs_types = [d['type'] for d in all_original_docs] - - # no_sv_version_types = [t for t in all_original_docs_types if t not in consider_sv_docs_types] - - # original_docs = [d for d in all_original_docs if d['type'] not in no_sv_version_types] - # no_sv_version_docs = [d for d in all_original_docs if d['type'] in no_sv_version_types] + original_copies = {t for t in all_targets if t+'_sv' in all_targets} + + to_process = all_targets - original_copies - # return (consider_sv_docs, original_docs, no_sv_version_docs) + for t in original_copies: + self.log.info(f'Detected SoftwareVeto for {t}, so I will process its SV copy') + + return _filter_docs(data_docs, to_process), _filter_docs(data_docs, original_copies) def software_veto_hack_copies_to_originals(self, run_doc: dict, data_docs: ty.List[dict], original_docs: ty.List[dict]) -> ty.List[dict]: @@ -498,69 +470,75 @@ class ReStrax(daq_core.DataBases): data is the same lineage as the original data. 
We need to update the """ + new_data_docs = data_docs.copy() # TODO style of this function is terrible - for d in data_docs: + for _i, d in enumerate(data_docs): - type_in = d['type'] - type_orig = d['type'].replace('_sv', '') + if d['type'].endswith('_sv'): - # TODO check if they are more than one - d_orig = [d for d in original_docs if d['type'] == type_orig] - assert len(d_orig) == 1, f" two copies of the same data?! {type_orig}" - d_orig = d_orig[0] + type_in = d['type'] + type_orig = d['type'].replace('_sv', '') - lineage_in = d['meta']['lineage_hash'] - lineage_orig = d_orig['meta']['lineage_hash'] + # TODO check if they are more than one + d_orig = [d for d in original_docs if d['type'] == type_orig] + assert len(d_orig) == 1, f" two copies of the same data?! {type_orig}" + d_orig = d_orig[0] - dir_in = d['location'] - dir_orig = d_orig['location'] - dir_out = self.renamed_path(dir_in) - - backend = strax.FileSytemBackend() - md_orig = backend.get_metadata(dir_orig) - md_in = backend.get_metadata(dir_in) - md_out = backend.get_metadata(dir_out) + lineage_in = d['meta']['lineage_hash'] + lineage_orig = d_orig['meta']['lineage_hash'] - split = dir_out.split('-') - split[-2] = split[-2].replace('_sv', '') - dir_out_hack = '-'.join(split[:-1]) + '-' + lineage_orig - - if _check_before_moving(source=dir_out, dest=dir_out_hack): - os.rename(dir_out, dir_out_hack) - + dir_in = d['location'] + dir_orig = d_orig['location'] + dir_out = self.renamed_path(dir_in) - dir_out = dir_out_hack + backend = strax.FileSytemBackend() + md_orig = backend.get_metadata(dir_orig) + md_in = backend.get_metadata(dir_in) + md_out = backend.get_metadata(dir_out) - # Keep track of plugins used for veto - md_out['software_veto_lineage'] = md_out['lineage'] - - # Change filenames in chunk info - for i_chunk in range(len(md_out['chunks'])): - chunk_name = md_out['chunks'][i_chunk]['filename'] - data_type, lineage, chunk_number = chunk_name.split('-') - data_type = data_type.replace('_sv', '') - new_chunk_name = '-'.join([data_type, lineage_orig, chunk_number]) - md_out['chunks'][i_chunk]['filename'] = new_chunk_name - os.rename(os.path.join(dir_out, chunk_name), os.path.join(dir_out, new_chunk_name)) - - # Change lineage, kind and type to original raw_records - for key in ['data_kind', 'data_type', 'lineage', 'lineage_hash']: - md_out[key] = md_orig[key] + split = dir_out.split('-') + split[-2] = split[-2].replace('_sv', '') + dir_out_hack = '-'.join(split[:-1]) + '-' + lineage_orig - # TODO source from FileSaver - # write the new metadata file - json_options = dict(sort_keys=True, indent=4) - new_md_json_name = '-'.join([type_orig, lineage_orig, 'metadata.json']) - with open(os.path.join(dir_out, new_md_json_name), mode='w') as f: - f.write(json.dumps(md_out, **json_options)) - - # remove the old metadata file - old_md_json_name = '-'.join([type_in, lineage_in, 'metadata.json']) - os.remove(os.path.join(dir_out, old_md_json_name)) - + if self._check_before_moving(source=dir_out, dest=dir_out_hack): + self.log.debug(f'from {dir_out} to {dir_out_hack}') + os.rename(dir_out, dir_out_hack) + + dir_out = dir_out_hack + # Keep track of plugins used for veto + md_out['software_veto_lineage'] = md_out['lineage'] + + # Change filenames in chunk info + for i_chunk in range(len(md_out['chunks'])): + chunk_name = md_out['chunks'][i_chunk]['filename'] + data_type, lineage, chunk_number = chunk_name.split('-') + data_type = data_type.replace('_sv', '') + new_chunk_name = '-'.join([data_type, lineage_orig, chunk_number]) + 
md_out['chunks'][i_chunk]['filename'] = new_chunk_name + os.rename(os.path.join(dir_out, chunk_name), os.path.join(dir_out, new_chunk_name)) + + # Change lineage, kind and type to original raw_records + for key in ['data_kind', 'data_type', 'lineage', 'lineage_hash']: + md_out[key] = md_orig[key] + + # TODO source from FileSaver + # write the new metadata file + json_options = dict(sort_keys=True, indent=4) + new_md_json_name = '-'.join([type_orig, lineage_orig, 'metadata.json']) + with open(os.path.join(dir_out, new_md_json_name), mode='w') as f: + f.write(json.dumps(md_out, **json_options)) + + # remove the old metadata file + old_md_json_name = '-'.join([type_in, lineage_in, 'metadata.json']) + os.remove(os.path.join(dir_out, old_md_json_name)) + + new_data_docs[_i]['location'] = dir_out + + return new_data_docs + def _delete_data_from_rundb(self, run_doc, data_docs, reason='Software veto applied'): """ Delete data and update the rundoc. Used for software veto. @@ -709,6 +687,7 @@ class ReStrax(daq_core.DataBases): # no need to recompress data if it's only one chunk self.log.debug(f'Skip {data_type} -> only {n_chunks} chunks') return True + self.log.debug(f'Compress {data_type}') return False def do_checks(self, data_docs: ty.List[dict]) -> None: @@ -851,6 +830,16 @@ class ReStrax(daq_core.DataBases): """ # Maybe could merge this with do checks? -> Avoid opening metadate twice? # Then again, that is SO minor in the grand scheme of things, that I just leave it like this for the moment + + if not self.production: + self.log.debug(f'Will now update rundb (no, just testing)') + for data_doc in data_docs: + dir_in = data_doc.get('location', '') + dir_out = self.renamed_path(dir_in) + storage_backend = strax.FileSytemBackend() + new_metadata = storage_backend.get_metadata(dir_out) + self.log.debug(f'{dir_in} --> {dir_out}') + if not self.production or not len(data_docs): return storage_backend = strax.FileSytemBackend() From 3c9fc82d25ee7982dfafc8be586ad632b48358bc Mon Sep 17 00:00:00 2001 From: cfuselli Date: Fri, 17 Mar 2023 15:43:34 +0000 Subject: [PATCH 46/56] change software veto docs --- bin/restrax | 21 +++++++++++---------- 1 file changed, 11 insertions(+), 10 deletions(-) diff --git a/bin/restrax b/bin/restrax index e234ddcdf..fe4e64f78 100644 --- a/bin/restrax +++ b/bin/restrax @@ -58,8 +58,6 @@ def parse_args(): help='Only bother with doing the recompression if there are more than this many chunks') parser.add_argument('--bypass_mode', action='store_true', help='Stop recompression and just rename folders. 
Use with care!') - parser.add_argument('--test_software_veto', action='store_true', - help='Test software veto overwriting info from rundoc') parser.add_argument('--test_remove_existing', action='store_true', help='Test software veto overwriting info from rundoc') @@ -197,7 +195,6 @@ class ReStrax(daq_core.DataBases): self.recompress_min_chunks = args.recompress_min_chunks self.bypass_mode = args.bypass_mode self.overwrite_settings() - self.test_software_veto = args.test_software_veto self.test_remove_existing = args.test_remove_existing def infinite_loop(self, close=False) -> None: @@ -454,14 +451,19 @@ class ReStrax(daq_core.DataBases): all_targets = set([d['type'] for d in data_docs]) - original_copies = {t for t in all_targets if t+'_sv' in all_targets} - - to_process = all_targets - original_copies + have_sv_version = {t for t in all_targets if t+'_sv' in all_targets} # to delete + + to_process = all_targets - have_sv_version - for t in original_copies: + if len(have_sv_version): + # we don't want to have mixed vetoed and non vetoed types, unless it's raw_records types + no_sv_version = {t for t in all_targets if t+'_sv' not in all_targets and not t.endswith('_sv')} + assert len([t for t in no_sv_version if not t.startswith('raw_records')])==0, f'Something is wrong here, you are asking me to process a type that has no software veto version \n\n {no_sv_version} \n\n {self.raw_record_types}' + + for t in have_sv_version: self.log.info(f'Detected SoftwareVeto for {t}, so I will process its SV copy') - - return _filter_docs(data_docs, to_process), _filter_docs(data_docs, original_copies) + + return _filter_docs(data_docs, to_process), _filter_docs(data_docs, have_sv_version) def software_veto_hack_copies_to_originals(self, run_doc: dict, data_docs: ty.List[dict], original_docs: ty.List[dict]) -> ty.List[dict]: @@ -545,7 +547,6 @@ class ReStrax(daq_core.DataBases): :param run_doc: rundoc :param data_docs: docs of data to be deleted """ - # TODO # Remove the data location from the rundoc and append it to the 'deleted_data' entries self.log.info('Deleting original data from rundb') From 9ac9d77b2f9ca2f52a173e0600dd03702a546bc4 Mon Sep 17 00:00:00 2001 From: cfuselli Date: Fri, 17 Mar 2023 17:13:35 +0000 Subject: [PATCH 47/56] bootstrax now fantastic, accept rd --- bin/bootstrax | 80 ++++++++++++++++++++++++--------------------------- 1 file changed, 38 insertions(+), 42 deletions(-) diff --git a/bin/bootstrax b/bin/bootstrax index 2f317e1b6..b4ce5a213 100755 --- a/bin/bootstrax +++ b/bin/bootstrax @@ -91,7 +91,7 @@ parser.add_argument( '--max_messages', type=int, default=10, help="number of max mailbox messages") parser.add_argument( - '--software_veto', default=False, + '--software_veto_overwrite', default=None, help="Class name of veto plugin to apply (i.e.: RadialVeto). 
It's overwritten by rundoc value.") actions = parser.add_mutually_exclusive_group() @@ -516,7 +516,7 @@ def keep_target(targets, compare_with, n_fails): return kept_targets -def infer_target(rd: dict) -> dict: +def infer_target(rd: dict, software_veto_on: bool = False) -> dict: """ Check if the target should be overridden based on the mode of the DAQ for this run :param rd: rundoc @@ -525,6 +525,15 @@ def infer_target(rd: dict) -> dict: targets = args.targets.copy() post_process = args.post_process.copy() + if software_veto_on: + # TODO study which targets_sv is better to have in targets or post + _post = list(post_process) + for t in targets: + _post.append(t+'_sv') + for t in post_process: + _post.append(t+'_sv') + post_process = tuple(_post) + if args.fix_target: return {'targets': strax.to_str_tuple(targets), 'post_processing': strax.to_str_tuple(post_process)} @@ -1180,7 +1189,7 @@ def manual_fail(*, mongo_id=None, number=None, reason=''): def run_strax(run_id, input_dir, targets, readout_threads, compressor, run_start_time, samples_per_record, cores, max_messages, timeout, daq_chunk_duration, daq_overlap_chunk_duration, post_processing, - records_compressor, debug=False): + records_compressor, software_veto_name=None, debug=False): # Check mongo connection ping_dbs() # Clear the swap memory used by npshmmex @@ -1204,19 +1213,19 @@ def run_strax(run_id, input_dir, targets, readout_threads, compressor, timeout=timeout, ) + if software_veto_name is not None: + # If the software veto is on, let's register the correct extra plugins + st = software_veto_register_plugins(st, software_veto_name) + for t in ('raw_records', 'records', 'records_nv', 'hitlets_nv'): # Set the (raw)records processor to the inferred one st._plugin_class_registry[t].compressor = records_compressor + try: + st._plugin_class_registry[t+'_sv'].compressor = records_compressor + except: + pass - if software_veto_is_on(args): - - _post_processing = list(post_processing) - for t in targets: - _post_processing.append(t+'_sv') - for t in post_processing: - _post_processing.append(t+'_sv') - post_processing = tuple(_post_processing) # Make a function for running strax, call the function to process the run # This way, it can also be run inside a wrapper to profile strax @@ -1238,13 +1247,6 @@ def run_strax(run_id, input_dir, targets, readout_threads, compressor, config=strax_config, max_workers=cores) - if software_veto_is_on(args): - - software_veto_register_raw_records(st, args) - - import straxen.plugins.raw_records_sv._software_veto_copies as _software_veto_copies - st.register_all(_software_veto_copies) - if len(post_processing): for post_target in post_processing: if post_target not in st._plugin_class_registry: @@ -1397,11 +1399,13 @@ def process_run(rd, send_heartbeats=args.production): except Exception as e: fail(f"Could not find start in datetime.datetime object: {str(e)}") - run_strax_config.update(infer_target(rd)) + software_veto_on = software_veto_get_name(rd, args)['software_veto_name'] is not None + run_strax_config.update(infer_target(rd, software_veto_on)) run_strax_config.update(infer_mode(rd)) + run_strax_config.update(software_veto_get_name(rd, args)) run_strax_config['debug'] = args.debug - log.info(f"We'll try to process the run with multiprocess instead of multiprocessing (so we use dill)") + log.info(f"Processing run!") strax_proc = multiprocess.Process( target=run_strax, kwargs=run_strax_config) @@ -1680,39 +1684,31 @@ def cleanup_db(): -def software_veto_is_on(args, rundoc=None): +def 
software_veto_get_name(rd, args=None): """Decide if software veto is on based on arguments or rundoc""" + if args is not None: + if args.software_veto_overwrite is not None: + return {'software_veto_name': args.software_veto_overwrite} - # TODO add rundoc option - if (args.software_veto == None) | (args.software_veto is False): - return False - else: - return True - -def software_veto_get_name(args, rundoc=None): - """Get the needed raw_records plugin""" - - plugin_name = args.software_veto + return {'software_veto_name': rd.get('software_veto', None)} - # TODO overwrite from rundoc - # TODO add option if you want to overwrite from argument - - - return plugin_name -def software_veto_register_raw_records(st, args, rundoc=None): +def software_veto_register_plugins(st, veto_name): """Based on the selection that we want to apply, register the correct veto plugin. Probably we want to pass it as argument and if possible overwrite it from rundoc. Then register a copy of all the plugins. """ - # TODO - veto_name = software_veto_get_name(args, rundoc=None) - import straxen.plugins.raw_records_sv.software_veto as software_veto - veto_plugin = getattr(straxen.plugins.raw_records_sv.software_veto, veto_name) - st.register(veto_plugin) + raw_records_sv_plugin = getattr(software_veto, veto_name) + st.register(raw_records_sv_plugin) + + import straxen.plugins.raw_records_sv._software_veto_copies as _software_veto_copies + st.register_all(_software_veto_copies) + + return st + From 8a211eb8ff997070c4722d064897c2a8a5bb75c5 Mon Sep 17 00:00:00 2001 From: cfuselli Date: Fri, 17 Mar 2023 17:21:18 +0000 Subject: [PATCH 48/56] bootstrax fail nv_sv --- bin/bootstrax | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/bin/bootstrax b/bin/bootstrax index b4ce5a213..f0330cd01 100755 --- a/bin/bootstrax +++ b/bin/bootstrax @@ -527,6 +527,7 @@ def infer_target(rd: dict, software_veto_on: bool = False) -> dict: if software_veto_on: # TODO study which targets_sv is better to have in targets or post + # TODO ERROR ERROR ERROR this gives errors if you try to make _nv_sv and does not exist _post = list(post_process) for t in targets: _post.append(t+'_sv') @@ -534,6 +535,7 @@ def infer_target(rd: dict, software_veto_on: bool = False) -> dict: _post.append(t+'_sv') post_process = tuple(_post) + if args.fix_target: return {'targets': strax.to_str_tuple(targets), 'post_processing': strax.to_str_tuple(post_process)} @@ -599,6 +601,8 @@ def infer_target(rd: dict, software_veto_on: bool = False) -> dict: f'processing up to {targets} and postprocessing ' f'to {post_process}') + + if targets is None or not len(targets): targets = 'raw_records' if post_process is None or not len(post_process): From 3d3ddd955118cd68dcdff0d36095ff248e38fc68 Mon Sep 17 00:00:00 2001 From: cfuselli Date: Sun, 26 Mar 2023 00:45:24 +0000 Subject: [PATCH 49/56] fixed restrax.. badly --- bin/bootstrax | 13 +++++---- bin/restrax | 29 ++++++++++++------- .../raw_records_sv/_software_veto_base.py | 2 +- 3 files changed, 28 insertions(+), 16 deletions(-) diff --git a/bin/bootstrax b/bin/bootstrax index f0330cd01..703ee91e6 100755 --- a/bin/bootstrax +++ b/bin/bootstrax @@ -527,12 +527,15 @@ def infer_target(rd: dict, software_veto_on: bool = False) -> dict: if software_veto_on: # TODO study which targets_sv is better to have in targets or post - # TODO ERROR ERROR ERROR this gives errors if you try to make _nv_sv and does not exist + # TODO THIS is bad!! 
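+        # The _sv targets are hard-coded here instead of suffixing every entry of
+        # targets/post_process: not every data type has an _sv counterpart (an
+        # _nv_sv, for instance, does not exist), which is what the removed TODO above warned about.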
+ _targets = list(targets) + _targets.append('raw_records_sv') + _targets.append('peaklets_sv') + _post = list(post_process) - for t in targets: - _post.append(t+'_sv') - for t in post_process: - _post.append(t+'_sv') + _post.append('event_info_sv') + + targets = tuple(_targets) post_process = tuple(_post) diff --git a/bin/restrax b/bin/restrax index fe4e64f78..edb243853 100644 --- a/bin/restrax +++ b/bin/restrax @@ -386,6 +386,7 @@ class ReStrax(daq_core.DataBases): data_docs = self._get_data_docs(run_doc) self.log.info(f'{run_doc["number"]} -> doing {len(data_docs)}') + # Software veto data_docs, original_docs = self.software_veto_docs(data_docs) @@ -411,15 +412,16 @@ class ReStrax(daq_core.DataBases): for move_doc in skip_docs: self._bypass_for_data_doc(move_doc) + # Software veto data_docs = self.software_veto_hack_copies_to_originals(run_doc, data_docs, original_docs) - self.finalize_execute(data_docs) # Only remove the data that we rechunked (there are now two copies), # the moved data is always just a single copy self.remove_old_docs(compress_docs) + # Software veto self.remove_old_docs(original_docs) self._delete_data_from_rundb(run_doc, original_docs) @@ -451,19 +453,26 @@ class ReStrax(daq_core.DataBases): all_targets = set([d['type'] for d in data_docs]) - have_sv_version = {t for t in all_targets if t+'_sv' in all_targets} # to delete + # Software veto is not on (or broken) + if not all(x in all_targets for x in ['raw_records_sv', 'raw_records_aqmon_sv']): + normal_targets = {t for t in all_targets if not t.endswith('_sv')} + return _filter_docs(data_docs, normal_targets), _filter_docs(data_docs, []) + + # Software veto is working + else: + sv_targets = {t+'_sv' for t in all_targets if t+'_sv' in all_targets} + rr_to_pass = {t for t in all_targets if t.startswith('raw_records') and t+'_sv' not in all_targets} - to_process = all_targets - have_sv_version + to_delete = all_targets - sv_targets - rr_to_pass + to_process = all_targets - to_delete - if len(have_sv_version): - # we don't want to have mixed vetoed and non vetoed types, unless it's raw_records types - no_sv_version = {t for t in all_targets if t+'_sv' not in all_targets and not t.endswith('_sv')} - assert len([t for t in no_sv_version if not t.startswith('raw_records')])==0, f'Something is wrong here, you are asking me to process a type that has no software veto version \n\n {no_sv_version} \n\n {self.raw_record_types}' + for t in to_delete: + self.log.debug(f'Detected SV for {t} (or missing SV copy), so I will delete it') - for t in have_sv_version: - self.log.info(f'Detected SoftwareVeto for {t}, so I will process its SV copy') + for t in to_process: + self.log.debug(f'Will process {t}') - return _filter_docs(data_docs, to_process), _filter_docs(data_docs, have_sv_version) + return _filter_docs(data_docs, to_process), _filter_docs(data_docs, to_delete) def software_veto_hack_copies_to_originals(self, run_doc: dict, data_docs: ty.List[dict], original_docs: ty.List[dict]) -> ty.List[dict]: diff --git a/straxen/plugins/raw_records_sv/_software_veto_base.py b/straxen/plugins/raw_records_sv/_software_veto_base.py index bdf56d4dd..633ce8a5f 100644 --- a/straxen/plugins/raw_records_sv/_software_veto_base.py +++ b/straxen/plugins/raw_records_sv/_software_veto_base.py @@ -93,7 +93,7 @@ def compute(self, raw_records, raw_records_aqmon, events): self._software_veto_time( start=objects_to_delete['time'], end=strax.endtime(objects_to_delete), - dt=raw_records[0]['dt'] + dt=10 # TODO TODO TODO TODO bad )])) return result From 
a5dabe00da19963497249ac8a5dd7565011e4b40 Mon Sep 17 00:00:00 2001 From: cfuselli Date: Sun, 26 Mar 2023 10:34:28 +0000 Subject: [PATCH 50/56] remove carlo paths --- bin/bootstrax | 2 -- straxen/plugins/raw_records_sv/_build_copies.py | 2 -- 2 files changed, 4 deletions(-) diff --git a/bin/bootstrax b/bin/bootstrax index 703ee91e6..9458d897a 100755 --- a/bin/bootstrax +++ b/bin/bootstrax @@ -16,8 +16,6 @@ For more info, see the documentation: https://straxen.readthedocs.io/en/latest/bootstrax.html """ __version__ = '3.0.0' -import sys -sys.path.insert(0, '/daq_common/carlo/test_software/straxen_software_veto') import argparse import typing diff --git a/straxen/plugins/raw_records_sv/_build_copies.py b/straxen/plugins/raw_records_sv/_build_copies.py index ef337fb05..4ded24a07 100644 --- a/straxen/plugins/raw_records_sv/_build_copies.py +++ b/straxen/plugins/raw_records_sv/_build_copies.py @@ -1,6 +1,4 @@ import sys, os -sys.path.insert(0, '/daq_common/carlo/test_software/straxen_software_veto') - import strax import straxen from immutabledict import immutabledict From b9fb24da72626753005968e112516367816aaa8f Mon Sep 17 00:00:00 2001 From: cfuselli Date: Sun, 26 Mar 2023 11:02:35 +0000 Subject: [PATCH 51/56] bug daqreader --- straxen/plugins/raw_records/daqreader.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/straxen/plugins/raw_records/daqreader.py b/straxen/plugins/raw_records/daqreader.py index c813f7334..427b23c68 100644 --- a/straxen/plugins/raw_records/daqreader.py +++ b/straxen/plugins/raw_records/daqreader.py @@ -275,7 +275,7 @@ def _load_chunk(self, path, start, end, kind='central'): def _artificial_dead_time(self, start, end, dt): - _dtype_for = self.depends_on[0] # raw_records + _dtype_for = self.provides[0] # raw_records return strax.dict_to_rec( From 53ffbd7f501e260ab65bb21190d464b270f4ef16 Mon Sep 17 00:00:00 2001 From: cfuselli Date: Fri, 31 Mar 2023 09:44:52 +0000 Subject: [PATCH 52/56] updates for testing --- bin/bootstrax | 11 +++++++++-- bin/straxer | 8 ++++++++ straxen/plugins/raw_records_sv/software_veto.py | 2 +- 3 files changed, 18 insertions(+), 3 deletions(-) diff --git a/bin/bootstrax b/bin/bootstrax index 9458d897a..b855478bb 100755 --- a/bin/bootstrax +++ b/bin/bootstrax @@ -91,6 +91,9 @@ parser.add_argument( parser.add_argument( '--software_veto_overwrite', default=None, help="Class name of veto plugin to apply (i.e.: RadialVeto). It's overwritten by rundoc value.") +parser.add_argument( + '--test_input_folder', default=None, + help="Add to storage") actions = parser.add_mutually_exclusive_group() actions.add_argument( @@ -299,7 +302,8 @@ def new_context(cores=args.cores, # Keep the rundb but set it to readonly and local only, delete # all other storage frontends except fo the test folder. 
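    # Note: DataDirectory(..., readonly=True) below registers the test input folder as a
    # storage frontend that strax will only read from, so pre-staged test data can be
    # picked up without bootstrax ever writing into that location.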
context.storage = [context.storage[0], - strax.DataDirectory(output_folder)] + strax.DataDirectory(output_folder), + strax.DataDirectory(args.test_input_folder, readonly=True)] context.storage[0].readonly = True context.storage[0].local_only = True @@ -1342,7 +1346,10 @@ def process_run(rd, send_heartbeats=args.production): # or use the test-dir: if not osp.exists(loc): loc = os.path.join('/live_data/xenonnt_bootstrax_test/', run_id) - + # or maybe it's in the /data dir: + if not osp.exists(loc): + loc = os.path.join('/data/xenonnt_bootstrax_test/', run_id) + else: for dd in rd['data']: if 'type' not in dd: diff --git a/bin/straxer b/bin/straxer index c4f4e21ea..cd6bf9943 100644 --- a/bin/straxer +++ b/bin/straxer @@ -115,6 +115,10 @@ def parse_args(): '--add_folder', default='', help='Also add folder to st.storage') + parser.add_argument( + '--add_input_folder', + default='', + help='Also add folder to st.storage in readonly mode') parser.add_argument( '--print_alive', default=300, @@ -188,6 +192,10 @@ def setup_context(args): st.storage += [ strax.DataDirectory('./strax_data')] + if args.add_input_folder != '': + if os.path.exists(args.add_input_folder): + st.storage += [strax.DataDirectory(args.add_input_folder, readonly=True)] + if args.add_folder != '': for sf in st.storage: # Set all others to read only diff --git a/straxen/plugins/raw_records_sv/software_veto.py b/straxen/plugins/raw_records_sv/software_veto.py index ccb47484e..c08f18a7b 100644 --- a/straxen/plugins/raw_records_sv/software_veto.py +++ b/straxen/plugins/raw_records_sv/software_veto.py @@ -45,7 +45,7 @@ class HighEnergyVeto(RawRecordsSoftwareVetoBase): def software_veto_mask(self, e): - m = (e['s1_area'] > 1000) & (e['s2_area'] > 100000) + m = (e['s1_area'] > 1e4) & (e['s2_area'] > 1e5) return m From 4174b59758f51a954d0f609d7d00c6077887d7d4 Mon Sep 17 00:00:00 2001 From: cfuselli Date: Fri, 31 Mar 2023 09:52:12 +0000 Subject: [PATCH 53/56] fix test --- bin/bootstrax | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/bin/bootstrax b/bin/bootstrax index b855478bb..8a2ac832d 100755 --- a/bin/bootstrax +++ b/bin/bootstrax @@ -302,8 +302,9 @@ def new_context(cores=args.cores, # Keep the rundb but set it to readonly and local only, delete # all other storage frontends except fo the test folder. 
context.storage = [context.storage[0], - strax.DataDirectory(output_folder), - strax.DataDirectory(args.test_input_folder, readonly=True)] + strax.DataDirectory(output_folder)] + if args.test_input_folder is not None: + context.storage += [strax.DataDirectory(args.test_input_folder, readonly=True)] context.storage[0].readonly = True context.storage[0].local_only = True @@ -1349,7 +1350,7 @@ def process_run(rd, send_heartbeats=args.production): # or maybe it's in the /data dir: if not osp.exists(loc): loc = os.path.join('/data/xenonnt_bootstrax_test/', run_id) - + else: for dd in rd['data']: if 'type' not in dd: From 77cfb9f39b3bdac46e5f2ac091c7ebf2426937b3 Mon Sep 17 00:00:00 2001 From: cfuselli Date: Fri, 31 Mar 2023 13:04:20 +0000 Subject: [PATCH 54/56] add bootstrax profile mem --- bin/bootstrax | 47 +++++++++++++++++++++++++++++------------------ 1 file changed, 29 insertions(+), 18 deletions(-) diff --git a/bin/bootstrax b/bin/bootstrax index 8a2ac832d..0cc4644ea 100755 --- a/bin/bootstrax +++ b/bin/bootstrax @@ -52,6 +52,9 @@ parser = argparse.ArgumentParser( parser.add_argument( '--debug', action='store_true', help="Start strax processes with debug logging.") +parser.add_argument( + '--profile_memory', action='store_true', + help="Get memory usage.") parser.add_argument( '--profile', type=str, default='false', help="Run strax in profiling mode. Specify target filename as argument.") @@ -1735,23 +1738,31 @@ if __name__ == '__main__': multiprocessing.set_start_method('spawn') multiprocess.set_start_method('spawn') - - if not args.undying: - main() + if args.profile_memory: + from memory_profiler import memory_usage + import time + start = time.time() + mem = memory_usage(proc=(main, (args,))) + print(f"Memory profiler says peak RAM usage was: {max(mem):.1f} MB") + print(f'Took {time.time() - start:.1f} s = {(time.time() - start) / 3600:.2f} h ') + print('Bye, bye') else: - while True: - try: - main() - except (KeyboardInterrupt, SystemExit): - raise - except Exception as fatal_error: - log.error(f'Fatal warning:\tran into {fatal_error}. Try ' - f'logging error and restart bootstrax') + if not args.undying: + main() + else: + while True: try: - log_warning(f'Fatal warning:\tran into {fatal_error}', - priority='error') - except Exception as warning_error: - log.error(f'Fatal warning:\tcould not log {warning_error}') - # This usually only takes a minute or two - time.sleep(60) - log.warning('Restarting main loop') + main() + except (KeyboardInterrupt, SystemExit): + raise + except Exception as fatal_error: + log.error(f'Fatal warning:\tran into {fatal_error}. 
Try ' + f'logging error and restart bootstrax') + try: + log_warning(f'Fatal warning:\tran into {fatal_error}', + priority='error') + except Exception as warning_error: + log.error(f'Fatal warning:\tcould not log {warning_error}') + # This usually only takes a minute or two + time.sleep(60) + log.warning('Restarting main loop') From 5a6b3a190d3bf7339008ce3d2ff4ffb84c550430 Mon Sep 17 00:00:00 2001 From: cfuselli Date: Fri, 31 Mar 2023 13:06:58 +0000 Subject: [PATCH 55/56] add bootstrax profile mem --- bin/bootstrax | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bin/bootstrax b/bin/bootstrax index 0cc4644ea..7b0d02fa0 100755 --- a/bin/bootstrax +++ b/bin/bootstrax @@ -1742,7 +1742,7 @@ if __name__ == '__main__': from memory_profiler import memory_usage import time start = time.time() - mem = memory_usage(proc=(main, (args,))) + mem = memory_usage(proc=main) print(f"Memory profiler says peak RAM usage was: {max(mem):.1f} MB") print(f'Took {time.time() - start:.1f} s = {(time.time() - start) / 3600:.2f} h ') print('Bye, bye') From 2f98996f6b7ed9e3397ee1db15ba47e6d6efabfe Mon Sep 17 00:00:00 2001 From: cfuselli Date: Fri, 31 Mar 2023 13:29:37 +0000 Subject: [PATCH 56/56] can t do it --- bin/bootstrax | 48 ++++++++++++++++++------------------------------ 1 file changed, 18 insertions(+), 30 deletions(-) diff --git a/bin/bootstrax b/bin/bootstrax index 7b0d02fa0..584f218e6 100755 --- a/bin/bootstrax +++ b/bin/bootstrax @@ -52,9 +52,6 @@ parser = argparse.ArgumentParser( parser.add_argument( '--debug', action='store_true', help="Start strax processes with debug logging.") -parser.add_argument( - '--profile_memory', action='store_true', - help="Get memory usage.") parser.add_argument( '--profile', type=str, default='false', help="Run strax in profiling mode. Specify target filename as argument.") @@ -1728,7 +1725,7 @@ def software_veto_register_plugins(st, veto_name): -########################### +########################### # __MAIN__ # ########################### @@ -1738,31 +1735,22 @@ if __name__ == '__main__': multiprocessing.set_start_method('spawn') multiprocess.set_start_method('spawn') - if args.profile_memory: - from memory_profiler import memory_usage - import time - start = time.time() - mem = memory_usage(proc=main) - print(f"Memory profiler says peak RAM usage was: {max(mem):.1f} MB") - print(f'Took {time.time() - start:.1f} s = {(time.time() - start) / 3600:.2f} h ') - print('Bye, bye') + if not args.undying: + main() else: - if not args.undying: - main() - else: - while True: + while True: + try: + main() + except (KeyboardInterrupt, SystemExit): + raise + except Exception as fatal_error: + log.error(f'Fatal warning:\tran into {fatal_error}. Try ' + f'logging error and restart bootstrax') try: - main() - except (KeyboardInterrupt, SystemExit): - raise - except Exception as fatal_error: - log.error(f'Fatal warning:\tran into {fatal_error}. Try ' - f'logging error and restart bootstrax') - try: - log_warning(f'Fatal warning:\tran into {fatal_error}', - priority='error') - except Exception as warning_error: - log.error(f'Fatal warning:\tcould not log {warning_error}') - # This usually only takes a minute or two - time.sleep(60) - log.warning('Restarting main loop') + log_warning(f'Fatal warning:\tran into {fatal_error}', + priority='error') + except Exception as warning_error: + log.error(f'Fatal warning:\tcould not log {warning_error}') + # This usually only takes a minute or two + time.sleep(60) + log.warning('Restarting main loop')
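
Note on patches 54-56 above: the memory-profiling wrapper around main() was first added, then fixed (memory_usage accepts a bare callable only when it takes no arguments, hence the change from proc=(main, (args,)) to proc=main), and finally reverted. Below is a minimal standalone sketch of the same measurement kept outside bootstrax; it assumes memory_profiler is installed, and entry_point is a placeholder for whatever argument-less function is being profiled, not a bootstrax name.

    # Sketch only (not part of the patch series): measure peak RAM of an
    # argument-less entry point with memory_profiler, using the same call
    # pattern bootstrax briefly used in patches 54-55.
    import time
    from memory_profiler import memory_usage

    def entry_point():
        # placeholder for the real work, e.g. a processing main()
        return sum(range(10_000_000))

    if __name__ == '__main__':
        start = time.time()
        # memory_usage samples the process while the callable runs and
        # returns a list of MiB readings; a bare callable works only if it
        # needs no arguments, otherwise pass a (func, args, kwargs) tuple.
        mem = memory_usage(proc=entry_point)
        print(f'Peak RAM usage: {max(mem):.1f} MB')
        print(f'Took {time.time() - start:.1f} s')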
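
For the restrax software_veto_docs change earlier in this series, a worked example may help. The sketch below applies the same set arithmetic to a made-up list of stored data types (the type names are illustrative, not taken from a real run document): originals that have a software-veto copy are marked for deletion, while their _sv copies and any raw_records* type without an _sv copy are kept for processing.

    # Sketch of the target-filtering rule from software_veto_docs, applied to
    # an invented set of data types (assumes both required _sv raw-record
    # copies are present, i.e. the 'else' branch of the patched method).
    all_targets = {'raw_records', 'raw_records_aqmon', 'raw_records_nv',
                   'raw_records_sv', 'raw_records_aqmon_sv',
                   'peaklets', 'peaklets_sv'}

    sv_targets = {t + '_sv' for t in all_targets if t + '_sv' in all_targets}
    rr_to_pass = {t for t in all_targets
                  if t.startswith('raw_records') and t + '_sv' not in all_targets}
    to_delete = all_targets - sv_targets - rr_to_pass
    to_process = all_targets - to_delete

    print(sorted(to_delete))   # ['peaklets', 'raw_records', 'raw_records_aqmon']
    print(sorted(to_process))  # ['peaklets_sv', 'raw_records_aqmon_sv',
                               #  'raw_records_nv', 'raw_records_sv']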