From 0b46e16180b1114d761c256a2a47ad930ca30871 Mon Sep 17 00:00:00 2001 From: Xavier Buffat Date: Wed, 26 Mar 2025 09:55:15 +0100 Subject: [PATCH 01/14] Add possibility to set moment of individual source --- xfields/slicers/compressed_profile.py | 36 +++++++++++++++++++++++++++ 1 file changed, 36 insertions(+) diff --git a/xfields/slicers/compressed_profile.py b/xfields/slicers/compressed_profile.py index 1895690e..0f3fad25 100644 --- a/xfields/slicers/compressed_profile.py +++ b/xfields/slicers/compressed_profile.py @@ -220,3 +220,39 @@ def get_moment_profile(self, moment_name, i_turn): i_start_in_moments_data:i_end_in_moments_data]) return z_out, moment_out + +def get_source_moment_profile(self, moment_name, i_turn,i_source): + """ + Get the moment profile for a given turn. + + Parameters + ---------- + moment_name : str + The name of the moment to get + i_turn : int + The turn index, 0 <= i_turn < self.num_turns + + Returns + ------- + z_out : np.ndarray + The z positions within the moment profile + moment_out : np.ndarray + The moment profile + """ + + z_out = self._arr2ctx(np.zeros(self._N_1)) + moment_out = self._arr2ctx(np.zeros(self._N_1)) + i_moment = self.moments_names.index(moment_name) + _z_P = self._z_P or 0 + + z_out = ( + self._z_a + self.dz / 2 + - i_source * _z_P + self.dz * self._arr2ctx(np.arange(self._N_1))) + + i_start_in_moments_data = (self._N_S - i_source - 1) * self._N_aux + i_end_in_moments_data = i_start_in_moments_data + self._N_1 + moment_out = ( + self.data[i_moment, i_turn, + i_start_in_moments_data:i_end_in_moments_data]) + + return z_out, moment_out From 1c53670a8d170553b92a8696f921a97ea086a2a2 Mon Sep 17 00:00:00 2001 From: Xavier Buffat Date: Wed, 26 Mar 2025 10:22:34 +0100 Subject: [PATCH 02/14] faking bunches without reducing the compressed profile length --- xfields/beam_elements/element_with_slicer.py | 2 +- .../beam_elements/waketracker/waketracker.py | 34 ++++++++++++++++++- 2 files changed, 34 insertions(+), 2 deletions(-) 
diff --git a/xfields/beam_elements/element_with_slicer.py b/xfields/beam_elements/element_with_slicer.py index 270b8ab4..e46fa80b 100644 --- a/xfields/beam_elements/element_with_slicer.py +++ b/xfields/beam_elements/element_with_slicer.py @@ -71,7 +71,7 @@ def __init__(self, bunch_spacing_zeta=bunch_spacing_zeta, slicer_moments=slicer_moments) - if with_compressed_profile: + if with_compressed_profile: #TODO with a bunch selection, number of sources and targets should differ self._initialize_moments( zeta_range=zeta_range, # These are [a, b] in the paper num_slices=num_slices, # Per bunch, this is N_1 in the paper diff --git a/xfields/beam_elements/waketracker/waketracker.py b/xfields/beam_elements/waketracker/waketracker.py index e21a6688..60ec5758 100644 --- a/xfields/beam_elements/waketracker/waketracker.py +++ b/xfields/beam_elements/waketracker/waketracker.py @@ -61,11 +61,29 @@ def __init__(self, components, self.components = components self.pipeline_manager = None + self.fake_coupled_bunch_phases = {} + if fake_coupled_bunch_phase_x is not None: + self.fake_coupled_bunch_phases['x'] = fake_coupled_bunch_phase_x + assert beta_x is not None and beta_x > 0 + self.beta_x = beta_x + if fake_coupled_bunch_phase_y is not None: + self.fake_coupled_bunch_phases['y'] = fake_coupled_bunch_phase_y + assert beta_y is not None and beta_y > 0 + self.beta_y = beta_y + if self.fake_coupled_bunch_phases: + assert bunch_selection is not None and filling_scheme is not None + assert bunch_selection, "When faking a coupled bunch mode, only one bunch should be selected as ref." 
+ all_slicer_moments = [] for cc in self.components: assert not hasattr(cc, 'moments_data') or cc.moments_data is None all_slicer_moments += cc.source_moments + if self.fake_coupled_bunch_phases: + for moment_name in self.fake_coupled_bunch_phases.keys(): + if moment_name in all_slicer_moments: + all_slicer_moments.append('p'+moment_name) + self.all_slicer_moments = list(set(all_slicer_moments)) super().__init__( @@ -90,7 +108,6 @@ def __init__(self, components, circumference=circumference) self._flatten = _flatten - all_slicer_moments = list(set(all_slicer_moments)) def init_pipeline(self, pipeline_manager, element_name, partner_names): @@ -129,12 +146,27 @@ def track(self, particles): if status and status.on_hold == True: return status + if self.fake_coupled_bunch_phases: + self._compute_fake_bunch_moments() + for wf in self.components: wf._conv_data.track(particles, i_slot_particles=self.i_slot_particles, i_slice_particles=self.i_slice_particles, moments_data=self.moments_data) + def _compute_fake_bunch_moments(self): + conjugate_names = {'x':'px','y':'py'} + filled_slots = self.filling_scheme.nonzero()[0] + for bunch_number,slot in enumerate(filled_slots): + if slot != self.bunch_selection[0]: + moments = {} + for moment_name in self.fake_coupled_bunch_phases.keys(): + complex_normalised_moments = self.moments_data.get_source_moment_profile(moment_name,0,0) + 1j*self.beta_y*self.moments_data.get_source_moment_profile(conjugate_names[moment_name],0,0) + moments[moment_name] = np.real(complex_normalised_moments*np.exp(1j*self.fake_coupled_bunch_phases[moment_name]*(slot-self.bunch_selection[0]))) + moments['num_particles'] = self.moments_data.get_source_moment_profile('num_particles',0,0) + self.moments_data.set_moments(bunch_number,0,moments) + @property def zeta_range(self): return self.slicer.zeta_range From 18b0b9a516bca303a1c4cc3840a05935e1120505 Mon Sep 17 00:00:00 2001 From: Xavier Buffat Date: Wed, 26 Mar 2025 11:17:30 +0100 Subject: [PATCH 03/14] runs 
--- .../beam_elements/waketracker/waketracker.py | 21 ++++++++++++------- xfields/slicers/compressed_profile.py | 2 +- 2 files changed, 14 insertions(+), 9 deletions(-) diff --git a/xfields/beam_elements/waketracker/waketracker.py b/xfields/beam_elements/waketracker/waketracker.py index 60ec5758..847ae388 100644 --- a/xfields/beam_elements/waketracker/waketracker.py +++ b/xfields/beam_elements/waketracker/waketracker.py @@ -51,6 +51,9 @@ def __init__(self, components, filling_scheme=None, bunch_selection=None, num_turns=1, + fake_coupled_bunch_phase_x = None, + fake_coupled_bunch_phase_y = None, + beta_x = None, beta_y = None, circumference=None, log_moments=None, _flatten=False, @@ -62,14 +65,15 @@ def __init__(self, components, self.pipeline_manager = None self.fake_coupled_bunch_phases = {} + self.betas = {} if fake_coupled_bunch_phase_x is not None: self.fake_coupled_bunch_phases['x'] = fake_coupled_bunch_phase_x assert beta_x is not None and beta_x > 0 - self.beta_x = beta_x + self.betas['x'] = beta_x if fake_coupled_bunch_phase_y is not None: self.fake_coupled_bunch_phases['y'] = fake_coupled_bunch_phase_y assert beta_y is not None and beta_y > 0 - self.beta_y = beta_y + self.betas['y'] = beta_y if self.fake_coupled_bunch_phases: assert bunch_selection is not None and filling_scheme is not None assert bunch_selection, "When faking a coupled bunch mode, only one bunch should be selected as ref." 
@@ -157,14 +161,15 @@ def track(self, particles): def _compute_fake_bunch_moments(self): conjugate_names = {'x':'px','y':'py'} - filled_slots = self.filling_scheme.nonzero()[0] - for bunch_number,slot in enumerate(filled_slots): + for bunch_number,slot in enumerate(self.slicer.filled_slots): if slot != self.bunch_selection[0]: moments = {} for moment_name in self.fake_coupled_bunch_phases.keys(): - complex_normalised_moments = self.moments_data.get_source_moment_profile(moment_name,0,0) + 1j*self.beta_y*self.moments_data.get_source_moment_profile(conjugate_names[moment_name],0,0) + z_dummy,mom = self.moments_data.get_source_moment_profile(moment_name,0,0) + z_dummy,mom_conj = self.moments_data.get_source_moment_profile(conjugate_names[moment_name],0,0) + complex_normalised_moments = mom + (1j*self.betas[moment_name])*mom_conj moments[moment_name] = np.real(complex_normalised_moments*np.exp(1j*self.fake_coupled_bunch_phases[moment_name]*(slot-self.bunch_selection[0]))) - moments['num_particles'] = self.moments_data.get_source_moment_profile('num_particles',0,0) + z_dummy,moments['num_particles'] = self.moments_data.get_source_moment_profile('num_particles',0,0) self.moments_data.set_moments(bunch_number,0,moments) @property @@ -190,7 +195,7 @@ def num_turns(self): @property def circumference(self): return self.moments_data.circumference - + def __add__(self, other): if other == 0: @@ -208,7 +213,7 @@ def __add__(self, other): 'Bunch spacing zeta is not consistent') else: xo.assert_allclose(self.bunch_spacing_zeta, other.bunch_spacing_zeta, atol=1e-12, rtol=0) - if self.filling_scheme is None: + if self.filling_scheme is None: # TODO I don't know who wrote this, but it's bullshit assert other.filling_scheme is None, ( 'Filling scheme is not consistent') else: diff --git a/xfields/slicers/compressed_profile.py b/xfields/slicers/compressed_profile.py index 0f3fad25..fdcb02ac 100644 --- a/xfields/slicers/compressed_profile.py +++ b/xfields/slicers/compressed_profile.py 
@@ -221,7 +221,7 @@ def get_moment_profile(self, moment_name, i_turn): return z_out, moment_out -def get_source_moment_profile(self, moment_name, i_turn,i_source): + def get_source_moment_profile(self, moment_name, i_turn,i_source): """ Get the moment profile for a given turn. From c3c416a6b8c663f87d5ac03dec22eb841e728dc6 Mon Sep 17 00:00:00 2001 From: Xavier Buffat Date: Thu, 27 Mar 2025 11:59:53 +0100 Subject: [PATCH 04/14] Reduced number of targets in convolution when specifing bunch selection --- xfields/beam_elements/element_with_slicer.py | 8 ++++++++ xfields/beam_elements/waketracker/convolution.py | 8 +++++--- xfields/beam_elements/waketracker/waketracker.py | 7 ++----- 3 files changed, 15 insertions(+), 8 deletions(-) diff --git a/xfields/beam_elements/element_with_slicer.py b/xfields/beam_elements/element_with_slicer.py index e46fa80b..8d68b702 100644 --- a/xfields/beam_elements/element_with_slicer.py +++ b/xfields/beam_elements/element_with_slicer.py @@ -77,6 +77,7 @@ def __init__(self, num_slices=num_slices, # Per bunch, this is N_1 in the paper bunch_spacing_zeta=bunch_spacing_zeta, # This is P in the paper filling_scheme=filling_scheme, + bunch_selection = bunch_selection, num_turns=num_turns, circumference=circumference) @@ -104,6 +105,7 @@ def _initialize_moments( num_slices=None, # Per bunch, this is N_1 in the paper bunch_spacing_zeta=None, # This is P in the paper filling_scheme=None, + bunch_selection = None, num_turns=1, circumference=None): @@ -112,12 +114,18 @@ def _initialize_moments( num_periods = i_last_bunch + 1 else: num_periods = 1 + if bunch_selection is not None: + num_targets = 1+bunch_selection[-1]-bunch_selection[0] + else: + num_targets = None + self.moments_data = CompressedProfile( moments=self.source_moments + ['result'], zeta_range=zeta_range, num_slices=num_slices, bunch_spacing_zeta=bunch_spacing_zeta, num_periods=num_periods, + num_targets=num_targets, num_turns=num_turns, circumference=circumference, _context=self.context) 
diff --git a/xfields/beam_elements/waketracker/convolution.py b/xfields/beam_elements/waketracker/convolution.py index 17fc2d0a..99390bc7 100644 --- a/xfields/beam_elements/waketracker/convolution.py +++ b/xfields/beam_elements/waketracker/convolution.py @@ -81,13 +81,13 @@ def _initialize_conv_data(self, _flatten=False, moments_data=None, beta0=None): self._M_aux = moments_data._M_aux self._N_1 = moments_data._N_1 self._N_S = moments_data._N_S - self._N_T = moments_data._N_S + self._N_T = moments_data._N_T self._BB = 1 # B in the paper # (for now we assume that B=0 is the first bunch in time and the # last one in zeta) self._AA = self._BB - self._N_S - self._CC = self._AA - self._DD = self._BB + self._DD = self.waketracker.slicer.bunch_selection[0] + self._CC = self._DD - self._N_T # Build wake matrix self.z_wake = _build_z_wake(moments_data._z_a, moments_data._z_b, @@ -151,6 +151,7 @@ def track(self, particles, i_slot_particles, i_slice_particles, # Compute convolution self._compute_convolution(moment_names=self.component.source_moments, moments_data=moments_data) + # Apply kicks interpolated_result = particles.zeta * 0 assert moments_data.moments_names[-1] == 'result' @@ -164,6 +165,7 @@ def track(self, particles, i_slot_particles, i_slice_particles, i_slot_particles=i_slot_particles, i_slice_particles=i_slice_particles, out=interpolated_result) + # interpolated result will be zero for lost particles (so nothing to # do for them) scaling_constant = particles.q0**2 * qe**2 / ( diff --git a/xfields/beam_elements/waketracker/waketracker.py b/xfields/beam_elements/waketracker/waketracker.py index 847ae388..59c9becc 100644 --- a/xfields/beam_elements/waketracker/waketracker.py +++ b/xfields/beam_elements/waketracker/waketracker.py @@ -108,6 +108,7 @@ def __init__(self, components, num_slices=num_slices, # Per bunch, this is N_1 in the paper bunch_spacing_zeta=bunch_spacing_zeta, # This is P in the paper filling_scheme=filling_scheme, + 
bunch_selection=bunch_selection, num_turns=num_turns, circumference=circumference) @@ -143,16 +144,12 @@ def track(self, particles): cc._conv_data._initialize_conv_data(_flatten=self._flatten, moments_data=self.moments_data, beta0=beta0) - # Use common slicer from parent class to measure all moments status = super().track(particles) - if status and status.on_hold == True: return status - if self.fake_coupled_bunch_phases: self._compute_fake_bunch_moments() - for wf in self.components: wf._conv_data.track(particles, i_slot_particles=self.i_slot_particles, @@ -162,7 +159,7 @@ def track(self, particles): def _compute_fake_bunch_moments(self): conjugate_names = {'x':'px','y':'py'} for bunch_number,slot in enumerate(self.slicer.filled_slots): - if slot != self.bunch_selection[0]: + if slot != self.bunch_selection[0]: moments = {} for moment_name in self.fake_coupled_bunch_phases.keys(): z_dummy,mom = self.moments_data.get_source_moment_profile(moment_name,0,0) From 72f525de335d03af80175745f67fea0cc0e65240 Mon Sep 17 00:00:00 2001 From: Xavier Buffat Date: Mon, 7 Apr 2025 11:08:45 +0200 Subject: [PATCH 05/14] revert to main --- xfields/beam_elements/element_with_slicer.py | 10 +--------- 1 file changed, 1 insertion(+), 9 deletions(-) diff --git a/xfields/beam_elements/element_with_slicer.py b/xfields/beam_elements/element_with_slicer.py index 8d68b702..270b8ab4 100644 --- a/xfields/beam_elements/element_with_slicer.py +++ b/xfields/beam_elements/element_with_slicer.py @@ -71,13 +71,12 @@ def __init__(self, bunch_spacing_zeta=bunch_spacing_zeta, slicer_moments=slicer_moments) - if with_compressed_profile: #TODO with a bunch selection, number of sources and targets should differ + if with_compressed_profile: self._initialize_moments( zeta_range=zeta_range, # These are [a, b] in the paper num_slices=num_slices, # Per bunch, this is N_1 in the paper bunch_spacing_zeta=bunch_spacing_zeta, # This is P in the paper filling_scheme=filling_scheme, - bunch_selection = 
bunch_selection, num_turns=num_turns, circumference=circumference) @@ -105,7 +104,6 @@ def _initialize_moments( num_slices=None, # Per bunch, this is N_1 in the paper bunch_spacing_zeta=None, # This is P in the paper filling_scheme=None, - bunch_selection = None, num_turns=1, circumference=None): @@ -114,18 +112,12 @@ def _initialize_moments( num_periods = i_last_bunch + 1 else: num_periods = 1 - if bunch_selection is not None: - num_targets = 1+bunch_selection[-1]-bunch_selection[0] - else: - num_targets = None - self.moments_data = CompressedProfile( moments=self.source_moments + ['result'], zeta_range=zeta_range, num_slices=num_slices, bunch_spacing_zeta=bunch_spacing_zeta, num_periods=num_periods, - num_targets=num_targets, num_turns=num_turns, circumference=circumference, _context=self.context) From af3f2cf09ce621c414b2b5eda87e98b99b3c598f Mon Sep 17 00:00:00 2001 From: Xavier Buffat Date: Mon, 7 Apr 2025 11:09:33 +0200 Subject: [PATCH 06/14] expose waketracker for other packages (mostly xwakes) --- xfields/__init__.py | 1 + 1 file changed, 1 insertion(+) diff --git a/xfields/__init__.py b/xfields/__init__.py index e68c3215..1b7319c1 100644 --- a/xfields/__init__.py +++ b/xfields/__init__.py @@ -25,6 +25,7 @@ from .beam_elements.temp_slicer import TempSlicer from .beam_elements.electroncloud import ElectronCloud from .beam_elements.electronlens_interpolated import ElectronLensInterpolated +from .beam_elements.waketracker import WakeTracker from .general import _pkg_root from .config_tools import replace_spacecharge_with_quasi_frozen From c92174e33182b73dff1c4f455733ac8cef859d27 Mon Sep 17 00:00:00 2001 From: Xavier Buffat Date: Mon, 7 Apr 2025 11:10:53 +0200 Subject: [PATCH 07/14] revert to main --- xfields/beam_elements/waketracker/convolution.py | 8 +++----- 1 file changed, 3 insertions(+), 5 deletions(-) diff --git a/xfields/beam_elements/waketracker/convolution.py b/xfields/beam_elements/waketracker/convolution.py index 99390bc7..17fc2d0a 100644 --- 
a/xfields/beam_elements/waketracker/convolution.py +++ b/xfields/beam_elements/waketracker/convolution.py @@ -81,13 +81,13 @@ def _initialize_conv_data(self, _flatten=False, moments_data=None, beta0=None): self._M_aux = moments_data._M_aux self._N_1 = moments_data._N_1 self._N_S = moments_data._N_S - self._N_T = moments_data._N_T + self._N_T = moments_data._N_S self._BB = 1 # B in the paper # (for now we assume that B=0 is the first bunch in time and the # last one in zeta) self._AA = self._BB - self._N_S - self._DD = self.waketracker.slicer.bunch_selection[0] - self._CC = self._DD - self._N_T + self._CC = self._AA + self._DD = self._BB # Build wake matrix self.z_wake = _build_z_wake(moments_data._z_a, moments_data._z_b, @@ -151,7 +151,6 @@ def track(self, particles, i_slot_particles, i_slice_particles, # Compute convolution self._compute_convolution(moment_names=self.component.source_moments, moments_data=moments_data) - # Apply kicks interpolated_result = particles.zeta * 0 assert moments_data.moments_names[-1] == 'result' @@ -165,7 +164,6 @@ def track(self, particles, i_slot_particles, i_slice_particles, i_slot_particles=i_slot_particles, i_slice_particles=i_slice_particles, out=interpolated_result) - # interpolated result will be zero for lost particles (so nothing to # do for them) scaling_constant = particles.q0**2 * qe**2 / ( From a535efb0ba5f18c61e4770135fb7a743c43be17c Mon Sep 17 00:00:00 2001 From: Xavier Buffat Date: Mon, 7 Apr 2025 11:12:54 +0200 Subject: [PATCH 08/14] fake multibunch wake --- .../beam_elements/waketracker/waketracker.py | 22 ++++++++++--------- 1 file changed, 12 insertions(+), 10 deletions(-) diff --git a/xfields/beam_elements/waketracker/waketracker.py b/xfields/beam_elements/waketracker/waketracker.py index 59c9becc..af7e5594 100644 --- a/xfields/beam_elements/waketracker/waketracker.py +++ b/xfields/beam_elements/waketracker/waketracker.py @@ -1,5 +1,4 @@ from typing import Tuple - import numpy as np from scipy.constants import c 
as clight @@ -158,17 +157,20 @@ def track(self, particles): def _compute_fake_bunch_moments(self): conjugate_names = {'x':'px','y':'py'} + for moment_name in self.fake_coupled_bunch_phases.keys(): + z_dummy,mom = self.moments_data.get_source_moment_profile(moment_name,0,self.bunch_selection[0]) + z_dummy,mom_conj = self.moments_data.get_source_moment_profile(conjugate_names[moment_name],0,self.bunch_selection[0]) + complex_normalised_moments = mom + (1j*self.betas[moment_name])*mom_conj + for bunch_number,slot in enumerate(self.slicer.filled_slots): + if slot != self.bunch_selection[0]: + moments = {} + moments[moment_name] = np.real(complex_normalised_moments*np.exp(1j*self.fake_coupled_bunch_phases[moment_name]*(self.bunch_selection[0]-slot))) + self.moments_data.set_moments(bunch_number,0,moments) + moments = {} + z_dummy,moments['num_particles'] = self.moments_data.get_source_moment_profile('num_particles',0,self.bunch_selection[0]) for bunch_number,slot in enumerate(self.slicer.filled_slots): if slot != self.bunch_selection[0]: - moments = {} - for moment_name in self.fake_coupled_bunch_phases.keys(): - z_dummy,mom = self.moments_data.get_source_moment_profile(moment_name,0,0) - z_dummy,mom_conj = self.moments_data.get_source_moment_profile(conjugate_names[moment_name],0,0) - complex_normalised_moments = mom + (1j*self.betas[moment_name])*mom_conj - moments[moment_name] = np.real(complex_normalised_moments*np.exp(1j*self.fake_coupled_bunch_phases[moment_name]*(slot-self.bunch_selection[0]))) - z_dummy,moments['num_particles'] = self.moments_data.get_source_moment_profile('num_particles',0,0) self.moments_data.set_moments(bunch_number,0,moments) - @property def zeta_range(self): return self.slicer.zeta_range @@ -210,7 +212,7 @@ def __add__(self, other): 'Bunch spacing zeta is not consistent') else: xo.assert_allclose(self.bunch_spacing_zeta, other.bunch_spacing_zeta, atol=1e-12, rtol=0) - if self.filling_scheme is None: # TODO I don't know who wrote this, but 
it's bullshit + if self.filling_scheme is None: assert other.filling_scheme is None, ( 'Filling scheme is not consistent') else: From 9c0ccd2fb9e938d04c8596a19c19ffd04ec3e37d Mon Sep 17 00:00:00 2001 From: Xavier Buffat Date: Wed, 16 Apr 2025 15:26:04 +0200 Subject: [PATCH 09/14] Adjust number of targets when a bunch selection is provided --- xfields/beam_elements/element_with_slicer.py | 10 ++++++++++ xfields/beam_elements/waketracker/convolution.py | 8 +++++--- 2 files changed, 15 insertions(+), 3 deletions(-) diff --git a/xfields/beam_elements/element_with_slicer.py b/xfields/beam_elements/element_with_slicer.py index 270b8ab4..714fc9c2 100644 --- a/xfields/beam_elements/element_with_slicer.py +++ b/xfields/beam_elements/element_with_slicer.py @@ -77,6 +77,7 @@ def __init__(self, num_slices=num_slices, # Per bunch, this is N_1 in the paper bunch_spacing_zeta=bunch_spacing_zeta, # This is P in the paper filling_scheme=filling_scheme, + bunch_selection=bunch_selection, num_turns=num_turns, circumference=circumference) @@ -104,19 +105,28 @@ def _initialize_moments( num_slices=None, # Per bunch, this is N_1 in the paper bunch_spacing_zeta=None, # This is P in the paper filling_scheme=None, + bunch_selection=None, num_turns=1, circumference=None): + if filling_scheme is not None: i_last_bunch = np.where(filling_scheme)[0][-1] num_periods = i_last_bunch + 1 else: num_periods = 1 + + if bunch_selection is None: + num_targets = num_periods + else: + num_targets = 1+ np.max(bunch_selection)-np.min(bunch_selection) + self.moments_data = CompressedProfile( moments=self.source_moments + ['result'], zeta_range=zeta_range, num_slices=num_slices, bunch_spacing_zeta=bunch_spacing_zeta, + num_targets = num_targets, num_periods=num_periods, num_turns=num_turns, circumference=circumference, diff --git a/xfields/beam_elements/waketracker/convolution.py b/xfields/beam_elements/waketracker/convolution.py index 17fc2d0a..e7198a22 100644 --- 
a/xfields/beam_elements/waketracker/convolution.py +++ b/xfields/beam_elements/waketracker/convolution.py @@ -1,4 +1,5 @@ import numpy as np +from matplotlib import pyplot as plt from scipy.constants import e as qe import xobjects as xo @@ -81,13 +82,13 @@ def _initialize_conv_data(self, _flatten=False, moments_data=None, beta0=None): self._M_aux = moments_data._M_aux self._N_1 = moments_data._N_1 self._N_S = moments_data._N_S - self._N_T = moments_data._N_S + self._N_T = moments_data._N_T self._BB = 1 # B in the paper # (for now we assume that B=0 is the first bunch in time and the # last one in zeta) self._AA = self._BB - self._N_S - self._CC = self._AA - self._DD = self._BB + self._DD = -1*np.min(self.waketracker.slicer.bunch_selection)+1 + self._CC = self._DD - self._N_T # Build wake matrix self.z_wake = _build_z_wake(moments_data._z_a, moments_data._z_b, @@ -97,6 +98,7 @@ def _initialize_conv_data(self, _flatten=False, moments_data=None, beta0=None): moments_data.dz, self._AA, self._BB, self._CC, self._DD, moments_data._z_P) + assert beta0 is not None # here below I had to add float() to beta0 because when using Cupy # context particles.beta0[0] turns out to be a 0d array. 
To be checked From 3928d5f3eb59a79c4c9c7932c809ac6f0b6b55f2 Mon Sep 17 00:00:00 2001 From: Xavier Buffat Date: Wed, 7 May 2025 14:42:58 +0200 Subject: [PATCH 10/14] Fix typo with context --- xfields/beam_elements/waketracker/waketracker.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/xfields/beam_elements/waketracker/waketracker.py b/xfields/beam_elements/waketracker/waketracker.py index af7e5594..7715664c 100644 --- a/xfields/beam_elements/waketracker/waketracker.py +++ b/xfields/beam_elements/waketracker/waketracker.py @@ -100,7 +100,7 @@ def __init__(self, components, num_turns=num_turns, circumference=circumference, with_compressed_profile=True, - _context=self.context) + _context=self._context) self._initialize_moments( zeta_range=zeta_range, # These are [a, b] in the paper From 141d18470939834ce353fa27861cdaa3e25fe171 Mon Sep 17 00:00:00 2001 From: Xavier Buffat Date: Wed, 7 May 2025 14:43:53 +0200 Subject: [PATCH 11/14] Add cast for compatibility with GPU --- xfields/beam_elements/waketracker/convolution.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/xfields/beam_elements/waketracker/convolution.py b/xfields/beam_elements/waketracker/convolution.py index e7198a22..f75665ab 100644 --- a/xfields/beam_elements/waketracker/convolution.py +++ b/xfields/beam_elements/waketracker/convolution.py @@ -248,6 +248,6 @@ def _build_z_wake(z_a, z_b, num_turns, n_aux, m_aux, circumference, dz, z_p = 0 for ii, ll in enumerate(range( - cc - bb + 1, dd - aa)): + int(cc - bb + 1), int(dd - aa))): z_wake[tt, ii * n_aux:(ii + 1) * n_aux] = temp_z + ll * z_p return z_wake From a85a4a1dee2508b7be869ef7568712f16eb1837b Mon Sep 17 00:00:00 2001 From: Xavier Buffat Date: Fri, 9 May 2025 12:00:23 +0200 Subject: [PATCH 12/14] Convolution with scipy fft which allows multiprocessing --- xfields/beam_elements/waketracker/convolution.py | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git 
a/xfields/beam_elements/waketracker/convolution.py b/xfields/beam_elements/waketracker/convolution.py index f75665ab..e913fd8c 100644 --- a/xfields/beam_elements/waketracker/convolution.py +++ b/xfields/beam_elements/waketracker/convolution.py @@ -1,3 +1,4 @@ +import time import numpy as np from matplotlib import pyplot as plt from scipy.constants import e as qe @@ -65,13 +66,17 @@ def __init__(self, component, waketracker=None, _flatten=False, log_moments=None def my_rfft(self, data, **kwargs): if type(self._context) in (xo.ContextCpu, xo.ContextCupy): - return self._context.nplike_lib.fft.rfft(data, **kwargs) + if hasattr(self._context,'omp_num_threads'): + kwargs['workers'] = self._context.omp_num_threads + return self._context.splike_lib.fft.rfft(data, **kwargs) else: raise NotImplementedError('Waketacker implemented only for CPU and Cupy') def my_irfft(self, data, **kwargs): if type(self._context) in (xo.ContextCpu, xo.ContextCupy): - return self._context.nplike_lib.fft.irfft(data, **kwargs) + if hasattr(self._context,'omp_num_threads'): + kwargs['workers'] = self._context.omp_num_threads + return self._context.splike_lib.fft.irfft(data, **kwargs) else: raise NotImplementedError('Waketacker implemented only for CPU and Cupy') From a546dd30a1b7a08939a90573fba1d044a711f9bb Mon Sep 17 00:00:00 2001 From: Xavier Buffat Date: Fri, 9 May 2025 14:40:50 +0200 Subject: [PATCH 13/14] Parallelise computation of fake moments --- .../beam_elements/waketracker/waketracker.py | 53 +++++++++++++++---- 1 file changed, 42 insertions(+), 11 deletions(-) diff --git a/xfields/beam_elements/waketracker/waketracker.py b/xfields/beam_elements/waketracker/waketracker.py index 7715664c..e7ae50a3 100644 --- a/xfields/beam_elements/waketracker/waketracker.py +++ b/xfields/beam_elements/waketracker/waketracker.py @@ -1,5 +1,6 @@ from typing import Tuple import numpy as np +from multiprocessing import Process from scipy.constants import c as clight from scipy.constants import e as qe @@ 
-120,7 +121,6 @@ def init_pipeline(self, pipeline_manager, element_name, partner_names): partner_names=partner_names) def track(self, particles): - # Find first active particle to get beta0 if particles.state[0] > 0: beta0 = particles.beta0[0] @@ -155,22 +155,53 @@ def track(self, particles): i_slice_particles=self.i_slice_particles, moments_data=self.moments_data) + def _dephase_and_add_moment(self,moment_name,mom,start,span): + moments = {} + for bunch_number in range(start,start+span): + slot = self.slicer.filled_slots[bunch_number] + if slot != self.bunch_selection[0]: + moments[moment_name] = np.real(mom*np.exp(1j*self.fake_coupled_bunch_phases[moment_name]*(self.bunch_selection[0]-slot))) + self.moments_data.set_moments(bunch_number,0,moments) + + def _add_moment(self,moment_name,mom,start,span): + moments = {} + moments[moment_name] = mom + for bunch_number in range(start,start+span): + slot = self.slicer.filled_slots[bunch_number] + if slot != self.bunch_selection[0]: + self.moments_data.set_moments(bunch_number,0,moments) + + def loop_multiprocess(self,func,moment_name,mom): + if hasattr(self._context,'omp_num_threads') and self._context.omp_num_threads > 1: + num_chunks = self._context.omp_num_threads + num_filled_slots = len(self.slicer.filled_slots) + chunk_size = int(np.ceil(num_filled_slots/num_chunks)) + last_chunk_size = num_filled_slots-(num_chunks-1)*chunk_size + processes = [] + for i_chunck in range(1,num_chunks-1): + process = Process(target=func, args=(moment_name,mom,i_chunck*chunk_size,chunk_size)) + process.start() + processes.append(process) + if num_chunks > 1: + process = Process(target=func, args=(moment_name,mom,(num_chunks-1)*chunk_size,last_chunk_size)) + process.start() + processes.append(process) + func(moment_name,mom,0,chunk_size) + for process in processes: + process.join() + else: + func(moment_name,mom,0,len(self.slicer.filled_slots)) + def _compute_fake_bunch_moments(self): conjugate_names = {'x':'px','y':'py'} for 
moment_name in self.fake_coupled_bunch_phases.keys(): z_dummy,mom = self.moments_data.get_source_moment_profile(moment_name,0,self.bunch_selection[0]) z_dummy,mom_conj = self.moments_data.get_source_moment_profile(conjugate_names[moment_name],0,self.bunch_selection[0]) complex_normalised_moments = mom + (1j*self.betas[moment_name])*mom_conj - for bunch_number,slot in enumerate(self.slicer.filled_slots): - if slot != self.bunch_selection[0]: - moments = {} - moments[moment_name] = np.real(complex_normalised_moments*np.exp(1j*self.fake_coupled_bunch_phases[moment_name]*(self.bunch_selection[0]-slot))) - self.moments_data.set_moments(bunch_number,0,moments) - moments = {} - z_dummy,moments['num_particles'] = self.moments_data.get_source_moment_profile('num_particles',0,self.bunch_selection[0]) - for bunch_number,slot in enumerate(self.slicer.filled_slots): - if slot != self.bunch_selection[0]: - self.moments_data.set_moments(bunch_number,0,moments) + self.loop_multiprocess(self._dephase_and_add_moment,moment_name,complex_normalised_moments) + z_dummy,num_particles = self.moments_data.get_source_moment_profile('num_particles',0,self.bunch_selection[0]) + self.loop_multiprocess(self._add_moment,'num_particles',num_particles) + @property def zeta_range(self): return self.slicer.zeta_range From 2ca76945051b799095379e066185162a22bae033 Mon Sep 17 00:00:00 2001 From: Xavier Buffat Date: Wed, 14 May 2025 09:16:19 +0200 Subject: [PATCH 14/14] Remove loops in fake wake computation using numpy/cupy calls instead --- .../beam_elements/waketracker/convolution.py | 4 +- .../beam_elements/waketracker/waketracker.py | 53 ++++--------------- 2 files changed, 13 insertions(+), 44 deletions(-) diff --git a/xfields/beam_elements/waketracker/convolution.py b/xfields/beam_elements/waketracker/convolution.py index e913fd8c..56e0b343 100644 --- a/xfields/beam_elements/waketracker/convolution.py +++ b/xfields/beam_elements/waketracker/convolution.py @@ -66,7 +66,7 @@ def __init__(self, 
component, waketracker=None, _flatten=False, log_moments=None def my_rfft(self, data, **kwargs): if type(self._context) in (xo.ContextCpu, xo.ContextCupy): - if hasattr(self._context,'omp_num_threads'): + if hasattr(self._context,'omp_num_threads') and self._context.omp_num_threads > 1: kwargs['workers'] = self._context.omp_num_threads return self._context.splike_lib.fft.rfft(data, **kwargs) else: @@ -74,7 +74,7 @@ def my_rfft(self, data, **kwargs): def my_irfft(self, data, **kwargs): if type(self._context) in (xo.ContextCpu, xo.ContextCupy): - if hasattr(self._context,'omp_num_threads'): + if hasattr(self._context,'omp_num_threads') and self._context.omp_num_threads > 1: kwargs['workers'] = self._context.omp_num_threads return self._context.splike_lib.fft.irfft(data, **kwargs) else: diff --git a/xfields/beam_elements/waketracker/waketracker.py b/xfields/beam_elements/waketracker/waketracker.py index e7ae50a3..e892f076 100644 --- a/xfields/beam_elements/waketracker/waketracker.py +++ b/xfields/beam_elements/waketracker/waketracker.py @@ -1,6 +1,5 @@ from typing import Tuple import numpy as np -from multiprocessing import Process from scipy.constants import c as clight from scipy.constants import e as qe @@ -125,7 +124,7 @@ def track(self, particles): if particles.state[0] > 0: beta0 = particles.beta0[0] else: - i_alive = np.where(particles.state > 0)[0] + i_alive = self._context.nplike_lib.where(particles.state > 0)[0] if len(i_alive) == 0: return i_first = i_alive[0] @@ -147,60 +146,30 @@ def track(self, particles): status = super().track(particles) if status and status.on_hold == True: return status + if self.fake_coupled_bunch_phases: self._compute_fake_bunch_moments() + for wf in self.components: wf._conv_data.track(particles, i_slot_particles=self.i_slot_particles, i_slice_particles=self.i_slice_particles, moments_data=self.moments_data) - def _dephase_and_add_moment(self,moment_name,mom,start,span): - moments = {} - for bunch_number in 
def _compute_fake_bunch_moments(self):
    """Vectorised computation of the fake coupled-bunch moment profiles.

    Builds, for every filled slot, a dephased copy of the reference
    bunch's (``self.bunch_selection[0]``) moment profiles and writes them
    in a single ``moments_data.set_all_beam_moments`` call per moment.
    The complex normalised moment ``mom + i*beta*mom_conj`` (x/px, y/py)
    of the reference bunch is rotated by the configured coupled-bunch
    phase times the slot distance to the reference slot; the
    'num_particles' profile is replicated without dephasing.

    Bug fix: the previous revision tiled the moment matrices to
    ``max(filled_slots) + 1`` rows while the slot-number matrix had
    ``len(filled_slots)`` rows, so the element-wise product failed to
    broadcast for any filling scheme with gaps. All matrices now have
    one row per *filled* slot, matching the per-source loop this
    vectorised code replaced.
    NOTE(review): assumes ``set_all_beam_moments`` expects one row per
    filled slot (same indexing as ``set_moments``) — confirm against
    CompressedProfile.
    """
    conjugate_names = {'x': 'px', 'y': 'py'}
    nplike = self._context.nplike_lib
    ref = self.bunch_selection[0]
    n_sources = len(self.slicer.filled_slots)  # one row per filled slot
    for moment_name in self.fake_coupled_bunch_phases.keys():
        _, mom = self.moments_data.get_source_moment_profile(
            moment_name, 0, ref)
        _, mom_conj = self.moments_data.get_source_moment_profile(
            conjugate_names[moment_name], 0, ref)
        complex_normalised_moments = (
            mom + (1j * self.betas[moment_name]) * mom_conj)
        # slot number of each row, replicated along the slice axis
        slots = nplike.transpose(nplike.tile(
            self.slicer.filled_slots,
            (len(complex_normalised_moments), 1)))
        tiled_moments = nplike.tile(complex_normalised_moments,
                                    (n_sources, 1))
        all_beam_moments = nplike.real(
            tiled_moments * nplike.exp(
                1j * self.fake_coupled_bunch_phases[moment_name]
                * (ref - slots)))
        self.moments_data.set_all_beam_moments(moment_name, 0,
                                               all_beam_moments)
    _, mom = self.moments_data.get_source_moment_profile(
        'num_particles', 0, ref)
    all_beam_num_particles = nplike.tile(mom, (n_sources, 1))
    self.moments_data.set_all_beam_moments('num_particles', 0,
                                           all_beam_num_particles)