
Commit 8684a41

Merge branch 'multibunch_dev'
2 parents ec92206 + aaf6948 commit 8684a41

32 files changed: +2181 -205 lines

communication_helpers.py

Lines changed: 83 additions & 28 deletions
@@ -1,11 +1,13 @@
 import numpy as np
 from PyHEADTAIL.particles.particles import Particles
+import json
+import pickle
 
 
 def combine_float_buffers(list_of_buffers):
     N_buffers = len(list_of_buffers)
     len_buffers = np.array(map(lambda seq: float(len(seq)), list_of_buffers))
-    return np.concatenate([np.array([float(N_buffers)]), len_buffers]+list_of_buffers)
+    return np.array(np.concatenate([np.array([float(N_buffers)]), len_buffers]+list_of_buffers), dtype=np.float64)
 
 def split_float_buffers(megabuffer):
     i_mbuf = 0
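
Note: the hunk above makes combine_float_buffers return an explicit float64 mega-buffer with the layout [N_buffers, len_1, ..., len_N, buf_1 ..., buf_N]. A minimal standalone illustration of that layout (not part of the commit, written for Python 3, whereas the committed code relies on Python 2 map() semantics; the toy arrays stand in for the real per-beam buffers):

import numpy as np

# Two toy buffers standing in for the real float buffers being exchanged.
buffers = [np.array([1., 2., 3.]), np.array([10., 20.])]
lens = np.array([float(len(b)) for b in buffers])
mega = np.array(np.concatenate([np.array([float(len(buffers))]), lens] + buffers),
                dtype=np.float64)
print(mega)  # [ 2.  3.  2.  1.  2.  3. 10. 20.] -> header says: 2 buffers, of lengths 3 and 2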
@@ -39,7 +41,7 @@ def buffer_2_list_of_strings(buf):
     return strlist
 
 
-def beam_2_buffer(beam):
+def beam_2_buffer(beam, mode='pickle', verbose=False):
 
     #print beam
 
@@ -50,32 +52,62 @@ def beam_2_buffer(beam):
         raise ValueError('particlenumber_per_mp is a vector! Not implemented!')
 
 
+    # if not hasattr(beam, 'slice_info'):
+    #     sl_info_buf = np.array([-1., 0., 0., 0.])
+    # elif beam.slice_info is None:
+    #     sl_info_buf = np.array([-1., 0., 0., 0.])
+    # elif beam.slice_info == 'unsliced':
+    #     sl_info_buf = np.array([0., 0., 0., 0.])
+    # else:
+    #     sl_info_buf = np.array([1.,
+    #         beam.slice_info['z_bin_center'],
+    #         beam.slice_info['z_bin_right'],
+    #         beam.slice_info['z_bin_left']])
+
+    # Prepare slice info buffer
     if not hasattr(beam, 'slice_info'):
-        sl_info_buf = np.array([-1., 0., 0., 0.])
-    elif beam.slice_info is None:
-        sl_info_buf = np.array([-1., 0., 0., 0.])
-    elif beam.slice_info == 'unsliced':
-        sl_info_buf = np.array([0., 0., 0., 0.])
+        sinfo = None
+    else:
+        sinfo = beam.slice_info
+
+    # Beam data buffer
+    if mode=='json':
+        sinfo_str = json.dumps(sinfo)
+        sinfo_int = np.array(map(ord, sinfo_str), dtype=np.int)
+        sinfo_float_buf = sinfo_int.astype(np.float, casting='safe')
+    elif mode=='pickle':
+        pss = pickle.dumps(sinfo, protocol=2)
+        # Pad to have a multiple of 8 bytes
+        s1arr = np.frombuffer(pss, dtype='S1')
+        ll = len(s1arr)
+        s1arr_padded = np.concatenate((s1arr, np.zeros(8-ll%8, dtype='S1')))
+        # Cast to array of floats
+        f8arr = np.frombuffer(s1arr_padded, dtype=np.float64)
+        sinfo_float_buf = np.concatenate((np.array([ll], dtype=np.float64), f8arr))
     else:
-        sl_info_buf = np.array([1.,
-            beam.slice_info['z_bin_center'],
-            beam.slice_info['z_bin_right'],
-            beam.slice_info['z_bin_left']])
-
-    buf = np.concatenate((
-        np.array([float(beam.macroparticlenumber)]),
-        np.array([float(beam.particlenumber_per_mp)]),
+        raise ValueError('Unknown mode!')
+
+
+    buf = np.array(np.concatenate((
+        np.array([np.float64(beam.macroparticlenumber)]),
+        np.array([np.float64(beam.particlenumber_per_mp)]),
         np.array([beam.charge]),
         np.array([beam.mass]),
        np.array([beam.circumference]),
        np.array([beam.gamma]),
        np.atleast_1d(np.float_(beam.id)),
        beam.x, beam.xp, beam.y, beam.yp, beam.z, beam.dp,
-        sl_info_buf))
+        np.array([float(len(sinfo_float_buf))]), sinfo_float_buf)), dtype=np.float64)
+
+    if verbose:
+        print('mode=%s'%mode)
+        print('beam.macroparticlenumber:%d'%beam.macroparticlenumber)
+        print('len(buf):%d'%len(buf))
+        print('len(sinfo_float_buf):%d'%len(sinfo_float_buf))
 
     return buf
 
-def buffer_2_beam(buf):
+def buffer_2_beam(buf, mode='pickle'):
 
     if buf[0]<0:
         beam=None
@@ -121,8 +153,11 @@ def buffer_2_beam(buf):
         dp = buf[i_buf:i_buf+macroparticlenumber]
         i_buf += macroparticlenumber
 
-        slice_info_buf = buf[i_buf:i_buf+4]
-        i_buf += 4
+        slice_info_buf_size = int(buf[i_buf])
+        i_buf += 1
+
+        slice_info_buf = buf[i_buf:i_buf+slice_info_buf_size]
+        i_buf += slice_info_buf_size
 
         beam = Particles(macroparticlenumber=macroparticlenumber,
             particlenumber_per_mp=particlenumber_per_mp, charge=charge,
@@ -136,15 +171,35 @@ def buffer_2_beam(buf):
             'dp':np.atleast_1d(dp)})
 
         beam.id = np.atleast_1d(id_)
+
 
-        if slice_info_buf[0] < 0.:
-            beam.slice_info = None
-        elif slice_info_buf[0] == 0.:
-            beam.slice_info = 'unsliced'
+        if mode=='json':
+            si_int = slice_info_buf.astype(np.int)
+            si_str = ''.join(map(unichr, list(si_int)))
+            beam.slice_info = json.loads(si_str)
+        elif mode=='pickle':
+            # Get length in bytes
+            llrec = int(slice_info_buf[0])
+            s1back_padded = np.frombuffer(slice_info_buf[1:].tobytes(), dtype='S1')
+            s1back = s1back_padded[:llrec]
+            pss_rec = s1back.tobytes()
+            beam.slice_info = pickle.loads(pss_rec)
         else:
-            beam.slice_info = {\
-                'z_bin_center':slice_info_buf[1] ,
-                'z_bin_right':slice_info_buf[2],
-                'z_bin_left':slice_info_buf[3]}
-
+            raise ValueError('Unknown mode!')
+
+
+
+        # if slice_info_buf[0] < 0.:
+        #     beam.slice_info = None
+        # elif slice_info_buf[0] == 0.:
+        #     beam.slice_info = 'unsliced'
+        # else:
+        #     beam.slice_info = {\
+        #         'z_bin_center':slice_info_buf[1] ,
+        #         'z_bin_right':slice_info_buf[2],
+        #         'z_bin_left':slice_info_buf[3]}
+
+
+
+
     return beam
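
For context, the pickle branch of beam_2_buffer/buffer_2_beam above ships an arbitrary slice_info object inside the float64 beam buffer: the pickled bytes are padded to a multiple of 8, reinterpreted as float64 values, and prefixed with the true byte length so the receiver can strip the padding. A self-contained sketch of that round trip (illustration only, written for Python 3; the helper names obj_to_float_buffer and float_buffer_to_obj are not from the commit):

import pickle
import numpy as np

def obj_to_float_buffer(obj):
    # Pickle the object and pad the byte string to a multiple of 8 bytes
    pss = pickle.dumps(obj, protocol=2)
    ll = len(pss)
    padded = pss + b'\x00' * ((8 - ll % 8) % 8)
    # Reinterpret the padded bytes as float64 values; prepend the true byte length
    f8arr = np.frombuffer(padded, dtype=np.float64)
    return np.concatenate((np.array([ll], dtype=np.float64), f8arr))

def float_buffer_to_obj(buf):
    # First element carries the original byte length; the rest are the padded bytes
    ll = int(buf[0])
    raw = buf[1:].tobytes()[:ll]
    return pickle.loads(raw)

# Round-trip check with a dictionary similar in spirit to slice_info
sinfo = {'z_bin_left': -0.5, 'z_bin_right': 0.5, 'i_bunch': 3}
assert float_buffer_to_obj(obj_to_float_buffer(sinfo)) == sinfo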

gen_multibunch_beam.py

Lines changed: 59 additions & 0 deletions
@@ -0,0 +1,59 @@
+from scipy.constants import c as clight, e as qe
+import numpy as np
+
+from PyHEADTAIL.particles.slicing import UniformBinSlicer
+
+def gen_matched_multibunch_beam(machine, n_macroparticles_per_bunch, filling_pattern,
+        b_spac_s, bunch_intensity, epsn_x, epsn_y, sigma_z, non_linear_long_matching, min_inten_slice4EC):
+
+    bucket_length_m = machine.circumference/(machine.longitudinal_map.harmonics[0])
+    b_spac_m = b_spac_s*machine.beta*clight
+    b_spac_buckets = np.round(b_spac_m/bucket_length_m)
+
+    if np.abs(b_spac_buckets*bucket_length_m-b_spac_m)/b_spac_m > 0.03:
+        raise ValueError('Bunch spacing is not a multiple of the bucket length!')
+
+    if non_linear_long_matching:
+        generate_bunch = machine.generate_6D_Gaussian_bunch_matched
+    else:
+        generate_bunch = machine.generate_6D_Gaussian_bunch
+
+    list_genbunches = []
+    for i_slot, inten_slot in enumerate(filling_pattern):
+        if inten_slot>0:
+            bunch = generate_bunch(n_macroparticles_per_bunch, inten_slot*bunch_intensity, epsn_x, epsn_y, sigma_z=sigma_z)
+            bunch.z -= b_spac_buckets*bucket_length_m*i_slot
+            list_genbunches.append(bunch)
+
+    beam = sum(list_genbunches)
+
+    bucket = machine.longitudinal_map.get_bucket(gamma=machine.gamma, mass=machine.mass, charge=machine.charge)
+    # z_beam_center = bucket.z_ufp_separatrix + bucket_length - self.circumference/2.
+
+    # Here the center of the bucket
+    bucket.z_sfp
+
+    # I want to re-separate the bunches
+    buncher = UniformBinSlicer(n_slices = 0, z_sample_points = np.arange(bucket.z_sfp-len(filling_pattern)*bucket_length_m*b_spac_buckets,
+        bucket.z_sfp+bucket_length_m, bucket_length_m*b_spac_buckets))
+    buncher_slice_set = beam.get_slices(buncher, statistics=True)
+    list_bunches = beam.extract_slices(buncher, include_non_sliced='never')
+    # The bunch at position 0 is the tail
+
+    # If last bunch is empty remove it
+    if (list_bunches[0].intensity<min_inten_slice4EC):
+        list_bunches = list_bunches[1:]
+
+    # Add further information to bunches
+    for i_bb, bb in enumerate(list_bunches[::-1]): # I want bunch 0 at the head of the train
+        slice4EC = bb.intensity>min_inten_slice4EC
+        bb.slice_info['slice_4_EC'] = slice4EC
+        bb.slice_info['interact_with_EC'] = slice4EC
+        bb.slice_info['N_bunches_tot_beam'] = len(list_bunches)
+        bb.slice_info['i_bunch'] = i_bb
+        bb.slice_info['i_turn'] = 0
+
+
+
+
+    return list_bunches
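
The first lines of gen_matched_multibunch_beam convert the bunch spacing from seconds to metres and then to a whole number of RF buckets, rejecting spacings that deviate by more than 3% from an integer bucket count. A quick numerical check of that arithmetic with assumed LHC-like parameters (illustration only; in the function these values come from the machine object):

import numpy as np
from scipy.constants import c as clight

# Assumed, LHC-like numbers for illustration
circumference = 26658.883   # m
harmonic = 35640            # RF harmonic number
beta = 1.0                  # relativistic beta, ~1 at high energy
b_spac_s = 25e-9            # bunch spacing in seconds

bucket_length_m = circumference / harmonic
b_spac_m = b_spac_s * beta * clight
b_spac_buckets = np.round(b_spac_m / bucket_length_m)

# Same 3% tolerance as in the committed function
rel_err = np.abs(b_spac_buckets * bucket_length_m - b_spac_m) / b_spac_m
print(bucket_length_m, b_spac_buckets, rel_err)  # ~0.748 m, 10.0, ~0.2% < 0.03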
