|
3 | 3 | # Copyright (c) CERN, 2024. # |
4 | 4 | # ######################################### # |
5 | 5 |
|
6 | | -import json |
7 | 6 | import numpy as np |
8 | 7 | from pathlib import Path |
9 | 8 | import time |
10 | 9 | start_time = time.time() |
11 | | -import sys, os, contextlib |
12 | 10 |
|
| 11 | +import xobjects as xo |
13 | 12 | import xpart as xp |
14 | 13 | import xtrack as xt |
15 | 14 | import xcoll as xc |
16 | 15 |
|
# Run the bulk of the script on the default (single-threaded CPU) context,
# so that the prebuilt kernels can be used.
context = xo.ContextCpu()
23 | 19 |
|
# This script takes around 3 minutes on a modern CPU
# (90s preparation+interpolation, 90s tracking).
beam = 1
plane = 'H'
tilt = 0   # crystal tilt [rad]

num_turns = 200
num_particles = 50000
33 | 28 |
|
# Input data lives in the xcoll examples folder; output goes to the working directory.
path_in = xc._pkg_root.parent / 'examples'
path_out = Path.cwd()
36 | 31 |
|
37 | 32 |
|
# Load the machine model for the requested beam from json.
line = xt.Line.from_json(path_in / 'machines' / f'lhc_run3_b{beam}.json')
42 | 35 |
|
43 | 36 |
|
# Verify the aperture model of the freshly imported line.
print('\nAperture model check on imported model:')
df_imported = line.check_aperture()
assert not np.any(df_imported.has_aperture_problem)
50 | 41 |
|
51 | 42 |
|
# Initialise the collimator manager from the collimation database for this beam,
# keeping the crystals and working on the default CPU context.
# (Fix: the yaml filename had a pointless f-string prefix — it contains no placeholders.)
coll_manager = xc.CollimatorManager.from_yaml(
    path_in / 'colldb' / 'lhc_run3_crystals.yaml',
    line=line, beam=beam, ignore_crystals=False, _context=context,
)
55 | 46 |
|
56 | 47 |
|
# Install the collimators into the line (Everest scattering engine).
coll_manager.install_everest_collimators(verbose=True)
62 | 50 |
|
63 | 51 |
|
# Re-verify the aperture model now that the collimators are in place.
print('\nAperture model check after introducing collimators:')
df_with_coll = line.check_aperture()
assert not np.any(df_with_coll.has_aperture_problem)
70 | 56 |
|
71 | 57 |
|
# Name of the primary crystal for this beam and plane.
# The string comparison keeps this working whether `beam` is an int or a string.
_is_b1 = f'{beam}' == '1'
_cell = 6 if plane == 'V' else (4 if _is_b1 else 5)
_side = 'l' if _is_b1 else 'r'
tcpc = f"tcpc{plane.lower()}.a{_cell}{_side}7.b{beam}"
74 | 60 |
|
75 | 61 |
|
76 | 62 | # Build the tracker |
|
coll_manager.set_openings()


# Optimise the line for tracking speed.
line.optimize_for_tracking()
87 | 77 | # # Generate initial pencil distribution on crystal |
88 | 78 | # part = coll_manager.generate_pencil_on_collimator(tcpc, num_particles=num_particles) |
89 | 79 | # Generate initial halo |
|
97 | 87 | _buffer=coll_manager._part_buffer |
98 | 88 | ) |
99 | 89 |
|
100 | | -# Optimise the line |
101 | | -line.optimize_for_tracking() |
102 | | -idx = line.element_names.index(tcpc) |
103 | | -part.at_element = idx |
104 | | -part.start_tracking_at_element = idx |
105 | | - |
106 | 90 |
|
# Apply the crystal tilt under study (left jaw).
line[tcpc].tilt_L = tilt
|
line[tcpc].ydim = 0.05   # crystal extent along y — presumably metres; TODO confirm units
112 | 96 |
|
113 | 97 |
|
# Switch the line to an OpenMP context so tracking can use all available cores.
line.discard_tracker()
line.build_tracker(_context=xo.ContextCpu(omp_num_threads='auto'))
# TODO: the iobuffer should be moved as well in case impacts are recorded.


# Track!
coll_manager.enable_scattering()
line.track(part, num_turns=num_turns, time=True, with_progress=1)
coll_manager.disable_scattering()
print(f"Done tracking in {line.time_last_track:.1f}s.")
119 | 109 |
|
120 | 110 |
|
# Move the line back to the default context, where all prebuilt kernels are
# available for the aperture interpolation.
line.discard_tracker()
line.build_tracker(_context=xo.ContextCpu())


# Save the lossmap to json; it can be loaded, combined (for more statistics),
# and plotted with the 'lossmaps' package.
coll_manager.lossmap(part, file=Path(path_out, f'lossmap_B{beam}{plane}.json'))
124 | 119 |
|
125 | 120 |
|
126 | 121 | # Save a summary of the collimator losses to a text file |
|
129 | 124 | print(summary) |
130 | 125 |
|
131 | 126 |
|
132 | | -# Impacts |
133 | | -# ======= |
134 | | -imp = coll_manager.impacts.to_pandas() |
135 | | -outfile = Path(path_out, f'cry_{round(tilt*1e6)}urad_impacts_B{beam}{plane}.json') |
136 | | -imp.to_json(outfile) |
| 127 | +# # Impacts |
| 128 | +# # ======= |
| 129 | +# imp = coll_manager.impacts.to_pandas() |
| 130 | +# outfile = Path(path_out, f'cry_{round(tilt*1e6)}urad_impacts_B{beam}{plane}.json') |
| 131 | +# imp.to_json(outfile) |
| 132 | + |
| 133 | +# # Only keep the first impact |
| 134 | +# imp.drop_duplicates(subset=['parent_id'], inplace=True) |
| 135 | +# assert np.unique(imp.interaction_type) == ['Enter Jaw'] |
137 | 136 |
|
138 | | -# Only keep the first impact |
139 | | -imp.drop_duplicates(subset=['parent_id'], inplace=True) |
140 | | -assert np.unique(imp.interaction_type) == ['Enter Jaw'] |
| 137 | +# # Only keep those first impacts that are on the crystal |
| 138 | +# first_impacts = imp[imp.collimator==tcpc].parent_id |
| 139 | +# num_first_impacts = len(first_impacts) |
| 140 | +# assert num_first_impacts==len(np.unique(first_impacts)) |
141 | 141 |
|
142 | | -# Only keep those first impacts that are on the crystal |
143 | | -first_impacts = imp[imp.collimator==tcpc].parent_id |
144 | | -num_first_impacts = len(first_impacts) |
145 | | -assert num_first_impacts==len(np.unique(first_impacts)) |
| 142 | +# ineff = nabs/num_first_impacts |
| 143 | +# outfile = Path(path_out, f'cry_{round(tilt*1e6)}urad_ineff_B{beam}{plane}.json') |
| 144 | +# with outfile.open('w') as fid: |
| 145 | +# json.dump({'first_impacts': num_first_impacts, 'nabs': nabs, 'ineff': ineff}, fid) |
146 | 146 |
|
147 | | -ineff = nabs/num_first_impacts |
148 | | -outfile = Path(path_out, f'cry_{round(tilt*1e6)}urad_ineff_B{beam}{plane}.json') |
149 | | -with outfile.open('w') as fid: |
150 | | - json.dump({'first_impacts': num_first_impacts, 'nabs': nabs, 'ineff': ineff}, fid) |
| 147 | +# print(f"Out of {num_first_impacts} particles hitting the crystal {tcpc} (angle {round(tilt*1e6)}urad) first, {round(nabs)} are absorbed in the crystal (for an inefficiency of {ineff:5}).") |
| 148 | +# print(f"Critical angle is {round(line[tcpc].critical_angle*1.e6, 1)}urad.") |
151 | 149 |
|
152 | | -print(f"Out of {num_first_impacts} particles hitting the crystal {tcpc} (angle {round(tilt*1e6)}urad) first, {round(nabs)} are absorbed in the crystal (for an inefficiency of {ineff:5}.") |
153 | | -print(f"Critical angle is {round(line[tcpc].critical_angle*1.e6, 1)}urad.") |
# Report the total wall-clock time of the whole script.
_elapsed = time.time() - start_time
print(f"Total calculation time {_elapsed}s")
155 | 151 |
|
156 | | -exit() |
0 commit comments