Commit cdad450

Linter configuration and fixes (#109)
1 parent 6d0bb76 · commit cdad450

23 files changed: +962 -765 lines changed

doc/conf.py

Lines changed: 1 addition & 2 deletions
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
 #
 # pyLHC documentation build configuration file, created by
 # sphinx-quickstart on Tue Feb 6 12:10:18 2018.
@@ -97,7 +96,7 @@ def about_package(init_posixpath: pathlib.Path) -> dict:
 
 # Override link in 'Edit on Github'
 rst_prolog = f"""
-:github_url: {ABOUT_PYLHC['__url__']}
+:github_url: {ABOUT_PYLHC["__url__"]}
 """
 
 # The version info for the project you're documenting, acts as replacement for
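
A note on the quote swap inside `rst_prolog`: replacing `ABOUT_PYLHC['__url__']` with `ABOUT_PYLHC["__url__"]` is safe here because the enclosing f-string is triple-quoted, so the inner double quotes never clash with the delimiter. A standalone sketch of the distinction (illustration only, not repository code):

    # Standalone illustration, not from the repository.
    ABOUT = {"__url__": "https://github.com/pylhc/pylhc"}

    # Triple-quoted f-string: inner double quotes are fine on any supported Python.
    prolog = f"""
    :github_url: {ABOUT["__url__"]}
    """

    # With a plain double-quoted f-string, reusing double quotes inside the
    # replacement field is only legal on Python >= 3.12 (PEP 701); older
    # versions must alternate quotes, e.g. f"{ABOUT['__url__']}".
    print(prolog)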

pylhc/__init__.py

Lines changed: 1 addition & 0 deletions
@@ -7,6 +7,7 @@
 :copyright: pyLHC/OMC-Team working group.
 :license: MIT, see the LICENSE.md file for details.
 """
+
 __title__ = "pylhc"
 __description__ = "An accelerator physics script collection for the OMC team at CERN."
 __url__ = "https://github.com/pylhc/pylhc"

pylhc/bpm_calibration.py

Lines changed: 30 additions & 31 deletions
@@ -51,6 +51,7 @@
 
     default: ``beta``
 """
+
 from pathlib import Path
 
 import tfs
@@ -74,33 +75,29 @@ def _get_params() -> dict:
     """
 
     return EntryPointParameters(
-        inputdir=dict(
-            type=Path,
-            required=True,
-            help="Measurements path."
-        ),
-        outputdir=dict(
-            type=Path,
-            required=True,
-            help="Output directory where to write the calibration factors.",
-        ),
-        ips=dict(
-            type=int,
-            nargs="+",
-            choices=IPS,
-            required=False,
-            help="IPs to compute calibration factors for.",
-        ),
-        method=dict(
-            type=str,
-            required=False,
-            choices=METHODS,
-            default=METHODS[0],
-            help=(
+        inputdir={"type": Path, "required": True, "help": "Measurements path."},
+        outputdir={
+            "type": Path,
+            "required": True,
+            "help": "Output directory where to write the calibration factors.",
+        },
+        ips={
+            "type": int,
+            "nargs": "+",
+            "choices": IPS,
+            "required": False,
+            "help": "IPs to compute calibration factors for.",
+        },
+        method={
+            "type": str,
+            "required": False,
+            "choices": METHODS,
+            "default": METHODS[0],
+            "help": (
                 "Method to be used to compute the calibration factors. "
                 "The Beta function is used by default."
             ),
-        ),
+        },
     )
 
 
@@ -113,16 +110,18 @@ def main(opt):
         factors = get_calibration_factors_from_dispersion(opt.ips, opt.inputdir)
 
     # Fill NaN with 1 because of missing BPMs and that fit cannot be done everywhere
-    for plane in factors.keys():
-        factors[plane] = factors[plane].fillna(1)
-    LOG.debug("".join([f"\nPlane {plane}:\n{factors[plane]}" for plane in factors.keys()]))
+    for plane in factors:
+        factors[plane] = factors[plane].infer_objects().fillna(1)
+    LOG.debug("".join([f"\nPlane {plane}:\n{factors[plane]}" for plane in factors]))
 
     # Write the TFS file to the desired output directory
     opt.outputdir.mkdir(parents=True, exist_ok=True)
-    for plane in factors.keys():
-        tfs.write(opt.outputdir / f"{CALIBRATION_NAME[opt.method]}{plane.lower()}{EXT}",
-                  factors[plane].reset_index(),
-                  save_index=False)
+    for plane in factors:
+        tfs.write(
+            opt.outputdir / f"{CALIBRATION_NAME[opt.method]}{plane.lower()}{EXT}",
+            factors[plane].reset_index(),
+            save_index=False,
+        )
 
     return factors
 
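
The `.infer_objects().fillna(1)` change is more than style: recent pandas releases (2.1+) deprecate the silent downcasting that `fillna` used to perform on object-dtype columns and emit a `FutureWarning` for it. Converting to concrete dtypes first keeps the behaviour explicit. A small sketch of the pattern with hypothetical data (the real code applies it to the per-plane calibration-factor DataFrames):

    # Hypothetical data illustrating the infer_objects() + fillna() pattern.
    import numpy as np
    import pandas as pd

    df = pd.DataFrame({"CALIBRATION": [0.98, np.nan, 1.02]}, dtype=object)

    # fillna() on object dtype may warn about deprecated silent downcasting;
    # inferring concrete dtypes first (here: float64) avoids that.
    df = df.infer_objects().fillna(1)
    print(df.dtypes)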

pylhc/bsrt_analysis.py

Lines changed: 50 additions & 48 deletions
@@ -11,6 +11,7 @@
 - If provided a `TfsDataFrame` file with timestamps, plots of the 2D distribution and comparison
   of fit parameters to cross sections are added.
 """
+
 import datetime
 import glob
 import gzip
@@ -22,10 +23,10 @@
 import pandas as pd
 import parse
 import pytz
-
 import tfs
 from generic_parser import EntryPointParameters, entrypoint
 from omc3.utils import logging_tools, time_tools
+
 from pylhc.constants.general import TFS_SUFFIX, TIME_COLUMN
 from pylhc.forced_da_analysis import get_approximate_index
 
@@ -38,47 +39,52 @@
 
 def get_params():
     return EntryPointParameters(
-        directory=dict(
-            flags=["-d", "--directory"],
-            required=True,
-            type=str,
-            help="Directory containing the logged BSRT files.",
-        ),
-        beam=dict(
-            flags=["-b", "--beam"],
-            required=True,
-            choices=["B1", "B2"],
-            type=str,
-            help="Beam for which analysis is performed.",
-        ),
-        outputdir=dict(
-            flags=["-o", "--outputdir"],
-            type=str,
-            default=None,
-            help=(
+        directory={
+            "flags": ["-d", "--directory"],
+            "required": True,
+            "type": str,
+            "help": "Directory containing the logged BSRT files.",
+        },
+        beam={
+            "flags": ["-b", "--beam"],
+            "required": True,
+            "choices": ["B1", "B2"],
+            "type": str,
+            "help": "Beam for which analysis is performed.",
+        },
+        outputdir={
+            "flags": ["-o", "--outputdir"],
+            "type": str,
+            "default": None,
+            "help": (
                 "Directory in which plots and dataframe will be saved in. If omitted, "
                 "no data will be saved."
             ),
-        ),
-        starttime=dict(
-            flags=["--starttime"],
-            type=int,
-            help="Start of time window for analysis in milliseconds UTC.",
-        ),
-        endtime=dict(
-            flags=["--endtime"],
-            type=int,
-            help="End of time window for analysis in milliseconds UTC.",
-        ),
-        kick_df=dict(
-            flags=["--kick_df"],
-            default=None,
-            help=(
+        },
+        starttime={
+            "flags": ["--starttime"],
+            "type": int,
+            "help": "Start of time window for analysis in milliseconds UTC.",
+        },
+        endtime={
+            "flags": ["--endtime"],
+            "type": int,
+            "help": "End of time window for analysis in milliseconds UTC.",
+        },
+        kick_df={
+            "flags": ["--kick_df"],
+            "default": None,
+            "help": (
                 f"TFS with column {TIME_COLUMN} with time stamps to be added in the plots. "
                 f"Additionally, cross section at these timestamps will be plotted.",
             ),
-        ),
-        show_plots=dict(flags=["--show_plots"], type=bool, default=False, help="Show BSRT plots."),
+        },
+        show_plots={
+            "flags": ["--show_plots"],
+            "type": bool,
+            "default": False,
+            "help": "Show BSRT plots.",
+        },
     )
 
 
@@ -159,7 +165,7 @@ def _select_files(opt, files_df):
 
 def _load_files_in_df(opt):
     files_df = pd.DataFrame(
-        data={"FILES": glob.glob(str(Path(opt.directory) / _get_bsrt_logger_fname(opt.beam, "*")))}
+        data={"FILES": glob.glob(str(Path(opt.directory) / _get_bsrt_logger_fname(opt.beam, "*")))}  # noqa: PTH207
     )
 
     files_df = files_df.assign(
@@ -175,8 +181,7 @@ def _load_files_in_df(opt):
     )
     files_df = files_df.assign(TIME=[f.timestamp() for f in files_df["TIMESTAMP"]])
 
-    files_df = files_df.sort_values(by=["TIME"]).reset_index(drop=True).set_index("TIME")
-    return files_df
+    return files_df.sort_values(by=["TIME"]).reset_index(drop=True).set_index("TIME")
 
 
 def _get_timestamp_from_name(name, formatstring):
@@ -189,7 +194,7 @@ def _get_timestamp_from_name(name, formatstring):
 def _check_and_fix_entries(entry):
     # pd.to_csv does not handle np.array as entries nicely, converting to list circumvents this
     for key, val in entry.items():
-        if isinstance(val, (np.ndarray, tuple)):
+        if isinstance(val, (np.ndarray | tuple)):
             entry[key] = list(val)
         if np.array(val).size == 0:
             entry[key] = np.nan
@@ -199,7 +204,8 @@ def _check_and_fix_entries(entry):
 def _load_pickled_data(opt, files_df):
     merged_df = pd.DataFrame()
     for bsrtfile in files_df["FILES"]:
-        data = pickle.load(gzip.open(bsrtfile, "rb"))
+        with gzip.open(bsrtfile, "rb") as f:
+            data = pickle.load(f)
         new_df = pd.DataFrame.from_records([_check_and_fix_entries(entry) for entry in data])
         merged_df = pd.concat([merged_df, new_df], axis="index", ignore_index=True)
 
@@ -224,7 +230,6 @@ def _add_kick_lines(ax, df):
 
 
 def _fit_var(ax, bsrt_df, plot_dict, opt):
-
     ax[plot_dict["idx"]].plot(
         bsrt_df.index, [entry[plot_dict["fitidx"]] for entry in bsrt_df["lastFitResults"]]
     )
@@ -234,7 +239,6 @@ def _fit_var(ax, bsrt_df, plot_dict, opt):
 
 
 def plot_fit_variables(opt, bsrt_df):
-
     fig, ax = plt.subplots(nrows=2, ncols=3, figsize=(20, 9), sharex=True, constrained_layout=True)
 
     plot_dicts = [
@@ -292,8 +296,8 @@ def _full_crossection(ax, bsrt_df, plot_dict, opt):
         ax,
         bsrt_df.reset_index(),
         "TimeIndex",
-        f'projPositionSet{plot_dict["idx"]}',
-        f'projDataSet{plot_dict["idx"]}',
+        f"projPositionSet{plot_dict['idx']}",
+        f"projDataSet{plot_dict['idx']}",
     )
     ax.plot(
         bsrt_df.index,
@@ -326,7 +330,6 @@ def _full_crossection(ax, bsrt_df, plot_dict, opt):
 
 
 def plot_full_crosssection(opt, bsrt_df):
-
     plot_dicts = [
         {"idx": 1, "fitresult": 3, "fiterror": 4, "title": "Horizontal Cross section"},
         {"idx": 2, "fitresult": 8, "fiterror": 9, "title": "Vertical Cross section"},
@@ -344,7 +347,7 @@ def plot_full_crosssection(opt, bsrt_df):
 
 def _gauss(x, *p):
     a, b, c = p
-    return a * np.exp(-((x - b) ** 2) / (2.0 * c ** 2.0))
+    return a * np.exp(-((x - b) ** 2) / (2.0 * c**2.0))
 
 
 def _reshaped_imageset(df):
@@ -408,7 +411,6 @@ def plot_crosssection_for_timesteps(opt, bsrt_df):
 
 
 def _aux_variables(ax, bsrt_df, plot_dict, opt):
-
     ax.plot(
         bsrt_df.index, bsrt_df[plot_dict["variable1"]], color="red", label=plot_dict["variable1"]
     )
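
Two of the fixes above are worth a brief gloss. `isinstance(val, (np.ndarray | tuple))` uses a PEP 604 union as the isinstance() target, which requires Python 3.10 or newer (the extra parentheses are redundant but harmless). The `gzip.open` rewrite stops leaking file handles: the old one-liner never closed the file, while the context manager closes it even if `pickle.load` raises. A minimal sketch of both patterns with toy data (not the BSRT analysis code):

    # Toy illustration of the two patterns, not the BSRT analysis code.
    import gzip
    import pickle

    import numpy as np

    val = np.array([1, 2, 3])
    # PEP 604 (Python >= 3.10): a union type works directly with isinstance().
    assert isinstance(val, np.ndarray | tuple)

    # The context manager guarantees the gzip handle is closed, even on error.
    with gzip.open("toy.dat.gz", "wb") as f:
        pickle.dump([1, 2, 3], f)
    with gzip.open("toy.dat.gz", "rb") as f:
        assert pickle.load(f) == [1, 2, 3]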

pylhc/bsrt_logger.py

Lines changed: 14 additions & 17 deletions
@@ -9,11 +9,13 @@
 
 Original authors: E. H. Maclean, T. Persson and G. Trad.
 """
+
 import datetime as dt
 import os
 import pickle
 import sys
 import time
+from pathlib import Path
 
 from omc3.definitions import formats
 from omc3.utils.mock import cern_network_import
@@ -34,8 +36,7 @@ def parse_timestamp(thistime):
     ]
     for fmat in accepted_time_input_format:
         try:
-            dtobject = dt.datetime.strptime(thistime, fmat)
-            return dtobject
+            return dt.datetime.strptime(thistime, fmat)
         except ValueError:
             pass
     timefmatstring = ""
@@ -53,22 +54,21 @@
 
 # function to help write output from datetime objects in standard format throughout code
 def convert_to_data_output_format(dtobject):
-    output_timestamp = dtobject.strftime(formats.TIME)
-    return output_timestamp
+    return dtobject.strftime(formats.TIME)
 
 
 ##########################################
 
 
-if __name__ == '__main__':
+if __name__ == "__main__":
     # Create a PyJapc instance with selector SCT.USER.ALL
     # INCA is automatically configured based on the timing domain you specify here
 
     CycleName = "LHC.USER.ALL"
    INCAacc = "LHC"
-    noSetFlag = True
+    no_set_flag = True
 
-    japc = pyjapc.PyJapc(selector=CycleName, incaAcceleratorName=INCAacc, noSet=noSetFlag)
+    japc = pyjapc.PyJapc(selector=CycleName, incaAcceleratorName=INCAacc, noSet=no_set_flag)
     japc.rbacLogin()
     acquesitions_per_file = 100
     j = 0
@@ -79,28 +79,25 @@ def convert_to_data_output_format(dtobject):
         B1_image = japc.getParam("LHC.BSRTS.5R4.B1/Image")
         B2_image = japc.getParam("LHC.BSRTS.5L4.B2/Image")
         if t == 0:
-            allB1data = []
-            allB2data = []
+            all_b1_data = []
+            all_b2_data = []
         B1_IMGtime = B1_image["acqTime"]
         B2_IMGtime = B2_image["acqTime"]
         B1_IMGtime_dt = parse_timestamp(B1_IMGtime)
         B2_IMGtime_dt = parse_timestamp(B2_IMGtime)
         B1_IMGtime_st = convert_to_data_output_format(B1_IMGtime_dt)
         B2_IMGtime_st = convert_to_data_output_format(B2_IMGtime_dt)
 
-        allB1data.append(B1_image)
-        allB2data.append(B2_image)
+        all_b1_data.append(B1_image)
+        all_b2_data.append(B2_image)
         t += 1
         if t == acquesitions_per_file:
             j += 1
             f1name = "data_BSRT_B1_" + B1_IMGtime_st + ".dat"
             f2name = "data_BSRT_B2_" + B2_IMGtime_st + ".dat"
-            f1 = open(f1name, "wb")
-            f2 = open(f2name, "wb")
-            pickle.dump(allB1data, f1)
-            pickle.dump(allB2data, f2)
-            f1.close()
-            f2.close()
+            with Path(f1name).open("wb") as f1, Path(f2name).open("wb") as f2:
+                pickle.dump(all_b1_data, f1)
+                pickle.dump(all_b2_data, f2)
             os.system("gzip " + f1name)
             os.system("gzip " + f2name)
             t = 0
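
The final hunk replaces paired `open()`/`close()` calls with a single `with` statement over `Path.open`, the direction the pathlib lint rules (the `PTH` family, cf. the `noqa: PTH207` above) push towards; it also guarantees both files are closed if `pickle.dump` raises. A minimal sketch of the pattern with hypothetical file names:

    # Hypothetical file names; illustrates the Path.open() context-manager pattern.
    import pickle
    from pathlib import Path

    records_b1 = [{"beam": "B1"}]
    records_b2 = [{"beam": "B2"}]

    # One with-statement manages both handles; both close on exit or error.
    with Path("b1.dat").open("wb") as f1, Path("b2.dat").open("wb") as f2:
        pickle.dump(records_b1, f1)
        pickle.dump(records_b2, f2)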
