3 changes: 1 addition & 2 deletions doc/conf.py
@@ -1,4 +1,3 @@
# -*- coding: utf-8 -*-
#
# pyLHC documentation build configuration file, created by
# sphinx-quickstart on Tue Feb 6 12:10:18 2018.
@@ -97,7 +96,7 @@ def about_package(init_posixpath: pathlib.Path) -> dict:

# Override link in 'Edit on Github'
rst_prolog = f"""
:github_url: {ABOUT_PYLHC['__url__']}
:github_url: {ABOUT_PYLHC["__url__"]}
"""

# The version info for the project you're documenting, acts as replacement for
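Note: the only change in this hunk is the quote style inside the f-string replacement field. Both spellings parse on any supported Python, since a lone double quote cannot terminate a triple-double-quoted f-string; the switch is presumably just formatter preference. A minimal sketch (the ABOUT_PYLHC value is a placeholder):

ABOUT_PYLHC = {"__url__": "https://github.com/pylhc/pylhc"}

# Either quote style nests fine inside the braces here; a single "
# does not close the surrounding triple-quoted literal:
rst_prolog = f"""
:github_url: {ABOUT_PYLHC["__url__"]}
"""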
1 change: 1 addition & 0 deletions pylhc/__init__.py
@@ -7,6 +7,7 @@
:copyright: pyLHC/OMC-Team working group.
:license: MIT, see the LICENSE.md file for details.
"""

__title__ = "pylhc"
__description__ = "An accelerator physics script collection for the OMC team at CERN."
__url__ = "https://github.com/pylhc/pylhc"
61 changes: 30 additions & 31 deletions pylhc/bpm_calibration.py
@@ -51,6 +51,7 @@

default: ``beta``
"""

from pathlib import Path

import tfs
@@ -74,33 +75,29 @@ def _get_params() -> dict:
"""

return EntryPointParameters(
inputdir=dict(
type=Path,
required=True,
help="Measurements path."
),
outputdir=dict(
type=Path,
required=True,
help="Output directory where to write the calibration factors.",
),
ips=dict(
type=int,
nargs="+",
choices=IPS,
required=False,
help="IPs to compute calibration factors for.",
),
method=dict(
type=str,
required=False,
choices=METHODS,
default=METHODS[0],
help=(
inputdir={"type": Path, "required": True, "help": "Measurements path."},
outputdir={
"type": Path,
"required": True,
"help": "Output directory where to write the calibration factors.",
},
ips={
"type": int,
"nargs": "+",
"choices": IPS,
"required": False,
"help": "IPs to compute calibration factors for.",
},
method={
"type": str,
"required": False,
"choices": METHODS,
"default": METHODS[0],
"help": (
"Method to be used to compute the calibration factors. "
"The Beta function is used by default."
),
),
},
)

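Note: the bulk of this hunk swaps dict(...) constructor calls for dict literals, presumably to satisfy a lint rule such as C408 (the PR does not say which). The two forms build identical mappings; a one-line equivalence check:

# Same mapping either way; the literal also skips the dict() name lookup:
assert dict(type=int, required=False) == {"type": int, "required": False}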

@@ -113,16 +110,18 @@ def main(opt):
factors = get_calibration_factors_from_dispersion(opt.ips, opt.inputdir)

# Fill NaN with 1 because of missing BPMs and because the fit cannot be done everywhere
for plane in factors.keys():
factors[plane] = factors[plane].fillna(1)
LOG.debug("".join([f"\nPlane {plane}:\n{factors[plane]}" for plane in factors.keys()]))
for plane in factors:
factors[plane] = factors[plane].infer_objects().fillna(1)
LOG.debug("".join([f"\nPlane {plane}:\n{factors[plane]}" for plane in factors]))

# Write the TFS file to the desired output directory
opt.outputdir.mkdir(parents=True, exist_ok=True)
for plane in factors.keys():
tfs.write(opt.outputdir / f"{CALIBRATION_NAME[opt.method]}{plane.lower()}{EXT}",
factors[plane].reset_index(),
save_index=False)
for plane in factors:
tfs.write(
opt.outputdir / f"{CALIBRATION_NAME[opt.method]}{plane.lower()}{EXT}",
factors[plane].reset_index(),
save_index=False,
)

return factors

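Note: the added .infer_objects() call sidesteps pandas' deprecation of silent downcasting in fill operations (assuming pandas >= 2.1 is the target; the PR does not pin it). A minimal sketch with a hypothetical frame:

import numpy as np
import pandas as pd

df = pd.DataFrame({"CALIBRATION": [0.98, np.nan, 1.02]}, dtype=object)
# A bare .fillna(1) on the object column would emit the downcasting
# FutureWarning; .infer_objects() settles the dtype to float64 first:
clean = df.infer_objects().fillna(1)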
98 changes: 50 additions & 48 deletions pylhc/bsrt_analysis.py
@@ -11,6 +11,7 @@
- If provided a `TfsDataFrame` file with timestamps, plots of the 2D distribution and comparison
of fit parameters to cross sections are added.
"""

import datetime
import glob
import gzip
@@ -22,10 +23,10 @@
import pandas as pd
import parse
import pytz

import tfs
from generic_parser import EntryPointParameters, entrypoint
from omc3.utils import logging_tools, time_tools

from pylhc.constants.general import TFS_SUFFIX, TIME_COLUMN
from pylhc.forced_da_analysis import get_approximate_index

@@ -38,47 +39,52 @@

def get_params():
return EntryPointParameters(
directory=dict(
flags=["-d", "--directory"],
required=True,
type=str,
help="Directory containing the logged BSRT files.",
),
beam=dict(
flags=["-b", "--beam"],
required=True,
choices=["B1", "B2"],
type=str,
help="Beam for which analysis is performed.",
),
outputdir=dict(
flags=["-o", "--outputdir"],
type=str,
default=None,
help=(
directory={
"flags": ["-d", "--directory"],
"required": True,
"type": str,
"help": "Directory containing the logged BSRT files.",
},
beam={
"flags": ["-b", "--beam"],
"required": True,
"choices": ["B1", "B2"],
"type": str,
"help": "Beam for which analysis is performed.",
},
outputdir={
"flags": ["-o", "--outputdir"],
"type": str,
"default": None,
"help": (
"Directory in which plots and dataframe will be saved in. If omitted, "
"no data will be saved."
),
),
starttime=dict(
flags=["--starttime"],
type=int,
help="Start of time window for analysis in milliseconds UTC.",
),
endtime=dict(
flags=["--endtime"],
type=int,
help="End of time window for analysis in milliseconds UTC.",
),
kick_df=dict(
flags=["--kick_df"],
default=None,
help=(
},
starttime={
"flags": ["--starttime"],
"type": int,
"help": "Start of time window for analysis in milliseconds UTC.",
},
endtime={
"flags": ["--endtime"],
"type": int,
"help": "End of time window for analysis in milliseconds UTC.",
},
kick_df={
"flags": ["--kick_df"],
"default": None,
"help": (
f"TFS with column {TIME_COLUMN} with time stamps to be added in the plots. "
f"Additionally, cross section at these timestamps will be plotted.",
),
),
show_plots=dict(flags=["--show_plots"], type=bool, default=False, help="Show BSRT plots."),
},
show_plots={
"flags": ["--show_plots"],
"type": bool,
"default": False,
"help": "Show BSRT plots.",
},
)


@@ -159,7 +165,7 @@ def _select_files(opt, files_df):

def _load_files_in_df(opt):
files_df = pd.DataFrame(
data={"FILES": glob.glob(str(Path(opt.directory) / _get_bsrt_logger_fname(opt.beam, "*")))}
data={"FILES": glob.glob(str(Path(opt.directory) / _get_bsrt_logger_fname(opt.beam, "*")))} # noqa: PTH207
)

files_df = files_df.assign(
@@ -175,8 +181,7 @@ def _load_files_in_df(opt):
)
files_df = files_df.assign(TIME=[f.timestamp() for f in files_df["TIMESTAMP"]])

files_df = files_df.sort_values(by=["TIME"]).reset_index(drop=True).set_index("TIME")
return files_df
return files_df.sort_values(by=["TIME"]).reset_index(drop=True).set_index("TIME")


def _get_timestamp_from_name(name, formatstring):
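Note: the noqa: PTH207 added above silences ruff's pathlib rule for glob.glob. A hypothetical pathlib-native alternative (not part of this PR; Path and pd are already imported in this module) that would need no suppression:

files = [str(p) for p in Path(opt.directory).glob(_get_bsrt_logger_fname(opt.beam, "*"))]
files_df = pd.DataFrame(data={"FILES": files})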
@@ -189,7 +194,7 @@ def _get_timestamp_from_name(name, formatstring):
def _check_and_fix_entries(entry):
# pd.to_csv does not handle np.array as entries nicely; converting to list circumvents this
for key, val in entry.items():
if isinstance(val, (np.ndarray, tuple)):
if isinstance(val, (np.ndarray | tuple)):
entry[key] = list(val)
if np.array(val).size == 0:
entry[key] = np.nan
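
Note: the rewritten check uses a PEP 604 union, which isinstance accepts directly on Python >= 3.10; the extra parentheses around np.ndarray | tuple are redundant but harmless. A quick equivalence sketch:

import numpy as np

val = np.arange(3)
assert isinstance(val, np.ndarray | tuple)   # union form, Python >= 3.10
assert isinstance(val, (np.ndarray, tuple))  # classic tuple-of-types form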
@@ -199,7 +204,8 @@ def _check_and_fix_entries(entry):
def _load_pickled_data(opt, files_df):
merged_df = pd.DataFrame()
for bsrtfile in files_df["FILES"]:
data = pickle.load(gzip.open(bsrtfile, "rb"))
with gzip.open(bsrtfile, "rb") as f:
data = pickle.load(f)
new_df = pd.DataFrame.from_records([_check_and_fix_entries(entry) for entry in data])
merged_df = pd.concat([merged_df, new_df], axis="index", ignore_index=True)

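Note: wrapping gzip.open in a with-block guarantees the handle is closed even if pickle.load raises, which the replaced one-liner did not. The pattern in isolation (the filename is a placeholder):

import gzip
import pickle

with gzip.open("data_BSRT_B1_example.dat.gz", "rb") as f:
    data = pickle.load(f)  # handle closed on success and on error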
@@ -224,7 +230,6 @@ def _add_kick_lines(ax, df):


def _fit_var(ax, bsrt_df, plot_dict, opt):

ax[plot_dict["idx"]].plot(
bsrt_df.index, [entry[plot_dict["fitidx"]] for entry in bsrt_df["lastFitResults"]]
)
@@ -234,7 +239,6 @@ def _fit_var(ax, bsrt_df, plot_dict, opt):


def plot_fit_variables(opt, bsrt_df):

fig, ax = plt.subplots(nrows=2, ncols=3, figsize=(20, 9), sharex=True, constrained_layout=True)

plot_dicts = [
@@ -292,8 +296,8 @@ def _full_crossection(ax, bsrt_df, plot_dict, opt):
ax,
bsrt_df.reset_index(),
"TimeIndex",
f'projPositionSet{plot_dict["idx"]}',
f'projDataSet{plot_dict["idx"]}',
f"projPositionSet{plot_dict['idx']}",
f"projDataSet{plot_dict['idx']}",
)
ax.plot(
bsrt_df.index,
@@ -326,7 +330,6 @@


def plot_full_crosssection(opt, bsrt_df):

plot_dicts = [
{"idx": 1, "fitresult": 3, "fiterror": 4, "title": "Horizontal Cross section"},
{"idx": 2, "fitresult": 8, "fiterror": 9, "title": "Vertical Cross section"},
@@ -344,7 +347,7 @@ def plot_full_crosssection(opt, bsrt_df):

def _gauss(x, *p):
a, b, c = p
return a * np.exp(-((x - b) ** 2) / (2.0 * c ** 2.0))
return a * np.exp(-((x - b) ** 2) / (2.0 * c**2.0))

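Note: _gauss is a standard three-parameter Gaussian, a * exp(-(x - b)**2 / (2 * c**2)); the change above only normalizes the ** spacing. A sketch of the usual fitting call (assuming scipy's curve_fit, which this diff does not show):

import numpy as np
from scipy.optimize import curve_fit

x = np.linspace(-5, 5, 101)
y = _gauss(x, 1.0, 0.0, 1.5)  # amplitude, centre, sigma
popt, _ = curve_fit(_gauss, x, y, p0=[1.0, 0.0, 1.0])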

def _reshaped_imageset(df):
@@ -408,7 +411,6 @@ def plot_crosssection_for_timesteps(opt, bsrt_df):


def _aux_variables(ax, bsrt_df, plot_dict, opt):

ax.plot(
bsrt_df.index, bsrt_df[plot_dict["variable1"]], color="red", label=plot_dict["variable1"]
)
31 changes: 14 additions & 17 deletions pylhc/bsrt_logger.py
@@ -9,11 +9,13 @@

Original authors: E. H. Maclean, T. Persson and G. Trad.
"""

import datetime as dt
import os
import pickle
import sys
import time
from pathlib import Path

from omc3.definitions import formats
from omc3.utils.mock import cern_network_import
@@ -34,8 +36,7 @@ def parse_timestamp(thistime):
]
for fmat in accepted_time_input_format:
try:
dtobject = dt.datetime.strptime(thistime, fmat)
return dtobject
return dt.datetime.strptime(thistime, fmat)
except ValueError:
pass
timefmatstring = ""
@@ -53,22 +54,21 @@

# function to help write output from datetime objects in standard format throughout code
def convert_to_data_output_format(dtobject):
output_timestamp = dtobject.strftime(formats.TIME)
return output_timestamp
return dtobject.strftime(formats.TIME)


##########################################


if __name__ == '__main__':
if __name__ == "__main__":
# Create a PyJapc instance with selector SCT.USER.ALL
# INCA is automatically configured based on the timing domain you specify here

CycleName = "LHC.USER.ALL"
INCAacc = "LHC"
noSetFlag = True
no_set_flag = True

japc = pyjapc.PyJapc(selector=CycleName, incaAcceleratorName=INCAacc, noSet=noSetFlag)
japc = pyjapc.PyJapc(selector=CycleName, incaAcceleratorName=INCAacc, noSet=no_set_flag)
japc.rbacLogin()
acquesitions_per_file = 100
j = 0
@@ -79,28 +79,25 @@ def convert_to_data_output_format(dtobject):
B1_image = japc.getParam("LHC.BSRTS.5R4.B1/Image")
B2_image = japc.getParam("LHC.BSRTS.5L4.B2/Image")
if t == 0:
allB1data = []
allB2data = []
all_b1_data = []
all_b2_data = []
B1_IMGtime = B1_image["acqTime"]
B2_IMGtime = B2_image["acqTime"]
B1_IMGtime_dt = parse_timestamp(B1_IMGtime)
B2_IMGtime_dt = parse_timestamp(B2_IMGtime)
B1_IMGtime_st = convert_to_data_output_format(B1_IMGtime_dt)
B2_IMGtime_st = convert_to_data_output_format(B2_IMGtime_dt)

allB1data.append(B1_image)
allB2data.append(B2_image)
all_b1_data.append(B1_image)
all_b2_data.append(B2_image)
t += 1
if t == acquesitions_per_file:
j += 1
f1name = "data_BSRT_B1_" + B1_IMGtime_st + ".dat"
f2name = "data_BSRT_B2_" + B2_IMGtime_st + ".dat"
f1 = open(f1name, "wb")
f2 = open(f2name, "wb")
pickle.dump(allB1data, f1)
pickle.dump(allB2data, f2)
f1.close()
f2.close()
with Path(f1name).open("wb") as f1, Path(f2name).open("wb") as f2:
pickle.dump(all_b1_data, f1)
pickle.dump(all_b2_data, f2)
os.system("gzip " + f1name)
os.system("gzip " + f2name)
t = 0
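Note: the pickle writes now go through Path.open context managers, but compression still shells out through os.system. A hypothetical follow-up (not part of this PR) that writes the gzipped pickles directly and drops the external gzip binary:

import gzip
import pickle

with gzip.open(f1name + ".gz", "wb") as f1:
    pickle.dump(all_b1_data, f1)  # compressed on the fly, no subprocess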