diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS
new file mode 100644
index 0000000..b0f60bf
--- /dev/null
+++ b/.github/CODEOWNERS
@@ -0,0 +1,8 @@
+# https://docs.github.com/en/repositories/managing-your-repositorys-settings-and-features/customizing-your-repository/about-code-owners
+
+# GLOBAL CODEOWNERS ---
+# These owners will be the default owners for everything in
+# the repo. Unless a later match takes precedence,
+# they will be requested for review when someone opens a pull request.
+# Note: in CODEOWNERS the *last* matching rule wins, so all owners go on one line.
+* @pylhc/approved-reviewers @Mael-Le-Garrec
diff --git a/.github/workflows/README.md b/.github/workflows/README.md
new file mode 100644
index 0000000..2cd1108
--- /dev/null
+++ b/.github/workflows/README.md
@@ -0,0 +1,28 @@
+# Continuous Integration Workflows
+
+This package implements different workflows for CI.
+They are organised as follows.
+
+### Documentation
+
+The `documentation` workflow triggers on any push to master, builds the documentation and pushes it to the `gh-pages` branch (if the build is successful).
+
+### Testing Suite
+
+Tests are ensured in the `tests` workflow, which triggers on all pushes.
+It runs on a matrix of all supported operating systems for all supported Python versions.
+
+### Test Coverage
+
+Test coverage is calculated in the `coverage` workflow, which triggers on pushes to `master` and any push to a `pull request`.
+It reports the coverage results of the test suite to `CodeClimate`.
+
+### Regular Testing
+
+A `cron` workflow triggers every Monday at 3am (UTC time) and runs the full testing suite, on all available operating systems and supported Python versions.
+It also runs on `Python 3.x` so that newly released Python versions that would break tests are automatically included.
+
+### Publishing
+
+Publishing to `PyPI` is done through the `publish` workflow, which triggers any time a `release` is made on the GitHub repository.
+It builds a `wheel`, checks it, and pushes to `PyPI` if checks are successful.
diff --git a/.github/workflows/coverage.yml b/.github/workflows/coverage.yml
new file mode 100644
index 0000000..cd7be82
--- /dev/null
+++ b/.github/workflows/coverage.yml
@@ -0,0 +1,16 @@
+# Runs all tests and pushes coverage report to codeclimate
+name: Coverage
+
+on: # Runs on all push events to master branch and any push related to a pull request
+ push:
+ branches:
+ - master
+ pull_request: # so that codeclimate gets coverage and reports on the diff
+
+jobs:
+ coverage:
+ if: false # disabled for now
+ uses: pylhc/.github/.github/workflows/coverage.yml@master
+ with:
+ src-dir: chroma_gui
+ secrets: inherit
diff --git a/.github/workflows/cron.yml b/.github/workflows/cron.yml
new file mode 100644
index 0000000..50b5449
--- /dev/null
+++ b/.github/workflows/cron.yml
@@ -0,0 +1,12 @@
+# Runs all tests on master on Mondays at 3 am (UTC time)
+name: Cron Testing
+
+
+on:
+ schedule:
+ - cron: '0 3 * * mon'
+
+jobs:
+ tests:
+ if: false # disabled for now
+ uses: pylhc/.github/.github/workflows/cron.yml@master
diff --git a/.github/workflows/documentation.yml b/.github/workflows/documentation.yml
new file mode 100644
index 0000000..e1195b9
--- /dev/null
+++ b/.github/workflows/documentation.yml
@@ -0,0 +1,14 @@
+# Build documentation
+# The build is uploaded as artifact if the triggering event is a push for a pull request
+# The build is published to github pages if the triggering event is a push to the master branch (PR merge)
+name: Build and upload documentation
+
+on: # Runs on any push event in a PR or any push event to master
+ pull_request:
+ push:
+ branches:
+ - 'master'
+
+jobs:
+ documentation:
+ uses: pylhc/.github/.github/workflows/documentation.yml@master
diff --git a/.github/workflows/publish.yml b/.github/workflows/publish.yml
new file mode 100644
index 0000000..95fcfb7
--- /dev/null
+++ b/.github/workflows/publish.yml
@@ -0,0 +1,11 @@
+# Publishes to PyPI upon creation of a release
+name: Upload Package to PyPI
+
+on: # Runs every time a release is added to the repository
+ release:
+ types: [created]
+
+jobs:
+ deploy:
+ uses: pylhc/.github/.github/workflows/publish.yml@master
+ secrets: inherit
diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml
new file mode 100644
index 0000000..69d776b
--- /dev/null
+++ b/.github/workflows/tests.yml
@@ -0,0 +1,16 @@
+# Runs all tests
+name: All Tests
+
+defaults:
+ run:
+ shell: bash
+
+on: # Runs on any push event to any branch except master (the coverage workflow takes care of that)
+ push:
+ branches-ignore:
+ - 'master'
+
+jobs:
+ tests:
+ if: false # disabled for now
+ uses: pylhc/.github/.github/workflows/tests.yml@master
diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000..f3faabc
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1,210 @@
+# Originally created by .ignore support plugin (hsz.mobi)
+
+### Python template
+# Byte-compiled / optimized / DLL files
+__pycache__/
+*pycache*
+*.py[cod]
+*$py.class
+
+# C extensions
+*.so
+
+# Distribution / packaging
+.Python
+build/
+develop-eggs/
+dist/
+downloads/
+eggs/
+.eggs/
+lib/
+lib64/
+parts/
+sdist/
+var/
+wheels/
+pip-wheel-metadata/
+share/python-wheels/
+*.egg-info/
+.installed.cfg
+*.egg
+MANIFEST
+
+# PyInstaller
+# Usually these files are written by a python script from a template
+# before PyInstaller builds the exe, so as to inject date/other infos into it.
+*.manifest
+*.spec
+
+# Installer logs
+pip-log.txt
+pip-delete-this-directory.txt
+
+# Unit test / coverage reports
+htmlcov/
+.tox/
+.nox/
+.coverage
+.coverage.*
+.cache
+nosetests.xml
+coverage.xml
+*.cover
+.hypothesis/
+.pytest_cache/
+
+# Translations
+*.mo
+*.pot
+
+# Django stuff:
+*.log
+local_settings.py
+db.sqlite3
+db.sqlite3-journal
+
+# Flask stuff:
+instance/
+.webassets-cache
+
+# Scrapy stuff:
+.scrapy
+
+# Sphinx build directory and sphinx-gallery generated documents
+docs/_build/
+doc/_build/
+doc_build/
+.doc_build
+.doc_build/
+.docs/gallery
+.docs/gallery/
+.docs/gen_modules
+.docs/gen_modules/
+
+# PyBuilder
+target/
+
+# Jupyter Notebook
+.ipynb_checkpoints/
+
+# IPython
+profile_default/
+ipython_config.py
+
+# pyenv
+.python-version
+
+
+# celery beat schedule file
+celerybeat-schedule
+
+# SageMath parsed files
+*.sage.py
+
+# Environments
+.env
+.venv
+env/
+venv/
+ENV/
+env.bak/
+venv.bak/
+
+# Spyder project settings
+.spyderproject
+.spyproject
+
+# Rope project settings
+.ropeproject
+
+# mkdocs documentation
+site/
+
+# mypy
+.mypy_cache/
+.dmypy.json
+dmypy.json
+
+# Pyre type checker
+.pyre/
+
+### JetBrains template
+# Covers JetBrains IDEs: IntelliJ, RubyMine, PhpStorm, AppCode, PyCharm, CLion, Android Studio and WebStorm
+# Reference: https://intellij-support.jetbrains.com/hc/en-us/articles/206544839
+
+# User-specific stuff
+
+# *.iml
+# *.ipr
+
+# CMake
+cmake-build-*/
+
+# File-based project format
+*.iws
+
+# IntelliJ
+.idea/
+out/
+.idea_modules/
+
+# JIRA plugin
+atlassian-ide-plugin.xml
+
+# Crashlytics plugin (for Android Studio and IntelliJ)
+com_crashlytics_export_strings.xml
+crashlytics.properties
+crashlytics-build.properties
+fabric.properties
+
+### macOS template
+# General
+.DS_Store
+.AppleDouble
+.LSOverride
+
+# Icon must end with two \r
+Icon
+
+# Thumbnails
+._*
+
+# Files that might appear in the root of a volume
+.DocumentRevisions-V100
+.fseventsd
+.Spotlight-V100
+.TemporaryItems
+.Trashes
+.VolumeIcon.icns
+.com.apple.timemachine.donotpresent
+
+# Directories potentially created on remote AFP share
+.AppleDB
+.AppleDesktop
+Network Trash Folder
+Temporary Items
+.apdisk
+
+### JupyterNotebooks template
+# gitignore template for Jupyter Notebooks
+# website: http://jupyter.org/
+
+.ipynb_checkpoints
+*/.ipynb_checkpoints/*
+
+# Remove previous ipynb_checkpoints
+# git rm -r .ipynb_checkpoints/
+#
+
+# VSCode
+.vscode
+.vscode/
+
+# Neovim
+.nvimlog
+*.swap
+
+### OMC Users
+# files for testing things (jdilly convention)
+tst_*
diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index b765876..88f4852 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -11,8 +11,8 @@ variables:
# The PY_VERSION and ACC_PY_BASE_IMAGE_TAG variables control the default Python and Acc-Py versions used by Acc-Py jobs.
# It is recommended to keep the two values consistent.
# More details https://acc-py.web.cern.ch/gitlab-mono/acc-co/devops/python/acc-py-gitlab-ci-templates/docs/templates/master/generated/v2.html#global-variables.
- PY_VERSION: '3.7'
- ACC_PY_BASE_IMAGE_TAG: '2020.11'
+ PY_VERSION: '3.11'
+ ACC_PY_BASE_IMAGE_TAG: '2023.06'
# Build a source distribution for chroma-gui.
diff --git a/.zenodo.json b/.zenodo.json
new file mode 100644
index 0000000..d110738
--- /dev/null
+++ b/.zenodo.json
@@ -0,0 +1,15 @@
+{
+ "creators": [
+ {
+ "name": "OMC-Team",
+ "affiliation": "CERN"
+ },
+ {
+ "name": "Maël Le Garrec",
+ "affiliation": "CERN",
+ "orcid": "0000-0002-8146-2340"
+ }
+ ],
+ "title": "Chroma-GUI",
+ "description": "A GUI to compute and display chromaticity measurements"
+}
\ No newline at end of file
diff --git a/README.md b/README.md
index a290e72..2b5949e 100644
--- a/README.md
+++ b/README.md
@@ -1,9 +1,15 @@
# Non-Linear Chromaticity GUI
-The Chromaticity GUI is a tool to compute non-linear chromaticity via
-measurements done in the CCC.
+The Chromaticity GUI is a tool to compute non-linear chromaticity via measurements done in the CCC.
-# Running
+
+## Running via acc-py environment
+
+* [Create a virtual environment via `acc-py`](https://pylhc.github.io/packages/development/howto_venv.html) if you do not have one already.
+* Install the package via `pip install chroma-gui[cern]` or `pip install chroma-gui[all]`
+* Run the GUI via `python -m chroma_gui`
+
+## Running via acc-py apps
Be sure to have the `/acc` directory mounted, which can be done via:
@@ -18,10 +24,11 @@ source /acc/local/share/python/acc-py/base/pro/setup.sh
acc-py app run chroma-gui
```
-# Deployment
+### Deployment via acc-py
* Change the version in [__init__.py](./chroma_gui/__init__.py)
* Update the [CHANGELOG](./CHANGELOG.md)
+* Commit changes to gitlab repository (wait for CI to finish)
```bash
alias acc-py="/acc/local/share/python/acc-py/apps/acc-py-cli/pro/bin/acc-py"
diff --git a/chroma_gui/__init__.py b/chroma_gui/__init__.py
index 06f00c8..eb3abfc 100644
--- a/chroma_gui/__init__.py
+++ b/chroma_gui/__init__.py
@@ -1,6 +1,18 @@
"""
-Documentation for the chroma_gui package
+The Chroma GUI
+~~~~~~~~~~~~~~
+``chroma_gui`` is a tool to compute non-linear chromaticity via
+measurements done in the CCC.
+
+:copyright: pyLHC/OMC-Team working group.
"""
+__title__ = "chroma-gui"
+__description__ = "A tool to compute non-linear chromaticity via measurements done in the CCC."
+__url__ = "https://github.com/pylhc/chroma_gui"
__version__ = "0.0.26"
+__author__ = "pylhc"
+__author_email__ = "pylhc@github.com"
+
+__all__ = [__version__]
diff --git a/chroma_gui/chromaticity/__init__.py b/chroma_gui/chromaticity/__init__.py
index 568c207..05c3252 100644
--- a/chroma_gui/chromaticity/__init__.py
+++ b/chroma_gui/chromaticity/__init__.py
@@ -1,7 +1,14 @@
+"""
+Chromaticity Module
+-------------------
+
+This module contains functions related to the chromaticity.
+"""
+# ruff: noqa
from .chroma_fct import (
- get_chromaticity,
construct_chroma_tfs,
- get_maximum_chromaticity,
+ get_chromaticity,
get_chromaticity_df_with_notation,
get_chromaticity_formula,
+ get_maximum_chromaticity,
)
diff --git a/chroma_gui/chromaticity/chroma_fct.py b/chroma_gui/chromaticity/chroma_fct.py
index f7c5198..1b2784a 100644
--- a/chroma_gui/chromaticity/chroma_fct.py
+++ b/chroma_gui/chromaticity/chroma_fct.py
@@ -1,20 +1,34 @@
-import pandas as pd
-import numpy as np
-from math import factorial
+"""
+Chromaticity Functions
+----------------------
+
+Functions to compute the chromaticity.
+"""
+from __future__ import annotations
+
from functools import partial
-from scipy.optimize import curve_fit
+from math import factorial
+
+import numpy as np
+import pandas as pd
import tfs
+from scipy.optimize import curve_fit
def chromaticity_func(x, *args):
- '''
- Returns the taylor expansion of the chromaticity
+ """
+ Returns the taylor expansion of the chromaticity
+
+
+ .. code-block::
+
q0
- + q1 * x
- + q2 * x**2 * 1/2!
- + q3 * x**3 * 1/3!
- ...
- '''
+ + q1 * x
+ + q2 * x**2 * 1/2!
+ + q3 * x**3 * 1/3!
+ ...
+
+ """
res = 0
for order, val in enumerate(args):
res += val * x ** (order) * (1 / factorial(order))
@@ -22,14 +36,14 @@ def chromaticity_func(x, *args):
def get_chromaticity_formula(order):
- dpp = r'\left( \frac{\Delta p}{p} \right)'
- dpp = '\\delta'
- chroma = f'Q({dpp}) = Q_0 '
- chroma += f'+ Q\' \cdot {dpp} '
-
- for o in range(2, order+1):
- q_str = "Q" + "'"*o
- chroma += f'+ \\frac{{1}}{{{o}!}} {q_str} \cdot {dpp}^{o} '
+ dpp = r"\left( \frac{\Delta p}{p} \right)"
+ dpp = "\\delta"
+ chroma = f"Q({dpp}) = Q_0 "
+ chroma += f"+ Q' \cdot {dpp} "
+
+ for o in range(2, order + 1):
+ q_str = "Q" + "'" * o
+ chroma += f"+ \\frac{{1}}{{{o}!}} {q_str} \cdot {dpp}^{o} "
return f"${chroma}$"
@@ -37,7 +51,7 @@ def construct_chroma_tfs(fit_orders):
max_fit_order = max(fit_orders)
q_val = [f"Q{o}" for o in range(max_fit_order + 1)]
q_err = [f"Q{o}_ERR" for o in range(max_fit_order + 1)]
- chroma_tfs = tfs.TfsDataFrame(columns=['AXIS', 'BEAM', 'UP_TO_ORDER', *q_val, *q_err])
+ chroma_tfs = tfs.TfsDataFrame(columns=["AXIS", "BEAM", "UP_TO_ORDER", *q_val, *q_err])
return chroma_tfs
@@ -53,20 +67,20 @@ def get_chromaticity(filename, chroma_tfs, dpp_range, fit_orders, axis):
max_fit_order = max(fit_orders)
data = tfs.read(filename)
- data = data.sort_values(by=['DPP'])
- data = data[(data['DPP'] > dpp_range[0]) & (data['DPP'] < dpp_range[1])]
+ data = data.sort_values(by=["DPP"])
+ data = data[(data["DPP"] > dpp_range[0]) & (data["DPP"] < dpp_range[1])]
# Create a list of all the fit functions, we're going to fit against all orders
fit_funcs = list()
- for order in range(min_fit_order, max_fit_order+1):
+ for order in range(min_fit_order, max_fit_order + 1):
# Initial guesses for the chroma, Q0, Q1, then 1e3, 1e6, 1e9, etc
- p0 = np.array([0.3, 2, *[pow(10, int(o)*3) for o in range(1, order)]], dtype='float64')
-
+ p0 = np.array([0.3, 2, *[pow(10, int(o) * 3) for o in range(1, order)]], dtype="float64")
+
# Create the fit function with all the parameters
- f = partial(curve_fit, chromaticity_func, data['DPP'], data[f'Q{axis}'], p0=p0)
+ f = partial(curve_fit, chromaticity_func, data["DPP"], data[f"Q{axis}"], p0=p0)
# Apply the errors to the fit if we got some
- if data[f'Q{axis}ERR'].all() != 0:
- f = partial(f, sigma=data[f'Q{axis}ERR'])
+ if data[f"Q{axis}ERR"].all() != 0:
+ f = partial(f, sigma=data[f"Q{axis}ERR"])
fit_funcs.append(f)
# Finally call the function and store the result!
@@ -76,36 +90,40 @@ def get_chromaticity(filename, chroma_tfs, dpp_range, fit_orders, axis):
# Populate the chromaticity TFS
order = i + min_fit_order
- remaining = [0] * ((max_fit_order - min_fit_order) - (len(popt)-(min_fit_order+1))) # we have Q0, so +1
-
- new_data = tfs.TfsDataFrame([[axis, data.headers['BEAM'], order, *popt, *remaining, *std, *remaining]],
- columns=chroma_tfs.columns)
+ remaining = [0] * (
+ (max_fit_order - min_fit_order) - (len(popt) - (min_fit_order + 1))
+ ) # we have Q0, so +1
+
+ new_data = tfs.TfsDataFrame(
+ [[axis, data.headers["BEAM"], order, *popt, *remaining, *std, *remaining]],
+ columns=chroma_tfs.columns,
+ )
chroma_tfs = pd.concat([chroma_tfs, new_data], ignore_index=True)
- chroma_tfs.headers['MIN_FIT_ORDER'] = min(fit_orders)
- chroma_tfs.headers['MAX_FIT_ORDER'] = max(fit_orders)
+ chroma_tfs.headers["MIN_FIT_ORDER"] = min(fit_orders)
+ chroma_tfs.headers["MAX_FIT_ORDER"] = max(fit_orders)
return chroma_tfs
def get_maximum_chromaticity(chroma_tfs):
- df = chroma_tfs[chroma_tfs['UP_TO_ORDER'] == chroma_tfs['UP_TO_ORDER'].max()]
- df = df.drop('UP_TO_ORDER', axis=1)
+ df = chroma_tfs[chroma_tfs["UP_TO_ORDER"] == chroma_tfs["UP_TO_ORDER"].max()]
+ df = df.drop("UP_TO_ORDER", axis=1)
return df
def get_chromaticity_df_with_notation(chroma_tfs):
"""
- Returns a dataFrame with the chromaticity with the headers set with exponents and the values divided
+ Returns a dataFrame with the chromaticity with the headers set with exponents and the values divided
"""
- max_order = chroma_tfs.headers['MAX_FIT_ORDER']
- headers = ['BEAM', 'AXIS']
- for order in range(max_order+1):
+ max_order = chroma_tfs.headers["MAX_FIT_ORDER"]
+ headers = ["BEAM", "AXIS"]
+ for order in range(max_order + 1):
prime = f"({order})" if order > 0 else ""
power = (order - 1) * 3 if order > 0 else 0
if order == 0:
- headers.append('Q')
+ headers.append("Q")
elif order == 1:
headers.append(f"Q^{prime}")
else:
@@ -113,14 +131,14 @@ def get_chromaticity_df_with_notation(chroma_tfs):
values = []
for index, row in chroma_tfs.iterrows():
- beam = row['BEAM']
- axis = row['AXIS']
+ beam = row["BEAM"]
+ axis = row["AXIS"]
new_row = [beam, axis]
- for order in range(max_order+1):
+ for order in range(max_order + 1):
power = (order - 1) * 3 if order > 0 else 0
- val = round(row[f'Q{order}'] / 10 ** power, 2)
- err = round(row[f'Q{order}_ERR'] / 10 ** power, 2)
+ val = round(row[f"Q{order}"] / 10**power, 2)
+ err = round(row[f"Q{order}_ERR"] / 10**power, 2)
new_row.append(rf"{val} ± {err}")
values.append(new_row)
diff --git a/chroma_gui/cleaning/__init__.py b/chroma_gui/cleaning/__init__.py
index f588e84..aa785e8 100644
--- a/chroma_gui/cleaning/__init__.py
+++ b/chroma_gui/cleaning/__init__.py
@@ -1,3 +1,10 @@
+"""
+Cleaning Module
+---------------
+
+Functions to clean the chromaticity data.
+"""
+# ruff: noqa
from . import constants
-from . import plateau
-from . import clean
\ No newline at end of file
+from . import clean
+from . import plateau
\ No newline at end of file
diff --git a/chroma_gui/cleaning/clean.py b/chroma_gui/cleaning/clean.py
index be1bb27..1000ea6 100644
--- a/chroma_gui/cleaning/clean.py
+++ b/chroma_gui/cleaning/clean.py
@@ -1,12 +1,19 @@
-import tfs
-import numpy as np
-import pandas as pd
+"""
+Clean
+-----
+
+Main functions to clean the chromaticity data.
+"""
+from __future__ import annotations
+
import logging
-from scipy import signal
-import nafflib
+import nafflib
+import numpy as np
+import pandas as pd
+import tfs
from dateutil.parser import isoparse
-from pathlib import Path
+from scipy import signal
logger = logging.getLogger('chroma_GUI - Cleaning')
logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(name)s - %(levelname)s - %(message)s')
@@ -168,7 +175,7 @@ def get_avg_tune_from_naff(raw_data, start_plateau, end_plateau, variables, seco
# Process each chunk
for i in range(chunks):
data = merged_data[plane][elements_per_chunk * i: elements_per_chunk * (i+1)]
- spectrum, _, _ = NAFFlib.get_tunes(data, 20)
+ spectrum, _, _ = nafflib.get_tunes(data, 20)
for frequency in spectrum:
in_window = window[plane][0] <= frequency <= window[plane][1]
diff --git a/chroma_gui/cleaning/constants.py b/chroma_gui/cleaning/constants.py
index 1a33e87..1788846 100644
--- a/chroma_gui/cleaning/constants.py
+++ b/chroma_gui/cleaning/constants.py
@@ -1,8 +1,17 @@
-RF_VARIABLE = 'ALB.SR4.B{beam}:FGC_FREQ'
+"""
+Constants
+---------
-TUNE_VARS = ['LHC.BQBBQ.CONTINUOUS_HS.B{beam}:EIGEN_FREQ_1',
- 'LHC.BQBBQ.CONTINUOUS_HS.B{beam}:EIGEN_FREQ_2']
-X_VAR_INDICATOR = 'EIGEN_FREQ_1'
+Constants for the cleaning module.
+"""
-DPP_FILE = "dpp_B{beam}.tfs"
-CLEANED_DPP_FILE = "dpp_cleaned_B{beam}.tfs"
+RF_VARIABLE: str = "ALB.SR4.B{beam}:FGC_FREQ"
+
+TUNE_VARS: list[str] = [
+ "LHC.BQBBQ.CONTINUOUS_HS.B{beam}:EIGEN_FREQ_1",
+ "LHC.BQBBQ.CONTINUOUS_HS.B{beam}:EIGEN_FREQ_2",
+]
+X_VAR_INDICATOR: str = "EIGEN_FREQ_1"
+
+DPP_FILE: str = "dpp_B{beam}.tfs"
+CLEANED_DPP_FILE: str = "dpp_cleaned_B{beam}.tfs"
diff --git a/chroma_gui/cleaning/plateau.py b/chroma_gui/cleaning/plateau.py
index 182f538..daa0ae3 100644
--- a/chroma_gui/cleaning/plateau.py
+++ b/chroma_gui/cleaning/plateau.py
@@ -1,18 +1,29 @@
+"""
+Plateau Finder
+--------------
+
+Functions to find the tune plateaus of the measurement.
+"""
+
+from __future__ import annotations
+
+from datetime import datetime
+
+import pandas as pd
+import tfs
+
from chroma_gui.cleaning.constants import (
+ DPP_FILE,
RF_VARIABLE,
TUNE_VARS,
X_VAR_INDICATOR,
- DPP_FILE,
)
-import pandas as pd
-from datetime import datetime
-import tfs
# Read the data to get the RF first
def get_time_data(line):
- timestamp, value = line.split(',')
- timestamp = datetime.strptime(timestamp, '%Y-%m-%d %H:%M:%S.%f')
+ timestamp, value = line.split(",")
+ timestamp = datetime.strptime(timestamp, "%Y-%m-%d %H:%M:%S.%f")
value = float(value)
return timestamp, value
@@ -24,46 +35,46 @@ def append(df, dict_to_add):
def construct_rf_data_from_csv(timber_data, rf_beam):
- rf_data = pd.DataFrame(columns=['TIME', 'F_RF'])
+ rf_data = pd.DataFrame(columns=["TIME", "F_RF"])
with open(timber_data) as f:
freq = 0 # boolean flag to check the variable
for i, line in enumerate(f):
- if line.startswith('#'):
+ if line.startswith("#"):
continue
- if line.startswith('VARIABLE'):
+ if line.startswith("VARIABLE"):
freq = 0
- if line[len('VARIABLE: '):].strip() == RF_VARIABLE.format(beam=rf_beam):
+ if line[len("VARIABLE: ") :].strip() == RF_VARIABLE.format(beam=rf_beam):
freq = 1
continue
- if freq and not line.startswith('Timestamp') and line.strip() != '':
+ if freq and not line.startswith("Timestamp") and line.strip() != "":
timestamp, value = get_time_data(line)
- rf_data = append(rf_data, {'TIME': timestamp, 'F_RF': value})
+ rf_data = append(rf_data, {"TIME": timestamp, "F_RF": value})
return rf_data
# The timestamps aren't always equal for each variable
# The idea is to get the last known frequency for a specific timestamp
def get_rf(rf_data, timestamp, nominal_rf):
- mask = rf_data['TIME'] <= timestamp
+ mask = rf_data["TIME"] <= timestamp
rf_before = rf_data.loc[mask]
if len(rf_before) == 0: # can happen if data has been redacted
return nominal_rf
last_rf = rf_before.iloc[-1]
- return last_rf['F_RF']
+ return last_rf["F_RF"]
# Same thing for the DPP
def get_dpp(dpp_data, timestamp):
- mask = dpp_data['TIME'] <= timestamp
+ mask = dpp_data["TIME"] <= timestamp
index = dpp_data.loc[mask].index[-1]
- last_dpp = dpp_data.iloc[index]['DPP']
+ last_dpp = dpp_data.iloc[index]["DPP"]
# Sometimes the DPP is in the middle of two plateaus, fix that
- dpp_m1 = dpp_data.iloc[index - 1]['DPP']
- dpp_p1 = dpp_data.iloc[index + 1]['DPP']
+ dpp_m1 = dpp_data.iloc[index - 1]["DPP"]
+ dpp_p1 = dpp_data.iloc[index + 1]["DPP"]
if last_dpp != dpp_m1 and last_dpp != dpp_p1:
return dpp_m1
@@ -81,36 +92,36 @@ def get_tunes_plateaus(timber_data, beam, rf_beam, start_time, end_time, nominal
# If the nominal_rf is None, set it right now as it is the first point of the measurement
if nominal_rf is None:
- nominal_rf = rf_data['F_RF'].iloc[0]
+ nominal_rf = rf_data["F_RF"].iloc[0]
- data = pd.DataFrame(columns=['TIME', 'F_RF', 'QX', 'QY', 'DPP'])
+ data = pd.DataFrame(columns=["TIME", "F_RF", "QX", "QY", "DPP"])
with open(timber_data) as f:
tune = 0 # boolean flag to check the variable
- axis = ''
+ axis = ""
for i, line in enumerate(f):
- if line.startswith('#'):
+ if line.startswith("#"):
continue
- if line.startswith('VARIABLE'):
+ if line.startswith("VARIABLE"):
tune = 0
beam_tune_vars = [var.format(beam=beam) for var in TUNE_VARS]
- if line[len('VARIABLE: '):].strip() in beam_tune_vars:
+ if line[len("VARIABLE: ") :].strip() in beam_tune_vars:
tune = 1
- axis = 'X' if X_VAR_INDICATOR in line else 'Y'
+ axis = "X" if X_VAR_INDICATOR in line else "Y"
continue
- if tune and not line.startswith('Timestamp') and line.strip() != '':
+ if tune and not line.startswith("Timestamp") and line.strip() != "":
timestamp, value = get_time_data(line)
if timestamp < start_time or timestamp > end_time:
continue
- qx = value if axis == 'X' else 0
- qy = value if axis == 'Y' else 0
+ qx = value if axis == "X" else 0
+ qy = value if axis == "Y" else 0
# Check if the timestamp exists already
# If not, add the line
- mask = data['TIME'] == timestamp
+ mask = data["TIME"] == timestamp
if data[mask].empty:
# Get the RF frequency of the point
freq = get_rf(rf_data, timestamp, nominal_rf)
@@ -119,23 +130,25 @@ def get_tunes_plateaus(timber_data, beam, rf_beam, start_time, end_time, nominal
dpp = (-1 / alpha) * (freq - nominal_rf) / nominal_rf
# And add the tunes
- to_add = {'TIME': timestamp, 'F_RF': freq, 'QX': qx, 'QY': qy, 'DPP': dpp}
+ to_add = {"TIME": timestamp, "F_RF": freq, "QX": qx, "QY": qy, "DPP": dpp}
data = append(data, to_add)
else:
i = data[mask].index.to_numpy()[0]
if qy:
- data.at[i, 'QY'] = qy
+ data.at[i, "QY"] = qy
elif qx:
- data.at[i, 'QX'] = qx
+ data.at[i, "QX"] = qx
# Fix types
- new_data = data.astype({'TIME': 'string', 'F_RF': 'float64', 'QX': 'float64', 'QY': 'float64', 'DPP': 'float64'})
+ new_data = data.astype(
+ {"TIME": "string", "F_RF": "float64", "QX": "float64", "QY": "float64", "DPP": "float64"}
+ )
# Create the TFS with its headers
tfs_data = tfs.TfsDataFrame(new_data)
- tfs_data.headers['ALFA'] = alpha
- tfs_data.headers['F_RF'] = nominal_rf
- tfs_data.headers['BEAM'] = f'B{beam}'
+ tfs_data.headers["ALFA"] = alpha
+ tfs_data.headers["F_RF"] = nominal_rf
+ tfs_data.headers["BEAM"] = f"B{beam}"
return tfs_data
@@ -144,8 +157,12 @@ def create_plateau(path, timber_data, rf_beam, start_time, end_time, nominal_rf,
"""
Wrapper function to get plateaus from B1 and B2 and save them
"""
- plateau_b1 = get_tunes_plateaus(path / timber_data, 1, rf_beam, start_time, end_time, nominal_rf, alpha['B1'])
- plateau_b2 = get_tunes_plateaus(path / timber_data, 2, rf_beam, start_time, end_time, nominal_rf, alpha['B2'])
+ plateau_b1 = get_tunes_plateaus(
+ path / timber_data, 1, rf_beam, start_time, end_time, nominal_rf, alpha["B1"]
+ )
+ plateau_b2 = get_tunes_plateaus(
+ path / timber_data, 2, rf_beam, start_time, end_time, nominal_rf, alpha["B2"]
+ )
tfs.write(path / DPP_FILE.format(beam=1), plateau_b1)
tfs.write(path / DPP_FILE.format(beam=2), plateau_b2)
diff --git a/chroma_gui/constants.py b/chroma_gui/constants.py
index ac40314..255ce77 100644
--- a/chroma_gui/constants.py
+++ b/chroma_gui/constants.py
@@ -1,12 +1,18 @@
+"""
+Constants
+---------
+
+This module contains all the constants used in the chroma_gui.
+"""
from pathlib import Path
# Config file
-CONFIG = Path.home() / ".chroma_gui"
+CONFIG: Path = Path.home() / ".chroma_gui"
# Resources
-RESOURCES = Path(__file__).parent / "resources"
+RESOURCES: Path = Path(__file__).parent / "resources"
# Chromaticity
-CHROMA_FILE = "chromaticity.tfs"
-CHROMA_COEFFS = RESOURCES / "chromaticity_coefficients.json"
-RESPONSE_MATRICES = RESOURCES / "response_matrices.json"
\ No newline at end of file
+CHROMA_FILE: str = "chromaticity.tfs"
+CHROMA_COEFFS: Path = RESOURCES / "chromaticity_coefficients.json"
+RESPONSE_MATRICES: Path = RESOURCES / "response_matrices.json"
\ No newline at end of file
diff --git a/chroma_gui/corrections/__init__.py b/chroma_gui/corrections/__init__.py
index 5a59974..80d0689 100644
--- a/chroma_gui/corrections/__init__.py
+++ b/chroma_gui/corrections/__init__.py
@@ -1 +1,8 @@
+"""
+Corrections Module
+------------------
+
+This module contains the functions to correct chromaticity via the response matrix.
+"""
+# ruff: noqa
from . import response_matrix
diff --git a/chroma_gui/corrections/response_matrix.py b/chroma_gui/corrections/response_matrix.py
index d628746..3a2ff61 100644
--- a/chroma_gui/corrections/response_matrix.py
+++ b/chroma_gui/corrections/response_matrix.py
@@ -1,16 +1,28 @@
-import pandas as pd
-import numpy as np
+"""
+Response Matrix
+---------------
+
+Functions to create the response matrix to correct chromaticity.
+"""
+
+from __future__ import annotations
+
+import logging
from copy import deepcopy
+from pathlib import Path
+
+import matplotlib.pyplot as plt
+import numpy as np
+import pandas as pd
import tfs
-import logging
from numpy.linalg import pinv
-import matplotlib.pyplot as plt
-from pathlib import Path
RESOURCES = Path(__file__).parent.parent / "resources"
logger = logging.getLogger("response_matrix")
-logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(name)s - %(levelname)s - %(message)s')
+logging.basicConfig(
+ level=logging.INFO, format="%(asctime)s - %(name)s - %(levelname)s - %(message)s"
+)
def get_rdt_directory(rdt):
@@ -20,17 +32,20 @@ def get_rdt_directory(rdt):
"""
j, k, l, m = [int(r) for r in rdt[1:-2]]
rdt_type = "normal" if (l + m) % 2 == 0 else "skew"
- orders = dict(((1, "dipole"),
- (2, "quadrupole"),
- (3, "sextupole"),
- (4, "octupole"),
- (5, "decapole"),
- (6, "dodecapole"),
- (7, "tetradecapole"),
- (8, "hexadecapole"),
- ))
- return f'{rdt_type}_{orders[j+k+l+m]}'
-
+ orders = dict(
+ (
+ (1, "dipole"),
+ (2, "quadrupole"),
+ (3, "sextupole"),
+ (4, "octupole"),
+ (5, "decapole"),
+ (6, "dodecapole"),
+ (7, "tetradecapole"),
+ (8, "hexadecapole"),
+ )
+ )
+ return f"{rdt_type}_{orders[j + k + l + m]}"
+
def save_full_rdt_df(model, measurements, names, rdt, output=None):
"""
@@ -39,11 +54,14 @@ def save_full_rdt_df(model, measurements, names, rdt, output=None):
# Create the columns of the dataFrame: f1004 re => BPMs, f1004 img => BPMs
model = tfs.read(model)
- bpm_names = sorted(list(model['NAME']))
+ bpm_names = sorted(list(model["NAME"]))
rdt_directory = get_rdt_directory(rdt)
- columns = [[f'{rdt} RE', f'{rdt} IMAG', f'{rdt} AMP'], bpm_names] # The MultiIndex allows to "embed" columns
+ columns = [
+ [f"{rdt} RE", f"{rdt} IMAG", f"{rdt} AMP"],
+ bpm_names,
+ ] # The MultiIndex allows to "embed" columns
columns_multi = pd.MultiIndex.from_product(columns, names=[f"{rdt}", "BPMs"])
# Create the dataframe
@@ -51,17 +69,35 @@ def save_full_rdt_df(model, measurements, names, rdt, output=None):
# Add the data to the DF from every simulation
for i, (kcd_dir, name) in enumerate(zip(measurements, names)):
- rdt_df = tfs.read(kcd_dir / 'rdt' / rdt_directory / f'{rdt}.tfs')
+ rdt_df = tfs.read(kcd_dir / "rdt" / rdt_directory / f"{rdt}.tfs")
# Get the RDT values, join the model with the outer method to get the missing BPMs
- real = rdt_df[['NAME', 'REAL']].merge(model['NAME'], how='right').set_index('NAME').sort_index().squeeze()
- img = rdt_df[['NAME', 'IMAG']].merge(model['NAME'], how='right').set_index('NAME').sort_index().squeeze()
- amp = rdt_df[['NAME', 'AMP']].merge(model['NAME'], how='right').set_index('NAME').sort_index().squeeze()
+ real = (
+ rdt_df[["NAME", "REAL"]]
+ .merge(model["NAME"], how="right")
+ .set_index("NAME")
+ .sort_index()
+ .squeeze()
+ )
+ img = (
+ rdt_df[["NAME", "IMAG"]]
+ .merge(model["NAME"], how="right")
+ .set_index("NAME")
+ .sort_index()
+ .squeeze()
+ )
+ amp = (
+ rdt_df[["NAME", "AMP"]]
+ .merge(model["NAME"], how="right")
+ .set_index("NAME")
+ .sort_index()
+ .squeeze()
+ )
# Add the data with the KCD name as the index
- full_df.loc[name, f'{rdt} RE'] = [real]
- full_df.loc[name, f'{rdt} IMAG'] = [img]
- full_df.loc[name, f'{rdt} AMP'] = [amp]
+ full_df.loc[name, f"{rdt} RE"] = [real]
+ full_df.loc[name, f"{rdt} IMAG"] = [img]
+ full_df.loc[name, f"{rdt} AMP"] = [amp]
# Each data point is a list with only one value, just take the value
full_df = full_df.applymap(lambda x: x[0])
@@ -73,7 +109,9 @@ def save_full_rdt_df(model, measurements, names, rdt, output=None):
class ResponseMatrix:
def __init__(self, correctors, simulation_path, beam):
- self.correctors = correctors # List of circuits used as dict {"corrector": "value used as base"}
+ self.correctors = (
+ correctors # List of circuits used as dict {"corrector": "value used as base"}
+ )
# Store the data in two ways:
# the original data
@@ -153,18 +191,17 @@ def _add_base_rdt_observable(self, rdt, corrector_name="KCD"):
"""
# Read the tracking analysis result for f1004
# Generated by the script that ran the simulations
- rdt_df = pd.read_csv(self.simulation_path / f'complete_{rdt}_B{self.beam}.csv',
- header=[0, 1], index_col=0)
+ rdt_df = pd.read_csv(
+ self.simulation_path / f"complete_{rdt}_B{self.beam}.csv", header=[0, 1], index_col=0
+ )
# Get the reference corrector without any aditional strength
reference = [corr for corr in rdt_df.index if "None" in corr][0]
for corrector in self.correctors.keys():
# Get the Δ of RDT, compared to the base without any MCD
- real = rdt_df.loc[corrector][f'{rdt} RE'] \
- - rdt_df.loc[reference][f'{rdt} RE']
+ real = rdt_df.loc[corrector][f"{rdt} RE"] - rdt_df.loc[reference][f"{rdt} RE"]
- imag = rdt_df.loc[corrector][f'{rdt} IMAG'] \
- - rdt_df.loc[reference][f'{rdt} IMAG']
+ imag = rdt_df.loc[corrector][f"{rdt} IMAG"] - rdt_df.loc[reference][f"{rdt} IMAG"]
# Sort the index so we're sure to have the values where we want them
real = real.sort_index()
@@ -185,18 +222,24 @@ def _add_base_chromaticity_observable(self, order):
Helper function to add the simulated chromaticity to the response matrix
"""
# Read the PTC Normal files containing Q'''x and Q'''y
- ptc_files = {kcd: tfs.read(self.simulation_path / f'ptc_normal_{kcd.split(".")[-1]}.tfs') for kcd in
- self.correctors.keys()}
+ ptc_files = {
+ kcd: tfs.read(self.simulation_path / f"ptc_normal_{kcd.split('.')[-1]}.tfs")
+ for kcd in self.correctors.keys()
+ }
# Read the PTC Normal file without any MCD powering, to serve as base
- ptc_base = tfs.read(self.simulation_path / f'ptc_normal_NoneB{self.beam}.tfs')
+ ptc_base = tfs.read(self.simulation_path / f"ptc_normal_NoneB{self.beam}.tfs")
for corrector in self.correctors.keys():
- chroma_base = ptc_base[ptc_base['ORDER1'] == order]
- chroma = ptc_files[corrector][ptc_files[corrector]['ORDER1'] == order]
- dqx = chroma[chroma['NAME'] == 'DQ1']['VALUE'].values[0] - \
- chroma_base[chroma['NAME'] == 'DQ1']['VALUE'].values[0]
- dqy = chroma[chroma['NAME'] == 'DQ2']['VALUE'].values[0] - \
- chroma_base[chroma['NAME'] == 'DQ2']['VALUE'].values[0]
+ chroma_base = ptc_base[ptc_base["ORDER1"] == order]
+ chroma = ptc_files[corrector][ptc_files[corrector]["ORDER1"] == order]
+ dqx = (
+ chroma[chroma["NAME"] == "DQ1"]["VALUE"].values[0]
+ - chroma_base[chroma["NAME"] == "DQ1"]["VALUE"].values[0]
+ )
+ dqy = (
+ chroma[chroma["NAME"] == "DQ2"]["VALUE"].values[0]
+ - chroma_base[chroma["NAME"] == "DQ2"]["VALUE"].values[0]
+ )
self._add_simulated_global_observable(f"DQ{order}X", corrector, dqx)
self._add_simulated_global_observable(f"DQ{order}Y", corrector, dqy)
@@ -207,13 +250,12 @@ def add_rdt_observable(self, measurement, model, rdt, corrector_name="KCD"):
"""
# Create a dataframe using the model BPMs, containing both measurements
self.model_path = model
- observed_df = save_full_rdt_df(model=model,
- measurements=[measurement],
- names=["Measurement"],
- rdt=rdt)
+ observed_df = save_full_rdt_df(
+ model=model, measurements=[measurement], names=["Measurement"], rdt=rdt
+ )
- re_kcd = observed_df.loc['Measurement'][f'{rdt} RE'].sort_index()
- imag_kcd = observed_df.loc['Measurement'][f'{rdt} IMAG'].sort_index()
+ re_kcd = observed_df.loc["Measurement"][f"{rdt} RE"].sort_index()
+ imag_kcd = observed_df.loc["Measurement"][f"{rdt} IMAG"].sort_index()
self._add_measured_local_observable(f"B{self.beam}_{rdt}_re", re_kcd)
self._add_measured_local_observable(f"B{self.beam}_{rdt}_imag", imag_kcd)
@@ -226,30 +268,30 @@ def add_chromaticity_observable(self, measurement, order, weight):
Adds the given chromaticity measurement and its simulation counterpart to the response matrix
"""
# Get the observed chromaticity
- chroma_df = tfs.read(measurement / 'chromaticity.tfs')
+ chroma_df = tfs.read(measurement / "chromaticity.tfs")
- mask = (chroma_df['BEAM'] == f'B{self.beam}')
+ mask = chroma_df["BEAM"] == f"B{self.beam}"
# Errors to correct
# X Axis
- mask_p = mask & (chroma_df['AXIS'] == 'X')
- dq3x_p = chroma_df[mask_p]['Q3'].values[0]
+ mask_p = mask & (chroma_df["AXIS"] == "X")
+ dq3x_p = chroma_df[mask_p]["Q3"].values[0]
# Y Axis
- mask_p = mask & (chroma_df['AXIS'] == 'Y')
- dq3y_p = chroma_df[mask_p]['Q3'].values[0]
+ mask_p = mask & (chroma_df["AXIS"] == "Y")
+ dq3y_p = chroma_df[mask_p]["Q3"].values[0]
# There is only one value here, a factor is applied to counterbalance the other observables
dqx = dq3x_p
dqy = dq3y_p
# Add the data to the response matrix
- self._add_measured_global_observable(f'DQ{order}X', dqx)
- self._add_measured_global_observable(f'DQ{order}Y', dqy)
+ self._add_measured_global_observable(f"DQ{order}X", dqx)
+ self._add_measured_global_observable(f"DQ{order}Y", dqy)
# Since it is a global observable, a weight needs to be applied to counterbalance other observables
- self.weights[f'DQ{order}X'] = weight
- self.weights[f'DQ{order}Y'] = weight
+ self.weights[f"DQ{order}X"] = weight
+ self.weights[f"DQ{order}Y"] = weight
# Load the simulated data
self._add_base_chromaticity_observable(order=3)
@@ -261,12 +303,12 @@ def add_zero_chromaticity_observable(self, order, weight):
"""
# Add the dummy measurement
zero = np.float64(0)
- self._add_measured_global_observable(f'DQ{order}X', zero)
- self._add_measured_global_observable(f'DQ{order}Y', zero)
+ self._add_measured_global_observable(f"DQ{order}X", zero)
+ self._add_measured_global_observable(f"DQ{order}Y", zero)
# Set the weights
- self.weights[f'DQ{order}X'] = weight
- self.weights[f'DQ{order}Y'] = weight
+ self.weights[f"DQ{order}X"] = weight
+ self.weights[f"DQ{order}Y"] = weight
# Load the simulated data
self._add_base_chromaticity_observable(order=3)
@@ -278,23 +320,27 @@ def add_zero_rdt_observable(self, model, rdt, corrector_name):
self.model_path = model
# Read the simulated RDT to have the right BPMs and basically replace values by 0
- rdt_df = pd.read_csv(self.simulation_path / f'complete_{rdt}_B{self.beam}.csv', header=[0, 1], index_col=0)
+ rdt_df = pd.read_csv(
+ self.simulation_path / f"complete_{rdt}_B{self.beam}.csv", header=[0, 1], index_col=0
+ )
rdt_df = rdt_df.iloc[0:1]
- rdt_df.index = ['Measurement']
+ rdt_df.index = ["Measurement"]
for col in rdt_df.columns:
rdt_df[col].values[:] = 0
# Then as usual, the "zero" measurement is taken as regular Measurement
observed_df = rdt_df
- re_kcd = observed_df.loc['Measurement'][f'{rdt} RE'].sort_index()
- imag_kcd = observed_df.loc['Measurement'][f'{rdt} IMAG'].sort_index()
+ re_kcd = observed_df.loc["Measurement"][f"{rdt} RE"].sort_index()
+ imag_kcd = observed_df.loc["Measurement"][f"{rdt} IMAG"].sort_index()
self._add_measured_local_observable(f"B{self.beam}_{rdt}_re", re_kcd)
self._add_measured_local_observable(f"B{self.beam}_{rdt}_imag", imag_kcd)
# Load the simulated data
self._add_base_rdt_observable(rdt, corrector_name=corrector_name)
- def _clean_local_observables(self, inside_arc_number, clean_nan, clean_outliers, clean_IR, quartiles):
+ def _clean_local_observables(
+ self, inside_arc_number, clean_nan, clean_outliers, clean_IR, quartiles
+ ):
"""
Clean the local observables values:
- Removes all the indices (BPMs) containing NaN values from all simulations and measurement
@@ -327,8 +373,8 @@ def get_bpms_to_remove(series):
def remove_bpms(series, bpms):
# Remove the BPMs from
df = series.reset_index()
- df = df[~df['BPMs'].isin(bpms)]
- df = df.set_index('BPMs').squeeze()
+ df = df[~df["BPMs"].isin(bpms)]
+ df = df.set_index("BPMs").squeeze()
return df
# Iterate a first time to get all the BPMs to remove
@@ -345,11 +391,13 @@ def remove_bpms(series, bpms):
# And then remove those BPMs from all data
for observable in self.simulated_local_observables.keys():
for corrector in self.correctors:
- self.simulated_local_observables[observable][corrector] =\
- remove_bpms(self.simulated_local_observables[observable][corrector], bpms)
+ self.simulated_local_observables[observable][corrector] = remove_bpms(
+ self.simulated_local_observables[observable][corrector], bpms
+ )
for observable in self.measured_local_observables:
- self.measured_local_observables[observable] =\
- remove_bpms(self.measured_local_observables[observable], bpms)
+ self.measured_local_observables[observable] = remove_bpms(
+ self.measured_local_observables[observable], bpms
+ )
# Update the BPMs that are still in use
for bpm in bpms:
@@ -363,15 +411,16 @@ def _remove_ir_bpms(self, inside_arc_number):
Only the BPMs starting with "BPM." will be kept whatever the number.
e.g. BPMs such as "BPMSY" will be removed
"""
+
def filter_bpm(df):
df = pd.DataFrame(data=df)
df = df.reset_index()
- df = df[df['BPMs'].str.startswith('BPM.')]
- mask = df['BPMs'].str.split('.', expand=True)[1]
- mask = mask.str.split('L', expand=True)[0].str.split('R', expand=True)[0]
+ df = df[df["BPMs"].str.startswith("BPM.")]
+ mask = df["BPMs"].str.split(".", expand=True)[1]
+ mask = mask.str.split("L", expand=True)[0].str.split("R", expand=True)[0]
df = df[mask.astype(int) >= inside_arc_number]
- df = df.set_index('BPMs').squeeze()
+ df = df.set_index("BPMs").squeeze()
return df
@@ -384,8 +433,9 @@ def filter_bpm(df):
for observable in self.simulated_local_observables.keys():
for corrector in self.correctors:
original_len = len(self.simulated_local_observables[observable][corrector])
- self.simulated_local_observables[observable][corrector] = \
- filter_bpm(self.simulated_local_observables[observable][corrector])
+ self.simulated_local_observables[observable][corrector] = filter_bpm(
+ self.simulated_local_observables[observable][corrector]
+ )
end_len = len(self.simulated_local_observables[observable][corrector])
# Update the remaining BPMS, it will be the same for each observables/corr
@@ -393,15 +443,18 @@ def filter_bpm(df):
# Measured observables
for observable in self.measured_local_observables:
- self.measured_local_observables[observable] = \
- filter_bpm(self.measured_local_observables[observable])
+ self.measured_local_observables[observable] = filter_bpm(
+ self.measured_local_observables[observable]
+ )
# Update the bpms still in use
for bpm in self.bpms_in_use:
if bpm not in remaining_bpms:
self.bpms_in_use.remove(bpm)
- logger.info(f"Removed {original_len - end_len} IR BPMs from data (BPM.(N). < {inside_arc_number})")
+ logger.info(
+ f"Removed {original_len - end_len} IR BPMs from data (BPM.(N). < {inside_arc_number})"
+ )
def _remove_nan_local_observables(self):
"""
@@ -412,7 +465,9 @@ def _remove_nan_local_observables(self):
# Iterate over the simulated and measured local observables to get the BPMs indices to remove
for observable in self.simulated_local_observables.keys():
for corrector in self.correctors:
- indices += list(np.where(np.isnan(self.simulated_local_observables[observable][corrector]))[0])
+ indices += list(
+ np.where(np.isnan(self.simulated_local_observables[observable][corrector]))[0]
+ )
for observable in self.measured_local_observables.keys():
indices += list(np.where(np.isnan(self.measured_local_observables[observable]))[0])
@@ -423,17 +478,19 @@ def _remove_nan_local_observables(self):
remaining_bpms = []
for observable in self.simulated_local_observables.keys():
for corrector in self.correctors:
- self.simulated_local_observables[observable][corrector] = \
+ self.simulated_local_observables[observable][corrector] = (
self.simulated_local_observables[observable][corrector].drop(
- self.simulated_local_observables[observable][corrector].index[indices])
+ self.simulated_local_observables[observable][corrector].index[indices]
+ )
+ )
# Update the remaining BPMS, it will be the same for each observables/corr
remaining_bpms = list(self.simulated_local_observables[observable][corrector].index)
for observable in self.measured_local_observables.keys():
- self.measured_local_observables[observable] = \
- self.measured_local_observables[observable].drop(
- self.measured_local_observables[observable].index[indices])
-
+ self.measured_local_observables[observable] = self.measured_local_observables[
+ observable
+ ].drop(self.measured_local_observables[observable].index[indices])
+
# Update the bpms still in use
for bpm in self.bpms_in_use:
if bpm not in remaining_bpms:
@@ -456,7 +513,9 @@ def _get_response_matrix(self):
# Global (e.g. chromaticity), to be multiplied by a factor
for observable in self.simulated_global_observables.keys():
- values += [self.simulated_global_observables[observable][corrector]] * self.weights[observable]
+ values += [self.simulated_global_observables[observable][corrector]] * self.weights[
+ observable
+ ]
# Divide the observables by the strength of the corrector, and add it to the temporary response matrix
values = np.array(values)
@@ -467,8 +526,10 @@ def _get_response_matrix(self):
for corrector in self.correctors.keys():
r_matrix.append(tmp_r_matrix[corrector])
r_matrix = np.vstack(r_matrix).T
- logger.info(f"Creating a response matrix with {r_matrix.shape[0]} observables "
- f"and {r_matrix.shape[1]} correctors")
+ logger.info(
+ f"Creating a response matrix with {r_matrix.shape[0]} observables "
+ f"and {r_matrix.shape[1]} correctors"
+ )
# Replace the NaN values in the Matrix in case there was no cleaning
r_matrix[np.isnan(r_matrix)] = 0
@@ -507,23 +568,35 @@ def _copy_original_observables(self):
# Simulated
for observable in self.original_simulated_local_observables.keys():
for corrector in self.correctors:
- self.simulated_local_observables[observable][corrector] = \
+ self.simulated_local_observables[observable][corrector] = (
self.original_simulated_local_observables[observable][corrector].copy()
+ )
for observable in self.original_simulated_global_observables.keys():
for corrector in self.correctors:
- self.simulated_global_observables[observable][corrector] = \
+ self.simulated_global_observables[observable][corrector] = (
self.original_simulated_global_observables[observable][corrector].copy()
+ )
# Measured
for observable in self.original_measured_local_observables.keys():
- self.measured_local_observables[observable] = \
- self.original_measured_local_observables[observable].copy()
+ self.measured_local_observables[observable] = self.original_measured_local_observables[
+ observable
+ ].copy()
for observable in self.original_measured_global_observables.keys():
- self.measured_global_observables[observable] = \
+ self.measured_global_observables[observable] = (
self.original_measured_global_observables[observable].copy()
-
- def get_corrections(self, clean_nan=True, clean_outliers=True, clean_IR=True,
- rcond=0.01, inside_arc_number=10, quartiles=None, decimals_round=0):
+ )
+
+ def get_corrections(
+ self,
+ clean_nan=True,
+ clean_outliers=True,
+ clean_IR=True,
+ rcond=0.01,
+ inside_arc_number=10,
+ quartiles=None,
+ decimals_round=0,
+ ):
"""
Computes corrections for the previously given observables.
"""
@@ -534,7 +607,9 @@ def get_corrections(self, clean_nan=True, clean_outliers=True, clean_IR=True,
self._copy_original_observables()
# Start by removing the NaN values and the IR BPMs in the observables
- self._clean_local_observables(inside_arc_number, clean_nan, clean_outliers, clean_IR, quartiles)
+ self._clean_local_observables(
+ inside_arc_number, clean_nan, clean_outliers, clean_IR, quartiles
+ )
# Get the response matrix from the simulated local and global observables
r_matrix = self._get_response_matrix()
@@ -551,7 +626,7 @@ def get_corrections(self, clean_nan=True, clean_outliers=True, clean_IR=True,
corrections = {}
for key, val in zip(self.correctors, values):
# Negative because it's a correction
- corrections[f'{key}'] = -round(val, decimals_round)
+ corrections[f"{key}"] = -round(val, decimals_round)
return corrections
@@ -559,29 +634,30 @@ def plot_rdt_with_cleaning(self, component, rdt, model, output=None, ylim=None):
"""
Plots the given RDT component (real, imag, amp) with the cleaned BPMs highlighted
"""
+
def format_unit(unit: int):
num, denum = unit.as_integer_ratio()
if num == 0:
- unit = ''
- elif denum == '':
- unit = f'{num}'
+ unit = ""
+ elif denum == 1:
+ unit = f"{num}"
elif denum == -1:
- unit = 'f{-num}'
+ unit = f"{-num}"
else:
- unit = f'{num}/{denum}'
+ unit = f"{num}/{denum}"
return unit
def get_s_from_bpm(series):
list_s = []
list_val = []
for index, value in series.items():
- s = model[model['NAME'] == index]['S'].values[0]
+ s = model[model["NAME"] == index]["S"].values[0]
list_s.append(s)
list_val.append(value)
return zip(*sorted(zip(list_s, list_val)))
def amp(re, im):
- return (re ** 2 + im ** 2) ** 0.5
+ return (re**2 + im**2) ** 0.5
def scatter_plot(series, label):
x, y = get_s_from_bpm(series)
@@ -590,42 +666,55 @@ def scatter_plot(series, label):
model = tfs.read(model)
# Get the BPMs that have been cleaned out
- index = self.original_measured_local_observables[f'B{self.beam}_{rdt}_re'].index.\
- symmetric_difference(self.measured_local_observables[f'B{self.beam}_{rdt}_re'].index)
+ index = self.original_measured_local_observables[
+ f"B{self.beam}_{rdt}_re"
+ ].index.symmetric_difference(
+ self.measured_local_observables[f"B{self.beam}_{rdt}_re"].index
+ )
fig, ax = plt.subplots(figsize=(15, 6))
if "real" == component:
print(self.model_path)
- scatter_plot(self.measured_local_observables[f'B{self.beam}_{rdt}_re'], label="Real Clean")
- scatter_plot(self.original_measured_local_observables[f'B{self.beam}_{rdt}_re'].loc[index], label="Removed BPM")
+ scatter_plot(
+ self.measured_local_observables[f"B{self.beam}_{rdt}_re"], label="Real Clean"
+ )
+ scatter_plot(
+ self.original_measured_local_observables[f"B{self.beam}_{rdt}_re"].loc[index],
+ label="Removed BPM",
+ )
if "imag" == component:
- scatter_plot(self.measured_local_observables[f'B{self.beam}_{rdt}_imag'],
- label="Imag Clean")
- scatter_plot(self.original_measured_local_observables[f'B{self.beam}_{rdt}_imag'].\
- loc[index],
- label="Removed BPM")
+ scatter_plot(
+ self.measured_local_observables[f"B{self.beam}_{rdt}_imag"], label="Imag Clean"
+ )
+ scatter_plot(
+ self.original_measured_local_observables[f"B{self.beam}_{rdt}_imag"].loc[index],
+ label="Removed BPM",
+ )
if "amp" == component:
- a = amp(self.measured_local_observables[f'B{self.beam}_{rdt}_re'],
- self.measured_local_observables[f'B{self.beam}_{rdt}_imag'])
+ a = amp(
+ self.measured_local_observables[f"B{self.beam}_{rdt}_re"],
+ self.measured_local_observables[f"B{self.beam}_{rdt}_imag"],
+ )
scatter_plot(a, label="Amplitude Clean")
- a = amp(self.original_measured_local_observables[f'B{self.beam}_{rdt}_re'].loc[index],
- self.original_measured_local_observables[f'B{self.beam}_{rdt}_imag'].\
- loc[index])
+ a = amp(
+ self.original_measured_local_observables[f"B{self.beam}_{rdt}_re"].loc[index],
+ self.original_measured_local_observables[f"B{self.beam}_{rdt}_imag"].loc[index],
+ )
scatter_plot(a, label="Removed BPM")
# Get the unit of the y axis
- j, k, l, m = [int(e) for e in rdt[1:-2]]
+ j, k, l, m = [int(e) for e in rdt[1:-2]] # noqa: E741
unit_magnet = -(j + k + l + m) # unit is m^(-order)
unit = unit_magnet + 1
- unit += (j+k)/2 + (l+m)/2
+ unit += (j + k) / 2 + (l + m) / 2
unit = format_unit(unit)
ax.legend()
- ax.set_xlabel('s [m]')
- ax.set_ylabel(f'{component.title()} [$m^{{{unit}}}$]')
+ ax.set_xlabel("s [m]")
+ ax.set_ylabel(f"{component.title()} [$m^{{{unit}}}$]")
if ylim is not None:
ax.set_ylim(*ylim)
if output:
plt.savefig(output)
- logger.info(f'{component.title()} component of {rdt} saved to {output}')
+ logger.info(f"{component.title()} component of {rdt} saved to {output}")
diff --git a/chroma_gui/main.py b/chroma_gui/main.py
index 37693e3..24fac7d 100644
--- a/chroma_gui/main.py
+++ b/chroma_gui/main.py
@@ -1,82 +1,90 @@
-import time
-from datetime import datetime
-import typing
+"""
+Main
+----
+
+Main module for the chroma-gui.
+"""
+from __future__ import annotations
+
import ast
-import sys
import json
-from json import JSONDecodeError
-import tfs
-from typing import Tuple, List, Dict
-import pyperclip
-import matplotlib.pyplot as plt
-
import logging
-from pathlib import Path
-from dataclasses import dataclass, field, fields, asdict
+import sys
import traceback
+import typing
+from dataclasses import dataclass, field, fields
+from datetime import datetime
+from json import JSONDecodeError
+from pathlib import Path
+
+import matplotlib.pyplot as plt
+import pyperclip
# PyQt libraries
import qtawesome as qta
-from PyQt5.QtGui import QPalette, QStandardItem, QFontMetrics
+import tfs
+from PyQt5 import QtTest, uic
from PyQt5.QtCore import (
- QDateTime,
- pyqtSignal,
- QThread,
QAbstractTableModel,
- QModelIndex,
- Qt,
+ QDateTime,
QEvent,
+ QModelIndex,
QSize,
+ Qt,
+ QThread,
+ pyqtSignal,
pyqtSlot,
)
-from PyQt5 import uic, QtTest
+from PyQt5.QtGui import QFontMetrics, QPalette, QStandardItem
from PyQt5.QtWidgets import (
- QLabel,
- QMainWindow,
QApplication,
+ QComboBox,
QDialog,
QFileDialog,
+ QHBoxLayout,
+ QHeaderView,
+ QLabel,
+ QLayout,
+ QListWidgetItem,
+ QMainWindow,
QMessageBox,
- QTableView,
QSizePolicy,
- QHeaderView,
- QComboBox,
QStyledItemDelegate,
+ QTableView,
qApp,
- QListWidgetItem,
- QHBoxLayout,
- QLayout,
)
+import chroma_gui.cleaning.constants as cleaning_constants
+import chroma_gui.timber as timber
+
# Chroma-GUI specific libraries
from chroma_gui import __version__ as chroma_gui_version
-import chroma_gui.timber as timber
-from chroma_gui.timber import (
- get_variables_names_from_csv,
- read_variables_from_csv,
+from chroma_gui.chromaticity import (
+ construct_chroma_tfs,
+ get_chromaticity,
+ get_chromaticity_df_with_notation,
+ get_chromaticity_formula,
+ get_maximum_chromaticity,
)
-from chroma_gui.plotting.widget import MplWidget, mathTex_to_QPixmap
+from chroma_gui.cleaning import clean, plateau
+from chroma_gui.constants import CHROMA_COEFFS, CHROMA_FILE, CONFIG, RESPONSE_MATRICES
+from chroma_gui.corrections import response_matrix
from chroma_gui.plotting import (
+ plot_chromaticity,
plot_dpp,
plot_freq,
plot_timber,
- plot_chromaticity,
save_chromaticity_plot,
)
-from chroma_gui.cleaning import plateau, clean
-from chroma_gui.chromaticity import (
- get_chromaticity,
- construct_chroma_tfs,
- get_maximum_chromaticity,
- get_chromaticity_df_with_notation,
- get_chromaticity_formula
+from chroma_gui.plotting.widget import MplWidget, mathTex_to_QPixmap
+from chroma_gui.timber import (
+ get_variables_names_from_csv,
)
-import chroma_gui.cleaning.constants as cleaning_constants
-from chroma_gui.constants import CHROMA_FILE, RESPONSE_MATRICES, CONFIG, CHROMA_COEFFS
-from chroma_gui.corrections import response_matrix
-logger = logging.getLogger('chroma_GUI')
-logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(name)s - %(levelname)s - %(message)s')
+logger = logging.getLogger("chroma_GUI")
+logging.basicConfig(
+ level=logging.INFO, format="%(asctime)s - %(name)s - %(levelname)s - %(message)s"
+)
RESOURCES = Path(__file__).parent / "resources"
@@ -92,7 +100,7 @@ def __init__(self, dataframe: tfs.TfsDataFrame, parent=None):
self._dataframe = dataframe
def rowCount(self, parent=QModelIndex()) -> int:
- """ Override method from QAbstractTableModel
+ """Override method from QAbstractTableModel
Return row count of the pandas DataFrame
"""
@@ -149,9 +157,10 @@ class Measurement:
"""
holds measurement specific data such as paths and measurement times
"""
+
# Paths for the measurement itself and the two beam models
path: Path
- model_path: Dict[str, Path] = field(default_factory=lambda: {"B1": None, "B2": None})
+ model_path: dict[str, Path] = field(default_factory=lambda: {"B1": None, "B2": None})
# Metadata about the measurement
description: str = None
@@ -166,11 +175,11 @@ class Measurement:
cleaning_end_time: datetime = None
@classmethod
- def from_folder(cls: typing.Type["Measurement"], path: Path):
+ def from_folder(cls: Measurement, path: Path):
"""
Returns a Measurement object created via the "measurement.info" contained in the given folder
"""
- measurement_info_path = Path(path) / 'measurement.info'
+ measurement_info_path = Path(path) / "measurement.info"
measurement_info = json.load(open(measurement_info_path))
measurement = cls(**measurement_info)
@@ -184,7 +193,7 @@ def from_folder(cls: typing.Type["Measurement"], path: Path):
setattr(measurement, field.name, datetime.fromisoformat(value))
elif field.type is Path: # Paths
setattr(measurement, field.name, Path(value))
- elif field.type is Dict[str, Path]: # Dictionaries with Paths
+ elif field.type is dict[str, Path]: # Dictionaries with Paths
for key in value.keys():
getattr(measurement, field.name)[key] = Path(value[key])
@@ -193,11 +202,10 @@ def from_folder(cls: typing.Type["Measurement"], path: Path):
def get_alpha(self):
alpha = {"B1": None, "B2": None}
for beam in alpha.keys():
- twiss = tfs.read(Path(self.model_path[beam]) / 'twiss.dat')
- alpha[beam] = twiss.headers['ALFA']
+ twiss = tfs.read(Path(self.model_path[beam]) / "twiss.dat")
+ alpha[beam] = twiss.headers["ALFA"]
return alpha
-
def get_timber_status(self):
"""
Check if a timber extraction data exists and return a small message with details.
@@ -239,14 +247,14 @@ def get_chroma_status(self):
path_chroma = Path(self.path / CHROMA_FILE)
if path_chroma.exists():
chroma_tfs = tfs.read(path_chroma)
- return True, (chroma_tfs.headers['MIN_FIT_ORDER'], chroma_tfs.headers['MAX_FIT_ORDER'])
+ return True, (chroma_tfs.headers["MIN_FIT_ORDER"], chroma_tfs.headers["MAX_FIT_ORDER"])
return False, None
def save_as_json(self):
"""
Saves the measurement fields as json
"""
- measurement_info_path = Path(self.path) / 'measurement.info'
+ measurement_info_path = Path(self.path) / "measurement.info"
# Converts the types to something json can serialize
data = {}
@@ -259,14 +267,14 @@ def save_as_json(self):
data[field.name] = value.isoformat()
elif field.type is Path: # Paths
data[field.name] = str(value)
- elif field.type is Dict[str, Path]: # Dictionaries with Paths
+ elif field.type is dict[str, Path]: # Dictionaries with Paths
data[field.name] = {}
for key in value.keys():
data[field.name][key] = str(value[key])
else:
data[field.name] = value
- json.dump(data, open(measurement_info_path, 'w'), indent=4)
+ json.dump(data, open(measurement_info_path, "w"), indent=4)
class ExternalProgram(QThread):
@@ -301,7 +309,7 @@ def extractTimber(self):
# If the user tells us to extract the RAW data, save it
if main_window.rawBBQCheckBox.isChecked():
data = timber.extract.extract_raw_variables(start, end)
- #timber.extract.save_as_pickle(measurement_path, data)
+ # timber.extract.save_as_pickle(measurement_path, data)
timber.extract.save_as_hdf(measurement_path, data)
logger.info("PyTimber extraction finished")
@@ -309,26 +317,63 @@ def extractTimber(self):
def createPlateaus(self):
path, filename, rf_beam, start, end, nominal_rf, alpha = self.args
- plateau.create_plateau(path, filename, rf_beam, start_time=start,
- end_time=end, nominal_rf=nominal_rf, alpha=alpha)
+ plateau.create_plateau(
+ path,
+ filename,
+ rf_beam,
+ start_time=start,
+ end_time=end,
+ nominal_rf=nominal_rf,
+ alpha=alpha,
+ )
self.finished.emit()
def cleanTune(self):
- (input_file_B1, input_file_B2, output_path, output_filename_B1, output_filename_B2, qx_window, qy_window,
- quartiles, plateau_length, bad_tunes) = self.args
+ (
+ input_file_B1,
+ input_file_B2,
+ output_path,
+ output_filename_B1,
+ output_filename_B2,
+ qx_window,
+ qy_window,
+ quartiles,
+ plateau_length,
+ bad_tunes,
+ ) = self.args
# Reset the progress bar
self.base_progress = 0
self.progress_callback(0)
# Beam 1
- clean.clean_data_for_beam(input_file_B1, output_path, output_filename_B1, qx_window, qy_window, quartiles,
- plateau_length, bad_tunes, method="bbq", signal=self.progress)
+ clean.clean_data_for_beam(
+ input_file_B1,
+ output_path,
+ output_filename_B1,
+ qx_window,
+ qy_window,
+ quartiles,
+ plateau_length,
+ bad_tunes,
+ method="bbq",
+ signal=self.progress,
+ )
# Beam 2
self.base_progress = 100 # max value is 200
- clean.clean_data_for_beam(input_file_B2, output_path, output_filename_B2, qx_window, qy_window, quartiles,
- plateau_length, bad_tunes, method="bbq", signal=self.progress)
+ clean.clean_data_for_beam(
+ input_file_B2,
+ output_path,
+ output_filename_B2,
+ qx_window,
+ qy_window,
+ quartiles,
+ plateau_length,
+ bad_tunes,
+ method="bbq",
+ signal=self.progress,
+ )
self.progress_callback(100)
self.finished.emit()
@@ -339,8 +384,20 @@ def progress_callback(self, progress):
main_window.cleaningProgressBar.setValue(self.base_progress + int(progress))
def cleanTuneRawBBQ(self):
- (input_file, input_file_raw, output_path, output_filename_B1, output_filename_B2, qx_window, qy_window,
- plateau_length, seconds_step, kernel_size, method, bad_tunes) = self.args
+ (
+ input_file,
+ input_file_raw,
+ output_path,
+ output_filename_B1,
+ output_filename_B2,
+ qx_window,
+ qy_window,
+ plateau_length,
+ seconds_step,
+ kernel_size,
+ method,
+ bad_tunes,
+ ) = self.args
quartiles = None
@@ -349,15 +406,41 @@ def cleanTuneRawBBQ(self):
self.progress_callback(0)
# Beam 1
- clean.clean_data_for_beam(input_file, output_path, output_filename_B1, qx_window, qy_window, quartiles,
- plateau_length, bad_tunes, method=method, raw_bbq_file=input_file_raw,
- seconds_step=seconds_step, kernel_size=kernel_size, beam=1, signal=self.progress)
+ clean.clean_data_for_beam(
+ input_file,
+ output_path,
+ output_filename_B1,
+ qx_window,
+ qy_window,
+ quartiles,
+ plateau_length,
+ bad_tunes,
+ method=method,
+ raw_bbq_file=input_file_raw,
+ seconds_step=seconds_step,
+ kernel_size=kernel_size,
+ beam=1,
+ signal=self.progress,
+ )
# Beam 2
self.base_progress = 100
- clean.clean_data_for_beam(input_file, output_path, output_filename_B2, qx_window, qy_window, quartiles,
- plateau_length, bad_tunes, method=method, raw_bbq_file=input_file_raw,
- seconds_step=seconds_step, kernel_size=kernel_size, beam=2, signal=self.progress)
+ clean.clean_data_for_beam(
+ input_file,
+ output_path,
+ output_filename_B2,
+ qx_window,
+ qy_window,
+ quartiles,
+ plateau_length,
+ bad_tunes,
+ method=method,
+ raw_bbq_file=input_file_raw,
+ seconds_step=seconds_step,
+ kernel_size=kernel_size,
+ beam=2,
+ signal=self.progress,
+ )
self.progress_callback(100)
self.finished.emit()
@@ -370,26 +453,36 @@ def computeChroma(self):
# Beam 1
if input_file_B1 is not None:
- chroma_tfs = get_chromaticity(input_file_B1, chroma_tfs, dpp_range, fit_orders, 'X')
- chroma_tfs = get_chromaticity(input_file_B1, chroma_tfs, dpp_range, fit_orders, 'Y')
+ chroma_tfs = get_chromaticity(input_file_B1, chroma_tfs, dpp_range, fit_orders, "X")
+ chroma_tfs = get_chromaticity(input_file_B1, chroma_tfs, dpp_range, fit_orders, "Y")
# Beam 2
if input_file_B2 is not None:
- chroma_tfs = get_chromaticity(input_file_B2, chroma_tfs, dpp_range, fit_orders, 'X')
- chroma_tfs = get_chromaticity(input_file_B2, chroma_tfs, dpp_range, fit_orders, 'Y')
+ chroma_tfs = get_chromaticity(input_file_B2, chroma_tfs, dpp_range, fit_orders, "X")
+ chroma_tfs = get_chromaticity(input_file_B2, chroma_tfs, dpp_range, fit_orders, "Y")
tfs.write(output_path / CHROMA_FILE, chroma_tfs)
self.finished.emit()
def computeCorrections(self):
- optics_paths, measurement_path, method, observables, chroma_factor, rcond, keep_dq3_constant,\
- keep_rdt_constant, clean_nan, clean_outliers, clean_IR_number, optics_name = self.args
-
- text = {1: '',
- 2: ''}
- corr_sum = {1: 0,
- 2: 0}
+ (
+ optics_paths,
+ measurement_path,
+ method,
+ observables,
+ chroma_factor,
+ rcond,
+ keep_dq3_constant,
+ keep_rdt_constant,
+ clean_nan,
+ clean_outliers,
+ clean_IR_number,
+ optics_name,
+ ) = self.args
+
+ text = {1: "", 2: ""}
+ corr_sum = {1: 0, 2: 0}
if method == "Global":
chromaticity_values = tfs.read(measurement_path / CHROMA_FILE)
coefficients = json.load(open(CHROMA_COEFFS))
@@ -399,17 +492,22 @@ def computeCorrections(self):
order = obs.split("DQ")[1]
# Get the measured values
- mask = chromaticity_values['BEAM'] == f'B{beam}'
- mask = mask & (chromaticity_values['UP_TO_ORDER'] == chromaticity_values['UP_TO_ORDER'].max())
- mask_x = mask & (chromaticity_values['AXIS'] == 'X')
- mask_y = mask & (chromaticity_values['AXIS'] == 'Y')
- dqx = chromaticity_values[mask_x][f'Q{order}'].values[0]
- dqy = chromaticity_values[mask_y][f'Q{order}'].values[0]
+ mask = chromaticity_values["BEAM"] == f"B{beam}"
+ mask = mask & (
+ chromaticity_values["UP_TO_ORDER"]
+ == chromaticity_values["UP_TO_ORDER"].max()
+ )
+ mask_x = mask & (chromaticity_values["AXIS"] == "X")
+ mask_y = mask & (chromaticity_values["AXIS"] == "Y")
+ dqx = chromaticity_values[mask_x][f"Q{order}"].values[0]
+ dqy = chromaticity_values[mask_y][f"Q{order}"].values[0]
# The chromaticity is simply an affine function that depends on the corrector strength
# Get the point where dqx and dqy cross to minimize both planes
- dq_corr = (dqy - dqx) / (coefficients[str(beam)][order][0] - coefficients[str(beam)][order][1])
- text[beam] = text[beam] + f'DQ{order}Corrector.B{beam} = {dq_corr:6.4f} ;\n'
+ dq_corr = (dqy - dqx) / (
+ coefficients[str(beam)][order][0] - coefficients[str(beam)][order][1]
+ )
+ text[beam] = text[beam] + f"DQ{order}Corrector.B{beam} = {dq_corr:6.4f} ;\n"
# Update the sum
corr_sum[beam] += dq_corr
@@ -420,20 +518,42 @@ def computeCorrections(self):
logger.info(f"Computing corrections for Beam {beam}")
# Get the strengths of the magnets used for simulation
- strengths_mcd = json.load(open(RESOURCES / "corrections" / optics_name / "normal_decapole" / "strengths.json"))
+ strengths_mcd = json.load(
+ open(
+ RESOURCES
+ / "corrections"
+ / optics_name
+ / "normal_decapole"
+ / "strengths.json"
+ )
+ )
# Create the basic response matrix object
simulations = Path(RESOURCES / "corrections" / optics_name / "normal_decapole")
- resp = response_matrix.ResponseMatrix(strengths_mcd[str(beam)], simulations, beam=beam)
+ resp = response_matrix.ResponseMatrix(
+ strengths_mcd[str(beam)], simulations, beam=beam
+ )
# Add the observables
# Add the RDT to the response matrix
if keep_rdt_constant:
- model_path = RESOURCES / "corrections" / optics_name / "normal_decapole" / f"twiss_b{beam}.tfs"
- resp.add_zero_rdt_observable(model_path, 'f1004_x')
+ model_path = (
+ RESOURCES
+ / "corrections"
+ / optics_name
+ / "normal_decapole"
+ / f"twiss_b{beam}.tfs"
+ )
+ resp.add_zero_rdt_observable(model_path, "f1004_x")
elif "f1004" in observables:
optics_path = optics_paths[beam]
- model_path = RESOURCES / "corrections" / optics_name / "normal_decapole" / f"twiss_b{beam}.tfs"
+ model_path = (
+ RESOURCES
+ / "corrections"
+ / optics_name
+ / "normal_decapole"
+ / f"twiss_b{beam}.tfs"
+ )
resp.add_rdt_observable(Path(optics_path), model_path, "f1004_x")
# Add the Chromaticity to the response matrix
@@ -444,20 +564,21 @@ def computeCorrections(self):
resp.add_chromaticity_observable(chroma_path, order=3, weight=chroma_factor)
# Get the corrections
- corrections = resp.get_corrections(rcond=rcond,
- clean_nan=clean_nan,
- clean_outliers=clean_outliers,
- clean_IR=(clean_IR_number != 0),
- inside_arc_number=clean_IR_number
- )
+ corrections = resp.get_corrections(
+ rcond=rcond,
+ clean_nan=clean_nan,
+ clean_outliers=clean_outliers,
+ clean_IR=(clean_IR_number != 0),
+ inside_arc_number=clean_IR_number,
+ )
# Set the text edits with the computed corrections
for key, val in corrections.items():
if val > 0:
text[beam] += f"{key} := {key} + {val} ;\n"
else:
- text[beam] += f"{key} := {key} - {val*-1} ;\n"
-
+ text[beam] += f"{key} := {key} - {val * -1} ;\n"
+
corr_sum[beam] += val
else:
logger.error(f"Invalid method for corrections: {method}")
@@ -467,11 +588,11 @@ def computeCorrections(self):
# Set the text for the corrections
main_window = findMainWindow()
for beam in [1, 2]:
- main_window.corrections[f'B{beam}'] = text[beam]
+ main_window.corrections[f"B{beam}"] = text[beam]
# Display the sum of corrections
- main_window.sumB1Label.setText(str(round(corr_sum[1],2)))
- main_window.sumB2Label.setText(str(round(corr_sum[2],2)))
+ main_window.sumB1Label.setText(str(round(corr_sum[1], 2)))
+ main_window.sumB2Label.setText(str(round(corr_sum[2], 2)))
self.finished.emit()
@@ -596,20 +717,21 @@ class Config:
"""
Class for storing user preferences
"""
+
# New Measurement Window
- model_path: Path = Path('/user/slops/data/LHC_DATA/OP_DATA/Betabeat/')
- measurements_path: Path = Path('/user/slops/data/LHC_DATA/OP_DATA/Betabeat/')
+ model_path: Path = Path("/user/slops/data/LHC_DATA/OP_DATA/Betabeat/")
+ measurements_path: Path = Path("/user/slops/data/LHC_DATA/OP_DATA/Betabeat/")
# Timber
extract_raw_timber: bool = False
# Cleaning
rf_beam: float = 1
- qx_window: Tuple[float, float] = (0.24, 0.31)
- qy_window: Tuple[float, float] = (0.29, 0.34)
- quartiles: Tuple[float, float] = (0.20, 0.80)
+ qx_window: tuple[float, float] = (0.24, 0.31)
+ qy_window: tuple[float, float] = (0.29, 0.34)
+ quartiles: tuple[float, float] = (0.20, 0.80)
plateau_length: int = 15
- bad_tune_lines: List[Tuple[float, float]] = field(default_factory=lambda: [(0.2665, 0.2670)])
+ bad_tune_lines: list[tuple[float, float]] = field(default_factory=lambda: [(0.2665, 0.2670)])
plot_dpp: bool = False
plot_delta_rf: bool = False
@@ -618,10 +740,8 @@ class Config:
rcParams: str = None
@classmethod
- def from_dict(cls: typing.Type["Config"], obj: dict):
- return cls(
- **obj
- )
+    def from_dict(cls: type[Config], obj: dict):
+ return cls(**obj)
def save_field(self, field, data):
"""
@@ -630,7 +750,7 @@ def save_field(self, field, data):
logger.info(f"Saving config field {field}")
# Read the config
- config_fp = open(CONFIG, 'r+')
+ config_fp = open(CONFIG, "r+")
file = json.load(config_fp)
file[field] = data
config_fp.close()
@@ -718,15 +838,14 @@ def __init__(self, parent=None):
self.setInfoIcons()
# R2 scores for each chromaticity fit
- self.r2scores = {"B1": {"X": 0, "Y": 0},
- "B2": {"X": 0, "Y": 0}}
+ self.r2scores = {"B1": {"X": 0, "Y": 0}, "B2": {"X": 0, "Y": 0}}
def setInfoIcons(self):
"""
Iterate through all the labels in the class that have a tooltip, and place a proper info icon next to it
"""
for name, obj in vars(self).items():
- if type(obj) == QLabel:
+ if type(obj) is QLabel:
text = obj.text()
tooltip = obj.toolTip()
if tooltip.strip() != "":
@@ -755,11 +874,12 @@ def setInfoIcons(self):
def applyMplStyle(self):
if self.config.rcParams is None:
plt.style.use(RESOURCES / "chroma_gui.mplstyle")
- else:
- for line in self.config.rcParams.split('\n'):
- if not line.startswith("#") and line.strip() != "":
- key, value = [e.strip() for e in line.split(':')]
- plt.style.use({key: value})
+ return
+
+ for line in self.config.rcParams.split("\n"):
+ if not line.startswith("#") and line.strip() != "":
+ key, value = [e.strip() for e in line.split(":")]
+ plt.style.use({key: value})
def loadConfig(self):
if CONFIG.exists():
@@ -781,7 +901,9 @@ def loadConfig(self):
self.qyWindowHigh.setValue(self.config.qy_window[1])
self.q1Quartile.setValue(self.config.quartiles[0])
self.q3Quartile.setValue(self.config.quartiles[1])
- self.rfBeamComboBox.setCurrentIndex(self.config.rf_beam - 1) # Index starts at 0: Beam - 1 = index
+ self.rfBeamComboBox.setCurrentIndex(
+ self.config.rf_beam - 1
+ ) # Index starts at 0: Beam - 1 = index
self.plateauLength.setValue(self.config.plateau_length)
self.badTunesLineEdit.setText(str(self.config.bad_tune_lines))
@@ -792,14 +914,18 @@ def loadConfig(self):
def updateLineEdits(self):
# Update the labels in the Main Window
alpha = self.measurement.get_alpha()
- self.alfaB1LineEdit.setText(str(alpha['B1']))
- self.alfaB2LineEdit.setText(str(alpha['B2']))
+ self.alfaB1LineEdit.setText(str(alpha["B1"]))
+ self.alfaB2LineEdit.setText(str(alpha["B2"]))
self.nominalRfLineEdit.setText(str(self.measurement.nominal_rf))
self.descriptionPlainTextEdit.setPlainText(self.measurement.description)
# Set the extraction dates via Qt objects
- start = QDateTime.fromString(self.measurement.start_time.strftime("%Y-%m-%dT%H:%M:%S"), 'yyyy-MM-ddThh:mm:ss')
- end = QDateTime.fromString(self.measurement.end_time.strftime("%Y-%m-%dT%H:%M:%S"), 'yyyy-MM-ddThh:mm:ss')
+ start = QDateTime.fromString(
+ self.measurement.start_time.strftime("%Y-%m-%dT%H:%M:%S"), "yyyy-MM-ddThh:mm:ss"
+ )
+ end = QDateTime.fromString(
+ self.measurement.end_time.strftime("%Y-%m-%dT%H:%M:%S"), "yyyy-MM-ddThh:mm:ss"
+ )
self.startTimeTimberEdit.setDateTime(start)
self.endTimeTimberEdit.setDateTime(end)
@@ -816,12 +942,22 @@ def updateLineEdits(self):
self.updateTimberTable(self.measurement)
self.enableCleaningTab(True)
# Get the times from the measurement.info, or set them to the Timber extraction times
- cleaning_start = QDateTime.fromString(self.measurement.cleaning_start_time.strftime("%Y-%m-%dT%H:%M:%S"),
- 'yyyy-MM-ddThh:mm:ss') if self.measurement.cleaning_start_time \
- else start
- cleaning_end = QDateTime.fromString(self.measurement.cleaning_end_time.strftime("%Y-%m-%dT%H:%M:%S"),
- 'yyyy-MM-ddThh:mm:ss') if self.measurement.cleaning_end_time \
- else end
+ cleaning_start = (
+ QDateTime.fromString(
+ self.measurement.cleaning_start_time.strftime("%Y-%m-%dT%H:%M:%S"),
+ "yyyy-MM-ddThh:mm:ss",
+ )
+ if self.measurement.cleaning_start_time
+ else start
+ )
+ cleaning_end = (
+ QDateTime.fromString(
+ self.measurement.cleaning_end_time.strftime("%Y-%m-%dT%H:%M:%S"),
+ "yyyy-MM-ddThh:mm:ss",
+ )
+ if self.measurement.cleaning_end_time
+ else end
+ )
self.startPlateauDateTimeEdit.setDateTime(cleaning_start)
self.endPlateauDateTimeEdit.setDateTime(cleaning_end)
@@ -871,7 +1007,7 @@ def setCorrectionComboBox(self):
self.layoutObservables.update()
# Display the available correction methods
- self.available_observables = json.load(open(RESPONSE_MATRICES))['AVAILABLE_OBSERVABLES']
+ self.available_observables = json.load(open(RESPONSE_MATRICES))["AVAILABLE_OBSERVABLES"]
self.correctionMethodComboBox.addItems(self.available_observables.keys())
# Set the possible optics the response matrix was computed for
@@ -923,28 +1059,30 @@ def openMeasurementClicked(self):
str(self.config.measurements_path),
)
if not folder:
- QMessageBox.warning(self,
- "Failed to open directory",
- f"The directory '{folder}' could not be opened")
+ QMessageBox.warning(
+ self, "Failed to open directory", f"The directory '{folder}' could not be opened"
+ )
return False
# Try to open the measurement information file
try:
self.measurement = Measurement.from_folder(Path(folder))
except OSError as e:
- QMessageBox.warning(self,
- "Failed to open measurement",
- f"{str(e)}")
+ QMessageBox.warning(self, "Failed to open measurement", f"{str(e)}")
logger.error(e)
except JSONDecodeError as e:
- QMessageBox.warning(self,
- "Failed to open measurement",
- f"The file 'measurement.info' is not a valid JSON file")
+ QMessageBox.warning(
+ self,
+ "Failed to open measurement",
+ "The file 'measurement.info' is not a valid JSON file",
+ )
logger.error(e)
except KeyError as e:
- QMessageBox.warning(self,
- "Failed to open measurement",
- f"The file 'measurement.info' does not contain the required keys")
+ QMessageBox.warning(
+ self,
+ "Failed to open measurement",
+ "The file 'measurement.info' does not contain the required keys",
+ )
logger.error(e)
if self.measurement is not None:
@@ -956,12 +1094,14 @@ def saveSettingsClicked(self):
"""
# Check if a measurement has been opened or created already, otherwise why would we save something?
if not self.measurement:
- logger.warning("No measurement has been opened or created, the settings can't be saved.")
+ logger.warning(
+ "No measurement has been opened or created, the settings can't be saved."
+ )
return
# Nominal RF
nominal_rf = self.nominalRfLineEdit.text()
- if nominal_rf.strip() == "0" or nominal_rf.strip() == 'None':
+ if nominal_rf.strip() == "0" or nominal_rf.strip() == "None":
nominal_rf = None
if nominal_rf is not None:
nominal_rf = float(nominal_rf)
@@ -972,7 +1112,7 @@ def saveSettingsClicked(self):
# Save the measurement
self.measurement.save_as_json()
- logger.info('Settings saved!')
+ logger.info("Settings saved!")
def startThread(self, main_function, finish_function, *args):
"""
@@ -980,8 +1120,9 @@ def startThread(self, main_function, finish_function, *args):
Arguments:
- main_function: method of the class `ExternalProgram` to be started as main function of the thread
- finish_function: method of the class `MainWindow` to be called when the thread has finished
- - *args: arguments to be passed to the instantiation of the `ExternalProgram` class, those are the
- `main_function` arguments
+ - args: arguments to be passed to the instantiation of the `ExternalProgram` class,
+ those are the `main_function` arguments
+
"""
# Check if we've got a thread already running
try:
@@ -1029,7 +1170,7 @@ def createPlateausClicked(self):
# Check of the nominal RF exists
# If it is 0 or None, the plateau creation will take the first value in the dataFrame
- if nominal_rf.strip() == "0" or nominal_rf.strip() == 'None':
+ if nominal_rf.strip() == "0" or nominal_rf.strip() == "None":
nominal_rf = None
msg = "The nominal frequency is not set. The first point of the data extracted will be taken. "
msg += "Be sure that this point is the expected one!"
@@ -1044,8 +1185,17 @@ def createPlateausClicked(self):
# Start the plateau creation
logger.info("Starting Plateau Creation")
- self.startThread("createPlateaus", "plateauFinished", self.measurement.path, timber.constants.FILENAME,
- rf_beam, start, end, nominal_rf, self.measurement.get_alpha())
+ self.startThread(
+ "createPlateaus",
+ "plateauFinished",
+ self.measurement.path,
+ timber.constants.FILENAME,
+ rf_beam,
+ start,
+ end,
+ nominal_rf,
+ self.measurement.get_alpha(),
+ )
def cleanDataClicked(self):
# Get values from the GUI
@@ -1078,38 +1228,41 @@ def cleanDataClicked(self):
logger.info("Starting Tune Cleaning")
if not self.useRawBBQCheckBox.isChecked(): # classic BBQ
- self.startThread("cleanTune",
- "cleaningFinished",
- self.measurement.path / cleaning_constants.DPP_FILE.format(beam=1),
- self.measurement.path / cleaning_constants.DPP_FILE.format(beam=2),
- self.measurement.path,
- cleaning_constants.CLEANED_DPP_FILE.format(beam=1),
- cleaning_constants.CLEANED_DPP_FILE.format(beam=2),
- (qx_low, qx_high),
- (qy_low, qy_high),
- (q1_quartile, q3_quartile),
- plateau_length,
- bad_tunes)
+ self.startThread(
+ "cleanTune",
+ "cleaningFinished",
+ self.measurement.path / cleaning_constants.DPP_FILE.format(beam=1),
+ self.measurement.path / cleaning_constants.DPP_FILE.format(beam=2),
+ self.measurement.path,
+ cleaning_constants.CLEANED_DPP_FILE.format(beam=1),
+ cleaning_constants.CLEANED_DPP_FILE.format(beam=2),
+ (qx_low, qx_high),
+ (qy_low, qy_high),
+ (q1_quartile, q3_quartile),
+ plateau_length,
+ bad_tunes,
+ )
else: # raw BBQ
# Select the method to be called for processing the raw BBQ
method = "raw_bbq_spectrogram"
if self.useNAFFCheckBox.isChecked():
method = "raw_bbq_naff"
- self.startThread("cleanTuneRawBBQ",
- "cleaningFinished",
- self.measurement.path / cleaning_constants.DPP_FILE.format(beam=1),
- self.measurement.path / timber.constants.FILENAME_HDF,
- self.measurement.path,
- cleaning_constants.CLEANED_DPP_FILE.format(beam=1),
- cleaning_constants.CLEANED_DPP_FILE.format(beam=2),
- (qx_low, qx_high),
- (qy_low, qy_high),
- plateau_length,
- int(seconds_step),
- int(kernel_size),
- method,
- bad_tunes,
- )
+ self.startThread(
+ "cleanTuneRawBBQ",
+ "cleaningFinished",
+ self.measurement.path / cleaning_constants.DPP_FILE.format(beam=1),
+ self.measurement.path / timber.constants.FILENAME_HDF,
+ self.measurement.path,
+ cleaning_constants.CLEANED_DPP_FILE.format(beam=1),
+ cleaning_constants.CLEANED_DPP_FILE.format(beam=2),
+ (qx_low, qx_high),
+ (qy_low, qy_high),
+ plateau_length,
+ int(seconds_step),
+ int(kernel_size),
+ method,
+ bad_tunes,
+ )
def plateauFinished(self, measurement=None):
logger.info("Plateaus done!")
@@ -1182,17 +1335,33 @@ def updateRawTunePlot(self, measurement):
# Beam 1
filepath = measurement.path / cleaning_constants.DPP_FILE.format(beam=1)
- plot_freq(self.plotRawTuneB1Widget.canvas.fig, self.plotRawTuneB1Widget.canvas.ax, filepath,
- 'Raw Tune Measurement for Beam 1', dpp_flag=dpp_flag, delta_rf_flag=delta_rf_flag,
- qx_flag=show_qx_flag, qy_flag=show_qy_flag, rf_flag=show_rf_flag)
+ plot_freq(
+ self.plotRawTuneB1Widget.canvas.fig,
+ self.plotRawTuneB1Widget.canvas.ax,
+ filepath,
+ "Raw Tune Measurement for Beam 1",
+ dpp_flag=dpp_flag,
+ delta_rf_flag=delta_rf_flag,
+ qx_flag=show_qx_flag,
+ qy_flag=show_qy_flag,
+ rf_flag=show_rf_flag,
+ )
self.plotRawTuneB1Widget.canvas.draw()
self.plotRawTuneB1Widget.show()
# Beam 2
filepath = measurement.path / cleaning_constants.DPP_FILE.format(beam=2)
- plot_freq(self.plotRawTuneB2Widget.canvas.fig, self.plotRawTuneB2Widget.canvas.ax, filepath,
- f'Raw Tune Measurement for Beam 2', dpp_flag=dpp_flag, delta_rf_flag=delta_rf_flag,
- qx_flag=show_qx_flag, qy_flag=show_qy_flag, rf_flag=show_rf_flag)
+ plot_freq(
+ self.plotRawTuneB2Widget.canvas.fig,
+ self.plotRawTuneB2Widget.canvas.ax,
+ filepath,
+ "Raw Tune Measurement for Beam 2",
+ dpp_flag=dpp_flag,
+ delta_rf_flag=delta_rf_flag,
+ qx_flag=show_qx_flag,
+ qy_flag=show_qy_flag,
+ rf_flag=show_rf_flag,
+ )
self.plotRawTuneB2Widget.canvas.draw()
self.plotRawTuneB2Widget.show()
@@ -1217,17 +1386,35 @@ def updateCleanedTunePlot(self, measurement):
# Beam 1
filepath = measurement.path / cleaning_constants.CLEANED_DPP_FILE.format(beam=1)
- plot_freq(self.plotCleanTuneB1Widget.canvas.fig, self.plotCleanTuneB1Widget.canvas.ax, filepath,
- 'Cleaned Tune Measurement for Beam 1', dpp_flag=dpp_flag, delta_rf_flag=delta_rf_flag,
- plot_style="line", qx_flag=show_qx_flag, qy_flag=show_qy_flag, rf_flag=show_rf_flag)
+ plot_freq(
+ self.plotCleanTuneB1Widget.canvas.fig,
+ self.plotCleanTuneB1Widget.canvas.ax,
+ filepath,
+ "Cleaned Tune Measurement for Beam 1",
+ dpp_flag=dpp_flag,
+ delta_rf_flag=delta_rf_flag,
+ plot_style="line",
+ qx_flag=show_qx_flag,
+ qy_flag=show_qy_flag,
+ rf_flag=show_rf_flag,
+ )
self.plotCleanTuneB1Widget.canvas.draw()
self.plotCleanTuneB1Widget.show()
# Beam 2
filepath = measurement.path / cleaning_constants.CLEANED_DPP_FILE.format(beam=2)
- plot_freq(self.plotCleanTuneB2Widget.canvas.fig, self.plotCleanTuneB2Widget.canvas.ax, filepath,
- f'Cleaned Tune Measurement for Beam 2', dpp_flag=dpp_flag, delta_rf_flag=delta_rf_flag,
- plot_style="line", qx_flag=show_qx_flag, qy_flag=show_qy_flag, rf_flag=show_rf_flag)
+ plot_freq(
+ self.plotCleanTuneB2Widget.canvas.fig,
+ self.plotCleanTuneB2Widget.canvas.ax,
+ filepath,
+            "Cleaned Tune Measurement for Beam 2",
+ dpp_flag=dpp_flag,
+ delta_rf_flag=delta_rf_flag,
+ plot_style="line",
+ qx_flag=show_qx_flag,
+ qy_flag=show_qy_flag,
+ rf_flag=show_rf_flag,
+ )
self.plotCleanTuneB2Widget.canvas.draw()
self.plotCleanTuneB2Widget.show()
@@ -1269,12 +1456,19 @@ def computeChromaClicked(self):
input_file_B2 = None
logger.info("Starting Chromaticity Computing")
- self.startThread("computeChroma", "chromaFinished",
- input_file_B1, input_file_B2, output_path, fit_orders, dpp_range)
+ self.startThread(
+ "computeChroma",
+ "chromaFinished",
+ input_file_B1,
+ input_file_B2,
+ output_path,
+ fit_orders,
+ dpp_range,
+ )
return
def chromaFinished(self, measurement=None):
- logger.info('Chromaticity finished computing')
+ logger.info("Chromaticity finished computing")
if not measurement:
measurement = self.measurement
self.updateChromaTables(measurement)
@@ -1305,8 +1499,12 @@ def updateChromaTables(self, measurement):
self.chromaticityFormulaLabel.setPixmap(pixmap)
# Beam 1 and Beam 2 models
- self.chromaB1TableModel = ChromaticityTableModel(chroma_tfs[chroma_tfs['BEAM'] == 'B1'].drop('BEAM', axis=1))
- self.chromaB2TableModel = ChromaticityTableModel(chroma_tfs[chroma_tfs['BEAM'] == 'B2'].drop('BEAM', axis=1))
+ self.chromaB1TableModel = ChromaticityTableModel(
+ chroma_tfs[chroma_tfs["BEAM"] == "B1"].drop("BEAM", axis=1)
+ )
+ self.chromaB2TableModel = ChromaticityTableModel(
+ chroma_tfs[chroma_tfs["BEAM"] == "B2"].drop("BEAM", axis=1)
+ )
# Set the model of the beam depending on the tab selected
current_beam = self.beamChromaticityTabWidget.currentIndex() + 1 # index starts at 0
@@ -1355,21 +1553,45 @@ def updateChromaPlots(self, measurement):
# Beam 1
dpp_file_b1 = measurement.path / cleaning_constants.CLEANED_DPP_FILE.format(beam=1)
- self.r2scores['B1']['X'] = plot_chromaticity(self.plotChromaB1XWidget.canvas.fig,
- self.plotChromaB1XWidget.canvas.ax,
- dpp_file_b1, chroma_tfs_file, 'X', fit_orders, "B1")
- self.r2scores['B1']['Y'] = plot_chromaticity(self.plotChromaB1YWidget.canvas.fig,
- self.plotChromaB1YWidget.canvas.ax,
- dpp_file_b1, chroma_tfs_file, 'Y', fit_orders, "B1")
+ self.r2scores["B1"]["X"] = plot_chromaticity(
+ self.plotChromaB1XWidget.canvas.fig,
+ self.plotChromaB1XWidget.canvas.ax,
+ dpp_file_b1,
+ chroma_tfs_file,
+ "X",
+ fit_orders,
+ "B1",
+ )
+ self.r2scores["B1"]["Y"] = plot_chromaticity(
+ self.plotChromaB1YWidget.canvas.fig,
+ self.plotChromaB1YWidget.canvas.ax,
+ dpp_file_b1,
+ chroma_tfs_file,
+ "Y",
+ fit_orders,
+ "B1",
+ )
# Beam 2
dpp_file_b2 = measurement.path / cleaning_constants.CLEANED_DPP_FILE.format(beam=2)
- self.r2scores['B2']['X'] = plot_chromaticity(self.plotChromaB2XWidget.canvas.fig,
- self.plotChromaB2XWidget.canvas.ax,
- dpp_file_b2, chroma_tfs_file, 'X', fit_orders, "B2")
- self.r2scores['B2']['Y'] = plot_chromaticity(self.plotChromaB2YWidget.canvas.fig,
- self.plotChromaB2YWidget.canvas.ax,
- dpp_file_b2, chroma_tfs_file, 'Y', fit_orders, "B2")
+ self.r2scores["B2"]["X"] = plot_chromaticity(
+ self.plotChromaB2XWidget.canvas.fig,
+ self.plotChromaB2XWidget.canvas.ax,
+ dpp_file_b2,
+ chroma_tfs_file,
+ "X",
+ fit_orders,
+ "B2",
+ )
+ self.r2scores["B2"]["Y"] = plot_chromaticity(
+ self.plotChromaB2YWidget.canvas.fig,
+ self.plotChromaB2YWidget.canvas.ax,
+ dpp_file_b2,
+ chroma_tfs_file,
+ "Y",
+ fit_orders,
+ "B2",
+ )
# Set the r2 scores
self.updateR2scores()
@@ -1378,10 +1600,18 @@ def savePlotsClicked(self):
path = self.measurement.path / "plots"
path.mkdir(exist_ok=True)
- save_chromaticity_plot(self.plotChromaB1XWidget.canvas.fig, path / "Beam1_Qx", formats=['png', 'pdf'])
- save_chromaticity_plot(self.plotChromaB1YWidget.canvas.fig, path / "Beam1_Qy", formats=['png', 'pdf'])
- save_chromaticity_plot(self.plotChromaB2XWidget.canvas.fig, path / "Beam2_Qx", formats=['png', 'pdf'])
- save_chromaticity_plot(self.plotChromaB2YWidget.canvas.fig, path / "Beam2_Qy", formats=['png', 'pdf'])
+ save_chromaticity_plot(
+ self.plotChromaB1XWidget.canvas.fig, path / "Beam1_Qx", formats=["png", "pdf"]
+ )
+ save_chromaticity_plot(
+ self.plotChromaB1YWidget.canvas.fig, path / "Beam1_Qy", formats=["png", "pdf"]
+ )
+ save_chromaticity_plot(
+ self.plotChromaB2XWidget.canvas.fig, path / "Beam2_Qx", formats=["png", "pdf"]
+ )
+ save_chromaticity_plot(
+ self.plotChromaB2YWidget.canvas.fig, path / "Beam2_Qy", formats=["png", "pdf"]
+ )
logger.info(f"Saved Chromaticity plots to {path}")
def copyTableClicked(self):
@@ -1398,7 +1628,7 @@ def copyTableClicked(self):
if format_table == "Markdown":
df_text = df.to_markdown(index=False)
elif format_table == "LaTeX":
- df.columns = [df.columns[0]] + [f'${c}$' for c in df.columns[1:]]
+ df.columns = [df.columns[0]] + [f"${c}$" for c in df.columns[1:]]
df_text = df.to_latex(index=False)
df_text = df_text.replace("(", "{(")
df_text = df_text.replace(")", ")}")
@@ -1417,15 +1647,17 @@ def useRawBBQCheckBoxClicked(self, value):
raw_enabled = value == 2
# Turn ON or OFF some features depending on if the user wants to use the raw BBQ or not
- self.useNAFFCheckBox.setEnabled(True == raw_enabled)
- self.secondsStep.setEnabled(True == raw_enabled)
- self.kernelSize.setEnabled(True == raw_enabled)
- self.q1Quartile.setEnabled(False == raw_enabled)
- self.q3Quartile.setEnabled(False == raw_enabled)
+ self.useNAFFCheckBox.setEnabled(raw_enabled)
+ self.secondsStep.setEnabled(raw_enabled)
+ self.kernelSize.setEnabled(raw_enabled)
+ self.q1Quartile.setEnabled(not raw_enabled)
+ self.q3Quartile.setEnabled(not raw_enabled)
# Enable or disable functionalities depending on the selected method
if raw_enabled:
- self.useNAFFCheckBoxClicked(int(self.useNAFFCheckBox.isChecked()) * 2) # Send a 0 or 2 depending on the state
+ self.useNAFFCheckBoxClicked(
+ int(self.useNAFFCheckBox.isChecked()) * 2
+ ) # Send a 0 or 2 depending on the state
def useNAFFCheckBoxClicked(self, value):
"""
@@ -1434,7 +1666,7 @@ def useNAFFCheckBoxClicked(self, value):
"""
# Turn ON or OFF some raw bbq functions
naff_enabled = value == 2
- self.kernelSize.setEnabled(False == naff_enabled)
+ self.kernelSize.setEnabled(not naff_enabled)
def timberVariableSelectionChanged(self, item):
"""
@@ -1453,7 +1685,9 @@ def timberVariableSelectionChanged(self, item):
def updateTimberTable(self, measurement):
# Get the available variables from the extracted data
- available_variables = get_variables_names_from_csv(measurement.path / timber.constants.FILENAME)
+ available_variables = get_variables_names_from_csv(
+ measurement.path / timber.constants.FILENAME
+ )
for variable in available_variables:
item = QListWidgetItem(variable)
item.setFlags(Qt.ItemIsUserCheckable | Qt.ItemIsSelectable | Qt.ItemIsEnabled)
@@ -1471,16 +1705,21 @@ def updateTimberPlot(self, measurement):
# Add the widgets to the layout
self.timberDataLayout.addWidget(self.plotTimberWidget)
- plot_timber(self.plotTimberWidget.canvas.fig,
- self.plotTimberWidget.canvas.ax,
- measurement.path / timber.constants.FILENAME,
- self.selectedTimberVariables)
+ plot_timber(
+ self.plotTimberWidget.canvas.fig,
+ self.plotTimberWidget.canvas.ax,
+ measurement.path / timber.constants.FILENAME,
+ self.selectedTimberVariables,
+ )
self.plotTimberWidget.canvas.draw()
self.plotTimberWidget.show()
def setChromaticityOrders(self, orders):
- all_orders = [int(self.chromaOrderComboBox.itemText(i)) for i in range(self.chromaOrderComboBox.count())]
+ all_orders = [
+ int(self.chromaOrderComboBox.itemText(i))
+ for i in range(self.chromaOrderComboBox.count())
+ ]
for order in all_orders:
index = self.chromaOrderComboBox.findText(str(order))
@@ -1544,34 +1783,36 @@ def correctionButtonClicked(self):
# The computations are too fast, add a cooldown so the user knobs it has computed
for beam in self.corrections.keys():
- text_edit = getattr(self, f'correction{beam}TextEdit')
+ text_edit = getattr(self, f"correction{beam}TextEdit")
text_edit.setHtml("Computing...")
QtTest.QTest.qWait(1000)
logger.info("Starting Response Matrix creation")
- self.startThread("computeCorrections",
- "correctionsFinished",
- optics_paths,
- self.measurement.path,
- method,
- observables,
- chroma_factor,
- rcond,
- keep_dq3_constant,
- keep_rdt_constant,
- clean_nan,
- clean_outliers,
- clean_IR,
- optics_name)
+ self.startThread(
+ "computeCorrections",
+ "correctionsFinished",
+ optics_paths,
+ self.measurement.path,
+ method,
+ observables,
+ chroma_factor,
+ rcond,
+ keep_dq3_constant,
+ keep_rdt_constant,
+ clean_nan,
+ clean_outliers,
+ clean_IR,
+ optics_name,
+ )
def correctionsFinished(self):
for beam in self.corrections.keys():
if self.corrections[beam] is None:
continue
-        text = self.corrections[f'{beam}'].replace('\n', '<br/>')
+        text = self.corrections[f"{beam}"].replace("\n", "<br/>")
         text = text.replace(" ", "&nbsp;")
- text_edit = getattr(self, f'correction{beam}TextEdit')
+ text_edit = getattr(self, f"correction{beam}TextEdit")
text_edit.setHtml(text)
logger.info("Corrections done!")
@@ -1581,6 +1822,7 @@ def rcParamsClicked(self):
rcparams_dialog = MplRcParamsDialog(self)
rcparams_dialog.show()
+
class MplRcParamsDialog(QDialog, rcparams_window_class):
def __init__(self, parent=None):
QDialog.__init__(self, parent)
@@ -1626,6 +1868,7 @@ def accept(self, buttonClicked):
logger.info("Matplotlib rcParams written")
+
class NewMeasurementDialog(QDialog, new_measurement_class):
def __init__(self, parent=None):
QDialog.__init__(self, parent)
@@ -1661,17 +1904,20 @@ def openModel(self):
return folder
def createMeasurement(self):
- model_path = {'B1': Path(self.modelB1LineEdit.text()),
- 'B2': Path(self.modelB2LineEdit.text())
- }
+ model_path = {
+ "B1": Path(self.modelB1LineEdit.text()),
+ "B2": Path(self.modelB2LineEdit.text()),
+ }
# Set the start and end time of the timber extraction to the current date
now = datetime.now()
- measurement = Measurement(path=Path(self.locationLineEdit.text()),
- description=self.descriptionTextEdit.toPlainText(),
- model_path=model_path,
- start_time=now,
- end_time=now)
+ measurement = Measurement(
+ path=Path(self.locationLineEdit.text()),
+ description=self.descriptionTextEdit.toPlainText(),
+ model_path=model_path,
+ start_time=now,
+ end_time=now,
+ )
measurement.save_as_json()
main_window = findMainWindow()
diff --git a/chroma_gui/plotting/__init__.py b/chroma_gui/plotting/__init__.py
index 59e3fdd..512312f 100644
--- a/chroma_gui/plotting/__init__.py
+++ b/chroma_gui/plotting/__init__.py
@@ -1,5 +1,12 @@
-from . import widget
-from .functions import (
+"""
+Plotting Module
+---------------
+
+This module contains functions to plot chromaticity data.
+"""
+# ruff: noqa
+from . import widget
+from .functions import (
plot_dpp,
plot_freq,
plot_chromaticity,
diff --git a/chroma_gui/plotting/functions.py b/chroma_gui/plotting/functions.py
index 29f1b7a..28f8e47 100644
--- a/chroma_gui/plotting/functions.py
+++ b/chroma_gui/plotting/functions.py
@@ -1,3 +1,11 @@
+"""
+Functions
+---------
+
+Functions to plot chromaticity data.
+"""
+from __future__ import annotations
+
from datetime import datetime
import matplotlib.dates as mdates
from matplotlib.ticker import FormatStrFormatter
diff --git a/chroma_gui/plotting/widget.py b/chroma_gui/plotting/widget.py
index 745d9ce..3a0f1a8 100644
--- a/chroma_gui/plotting/widget.py
+++ b/chroma_gui/plotting/widget.py
@@ -1,18 +1,24 @@
-# Imports
-from PyQt5 import QtWidgets, QtGui, QtCore
-from matplotlib.figure import Figure
+"""
+Widget
+------
+
+Defines the Matplotlib widget.
+"""
+from __future__ import annotations
+
+import matplotlib
+from matplotlib.backends.backend_agg import FigureCanvasAgg
from matplotlib.backends.backend_qt5agg import FigureCanvasQTAgg as Canvas
from matplotlib.backends.backend_qt5agg import NavigationToolbar2QT
-from matplotlib.backends.backend_agg import FigureCanvasAgg
-import matplotlib
-
+from matplotlib.figure import Figure
+from PyQt5 import QtGui, QtWidgets
# Ensure using PyQt5 backend
matplotlib.use('QT5Agg')
-# Matplotlib canvas class to create figure
class MplCanvas(Canvas):
+ """ Matplotlib canvas class to create figure """
def __init__(self):
self.fig = Figure()
self.ax = self.fig.add_subplot(111)
@@ -21,8 +27,9 @@ def __init__(self):
Canvas.updateGeometry(self)
-# Matplotlib widget
class MplWidget(QtWidgets.QWidget):
+ """ Matplotlib widget class """
+
def __init__(self, parent=None):
QtWidgets.QWidget.__init__(self, parent) # Inherit from QWidget
# Create the canvas object
@@ -36,7 +43,13 @@ def __init__(self, parent=None):
self.setLayout(self.vbl)
-def mathTex_to_QPixmap(mathTex, fs):
+def mathTex_to_QPixmap(mathTex: str, fs: int) -> QtGui.QPixmap:
+ """ Convert mathTex string to QPixmap
+
+ Args:
+ mathTex (str): mathTex string
+ fs (int): font size
+ """
#---- set up a mpl figure instance ----
fig = matplotlib.figure.Figure()
fig.patch.set_facecolor('none')
diff --git a/chroma_gui/timber/__init__.py b/chroma_gui/timber/__init__.py
index 2a82594..4714def 100644
--- a/chroma_gui/timber/__init__.py
+++ b/chroma_gui/timber/__init__.py
@@ -1,3 +1,10 @@
+"""
+Timber Module
+-------------
+
+This module contains all the functions to extract data from Timber.
+"""
+# ruff: noqa
from . import extract
from . import constants
diff --git a/chroma_gui/timber/constants.py b/chroma_gui/timber/constants.py
index cc84c80..286cb95 100644
--- a/chroma_gui/timber/constants.py
+++ b/chroma_gui/timber/constants.py
@@ -1,35 +1,44 @@
+"""
+Constants
+---------
+
+Constants used in Timber extraction.
+"""
+
# Files to be saved
-FILENAME = "./TIMBER_DATA.csv"
-BACKUP_FILENAME = "./TIMBER_DATA_{now}.csv"
+FILENAME: str = "./TIMBER_DATA.csv"
+BACKUP_FILENAME: str = "./TIMBER_DATA_{now}.csv"
-FILENAME_PKL = "./TIMBER_RAW_BBQ.pkl.gz"
-BACKUP_FILENAME_PKL = "./TIMBER_RAW_BBQ_{now}.pkl.gz"
+FILENAME_PKL: str = "./TIMBER_RAW_BBQ.pkl.gz"
+BACKUP_FILENAME_PKL: str = "./TIMBER_RAW_BBQ_{now}.pkl.gz"
-FILENAME_HDF = "./TIMBER_RAW_BBQ.hdf"
-BACKUP_FILENAME_HDF = "./TIMBER_RAW_BBQ_{now}.hdf"
+FILENAME_HDF: str = "./TIMBER_RAW_BBQ.hdf"
+BACKUP_FILENAME_HDF: str = "./TIMBER_RAW_BBQ_{now}.hdf"
# Variables to query for
-TIMBER_VARS = ['LHC.BQBBQ.CONTINUOUS_HS.B1:EIGEN_FREQ_1', # Good BBQ data usually, sometimes noisy
- 'LHC.BQBBQ.CONTINUOUS_HS.B1:EIGEN_FREQ_2',
- 'LHC.BQBBQ.CONTINUOUS_HS.B2:EIGEN_FREQ_1',
- 'LHC.BQBBQ.CONTINUOUS_HS.B2:EIGEN_FREQ_2',
- 'LHC.BQBBQ.CONTINUOUS_HS.B1:ACQ_DATA_H', # Raw BBQ
- 'LHC.BQBBQ.CONTINUOUS_HS.B1:ACQ_DATA_V',
- 'LHC.BQBBQ.CONTINUOUS_HS.B2:ACQ_DATA_H', # Raw BBQ
- 'LHC.BQBBQ.CONTINUOUS_HS.B2:ACQ_DATA_V',
- 'LHC.BOFSU:RADIAL_TRIM_B1',
- 'BFC.LHC:TuneFBAcq:tuneB1H', # Completely off for some reason sometimes
- 'BFC.LHC:TuneFBAcq:tuneB1V',
- 'BFC.LHC:TuneFBAcq:tuneB2H',
- 'BFC.LHC:TuneFBAcq:tuneB2V',
- 'BFC.LHC:RadialLoopFBAcq:fradialLoopTrim', # DPP data, doesn't always contain something
- 'ALB.SR4.B1:FGC_FREQ', # RF Frequency
- 'ALB.SR4.B2:FGC_FREQ'
- ]
+TIMBER_VARS: list[str] = [
+ "LHC.BQBBQ.CONTINUOUS_HS.B1:EIGEN_FREQ_1", # Good BBQ data usually, sometimes noisy
+ "LHC.BQBBQ.CONTINUOUS_HS.B1:EIGEN_FREQ_2",
+ "LHC.BQBBQ.CONTINUOUS_HS.B2:EIGEN_FREQ_1",
+ "LHC.BQBBQ.CONTINUOUS_HS.B2:EIGEN_FREQ_2",
+ "LHC.BQBBQ.CONTINUOUS_HS.B1:ACQ_DATA_H", # Raw BBQ
+ "LHC.BQBBQ.CONTINUOUS_HS.B1:ACQ_DATA_V",
+ "LHC.BQBBQ.CONTINUOUS_HS.B2:ACQ_DATA_H", # Raw BBQ
+ "LHC.BQBBQ.CONTINUOUS_HS.B2:ACQ_DATA_V",
+ "LHC.BOFSU:RADIAL_TRIM_B1",
+ "BFC.LHC:TuneFBAcq:tuneB1H", # Completely off for some reason sometimes
+ "BFC.LHC:TuneFBAcq:tuneB1V",
+ "BFC.LHC:TuneFBAcq:tuneB2H",
+ "BFC.LHC:TuneFBAcq:tuneB2V",
+ "BFC.LHC:RadialLoopFBAcq:fradialLoopTrim", # DPP data, doesn't always contain something
+ "ALB.SR4.B1:FGC_FREQ", # RF Frequency
+ "ALB.SR4.B2:FGC_FREQ",
+]
# Vars not to be saved in the CSV
-TIMBER_RAW_VARS = ['LHC.BQBBQ.CONTINUOUS_HS.B1:ACQ_DATA_H',
- 'LHC.BQBBQ.CONTINUOUS_HS.B1:ACQ_DATA_V',
- 'LHC.BQBBQ.CONTINUOUS_HS.B2:ACQ_DATA_H',
- 'LHC.BQBBQ.CONTINUOUS_HS.B2:ACQ_DATA_V',
- ]
+TIMBER_RAW_VARS: list[str] = [
+ "LHC.BQBBQ.CONTINUOUS_HS.B1:ACQ_DATA_H",
+ "LHC.BQBBQ.CONTINUOUS_HS.B1:ACQ_DATA_V",
+ "LHC.BQBBQ.CONTINUOUS_HS.B2:ACQ_DATA_H",
+ "LHC.BQBBQ.CONTINUOUS_HS.B2:ACQ_DATA_V",
+]
diff --git a/chroma_gui/timber/extract.py b/chroma_gui/timber/extract.py
index 4136e69..e359965 100644
--- a/chroma_gui/timber/extract.py
+++ b/chroma_gui/timber/extract.py
@@ -1,20 +1,27 @@
-from fileinput import filename
+"""
+Extract
+-------
+
+Functions to extract data from Timber.
+"""
+from __future__ import annotations
-import pytimber
-from datetime import datetime
-import os
import logging
+import os
+from datetime import datetime
+
import pandas as pd
+import pytimber
from chroma_gui.timber.constants import (
- FILENAME,
BACKUP_FILENAME,
- TIMBER_VARS,
- TIMBER_RAW_VARS,
- FILENAME_PKL,
+ BACKUP_FILENAME_HDF,
BACKUP_FILENAME_PKL,
+ FILENAME,
FILENAME_HDF,
- BACKUP_FILENAME_HDF,
+ FILENAME_PKL,
+ TIMBER_RAW_VARS,
+ TIMBER_VARS,
)
@@ -57,7 +64,7 @@ def save_as_csv(path, start_time, end_time, data):
# Make a symlink to TIMBER_DATA.csv
try:
os.remove(path / FILENAME)
- except:
+ except OSError:
pass
os.symlink(backup_name, path / FILENAME)
@@ -77,7 +84,7 @@ def save_as_pickle(path, data):
# Make a symlink to TIMBER_RAW_DATA.pkl.gz
try:
os.remove(filename)
- except:
+ except OSError:
pass
os.symlink(backup_filename, filename)
@@ -97,7 +104,7 @@ def save_as_hdf(path, data):
# Make a symlink to TIMBER_RAW_DATA.hdf
try:
os.remove(filename)
- except:
+ except OSError:
pass
os.symlink(backup_filename, filename)
diff --git a/doc/Makefile b/doc/Makefile
new file mode 100644
index 0000000..e5b85ec
--- /dev/null
+++ b/doc/Makefile
@@ -0,0 +1,154 @@
+# Makefile for Sphinx documentation
+#
+
+# You can set these variables from the command line.
+SPHINXOPTS =
+# SPHINXBUILD = sphinx-build
+SPHINXBUILD = python -msphinx
+PAPER =
+BUILDDIR = _build
+
+# Internal variables.
+PAPEROPT_a4 = -D latex_paper_size=a4
+PAPEROPT_letter = -D latex_paper_size=letter
+ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
+# the i18n builder cannot share the environment and doctrees with the others
+I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
+
+.PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest gettext
+
+help:
+	@echo "Please use \`make <target>' where <target> is one of"
+ @echo " html to make standalone HTML files"
+ @echo " dirhtml to make HTML files named index.html in directories"
+ @echo " singlehtml to make a single large HTML file"
+ @echo " pickle to make pickle files"
+ @echo " json to make JSON files"
+ @echo " htmlhelp to make HTML files and a HTML help project"
+ @echo " qthelp to make HTML files and a qthelp project"
+ @echo " devhelp to make HTML files and a Devhelp project"
+ @echo " epub to make an epub"
+ @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter"
+ @echo " latexpdf to make LaTeX files and run them through pdflatex"
+ @echo " text to make text files"
+ @echo " man to make manual pages"
+ @echo " texinfo to make Texinfo files"
+ @echo " info to make Texinfo files and run them through makeinfo"
+ @echo " gettext to make PO message catalogs"
+ @echo " changes to make an overview of all changed/added/deprecated items"
+ @echo " linkcheck to check all external links for integrity"
+ @echo " doctest to run all doctests embedded in the documentation (if enabled)"
+
+clean:
+ -rm -rf $(BUILDDIR)/*
+
+html:
+ $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html
+ @echo
+ @echo "Build finished. The HTML pages are in $(BUILDDIR)/html."
+
+dirhtml:
+ $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml
+ @echo
+ @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml."
+
+singlehtml:
+ $(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml
+ @echo
+ @echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml."
+
+pickle:
+ $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle
+ @echo
+ @echo "Build finished; now you can process the pickle files."
+
+json:
+ $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json
+ @echo
+ @echo "Build finished; now you can process the JSON files."
+
+htmlhelp:
+ $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp
+ @echo
+ @echo "Build finished; now you can run HTML Help Workshop with the" \
+ ".hhp project file in $(BUILDDIR)/htmlhelp."
+
+qthelp:
+ $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp
+ @echo
+ @echo "Build finished; now you can run "qcollectiongenerator" with the" \
+ ".qhcp project file in $(BUILDDIR)/qthelp, like this:"
+ @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/chroma_gui.qhcp"
+ @echo "To view the help file:"
+ @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/chroma_gui.qhc"
+
+devhelp:
+ $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp
+ @echo
+ @echo "Build finished."
+ @echo "To view the help file:"
+ @echo "# mkdir -p $$HOME/.local/share/devhelp/chroma_gui"
+ @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/chroma_gui"
+ @echo "# devhelp"
+
+epub:
+ $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub
+ @echo
+ @echo "Build finished. The epub file is in $(BUILDDIR)/epub."
+
+latex:
+ $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
+ @echo
+ @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex."
+ @echo "Run \`make' in that directory to run these through (pdf)latex" \
+ "(use \`make latexpdf' here to do that automatically)."
+
+latexpdf:
+ $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
+ @echo "Running LaTeX files through pdflatex..."
+ $(MAKE) -C $(BUILDDIR)/latex all-pdf
+ @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."
+
+text:
+ $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text
+ @echo
+ @echo "Build finished. The text files are in $(BUILDDIR)/text."
+
+man:
+ $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man
+ @echo
+ @echo "Build finished. The manual pages are in $(BUILDDIR)/man."
+
+texinfo:
+ $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
+ @echo
+ @echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo."
+ @echo "Run \`make' in that directory to run these through makeinfo" \
+ "(use \`make info' here to do that automatically)."
+
+info:
+ $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
+ @echo "Running Texinfo files through makeinfo..."
+ make -C $(BUILDDIR)/texinfo info
+ @echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo."
+
+gettext:
+ $(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale
+ @echo
+ @echo "Build finished. The message catalogs are in $(BUILDDIR)/locale."
+
+changes:
+ $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes
+ @echo
+ @echo "The overview file is in $(BUILDDIR)/changes."
+
+linkcheck:
+ $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck
+ @echo
+ @echo "Link check complete; look for any errors in the above output " \
+ "or in $(BUILDDIR)/linkcheck/output.txt."
+
+doctest:
+ $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest
+ @echo "Testing of doctests in the sources finished, look at the " \
+ "results in $(BUILDDIR)/doctest/output.txt."
diff --git a/doc/_static/css/custom.css b/doc/_static/css/custom.css
new file mode 100644
index 0000000..97970f4
--- /dev/null
+++ b/doc/_static/css/custom.css
@@ -0,0 +1,295 @@
+:root {
+ --nav-side-width: 300px; /* default is 300px */
+ /* for 100% width */
+ /*--nav-content-width: 100%;*/
+ /*--local-toc-width: 300px;*/
+ /*--nav-content-width-wide: calc(100% - var(--local-toc-width)); /* 100% here is fullscreen */
+ /*--local-toc-left: calc(100% - var(--local-toc-width)); /* 100% here is w/o sidebar */
+
+ /* for fixed widths */
+ --nav-content-width: 800px; /* default is 800px */
+ --nav-content-width-wide: var(--nav-content-width);
+ --local-toc-width: calc(100% - var(--nav-content-width-wide));
+ --local-toc-left: calc(var(--nav-content-width-wide) + var(--nav-side-width));
+}
+
+/* main content width */
+.wy-nav-content {
+ max-width: var(--nav-content-width);
+}
+
+/* Sidebar width */
+.wy-nav-side {
+ width: var(--nav-side-width);
+}
+
+.wy-side-nav-search {
+ background: rgb(243,244,247);
+}
+
+.wy-side-nav-search > a {
+ color: black;
+}
+
+.wy-side-nav-search> a img.logo {
+ width: 50%;
+}
+
+
+.wy-side-nav-search > div.version {
+ color: black;
+}
+
+.wy-nav-content-wrap {
+ background: inherit;
+}
+
+.wy-side-nav-search input[type="text"] {
+ border: none;
+ box-shadow: none;
+ background: white;
+ border-radius: 0;
+ font-size: 100%;
+}
+
+.wy-menu-vertical li.current a,
+.wy-menu-vertical li.toctree-l1.current > a {
+ border: none;
+}
+
+.ethical-rtd > div.ethical-sidebar,
+.ethical-rtd > div.ethical-footer {
+ display: none !important;
+}
+
+h1 {
+ text-transform: uppercase;
+ font-family: inherit;
+ font-weight: 200;
+}
+
+h2,
+.rst-content .toctree-wrapper p.caption {
+ font-family: inherit;
+ font-weight: 200;
+}
+
+.rst-content a:visited {
+ color: #3091d1;
+}
+
+/* Begin code */
+.rst-content pre.literal-block,
+.rst-content div[class^="highlight"] {
+ border: none;
+}
+
+.rst-content pre.literal-block,
+.rst-content div[class^="highlight"] pre,
+.rst-content .linenodiv pre {
+ font-size: 80%;
+}
+
+.highlight {
+ background: #f6f8fa;
+ border-radius: 6px;
+}
+
+.highlight .kn,
+.highlight .k {
+ color: #d73a49;
+}
+
+.highlight .nn {
+ color: inherit;
+ font-weight: inherit;
+}
+
+.highlight .nc {
+ color: #e36209;
+ font-weight: inherit;
+}
+
+.highlight .fm,
+.highlight .nd,
+.highlight .nf,
+.highlight .nb {
+ color: #6f42c1;
+}
+
+.highlight .bp,
+.highlight .n {
+ color: inherit;
+}
+
+.highlight .kc,
+.highlight .s1,
+.highlight .s2,
+.highlight .mi,
+.highlight .mf,
+.highlight .bp,
+.highlight .bn,
+.highlight .ow {
+ color: #005cc5;
+ font-weight: inherit;
+}
+
+.highlight .c1 {
+ color: #6a737d;
+}
+
+.rst-content code.xref {
+ padding: .2em .4em;
+ background: rgba(27,31,35,.05);
+ border-radius: 6px;
+ border: none;
+}
+/* End code */
+
+.rst-content dl:not(.docutils) dt,
+.rst-content dl:not(.docutils) dl dt {
+ background: rgb(243,244,247);
+}
+
+.rst-content dl:not(.docutils) dt.field-odd,
+.rst-content dl:not(.docutils) dt.field-odd {
+ text-transform: uppercase;
+ background: inherit;
+ border: none;
+ padding: 6px 0;
+}
+
+.rst-content dl:not(.docutils) .property {
+ text-transform: uppercase;
+ font-style: normal;
+ padding-right: 12px;
+}
+
+em.sig-param span.n:first-child, em.sig-param span.n:nth-child(2) {
+ color: black;
+ font-style: normal;
+}
+
+em.sig-param span.n:nth-child(3),
+em.sig-param span.n:nth-child(3) a {
+ color: inherit;
+ font-weight: normal;
+ font-style: normal;
+}
+
+em.sig-param span.default_value {
+ font-family: SFMono-Regular,Menlo,Monaco,Consolas,"Liberation Mono","Courier New",Courier,monospace;
+ font-style: normal;
+ font-size: 90%;
+}
+
+.sig-paren {
+ padding: 0 4px;
+}
+
+.wy-table-responsive table td,
+.wy-table-responsive table th {
+ white-space: normal;
+}
+
+.wy-table-bordered-all,
+.rst-content table.docutils {
+ border: none;
+}
+
+.wy-table-bordered-all td,
+.rst-content table.docutils td {
+ border: none;
+}
+
+.wy-table-odd td,
+.wy-table-striped tr:nth-child(2n-1) td,
+.rst-content table.docutils:not(.field-list) tr:nth-child(2n-1) td {
+ background: rgb(243,244,247);
+}
+
+.wy-table td,
+.rst-content table.docutils td,
+.rst-content table.field-list td,
+.wy-table th,
+.rst-content table.docutils th,
+.rst-content table.field-list th {
+ padding: 16px;
+}
+
+
+/* Create local table of contents
+ ------------------------------
+ inspired by https://github.com/readthedocs/sphinx_rtd_theme/pull/919
+ and https://github.com/readthedocs/sphinx_rtd_theme/issues/764
+ see also _templates/layout.html
+ */
+
+#local-table-of-contents {
+ padding-bottom: 20px;
+ /* display: none; */
+}
+
+/* Mask entry of main header (chapter) */
+#local-table-of-contents a[href="#"]{
+ /*display: none;*/
+}
+
+/* indent subsections */
+#local-table-of-contents ul > ul {
+ padding-left: 0px;
+ margin-left: 20px;
+ padding-right: 0;
+ padding-bottom: 5px;
+}
+
+
+#local-table-of-contents-title {
+ margin-bottom: 10px;
+}
+
+/* Show in Sidebar if window width is larger than nav-side + nav-content + toc-width */
+@media screen and (min-width: 1200px) {
+ .wy-nav-content {
+ max-width: var(--nav-content-width-wide);
+ }
+
+ #local-table-of-contents {
+ display: block;
+ position: fixed;
+ margin-left: 15px;
+ overflow-y: auto;
+ height: 95%;
+ top: 45px;
+ left: var(--local-toc-left);
+ width: var(--local-toc-width);
+ }
+
+ #local-table-of-contents-title {
+ display: block;
+ font-size: 16px;
+ width: 100%;
+ padding-top: 10px;
+ padding-bottom: 5px;
+ }
+}
+
+/* Custom styles for bibliography */
+.citation {
+ display: block!important;
+}
+
+.citation .label {
+ display: block;
+ font-weight: bold;
+ margin-bottom: 0.5em;
+}
+
+.citation p {
+ margin: 0;
+ padding-left: 1em;
+}
+
+.citation cite {
+ display: block;
+}
diff --git a/doc/_static/img/omc_logo.pdf b/doc/_static/img/omc_logo.pdf
new file mode 100644
index 0000000..cbde539
Binary files /dev/null and b/doc/_static/img/omc_logo.pdf differ
diff --git a/doc/_static/img/omc_logo.svg b/doc/_static/img/omc_logo.svg
new file mode 100644
index 0000000..c04959c
--- /dev/null
+++ b/doc/_static/img/omc_logo.svg
@@ -0,0 +1,95 @@
+
+
diff --git a/doc/_templates/layout.html b/doc/_templates/layout.html
new file mode 100644
index 0000000..aa67d6d
--- /dev/null
+++ b/doc/_templates/layout.html
@@ -0,0 +1,12 @@
+{% extends "!layout.html" %}
+{% block document %}
+ {%- if toc|length > title|length + 75 %}
+
+ {%- endif %}
+
+ {{ super() }}
+{% endblock %}
+
diff --git a/doc/bibliography.rst b/doc/bibliography.rst
new file mode 100644
index 0000000..ecea785
--- /dev/null
+++ b/doc/bibliography.rst
@@ -0,0 +1,4 @@
+Bibliography
+************
+
+This bibliography is intentionally left blank.
\ No newline at end of file
diff --git a/doc/conf.py b/doc/conf.py
new file mode 100644
index 0000000..6d90be6
--- /dev/null
+++ b/doc/conf.py
@@ -0,0 +1,243 @@
+# -*- coding: utf-8 -*-
+#
+# chroma_gui documentation build configuration file, created by
+# sphinx-quickstart on Tue Feb 6 12:10:18 2018.
+#
+# This file is execfile()d with the current directory set to its
+# containing dir.
+#
+# Note that not all possible configuration values are present in this
+# autogenerated file.
+#
+# All configuration values have a default; values that are commented out
+# serve to show the default.
+
+# If extensions (or modules to document with autodoc) are in another directory,
+# add these directories to sys.path here. If the directory is relative to the
+# documentation root, use os.path.abspath to make it absolute, like shown here.
+#
+import pathlib
+import sys
+# ignore numpy warnings, see:
+# https://stackoverflow.com/questions/40845304/runtimewarning-numpy-dtype-size-changed-may-indicate-binary-incompatibility
+import warnings
+
+warnings.filterwarnings("ignore", message="numpy.dtype size changed")
+warnings.filterwarnings("ignore", message="numpy.ufunc size changed")
+
+
+TOPLEVEL_DIR = pathlib.Path(__file__).parent.parent.absolute()
+ABOUT_FILE = TOPLEVEL_DIR / "chroma_gui" / "__init__.py"
+
+if str(TOPLEVEL_DIR) not in sys.path:
+ sys.path.insert(0, str(TOPLEVEL_DIR))
+
+ABOUT: dict = {}
+with ABOUT_FILE.open("r") as f:
+ exec(f.read(), ABOUT)
+
+# -- General configuration ------------------------------------------------
+
+# If your documentation needs a minimal Sphinx version, state it here.
+#
+# needs_sphinx = '1.0'
+
+# Add any Sphinx extension module names here, as strings. They can be
+# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
+# ones.
+extensions = ['sphinx.ext.autodoc',
+ 'sphinx.ext.doctest',
+ 'sphinx.ext.todo',
+ 'sphinx.ext.coverage',
+ 'sphinx.ext.mathjax',
+ 'sphinx.ext.viewcode',
+ 'sphinx.ext.githubpages',
+ 'sphinx.ext.napoleon',
+ 'sphinx.ext.autosectionlabel',
+ ]
+autosectionlabel_prefix_document = True
+autosectionlabel_maxdepth = 2
+
+# Add any paths that contain templates here, relative to this directory.
+templates_path = ['_templates']
+
+# The suffix(es) of source filenames.
+# You can specify multiple suffix as a list of string:
+#
+# source_suffix = ['.rst', '.md']
+source_suffix = '.rst'
+
+# The master toctree document.
+master_doc = 'index'
+
+# General information about the project.
+project = ABOUT["__title__"]
+copyright_ = '2022-2025, pyLHC/OMC-TEAM'
+author = ABOUT["__author__"]
+
+rst_prolog = f"""
+:github_url: {ABOUT['__url__']}
+"""
+
+# The version info for the project you're documenting, acts as replacement for
+# |version| and |release|, also used in various other places throughout the
+# built documents.
+#
+# The short X.Y version.
+version = ABOUT["__version__"][:3]
+# The full version, including alpha/beta/rc tags.
+release = ABOUT["__version__"]
+
+# The language for content autogenerated by Sphinx. Refer to documentation
+# for a list of supported languages.
+#
+# This is also used if you do content translation via gettext catalogs.
+# Usually you set "language" from the command line for these cases.
+language = "en"
+
+# List of patterns, relative to source directory, that match files and
+# directories to ignore when looking for source files.
+# This patterns also effect to html_static_path and html_extra_path
+exclude_patterns = []
+
+# The name of the Pygments (syntax highlighting) style to use.
+pygments_style = 'sphinx'
+
+# If true, `todo` and `todoList` produce output, else they produce nothing.
+todo_include_todos = True
+
+# -- Options for HTML output ----------------------------------------------
+
+# The theme to use for HTML and HTML Help pages. See the documentation for
+# a list of builtin themes.
+#
+html_theme = 'sphinx_rtd_theme'
+
+# Theme options are theme-specific and customize the look and feel of a theme
+# further. For a list of options available for each theme, see the
+# documentation.
+html_theme_options = {
+ 'collapse_navigation': False,
+ 'display_version': True,
+ 'logo_only': True,
+ 'navigation_depth': 2,
+}
+
+
+# Name of an image file (path relative to the configuration directory)
+# that is the logo of the docs, or URL that points an image file for the logo.
+# It is placed at the top of the sidebar;
+# its width should therefore not exceed 200 pixels.
+html_logo = '_static/img/omc_logo.svg'
+
+# Add any paths that contain custom static files (such as style sheets) here,
+# relative to this directory. They are copied after the builtin static files,
+# so a file named "default.css" will overwrite the builtin "default.css".
+#
+html_static_path = ['_static']
+
+# A dictionary of values to pass into the template engine’s context for all
+# pages. Single values can also be put in this dictionary using the
+# -A command-line option of sphinx-build.
+html_context = {
+ 'display_github': True,
+ # the following are only needed if :github_url: is not set
+ 'github_user': author,
+ 'github_repo': project,
+ 'github_version': 'master/doc/',
+}
+
+# A list of CSS files. The entry must be a filename string or a tuple
+# containing the filename string and the attributes dictionary.
+# The filename must be relative to the html_static_path, or a full URI with
+# scheme like https://example.org/style.css.
+# The attributes dictionary is used for attributes of the <link> tag.
+# It defaults to an empty list.
+#
+html_css_files = ["css/custom.css"]
+
+smartquotes_action = "qe" # renders only quotes and ellipses (...) but not dashes (option: D)
+
+# Custom sidebar templates, must be a dictionary that maps document names
+# to template names.
+#
+# This is required for the alabaster theme
+# refs: http://alabaster.readthedocs.io/en/latest/installation.html#sidebars
+html_sidebars = {
+ '**': [
+ 'relations.html', # needs 'show_related': True theme option to display
+ 'searchbox.html',
+ ]
+}
+
+# -- Options for HTMLHelp output ------------------------------------------
+
+# Output file base name for HTML help builder.
+htmlhelp_basename = 'chroma_guidoc'
+
+# -- Options for LaTeX output ---------------------------------------------
+
+latex_elements = {
+ # The paper size ('letterpaper' or 'a4paper').
+ #
+ # 'papersize': 'letterpaper',
+
+ # The font size ('10pt', '11pt' or '12pt').
+ #
+ # 'pointsize': '10pt',
+
+ # Additional stuff for the LaTeX preamble.
+ #
+ # 'preamble': '',
+
+ # Latex figure (float) alignment
+ #
+ # 'figure_align': 'htbp',
+}
+
+# Grouping the document tree into LaTeX files. List of tuples
+# (source start file, target name, title,
+# author, documentclass [howto, manual, or own class]).
+latex_documents = [
+ (master_doc, 'chroma_gui.tex', u'chroma_gui Documentation',
+ u'OMC-TEAM', 'manual'),
+]
+
+# -- Options for manual page output ---------------------------------------
+
+# One entry per manual page. List of tuples
+# (source start file, name, description, authors, manual section).
+man_pages = [
+ (master_doc, 'chroma_gui', u'chroma_gui Documentation',
+ [author], 1)
+]
+
+# -- Options for Texinfo output -------------------------------------------
+
+# Grouping the document tree into Texinfo files. List of tuples
+# (source start file, target name, title, author,
+# dir menu entry, description, category)
+texinfo_documents = [
+ (master_doc, 'chroma_gui', u'chroma_gui Documentation',
+ author, 'chroma_gui', 'One line description of project.',
+ 'Miscellaneous'),
+]
+
+# -- Autodoc Configuration ---------------------------------------------------
+
+# Add here all modules to be mocked up. When the dependencies are not met
+# at building time.
+autodoc_mock_imports = [
+ "qtpy", "pyqtgraph", "accwidgets", "pytimber",
+ "PySide2",
+ "PyQt5", "PyQt5", "PyQt5.QtGui", "PyQt5.QtCore", "PyQt5.QtWidgets",
+ "shiboken2",
+ "matplotlib.backends.backend_qt5agg",
+ ]
+
+# -- Type Aliases --------------------------------------------------------------
+
+# This is to tell Sphinx how to print some specific type annotations
+# See: https://stackoverflow.com/a/67483317
+# See: https://www.sphinx-doc.org/en/master/usage/extensions/autodoc.html#confval-autodoc_type_aliases
+autodoc_type_aliases = {"ArrayLike": "ArrayLike"}
diff --git a/doc/index.rst b/doc/index.rst
new file mode 100644
index 0000000..caa6bf9
--- /dev/null
+++ b/doc/index.rst
@@ -0,0 +1,36 @@
+Welcome to Chroma-GUI's documentation!
+======================================
+
+``chroma_gui`` is a tool to compute non-linear chromaticity via
+measurements done in the CCC at `CERN <https://home.cern/>`_.
+
+Package Reference
+=================
+
+.. toctree::
+ :caption: Main
+ :maxdepth: 1
+ :glob:
+
+ main/*
+
+
+.. toctree::
+ :caption: Modules
+ :maxdepth: 1
+ :glob:
+
+ modules/*
+
+.. toctree::
+ :caption: Bibliography
+ :maxdepth: 1
+
+ bibliography
+
+Indices and tables
+==================
+
+* :ref:`genindex`
+* :ref:`modindex`
+* :ref:`search`
diff --git a/doc/main/chroma_gui.rst b/doc/main/chroma_gui.rst
new file mode 100644
index 0000000..c1152b2
--- /dev/null
+++ b/doc/main/chroma_gui.rst
@@ -0,0 +1,6 @@
+Chroma-GUI
+**********
+
+.. automodule:: chroma_gui.main
+ :members:
+ :noindex:
diff --git a/doc/make.bat b/doc/make.bat
new file mode 100644
index 0000000..989225e
--- /dev/null
+++ b/doc/make.bat
@@ -0,0 +1,190 @@
+@ECHO OFF
+
+REM Command file for Sphinx documentation
+
+if "%SPHINXBUILD%" == "" (
+ set SPHINXBUILD=sphinx-build
+)
+set BUILDDIR=_build
+set ALLSPHINXOPTS=-d %BUILDDIR%/doctrees %SPHINXOPTS% .
+set I18NSPHINXOPTS=%SPHINXOPTS% .
+if NOT "%PAPER%" == "" (
+ set ALLSPHINXOPTS=-D latex_paper_size=%PAPER% %ALLSPHINXOPTS%
+ set I18NSPHINXOPTS=-D latex_paper_size=%PAPER% %I18NSPHINXOPTS%
+)
+
+if "%1" == "" goto help
+
+if "%1" == "help" (
+ :help
+	echo.Please use `make ^<target^>` where ^<target^> is one of
+ echo. html to make standalone HTML files
+ echo. dirhtml to make HTML files named index.html in directories
+ echo. singlehtml to make a single large HTML file
+ echo. pickle to make pickle files
+ echo. json to make JSON files
+ echo. htmlhelp to make HTML files and a HTML help project
+ echo. qthelp to make HTML files and a qthelp project
+ echo. devhelp to make HTML files and a Devhelp project
+ echo. epub to make an epub
+ echo. latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter
+ echo. text to make text files
+ echo. man to make manual pages
+ echo. texinfo to make Texinfo files
+ echo. gettext to make PO message catalogs
+ echo. changes to make an overview over all changed/added/deprecated items
+ echo. linkcheck to check all external links for integrity
+ echo. doctest to run all doctests embedded in the documentation if enabled
+ goto end
+)
+
+if "%1" == "clean" (
+ for /d %%i in (%BUILDDIR%\*) do rmdir /q /s %%i
+ del /q /s %BUILDDIR%\*
+ goto end
+)
+
+if "%1" == "html" (
+ %SPHINXBUILD% -b html %ALLSPHINXOPTS% %BUILDDIR%/html
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.Build finished. The HTML pages are in %BUILDDIR%/html.
+ goto end
+)
+
+if "%1" == "dirhtml" (
+ %SPHINXBUILD% -b dirhtml %ALLSPHINXOPTS% %BUILDDIR%/dirhtml
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.Build finished. The HTML pages are in %BUILDDIR%/dirhtml.
+ goto end
+)
+
+if "%1" == "singlehtml" (
+ %SPHINXBUILD% -b singlehtml %ALLSPHINXOPTS% %BUILDDIR%/singlehtml
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.Build finished. The HTML pages are in %BUILDDIR%/singlehtml.
+ goto end
+)
+
+if "%1" == "pickle" (
+ %SPHINXBUILD% -b pickle %ALLSPHINXOPTS% %BUILDDIR%/pickle
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.Build finished; now you can process the pickle files.
+ goto end
+)
+
+if "%1" == "json" (
+ %SPHINXBUILD% -b json %ALLSPHINXOPTS% %BUILDDIR%/json
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.Build finished; now you can process the JSON files.
+ goto end
+)
+
+if "%1" == "htmlhelp" (
+ %SPHINXBUILD% -b htmlhelp %ALLSPHINXOPTS% %BUILDDIR%/htmlhelp
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.Build finished; now you can run HTML Help Workshop with the ^
+.hhp project file in %BUILDDIR%/htmlhelp.
+ goto end
+)
+
+if "%1" == "qthelp" (
+ %SPHINXBUILD% -b qthelp %ALLSPHINXOPTS% %BUILDDIR%/qthelp
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.Build finished; now you can run "qcollectiongenerator" with the ^
+.qhcp project file in %BUILDDIR%/qthelp, like this:
+ echo.^> qcollectiongenerator %BUILDDIR%\qthelp\omc3_gui.qhcp
+ echo.To view the help file:
+ echo.^> assistant -collectionFile %BUILDDIR%\qthelp\omc3_gui.ghc
+ goto end
+)
+
+if "%1" == "devhelp" (
+ %SPHINXBUILD% -b devhelp %ALLSPHINXOPTS% %BUILDDIR%/devhelp
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.Build finished.
+ goto end
+)
+
+if "%1" == "epub" (
+ %SPHINXBUILD% -b epub %ALLSPHINXOPTS% %BUILDDIR%/epub
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.Build finished. The epub file is in %BUILDDIR%/epub.
+ goto end
+)
+
+if "%1" == "latex" (
+ %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.Build finished; the LaTeX files are in %BUILDDIR%/latex.
+ goto end
+)
+
+if "%1" == "text" (
+ %SPHINXBUILD% -b text %ALLSPHINXOPTS% %BUILDDIR%/text
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.Build finished. The text files are in %BUILDDIR%/text.
+ goto end
+)
+
+if "%1" == "man" (
+ %SPHINXBUILD% -b man %ALLSPHINXOPTS% %BUILDDIR%/man
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.Build finished. The manual pages are in %BUILDDIR%/man.
+ goto end
+)
+
+if "%1" == "texinfo" (
+ %SPHINXBUILD% -b texinfo %ALLSPHINXOPTS% %BUILDDIR%/texinfo
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.Build finished. The Texinfo files are in %BUILDDIR%/texinfo.
+ goto end
+)
+
+if "%1" == "gettext" (
+ %SPHINXBUILD% -b gettext %I18NSPHINXOPTS% %BUILDDIR%/locale
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.Build finished. The message catalogs are in %BUILDDIR%/locale.
+ goto end
+)
+
+if "%1" == "changes" (
+ %SPHINXBUILD% -b changes %ALLSPHINXOPTS% %BUILDDIR%/changes
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.The overview file is in %BUILDDIR%/changes.
+ goto end
+)
+
+if "%1" == "linkcheck" (
+ %SPHINXBUILD% -b linkcheck %ALLSPHINXOPTS% %BUILDDIR%/linkcheck
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.Link check complete; look for any errors in the above output ^
+or in %BUILDDIR%/linkcheck/output.txt.
+ goto end
+)
+
+if "%1" == "doctest" (
+ %SPHINXBUILD% -b doctest %ALLSPHINXOPTS% %BUILDDIR%/doctest
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.Testing of doctests in the sources finished, look at the ^
+results in %BUILDDIR%/doctest/output.txt.
+ goto end
+)
+
+:end
diff --git a/doc/modules/chromaticity.rst b/doc/modules/chromaticity.rst
new file mode 100644
index 0000000..7448115
--- /dev/null
+++ b/doc/modules/chromaticity.rst
@@ -0,0 +1,10 @@
+Chromaticity
+************
+
+.. automodule:: chroma_gui.chromaticity
+ :members:
+ :noindex:
+
+.. automodule:: chroma_gui.chromaticity.chroma_fct
+ :members:
+ :noindex:
\ No newline at end of file
diff --git a/doc/modules/cleaning.rst b/doc/modules/cleaning.rst
new file mode 100644
index 0000000..7bddfa4
--- /dev/null
+++ b/doc/modules/cleaning.rst
@@ -0,0 +1,18 @@
+Cleaning
+********
+
+.. automodule:: chroma_gui.cleaning
+ :members:
+ :noindex:
+
+.. automodule:: chroma_gui.cleaning.clean
+ :members:
+ :noindex:
+
+.. automodule:: chroma_gui.cleaning.constants
+ :members:
+ :noindex:
+
+.. automodule:: chroma_gui.cleaning.plateau
+ :members:
+ :noindex:
\ No newline at end of file
diff --git a/doc/modules/corrections.rst b/doc/modules/corrections.rst
new file mode 100644
index 0000000..8020485
--- /dev/null
+++ b/doc/modules/corrections.rst
@@ -0,0 +1,10 @@
+Corrections
+***********
+
+.. automodule:: chroma_gui.corrections
+ :members:
+ :noindex:
+
+.. automodule:: chroma_gui.corrections.response_matrix
+ :members:
+ :noindex:
\ No newline at end of file
diff --git a/doc/modules/plotting.rst b/doc/modules/plotting.rst
new file mode 100644
index 0000000..45ad798
--- /dev/null
+++ b/doc/modules/plotting.rst
@@ -0,0 +1,14 @@
+Plotting
+********
+
+.. automodule:: chroma_gui.plotting
+ :members:
+ :noindex:
+
+.. automodule:: chroma_gui.plotting.functions
+ :members:
+ :noindex:
+
+.. automodule:: chroma_gui.plotting.widget
+ :members:
+ :noindex:
\ No newline at end of file
diff --git a/doc/modules/timber.rst b/doc/modules/timber.rst
new file mode 100644
index 0000000..c1c8d3d
--- /dev/null
+++ b/doc/modules/timber.rst
@@ -0,0 +1,15 @@
+Timber
+******
+
+.. automodule:: chroma_gui.timber
+ :members:
+ :noindex:
+
+.. automodule:: chroma_gui.timber.constants
+ :members:
+ :noindex:
+
+.. automodule:: chroma_gui.timber.extract
+ :members:
+ :noindex:
+
diff --git a/pyproject.toml b/pyproject.toml
index 311bc0f..6ff3833 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,3 +1,132 @@
 [build-system]
-requires = ["setuptools>64", "numpy"]
-build-backend = "setuptools.build_meta"
+requires = ["hatchling"]
+build-backend = "hatchling.build"
+
+[tool.hatch.version]
+path = "chroma_gui/__init__.py"
+
+[tool.hatch.build.targets.sdist]
+exclude = [
+ "/.github",
+ "/doc",
+ "/tests",
+]
+
+[tool.hatch.build.targets.wheel]
+packages = ["chroma_gui"]
+
+[project]
+name = "chroma-gui"
+readme = "README.md"
+description = "Non-Linear Chromaticity GUI"
+authors = [
+ {name = "OMC Team", email = "pylhc@github.com"}, # see zenodo file / commits for details
+]
+license = "MIT"
+dynamic = ["version"]
+requires-python = ">=3.10"
+
+classifiers = [
+ "Intended Audience :: Science/Research",
+ "License :: OSI Approved :: MIT License",
+ "Natural Language :: English",
+ "Operating System :: OS Independent",
+ "Programming Language :: Python :: 3 :: Only",
+ "Programming Language :: Python :: 3.10",
+ "Programming Language :: Python :: 3.11",
+ "Programming Language :: Python :: 3.12",
+ "Programming Language :: Python :: 3.13",
+ "Programming Language :: Python :: Implementation :: CPython",
+ "Topic :: Scientific/Engineering :: Physics",
+ "Topic :: Scientific/Engineering :: Visualization",
+ "Topic :: Scientific/Engineering",
+ "Topic :: Software Development :: Libraries :: Python Modules",
+ "Typing :: Typed",
+]
+
+dependencies = [
+ "matplotlib",
+ "tfs-pandas",
+ "pyqt5 >= 5.14", # for Qt.MarkdownText
+ "pandas",
+ "numpy",
+ "scipy",
+ "tabulate",
+ "pyperclip",
+ "qtawesome",
+ "nafflib",
+ "tables",
+]
+
+
+[project.optional-dependencies]
+test = [
+ "pytest >= 7.0",
+]
+doc = [
+ "sphinx >= 7.0",
+ "sphinx_rtd_theme >= 2.0",
+]
+cern = [
+ "pytimber",
+]
+
+all = [
+ "chroma_gui[cern]",
+ "chroma_gui[test]",
+ "chroma_gui[doc]",
+]
+
+[project.scripts]
+chroma-gui = "chroma_gui.main:main"
+
+[project.urls]
+homepage = "https://github.com/pylhc/chroma_gui"
+repository = "https://github.com/pylhc/chroma_gui"
+documentation = "https://pylhc.github.io/chroma_gui/"
+changelog = "https://github.com/pylhc/chroma_gui/blob/master/CHANGELOG.md"
+
+# ----- Testing ----- #
+
+[tool.pytest.ini_options]
+markers = [
+ "basic: basic tests run for every commit",
+ "extended: test run on PRs",
+ "cern_network: tests that require access to afs or the technical network",
+]
+# Helpful for pytest-debugging (leave commented out on commit):
+#log_cli = true
+#log_cli_level = "DEBUG"
+
+
+# ----- Dev Tools Configuration ----- #
+
+[tool.ruff]
+exclude = [
+ ".eggs",
+ ".git",
+ ".mypy_cache",
+ ".venv",
+ "_build",
+ "build",
+ "dist",
+]
+
+# Assume Python 3.10+
+target-version = "py310"
+
+line-length = 100
+indent-width = 4
+
+[tool.ruff.lint]
+# Allow unused variables when underscore-prefixed.
+dummy-variable-rgx = "^(_+|(_+[a-zA-Z0-9_]*[a-zA-Z0-9]+?))$"
+ignore = [
+ "FBT001", # boolean-type-hint-positional-argument
+ "FBT002", # boolean-default-value-positional-argument
+ "PT019", # pytest-fixture-param-without-value (but suggested solution fails)
+]
+
+# Allow fix for all enabled rules (when `--fix`) is provided.
+fixable = ["ALL"]
+unfixable = []
diff --git a/setup.cfg b/setup.cfg
deleted file mode 100644
index 1c61824..0000000
--- a/setup.cfg
+++ /dev/null
@@ -1,47 +0,0 @@
-[metadata]
-name = chroma-gui
-version = attr: chroma_gui.__version__
-author = "Maël le Garrec"
-author_email = "mael.le.garrec@cern.ch"
-url = "https://gitlab.cern.ch/mlegarre/nl-chroma-gui"
-description = "Non-Linear Chromaticity GUI"
-long_description = file: README.md
-long_description_content_type = text/markdown
-classifiers =
- Programming Language :: Python :: 3
- Operating System :: OS Independent
-
-
-[options]
-zip_safe = True
-include_package_data = True
-packages = find:
-
-install_requires =
- matplotlib
- tfs-pandas
- pytimber
- pyqt5 >= 5.14 # for Qt.MarkdownText
- pandas
- numpy
- scipy
- tabulate
- pyperclip
- qtawesome
- nafflib
- tables
-
-[options.entry_points]
-console_scripts =
- chroma-gui = chroma_gui.main:main
-
-[options.extras_require]
-test =
- pytest
-
-doc =
- sphinx
- acc-py-sphinx
-
-[options.package_data]
-* = resources/**/*, resources/*
diff --git a/chroma_gui/tests/__init__.py b/tests/__init__.py
similarity index 100%
rename from chroma_gui/tests/__init__.py
rename to tests/__init__.py
diff --git a/chroma_gui/tests/test_chroma_gui.py b/tests/test_chroma_gui.py
similarity index 100%
rename from chroma_gui/tests/test_chroma_gui.py
rename to tests/test_chroma_gui.py