Skip to content
Merged
Show file tree
Hide file tree
Changes from 29 commits
Commits
Show all changes
31 commits
Select commit Hold shift + click to select a range
4a2a5b0
First attempt at xtrack implementation
jgray-19 Jul 2, 2025
269276b
Add xtrack to the toml
jgray-19 Jul 2, 2025
d4e6081
Toml again?
jgray-19 Jul 2, 2025
f594135
Add setuptools to toml
jgray-19 Jul 2, 2025
0e0291e
License update (for xtrack)
jgray-19 Jul 2, 2025
f5453ea
More setuptools xsuite stuff
jgray-19 Jul 2, 2025
af2aa3a
Add xpart
jgray-19 Jul 2, 2025
24d9fde
Fix typo
jgray-19 Jul 2, 2025
ed3adb8
Fix floating-point representation in example_line fixture
jgray-19 Jul 3, 2025
a9e2965
Add platform check for xtrack kernel compilation in test_convert_xsuite
jgray-19 Jul 3, 2025
1804ac0
Enhance MAD-NG and XTRACK modules with improved error handling and ad…
jgray-19 Jul 3, 2025
18ca2e9
Refactor variable names for clarity in MAD-NG and enhance documentati…
jgray-19 Jul 3, 2025
f4ba3f0
Improve documentation in MAD-NG module with clearer descriptions and …
jgray-19 Jul 3, 2025
5f4ad71
Update turn_by_turn/madng.py
jgray-19 Jul 3, 2025
1be3165
Update pyproject.toml
jgray-19 Jul 3, 2025
1655685
Update turn_by_turn/xtrack.py
jgray-19 Jul 3, 2025
0383609
Remove unnecessary TYPE_CHECKING import and adjust type hint for conv…
jgray-19 Jul 3, 2025
612f762
Refactor tests and modules to improve consistency and clarity; update…
jgray-19 Jul 3, 2025
67b80a7
Update documentation and improve code clarity; disable display_versio…
jgray-19 Jul 4, 2025
46c0fc8
Enhance documentation for turn_by_turn; add usage examples for read_t…
jgray-19 Jul 4, 2025
f8278de
Remove load_tbt_data import from package namespace
jgray-19 Jul 4, 2025
3337d79
Fix ImportError handling for tfs package in write_tbt function
jgray-19 Jul 4, 2025
2bd2a14
Enhance docstring for example_line fixture to clarify its purpose and…
jgray-19 Jul 4, 2025
337a340
Some ruff formatting
jgray-19 Jul 5, 2025
c612290
Refactor documentation in index.rst and io.py for clarity and structu…
jgray-19 Jul 5, 2025
aff359a
Reorder import statements in __init__.py for consistency
jgray-19 Jul 5, 2025
d6dfba0
minor stuff
JoschD Jul 8, 2025
24cc58d
added API header
JoschD Jul 8, 2025
1d0b150
Improve formatting in test_xtrack.py and xtrack_line.py.
jgray-19 Jul 8, 2025
283b751
Refactor particle ID handling in convert_to_tbt for clarity and consi…
jgray-19 Jul 8, 2025
49f7d04
Clarify type annotations in convert_to_tbt functions for consistency …
jgray-19 Jul 9, 2025
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
51 changes: 38 additions & 13 deletions doc/conf.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,3 @@
# -*- coding: utf-8 -*-
#
# TFS-Pandas documentation build configuration file, created by
# sphinx-quickstart on Tue Feb 6 12:10:18 2018.
Expand Down Expand Up @@ -90,7 +89,7 @@ def about_package(init_posixpath: pathlib.Path) -> dict:

# Override link in 'Edit on Github'
rst_prolog = f"""
:github_url: {ABOUT_TBT['__url__']}
:github_url: {ABOUT_TBT["__url__"]}
"""

# The version info for the project you're documenting, acts as replacement for
Expand Down Expand Up @@ -120,6 +119,9 @@ def about_package(init_posixpath: pathlib.Path) -> dict:
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = True

# Activate nitpicky mode for sphinx to warn about missing references
# nitpicky = True

# -- Options for HTML output ----------------------------------------------

# The theme to use for HTML and HTML Help pages. See the documentation for
Expand All @@ -130,7 +132,7 @@ def about_package(init_posixpath: pathlib.Path) -> dict:
html_logo = "_static/img/omc_logo.svg"
html_static_path = ["_static"]
html_context = {
# "css_files": ["_static/css/custom.css"],
# "css_files": ["_static/css/custom.css"],
"display_github": True,
# the following are only needed if :github_url: is not set
"github_user": author,
Expand All @@ -141,17 +143,18 @@ def about_package(init_posixpath: pathlib.Path) -> dict:
"css/custom.css",
]

smartquotes_action = "qe" # renders only quotes and ellipses (...) but not dashes (option: D)
# renders only quotes and ellipses (...) but not dashes (option: D)
smartquotes_action = "qe"

# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#
html_theme_options = {
'collapse_navigation': False,
'display_version': True,
'logo_only': True,
'navigation_depth': 1,
"collapse_navigation": False,
"version_selector": True, # sphinx-rtd-theme>=3.0, formerly 'display_version'
"logo_only": True,
"navigation_depth": 2,
}

# Add any paths that contain custom static files (such as style sheets) here,
Expand All @@ -163,11 +166,11 @@ def about_package(init_posixpath: pathlib.Path) -> dict:
# pages. Single values can also be put in this dictionary using the
# -A command-line option of sphinx-build.
html_context = {
'display_github': True,
"display_github": True,
# the following are only needed if :github_url: is not set
'github_user': author,
'github_repo': project,
'github_version': 'master/doc/',
"github_user": author,
"github_repo": project,
"github_version": "master/doc/",
}
# Custom sidebar templates, must be a dictionary that maps document names
# to template names.
Expand Down Expand Up @@ -207,7 +210,13 @@ def about_package(init_posixpath: pathlib.Path) -> dict:
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(master_doc, "turn_by_turn.tex", "turn_by_turn Documentation", "pyLHC/OMC-TEAM", "manual"),
(
master_doc,
"turn_by_turn.tex",
"turn_by_turn Documentation",
"pyLHC/OMC-TEAM",
"manual",
),
]

# -- Options for manual page output ---------------------------------------
Expand All @@ -232,3 +241,19 @@ def about_package(init_posixpath: pathlib.Path) -> dict:
"Miscellaneous",
),
]

# -- Intersphinx Configuration ----------------------------------------------

# Example configuration for intersphinx: refer to the Python standard library.
# use in refs e.g:
# :ref:`comparison manual <python:comparisons>`
intersphinx_mapping = {
"python": ("https://docs.python.org/3/", None),
"numpy": ("https://numpy.org/doc/stable/", None),
"pandas": ("https://pandas.pydata.org/pandas-docs/stable/", None),
"matplotlib": ("https://matplotlib.org/stable/", None),
"scipy": ("https://docs.scipy.org/doc/scipy/", None),
"cpymad": ("https://hibtc.github.io/cpymad/", None),
"tfs": ("https://pylhc.github.io/tfs/", None),
"sdds": ("https://pylhc.github.io/sdds/", None),
}
38 changes: 35 additions & 3 deletions doc/index.rst
Original file line number Diff line number Diff line change
Expand Up @@ -5,8 +5,41 @@ Welcome to turn_by_turn's documentation!

It provides a custom dataclass ``TbtData`` to do so, with attributes corresponding to the relevant measurements information.

How to Use turn_by_turn
-----------------------

There are two main ways to create a ``TbtData`` object:

1. **Reading from file (disk):**
Use ``read_tbt`` to load measurement data from a file on disk. This is the standard entry point for working with measurement files in supported formats.

2. **In-memory conversion:**
Use ``convert_to_tbt`` to convert data that is already loaded in memory (such as a pandas DataFrame, tfs DataFrame, or xtrack.Line) into a ``TbtData`` object. This is useful for workflows where you generate or manipulate data in Python before standardizing it.

Both methods produce a ``TbtData`` object, which can then be used for further analysis or written out to supported formats.

Supported Modules and Limitations
---------------------------------

Different modules support different file formats and workflows (disk reading vs. in-memory conversion). For a detailed table of which modules support which features, and any important limitations, see the documentation for the :mod:`turn_by_turn.io` module.

- Only ``madng`` and ``xtrack`` support in-memory conversion.
- Most modules are for disk reading only.
- Some modules (e.g., ``esrf``) are experimental or have limited support.
- For writing, see the next section.

Writing Data
------------

To write a ``TbtData`` object to disk, use the ``write_tbt`` function. This function supports writing in the LHC SDDS format by default, as well as other supported formats depending on the ``datatype`` argument. The output format is determined by the ``datatype`` you specify, but for most workflows, SDDS is the standard output.

Example::

from turn_by_turn.io import write_tbt
write_tbt("output.sdds", tbt_data)

Package Reference
=================
-----------------

.. toctree::
:caption: Modules
Expand All @@ -24,9 +57,8 @@ Package Reference


Indices and tables
==================
------------------

* :ref:`genindex`
* :ref:`modindex`
* :ref:`search`

1 change: 0 additions & 1 deletion doc/modules/index.rst
Original file line number Diff line number Diff line change
Expand Up @@ -16,4 +16,3 @@
.. automodule:: turn_by_turn.utils
:members:
:noindex:

4 changes: 4 additions & 0 deletions doc/readers/index.rst
Original file line number Diff line number Diff line change
Expand Up @@ -33,5 +33,9 @@
:noindex:

.. automodule:: turn_by_turn.madng
:members:
:noindex:

.. automodule:: turn_by_turn.xtrack_line
:members:
:noindex:
17 changes: 15 additions & 2 deletions pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -29,7 +29,6 @@ requires-python = ">=3.10"
classifiers = [
"Development Status :: 5 - Production/Stable",
"Intended Audience :: Science/Research",
"License :: OSI Approved :: MIT License",
"Natural Language :: English",
"Operating System :: OS Independent",
"Programming Language :: Python :: 3 :: Only",
Expand All @@ -48,14 +47,26 @@ dependencies = [
"pandas >= 2.1",
"sdds >= 0.4",
"h5py >= 2.9",
"tfs-pandas >= 4.0.0", # for madng (could be an optional dependency)
]

[project.optional-dependencies]
madng = [
"tfs-pandas >= 4.0.0", # for reading MAD-NG files (Could do everything in memory with just pandas)
]

xtrack = [
"xtrack >= 0.84.7", # for xtrack
"setuptools >= 65", # for xtrack
"xpart >= 0.23.0", # for xtrack
]

test = [
"pytest>=7.0",
"pytest-cov>=2.9",
"turn_by_turn[madng]",
"turn_by_turn[xtrack]",
]

doc = [
"sphinx >= 7.0",
"sphinx_rtd_theme >= 2.0",
Expand All @@ -64,6 +75,8 @@ doc = [
all = [
"turn_by_turn[test]",
"turn_by_turn[doc]",
"turn_by_turn[madng]",
"turn_by_turn[xtrack]",
]

[project.urls]
Expand Down
46 changes: 46 additions & 0 deletions tests/conftest.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,46 @@
import numpy as np
import pandas as pd
import pytest
from turn_by_turn.structures import TbtData, TransverseData

@pytest.fixture(scope="session")
def example_fake_tbt():
    """
    Provide a ``TbtData`` object built from simulation data taken from MAD-NG.

    The same numbers are used by the xtrack tests, so change them at your own
    risk. The data can be regenerated by running the MAD-NG script in the
    inputs folder; the xtrack test fixture ``example_line`` produces the same
    values.
    """
    names = np.array(["BPM1", "BPM3", "BPM2"])

    # Horizontal/vertical readings of the FIRST particle over three turns,
    # one array per observation point. The second particle's readings are the
    # exact sign-flip of these (negation is bit-exact for floats).
    # NOTE(review): the arrays below are stacked in bpm1/bpm2/bpm3 order while
    # the DataFrame index reads BPM1/BPM3/BPM2 — this mirrors the simulation
    # output ordering of the original fixture exactly; confirm against the
    # MAD-NG input script before "fixing".
    bpm1_x = np.array([1e-3, 0.002414213831, -0.0009999991309])
    bpm1_y = np.array([-1e-3, 0.0004142133507, 0.001000000149])

    bpm3_x = np.array([0.002414213831, -0.0009999991309, -0.002414214191])
    bpm3_y = np.array([0.0004142133507, 0.001000000149, -0.0004142129907])

    bpm2_x = np.array([-0.0009999999503, -0.0004142138307, 0.0009999998012])
    bpm2_y = np.array([0.00100000029, -0.002414213351, -0.001000001159])

    x_rows = [bpm1_x, bpm2_x, bpm3_x]
    y_rows = [bpm1_y, bpm2_y, bpm3_y]

    # First particle uses the data as-is, second particle is its mirror image.
    matrices = [
        TransverseData(
            X=pd.DataFrame(index=names, data=[sign * row for row in x_rows]),
            Y=pd.DataFrame(index=names, data=[sign * row for row in y_rows]),
        )
        for sign in (1, -1)
    ]
    return TbtData(matrices=matrices, bunch_ids=[0, 1], nturns=3)
70 changes: 15 additions & 55 deletions tests/test_madng.py
Original file line number Diff line number Diff line change
@@ -1,86 +1,46 @@

from datetime import datetime

import numpy as np
import pandas as pd
import pytest
from pathlib import Path

from tests.test_lhc_and_general import INPUTS_DIR, compare_tbt
from turn_by_turn import madng, read_tbt, write_tbt
from turn_by_turn.structures import TbtData, TransverseData
from turn_by_turn.structures import TbtData


def test_read_ng(_ng_file):
original = _original_simulation_data()

def test_read_ng(_ng_file: Path, example_fake_tbt: TbtData):
# Check directly from the module
new = madng.read_tbt(_ng_file)
compare_tbt(original, new, no_binary=True)
compare_tbt(example_fake_tbt, new, no_binary=True)

# Check from the main function
new = read_tbt(_ng_file, datatype="madng")
compare_tbt(original, new, no_binary=True)
compare_tbt(example_fake_tbt, new, no_binary=True)

def test_write_ng(_ng_file, tmp_path):
original_tbt = _original_simulation_data()

def test_write_ng(_ng_file: Path, tmp_path: Path, example_fake_tbt: TbtData):
# Write the data
from_tbt = tmp_path / "from_tbt.tfs"
madng.write_tbt(from_tbt, original_tbt)
madng.write_tbt(from_tbt, example_fake_tbt)

# Read the written data
new_tbt = madng.read_tbt(from_tbt)
compare_tbt(original_tbt, new_tbt, no_binary=True)
compare_tbt(example_fake_tbt, new_tbt, no_binary=True)

# Check from the main function
original_tbt = read_tbt(_ng_file, datatype="madng")
write_tbt(from_tbt, original_tbt, datatype="madng")
written_tbt = read_tbt(_ng_file, datatype="madng")
write_tbt(from_tbt, written_tbt, datatype="madng")

new_tbt = read_tbt(from_tbt, datatype="madng")
compare_tbt(original_tbt, new_tbt, no_binary=True)
assert original_tbt.date == new_tbt.date
compare_tbt(written_tbt, new_tbt, no_binary=True)
assert written_tbt.date == new_tbt.date

def test_error_ng(_error_file):
def test_error_ng(_error_file: Path):
with pytest.raises(ValueError):
read_tbt(_error_file, datatype="madng")

# ---- Helpers ---- #
def _original_simulation_data() -> TbtData:
# Create a TbTData object with the original data
names = np.array(["BPM1", "BPM3", "BPM2"])
bpm1_p1_x = np.array([ 1e-3, 0.002414213831,-0.0009999991309])
bpm1_p1_y = np.array([-1e-3, 0.0004142133507, 0.001000000149])
bpm1_p2_x = np.array([-1e-3,-0.002414213831, 0.0009999991309])
bpm1_p2_y = np.array([ 1e-3,-0.0004142133507,-0.001000000149])

bpm2_p1_x = np.array([-0.0009999999503,-0.0004142138307, 0.0009999998012])
bpm2_p1_y = np.array([ 0.00100000029,-0.002414213351,-0.001000001159])
bpm2_p2_x = np.array([ 0.0009999999503, 0.0004142138307,-0.0009999998012])
bpm2_p2_y = np.array([-0.00100000029, 0.002414213351, 0.001000001159])

bpm3_p1_x = np.array([ 0.002414213831,-0.0009999991309,-0.002414214191])
bpm3_p1_y = np.array([ 0.0004142133507, 0.001000000149,-0.0004142129907])
bpm3_p2_x = np.array([-0.002414213831, 0.0009999991309, 0.002414214191])
bpm3_p2_y = np.array([-0.0004142133507,-0.001000000149, 0.0004142129907])

matrix = [
TransverseData( # first particle
X=pd.DataFrame(index=names, data=[bpm1_p1_x, bpm2_p1_x, bpm3_p1_x]),
Y=pd.DataFrame(index=names, data=[bpm1_p1_y, bpm2_p1_y, bpm3_p1_y]),
),
TransverseData( # second particle
X=pd.DataFrame(index=names, data=[bpm1_p2_x, bpm2_p2_x, bpm3_p2_x]),
Y=pd.DataFrame(index=names, data=[bpm1_p2_y, bpm2_p2_y, bpm3_p2_y]),
),
]
return TbtData(matrices=matrix, bunch_ids=[1, 2], nturns=3)


# ---- Fixtures ---- #
@pytest.fixture
def _ng_file(tmp_path):
def _ng_file(tmp_path: Path) -> Path:
return INPUTS_DIR / "madng" / "fodo_track.tfs"

@pytest.fixture
def _error_file(tmp_path):
def _error_file(tmp_path: Path) -> Path:
return INPUTS_DIR / "madng" / "fodo_track_error.tfs"
Loading