
Commit ede936a

Linter configuration and fixes (#138)
1 parent 2e57d58 commit ede936a

15 files changed: +181 -76 lines changed


doc/conf.py

Lines changed: 1 addition & 1 deletion
@@ -99,7 +99,7 @@ def about_package(init_posixpath: pathlib.Path) -> dict:

 # Override link in 'Edit on Github'
 rst_prolog = f"""
-:github_url: {ABOUT_TFS['__url__']}
+:github_url: {ABOUT_TFS["__url__"]}
 """

 # The version info for the project you're documenting, acts as replacement for
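The only change here is a quote swap inside the f-string expression, matching ruff's double-quote preference. A minimal standalone sketch of the pattern (the ABOUT_TFS value below is a hypothetical stand-in, not what doc/conf.py actually computes); because the enclosing literal is triple-quoted, the inner double quotes do not clash with the delimiter, so this spelling also runs on the configured target of Python 3.10, not only on 3.12+:

# Hypothetical stand-in for the dict that doc/conf.py builds from the package metadata.
ABOUT_TFS = {"__url__": "https://github.com/pylhc/tfs"}

# Double quotes inside the expression are fine: the literal is delimited by triple quotes.
rst_prolog = f"""
:github_url: {ABOUT_TFS["__url__"]}
"""
print(rst_prolog)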

pyproject.toml

Lines changed: 55 additions & 4 deletions
@@ -86,9 +86,7 @@ repository = "https://github.com/pylhc/tfs"
 documentation = "https://pylhc.github.io/tfs/ "
 changelog = "https://github.com/pylhc/tfs/blob/master/CHANGELOG.md"

-
-[tool.ruff]
-target-version = "py310"  # Assume Python 3.10+
+# ----- Tests Configuration ----- #

 [tool.pytest.ini_options]
 addopts = "--cov-report=xml --cov-report term-missing --cov-config=pyproject.toml --cov=tfs"
@@ -98,4 +96,57 @@ testpaths = ["tests"]
 exclude_also = [
   "if TYPE_CHECKING:", # do not count type checking imports (ignored at runtime) for coverage
   "except ImportError:", # do not count missing optional dependencies set to None, we monkeypatch and test that
-]
+]
+
+# ----- Dev Tools Configuration ----- #
+
+[tool.ruff]
+exclude = [
+  ".eggs",
+  ".git",
+  ".mypy_cache",
+  ".venv",
+  "_build",
+  "build",
+  "dist",
+]
+
+# Assume Python 3.10+
+target-version = "py310"
+
+line-length = 110
+indent-width = 4
+
+[tool.ruff.lint]
+# Allow unused variables when underscore-prefixed.
+dummy-variable-rgx = "^(_+|(_+[a-zA-Z0-9_]*[a-zA-Z0-9]+?))$"
+ignore = [
+  "E501",    # line too long
+  "FBT001",  # boolean-type-hint-positional-argument
+  "FBT002",  # boolean-default-value-positional-argument
+  "PT019",   # pytest-fixture-param-without-value (but suggested solution fails)
+]
+extend-select = [
+  "F",    # Pyflakes rules
+  "W",    # PyCodeStyle warnings
+  "E",    # PyCodeStyle errors
+  "I",    # Sort imports properly
+  "A",    # Detect shadowed builtins
+  "N",    # enforce naming conventions, e.g. ClassName vs function_name
+  "UP",   # Warn if certain things can changed due to newer Python versions
+  "C4",   # Catch incorrect use of comprehensions, dict, list, etc
+  "FA",   # Enforce from __future__ import annotations
+  "FBT",  # detect boolean traps
+  "ISC",  # Good use of string concatenation
+  "BLE",  # disallow catch-all exceptions
+  "ICN",  # Use common import conventions
+  "RET",  # Good return practices
+  "SIM",  # Common simplification rules
+  "TID",  # Some good import practices
+  "TC",   # Enforce importing certain types in a TYPE_CHECKING block
+  "PTH",  # Use pathlib instead of os.path
+  "NPY",  # Some numpy-specific things
+]
+# Allow fix for all enabled rules (when `--fix`) is provided.
+fixable = ["ALL"]
+unfixable = []
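The new [tool.ruff] block is the heart of the commit: a 110-character line limit, Python 3.10 as the target, and an extended rule selection; with fixable = ["ALL"], ruff check --fix may auto-apply fixes for any of the selected rules. As a hedged illustration of what a few of the enabled families (PTH, C4, UP) push code towards — the helper below is made up for this page, not part of tfs-pandas:

import pathlib

# Flagged style (sketch): os.path plus a needless dict([...]) construction.
#   import os
#   def list_tfs_files(folder):
#       names = [f for f in os.listdir(folder) if f.endswith(".tfs")]
#       return dict([(name, os.path.join(folder, name)) for name in names])

# Preferred style under PTH/C4/UP: pathlib and a plain dict comprehension.
def list_tfs_files(folder: pathlib.Path) -> dict[str, pathlib.Path]:
    """Map file names to paths for the .tfs files in 'folder' (illustrative only)."""
    return {path.name: path for path in folder.glob("*.tfs")}


print(list_tfs_files(pathlib.Path(".")))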

tests/conftest.py

Lines changed: 19 additions & 14 deletions
@@ -47,46 +47,50 @@ def _tfs_madng_file() -> pathlib.Path:

 @pytest.fixture
 def _pd_dataframe() -> pd.DataFrame:
+    rng = np.random.default_rng()
     return pd.DataFrame(
         index=range(3),
-        columns="a b c d e".split(),
-        data=np.random.rand(3, 5),
+        columns=["a", "b", "c", "d", "e"],
+        data=rng.random(size=(3, 5)),
     )


 @pytest.fixture
 def _tfs_dataframe() -> TfsDataFrame:
+    rng = np.random.default_rng()
     return TfsDataFrame(
         index=range(15),
-        columns="a b c d e".split(),
-        data=np.random.rand(15, 5),
+        columns=["a", "b", "c", "d", "e"],
+        data=rng.random(size=(15, 5)),
         headers={"Title": "Tfs Title", "Value": 3.3663},
     )


 @pytest.fixture
 def _tfs_dataframe_booleans() -> TfsDataFrame:
     """TfsDataFrame with boolean values in the headers and data (1 column)."""
+    rng = np.random.default_rng()
     df = TfsDataFrame(
         index=range(15),
-        columns="a b c d e".split(),
-        data=np.random.rand(15, 5),
+        columns=["a", "b", "c", "d", "e"],
+        data=rng.random(size=(15, 5)),
         headers={"Title": "Bool Test", "Bool1": True, "Bool2": False, "Bool3": 1},
     )
-    df["bools"] = np.random.rand(15) > 0.5  # random from 0 to 1 and then boolean check
+    df["bools"] = rng.random(15) > 0.5  # random from 0 to 1 and then boolean check
     return df


 @pytest.fixture
 def _tfs_dataframe_complex() -> TfsDataFrame:
     """TfsDataFrame with complex values in the headers and data (1 column)."""
+    rng = np.random.default_rng()
     df = TfsDataFrame(
         index=range(15),
-        columns="a b c d e".split(),
-        data=np.random.rand(15, 5),
+        columns=["a", "b", "c", "d", "e"],
+        data=rng.random(size=(15, 5)),
         headers={"Title": "Complex Test", "Complex1": 1 + 2j, "Complex2": -4 - 17.9j},
     )
-    df["complex"] = np.random.rand(15) + np.random.rand(15) * 1j
+    df["complex"] = rng.random(15) + rng.random(15) * 1j
     return df

@@ -96,10 +100,11 @@ def _tfs_dataframe_madng() -> TfsDataFrame:
     TfsDataFrame with both booleans and complex
     values in the headers and data (1 column each).
     """
+    rng = np.random.default_rng()
     df = TfsDataFrame(
         index=range(15),
-        columns="a b c d e".split(),
-        data=np.random.rand(15, 5),
+        columns=["a", "b", "c", "d", "e"],
+        data=rng.random(size=(15, 5)),
         headers={
             "Title": "MADNG Test",
             "Bool1": True,
@@ -109,6 +114,6 @@ def _tfs_dataframe_madng() -> TfsDataFrame:
             "Complex2": -94.6 - 67.9j,
         },
     )
-    df["bools"] = np.random.rand(15) > 0.5  # random from 0 to 1 and then boolean check
-    df["complex"] = np.random.rand(15) + np.random.rand(15) * 1j
+    df["bools"] = rng.random(15) > 0.5  # random from 0 to 1 and then boolean check
+    df["complex"] = rng.random(15) + rng.random(15) * 1j
     return df
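Beyond the column-list cleanup, the recurring change in these fixtures comes from ruff's NumPy rules (the "NPY" family selected above): the legacy global np.random.rand is replaced by an explicit Generator from np.random.default_rng. A minimal standalone sketch of the equivalence, not tied to the fixtures themselves:

import numpy as np

# Legacy API: draws from a hidden module-level RandomState.
legacy = np.random.rand(15, 5)  # uniform floats in [0, 1)

# Modern API: an explicit Generator; passing a seed makes test data reproducible.
rng = np.random.default_rng(seed=42)
modern = rng.random(size=(15, 5))  # same distribution and shape

assert legacy.shape == modern.shape == (15, 5)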

tests/test_collection.py

Lines changed: 5 additions & 6 deletions
@@ -21,7 +21,6 @@ def _get_filename(self, template, plane=""):


 class TestRead:
-
     def test_read_pathlib_input(
         self, _input_dir_pathlib: pathlib.Path, _tfs_x: TfsDataFrame, _tfs_y: TfsDataFrame
     ):
@@ -48,13 +47,13 @@ def test_read_str_input(self, _input_dir_str: str, _tfs_x: TfsDataFrame, _tfs_y:


 class TestWrite:
-
     def test_write(self, _tfs_x: TfsDataFrame, _tfs_y: TfsDataFrame, tmp_path):
         c = CollectionTest(tmp_path)
         file_x_path = tmp_path / "nofile_x.tfs"
         assert not file_x_path.is_file()

-        c.nofile_x = _tfs_y  # only assigns dataframe without writing (use _tfs_y so that we can set _tfs_x below)
+        # only assigns dataframe without writing (use _tfs_y so that we can set _tfs_x below)
+        c.nofile_x = _tfs_y
         assert not file_x_path.is_file()
         assert_tfs_frame_equal(_tfs_y, c.nofile_x)

@@ -130,7 +129,9 @@ def test_buffer_flush(self, _input_dir_str: str, _tfs_x: TfsDataFrame, _tfs_y: T
         assert tfs_x_after_flush.loc["BPMSX.4L2.B1", "NUMBER"] == -199
         assert tfs_y_after_flush.loc["BPMSX.4L2.B1", "NUMBER"] == -19

-    def test_buffer_flush_nowrite(self, _input_dir_str: str, _tfs_x: TfsDataFrame, _tfs_y: TfsDataFrame, tmp_path):
+    def test_buffer_flush_nowrite(
+        self, _input_dir_str: str, _tfs_x: TfsDataFrame, _tfs_y: TfsDataFrame, tmp_path
+    ):
         c = CollectionTest(tmp_path, allow_write=True)

         c.file_x = _tfs_x.copy()
@@ -155,7 +156,6 @@ def test_buffer_flush_nowrite(self, _input_dir_str: str, _tfs_x: TfsDataFrame, _


 class TestFilenames:
-
     def test_tfscollection_getfilename_not_implemented(self):
         with pytest.raises(NotImplementedError):
             TfsCollection._get_filename("doesnt matter")  # noqa: SLF001
@@ -206,7 +206,6 @@ def test_get_path(self, _input_dir_pathlib: pathlib.Path):


 class TestOther:
-
     def test_access_methods(self, _input_dir_pathlib: pathlib.Path):
         c = CollectionTest(_input_dir_pathlib, allow_write=False)

tests/test_compression.py

Lines changed: 2 additions & 1 deletion
@@ -12,7 +12,8 @@

 from .conftest import INPUTS_DIR

-SUPPORTED_EXTENSIONS: tuple[str] = ["gz", "bz2", "zip", "xz", "zst", "tar", "tar.gz"]  # through pandas
+# Compression extensions supported through pandas
+SUPPORTED_EXTENSIONS: tuple[str] = ("gz", "bz2", "zip", "xz", "zst", "tar", "tar.gz")

 # ----- Compression tests with 'classic' TFS files (no MAD-NG features) ----- #
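The substantive fix here is that the value was a list annotated as a tuple; the literal is now a tuple, so value and annotation agree in kind. As a side note (my observation, not something this commit changes), tuple[str] strictly describes a one-element tuple; the usual spelling for a variable-length homogeneous tuple is tuple[str, ...], sketched below:

# Illustrative only, not code from the repository.
SUPPORTED: tuple[str, ...] = ("gz", "bz2", "zip", "xz", "zst", "tar", "tar.gz")
# tuple[str] would only describe a single-element tuple such as ("gz",);
# tuple[str, ...] accepts any length, which is what this constant holds.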

tests/test_frame.py

Lines changed: 10 additions & 1 deletion
@@ -117,7 +117,16 @@ def test_header_print(self):
         assert str(val) in print_out

     def test_long_headers_print(self):
-        headers = {"p1": 1, "p2": "hello", "p3": 3, "p4": 4, "p5": 5, "p6": 6, "p7": "string", "p8": "long"}
+        headers = {
+            "p1": 1,
+            "p2": "hello",
+            "p3": 3,
+            "p4": 4,
+            "p5": 5,
+            "p6": 6,
+            "p7": "string",
+            "p8": "long",
+        }
         df = TfsDataFrame(headers=headers)
         print_out = str(df)
         assert "Headers" in print_out

tests/test_reader.py

Lines changed: 6 additions & 2 deletions
@@ -71,7 +71,9 @@ def test_tfs_read_write_read_pathlib_input(self, _tfs_filex: pathlib.Path, tmp_p

     def test_read_write_wise_header(self, _tfs_file_wise, tmp_path):
         original_text = _tfs_file_wise.read_text()
-        original_header_lines = [line for line in original_text.splitlines() if line.strip().startswith(HEADER)]
+        original_header_lines = [
+            line for line in original_text.splitlines() if line.strip().startswith(HEADER)
+        ]
         df = read_tfs(_tfs_file_wise)

         assert len(df.headers) == len(original_header_lines)
@@ -111,7 +113,9 @@ def test_read_file_with_empty_lines_in_header(self, _tfs_file_empty_lines, _tfs_
         df_for_compare = read_tfs(_tfs_filex)
         assert_tfs_frame_equal(df, df_for_compare)

-    def test_read_file_single_header_empty_line_in_header(self, _tfs_file_single_header_empty_line, _tfs_filex):
+    def test_read_file_single_header_empty_line_in_header(
+        self, _tfs_file_single_header_empty_line, _tfs_filex
+    ):
         """Very special, but this was a case that failed in the past."""
         df = read_tfs(_tfs_file_single_header_empty_line)
         assert len(df.headers) == 1

tests/test_testing.py

Lines changed: 0 additions & 1 deletion
@@ -5,7 +5,6 @@


 class TestAssertTfsDataFrameEqual:
-
     def test_no_headers_equal(self):
         df1 = TfsDataFrame({"a": [1, 2, 3], "b": [4, 5, 6]})
         assert_tfs_frame_equal(df1, df1)  # we expect True

tests/test_validation.py

Lines changed: 12 additions & 5 deletions
@@ -74,7 +74,9 @@ def test_validate_raises_on_wrong_unique_behavior(self, validation_mode):
             validate(df, "", non_unique_behavior="invalid", compatibility=validation_mode)

     @pytest.mark.parametrize("validation_mode", ["madx", "mad-x", "madng", "MAD-NG"])
-    def test_validation_raises_space_in_colname(self, _space_in_colnames_tfs_path: pathlib.Path, validation_mode):
+    def test_validation_raises_space_in_colname(
+        self, _space_in_colnames_tfs_path: pathlib.Path, validation_mode
+    ):
         # Read file has a space in a column name which should raise
         with pytest.raises(SpaceinColumnNameError, match="TFS-Columns can not contain spaces."):
             _ = read_tfs(_space_in_colnames_tfs_path, index="NAME", validate=validation_mode)
@@ -124,22 +126,26 @@ class TestMADXFailures:
     def test_madx_validation_raises_if_no_headers(self, _pd_dataframe, validation_mode):
         """MAD-X expects at least a 'TYPE' header. If there are no headers, we raise."""
         df = _pd_dataframe
-        with pytest.raises(MADXCompatibilityError, match="Headers should be present in MAD-X compatibility mode"):
+        with pytest.raises(
+            MADXCompatibilityError, match="Headers should be present in MAD-X compatibility mode"
+        ):
             validate(df, compatibility=validation_mode)

     @pytest.mark.parametrize("validation_mode", ["madx", "mad-x", "mAd-X"])
     def test_madx_validation_raises_on_boolean_headers(self, _tfs_booleans_file, validation_mode):
         df = read_tfs(_tfs_booleans_file)
         with pytest.raises(
-            MADXCompatibilityError, match="TFS-Headers can not contain boolean values in MAD-X compatibility mode"
+            MADXCompatibilityError,
+            match="TFS-Headers can not contain boolean values in MAD-X compatibility mode",
         ):
             validate(df, compatibility=validation_mode)

     @pytest.mark.parametrize("validation_mode", ["madx", "mad-x", "mAd-X"])
     def test_madx_validation_raises_on_complex_headers(self, _tfs_complex_file, validation_mode):
         df = read_tfs(_tfs_complex_file)
         with pytest.raises(
-            MADXCompatibilityError, match="TFS-Headers can not contain complex values in MAD-X compatibility mode"
+            MADXCompatibilityError,
+            match="TFS-Headers can not contain complex values in MAD-X compatibility mode",
         ):
             validate(df, compatibility=validation_mode)

@@ -148,7 +154,8 @@ def test_madx_validation_raises_on_none_headers(self, _tfs_dataframe, validation
         df = _tfs_dataframe
         df.headers["NONEVALUE"] = None
         with pytest.raises(
-            MADXCompatibilityError, match="TFS-Headers can not contain 'None' values in MAD-X compatibility mode"
+            MADXCompatibilityError,
+            match="TFS-Headers can not contain 'None' values in MAD-X compatibility mode",
         ):
             validate(df, compatibility=validation_mode)
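These hunks only re-wrap long pytest.raises calls to the new 110-character limit; the assertions themselves are unchanged. For context, the match argument is a regular expression that pytest checks with re.search against the string of the raised exception, so a distinctive substring is enough. A minimal standalone sketch with a made-up stand-in exception, not imported from tfs:

import pytest


class MADXCompatibilityError(Exception):
    """Stand-in exception for this illustration."""


def test_match_uses_regex_search():
    with pytest.raises(
        MADXCompatibilityError,
        match="boolean values in MAD-X compatibility mode",  # re.search semantics: substring is enough
    ):
        raise MADXCompatibilityError(
            "TFS-Headers can not contain boolean values in MAD-X compatibility mode"
        )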
