Skip to content

Commit 6abc39f

Browse files
committed
chore: run pre-commit hooks
1 parent d40eb99 commit 6abc39f

27 files changed

+134
-159
lines changed

.pre-commit-config.yaml

Lines changed: 64 additions & 34 deletions
Original file line numberDiff line numberDiff line change
@@ -1,35 +1,65 @@
1+
default_language_version:
2+
python: python3.12
3+
4+
exclude: |
5+
(?x)(
6+
^\.venv/|
7+
^\.mypy_cache/|
8+
^\.pytest_cache/|
9+
^build/|
10+
^dist/|
11+
^docs/_build/|
12+
^migrations/
13+
)
14+
115
repos:
2-
- repo: https://github.com/PyCQA/docformatter
3-
rev: v1.7.5
4-
hooks:
5-
- id: docformatter
6-
args: ["--wrap-summaries=100", "--wrap-descriptions=100"]
7-
files: ^src/
8-
- repo: https://github.com/pycqa/isort
9-
rev: 5.13.2
10-
hooks:
11-
- id: isort
12-
files: ^src/
13-
args: ["--profile", "black"]
14-
- repo: https://github.com/psf/black
15-
rev: 24.8.0
16-
hooks:
17-
- id: black
18-
files: ^src/
19-
- repo: https://github.com/pycqa/flake8
20-
rev: 7.1.0
21-
hooks:
22-
- id: flake8
23-
additional_dependencies: [flake8-docstrings]
24-
files: ^src/
25-
args: ["--max-line-length=100", "--extend-ignore=E203,W503,D401"]
26-
- repo: https://github.com/pycqa/pydocstyle
27-
rev: 6.3.0
28-
hooks:
29-
- id: pydocstyle
30-
files: ^src/
31-
- repo: https://github.com/pre-commit/mirrors-mypy
32-
rev: v1.10.0
33-
hooks:
34-
- id: mypy
35-
files: ^src/
16+
- repo: https://github.com/PyCQA/docformatter
17+
rev: v1.7.7  # 1.7.7 is a released tag that includes the fixes previously only on master
18+
hooks:
19+
- id: docformatter
20+
args: ["--wrap-summaries=100", "--wrap-descriptions=100"]
21+
files: ^src/
22+
23+
24+
- repo: https://github.com/pycqa/isort
25+
rev: 6.0.1
26+
hooks:
27+
- id: isort
28+
files: ^(src|tests)/
29+
args: ["--profile", "black", "--line-length", "100"]
30+
31+
- repo: https://github.com/psf/black
32+
rev: 25.1.0
33+
hooks:
34+
- id: black
35+
files: ^(src|tests)/
36+
args: ["--line-length", "100"]
37+
38+
- repo: https://github.com/pycqa/flake8
39+
rev: 7.3.0
40+
hooks:
41+
- id: flake8
42+
files: ^(src|tests)/
43+
additional_dependencies:
44+
- flake8-docstrings
45+
# add plugins you like, e.g. flake8-bugbear
46+
# - flake8-bugbear
47+
args: ["--max-line-length=100", "--extend-ignore=E203,W503,D401"]
48+
49+
# Remove this if you keep flake8-docstrings above (to avoid duplicate checks)
50+
# - repo: https://github.com/pycqa/pydocstyle
51+
# rev: 6.3.0
52+
# hooks:
53+
# - id: pydocstyle
54+
# files: ^(src|tests)/
55+
56+
- repo: https://github.com/pre-commit/mirrors-mypy
57+
rev: v1.17.1
58+
hooks:
59+
- id: mypy
60+
files: ^src/
61+
# Add stubs your project needs:
62+
additional_dependencies:
63+
- types-requests
64+
- types-PyYAML
65+
# If you have mypy.ini/pyproject.toml, it will be picked up automatically

src/ssspx/bench.py

Lines changed: 3 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -117,21 +117,15 @@ def main(argv: List[str] | None = None) -> None:
117117
argv: Optional argument list for testing.
118118
"""
119119
parser = argparse.ArgumentParser(description=__doc__)
120-
parser.add_argument(
121-
"--trials", type=int, default=1, help="Number of trials per configuration"
122-
)
120+
parser.add_argument("--trials", type=int, default=1, help="Number of trials per configuration")
123121
parser.add_argument(
124122
"--sizes",
125123
nargs="+",
126124
default=["10,20", "20,40"],
127125
help="Size pairs as n,m (e.g. 1000,5000). Defaults to a small demo.",
128126
)
129-
parser.add_argument(
130-
"--seed-base", type=int, default=0, help="Base seed for random graphs"
131-
)
132-
parser.add_argument(
133-
"--out-csv", type=Path, help="Optional path to write per-trial CSV data"
134-
)
127+
parser.add_argument("--seed-base", type=int, default=0, help="Base seed for random graphs")
128+
parser.add_argument("--out-csv", type=Path, help="Optional path to write per-trial CSV data")
135129
parser.add_argument(
136130
"--mem",
137131
action="store_true",

src/ssspx/cli.py

Lines changed: 11 additions & 37 deletions
Original file line numberDiff line numberDiff line change
@@ -10,13 +10,7 @@
1010
from pathlib import Path
1111
from typing import List, Optional, Tuple
1212

13-
from .exceptions import (
14-
ConfigError,
15-
GraphFormatError,
16-
InputError,
17-
NotSupportedError,
18-
SSSPXError,
19-
)
13+
from .exceptions import ConfigError, GraphFormatError, InputError, NotSupportedError, SSSPXError
2014
from .export import export_dag_graphml, export_dag_json
2115
from .graph import Graph
2216
from .io import read_graph
@@ -110,26 +104,16 @@ def main(argv: Optional[List[str]] = None) -> int:
110104
default=None,
111105
help="Comma-separated list of source vertex ids",
112106
)
113-
p.add_argument(
114-
"--target", type=int, default=None, help="Target vertex id for path output"
115-
)
107+
p.add_argument("--target", type=int, default=None, help="Target vertex id for path output")
116108

117-
p.add_argument(
118-
"--no-transform", action="store_true", help="Disable outdegree transform"
119-
)
120-
p.add_argument(
121-
"--target-outdeg", type=int, default=4, help="Outdegree cap when transforming"
122-
)
109+
p.add_argument("--no-transform", action="store_true", help="Disable outdegree transform")
110+
p.add_argument("--target-outdeg", type=int, default=4, help="Outdegree cap when transforming")
123111
p.add_argument("--frontier", choices=["block", "heap"], default="block")
124112

125113
# Profiling + export
126114
p.add_argument("--profile", action="store_true", help="Enable cProfile")
127-
p.add_argument(
128-
"--profile-out", type=str, default=None, help="Dump .prof file to this path"
129-
)
130-
p.add_argument(
131-
"--export-json", type=str, default=None, help="Write shortest-path DAG as JSON"
132-
)
115+
p.add_argument("--profile-out", type=str, default=None, help="Dump .prof file to this path")
116+
p.add_argument("--export-json", type=str, default=None, help="Write shortest-path DAG as JSON")
133117
p.add_argument(
134118
"--export-graphml",
135119
type=str,
@@ -165,9 +149,7 @@ def main(argv: Optional[List[str]] = None) -> int:
165149
)
166150

167151
stream = sys.stdout if args.log_json else sys.stderr
168-
level = (
169-
"info" if args.log_json and args.log_level == "warning" else args.log_level
170-
)
152+
level = "info" if args.log_json and args.log_level == "warning" else args.log_level
171153
logger = StdLogger(level=level, json_fmt=args.log_json, stream=stream)
172154

173155
if args.sources is not None:
@@ -194,15 +176,11 @@ def main(argv: Optional[List[str]] = None) -> int:
194176
t0 = time.perf_counter()
195177
if args.profile:
196178
with ProfileSession(dump_path=args.profile_out) as prof:
197-
solver = SSSPSolver(
198-
G, sources[0], config=cfg, logger=logger, sources=sources
199-
)
179+
solver = SSSPSolver(G, sources[0], config=cfg, logger=logger, sources=sources)
200180
res = solver.solve()
201181
sys.stderr.write(prof.report().to_text(lines=40))
202182
else:
203-
solver = SSSPSolver(
204-
G, sources[0], config=cfg, logger=logger, sources=sources
205-
)
183+
solver = SSSPSolver(G, sources[0], config=cfg, logger=logger, sources=sources)
206184
res = solver.solve()
207185
wall_ms = (time.perf_counter() - t0) * 1000.0
208186
_, peak = tracemalloc.get_traced_memory()
@@ -212,15 +190,11 @@ def main(argv: Optional[List[str]] = None) -> int:
212190
t0 = time.perf_counter()
213191
if args.profile:
214192
with ProfileSession(dump_path=args.profile_out) as prof:
215-
solver = SSSPSolver(
216-
G, sources[0], config=cfg, logger=logger, sources=sources
217-
)
193+
solver = SSSPSolver(G, sources[0], config=cfg, logger=logger, sources=sources)
218194
res = solver.solve()
219195
sys.stderr.write(prof.report().to_text(lines=40))
220196
else:
221-
solver = SSSPSolver(
222-
G, sources[0], config=cfg, logger=logger, sources=sources
223-
)
197+
solver = SSSPSolver(G, sources[0], config=cfg, logger=logger, sources=sources)
224198
res = solver.solve()
225199
wall_ms = (time.perf_counter() - t0) * 1000.0
226200
peak_mib = None

src/ssspx/frontier.py

Lines changed: 1 addition & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -186,9 +186,7 @@ def pull(self) -> Tuple[Set[Vertex], Float]:
186186

187187
got = self._consume_block_prefix(self._d0, self.M, chosen, pulled_keys)
188188
if got < self.M:
189-
got += self._consume_block_prefix(
190-
self._d1, self.M - got, chosen, pulled_keys
191-
)
189+
got += self._consume_block_prefix(self._d1, self.M - got, chosen, pulled_keys)
192190
new_bounds: List[Float] = []
193191
for blk in self._d1:
194192
if blk:

src/ssspx/graph_numpy.py

Lines changed: 1 addition & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -25,9 +25,7 @@ def __post_init__(self) -> None:
2525
"""Validate initialization arguments and allocate adjacency storage."""
2626
if not isinstance(self.n, int) or self.n <= 0:
2727
raise InputError("Graph.n must be a positive integer.")
28-
self.adj: List[np.ndarray] = [
29-
np.zeros((0, 2), dtype=float) for _ in range(self.n)
30-
]
28+
self.adj: List[np.ndarray] = [np.zeros((0, 2), dtype=float) for _ in range(self.n)]
3129

3230
def add_edge(self, u: Vertex, v: Vertex, w: Float) -> None:
3331
"""Add a directed edge from ``u`` to ``v``."""

src/ssspx/io.py

Lines changed: 1 addition & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -135,11 +135,7 @@ def _read_graphml(path: Path) -> Tuple[int, EdgeList]:
135135
w_attr = edge.attrib.get("weight")
136136
if w_attr is None:
137137
data = edge.find(f"{ns}data[@key='w']")
138-
w = (
139-
float(data.text)
140-
if (data is not None and data.text is not None)
141-
else 1.0
142-
)
138+
w = float(data.text) if (data is not None and data.text is not None) else 1.0
143139
else:
144140
w = float(w_attr)
145141
edges.append((u, v, w))

src/ssspx/logger.py

Lines changed: 2 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -50,9 +50,7 @@ def __init__(
5050
def _enabled(self, level: str) -> bool: # pragma: no cover - thin wrapper
5151
return self._levels[level] >= self._levels.get(self.level, 20)
5252

53-
def log(
54-
self, level: str, event: str, **fields: Any
55-
) -> None: # pragma: no cover - simple I/O
53+
def log(self, level: str, event: str, **fields: Any) -> None: # pragma: no cover - simple I/O
5654
"""Emit a log ``event`` at ``level`` with additional ``fields``."""
5755
if not self._enabled(level):
5856
return
@@ -69,8 +67,6 @@ def info(self, event: str, **fields: Any) -> None: # pragma: no cover - passthr
6967
"""Emit an ``INFO`` event."""
7068
self.log("info", event, **fields)
7169

72-
def debug(
73-
self, event: str, **fields: Any
74-
) -> None: # pragma: no cover - passthrough
70+
def debug(self, event: str, **fields: Any) -> None: # pragma: no cover - passthrough
7571
"""Emit a ``DEBUG`` event."""
7672
self.log("debug", event, **fields)

src/ssspx/path.py

Lines changed: 1 addition & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -31,12 +31,7 @@ def reconstruct_path_basic(
3131
if source == target:
3232
return [source]
3333

34-
if (
35-
target < 0
36-
or target >= len(predecessors)
37-
or source < 0
38-
or source >= len(predecessors)
39-
):
34+
if target < 0 or target >= len(predecessors) or source < 0 or source >= len(predecessors):
4035
raise ValueError("source/target out of range.")
4136

4237
# Walk backwards from target to source

src/ssspx/solver.py

Lines changed: 5 additions & 21 deletions
Original file line numberDiff line numberDiff line change
@@ -265,11 +265,7 @@ def _find_pivots(self, B: Float, S: Set[Vertex]) -> Tuple[Set[Vertex], Set[Verte
265265
children: Dict[Vertex, List[Vertex]] = {u: [] for u in W}
266266
for v in W:
267267
p = self.pred[v]
268-
if (
269-
p is not None
270-
and p in W
271-
and self.dhat[p] + self._weight(p, v) == self.dhat[v]
272-
):
268+
if p is not None and p in W and self.dhat[p] + self._weight(p, v) == self.dhat[v]:
273269
children[p].append(v)
274270

275271
P: Set[Vertex] = set()
@@ -337,9 +333,7 @@ def _bmssp(self, level: int, B: Float, S: Set[Vertex]) -> Tuple[Float, Set[Verte
337333
elif B_i_prime <= val < B_i:
338334
K_pairs.append((v, val))
339335

340-
extra_pairs = [
341-
(x, self.dhat[x]) for x in S_i if B_i_prime <= self.dhat[x] < B_i
342-
]
336+
extra_pairs = [(x, self.dhat[x]) for x in S_i if B_i_prime <= self.dhat[x] < B_i]
343337
if K_pairs or extra_pairs:
344338
D.batch_prepend(K_pairs + extra_pairs)
345339

@@ -362,11 +356,7 @@ def solve(self) -> SSSPResult:
362356
_Bprime, _U = self._bmssp(top_level, B, S0)
363357

364358
# If we transformed, compress distances back to original vertices (predecessors omitted).
365-
if (
366-
self.cfg.use_transform
367-
and self._mapping is not None
368-
and self._clone2orig is not None
369-
):
359+
if self.cfg.use_transform and self._mapping is not None and self._clone2orig is not None:
370360
comp: List[Float] = [math.inf] * self._G_orig.n
371361
best_clone: List[int] = [-1] * self._G_orig.n
372362
for u_orig, clones in self._mapping.items():
@@ -405,11 +395,7 @@ def path(self, target_original: Vertex) -> List[Vertex]:
405395
raise AlgorithmError("Call solve() before requesting paths.")
406396

407397
# No transform: reconstruct directly in original space
408-
if (
409-
not self.cfg.use_transform
410-
or self._mapping is None
411-
or self._clone2orig is None
412-
):
398+
if not self.cfg.use_transform or self._mapping is None or self._clone2orig is None:
413399
src = self.root[target_original]
414400
if src < 0:
415401
return []
@@ -420,9 +406,7 @@ def path(self, target_original: Vertex) -> List[Vertex]:
420406
raise AlgorithmError("target out of range for original graph.")
421407

422408
if self._best_clone_for_orig is None:
423-
raise AlgorithmError(
424-
"Internal state missing best-clone cache. Call solve() first."
425-
)
409+
raise AlgorithmError("Internal state missing best-clone cache. Call solve() first.")
426410

427411
start_clone = self._best_clone_for_orig[target_original]
428412
if start_clone < 0:

src/ssspx/transform.py

Lines changed: 1 addition & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -8,9 +8,7 @@
88
from .graph import Float, Graph, Vertex
99

1010

11-
def constant_outdegree_transform(
12-
G: Graph, delta: int
13-
) -> Tuple[Graph, Dict[Vertex, List[Vertex]]]:
11+
def constant_outdegree_transform(G: Graph, delta: int) -> Tuple[Graph, Dict[Vertex, List[Vertex]]]:
1412
"""Split vertices so that every vertex has out-degree at most ``delta``.
1513
1614
The transformation replaces a vertex with a chain of clones. Each clone

0 commit comments

Comments
 (0)