-
Notifications
You must be signed in to change notification settings - Fork 1
Expand file tree
/
Copy pathjustfile
More file actions
235 lines (188 loc) · 6.93 KB
/
justfile
File metadata and controls
235 lines (188 loc) · 6.93 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
# Use PowerShell for recipe lines on Windows so recipes stay cross-platform.
set windows-shell := ["powershell.exe", "-NoLogo", "-Command"]

# Unstable features are required for `script-interpreter` and `just --fmt`.
set unstable := true

# Run [script] recipes through uv's PEP 723 script runner.
set script-interpreter := ['uv', 'run', '--script']

# Expose this justfile's directory on PYTHONPATH so recipes can import the
# package without installing it.
export PYTHONPATH := source_directory()
# Default recipe: show the list of available recipes (hidden from that list).
[private]
default:
    @just --list --list-submodules
# install the uv package manager
[linux]
[macos]
install-uv:
    curl -LsSf https://astral.sh/uv/install.sh | sh

# install the uv package manager
[windows]
install-uv:
    powershell -ExecutionPolicy ByPass -c "irm https://astral.sh/uv/install.ps1 | iex"
# setup the venv, pre-commit hooks
setup python="python":
    uv venv -p {{ python }}
    @just install-precommit

# install git pre-commit hooks
install-precommit:
    @just run --no-default-groups --group precommit --exact --isolated pre-commit install

# update and install development dependencies
install *OPTS:
    @just install-precommit
    uv sync {{ OPTS }}

# sync only the documentation dependency group (private helper for doc recipes)
_install-docs:
    uv sync --no-default-groups --group docs --all-extras
# run static type checking with mypy
check-types-mypy *RUN_ARGS:
    @just run --no-default-groups --all-extras --group typing {{ RUN_ARGS }} mypy

# run static type checking with pyright
check-types-pyright *RUN_ARGS:
    @just run --no-default-groups --all-extras --group typing {{ RUN_ARGS }} pyright

# run all static type checking
check-types: check-types-mypy check-types-pyright

# run all static type checking in an isolated environment
check-types-isolated:
    @just check-types-mypy --exact --isolated
    @just check-types-pyright --exact --isolated

# run package checks
check-package:
    uv pip check
# remove doc build artifacts
[script]
clean-docs:
    import shutil
    # ignore_errors: a missing build dir is not a failure
    shutil.rmtree('./doc/build', ignore_errors=True)

# remove the virtual environment
[script]
clean-env:
    # fix: dropped an unused `import sys` that served no purpose here
    import shutil
    shutil.rmtree(".venv", ignore_errors=True)

# remove all git ignored files
clean-git-ignored:
    git clean -fdX

# remove all non repository artifacts
clean: clean-docs clean-env clean-git-ignored
# build html documentation
build-docs-html: _install-docs
    @just run sphinx-build --fresh-env --builder html --doctree-dir ./doc/build/doctrees ./doc/source ./doc/build/html

# build pdf documentation
# fix: `latexpdf` is not a registered builder for --builder/-b; it is a
# make-mode target and must be invoked via `-M latexpdf` (which runs the
# latex builder and then pdflatex). The previous `--builder latexpdf`
# invocation fails with "builder name latexpdf not registered".
build-docs-pdf: _install-docs
    @just run sphinx-build -M latexpdf ./doc/source ./doc/build/pdf --fresh-env --doctree-dir ./doc/build/doctrees

# build the docs
build-docs: build-docs-html

# build docs and package
build: build-docs-html
    uv build
# open the html documentation
[script]
open-docs:
    import os
    import webbrowser
    # open the locally built index page in the default browser
    webbrowser.open(f'file://{os.getcwd()}/doc/build/html/index.html')

# build and open the documentation
docs: _install-docs build-docs-html open-docs

# serve the documentation, with auto-reload
docs-live:
    @just run --no-default-groups --group docs sphinx-autobuild doc/source doc/build --open-browser --watch src --port 8000 --delay 1

# run the sphinx linkcheck builder; the leading `-` tolerates its non-zero
# exit so check-docs-links can post-process the JSON report itself
_link_check:
    -uv run --no-default-groups --group docs sphinx-build -b linkcheck -Q -D linkcheck_timeout=10 ./doc/source ./doc/build
# check the documentation links for broken links
[script]
check-docs-links: _link_check
    import os
    import sys
    import json
    from pathlib import Path

    # The json output isn't valid, so we have to fix it before we can process.
    # (linkcheck emits one JSON object per line; wrap them into a JSON array)
    data = json.loads(f"[{','.join((Path(os.getcwd()) / 'doc/build/output.json').read_text().splitlines())}]")
    # anything not in the allow-list of statuses counts as broken
    broken_links = [link for link in data if link["status"] not in {"working", "redirected", "unchecked", "ignored"}]
    if broken_links:
        for link in broken_links:
            print(f"[{link['status']}] {link['filename']}:{link['lineno']} -> {link['uri']}", file=sys.stderr)
        # non-zero exit so CI fails on broken links
        sys.exit(1)

# lint the documentation
check-docs:
    @just run --no-default-groups --group lint doc8 --ignore-path ./doc/build --max-line-length 100 -q ./doc
# fetch the intersphinx references for the given package
[script]
fetch-refs LIB: _install-docs
    import os
    from pathlib import Path
    import logging as _logging
    import sys
    import runpy
    from sphinx.ext.intersphinx import inspect_main

    _logging.basicConfig()
    # read intersphinx_mapping directly out of the sphinx conf.py
    libs = runpy.run_path(Path(os.getcwd()) / "doc/source/conf.py").get("intersphinx_mapping")
    url = libs.get("{{ LIB }}", None)
    if not url:
        sys.exit(f"Unrecognized {{ LIB }}, must be one of: {', '.join(libs.keys())}")
    # mapping values are (target_url, inventory) pairs; a None inventory
    # means the default objects.inv location under the target url
    if url[1] is None:
        url = f"{url[0].rstrip('/')}/objects.inv"
    else:
        url = url[1]
    # inspect_main prints the inventory; propagate its exit status
    raise SystemExit(inspect_main([url]))
# lint the code
check-lint:
    @just run --no-default-groups --group lint ruff check --select I
    @just run --no-default-groups --group lint ruff check

# check if the code needs formatting
check-format:
    @just run --no-default-groups --group lint ruff format --check

# check that the readme renders
check-readme:
    @just run --no-default-groups --group lint -m readme_renderer ./README.md -o /tmp/README.html

# check-readme without command echo/output (private helper)
_check-readme-quiet:
    @just --quiet check-readme
# sort the python imports
sort-imports:
    @just run --no-default-groups --group lint ruff check --fix --select I

# format the code and sort imports
format: sort-imports
    # fix: use `@just` like every other recursive invocation in this file
    # (the bare `just` call was the only un-silenced one)
    @just --fmt --unstable
    @just run --no-default-groups --group lint ruff format
# sort the imports and fix linting issues
lint: sort-imports
    @just run --no-default-groups --group lint ruff check --fix

# fix formatting, linting issues and import sorting
fix: lint format

# run all static checks
check: check-lint check-format check-types check-package check-docs check-readme

# run all checks including documentation link checking (slow)
check-all: check check-docs-links
# run tests
test *TESTS:
    @just run --no-default-groups --exact --group test --isolated pytest {{ TESTS }} --cov

# debug a test
debug-test *TESTS:
    @just run pytest \
        -o addopts='-ra -q' \
        -s --trace --pdbcls=IPython.terminal.debugger:Pdb \
        {{ TESTS }}

# run the pre-commit checks
precommit:
    @just run pre-commit

# generate the test coverage report
coverage:
    @just run --no-default-groups --group coverage coverage combine --keep *.coverage
    @just run --no-default-groups --group coverage coverage report
    @just run --no-default-groups --group coverage coverage xml

# run the command in the virtual environment
run +ARGS:
    uv run {{ ARGS }}
# validate the given version string against the lib version
[script]
validate_version VERSION:
    # fix: dropped an unused `import re` — nothing in this script uses it
    import tomllib
    import enum_properties
    from packaging.version import Version

    raw_version = "{{ VERSION }}".lstrip("v")
    version_obj = Version(raw_version)
    # the version should be normalized
    assert str(version_obj) == raw_version
    # make sure all places the version appears agree
    assert raw_version == tomllib.load(open('pyproject.toml', 'rb'))['project']['version']
    assert raw_version == enum_properties.__version__
    print(raw_version)
# issue a release for the given semver string (e.g. 2.1.0)
release VERSION: install check-all
    @just validate_version v{{ VERSION }}
    git tag -s v{{ VERSION }} -m "{{ VERSION }} Release"
    git push origin v{{ VERSION }}