-
Notifications
You must be signed in to change notification settings - Fork 0
/
Copy path.pre-commit-config.yaml
162 lines (146 loc) · 5.77 KB
/
.pre-commit-config.yaml
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
# See https://pre-commit.com for more information
# See https://pre-commit.com/hooks.html for more hooks
---
# Don't run pre-commit on files under the notebooks/ directory
exclude: ^notebooks
repos:
- repo: local
  hooks:
    # Fails if the OpenAPI schema is not up to date. This ensures that developers
    # always re-generate and commit the latest openapi.json file before they can pass the build.
    - id: generate-and-diff-openapi-schema
      name: Generate and Assert that OpenAPI Schema is up to date
      entry: >-
        python ./scripts/generate-openapi.py generate-and-diff
        --existing-spec ./openapi.json
        --output-spec ./openapi.json
        --fail-on-diff
      language: system
      # run this hook if openapi.json or any of the src/*.py files change (since those files generate openapi.json)
      files: ^openapi\.json$|^src/.*\.py$
      pass_filenames: false
      always_run: false
    # If any FastAPI code has changed, check that it does not introduce any breaking API
    # changes compared to the openapi.json on the main branch.
    - id: diff-openapi-schema-against-main
      name: Catch breaking API schema changes compared to main branch
      entry: |
        bash -c '
        set -ex;
        # generate the OpenAPI spec from the latest fastapi app code
        python ./scripts/generate-openapi.py generate --output-spec openapi.json;
        # Determine which ref to read openapi.json from: the local main branch
        # if it exists, otherwise fall back to the remote-tracking origin/main
        if git show refs/heads/main:openapi.json > /dev/null 2>&1; then
          OPENAPI_REF="refs/heads/main:openapi.json"
        else
          OPENAPI_REF="origin/main:openapi.json"
        fi
        # Load the OpenAPI schema from the determined ref
        git show "$OPENAPI_REF" > ./openapi-main.json
        # set the openapi-main.json to be deleted when this hook finishes
        trap "rm openapi-main.json" EXIT;
        # compare the recently generated OpenAPI schema to the one on main and fail if
        # the recently generated one would introduce breaking changes
        docker run --rm \
          --volume "$PWD":/data \
          tufin/oasdiff changelog \
          /data/openapi-main.json \
          /data/openapi.json \
          --fail-on ERR
        '
      language: system
      # run this hook if openapi.json or any of the src/*.py files change (since those files generate openapi.json)
      files: ^openapi\.json$|^src/.*\.py$
      pass_filenames: false
      always_run: false
      verbose: false
- repo: https://github.com/pre-commit/pre-commit-hooks
  rev: v4.6.0
  hooks:
    # Fails if there are any ">>>>>" lines in files due to merge conflicts.
    - id: check-merge-conflict
    # Trims trailing whitespace. Allow a single space on the end of .md lines for hard line breaks.
    - id: trailing-whitespace
      args: [--markdown-linebreak-ext=md]
    # Makes sure files end in a newline and only a newline.
    - id: end-of-file-fixer
      exclude: ^openapi\.json$
    # Attempts to load all TOML files to verify syntax.
    - id: check-toml
    # Attempts to load all yaml files to verify syntax; unsafe: only check syntax, do not load yaml
    - id: check-yaml
      args: ["--unsafe"]
    # Check for symlinks that do not point to anything.
    - id: check-symlinks
    # Fail if staged files are above a certain size.
    # To add a large file, use 'git lfs track <file>; git add <file>' to track large files with
    # git-lfs rather than committing them directly to the git history
    - id: check-added-large-files
      args: ["--maxkb=500"]
      # HALT! Before you exclude a large file and commit it, forever
      # bloating our repo size, did you:
      # (1) use a CLI tool like imageoptim to compress them if they are images
      # (2) think hard about whether using DVC or git-lfs is more appropriate
      #     for the file--such as in the case of CSV files or other data
      # This can be confusing. Reach out for help in our chat to help decide
      # how to deal adding these large files you have :)
      exclude: |
        (?x)(
          ^example/large/file\.csv$|
          ^example/large/sklearn-model\.pkl$
        )
    # Sort requirements in requirements.txt files.
    - id: requirements-txt-fixer
    # Prevent addition of new git submodules.
    - id: forbid-new-submodules
    # Prevent committing directly to trunk (since Bitbucket wants us to pay for this feature)
    - id: no-commit-to-branch
      args: ["--branch=main"]
    # Detects the presence of private keys
    - id: detect-private-key
- repo: https://github.com/pre-commit/mirrors-mypy
  rev: "v1.10.0"
  hooks:
    # Static type checking; options beyond the flags below come from pyproject.toml.
    - id: mypy
      args:
        - --no-strict-optional
        - --ignore-missing-imports
        - --config-file=./pyproject.toml
- repo: https://github.com/psf/black
  rev: 24.4.2
  hooks:
    # Opinionated code formatting, configured via pyproject.toml.
    - id: black
      args: ["--config=./pyproject.toml"]
- repo: https://github.com/PyCQA/pylint
  rev: v3.2.2
  hooks:
    # Linting, configured via pyproject.toml.
    - id: pylint
      args: ["--rcfile=./pyproject.toml"]
- repo: https://github.com/PyCQA/flake8
  rev: 7.0.0
  hooks:
    # Style and complexity checks; Flake8-pyproject lets flake8 read pyproject.toml.
    - id: flake8
      args: ["--toml-config=./pyproject.toml"]
      additional_dependencies:
        - radon
        - flake8-docstrings
        - Flake8-pyproject
- repo: https://github.com/pycqa/isort
  rev: 5.13.2
  hooks:
    # Import sorting, configured via pyproject.toml.
    - id: isort
      args: ["--settings-path=./pyproject.toml"]
- repo: https://github.com/PyCQA/autoflake
  rev: v2.3.1
  hooks:
    # Removes unused imports and unused variables in place.
    - id: autoflake
      args:
        - --in-place
        - --remove-all-unused-imports
        # NOTE: the valid flag is plural (--remove-unused-variables);
        # the singular form is not recognized by autoflake.
        - --remove-unused-variables
        - --ignore-init-module-imports