Skip to content

Commit 082afc7

Browse files
authored
Update pre-commit dependencies (#353)
* Update pre-commit dependencies
* Fix `flake8` errors (two typos in var names)
* Fix `mypy` importing errors
* Fix `mypy` code errors
* Update `conda/dev.yml`
1 parent ef8372a commit 082afc7

File tree

5 files changed

+25
-24
lines changed

5 files changed

+25
-24
lines changed

.pre-commit-config.yaml

Lines changed: 12 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -4,35 +4,41 @@ fail_fast: true
44

55
repos:
66
- repo: https://github.com/pre-commit/pre-commit-hooks
7-
rev: v3.4.0
7+
rev: v5.0.0
88
hooks:
99
- id: trailing-whitespace
1010
- id: end-of-file-fixer
1111
- id: check-yaml
1212
exclude: conda/meta.yaml
1313

1414
- repo: https://github.com/psf/black
15-
rev: 22.3.0
15+
rev: 24.10.0
1616
hooks:
1717
- id: black
1818

1919
- repo: https://github.com/PyCQA/isort
20-
rev: 5.12.0
20+
rev: 5.13.2
2121
hooks:
2222
- id: isort
2323

2424
# Need to use flake8 GitHub mirror due to CentOS git issue with GitLab
2525
# https://github.com/pre-commit/pre-commit/issues/1206
2626
- repo: https://github.com/pycqa/flake8
27-
rev: 3.8.4
27+
rev: 7.1.1
2828
hooks:
2929
- id: flake8
3030
args: ["--config=setup.cfg"]
3131
additional_dependencies: [flake8-isort]
3232
exclude: analysis_data_preprocess
3333

3434
- repo: https://github.com/pre-commit/mirrors-mypy
35-
rev: v0.790
35+
rev: v1.11.2
3636
hooks:
3737
- id: mypy
38-
args: ["--config=setup.cfg"]
38+
args: ["--config=setup.cfg", "--install-types", "--non-interactive"]
39+
exclude: test*
40+
41+
# https://pre-commit.ci/#configuration
42+
ci:
43+
autofix_prs: false
44+
autoupdate_schedule: monthly

conda/dev.yml

Lines changed: 5 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -13,11 +13,11 @@ dependencies:
1313
# Developer Tools
1414
# =================
1515
# If versions are updated, also update 'rev' in `.pre-commit.config.yaml`
16-
- black=22.8.0
17-
- flake8=5.0.4
18-
- flake8-isort=4.2.0
19-
- mypy=0.982
20-
- pre-commit=2.20.0
16+
- black=24.10.0
17+
- flake8=7.1.1
18+
- flake8-isort=6.1.1
19+
- mypy=1.11.2
20+
- pre-commit=4.0.1
2121
- tbump=6.9.0
2222
# Documentation
2323
# =================

zstash/create.py

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -24,7 +24,6 @@
2424

2525
def create():
2626
cache: str
27-
exclude: str
2827
cache, args = setup_create()
2928

3029
# Check config fields

zstash/extract.py

Lines changed: 6 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -254,7 +254,7 @@ def extract_database(
254254
# This is because we may have different versions of the
255255
# same file across many tars.
256256
insert_idx: int
257-
iter_inx: int
257+
iter_idx: int
258258
insert_idx, iter_idx = 0, 1
259259
for iter_idx in range(1, len(matches)):
260260
# If the filenames are unique, just increment insert_idx.
@@ -351,10 +351,10 @@ def multiprocess_extract(
351351
for db_row in matches:
352352
tar = db_row.tar
353353
workers_idx: int
354-
for worker_idx in range(len(workers_to_tars)):
355-
if tar in workers_to_tars[worker_idx]:
354+
for workers_idx in range(len(workers_to_tars)):
355+
if tar in workers_to_tars[workers_idx]:
356356
# This worker gets this db_row.
357-
workers_to_matches[worker_idx].append(db_row)
357+
workers_to_matches[workers_idx].append(db_row)
358358

359359
tar_ordering: List[str] = sorted([tar for tar in tar_to_size])
360360
monitor: parallel.PrintMonitor = parallel.PrintMonitor(tar_ordering)
@@ -444,8 +444,7 @@ def extractFiles( # noqa: C901
444444
if multiprocess_worker:
445445
# All messages to the logger will now be sent to
446446
# this queue, instead of sys.stdout.
447-
# error: Argument 1 to "StreamHandler" has incompatible type "PrintQueue"; expected "Optional[IO[str]]"
448-
sh = logging.StreamHandler(multiprocess_worker.print_queue) # type: ignore
447+
sh = logging.StreamHandler(multiprocess_worker.print_queue)
449448
sh.setLevel(logging.DEBUG)
450449
formatter: logging.Formatter = logging.Formatter("%(levelname)s: %(message)s")
451450
sh.setFormatter(formatter)
@@ -539,8 +538,7 @@ def extractFiles( # noqa: C901
539538
# error: Name 'tarfile.ExFileObject' is not defined
540539
extracted_file: Optional[tarfile.ExFileObject] = tar.extractfile(tarinfo) # type: ignore
541540
if extracted_file:
542-
# error: Name 'tarfile.ExFileObject' is not defined
543-
fin: tarfile.ExFileObject = extracted_file # type: ignore
541+
fin: tarfile.ExFileObject = extracted_file
544542
else:
545543
raise TypeError("Invalid extracted_file={}".format(extracted_file))
546544
try:

zstash/hpss_utils.py

Lines changed: 2 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -176,8 +176,7 @@ def add_file(
176176
tar: tarfile.TarFile, file_name: str, follow_symlinks: bool
177177
) -> Tuple[int, int, datetime, Optional[str]]:
178178

179-
# FIXME: error: "TarFile" has no attribute "offset"
180-
offset: int = tar.offset # type: ignore
179+
offset: int = tar.offset
181180
tarinfo: tarfile.TarInfo = tar.gettarinfo(file_name)
182181
# Change the size of any hardlinks from 0 to the size of the actual file
183182
if tarinfo.islnk():
@@ -215,8 +214,7 @@ def add_file(
215214
fileobj.write(null_bytes * (tarfile.BLOCKSIZE - remainder))
216215
blocks += 1
217216
# Increase the offset by the amount already saved to the tar
218-
# FIXME: error: "TarFile" has no attribute "offset"
219-
tar.offset += blocks * tarfile.BLOCKSIZE # type: ignore
217+
tar.offset += blocks * tarfile.BLOCKSIZE
220218
break
221219
f.close()
222220
md5 = hash_md5.hexdigest()

0 commit comments

Comments (0)