Skip to content

Commit c5b73d4

Browse files
authored
Release v0.13.0 (#104)
2 parents daad9ee + fa3c954 commit c5b73d4

Some content is hidden

Large Commits have some content hidden by default. Use the searchbox below for content that may be hidden.

65 files changed

+1116
-223
lines changed

.github/workflows/test.yaml

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -40,7 +40,6 @@ jobs:
4040
- "3.13"
4141
cpython-beta: "3.14"
4242
pypys:
43-
- "3.10"
4443
- "3.11"
4544
cache-key-hash-files:
4645
- "pyproject.toml"

.pre-commit-config.yaml

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -25,7 +25,7 @@ repos:
2525
- id: "trailing-whitespace"
2626

2727
- repo: "https://github.com/asottile/pyupgrade"
28-
rev: "v3.19.1"
28+
rev: "v3.20.0"
2929
hooks:
3030
- id: "pyupgrade"
3131
name: "Enforce Python 3.9+ idioms"
@@ -47,7 +47,7 @@ repos:
4747
- id: "isort"
4848

4949
- repo: "https://github.com/pycqa/flake8"
50-
rev: "7.2.0"
50+
rev: "7.3.0"
5151
hooks:
5252
- id: "flake8"
5353
additional_dependencies:
@@ -59,7 +59,7 @@ repos:
5959
- id: "editorconfig-checker"
6060

6161
- repo: "https://github.com/python-jsonschema/check-jsonschema"
62-
rev: "0.33.0"
62+
rev: "0.33.2"
6363
hooks:
6464
- id: "check-dependabot"
6565
- id: "check-readthedocs"

CHANGELOG.rst

Lines changed: 50 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -25,6 +25,56 @@ Please see the fragment files in the `changelog.d directory`_.
2525

2626
.. scriv-insert-here
2727
28+
.. _changelog-0.13.0:
29+
30+
0.13.0 - 2025-07-24
31+
===================
32+
33+
Python support
34+
--------------
35+
36+
* Drop support for PyPy 3.10.
37+
38+
Fixed
39+
-----
40+
41+
* Fall back to importing from source code if importing from byte code fails.
42+
43+
This resolves a problem importing Flask when byte-compiled,
44+
due to its ``sansio`` subdirectory, which has no ``__init__.py`` file
45+
and whose submodules currently fail to import from the byte code table.
46+
47+
Documentation
48+
-------------
49+
50+
* Document compatible ruff configurations.
51+
52+
* Add a logo and description to the sidebar on subpages.
53+
54+
* Update the homepage sidebar layout and design.
55+
56+
Development
57+
-----------
58+
59+
* Fix performance testing issues.
60+
61+
Zip-based bytecode import times were skewed during testing
62+
because ``zipimport`` doesn't use PEP 3147 ``__pycache__/`` subdirectories.
63+
This is now accounted for by the performance testing script's setup steps,
64+
and zip-based import times are now accurate for comparison.
65+
66+
Also, the total size of the source code and byte code trees is captured.
67+
68+
* Begin to expand and better automate performance testing.
69+
70+
This includes the ability to install from a ``requirements.txt`` file,
71+
to execute an arbitrary Python file, and to plot comparison bar charts
72+
of import times and package content sizes.
73+
74+
* Use chipshot to standardize headers.
75+
76+
* Prefer the new Python 3.14 ``compression.lzma`` namespace.
77+
2878
.. _changelog-0.12.0:
2979

3080
0.12.0 - 2025-05-15

assets/generate-perftest-directory.py

Lines changed: 0 additions & 39 deletions
This file was deleted.

assets/performance/__init__.py

Lines changed: 50 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,50 @@
1+
# This file is a part of sqliteimport <https://github.com/kurtmckee/sqliteimport>
2+
# Copyright 2024-2025 Kurt McKee <[email protected]>
3+
# SPDX-License-Identifier: MIT
4+
5+
import enum
6+
import pathlib
7+
8+
REPO_ROOT = pathlib.Path(__file__).parent.parent.parent
9+
PACKAGE_DIRECTORY = REPO_ROOT / "build/perftest"
10+
STATS = REPO_ROOT / "build/perfstats"
11+
12+
13+
class Importer(enum.StrEnum):
14+
filesystem = "filesystem"
15+
zipimport = "zipimport"
16+
sqliteimport = "sqliteimport"
17+
18+
19+
class CodeType(enum.StrEnum):
20+
source = "source"
21+
bytecode = "bytecode"
22+
23+
24+
class Measurement(enum.StrEnum):
25+
time = "time"
26+
size = "size"
27+
28+
29+
PACKAGE_PATHS: dict[Importer, dict[CodeType, pathlib.Path]] = {
30+
Importer.filesystem: {
31+
CodeType.source: PACKAGE_DIRECTORY,
32+
CodeType.bytecode: PACKAGE_DIRECTORY,
33+
},
34+
Importer.zipimport: {
35+
CodeType.source: STATS / "source.zip",
36+
CodeType.bytecode: STATS / "bytecode.zip",
37+
},
38+
Importer.sqliteimport: {
39+
CodeType.source: STATS / "source.sqlite3",
40+
CodeType.bytecode: STATS / "bytecode.sqlite3",
41+
},
42+
}
43+
44+
LOG_PATHS: dict[Importer, dict[CodeType, pathlib.Path]] = {
45+
# This is a dictionary comprehension.
46+
importer: {
47+
code_type: STATS / f"{importer}.{code_type}.log" for code_type in CodeType
48+
}
49+
for importer in Importer
50+
}

assets/performance/__main__.py

Lines changed: 30 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,30 @@
1+
# This file is a part of sqliteimport <https://github.com/kurtmckee/sqliteimport>
# Copyright 2024-2025 Kurt McKee <[email protected]>
# SPDX-License-Identifier: MIT

import pathlib
import sys

import click

# Make the `performance` package importable by putting `assets/` on the path.
assets = pathlib.Path(__file__).parent.parent
sys.path.insert(0, str(assets))


if __name__ == "__main__":
    from performance.clean import clean
    from performance.collect import collect
    from performance.compile import compile_
    from performance.generate import generate
    from performance.plot import plot
    from performance.run import run

    # Register every subcommand on a single click group, then dispatch.
    cli = click.Group()
    for command in (clean, collect, compile_, generate, plot, run):
        cli.add_command(command)
    cli()

assets/performance/clean.py

Lines changed: 21 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,21 @@
1+
# This file is a part of sqliteimport <https://github.com/kurtmckee/sqliteimport>
# Copyright 2024-2025 Kurt McKee <[email protected]>
# SPDX-License-Identifier: MIT

import shutil

import click

from . import PACKAGE_DIRECTORY
from . import STATS


@click.command()
def clean() -> None:
    """
    Erase files and directories generated by performance testing script.
    """

    # Remove the generated package tree and the stats directory, if present.
    for target in (PACKAGE_DIRECTORY, STATS):
        if target.is_dir():
            shutil.rmtree(target)

assets/performance/collect.py

Lines changed: 128 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,128 @@
1+
# This file is a part of sqliteimport <https://github.com/kurtmckee/sqliteimport>
2+
# Copyright 2024-2025 Kurt McKee <[email protected]>
3+
# SPDX-License-Identifier: MIT
4+
5+
import itertools
6+
import json
7+
import pathlib
8+
import typing
9+
10+
import click
11+
12+
from . import LOG_PATHS
13+
from . import PACKAGE_PATHS
14+
from . import STATS
15+
from . import CodeType
16+
from . import Importer
17+
from . import Measurement
18+
19+
20+
@click.command()
21+
def collect() -> None:
22+
"""
23+
Collect import and sizing stats and write them to a JSON file.
24+
"""
25+
26+
stats = {
27+
Measurement.time: parse_import_log(),
28+
Measurement.size: get_size_stats(),
29+
}
30+
output_path = STATS / "stats.json"
31+
output_path.parent.mkdir(exist_ok=True)
32+
output_path.write_text(json.dumps(stats, indent=2, sort_keys=True))
33+
34+
35+
def parse_import_log() -> dict[str, dict[str, dict[str, int]]]:
    """Parse an import log to extract per-module cumulative import times (in µs)."""

    stats: dict[str, dict[str, dict[str, int]]] = {}
    for importer in Importer:
        for code_type in CodeType:
            # Seed each (code type, importer) bucket with a running total.
            module_times: dict[str, int] = {"-cumulative_us": 0}
            stats.setdefault(code_type, {})[importer] = module_times

            log_file = LOG_PATHS[importer][code_type]
            if not log_file.is_file():
                click.echo(f"{log_file} not found")
                continue

            for _, cumulative_us, module in split_columns(log_file.read_text()):
                # Indented module names are submodules;
                # only top-level modules are recorded here.
                if module.startswith(" "):
                    continue
                module_times[module] = cumulative_us
                module_times["-cumulative_us"] += cumulative_us

    return stats
57+
58+
59+
def split_columns(text: str) -> typing.Iterator[tuple[int, int, str]]:
    """Extract the columns of ``import time:`` log lines.

    Matching lines (as produced by ``python -X importtime``) have the form::

        import time: <self us> | <cumulative us> | <module>

    Yields ``(self_us, cumulative_us, module)`` tuples.  The module name
    keeps its leading indentation (submodules are indented in the log),
    but trailing whitespace is removed.  The header row and any line whose
    columns don't parse as integers are silently skipped.
    """

    for line in text.splitlines():
        prefix, _, remainder = line.partition(": ")
        if prefix != "import time":
            continue
        try:
            # Renamed from `self`, which shadowed the conventional method
            # receiver name and read confusingly in a module-level function.
            self_us, cumulative, module = remainder.split(" | ")
            yield int(self_us.strip()), int(cumulative.strip()), module.rstrip()
        except ValueError:
            # The header row's columns aren't integers, and malformed lines
            # fail to unpack into three columns — both raise ValueError.
            # (TypeError was previously caught here too, but is unreachable
            # for `str` input.)
            continue
69+
70+
71+
def get_size_stats() -> dict[Importer, dict[CodeType, int]]:
    """Get the size of items on disk.

    This only considers the raw sum of the sizes of files on disk.
    It does not, for example, calculate the size of the filesystem blocks
    that are consumed by individual files.

    Sizes are in bytes.  Artifacts that have not been generated yet
    are reported as ``-1``.
    """

    # Loop variable type declarations (for static type checkers).
    importer: Importer
    code_type: CodeType

    # Pre-populate every combination with a -1 placeholder so artifacts
    # that are missing on disk remain visible in the collected stats.
    sizes: dict[Importer, dict[CodeType, int]] = {
        importer: {code_type: -1 for code_type in CodeType} for importer in Importer
    }
    for importer, code_type in itertools.product(Importer, CodeType):
        path = PACKAGE_PATHS[importer][code_type]
        if path.is_dir():
            sizes[importer][code_type] = get_directory_size(path, code_type)
        elif path.is_file():
            sizes[importer][code_type] = path.stat().st_size
        else:
            # This branch previously called `typing.assert_never()`, which is
            # a misuse: the branch is reachable whenever an artifact hasn't
            # been generated (the argument is not statically `Never`), and it
            # raised a bare AssertionError. Instead, report the missing path
            # and leave the -1 placeholder, matching `parse_import_log()`.
            click.echo(f"{path} not found")

    return sizes
95+
96+
97+
def get_directory_size(directory: pathlib.Path, code_type: CodeType) -> int:
    """Get the size of a directory.

    PEP3147-compatible ``*.pyc`` files will be included in the calculation
    if the *code_type* indicates byte code.
    """

    total = 0

    # Walk the tree iteratively using an explicit stack of directories.
    pending = [directory]
    while pending:
        current = pending.pop()
        for entry in current.glob("*"):
            if entry.is_dir():
                # Descend into `__pycache__/` only when measuring byte code.
                if entry.name != "__pycache__" or code_type == CodeType.bytecode:
                    pending.append(entry)

            elif entry.is_file():
                # `*.pyc` files only count toward byte code sizes, and even
                # then only when they live in a `__pycache__/` directory.
                if entry.suffix == ".pyc" and not (
                    code_type == CodeType.bytecode
                    and entry.parent.name == "__pycache__"
                ):
                    continue
                total += entry.stat().st_size

    return total

0 commit comments

Comments
 (0)