# Workflow captured from PR #15: "more tests and more optimized weights".
name: Release

# `on` reads as a YAML 1.1 boolean to generic parsers; GitHub's loader
# handles it, so suppress yamllint `truthy` here if linting.
on:
  push:
    branches: [main]
    paths:
      - "pyproject.toml"
      - "sinonym/**"
  workflow_dispatch:

# Avoid overlapping publishes on rapid pushes
concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}
  cancel-in-progress: true

permissions:
  contents: read
  id-token: write  # required for PyPI trusted publishing

jobs:
  build:
    runs-on: ubuntu-latest
    outputs:
      should_publish: ${{ steps.decide.outputs.should_publish }}
      new_version: ${{ steps.decide.outputs.new_version }}
      old_version: ${{ steps.decide.outputs.old_version }}
    steps:
      - uses: actions/checkout@v4
        with:
          fetch-depth: 0  # need history for version comparison
      - uses: actions/setup-python@v5
        with:
          python-version: "3.11"
      - uses: astral-sh/setup-uv@v6
      - name: Cache uv
        uses: actions/cache@v4
        with:
          path: |
            ~/.cache/uv
            .venv
          key: uv-${{ runner.os }}-${{ hashFiles('uv.lock', 'pyproject.toml') }}
          restore-keys: |
            uv-${{ runner.os }}-

      # Compare the version in pyproject.toml against the pre-push commit.
      # Publishes only when the version string changed (or on manual dispatch,
      # where `github.event.before` is empty and old_version stays blank).
      - name: Detect version change (pyproject.toml)
        id: decide
        shell: bash
        # Pass the SHA through `env:` — a plain shell assignment is NOT
        # exported to the Python child process, so os.environ.get() would
        # always see None and the gate would approve every push. Using `env:`
        # also avoids interpolating ${{ }} directly into shell text.
        env:
          BEFORE_SHA: ${{ github.event.before }}
        run: |
          echo "BEFORE_SHA=$BEFORE_SHA"
          python - <<'PY'
          import os, subprocess, tomllib

          with open("pyproject.toml", "rb") as f:
              new_version = tomllib.load(f)["project"]["version"]

          old_version = ""
          before = os.environ.get("BEFORE_SHA")
          # The all-zero SHA means "no previous commit" (e.g. branch creation).
          if before and before != "0000000000000000000000000000000000000000":
              try:
                  blob = subprocess.check_output(["git", "show", f"{before}:pyproject.toml"])
                  old_version = tomllib.loads(blob.decode())["project"]["version"]
              except subprocess.CalledProcessError:
                  pass  # pyproject.toml absent at the old SHA → treat as new

          should = str(bool(new_version and new_version != old_version)).lower()
          print(f"Old version: {old_version}")
          print(f"New version: {new_version}")
          print(f"Should publish: {should}")

          with open(os.environ["GITHUB_OUTPUT"], "a") as out:
              out.write(f"new_version={new_version}\n")
              out.write(f"old_version={old_version}\n")
              out.write(f"should_publish={should}\n")
          PY

      - name: Install dependencies (dev group)
        if: steps.decide.outputs.should_publish == 'true'
        run: uv sync --group dev
      - name: Check test status against baseline (118 failures expected)
        if: steps.decide.outputs.should_publish == 'true'
        run: uv run python scripts/check_test_status.py
      - name: Build sdist + wheel
        if: steps.decide.outputs.should_publish == 'true'
        run: uv build
      - name: Validate metadata (twine check)
        if: steps.decide.outputs.should_publish == 'true'
        run: uvx twine check dist/*

      # Import from /tmp so Python can't silently pick up the repo checkout.
      - name: Smoke test import from sdist
        if: steps.decide.outputs.should_publish == 'true'
        run: |
          uv pip install --system dist/*.tar.gz --force-reinstall
          cd /tmp
          python -c "import sinonym; print('✓ sdist import OK:', sinonym.__file__)"
      - name: Smoke test import from wheel + resources
        if: steps.decide.outputs.should_publish == 'true'
        run: |
          uv pip install --system dist/*.whl --force-reinstall
          cd /tmp
          python - <<'PY'
          import sys, sinonym, pypinyin
          print("✓ Wheel import OK")
          print("PY:", sys.executable)
          print("sinonym from:", sinonym.__file__)
          print("pypinyin OK:", pypinyin.__version__)
          try:
              import importlib.resources as ir
              files = list((ir.files("sinonym.data")).iterdir())
              print("✓ data files present:", [f.name for f in files][:8], "…")
          except Exception as e:
              print("! data access failed:", e)
              raise
          PY

      - uses: actions/upload-artifact@v4
        if: steps.decide.outputs.should_publish == 'true'
        with:
          name: dist
          path: dist

  publish:
    needs: build
    if: needs.build.outputs.should_publish == 'true'
    runs-on: ubuntu-latest
    permissions:
      id-token: write  # PyPI trusted publishing (OIDC)
      contents: read
    steps:
      - uses: actions/download-artifact@v4
        with:
          name: dist
          path: dist
      - name: Publish to PyPI (Trusted Publishing)
        uses: pypa/gh-action-pypi-publish@release/v1
        with:
          print-hash: true