Skip to content

[AI-assisted] feat: Implement comprehensive PyPI release pipeline with safety checks #8

[AI-assisted] feat: Implement comprehensive PyPI release pipeline with safety checks

[AI-assisted] feat: Implement comprehensive PyPI release pipeline with safety checks #8

Workflow file for this run

---
name: CI/CD Pipeline

# Triggers: CI on pushes/PRs to the primary branches, release pipeline on publish.
on:
  push:
    branches: [ main, develop ]
  pull_request:
    branches: [ main ]
  release:
    types: [ published ]

jobs:
lint-and-type-check:
name: Linting and Type Checking
runs-on: ubuntu-latest
strategy:
matrix:
python-version: ["3.10", "3.11", "3.12", "3.13"]
steps:
- uses: actions/checkout@v4
- name: Set up Python ${{ matrix.python-version }}
uses: actions/setup-python@v5
with:
python-version: ${{ matrix.python-version }}
- name: Cache pip dependencies
uses: actions/cache@v4
with:
path: ~/.cache/pip
key: ${{ runner.os }}-pip-${{ hashFiles('**/pyproject.toml') }}
restore-keys: |
${{ runner.os }}-pip-
- name: Install dependencies
run: |
python -m pip install --upgrade pip
pip install -e .[dev]
- name: Run ruff linting
run: |
ruff check src/ tests/
ruff format --check src/ tests/
- name: Run mypy type checking
run: |
mypy src/ --strict
test:
name: Tests
runs-on: ubuntu-latest
strategy:
matrix:
python-version: ["3.10", "3.11", "3.12", "3.13"]
steps:
- uses: actions/checkout@v4
- name: Set up Python ${{ matrix.python-version }}
uses: actions/setup-python@v5
with:
python-version: ${{ matrix.python-version }}
- name: Cache pip dependencies
uses: actions/cache@v4
with:
path: ~/.cache/pip
key: ${{ runner.os }}-pip-${{ hashFiles('**/pyproject.toml') }}
restore-keys: |
${{ runner.os }}-pip-
- name: Install dependencies
run: |
python -m pip install --upgrade pip
pip install -e .[dev]
- name: Run tests with coverage
run: |
pytest --cov=src --cov-report=xml --cov-report=html --cov-report=term-missing tests/
- name: Upload coverage to Codecov
uses: codecov/codecov-action@v4
with:
file: ./coverage.xml
flags: unittests
name: codecov-umbrella
fail_ci_if_error: false
- name: Archive coverage reports
uses: actions/upload-artifact@v4
if: matrix.python-version == '3.11'
with:
name: coverage-report
path: htmlcov/
security-scan:
name: Security Scanning
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- name: Set up Python 3.11
uses: actions/setup-python@v5
with:
python-version: "3.11"
- name: Install dependencies
run: |
python -m pip install --upgrade pip
pip install -e .[dev]
- name: Run bandit security scan
run: |
bandit -r src/ -f json -o bandit-report.json || true
- name: Run safety check
run: |
safety check --json --output safety-report.json || true
- name: Archive security reports
uses: actions/upload-artifact@v4
with:
name: security-reports
path: |
bandit-report.json
safety-report.json
build:
name: Build Package
runs-on: ubuntu-latest
needs: [lint-and-type-check, test]
steps:
- uses: actions/checkout@v4
- name: Set up Python 3.11
uses: actions/setup-python@v5
with:
python-version: "3.11"
- name: Install build dependencies
run: |
python -m pip install --upgrade pip
pip install build twine
- name: Build package
run: |
python -m build
- name: Check package
run: |
twine check dist/*
- name: Archive build artifacts
uses: actions/upload-artifact@v4
with:
name: dist-packages
path: dist/
integration-tests:
name: Integration Tests
runs-on: ubuntu-latest
needs: [lint-and-type-check, test]
steps:
- uses: actions/checkout@v4
- name: Set up Python 3.11
uses: actions/setup-python@v5
with:
python-version: "3.11"
- name: Install dependencies
run: |
python -m pip install --upgrade pip
pip install -e .[dev]
- name: Run integration tests
run: |
pytest tests/integration/ -v --tb=short
- name: Test CLI functionality
run: |
# Test basic CLI commands
aletheia-probe --help
aletheia-probe config || true # May fail without config, that's OK
aletheia-probe status || true # May fail without backends, that's OK
cross-platform-integration:
name: Cross-Platform Integration Tests
runs-on: ${{ matrix.os }}
needs: [lint-and-type-check, test]
if: github.event_name == 'push' && github.ref == 'refs/heads/main'
strategy:
matrix:
os: [ubuntu-latest, windows-latest, macos-latest]
python-version: ["3.11"]
fail-fast: false
steps:
- uses: actions/checkout@v4
- name: Set up Python ${{ matrix.python-version }}
uses: actions/setup-python@v5
with:
python-version: ${{ matrix.python-version }}
- name: Cache pip dependencies
uses: actions/cache@v4
with:
path: |
~/.cache/pip
~/Library/Caches/pip
~\AppData\Local\pip\Cache
key: ${{ runner.os }}-pip-${{ hashFiles('**/pyproject.toml') }}
restore-keys: |
${{ runner.os }}-pip-
- name: Cache assessment data
uses: actions/cache@v4
with:
path: |
~/.config/aletheia-probe
~/.aletheia-probe
~\AppData\Local\aletheia-probe
~\AppData\Roaming\aletheia-probe
key: ${{ runner.os }}-assessment-data-${{ github.run_number }}
restore-keys: |
${{ runner.os }}-assessment-data-
# Platform-specific system dependencies
- name: Install system dependencies (Ubuntu)
if: matrix.os == 'ubuntu-latest'
run: |
sudo apt-get update
sudo apt-get install -y unrar git curl
- name: Install system dependencies (macOS)
if: matrix.os == 'macos-latest'
run: |
# Install unar (macOS equivalent of unrar)
brew install unar || echo "unar installation failed, trying unrar"
brew install unrar || echo "RAR tools installation completed with warnings"
# git and curl are pre-installed on macOS
- name: Install system dependencies (Windows)
if: matrix.os == 'windows-latest'
run: |
# Install via chocolatey (available on GitHub Actions Windows runners)
choco install unrar -y || echo "unrar installation failed, trying 7zip"
choco install 7zip -y || echo "7zip installation failed"
# git is pre-installed on GitHub Actions Windows runners
- name: Setup Unicode encoding for Windows
if: matrix.os == 'windows-latest'
run: |
# Set UTF-8 encoding for Python and console output
echo "PYTHONIOENCODING=utf-8" >> $GITHUB_ENV
echo "PYTHONUTF8=1" >> $GITHUB_ENV
# Set console code page to UTF-8 (Windows only)
chcp 65001 || echo "Could not set UTF-8 code page"
shell: bash
- name: Install Python package
run: |
python -m pip install --upgrade pip
pip install -e .
- name: Validate installation and check status
run: |
aletheia-probe --help
aletheia-probe status || echo "Status check completed with warnings"
- name: Limited data sync for testing
run: |
echo "Starting limited data sync for CI/CD testing..."
# Sync specific backends to keep CI/CD time reasonable (5-7 minutes max)
# Use platform-appropriate timeout command
if command -v timeout >/dev/null 2>&1; then
timeout 300 aletheia-probe sync doaj bealls retraction_watch || echo "Sync completed with timeout/warnings"
elif command -v gtimeout >/dev/null 2>&1; then
gtimeout 300 aletheia-probe sync doaj bealls retraction_watch || echo "Sync completed with timeout/warnings"
else
# No timeout available, rely on GitHub Actions timeout-minutes
aletheia-probe sync doaj bealls retraction_watch || echo "Sync completed with warnings"
fi
shell: bash
timeout-minutes: 8
- name: Test basic assessment functionality
run: |
echo "Testing basic assessment functionality..."
# Test with well-known legitimate journal
echo "Testing legitimate journal assessment..."
aletheia-probe journal "Nature" --format json || echo "Nature assessment completed with warnings"
# Test with a pattern that might indicate predatory behavior
echo "Testing predatory pattern assessment..."
aletheia-probe journal "International Journal of Advanced Research" --format json || echo "Predatory pattern assessment completed with warnings"
# Test journal status without full assessment
echo "Testing status command..."
aletheia-probe status || echo "Status command completed with warnings"
- name: Test with sample BibTeX data
run: |
echo "Testing BibTeX assessment functionality..."
# Create a sample BibTeX file using echo commands (cross-platform)
echo "@article{nature_test_2024," > sample_test.bib
echo " title={Sample Nature Article}," >> sample_test.bib
echo " author={Test Author}," >> sample_test.bib
echo " journal={Nature}," >> sample_test.bib
echo " year={2024}," >> sample_test.bib
echo " volume={123}," >> sample_test.bib
echo " pages={1-10}" >> sample_test.bib
echo "}" >> sample_test.bib
echo "" >> sample_test.bib
echo "@article{arxiv_test_2024," >> sample_test.bib
echo " title={Sample ArXiv Preprint}," >> sample_test.bib
echo " author={Another Author}," >> sample_test.bib
echo " journal={arXiv preprint}," >> sample_test.bib
echo " year={2024}" >> sample_test.bib
echo "}" >> sample_test.bib
echo "" >> sample_test.bib
echo "@article{ieee_test_2024," >> sample_test.bib
echo " title={Sample IEEE Article}," >> sample_test.bib
echo " author={IEEE Author}," >> sample_test.bib
echo " journal={IEEE Computer}," >> sample_test.bib
echo " year={2024}," >> sample_test.bib
echo " volume={57}," >> sample_test.bib
echo " pages={20-30}" >> sample_test.bib
echo "}" >> sample_test.bib
echo "Created sample_test.bib"
# Test BibTeX assessment
aletheia-probe bibtex sample_test.bib --format json || echo "BibTeX assessment completed with warnings"
# Test that exit codes work properly
aletheia-probe bibtex sample_test.bib || echo "BibTeX exit code test completed"
- name: Test configuration and cache
run: |
echo "Testing configuration and cache functionality..."
# Test config display
aletheia-probe config || echo "Config show completed with warnings"
# Test status (shows cache and backend status)
aletheia-probe status || echo "Status check completed with warnings"
- name: Platform-specific validation
run: |
echo "Running platform-specific validation for ${{ matrix.os }}..."
# Check that all required commands are available
python -c "
import subprocess
import sys
def check_command(cmd):
try:
result = subprocess.run([cmd, '--help' if cmd != 'git' else '--version'],
capture_output=True, timeout=10)
print(f'[OK] {cmd}: Available (exit code {result.returncode})')
return True
except Exception as e:
print(f'[ERROR] {cmd}: Not available - {e}')
return False
commands = ['python', 'git']
# Check for RAR tools on non-Windows platforms
if '${{ matrix.os }}' == 'macos-latest':
# Try both unar and unrar on macOS
for cmd in ['unar', 'unrar']:
try:
subprocess.run([cmd, '--help'], capture_output=True, timeout=10)
commands.append(cmd)
print(f'[INFO] Using {cmd} for RAR support on macOS')
break
except:
continue
elif '${{ matrix.os }}' == 'ubuntu-latest':
commands.append('unrar')
all_good = all(check_command(cmd) for cmd in commands)
status = 'PASSED' if all_good else 'PARTIAL'
print(f'Platform validation: {status}')
"
- name: Archive test artifacts
uses: actions/upload-artifact@v4
if: always()
with:
name: integration-artifacts-${{ matrix.os }}-py${{ matrix.python-version }}
path: |
sample_test.bib
~/.config/aletheia-probe/logs/
~/.aletheia-probe/logs/
~\AppData\Local\aletheia-probe\logs\
~\AppData\Roaming\aletheia-probe\logs\
performance-tests:
name: Performance Tests
runs-on: ubuntu-latest
if: github.event_name == 'pull_request' || github.ref == 'refs/heads/main'
steps:
- uses: actions/checkout@v4
- name: Set up Python 3.11
uses: actions/setup-python@v5
with:
python-version: "3.11"
- name: Install dependencies
run: |
python -m pip install --upgrade pip
pip install -e .[dev] pytest-benchmark
- name: Run performance benchmarks
run: |
pytest tests/ -k "benchmark" --benchmark-only --benchmark-json=benchmark.json || true
- name: Archive benchmark results
uses: actions/upload-artifact@v4
if: always()
with:
name: benchmark-results
path: benchmark.json
# Disabled until GitHub Pages is configured
# To enable: Go to repository Settings → Pages → Source → GitHub Actions
# deploy-docs:
# name: Deploy Documentation
# runs-on: ubuntu-latest
# if: github.ref == 'refs/heads/main' && github.event_name == 'push'
# needs: [lint-and-type-check, test, build]
#
# permissions:
# contents: read
# pages: write
# id-token: write
#
# steps:
# - uses: actions/checkout@v4
#
# - name: Set up Python 3.11
# uses: actions/setup-python@v5
# with:
# python-version: "3.11"
#
# - name: Install dependencies
# run: |
# python -m pip install --upgrade pip
# pip install -e .[docs] || pip install -e .[dev]
#
# - name: Build documentation
# run: |
# # Generate coverage reports for docs
# pytest --cov=src --cov-report=html tests/ || true
# mkdir -p docs/coverage
# cp -r htmlcov/* docs/coverage/ || true
#
# # Generate API documentation
# mkdir -p docs/api
# # Add sphinx-apidoc or similar here if available
#
# - name: Setup Pages
# uses: actions/configure-pages@v5
#
# - name: Upload documentation
# uses: actions/upload-pages-artifact@v3
# with:
# path: 'docs/'
#
# - name: Deploy to GitHub Pages
# id: deployment
# uses: actions/deploy-pages@v4
test-pypi-publish:
name: Test PyPI Publishing (TestPyPI)
runs-on: ubuntu-latest
if: github.event_name == 'push' && github.ref == 'refs/heads/main'
needs: [lint-and-type-check, test, build, integration-tests]
environment: test-pypi
steps:
- uses: actions/checkout@v4
with:
fetch-depth: 0 # Fetch all history for version validation
- name: Set up Python 3.11
uses: actions/setup-python@v5
with:
python-version: "3.11"
- name: Install dependencies
run: |
python -m pip install --upgrade pip
pip install build twine packaging
- name: Validate version format
run: |
python -c "
import re
import sys
from pathlib import Path
# Read version from pyproject.toml
content = Path('pyproject.toml').read_text()
match = re.search(r'version = \"(.+?)\"', content)
if not match:
print('ERROR: Could not find version in pyproject.toml')
sys.exit(1)
version = match.group(1)
print(f'Found version: {version}')
# Validate version format (PEP 440)
if not re.match(r'^\d+\.\d+\.\d+(?:\.(?:dev|alpha|beta|rc)\d+)?$', version):
print(f'ERROR: Version {version} does not match PEP 440 format')
print('Expected format: X.Y.Z or X.Y.Z.devN or X.Y.Z.alphaN, etc.')
sys.exit(1)
print(f'✓ Version {version} is valid')
"
- name: Build package
run: |
python -m build
- name: Check package
run: |
twine check dist/*
- name: Test installation from built package
run: |
# Create a temporary virtual environment
python -m venv test_env
source test_env/bin/activate
# Install the built wheel
pip install dist/*.whl
# Test that the CLI is available
aletheia-probe --help
# Cleanup
deactivate
rm -rf test_env
- name: Publish to TestPyPI
if: success()
env:
TWINE_USERNAME: __token__
TWINE_PASSWORD: ${{ secrets.TESTPYPI_API_TOKEN }}
run: |
echo "Publishing to TestPyPI..."
twine upload --repository testpypi dist/* --verbose || echo "Note: Upload may fail if version already exists on TestPyPI"
- name: Test installation from TestPyPI
if: success()
run: |
echo "Waiting 30 seconds for TestPyPI to process the upload..."
sleep 30
# Create a fresh virtual environment
python -m venv testpypi_env
source testpypi_env/bin/activate
# Try to install from TestPyPI
pip install --index-url https://test.pypi.org/simple/ --extra-index-url https://pypi.org/simple/ aletheia-probe || echo "Installation from TestPyPI may fail if package was just uploaded"
# Cleanup
deactivate
rm -rf testpypi_env
- name: Archive test artifacts
uses: actions/upload-artifact@v4
if: always()
with:
name: testpypi-artifacts
path: dist/
publish:
name: Publish to PyPI (Production)
runs-on: ubuntu-latest
if: github.event_name == 'release' && github.event.action == 'published'
needs: [lint-and-type-check, test, build, integration-tests, cross-platform-integration]
environment: production
steps:
- uses: actions/checkout@v4
with:
fetch-depth: 0 # Fetch all history for version validation
- name: Set up Python 3.11
uses: actions/setup-python@v5
with:
python-version: "3.11"
- name: Install dependencies
run: |
python -m pip install --upgrade pip
pip install build twine packaging requests
- name: Validate version matches git tag
run: |
python -c "
import re
import sys
import os
from pathlib import Path
# Read version from pyproject.toml
content = Path('pyproject.toml').read_text()
match = re.search(r'version = \"(.+?)\"', content)
if not match:
print('ERROR: Could not find version in pyproject.toml')
sys.exit(1)
pyproject_version = match.group(1)
print(f'Version in pyproject.toml: {pyproject_version}')
# Get git tag
git_ref = os.environ.get('GITHUB_REF', '')
if git_ref.startswith('refs/tags/'):
git_tag = git_ref.replace('refs/tags/', '')
# Remove 'v' prefix if present
git_version = git_tag.lstrip('v')
print(f'Git tag version: {git_version}')
if pyproject_version != git_version:
print(f'ERROR: Version mismatch!')
print(f' pyproject.toml: {pyproject_version}')
print(f' git tag: {git_version}')
sys.exit(1)
print(f'✓ Versions match: {pyproject_version}')
else:
print('WARNING: Not running from a tag, skipping tag validation')
"
- name: Check if version exists on PyPI
run: |
python -c "
import sys
import re
import requests
from pathlib import Path
# Read version from pyproject.toml
content = Path('pyproject.toml').read_text()
match = re.search(r'version = \"(.+?)\"', content)
version = match.group(1)
# Check PyPI
response = requests.get(f'https://pypi.org/pypi/aletheia-probe/{version}/json')
if response.status_code == 200:
print(f'ERROR: Version {version} already exists on PyPI!')
print(f'Please bump the version in pyproject.toml before releasing.')
sys.exit(1)
elif response.status_code == 404:
print(f'✓ Version {version} does not exist on PyPI - safe to publish')
else:
print(f'WARNING: Could not verify version on PyPI (status {response.status_code})')
print('Proceeding with caution...')
"
- name: Validate package metadata
run: |
python -c "
import sys
from pathlib import Path
import re
content = Path('pyproject.toml').read_text()
# Check required fields
required = ['name', 'version', 'description', 'readme', 'license', 'authors']
missing = []
for field in required:
if field not in content:
missing.append(field)
if missing:
print(f'ERROR: Missing required fields in pyproject.toml: {missing}')
sys.exit(1)
print('✓ All required metadata fields present')
# Validate URLs
if 'Homepage' not in content:
print('WARNING: Homepage URL not set')
if 'Repository' not in content:
print('WARNING: Repository URL not set')
print('✓ Package metadata validation passed')
"
- name: Build package
run: |
python -m build
- name: Check package with twine
run: |
twine check dist/* --strict
- name: Display package contents
run: |
echo "📦 Package contents:"
tar -tzf dist/*.tar.gz | head -30
echo ""
echo "📊 Package size:"
du -h dist/*
- name: Publish to PyPI
env:
TWINE_USERNAME: __token__
TWINE_PASSWORD: ${{ secrets.PYPI_API_TOKEN }}
run: |
echo "🚀 Publishing to PyPI..."
twine upload dist/* --verbose
- name: Verify published package
run: |
python -c "
import sys
import time
import requests
from pathlib import Path
import re
# Read version from pyproject.toml
content = Path('pyproject.toml').read_text()
match = re.search(r'version = \"(.+?)\"', content)
version = match.group(1)
print(f'Waiting for PyPI to index version {version}...')
# Wait up to 2 minutes for the package to appear
for i in range(24):
response = requests.get(f'https://pypi.org/pypi/aletheia-probe/{version}/json')
if response.status_code == 200:
print(f'✓ Package successfully published to PyPI!')
print(f'🔗 https://pypi.org/project/aletheia-probe/{version}/')
sys.exit(0)
time.sleep(5)
print('WARNING: Package not yet visible on PyPI after 2 minutes')
print('This may be normal - check manually at: https://pypi.org/project/aletheia-probe/')
"
- name: Archive release artifacts
uses: actions/upload-artifact@v4
with:
name: release-packages
path: dist/
notify:
name: Notify
runs-on: ubuntu-latest
if: always()
needs: [lint-and-type-check, test, build, integration-tests, cross-platform-integration]
steps:
- name: Notify on success
if: ${{ needs.lint-and-type-check.result == 'success' && needs.test.result == 'success' && needs.build.result == 'success' && needs.cross-platform-integration.result == 'success' }}
run: |
echo "✅ All checks passed!"
- name: Notify on failure
if: ${{ needs.lint-and-type-check.result == 'failure' || needs.test.result == 'failure' || needs.build.result == 'failure' || needs.cross-platform-integration.result == 'failure' }}
run: |
echo "❌ Some checks failed!"
exit 1