release #390
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
name: release

on:
  # Auto-trigger after all build/test workflows complete for a commit.
  workflow_run:
    workflows: ["wheels", "wheels-docker", "wheels-arm64", "wstest", "main"]
    types: [completed]
  # Manual dispatch for debugging.
  workflow_dispatch:
jobs:
  # Gate job: the release pipeline only proceeds once the latest run of
  # every required upstream workflow has completed successfully for the
  # same commit. Also exports the run IDs used for artifact downloads.
  check-all-workflows:
    name: Check if all workflows completed
    runs-on: ubuntu-latest
    outputs:
      all_complete: ${{ steps.check.outputs.all_complete }}
      wheels_run_id: ${{ steps.check.outputs.wheels_run_id }}
      wheels_docker_run_id: ${{ steps.check.outputs.wheels_docker_run_id }}
      wheels_arm64_run_id: ${{ steps.check.outputs.wheels_arm64_run_id }}
      wstest_run_id: ${{ steps.check.outputs.wstest_run_id }}
      main_run_id: ${{ steps.check.outputs.main_run_id }}
    steps:
      - name: Check all required workflows completed
        id: check
        uses: actions/github-script@v7
        with:
          script: |
            const requiredWorkflows = ['wheels', 'wheels-docker', 'wheels-arm64', 'wstest', 'main'];

            // Handle both workflow_run and workflow_dispatch triggers:
            // on workflow_run we inspect the commit the upstream run built;
            // on manual dispatch we fall back to the current commit.
            const commitSha = context.payload.workflow_run?.head_sha || context.sha;
            const triggeredBy = context.payload.workflow_run?.name || 'manual (workflow_dispatch)';

            console.log('─────────────────────────────────────────────────');
            console.log('🔍 Checking workflow completion status');
            console.log('─────────────────────────────────────────────────');
            console.log(`Event: ${context.eventName}`);
            console.log(`Commit SHA: ${commitSha}`);
            console.log(`Triggered by: ${triggeredBy}`);
            console.log('');

            // Get all workflow runs for this commit (100 is ample for one SHA).
            const { data: runs } = await github.rest.actions.listWorkflowRunsForRepo({
              owner: context.repo.owner,
              repo: context.repo.repo,
              head_sha: commitSha,
              per_page: 100
            });

            // Group by workflow name and keep only the latest run of each
            // (higher run id == newer run).
            const latestRuns = {};
            for (const run of runs.workflow_runs) {
              const workflowName = run.name;
              if (requiredWorkflows.includes(workflowName)) {
                if (!latestRuns[workflowName] || run.id > latestRuns[workflowName].id) {
                  latestRuns[workflowName] = run;
                }
              }
            }

            // Check that every required workflow completed successfully.
            console.log('Required workflows status:');
            const allComplete = requiredWorkflows.every(name => {
              const run = latestRuns[name];
              const complete = run && run.status === 'completed' && run.conclusion === 'success';
              const status = run ? `${run.status}/${run.conclusion}` : 'not found';
              console.log(`  ${complete ? '✅' : '⏳'} ${name.padEnd(20)} : ${status}`);
              return complete;
            });
            console.log('');
            if (!allComplete) {
              console.log('⏳ Not all workflows complete yet - exiting early');
              console.log('   This is normal! Release will proceed once all workflows finish.');
            } else {
              console.log('✅ All workflows complete - proceeding with release!');
            }
            console.log('─────────────────────────────────────────────────');

            core.setOutput('all_complete', allComplete ? 'true' : 'false');

            // Output run IDs for artifact downloads. Output names are the
            // workflow names with '-' sanitized to '_' (e.g. wheels-docker
            // -> wheels_docker_run_id), derived in one place instead of
            // five hand-written setOutput calls.
            for (const name of requiredWorkflows) {
              const key = `${name.replaceAll('-', '_')}_run_id`;
              core.setOutput(key, latestRuns[name]?.id ?? '');
            }
| identifiers: | |
| needs: check-all-workflows | |
| if: needs.check-all-workflows.outputs.all_complete == 'true' | |
| # GitHub needs to know where .cicd/workflows/identifiers.yml lives at parse time, | |
| # and submodules aren't included in that context! thus the following does NOT work: | |
| # uses: ./.cicd/workflows/identifiers.yml | |
| # we MUST reference the remote repo directly: | |
| uses: wamp-proto/wamp-cicd/.github/workflows/identifiers.yml@main | |
| # IMPORTANT: we still need .cicd as a Git submodule in the using repo though! | |
| # because e.g. identifiers.yml wants to access scripts/sanitize.sh ! | |
| # Development GitHub releases (consolidates wheels from both workflows) | |
| release-development: | |
| name: Development GitHub Release | |
| needs: [check-all-workflows, identifiers] | |
| runs-on: ubuntu-latest | |
| # Only create releases for development builds (explicit positive list) | |
| if: | | |
| needs.check-all-workflows.outputs.all_complete == 'true' && | |
| (github.event_name == 'workflow_dispatch' || | |
| (github.event_name == 'workflow_run' && github.event.workflow_run.conclusion == 'success')) && | |
| needs.identifiers.outputs.release_type == 'development' | |
| env: | |
| RELEASE_TYPE: ${{ needs.identifiers.outputs.release_type }} | |
| RELEASE_NAME: ${{ needs.identifiers.outputs.release_name }} | |
| steps: | |
| - name: Checkout code | |
| uses: actions/checkout@v4 | |
| with: | |
| submodules: recursive | |
| - name: Download all wheel artifacts (from wheels workflow) | |
| uses: actions/download-artifact@v4 | |
| with: | |
| pattern: wheels-* | |
| merge-multiple: true | |
| path: dist/ | |
| run-id: ${{ needs.check-all-workflows.outputs.wheels_run_id }} | |
| github-token: ${{ secrets.GITHUB_TOKEN }} | |
| continue-on-error: true | |
| - name: Download source distribution | |
| uses: actions/download-artifact@v4 | |
| with: | |
| name: source-distribution | |
| path: dist/ | |
| run-id: ${{ needs.check-all-workflows.outputs.wheels_run_id }} | |
| github-token: ${{ secrets.GITHUB_TOKEN }} | |
| continue-on-error: true | |
| - name: Debug - List downloaded files | |
| run: | | |
| echo "======================================================================" | |
| echo "==> DEBUG: Files in dist/ after downloading source-distribution" | |
| echo "======================================================================" | |
| echo "Using wheels_run_id: ${{ needs.check-all-workflows.outputs.wheels_run_id }}" | |
| echo "" | |
| ls -la dist/ | |
| echo "" | |
| echo "*.tar.gz files:" | |
| find dist/ -name "*.tar.gz" -ls || echo "None found" | |
| echo "" | |
| echo "*.verify.txt files:" | |
| find dist/ -name "*.verify.txt" -ls || echo "None found" | |
| echo "" | |
| shell: bash | |
| - name: Re-verify source distribution integrity (chain of custody) | |
| run: | | |
| echo "======================================================================" | |
| echo "==> Source Distribution Re-Verification (Chain of Custody)" | |
| echo "======================================================================" | |
| echo "" | |
| echo "OpenSSL version:" | |
| openssl version | |
| echo "" | |
| echo "Re-verifying artifact integrity at release workflow." | |
| echo "Comparing against original verification from wheels workflow." | |
| echo "" | |
| HAS_ERRORS=0 | |
| for tarball in dist/*.tar.gz; do | |
| if [ ! -f "$tarball" ]; then | |
| echo "⚠️ No source distribution found - skipping verification" | |
| continue | |
| fi | |
| BASENAME=$(basename "$tarball") | |
| VERIFY_FILE="dist/${BASENAME%.tar.gz}.verify.txt" | |
| if [ ! -f "$VERIFY_FILE" ]; then | |
| echo "⚠️ Warning: No original verification report found for $BASENAME" | |
| echo " Expected: $VERIFY_FILE" | |
| echo " Artifact may have been created without verification." | |
| echo "" | |
| HAS_ERRORS=1 | |
| continue | |
| fi | |
| echo "==> Re-verifying: $BASENAME" | |
| echo "" | |
| # Re-compute SHA256 hash | |
| echo "Computing current SHA256 fingerprint..." | |
| CURRENT_SHA256=$(openssl sha256 "$tarball" | awk '{print $2}') | |
| echo "Current SHA256: $CURRENT_SHA256" | |
| echo "" | |
| # Extract original SHA256 from verification report | |
| echo "Extracting original SHA256 from verification report..." | |
| echo "DEBUG: Contents of $VERIFY_FILE:" | |
| cat "$VERIFY_FILE" | |
| echo "" | |
| echo "DEBUG: Lines matching 'SHA256':" | |
| grep -i "SHA256" "$VERIFY_FILE" || echo "(no matches found)" | |
| echo "" | |
| ORIGINAL_SHA256=$(grep -E "^SHA(2-)?256\(" "$VERIFY_FILE" | awk -F'= ' '{print $2}' | tr -d ' ' || echo "") | |
| if [ -z "$ORIGINAL_SHA256" ]; then | |
| echo "❌ ERROR: Could not extract SHA256 from verification report" | |
| echo " The verification report may have an unexpected format" | |
| HAS_ERRORS=1 | |
| continue | |
| fi | |
| echo "Original SHA256: $ORIGINAL_SHA256" | |
| echo "" | |
| # Compare hashes | |
| if [ "$CURRENT_SHA256" = "$ORIGINAL_SHA256" ]; then | |
| echo "✅ SHA256 MATCH - Artifact integrity confirmed through pipeline" | |
| else | |
| echo "❌ SHA256 MISMATCH - Artifact corrupted during transfer!" | |
| echo "" | |
| echo "This indicates corruption between:" | |
| echo " 1. wheels workflow (artifact creation)" | |
| echo " 2. release workflow (artifact consumption)" | |
| echo "" | |
| echo "Expected: $ORIGINAL_SHA256" | |
| echo "Got: $CURRENT_SHA256" | |
| echo "" | |
| HAS_ERRORS=1 | |
| fi | |
| echo "" | |
| # Re-run gzip integrity test | |
| echo "Re-running gzip integrity test..." | |
| if gzip -tv "$tarball" 2>&1 | tee /tmp/gzip_output.txt; then | |
| GZIP_EXIT=$? | |
| if [ $GZIP_EXIT -eq 0 ]; then | |
| echo "✅ Gzip test PASS" | |
| else | |
| echo "❌ Gzip test FAIL (exit code $GZIP_EXIT)" | |
| HAS_ERRORS=1 | |
| fi | |
| else | |
| GZIP_EXIT=$? | |
| echo "❌ Gzip test FAIL (exit code $GZIP_EXIT)" | |
| cat /tmp/gzip_output.txt | |
| HAS_ERRORS=1 | |
| fi | |
| echo "" | |
| # Re-run tar extraction test | |
| echo "Re-running tar extraction test..." | |
| if tar -tzf "$tarball" > /dev/null 2>&1; then | |
| echo "✅ Tar extraction test PASS" | |
| else | |
| TAR_EXIT=$? | |
| echo "❌ Tar extraction test FAIL (exit code $TAR_EXIT)" | |
| HAS_ERRORS=1 | |
| fi | |
| echo "" | |
| echo "------------------------------------------------------------------------" | |
| echo "Original verification report (first 30 lines):" | |
| echo "------------------------------------------------------------------------" | |
| head -30 "$VERIFY_FILE" | |
| echo "" | |
| echo "... (full report available in dist/$VERIFY_FILE)" | |
| echo "" | |
| done | |
| if [ $HAS_ERRORS -eq 1 ]; then | |
| echo "======================================================================" | |
| echo "❌ RE-VERIFICATION FAILED" | |
| echo "======================================================================" | |
| echo "" | |
| echo "Source distribution failed integrity checks at release workflow." | |
| echo "This indicates either:" | |
| echo " 1. Corruption during artifact transfer" | |
| echo " 2. Packaging bug not caught at origin" | |
| echo "" | |
| echo "DO NOT PROCEED WITH RELEASE - investigate and fix first." | |
| echo "" | |
| exit 1 | |
| else | |
| echo "======================================================================" | |
| echo "✅ All source distributions re-verified successfully" | |
| echo "======================================================================" | |
| echo "" | |
| echo "Chain of custody confirmed: wheels workflow → release workflow" | |
| echo "Cryptographic integrity maintained throughout pipeline." | |
| fi | |
| shell: bash | |
| - name: Download Linux wheels without NVX | |
| uses: actions/download-artifact@v4 | |
| with: | |
| name: linux-wheels-no-nvx | |
| path: dist/ | |
| run-id: ${{ needs.check-all-workflows.outputs.wheels_run_id }} | |
| github-token: ${{ secrets.GITHUB_TOKEN }} | |
| continue-on-error: true | |
| - name: Download manylinux wheel artifacts (from wheels-docker workflow) | |
| uses: actions/download-artifact@v4 | |
| with: | |
| pattern: artifacts-* | |
| merge-multiple: true | |
| path: wheelhouse/ | |
| run-id: ${{ needs.check-all-workflows.outputs.wheels_docker_run_id }} | |
| github-token: ${{ secrets.GITHUB_TOKEN }} | |
| continue-on-error: true | |
| - name: Download ARM64 wheel artifacts (from wheels-arm64 workflow) | |
| uses: actions/download-artifact@v4 | |
| with: | |
| pattern: artifacts-arm64-* | |
| merge-multiple: true | |
| path: wheelhouse-arm64/ | |
| run-id: ${{ needs.check-all-workflows.outputs.wheels_arm64_run_id }} | |
| github-token: ${{ secrets.GITHUB_TOKEN }} | |
| continue-on-error: true | |
| - name: Download wstest conformance summary | |
| uses: actions/download-artifact@v4 | |
| with: | |
| pattern: conformance-summary-* | |
| merge-multiple: true | |
| path: wstest-results/ | |
| run-id: ${{ needs.check-all-workflows.outputs.wstest_run_id }} | |
| github-token: ${{ secrets.GITHUB_TOKEN }} | |
| continue-on-error: true | |
| - name: Download FlatBuffers schema artifacts | |
| uses: actions/download-artifact@v4 | |
| with: | |
| pattern: flatbuffers-schema-* | |
| merge-multiple: true | |
| path: flatbuffers-schema/ | |
| run-id: ${{ needs.check-all-workflows.outputs.main_run_id }} | |
| github-token: ${{ secrets.GITHUB_TOKEN }} | |
| continue-on-error: true | |
| - name: Download WebSocket conformance HTML reports with-nvx (for RTD) | |
| uses: actions/download-artifact@v4 | |
| with: | |
| name: websocket-conformance-docs-quick-with-nvx | |
| path: websocket-conformance/with-nvx/ | |
| run-id: ${{ needs.check-all-workflows.outputs.wstest_run_id }} | |
| github-token: ${{ secrets.GITHUB_TOKEN }} | |
| continue-on-error: true | |
| - name: Download WebSocket conformance HTML reports without-nvx (for RTD) | |
| uses: actions/download-artifact@v4 | |
| with: | |
| name: websocket-conformance-docs-quick-without-nvx | |
| path: websocket-conformance/without-nvx/ | |
| run-id: ${{ needs.check-all-workflows.outputs.wstest_run_id }} | |
| github-token: ${{ secrets.GITHUB_TOKEN }} | |
| continue-on-error: true | |
| - name: Consolidate all artifacts | |
| run: | | |
| echo "==> Consolidating all artifacts into unified release directory..." | |
| mkdir -p release-artifacts | |
| # Copy wheels from wheels workflow | |
| if [ -d "dist" ]; then | |
| echo "Copying wheels workflow artifacts..." | |
| find dist -type f \( -name "*.whl" -o -name "*.tar.gz" \) -exec cp {} release-artifacts/ \; | |
| fi | |
| # Copy wheels from wheels-docker workflow | |
| if [ -d "wheelhouse" ]; then | |
| echo "Copying wheels-docker workflow artifacts..." | |
| find wheelhouse -type f \( -name "*.whl" -o -name "*.tar.gz" \) -exec cp {} release-artifacts/ \; | |
| fi | |
| # Copy ARM64 wheels from wheels-arm64 workflow | |
| if [ -d "wheelhouse-arm64" ]; then | |
| echo "Copying wheels-arm64 workflow artifacts..." | |
| find wheelhouse-arm64 -type f \( -name "*.whl" -o -name "*.tar.gz" \) -exec cp {} release-artifacts/ \; | |
| fi | |
| # Copy wstest conformance results | |
| if [ -d "wstest-results" ]; then | |
| echo "Copying wstest conformance results..." | |
| find wstest-results -type f -exec cp {} release-artifacts/ \; | |
| fi | |
| # Package FlatBuffers schema as tarball | |
| if [ -d "flatbuffers-schema" ]; then | |
| echo "Packaging FlatBuffers schema..." | |
| tar -czf release-artifacts/flatbuffers-schema.tar.gz -C flatbuffers-schema . | |
| fi | |
| # Package WebSocket conformance reports for RTD | |
| if [ -d "websocket-conformance" ]; then | |
| echo "Packaging WebSocket conformance reports for RTD..." | |
| CONFORMANCE_TARBALL="autobahn-python-websocket-conformance-${RELEASE_NAME}.tar.gz" | |
| tar -czf "release-artifacts/${CONFORMANCE_TARBALL}" -C websocket-conformance . | |
| echo "Created: ${CONFORMANCE_TARBALL}" | |
| fi | |
| echo "" | |
| echo "==> Unified release artifact inventory:" | |
| ls -la release-artifacts/ || echo "No artifacts found" | |
| echo "" | |
| echo "Wheels: $(find release-artifacts -name "*.whl" | wc -l)" | |
| echo "Source dists: $(find release-artifacts -name "*.tar.gz" ! -name "flatbuffers-schema.tar.gz" ! -name "autobahn-python-websocket-conformance-*.tar.gz" | wc -l)" | |
| echo "Wstest reports: $(find release-artifacts -name "*wstest*" | wc -l)" | |
| echo "FlatBuffers schema: $(ls release-artifacts/flatbuffers-schema.tar.gz 2>/dev/null && echo 'packaged' || echo 'not found')" | |
| echo "Conformance reports: $(ls release-artifacts/autobahn-python-websocket-conformance-*.tar.gz 2>/dev/null && echo 'packaged' || echo 'not found')" | |
| - name: Install jinja2-cli for template rendering | |
| run: | | |
| pip install jinja2-cli | |
| - name: Render release notes from Jinja2 template | |
| run: | | |
| echo "==> Preparing release notes using Jinja2 template..." | |
| echo "Release type: $RELEASE_TYPE" | |
| echo "Release name: $RELEASE_NAME" | |
| # Collect template variables | |
| COMMIT_SHA="${GITHUB_SHA::8}" | |
| BUILD_DATE="$(date -u +'%Y-%m-%d %H:%M:%S UTC')" | |
| WHEEL_COUNT="$(find release-artifacts -name "*.whl" | wc -l)" | |
| SDIST_COUNT="$(find release-artifacts -name "*.tar.gz" | wc -l)" | |
| # Render template using jinja2 | |
| jinja2 .github/templates/release-development.md.j2 \ | |
| -D release_name="$RELEASE_NAME" \ | |
| -D commit_sha="$COMMIT_SHA" \ | |
| -D build_date="$BUILD_DATE" \ | |
| -D wheel_count="$WHEEL_COUNT" \ | |
| -D sdist_count="$SDIST_COUNT" \ | |
| -o release-notes.md | |
| echo "" | |
| echo "==> Generated release notes:" | |
| cat release-notes.md | |
| - name: Create development GitHub release | |
| env: | |
| GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} | |
| run: | | |
| echo "==> Creating development GitHub release..." | |
| echo "Release type: $RELEASE_TYPE" | |
| echo "Release name: $RELEASE_NAME" | |
| # Delete existing release if it exists (development builds may be rebuilt) | |
| gh release delete "$RELEASE_NAME" --repo "$GITHUB_REPOSITORY" --yes || true | |
| # Create the release using rendered notes | |
| gh release create "$RELEASE_NAME" \ | |
| --repo "$GITHUB_REPOSITORY" \ | |
| --title "Development Build $RELEASE_NAME" \ | |
| --notes-file release-notes.md \ | |
| --prerelease \ | |
| release-artifacts/* | |
| echo "✅ Release $RELEASE_NAME created successfully" | |
| # Nightly and stable GitHub releases (consolidates wheels from both workflows) | |
| release-nightly: | |
| name: Nightly & Stable GitHub Releases | |
| needs: [check-all-workflows, identifiers] | |
| runs-on: ubuntu-latest | |
| # Only create releases for nightly and stable builds (explicit positive list) | |
| if: | | |
| needs.check-all-workflows.outputs.all_complete == 'true' && | |
| (github.event_name == 'workflow_dispatch' || | |
| (github.event_name == 'workflow_run' && github.event.workflow_run.conclusion == 'success')) && | |
| (needs.identifiers.outputs.release_type == 'nightly' || needs.identifiers.outputs.release_type == 'stable') | |
| env: | |
| RELEASE_TYPE: ${{ needs.identifiers.outputs.release_type }} | |
| RELEASE_NAME: ${{ needs.identifiers.outputs.release_name }} | |
| steps: | |
| - name: Checkout code | |
| uses: actions/checkout@v4 | |
| with: | |
| submodules: recursive | |
| - name: Download all wheel artifacts (from wheels workflow) | |
| uses: actions/download-artifact@v4 | |
| with: | |
| pattern: wheels-* | |
| merge-multiple: true | |
| path: dist/ | |
| run-id: ${{ needs.check-all-workflows.outputs.wheels_run_id }} | |
| github-token: ${{ secrets.GITHUB_TOKEN }} | |
| continue-on-error: true | |
| - name: Download source distribution | |
| uses: actions/download-artifact@v4 | |
| with: | |
| name: source-distribution | |
| path: dist/ | |
| run-id: ${{ needs.check-all-workflows.outputs.wheels_run_id }} | |
| github-token: ${{ secrets.GITHUB_TOKEN }} | |
| continue-on-error: true | |
| - name: Debug - List downloaded files | |
| run: | | |
| echo "======================================================================" | |
| echo "==> DEBUG: Files in dist/ after downloading source-distribution" | |
| echo "======================================================================" | |
| echo "Using wheels_run_id: ${{ needs.check-all-workflows.outputs.wheels_run_id }}" | |
| echo "" | |
| ls -la dist/ | |
| echo "" | |
| echo "*.tar.gz files:" | |
| find dist/ -name "*.tar.gz" -ls || echo "None found" | |
| echo "" | |
| echo "*.verify.txt files:" | |
| find dist/ -name "*.verify.txt" -ls || echo "None found" | |
| echo "" | |
| shell: bash | |
| - name: Re-verify source distribution integrity (chain of custody) | |
| run: | | |
| echo "======================================================================" | |
| echo "==> Source Distribution Re-Verification (Chain of Custody)" | |
| echo "======================================================================" | |
| echo "" | |
| echo "OpenSSL version:" | |
| openssl version | |
| echo "" | |
| echo "Re-verifying artifact integrity at release workflow." | |
| echo "Comparing against original verification from wheels workflow." | |
| echo "" | |
| HAS_ERRORS=0 | |
| for tarball in dist/*.tar.gz; do | |
| if [ ! -f "$tarball" ]; then | |
| echo "⚠️ No source distribution found - skipping verification" | |
| continue | |
| fi | |
| BASENAME=$(basename "$tarball") | |
| VERIFY_FILE="dist/${BASENAME%.tar.gz}.verify.txt" | |
| if [ ! -f "$VERIFY_FILE" ]; then | |
| echo "⚠️ Warning: No original verification report found for $BASENAME" | |
| echo " Expected: $VERIFY_FILE" | |
| echo " Artifact may have been created without verification." | |
| echo "" | |
| HAS_ERRORS=1 | |
| continue | |
| fi | |
| echo "==> Re-verifying: $BASENAME" | |
| echo "" | |
| # Re-compute SHA256 hash | |
| echo "Computing current SHA256 fingerprint..." | |
| CURRENT_SHA256=$(openssl sha256 "$tarball" | awk '{print $2}') | |
| echo "Current SHA256: $CURRENT_SHA256" | |
| echo "" | |
| # Extract original SHA256 from verification report | |
| echo "Extracting original SHA256 from verification report..." | |
| echo "DEBUG: Contents of $VERIFY_FILE:" | |
| cat "$VERIFY_FILE" | |
| echo "" | |
| echo "DEBUG: Lines matching 'SHA256':" | |
| grep -i "SHA256" "$VERIFY_FILE" || echo "(no matches found)" | |
| echo "" | |
| ORIGINAL_SHA256=$(grep -E "^SHA(2-)?256\(" "$VERIFY_FILE" | awk -F'= ' '{print $2}' | tr -d ' ' || echo "") | |
| if [ -z "$ORIGINAL_SHA256" ]; then | |
| echo "❌ ERROR: Could not extract SHA256 from verification report" | |
| echo " The verification report may have an unexpected format" | |
| HAS_ERRORS=1 | |
| continue | |
| fi | |
| echo "Original SHA256: $ORIGINAL_SHA256" | |
| echo "" | |
| # Compare hashes | |
| if [ "$CURRENT_SHA256" = "$ORIGINAL_SHA256" ]; then | |
| echo "✅ SHA256 MATCH - Artifact integrity confirmed through pipeline" | |
| else | |
| echo "❌ SHA256 MISMATCH - Artifact corrupted during transfer!" | |
| echo "" | |
| echo "This indicates corruption between:" | |
| echo " 1. wheels workflow (artifact creation)" | |
| echo " 2. release workflow (artifact consumption)" | |
| echo "" | |
| echo "Expected: $ORIGINAL_SHA256" | |
| echo "Got: $CURRENT_SHA256" | |
| echo "" | |
| HAS_ERRORS=1 | |
| fi | |
| echo "" | |
| # Re-run gzip integrity test | |
| echo "Re-running gzip integrity test..." | |
| if gzip -tv "$tarball" 2>&1 | tee /tmp/gzip_output.txt; then | |
| GZIP_EXIT=$? | |
| if [ $GZIP_EXIT -eq 0 ]; then | |
| echo "✅ Gzip test PASS" | |
| else | |
| echo "❌ Gzip test FAIL (exit code $GZIP_EXIT)" | |
| HAS_ERRORS=1 | |
| fi | |
| else | |
| GZIP_EXIT=$? | |
| echo "❌ Gzip test FAIL (exit code $GZIP_EXIT)" | |
| cat /tmp/gzip_output.txt | |
| HAS_ERRORS=1 | |
| fi | |
| echo "" | |
| # Re-run tar extraction test | |
| echo "Re-running tar extraction test..." | |
| if tar -tzf "$tarball" > /dev/null 2>&1; then | |
| echo "✅ Tar extraction test PASS" | |
| else | |
| TAR_EXIT=$? | |
| echo "❌ Tar extraction test FAIL (exit code $TAR_EXIT)" | |
| HAS_ERRORS=1 | |
| fi | |
| echo "" | |
| echo "------------------------------------------------------------------------" | |
| echo "Original verification report (first 30 lines):" | |
| echo "------------------------------------------------------------------------" | |
| head -30 "$VERIFY_FILE" | |
| echo "" | |
| echo "... (full report available in dist/$VERIFY_FILE)" | |
| echo "" | |
| done | |
| if [ $HAS_ERRORS -eq 1 ]; then | |
| echo "======================================================================" | |
| echo "❌ RE-VERIFICATION FAILED" | |
| echo "======================================================================" | |
| echo "" | |
| echo "Source distribution failed integrity checks at release workflow." | |
| echo "This indicates either:" | |
| echo " 1. Corruption during artifact transfer" | |
| echo " 2. Packaging bug not caught at origin" | |
| echo "" | |
| echo "DO NOT PROCEED WITH RELEASE - investigate and fix first." | |
| echo "" | |
| exit 1 | |
| else | |
| echo "======================================================================" | |
| echo "✅ All source distributions re-verified successfully" | |
| echo "======================================================================" | |
| echo "" | |
| echo "Chain of custody confirmed: wheels workflow → release workflow" | |
| echo "Cryptographic integrity maintained throughout pipeline." | |
| fi | |
| shell: bash | |
| - name: Download Linux wheels without NVX | |
| uses: actions/download-artifact@v4 | |
| with: | |
| name: linux-wheels-no-nvx | |
| path: dist/ | |
| run-id: ${{ needs.check-all-workflows.outputs.wheels_run_id }} | |
| github-token: ${{ secrets.GITHUB_TOKEN }} | |
| continue-on-error: true | |
| - name: Download manylinux wheel artifacts (from wheels-docker workflow) | |
| uses: actions/download-artifact@v4 | |
| with: | |
| pattern: artifacts-* | |
| merge-multiple: true | |
| path: wheelhouse/ | |
| run-id: ${{ needs.check-all-workflows.outputs.wheels_docker_run_id }} | |
| github-token: ${{ secrets.GITHUB_TOKEN }} | |
| continue-on-error: true | |
| - name: Download ARM64 wheel artifacts (from wheels-arm64 workflow) | |
| uses: actions/download-artifact@v4 | |
| with: | |
| pattern: artifacts-arm64-* | |
| merge-multiple: true | |
| path: wheelhouse-arm64/ | |
| run-id: ${{ needs.check-all-workflows.outputs.wheels_arm64_run_id }} | |
| github-token: ${{ secrets.GITHUB_TOKEN }} | |
| continue-on-error: true | |
| - name: Download wstest conformance summary | |
| uses: actions/download-artifact@v4 | |
| with: | |
| pattern: conformance-summary-* | |
| merge-multiple: true | |
| path: wstest-results/ | |
| run-id: ${{ needs.check-all-workflows.outputs.wstest_run_id }} | |
| github-token: ${{ secrets.GITHUB_TOKEN }} | |
| continue-on-error: true | |
| - name: Download FlatBuffers schema artifacts | |
| uses: actions/download-artifact@v4 | |
| with: | |
| pattern: flatbuffers-schema-* | |
| merge-multiple: true | |
| path: flatbuffers-schema/ | |
| run-id: ${{ needs.check-all-workflows.outputs.main_run_id }} | |
| github-token: ${{ secrets.GITHUB_TOKEN }} | |
| continue-on-error: true | |
| - name: Download WebSocket conformance HTML reports with-nvx (for RTD) | |
| uses: actions/download-artifact@v4 | |
| with: | |
| name: websocket-conformance-docs-quick-with-nvx | |
| path: websocket-conformance/with-nvx/ | |
| run-id: ${{ needs.check-all-workflows.outputs.wstest_run_id }} | |
| github-token: ${{ secrets.GITHUB_TOKEN }} | |
| continue-on-error: true | |
| - name: Download WebSocket conformance HTML reports without-nvx (for RTD) | |
| uses: actions/download-artifact@v4 | |
| with: | |
| name: websocket-conformance-docs-quick-without-nvx | |
| path: websocket-conformance/without-nvx/ | |
| run-id: ${{ needs.check-all-workflows.outputs.wstest_run_id }} | |
| github-token: ${{ secrets.GITHUB_TOKEN }} | |
| continue-on-error: true | |
| - name: Consolidate all artifacts | |
| run: | | |
| echo "==> Consolidating all artifacts into unified release directory..." | |
| mkdir -p release-artifacts | |
| # Copy wheels from wheels workflow | |
| if [ -d "dist" ]; then | |
| echo "Copying wheels workflow artifacts..." | |
| find dist -type f \( -name "*.whl" -o -name "*.tar.gz" \) -exec cp {} release-artifacts/ \; | |
| fi | |
| # Copy wheels from wheels-docker workflow | |
| if [ -d "wheelhouse" ]; then | |
| echo "Copying wheels-docker workflow artifacts..." | |
| find wheelhouse -type f \( -name "*.whl" -o -name "*.tar.gz" \) -exec cp {} release-artifacts/ \; | |
| fi | |
| # Copy ARM64 wheels from wheels-arm64 workflow | |
| if [ -d "wheelhouse-arm64" ]; then | |
| echo "Copying wheels-arm64 workflow artifacts..." | |
| find wheelhouse-arm64 -type f \( -name "*.whl" -o -name "*.tar.gz" \) -exec cp {} release-artifacts/ \; | |
| fi | |
| # Copy wstest conformance results | |
| if [ -d "wstest-results" ]; then | |
| echo "Copying wstest conformance results..." | |
| find wstest-results -type f -exec cp {} release-artifacts/ \; | |
| fi | |
| # Package FlatBuffers schema as tarball | |
| if [ -d "flatbuffers-schema" ]; then | |
| echo "Packaging FlatBuffers schema..." | |
| tar -czf release-artifacts/flatbuffers-schema.tar.gz -C flatbuffers-schema . | |
| fi | |
| # Package WebSocket conformance reports for RTD | |
| if [ -d "websocket-conformance" ]; then | |
| echo "Packaging WebSocket conformance reports for RTD..." | |
| CONFORMANCE_TARBALL="autobahn-python-websocket-conformance-${RELEASE_NAME}.tar.gz" | |
| tar -czf "release-artifacts/${CONFORMANCE_TARBALL}" -C websocket-conformance . | |
| echo "Created: ${CONFORMANCE_TARBALL}" | |
| fi | |
| echo "" | |
| echo "==> Unified release artifact inventory:" | |
| ls -la release-artifacts/ || echo "No artifacts found" | |
| echo "" | |
| echo "Wheels: $(find release-artifacts -name "*.whl" | wc -l)" | |
| echo "Source dists: $(find release-artifacts -name "*.tar.gz" ! -name "flatbuffers-schema.tar.gz" ! -name "autobahn-python-websocket-conformance-*.tar.gz" | wc -l)" | |
| echo "Wstest reports: $(find release-artifacts -name "*wstest*" | wc -l)" | |
| echo "FlatBuffers schema: $(ls release-artifacts/flatbuffers-schema.tar.gz 2>/dev/null && echo 'packaged' || echo 'not found')" | |
| echo "Conformance reports: $(ls release-artifacts/autobahn-python-websocket-conformance-*.tar.gz 2>/dev/null && echo 'packaged' || echo 'not found')" | |
| - name: Install jinja2-cli for template rendering | |
| run: | | |
| pip install jinja2-cli | |
| - name: Render release notes from Jinja2 template | |
| run: | | |
| echo "==> Preparing release notes using Jinja2 template..." | |
| echo "Release type: $RELEASE_TYPE" | |
| echo "Release name: $RELEASE_NAME" | |
| # Collect template variables | |
| COMMIT_SHA="${GITHUB_SHA::8}" | |
| BUILD_DATE="$(date -u +'%Y-%m-%d %H:%M:%S UTC')" | |
| WHEEL_COUNT="$(find release-artifacts -name "*.whl" | wc -l)" | |
| SDIST_COUNT="$(find release-artifacts -name "*.tar.gz" | wc -l)" | |
| # Select template based on release type | |
| if [ "$RELEASE_TYPE" = "stable" ]; then | |
| TEMPLATE=".github/templates/release-stable.md.j2" | |
| else | |
| TEMPLATE=".github/templates/release-nightly.md.j2" | |
| fi | |
| # Render template using jinja2 | |
| jinja2 "$TEMPLATE" \ | |
| -D release_name="$RELEASE_NAME" \ | |
| -D commit_sha="$COMMIT_SHA" \ | |
| -D build_date="$BUILD_DATE" \ | |
| -D wheel_count="$WHEEL_COUNT" \ | |
| -D sdist_count="$SDIST_COUNT" \ | |
| -o release-notes.md | |
| echo "" | |
| echo "==> Generated release notes:" | |
| cat release-notes.md | |
| - name: Create unified GitHub release | |
| env: | |
| GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} | |
| run: | | |
| echo "==> Creating unified GitHub release..." | |
| echo "Release type: $RELEASE_TYPE" | |
| echo "Release name: $RELEASE_NAME" | |
| # Delete existing release if it exists (for nightly builds) | |
| gh release delete "$RELEASE_NAME" --repo "$GITHUB_REPOSITORY" --yes || true | |
| # Set release title based on type | |
| if [ "$RELEASE_TYPE" = "stable" ]; then | |
| TITLE="Release $RELEASE_NAME" | |
| else | |
| TITLE="Nightly Build $RELEASE_NAME" | |
| fi | |
| # Create the release using rendered notes | |
| gh release create "$RELEASE_NAME" \ | |
| --repo "$GITHUB_REPOSITORY" \ | |
| --title "$TITLE" \ | |
| --notes-file release-notes.md \ | |
| release-artifacts/* | |
| echo "✅ Release $RELEASE_NAME created successfully" | |
| # Stable release publishing: PyPI and RTD (consolidates from both wheel workflows) | |
| release-stable: | |
| name: Stable Release (PyPI & RTD) | |
| needs: [check-all-workflows, identifiers, release-nightly] | |
| runs-on: ubuntu-latest | |
| # Only publish to PyPI for stable releases (explicit positive list) | |
| if: | | |
| needs.check-all-workflows.outputs.all_complete == 'true' && | |
| needs.identifiers.outputs.release_type == 'stable' | |
| env: | |
| RELEASE_TYPE: ${{ needs.identifiers.outputs.release_type }} | |
| RELEASE_NAME: ${{ needs.identifiers.outputs.release_name }} | |
| environment: | |
| name: pypi | |
| url: https://pypi.org/p/autobahn | |
| permissions: | |
| id-token: write # For trusted publishing | |
| steps: | |
| - name: Checkout code | |
| uses: actions/checkout@v4 | |
| with: | |
| submodules: recursive | |
| - name: Download macOS wheels | |
| uses: actions/download-artifact@v4 | |
| with: | |
| name: wheels-macos-arm64 | |
| path: dist/ | |
| run-id: ${{ needs.check-all-workflows.outputs.wheels_run_id }} | |
| github-token: ${{ secrets.GITHUB_TOKEN }} | |
| continue-on-error: true | |
| - name: Download Windows wheels | |
| uses: actions/download-artifact@v4 | |
| with: | |
| name: wheels-windows-x86_64 | |
| path: dist/ | |
| run-id: ${{ needs.check-all-workflows.outputs.wheels_run_id }} | |
| github-token: ${{ secrets.GITHUB_TOKEN }} | |
| continue-on-error: true | |
| - name: Download source distribution | |
| uses: actions/download-artifact@v4 | |
| with: | |
| name: source-distribution | |
| path: dist/ | |
| run-id: ${{ needs.check-all-workflows.outputs.wheels_run_id }} | |
| github-token: ${{ secrets.GITHUB_TOKEN }} | |
| continue-on-error: true | |
| - name: Debug - List downloaded files | |
| run: | | |
| echo "======================================================================" | |
| echo "==> DEBUG: Files in dist/ after downloading source-distribution" | |
| echo "======================================================================" | |
| echo "Using wheels_run_id: ${{ needs.check-all-workflows.outputs.wheels_run_id }}" | |
| echo "" | |
| ls -la dist/ | |
| echo "" | |
| echo "*.tar.gz files:" | |
| find dist/ -name "*.tar.gz" -ls || echo "None found" | |
| echo "" | |
| echo "*.verify.txt files:" | |
| find dist/ -name "*.verify.txt" -ls || echo "None found" | |
| echo "" | |
| shell: bash | |
| - name: Re-verify source distribution integrity (chain of custody) | |
| run: | | |
| echo "======================================================================" | |
| echo "==> Source Distribution Re-Verification (Chain of Custody)" | |
| echo "======================================================================" | |
| echo "" | |
| echo "OpenSSL version:" | |
| openssl version | |
| echo "" | |
| echo "Re-verifying artifact integrity at release workflow." | |
| echo "Comparing against original verification from wheels workflow." | |
| echo "" | |
| HAS_ERRORS=0 | |
| for tarball in dist/*.tar.gz; do | |
| if [ ! -f "$tarball" ]; then | |
| echo "⚠️ No source distribution found - skipping verification" | |
| continue | |
| fi | |
| BASENAME=$(basename "$tarball") | |
| VERIFY_FILE="dist/${BASENAME%.tar.gz}.verify.txt" | |
| if [ ! -f "$VERIFY_FILE" ]; then | |
| echo "⚠️ Warning: No original verification report found for $BASENAME" | |
| echo " Expected: $VERIFY_FILE" | |
| echo " Artifact may have been created without verification." | |
| echo "" | |
| HAS_ERRORS=1 | |
| continue | |
| fi | |
| echo "==> Re-verifying: $BASENAME" | |
| echo "" | |
| # Re-compute SHA256 hash | |
| echo "Computing current SHA256 fingerprint..." | |
| CURRENT_SHA256=$(openssl sha256 "$tarball" | awk '{print $2}') | |
| echo "Current SHA256: $CURRENT_SHA256" | |
| echo "" | |
| # Extract original SHA256 from verification report | |
| echo "Extracting original SHA256 from verification report..." | |
| echo "DEBUG: Contents of $VERIFY_FILE:" | |
| cat "$VERIFY_FILE" | |
| echo "" | |
| echo "DEBUG: Lines matching 'SHA256':" | |
| grep -i "SHA256" "$VERIFY_FILE" || echo "(no matches found)" | |
| echo "" | |
| ORIGINAL_SHA256=$(grep -E "^SHA(2-)?256\(" "$VERIFY_FILE" | awk -F'= ' '{print $2}' | tr -d ' ' || echo "") | |
| if [ -z "$ORIGINAL_SHA256" ]; then | |
| echo "❌ ERROR: Could not extract SHA256 from verification report" | |
| echo " The verification report may have an unexpected format" | |
| HAS_ERRORS=1 | |
| continue | |
| fi | |
| echo "Original SHA256: $ORIGINAL_SHA256" | |
| echo "" | |
| # Compare hashes | |
| if [ "$CURRENT_SHA256" = "$ORIGINAL_SHA256" ]; then | |
| echo "✅ SHA256 MATCH - Artifact integrity confirmed through pipeline" | |
| else | |
| echo "❌ SHA256 MISMATCH - Artifact corrupted during transfer!" | |
| echo "" | |
| echo "This indicates corruption between:" | |
| echo " 1. wheels workflow (artifact creation)" | |
| echo " 2. release workflow (artifact consumption)" | |
| echo "" | |
| echo "Expected: $ORIGINAL_SHA256" | |
| echo "Got: $CURRENT_SHA256" | |
| echo "" | |
| HAS_ERRORS=1 | |
| fi | |
| echo "" | |
| # Re-run gzip integrity test | |
| echo "Re-running gzip integrity test..." | |
| if gzip -tv "$tarball" 2>&1 | tee /tmp/gzip_output.txt; then | |
| GZIP_EXIT=$? | |
| if [ $GZIP_EXIT -eq 0 ]; then | |
| echo "✅ Gzip test PASS" | |
| else | |
| echo "❌ Gzip test FAIL (exit code $GZIP_EXIT)" | |
| HAS_ERRORS=1 | |
| fi | |
| else | |
| GZIP_EXIT=$? | |
| echo "❌ Gzip test FAIL (exit code $GZIP_EXIT)" | |
| cat /tmp/gzip_output.txt | |
| HAS_ERRORS=1 | |
| fi | |
| echo "" | |
| # Re-run tar extraction test | |
| echo "Re-running tar extraction test..." | |
| if tar -tzf "$tarball" > /dev/null 2>&1; then | |
| echo "✅ Tar extraction test PASS" | |
| else | |
| TAR_EXIT=$? | |
| echo "❌ Tar extraction test FAIL (exit code $TAR_EXIT)" | |
| HAS_ERRORS=1 | |
| fi | |
| echo "" | |
| echo "------------------------------------------------------------------------" | |
| echo "Original verification report (first 30 lines):" | |
| echo "------------------------------------------------------------------------" | |
| head -30 "$VERIFY_FILE" | |
| echo "" | |
| echo "... (full report available in dist/$VERIFY_FILE)" | |
| echo "" | |
| done | |
| if [ $HAS_ERRORS -eq 1 ]; then | |
| echo "======================================================================" | |
| echo "❌ RE-VERIFICATION FAILED" | |
| echo "======================================================================" | |
| echo "" | |
| echo "Source distribution failed integrity checks at release workflow." | |
| echo "This indicates either:" | |
| echo " 1. Corruption during artifact transfer" | |
| echo " 2. Packaging bug not caught at origin" | |
| echo "" | |
| echo "DO NOT PROCEED WITH RELEASE - investigate and fix first." | |
| echo "" | |
| exit 1 | |
| else | |
| echo "======================================================================" | |
| echo "✅ All source distributions re-verified successfully" | |
| echo "======================================================================" | |
| echo "" | |
| echo "Chain of custody confirmed: wheels workflow → release workflow" | |
| echo "Cryptographic integrity maintained throughout pipeline." | |
| fi | |
| shell: bash | |
| - name: Download Linux wheels without NVX | |
| uses: actions/download-artifact@v4 | |
| with: | |
| name: linux-wheels-no-nvx | |
| path: dist/ | |
| run-id: ${{ needs.check-all-workflows.outputs.wheels_run_id }} | |
| github-token: ${{ secrets.GITHUB_TOKEN }} | |
| continue-on-error: true | |
| - name: Download manylinux wheels with NVX (from wheels-docker) | |
| uses: actions/download-artifact@v4 | |
| with: | |
| pattern: artifacts-* | |
| merge-multiple: true | |
| path: dist/ | |
| run-id: ${{ needs.check-all-workflows.outputs.wheels_docker_run_id }} | |
| github-token: ${{ secrets.GITHUB_TOKEN }} | |
| continue-on-error: true | |
| - name: Download ARM64 wheels with NVX (from wheels-arm64) | |
| uses: actions/download-artifact@v4 | |
| with: | |
| pattern: artifacts-arm64-* | |
| merge-multiple: true | |
| path: dist/ | |
| run-id: ${{ needs.check-all-workflows.outputs.wheels_arm64_run_id }} | |
| github-token: ${{ secrets.GITHUB_TOKEN }} | |
| continue-on-error: true | |
| - name: List artifacts for PyPI publishing | |
| run: | | |
| echo "Publishing to PyPI for release: $RELEASE_NAME" | |
| ls -la dist/ | |
| echo "" | |
| echo "macOS wheels: $(find dist -name "*macos*.whl" 2>/dev/null | wc -l)" | |
| echo "Windows wheels: $(find dist -name "*win*.whl" 2>/dev/null | wc -l)" | |
| echo "Linux manylinux wheels: $(find dist -name "*manylinux*.whl" 2>/dev/null | wc -l)" | |
| echo "Linux fallback wheels: $(find dist -name "*linux*.whl" ! -name "*manylinux*.whl" 2>/dev/null | wc -l)" | |
| echo "Source distributions: $(find dist -name "*.tar.gz" 2>/dev/null | wc -l)" | |
| echo "" | |
| echo "Total PyPI artifacts: $(find dist -type f \( -name "*.whl" -o -name "*.tar.gz" \) | wc -l)" | |
| - name: Clean non-package files and unsupported wheels from dist/ | |
| run: | | |
| echo "==> Removing non-package files from dist/ before PyPI upload..." | |
| find dist/ -type f ! \( -name "*.whl" -o -name "*.tar.gz" \) -delete | |
| echo "==> Removing plain linux_* wheels (PyPI only accepts manylinux_*)..." | |
| # PyPI rejects plain linux_x86_64/linux_aarch64 tags - they must be manylinux_* | |
| find dist/ -name "*-linux_x86_64.whl" -delete | |
| find dist/ -name "*-linux_aarch64.whl" -delete | |
| echo "" | |
| echo "Remaining files for PyPI:" | |
| ls -la dist/ | |
| echo "" | |
| echo "Total PyPI artifacts: $(find dist -type f \( -name "*.whl" -o -name "*.tar.gz" \) | wc -l)" | |
| - name: Check if version already exists on PyPI | |
| id: pypi_check | |
| run: | | |
| # Extract version from release name (v25.9.1 -> 25.9.1) | |
| VERSION="${RELEASE_NAME#v}" | |
| echo "Checking if autobahn version ${VERSION} exists on PyPI..." | |
| # Query PyPI JSON API | |
| HTTP_CODE=$(curl -s -o /tmp/pypi_response.json -w "%{http_code}" "https://pypi.org/pypi/autobahn/${VERSION}/json") | |
| if [ "${HTTP_CODE}" = "200" ]; then | |
| echo "⚠️ WARNING: Version ${VERSION} already exists on PyPI!" | |
| echo "⚠️ PyPI does not allow re-uploading the same version." | |
| echo "⚠️ Skipping PyPI upload to avoid error." | |
| echo "exists=true" >> $GITHUB_OUTPUT | |
| elif [ "${HTTP_CODE}" = "404" ]; then | |
| echo "✅ Version ${VERSION} does not exist on PyPI yet - proceeding with upload" | |
| echo "exists=false" >> $GITHUB_OUTPUT | |
| else | |
| echo "⚠️ Unexpected HTTP code ${HTTP_CODE} from PyPI API" | |
| echo "⚠️ Response:" | |
| cat /tmp/pypi_response.json || echo "(no response)" | |
| echo "⚠️ Proceeding with upload anyway (will fail if version exists)" | |
| echo "exists=false" >> $GITHUB_OUTPUT | |
| fi | |
| rm -f /tmp/pypi_response.json | |
| - name: Publish to PyPI | |
| if: steps.pypi_check.outputs.exists == 'false' | |
| uses: pypa/gh-action-pypi-publish@release/v1 | |
| with: | |
| # Uses trusted publishing - no API token needed | |
| # Configure at: https://pypi.org/manage/account/publishing/ | |
| verbose: true | |
| - name: Trigger RTD build | |
| env: | |
| RTD_TOKEN: ${{ secrets.RTD_TOKEN }} | |
| run: | | |
| if [ -n "$RTD_TOKEN" ]; then | |
| echo "Triggering Read the Docs build for autobahn..." | |
| curl -X POST \ | |
| -H "Authorization: Token $RTD_TOKEN" \ | |
| "https://readthedocs.org/api/v3/projects/autobahn/versions/latest/builds/" | |
| echo "✅ RTD build triggered successfully" | |
| else | |
| echo "⚠️ RTD_TOKEN not configured, skipping RTD build trigger" | |
| fi | |
| # Mark release as complete (for release-post-comment to detect) | |
| mark-release-complete: | |
| name: Mark Release Complete | |
| needs: [identifiers, release-development, release-nightly, release-stable] | |
| if: always() && needs.identifiers.result == 'success' && (needs.release-development.result == 'success' || needs.release-nightly.result == 'success' || needs.release-stable.result == 'success') | |
| runs-on: ubuntu-latest | |
| env: | |
| RELEASE_TYPE: ${{ needs.identifiers.outputs.release_type }} | |
| RELEASE_NAME: ${{ needs.identifiers.outputs.release_name }} | |
| steps: | |
| - name: Create completion marker | |
| run: | | |
| echo "==> Creating release completion marker..." | |
| echo "Release: $RELEASE_NAME" | |
| echo "Type: $RELEASE_TYPE" | |
| # Create completion marker JSON with metadata | |
| cat > release-complete.json <<EOF | |
| { | |
| "release_name": "$RELEASE_NAME", | |
| "release_type": "$RELEASE_TYPE", | |
| "completed_at": "$(date -u +'%Y-%m-%dT%H:%M:%SZ')", | |
| "workflow_run_id": "${{ github.run_id }}", | |
| "commit_sha": "${{ needs.identifiers.outputs.head_sha }}", | |
| "development_status": "${{ needs.release-development.result }}", | |
| "nightly_status": "${{ needs.release-nightly.result }}", | |
| "stable_status": "${{ needs.release-stable.result }}" | |
| } | |
| EOF | |
| echo "" | |
| echo "Completion marker contents:" | |
| cat release-complete.json | |
| - name: Upload completion marker to GitHub Release | |
| env: | |
| GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} | |
| run: | | |
| echo "==> Uploading completion marker to release $RELEASE_NAME..." | |
| # Upload the completion marker as a release asset | |
| gh release upload "$RELEASE_NAME" \ | |
| release-complete.json \ | |
| --repo "$GITHUB_REPOSITORY" \ | |
| --clobber | |
| echo "✅ Completion marker uploaded successfully" |