Don't attempt crash loop detection or automatic restarting on crash when running in secure mode #3528

Workflow file for this run

name: CI/CD
on:
push:
branches:
- master
- beta
- rc
- 'try-**'
tags:
- 'release-**'
pull_request:
branches:
- master
- beta
- rc
- 'try-**'
workflow_dispatch:
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
env:
START_BUILD_NUMBER: ${{ vars.BUILD_NUMBER_OFFSET || 0 }}
pullRequestNumber: ${{ github.event_name == 'pull_request' && github.event.number || 0 }}
scons_publisher: ${{ vars.PUBLISHER_NAME || github.repository_owner }}
# Don't send telemetry to Microsoft when using MSVC tooling, avoiding an unnecessary PowerShell script invocation.
VSCMD_SKIP_SENDTELEMETRY: 1
# Cache details about available MSVC tooling for subsequent SCons invocations to the provided file.
SCONS_CACHE_MSVC_CONFIG: ".scons_msvc_cache.json"
defaultRunner: 'windows-2025'
supportedRunners: '["windows-2022", "windows-2025"]'
defaultArch: x64
supportedArchitectures: '["x64"]'
defaultPythonVersion: '3.13.9'
supportedPythonVersions: '["3.13.9"]'
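# Note: expressions of the form `${{ vars.X || default }}` fall back to the default when the variable
# is unset or empty, and `${{ condition && a || b }}` acts like a ternary (with the caveat that `b` is
# also used when `a` is falsy). For illustration, with no BUILD_NUMBER_OFFSET variable defined,
# START_BUILD_NUMBER evaluates to 0, and on a non-pull_request event pullRequestNumber evaluates to 0.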
jobs:
matrix:
name: Set up matrix
runs-on: ubuntu-latest
outputs:
defaultRunner: ${{ steps.setMatrix.outputs.defaultRunner }}
supportedRunners: ${{ steps.setMatrix.outputs.supportedRunners }}
supportedArchitectures: ${{ steps.setMatrix.outputs.supportedArchitectures }}
supportedPythonVersions: ${{ steps.setMatrix.outputs.supportedPythonVersions }}
steps:
- name: Set architecture and python version
id: setMatrix
run: |
echo "defaultRunner=${{ env.defaultRunner }}" >> "$GITHUB_OUTPUT"
echo "supportedRunners=$(jq -c <<< '${{ env.supportedRunners }}')" >> "$GITHUB_OUTPUT"
echo "supportedArchitectures=$(jq -c <<< '${{ env.supportedArchitectures }}')" >> "$GITHUB_OUTPUT"
echo "supportedPythonVersions=$(jq -c <<< '${{ env.supportedPythonVersions }}')" >> "$GITHUB_OUTPUT"
buildNVDA:
name: Build NVDA
needs: matrix
runs-on: ${{ needs.matrix.outputs.defaultRunner }}
timeout-minutes: 15
strategy:
fail-fast: false
matrix:
arch: ${{ fromJson(needs.matrix.outputs.supportedArchitectures) }}
pythonVersion: ${{ fromJson(needs.matrix.outputs.supportedPythonVersions) }}
env:
uv-arch: ${{ matrix.arch == 'x64' && 'x86_64' || 'x86' }}
outputs:
# Note: each output variable is overwritten by the last job in the matrix.
# This is not a problem for these variables as they should be the same across jobs.
version: ${{ steps.releaseInfo.outputs.VERSION }}
versionType: ${{ steps.releaseInfo.outputs.VERSION_TYPE }}
APIVersion: ${{ steps.releaseInfo.outputs.NVDA_API_VERSION }}
APIBackCompat: ${{ steps.releaseInfo.outputs.NVDA_API_COMPAT_TO }}
steps:
- name: Checkout NVDA
uses: actions/checkout@v5
with:
submodules: true
- name: Install Python
uses: actions/setup-python@v6
with:
python-version: ${{ matrix.pythonVersion }}
architecture: ${{ matrix.arch }}
- name: Install the latest version of uv
uses: astral-sh/setup-uv@v7
with:
python-version: cpython-${{ matrix.pythonVersion }}-windows-${{ env.uv-arch }}-none
- name: Set version variables
run: ci/scripts/setBuildVersionVars.ps1
- name: Set scons args
run: ci/scripts/setSconsArgs.ps1
- name: Set cache key for SCons MSVC Cache
shell: bash
run: echo "SCONS_CACHE_KEY=${RUNNER_OS}-${ImageVersion}" >> $GITHUB_ENV
# On GitHub Actions, it can take more than two minutes for SCons to collect information about MSVC tooling, significantly impacting build performance.
# This information is static for the current GitHub Actions runner image.
# Therefore, cache it with a key scoped to the current image version so that subsequent workflow runs are much faster.
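# For illustration, on a hosted runner the resulting key looks like "Windows-<ImageVersion>",
# e.g. "Windows-20260101.1.0" (the image version shown here is an illustrative value).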
- name: SCons MSVC Cache
uses: actions/cache@v4
with:
path: ${{ env.SCONS_CACHE_MSVC_CONFIG }}
key: ${{ env.SCONS_CACHE_KEY }}
- name: Prepare source code
shell: cmd
run: scons source %sconsArgs% %sconsCores%
- name: Prepare for tests
run: ci/scripts/tests/beforeTests.ps1
- name: Cache scons build
uses: actions/cache/save@v4
with:
path: .
key: ${{ github.ref }}-${{ github.run_id }}-${{ matrix.pythonVersion }}-${{ matrix.arch }}
- name: Store release metadata information
id: releaseInfo
run: |
Write-Output VERSION=$env:version >> $env:GITHUB_OUTPUT
Write-Output VERSION_TYPE=$env:versionType >> $env:GITHUB_OUTPUT
Write-Output NVDA_API_VERSION=$(python -c "import sys; sys.path.append('source'); from addonAPIVersion import CURRENT; print('{}.{}.{}'.format(*CURRENT))") >> $env:GITHUB_OUTPUT
Write-Output NVDA_API_COMPAT_TO=$(python -c "import sys; sys.path.append('source'); from addonAPIVersion import BACK_COMPAT_TO; print('{}.{}.{}'.format(*BACK_COMPAT_TO))") >> $env:GITHUB_OUTPUT
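# For illustration (all values below are made up), the step above appends lines of the form:
#   VERSION=<NVDA version>
#   VERSION_TYPE=<version type>
#   NVDA_API_VERSION=2026.1.0
#   NVDA_API_COMPAT_TO=2024.1.0
# where the API versions come from the CURRENT and BACK_COMPAT_TO tuples in source/addonAPIVersion.py.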
typeCheck:
name: Check types with Pyright
runs-on: ${{ needs.matrix.outputs.defaultRunner }}
needs: [matrix, buildNVDA]
steps:
- name: Checkout cached build
uses: actions/cache/restore@v4
with:
path: .
key: ${{ github.ref }}-${{ github.run_id }}-${{ env.defaultPythonVersion }}-${{ env.defaultArch }}
fail-on-cache-miss: true
- name: Install Python
uses: actions/setup-python@v6
with:
python-version: ${{ env.defaultPythonVersion }}
architecture: ${{ env.defaultArch }}
- name: Install the latest version of uv
uses: astral-sh/setup-uv@v7
- name: Static type analysis
run: ci/scripts/tests/typeCheck.ps1
checkPo:
name: Check po files for errors
runs-on: ${{ needs.matrix.outputs.defaultRunner }}
needs: matrix
steps:
- name: Checkout repository
uses: actions/checkout@v5
with:
# Include gettext
submodules: true
- name: Set up python
uses: actions/setup-python@v6
with:
python-version: ${{ env.defaultPythonVersion }}
architecture: ${{ env.defaultArch }}
- name: Install the latest version of uv
uses: astral-sh/setup-uv@v7
- name: Run pre-commit
run: |
# Ensure uv environment is up to date.
# If the uv environment is outdated, running pre-commit will trigger uv to generate a uv.lock file,
# which causes checkPo to fail without clear errors because pre-commit fails when files are changed during its run.
uv run pre-commit run uv-lock --all-files
# Run pre-commit on the translations
uv run pre-commit run checkPo --all-files
- name: Add job summary
if: ${{ failure() }}
shell: bash
run: |
echo "PO files contain errors. Please fix them before merging." >> $GITHUB_STEP_SUMMARY
checkPot:
name: Check translator comments
runs-on: ${{ needs.matrix.outputs.defaultRunner }}
needs: [matrix, buildNVDA]
steps:
- name: Checkout cached build
uses: actions/cache/restore@v4
with:
path: .
key: ${{ github.ref }}-${{ github.run_id }}-${{ env.defaultPythonVersion }}-${{ env.defaultArch }}
fail-on-cache-miss: true
- name: Install Python
uses: actions/setup-python@v6
with:
python-version: ${{ env.defaultPythonVersion }}
architecture: ${{ env.defaultArch }}
- name: Install the latest version of uv
uses: astral-sh/setup-uv@v7
- name: Check comments for translators
run: ci/scripts/tests/translationCheck.ps1
- name: Upload artifact
if: ${{ success() || failure() }}
uses: actions/upload-artifact@v5
with:
name: makePot results
path: |
output/nvda.pot
output/potSourceFileList.txt
licenseCheck:
name: Check license compatibility of dependencies
runs-on: ${{ needs.matrix.outputs.defaultRunner }}
needs: [matrix, buildNVDA]
steps:
- name: Checkout cached build
uses: actions/cache/restore@v4
with:
path: .
key: ${{ github.ref }}-${{ github.run_id }}-${{ env.defaultPythonVersion }}-${{ env.defaultArch }}
fail-on-cache-miss: true
- name: Install Python
uses: actions/setup-python@v6
with:
python-version: ${{ env.defaultPythonVersion }}
architecture: ${{ env.defaultArch }}
- name: Install the latest version of uv
uses: astral-sh/setup-uv@v7
- name: License check
run: ci/scripts/tests/licenseCheck.ps1
unitTests:
name: Run unit tests
runs-on: ${{ needs.matrix.outputs.defaultRunner }}
needs: [matrix, buildNVDA]
strategy:
fail-fast: false
matrix:
arch: ${{ fromJson(needs.matrix.outputs.supportedArchitectures) }}
pythonVersion: ${{ fromJson(needs.matrix.outputs.supportedPythonVersions) }}
env:
uv-arch: ${{ matrix.arch == 'x64' && 'x86_64' || 'x86' }}
steps:
- name: Checkout cached build
uses: actions/cache/restore@v4
with:
path: .
key: ${{ github.ref }}-${{ github.run_id }}-${{ matrix.pythonVersion }}-${{ matrix.arch }}
fail-on-cache-miss: true
- name: Install Python
uses: actions/setup-python@v6
with:
python-version: ${{ matrix.pythonVersion }}
architecture: ${{ matrix.arch }}
- name: Install the latest version of uv
uses: astral-sh/setup-uv@v7
with:
python-version: cpython-${{ matrix.pythonVersion }}-windows-${{ env.uv-arch }}-none
- name: Run unit tests
run: ci/scripts/tests/unitTests.ps1
- name: Replace relative paths in unit test results
if: ${{ failure() }}
# JUnit reports fail on these relative paths
shell: bash
run: sed -i 's|file="..\\|file="|g' testOutput/unit/unitTests.xml
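# For illustration, the sed above rewrites attributes such as file="..\source\example.py"
# to file="source\example.py", which the JUnit report action can then handle.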
- name: Upload artifact
if: ${{ failure() }}
uses: actions/upload-artifact@v5
with:
name: Unit tests results (${{ matrix.pythonVersion }}, ${{ matrix.arch }})
path: testOutput/unit/unitTests.xml
- name: Publish unit test report
uses: mikepenz/action-junit-report@v5
if: ${{ failure() }}
with:
check_name: Unit tests (${{ matrix.pythonVersion }}, ${{ matrix.arch }})
detailed_summary: true
annotate_only: true
report_paths: testOutput/unit/unitTests.xml
crowdinUpload:
name: Upload translations to Crowdin
runs-on: ${{ needs.matrix.outputs.defaultRunner }}
needs: [matrix, buildNVDA, checkPot]
if: ${{ github.event_name == 'push' && github.ref == 'refs/heads/beta' && vars.CROWDIN_PROJECT_ID }}
steps:
- name: Checkout cached build
uses: actions/cache/restore@v4
with:
path: .
key: ${{ github.ref }}-${{ github.run_id }}-${{ env.defaultPythonVersion }}-${{ env.defaultArch }}
fail-on-cache-miss: true
- name: Get makePot results
uses: actions/download-artifact@v6
with:
name: makePot results
path: output
- name: Install Python
uses: actions/setup-python@v6
with:
python-version: ${{ env.defaultPythonVersion }}
architecture: ${{ env.defaultArch }}
- name: Install the latest version of uv
uses: astral-sh/setup-uv@v7
- name: Upload translations to Crowdin
env:
crowdinProjectID: ${{ vars.CROWDIN_PROJECT_ID }}
crowdinAuthToken: ${{ secrets.CROWDIN_AUTH_TOKEN }}
run: uv run --with requests --directory ${{ github.workspace }} ci\scripts\crowdinSync.py uploadSourceFile 2 output\nvda.pot 2>&1
createLauncher:
name: Create launcher
runs-on: ${{ needs.matrix.outputs.defaultRunner }}
needs: [matrix, buildNVDA]
timeout-minutes: 30
strategy:
fail-fast: false
matrix:
arch: ${{ fromJson(needs.matrix.outputs.supportedArchitectures) }}
pythonVersion: ${{ fromJson(needs.matrix.outputs.supportedPythonVersions) }}
env:
uv-arch: ${{ matrix.arch == 'x64' && 'x86_64' || 'x86' }}
outputs:
# Note: each output variable is overwritten by the last job in the matrix.
# So we only set the output variables for the default architecture and Python version.
launcherArtifactId: ${{ steps.addJobSummary.outputs.artifactId }}
launcherSha256: ${{ steps.addJobSummary.outputs.SHA256 }}
steps:
- name: Checkout cached build
uses: actions/cache/restore@v4
with:
path: .
key: ${{ github.ref }}-${{ github.run_id }}-${{ matrix.pythonVersion }}-${{ matrix.arch }}
fail-on-cache-miss: true
- name: Install Python
uses: actions/setup-python@v6
with:
python-version: ${{ matrix.pythonVersion }}
architecture: ${{ matrix.arch }}
- name: Install the latest version of uv
uses: astral-sh/setup-uv@v7
with:
python-version: cpython-${{ matrix.pythonVersion }}-windows-${{ env.uv-arch }}-none
- name: Set version variables
run: ci/scripts/setBuildVersionVars.ps1
- name: Set scons args
run: ci/scripts/setSconsArgs.ps1
env:
apiSigningToken: ${{ github.event_name == 'push' && secrets.API_SIGNING_TOKEN || '' }}
- name: Build scons docs targets
shell: cmd
run: scons %sconsDocsOutTargets% %sconsArgs% %sconsCores%
# Build the launcher separately, as it can't safely be generated in parallel with the docs scons targets (#18867)
- name: Create launcher and controller client
shell: cmd
run: |
PowerShell -Command "Set-ExecutionPolicy Bypass"
PowerShell -Command "Set-PSRepository PSGallery -InstallationPolicy Trusted"
PowerShell -Command "Install-Module -Name SignPath -Force"
scons %sconsLauncherOutTargets% %sconsArgs% %sconsCores%
- name: Upload launcher
id: uploadLauncher
uses: actions/upload-artifact@v5
with:
name: NVDA launcher (${{ matrix.pythonVersion }}, ${{ matrix.arch }})
path: |
output/nvda*.exe
output/nvda*controllerClient.zip
- name: Upload documentation artifacts
uses: actions/upload-artifact@v5
id: uploadBuildArtifacts
with:
name: Documentation files and packaging metadata (${{ matrix.pythonVersion }}, ${{ matrix.arch }})
path: |
output/*.html
output/*.css
output/library_modules.txt
output/installed_python_packages.txt
- name: Add job summary
id: addJobSummary
shell: bash
run: |
NVDA_EXE_NAME=$(ls output/nvda*.exe | head -n 1)
SHA256=$(sha256sum "$NVDA_EXE_NAME" | cut -d ' ' -f 1)
# If this matrix job uses the default architecture and Python version, save the build details to the output variables
if [ "${{ matrix.arch == env.defaultArch && matrix.pythonVersion == env.defaultPythonVersion }}" == "true" ]; then
echo SHA256=$SHA256 >> $GITHUB_OUTPUT
echo artifactId=${{ steps.uploadLauncher.outputs.artifact-id }} >> $GITHUB_OUTPUT
fi
echo "* [Download the NVDA launcher (${{ matrix.pythonVersion }}, ${{ matrix.arch }})](${{ steps.uploadLauncher.outputs.artifact-url }})" >> $GITHUB_STEP_SUMMARY
echo " * SHA256 of launcher exe file: \`$SHA256\`" >> $GITHUB_STEP_SUMMARY
echo "* [Download the documentation and other build artifacts](${{ steps.uploadBuildArtifacts.outputs.artifact-url }})" >> $GITHUB_STEP_SUMMARY
echo " * User guide, change log, developer guide" >> $GITHUB_STEP_SUMMARY
echo " * Packaging metadata: library modules, installed python packages" >> $GITHUB_STEP_SUMMARY
systemTests:
name: Run system tests
runs-on: ${{ matrix.runner }}
needs: [matrix, createLauncher]
timeout-minutes: 10
strategy:
fail-fast: false
matrix:
# To skip the tests and only run the install, replace this list with [excluded_from_build]
testSuite:
- installer
- startupShutdown
- symbols
- vscode
- imageDescriptions
- chrome_annotations
- chrome_list
- chrome_table
- chrome_language
- chrome_roleDescription
- chrome_misc_aria
- chrome_misc
runner: ${{ fromJson(needs.matrix.outputs.supportedRunners) }}
arch: ${{ fromJson(needs.matrix.outputs.supportedArchitectures) }}
pythonVersion: ${{ fromJson(needs.matrix.outputs.supportedPythonVersions) }}
# A bug in Notepad on the windows-2022 runner prevents NVDA from focusing it.
# This causes our symbol pronunciation tests to fail on that runner.
exclude:
- runner: windows-2022
testSuite: symbols
env:
uv-arch: ${{ matrix.arch == 'x64' && 'x86_64' || 'x86' }}
steps:
- name: Checkout cached build
uses: actions/cache/restore@v4
with:
path: .
key: ${{ github.ref }}-${{ github.run_id }}-${{ matrix.pythonVersion }}-${{ matrix.arch }}
fail-on-cache-miss: true
- name: Install Python
uses: actions/setup-python@v6
with:
python-version: ${{ matrix.pythonVersion }}
architecture: ${{ matrix.arch }}
- name: Install the latest version of uv
uses: astral-sh/setup-uv@v7
with:
python-version: cpython-${{ matrix.pythonVersion }}-windows-${{ env.uv-arch }}-none
- name: Get NVDA launcher
id: getLauncher
uses: actions/download-artifact@v6
with:
name: NVDA launcher (${{ matrix.pythonVersion }}, ${{ matrix.arch }})
path: output
- name: Install NVDA
run: ci/scripts/installNVDA.ps1
env:
nvdaLauncherDir: ${{ steps.getLauncher.outputs.download-path }}
- name: Download and install Visual Studio Code
if: ${{ matrix.testSuite == 'vscode' }}
run: choco install -y vscode
- name: Run system tests
run: ci/scripts/tests/systemTests.ps1
env:
nvdaLauncherDir: ${{ steps.getLauncher.outputs.download-path }}
INCLUDE_SYSTEM_TEST_TAGS: ${{ matrix.testSuite }}
- name: Upload system tests results
if: ${{ failure() }}
uses: actions/upload-artifact@v5
with:
name: "System tests results (${{ matrix.testSuite }}) (${{ matrix.runner }}, ${{ matrix.pythonVersion }}, ${{ matrix.arch }})"
path: |
testOutput/system
testOutput/install
!testOutput/install/nvda_install_temp.log
- name: Publish system test report
uses: mikepenz/action-junit-report@v5
if: ${{ failure() }}
with:
check_name: "System tests (${{ matrix.testSuite }}) (${{ matrix.runner }}, ${{ matrix.pythonVersion }}, ${{ matrix.arch }})"
detailed_summary: true
annotate_only: true
report_paths: testOutput/system/systemTests.xml
createSymbols:
name: Create symbols
runs-on: ${{ needs.matrix.outputs.defaultRunner }}
needs: [matrix, buildNVDA]
strategy:
fail-fast: false
matrix:
arch: ${{ fromJson(needs.matrix.outputs.supportedArchitectures) }}
pythonVersion: ${{ fromJson(needs.matrix.outputs.supportedPythonVersions) }}
outputs:
# Note: each output variable is overwritten by the last job in the matrix.
# So we only set the output variables for the default architecture and Python version.
symbolsArtifactId: ${{ steps.defaultArtifact.outputs.artifactId }}
steps:
- name: Checkout cached build
uses: actions/cache/restore@v4
with:
path: .
key: ${{ github.ref }}-${{ github.run_id }}-${{ matrix.pythonVersion }}-${{ matrix.arch }}
fail-on-cache-miss: true
- name: Install Windows SDK
# Installs symstore.exe to the expected location.
run: choco install -y windows-sdk-10.1
- name: Create build symbols
env:
symStore: C:\Program Files (x86)\Windows Kits\10\Debuggers\x64\symstore.exe
run: ci/scripts/buildSymbolStore.ps1
- name: Upload symbols artifact
id: uploadArtifact
uses: actions/upload-artifact@v5
with:
name: Symbols (${{ matrix.pythonVersion }}, ${{ matrix.arch }})
path: output/symbols.zip
- name: Save artifact metadata
shell: bash
id: defaultArtifact
if: ${{ matrix.arch == env.defaultArch && matrix.pythonVersion == env.defaultPythonVersion }}
run: echo artifactId=${{ steps.uploadArtifact.outputs.artifact-id }} >> $GITHUB_OUTPUT
allTestsPass:
# This is a no-op job.
# Rulesets require every job that should block a PR/push to be listed individually.
# This job acts as a single general status check for protecting branches/tags, so rulesets only need to reference one job.
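# For illustration, alls-green receives the serialized needs context, roughly of the form:
#   {"buildNVDA": {"result": "success", "outputs": {...}}, "unitTests": {"result": "success", ...}, ...}
# and fails this job if any required job did not succeed, giving rulesets a single check to require.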
name: Check if all tests pass
runs-on: ubuntu-latest
if: always()
needs: [buildNVDA, typeCheck, checkPo, checkPot, licenseCheck, unitTests, createLauncher, systemTests, createSymbols]
steps:
- name: Check if all tests pass
uses: re-actors/alls-green@release/v1
with:
jobs: ${{ toJSON(needs) }}
uploadSymbols:
name: Upload symbols
runs-on: ${{ needs.matrix.outputs.defaultRunner }}
needs: [matrix, buildNVDA]
if: ${{ github.event_name == 'push' && vars.feature_uploadSymbolsToMozilla }}
steps:
- name: Checkout cached build
uses: actions/cache/restore@v4
with:
path: .
key: ${{ github.ref }}-${{ github.run_id }}-${{ env.defaultPythonVersion }}-${{ env.defaultArch }}
fail-on-cache-miss: true
- name: Install Python
uses: actions/setup-python@v6
with:
python-version: ${{ env.defaultPythonVersion }}
architecture: ${{ env.defaultArch }}
- name: Download and extract dump_syms
shell: bash
run: |
curl -L -o dump_syms.zip https://github.com/mozilla/dump_syms/releases/download/v2.3.5/dump_syms-x86_64-pc-windows-msvc.zip
7z x dump_syms.zip dump_syms.exe
- name: Install the latest version of uv
uses: astral-sh/setup-uv@v7
- name: Upload symbols to Mozilla
continue-on-error: true
run: uv run --with requests --directory ${{ github.workspace }} ci\scripts\mozillaSyms.py
env:
mozillaSymsAuthToken: ${{ secrets.MOZILLA_SYMS_TOKEN }}
- name: Warn on failure
if: ${{ failure() }}
shell: bash
run: |
echo "Warning: Uploading symbols to Mozilla failed." >> $GITHUB_STEP_SUMMARY
cleanupCache:
# Cache deletion cannot run for pull_request events on PRs from forks,
# as those PRs do not receive write permissions from pull_request events.
# We could switch the trigger to pull_request_target instead,
# but that might introduce security risks, as it grants fork PRs greater permissions.
# In these cases we simply let GitHub handle cache deletion:
# caches are deleted automatically after 7 days or when storage limits are hit.
name: Cleanup cache
permissions:
actions: write
needs: [matrix, allTestsPass, crowdinUpload, uploadSymbols]
if: ${{ github.event_name == 'push' || github.event.pull_request.head.repo.owner == github.repository_owner }}
runs-on: ubuntu-latest
strategy:
fail-fast: false
matrix:
arch: ${{ fromJson(needs.matrix.outputs.supportedArchitectures) }}
pythonVersion: ${{ fromJson(needs.matrix.outputs.supportedPythonVersions) }}
steps:
- name: Cleanup cache
shell: bash
run: gh cache delete ${{ github.ref }}-${{ github.run_id }}-${{ matrix.pythonVersion }}-${{ matrix.arch }}
env:
GH_TOKEN: ${{ github.token }}
GH_REPO: ${{ github.repository }}
deploySnapshot:
name: Deploy NVDA snapshot
permissions:
contents: write
deployments: write
runs-on: ubuntu-latest
needs: [buildNVDA, createLauncher, createSymbols, allTestsPass]
if: ${{ github.event_name == 'push' && !startsWith(github.ref_name, 'release-') }}
concurrency:
group: snapshot-${{ github.ref_name }}
cancel-in-progress: true
environment:
name: snapshot
steps:
- name: Get NVDA launcher
uses: actions/download-artifact@v6
with:
name: NVDA launcher (${{ env.defaultPythonVersion }}, ${{ env.defaultArch }})
path: output
- name: Deploy snapshot
shell: bash
id: deploySnapshot
env:
GH_TOKEN: ${{ github.token }}
run: |
NVDA_EXE_NAME=$(ls output/nvda*.exe | head -n 1)
NVDA_HASH=$(sha1sum "$NVDA_EXE_NAME" | cut -d ' ' -f 1)
gh api \
--method POST \
-H "Accept: application/vnd.github+json" \
-H "X-GitHub-Api-Version: 2022-11-28" \
/repos/${{ github.repository }}/deployments \
-f "task=release" \
-f "description=Release of $NVDA_EXE_NAME" \
-f "ref=${{ github.ref_name }}" \
-F "auto_merge=false" \
-f "environment=snapshot" \
-f "required_contexts[]" \
-f "payload[NVDA_HASH]=$NVDA_HASH" \
-f "payload[NVDA_ARTIFACT_ID]=${{ needs.createLauncher.outputs.launcherArtifactId }}" \
-f "payload[SYMBOLS_ARTIFACT_ID]=${{ needs.createSymbols.outputs.symbolsArtifactId }}" \
-f "payload[NVDA_VERSION]=${{ needs.buildNVDA.outputs.version }}" \
-f "payload[NVDA_VERSION_TYPE]=${{ needs.buildNVDA.outputs.versionType }}" \
-f "payload[NVDA_API_VERSION]=${{ needs.buildNVDA.outputs.APIVersion }}" \
-f "payload[NVDA_API_COMPAT_TO]=${{ needs.buildNVDA.outputs.APIBackCompat }}"
release:
name: Release NVDA
permissions:
contents: write
deployments: write
discussions: write
runs-on: ubuntu-latest
needs: [buildNVDA, createLauncher, createSymbols, allTestsPass]
if: startsWith(github.ref_name, 'release-')
concurrency:
group: release
cancel-in-progress: true
environment:
name: production
steps:
- name: Get normalized tag names
id: getReleaseNotes
shell: bash
run: |
echo RELEASE_NAME=$GITHUB_REF_NAME | sed 's/release-//g' >> $GITHUB_OUTPUT
echo NORMALIZED_TAG_NAME=$GITHUB_REF_NAME | sed 's/\./-/g' | sed 's/release-//g' >> $GITHUB_OUTPUT
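# For illustration, for a tag like release-2026.1 the step above yields:
#   RELEASE_NAME=2026.1
#   NORMALIZED_TAG_NAME=2026-1
# (dots become dashes so the tag can be used in the release blog post URL referenced below).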
- name: Get NVDA launcher
uses: actions/download-artifact@v6
with:
name: NVDA launcher (${{ env.defaultPythonVersion }}, ${{ env.defaultArch }})
path: output
- name: VirusTotal Scan
id: virusTotal
uses: crazy-max/ghaction-virustotal@v4
with:
vt_api_key: ${{ secrets.VT_API_KEY }}
files: output/nvda*.exe
- name: Get normalized VT url
id: getVTUrl
shell: bash
run: |
vtUrl=$(echo ${{ steps.virusTotal.outputs.analysis }} | sed -E 's/([^=]*)=([^,]*).*/\2/')
echo VT_URL=$vtUrl >> $GITHUB_OUTPUT
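# For illustration, the analysis output is expected to be a comma-separated list of
# "<file>=<analysis URL>" entries; the sed above keeps only the URL of the first entry, e.g.
#   nvda_2026.1.exe=https://www.virustotal.com/... -> https://www.virustotal.com/...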
- name: Deploy release
shell: bash
id: deployRelease
env:
GH_TOKEN: ${{ github.token }}
run: |
NVDA_EXE_NAME=$(ls output/nvda*.exe | head -n 1)
NVDA_HASH=$(sha1sum "$NVDA_EXE_NAME" | cut -d ' ' -f 1)
gh api \
--method POST \
-H "Accept: application/vnd.github+json" \
-H "X-GitHub-Api-Version: 2022-11-28" \
/repos/${{ github.repository }}/deployments \
-f "task=release" \
-f "description=Release of $NVDA_EXE_NAME" \
-f "ref=${{ github.ref_name }}" \
-F "auto_merge=false" \
-f "environment=production" \
-f "required_contexts[]" \
-f "payload[NVDA_HASH]=$NVDA_HASH" \
-f "payload[NVDA_ARTIFACT_ID]=${{ needs.createLauncher.outputs.launcherArtifactId }}" \
-f "payload[SYMBOLS_ARTIFACT_ID]=${{ needs.createSymbols.outputs.symbolsArtifactId }}" \
-f "payload[NVDA_VERSION]=${{ needs.buildNVDA.outputs.version }}" \
-f "payload[NVDA_VERSION_TYPE]=${{ needs.buildNVDA.outputs.versionType }}" \
-f "payload[NVDA_API_VERSION]=${{ needs.buildNVDA.outputs.APIVersion }}" \
-f "payload[NVDA_API_COMPAT_TO]=${{ needs.buildNVDA.outputs.APIBackCompat }}"
- name: Create release notes
id: createReleaseNotes
shell: bash
run: |
echo "* Highlights can be found in the [release blog post](https://www.nvaccess.org/post/nvda-${{ steps.getReleaseNotes.outputs.NORMALIZED_TAG_NAME }}/)." >> /tmp/releaseNotes.md
echo "* [VirusTotal scan results](${{ steps.getVTUrl.outputs.VT_URL }})" >> /tmp/releaseNotes.md
echo "* SHA256 sum: \`${{ needs.createLauncher.outputs.launcherSha256 }}\`" >> /tmp/releaseNotes.md
- name: Publish pre-release
if: ${{ contains(github.ref_name, 'rc') || contains(github.ref_name, 'beta') }}
shell: bash
env:
GH_TOKEN: ${{ github.token }}
run: |
gh release create ${{ github.ref_name }} \
--repo ${{ github.repository }} \
--prerelease \
--title "${{ steps.getReleaseNotes.outputs.RELEASE_NAME }}" \
--notes-file /tmp/releaseNotes.md \
--verify-tag \
--discussion-category Releases
- name: Publish stable release
if: ${{ !contains(github.ref_name, 'rc') && !contains(github.ref_name, 'beta') }}
shell: bash
env:
GH_TOKEN: ${{ github.token }}
run: |
gh release create ${{ github.ref_name }} \
--repo ${{ github.repository }} \
--title "${{ steps.getReleaseNotes.outputs.RELEASE_NAME }}" \
--notes-file /tmp/releaseNotes.md \
--verify-tag \
--discussion-category Releases
- name: Upload exe to release assets
shell: bash
env:
GH_TOKEN: ${{ github.token }}
run: |
NVDA_EXE_NAME=$(ls output/nvda*.exe | head -n 1)
gh release upload ${{ github.ref_name }} \
--repo ${{ github.repository }} \
--clobber \
$NVDA_EXE_NAME#Installer