# Add GPU inference test and CI workflow integration (#6)
# Workflow file for this run
---
# GPU test workflow: runs the pytest suite (gpu-marked tests) on a
# self-hosted GPU runner and uploads coverage to Codecov.
name: Pytest/GPU Workflow

# Trigger on pushes and PRs targeting main/develop. The explicit PR `types`
# list adds `ready_for_review` so converting a draft re-runs the checks.
on:  # yamllint disable-line rule:truthy
  push:
    branches: [main, develop]
  pull_request:
    branches: [main, develop]
    types: [opened, synchronize, reopened, ready_for_review]

# Least-privilege token: the job only needs to read repository contents.
permissions:
  contents: read

defaults:
  run:
    shell: bash

env:
  # Quoted so "3.12" stays a string (unquoted it parses as a float).
  PYTHON_VERSION: "3.12"
  # Let uv auto-select the torch wheel matching the runner's CUDA setup.
  UV_TORCH_BACKEND: "auto"

# One in-flight run per ref; superseded PR runs are cancelled, push runs
# are allowed to finish.
concurrency:
  group: pytest-gpu-test-${{ github.ref }}
  cancel-in-progress: ${{ github.event_name == 'pull_request' }}

jobs:
  run-gpu-tests:
    name: Testing
    # Skip draft PRs; always run on pushes.
    if: github.event_name == 'push' || !github.event.pull_request.draft
    timeout-minutes: 20
    # Self-hosted GPU runner label.
    runs-on: Roboflow-GPU-VM-Runner
    steps:
      # Sanity-check that the runner actually exposes a GPU.
      - name: 🖥️ Print GPU information
        run: nvidia-smi

      - name: 📥 Checkout the repository
        uses: actions/checkout@v6

      - name: 🐍 Install uv and set Python version
        uses: astral-sh/setup-uv@v7
        with:
          python-version: ${{ env.PYTHON_VERSION }}
          activate-environment: true

      # Editable install of the package plus the `tests` dependency group.
      - name: 🚀 Install Packages
        run: uv pip install -e . --group tests

      # Run only gpu-marked tests, 2 workers, with XML coverage output.
      - name: 🧪 Run the Test
        run: uv run pytest tests/ -n 2 -m gpu --cov=rfdetr_plus --cov-report=xml

      - name: Upload coverage to Codecov
        uses: codecov/codecov-action@v5
        with:
          token: ${{ secrets.CODECOV_TOKEN }}
          files: "coverage.xml"
          flags: gpu,${{ runner.os }},py${{ env.PYTHON_VERSION }}
          env_vars: OS,PYTHON
          name: codecov-umbrella
          # Coverage upload failures should not fail the CI run.
          fail_ci_if_error: false

      # Keep the self-hosted runner's uv cache from growing unbounded.
      - name: Minimize uv cache
        run: uv cache prune --ci