name: Test - Native GPU Runner

on:
  workflow_dispatch:
  push:
    paths:
      - ".github/workflows/test-native-gpu-runner.yaml"

jobs:
  test-2gpu-native:
    runs-on: g6-2gpu-native-runner
    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Job Info
        run: |
          echo "=== Native GPU Runner Test ==="
          echo "Runner: $(hostname)"
          echo "Timestamp: $(date -u)"

      - name: Check GPU Environment
        run: |
          echo "=== GPU Environment ==="
          echo "NVIDIA_VISIBLE_DEVICES: ${NVIDIA_VISIBLE_DEVICES:-not set}"
          echo "CUDA_VISIBLE_DEVICES: ${CUDA_VISIBLE_DEVICES:-not set}"

      - name: Check GPU Info
        run: |
          echo "=== GPU Information ==="
          nvidia-smi
          nvidia-smi -L
          nvidia-smi --query-gpu=index,name,uuid,memory.total,memory.used --format=csv

      - name: Check CUDA
        run: |
          echo "=== CUDA Info ==="
          nvcc --version || echo "nvcc not in PATH"
          ls -la /usr/local/cuda/bin/ || true
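
      # Optional sketch, not required by the checks above: a minimal
      # compile-and-run smoke test, assuming nvcc is on PATH and /tmp is
      # writable on the runner. Skips cleanly when nvcc is absent.
      - name: CUDA Compile Smoke Test
        run: |
          if ! command -v nvcc >/dev/null 2>&1; then
            echo "nvcc not available; skipping compile test"
            exit 0
          fi
          cat > /tmp/hello.cu <<'EOF'
          #include <cstdio>
          // Trivial kernel: a single device thread prints a message.
          __global__ void hello() { printf("hello from the GPU\n"); }
          int main() {
            hello<<<1, 1>>>();
            return cudaDeviceSynchronize() == cudaSuccess ? 0 : 1;
          }
          EOF
          nvcc -o /tmp/hello /tmp/hello.cu
          /tmp/hello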

      - name: Check Node Resources
        run: |
          echo "=== Node Information ==="
          echo "Hostname: $(hostname)"
          echo "CPU cores: $(nproc)"
          echo "Memory: $(free -h | grep Mem | awk '{print $2}')"
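
      # Optional sketch: turn the informational checks into an assertion.
      # The expected count of 2 is an assumption inferred from the
      # g6-2gpu-native-runner label, not stated elsewhere in this workflow.
      - name: Assert Expected GPU Count
        run: |
          count=$(nvidia-smi -L | wc -l)
          if [ "$count" -ne 2 ]; then
            echo "Expected 2 GPUs, found $count" >&2
            exit 1
          fi
          echo "GPU count OK: $count"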

  # Test parallel jobs to verify GPU isolation
  test-parallel-native:
    strategy:
      matrix:
        job_id: [1, 2]
    runs-on: g6-2gpu-native-runner
    steps:
      - name: Job Info
        run: |
          echo "=== Parallel Native GPU Test ==="
          echo "Job ID: ${{ matrix.job_id }}"
          echo "Hostname: $(hostname)"

      - name: Check GPU Allocation
        run: |
          echo "=== GPU Allocation Check ==="
          echo "NVIDIA_VISIBLE_DEVICES: ${NVIDIA_VISIBLE_DEVICES:-not set}"
          nvidia-smi -L
          nvidia-smi --query-gpu=index,uuid --format=csv

      - name: Simulate Workload
        run: |
          echo "=== Simulating GPU Workload ==="
          sleep 30
          echo "✅ Job ${{ matrix.job_id }} completed"
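
      # Optional sketch: persist each matrix job's GPU UUIDs as an artifact
      # so the two parallel runs can be compared for overlap afterwards;
      # with working isolation the two UUID sets should be disjoint.
      # Assumes actions/upload-artifact@v4 is available to this runner.
      - name: Record GPU UUIDs
        run: nvidia-smi --query-gpu=uuid --format=csv,noheader > gpu-uuids-${{ matrix.job_id }}.txt

      - name: Upload GPU UUIDs
        uses: actions/upload-artifact@v4
        with:
          name: gpu-uuids-${{ matrix.job_id }}
          path: gpu-uuids-${{ matrix.job_id }}.txt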