Skip to content

[tx] Add cuDNN flash attention #1

[tx] Add cuDNN flash attention

[tx] Add cuDNN flash attention #1

Workflow file for this run

---
# CI workflow for skyrl-tx GPU tests.
# Runs on a CPU GitHub runner; the actual GPU work is delegated to an
# Anyscale job (submitted via the anyscale CLI in the final step).
name: SkyRL-tx-GPU

# NOTE: `on` is a YAML 1.1 boolean-looking key; GitHub's loader accepts it
# (suppress yamllint `truthy` here if linting).
on:
  push:
    branches: [main]
    paths:
      - 'skyrl-tx/**'
      - '.github/workflows/gpu_skyrl_tx.yaml'
  pull_request:
    paths:
      - 'skyrl-tx/**'
      - '.github/workflows/gpu_skyrl_tx.yaml'
  # Allow manual runs from the Actions tab.
  workflow_dispatch:

# Least-privilege token: read code, write check results.
permissions:
  checks: write
  contents: read

# Cancel superseded runs for the same ref so GPU jobs aren't wasted.
concurrency:
  group: skyrl-tx-gpu-${{ github.workflow }}-${{ github.ref }}
  cancel-in-progress: true

jobs:
  skyrl_tx_gpu_tests:
    runs-on: ubuntu-latest
    defaults:
      run:
        shell: bash
        working-directory: ./skyrl-tx
    steps:
      - uses: actions/checkout@v4

      - name: Set up Python
        uses: actions/setup-python@v5
        with:
          python-version: '3.12'
          cache: 'pip'

      - name: Install the latest version of uv
        uses: astral-sh/setup-uv@v6
        with:
          # Makes the uv-managed venv active for subsequent steps.
          activate-environment: true

      - name: Install dependencies
        # Pinned CLI deps; the GPU test deps live in the Anyscale job config.
        run: uv pip install anyscale==0.24.79 typer==0.9.0

      - name: GPU tests
        env:
          ANYSCALE_CLI_TOKEN: ${{ secrets.ANYSCALE_CLI_TOKEN }}
          ANYSCALE_HOST: https://console.anyscale.com
        # Submit the GPU job, then block until it finishes (or times out).
        # Timeouts are in seconds on both commands.
        run: |
          anyscale job submit -f ci/anyscale_gpu_ci.yaml --timeout 10000
          anyscale job wait --cloud sky-anyscale-aws-us-east-1 --name skyrl-tx-gpu-ci --timeout 10000