# fix(ee): use set(ex=) instead of setex() for license cache updates #10403
# Workflow file for this run
# NOTE(review): the two lines below were GitHub web-UI chrome captured with the file;
# kept verbatim as comments so the file parses as YAML.
# This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
# Learn more about bidirectional Unicode characters
name: External Dependency Unit Tests

# One run per ref: a newer run for the same workflow+branch cancels the older one.
concurrency:
  group: External-Dependency-Unit-Tests-${{ github.workflow }}-${{ github.head_ref || github.event.workflow_run.head_branch || github.run_id }}
  cancel-in-progress: true

on:
  merge_group:
  pull_request:
    branches: [main]
  push:
    tags:
      - "v*.*.*"

permissions:
  contents: read

env:
  # AWS credentials for S3-specific test
  S3_AWS_ACCESS_KEY_ID_FOR_TEST: ${{ secrets.S3_AWS_ACCESS_KEY_ID }}
  S3_AWS_SECRET_ACCESS_KEY_FOR_TEST: ${{ secrets.S3_AWS_SECRET_ACCESS_KEY }}
  # MinIO
  S3_ENDPOINT_URL: "http://localhost:9004"
  S3_AWS_ACCESS_KEY_ID: "minioadmin"
  S3_AWS_SECRET_ACCESS_KEY: "minioadmin"
  # Confluence
  CONFLUENCE_TEST_SPACE_URL: ${{ vars.CONFLUENCE_TEST_SPACE_URL }}
  CONFLUENCE_TEST_SPACE: ${{ vars.CONFLUENCE_TEST_SPACE }}
  CONFLUENCE_TEST_PAGE_ID: ${{ secrets.CONFLUENCE_TEST_PAGE_ID }}
  CONFLUENCE_USER_NAME: ${{ vars.CONFLUENCE_USER_NAME }}
  CONFLUENCE_ACCESS_TOKEN: ${{ secrets.CONFLUENCE_ACCESS_TOKEN }}
  CONFLUENCE_ACCESS_TOKEN_SCOPED: ${{ secrets.CONFLUENCE_ACCESS_TOKEN_SCOPED }}
  # Jira
  JIRA_ADMIN_API_TOKEN: ${{ secrets.JIRA_ADMIN_API_TOKEN }}
  # LLMs
  OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }}
  ANTHROPIC_API_KEY: ${{ secrets.ANTHROPIC_API_KEY }}
  VERTEX_CREDENTIALS: ${{ secrets.VERTEX_CREDENTIALS }}
  VERTEX_LOCATION: ${{ vars.VERTEX_LOCATION }}
  # Code Interpreter
  # TODO: debug why this is failing and enable
  CODE_INTERPRETER_BASE_URL: "http://localhost:8000"
  # OpenSearch
  OPENSEARCH_ADMIN_PASSWORD: "StrongPassword123!"
jobs:
  # Enumerates the per-directory test matrix consumed by the main test job.
  discover-test-dirs:
    # NOTE: Github-hosted runners have about 20s faster queue times and are preferred here.
    runs-on: ubuntu-slim
    timeout-minutes: 45
    outputs:
      test-dirs: ${{ steps.set-matrix.outputs.test-dirs }}
    steps:
      - name: Checkout code
        uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # ratchet:actions/checkout@v6
        with:
          persist-credentials: false
      - name: Discover test directories
        id: set-matrix
        run: |
          # Find all subdirectories in backend/tests/external_dependency_unit
          # and emit them as a compact JSON array for the matrix expansion.
          dirs=$(find backend/tests/external_dependency_unit -mindepth 1 -maxdepth 1 -type d -exec basename {} \; | sort | jq -R -s -c 'split("\n")[:-1]')
          echo "test-dirs=$dirs" >> "$GITHUB_OUTPUT"
| external-dependency-unit-tests: | |
| needs: discover-test-dirs | |
| # Use larger runner with more resources for Vespa | |
| runs-on: | |
| - runs-on | |
| - runner=2cpu-linux-arm64 | |
| - ${{ format('run-id={0}-external-dependency-unit-tests-job-{1}', github.run_id, strategy['job-index']) }} | |
| - extras=s3-cache | |
| timeout-minutes: 45 | |
| strategy: | |
| fail-fast: false | |
| matrix: | |
| test-dir: ${{ fromJson(needs.discover-test-dirs.outputs.test-dirs) }} | |
| env: | |
| PYTHONPATH: ./backend | |
| MODEL_SERVER_HOST: "disabled" | |
| DISABLE_TELEMETRY: "true" | |
| steps: | |
| - uses: runs-on/action@cd2b598b0515d39d78c38a02d529db87d2196d1e # ratchet:runs-on/action@v2 | |
| - name: Checkout code | |
| uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # ratchet:actions/checkout@v6 | |
| with: | |
| persist-credentials: false | |
| - name: Setup Python and Install Dependencies | |
| uses: ./.github/actions/setup-python-and-install-dependencies | |
| with: | |
| requirements: | | |
| backend/requirements/default.txt | |
| backend/requirements/dev.txt | |
| backend/requirements/ee.txt | |
| - name: Setup Playwright | |
| uses: ./.github/actions/setup-playwright | |
| # needed for pulling Vespa, Redis, Postgres, and Minio images | |
| # otherwise, we hit the "Unauthenticated users" limit | |
| # https://docs.docker.com/docker-hub/usage/ | |
| - name: Login to Docker Hub | |
| uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # ratchet:docker/login-action@v3 | |
| with: | |
| username: ${{ secrets.DOCKER_USERNAME }} | |
| password: ${{ secrets.DOCKER_TOKEN }} | |
| - name: Create .env file for Docker Compose | |
| run: | | |
| cat <<EOF > deployment/docker_compose/.env | |
| CODE_INTERPRETER_BETA_ENABLED=true | |
| DISABLE_TELEMETRY=true | |
| EOF | |
| - name: Set up Standard Dependencies | |
| run: | | |
| cd deployment/docker_compose | |
| docker compose \ | |
| -f docker-compose.yml \ | |
| -f docker-compose.dev.yml \ | |
| -f docker-compose.opensearch.yml \ | |
| up -d \ | |
| minio \ | |
| relational_db \ | |
| cache \ | |
| index \ | |
| opensearch \ | |
| code-interpreter | |
| - name: Run migrations | |
| run: | | |
| cd backend | |
| # Run migrations to head | |
| alembic upgrade head | |
| alembic heads --verbose | |
| - name: Run Tests for ${{ matrix.test-dir }} | |
| shell: script -q -e -c "bash --noprofile --norc -eo pipefail {0}" | |
| env: | |
| TEST_DIR: ${{ matrix.test-dir }} | |
| run: | | |
| py.test \ | |
| --durations=8 \ | |
| -o junit_family=xunit2 \ | |
| -xv \ | |
| --ff \ | |
| backend/tests/external_dependency_unit/${TEST_DIR} | |
| - name: Collect Docker logs on failure | |
| if: failure() | |
| run: | | |
| mkdir -p docker-logs | |
| cd deployment/docker_compose | |
| # Get list of running containers | |
| containers=$(docker compose -f docker-compose.yml -f docker-compose.dev.yml -f docker-compose.opensearch.yml ps -q) | |
| # Collect logs from each container | |
| for container in $containers; do | |
| container_name=$(docker inspect --format='{{.Name}}' $container | sed 's/^\///') | |
| echo "Collecting logs from $container_name..." | |
| docker logs $container > ../../docker-logs/${container_name}.log 2>&1 | |
| done | |
| cd ../.. | |
| echo "Docker logs collected in docker-logs directory" | |
| - name: Upload Docker logs | |
| if: failure() | |
| uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f | |
| with: | |
| name: docker-logs-${{ matrix.test-dir }} | |
| path: docker-logs/ | |
| retention-days: 7 |