Update LLM provider SDK npm packages #23379
name: Test

on:
  pull_request:
  push:
    branches:
      - main
  merge_group:

env:
  TURBO_TOKEN: ${{ secrets.TURBO_TOKEN }}
  TURBO_TEAM: hashintel
  TURBO_CACHE: remote:rw
  NEXTEST_PROFILE: ci

concurrency:
  group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
  cancel-in-progress: true

jobs:
  optimize-ci:
    runs-on: ubuntu-24.04
    name: Optimize CI
    outputs:
      skip: ${{ steps.check_skip.outputs.skip }}
    steps:
      - name: Optimize CI
        id: check_skip
        uses: withgraphite/graphite-ci-action@6213dcfab02da542aaf534e087282a928ff4374a
        with:
          graphite_token: ${{ secrets.GRAPHITE_CI_OPTIMIZER_TOKEN }}
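  # Determines which packages are affected by the change and emits the job matrices
  # consumed by the unit-test, integration-test, build, and publish jobs below.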
  setup:
    runs-on: ubuntu-24.04
    outputs:
      unit-tests: ${{ steps.packages.outputs.unit-tests }}
      integration-tests: ${{ steps.packages.outputs.integration-tests }}
      system-tests: ${{ steps.packages.outputs.system-tests }}
      dockers: ${{ steps.packages.outputs.dockers }}
      publish: ${{ steps.packages.outputs.publish }}
    steps:
      - name: Checkout source code
        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
        with:
          fetch-depth: 2
      - name: Install tools
        uses: ./.github/actions/install-tools
        with:
          token: ${{ secrets.GITHUB_TOKEN }}
          rust: false
      - name: Determine changed packages
        id: packages
        run: |
          UNIT_TEST_QUERY='query { affectedPackages(base: "HEAD^", filter: {has: {field: TASK_NAME, value: "test:unit"}}) { items { name path } } }'
          INTEGRATION_TEST_QUERY='query { affectedPackages(base: "HEAD^", filter: {has: {field: TASK_NAME, value: "test:integration"}}) { items { name path } } }'
          DOCKER_TEST_QUERY='query { affectedPackages(base: "HEAD^", filter: {has: {field: TASK_NAME, value: "build:docker"}}) { items { name path } } }'
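          # Each query lists the packages affected since HEAD^ for one Turborepo task; jq drops the
          # workspace root ("//") and reshapes the result into {name: [...], include: [...]}, the
          # shape `fromJSON` expects for a matrix strategy.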
          UNIT_TEST_PACKAGES=$(turbo query "$UNIT_TEST_QUERY" \
            | jq --compact-output '.data.affectedPackages.items | [(.[] | select(.name != "//"))] | { name: [.[].name], include: . }')
          INTEGRATION_TEST_PACKAGES=$(turbo query "$INTEGRATION_TEST_QUERY" \
            | jq --compact-output '.data.affectedPackages.items | [(.[] | select(.name != "//"))] | { name: [.[].name], include: . }')
          DOCKER_PACKAGES=$(turbo query "$DOCKER_TEST_QUERY" \
            | jq --compact-output '.data.affectedPackages.items | [(.[] | select(.name != "//"))] | { name: [.[].name], include: . }')
          PUBLISH_PACKAGES=[]
          PUBLISH_INCLUDES=[]
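          # Collect Rust crates to publish: every Cargo.toml whose package version changed since the
          # previous commit and which is not marked as publish = false.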
          while IFS= read -r file; do
            if [[ -n "$file" ]]; then
              PACKAGE_NAME=$(yq '.package.name' -p toml -oy "$file")
              if [[ "$PACKAGE_NAME" == "null" ]]; then
                continue
              fi
              if [[ "$(yq '.package.publish' -p toml -oy "$file")" == "false" ]]; then
                continue
              fi
              if [[ "$(yq '.package.publish.workspace' -p toml -oy "$file")" == "true" ]]; then
                if [[ "$(yq '.workspace.package.publish' -p toml -oy "Cargo.toml")" == "false" ]]; then
                  continue
                fi
              fi
              OLD_VERSION=$(git show HEAD^:"$file" | yq '.package.version' -p toml)
              if [[ "$OLD_VERSION" == "null" ]]; then
                continue
              fi
              NEW_VERSION=$(yq '.package.version' -p toml -oy "$file")
              if [[ "$OLD_VERSION" == "$NEW_VERSION" ]]; then
                continue
              fi
              DIR=$(dirname "$file")
              PUBLISH_PACKAGES=$(echo "$PUBLISH_PACKAGES" | jq -c --arg package "$PACKAGE_NAME" '. += [$package]')
              PUBLISH_INCLUDES=$(echo "$PUBLISH_INCLUDES" | jq -c --arg package "$PACKAGE_NAME" --arg directory "$DIR" '. += [{name: $package, path: $directory}]')
            fi
          done <<< "$(find . -name 'Cargo.toml' -type f | sed 's|\./||')"
          PUBLISH_PACKAGES=$(jq -c -n --argjson packages "$PUBLISH_PACKAGES" --argjson includes "$PUBLISH_INCLUDES" '{name: $packages, include: $includes}')
          echo "unit-tests=$UNIT_TEST_PACKAGES" | tee -a $GITHUB_OUTPUT
          echo "integration-tests=$INTEGRATION_TEST_PACKAGES" | tee -a $GITHUB_OUTPUT
          echo "dockers=$DOCKER_PACKAGES" | tee -a $GITHUB_OUTPUT
          echo "publish=$PUBLISH_PACKAGES" | tee -a $GITHUB_OUTPUT

  unit-tests:
    name: Unit
    needs: [setup]
    strategy:
      matrix: ${{ fromJSON(needs.setup.outputs.unit-tests) }}
      fail-fast: false
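    # Skip the job entirely when the setup job produced an empty matrix for it.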
    if: needs.setup.outputs.unit-tests != '{"name":[],"include":[]}'
    runs-on: ubuntu-24.04
    steps:
      - name: Checkout
        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
      - name: Clean up disk
        uses: ./.github/actions/clean-up-disk
      - name: Install tools
        uses: ./.github/actions/install-tools
        with:
          token: ${{ secrets.GITHUB_TOKEN }}
      - name: Prune repository
        uses: ./.github/actions/prune-repository
        with:
          scope: ${{ matrix.name }}
      - name: Warm up repository
        uses: ./.github/actions/warm-up-repo
      - name: Find test steps to run
        id: tests
        run: |
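          # Treat the package as Rust if it ships a Cargo.toml; apps/hash-graph is special-cased.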
          HAS_RUST=$([[ -f "${{ matrix.path }}/Cargo.toml" || ${{ matrix.path }} = "apps/hash-graph" ]] && echo 'true' || echo 'false')
          echo "has-rust=$HAS_RUST" | tee -a $GITHUB_OUTPUT
      - name: Install PDFium
        if: matrix.name == '@rust/chonky'
        env:
          GH_TOKEN: ${{ github.token }}
        run: |
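          # Download the prebuilt PDFium shared library and expose its location via PDFIUM_DYNAMIC_LIB_PATH.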
          temp_dir=$(mktemp -d)
          gh release download chromium/6721 --repo bblanchon/pdfium-binaries --pattern 'pdfium-linux-x64.tgz' --dir $temp_dir
          tar -xzf $temp_dir/pdfium-linux-x64.tgz -C $temp_dir
          mv $temp_dir/lib/* "${{ matrix.path }}/libs/"
          rm -rf $temp_dir
          echo "PDFIUM_DYNAMIC_LIB_PATH=$(pwd)/${{ matrix.path }}/libs/" >> $GITHUB_ENV
      - name: Show disk usage
        run: df -h
      - name: Run tests
        continue-on-error: ${{ steps.tests.outputs.allow-failure == 'true' }}
        env:
          TEST_COVERAGE: ${{ github.event_name != 'merge_group' }}
        run: |
          turbo run test:unit --env-mode=loose --filter "${{ matrix.name }}"
          echo "TRIMMED_PACKAGE_NAME=$(echo "${{ matrix.name }}" | sed 's|@||g' | sed 's|/|.|g')" >> $GITHUB_ENV
      - name: Show disk usage
        run: df -h
      - uses: codecov/codecov-action@18283e04ce6e62d37312384ff67231eb8fd56d24 # v5.4.3
        name: Upload coverage to https://app.codecov.io/gh/hashintel/hash
        with:
          flags: ${{ env.TRIMMED_PACKAGE_NAME }}
          token: ${{ secrets.CODECOV_TOKEN }} ## not required for public repos, can be removed when https://github.com/codecov/codecov-action/issues/837 is resolved
      - name: Run miri
        if: always() && steps.tests.outputs.has-rust == 'true'
        run: |
          turbo run test:miri --filter "${{ matrix.name }}"

  build:
    name: Build
    permissions:
      contents: read
      packages: write
    runs-on: ubuntu-24.04
    needs: [setup, optimize-ci]
    strategy:
      matrix: ${{ fromJSON(needs.setup.outputs.dockers) }}
      fail-fast: false
    if: needs.setup.outputs.dockers != '{"name":[],"include":[]}' && needs.optimize-ci.outputs.skip == 'false'
    steps:
      - name: Checkout
        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
      - name: Clean up disk
        uses: ./.github/actions/clean-up-disk
      - name: Build image
        uses: ./.github/actions/build-docker-images
        with:
          repo-token: ${{ secrets.GITHUB_TOKEN }}
          hash-graph: ${{ matrix.name == '@apps/hash-graph' }}
          hash-ai-worker-ts: ${{ matrix.name == '@apps/hash-ai-worker-ts' }}
          hash-integration-worker: ${{ matrix.name == '@apps/hash-integration-worker' }}
          GOOGLE_CLOUD_WORKLOAD_IDENTITY_FEDERATION_CONFIG_JSON: ${{ secrets.GOOGLE_CLOUD_WORKLOAD_IDENTITY_FEDERATION_CONFIG_JSON }}
          hash-api: ${{ matrix.name == '@apps/hash-api' }}

  integration-tests:
    name: Integration
    needs: [setup, optimize-ci]
    strategy:
      matrix: ${{ fromJSON(needs.setup.outputs.integration-tests) }}
      fail-fast: false
    if: needs.setup.outputs.integration-tests != '{"name":[],"include":[]}' && needs.optimize-ci.outputs.skip == 'false'
    runs-on: ubuntu-24.04
    steps:
      - name: Checkout
        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
      - name: Clean up disk
        uses: ./.github/actions/clean-up-disk
      - name: Install tools
        uses: ./.github/actions/install-tools
        with:
          token: ${{ secrets.GITHUB_TOKEN }}
      - name: Prune repository
        uses: ./.github/actions/prune-repository
        with:
          scope: |
            ${{ matrix.name }}
            @apps/hash-external-services
      - name: Warm up repository
        uses: ./.github/actions/warm-up-repo
      - name: Find test steps to run
        id: tests
        run: |
          TEST_TASKS=$(turbo run test:integration --env-mode=loose --dry-run=json --filter "${{ matrix.name }}" | jq -c '.tasks[]')
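          # A background task is any package other than @apps/hash-external-services that defines a runnable start:test task.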
          HAS_BACKGROUND_TASKS=$(turbo run start:test --dry-run json | jq -c '[.tasks[] | select(.command != "<NONEXISTENT>" and .name != "@apps/hash-external-services") | .name] != []' || echo 'false')
          echo "has-background-tasks=$HAS_BACKGROUND_TASKS" | tee -a $GITHUB_OUTPUT
          HAS_RUST=$([[ -f "${{ matrix.path }}/Cargo.toml" || ${{ matrix.path }} = "apps/hash-graph" ]] && echo 'true' || echo 'false')
          echo "has-rust=$HAS_RUST" | tee -a $GITHUB_OUTPUT
      - name: Create temp files and folders
        run: mkdir -p var/logs
      - name: Install PDFium
        if: matrix.name == '@rust/chonky'
        env:
          GH_TOKEN: ${{ github.token }}
        run: |
          temp_dir=$(mktemp -d)
          gh release download chromium/6721 --repo bblanchon/pdfium-binaries --pattern 'pdfium-linux-x64.tgz' --dir $temp_dir
          tar -xzf $temp_dir/pdfium-linux-x64.tgz -C $temp_dir
          mv $temp_dir/lib/* "${{ matrix.path }}/libs/"
          rm -rf $temp_dir
          echo "PDFIUM_DYNAMIC_LIB_PATH=$(pwd)/${{ matrix.path }}/libs/" >> $GITHUB_ENV
      - name: Install playwright
        if: matrix.name == '@tests/hash-playwright'
        uses: nick-fields/retry@ce71cc2ab81d554ebbe88c79ab5975992d79ba08 # v3.0.2
        with:
          max_attempts: 3
          timeout_minutes: 10
          shell: bash
          command: npx playwright install --with-deps chromium
      - name: Show disk usage
        run: df -h
      - name: Launch external services
        run: |
          touch .env.local
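          # Placeholder credentials: the services only need these variables to be present for the tests.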
          echo 'OPENAI_API_KEY=dummy' >> .env.local
          echo 'ANTHROPIC_API_KEY=dummy' >> .env.local
          echo 'HASH_TEMPORAL_WORKER_AI_AWS_ACCESS_KEY_ID=dummy' >> .env.local
          echo 'HASH_TEMPORAL_WORKER_AI_AWS_SECRET_ACCESS_KEY=dummy' >> .env.local
          echo 'HASH_GRAPH_PG_DATABASE=graph' >> .env.local
          cp .env.local .env.test.local
          yarn external-services:test up --wait
      - name: Start background tasks
        id: background-tasks
        if: steps.tests.outputs.has-background-tasks == 'true'
        env:
          # Set terminal logging to WARN level and enable verbose file logging for CI
          HASH_GRAPH_LOG_CONSOLE_LEVEL: warn
          HASH_GRAPH_LOG_FILE_ENABLED: true
          HASH_GRAPH_LOG_FILE_LEVEL: info
          HASH_GRAPH_LOG_FOLDER: ${{ github.workspace }}/var/logs
        run: |
          # Optimistically compile background tasks. Ideally we would also `build` them, but that
          # includes `@apps/plugin-browser`, which needs `build:test` instead. We cannot simply filter
          # it out either: except when running the Playwright tests, plugin-browser has already been
          # pruned out of the repository, so the filtered command would fail.
          turbo run compile --env-mode=loose
          mkdir -p logs
          turbo run start:test --env-mode=loose --log-order stream >var/logs/background-task.log 2>&1 &
          PID=$!
          echo "pid=$PID" | tee -a $GITHUB_OUTPUT
          # Running the healthchecks here is not strictly necessary, as they are also run by the
          # integration tests, but this way we can fail the job early if the background tasks are not
          # healthy. It also makes the recorded time for the individual steps more accurate.
          turbo run start:test:healthcheck --env-mode=loose
      - name: Show disk usage
        run: df -h
      - name: Run tests
        continue-on-error: ${{ steps.tests.outputs.allow-failure == 'true' }}
        run: |
          turbo run test:integration --env-mode=loose --filter "${{ matrix.name }}"
          echo "TRIMMED_PACKAGE_NAME=$(echo "${{ matrix.name }}" | sed 's|@||g' | sed 's|/|.|g')" >> $GITHUB_ENV
      - name: Show disk usage
        run: df -h
      - uses: codecov/codecov-action@18283e04ce6e62d37312384ff67231eb8fd56d24 # v5.4.3
        name: Upload coverage to https://app.codecov.io/gh/hashintel/hash
        with:
          flags: ${{ env.TRIMMED_PACKAGE_NAME }}
          token: ${{ secrets.CODECOV_TOKEN }} ## not required for public repos, can be removed when https://github.com/codecov/codecov-action/issues/837 is resolved
      - name: Show container logs
        if: ${{ success() || failure() }}
        run: yarn workspace @apps/hash-external-services deploy logs --timestamps
      - name: Show background tasks logs
        if: always() && steps.tests.outputs.has-background-tasks == 'true'
        run: |
          kill -15 ${{ steps.background-tasks.outputs.pid }} || true
          cat var/logs/background-task.log
      - name: Upload artifact playwright-report
        if: matrix.name == '@tests/hash-playwright' && (success() || failure())
        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
        with:
          name: ${{ env.TRIMMED_PACKAGE_NAME }}_report
          path: tests/hash-playwright/playwright-report
      - name: Upload logs
        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
        if: success() || failure()
        with:
          name: ${{ env.TRIMMED_PACKAGE_NAME }}_logs
          path: |
            var/api
            var/logs

  publish:
    name: Publish
    needs: [setup]
    strategy:
      fail-fast: false
      matrix: ${{ fromJSON(needs.setup.outputs.publish) }}
    if: needs.setup.outputs.publish != '{"name":[],"include":[]}'
    runs-on: ubuntu-24.04
    steps:
      - name: Checkout source code
        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
      - name: Clean up disk
        uses: ./.github/actions/clean-up-disk
      - name: Find publish jobs to run
        id: publish
        run: |
          HAS_RUST=$([[ -f "${{ matrix.path }}/Cargo.toml" || ${{ matrix.path }} = "apps/hash-graph" ]] && echo 'true' || echo 'false')
          echo "has-rust=$HAS_RUST" | tee -a $GITHUB_OUTPUT
      - name: Login
        run: |
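          # Fail the step early if the registry token secret is not configured.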
          [[ -n "${{ secrets.CARGO_REGISTRY_TOKEN }}" ]]
          cargo login "${{ secrets.CARGO_REGISTRY_TOKEN }}"
      - name: Publish (dry run)
        if: always() && steps.publish.outputs.has-rust == 'true' && (github.event_name == 'pull_request' || github.event_name == 'merge_group')
        working-directory: ${{ matrix.path }}
        run: cargo publish --all-features --dry-run --no-verify
      - name: Publish
        if: always() && steps.publish.outputs.has-rust == 'true' && github.event_name == 'push' && github.ref == 'refs/heads/main'
        working-directory: ${{ matrix.path }}
        run: cargo publish --all-features --no-verify
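  # Summary job that verifies every other job either succeeded or was skipped, so a single status check can gate merges.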
  passed:
    name: Tests passed
    needs: [setup, unit-tests, build, integration-tests, publish, optimize-ci]
    if: always()
    runs-on: ubuntu-latest
    steps:
      - name: Check setup script
        run: |
          [[ ${{ needs.setup.result }} = success ]]
      - name: Check unit tests
        run: |
          [[ ${{ needs.unit-tests.result }} =~ success|skipped ]]
      - name: Check builds
        if: needs.optimize-ci.outputs.skip == 'false'
        run: |
          [[ ${{ needs.build.result }} =~ success|skipped ]]
      - name: Check integration tests
        if: needs.optimize-ci.outputs.skip == 'false'
        run: |
          [[ ${{ needs.integration-tests.result }} =~ success|skipped ]]
      - name: Check publish results
        run: |
          [[ ${{ needs.publish.result }} =~ success|skipped ]]
      - name: Notify Slack on failure
        uses: rtCamp/action-slack-notify@07cbdbfd6c6190970778d8f98f11d073b2932aae
        if: ${{ failure() && github.event_name == 'merge_group' }}
        env:
          SLACK_LINK_NAMES: true
          SLACK_MESSAGE: "At least one test job failed for a Pull Request in the Merge Queue <!subteam^S09KH99698T>" # Notifies @devops
          SLACK_TITLE: Tests failed
          SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK_URL }}
          SLACK_USERNAME: GitHub
          VAULT_ADDR: ""
          VAULT_TOKEN: ""