Skip to content

Migrate NodeListener from bidi streaming to unary RPC #1906

Migrate NodeListener from bidi streaming to unary RPC

Migrate NodeListener from bidi streaming to unary RPC #1906

Workflow file for this run

# SPDX-FileCopyrightText: Copyright (c) 2025 NVIDIA CORPORATION & AFFILIATES. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# SPDX-License-Identifier: Apache-2.0
name: PR Checks

# Run on manual dispatch and on PR activity against main/feature/release branches.
on:
  workflow_dispatch:
  pull_request:
    types: [opened, synchronize, reopened]
    branches: [ main, 'feature/**', 'release/**' ]

# Cancel superseded runs for the same PR. On workflow_dispatch there is no PR
# number (the expression expands empty), so fall back to the ref to avoid
# lumping every manual run into a single shared cancel group.
concurrency:
  group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
  cancel-in-progress: true
jobs:
  #######################
  #    Path Filters     #
  #######################
  # Detects which areas of the repo a PR touches; downstream jobs gate on
  # these outputs so unrelated changes skip expensive builds.
  check-paths:
    runs-on: ubuntu-latest
    outputs:
      ci: ${{ steps.filter.outputs.ci }}
      docs: ${{ steps.filter.outputs.docs }}
      ui: ${{ steps.filter.outputs.ui }}
    steps:
      - name: Checkout
        uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4.3.1
        with:
          fetch-depth: 0  # Fetch all history for accurate diffing
      - name: Check which paths changed
        uses: dorny/paths-filter@de90cc6fb38fc0963ad72b210f1f284cd68cea36 # v3.0.2
        id: filter
        with:
          # Use empty token to force git-based detection
          token: ''
          filters: |
            ci:
              - '.github/workflows/pr-checks.yaml'
              - 'BUILD'
              - 'MODULE.bazel'
              - 'bzl/**'
              - 'src/**'
              - 'run/**'
            docs:
              - '.github/workflows/pr-checks.yaml'
              - 'docs/**'
              - 'cookbook/**'
            ui:
              - '.github/workflows/pr-checks.yaml'
              - 'src/ui/**'
#######################
#      CI Tests       #
#######################
  # PRs from forks: run on GitHub-hosted runners with no access to
  # internal secrets or the self-hosted infrastructure.
  ci-public:
    needs: [check-paths]
    if: |
      (needs.check-paths.outputs.ci == 'true') &&
      (github.event_name == 'pull_request' && github.event.pull_request.head.repo.fork == true)
    runs-on: ubuntu-latest
    steps:
      # Hosted runners have limited free disk; drop unused toolchains so the
      # Bazel build does not run out of space.
      - name: Free disk space
        run: |
          echo "Disk space before cleanup:"
          df -h
          echo "Freeing disk space..."
          echo "Removing .NET..."
          sudo rm -rf /usr/share/dotnet || true
          echo "Removing Android..."
          sudo rm -rf /usr/local/lib/android || true
          echo "Removing GHC..."
          sudo rm -rf /opt/ghc || true
          sudo rm -rf /usr/local/.ghcup || true
          echo "Removing CodeQL..."
          sudo rm -rf /opt/hostedtoolcache/CodeQL || true
          echo "Pruning Docker images..."
          sudo docker image prune --all --force || true
          echo "Disk space after cleanup:"
          df -h
      - name: Checkout
        uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4.3.1
        with:
          lfs: true
      - name: Setup Bazel
        uses: bazel-contrib/setup-bazel@4fd964a13a440a8aeb0be47350db2fc640f19ca8
        with:
          bazelisk-cache: true
          bazelisk-version: '1.27.0'  # quoted: keep version strings as strings
          disk-cache: ${{ github.workflow }}
          repository-cache: true
          external-cache: |
            manifest:
              # Python dependencies
              osmo_python_deps: src/locked_requirements.txt
              osmo_tests_python_deps: src/tests/locked_requirements.txt
              osmo_mypy_deps: bzl/mypy/locked_requirements.txt
              pylint_python_deps: bzl/linting/locked_requirements.txt
              # Go dependencies
              io_bazel_rules_go: src/runtime/go.mod
              bazel_gazelle: src/runtime/go.sum
      - name: Run Tests
        run: |
          bazel test --test_output=errors -- //...

  # Internal PRs and manual dispatches: run on self-hosted runners inside a
  # pinned Python container, with Docker-in-Docker for testcontainers.
  ci-internal:
    timeout-minutes: 30
    needs: [check-paths]
    if: |
      (github.event_name == 'workflow_dispatch') ||
      (
        needs.check-paths.outputs.ci == 'true' &&
        github.event_name == 'pull_request' &&
        github.event.pull_request.head.repo.fork == false
      )
    runs-on: self-hosted
    environment:
      name: internal-ci
    env:
      # Unique identifiers for Docker resources to prevent collisions
      COMPOSE_PROJECT_NAME: ci-${{ github.run_id }}
      DOCKER_BUILDKIT: "1"
    container:
      # python:3.10.18-bullseye pinned to digest for security (amd64)
      image: python:3.10.18-bullseye@sha256:4e96d6c7c610e5b2a46ff8a36cc76a159d57a5b865d580eda29d51afdc1a1923
      env:
        DOCKER_HOST: "tcp://docker:2375"
        DOCKER_TLS_CERTDIR: ""
      options: --memory=8g --cpus=4 --memory-swap=8g --pids-limit=4096 -v /var/run/docker.sock:/var/run/docker.sock
    services:
      docker:
        # docker:29.2.1-dind pinned to digest for security
        image: docker:29.2.1-dind@sha256:2658fda9e8779b94ea1581f4d25214dac9ed144b407902842e5328cce8f861f5
        env:
          DOCKER_TLS_CERTDIR: ""
          DOCKER_DRIVER: overlay2
        options: --privileged --memory=4g --cpus=2
        ports:
          # Quoted: colon-separated digit runs can hit YAML 1.1 scalar traps.
          - "2375:2375"
    steps:
      - name: Install Node.js, Docker CLI, and dependencies
        run: |
          apt-get update
          apt-get install -y ca-certificates curl gnupg git-lfs
          # Add Docker's official GPG key and repository
          install -m 0755 -d /etc/apt/keyrings
          curl -fsSL https://download.docker.com/linux/debian/gpg -o /etc/apt/keyrings/docker.asc
          chmod a+r /etc/apt/keyrings/docker.asc
          echo "deb [arch=$(dpkg --print-architecture) signed-by=/etc/apt/keyrings/docker.asc] https://download.docker.com/linux/debian $(. /etc/os-release && echo "$VERSION_CODENAME") stable" | tee /etc/apt/sources.list.d/docker.list > /dev/null
          # Add Node.js 20.x from NodeSource
          curl -fsSL https://deb.nodesource.com/gpgkey/nodesource-repo.gpg.key | gpg --dearmor -o /etc/apt/keyrings/nodesource.gpg
          echo "deb [signed-by=/etc/apt/keyrings/nodesource.gpg] https://deb.nodesource.com/node_20.x nodistro main" | tee /etc/apt/sources.list.d/nodesource.list > /dev/null
          apt-get update
          # Install Docker CLI (client only, connects to DinD service)
          apt-get install -y docker-ce-cli
          # Remove Debian's nodejs package if present and install from NodeSource
          apt-get remove -y nodejs || true
          apt-get install -y nodejs=20.*
          # Verify installations
          echo "Node.js version: $(node --version)"
          echo "npm version: $(npm --version)"
          echo "Docker version: $(docker --version)"
          echo "Git LFS version: $(git-lfs --version)"
      - name: Checkout
        uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4.3.1
        with:
          lfs: true
      - name: Setup Bazel
        uses: bazel-contrib/setup-bazel@4fd964a13a440a8aeb0be47350db2fc640f19ca8
        with:
          bazelisk-cache: true
          bazelisk-version: '1.27.0'
      - name: Run Tests
        run: |
          bazel test --config=ci \
            --remote_cache=${{ secrets.BAZEL_REMOTE_CACHE_URL }} \
            --test_env=DOCKER_HOST=tcp://docker:2375 \
            --test_env=TESTCONTAINERS_HOST_OVERRIDE=docker \
            --test_output=errors \
            -- \
            //...
      # Clean up DinD state even on failure so the persistent runner stays healthy.
      - name: Docker cleanup
        if: always()
        run: |
          echo "=== Docker Cleanup ==="
          # Stop all running containers
          docker ps -q | xargs -r docker stop --time=5 || true
          # Remove all containers
          docker ps -aq | xargs -r docker rm -f || true
          # Remove all volumes
          docker volume ls -q | xargs -r docker volume rm -f || true
          # Remove custom networks
          docker network ls --filter type=custom -q | xargs -r docker network rm || true
          echo "Docker disk usage after cleanup:"
          docker system df || true
      - name: Workspace cleanup
        if: always()
        run: |
          echo "=== Workspace Cleanup ==="
          # Remove test outputs and temporary files
          rm -rf /tmp/pytest-* /tmp/tmp* || true
          # Show remaining disk usage
          echo "Disk usage summary:"
          df -h / || true
      # Clean host Docker (same runner) so small VMs don't fill up — uses mounted socket
      - name: Host cleanup
        if: always()
        env:
          # Empty DOCKER_HOST targets the host daemon via the mounted socket,
          # not the DinD service.
          DOCKER_HOST: ""
        run: |
          echo "=== Host cleanup (runner node) ==="
          docker ps -aq -f status=exited -f status=dead --filter "until=5m" 2>/dev/null | xargs -r docker rm -f || true
          docker volume prune -f 2>/dev/null || true
          docker network prune -f 2>/dev/null || true
          docker image prune -f 2>/dev/null || true
          echo "Host Docker disk:"
          docker system df 2>/dev/null || true
#######################
#     Docs Build      #
#######################
  # Builds Sphinx docs, runs spelling and link checks, and uploads the result
  # (plus error logs) as an artifact for the deploy job.
  docs-build:
    needs: [check-paths]
    if: needs.check-paths.outputs.docs == 'true' || github.event_name == 'workflow_dispatch'
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4.3.1
        with:
          lfs: true
      - name: Setup Python
        uses: actions/setup-python@a309ff8b426b58ec0e2a45f0f869d46889d02405 # v6.2.0
        with:
          python-version: '3.10.18'
      - name: Install dependencies
        run: |
          pip install -U -r docs/locked_requirements.txt
          pip install -U -r src/locked_requirements.txt
      - name: Build Sphinx documentation
        run: |
          make -C docs build ERR_DIR=build_error_logs OUT_DIR=public
          rm -rf docs/public/.doctrees
          if [ -d "docs/build_error_logs" ] && [ "$(find docs/build_error_logs -name '*.log' -size +0c)" ]; then
            echo "Build errors found:"
            find docs/build_error_logs -name '*.log' -size +0c -exec echo "Error in {}: " \; -exec cat {} \;
            exit 1
          fi
      # continue-on-error so link checking still runs; the aggregate step below
      # fails the job if this step failed.
      - name: Run spelling check
        id: spelling
        continue-on-error: true
        run: |
          make -C docs spelling ERR_DIR=spelling_error_logs OUT_DIR=public
          if [ -d "docs/spelling_error_logs" ] && [ "$(find docs/spelling_error_logs -name '*.log' -size +0c)" ]; then
            echo "Spelling errors found:"
            find docs/spelling_error_logs -name '*.log' -size +0c -exec echo "Error in {}: " \; -exec cat {} \;
            exit 1
          fi
      - name: Restore lychee cache
        uses: actions/cache/restore@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3
        with:
          path: .lycheecache
          key: cache-lychee-pr-${{ github.event.pull_request.number }}
          restore-keys: cache-lychee-
      # continue-on-error so a link failure reaches the aggregate check below
      # (which inspects this step's outcome) instead of aborting the job here.
      - name: Link Checker
        id: link-checker
        continue-on-error: true
        uses: lycheeverse/lychee-action@a8c4c7cb88f0c7386610c35eb25108e448569cb0 # v2.7.0
        with:
          args: --config docs/lychee.toml --no-progress '**/*.html' docs/public
          output: docs/link_check_error_logs/lychee-report.md
      - name: Save lychee cache
        uses: actions/cache/save@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3
        if: steps.link-checker.outcome == 'success'
        with:
          path: .lycheecache
          key: cache-lychee-pr-${{ github.event.pull_request.number }}
      # Aggregate the soft-failed checks into one pass/fail for the job.
      - name: Check spelling and link results
        run: |
          if [ "${{ steps.spelling.outcome }}" == "failure" ] || [ "${{ steps.link-checker.outcome }}" == "failure" ]; then
            echo "::error::One or more checks failed:"
            [ "${{ steps.spelling.outcome }}" == "failure" ] && echo " - Spelling check failed"
            [ "${{ steps.link-checker.outcome }}" == "failure" ] && echo " - Link check failed"
            exit 1
          fi
          echo "All checks passed"
      - name: Upload build artifacts
        if: always()
        uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
        with:
          name: documentation-build
          path: |
            docs/public/
            docs/build_error_logs/
            docs/spelling_error_logs/
            docs/link_check_error_logs/
          retention-days: 3

  # Publishes the built docs to the PR-preview S3 bucket and comments the URL.
  # check-paths must be listed in `needs`: the `needs` context only exposes
  # direct dependencies, so referencing needs.check-paths without it would
  # always evaluate empty and the job would never run.
  docs-deploy:
    needs: [check-paths, docs-build]
    if: needs.check-paths.outputs.docs == 'true' && github.event_name == 'pull_request' && github.event.pull_request.head.repo.fork == false
    runs-on: ubuntu-latest
    environment:
      name: pr-preview
      url: ${{ vars.PR_PREVIEW_BASEURL }}${{ github.event.pull_request.number }}/index.html
    steps:
      - name: Download artifact
        uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0
        with:
          name: documentation-build
      - name: Configure AWS credentials
        uses: aws-actions/configure-aws-credentials@8df5847569e6427dd6c4fb1cf565c83acfa8afa7 # v6.0.0
        with:
          aws-access-key-id: ${{ secrets.PR_PREVIEW_AWS_ACCESS_KEY }}
          aws-secret-access-key: ${{ secrets.PR_PREVIEW_AWS_SECRET_KEY }}
          aws-region: us-west-2
      - name: Deploy docs to S3
        run: |
          aws s3 sync public/ s3://osmo-pr-preview/${{ github.event.pull_request.number }}/ --delete
      # Upsert a single preview comment, identified by a hidden marker.
      - name: Comment preview URL on PR
        uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
        with:
          script: |
            const previewUrl = `${{ vars.PR_PREVIEW_BASEURL }}${{ github.event.pull_request.number }}/index.html`;
            const marker = '<!-- docs-preview-comment -->';
            const body = `${marker}\n📖 **Docs preview:** ${previewUrl}`;
            const { data: comments } = await github.rest.issues.listComments({
              owner: context.repo.owner,
              repo: context.repo.repo,
              issue_number: context.issue.number,
            });
            const existing = comments.find(c => c.body.includes(marker));
            if (existing) {
              await github.rest.issues.updateComment({
                owner: context.repo.owner,
                repo: context.repo.repo,
                comment_id: existing.id,
                body,
              });
            } else {
              await github.rest.issues.createComment({
                owner: context.repo.owner,
                repo: context.repo.repo,
                issue_number: context.issue.number,
                body,
              });
            }
#######################
#      UI Build       #
#######################
  # Installs UI dependencies with pnpm and runs the project's validate script.
  ui-build:
    needs: [check-paths]
    if: |
      (needs.check-paths.outputs.ui == 'true') ||
      github.event_name == 'workflow_dispatch'
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4.3.1
        with:
          lfs: true
      - name: Setup pnpm
        # NOTE(review): every other action in this workflow is pinned to a
        # commit SHA; pin this one too for supply-chain consistency.
        uses: pnpm/action-setup@v4
        with:
          package_json_file: src/ui/package.json
      - name: Setup Node.js
        uses: actions/setup-node@6044e13b5dc448c55e2357c09f80417699197238 # v6.2.0
        with:
          node-version-file: src/ui/.nvmrc
          cache: pnpm
          cache-dependency-path: src/ui/pnpm-lock.yaml
      - name: Cache Next.js build
        uses: actions/cache@5a3ec84eff668545956fd18022155c47e93e2684 # v4.2.3
        with:
          path: src/ui/.next/cache
          key: nextjs-${{ hashFiles('src/ui/pnpm-lock.yaml') }}-${{ hashFiles('src/ui/src/**') }}
          restore-keys: |
            nextjs-${{ hashFiles('src/ui/pnpm-lock.yaml') }}-
      - name: Validate UI
        working-directory: src/ui
        run: |
          pnpm install --frozen-lockfile
          pnpm validate