# Workflow file captured from CI run for PR #131639:
# "feat(replay-vision): API validation + lens_result row column"

name: Node.js CI

on:
  pull_request:
  merge_group:
  push:
    branches:
      - master

env:
  OBJECT_STORAGE_ENABLED: true
  OBJECT_STORAGE_ENDPOINT: 'http://localhost:19000'
  OBJECT_STORAGE_ACCESS_KEY_ID: 'object_storage_root_user'
  OBJECT_STORAGE_SECRET_ACCESS_KEY: 'object_storage_root_password'
  OBJECT_STORAGE_SESSION_RECORDING_FOLDER: 'session_recordings'
  OBJECT_STORAGE_BUCKET: 'posthog'
  # set the max buffer size small enough that the functional tests behave the same in CI as when running locally
  SESSION_RECORDING_MAX_BUFFER_SIZE_KB: 1024

# Cancel superseded runs for the same ref, but never cancel in-flight master/merge-queue runs.
concurrency:
  group: ${{ github.workflow }}-${{ github.head_ref || github.ref }}
  cancel-in-progress: ${{ github.event_name == 'pull_request' }}
jobs:
  # Job to decide if we should run nodejs services ci
  # See https://github.com/dorny/paths-filter#conditional-execution for more details
  changes:
    runs-on: ubuntu-latest
    timeout-minutes: 5
    name: Determine need to run Node.js checks
    permissions:
      contents: read
      pull-requests: read
    outputs:
      # Default to 'true' (run everything) when the filter step was skipped, e.g. on master push.
      nodejs: ${{ steps.filter.outputs.nodejs || 'true' }}
    steps:
      # For pull requests it's not necessary to checkout the code, but we
      # also want this to run on master so we need to checkout
      - uses: actions/checkout@v6
        with:
          clean: false
      - uses: actions/create-github-app-token@1b10c78c7865c340bc4f6099eb2f838309f1e8c3 # v3.1.1
        id: app-token
        # Only mint an app token for same-repo PRs; forks fall back to github.token below.
        if: github.event_name == 'pull_request' && github.event.pull_request.head.repo.full_name == github.repository
        with:
          client-id: ${{ secrets.GH_APP_POSTHOG_PATHS_FILTER_APP_ID }}
          private-key: ${{ secrets.GH_APP_POSTHOG_PATHS_FILTER_PRIVATE_KEY }}
      - uses: dorny/paths-filter@fbd0ab8f3e69293af611ebaee6363fc25e6d187d # v4.0.1
        id: filter
        if: github.event_name != 'push' # Run all tests on master push
        with:
          token: ${{ steps.app-token.outputs.token || github.token }}
          filters: |
            nodejs:
              - '.github/workflows/ci-nodejs.yml'
              - 'nodejs/**'
              - 'posthog/clickhouse/**'
              - 'ee/migrations/**'
              - 'posthog/management/commands/setup_test_environment.py'
              - 'posthog/migrations/**'
              - 'posthog/plugins/**'
              - 'docker*.yml'
              - '*Dockerfile'
lint:
if: needs.changes.outputs.nodejs == 'true'
name: Node.js Code quality (depot-ubuntu-latest)
needs: changes
runs-on: depot-ubuntu-latest
timeout-minutes: 10
steps:
- uses: actions/checkout@v6
with:
clean: false
- name: Install pnpm
uses: pnpm/action-setup@fc06bc1257f339d1d5d8b3a19a8cae5388b55320 # v5.0.0
- name: Fix node-gyp permissions
run: chmod +x ~/setup-pnpm/node_modules/.pnpm/pnpm@*/node_modules/pnpm/dist/node_modules/node-gyp/gyp/gyp_main.py
- name: Set up Node.js
uses: actions/setup-node@6044e13b5dc448c55e2357c09f80417699197238 # v6.2.0
with:
node-version-file: .nvmrc
cache: pnpm
cache-dependency-path: |
pnpm-lock.yaml
.github/workflows/ci-nodejs.yml
- name: Install package.json dependencies with pnpm
env:
npm_config_fetch_retries: 3
npm_config_fetch_retry_mintimeout: 10000
npm_config_fetch_retry_maxtimeout: 60000
run: |
pnpm --filter=@posthog/nodejs... install --frozen-lockfile
bin/turbo --filter=@posthog/nodejs prepare
- name: Check formatting with prettier
run: pnpm --filter=@posthog/nodejs format:check
- name: Lint with ESLint
run: pnpm --filter=@posthog/nodejs lint
build:
if: needs.changes.outputs.nodejs == 'true'
name: Node.js Build (depot-ubuntu-latest)
needs: changes
runs-on: depot-ubuntu-latest
timeout-minutes: 10
steps:
- uses: actions/checkout@v6
with:
clean: false
- name: Install pnpm
uses: pnpm/action-setup@fc06bc1257f339d1d5d8b3a19a8cae5388b55320 # v5.0.0
- name: Fix node-gyp permissions
run: chmod +x ~/setup-pnpm/node_modules/.pnpm/pnpm@*/node_modules/pnpm/dist/node_modules/node-gyp/gyp/gyp_main.py
- name: Set up Node.js
uses: actions/setup-node@6044e13b5dc448c55e2357c09f80417699197238 # v6.2.0
with:
node-version-file: .nvmrc
cache: pnpm
cache-dependency-path: |
pnpm-lock.yaml
.github/workflows/ci-nodejs.yml
- name: Install package.json dependencies with pnpm
env:
npm_config_fetch_retries: 3
npm_config_fetch_retry_mintimeout: 10000
npm_config_fetch_retry_maxtimeout: 60000
run: |
pnpm --filter=@posthog/nodejs... install --frozen-lockfile
bin/turbo --filter=@posthog/nodejs prepare
- name: Check builds correctly
run: pnpm --filter=@posthog/nodejs build
- name: Sanity check output
run: |
cd nodejs
# We expect it to fail but check that the error isn't "MODULE_NOT_FOUND"
if node dist/index.js 2>&1 | grep "MODULE_NOT_FOUND"; then
echo "❌ Build is invalid - failed with 'MODULE_NOT_FOUND' error"
exit 1
else
echo "✅ Build is valid - failed as expected without module errors"
fi
tests:
if: needs.changes.outputs.nodejs == 'true'
name: Node.js Tests ${{matrix.shard}}/3 (depot-ubuntu-latest-4)
needs: changes
runs-on: depot-ubuntu-latest-4
timeout-minutes: 30 # We know tests don't take this long
strategy:
fail-fast: false
matrix:
shard: [1, 2, 3]
env:
REDIS_URL: 'redis://localhost'
CLICKHOUSE_HOST: 'localhost'
CLICKHOUSE_DATABASE: 'posthog_test'
KAFKA_HOSTS: 'kafka:9092'
steps:
- name: Code check out
uses: actions/checkout@v6
with:
clean: false
- name: Clean up data directories with container permissions
run: |
# Use docker to clean up files created by containers
[ -d "data" ] && docker run --rm -v "$(pwd)/data:/data" alpine sh -c "rm -rf /data/seaweedfs /data/minio" || true
continue-on-error: true
- name: Stop/Start stack with Docker Compose
env:
COMPOSE_FILE: docker-compose.dev.yml:docker-compose.profiles.yml
COMPOSE_PROFILES: dev_tools,localstack
WAIT_FOR_DOCKER_LAUNCH_RETRIES: 3
WAIT_FOR_DOCKER_LAUNCH_RETRY_DELAY: 5
run: |
bin/ci-wait-for-docker launch --down
- name: Wait for Docker services
env:
COMPOSE_FILE: docker-compose.dev.yml:docker-compose.profiles.yml
COMPOSE_PROFILES: dev_tools,localstack
run: bin/ci-wait-for-docker wait
- name: Add service hostnames to /etc/hosts
run: echo "127.0.0.1 db redis7 kafka clickhouse clickhouse-coordinator objectstorage seaweedfs temporal" | sudo tee -a /etc/hosts
- name: Set up Python
uses: actions/setup-python@a309ff8b426b58ec0e2a45f0f869d46889d02405 # v6.2.0
with:
python-version-file: 'pyproject.toml'
- name: Install uv
id: setup-uv
uses: astral-sh/setup-uv@eac588ad8def6316056a12d4907a9d4d84ff7a3b # v7.3.0
with:
version: '0.10.2' # pinned: unpinned setup-uv calls GH API on every job, exhausts rate limit
enable-cache: true
cache-dependency-glob: uv.lock
save-cache: ${{ github.ref == 'refs/heads/master' }}
- name: Install rust
uses: dtolnay/rust-toolchain@0b1efabc08b657293548b77fb76cc02d26091c7e
with:
toolchain: 1.91.1
- name: Cache Rust dependencies
uses: Swatinem/rust-cache@c19371144df3bb44fab255c43d04cbc2ab54d1c4 # v2.9.1
with:
shared-key: 'v2-rust-backend'
workspaces: rust
save-if: ${{ github.ref == 'refs/heads/master' }}
- name: Install sqlx-cli
working-directory: rust
run: cargo install sqlx-cli@0.8.0 --locked --no-default-features --features native-tls,postgres
- name: Install SAML (python3-saml) dependencies
if: steps.setup-uv.outputs.cache-hit != 'true'
run: |
sudo apt-get update
sudo apt-get install libxml2-dev libxmlsec1-dev libxmlsec1-openssl
- name: Install python dependencies
run: |
UV_PROJECT_ENVIRONMENT=$pythonLocation uv sync --frozen --dev
- name: Install pnpm
uses: pnpm/action-setup@fc06bc1257f339d1d5d8b3a19a8cae5388b55320 # v5.0.0
- name: Fix node-gyp permissions
run: chmod +x ~/setup-pnpm/node_modules/.pnpm/pnpm@*/node_modules/pnpm/dist/node_modules/node-gyp/gyp/gyp_main.py
- name: Set up Node.js
uses: actions/setup-node@6044e13b5dc448c55e2357c09f80417699197238 # v6.2.0
with:
node-version-file: .nvmrc
cache: pnpm
cache-dependency-path: |
pnpm-lock.yaml
.github/workflows/ci-nodejs.yml
- name: Download MaxMind Database
run: |
./bin/download-mmdb
- name: Install package.json dependencies with pnpm
env:
npm_config_fetch_retries: 3
npm_config_fetch_retry_mintimeout: 10000
npm_config_fetch_retry_maxtimeout: 60000
run: pnpm --filter=@posthog/nodejs... install --frozen-lockfile
- name: Wait for Clickhouse, Redis, Kafka, Localstack
run: docker compose -f docker-compose.dev.yml up kafka redis7 clickhouse maildev localstack -d --wait
- name: Fetch base branch for diff
if: github.event_name == 'pull_request'
env:
BASE_REF: ${{ github.base_ref }}
run: git fetch origin "$BASE_REF" --depth=1
- name: Check for migrations in this PR
id: check_migrations
if: github.event_name == 'pull_request'
env:
BASE_REF: ${{ github.base_ref }}
run: |
if git diff --name-only "origin/$BASE_REF..HEAD" | grep -E '(migrations/|rust/.*migrate)'; then
echo "has_migrations=true" >> $GITHUB_OUTPUT
else
echo "has_migrations=false" >> $GITHUB_OUTPUT
fi
- name: Set up databases (fast path - restore schema)
if: github.event_name == 'pull_request' && steps.check_migrations.outputs.has_migrations == 'false'
env:
TEST: 'true'
SECRET_KEY: 'abcdef' # unsafe - for testing only
DATABASE_URL: 'postgres://posthog:posthog@localhost:5432/posthog'
BEHAVIORAL_COHORTS_DATABASE_URL: 'postgres://posthog:posthog@localhost:5432/test_behavioral_cohorts'
GH_TOKEN: ${{ github.token }}
run: |
# Download pre-migrated schema from CI
./bin/hogli db:download-schema
# Create test databases
docker compose -f docker-compose.dev.yml exec -T db psql -U posthog -c "DROP DATABASE IF EXISTS test_posthog;"
docker compose -f docker-compose.dev.yml exec -T db psql -U posthog -c "CREATE DATABASE test_posthog;"
# Restore schema to test database
echo "Restoring schema to test_posthog database..."
gunzip -c .postgres-backups/schema-latest.sql.gz | docker compose -f docker-compose.dev.yml exec -T db psql -q -U posthog test_posthog
echo "✅ Schema restored"
# Run Rust migrations for test databases
cd nodejs && pnpm run setup:test:rust
# Create dedicated persons database for postgres-parity tests
pnpm run setup:test:persons-parity
# Set up ClickHouse test database and schema
cd .. && TEST=1 SECRET_KEY='abcdef' python manage.py setup_test_environment --only-clickhouse
- name: Set up databases (slow path - run migrations)
if: github.event_name != 'pull_request' || steps.check_migrations.outputs.has_migrations == 'true'
env:
TEST: 'true'
SECRET_KEY: 'abcdef' # unsafe - for testing only
DATABASE_URL: 'postgres://posthog:posthog@localhost:5432/posthog'
BEHAVIORAL_COHORTS_DATABASE_URL: 'postgres://posthog:posthog@localhost:5432/test_behavioral_cohorts'
run: |
pnpm --filter=@posthog/nodejs setup:test
# Create dedicated persons database for postgres-parity tests
cd nodejs && pnpm run setup:test:persons-parity
- name: Test with Jest
env:
# Below DB name has `test_` prepended, as that's how Django (ran above) creates the test DB
DATABASE_URL: 'postgres://posthog:posthog@localhost:5432/test_posthog'
BEHAVIORAL_COHORTS_DATABASE_URL: 'postgres://posthog:posthog@localhost:5432/test_behavioral_cohorts'
PERSONS_DATABASE_URL: 'postgres://posthog:posthog@localhost:5432/test_persons'
PERSONS_READONLY_DATABASE_URL: 'postgres://posthog:posthog@localhost:5432/test_persons'
REDIS_URL: 'redis://localhost'
NODE_OPTIONS: '--max_old_space_size=4096'
SHARD_INDEX: ${{ matrix.shard }}
SHARD_COUNT: 3
LOG_LEVEL: info
# Enable localstack integration tests for DynamoDB/KMS
LOCALSTACK_ENABLED: '1'
run: bin/turbo run test --filter=@posthog/nodejs
- name: Test postgres-parity (isolated DB)
if: (success() || failure()) && matrix.shard == 1
env:
DATABASE_URL: 'postgres://posthog:posthog@localhost:5432/test_posthog'
BEHAVIORAL_COHORTS_DATABASE_URL: 'postgres://posthog:posthog@localhost:5432/test_behavioral_cohorts'
PERSONS_DATABASE_URL: 'postgres://posthog:posthog@localhost:5432/test_persons_parity'
PERSONS_READONLY_DATABASE_URL: 'postgres://posthog:posthog@localhost:5432/test_persons_parity'
REDIS_URL: 'redis://localhost'
NODE_OPTIONS: '--max_old_space_size=4096'
LOG_LEVEL: info
run: cd nodejs && pnpm run test:postgres-parity
- name: Output logs on failure
if: failure()
run: |
docker compose -f docker-compose.dev.yml logs clickhouse
# Echo out the logs stored in the container
docker compose -f docker-compose.dev.yml exec clickhouse cat /var/log/clickhouse-server/clickhouse-server.err.log
node_tests:
needs: [tests, build, lint]
name: Node.js Tests Pass
runs-on: ubuntu-latest
timeout-minutes: 5
if: always()
steps:
- run: exit 0
- name: Check outcomes
run: |
if [[ "${{ needs.tests.result }}" != "success" && "${{ needs.tests.result }}" != "skipped" ]]; then
echo "One or more jobs in the Node.js test matrix failed."
exit 1
fi
echo "All jobs in the Node.js test matrix passed."
if [[ "${{ needs.build.result }}" != "success" && "${{ needs.build.result }}" != "skipped" ]]; then
echo "Node.js build failed."
exit 1
fi
echo "Node.js build passed."
if [[ "${{ needs.lint.result }}" != "success" && "${{ needs.lint.result }}" != "skipped" ]]; then
echo "Node.js lint failed."
exit 1
fi
echo "Node.js lint passed."