feat(replay-vision): API validation + lens_result row column #160130
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
| # | |
| # This workflow runs CI E2E tests with Playwright. | |
| # | |
| # It relies on the container image built by 'container-images-ci.yml'. | |
| # | |
| name: E2E CI Playwright | |
| on: | |
| pull_request: | |
| # `labeled` lets developers opt in via the `run-playwright` label. | |
| # Unrelated label changes are filtered out in the `changes` job. | |
| types: [opened, synchronize, reopened, labeled] | |
| workflow_dispatch: | |
| push: | |
| branches: | |
| - master | |
| permissions: | |
| contents: read | |
| env: | |
| SECRET_KEY: '6b01eee4f945ca25045b5aab440b953461faf08693a9abbf1166dc7c6b9772da' # unsafe - for testing only | |
| REDIS_URL: redis://localhost | |
| DATABASE_URL: postgres://posthog:posthog@localhost:5432/posthog_e2e_test | |
| PERSONS_DB_WRITER_URL: postgres://posthog:posthog@localhost:5432/posthog_persons_e2e_test | |
| KAFKA_HOSTS: kafka:9092 | |
| DISABLE_SECURE_SSL_REDIRECT: 1 | |
| SECURE_COOKIES: 0 | |
| OPT_OUT_CAPTURE: 0 | |
| E2E_TESTING: 1 | |
| SKIP_SERVICE_VERSION_REQUIREMENTS: 1 | |
| EMAIL_HOST: email.test.posthog.net | |
| SITE_URL: http://localhost:8000 | |
| NO_RESTART_LOOP: 1 | |
| OBJECT_STORAGE_ENABLED: 1 | |
| OBJECT_STORAGE_ENDPOINT: http://localhost:19000 | |
| OBJECT_STORAGE_ACCESS_KEY_ID: object_storage_root_user | |
| OBJECT_STORAGE_SECRET_ACCESS_KEY: object_storage_root_password | |
| GITHUB_ACTION_RUN_URL: ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }} | |
| CELERY_METRICS_PORT: 8999 | |
| CLOUD_DEPLOYMENT: E2E | |
| CLICKHOUSE_HOST: 'localhost' | |
| CLICKHOUSE_SECURE: 'False' | |
| CLICKHOUSE_VERIFY: 'False' | |
| CLICKHOUSE_DATABASE: posthog_test | |
| # Database names passed to the plugins Docker service via env var substitution | |
| POSTHOG_DB_NAME: posthog_e2e_test | |
| POSTHOG_PERSONS_DB_NAME: posthog_persons_e2e_test | |
| PGHOST: localhost | |
| PGUSER: posthog | |
| PGPASSWORD: posthog | |
| PGPORT: 5432 | |
| # This is a fake key so the workflow can run for external contributors, who don't have access to secrets (a real key isn't needed here) | |
| OIDC_RSA_PRIVATE_KEY: ${{ vars.OIDC_RSA_FAKE_PRIVATE_KEY }} | |
| OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }} | |
| ANTHROPIC_API_KEY: ${{ secrets.ANTHROPIC_API_KEY }} | |
| GEMINI_API_KEY: ${{ secrets.GEMINI_API_KEY }} | |
| INKEEP_API_KEY: ${{ secrets.INKEEP_API_KEY }} | |
| AZURE_INFERENCE_CREDENTIAL: ${{ secrets.AZURE_INFERENCE_CREDENTIAL }} | |
| AZURE_INFERENCE_ENDPOINT: ${{ secrets.AZURE_INFERENCE_ENDPOINT }} | |
| concurrency: | |
| group: ${{ github.workflow }}-${{ github.head_ref || github.ref }} | |
| cancel-in-progress: ${{ github.event_name == 'pull_request' }} | |
| jobs: | |
| changes: | |
| runs-on: ubuntu-latest | |
| timeout-minutes: 5 | |
| # Run on master push, manual dispatch, and on internal-repo PRs. Skip the | |
| # entire workflow when the only thing that changed is some other label (avoids | |
| # noisy runs on every label add/remove unrelated to Playwright). | |
| if: | | |
| github.event_name == 'push' || | |
| github.event_name == 'workflow_dispatch' || | |
| ( | |
| github.event.pull_request.head.repo.full_name == github.repository && | |
| (github.event.action != 'labeled' || github.event.label.name == 'run-playwright') | |
| ) | |
| name: Determine need to run E2E checks | |
| outputs: | |
| shouldRun: ${{ steps.decide.outputs.shouldRun }} | |
| shouldSuggest: ${{ steps.changes.outputs.shouldSuggest }} | |
| oldest_supported: ${{ steps.read-versions.outputs.oldest_supported }} | |
| schema_cache_key: ${{ steps.schema-key.outputs.key }} | |
| steps: | |
| # fetch-depth=1000 + blob:none mirrors ci-backend / ci-dagster so HEAD^2 | |
| # (PR branch tip) is reachable for the merge-base step below without the | |
| # cost of fetching blobs. | |
| - uses: actions/checkout@v6 | |
| with: | |
| fetch-depth: 1000 | |
| filter: blob:none | |
| clean: false | |
| - uses: actions/create-github-app-token@1b10c78c7865c340bc4f6099eb2f838309f1e8c3 # v3.1.1 | |
| id: app-token | |
| if: github.event_name == 'pull_request' && github.event.pull_request.head.repo.full_name == github.repository | |
| with: | |
| client-id: ${{ secrets.GH_APP_POSTHOG_PATHS_FILTER_APP_ID }} | |
| private-key: ${{ secrets.GH_APP_POSTHOG_PATHS_FILTER_PRIVATE_KEY }} | |
| - uses: dorny/paths-filter@fbd0ab8f3e69293af611ebaee6363fc25e6d187d # v4.0.1 | |
| id: changes | |
| # Skip the path filter when the `run-playwright` label is set or on master push — | |
| # both already imply we want to run. | |
| if: github.event_name == 'pull_request' && !contains(github.event.pull_request.labels.*.name, 'run-playwright') | |
| with: | |
| token: ${{ steps.app-token.outputs.token || github.token }} | |
| filters: | | |
| mustRun: | |
| # Real triggers — only this workflow runs Playwright tests, so changes | |
| # to the tests, page models, utils, snapshots, or this workflow itself | |
| # need to be validated before merge. | |
| - playwright/** | |
| - .github/workflows/ci-e2e-playwright.yml | |
| shouldSuggest: | |
| # Wider net — code that *could* affect E2E behavior. We don't auto-run | |
| # Playwright on these (most failures are flakes, real bugs are caught | |
| # elsewhere or fix-forward from master). Instead the `suggest-label` job | |
| # nudges the author to add the `run-playwright` label if they want a | |
| # full sweep before merging. | |
| - 'ee/**' | |
| - 'posthog/!(temporal/**)/**' | |
| - 'bin/*' | |
| - frontend/**/* | |
| - package.json | |
| - pnpm-lock.yaml | |
| - uv.lock | |
| - .github/clickhouse-versions.json | |
| - docker-compose.dev.yml | |
| - Dockerfile | |
| - name: Decide whether to run Playwright | |
| id: decide | |
| env: | |
| IS_PUSH: ${{ github.event_name == 'push' }} | |
| IS_DISPATCH: ${{ github.event_name == 'workflow_dispatch' }} | |
| HAS_LABEL: ${{ contains(github.event.pull_request.labels.*.name, 'run-playwright') }} | |
| MUST_RUN: ${{ steps.changes.outputs.mustRun }} | |
| run: | | |
| if [[ "$IS_PUSH" == "true" || "$IS_DISPATCH" == "true" || "$HAS_LABEL" == "true" || "$MUST_RUN" == "true" ]]; then | |
| echo "shouldRun=true" >> "$GITHUB_OUTPUT" | |
| else | |
| echo "shouldRun=false" >> "$GITHUB_OUTPUT" | |
| fi | |
| - name: Read ClickHouse versions from JSON | |
| id: read-versions | |
| if: steps.decide.outputs.shouldRun == 'true' | |
| run: | | |
| oldest_supported=$(jq -r '.oldest_supported' .github/clickhouse-versions.json) | |
| if [ -z "$oldest_supported" ] || [ "$oldest_supported" = "null" ]; then | |
| echo "::error::No oldest_supported version found in .github/clickhouse-versions.json" | |
| exit 1 | |
| fi | |
| echo "oldest_supported=$oldest_supported" >> $GITHUB_OUTPUT | |
| - name: Fetch base branch for merge-base computation | |
| if: steps.decide.outputs.shouldRun == 'true' && github.event_name == 'pull_request' | |
| env: | |
| BASE_REF: ${{ github.event.pull_request.base.ref }} | |
| # Scoped, blob-less, no-tags — matches ci-backend / ci-dagster. | |
| # Without an explicit refspec, `git fetch --deepen` would fall back to | |
| # remote.origin.fetch and pull every branch. | |
| run: git fetch --no-tags --depth=1000 --filter=blob:none origin "$BASE_REF:refs/remotes/origin/$BASE_REF" | |
| - name: Compute schema cache key from merge-base | |
| id: schema-key | |
| if: steps.decide.outputs.shouldRun == 'true' && github.event_name == 'pull_request' | |
| env: | |
| BASE_REF: ${{ github.event.pull_request.base.ref }} | |
| run: | | |
| # HEAD is the synthetic merge commit; HEAD^2 is the PR branch tip. | |
| MERGE_BASE=$(git merge-base HEAD^2 "origin/${BASE_REF}" 2>/dev/null || echo "") | |
| if [ -n "$MERGE_BASE" ]; then | |
| echo "key=posthog-schema-master-${MERGE_BASE}" >> $GITHUB_OUTPUT | |
| else | |
| echo "key=" >> $GITHUB_OUTPUT | |
| echo "::notice::merge-base not found (branch too stale?) — schema cache will be skipped" | |
| fi | |
| # Nudge the author to add the `run-playwright` label when their PR touches code | |
| # that *could* affect E2E behavior but doesn't trigger a real run on its own. | |
| # Also handles cleanup: if the PR later qualifies for a real run (label added, | |
| # narrow paths matched), remove the suggestion comment so it doesn't linger. | |
| suggest-label: | |
| name: Suggest run-playwright label | |
| needs: [changes] | |
| if: | | |
| github.event_name == 'pull_request' && ( | |
| needs.changes.outputs.shouldSuggest == 'true' || | |
| needs.changes.outputs.shouldRun == 'true' | |
| ) | |
| runs-on: ubuntu-latest | |
| timeout-minutes: 5 | |
| permissions: | |
| pull-requests: write | |
| steps: | |
| - uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 | |
| env: | |
| SHOULD_RUN: ${{ needs.changes.outputs.shouldRun }} | |
| SHOULD_SUGGEST: ${{ needs.changes.outputs.shouldSuggest }} | |
| HAS_LABEL: ${{ contains(github.event.pull_request.labels.*.name, 'run-playwright') }} | |
| with: | |
| github-token: ${{ secrets.GITHUB_TOKEN }} | |
| script: | | |
| const marker = '<!-- playwright-suggest-label -->'; | |
| const shouldRun = process.env.SHOULD_RUN === 'true'; | |
| const shouldSuggest = process.env.SHOULD_SUGGEST === 'true'; | |
| const hasLabel = process.env.HAS_LABEL === 'true'; | |
| const wantSuggestion = !shouldRun && shouldSuggest && !hasLabel; | |
| const { data: comments } = await github.rest.issues.listComments({ | |
| owner: context.repo.owner, | |
| repo: context.repo.repo, | |
| issue_number: context.issue.number, | |
| }); | |
| const existing = comments.find(c => c.body.includes(marker)); | |
| if (wantSuggestion) { | |
| const body = [ | |
| marker, | |
| "🎭 **Playwright didn't run on this PR** — your changes touch code that *could* affect E2E behavior, but Playwright is opt-in via label now to keep CI cost down.", | |
| "", | |
| "Add the **`run-playwright`** label if you want an E2E sweep before merging — CI will pick it up automatically.", | |
| "", | |
| "_Most PRs don't need this. Real regressions still get caught on master and fix-forward._", | |
| ].join('\n'); | |
| if (existing) { | |
| await github.rest.issues.updateComment({ | |
| owner: context.repo.owner, | |
| repo: context.repo.repo, | |
| comment_id: existing.id, | |
| body, | |
| }); | |
| } else { | |
| await github.rest.issues.createComment({ | |
| owner: context.repo.owner, | |
| repo: context.repo.repo, | |
| issue_number: context.issue.number, | |
| body, | |
| }); | |
| } | |
| } else if (existing) { | |
| await github.rest.issues.deleteComment({ | |
| owner: context.repo.owner, | |
| repo: context.repo.repo, | |
| comment_id: existing.id, | |
| }); | |
| } | |
| playwright: | |
| name: Playwright E2E tests | |
| needs: [changes] | |
| if: needs.changes.outputs.shouldRun == 'true' | |
| # 8-core depot: 18% avg / 30% peak memory observed on 16-core runs (#46853 noted | |
| # the workload "is not resource constrained" even after sharding was removed). | |
| # Talk to #team-devex before changing. | |
| runs-on: depot-ubuntu-latest-8 | |
| timeout-minutes: 45 | |
| outputs: | |
| vr_run_id: ${{ steps.vr-create.outputs.run_id }} | |
| steps: | |
| - uses: actions/checkout@v6 | |
| with: | |
| ref: ${{ github.event.pull_request.head.sha || github.sha }} | |
| repository: ${{ github.event.pull_request.head.repo.full_name || github.repository }} | |
| clean: false | |
| - name: Clean up data directories with container permissions | |
| run: | | |
| # Use docker to clean up files created by containers | |
| [ -d "data" ] && docker run --rm -v "$(pwd)/data:/data" alpine sh -c "rm -rf /data/seaweedfs /data/minio" || true | |
| continue-on-error: true | |
| - name: Stop/Start stack with Docker Compose | |
| shell: bash | |
| run: | | |
| export CLICKHOUSE_SERVER_IMAGE=${{ needs.changes.outputs.oldest_supported }} | |
| cp posthog/user_scripts/latest_user_defined_function.xml docker/clickhouse/user_defined_function.xml | |
| ( | |
| max_attempts=3 | |
| attempt=1 | |
| delay=5 | |
| while [ $attempt -le $max_attempts ]; do | |
| echo "Attempt $attempt of $max_attempts to start stack..." | |
| if docker compose -f docker-compose.dev.yml down && \ | |
| docker compose -f docker-compose.dev.yml -f docker-compose.profiles.yml up -d; then | |
| echo "Stack started successfully" | |
| exit 0 | |
| fi | |
| echo "Failed to start stack on attempt $attempt" | |
| if [ $attempt -lt $max_attempts ]; then | |
| sleep_time=$((delay * 2 ** (attempt - 1))) | |
| echo "Waiting ${sleep_time} seconds before retry..." | |
| sleep $sleep_time | |
| fi | |
| attempt=$((attempt + 1)) | |
| done | |
| echo "Failed to start stack after $max_attempts attempts" | |
| exit 1 | |
| ) & | |
| - name: Add service hostnames to /etc/hosts | |
| shell: bash | |
| run: echo "127.0.0.1 db redis7 kafka clickhouse clickhouse-coordinator objectstorage seaweedfs temporal" | sudo tee -a /etc/hosts | |
| - name: Set up Python | |
| uses: actions/setup-python@a309ff8b426b58ec0e2a45f0f869d46889d02405 # v6.2.0 | |
| with: | |
| python-version: 3.12.12 | |
| - name: Install uv | |
| id: setup-uv | |
| uses: astral-sh/setup-uv@eac588ad8def6316056a12d4907a9d4d84ff7a3b # v7.3.0 | |
| with: | |
| version: '0.10.2' # pinned: unpinned setup-uv calls GH API on every job, exhausts rate limit | |
| enable-cache: true | |
| cache-dependency-glob: uv.lock | |
| save-cache: ${{ github.ref == 'refs/heads/master' }} | |
| - name: Determine if hogql-parser has changed compared to master | |
| shell: bash | |
| id: hogql-parser-diff | |
| run: | | |
| git fetch --no-tags --prune --depth=1 origin master | |
| changed=$(git diff --quiet HEAD origin/master -- common/hogql_parser/ && echo "false" || echo "true") | |
| echo "changed=$changed" >> $GITHUB_OUTPUT | |
| - name: Install SAML (python3-saml) dependencies | |
| if: steps.setup-uv.outputs.cache-hit != 'true' | |
| shell: bash | |
| run: | | |
| sudo apt-get update && sudo apt-get install libxml2-dev libxmlsec1-dev libxmlsec1-openssl | |
| - name: Install pnpm | |
| uses: pnpm/action-setup@fc06bc1257f339d1d5d8b3a19a8cae5388b55320 # v5.0.0 | |
| - name: Fix node-gyp permissions | |
| run: chmod +x ~/setup-pnpm/node_modules/.pnpm/pnpm@*/node_modules/pnpm/dist/node_modules/node-gyp/gyp/gyp_main.py | |
| - name: Set up Node.js | |
| uses: actions/setup-node@6044e13b5dc448c55e2357c09f80417699197238 # v6.2.0 | |
| with: | |
| node-version-file: .nvmrc | |
| cache: pnpm | |
| cache-dependency-path: | | |
| pnpm-lock.yaml | |
| .github/workflows/ci-e2e-playwright.yml | |
| # tests would intermittently fail in GH actions | |
| # with exit code 134 _after passing_ all tests | |
| # this appears to fix it | |
| # absolute wild tbh https://stackoverflow.com/a/75503402 | |
| - uses: tlambert03/setup-qt-libs@19e4ef2d781d81f5f067182e228b54ec90d23b76 # v1.8 | |
| - name: Install plugin_transpiler | |
| shell: bash | |
| run: | | |
| pnpm --filter=@posthog/plugin-transpiler... install --frozen-lockfile | |
| bin/turbo --filter=@posthog/plugin-transpiler build | |
| - name: Install Python dependencies | |
| shell: bash | |
| run: | | |
| UV_PROJECT_ENVIRONMENT=$pythonLocation uv sync --frozen --dev | |
| - name: Install the working version of hogql-parser | |
| if: needs.changes.outputs.shouldRun == 'true' && steps.hogql-parser-diff.outputs.changed == 'true' | |
| shell: bash | |
| # This is not cached currently, as it's important to build the current HEAD version of hogql-parser if it has | |
| # changed (requirements.txt has the already-published version) | |
| run: | | |
| sudo apt-get install unzip cmake curl uuid pkg-config | |
| curl --fail --location https://www.antlr.org/download/antlr4-cpp-runtime-4.13.1-source.zip --output antlr4-source.zip || curl --fail --location https://raw.githubusercontent.com/antlr/website-antlr4/gh-pages/download/antlr4-cpp-runtime-4.13.1-source.zip --output antlr4-source.zip | |
| # Check that the downloaded archive is the expected runtime - a security measure | |
| antlr_known_md5sum="c875c148991aacd043f733827644a76f" | |
| antlr_found_md5sum="$(md5sum antlr4-source.zip | cut -d' ' -f1)" | |
| if [[ "$antlr_known_md5sum" != "$antlr_found_md5sum" ]]; then | |
| echo "Unexpected MD5 sum of antlr4-source.zip!" | |
| echo "Known: $antlr_known_md5sum" | |
| echo "Found: $antlr_found_md5sum" | |
| exit 64 | |
| fi | |
| unzip antlr4-source.zip -d antlr4-source && cd antlr4-source | |
| cmake . | |
| DESTDIR=out make install | |
| sudo cp -r out/usr/local/include/antlr4-runtime /usr/include/ | |
| sudo cp out/usr/local/lib/libantlr4-runtime.so* /usr/lib/ | |
| sudo ldconfig | |
| cd .. | |
| pip install ./common/hogql_parser | |
| - name: Set up needed files | |
| shell: bash | |
| run: | | |
| mkdir -p frontend/dist | |
| touch frontend/dist/index.html | |
| touch frontend/dist/layout.html | |
| touch frontend/dist/exporter.html | |
| ./bin/download-mmdb | |
| - name: Install package.json dependencies with pnpm | |
| run: | | |
| pnpm --filter=@posthog/playwright... install --frozen-lockfile | |
| bin/turbo --filter=@posthog/frontend prepare | |
| - name: Start Docker services | |
| env: | |
| COMPOSE_FILE: docker-compose.dev.yml:docker-compose.profiles.yml | |
| COMPOSE_PROFILES: capture,temporal | |
| run: bin/ci-wait-for-docker launch --down | |
| - name: Wait for Docker services | |
| env: | |
| COMPOSE_FILE: docker-compose.dev.yml:docker-compose.profiles.yml | |
| COMPOSE_PROFILES: capture,temporal | |
| run: bin/ci-wait-for-docker wait capture temporal | |
| - name: Build frontend | |
| run: | | |
| pnpm --filter=@posthog/frontend... install --frozen-lockfile | |
| pnpm --filter=@posthog/frontend build:products | |
| pnpm --filter=@posthog/frontend build | |
| - name: Collect static files | |
| run: | | |
| # KLUDGE: to get the image-bitmap-data-url-worker-*.js.map files into the dist folder | |
| # KLUDGE: rrweb thinks they're alongside and the django's collectstatic fails | |
| cp frontend/node_modules/@posthog/rrweb/dist/image-bitmap-data-url-worker-*.js.map frontend/dist/ && python manage.py collectstatic --noinput | |
| - name: Create test database | |
| shell: bash | |
| run: | | |
| createdb posthog_e2e_test || echo "Database already exists" | |
| run_clickhouse_query() { | |
| local query="$1" | |
| for attempt in {1..10}; do | |
| if printf '%s' "$query" | curl --silent --show-error --fail 'http://localhost:8123/' --data-binary @-; then | |
| echo | |
| return 0 | |
| fi | |
| echo "ClickHouse query failed on attempt ${attempt}/10, retrying in 3s..." | |
| sleep 3 | |
| done | |
| echo "ClickHouse query failed after 10 attempts: $query" >&2 | |
| return 1 | |
| } | |
| # Drop and recreate clickhouse test database. The HTTP endpoint can briefly | |
| # reset connections while the container is still settling after startup. | |
| run_clickhouse_query 'SELECT 1' | |
| run_clickhouse_query 'DROP DATABASE IF EXISTS posthog_test SYNC' | |
| run_clickhouse_query 'CREATE DATABASE posthog_test' | |
| - name: Cache Rust dependencies | |
| uses: Swatinem/rust-cache@c19371144df3bb44fab255c43d04cbc2ab54d1c4 # v2.9.1 | |
| with: | |
| shared-key: 'v2-rust-backend' | |
| workspaces: rust | |
| save-if: ${{ github.ref == 'refs/heads/master' }} | |
| - name: Install sqlx-cli | |
| run: cargo install sqlx-cli --version 0.8.0 --features postgres --no-default-features --locked | |
| - name: Restore schema cache from master | |
| # Cache key is the merge-base SHA — produced by ci-backend on master push. | |
| # Cache miss falls through to a full migrate below. | |
| if: github.event_name == 'pull_request' && needs.changes.outputs.schema_cache_key != '' | |
| uses: actions/cache/restore@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5 | |
| id: schema-cache | |
| with: | |
| path: schema.sql.gz | |
| key: ${{ needs.changes.outputs.schema_cache_key }} | |
| - name: Prime posthog_e2e_test from cached schema | |
| # Schema dump is from master's `posthog` db but schemas are db-name agnostic. | |
| # Hogli's db:restore-schema-fresh DROPs+CREATEs the target db, restores the | |
| # schema, and runs ensure_migration_defaults to seed RunPython data that's | |
| # missing from a schema-only dump. The migrate step below still runs and | |
| # layers any PR-added migrations on top — the cache just skips the bulk of | |
| # the historical migration work. | |
| if: steps.schema-cache.outputs.cache-hit == 'true' | |
| env: | |
| TARGET_DB: posthog_e2e_test | |
| run: | | |
| mkdir -p .postgres-backups | |
| mv schema.sql.gz .postgres-backups/schema-latest.sql.gz | |
| ./bin/hogli db:restore-schema-fresh | |
| - name: Apply postgres and clickhouse migrations and setup dev | |
| run: | | |
| # Postgres migrations must run first — ClickHouse migration 0026 | |
| # depends on the posthog_instancesetting table in Postgres. | |
| # On cache hit this is near no-op; on cache miss it's a full migrate. | |
| # Either way, any PR-added migrations get applied here. | |
| python manage.py migrate --noinput | |
| # Run Rust migrations for persons e2e test database | |
| PERSONS_DATABASE_URL="postgres://posthog:posthog@localhost:5432/posthog_persons_e2e_test" | |
| sqlx database create -D "$PERSONS_DATABASE_URL" | |
| sqlx migrate run -D "$PERSONS_DATABASE_URL" --source rust/persons_migrations/ | |
| python manage.py migrate_clickhouse 2>&1 | |
| python manage.py setup_dev | |
| - name: Source celery queues | |
| run: | | |
| source ./bin/celery-queues.env | |
| echo "CELERY_WORKER_QUEUES=$CELERY_WORKER_QUEUES" >> $GITHUB_ENV | |
| - name: Resolve Node.js container image tag | |
| id: node-image | |
| run: | | |
| # ci-nodejs-container.yml tags images as pr-<number> for PRs | |
| if [ -n "${{ github.event.pull_request.number }}" ]; then | |
| TAG="pr-${{ github.event.pull_request.number }}" | |
| else | |
| TAG="${{ github.sha }}" | |
| fi | |
| if docker manifest inspect "ghcr.io/posthog/posthog-node:${TAG}" > /dev/null 2>&1; then | |
| echo "posthog-node image found: ${TAG}" | |
| echo "POSTHOG_NODE_TAG=${TAG}" >> "$GITHUB_ENV" | |
| else | |
| echo "posthog-node image not found for ${TAG}, using master" | |
| echo "POSTHOG_NODE_TAG=master" >> "$GITHUB_ENV" | |
| fi | |
| - name: Start PostHog web, Celery worker, Temporal worker & ingestion | |
| run: | | |
| python manage.py run_autoreload_celery --type=worker &> /tmp/celery.log & | |
| python manage.py start_temporal_worker --task-queue analytics-platform-task-queue &> /tmp/temporal-worker.log & | |
| # WARNING: Worker count is tuned to avoid CPU scheduling contention. Talk to #team-devex before changing. | |
| python -m granian --interface asgi posthog.asgi:application --host 0.0.0.0 --port 8000 --log-level debug --workers 2 &> /tmp/server.log & | |
| # Start the Node.js containers now that the database exists and migrations have run. | |
| # plugins: CDP (no mode = default capabilities) | |
| # ingestion-general: event ingestion (ingestion-v2-combined mode) | |
| # ingestion-sessionreplay: session replay ingestion (recordings-blob-ingestion-v2 mode) | |
| # recording-api: session replay API (recording-api mode) | |
| # ingestion-error-tracking: error tracking ingestion (ingestion-errortracking mode) | |
| # ingestion-logs: logs ingestion (ingestion-logs mode) | |
| # ingestion-traces: traces ingestion (ingestion-traces mode) | |
| COMPOSE_FILE=docker-compose.dev.yml COMPOSE_PROFILES=capture,ingestion bin/ci-wait-for-docker launch plugins ingestion-general ingestion-sessionreplay recording-api ingestion-error-tracking ingestion-logs ingestion-traces | |
| # Install Playwright browsers while we wait for PostHog to be ready | |
| - name: Install Playwright browsers | |
| run: pnpm --filter=@posthog/playwright exec playwright install chromium --with-deps | |
| - name: Wait for PostHog to be ready | |
| uses: iFaxity/wait-on-action@1fe019e0475491e9e8c4f421b6914ccc3ed8f99c # v1.2.1 | |
| with: | |
| resource: http://localhost:8000 | |
| timeout: 180000 | |
| interval: 2000 | |
| verbose: true | |
| - name: Wait for node services to be ready | |
| env: | |
| COMPOSE_FILE: docker-compose.dev.yml | |
| COMPOSE_PROFILES: capture,ingestion | |
| run: bin/ci-wait-for-docker wait plugins ingestion-general ingestion-sessionreplay recording-api ingestion-error-tracking ingestion-logs ingestion-traces | |
| - name: Clean snapshot directory | |
| run: find playwright/__snapshots__ -name '*.png' -delete 2>/dev/null || true | |
| - name: Run Playwright tests | |
| id: playwright-tests | |
| shell: bash | |
| # WARNING: Worker count is tuned to avoid CPU scheduling contention. Talk to #team-devex before changing. | |
| # Reduced from 6+4 to 4+2 to minimize CPU scheduling contention (see PR #46853) | |
| # Capture-only: VR is the gate for visual changes, Playwright just captures screenshots | |
| run: | | |
| pnpm --filter=@posthog/playwright exec playwright test --workers=4 --max-failures=5 --update-snapshots | |
| - name: Verify changed Playwright tests are stable | |
| if: success() && github.event_name == 'pull_request' | |
| shell: bash | |
| run: | | |
| BASE_SHA="${{ github.event.pull_request.base.sha }}" | |
| git fetch --no-tags --prune --depth=50 origin "$BASE_SHA" | |
| .github/scripts/verify-playwright-new-tests-and-snapshots.sh "$BASE_SHA" 10 | |
| # Visual Review: create run + upload snapshots directly from the test job. | |
| # Completion happens in handle-screenshots so the baseline lands in the same commit as PNGs. | |
| - name: Install VR CLI | |
| if: always() && (github.event.pull_request.head.repo.full_name == github.repository || github.event_name == 'push') | |
| run: cd products/visual_review/cli && npm ci && npm run build && npm link | |
| - name: Create VR run | |
| id: vr-create | |
| if: always() && (github.event.pull_request.head.repo.full_name == github.repository || github.event_name == 'push') | |
| env: | |
| VR_TOKEN: ${{ secrets.VR_API_TOKEN }} | |
| VR_BRANCH: ${{ github.event.pull_request.head.ref || github.ref_name }} | |
| VR_COMMIT: ${{ github.event.pull_request.head.sha || github.sha }} | |
| VR_PR: ${{ github.event.pull_request.number }} | |
| # PRs are gating ("review"); master pushes are tracking-only ("observe") since | |
| # there's no PR to approve and we don't want master runs to block or prompt for approval. | |
| VR_PURPOSE: ${{ github.event_name == 'push' && 'observe' || 'review' }} | |
| run: | | |
| RUN_ID=$(vr run create \ | |
| --type playwright \ | |
| --baseline playwright/snapshots.yml \ | |
| --branch "$VR_BRANCH" \ | |
| --commit "$VR_COMMIT" \ | |
| --pr "$VR_PR" \ | |
| --purpose "$VR_PURPOSE" \ | |
| --token "$VR_TOKEN") | |
| echo "run_id=$RUN_ID" >> $GITHUB_OUTPUT | |
| - name: Upload snapshots to Visual Review | |
| if: always() && steps.vr-create.outputs.run_id != '' | |
| env: | |
| VR_TOKEN: ${{ secrets.VR_API_TOKEN }} | |
| VR_RUN_ID: ${{ steps.vr-create.outputs.run_id }} | |
| run: | | |
| vr run upload \ | |
| --run-id "$VR_RUN_ID" \ | |
| --dir playwright/__snapshots__/ \ | |
| --baseline playwright/snapshots.yml \ | |
| --token "$VR_TOKEN" | |
| # ── Artifacts on failure / always ───────────────────────────────────── | |
| - name: Capture docker logs | |
| if: always() | |
| run: | | |
| mkdir -p playwright/test-results | |
| docker logs posthog-proxy-1 > playwright/test-results/docker-proxy.log 2>&1 || echo "No proxy container" > playwright/test-results/docker-proxy.log | |
| docker logs posthog-capture-1 > playwright/test-results/docker-capture.log 2>&1 || echo "No capture container" > playwright/test-results/docker-capture.log | |
| docker logs posthog-plugins-1 > playwright/test-results/docker-plugins.log 2>&1 || echo "No plugins container" > playwright/test-results/docker-plugins.log | |
| docker logs posthog-ingestion-general-1 > playwright/test-results/docker-ingestion-general.log 2>&1 || echo "No ingestion-general container" > playwright/test-results/docker-ingestion-general.log | |
| docker logs posthog-ingestion-sessionreplay-1 > playwright/test-results/docker-ingestion-sessionreplay.log 2>&1 || echo "No ingestion-sessionreplay container" > playwright/test-results/docker-ingestion-sessionreplay.log | |
| docker logs posthog-recording-api-1 > playwright/test-results/docker-recording-api.log 2>&1 || echo "No recording-api container" > playwright/test-results/docker-recording-api.log | |
| docker logs posthog-ingestion-error-tracking-1 > playwright/test-results/docker-ingestion-error-tracking.log 2>&1 || echo "No ingestion-error-tracking container" > playwright/test-results/docker-ingestion-error-tracking.log | |
| docker logs posthog-ingestion-logs-1 > playwright/test-results/docker-ingestion-logs.log 2>&1 || echo "No ingestion-logs container" > playwright/test-results/docker-ingestion-logs.log | |
| docker logs posthog-ingestion-traces-1 > playwright/test-results/docker-ingestion-traces.log 2>&1 || echo "No ingestion-traces container" > playwright/test-results/docker-ingestion-traces.log | |
| - name: Archive test artifacts | |
| if: always() | |
| uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0 | |
| with: | |
| name: playwright-test-results | |
| path: | | |
| playwright/playwright-report/ | |
| playwright/playwright-report-attempt-*/ | |
| playwright/test-results/ | |
| playwright/test-results-attempt-*/ | |
| /tmp/celery.log | |
| /tmp/server.log | |
| /tmp/temporal-worker.log | |
| /tmp/playwright-output-attempt-*.log | |
| retention-days: 30 | |
| if-no-files-found: ignore | |
| - name: Upload test results | |
| if: always() | |
| uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0 | |
| with: | |
| name: junit-results-playwright | |
| path: playwright/junit-results.xml | |
| if-no-files-found: ignore | |
| - name: Publish report to Cloudflare Pages | |
| if: always() && (github.event_name == 'push' || github.event.pull_request.head.repo.full_name == github.repository) | |
| id: cf-deploy | |
| continue-on-error: true | |
| env: | |
| CLOUDFLARE_API_TOKEN: ${{ secrets.CLOUDFLARE_API_TOKEN }} | |
| CLOUDFLARE_ACCOUNT_ID: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }} | |
| run: | | |
| BRANCH="${{ github.event_name == 'pull_request' && format('pr-{0}', github.event.number) || 'master' }}" | |
| npx --yes wrangler@3 pages deploy playwright/playwright-report \ | |
| --project-name=playwright-report \ | |
| --branch="$BRANCH" \ | |
| --commit-dirty=true 2>&1 | tee /tmp/wrangler-output.txt | |
| URL=$(grep -oP 'https://\S+\.pages\.dev' /tmp/wrangler-output.txt | tail -1 || true) | |
| if [ -n "$URL" ]; then | |
| echo "deployment-url=$URL" >> $GITHUB_OUTPUT | |
| fi | |
| - name: Write report URL to job summary | |
| if: always() && steps.cf-deploy.outputs.deployment-url != '' | |
| env: | |
| DEPLOYMENT_URL: ${{ steps.cf-deploy.outputs.deployment-url }} | |
| run: | | |
| echo "## 🎭 Playwright report" >> $GITHUB_STEP_SUMMARY | |
| echo "" >> $GITHUB_STEP_SUMMARY | |
| echo "**Report URL:** $DEPLOYMENT_URL" >> $GITHUB_STEP_SUMMARY | |
| echo "" >> $GITHUB_STEP_SUMMARY | |
| echo "- Commit: \`${{ github.sha }}\`" >> $GITHUB_STEP_SUMMARY | |
| echo "- Run: [#${{ github.run_number }}](${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }})" >> $GITHUB_STEP_SUMMARY | |
# Maintain a single "sticky" PR comment (identified by `marker`) listing
# failed/flaky tests and the report URL; the comment is deleted once a later
# run is fully green. Fork PRs are excluded (GITHUB_TOKEN on forks cannot
# write comments). Fixes vs. previous revision: removed the unused
# `jobPassed` variable, made the marker search null-safe, and raised the
# comment page size so the sticky comment is found on busy PRs.
- name: Upsert report URL comment on PR
  if: always() && github.event_name == 'pull_request' && github.event.pull_request.head.repo.full_name == github.repository
  continue-on-error: true
  uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
  env:
    DEPLOYMENT_URL: ${{ steps.cf-deploy.outputs.deployment-url }}
  with:
    github-token: ${{ secrets.GITHUB_TOKEN }}
    script: |
      const fs = require('fs');
      // Hidden HTML marker used to find our own comment among all PR comments.
      const marker = '<!-- playwright-report-comment -->';
      const reportUrl = process.env.DEPLOYMENT_URL;
      // Collect failed and flaky tests from JSON report — only populated when
      // results.json exists (i.e. tests actually ran). If the file is missing
      // (setup failure, cancelled, etc.) extraLines stays empty and we treat
      // it the same as "no failures found", so we don't delete an existing
      // comment that may have relevant info from a previous run.
      let extraLines = '';
      let resultsRead = false;
      let failed = [];
      try {
        const results = JSON.parse(fs.readFileSync('playwright/results.json', 'utf8'));
        resultsRead = true;
        const flaky = [];
        // Suites nest recursively in Playwright's JSON report; walk the tree
        // and bucket every per-project test outcome.
        function collect(suites) {
          for (const suite of suites || []) {
            for (const spec of suite.specs || []) {
              for (const test of spec.tests || []) {
                if (test.status === 'unexpected') {
                  failed.push(`- ${spec.title} (${test.projectName})`);
                } else if (test.status === 'flaky') {
                  flaky.push(`- ${spec.title} (${test.projectName})`);
                }
              }
            }
            collect(suite.suites);
          }
        }
        collect(results.suites);
        if (failed.length > 0) {
          extraLines += `\n\n❌ **${failed.length} failed test${failed.length > 1 ? 's' : ''}:**\n${failed.join('\n')}`;
        }
        if (flaky.length > 0) {
          extraLines += `\n\n⚠️ **${flaky.length} flaky test${flaky.length > 1 ? 's' : ''}:**\n${flaky.join('\n')}`;
        }
      } catch {}
      // Flake verification results for changed test files
      try {
        const flakeResults = JSON.parse(fs.readFileSync('playwright/flake-verification-results.json', 'utf8'));
        if (flakeResults.status === 'failed') {
          const fileList = flakeResults.files.map(f => `- \`${f}\``).join('\n');
          const flakeReportLink = reportUrl ? ` [View report →](${reportUrl})` : '';
          extraLines += `\n\n🔁 **Flake verification failed** (--repeat-each=${flakeResults.repeat_count}):\n${fileList}\n\nThe report only shows the tests under verification.${flakeReportLink} Fix these before merging.`;
        }
      } catch {}
      // per_page: 100 (API max) so the marker comment isn't missed when a PR
      // has more comments than the default page size of 30.
      const { data: comments } = await github.rest.issues.listComments({
        owner: context.repo.owner,
        repo: context.repo.repo,
        issue_number: context.issue.number,
        per_page: 100,
      });
      // `body` can be null/absent on some comments — guard before searching.
      const existing = comments.find(c => (c.body || '').includes(marker));
      // No failures or flakies — nothing to comment about
      if (!extraLines) {
        // Clean up any existing comment, but only when we know tests actually ran and passed
        if (resultsRead && existing) {
          await github.rest.issues.deleteComment({
            owner: context.repo.owner,
            repo: context.repo.repo,
            comment_id: existing.id,
          });
        }
        return;
      }
      const reportLink = reportUrl ? ` · [View test results →](${reportUrl})` : '';
      const footer = '\n\n\n*These issues are not necessarily caused by your changes.*\n*Annoyed by this comment? Help fix flakies and failures and it\'ll disappear!*';
      const body = `${marker}\n🎭 Playwright report${reportLink}${extraLines}${footer}`;
      if (existing) {
        await github.rest.issues.updateComment({
          owner: context.repo.owner,
          repo: context.repo.repo,
          comment_id: existing.id,
          body,
        });
      } else {
        await github.rest.issues.createComment({
          owner: context.repo.owner,
          repo: context.repo.repo,
          issue_number: context.issue.number,
          body,
        });
      }
# Telemetry-only job: reports workflow run/job durations to PostHog.
# Depends on the terminal jobs so the captured durations cover the full run.
capture-run-time:
  name: Capture run time
  runs-on: ubuntu-latest
  timeout-minutes: 5
  needs: [changes, playwright, playwright_tests]
  if: # Run on pull requests to PostHog/posthog + on PostHog/posthog outside of PRs - but never on forks or Dependabot (no secrets access)
    always() && github.actor != 'dependabot[bot]' &&
    needs.changes.outputs.shouldRun == 'true' && (
    (github.event_name == 'pull_request' && github.event.pull_request.head.repo.full_name == 'PostHog/posthog') ||
    (github.event_name != 'pull_request' && github.repository == 'PostHog/posthog'))
  steps:
    # Mint a short-lived GitHub App token for the telemetry action. All steps
    # are gated on run_attempt == '1' so re-run attempts don't skew metrics.
    - name: Get telemetry app token
      id: telemetry-app-token
      if: github.run_attempt == '1'
      uses: actions/create-github-app-token@1b10c78c7865c340bc4f6099eb2f838309f1e8c3 # v3.1.1
      with:
        client-id: ${{ secrets.GH_APP_TELEMETRY_APP_ID }}
        private-key: ${{ secrets.GH_APP_TELEMETRY_PRIVATE_KEY }}
    # Primary capture: emits run + per-job durations to the main PostHog project.
    - name: Capture running time to PostHog
      if: github.run_attempt == '1'
      uses: PostHog/posthog-github-action@58dea254b598fb5d469c0699c98af8288a7f7650 # v1.2.0
      with:
        posthog-token: ${{ secrets.POSTHOG_API_TOKEN }}
        event: 'posthog-ci-running-time'
        capture-run-duration: true
        capture-job-durations: true
        github-token: ${{ steps.telemetry-app-token.outputs.token }}
        status-job: 'Playwright tests pass'
        runner: 'depot'
    # Duplicate capture to the DevEx project; best-effort only
    # (continue-on-error), so DevEx outages never fail CI.
    - name: Capture running time to DevEx PostHog
      if: github.run_attempt == '1'
      continue-on-error: true
      uses: PostHog/posthog-github-action@58dea254b598fb5d469c0699c98af8288a7f7650 # v1.2.0
      with:
        posthog-token: ${{ secrets.POSTHOG_DEVEX_PROJECT_API_TOKEN }}
        event: 'posthog-ci-running-time'
        capture-run-duration: true
        capture-job-durations: true
        github-token: ${{ steps.telemetry-app-token.outputs.token }}
        status-job: 'Playwright tests pass'
        runner: 'depot'
# Marks the Visual Review run as complete once the Playwright job succeeds.
# Only runs when the playwright job actually opened a VR run (vr_run_id set).
vr-complete:
  name: Complete Visual Review run
  runs-on: ubuntu-latest
  timeout-minutes: 15
  needs: [playwright]
  if: needs.playwright.outputs.vr_run_id != '' && needs.playwright.result == 'success'
  steps:
    # NOTE(review): every other action in this workflow is pinned to a full
    # commit SHA; consider pinning checkout the same way for consistency.
    - uses: actions/checkout@v6
      with:
        ref: ${{ github.event.pull_request.head.sha || github.sha }}
        repository: ${{ github.event.pull_request.head.repo.full_name || github.repository }}
        # Only the VR CLI and its inputs are needed — keep the checkout minimal.
        sparse-checkout: |
          .nvmrc
          products/visual_review/cli
          products/visual_review/frontend/generated/api.schemas.ts
          playwright/snapshots.yml
        sparse-checkout-cone-mode: false
    - name: Set up Node.js
      uses: actions/setup-node@6044e13b5dc448c55e2357c09f80417699197238 # v6.2.0
      with:
        node-version-file: .nvmrc
    - name: Install VR CLI
      run: cd products/visual_review/cli && npm ci && npm run build && npm link
    - name: Complete Visual Review run
      env:
        VR_TOKEN: ${{ secrets.VR_API_TOKEN }}
        # Pass the run id through the environment instead of interpolating the
        # expression directly into the script body — GitHub's hardening guidance
        # recommends this to prevent script injection via job outputs.
        VR_RUN_ID: ${{ needs.playwright.outputs.vr_run_id }}
      run: |
        vr run complete \
          --run-id "$VR_RUN_ID" \
          --baseline playwright/snapshots.yml \
          --token "$VR_TOKEN"
# Collate test + VR completion status for the required check
playwright_tests:
  needs: [playwright, vr-complete]
  name: Playwright tests pass
  runs-on: ubuntu-latest
  timeout-minutes: 5
  if: always()
  steps:
    # Gate step: passes only when each dependency either succeeded or was
    # legitimately skipped; any failure/cancellation fails the required check.
    - name: Check outcome
      run: |
        case "${{ needs.playwright.result }}" in
          success | skipped) ;;
          *)
            echo "Playwright tests failed."
            exit 1
            ;;
        esac
        case "${{ needs.vr-complete.result }}" in
          success | skipped) ;;
          *)
            echo "Visual Review did not complete successfully (result: ${{ needs.vr-complete.result }})."
            exit 1
            ;;
        esac