fix(projects): Clicking on New... should not fold/unfold the folder (#54965)
Workflow file for this run
#
# This workflow runs CI E2E tests with Playwright.
#
# It relies on the container image built by 'container-images-ci.yml'.
#
name: E2E CI Playwright
on:
    pull_request:
    workflow_dispatch:
env:
    SECRET_KEY: '6b01eee4f945ca25045b5aab440b953461faf08693a9abbf1166dc7c6b9772da' # unsafe - for testing only
    REDIS_URL: redis://localhost
    DATABASE_URL: postgres://posthog:posthog@localhost:5432/posthog_e2e_test
    KAFKA_HOSTS: kafka:9092
    DISABLE_SECURE_SSL_REDIRECT: 1
    SECURE_COOKIES: 0
    OPT_OUT_CAPTURE: 0
    E2E_TESTING: 1
    SKIP_SERVICE_VERSION_REQUIREMENTS: 1
    EMAIL_HOST: email.test.posthog.net
    SITE_URL: http://localhost:8000
    NO_RESTART_LOOP: 1
    OBJECT_STORAGE_ENABLED: 1
    OBJECT_STORAGE_ENDPOINT: http://localhost:19000
    OBJECT_STORAGE_ACCESS_KEY_ID: object_storage_root_user
    OBJECT_STORAGE_SECRET_ACCESS_KEY: object_storage_root_password
    GITHUB_ACTION_RUN_URL: ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}
    CELERY_METRICS_PORT: 8999
    CLOUD_DEPLOYMENT: E2E
    CLICKHOUSE_HOST: 'localhost'
    CLICKHOUSE_SECURE: 'False'
    CLICKHOUSE_VERIFY: 'False'
    CLICKHOUSE_DATABASE: posthog_test
    PGHOST: localhost
    PGUSER: posthog
    PGPASSWORD: posthog
    PGPORT: 5432
concurrency:
    group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}
    # This is so that the workflow run isn't canceled when a screenshot update is pushed within it by posthog-bot
    # We do however cancel from container-images-ci.yml if a commit is pushed by someone OTHER than posthog-bot
    cancel-in-progress: false
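    # For example, a pull_request run from branch "my-branch" uses the group "E2E CI Playwright-my-branch";
    # a workflow_dispatch run has no head_ref, so the group falls back to the run ID.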
jobs:
    changes:
        runs-on: ubuntu-latest
        timeout-minutes: 5
        if: github.event.pull_request.head.repo.full_name == github.repository
        name: Determine need to run E2E checks
        # Set job outputs to values from filter step
        outputs:
            shouldRun: ${{ steps.changes.outputs.shouldRun }}
        steps:
            # For pull requests it's not necessary to check out the code
            - uses: dorny/paths-filter@4512585405083f25c027a35db413c2b3b9006d50 # v2
              id: changes
              with:
                  filters: |
                      shouldRun:
                          # Avoid running E2E tests for irrelevant changes
                          # NOTE: we are at risk of missing a dependency here. We could make
                          # the dependencies more clear if we separated the backend/frontend
                          # code completely
                          - 'ee/**'
                          - 'posthog/!(temporal/**)/**'
                          - 'bin/*'
                          - frontend/**/*
                          - requirements.txt
                          - requirements-dev.txt
                          - package.json
                          - pnpm-lock.yaml
                          # Make sure we run if someone explicitly changes the workflow
                          - .github/workflows/ci-e2e-playwright.yml
                          - .github/actions/build-n-cache-image/action.yml
                          # We use docker compose for tests, make sure we rerun on
                          # changes to docker-compose.dev.yml e.g. dependency
                          # version changes
                          - docker-compose.dev.yml
                          - Dockerfile
                          - playwright/**
    container:
        name: (Optional) Container - Build and cache image
        # run these on 4 cores - if they're RAM-constrained the FE build will fail randomly in the Docker build
        runs-on: ubuntu-latest
        timeout-minutes: 60
        needs: [changes]
        if: needs.changes.outputs.shouldRun == 'true'
        permissions:
            contents: read
            id-token: write # allow issuing OIDC tokens for this workflow run
        outputs:
            tag: ${{ steps.build.outputs.tag }}
            build-id: ${{ steps.build.outputs.build-id }}
        steps:
            - name: Checkout
              uses: actions/checkout@v4
            - name: Build the Docker image with Depot
              # Build the container image in preparation for the E2E tests
              uses: ./.github/actions/build-n-cache-image
              id: build
              with:
                  save: true
                  actions-id-token-request-url: ${{ env.ACTIONS_ID_TOKEN_REQUEST_URL }}
                  no-cache: ${{ contains(github.event.pull_request.labels.*.name, 'no-depot-docker-cache') }}
    playwright-on-container:
        name: (Optional) Container - E2E Playwright tests
        runs-on: depot-ubuntu-latest-arm-4
        timeout-minutes: 60
        needs: [changes, container]
        if: needs.changes.outputs.shouldRun == 'true'
        permissions:
            id-token: write # allow issuing OIDC tokens for this workflow run
        steps:
            - name: Checkout
              uses: actions/checkout@v4
            - name: Install pnpm
              uses: pnpm/action-setup@a7487c7e89a18df4991f7f222e4898a00d66ddda # v4
            - name: Set up Node.js
              uses: actions/setup-node@v4
              with:
                  node-version: 22.17.1
                  cache: 'pnpm'
            - name: Get pnpm cache directory path
              id: pnpm-cache-dir
              run: echo "PNPM_STORE_PATH=$(pnpm store path)" >> $GITHUB_OUTPUT
            - uses: actions/cache@v4
              id: pnpm-cache
              with:
                  path: |
                      ${{ steps.pnpm-cache-dir.outputs.PNPM_STORE_PATH }}
                  key: ${{ runner.os }}-pnpm-playwright-${{ hashFiles('**/pnpm-lock.yaml') }}
                  restore-keys: |
                      ${{ runner.os }}-pnpm-playwright-
            - name: Install package.json dependencies with pnpm
              run: pnpm --filter=@posthog/playwright... install --frozen-lockfile
            - name: Stop/Start stack with Docker Compose
              run: |
                  docker compose -f docker-compose.dev.yml down
                  docker compose -f docker-compose.dev.yml up -d &
            # Install playwright and dependencies whilst we wait for the stack to start
            - name: Install playwright and dependencies
              run: pnpm --filter=@posthog/playwright exec playwright install --with-deps
            - name: Wait for services to be available
              run: |
                  bin/check_kafka_clickhouse_up
                  bin/check_postgres_up
            - name: Install Depot CLI
              uses: depot/setup-action@b0b1ea4f69e92ebf5dea3f8713a1b0c37b2126a5 # v1
            - name: Get Docker image cached in Depot
              uses: depot/pull-action@8a922bdade29cf5facf3a13020cccd3b7a8127c2 # v1
              with:
                  build-id: ${{ needs.container.outputs.build-id }}
                  tags: ${{ needs.container.outputs.tag }}
            - name: Write .env
              run: |
                  cat <<EOT >> .env
                  SECRET_KEY=6b01eee4f945ca25045b5aab440b953461faf08693a9abbf1166dc7c6b9772da
                  REDIS_URL=redis://localhost
                  DATABASE_URL=postgres://posthog:posthog@localhost:5432/posthog
                  KAFKA_HOSTS=kafka:9092
                  DISABLE_SECURE_SSL_REDIRECT=1
                  SECURE_COOKIES=0
                  OPT_OUT_CAPTURE=0
                  E2E_TESTING=1
                  SKIP_SERVICE_VERSION_REQUIREMENTS=1
                  EMAIL_HOST=email.test.posthog.net
                  SITE_URL=http://localhost:8000
                  NO_RESTART_LOOP=1
                  CLICKHOUSE_SECURE=0
                  OBJECT_STORAGE_ENABLED=1
                  OBJECT_STORAGE_ENDPOINT=http://localhost:19000
                  OBJECT_STORAGE_ACCESS_KEY_ID=object_storage_root_user
                  OBJECT_STORAGE_SECRET_ACCESS_KEY=object_storage_root_password
                  GITHUB_ACTION_RUN_URL="${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}"
                  CELERY_METRICS_PORT=8999
                  CLOUD_DEPLOYMENT=1
                  EOT
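                  # Note: this .env is consumed by the next step via `docker run --env-file .env`,
                  # so the containerised PostHog sees the same settings written above.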
            - name: Start PostHog
              run: |
                  mkdir -p /tmp/logs
                  echo "Starting PostHog using the container image ${{ needs.container.outputs.tag }}"
                  DOCKER_RUN="docker run --rm --network host --add-host kafka:127.0.0.1 --add-host clickhouse:127.0.0.1 --env-file .env ${{ needs.container.outputs.tag }}"
                  # Run Django migrations (./bin/migrate will skip Rust migrations since rust/bin doesn't exist in container)
                  $DOCKER_RUN ./bin/migrate
                  # Now run Rust migrations using dedicated container
                  docker build -f rust/Dockerfile.persons-migrate -t posthog-migrations:latest ./rust
                  docker run --rm --network host \
                    -e PERSONS_DATABASE_URL="postgres://posthog:posthog@localhost:5432/posthog_persons" \
                    -e CYCLOTRON_DATABASE_URL="postgres://posthog:posthog@localhost:5432/cyclotron" \
                    posthog-migrations:latest
                  $DOCKER_RUN python manage.py setup_dev
                  # only starts the plugin server so that the "wait for PostHog" step passes
                  $DOCKER_RUN ./bin/docker-worker &> /tmp/logs/worker.txt &
                  $DOCKER_RUN ./bin/docker-server &> /tmp/logs/server.txt &
            - name: Wait for PostHog
              # this action might be abandoned - but v1 doesn't point to latest of v1 (which it should)
              # so pointing to v1.1.0 to remove warnings about node version with v1
              # todo check https://github.com/iFaxity/wait-on-action/releases for new releases
              uses: iFaxity/wait-on-action@a7d13170ec542bdca4ef8ac4b15e9c6aa00a6866 # v1.2.1
              timeout-minutes: 5
              with:
                  verbose: true
                  log: true
                  resource: http://localhost:8000
            - name: Playwright run
              run: pnpm --filter=@posthog/playwright test
              env:
                  E2E_TESTING: 1
                  OPT_OUT_CAPTURE: 0
                  GITHUB_ACTION_RUN_URL: '${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}'
            - name: Archive report
              uses: actions/upload-artifact@v4
              with:
                  name: playwright-container-report
                  path: playwright/playwright-report/
                  retention-days: 30
              if: ${{ failure() }}
            - name: Archive screenshots
              if: always()
              uses: actions/upload-artifact@v4
              with:
                  name: playwright-container-screenshots
                  path: playwright/test-results
                  retention-days: 30
            - name: Archive docker-server logs
              if: failure()
              uses: actions/upload-artifact@v4
              with:
                  name: docker-server-logs
                  path: /tmp/logs/server.txt
                  retention-days: 5
    playwright:
        name: Playwright E2E tests
        needs: [changes]
        if: needs.changes.outputs.shouldRun == 'true'
        runs-on: depot-ubuntu-latest-8
        timeout-minutes: 30
        steps:
            - uses: actions/checkout@v4
              with:
                  ref: ${{ github.event.pull_request.head.ref }}
                  repository: ${{ github.event.pull_request.head.repo.full_name }}
                  token: ${{ secrets.POSTHOG_BOT_PAT || github.token }}
                  fetch-depth: 50 # Need enough history for flap detection to find last human commit
            - name: Stop/Start stack with Docker Compose
              shell: bash
              run: |
                  export CLICKHOUSE_SERVER_IMAGE=clickhouse/clickhouse-server:25.6.9.98
                  export DOCKER_REGISTRY_PREFIX="us-east1-docker.pkg.dev/posthog-301601/mirror/"
                  cp posthog/user_scripts/latest_user_defined_function.xml docker/clickhouse/user_defined_function.xml
                  (
                      max_attempts=3
                      attempt=1
                      delay=5
                      while [ $attempt -le $max_attempts ]; do
                          echo "Attempt $attempt of $max_attempts to start stack..."
                          if docker compose -f docker-compose.dev.yml down && \
                             docker compose -f docker-compose.dev.yml up -d; then
                              echo "Stack started successfully"
                              exit 0
                          fi
                          echo "Failed to start stack on attempt $attempt"
                          if [ $attempt -lt $max_attempts ]; then
                              sleep_time=$((delay * 2 ** (attempt - 1)))
                              echo "Waiting ${sleep_time} seconds before retry..."
                              sleep $sleep_time
                          fi
                          attempt=$((attempt + 1))
                      done
                      echo "Failed to start stack after $max_attempts attempts"
                      exit 1
                  ) &
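                  # Note: the retry loop above runs in a backgrounded subshell, so this step returns immediately;
                  # if the stack never comes up, the failure surfaces later in "Wait for services to be available".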
            - name: Add Kafka and ClickHouse to /etc/hosts
              shell: bash
              run: echo "127.0.0.1 kafka clickhouse" | sudo tee -a /etc/hosts
            - name: Set up Python
              uses: actions/setup-python@v5
              with:
                  python-version: 3.12.11
                  token: ${{ secrets.POSTHOG_BOT_PAT }}
            - name: Install uv
              id: setup-uv
              uses: astral-sh/setup-uv@0c5e2b8115b80b4c7c5ddf6ffdd634974642d182 # v5.4.1
              with:
                  enable-cache: true
                  version: 0.8.19
            - name: Determine if hogql-parser has changed compared to master
              shell: bash
              id: hogql-parser-diff
              run: |
                  git fetch --no-tags --prune --depth=1 origin master
                  changed=$(git diff --quiet HEAD origin/master -- common/hogql_parser/ && echo "false" || echo "true")
                  echo "changed=$changed" >> $GITHUB_OUTPUT
            - name: Install SAML (python3-saml) dependencies
              if: steps.setup-uv.outputs.cache-hit != 'true'
              shell: bash
              run: |
                  sudo apt-get update && sudo apt-get install libxml2-dev libxmlsec1-dev libxmlsec1-openssl
            - name: Install pnpm
              uses: pnpm/action-setup@a7487c7e89a18df4991f7f222e4898a00d66ddda # v4
            - name: Set up Node.js
              uses: actions/setup-node@v4
              with:
                  node-version: 22.17.1
                  cache: pnpm
            # tests would intermittently fail in GH actions
            # with exit code 134 _after passing_ all tests
            # this appears to fix it
            # absolute wild tbh https://stackoverflow.com/a/75503402
            - uses: tlambert03/setup-qt-libs@19e4ef2d781d81f5f067182e228b54ec90d23b76 # v1
            - name: Install plugin_transpiler
              shell: bash
              run: |
                  pnpm --filter=@posthog/plugin-transpiler... install --frozen-lockfile
                  bin/turbo --filter=@posthog/plugin-transpiler build
            - name: Install Python dependencies
              shell: bash
              run: |
                  UV_PROJECT_ENVIRONMENT=$pythonLocation uv sync --frozen --dev
            - name: Install the working version of hogql-parser
              if: needs.changes.outputs.shouldRun == 'true' && steps.hogql-parser-diff.outputs.changed == 'true'
              shell: bash
              # This is not cached currently, as it's important to build the current HEAD version of hogql-parser if it has
              # changed (requirements.txt has the already-published version)
              run: |
                  sudo apt-get install libboost-all-dev unzip cmake curl uuid pkg-config
                  curl https://www.antlr.org/download/antlr4-cpp-runtime-4.13.1-source.zip --output antlr4-source.zip
                  # Check that the downloaded archive is the expected runtime - a security measure
                  antlr_known_md5sum="c875c148991aacd043f733827644a76f"
                  antlr_found_md5sum="$(md5sum antlr4-source.zip | cut -d' ' -f1)"
                  if [[ "$antlr_known_md5sum" != "$antlr_found_md5sum" ]]; then
                      echo "Unexpected MD5 sum of antlr4-source.zip!"
                      echo "Known: $antlr_known_md5sum"
                      echo "Found: $antlr_found_md5sum"
                      exit 64
                  fi
                  unzip antlr4-source.zip -d antlr4-source && cd antlr4-source
                  cmake .
                  DESTDIR=out make install
                  sudo cp -r out/usr/local/include/antlr4-runtime /usr/include/
                  sudo cp out/usr/local/lib/libantlr4-runtime.so* /usr/lib/
                  sudo ldconfig
                  cd ..
                  pip install ./common/hogql_parser
            - name: Set up needed files
              shell: bash
              run: |
                  mkdir -p frontend/dist
                  touch frontend/dist/index.html
                  touch frontend/dist/layout.html
                  touch frontend/dist/exporter.html
                  ./bin/download-mmdb
            - name: Get pnpm cache directory path
              id: pnpm-cache-dir
              run: echo "PNPM_STORE_PATH=$(pnpm store path)" >> $GITHUB_OUTPUT
            - uses: actions/cache@v4
              id: pnpm-cache
              with:
                  path: ${{ steps.pnpm-cache-dir.outputs.PNPM_STORE_PATH }}
                  key: ${{ runner.os }}-pnpm-playwright-${{ hashFiles('pnpm-lock.yaml') }}
                  restore-keys: ${{ runner.os }}-pnpm-playwright-
            - name: Install package.json dependencies with pnpm
              run: |
                  pnpm --filter=@posthog/playwright... install --frozen-lockfile
                  bin/turbo --filter=@posthog/frontend prepare
            - name: Wait for services to be available
              run: |
                  bin/check_kafka_clickhouse_up
                  bin/check_postgres_up
            - name: Build frontend
              run: |
                  pnpm --filter=@posthog/frontend... install --frozen-lockfile
                  pnpm --filter=@posthog/frontend build:products
                  pnpm --filter=@posthog/frontend build
            - name: Collect static files
              run: |
                  # KLUDGE: to get the image-bitmap-data-url-worker-*.js.map files into the dist folder
                  # KLUDGE: rrweb expects them alongside its bundle, and Django's collectstatic fails otherwise
                  cp frontend/node_modules/@posthog/rrweb/dist/image-bitmap-data-url-worker-*.js.map frontend/dist/ && python manage.py collectstatic --noinput
            - name: Create test database
              run: |
                  createdb posthog_e2e_test || echo "Database already exists"
                  # Drop and recreate clickhouse test database
                  echo 'DROP DATABASE if exists posthog_test' | curl 'http://localhost:8123/' --data-binary @-
                  echo 'create database posthog_test' | curl 'http://localhost:8123/' --data-binary @-
            - name: Apply postgres and clickhouse migrations and setup dev
              run: |
                  python manage.py migrate_clickhouse &
                  python manage.py migrate --noinput
                  # Build the Rust migrations image and run persons/cyclotron migrations against posthog_persons
                  docker build -f rust/Dockerfile.persons-migrate -t posthog-migrations:latest ./rust
                  docker run --rm --network host \
                    -e PERSONS_DATABASE_URL="postgres://posthog:posthog@localhost:5432/posthog_persons" \
                    -e CYCLOTRON_DATABASE_URL="postgres://posthog:posthog@localhost:5432/cyclotron" \
                    posthog-migrations:latest
                  # Run the same Rust migrations again, this time against the posthog_e2e_test database
                  docker build -f rust/Dockerfile.persons-migrate -t posthog-migrations:latest ./rust
                  docker run --rm --network host \
                    -e PERSONS_DATABASE_URL="postgres://posthog:posthog@localhost:5432/posthog_e2e_test" \
                    -e CYCLOTRON_DATABASE_URL="postgres://posthog:posthog@localhost:5432/cyclotron" \
                    posthog-migrations:latest
                  python manage.py setup_dev
                  wait
            - name: Source celery queues
              run: |
                  source ./bin/celery-queues.env
                  echo "CELERY_WORKER_QUEUES=$CELERY_WORKER_QUEUES" >> $GITHUB_ENV
            - name: Start PostHog web & Celery worker
              run: |
                  python manage.py run_autoreload_celery --type=worker &> /tmp/celery.log &
                  python manage.py runserver 8000 &> /tmp/server.log &
            # Install Playwright browsers while we wait for PostHog to be ready
            - name: Install Playwright browsers
              run: pnpm --filter=@posthog/playwright exec playwright install chromium --with-deps
            - name: Wait for PostHog to be ready
              uses: iFaxity/wait-on-action@1fe019e0475491e9e8c4f421b6914ccc3ed8f99c # v1.2.1
              with:
                  resource: http://localhost:8000
                  timeout: 180000
                  interval: 2000
                  verbose: true
            - name: Run Playwright tests
              continue-on-error: true
              id: playwright-tests
              run: pnpm --filter=@posthog/playwright exec playwright test
            - name: Configure global git diff log
              run: git config --global --add safe.directory '*'
            - name: Count and optimize updated screenshots
              id: diff
              # Skip on forks
              if: needs.changes.outputs.shouldRun == 'true' && github.event.pull_request.head.repo.full_name == github.repository
              run: |
                  echo $(git diff --name-status playwright/__snapshots__) # Wrapped to ignore exit 1 on empty diff
                  ADDED=$(git diff --name-status playwright/__snapshots__/ | grep '^A' | wc -l)
                  MODIFIED=$(git diff --name-status playwright/__snapshots__/ | grep '^M' | wc -l)
                  DELETED=$(git diff --name-status playwright/__snapshots__/ | grep '^D' | wc -l)
                  TOTAL=$(git diff --name-status playwright/__snapshots__/ | wc -l)
                  # If added or modified, run OptiPNG
                  if [ $ADDED -gt 0 ] || [ $MODIFIED -gt 0 ]; then
                      echo "Screenshots updated ($ADDED new, $MODIFIED changed), running OptiPNG"
                      apt update && apt install -y optipng
                      # TODO: RESTORE OptiPNG - it turns out it never worked before
                      # optipng -clobber -o4 -strip all playwright/__snapshots__/**/*.png
                      # we don't want to _always_ run OptiPNG
                      # so, we run it after checking for a diff
                      # but, the files we diffed might then be changed by OptiPNG
                      # and as a result they might no longer be different...
                      # we check again
                      git diff --name-status playwright/__snapshots__/ # For debugging
                      ADDED=$(git diff --name-status playwright/__snapshots__/ | grep '^A' | wc -l)
                      MODIFIED=$(git diff --name-status playwright/__snapshots__/ | grep '^M' | wc -l)
                      DELETED=$(git diff --name-status playwright/__snapshots__/ | grep '^D' | wc -l)
                      TOTAL=$(git diff --name-status playwright/__snapshots__/ | wc -l)
                      if [ $ADDED -gt 0 ] || [ $MODIFIED -gt 0 ]; then
                          echo "Screenshots updated ($ADDED new, $MODIFIED changed), _even after_ running OptiPNG"
                          git add playwright/__snapshots__/ playwright/
                      fi
                  fi
                  echo "Screenshot changes:"
                  echo "Added: $ADDED"
                  echo "Modified: $MODIFIED"
                  echo "Deleted: $DELETED"
                  echo "Total: $TOTAL"
            - name: Detect flapping screenshots
              id: flap-detector
              if: needs.changes.outputs.shouldRun == 'true' && github.event.pull_request.head.repo.full_name == github.repository
              shell: bash
              run: |
                  set +e # Don't exit on error, we'll handle errors explicitly
                  PATHS_FLAPPING=()
                  # Find last human commit (not github-actions bot)
                  last_human_commit=$(git log --pretty=format:"%H %an" | grep -v "github-actions" | head -1 | cut -d' ' -f1 || true)
                  echo "Last human commit identified: $last_human_commit"
                  if [ -z "$last_human_commit" ]; then
                      echo "No human commit found in history, cannot detect flapping"
                      echo "This is expected for bot-only commits or shallow checkouts"
                      echo "flapping=false" >> $GITHUB_OUTPUT
                      echo "Paths flapping: none"
                      exit 0
                  fi
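                  # A screenshot counts as flapping when the github-actions bot has changed it at least
                  # twice since the last human commit (the `bot_changes -ge 2` check below).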
                  for screenshot_path in $(git diff --name-only HEAD playwright/__snapshots__); do
                      echo "Checking screenshot path: $screenshot_path"
                      bot_changes=$(git log --oneline ${last_human_commit}..HEAD --author="github-actions" -- $screenshot_path | wc -l | xargs)
                      echo "Number of bot changes to $screenshot_path since last human commit: $bot_changes"
                      if [ "$bot_changes" -ge 2 ]; then
                          PATHS_FLAPPING+=($screenshot_path)
                      fi
                  done
                  if [ ${#PATHS_FLAPPING[@]} -gt 0 ]; then
                      echo "flapping=true" >> $GITHUB_OUTPUT
                      echo "paths=${PATHS_FLAPPING[*]}" >> $GITHUB_OUTPUT
                  else
                      echo "flapping=false" >> $GITHUB_OUTPUT
                  fi
                  echo "Paths flapping: ${PATHS_FLAPPING[*]:-none}"
            - name: Comment on PR about flapping screenshots
              if: steps.flap-detector.outputs.flapping == 'true'
              uses: actions/github-script@v7
              with:
                  script: |
                      const flappingPaths = '${{ steps.flap-detector.outputs.paths }}'.split(' ');
                      const pathList = flappingPaths.map(p => `- \`${p}\``).join('\n');
                      const comment = `## ⚠️ Flapping Screenshots Detected
                      The following screenshots have been updated multiple times by the bot, indicating test instability:
                      ${pathList}
                      **What this means:**
                      - These screenshots are changing on each test run (timing issues, rendering differences, etc.)
                      - This prevents reliable verification and blocks auto-merge
                      - Human intervention is required
                      **How to fix:**
                      1. **Investigate flakiness**: Check test timing, wait for elements, stabilize animations
                      2. **Fix the underlying issue**: Don't just update snapshots repeatedly
                      3. **Verify locally**: Run tests multiple times to ensure stability
                      4. **Push your fix**: This will reset the flap counter
                      **If you need to proceed anyway:**
                      - Fix the test flakiness first (recommended)
                      - Or manually verify snapshots are acceptable and revert the bot commits, then push a fix
                      The workflow has been marked as failed to prevent merging unstable tests.`;
                      github.rest.issues.createComment({
                          issue_number: context.issue.number,
                          owner: context.repo.owner,
                          repo: context.repo.repo,
                          body: comment
                      });
            - name: Fail workflow if screenshots are flapping
              if: steps.flap-detector.outputs.flapping == 'true'
              run: |
                  echo "::error::Screenshots are flapping - this indicates test instability"
                  echo "Flapping paths: ${{ steps.flap-detector.outputs.paths }}"
                  exit 1
            - name: Commit updated screenshots
              uses: EndBug/add-and-commit@a94899bca583c204427a224a7af87c02f9b325d5 # v9
              if: needs.changes.outputs.shouldRun == 'true' && github.event.pull_request.head.repo.full_name == github.repository && steps.flap-detector.outputs.flapping != 'true'
              id: commit
              with:
                  add: 'playwright/'
                  message: 'Update E2E screenshots for Playwright'
                  pull: --rebase --autostash # Make sure we're up to date with other commits
                  default_author: github_actions
                  github_token: ${{ secrets.POSTHOG_BOT_PAT || github.token }}
            # ── Artifacts on failure / always ─────────────────────────────────────
            - name: Show celery logs
              run: cat /tmp/celery.log
            - name: Show server logs
              run: cat /tmp/server.log
            - name: Show proxy container logs
              run: |
                  docker ps
                  docker logs posthog-proxy-1
            - name: Archive Playwright report on failure
              if: failure()
              uses: actions/upload-artifact@v4
              with:
                  name: playwright-report
                  path: playwright/playwright-report/
                  retention-days: 30
            - name: Archive screenshots
              uses: actions/upload-artifact@v4
              with:
                  name: playwright-screenshots
                  path: playwright/test-results
                  retention-days: 30
            - name: Fail workflow if tests failed
              if: needs.changes.outputs.shouldRun == 'true' && steps.playwright-tests.outcome == 'failure'
              run: |
                  echo "❌ Playwright tests failed, but screenshots were processed and committed."
                  echo "Check the test logs above for details on which tests failed."
                  exit 1
    capture-run-time:
        name: Capture run time
        runs-on: ubuntu-latest
        needs: [changes, playwright]
        if: needs.changes.outputs.shouldRun == 'true'
        steps:
            - name: Calculate run time and send to PostHog
              run: |
                  gh auth login --with-token < <(echo ${{ secrets.GITHUB_TOKEN }})
                  run_id=${GITHUB_RUN_ID}
                  repo=${GITHUB_REPOSITORY}
                  run_info=$(gh api repos/${repo}/actions/runs/${run_id})
                  echo run_info: ${run_info}
                  # name is the name of the workflow file
                  # run_started_at is the start time of the workflow
                  # we want to get the number of seconds between the start time and now
                  name=$(echo ${run_info} | jq -r '.name')
                  run_url=$(echo ${run_info} | jq -r '.url')
                  run_started_at=$(echo ${run_info} | jq -r '.run_started_at')
                  run_attempt=$(echo ${run_info} | jq -r '.run_attempt')
                  start_seconds=$(date -d "${run_started_at}" +%s)
                  now_seconds=$(date +%s)
                  duration=$((now_seconds-start_seconds))
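                  # e.g. a run started at 10:00:00 UTC that reaches this step at 10:12:30 UTC gives duration=750 (seconds)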
                  echo running_time_duration_seconds=${duration} >> $GITHUB_ENV
                  echo running_time_run_url=${run_url} >> $GITHUB_ENV
                  echo running_time_run_attempt=${run_attempt} >> $GITHUB_ENV
                  echo running_time_run_id=${run_id} >> $GITHUB_ENV
                  echo running_time_run_started_at=${run_started_at} >> $GITHUB_ENV
            - name: Capture running time to PostHog
              if: github.repository == 'PostHog/posthog'
              uses: PostHog/[email protected]
              with:
                  posthog-token: ${{secrets.POSTHOG_API_TOKEN}}
                  event: 'posthog-ci-running-time'
                  properties: '{"runner": "depot", "duration_seconds": ${{ env.running_time_duration_seconds }}, "run_url": "${{ env.running_time_run_url }}", "run_attempt": "${{ env.running_time_run_attempt }}", "run_id": "${{ env.running_time_run_id }}", "run_started_at": "${{ env.running_time_run_started_at }}"}'