fix(projects): Clicking on New... should not fold/unfold the folder (#177774)
# This workflow runs all of our backend django tests.
#
# If these tests get too slow, look at increasing concurrency and re-timing the tests by manually dispatching
# .github/workflows/ci-backend-update-test-timing.yml action
name: Backend CI

on:
    push:
        branches:
            - master
    workflow_dispatch:
        inputs:
            clickhouseServerVersion:
                description: ClickHouse server version. Leave blank for default
                type: string
    pull_request:

concurrency:
    group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}
    cancel-in-progress: true
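# Illustration (not part of the config): for a PR from a hypothetical branch
# "feat/folder-fix", the group key evaluates to "Backend CI-feat/folder-fix",
# so a force-push cancels that branch's in-flight run. On pushes to master,
# head_ref is empty and run_id is used instead, so master runs never cancel
# each other.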
env:
    SECRET_KEY: '6b01eee4f945ca25045b5aab440b953461faf08693a9abbf1166dc7c6b9772da' # unsafe - for testing only
    DATABASE_URL: 'postgres://posthog:posthog@localhost:5432/posthog'
    REDIS_URL: 'redis://localhost'
    CLICKHOUSE_HOST: 'localhost'
    CLICKHOUSE_SECURE: 'False'
    CLICKHOUSE_VERIFY: 'False'
    TEST: 1
    CLICKHOUSE_SERVER_IMAGE_VERSION: ${{ github.event.inputs.clickhouseServerVersion || '' }}
    OBJECT_STORAGE_ENABLED: 'True'
    OBJECT_STORAGE_ENDPOINT: 'http://localhost:19000'
    OBJECT_STORAGE_ACCESS_KEY_ID: 'object_storage_root_user'
    OBJECT_STORAGE_SECRET_ACCESS_KEY: 'object_storage_root_password'
    # tests would intermittently fail in GH actions
    # with exit code 134 _after passing_ all tests
    # this appears to fix it
    # absolute wild tbh https://stackoverflow.com/a/75503402
    DISPLAY: ':99.0'
    OIDC_RSA_PRIVATE_KEY: 'test'

jobs:
    # Job to decide if we should run backend ci
    # See https://github.com/dorny/paths-filter#conditional-execution for more details
    changes:
        runs-on: ubuntu-latest
        timeout-minutes: 5
        name: Determine need to run backend and migration checks
        # Set job outputs to values from filter step
        outputs:
            backend: ${{ steps.filter.outputs.backend }}
            backend_files: ${{ steps.filter.outputs.backend_files }}
            migrations: ${{ steps.filter.outputs.migrations }}
            migrations_files: ${{ steps.filter.outputs.migrations_files }}
            tasks_temporal: ${{ steps.filter.outputs.tasks_temporal }}
        steps:
            # For pull requests it's not necessary to checkout the code, but we
            # also want this to run on master so we need to checkout
            - uses: actions/checkout@v4
            - uses: dorny/paths-filter@4512585405083f25c027a35db413c2b3b9006d50 # v2
              id: filter
              with:
                  list-files: 'escape'
                  filters: |
                      backend:
                          # Avoid running backend tests for irrelevant changes
                          # NOTE: we are at risk of missing a dependency here. We could make
                          # the dependencies more clear if we separated the backend/frontend
                          # code completely
                          # really we should ignore ee/frontend/** but dorny doesn't support that
                          # - '!ee/frontend/**'
                          # including the negated rule appears to work
                          # but makes it always match because the checked file always isn't `ee/frontend/**` 🙈
                          - 'ee/**/*'
                          - 'common/hogvm/**/*'
                          - 'posthog/**/*'
                          - 'products/**/backend/**/*'
                          - 'bin/*.py'
                          - pyproject.toml
                          - uv.lock
                          - requirements.txt
                          - requirements-dev.txt
                          - mypy.ini
                          - pytest.ini
                          - frontend/src/queries/schema.json # Used for generating schema.py
                          - common/plugin_transpiler/src # Used for transpiling plugins
                          # Make sure we run if someone is explicitly changing the workflow
                          - .github/workflows/ci-backend.yml
                          # We use docker compose for tests, make sure we rerun on
                          # changes to docker-compose.dev.yml e.g. dependency
                          # version changes
                          - docker-compose.dev.yml
                          - docker-compose.base.yml
                          - frontend/public/email/*
                          # These scripts are used in the CI
                          - bin/check_temporal_up
                          - bin/check_kafka_clickhouse_up
                      migrations:
                          - 'posthog/migrations/*.py'
                          - 'products/*/backend/migrations/*.py'
                          - 'products/*/migrations/*.py' # Legacy structure
                          - 'rust/persons_migrations/*.sql'
                          - 'rust/bin/migrate-persons'
                      tasks_temporal:
                          - 'products/tasks/backend/temporal/**/*'
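    # Illustration (assumption based on the dorny/paths-filter docs, not part of this
    # config): with list-files: 'escape', each *_files output is a single space-separated,
    # shell-escaped string, e.g.
    #   migrations_files == "posthog/migrations/0001_initial.py posthog/migrations/0002_teams.py"
    # which is why later steps can iterate it with a plain `for file in $CHANGED_FILES`.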
    check-migrations:
        needs: [changes]
        if: needs.changes.outputs.backend == 'true'
        timeout-minutes: 10
        name: Validate Django and CH migrations
        runs-on: ubuntu-latest
        steps:
            - uses: actions/checkout@v4
            - name: Stop/Start stack with Docker Compose
              run: |
                  docker compose -f docker-compose.dev.yml down
                  docker compose -f docker-compose.dev.yml up -d &
            - name: Set up Python
              uses: actions/setup-python@v5
              with:
                  python-version: 3.12.11
                  token: ${{ secrets.POSTHOG_BOT_PAT }}
            - name: Install uv
              id: setup-uv
              uses: astral-sh/setup-uv@0c5e2b8115b80b4c7c5ddf6ffdd634974642d182 # v5.4.1
              with:
                  enable-cache: true
                  version: 0.8.19
            - name: Install SAML (python3-saml) dependencies
              if: steps.setup-uv.outputs.cache-hit != 'true'
              run: |
                  sudo apt-get update
                  sudo apt-get install libxml2-dev libxmlsec1-dev libxmlsec1-openssl
            - name: Install Rust
              uses: dtolnay/rust-toolchain@6691ebadcb18182cc1391d07c9f295f657c593cd # 1.88
              with:
                  toolchain: 1.88.0
                  components: cargo
            - name: Install sqlx-cli
              run: |
                  cargo install sqlx-cli --version 0.8.0 --features postgres --no-default-features --locked
            # First running migrations from master, to simulate the real-world scenario
            - name: Checkout master
              uses: actions/checkout@v4
              with:
                  ref: master
            - name: Install python dependencies for master
              run: |
                  UV_PROJECT_ENVIRONMENT=.venv-master uv sync --frozen --dev
            - name: Wait for services to be available
              run: |
                  bin/check_postgres_up
                  bin/check_kafka_clickhouse_up
            - name: Run migrations up to master
              run: |
                  # Run Django migrations first (excluding managed=False models)
                  .venv-master/bin/python manage.py migrate
                  # Then run persons migrations using sqlx; comment out until we've merged
                  # DATABASE_URL="postgres://posthog:posthog@localhost:5432/posthog_persons" \
                  #     sqlx database create
                  # DATABASE_URL="postgres://posthog:posthog@localhost:5432/posthog_persons" \
                  #     sqlx migrate run --source rust/persons_migrations/
            # Now we can consider this PR's migrations
            - name: Checkout this PR
              uses: actions/checkout@v4
            - name: Install python dependencies for this PR
              run: |
                  UV_PROJECT_ENVIRONMENT=$pythonLocation uv sync --frozen --dev
            - name: Check migrations and post SQL comment
              if: github.event_name == 'pull_request' && needs.changes.outputs.migrations == 'true'
              env:
                  GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
              run: |
                  # Read the changed files from the output
                  CHANGED_FILES="${{ needs.changes.outputs.migrations_files }}"

                  # If no migration files changed, exit
                  if [ -z "$CHANGED_FILES" ]; then
                      echo "No migration files changed"
                      exit 0
                  fi

                  # Initialize comment body for SQL changes
                  COMMENT_BODY="## Migration SQL Changes\n\nHey 👋, we've detected some migrations on this PR. Here's the SQL output for each migration, make sure they make sense:\n\n"

                  # Process each changed migration file (excluding Rust migrations)
                  for file in $CHANGED_FILES; do
                      # Skip Rust migrations as they're handled separately by sqlx
                      if [[ $file =~ rust/persons_migrations ]]; then
                          continue
                      fi
                      if [[ $file =~ migrations/([0-9]+)_ ]]; then
                          migration_number="${BASH_REMATCH[1]}"
                          # Get app name by looking at the directory structure
                          # For new structure products/user_interviews/backend/migrations, we want user_interviews
                          # For old structure products/user_interviews/migrations, we want user_interviews
                          if [[ $file =~ products/([^/]+)/backend/migrations/ ]]; then
                              app_name="${BASH_REMATCH[1]}"
                          else
                              app_name=$(echo $file | sed -E 's|^([^/]+/)*([^/]+)/migrations/.*|\2|')
                          fi
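                          # Worked example (hypothetical paths, for illustration only):
                          #   file=products/user_interviews/backend/migrations/0003_add_notes.py
                          #     -> migration_number=0003, app_name=user_interviews
                          #   file=posthog/migrations/0642_alter_team.py
                          #     -> migration_number=0642, app_name=posthog (via the sed fallback)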
echo "Checking migration $migration_number for app $app_name" | |
# Get SQL output | |
SQL_OUTPUT=$(python manage.py sqlmigrate $app_name $migration_number) | |
# Add to comment body | |
COMMENT_BODY+="#### [\`$file\`](https:\/\/github.com\/${{ github.repository }}\/blob\/${{ github.sha }}\/$file)\n\`\`\`sql\n$SQL_OUTPUT\n\`\`\`\n\n" | |
                      fi
                  done

                  # Delete previous comments from this workflow
                  COMMENTS=$(curl -s -H "Authorization: token $GITHUB_TOKEN" \
                      -H "Accept: application/vnd.github.v3+json" \
                      "https://api.github.com/repos/${{ github.repository }}/issues/${{ github.event.pull_request.number }}/comments")
                  echo "Output from listing comments: $COMMENTS"
                  echo "$COMMENTS" | jq -r '.[] | select(.body | startswith("## Migration SQL Changes")) | .id' | while read -r comment_id; do
                      echo "Deleting comment $comment_id"
                      curl -X DELETE \
                          -H "Authorization: token $GITHUB_TOKEN" \
                          -H "Accept: application/vnd.github.v3+json" \
                          "https://api.github.com/repos/${{ github.repository }}/issues/comments/$comment_id"
                  done

                  # Convert \n into actual newlines
                  COMMENT_BODY=$(printf '%b' "$COMMENT_BODY")
                  COMMENT_BODY_JSON=$(jq -n --arg body "$COMMENT_BODY" '{body: $body}')
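                  # Why jq -n --arg is used here (illustrative note): it JSON-escapes quotes
                  # and newlines, so the assembled markdown body becomes a single valid JSON
                  # document, e.g. {"body": "## Migration SQL Changes\n..."}, safe for curl to post.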
                  # Post SQL comment to PR
                  echo "Posting SQL comment to PR"
                  echo "$COMMENT_BODY_JSON"
                  curl -X POST \
                      -H "Authorization: token $GITHUB_TOKEN" \
                      -H "Accept: application/vnd.github.v3+json" \
                      "https://api.github.com/repos/${{ github.repository }}/issues/${{ github.event.pull_request.number }}/comments" \
                      -d "$COMMENT_BODY_JSON"
            - name: Run migration risk analysis and post comment
              if: github.event_name == 'pull_request'
              env:
                  GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
              run: |
                  # Get risk analysis for all unapplied migrations (including third-party)
                  set +e # Don't exit immediately on error
                  RISK_ANALYSIS=$(python manage.py analyze_migration_risk --fail-on-blocked 2>/dev/null)
                  EXIT_CODE=$?
                  set -e # Re-enable exit on error

                  if [ -n "$RISK_ANALYSIS" ] && echo "$RISK_ANALYSIS" | grep -q "Summary:"; then
                      # Get existing comments
                      COMMENTS=$(curl -s -H "Authorization: token $GITHUB_TOKEN" \
                          -H "Accept: application/vnd.github.v3+json" \
                          "https://api.github.com/repos/${{ github.repository }}/issues/${{ github.event.pull_request.number }}/comments")

                      # Delete previous risk analysis comments
                      echo "$COMMENTS" | jq -r '.[] | select(.body | startswith("## 🔍 Migration Risk Analysis")) | .id' | while read -r comment_id; do
                          echo "Deleting risk analysis comment $comment_id"
                          curl -X DELETE \
                              -H "Authorization: token $GITHUB_TOKEN" \
                              -H "Accept: application/vnd.github.v3+json" \
                              "https://api.github.com/repos/${{ github.repository }}/issues/comments/$comment_id"
                      done

                      # Post risk analysis comment (output already contains markdown formatting with legend)
                      RISK_COMMENT="## 🔍 Migration Risk Analysis\n\nWe've analyzed your migrations for potential risks.\n\n$RISK_ANALYSIS"
                      RISK_COMMENT=$(printf '%b' "$RISK_COMMENT")
                      RISK_COMMENT_JSON=$(jq -n --arg body "$RISK_COMMENT" '{body: $body}')
                      echo "Posting risk analysis comment to PR"
                      curl -X POST \
                          -H "Authorization: token $GITHUB_TOKEN" \
                          -H "Accept: application/vnd.github.v3+json" \
                          "https://api.github.com/repos/${{ github.repository }}/issues/${{ github.event.pull_request.number }}/comments" \
                          -d "$RISK_COMMENT_JSON"
                  fi

                  # Fail the job if there were blocked migrations
                  if [ $EXIT_CODE -ne 0 ]; then
                      exit $EXIT_CODE
                  fi
            - name: Run migrations for this PR
              run: |
                  # Run Django migrations first (excluding managed=False models)
                  python manage.py migrate
                  # Then run persons migrations using sqlx
                  DATABASE_URL="postgres://posthog:posthog@localhost:5432/posthog_persons" \
                      sqlx migrate run --source rust/persons_migrations/
            - name: Check migrations
              run: |
                  DATABASE_URL="postgres://posthog:posthog@localhost:5432/posthog_persons" \
                      sqlx migrate info --source rust/persons_migrations/
                  python manage.py makemigrations --check --dry-run
                  git fetch origin master
                  # Check migration safety using the old SQL-based checker (it still reads the file list from stdin)
                  echo "${{ needs.changes.outputs.migrations_files }}" | grep -v migrations/0001_ | grep -v 'rust/persons_migrations' | python manage.py test_migrations_are_safe
            - name: Check CH migrations
              run: |
                  # Same as above, except now for CH looking at files that were added in posthog/clickhouse/migrations/
                  git diff --name-status origin/master..HEAD | grep "A\sposthog/clickhouse/migrations/" | grep -v README | awk '{print $2}' | python manage.py test_ch_migrations_are_safe
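                  # What the pipeline sees (illustrative): `git diff --name-status` emits lines
                  # like "A\tposthog/clickhouse/migrations/0150_add_column.py"; the greps keep
                  # only added migration files and awk forwards the bare paths to the checker.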
    django:
        needs: [changes]
        if: needs.changes.outputs.backend == 'true'
        # increase for tmate testing
        timeout-minutes: 30
        name: Django tests – ${{ matrix.segment }} (persons-on-events ${{ matrix.person-on-events && 'on' || 'off' }}), Py ${{ matrix.python-version }}, ${{ matrix.clickhouse-server-image }} (${{matrix.group}}/${{ matrix.concurrency }})
        runs-on: depot-ubuntu-latest
        strategy:
            fail-fast: false
            matrix:
                python-version: ['3.12.11']
                clickhouse-server-image: ['clickhouse/clickhouse-server:25.6.9.98']
                segment: ['Core']
                person-on-events: [false]
                # NOTE: Keep concurrency and groups in sync
                concurrency: [40]
                group: [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40]
                include:
                    - segment: 'Core'
                      person-on-events: true
                      clickhouse-server-image: 'clickhouse/clickhouse-server:25.6.9.98'
                      python-version: '3.12.11'
                      concurrency: 10
                      group: 1
                    - segment: 'Core'
                      person-on-events: true
                      clickhouse-server-image: 'clickhouse/clickhouse-server:25.6.9.98'
                      python-version: '3.12.11'
                      concurrency: 10
                      group: 2
                    - segment: 'Core'
                      person-on-events: true
                      clickhouse-server-image: 'clickhouse/clickhouse-server:25.6.9.98'
                      python-version: '3.12.11'
                      concurrency: 10
                      group: 3
                    - segment: 'Core'
                      person-on-events: true
                      clickhouse-server-image: 'clickhouse/clickhouse-server:25.6.9.98'
                      python-version: '3.12.11'
                      concurrency: 10
                      group: 4
                    - segment: 'Core'
                      person-on-events: true
                      clickhouse-server-image: 'clickhouse/clickhouse-server:25.6.9.98'
                      python-version: '3.12.11'
                      concurrency: 10
                      group: 5
                    - segment: 'Core'
                      person-on-events: true
                      clickhouse-server-image: 'clickhouse/clickhouse-server:25.6.9.98'
                      python-version: '3.12.11'
                      concurrency: 10
                      group: 6
                    - segment: 'Core'
                      person-on-events: true
                      clickhouse-server-image: 'clickhouse/clickhouse-server:25.6.9.98'
                      python-version: '3.12.11'
                      concurrency: 10
                      group: 7
                    - segment: 'Core'
                      person-on-events: true
                      clickhouse-server-image: 'clickhouse/clickhouse-server:25.6.9.98'
                      python-version: '3.12.11'
                      concurrency: 10
                      group: 8
                    - segment: 'Core'
                      person-on-events: true
                      clickhouse-server-image: 'clickhouse/clickhouse-server:25.6.9.98'
                      python-version: '3.12.11'
                      concurrency: 10
                      group: 9
                    - segment: 'Core'
                      person-on-events: true
                      clickhouse-server-image: 'clickhouse/clickhouse-server:25.6.9.98'
                      python-version: '3.12.11'
                      concurrency: 10
                      group: 10
                    - segment: 'Temporal'
                      person-on-events: false
                      clickhouse-server-image: 'clickhouse/clickhouse-server:25.6.9.98'
                      python-version: '3.12.11'
                      concurrency: 10
                      group: 1
                    - segment: 'Temporal'
                      person-on-events: false
                      clickhouse-server-image: 'clickhouse/clickhouse-server:25.6.9.98'
                      python-version: '3.12.11'
                      concurrency: 10
                      group: 2
                    - segment: 'Temporal'
                      person-on-events: false
                      clickhouse-server-image: 'clickhouse/clickhouse-server:25.6.9.98'
                      python-version: '3.12.11'
                      concurrency: 10
                      group: 3
                    - segment: 'Temporal'
                      person-on-events: false
                      clickhouse-server-image: 'clickhouse/clickhouse-server:25.6.9.98'
                      python-version: '3.12.11'
                      concurrency: 10
                      group: 4
                    - segment: 'Temporal'
                      person-on-events: false
                      clickhouse-server-image: 'clickhouse/clickhouse-server:25.6.9.98'
                      python-version: '3.12.11'
                      concurrency: 10
                      group: 5
                    - segment: 'Temporal'
                      person-on-events: false
                      clickhouse-server-image: 'clickhouse/clickhouse-server:25.6.9.98'
                      python-version: '3.12.11'
                      concurrency: 10
                      group: 6
                    - segment: 'Temporal'
                      person-on-events: false
                      clickhouse-server-image: 'clickhouse/clickhouse-server:25.6.9.98'
                      python-version: '3.12.11'
                      concurrency: 10
                      group: 7
                    - segment: 'Temporal'
                      person-on-events: false
                      clickhouse-server-image: 'clickhouse/clickhouse-server:25.6.9.98'
                      python-version: '3.12.11'
                      concurrency: 10
                      group: 8
                    - segment: 'Temporal'
                      person-on-events: false
                      clickhouse-server-image: 'clickhouse/clickhouse-server:25.6.9.98'
                      python-version: '3.12.11'
                      concurrency: 10
                      group: 9
                    - segment: 'Temporal'
                      person-on-events: false
                      clickhouse-server-image: 'clickhouse/clickhouse-server:25.6.9.98'
                      python-version: '3.12.11'
                      concurrency: 10
                      group: 10
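            # Net matrix size (arithmetic from the entries above): the base product is
            # 1 Python version x 1 CH image x 1 segment x 1 persons-on-events flag x 40 groups
            # = 40 jobs, and `include` appends 10 persons-on-events Core shards plus
            # 10 Temporal shards, i.e. 60 Django test jobs per run.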
        steps:
            - uses: actions/checkout@v4
              with:
                  fetch-depth: 1
                  repository: ${{ github.event.pull_request.head.repo.full_name }}
                  ref: ${{ github.event.pull_request.head.ref }}
                  # Use PostHog Bot token when not on forks to enable proper snapshot updating
                  token: ${{ github.event.pull_request.head.repo.full_name == github.repository && secrets.POSTHOG_BOT_PAT || github.token }}
            - name: 'Safeguard: ensure no stray Python modules at product root'
              run: |
                  echo "Checking that products/* only contain backend/, frontend/, or shared/ as Python code roots..."
                  BAD_FILES=$(find products -maxdepth 2 -type f -name "*.py" ! -path "*/backend/*" ! -name "__init__.py" ! -name "conftest.py" -o -maxdepth 2 -type d -name "migrations" ! -path "*/backend/*")
                  if [ -n "$BAD_FILES" ]; then
                      echo "❌ Found Python code or migrations outside backend/:"
                      echo "$BAD_FILES"
                      echo "Please move these into the appropriate backend/ folder."
                      exit 1
                  fi
                  echo "✅ No stray Python files or migrations found at product roots."
            # Pre-tests
            # Copies the fully versioned UDF xml file for use in CI testing
            - name: Stop/Start stack with Docker Compose
              shell: bash
              run: |
                  export CLICKHOUSE_SERVER_IMAGE=${{ matrix.clickhouse-server-image }}
                  export DOCKER_REGISTRY_PREFIX="us-east1-docker.pkg.dev/posthog-301601/mirror/"
                  cp posthog/user_scripts/latest_user_defined_function.xml docker/clickhouse/user_defined_function.xml
                  # Start docker compose in background
                  (
                      max_attempts=3
                      attempt=1
                      delay=5
                      while [ $attempt -le $max_attempts ]; do
                          echo "Attempt $attempt of $max_attempts to start stack..."
                          if docker compose -f docker-compose.dev.yml down && \
                              docker compose -f docker-compose.dev.yml up -d; then
                              echo "Stack started successfully"
                              exit 0
                          fi
                          echo "Failed to start stack on attempt $attempt"
                          if [ $attempt -lt $max_attempts ]; then
                              sleep_time=$((delay * 2 ** (attempt - 1)))
                              echo "Waiting ${sleep_time} seconds before retry..."
                              sleep $sleep_time
                          fi
                          attempt=$((attempt + 1))
                      done
                      echo "Failed to start stack after $max_attempts attempts"
                      exit 1
                  ) &
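                  # Backoff arithmetic for the retry loop above (follows from delay=5): a
                  # failed attempt 1 sleeps 5*2^0 = 5s, a failed attempt 2 sleeps 5*2^1 = 10s,
                  # and attempt 3 is the final try before the subshell gives up with exit 1.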
            - name: Add Kafka and ClickHouse to /etc/hosts
              shell: bash
              run: echo "127.0.0.1 kafka clickhouse" | sudo tee -a /etc/hosts
            - name: Set up Python
              uses: actions/setup-python@v5
              with:
                  python-version: ${{ matrix.python-version }}
                  token: ${{ secrets.POSTHOG_BOT_PAT }}
            - name: Install uv
              id: setup-uv-tests
              uses: astral-sh/setup-uv@0c5e2b8115b80b4c7c5ddf6ffdd634974642d182 # v5.4.1
              with:
                  enable-cache: true
                  version: 0.8.19
            - name: Install SAML (python3-saml) dependencies
              if: ${{ needs.changes.outputs.backend == 'true' && steps.setup-uv-tests.outputs.cache-hit != 'true' }}
              shell: bash
              run: |
                  sudo apt-get update && sudo apt-get install libxml2-dev libxmlsec1-dev libxmlsec1-openssl
            - name: Determine if hogql-parser has changed compared to master
              shell: bash
              id: hogql-parser-diff
              run: |
                  git fetch --no-tags --prune --depth=1 origin master
                  changed=$(git diff --quiet HEAD origin/master -- common/hogql_parser/ && echo "false" || echo "true")
                  echo "changed=$changed" >> $GITHUB_OUTPUT
            - name: Install pnpm
              uses: pnpm/action-setup@a7487c7e89a18df4991f7f222e4898a00d66ddda # v4
            - name: Set up Node.js
              uses: actions/setup-node@v4
              with:
                  node-version: 22.17.1
                  cache: pnpm
            # tests would intermittently fail in GH actions
            # with exit code 134 _after passing_ all tests
            # this appears to fix it
            # absolute wild tbh https://stackoverflow.com/a/75503402
            - uses: tlambert03/setup-qt-libs@19e4ef2d781d81f5f067182e228b54ec90d23b76 # v1
            - name: Install plugin_transpiler
              shell: bash
              run: |
                  pnpm --filter=@posthog/plugin-transpiler... install --frozen-lockfile
                  bin/turbo --filter=@posthog/plugin-transpiler build
            - name: Install Python dependencies
              shell: bash
              run: |
                  UV_PROJECT_ENVIRONMENT=$pythonLocation uv sync --frozen --dev
            - name: Install the working version of hogql-parser
              if: ${{ needs.changes.outputs.backend == 'true' && steps.hogql-parser-diff.outputs.changed == 'true' }}
              shell: bash
              # This is not cached currently, as it's important to build the current HEAD version of hogql-parser if it has
              # changed (requirements.txt has the already-published version)
              run: |
                  sudo apt-get install libboost-all-dev unzip cmake curl uuid pkg-config
                  curl https://www.antlr.org/download/antlr4-cpp-runtime-4.13.1-source.zip --output antlr4-source.zip
                  # Check that the downloaded archive is the expected runtime - a security measure
                  antlr_known_md5sum="c875c148991aacd043f733827644a76f"
                  antlr_found_md5sum="$(md5sum antlr4-source.zip | cut -d' ' -f1)"
                  if [[ "$antlr_known_md5sum" != "$antlr_found_md5sum" ]]; then
                      echo "Unexpected MD5 sum of antlr4-source.zip!"
                      echo "Known: $antlr_known_md5sum"
                      echo "Found: $antlr_found_md5sum"
                      exit 64
                  fi
                  unzip antlr4-source.zip -d antlr4-source && cd antlr4-source
                  cmake .
                  DESTDIR=out make install
                  sudo cp -r out/usr/local/include/antlr4-runtime /usr/include/
                  sudo cp out/usr/local/lib/libantlr4-runtime.so* /usr/lib/
                  sudo ldconfig
                  cd ..
                  pip install ./common/hogql_parser
            - name: Set up needed files
              shell: bash
              run: |
                  mkdir -p frontend/dist
                  touch frontend/dist/index.html
                  touch frontend/dist/layout.html
                  touch frontend/dist/exporter.html
                  ./bin/download-mmdb
            - name: Wait for services to be available
              shell: bash
              run: |
                  bin/check_kafka_clickhouse_up
                  bin/check_postgres_up
            - name: Wait for Temporal
              if: ${{ needs.changes.outputs.backend == 'true' && matrix.segment == 'Temporal' }}
              shell: bash
              run: |
                  bin/check_temporal_up
            - name: Determine if --snapshot-update should be on
              # Skip on forks (due to GITHUB_TOKEN being read-only in PRs coming from them) except for persons-on-events
              # runs, as we want to ignore snapshots diverging there
              if: ${{ needs.changes.outputs.backend == 'true' && (github.event.pull_request.head.repo.full_name == github.repository || matrix.person-on-events) }}
              shell: bash
              run: echo "PYTEST_ARGS=--snapshot-update" >> $GITHUB_ENV # We can only update snapshots within the PostHog org
            # Tests
            - name: Run Core tests
              id: run-core-tests
              if: ${{ needs.changes.outputs.backend == 'true' && matrix.segment == 'Core' }}
              env:
                  PERSON_ON_EVENTS_V2_ENABLED: ${{ matrix.person-on-events && 'true' || 'false' }}
              shell: bash
              run: | # async_migrations covered in ci-async-migrations.yml
                  set +e
                  pytest ${{
                      matrix.person-on-events
                      && './posthog/clickhouse/ ./posthog/queries/ ./posthog/api/test/test_insight* ./posthog/api/test/dashboards/test_dashboard.py'
                      || 'posthog products'
                  }} ${{ matrix.person-on-events && 'ee/clickhouse/' || 'ee/' }} -m "not async_migrations" \
                      --ignore=posthog/temporal \
                      --ignore=products/batch_exports/backend/tests/temporal \
                      --ignore=common/hogvm/python/test \
                      ${{ matrix.person-on-events && '--ignore=posthog/hogql_queries' || '' }} \
                      ${{ matrix.person-on-events && '--ignore=posthog/hogql' || '' }} \
                      --splits ${{ matrix.concurrency }} --group ${{ matrix.group }} \
                      --durations=1000 --durations-min=1.0 --store-durations \
                      --splitting-algorithm=duration_based_chunks \
                      $PYTEST_ARGS
                  exit_code=$?
                  set -e
                  if [ $exit_code -eq 5 ]; then
                      echo "No tests collected for this shard, this is expected when splitting tests"
                      exit 0
                  else
                      exit $exit_code
                  fi
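            # How the sharding flags above compose (summary, per pytest-split's documented
            # behavior): --splits N --group M runs shard M of N, --store-durations rewrites
            # .test_durations (uploaded as an artifact further down), and pytest exit code 5
            # just means the shard collected zero tests, which the wrapper treats as success.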
            # Uncomment this code to create an ssh-able console so you can debug issues with github actions
            # (Consider changing the timeout in ci-backend.yml to have more time)
            # - name: Setup tmate session
            #   if: failure()
            #   uses: mxschmitt/action-tmate@v3
            - name: Run /decide read replica tests
              id: run-decide-read-replica-tests
              if: ${{ needs.changes.outputs.backend == 'true' && matrix.segment == 'Core' && matrix.group == 1 && !matrix.person-on-events }}
              env:
                  POSTHOG_DB_NAME: posthog
                  READ_REPLICA_OPT_IN: 'decide,PersonalAPIKey, local_evaluation'
                  POSTHOG_POSTGRES_READ_HOST: localhost
                  POSTHOG_DB_PASSWORD: posthog
                  POSTHOG_DB_USER: posthog
              shell: bash
              run: |
                  pytest posthog/api/test/test_decide.py::TestDecideUsesReadReplica \
                      --durations=1000 --durations-min=1.0 \
                      $PYTEST_ARGS
            - name: Run Temporal tests
              id: run-temporal-tests
              if: ${{ needs.changes.outputs.backend == 'true' && matrix.segment == 'Temporal' }}
              shell: bash
              env:
                  AWS_S3_ALLOW_UNSAFE_RENAME: 'true'
                  RUNLOOP_API_KEY: ${{ needs.changes.outputs.tasks_temporal == 'true' && secrets.RUNLOOP_API_KEY || '' }}
              run: |
                  set +e
                  pytest posthog/temporal products/batch_exports/backend/tests/temporal products/tasks/backend/temporal -m "not async_migrations" \
                      --splits ${{ matrix.concurrency }} --group ${{ matrix.group }} \
                      --durations=100 --durations-min=1.0 --store-durations \
                      --splitting-algorithm=duration_based_chunks \
                      $PYTEST_ARGS
                  exit_code=$?
                  set -e
                  if [ $exit_code -eq 5 ]; then
                      echo "No tests collected for this shard, this is expected when splitting tests"
                      exit 0
                  else
                      exit $exit_code
                  fi
            # Post tests
            - name: Show docker compose logs on failure
              if: failure() && (needs.changes.outputs.backend == 'true' && steps.run-core-tests.outcome != 'failure' && steps.run-decide-read-replica-tests.outcome != 'failure' && steps.run-temporal-tests.outcome != 'failure')
              shell: bash
              run: docker compose -f docker-compose.dev.yml logs
            - name: Upload updated timing data as artifacts
              uses: actions/upload-artifact@v4
              if: ${{ needs.changes.outputs.backend == 'true' && !matrix.person-on-events && matrix.clickhouse-server-image == 'clickhouse/clickhouse-server:25.6.9.98' }}
              with:
                  name: timing_data-${{ matrix.segment }}-${{ matrix.group }}
                  path: .test_durations
                  include-hidden-files: true
                  retention-days: 2
            - uses: EndBug/add-and-commit@a94899bca583c204427a224a7af87c02f9b325d5 # v9
              id: add-and-commit # referenced below via steps.add-and-commit.outcome
              # Also skip for persons-on-events runs, as we want to ignore snapshots diverging there
              if: ${{ github.event.pull_request.head.repo.full_name == 'PostHog/posthog' && needs.changes.outputs.backend == 'true' && !matrix.person-on-events }}
              with:
                  add: '["ee", "./**/*.ambr", "posthog/queries/", "posthog/migrations", "posthog/tasks", "posthog/hogql/"]'
                  message: 'Update query snapshots'
                  pull: --rebase --autostash # Make sure we're up-to-date with other segments' updates
                  default_author: github_actions
                  github_token: ${{ secrets.POSTHOG_BOT_PAT }}
            - name: Check if any snapshot changes were left uncommitted
              id: changed-files
              if: ${{ github.event.pull_request.head.repo.full_name == 'PostHog/posthog' && needs.changes.outputs.backend == 'true' && !matrix.person-on-events }}
              run: |
                  if [[ -z $(git status -s | grep -v ".test_durations" | tr -d "\n") ]]; then
                      echo 'files_found=false' >> $GITHUB_OUTPUT
                  else
                      # Multiline values need the heredoc form of GITHUB_OUTPUT; single quotes
                      # here would also have kept the command substitution from ever running
                      {
                          echo 'diff<<EOF'
                          git status --porcelain
                          echo 'EOF'
                      } >> $GITHUB_OUTPUT
                      echo 'files_found=true' >> $GITHUB_OUTPUT
                  fi
            - name: Fail CI if some snapshots have been updated but not committed
              if: steps.changed-files.outputs.files_found == 'true' && steps.add-and-commit.outcome == 'success'
              run: |
                  echo "${{ steps.changed-files.outputs.diff }}"
                  exit 1
            - name: Archive email renders
              uses: actions/upload-artifact@v4
              if: needs.changes.outputs.backend == 'true' && matrix.segment == 'Core' && !matrix.person-on-events
              with:
                  name: email_renders-${{ github.sha }}-${{ github.run_attempt }}-${{ matrix.segment }}-${{ matrix.person-on-events }}-${{ matrix.group }}
                  path: posthog/tasks/test/__emails__
                  retention-days: 1
    # Job just to collate the status of the matrix jobs for requiring passing status
    django_tests:
        needs: [django]
        name: Django Tests Pass
        runs-on: ubuntu-latest
        if: always()
        steps:
            - name: Check matrix outcome
              run: |
                  # The `needs.django.result` will be 'success' only if all jobs in the matrix succeeded.
                  # Otherwise, it will be 'failure'.
                  if [[ "${{ needs.django.result }}" != "success" && "${{ needs.django.result }}" != "skipped" ]]; then
                      echo "One or more jobs in the Django test matrix failed."
                      exit 1
                  fi
                  echo "All jobs in the Django test matrix passed."
    async-migrations:
        name: Async migrations tests - ${{ matrix.clickhouse-server-image }}
        needs: [changes]
        strategy:
            fail-fast: false
            matrix:
                clickhouse-server-image: ['clickhouse/clickhouse-server:25.6.9.98']
        if: needs.changes.outputs.backend == 'true'
        runs-on: ubuntu-latest
        steps:
            - name: 'Checkout repo'
              uses: actions/checkout@v4
              with:
                  fetch-depth: 1
            - name: Start stack with Docker Compose
              run: |
                  export CLICKHOUSE_SERVER_IMAGE_VERSION=${{ matrix.clickhouse-server-image }}
                  docker compose -f docker-compose.dev.yml down
                  docker compose -f docker-compose.dev.yml up -d &
            - name: Set up Python
              uses: actions/setup-python@v5
              with:
                  python-version-file: 'pyproject.toml'
            - name: Install uv
              id: setup-uv-async
              uses: astral-sh/setup-uv@0c5e2b8115b80b4c7c5ddf6ffdd634974642d182 # v5.4.1
              with:
                  enable-cache: true
                  version: 0.8.19
            - name: Install SAML (python3-saml) dependencies
              if: steps.setup-uv-async.outputs.cache-hit != 'true'
              run: |
                  sudo apt-get update
                  sudo apt-get install libxml2-dev libxmlsec1-dev libxmlsec1-openssl
            - name: Install python dependencies
              shell: bash
              run: |
                  UV_PROJECT_ENVIRONMENT=$pythonLocation uv sync --frozen --dev
            - name: Add Kafka and ClickHouse to /etc/hosts
              run: echo "127.0.0.1 kafka clickhouse" | sudo tee -a /etc/hosts
            - name: Set up needed files
              run: |
                  mkdir -p frontend/dist
                  touch frontend/dist/index.html
                  touch frontend/dist/layout.html
                  touch frontend/dist/exporter.html
            - name: Wait for services to be available
              shell: bash
              run: |
                  bin/check_kafka_clickhouse_up
                  bin/check_postgres_up
            - name: Run async migrations tests
              run: |
                  pytest -m "async_migrations"
    calculate-running-time:
        name: Calculate running time
        # `changes` must be listed in needs for `needs.changes.outputs.backend` below to resolve
        needs: [changes, django, async-migrations]
        runs-on: ubuntu-latest
        if: # Run on pull requests to PostHog/posthog + on PostHog/posthog outside of PRs - but never on forks
            needs.changes.outputs.backend == 'true' && (
            (github.event_name == 'pull_request' && github.event.pull_request.head.repo.full_name == 'PostHog/posthog') ||
            (github.event_name != 'pull_request' && github.repository == 'PostHog/posthog'))
        steps:
            - name: Calculate running time
              run: |
                  gh auth login --with-token < <(echo ${{ secrets.GITHUB_TOKEN }})
                  run_id=${GITHUB_RUN_ID}
                  repo=${GITHUB_REPOSITORY}
                  run_info=$(gh api repos/${repo}/actions/runs/${run_id})
                  echo run_info: ${run_info}
                  # name is the name of the workflow file
                  # run_started_at is the start time of the workflow
                  # we want to get the number of seconds between the start time and now
                  name=$(echo ${run_info} | jq -r '.name')
                  run_url=$(echo ${run_info} | jq -r '.url')
                  run_started_at=$(echo ${run_info} | jq -r '.run_started_at')
                  run_attempt=$(echo ${run_info} | jq -r '.run_attempt')
                  start_seconds=$(date -d "${run_started_at}" +%s)
                  now_seconds=$(date +%s)
                  duration=$((now_seconds-start_seconds))
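                  # e.g. (illustrative values only): run_started_at=2024-01-01T12:00:00Z
                  # and a current time of 12:14:30Z give duration = 870 seconds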
                  echo running_time_duration_seconds=${duration} >> $GITHUB_ENV
                  echo running_time_run_url=${run_url} >> $GITHUB_ENV
                  echo running_time_run_attempt=${run_attempt} >> $GITHUB_ENV
                  echo running_time_run_id=${run_id} >> $GITHUB_ENV
                  echo running_time_run_started_at=${run_started_at} >> $GITHUB_ENV
            - name: Capture running time to PostHog
              uses: PostHog/[email protected]
              with:
                  posthog-token: ${{secrets.POSTHOG_API_TOKEN}}
                  event: 'posthog-ci-running-time'
                  properties: '{"runner": "depot", "duration_seconds": ${{ env.running_time_duration_seconds }}, "run_url": "${{ env.running_time_run_url }}", "run_attempt": "${{ env.running_time_run_attempt }}", "run_id": "${{ env.running_time_run_id }}", "run_started_at": "${{ env.running_time_run_started_at }}"}'