-
Notifications
You must be signed in to change notification settings - Fork 2.7k
740 lines (666 loc) · 38.3 KB
/
ci-e2e-playwright.yml
File metadata and controls
740 lines (666 loc) · 38.3 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
498
499
500
501
502
503
504
505
506
507
508
509
510
511
512
513
514
515
516
517
518
519
520
521
522
523
524
525
526
527
528
529
530
531
532
533
534
535
536
537
538
539
540
541
542
543
544
545
546
547
548
549
550
551
552
553
554
555
556
557
558
559
560
561
562
563
564
565
566
567
568
569
570
571
572
573
574
575
576
577
578
579
580
581
582
583
584
585
586
587
588
589
590
591
592
593
594
595
596
597
598
599
600
601
602
603
604
605
606
607
608
609
610
611
612
613
614
615
616
617
618
619
620
621
622
623
624
625
626
627
628
629
630
631
632
633
634
635
636
637
638
639
640
641
642
643
644
645
646
647
648
649
650
651
652
653
654
655
656
657
658
659
660
661
662
663
664
665
666
667
668
669
670
671
672
673
674
675
676
677
678
679
680
681
682
683
684
685
686
687
688
689
690
691
692
693
694
695
696
697
698
699
700
701
702
703
704
705
706
707
708
709
710
711
712
713
714
715
716
717
718
719
720
721
722
723
724
725
726
727
728
729
730
731
732
733
734
735
736
737
738
739
740
#
# This workflow runs CI E2E tests with Playwright.
#
# It relies on the container image built by 'container-images-ci.yml'.
#
name: E2E CI Playwright

on:
  pull_request:
  workflow_dispatch:
  push:
    branches:
      - master

# Default token is read-only; jobs that need more mint scoped app tokens.
permissions:
  contents: read

# Environment shared by every job: points PostHog at the locally-started
# Postgres/Redis/Kafka/ClickHouse/object-storage services.
env:
  SECRET_KEY: '6b01eee4f945ca25045b5aab440b953461faf08693a9abbf1166dc7c6b9772da' # unsafe - for testing only
  REDIS_URL: redis://localhost
  DATABASE_URL: postgres://posthog:posthog@localhost:5432/posthog_e2e_test
  PERSONS_DB_WRITER_URL: postgres://posthog:posthog@localhost:5432/posthog_persons_e2e_test
  KAFKA_HOSTS: kafka:9092
  DISABLE_SECURE_SSL_REDIRECT: 1
  SECURE_COOKIES: 0
  OPT_OUT_CAPTURE: 0
  E2E_TESTING: 1
  SKIP_SERVICE_VERSION_REQUIREMENTS: 1
  EMAIL_HOST: email.test.posthog.net
  SITE_URL: http://localhost:8000
  NO_RESTART_LOOP: 1
  OBJECT_STORAGE_ENABLED: 1
  OBJECT_STORAGE_ENDPOINT: http://localhost:19000
  OBJECT_STORAGE_ACCESS_KEY_ID: object_storage_root_user
  OBJECT_STORAGE_SECRET_ACCESS_KEY: object_storage_root_password
  GITHUB_ACTION_RUN_URL: ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}
  CELERY_METRICS_PORT: 8999
  CLOUD_DEPLOYMENT: E2E
  CLICKHOUSE_HOST: 'localhost'
  CLICKHOUSE_SECURE: 'False'
  CLICKHOUSE_VERIFY: 'False'
  CLICKHOUSE_DATABASE: posthog_test
  # Database names passed to the plugins Docker service via env var substitution
  POSTHOG_DB_NAME: posthog_e2e_test
  POSTHOG_PERSONS_DB_NAME: posthog_persons_e2e_test
  PGHOST: localhost
  PGUSER: posthog
  PGPASSWORD: posthog
  PGPORT: 5432
  # this is a fake key so this workflow can run for external contributors as they do not have access to secrets (that we don't need here)
  OIDC_RSA_PRIVATE_KEY: ${{ vars.OIDC_RSA_FAKE_PRIVATE_KEY }}
  OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }}
  ANTHROPIC_API_KEY: ${{ secrets.ANTHROPIC_API_KEY }}
  GEMINI_API_KEY: ${{ secrets.GEMINI_API_KEY }}
  INKEEP_API_KEY: ${{ secrets.INKEEP_API_KEY }}
  AZURE_INFERENCE_CREDENTIAL: ${{ secrets.AZURE_INFERENCE_CREDENTIAL }}
  AZURE_INFERENCE_ENDPOINT: ${{ secrets.AZURE_INFERENCE_ENDPOINT }}

# One run per branch: new pushes cancel in-flight PR runs, but never master runs.
concurrency:
  group: ${{ github.workflow }}-${{ github.head_ref || github.ref }}
  cancel-in-progress: ${{ github.event_name == 'pull_request' }}
jobs:
  # Gatekeeper job: decides whether the (expensive) Playwright job needs to run
  # and reads the oldest supported ClickHouse version for the compose stack.
  changes:
    runs-on: ubuntu-latest
    timeout-minutes: 5
    # Only run on PRs from the same repo (not forks) or on master push
    if: github.event_name == 'push' || github.event.pull_request.head.repo.full_name == github.repository
    name: Determine need to run E2E checks
    # Set job outputs to values from filter step
    outputs:
      # Falls back to 'true' when the filter step was skipped (master push runs everything)
      shouldRun: ${{ steps.changes.outputs.shouldRun || 'true' }}
      oldest_supported: ${{ steps.read-versions.outputs.oldest_supported }}
    steps:
      # For pull requests it's not necessary to check out the code
      - uses: actions/create-github-app-token@1b10c78c7865c340bc4f6099eb2f838309f1e8c3 # v3.1.1
        id: app-token
        if: github.event_name == 'pull_request' && github.event.pull_request.head.repo.full_name == github.repository
        with:
          client-id: ${{ secrets.GH_APP_POSTHOG_PATHS_FILTER_APP_ID }}
          private-key: ${{ secrets.GH_APP_POSTHOG_PATHS_FILTER_PRIVATE_KEY }}
      - uses: dorny/paths-filter@fbd0ab8f3e69293af611ebaee6363fc25e6d187d # v4.0.1
        id: changes
        if: github.event_name != 'push' # Run all tests on master push
        with:
          token: ${{ steps.app-token.outputs.token || github.token }}
          # NOTE: all patterns quoted uniformly — this block is re-parsed as YAML
          # by paths-filter, and mixed quoting invites accidental YAML specials.
          filters: |
            shouldRun:
              # Avoid running E2E tests for irrelevant changes
              # NOTE: we are at risk of missing a dependency here. We could make
              # the dependencies more clear if we separated the backend/frontend
              # code completely
              - 'ee/**'
              - 'posthog/!(temporal/**)/**'
              - 'bin/*'
              - 'frontend/**/*'
              - 'playwright/snapshots.yml'
              - 'requirements.txt'
              - 'requirements-dev.txt'
              - 'package.json'
              - 'pnpm-lock.yaml'
              # Make sure we run if someone is explicitly changes the workflow
              - '.github/workflows/ci-e2e-playwright.yml'
              - '.github/actions/build-n-cache-image/action.yml'
              - '.github/clickhouse-versions.json'
              # We use docker compose for tests, make sure we rerun on
              # changes to docker-compose.dev.yml e.g. dependency
              # version changes
              - 'docker-compose.dev.yml'
              - 'Dockerfile'
              - 'playwright/**'
      - uses: actions/checkout@v6
        if: github.event_name == 'push' || steps.changes.outputs.shouldRun == 'true'
        with:
          # Only the version manifest is needed — keep the checkout minimal
          sparse-checkout: .github/clickhouse-versions.json
          sparse-checkout-cone-mode: false
      - name: Read ClickHouse versions from JSON
        id: read-versions
        if: github.event_name == 'push' || steps.changes.outputs.shouldRun == 'true'
        run: |
          oldest_supported=$(jq -r '.oldest_supported' .github/clickhouse-versions.json)
          if [ -z "$oldest_supported" ] || [ "$oldest_supported" = "null" ]; then
            echo "::error::No oldest_supported version found in .github/clickhouse-versions.json"
            exit 1
          fi
          echo "oldest_supported=$oldest_supported" >> $GITHUB_OUTPUT
  # Main test job: boots the full PostHog stack and runs the Playwright suite.
  playwright:
    name: Playwright E2E tests
    needs: [changes]
    if: needs.changes.outputs.shouldRun == 'true'
    # WARNING: Runner size is tuned to avoid CPU scheduling contention. Talk to #team-devex before changing.
    runs-on: depot-ubuntu-latest-16
    timeout-minutes: 45
    outputs:
      vr_run_id: ${{ steps.vr-create.outputs.run_id }}
    steps:
      - uses: actions/checkout@v6
        with:
          ref: ${{ github.event.pull_request.head.sha || github.sha }}
          repository: ${{ github.event.pull_request.head.repo.full_name || github.repository }}
          clean: false
      - name: Clean up data directories with container permissions
        run: |
          # Use docker to clean up files created by containers
          [ -d "data" ] && docker run --rm -v "$(pwd)/data:/data" alpine sh -c "rm -rf /data/seaweedfs /data/minio" || true
        continue-on-error: true
      # Launches compose in a backgrounded subshell so dependency installation
      # below overlaps with container startup. NOTE(review): a failure of the
      # background subshell does not fail this step — it is presumably surfaced
      # by the later ci-wait-for-docker wait steps; confirm before relying on it.
      - name: Stop/Start stack with Docker Compose
        shell: bash
        run: |
          export CLICKHOUSE_SERVER_IMAGE=${{ needs.changes.outputs.oldest_supported }}
          cp posthog/user_scripts/latest_user_defined_function.xml docker/clickhouse/user_defined_function.xml
          (
            max_attempts=3
            attempt=1
            delay=5
            while [ $attempt -le $max_attempts ]; do
              echo "Attempt $attempt of $max_attempts to start stack..."
              if docker compose -f docker-compose.dev.yml down && \
                docker compose -f docker-compose.dev.yml -f docker-compose.profiles.yml up -d; then
                echo "Stack started successfully"
                exit 0
              fi
              echo "Failed to start stack on attempt $attempt"
              if [ $attempt -lt $max_attempts ]; then
                sleep_time=$((delay * 2 ** (attempt - 1)))
                echo "Waiting ${sleep_time} seconds before retry..."
                sleep $sleep_time
              fi
              attempt=$((attempt + 1))
            done
            echo "Failed to start stack after $max_attempts attempts"
            exit 1
          ) &
      # Service hostnames resolve to localhost so the env URLs above work on the runner.
      - name: Add service hostnames to /etc/hosts
        shell: bash
        run: echo "127.0.0.1 db redis7 kafka clickhouse clickhouse-coordinator objectstorage seaweedfs temporal" | sudo tee -a /etc/hosts
      - name: Set up Python
        uses: actions/setup-python@a309ff8b426b58ec0e2a45f0f869d46889d02405 # v6.2.0
        with:
          python-version: 3.12.12
      - name: Install uv
        id: setup-uv
        uses: astral-sh/setup-uv@eac588ad8def6316056a12d4907a9d4d84ff7a3b # v7.3.0
        with:
          version: '0.10.2' # pinned: unpinned setup-uv calls GH API on every job, exhausts rate limit
          enable-cache: true
          cache-dependency-glob: uv.lock
          save-cache: ${{ github.ref == 'refs/heads/master' }}
      # 'git diff --quiet' exits non-zero when there are differences, hence the inverted echo.
      - name: Determine if hogql-parser has changed compared to master
        shell: bash
        id: hogql-parser-diff
        run: |
          git fetch --no-tags --prune --depth=1 origin master
          changed=$(git diff --quiet HEAD origin/master -- common/hogql_parser/ && echo "false" || echo "true")
          echo "changed=$changed" >> $GITHUB_OUTPUT
- name: Install SAML (python3-saml) dependencies
if: steps.setup-uv.outputs.cache-hit != 'true'
shell: bash
run: |
sudo apt-get update && sudo apt-get install libxml2-dev libxmlsec1-dev libxmlsec1-openssl
      - name: Install pnpm
        uses: pnpm/action-setup@fc06bc1257f339d1d5d8b3a19a8cae5388b55320 # v5.0.0
      # The bundled node-gyp helper script ships without the executable bit.
      - name: Fix node-gyp permissions
        run: chmod +x ~/setup-pnpm/node_modules/.pnpm/pnpm@*/node_modules/pnpm/dist/node_modules/node-gyp/gyp/gyp_main.py
      - name: Set up Node.js
        uses: actions/setup-node@6044e13b5dc448c55e2357c09f80417699197238 # v6.2.0
        with:
          node-version-file: .nvmrc
          cache: pnpm
          # Cache also keys on this workflow file so cache layout changes invalidate it.
          cache-dependency-path: |
            pnpm-lock.yaml
            .github/workflows/ci-e2e-playwright.yml
      # tests would intermittently fail in GH actions
      # with exit code 134 _after passing_ all tests
      # this appears to fix it
      # absolute wild tbh https://stackoverflow.com/a/75503402
      - uses: tlambert03/setup-qt-libs@19e4ef2d781d81f5f067182e228b54ec90d23b76 # v1.8
      - name: Install plugin_transpiler
        shell: bash
        run: |
          pnpm --filter=@posthog/plugin-transpiler... install --frozen-lockfile
          bin/turbo --filter=@posthog/plugin-transpiler build
      # Sync into the setup-python environment so 'python manage.py ...' below works directly.
      - name: Install Python dependencies
        shell: bash
        run: |
          UV_PROJECT_ENVIRONMENT=$pythonLocation uv sync --frozen --dev
- name: Install the working version of hogql-parser
if: needs.changes.outputs.shouldRun == 'true' && steps.hogql-parser-diff.outputs.changed == 'true'
shell: bash
# This is not cached currently, as it's important to build the current HEAD version of hogql-parser if it has
# changed (requirements.txt has the already-published version)
run: |
sudo apt-get install unzip cmake curl uuid pkg-config
curl --fail --location https://www.antlr.org/download/antlr4-cpp-runtime-4.13.1-source.zip --output antlr4-source.zip || curl --fail --location https://raw.githubusercontent.com/antlr/website-antlr4/gh-pages/download/antlr4-cpp-runtime-4.13.1-source.zip --output antlr4-source.zip
# Check that the downloaded archive is the expected runtime - a security measure
anltr_known_md5sum="c875c148991aacd043f733827644a76f"
antlr_found_ms5sum="$(md5sum antlr4-source.zip | cut -d' ' -f1)"
if [[ "$anltr_known_md5sum" != "$antlr_found_ms5sum" ]]; then
echo "Unexpected MD5 sum of antlr4-source.zip!"
echo "Known: $anltr_known_md5sum"
echo "Found: $antlr_found_ms5sum"
exit 64
fi
unzip antlr4-source.zip -d antlr4-source && cd antlr4-source
cmake .
DESTDIR=out make install
sudo cp -r out/usr/local/include/antlr4-runtime /usr/include/
sudo cp out/usr/local/lib/libantlr4-runtime.so* /usr/lib/
sudo ldconfig
cd ..
pip install ./common/hogql_parser
      # Placeholder dist files let collectstatic/server start before the real frontend build lands.
      - name: Set up needed files
        shell: bash
        run: |
          mkdir -p frontend/dist
          touch frontend/dist/index.html
          touch frontend/dist/layout.html
          touch frontend/dist/exporter.html
          ./bin/download-mmdb
      - name: Install package.json dependencies with pnpm
        run: |
          pnpm --filter=@posthog/playwright... install --frozen-lockfile
          bin/turbo --filter=@posthog/frontend prepare
      - name: Start Docker services
        env:
          COMPOSE_FILE: docker-compose.dev.yml:docker-compose.profiles.yml
          COMPOSE_PROFILES: capture,temporal
        run: bin/ci-wait-for-docker launch --down
      - name: Wait for Docker services
        env:
          COMPOSE_FILE: docker-compose.dev.yml:docker-compose.profiles.yml
          COMPOSE_PROFILES: capture,temporal
        run: bin/ci-wait-for-docker wait capture temporal
      - name: Build frontend
        run: |
          pnpm --filter=@posthog/frontend... install --frozen-lockfile
          pnpm --filter=@posthog/frontend build:products
          pnpm --filter=@posthog/frontend build
      - name: Collect static files
        run: |
          # KLUDGE: to get the image-bitmap-data-url-worker-*.js.map files into the dist folder
          # KLUDGE: rrweb thinks they're alongside and the django's collectstatic fails
          cp frontend/node_modules/@posthog/rrweb/dist/image-bitmap-data-url-worker-*.js.map frontend/dist/ && python manage.py collectstatic --noinput
      # Creates the Postgres e2e DB and resets the ClickHouse test DB, retrying
      # because ClickHouse HTTP may drop connections right after container start.
      - name: Create test database
        shell: bash
        run: |
          createdb posthog_e2e_test || echo "Database already exists"
          run_clickhouse_query() {
            local query="$1"
            for attempt in {1..10}; do
              if printf '%s' "$query" | curl --silent --show-error --fail 'http://localhost:8123/' --data-binary @-; then
                echo
                return 0
              fi
              echo "ClickHouse query failed on attempt ${attempt}/10, retrying in 3s..."
              sleep 3
            done
            echo "ClickHouse query failed after 10 attempts: $query" >&2
            return 1
          }
          # Drop and recreate clickhouse test database. The HTTP endpoint can briefly
          # reset connections while the container is still settling after startup.
          run_clickhouse_query 'SELECT 1'
          run_clickhouse_query 'DROP DATABASE IF EXISTS posthog_test SYNC'
          run_clickhouse_query 'CREATE DATABASE posthog_test'
      - name: Cache Rust dependencies
        uses: Swatinem/rust-cache@c19371144df3bb44fab255c43d04cbc2ab54d1c4 # v2.9.1
        with:
          shared-key: 'v2-rust-backend'
          workspaces: rust
          save-if: ${{ github.ref == 'refs/heads/master' }}
      # sqlx-cli drives the Rust-owned persons DB migrations below.
      - name: Install sqlx-cli
        run: cargo install sqlx-cli --version 0.8.0 --features postgres --no-default-features --locked
      - name: Apply postgres and clickhouse migrations and setup dev
        run: |
          # Postgres migrations must run first — ClickHouse migration 0026
          # depends on the posthog_instancesetting table in Postgres
          python manage.py migrate --noinput
          # Run Rust migrations for persons e2e test database
          PERSONS_DATABASE_URL="postgres://posthog:posthog@localhost:5432/posthog_persons_e2e_test"
          sqlx database create -D "$PERSONS_DATABASE_URL"
          sqlx migrate run -D "$PERSONS_DATABASE_URL" --source rust/persons_migrations/
          python manage.py migrate_clickhouse 2>&1
          python manage.py setup_dev
      # Export the queue list so the Celery worker started later picks it up.
      - name: Source celery queues
        run: |
          source ./bin/celery-queues.env
          echo "CELERY_WORKER_QUEUES=$CELERY_WORKER_QUEUES" >> $GITHUB_ENV
      # Prefer a PR-specific posthog-node image when one exists, else fall back to master.
      - name: Resolve Node.js container image tag
        id: node-image
        run: |
          # ci-nodejs-container.yml tags images as pr-<number> for PRs
          if [ -n "${{ github.event.pull_request.number }}" ]; then
            TAG="pr-${{ github.event.pull_request.number }}"
          else
            TAG="${{ github.sha }}"
          fi
          if docker manifest inspect "ghcr.io/posthog/posthog-node:${TAG}" > /dev/null 2>&1; then
            echo "posthog-node image found: ${TAG}"
            echo "POSTHOG_NODE_TAG=${TAG}" >> "$GITHUB_ENV"
          else
            echo "posthog-node image not found for ${TAG}, using master"
            echo "POSTHOG_NODE_TAG=master" >> "$GITHUB_ENV"
          fi
      # Backend processes run in the background with logs captured to /tmp for
      # the artifact upload on failure.
      - name: Start PostHog web, Celery worker, Temporal worker & ingestion
        run: |
          python manage.py run_autoreload_celery --type=worker &> /tmp/celery.log &
          python manage.py start_temporal_worker --task-queue analytics-platform-task-queue &> /tmp/temporal-worker.log &
          # WARNING: Worker count is tuned to avoid CPU scheduling contention. Talk to #team-devex before changing.
          python -m granian --interface asgi posthog.asgi:application --host 0.0.0.0 --port 8000 --log-level debug --workers 2 &> /tmp/server.log &
          # Start the Node.js containers now that the database exists and migrations have run.
          # plugins: CDP (no mode = default capabilities)
          # ingestion-general: event ingestion (ingestion-v2-combined mode)
          # ingestion-sessionreplay: session replay ingestion (recordings-blob-ingestion-v2 mode)
          # recording-api: session replay API (recording-api mode)
          # ingestion-error-tracking: error tracking ingestion (ingestion-errortracking mode)
          # ingestion-logs: logs ingestion (ingestion-logs mode)
          # ingestion-traces: traces ingestion (ingestion-traces mode)
          COMPOSE_FILE=docker-compose.dev.yml COMPOSE_PROFILES=capture,ingestion bin/ci-wait-for-docker launch plugins ingestion-general ingestion-sessionreplay recording-api ingestion-error-tracking ingestion-logs ingestion-traces
      # Install Playwright browsers while we wait for PostHog to be ready
      - name: Install Playwright browsers
        run: pnpm --filter=@posthog/playwright exec playwright install chromium --with-deps
      - name: Wait for PostHog to be ready
        uses: iFaxity/wait-on-action@1fe019e0475491e9e8c4f421b6914ccc3ed8f99c # v1.2.1
        with:
          resource: http://localhost:8000
          timeout: 180000
          interval: 2000
          verbose: true
      - name: Wait for node services to be ready
        env:
          COMPOSE_FILE: docker-compose.dev.yml
          COMPOSE_PROFILES: capture,ingestion
        run: bin/ci-wait-for-docker wait plugins ingestion-general ingestion-sessionreplay recording-api ingestion-error-tracking ingestion-logs ingestion-traces
      # Stale PNGs are removed so snapshots reflect only this run's captures.
      - name: Clean snapshot directory
        run: find playwright/__snapshots__ -name '*.png' -delete 2>/dev/null || true
      - name: Run Playwright tests
        id: playwright-tests
        shell: bash
        # WARNING: Worker count is tuned to avoid CPU scheduling contention. Talk to #team-devex before changing.
        # Reduced from 6+4 to 4+2 to minimize CPU scheduling contention (see PR #46853)
        # Capture-only: VR is the gate for visual changes, Playwright just captures screenshots
        run: |
          pnpm --filter=@posthog/playwright exec playwright test --workers=4 --max-failures=5 --update-snapshots
      # Re-runs tests changed relative to the PR base to catch fresh flakiness.
      - name: Verify changed Playwright tests are stable
        if: success() && github.event_name == 'pull_request'
        shell: bash
        run: |
          BASE_SHA="${{ github.event.pull_request.base.sha }}"
          git fetch --no-tags --prune --depth=50 origin "$BASE_SHA"
          .github/scripts/verify-playwright-new-tests-and-snapshots.sh "$BASE_SHA" 10
      # Visual Review: create run + upload snapshots directly from the test job.
      # Completion happens in handle-screenshots so the baseline lands in the same commit as PNGs.
      # These run even on test failure (always()) so partial snapshot sets are still uploaded.
      - name: Install VR CLI
        if: always() && (github.event.pull_request.head.repo.full_name == github.repository || github.event_name == 'push')
        run: cd products/visual_review/cli && npm ci && npm run build && npm link
      - name: Create VR run
        id: vr-create
        if: always() && (github.event.pull_request.head.repo.full_name == github.repository || github.event_name == 'push')
        env:
          VR_TOKEN: ${{ secrets.VR_API_TOKEN }}
          VR_BRANCH: ${{ github.event.pull_request.head.ref || github.ref_name }}
          VR_COMMIT: ${{ github.event.pull_request.head.sha || github.sha }}
          VR_PR: ${{ github.event.pull_request.number }}
        run: |
          RUN_ID=$(vr run create \
            --type playwright \
            --baseline playwright/snapshots.yml \
            --branch "$VR_BRANCH" \
            --commit "$VR_COMMIT" \
            --pr "$VR_PR" \
            --token "$VR_TOKEN")
          echo "run_id=$RUN_ID" >> $GITHUB_OUTPUT
      - name: Upload snapshots to Visual Review
        if: always() && steps.vr-create.outputs.run_id != ''
        env:
          VR_TOKEN: ${{ secrets.VR_API_TOKEN }}
          VR_RUN_ID: ${{ steps.vr-create.outputs.run_id }}
        run: |
          vr run upload \
            --run-id "$VR_RUN_ID" \
            --dir playwright/__snapshots__/ \
            --baseline playwright/snapshots.yml \
            --token "$VR_TOKEN"
      # ── Artifacts on failure / always ─────────────────────────────────────
      # Each 'docker logs' falls back to a placeholder line so missing containers
      # don't fail the step.
      - name: Capture docker logs
        if: always()
        run: |
          mkdir -p playwright/test-results
          docker logs posthog-proxy-1 > playwright/test-results/docker-proxy.log 2>&1 || echo "No proxy container" > playwright/test-results/docker-proxy.log
          docker logs posthog-capture-1 > playwright/test-results/docker-capture.log 2>&1 || echo "No capture container" > playwright/test-results/docker-capture.log
          docker logs posthog-plugins-1 > playwright/test-results/docker-plugins.log 2>&1 || echo "No plugins container" > playwright/test-results/docker-plugins.log
          docker logs posthog-ingestion-general-1 > playwright/test-results/docker-ingestion-general.log 2>&1 || echo "No ingestion-general container" > playwright/test-results/docker-ingestion-general.log
          docker logs posthog-ingestion-sessionreplay-1 > playwright/test-results/docker-ingestion-sessionreplay.log 2>&1 || echo "No ingestion-sessionreplay container" > playwright/test-results/docker-ingestion-sessionreplay.log
          docker logs posthog-recording-api-1 > playwright/test-results/docker-recording-api.log 2>&1 || echo "No recording-api container" > playwright/test-results/docker-recording-api.log
          docker logs posthog-ingestion-error-tracking-1 > playwright/test-results/docker-ingestion-error-tracking.log 2>&1 || echo "No ingestion-error-tracking container" > playwright/test-results/docker-ingestion-error-tracking.log
          docker logs posthog-ingestion-logs-1 > playwright/test-results/docker-ingestion-logs.log 2>&1 || echo "No ingestion-logs container" > playwright/test-results/docker-ingestion-logs.log
          docker logs posthog-ingestion-traces-1 > playwright/test-results/docker-ingestion-traces.log 2>&1 || echo "No ingestion-traces container" > playwright/test-results/docker-ingestion-traces.log
      - name: Archive test artifacts
        if: always()
        uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
        with:
          name: playwright-test-results
          path: |
            playwright/playwright-report/
            playwright/playwright-report-attempt-*/
            playwright/test-results/
            playwright/test-results-attempt-*/
            /tmp/celery.log
            /tmp/server.log
            /tmp/temporal-worker.log
            /tmp/playwright-output-attempt-*.log
          retention-days: 30
          if-no-files-found: ignore
      - name: Upload test results
        if: always()
        uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
        with:
          name: junit-results-playwright
          path: playwright/junit-results.xml
          if-no-files-found: ignore
      # Publishes the HTML report; the deployment URL is parsed out of wrangler's
      # output and exposed for the summary/comment steps below.
      - name: Publish report to Cloudflare Pages
        if: always() && (github.event_name == 'push' || github.event.pull_request.head.repo.full_name == github.repository)
        id: cf-deploy
        continue-on-error: true
        env:
          CLOUDFLARE_API_TOKEN: ${{ secrets.CLOUDFLARE_API_TOKEN }}
          CLOUDFLARE_ACCOUNT_ID: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }}
        run: |
          BRANCH="${{ github.event_name == 'pull_request' && format('pr-{0}', github.event.number) || 'master' }}"
          npx --yes wrangler@3 pages deploy playwright/playwright-report \
            --project-name=playwright-report \
            --branch="$BRANCH" \
            --commit-dirty=true 2>&1 | tee /tmp/wrangler-output.txt
          URL=$(grep -oP 'https://\S+\.pages\.dev' /tmp/wrangler-output.txt | tail -1 || true)
          if [ -n "$URL" ]; then
            echo "deployment-url=$URL" >> $GITHUB_OUTPUT
          fi
      - name: Write report URL to job summary
        if: always() && steps.cf-deploy.outputs.deployment-url != ''
        env:
          DEPLOYMENT_URL: ${{ steps.cf-deploy.outputs.deployment-url }}
        run: |
          echo "## 🎭 Playwright report" >> $GITHUB_STEP_SUMMARY
          echo "" >> $GITHUB_STEP_SUMMARY
          echo "**Report URL:** $DEPLOYMENT_URL" >> $GITHUB_STEP_SUMMARY
          echo "" >> $GITHUB_STEP_SUMMARY
          echo "- Commit: \`${{ github.sha }}\`" >> $GITHUB_STEP_SUMMARY
          echo "- Run: [#${{ github.run_number }}](${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }})" >> $GITHUB_STEP_SUMMARY
- name: Upsert report URL comment on PR
if: always() && github.event_name == 'pull_request' && github.event.pull_request.head.repo.full_name == github.repository
continue-on-error: true
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
env:
DEPLOYMENT_URL: ${{ steps.cf-deploy.outputs.deployment-url }}
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
script: |
const fs = require('fs');
const marker = '<!-- playwright-report-comment -->';
const reportUrl = process.env.DEPLOYMENT_URL;
const jobPassed = '${{ job.status }}' === 'success';
// Collect failed and flaky tests from JSON report — only populated when
// results.json exists (i.e. tests actually ran). If the file is missing
// (setup failure, cancelled, etc.) extraLines stays empty and we treat
// it the same as "no failures found", so we don't delete an existing
// comment that may have relevant info from a previous run.
let extraLines = '';
let resultsRead = false;
let failed = [];
try {
const results = JSON.parse(fs.readFileSync('playwright/results.json', 'utf8'));
resultsRead = true;
const flaky = [];
function collect(suites) {
for (const suite of suites || []) {
for (const spec of suite.specs || []) {
for (const test of spec.tests || []) {
if (test.status === 'unexpected') {
failed.push(`- ${spec.title} (${test.projectName})`);
} else if (test.status === 'flaky') {
flaky.push(`- ${spec.title} (${test.projectName})`);
}
}
}
collect(suite.suites);
}
}
collect(results.suites);
if (failed.length > 0) {
extraLines += `\n\n❌ **${failed.length} failed test${failed.length > 1 ? 's' : ''}:**\n${failed.join('\n')}`;
}
if (flaky.length > 0) {
extraLines += `\n\n⚠️ **${flaky.length} flaky test${flaky.length > 1 ? 's' : ''}:**\n${flaky.join('\n')}`;
}
} catch {}
// Flake verification results for changed test files
try {
const flakeResults = JSON.parse(fs.readFileSync('playwright/flake-verification-results.json', 'utf8'));
if (flakeResults.status === 'failed') {
const fileList = flakeResults.files.map(f => `- \`${f}\``).join('\n');
const flakeReportLink = reportUrl ? ` [View report →](${reportUrl})` : '';
extraLines += `\n\n🔁 **Flake verification failed** (--repeat-each=${flakeResults.repeat_count}):\n${fileList}\n\nThe report only shows the tests under verification.${flakeReportLink} Fix these before merging.`;
}
} catch {}
const { data: comments } = await github.rest.issues.listComments({
owner: context.repo.owner,
repo: context.repo.repo,
issue_number: context.issue.number,
});
const existing = comments.find(c => c.body.includes(marker));
// No failures or flakies — nothing to comment about
if (!extraLines) {
// Clean up any existing comment, but only when we know tests actually ran and passed
if (resultsRead && existing) {
await github.rest.issues.deleteComment({
owner: context.repo.owner,
repo: context.repo.repo,
comment_id: existing.id,
});
}
return;
}
const reportLink = reportUrl ? ` · [View test results →](${reportUrl})` : '';
const footer = '\n\n\n*These issues are not necessarily caused by your changes.*\n*Annoyed by this comment? Help fix flakies and failures and it\'ll disappear!*';
const body = `${marker}\n🎭 Playwright report${reportLink}${extraLines}${footer}`;
if (existing) {
await github.rest.issues.updateComment({
owner: context.repo.owner,
repo: context.repo.repo,
comment_id: existing.id,
body,
});
} else {
await github.rest.issues.createComment({
owner: context.repo.owner,
repo: context.repo.repo,
issue_number: context.issue.number,
body,
});
}
  # Telemetry job: reports workflow/job durations to PostHog (twice — main
  # project and DevEx project). Only on the first run attempt to avoid skew.
  capture-run-time:
    name: Capture run time
    runs-on: ubuntu-latest
    timeout-minutes: 5
    needs: [changes, playwright, playwright_tests]
    if: # Run on pull requests to PostHog/posthog + on PostHog/posthog outside of PRs - but never on forks or Dependabot (no secrets access)
      always() && github.actor != 'dependabot[bot]' &&
      needs.changes.outputs.shouldRun == 'true' && (
      (github.event_name == 'pull_request' && github.event.pull_request.head.repo.full_name == 'PostHog/posthog') ||
      (github.event_name != 'pull_request' && github.repository == 'PostHog/posthog'))
    steps:
      - name: Get telemetry app token
        id: telemetry-app-token
        if: github.run_attempt == '1'
        uses: actions/create-github-app-token@1b10c78c7865c340bc4f6099eb2f838309f1e8c3 # v3.1.1
        with:
          client-id: ${{ secrets.GH_APP_TELEMETRY_APP_ID }}
          private-key: ${{ secrets.GH_APP_TELEMETRY_PRIVATE_KEY }}
      - name: Capture running time to PostHog
        if: github.run_attempt == '1'
        uses: PostHog/posthog-github-action@58dea254b598fb5d469c0699c98af8288a7f7650 # v1.2.0
        with:
          posthog-token: ${{ secrets.POSTHOG_API_TOKEN }}
          event: 'posthog-ci-running-time'
          capture-run-duration: true
          capture-job-durations: true
          github-token: ${{ steps.telemetry-app-token.outputs.token }}
          status-job: 'Playwright tests pass'
          runner: 'depot'
      # Best-effort duplicate to the DevEx project; failures here never fail CI.
      - name: Capture running time to DevEx PostHog
        if: github.run_attempt == '1'
        continue-on-error: true
        uses: PostHog/posthog-github-action@58dea254b598fb5d469c0699c98af8288a7f7650 # v1.2.0
        with:
          posthog-token: ${{ secrets.POSTHOG_DEVEX_PROJECT_API_TOKEN }}
          event: 'posthog-ci-running-time'
          capture-run-duration: true
          capture-job-durations: true
          github-token: ${{ steps.telemetry-app-token.outputs.token }}
          status-job: 'Playwright tests pass'
          runner: 'depot'
  # Finalizes the Visual Review run created by the playwright job; only runs
  # when that job succeeded and actually produced a run id.
  vr-complete:
    name: Complete Visual Review run
    runs-on: ubuntu-latest
    timeout-minutes: 15
    needs: [playwright]
    if: needs.playwright.outputs.vr_run_id != '' && needs.playwright.result == 'success'
    steps:
      - uses: actions/checkout@v6
        with:
          ref: ${{ github.event.pull_request.head.sha || github.sha }}
          repository: ${{ github.event.pull_request.head.repo.full_name || github.repository }}
          # Only the files the VR CLI needs — keeps the checkout fast.
          sparse-checkout: |
            .nvmrc
            products/visual_review/cli
            products/visual_review/frontend/generated/api.schemas.ts
            playwright/snapshots.yml
          sparse-checkout-cone-mode: false
      - name: Set up Node.js
        uses: actions/setup-node@6044e13b5dc448c55e2357c09f80417699197238 # v6.2.0
        with:
          node-version-file: .nvmrc
      - name: Install VR CLI
        run: cd products/visual_review/cli && npm ci && npm run build && npm link
      - name: Complete Visual Review run
        env:
          VR_TOKEN: ${{ secrets.VR_API_TOKEN }}
        run: |
          vr run complete \
            --run-id "${{ needs.playwright.outputs.vr_run_id }}" \
            --baseline playwright/snapshots.yml \
            --token "$VR_TOKEN"
  # Collate test + VR completion status for the required check
  # ('skipped' counts as passing so docs-only PRs that skip E2E still go green).
  playwright_tests:
    needs: [playwright, vr-complete]
    name: Playwright tests pass
    runs-on: ubuntu-latest
    timeout-minutes: 5
    if: always()
    steps:
      - name: Check outcome
        run: |
          if [[ "${{ needs.playwright.result }}" != "success" && "${{ needs.playwright.result }}" != "skipped" ]]; then
            echo "Playwright tests failed."
            exit 1
          fi
          if [[ "${{ needs.vr-complete.result }}" != "success" && "${{ needs.vr-complete.result }}" != "skipped" ]]; then
            echo "Visual Review did not complete successfully (result: ${{ needs.vr-complete.result }})."
            exit 1
          fi