-
-
Notifications
You must be signed in to change notification settings - Fork 367
351 lines (305 loc) · 11.5 KB
/
tests.yml
File metadata and controls
351 lines (305 loc) · 11.5 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
name: Test Suite

on:
  push:
    branches: ['**'] # Run Job 1 on all branches
  pull_request:
    branches: [main, dev] # Run Job 1 on PRs to main/dev
  workflow_dispatch: # Manual trigger button in Actions tab
    inputs:
      run_integration:
        description: 'Run integration tests'
        required: false
        type: boolean
        default: false
      run_e2e:
        description: 'Run E2E tests'
        required: false
        type: boolean
        default: false

# Cancel in-progress runs when a new commit is pushed to the same ref
concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}
  cancel-in-progress: true
jobs:
  # ============================================================================
  # JOB 1: Fast Tests (Smoke + Unit)
  # Runs on: Every push, every PR
  # Duration: ~2 minutes
  # Purpose: Catch basic errors immediately
  # ============================================================================
  fast-tests:
    name: Fast Tests (Smoke + Unit)
    runs-on: ubuntu-latest
    env:
      PYTHONDONTWRITEBYTECODE: '1'
    steps:
      - name: Checkout code
        uses: actions/checkout@v4

      - name: Remove Python bytecode and pycache
        run: |
          echo "Removing any leftover .pyc and pycache"
          # -prune stops find from descending into a directory it is about to
          # delete; without it, find races with `rm -rf` and can exit non-zero
          # ("No such file or directory"), failing the step spuriously.
          find . -name '__pycache__' -type d -prune -print -exec rm -rf {} +
          find . -name '*.pyc' -type f -print -delete

      - name: Set up Python 3.13
        uses: actions/setup-python@v5
        with:
          python-version: '3.13'
          cache: 'pip'

      # LDAP/SASL/SSL headers are needed to build python-ldap from requirements
      - name: Install system dependencies
        run: |
          sudo apt-get update
          sudo apt-get install -y libldap2-dev libsasl2-dev libssl-dev

      - name: Install dependencies
        run: |
          python -m pip install --upgrade pip
          pip install -r requirements.txt
          pip install -r requirements-dev.txt

      # Directories/files the app expects to exist at runtime; world-writable
      # because tests run as the unprivileged runner user.
      - name: Create test environment structure
        run: |
          sudo mkdir -p /config /books/import /books/ingest /config/processed_books
          sudo chmod -R 777 /config /books
          touch /config/epub-fixer.log
          touch /config/converter.log
          touch /config/cwa.db

      - name: Run smoke and unit tests
        env:
          PYTHONPATH: ${{ github.workspace }}:${{ github.workspace }}/scripts
        run: |
          pytest -m "smoke or unit" \
            -n auto \
            --maxfail=3 \
            -v \
            --tb=short \
            --cov=cps \
            --cov-report=xml \
            --cov-report=term-missing

      # Upload even on failure so partial coverage is still recorded
      - name: Upload coverage to Codecov
        uses: codecov/codecov-action@v4
        if: always()
        with:
          files: ./coverage.xml
          flags: unittests
          name: fast-tests
          fail_ci_if_error: false

      - name: Comment PR with results
        uses: actions/github-script@v7
        if: github.event_name == 'pull_request' && failure()
        with:
          script: |
            github.rest.issues.createComment({
              issue_number: context.issue.number,
              owner: context.repo.owner,
              repo: context.repo.repo,
              body: '⚠️ **Fast tests failed!** Please check the [workflow logs](${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}) and fix before merging.'
            })
# ============================================================================
# JOB 2: Integration Tests (Docker)
# Runs on: Merge to main/dev, manual trigger
# Duration: ~15-20 minutes
# Purpose: Validate real-world behavior in Docker container
# ============================================================================
integration-tests:
name: Integration Tests (Docker)
runs-on: ubuntu-latest
# Only run on merge to main/dev OR manual dispatch with flag
if: |
github.ref == 'refs/heads/main' ||
github.ref == 'refs/heads/dev' ||
(github.event_name == 'workflow_dispatch' && inputs.run_integration)
steps:
- name: Checkout code
uses: actions/checkout@v4
- name: Set up Python 3.13
uses: actions/setup-python@v5
with:
python-version: '3.13'
cache: 'pip'
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
- name: Install system dependencies
run: |
sudo apt-get update
sudo apt-get install -y libldap2-dev libsasl2-dev libssl-dev
- name: Install dependencies
run: |
python -m pip install --upgrade pip
pip install -r requirements.txt
pip install -r requirements-dev.txt
- name: Build Docker image
uses: docker/build-push-action@v5
with:
context: .
push: false
load: true
tags: crocodilestick/calibre-web-automated:latest
cache-from: type=gha
cache-to: type=gha,mode=max
- name: Set test UID/GID
run: |
echo "CWA_TEST_PUID=$(id -u)" >> $GITHUB_ENV
echo "CWA_TEST_PGID=$(id -g)" >> $GITHUB_ENV
- name: Run Docker integration tests
run: |
# CRITICAL: No -n flag! Docker tests must run sequentially
pytest tests/docker/ tests/integration/ \
-v \
--tb=long \
--durations=10 \
--junitxml=integration-results.xml
env:
CWA_TEST_PORT: "8083" # CI uses production default port
CWA_TEST_IMAGE: "crocodilestick/calibre-web-automated:latest"
timeout-minutes: 30 # Kill if stuck
- name: Upload test results
uses: actions/upload-artifact@v4
if: always()
with:
name: integration-test-results
path: |
integration-results.xml
tests/logs/
tests/tmp/
retention-days: 7
- name: Notify on failure
uses: sarisia/actions-status-discord@v1
if: failure() && env.DISCORD_WEBHOOK != ''
env:
DISCORD_WEBHOOK: ${{ secrets.DISCORD_WEBHOOK }}
with:
webhook: ${{ secrets.DISCORD_WEBHOOK }}
title: "❌ Integration tests failed on ${{ github.ref_name }}"
description: |
**Commit**: ${{ github.sha }}
**Author**: ${{ github.actor }}
**Branch**: ${{ github.ref_name }}
[View logs](${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }})
color: 0xFF0000
username: GitHub Actions
# ============================================================================
# JOB 3: E2E Tests (Full Stack)
# Runs on: Manual trigger, all release tags (v3.2.0, v3.2.0-rc1, etc.)
# Duration: ~30-45 minutes
# Purpose: Validate complete user workflows
# NOTE: Tests run AFTER tag creation. Check results before publishing release!
# ============================================================================
e2e-tests:
name: E2E Tests (Full Stack)
runs-on: ubuntu-latest
# Run on any version tag OR manual dispatch
if: |
startsWith(github.ref, 'refs/tags/v') ||
(github.event_name == 'workflow_dispatch' && inputs.run_e2e)
steps:
- name: Checkout code
uses: actions/checkout@v4
- name: Set up Python 3.13
uses: actions/setup-python@v5
with:
python-version: '3.13'
cache: 'pip'
- name: Set up Docker Compose
run: |
docker compose version
- name: Install system dependencies
run: |
sudo apt-get update
sudo apt-get install -y libldap2-dev libsasl2-dev libssl-dev
- name: Install dependencies
run: |
python -m pip install --upgrade pip
pip install -r requirements.txt
pip install -r requirements-dev.txt
# E2E dependencies (when implemented)
# pip install playwright pytest-playwright
# playwright install chromium firefox
- name: Start Docker Compose stack
run: |
docker compose -f docker-compose.yml up -d
echo "Waiting for services to be healthy..."
timeout 180 bash -c 'until curl -f http://localhost:8083/health 2>/dev/null; do sleep 5; done'
- name: Run E2E tests
run: |
# When tests/e2e/ exists:
# pytest tests/e2e/ \
# -v \
# --headed \
# --video=on-failure \
# --screenshot=on-failure \
# --junitxml=e2e-results.xml
# For now, just verify stack is healthy
curl -f http://localhost:8083/health
echo "E2E test placeholder - implement tests/e2e/ directory"
timeout-minutes: 60
- name: Upload E2E artifacts
uses: actions/upload-artifact@v4
if: always()
with:
name: e2e-test-artifacts
path: |
e2e-results.xml
test-results/
videos/
screenshots/
retention-days: 14 # Keep longer for release debugging
- name: Dump container logs
if: always()
run: |
docker compose logs > docker-compose-logs.txt
docker compose ps
- name: Upload container logs
uses: actions/upload-artifact@v4
if: always()
with:
name: docker-compose-logs
path: docker-compose-logs.txt
retention-days: 14
- name: Notify on failure
uses: sarisia/actions-status-discord@v1
if: failure() && env.DISCORD_WEBHOOK != ''
env:
DISCORD_WEBHOOK: ${{ secrets.DISCORD_WEBHOOK }}
with:
webhook: ${{ secrets.DISCORD_WEBHOOK }}
title: "⛔ E2E tests failed for ${{ github.ref_name }}"
description: |
**DO NOT RELEASE** until fixed!
**Tag**: ${{ github.ref_name }}
**Commit**: ${{ github.sha }}
**Author**: ${{ github.actor }}
Review artifacts and browser recordings.
[View logs](${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }})
color: 0xFF0000
username: GitHub Actions
- name: Cleanup
if: always()
run: |
docker compose down -v
docker system prune -f
# ============================================================================
# Summary Job (Optional)
# Shows overall status in GitHub UI
# ============================================================================
test-summary:
name: Test Suite Summary
runs-on: ubuntu-latest
needs: [fast-tests, integration-tests, e2e-tests]
if: always()
steps:
- name: Check test results
run: |
echo "Fast Tests: ${{ needs.fast-tests.result }}"
echo "Integration Tests: ${{ needs.integration-tests.result }}"
echo "E2E Tests: ${{ needs.e2e-tests.result }}"
if [[ "${{ needs.fast-tests.result }}" == "failure" ]]; then
echo "❌ Fast tests failed"
exit 1
fi
if [[ "${{ needs.integration-tests.result }}" == "failure" ]]; then
echo "⚠️ Integration tests failed"
# Don't fail the summary, just warn
fi
if [[ "${{ needs.e2e-tests.result }}" == "failure" ]]; then
echo "⛔ E2E tests failed - do not release!"
# Don't fail the summary, just warn
fi
echo "✅ Test suite completed"