Skip to content

Commit db24fb0

Browse files
committed
feat(coprocessor): parallelize CI tests across services using cargo-nextest
1 parent 73e9e30 commit db24fb0

File tree

3 files changed

+205
-63
lines changed

3 files changed

+205
-63
lines changed

.github/workflows/coprocessor-cargo-tests.yml

Lines changed: 166 additions & 34 deletions
Original file line number | Diff line number | Diff line change
@@ -32,18 +32,70 @@ jobs:
3232
- coprocessor/fhevm-engine/**
3333
- coprocessor/proto/**
3434
cargo-tests:
35-
name: coprocessor-cargo-test/cargo-tests (bpr)
35+
name: coprocessor-cargo-test/${{ matrix.service }}
3636
needs: check-changes
3737
if: ${{ needs.check-changes.outputs.changes-rust-files == 'true' }}
3838
permissions:
3939
contents: 'read' # Required to checkout repository code
4040
checks: 'write' # Required to create GitHub checks for test results
4141
packages: 'read' # Required to read GitHub packages/container registry
42-
pull-requests: 'write' # Required to post coverage comment on PR
43-
runs-on: large_ubuntu_16
42+
runs-on: ${{ matrix.runner }}
4443
env:
4544
CARGO_TARGET_X86_64_UNKNOWN_LINUX_GNU_LINKER: clang
4645
RUSTFLAGS: "-C link-arg=-fuse-ld=mold"
46+
strategy:
47+
fail-fast: false
48+
matrix:
49+
include:
50+
- service: tfhe-worker
51+
package: tfhe-worker
52+
needs_db: true
53+
needs_db_reset: false
54+
needs_localstack: false
55+
needs_foundry: false
56+
runner: large_ubuntu_16
57+
- service: sns-worker
58+
package: sns-worker
59+
needs_db: true
60+
needs_db_reset: true
61+
needs_localstack: true
62+
needs_foundry: false
63+
runner: large_ubuntu_16
64+
- service: zkproof-worker
65+
package: zkproof-worker
66+
needs_db: true
67+
needs_db_reset: true
68+
needs_localstack: false
69+
needs_foundry: false
70+
runner: large_ubuntu_16
71+
- service: transaction-sender
72+
package: transaction-sender
73+
needs_db: true
74+
needs_db_reset: true
75+
needs_localstack: true
76+
needs_foundry: true
77+
runner: large_ubuntu_16
78+
- service: gw-listener
79+
package: gw-listener
80+
needs_db: true
81+
needs_db_reset: true
82+
needs_localstack: false
83+
needs_foundry: true
84+
runner: large_ubuntu_16
85+
- service: host-listener
86+
package: host-listener
87+
needs_db: true
88+
needs_db_reset: true
89+
needs_localstack: false
90+
needs_foundry: true
91+
runner: large_ubuntu_16
92+
- service: common
93+
package: fhevm-engine-common
94+
needs_db: false
95+
needs_db_reset: false
96+
needs_localstack: false
97+
needs_foundry: false
98+
runner: ubuntu-latest
4799
steps:
48100
- name: Checkout code
49101
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
@@ -55,20 +107,23 @@ jobs:
55107
run: git lfs checkout
56108

57109
- name: Login to GitHub Container Registry
110+
if: ${{ matrix.needs_db }}
58111
uses: docker/login-action@9780b0c442fbb1117ed29e0efdff1e18412f7567 # v3.3.0
59112
with:
60113
registry: ghcr.io
61114
username: ${{ github.actor }}
62115
password: ${{ secrets.GITHUB_TOKEN }}
63116

64117
- name: Login to GitHub Chainguard Registry
118+
if: ${{ matrix.needs_db }}
65119
uses: docker/login-action@9780b0c442fbb1117ed29e0efdff1e18412f7567 # v3.3.0
66120
with:
67121
registry: cgr.dev
68122
username: ${{ secrets.CGR_USERNAME }}
69123
password: ${{ secrets.CGR_PASSWORD }}
70124

71125
- name: Start database services (background)
126+
if: ${{ matrix.needs_db }}
72127
run: |
73128
nohup docker compose up -d --build db-migration > /tmp/db-init.log 2>&1 &
74129
working-directory: coprocessor/fhevm-engine/tfhe-worker
@@ -87,10 +142,13 @@ jobs:
87142
- name: Install cargo dependencies
88143
run: |
89144
sudo apt-get update
90-
sudo apt-get install -y protobuf-compiler mold clang && \
91-
cargo install sqlx-cli --version 0.7.2 --no-default-features --features postgres --locked
145+
sudo apt-get install -y protobuf-compiler mold clang
146+
- name: Install sqlx-cli
147+
if: ${{ matrix.needs_db }}
148+
run: cargo install sqlx-cli --version 0.7.2 --no-default-features --features postgres --locked
92149

93150
- name: Install foundry
151+
if: ${{ matrix.needs_foundry }}
94152
uses: foundry-rs/foundry-toolchain@de808b1eea699e761c404bda44ba8f21aba30b2c
95153

96154
- name: Cache cargo
@@ -100,31 +158,32 @@ jobs:
100158
~/.cargo/registry
101159
~/.cargo/git
102160
target
103-
key: ${{ runner.os }}-cargo-coverage-${{ hashFiles('**/Cargo.lock') }}
104-
restore-keys: ${{ runner.os }}-cargo-coverage-
161+
key: ${{ runner.os }}-cargo-coverage-${{ matrix.service }}-${{ hashFiles('**/Cargo.lock') }}
162+
restore-keys: ${{ runner.os }}-cargo-coverage-${{ matrix.service }}-
105163

106164
- name: Use Node.js
107165
uses: actions/setup-node@60edb5dd545a775178f52524783378180af0d1f8 # v4.0.2
108166
with:
109167
node-version: 20.x
110168

111169
- name: Start localstack
170+
if: ${{ matrix.needs_localstack }}
112171
run: docker run --rm -d -p 4566:4566 --name localstack localstack/localstack:4.14.0
113172

114-
- name: Clean previous coverage data
115-
run: cargo llvm-cov clean --workspace --profile coverage
116-
working-directory: coprocessor/fhevm-engine
117-
118173
- name: Compile tests with coverage instrumentation
174+
env:
175+
DATABASE_URL: postgresql://postgres:postgres@localhost:5432/coprocessor
176+
SQLX_OFFLINE: 'true'
177+
TEST_PACKAGE: ${{ matrix.package }}
119178
run: |
179+
cargo llvm-cov clean --workspace --profile coverage
120180
cargo llvm-cov show-env --sh > /tmp/llvm-cov-env.sh
121181
source /tmp/llvm-cov-env.sh
122-
DATABASE_URL=postgresql://postgres:postgres@localhost:5432/coprocessor \
123-
SQLX_OFFLINE=true \
124-
cargo test --no-run --workspace --profile coverage
182+
cargo test --no-run -p "$TEST_PACKAGE" --profile coverage
125183
working-directory: coprocessor/fhevm-engine
126184

127185
- name: Wait for database migration
186+
if: ${{ matrix.needs_db }}
128187
run: |
129188
SECONDS=0
130189
while ! docker container inspect db-migration > /dev/null 2>&1; do
@@ -146,41 +205,91 @@ jobs:
146205
echo "Database migration completed"
147206
148207
- name: Run tests with coverage
208+
env:
209+
DATABASE_URL: postgresql://postgres:postgres@localhost:5432/coprocessor
210+
SQLX_OFFLINE: ${{ (!matrix.needs_db) && 'true' || 'false' }}
211+
TEST_GLOBAL_LOCALSTACK: ${{ matrix.needs_localstack && '1' || '0' }}
212+
TEST_PACKAGE: ${{ matrix.package }}
213+
NEEDS_DB_RESET: ${{ matrix.needs_db_reset && '1' || '0' }}
214+
IS_MERGE_QUEUE: ${{ startsWith(github.head_ref, 'mergify/merge-queue/') && '1' || '0' }}
149215
run: |
150216
source /tmp/llvm-cov-env.sh
151-
DATABASE_URL=postgresql://postgres:postgres@localhost:5432/coprocessor \
152-
TEST_GLOBAL_LOCALSTACK=1 \
153-
SQLX_OFFLINE=true \
154-
cargo test --workspace --profile coverage
217+
# Only export COPROCESSOR_TEST_LOCALHOST_RESET when needed.
218+
# The Rust code checks .is_ok() (existence), so setting it to "0" still triggers it.
219+
if [ "$NEEDS_DB_RESET" = "1" ]; then
220+
export COPROCESSOR_TEST_LOCALHOST_RESET=1
221+
fi
222+
# Merge queue: leave unset so supported_types() defaults to full matrix.
223+
# PR CI: run only small types (bool through 64-bit) for faster feedback.
224+
if [ "$IS_MERGE_QUEUE" != "1" ]; then
225+
export TFHE_WORKER_EVENT_TYPE_MATRIX=local
226+
fi
227+
cargo test -p "$TEST_PACKAGE" --profile coverage
228+
working-directory: coprocessor/fhevm-engine
229+
230+
- name: Export LCOV coverage data
231+
if: ${{ !cancelled() }}
232+
run: cargo llvm-cov report --lcov --profile coverage --output-path /tmp/lcov.info || true
155233
working-directory: coprocessor/fhevm-engine
156234

157-
- name: Generate coverage report
235+
- name: Upload coverage artifact
158236
if: ${{ !cancelled() }}
237+
uses: actions/upload-artifact@65c4c4a1ddee5b72f698fdd19549f0f0fb45cf08 # v4.6.0
238+
with:
239+
name: lcov-${{ matrix.service }}
240+
path: /tmp/lcov.info
241+
retention-days: 1
242+
if-no-files-found: ignore
243+
244+
coverage-report:
245+
name: coprocessor-cargo-test/coverage-report
246+
needs: [check-changes, cargo-tests]
247+
if: ${{ !cancelled() && needs.check-changes.outputs.changes-rust-files == 'true' }}
248+
permissions:
249+
contents: 'read' # Required to checkout repository code
250+
pull-requests: 'write' # Required to post coverage comment on PR
251+
runs-on: ubuntu-latest
252+
steps:
253+
- name: Checkout code
254+
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
255+
with:
256+
persist-credentials: 'false'
257+
258+
- name: Install lcov
259+
run: sudo apt-get update && sudo apt-get install -y lcov
260+
261+
- name: Download all coverage artifacts
262+
uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8
263+
with:
264+
pattern: lcov-*
265+
path: /tmp/coverage
266+
267+
- name: Merge LCOV files
159268
run: |
160-
if cargo llvm-cov report --profile coverage > /tmp/cov-report.txt 2>&1; then
161-
REPORT=$(cat /tmp/cov-report.txt)
162-
else
163-
echo "cargo llvm-cov report failed:"
164-
cat /tmp/cov-report.txt
165-
REPORT=""
269+
LCOV_FILES=$(find /tmp/coverage -name 'lcov.info' -size +0c)
270+
if [ -z "$LCOV_FILES" ]; then
271+
echo "No coverage data found"
272+
exit 0
166273
fi
274+
LCOV_ARGS=()
275+
for f in $LCOV_FILES; do
276+
LCOV_ARGS+=(-a "$f")
277+
done
278+
lcov "${LCOV_ARGS[@]}" -o /tmp/lcov.info
279+
280+
- name: Generate coverage summary
281+
if: ${{ !cancelled() }}
282+
run: |
167283
{
168284
echo '## Coverage: coprocessor/fhevm-engine'
169-
if [ -n "$REPORT" ]; then
285+
if [ -f /tmp/lcov.info ]; then
170286
echo '```'
171-
echo "$REPORT"
287+
lcov --summary /tmp/lcov.info 2>&1 || true
172288
echo '```'
173289
else
174290
echo '*No coverage data available (tests may have failed before producing profiling data).*'
175291
fi
176292
} >> "$GITHUB_STEP_SUMMARY"
177-
echo "$REPORT"
178-
working-directory: coprocessor/fhevm-engine
179-
180-
- name: Export LCOV coverage data
181-
if: ${{ !cancelled() }}
182-
run: cargo llvm-cov report --lcov --profile coverage --output-path /tmp/lcov.info || true
183-
working-directory: coprocessor/fhevm-engine
184293
185294
- name: Diff coverage of changed lines
186295
if: ${{ !cancelled() }}
@@ -230,3 +339,26 @@ jobs:
230339
uses: marocchino/sticky-pull-request-comment@773744901bac0e8cbb5a0dc842800d45e9b2b405 # v2.9.4
231340
with:
232341
path: /tmp/coverage-comment.md
342+
343+
cargo-tests-status:
344+
name: coprocessor-cargo-test/cargo-tests (bpr)
345+
needs: [check-changes, cargo-tests, coverage-report]
346+
if: ${{ always() }}
347+
runs-on: ubuntu-latest
348+
env:
349+
CHECK_CHANGES_RESULT: ${{ needs.check-changes.result }}
350+
CARGO_TESTS_RESULT: ${{ needs.cargo-tests.result }}
351+
COVERAGE_RESULT: ${{ needs.coverage-report.result }}
352+
steps:
353+
- name: Check results
354+
run: |
355+
if [ "$CHECK_CHANGES_RESULT" = "failure" ] || \
356+
[ "$CHECK_CHANGES_RESULT" = "cancelled" ] || \
357+
[ "$CARGO_TESTS_RESULT" = "failure" ] || \
358+
[ "$CARGO_TESTS_RESULT" = "cancelled" ] || \
359+
[ "$COVERAGE_RESULT" = "failure" ] || \
360+
[ "$COVERAGE_RESULT" = "cancelled" ]; then
361+
echo "One or more jobs failed or were cancelled"
362+
exit 1
363+
fi
364+
echo "All jobs passed or were skipped"

coprocessor/fhevm-engine/test-harness/src/instance.rs

Lines changed: 32 additions & 4 deletions
Original file line number | Diff line number | Diff line change
@@ -55,6 +55,33 @@ pub async fn setup_test_db(mode: ImportMode) -> Result<DBInstance, Box<dyn std::
5555
}
5656
}
5757

58+
// Extracts the database name from a PostgreSQL URL.
59+
// e.g. "postgresql://user:pass@host:port/mydb?opt=val" -> "mydb"
60+
// Panics if the extracted name is empty or contains characters other than alphanumeric, underscore, or hyphen.
61+
fn extract_db_name(db_url: &str) -> &str {
62+
let after_slash = db_url
63+
.rsplit('/')
64+
.next()
65+
.expect("database URL must contain /");
66+
let name = after_slash
67+
.split('?')
68+
.next()
69+
.expect("split always yields at least one element");
70+
assert!(
71+
!name.is_empty()
72+
&& name
73+
.chars()
74+
.all(|c| c.is_alphanumeric() || c == '_' || c == '-'),
75+
"invalid database name extracted from URL: {name}"
76+
);
77+
name
78+
}
79+
80+
fn admin_url_from(db_url: &str) -> String {
81+
let last_slash = db_url.rfind('/').expect("database URL must contain /");
82+
format!("{}postgres", &db_url[..=last_slash])
83+
}
84+
5885
async fn setup_test_app_existing_localhost(
5986
with_reset: bool,
6087
mode: ImportMode,
@@ -63,7 +90,7 @@ async fn setup_test_app_existing_localhost(
6390

6491
if with_reset {
6592
info!("Resetting local database at {db_url}");
66-
let admin_db_url = db_url.as_str().replace("coprocessor", "postgres");
93+
let admin_db_url = admin_url_from(db_url.as_str());
6794
create_database(&admin_db_url, db_url.as_str(), mode).await?;
6895
}
6996

@@ -122,17 +149,18 @@ async fn create_database(
122149
db_url: &str,
123150
mode: ImportMode,
124151
) -> Result<(), Box<dyn std::error::Error>> {
125-
info!("Creating coprocessor db...");
152+
let db_name = extract_db_name(db_url);
153+
info!(db_name, "Creating database...");
126154
let admin_pool = sqlx::postgres::PgPoolOptions::new()
127155
.max_connections(1)
128156
.connect(admin_db_url)
129157
.await?;
130158

131-
sqlx::query!("DROP DATABASE IF EXISTS coprocessor;")
159+
sqlx::query(&format!("DROP DATABASE IF EXISTS \"{db_name}\""))
132160
.execute(&admin_pool)
133161
.await?;
134162

135-
sqlx::query!("CREATE DATABASE coprocessor;")
163+
sqlx::query(&format!("CREATE DATABASE \"{db_name}\""))
136164
.execute(&admin_pool)
137165
.await?;
138166

0 commit comments

Comments
 (0)