Commit ce559e4

Add PETSc/MPI integration tests and main application build verification
Co-authored-by: ManuelLerchner <[email protected]>
1 parent: 039481d

3 files changed: +360 -1 lines changed


.github/workflows/test.yml

15 additions, 1 deletion
@@ -33,14 +33,28 @@ jobs:
           cd build
           cmake -DENABLE_COVERAGE=ON ..
 
+      - name: Build main application
+        working-directory: ${{github.workspace}}/code/cpp/build
+        run: |
+          echo "Building main application..."
+          make -j$(nproc) cellcollectives || exit 1
+          echo "✅ Main application built successfully"
+
       - name: Build tests
         working-directory: ${{github.workspace}}/code/cpp/build
         run: make -j$(nproc) unit_tests
 
-      - name: Run tests
+      - name: Run unit tests
         working-directory: ${{github.workspace}}/code/cpp/build
         run: ./tests/unit_tests
 
+      - name: Run MPI tests
+        working-directory: ${{github.workspace}}/code/cpp/build
+        run: |
+          echo "Running MPI-enabled tests..."
+          mpirun -np 2 ./tests/unit_tests --gtest_filter="*MPI*" || echo "No MPI-specific tests found, continuing..."
+          echo "✅ MPI tests completed"
+
       - name: Generate coverage report
         working-directory: ${{github.workspace}}/code/cpp/build
         run: |

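Two details of the new steps are worth spelling out: the --gtest_filter="*MPI*" pattern selects the MPITest cases added in code/cpp/tests/test_petsc_mpi.cpp below, and the || echo fallback makes the MPI step succeed even if mpirun or the filtered run fails, so this step reports MPI test results but does not enforce them.
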
code/cpp/src/CMakeLists.txt

3 additions, 0 deletions
@@ -9,6 +9,9 @@ file(
 # Remove main.cpp from the library sources
 list(FILTER MY_SRC EXCLUDE REGEX ".*main\\.cpp$")
 
+# Ensure Domain.cpp is included
+list(APPEND MY_SRC "${CMAKE_CURRENT_SOURCE_DIR}/spatial/Domain.cpp")
+
 include(petsc)
 find_package(MPI REQUIRED)
 find_package(OpenMP REQUIRED)
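
The explicit append presumably guards against the file(GLOB ...) above missing spatial/Domain.cpp: globs are expanded at configure time, so a source file added after the build directory was configured stays invisible until CMake reruns.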

code/cpp/tests/test_petsc_mpi.cpp

342 additions, 0 deletions (new file)
#include <gtest/gtest.h>
#include <petsc.h>
#include <mpi.h>
#include <array>
#include <cmath>
#include "util/MPIUtil.h"
#include "util/PetscRaii.h"

// Global PETSc initialization state
static bool petsc_initialized_globally = false;

// Helper macro to ensure PETSc is initialized
#define ENSURE_PETSC_INIT() \
    if (!petsc_initialized_globally) { \
        int argc = 0; \
        char** argv = nullptr; \
        PetscInitialize(&argc, &argv, nullptr, nullptr); \
        petsc_initialized_globally = true; \
    }

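The macro initializes PETSc lazily on first use and never calls PetscFinalize. A common alternative, shown here purely as a hypothetical sketch (this commit does not include the test runner's main), is a custom GoogleTest main that owns the PETSc/MPI lifetime:

    // Hypothetical alternative to ENSURE_PETSC_INIT(): initialize PETSc once
    // before any test runs and finalize it afterwards. Not part of this commit.
    #include <gtest/gtest.h>
    #include <petsc.h>

    int main(int argc, char** argv) {
        ::testing::InitGoogleTest(&argc, &argv);          // strip gtest flags first
        PetscInitialize(&argc, &argv, nullptr, nullptr);  // also initializes MPI
        const int result = RUN_ALL_TESTS();
        PetscFinalize();
        return result;
    }
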
// Test MPI rank and size
TEST(MPITest, MPIInitialized) {
    ENSURE_PETSC_INIT();

    int rank, size;
    MPI_Comm_rank(PETSC_COMM_WORLD, &rank);
    MPI_Comm_size(PETSC_COMM_WORLD, &size);

    EXPECT_GE(rank, 0);
    EXPECT_GE(size, 1);
    EXPECT_LT(rank, size);
}

// Test MPI communication
TEST(MPITest, MPIBasicCommunication) {
    ENSURE_PETSC_INIT();

    int rank, size;
    MPI_Comm_rank(PETSC_COMM_WORLD, &rank);
    MPI_Comm_size(PETSC_COMM_WORLD, &size);

    // Each rank sends its rank number
    int send_data = rank;
    int recv_data = 0;

    MPI_Allreduce(&send_data, &recv_data, 1, MPI_INT, MPI_SUM, PETSC_COMM_WORLD);

    // Sum of ranks should be 0 + 1 + 2 + ... + (size-1) = size*(size-1)/2
    int expected_sum = size * (size - 1) / 2;
    EXPECT_EQ(recv_data, expected_sum);
}

// Test global reduce helper for integers
TEST(MPITest, GlobalReduceInteger) {
    ENSURE_PETSC_INIT();

    int rank;
    MPI_Comm_rank(PETSC_COMM_WORLD, &rank);

    int local_val = rank + 1;  // Each rank contributes rank+1
    int global_sum = globalReduce(local_val, MPI_SUM);

    int size;
    MPI_Comm_size(PETSC_COMM_WORLD, &size);

    // Sum should be 1 + 2 + ... + size = size*(size+1)/2
    int expected_sum = size * (size + 1) / 2;
    EXPECT_EQ(global_sum, expected_sum);
}

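globalReduce comes from util/MPIUtil.h, which is not part of this diff. Judging from the call sites in these tests, it is presumably a thin templated wrapper around MPI_Allreduce on PETSC_COMM_WORLD; a sketch under that assumption (the real signature may differ):

    // Hypothetical shape of globalReduce, inferred from its call sites.
    // Relies on the getMpiDataType<T>() trait exercised further down.
    template <typename T>
    T globalReduce(const T& local_val, MPI_Op op) {
        T global_val{};
        MPI_Allreduce(&local_val, &global_val, 1, getMpiDataType<T>(), op,
                      PETSC_COMM_WORLD);
        return global_val;
    }
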
// Test global reduce helper for doubles
TEST(MPITest, GlobalReduceDouble) {
    ENSURE_PETSC_INIT();

    int rank;
    MPI_Comm_rank(PETSC_COMM_WORLD, &rank);

    double local_val = static_cast<double>(rank) * 2.5;
    double global_sum = globalReduce(local_val, MPI_SUM);

    int size;
    MPI_Comm_size(PETSC_COMM_WORLD, &size);

    // Sum should be 0*2.5 + 1*2.5 + 2*2.5 + ... = 2.5 * (0+1+...+(size-1))
    double expected_sum = 2.5 * size * (size - 1) / 2;
    EXPECT_NEAR(global_sum, expected_sum, 1e-10);
}

// Test global reduce for max operation
TEST(MPITest, GlobalReduceMax) {
    ENSURE_PETSC_INIT();

    int rank;
    MPI_Comm_rank(PETSC_COMM_WORLD, &rank);

    double local_val = static_cast<double>(rank);
    double global_max = globalReduce(local_val, MPI_MAX);

    int size;
    MPI_Comm_size(PETSC_COMM_WORLD, &size);

    // Max should be the highest rank number
    EXPECT_DOUBLE_EQ(global_max, static_cast<double>(size - 1));
}

// Test global reduce for min operation
TEST(MPITest, GlobalReduceMin) {
    ENSURE_PETSC_INIT();

    int rank;
    MPI_Comm_rank(PETSC_COMM_WORLD, &rank);

    double local_val = static_cast<double>(rank) + 10.0;
    double global_min = globalReduce(local_val, MPI_MIN);

    // Min should be 10.0 (from rank 0)
    EXPECT_DOUBLE_EQ(global_min, 10.0);
}

// Test global reduce on an array
TEST(MPITest, GlobalReduceVector) {
    ENSURE_PETSC_INIT();

    int rank;
    MPI_Comm_rank(PETSC_COMM_WORLD, &rank);

    // Each rank contributes an array
    double local_vals[3] = {
        static_cast<double>(rank),
        static_cast<double>(rank) * 2.0,
        static_cast<double>(rank) * 3.0
    };
    double global_vals[3];

    globalReduce_v(local_vals, global_vals, 3, MPI_SUM);

    int size;
    MPI_Comm_size(PETSC_COMM_WORLD, &size);

    // Expected sums for each element
    double expected_sum = size * (size - 1) / 2.0;
    EXPECT_NEAR(global_vals[0], expected_sum, 1e-10);
    EXPECT_NEAR(global_vals[1], expected_sum * 2.0, 1e-10);
    EXPECT_NEAR(global_vals[2], expected_sum * 3.0, 1e-10);
}

// Test getGlobalMinMax helper
TEST(MPITest, GetGlobalMinMax) {
    ENSURE_PETSC_INIT();

    int rank;
    MPI_Comm_rank(PETSC_COMM_WORLD, &rank);

    double local_min = static_cast<double>(rank) * 10.0;
    double local_max = static_cast<double>(rank) * 10.0 + 5.0;

    double global_min, global_max;
    getGlobalMinMax(local_min, local_max, global_min, global_max);

    int size;
    MPI_Comm_size(PETSC_COMM_WORLD, &size);

    // Global min should come from rank 0
    EXPECT_DOUBLE_EQ(global_min, 0.0);

    // Global max should come from the highest rank
    EXPECT_DOUBLE_EQ(global_max, (size - 1) * 10.0 + 5.0);
}

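The array and min/max helpers used above are likewise defined in util/MPIUtil.h rather than in this commit; the tests are consistent with wrappers of roughly this shape (again an assumption, not the project's actual code):

    // Hypothetical: element-wise reduction of an array of doubles.
    void globalReduce_v(const double* local, double* global, int count, MPI_Op op) {
        MPI_Allreduce(local, global, count, MPI_DOUBLE, op, PETSC_COMM_WORLD);
    }

    // Hypothetical: reduce both bounds across all ranks.
    void getGlobalMinMax(double local_min, double local_max,
                         double& global_min, double& global_max) {
        MPI_Allreduce(&local_min, &global_min, 1, MPI_DOUBLE, MPI_MIN, PETSC_COMM_WORLD);
        MPI_Allreduce(&local_max, &global_max, 1, MPI_DOUBLE, MPI_MAX, PETSC_COMM_WORLD);
    }
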
// Test VecWrapper creation
TEST(PetscTest, VecWrapperCreate) {
    ENSURE_PETSC_INIT();

    auto vec = VecWrapper::Create(10);

    PetscInt size;
    VecGetLocalSize(vec, &size);

    EXPECT_EQ(size, 10);
}

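VecWrapper is declared in util/PetscRaii.h, also outside this diff. Taken together, the tests in this file (Create, Like, implicit use as a Vec, move construction) imply a move-only RAII handle; a minimal sketch under those assumptions:

    // Hypothetical sketch of VecWrapper, inferred from how these tests use it.
    class VecWrapper {
     public:
        static VecWrapper Create(PetscInt local_size) {
            VecWrapper w;
            VecCreateMPI(PETSC_COMM_WORLD, local_size, PETSC_DETERMINE, &w.vec_);
            return w;
        }
        static VecWrapper Like(const VecWrapper& other) {
            VecWrapper w;
            VecDuplicate(other.vec_, &w.vec_);  // same layout; values not copied
            return w;
        }
        VecWrapper(VecWrapper&& other) noexcept : vec_(other.vec_) { other.vec_ = nullptr; }
        ~VecWrapper() {
            if (vec_ != nullptr) VecDestroy(&vec_);
        }
        operator Vec() const { return vec_; }  // lets VecSet(vec, ...) etc. compile

     private:
        VecWrapper() = default;
        Vec vec_ = nullptr;
    };

MatWrapper, used in the MatWrapperCreate test below, presumably follows the same pattern, with get_ref() handing MatCreate a Mat* to fill in.
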
// Test VecWrapper set and get
TEST(PetscTest, VecWrapperSetGet) {
    ENSURE_PETSC_INIT();

    auto vec = VecWrapper::Create(5);

    // Set all values to 3.14
    VecSet(vec, 3.14);

    // Get values
    const PetscScalar* array;
    VecGetArrayRead(vec, &array);

    for (int i = 0; i < 5; i++) {
        EXPECT_DOUBLE_EQ(array[i], 3.14);
    }

    VecRestoreArrayRead(vec, &array);
}

// Test VecWrapper operations
TEST(PetscTest, VecWrapperOperations) {
    ENSURE_PETSC_INIT();

    auto vec1 = VecWrapper::Create(10);
    auto vec2 = VecWrapper::Create(10);

    VecSet(vec1, 2.0);
    VecSet(vec2, 3.0);

    // vec1 = vec1 + vec2 (should be 5.0)
    VecAXPY(vec1, 1.0, vec2);

    // Check result
    const PetscScalar* array;
    VecGetArrayRead(vec1, &array);

    for (int i = 0; i < 10; i++) {
        EXPECT_DOUBLE_EQ(array[i], 5.0);
    }

    VecRestoreArrayRead(vec1, &array);
}

// Test VecWrapper norm
TEST(PetscTest, VecWrapperNorm) {
    ENSURE_PETSC_INIT();

    auto vec = VecWrapper::Create(4);

    // Set values [1, 2, 3, 4]
    PetscScalar values[] = {1.0, 2.0, 3.0, 4.0};
    PetscInt indices[] = {0, 1, 2, 3};

    VecSetValues(vec, 4, indices, values, INSERT_VALUES);
    VecAssemblyBegin(vec);
    VecAssemblyEnd(vec);

    // Calculate L2 norm: sqrt(1^2 + 2^2 + 3^2 + 4^2) = sqrt(30)
    double norm;
    VecNorm(vec, NORM_2, &norm);

    EXPECT_NEAR(norm, std::sqrt(30.0), 1e-10);
}

// Test VecWrapper Like (duplicate)
TEST(PetscTest, VecWrapperLike) {
    ENSURE_PETSC_INIT();

    auto vec1 = VecWrapper::Create(8);
    VecSet(vec1, 1.5);

    auto vec2 = VecWrapper::Like(vec1);

    // vec2 should have same size but different values
    PetscInt size1, size2;
    VecGetLocalSize(vec1, &size1);
    VecGetLocalSize(vec2, &size2);

    EXPECT_EQ(size1, size2);
}

// Test MatWrapper creation
TEST(PetscTest, MatWrapperCreate) {
    ENSURE_PETSC_INIT();

    MatWrapper mat;

    MatCreate(PETSC_COMM_WORLD, mat.get_ref());
    MatSetSizes(mat, 5, 5, PETSC_DETERMINE, PETSC_DETERMINE);
    MatSetType(mat, MATDENSE);
    MatSetUp(mat);

    PetscInt rows, cols;
    MatGetLocalSize(mat, &rows, &cols);

    EXPECT_EQ(rows, 5);
    EXPECT_EQ(cols, 5);
}

281+
282+
// Test MPI data type helper
283+
TEST(MPITest, GetMpiDataTypeInt) {
284+
ENSURE_PETSC_INIT();
285+
286+
auto dtype = getMpiDataType<int>();
287+
EXPECT_EQ(dtype, MPI_INT);
288+
}
289+
290+
TEST(MPITest, GetMpiDataTypeDouble) {
291+
ENSURE_PETSC_INIT();
292+
293+
auto dtype = getMpiDataType<double>();
294+
EXPECT_EQ(dtype, MPI_DOUBLE);
295+
}
296+
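getMpiDataType<T>() also lives in util/MPIUtil.h. The two tests only pin down the int and double mappings, which a specialization-based trait would satisfy; a hypothetical sketch:

    // Hypothetical: map C++ types to MPI datatypes via template specialization.
    template <typename T>
    MPI_Datatype getMpiDataType();

    template <>
    inline MPI_Datatype getMpiDataType<int>() { return MPI_INT; }

    template <>
    inline MPI_Datatype getMpiDataType<double>() { return MPI_DOUBLE; }
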
// Test PETSc error handling with PetscCallAbort
TEST(PetscTest, PetscCallAbortSuccess) {
    ENSURE_PETSC_INIT();

    auto vec = VecWrapper::Create(5);

    // This should succeed
    EXPECT_NO_THROW({
        PetscCallAbort(PETSC_COMM_WORLD, VecSet(vec, 1.0));
    });
}

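Note that PetscCallAbort does not throw C++ exceptions: on a PETSc error it calls MPI_Abort on the supplied communicator. EXPECT_NO_THROW therefore documents that the call completes normally rather than exercising an exception path.
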
// Test VecWrapper move semantics
TEST(PetscTest, VecWrapperMove) {
    ENSURE_PETSC_INIT();

    auto vec1 = VecWrapper::Create(10);
    VecSet(vec1, 7.0);

    // Move construct
    VecWrapper vec2 = std::move(vec1);

    // vec2 should have the values
    const PetscScalar* array;
    VecGetArrayRead(vec2, &array);
    EXPECT_DOUBLE_EQ(array[0], 7.0);
    VecRestoreArrayRead(vec2, &array);
}

// Test that all MPI ranks see the same test results
TEST(MPITest, AllRanksAgree) {
    ENSURE_PETSC_INIT();

    int rank;
    MPI_Comm_rank(PETSC_COMM_WORLD, &rank);

    // All ranks compute the same value
    int local_result = 42;
    int global_min = globalReduce(local_result, MPI_MIN);
    int global_max = globalReduce(local_result, MPI_MAX);

    // Min and max should be the same since all ranks have the same value
    EXPECT_EQ(global_min, 42);
    EXPECT_EQ(global_max, 42);
    EXPECT_EQ(global_min, global_max);
}
