Skip to content

Commit 22500d1

Browse files
committed
Merge branch 'develop' of github.com:su2code/SU2 into release_v7.0.2
2 parents 49e2914 + 963b8f3 commit 22500d1

File tree

3 files changed

+25
-10
lines changed

3 files changed

+25
-10
lines changed

Common/src/linear_algebra/CPastixWrapper.cpp

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -90,7 +90,7 @@ void CPastixWrapper::Initialize(CGeometry *geometry, CConfig *config) {
9090
iparm[IPARM_INCOMPLETE] = incomplete;
9191
iparm[IPARM_LEVEL_OF_FILL] = pastix_int_t(config->GetPastixFillLvl());
9292
iparm[IPARM_THREAD_NBR] = omp_get_max_threads();
93-
#ifdef HAVE_MPI
93+
#if defined(HAVE_MPI) && defined(HAVE_OMP)
9494
int comm_mode = MPI_THREAD_SINGLE;
9595
MPI_Query_thread(&comm_mode);
9696
if (comm_mode == MPI_THREAD_MULTIPLE)

SU2_CFD/src/solvers/CIncNSSolver.cpp

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -768,13 +768,13 @@ void CIncNSSolver::Preprocessing(CGeometry *geometry, CSolver **solver_container
768768

769769
/*--- Evaluate the vorticity and strain rate magnitude ---*/
770770

771-
solver_container[FLOW_SOL]->GetNodes()->SetVorticity_StrainMag();
771+
nodes->SetVorticity_StrainMag();
772772

773773
StrainMag_Max = 0.0; Omega_Max = 0.0;
774774
for (iPoint = 0; iPoint < nPoint; iPoint++) {
775775

776-
StrainMag = solver_container[FLOW_SOL]->GetNodes()->GetStrainMag(iPoint);
777-
Vorticity = solver_container[FLOW_SOL]->GetNodes()->GetVorticity(iPoint);
776+
StrainMag = nodes->GetStrainMag(iPoint);
777+
Vorticity = nodes->GetVorticity(iPoint);
778778
Omega = sqrt(Vorticity[0]*Vorticity[0]+ Vorticity[1]*Vorticity[1]+ Vorticity[2]*Vorticity[2]);
779779

780780
StrainMag_Max = max(StrainMag_Max, StrainMag);

TestCases/pastix_support/readme.txt

Lines changed: 21 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -18,7 +18,11 @@
1818
% cd scotch/src && cp Make.inc/Makefile.inc.x86-64_pc_linux2.XXXX Makefile.inc
1919
% (choose the XXXX that matches your compiler)
2020
% Edit Makefile.inc and delete the cflag -DSCOTCH_PTHREAD (see why in 3-ii)
21-
% make ptscotch
21+
% "make ptscotch"
22+
%
23+
% Note: If you build SU2 (7.0.1+) with OpenMP support (-Dwith-omp=true),
24+
% AND your system supports MPI_THREAD_MULTIPLE, you do not need to
25+
% delete the -DSCOTCH_PTHREAD flag (but doing so does no harm).
2226
%
2327
% 3 - Build PaStiX
2428
% Extract the tarball downloaded in 1 into "externals"
@@ -30,10 +34,18 @@
3034
% ii - Uncomment the lines for "VERSIONSMP = _nosmp",
3135
% SU2 does not currently support MPI+Threads.
3236
% iii - Set SCOTCH_HOME as SCOTCH_HOME ?= ${PWD}/../../scotch/
33-
% iv - Comment out the lines for "Hardware Locality", this is only
34-
% important for an MPI+Threads build.
35-
% v - Optionally look at the BLAS section (required by "make examples")
36-
% make all
37+
% iv - Comment out the lines for "Hardware Locality", this may only be
38+
% important for an MPI+Threads build (usually it is not).
39+
% v - Optionally look at the BLAS section (only required by "make examples")
40+
% "make all"
41+
%
42+
% Note: If you build SU2 (7.0.1+) with OpenMP support (-Dwith-omp=true),
43+
% skip 3-ii; note, however, that this may not work well with SU2_CFD_AD.
44+
% If you do use MPI+Threads, it is important for good performance that your
45+
% system supports MPI_THREAD_MULTIPLE (SU2_CFD --thread_multiple ...)
46+
% Furthermore, if MPI_THREAD_MULTIPLE is NOT supported, you need to
47+
% uncomment the line with "-DPASTIX_FUNNELED" in config.in.
48+
% Finally, if you just use threads (no MPI), this is not important.
3749
%
3850
% 4 - Build SU2
3951
% Follow the normal meson build instructions, add -Denable-pastix=true,
@@ -42,6 +54,9 @@
4254
% If you did not build PaStiX and Scotch in the externals folders you must
4355
% use -Dpastix_root="some path" and -Dscotch_root="another path" to
4456
% indicate where they are RELATIVE to the SU2 directory.
57+
% You need sequential versions of BLAS. However, when using MPI+Threads, beware
58+
% that OpenBLAS needs to have parallel support, otherwise the solver will crash;
59+
% if you get poor performance, see 5.4 below.
4560
%
4661
% 5 - Common problems and known issues
4762
% - OpenMPI 4 does not work with PaStiX 5, downgrade to 3.1.4.
@@ -67,4 +82,4 @@
6782
% - CentOS 7.6.1810, gcc 5.4, ompi 3.1.4, mkl 2017
6883
% - CentOS 7.6.1810, gcc 5.4, impi 2018, mkl 2019
6984
% - CentOS 7.6.1810, gcc 8.2, impi 2018, mkl 2019
70-
85+
%

0 commit comments

Comments
 (0)