-
Notifications
You must be signed in to change notification settings - Fork 23
Expand file tree
/
Copy pathIntegrator.cc
More file actions
738 lines (642 loc) · 28.9 KB
/
Integrator.cc
File metadata and controls
738 lines (642 loc) · 28.9 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
498
499
500
501
502
503
504
505
506
507
508
509
510
511
512
513
514
515
516
517
518
519
520
521
522
523
524
525
526
527
528
529
530
531
532
533
534
535
536
537
538
539
540
541
542
543
544
545
546
547
548
549
550
551
552
553
554
555
556
557
558
559
560
561
562
563
564
565
566
567
568
569
570
571
572
573
574
575
576
577
578
579
580
581
582
583
584
585
586
587
588
589
590
591
592
593
594
595
596
597
598
599
600
601
602
603
604
605
606
607
608
609
610
611
612
613
614
615
616
617
618
619
620
621
622
623
624
625
626
627
628
629
630
631
632
633
634
635
636
637
638
639
640
641
642
643
644
645
646
647
648
649
650
651
652
653
654
655
656
657
658
659
660
661
662
663
664
665
666
667
668
669
670
671
672
673
674
675
676
677
678
679
680
681
682
683
684
685
686
687
688
689
690
691
692
693
694
695
696
697
698
699
700
701
702
703
704
705
706
707
708
709
710
711
712
713
714
715
716
717
718
719
720
721
722
723
724
725
726
727
728
729
730
731
732
733
734
735
736
737
738
//---------------------------------Spheral++----------------------------------//
// Integrator -- The topmost abstract base class for all integrator classes
// Spheral++. Integrator classes take a list of Physics packages and advance
// them in time.
//
// Created by JMO, Wed May 31 21:58:08 PDT 2000
//----------------------------------------------------------------------------//
#include "FileIO/FileIO.hh"
#include "DataOutput/Restart.hh"
#include "DataBase/DataBase.hh"
#include "DataBase/State.hh"
#include "DataBase/StateDerivatives.hh"
#include "Field/FieldList.hh"
#include "Physics/Physics.hh"
#include "Boundary/Boundary.hh"
#include "Hydro/HydroFieldNames.hh"
#include "Utilities/range.hh"
#include "Neighbor/ConnectivityMap.hh"
#include "Distributed/allReduce.hh"
#include "Distributed/Communicator.hh"
#include "Utilities/DBC.hh"
#include "Integrator.hh"

#include <limits.h>
#include <float.h>
#include <algorithm>
#include <iostream>
#include <limits>
using std::vector;
using std::string;
using std::pair;
using std::make_pair;
namespace Spheral {
//------------------------------------------------------------------------------
// Construct with the given DataBase and Physics packages.
//------------------------------------------------------------------------------
template<typename Dimension>
Integrator<Dimension>::
Integrator(DataBase<Dimension>& dataBase,
           const vector<Physics<Dimension>*>& physicsPackages):
  mDtMultiplier(1.0),                          // scale factor applied to dt; halved on unstable-step retries
  mDtMin(0.0),                                 // hard lower bound on the timestep
  mDtMax(std::numeric_limits<Scalar>::max()),  // hard upper bound on the timestep
  mDtGrowth(2.0),                              // max fractional growth of dt per cycle
  mLastDt(1e-5),                               // seed value for the previous timestep
  mDtCheckFrac(0.5),
  mCurrentTime(0.0),
  mCurrentCycle(0),
  mUpdateBoundaryFrequency(1),                 // rebuild ghost nodes every cycle by default
  mVerboseStep(1),
  mVerbose(false),
  mAllowDtCheck(false),
  mRequireConnectivity(true),
  mRequireGhostConnectivity(false),
  mRequireOverlapConnectivity(false),
  mRequireIntersectionConnectivity(false),
  mDataBase(dataBase),
  mPhysicsPackages(),
  mCullGhostNodes(true),                       // by default remove ghost nodes no internal node interacts with
  mRestart(registerWithRestart(*this)) {
  // Append via appendPhysicsPackage so pre/post sub-packages are picked up too.
  for (auto* pkg: physicsPackages) this->appendPhysicsPackage(*pkg);
}
//------------------------------------------------------------------------------
// step
//------------------------------------------------------------------------------
template<typename Dimension>
bool
Integrator<Dimension>::
step(const typename Dimension::Scalar maxTime) {
  DataBase<Dimension>& db = this->accessDataBase();

  // Poll the physics packages for the kinds of connectivity required this step.
  mRequireConnectivity = false;
  mRequireGhostConnectivity = false;
  mRequireOverlapConnectivity = false;
  mRequireIntersectionConnectivity = false;
  for (auto* physicsPtr: mPhysicsPackages) {
    mRequireConnectivity = (mRequireConnectivity or physicsPtr->requireConnectivity());
    mRequireGhostConnectivity = (mRequireGhostConnectivity or physicsPtr->requireGhostConnectivity());
    mRequireOverlapConnectivity = (mRequireOverlapConnectivity or physicsPtr->requireOverlapConnectivity());
    mRequireIntersectionConnectivity = (mRequireIntersectionConnectivity or physicsPtr->requireIntersectionConnectivity());
  }

  // Set the ghost nodes (this updates the ConnectivityMap as well in the DataBase).
  if (mCurrentCycle % mUpdateBoundaryFrequency == 0) setGhostNodes();

  // Build the state and derivatives.
  State<Dimension> state(db, this->physicsPackagesBegin(), this->physicsPackagesEnd());
  StateDerivatives<Dimension> derivs(db, this->physicsPackagesBegin(), this->physicsPackagesEnd());

  // Set boundary properties.
  applyGhostBoundaries(state, derivs);

  // Try to advance using the derived class step method, cutting the timestep
  // multiplier in half on each failure.
  auto success = false;
  auto count = 0u;
  const auto maxIterations = 10u;
  const auto allowDtCheck0 = mAllowDtCheck;   // remember the caller's setting so we can restore it
  while (not success and count < maxIterations) {
    ++count;
    // On the final attempt disable the dt sanity check so we accept the step regardless.
    if (count == maxIterations) mAllowDtCheck = false;
    success = this->step(maxTime, state, derivs);
    // Restore the entry value rather than forcing true (the original code
    // unconditionally set this to true, clobbering a false setting).
    if (count == maxIterations) mAllowDtCheck = allowDtCheck0;
    if (not success) {
      mDtMultiplier *= 0.5;
      // Report the retry count against the actual iteration limit (was hardcoded "/10").
      if (Process::getRank() == 0) std::cerr << "Integrator::step reported unstable timestep -- cutting dt and trying again: " << count << "/" << maxIterations << std::endl;
    }
  }
  mDtMultiplier = 1.0;
  return success;
}
//------------------------------------------------------------------------------
// Loop over the stored physics packages and pick the minimum timestep.
//------------------------------------------------------------------------------
template<typename Dimension>
typename Dimension::Scalar
Integrator<Dimension>::
selectDt(const typename Dimension::Scalar dtMin,
         const typename Dimension::Scalar dtMax,
         const State<Dimension>& state,
         const StateDerivatives<Dimension>& derivs) const {
  REQUIRE(dtMin >= 0 and dtMax > 0);
  REQUIRE(dtMin <= dtMax);

  // Get the current time and data base.
  auto t = currentTime();
  const auto& db = dataBase();

  // Poll each package for its timestep vote, keeping the smallest positive one.
  // TimeStepType pairs the dt value with a string describing what limited it.
  TimeStepType dt(dtMax, "");
  for (auto* pkg: mPhysicsPackages) {
    auto dtVote = this->dt(pkg, db, state, derivs, t);
    if (dtVote.first > 0.0 and dtVote.first < dt.first) dt = dtVote;
  }

  // Apply any dt scaling due to iteration (step retries cut this multiplier).
  dt.first *= mDtMultiplier;

  // We also require that the timestep is not allowed to grow faster than a
  // prescribed fraction.
  dt.first = std::min(dt.first, dtGrowth()*lastDt());

  // Enforce the timestep boundaries.
  dt.first = std::min(dtMax, std::max(dtMin, dt.first));
  CHECK(dt.first >= 0.0 and
        dt.first >= dtMin and dt.first <= dtMax);

  // In the parallel case we need to find the minimum timestep across all processors.
#ifdef SPHERAL_ENABLE_GLOBALDT_REDUCTION
  const auto [globalDt, rank] = allReduceLoc(dt.first, SPHERAL_OP_MINLOC);
#else
  const auto globalDt = dt.first;
  const auto rank = Process::getRank();
#endif

  // If verbose, the rank owning the minimum timestep reports what limited it.
  if (rank == Process::getRank() and (verbose() and currentCycle() % verboseStep() == 0)) {
    std::cout << "Selected timestep of "
              << dt.first << " on rank " << rank << std::endl
              << dt.second << std::endl;
    // Flush only when we actually printed (previously this flushed every call).
    std::cout.flush();
  }
  dt.first = globalDt;
  return dt.first;
}
//------------------------------------------------------------------------------
// Initializations for integrators. To be called once at the beginning of an
// integrator step.
//------------------------------------------------------------------------------
template<typename Dimension>
void
Integrator<Dimension>::preStepInitialize(State<Dimension>& state,
                                         StateDerivatives<Dimension>& derivs) const {
  // Give every registered physics package a chance to prepare before the
  // step begins.
  auto& database = mDataBase.get();
  for (auto* pkgPtr: mPhysicsPackages) pkgPtr->preStepInitialize(database, state, derivs);
}
//------------------------------------------------------------------------------
// Initialize all physics packages before evaluating derivatives.
// Called before each call to Physics::evaluateDerivatives.
//------------------------------------------------------------------------------
template<typename Dimension>
void
Integrator<Dimension>::initializeDerivatives(const double t,
                                             const double dt,
                                             State<Dimension>& state,
                                             StateDerivatives<Dimension>& derivs) const {
  // Zero out the per-node work fields before derivatives are evaluated.
  auto& database = mDataBase.get();
  auto workField = database.globalWork();
  workField = 0.0;

  // Let each package initialize; any package may request a boundary refresh.
  // Note we always call initialize (no short-circuiting on the flag).
  bool boundariesNeedUpdate = false;
  for (auto* pkgPtr: mPhysicsPackages) {
    const bool requested = pkgPtr->initialize(t, dt, database, state, derivs);
    boundariesNeedUpdate = (boundariesNeedUpdate or requested);
  }

  // Refresh ghost values if any package asked for it.
  if (boundariesNeedUpdate) {
    this->applyGhostBoundaries(state, derivs);
    this->finalizeGhostBoundaries();
  }
}
//------------------------------------------------------------------------------
// Iterate over all physics packages and call evaluateDerivatives.
//------------------------------------------------------------------------------
template<typename Dimension>
void
Integrator<Dimension>::evaluateDerivatives(const Scalar t,
                                           const Scalar dt,
                                           const DataBase<Dimension>& dataBase,
                                           const State<Dimension>& state,
                                           StateDerivatives<Dimension>& derivs) const {
  // Forward the derivative evaluation to every registered physics package.
  for (auto* pkgPtr: mPhysicsPackages) pkgPtr->evaluateDerivatives(t, dt, dataBase, state, derivs);
}
//------------------------------------------------------------------------------
// Iterate over all physics packages and call finalizeDerivatives.
//------------------------------------------------------------------------------
template<typename Dimension>
void
Integrator<Dimension>::finalizeDerivatives(const Scalar t,
                                           const Scalar dt,
                                           const DataBase<Dimension>& dataBase,
                                           const State<Dimension>& state,
                                           StateDerivatives<Dimension>& derivs) const {
  // Forward the derivative finalization to every registered physics package.
  for (auto* pkgPtr: mPhysicsPackages) pkgPtr->finalizeDerivatives(t, dt, dataBase, state, derivs);
}
//------------------------------------------------------------------------------
// Iterate over all physics packages and let them do any post state update
// stuff.
//------------------------------------------------------------------------------
template<typename Dimension>
void
Integrator<Dimension>::postStateUpdate(const Scalar t,
                                       const Scalar dt,
                                       const DataBase<Dimension>& dataBase,
                                       State<Dimension>& state,
                                       StateDerivatives<Dimension>& derivs) const {
  // Let each package react to the freshly updated state; any package may
  // request a boundary refresh.  Every package is always called.
  bool boundariesNeedUpdate = false;
  for (auto* pkgPtr: mPhysicsPackages) {
    const bool requested = pkgPtr->postStateUpdate(t, dt, dataBase, state, derivs);
    boundariesNeedUpdate = (boundariesNeedUpdate or requested);
  }

  // Refresh ghost values if any package asked for it.
  if (boundariesNeedUpdate) {
    this->applyGhostBoundaries(state, derivs);
    this->finalizeGhostBoundaries();
  }
}
//------------------------------------------------------------------------------
// Finalize at the completion of a time step.
//------------------------------------------------------------------------------
template<typename Dimension>
void
Integrator<Dimension>::postStepFinalize(const double t,
                                        const double dt,
                                        State<Dimension>& state,
                                        StateDerivatives<Dimension>& derivs) const {
  // Give every registered physics package a chance to finalize at the end of
  // the completed step.
  auto& database = mDataBase.get();
  for (auto* pkgPtr: mPhysicsPackages) pkgPtr->finalize(t, dt, database, state, derivs);
}
//------------------------------------------------------------------------------
// Add a physics package.
//------------------------------------------------------------------------------
template<typename Dimension>
void
Integrator<Dimension>::
appendPhysicsPackage(Physics<Dimension>& package) {
  if (!havePhysicsPackage(package)) {
    // Recursively add any pre-sub-packages first, then the package itself,
    // then any post-sub-packages, preserving the required evaluation order.
    for (auto* packagePtr: package.preSubPackages()) this->appendPhysicsPackage(*packagePtr);
    mPhysicsPackages.push_back(&package);
    for (auto* packagePtr: package.postSubPackages()) this->appendPhysicsPackage(*packagePtr);
  } else {
    // Fixed warning text: a missing space previously produced "0x...to Integrator".
    std::cerr << "Warning: attempt to append Physics package " << &package
              << " to Integrator " << this << " which already has it." << std::endl;
  }
}
//------------------------------------------------------------------------------
// Reset the Physics packages to a new set
//------------------------------------------------------------------------------
template<typename Dimension>
void
Integrator<Dimension>::
resetPhysicsPackages(std::vector<Physics<Dimension>*>& packages) {
  // Replace the current package list wholesale with the given set.
  mPhysicsPackages.assign(packages.begin(), packages.end());
}
//------------------------------------------------------------------------------
// Test if the given physics package is listed in the integrator.
//------------------------------------------------------------------------------
template<typename Dimension>
bool
Integrator<Dimension>::
havePhysicsPackage(const Physics<Dimension>& package) const {
  // True if the given package's address already appears in our package list.
  const auto itr = std::find(mPhysicsPackages.begin(), mPhysicsPackages.end(), &package);
  return itr != mPhysicsPackages.end();
}
//------------------------------------------------------------------------------
// Get the unique set of boundary conditions across all physics packages.
//------------------------------------------------------------------------------
template<typename Dimension>
vector<Boundary<Dimension>*>
Integrator<Dimension>::
uniqueBoundaryConditions() const {

  // The result we're going to build up.
  vector<Boundary<Dimension>*> result;

  // Iterate over each physics package, collecting each boundary condition the
  // first time it is seen.  Insertion order (package order, then that
  // package's boundary order) is preserved.
  for (auto* physicsPtr: range(physicsPackagesBegin(), physicsPackagesEnd())) {

    // Iterate over the boundary conditions associated with this package.
    for (auto* boundaryPtr: range(physicsPtr->boundaryBegin(), physicsPtr->boundaryEnd())) {
      if (find(result.begin(), result.end(), boundaryPtr) == result.end())
        result.push_back(boundaryPtr);
    }
  }

  BEGIN_CONTRACT_SCOPE
  // Ensure that all boundary conditions are included in the result.
  for (auto* physicsPtr: range(physicsPackagesBegin(), physicsPackagesEnd())) {
    for (auto* boundaryPtr: range(physicsPtr->boundaryBegin(), physicsPtr->boundaryEnd())) {
      CONTRACT_VAR(boundaryPtr);
      ENSURE(find(result.begin(), result.end(), boundaryPtr) != result.end());
    }
  }

  // Also ensure that there are no duplicates.
  for (auto* boundaryPtr: result) {
    CONTRACT_VAR(boundaryPtr);
    ENSURE(count(result.begin(), result.end(), boundaryPtr) == 1);
  }
  END_CONTRACT_SCOPE

  // That's it.
  return result;
}
//------------------------------------------------------------------------------
// Set up the ghost nodes for boundary conditions.
//------------------------------------------------------------------------------
template<typename Dimension>
void
Integrator<Dimension>::setGhostNodes() const {

  // Get that DataBase.
  auto& db = mDataBase.get();

  // Get the complete set of unique boundary conditions.
  const auto boundaries = uniqueBoundaryConditions();

  // Remove any old ghost node information from the NodeLists (fluid and DEM).
  for (auto* nodeListPtr: range(db.fluidNodeListBegin(), db.fluidNodeListEnd())) {
    nodeListPtr->numGhostNodes(0);
  }
  for (auto* nodeListPtr: range(db.DEMNodeListBegin(), db.DEMNodeListEnd())) {
    nodeListPtr->numGhostNodes(0);
  }

  // If we need overlap connectivity, we need to double the kernel extent before
  // setting ghost nodes so the boundaries generate a wide enough ghost halo.
  if (mRequireOverlapConnectivity) {
    for (auto* nodeListPtr: range(db.fluidNodeListBegin(), db.fluidNodeListEnd())) {
      auto& neighbor = nodeListPtr->neighbor();
      auto maxeta = 2.0*neighbor.kernelExtent();
      neighbor.kernelExtent(maxeta);
    }
    for (auto* nodeListPtr: range(db.DEMNodeListBegin(), db.DEMNodeListEnd())) {
      auto& neighbor = nodeListPtr->neighbor();
      auto maxeta = 2.0*neighbor.kernelExtent();
      neighbor.kernelExtent(maxeta);
    }
  }

  // Update neighboring.
  for (auto* nodeListPtr: range(db.fluidNodeListBegin(), db.fluidNodeListEnd())) {
    auto& neighbor = nodeListPtr->neighbor();
    neighbor.updateNodes();
  }
  for (auto* nodeListPtr: range(db.DEMNodeListBegin(), db.DEMNodeListEnd())) {
    auto& neighbor = nodeListPtr->neighbor();
    neighbor.updateNodes();
  }

  // Iterate over the boundaries and set their ghost node info.  Neighbor data
  // is refreshed after each boundary so later boundaries see earlier ghosts.
  for (auto* boundaryPtr: range(boundaries.begin(), boundaries.end())) {
    boundaryPtr->setAllGhostNodes(db);
    boundaryPtr->finalizeGhostBoundary();
    for (auto* nodeListPtr: range(db.fluidNodeListBegin(), db.fluidNodeListEnd())) {
      nodeListPtr->neighbor().updateNodes();
    }
    for (auto* nodeListPtr: range(db.DEMNodeListBegin(), db.DEMNodeListEnd())) {
      nodeListPtr->neighbor().updateNodes();
    }
  }

  // If we doubled the kernel extents for overlap connectivity, put 'em back.
  if (mRequireOverlapConnectivity) {
    for (auto* nodeListPtr: range(db.fluidNodeListBegin(), db.fluidNodeListEnd())) {
      auto& neighbor = nodeListPtr->neighbor();
      auto maxeta = 0.5*neighbor.kernelExtent();
      neighbor.kernelExtent(maxeta);
      neighbor.updateNodes();
    }
    for (auto* nodeListPtr: range(db.DEMNodeListBegin(), db.DEMNodeListEnd())) {
      auto& neighbor = nodeListPtr->neighbor();
      auto maxeta = 0.5*neighbor.kernelExtent();
      neighbor.kernelExtent(maxeta);
      neighbor.updateNodes();
    }
  }

  if (mRequireConnectivity) {

    // Update the connectivity with whatever flavors were requested this step.
    db.updateConnectivityMap(mRequireGhostConnectivity, mRequireOverlapConnectivity, mRequireIntersectionConnectivity);

    // If we're culling ghost nodes, do it now.  Culling removes ghost nodes no
    // internal node interacts with; it is skipped when ghost or overlap
    // connectivity is required or when domain-decomposition-independent
    // results were requested.
    if (mCullGhostNodes and
        (not this->domainDecompositionIndependent()) and
        (not mRequireGhostConnectivity) and
        (not mRequireOverlapConnectivity)) {
      const auto numNodeLists = db.numNodeLists();
      const auto& cm = db.connectivityMap();

      // First build the set of flags indicating which nodes are used
      // (1 == keep, 0 == cullable ghost).
      FieldList<Dimension, size_t> flags = db.newGlobalFieldList(size_t(0u), "active nodes");
      for (auto [nodeListi, nodeListPtr]: enumerate(db.nodeListBegin(), db.nodeListEnd())) {
        const auto& nodeList = *nodeListPtr;
        for (auto i = 0u; i < nodeList.numInternalNodes(); ++i) {
          flags(nodeListi, i) = 1;
          const vector<vector<int> >& fullConnectivity = cm.connectivityForNode(&nodeList, i);
          for (auto nodeListj = 0u; nodeListj < fullConnectivity.size(); ++nodeListj) {
            const vector<int>& connectivity = fullConnectivity[nodeListj];
            for (auto j: connectivity) flags(nodeListj, j) = 1;
          }
        }

        // Ghost nodes that are control nodes for other ghost nodes we're keeping must
        // be kept as well.
        const auto firstGhostNode = nodeList.firstGhostNode();
        for (auto* boundaryPtr: range(boundaries.begin(), boundaries.end())) {
          const auto& boundary = *boundaryPtr;
          const auto& controlNodes = boundary.controlNodes(nodeList);
          const auto& ghostNodes = boundary.ghostNodes(nodeList);
          // CHECK(controlNodes.size() == ghostNodes.size());  // Not true if this is a DistributedBoundary!
          for (auto i: controlNodes) {
            if (i >= firstGhostNode) flags(nodeListi, i) = 1;
          }

          // Boundary conditions are allowed to opt out of culling entirely.
          if (not boundary.allowGhostCulling()) {
            for (auto i: ghostNodes) flags(nodeListi, i) = 1;
          }
        }
      }

      // Create the index mapping from old to new node orderings
      // (initialized to the identity; boundaries rewrite it as they cull).
      FieldList<Dimension, size_t> old2newIndexMap = db.newGlobalFieldList(size_t(0u), "index map");
      for (auto [nodeListi, nodeListPtr]: enumerate(db.nodeListBegin(), db.nodeListEnd())) {
        const auto numNodes = nodeListPtr->numNodes();
        for (auto i = 0u; i < numNodes; ++i) old2newIndexMap(nodeListi, i) = i;
      }

      // Now use these flags to cull the boundary conditions.
      vector<size_t> numNodesRemoved(numNodeLists, 0);
      for (auto* boundaryPtr: range(boundaries.begin(), boundaries.end())) {
        boundaryPtr->cullGhostNodes(flags, old2newIndexMap, numNodesRemoved);
      }

      // Patch up the connectivity map to reflect the new node ordering.
      db.patchConnectivityMap(flags, old2newIndexMap);

      // Now the boundary conditions have been updated, so we can go ahead and remove
      // the ghost nodes themselves from the NodeLists.
      for (auto [nodeListi, nodeListPtr]: enumerate(db.nodeListBegin(), db.nodeListEnd())) {
        auto& nodeList = *nodeListPtr;
        vector<size_t> nodesToRemove;
        for (auto i = nodeList.firstGhostNode(); i < nodeList.numNodes(); ++i) {
          if (flags(nodeListi, i) == 0) nodesToRemove.push_back(i);
        }
        nodeList.deleteNodes(nodesToRemove);
        nodeList.neighbor().updateNodes();
      }

      // All nodes should now be labeled as keepers.
      BEGIN_CONTRACT_SCOPE
      {
        for (auto nodeListi = 0u; nodeListi < numNodeLists; ++nodeListi) {
          ENSURE(flags[nodeListi]->numElements() == 0 or
                 *std::min_element(flags[nodeListi]->begin(), flags[nodeListi]->end()) == 1);
        }
      }
      END_CONTRACT_SCOPE

      // The ConnectivityMap should be valid too.
      ENSURE(db.connectivityMap().valid());
    }

  // } else {
  //   // We're not connectivity and don't need ghost nodes, so make sure all
  //   // boundaries are empty.
  //   const vector<Boundary<Dimension>*> boundaries = uniqueBoundaryConditions();
  //   for (ConstBoundaryIterator boundaryItr = boundaries.begin();
  //        boundaryItr != boundaries.end();
  //        ++boundaryItr) (*boundaryItr)->reset(db);
  }
}
//------------------------------------------------------------------------------
// Apply the ghost boundary conditions.
//------------------------------------------------------------------------------
template<typename Dimension>
void
Integrator<Dimension>::applyGhostBoundaries(State<Dimension>& state,
                                            StateDerivatives<Dimension>& derivs) const {

  // // Start our work timer.
  // typedef Timing::Time Time;
  // const Time start = Timing::currentTime();

  // Get that DataBase.
  auto& db = mDataBase.get();

  // If we're being rigorous about boundaries, we have to reset the ghost nodes.
  const auto boundaries = uniqueBoundaryConditions();

  // If we didn't call setGhostNodes, then make each boundary update it's
  // ghost node info (position and H).
  for (auto* boundaryPtr: range(boundaries.begin(), boundaries.end())) {
    for (auto* nodeListPtr: range(db.nodeListBegin(), db.nodeListEnd())) {
      boundaryPtr->updateGhostNodes(*nodeListPtr);
    }
    boundaryPtr->finalizeGhostBoundary();
  }

  // Ghost positions/H may have moved, so refresh the neighbor information.
  for (auto* nodeListPtr: range(db.fluidNodeListBegin(), db.fluidNodeListEnd())) {
    nodeListPtr->neighbor().updateNodes();
  }
  for (auto* nodeListPtr: range(db.DEMNodeListBegin(), db.DEMNodeListEnd())) {
    nodeListPtr->neighbor().updateNodes();
  }

  // Iterate over the physics packages, and have them apply ghost boundaries
  // for their state.
  for (auto* physicsPtr: range(physicsPackagesBegin(), physicsPackagesEnd())) {
    physicsPtr->applyGhostBoundaries(state, derivs);
  }

  // // Update the work per node fields.
  // double deltaLocal = Timing::difference(start, Timing::currentTime());
  // int numLocalNodes = 0;
  // for (typename DataBase<Dimension>::NodeListIterator nodeListItr = db.nodeListBegin();
  //      nodeListItr != db.nodeListEnd();
  //      ++nodeListItr) numLocalNodes += (*nodeListItr)->numInternalNodes();
  // const double workPerNode = deltaLocal/(numLocalNodes + 1.0e-30);
  // FieldList<Dimension, Scalar> work = db.globalWork();
  // work += workPerNode;

  // Finalize the boundaries.
  this->finalizeGhostBoundaries();
}
//------------------------------------------------------------------------------
// Finalize the ghost boundary conditions.
//------------------------------------------------------------------------------
template<typename Dimension>
void
Integrator<Dimension>::finalizeGhostBoundaries() const {

  // // Start our work timer.
  // typedef Timing::Time Time;
  // const Time start = Timing::currentTime();

  // Get that DataBase.
  //DataBase<Dimension>& db = accessDataBase();

  // Have every distinct boundary condition finalize its ghost state (e.g.,
  // completing any pending parallel exchanges).
  const vector<Boundary<Dimension>*> boundaries = uniqueBoundaryConditions();
  for (auto* boundaryPtr: range(boundaries.begin(), boundaries.end())) {
    boundaryPtr->finalizeGhostBoundary();
  }

  // // Update the work per node fields.
  // double deltaLocal = Timing::difference(start, Timing::currentTime());
  // int numLocalNodes = 0;
  // for (typename DataBase<Dimension>::NodeListIterator nodeListItr = db.nodeListBegin();
  //      nodeListItr != db.nodeListEnd();
  //      ++nodeListItr) numLocalNodes += (*nodeListItr)->numInternalNodes();
  // const double workPerNode = deltaLocal/(numLocalNodes + 1.0e-30);
  // FieldList<Dimension, Scalar> work = db.globalWork();
  // work += workPerNode;
}
//------------------------------------------------------------------------------
// Identify the internal nodes in violation of the boundary conditions.
//------------------------------------------------------------------------------
template<typename Dimension>
void
Integrator<Dimension>::setViolationNodes() const {
  auto& database = mDataBase.get();

  // Every distinct boundary condition flags the internal nodes violating it
  // (this also brings those nodes back into compliance).
  const auto boundaries = uniqueBoundaryConditions();
  for (auto* bcPtr: boundaries) bcPtr->setAllViolationNodes(database);

  // Node positions/H may have changed, so refresh the neighbor information.
  for (auto* nlPtr: range(database.fluidNodeListBegin(), database.fluidNodeListEnd())) {
    nlPtr->neighbor().updateNodes();
  }
  for (auto* nlPtr: range(database.DEMNodeListBegin(), database.DEMNodeListEnd())) {
    nlPtr->neighbor().updateNodes();
  }
}
//------------------------------------------------------------------------------
// Enforce the boundary conditions.
//------------------------------------------------------------------------------
template<typename Dimension>
void
Integrator<Dimension>::enforceBoundaries(State<Dimension>& state,
                                         StateDerivatives<Dimension>& derivs) const {
  // First flag (and reposition) any internal nodes violating the boundaries;
  // this also resets the positions and H's of those nodes to be in compliance.
  setViolationNodes();

  // Then have each physics package enforce the boundaries on its own state.
  for (auto* pkgPtr: mPhysicsPackages) pkgPtr->enforceBoundaries(state, derivs);
}
//------------------------------------------------------------------------------
// Copy the ghost positions and H tensor from one state to another.
//------------------------------------------------------------------------------
template<typename Dimension>
void
Integrator<Dimension>::copyGhostState(const State<Dimension>& state0,
                                      State<Dimension>& state1) const {
  // Extract the position and H FieldLists from the source and target states.
  const FieldList<Dimension, Vector> pos0 = state0.fields(HydroFieldNames::position, Vector::zero());
  const FieldList<Dimension, SymTensor> Hfield0 = state0.fields(HydroFieldNames::H, SymTensor::zero());
  FieldList<Dimension, Vector> pos1 = state1.fields(HydroFieldNames::position, Vector::zero());
  FieldList<Dimension, SymTensor> Hfield1 = state1.fields(HydroFieldNames::H, SymTensor::zero());

  // Walk the ghost nodes, copying position and H from state0 to state1.
  auto itr = pos0.ghostNodeBegin();
  const auto endItr = pos0.ghostNodeEnd();
  while (itr != endItr) {
    pos1(itr) = pos0(itr);
    Hfield1(itr) = Hfield0(itr);
    ++itr;
  }
}
//------------------------------------------------------------------------------
// Dump the current state of the Integrator to the given file.
//------------------------------------------------------------------------------
template<typename Dimension>
void
Integrator<Dimension>::
dumpState(FileIO& file, const string& pathName) const {
  // Persist just the minimal state needed to resume a run; dtMin/dtMax/dtGrowth
  // are deliberately not written (they come from the run configuration).
  const string prefix = pathName + "/";
  file.write(mLastDt, prefix + "lastDt");
  file.write(mCurrentTime, prefix + "currentTime");
  file.write(mCurrentCycle, prefix + "currentCycle");
  file.write(mDtMultiplier, prefix + "dtMultiplier");
}
//------------------------------------------------------------------------------
// Restore the state of the NodeList from the given file.
//------------------------------------------------------------------------------
template<typename Dimension>
void
Integrator<Dimension>::
restoreState(const FileIO& file, const string& pathName) {
  // Mirror of dumpState: read back exactly the fields dumpState wrote
  // (dtMin/dtMax/dtGrowth are deliberately not restored).
  const string prefix = pathName + "/";
  file.read(mLastDt, prefix + "lastDt");
  file.read(mCurrentTime, prefix + "currentTime");
  file.read(mCurrentCycle, prefix + "currentCycle");
  file.read(mDtMultiplier, prefix + "dtMultiplier");
}
//------------------------------------------------------------------------------
// How should we query a physics package for the time step?
//------------------------------------------------------------------------------
template<typename Dimension>
typename Integrator<Dimension>::TimeStepType
Integrator<Dimension>::
dt(const Physics<Dimension>* pkg,
   const DataBase<Dimension>& dataBase,
   const State<Dimension>& state,
   const StateDerivatives<Dimension>& derivs,
   const Scalar currentTime) const {
  // Hook for how a single package's timestep vote is obtained (selectDt calls
  // this per package); the base behavior simply forwards to the package.
  // NOTE(review): presumably overridable by derived integrators -- confirm
  // against Integrator.hh.
  return pkg->dt(dataBase, state, derivs, currentTime);
}
}