Commit 3d505a7

Add implementation to enable parallel execution of macro-dumux (#663)
* [two-scale-heat-conduction] Enable parallel macro dumux solver
* fixup clang-format
* [two-scale-heat-conduction] Don't pass overlap points to adapter
* [two-scale-heat-conduction] Store coupling data and exchange if parallel
* Remove whitespace
* Fix clang-format
* Add changelog entry
* [two-scale-heat-conduction] Use Dune::Partitions to only couple interior points
* Improve documentation

Co-authored-by: Ishaan Desai <[email protected]>
Parent: 75bddc1

File tree: 3 files changed (+49, −14 lines)

changelog-entries/663.md

Lines changed: 1 addition & 0 deletions
@@ -0,0 +1 @@
+- Enable parallel execution of the participant macro-dumux in the two-scale heat conduction tutorial [#663](https://github.com/precice/tutorials/pull/663)

two-scale-heat-conduction/macro-dumux/appl/main.cc

Lines changed: 4 additions & 1 deletion
@@ -22,6 +22,7 @@
 
 #include <dune/common/parallel/mpihelper.hh>
 #include <dune/common/timer.hh>
+#include <dune/grid/common/partitionset.hh>
 #include <dune/grid/io/file/vtk.hh>
 #include <dune/istl/io.hh>
 
@@ -118,7 +119,7 @@ int main(int argc, char **argv)
   // coordinate loop (created vectors are 1D)
   // these positions of cell centers are later communicated to precice
   std::cout << "Coordinates: " << std::endl;
-  for (const auto &element : elements(leafGridView)) {
+  for (const auto &element : elements(leafGridView, Dune::Partitions::interior)) {
     auto fvGeometry = localView(*gridGeometry);
     fvGeometry.bindElement(element);
     for (const auto &scv : scvs(fvGeometry)) {
@@ -275,6 +276,8 @@ int main(int argc, char **argv)
                                                     dt);
     couplingParticipant.readQuantityFromOtherSolver(meshName,
                                                     readDataPorosity, dt);
+    // store coupling data in problem
+    problem->spatialParams().updateCouplingData();
   }
   std::cout << "Solver starts" << std::endl;
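The switch to `Dune::Partitions::interior` is the core of the parallelization: when the grid is distributed, each rank registers with preCICE only the cell centers of elements it owns, so no coupling point is passed to the adapter twice. A minimal standalone sketch of that Dune iteration pattern follows; the grid type, domain size, and cell counts are placeholders chosen for illustration, not taken from the tutorial:

#include <array>
#include <bitset>
#include <cstddef>
#include <iostream>

#include <dune/common/parallel/mpihelper.hh>
#include <dune/grid/common/partitionset.hh>
#include <dune/grid/yaspgrid.hh>

// Sketch: count how many of a rank's local elements it actually owns.
// On a distributed YaspGrid with overlap, the plain element range also
// visits overlap copies; Dune::Partitions::interior skips them.
int main(int argc, char **argv)
{
  const auto &mpiHelper = Dune::MPIHelper::instance(argc, argv);

  // 8x8 unit square, non-periodic, one layer of overlap cells per rank
  Dune::YaspGrid<2> grid(/*upperRight=*/{1.0, 1.0}, /*cells=*/{8, 8},
                         /*periodic=*/std::bitset<2>(0ULL), /*overlap=*/1);
  const auto leafGridView = grid.leafGridView();

  std::size_t nInterior = 0, nAll = 0;
  for ([[maybe_unused]] const auto &element :
       elements(leafGridView, Dune::Partitions::interior))
    ++nInterior; // owned by this rank: safe to register with preCICE once
  for ([[maybe_unused]] const auto &element : elements(leafGridView))
    ++nAll; // includes overlap copies on parallel runs

  std::cout << "rank " << mpiHelper.rank() << ": " << nInterior
            << " interior of " << nAll << " local elements" << std::endl;
  return 0;
}

On a single rank the two counts coincide; on several ranks the difference is exactly the overlap layer that this commit stops handing to the coupling adapter.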

two-scale-heat-conduction/macro-dumux/appl/spatialparams.hh

Lines changed: 44 additions & 13 deletions
@@ -18,6 +18,9 @@
 #ifndef DUMUX_TEST_1PNI_SPATIAL_PARAMS_HH
 #define DUMUX_TEST_1PNI_SPATIAL_PARAMS_HH
 
+#include <dune/grid/common/partitionset.hh>
+
+#include <dumux/parallel/vectorcommdatahandle.hh>
 #include <dumux/porousmediumflow/fvspatialparams1p.hh>
 #include <dumux/porousmediumflow/properties.hh>
 
@@ -48,8 +51,9 @@ public:
   using PermeabilityType = Scalar;
 
   OnePNISpatialParams(std::shared_ptr<const GridGeometry> gridGeometry)
-      : ParentType(gridGeometry),
-        couplingParticipant_(Dumux::Precice::CouplingAdapter::getInstance()) {}
+      : ParentType(gridGeometry), couplingParticipant_(Dumux::Precice::CouplingAdapter::getInstance()), couplingData_(gridGeometry->numDofs()), couplingDataHandle_(this->gridGeometry().elementMapper(), couplingData_)
+  {
+  }
 
   /*!
    * \brief Defines the intrinsic permeability \f$\mathrm{[m^2]}\f$.
@@ -71,8 +75,7 @@ public:
                   const ElementSolution &elemSol) const
   {
     if (getParam<bool>("Precice.RunWithCoupling") == true)
-      return couplingParticipant_.getScalarQuantityOnFace(
-          "macro-mesh", "porosity", scv.elementIndex());
+      return couplingData_[scv.elementIndex()][0];
     else
       return getParam<Scalar>("Problem.DefaultPorosity");
   }
@@ -87,14 +90,10 @@ public:
     DimWorldMatrix K;
 
     if (getParam<bool>("Precice.RunWithCoupling") == true) {
-      K[0][0] = couplingParticipant_.getScalarQuantityOnFace(
-          "macro-mesh", "k_00", scv.elementIndex());
-      K[0][1] = couplingParticipant_.getScalarQuantityOnFace(
-          "macro-mesh", "k_01", scv.elementIndex());
-      K[1][0] = couplingParticipant_.getScalarQuantityOnFace(
-          "macro-mesh", "k_10", scv.elementIndex());
-      K[1][1] = couplingParticipant_.getScalarQuantityOnFace(
-          "macro-mesh", "k_11", scv.elementIndex());
+      K[0][0] = couplingData_[scv.elementIndex()][1];
+      K[0][1] = couplingData_[scv.elementIndex()][2];
+      K[1][0] = couplingData_[scv.elementIndex()][3];
+      K[1][1] = couplingData_[scv.elementIndex()][4];
     } else {
       K[0][0] = getParam<Scalar>("Component.SolidThermalConductivity");
       K[0][1] = 0.0;
@@ -104,8 +103,40 @@ public:
     return K;
   }
 
+  void updateCouplingData()
+  {
+    for (const auto &element : elements(this->gridGeometry().gridView(), Dune::Partitions::interior)) {
+      auto fvGeometry = localView(this->gridGeometry());
+      fvGeometry.bindElement(element);
+      for (const auto &scv : scvs(fvGeometry)) {
+        const auto elementIdx = scv.elementIndex();
+        couplingData_[elementIdx][0] =
+            couplingParticipant_.getScalarQuantityOnFace("macro-mesh", "porosity", elementIdx);
+        couplingData_[elementIdx][1] =
+            couplingParticipant_.getScalarQuantityOnFace("macro-mesh", "k_00", elementIdx);
+        couplingData_[elementIdx][2] =
+            couplingParticipant_.getScalarQuantityOnFace("macro-mesh", "k_01", elementIdx);
+        couplingData_[elementIdx][3] =
+            couplingParticipant_.getScalarQuantityOnFace("macro-mesh", "k_10", elementIdx);
+        couplingData_[elementIdx][4] =
+            couplingParticipant_.getScalarQuantityOnFace("macro-mesh", "k_11", elementIdx);
+      }
+    }
+    // Trigger exchange of coupling data between neighboring ranks, if the domain is partitioned
+    if (this->gridGeometry().gridView().comm().size() > 1) {
+      this->gridGeometry().gridView().communicate(couplingDataHandle_,
+          Dune::InteriorBorder_All_Interface, Dune::ForwardCommunication);
+    }
+  }
+
 private:
-  Dumux::Precice::CouplingAdapter &couplingParticipant_;
+  Dumux::Precice::CouplingAdapter &couplingParticipant_;
+  Dune::BlockVector<Dune::FieldVector<double, 5>> couplingData_;
+  Dumux::VectorCommDataHandleEqual<
+      typename GridGeometry::ElementMapper,
+      Dune::BlockVector<Dune::FieldVector<double, 5>>,
+      /* Entity codimension = */ 0>
+      couplingDataHandle_;
 };
 
 } // end namespace Dumux
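The `VectorCommDataHandleEqual` member is what keeps the overlap cells consistent: each rank fills the five entries per interior element (porosity, k_00, k_01, k_10, k_11) from preCICE, and the `communicate` call then copies those values from the owning rank into the overlap copies held by neighboring ranks. A minimal sketch of what such a handle does underneath, written against Dune's generic `CommDataHandleIF` interface; the class name `ElementCopyHandle` is made up for illustration, and the tutorial itself uses Dumux's ready-made handle instead:

#include <cstddef>
#include <dune/grid/common/datahandleif.hh>

// Sketch: copy one block of element data from the owner rank to
// overlap/ghost copies. `Mapper` maps elements to vector indices;
// `Vector` is e.g. Dune::BlockVector<Dune::FieldVector<double, 5>>.
template<class Mapper, class Vector>
struct ElementCopyHandle
    : Dune::CommDataHandleIF<ElementCopyHandle<Mapper, Vector>,
                             typename Vector::block_type>
{
  ElementCopyHandle(const Mapper &mapper, Vector &data)
      : mapper_(mapper), data_(data) {}

  // only element (codim 0) data takes part in the exchange
  bool contains(int /*dim*/, int codim) const { return codim == 0; }
  // every element sends the same fixed amount of data
  bool fixedSize(int /*dim*/, int /*codim*/) const { return true; }
  template<class Entity>
  std::size_t size(const Entity &) const { return 1; }

  // sender side: write this element's block into the message buffer
  template<class Buffer, class Entity>
  void gather(Buffer &buf, const Entity &e) const
  { buf.write(data_[mapper_.index(e)]); }

  // receiver side: overwrite the local copy with the owner's value
  template<class Buffer, class Entity>
  void scatter(Buffer &buf, const Entity &e, std::size_t /*n*/)
  { buf.read(data_[mapper_.index(e)]); }

private:
  const Mapper &mapper_;
  Vector &data_;
};

Invoked as in the diff above, `gridView.communicate(handle, Dune::InteriorBorder_All_Interface, Dune::ForwardCommunication)` sends data from interior and border entities to all other partitions in the forward direction, i.e. from owners to copies, which is exactly the update the spatial parameters need after each preCICE read.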
