Commit b44a865

Support DynamicPartitioner

1 parent: 7ce469d
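
This commit moves partitioner construction out of the examples and into the Solver constructor, which now takes a partitioner_type string in place of a Cajita::BlockPartitioner<3> reference. Passing "manual" reproduces the previous fixed 1 x comm_size x 1 rank layout via Cajita::ManualPartitioner; passing "dynamic" selects Cajita::DynamicPartitioner, templated on the solver's Kokkos device. Any other value throws a std::runtime_error. Both examples read the new option from a ninth command-line argument.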

File tree

3 files changed: +58 additions, -33 deletions

examples/dam_break.cpp

Lines changed: 9 additions & 5 deletions

@@ -66,7 +66,7 @@ struct ParticleInitFunc
 //---------------------------------------------------------------------------//
 void damBreak( const double cell_size, const int ppc, const int halo_size,
                const double delta_t, const double t_final, const int write_freq,
-               const std::string& device )
+               const std::string& device, const std::string& partitioner_type )
 {
     // The dam break domain is in a box on [0,1] in each dimension.
     Kokkos::Array<double, 6> global_box = { 0.0, 0.0, 0.0, 1.0, 1.0, 1.0 };

@@ -87,7 +87,6 @@ void damBreak( const double cell_size, const int ppc, const int halo_size,
     int comm_size;
     MPI_Comm_size( MPI_COMM_WORLD, &comm_size );
     std::array<int, 3> ranks_per_dim = { 1, comm_size, 1 };
-    Cajita::ManualPartitioner partitioner( ranks_per_dim );

     // Material properties.
     double bulk_modulus = 1.0e5;

@@ -110,8 +109,9 @@ void damBreak( const double cell_size, const int ppc, const int halo_size,
     // Solve the problem.
     auto solver = ExaMPM::createSolver(
         device, MPI_COMM_WORLD, global_box, global_num_cell, periodic,
-        partitioner, halo_size, ParticleInitFunc( cell_size, ppc, density ),
-        ppc, bulk_modulus, density, gamma, kappa, delta_t, gravity, bc );
+        halo_size, ParticleInitFunc( cell_size, ppc, density ), ppc,
+        bulk_modulus, density, gamma, kappa, delta_t, gravity, bc,
+        partitioner_type );
     solver->solve( t_final, write_freq );
 }

@@ -143,8 +143,12 @@ int main( int argc, char* argv[] )
     // device type
     std::string device( argv[7] );

+    // partitioner type
+    std::string partitioner_type( argv[8] );
+
     // run the problem.
-    damBreak( cell_size, ppc, halo_size, delta_t, t_final, write_freq, device );
+    damBreak( cell_size, ppc, halo_size, delta_t, t_final, write_freq, device,
+              partitioner_type );

     Kokkos::finalize();
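
Both examples now take the partitioner type ("manual" or "dynamic") as a ninth positional argument after the device name. A hypothetical launch of the dam break problem, assuming the existing positional order (cell_size, ppc, halo_size, delta_t, t_final, write_freq, device); the executable name and numeric values are illustrative only:

    mpirun -n 4 ./DamBreak 0.05 2 2 0.001 1.0 100 serial dynamic

Passing "manual" instead reproduces the fixed 1 x comm_size x 1 decomposition that was previously hard-coded here.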

examples/free_fall.cpp

Lines changed: 9 additions & 5 deletions

@@ -65,7 +65,7 @@ struct ParticleInitFunc
 //---------------------------------------------------------------------------//
 void freeFall( const double cell_size, const int ppc, const int halo_size,
                const double delta_t, const double t_final, const int write_freq,
-               const std::string& device )
+               const std::string& device, const std::string& partitioner_type )
 {
     // The dam break domain is in a box on [0,1] in each dimension.
     Kokkos::Array<double, 6> global_box = { -0.5, -0.5, -0.5, 0.5, 0.5, 0.5 };

@@ -86,7 +86,6 @@ void freeFall( const double cell_size, const int ppc, const int halo_size,
     int comm_size;
     MPI_Comm_size( MPI_COMM_WORLD, &comm_size );
     std::array<int, 3> ranks_per_dim = { 1, comm_size, 1 };
-    Cajita::ManualPartitioner partitioner( ranks_per_dim );

     // Material properties.
     double bulk_modulus = 5.0e5;

@@ -109,8 +108,9 @@ void freeFall( const double cell_size, const int ppc, const int halo_size,
     // Solve the problem.
     auto solver = ExaMPM::createSolver(
         device, MPI_COMM_WORLD, global_box, global_num_cell, periodic,
-        partitioner, halo_size, ParticleInitFunc( cell_size, ppc, density ),
-        ppc, bulk_modulus, density, gamma, kappa, delta_t, gravity, bc );
+        halo_size, ParticleInitFunc( cell_size, ppc, density ), ppc,
+        bulk_modulus, density, gamma, kappa, delta_t, gravity, bc,
+        partitioner_type );
     solver->solve( t_final, write_freq );
 }

@@ -142,8 +142,12 @@ int main( int argc, char* argv[] )
     // device type
     std::string device( argv[7] );

+    // partitioner type
+    std::string partitioner_type( argv[8] );
+
     // run the problem.
-    freeFall( cell_size, ppc, halo_size, delta_t, t_final, write_freq, device );
+    freeFall( cell_size, ppc, halo_size, delta_t, t_final, write_freq, device,
+              partitioner_type );

     Kokkos::finalize();
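
The free fall example receives the identical treatment as the dam break example: the hard-coded Cajita::ManualPartitioner is dropped and the partitioner type is forwarded to createSolver from a new argv[8].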

src/ExaMPM_Solver.hpp

Lines changed: 40 additions & 23 deletions

@@ -43,20 +43,37 @@ class Solver : public SolverBase
     template <class InitFunc>
     Solver( MPI_Comm comm, const Kokkos::Array<double, 6>& global_bounding_box,
             const std::array<int, 3>& global_num_cell,
-            const std::array<bool, 3>& periodic,
-            const Cajita::BlockPartitioner<3>& partitioner,
-            const int halo_cell_width, const InitFunc& create_functor,
-            const int particles_per_cell, const double bulk_modulus,
-            const double density, const double gamma, const double kappa,
-            const double delta_t, const double gravity,
-            const BoundaryCondition& bc )
+            const std::array<bool, 3>& periodic, const int halo_cell_width,
+            const InitFunc& create_functor, const int particles_per_cell,
+            const double bulk_modulus, const double density, const double gamma,
+            const double kappa, const double delta_t, const double gravity,
+            const BoundaryCondition& bc, const std::string& partitioner_type )
         : _dt( delta_t )
         , _gravity( gravity )
         , _bc( bc )
         , _halo_min( 3 )
     {
+        if ( 0 == partitioner_type.compare( "manual" ) )
+        {
+            int comm_size;
+            MPI_Comm_size( comm, &comm_size );
+            std::array<int, 3> ranks_per_dim = { 1, comm_size, 1 };
+            _partitioner =
+                std::make_shared<Cajita::ManualPartitioner>( ranks_per_dim );
+        }
+        else if ( 0 == partitioner_type.compare( "dynamic" ) )
+        {
+            _partitioner = std::make_shared<Cajita::DynamicPartitioner<
+                Kokkos::Device<ExecutionSpace, MemorySpace>>>(
+                comm, global_num_cell );
+        }
+        else
+        {
+            throw std::runtime_error( "invalid partitioner type" );
+        }
+
         _mesh = std::make_shared<Mesh<MemorySpace>>(
-            global_bounding_box, global_num_cell, periodic, partitioner,
+            global_bounding_box, global_num_cell, periodic, *_partitioner,
             halo_cell_width, _halo_min, comm );

         _bc.min = _mesh->minDomainGlobalNodeIndex();

@@ -112,6 +129,7 @@ class Solver : public SolverBase
     int _halo_min;
     std::shared_ptr<Mesh<MemorySpace>> _mesh;
    std::shared_ptr<ProblemManager<MemorySpace>> _pm;
+    std::shared_ptr<Cajita::BlockPartitioner<3>> _partitioner;
     int _rank;
 };

@@ -122,22 +140,21 @@ std::shared_ptr<SolverBase>
 createSolver( const std::string& device, MPI_Comm comm,
               const Kokkos::Array<double, 6>& global_bounding_box,
               const std::array<int, 3>& global_num_cell,
-              const std::array<bool, 3>& periodic,
-              const Cajita::BlockPartitioner<3>& partitioner,
-              const int halo_cell_width, const InitFunc& create_functor,
-              const int particles_per_cell, const double bulk_modulus,
-              const double density, const double gamma, const double kappa,
-              const double delta_t, const double gravity,
-              const BoundaryCondition& bc )
+              const std::array<bool, 3>& periodic, const int halo_cell_width,
+              const InitFunc& create_functor, const int particles_per_cell,
+              const double bulk_modulus, const double density,
+              const double gamma, const double kappa, const double delta_t,
+              const double gravity, const BoundaryCondition& bc,
+              const std::string& partitioner_type )
 {
     if ( 0 == device.compare( "serial" ) )
     {
 #ifdef KOKKOS_ENABLE_SERIAL
         return std::make_shared<
             ExaMPM::Solver<Kokkos::HostSpace, Kokkos::Serial>>(
-            comm, global_bounding_box, global_num_cell, periodic, partitioner,
+            comm, global_bounding_box, global_num_cell, periodic,
             halo_cell_width, create_functor, particles_per_cell, bulk_modulus,
-            density, gamma, kappa, delta_t, gravity, bc );
+            density, gamma, kappa, delta_t, gravity, bc, partitioner_type );
 #else
         throw std::runtime_error( "Serial Backend Not Enabled" );
 #endif

@@ -147,9 +164,9 @@ createSolver( const std::string& device, MPI_Comm comm,
 #ifdef KOKKOS_ENABLE_OPENMP
         return std::make_shared<
             ExaMPM::Solver<Kokkos::HostSpace, Kokkos::OpenMP>>(
-            comm, global_bounding_box, global_num_cell, periodic, partitioner,
+            comm, global_bounding_box, global_num_cell, periodic,
             halo_cell_width, create_functor, particles_per_cell, bulk_modulus,
-            density, gamma, kappa, delta_t, gravity, bc );
+            density, gamma, kappa, delta_t, gravity, bc, partitioner_type );
 #else
         throw std::runtime_error( "OpenMP Backend Not Enabled" );
 #endif

@@ -159,9 +176,9 @@ createSolver( const std::string& device, MPI_Comm comm,
 #ifdef KOKKOS_ENABLE_CUDA
         return std::make_shared<
             ExaMPM::Solver<Kokkos::CudaSpace, Kokkos::Cuda>>(
-            comm, global_bounding_box, global_num_cell, periodic, partitioner,
+            comm, global_bounding_box, global_num_cell, periodic,
             halo_cell_width, create_functor, particles_per_cell, bulk_modulus,
-            density, gamma, kappa, delta_t, gravity, bc );
+            density, gamma, kappa, delta_t, gravity, bc, partitioner_type );
 #else
         throw std::runtime_error( "CUDA Backend Not Enabled" );
 #endif

@@ -171,9 +188,9 @@ createSolver( const std::string& device, MPI_Comm comm,
 #ifdef KOKKOS_ENABLE_HIP
         return std::make_shared<ExaMPM::Solver<Kokkos::Experimental::HIPSpace,
                                                Kokkos::Experimental::HIP>>(
-            comm, global_bounding_box, global_num_cell, periodic, partitioner,
+            comm, global_bounding_box, global_num_cell, periodic,
             halo_cell_width, create_functor, particles_per_cell, bulk_modulus,
-            density, gamma, kappa, delta_t, gravity, bc );
+            density, gamma, kappa, delta_t, gravity, bc, partitioner_type );
 #else
         throw std::runtime_error( "HIP Backend Not Enabled" );
 #endif
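
The key design change is that partitioner selection now happens at runtime inside the Solver constructor rather than at the call site. As a minimal sketch of that dispatch in isolation (makePartitioner is a hypothetical name, not part of this commit; it uses only the Cajita calls that appear in the diff above):

#include <Cajita.hpp>
#include <Kokkos_Core.hpp>

#include <array>
#include <memory>
#include <stdexcept>
#include <string>

#include <mpi.h>

// Hypothetical free-function version of the constructor's branch: pick a
// block partitioner at runtime and return it through the common base class.
template <class ExecutionSpace, class MemorySpace>
std::shared_ptr<Cajita::BlockPartitioner<3>>
makePartitioner( MPI_Comm comm, const std::array<int, 3>& global_num_cell,
                 const std::string& partitioner_type )
{
    if ( 0 == partitioner_type.compare( "manual" ) )
    {
        // Fixed 1 x comm_size x 1 rank layout, as the examples hard-coded.
        int comm_size;
        MPI_Comm_size( comm, &comm_size );
        std::array<int, 3> ranks_per_dim = { 1, comm_size, 1 };
        return std::make_shared<Cajita::ManualPartitioner>( ranks_per_dim );
    }
    if ( 0 == partitioner_type.compare( "dynamic" ) )
    {
        // Dynamic load balancing, templated on the solver's device.
        return std::make_shared<Cajita::DynamicPartitioner<
            Kokkos::Device<ExecutionSpace, MemorySpace>>>( comm,
                                                           global_num_cell );
    }
    throw std::runtime_error( "invalid partitioner type" );
}

Storing the result in the new std::shared_ptr<Cajita::BlockPartitioner<3>> _partitioner member is what lets the Mesh constructor stay unchanged: it still receives a BlockPartitioner<3> reference, now as *_partitioner.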
