Skip to content

Commit 81ecf14

Browse files
committed
Merge PR #200 into development
* upstream/pr/200/head: encap: Consolidate MPIVectorEncaps into VectorEncaps. Signed-off-by: Torbjörn Klatt <[email protected]>
2 parents 4f6f86c + 195d7ad commit 81ecf14

File tree

5 files changed

+109
-175
lines changed

5 files changed

+109
-175
lines changed

examples/advection_diffusion/mpi_pfasst.cpp

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -20,7 +20,7 @@ using namespace std;
2020
#include <pfasst/controller/pfasst.hpp>
2121
#include <pfasst/mpi_communicator.hpp>
2222
#include <pfasst/encap/automagic.hpp>
23-
#include <pfasst/encap/mpi_vector.hpp>
23+
#include <pfasst/encap/vector.hpp>
2424

2525
#include "advection_diffusion_sweeper.hpp"
2626
#include "spectral_transfer_1d.hpp"
@@ -61,7 +61,7 @@ namespace pfasst
6161
vector<size_t> ndofs = { ndofs_c, ndofs_f };
6262

6363
auto build_level = [ndofs](size_t level) {
64-
auto factory = make_shared<MPIVectorFactory<double>>(ndofs[level]);
64+
auto factory = make_shared<VectorFactory<double>>(ndofs[level]);
6565
auto sweeper = make_shared<AdvectionDiffusionSweeper<>>(ndofs[level]);
6666
auto transfer = make_shared<SpectralTransfer1D<>>();
6767

include/pfasst/encap/mpi_vector.hpp

Lines changed: 0 additions & 66 deletions
This file was deleted.

include/pfasst/encap/vector.hpp

Lines changed: 29 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -5,6 +5,11 @@
55
#include <vector>
66
using namespace std;
77

8+
#ifdef WITH_MPI
9+
#include "pfasst/mpi_communicator.hpp"
10+
using namespace pfasst::mpi;
11+
#endif
12+
813
#include "pfasst/encap/encapsulation.hpp"
914

1015

@@ -24,6 +29,7 @@ namespace pfasst
2429
public Encapsulation<time>
2530
{
2631
public:
32+
2733
//! @{
2834
VectorEncapsulation(const size_t size);
2935

@@ -89,6 +95,29 @@ namespace pfasst
8995
virtual time norm0() const override;
9096
//! @}
9197

98+
#ifdef WITH_MPI
99+
//! @{
100+
MPI_Request recv_request = MPI_REQUEST_NULL;
101+
MPI_Request send_request = MPI_REQUEST_NULL;
102+
//! @}
103+
104+
//! @{
105+
/**
 * Downcast a generic communicator to the concrete MPI communicator.
 *
 * The MPI send/recv routines below need access to MPI-specific members
 * (e.g. the raw `MPI_Comm`); passing a non-MPI communicator is a
 * programming error and trips the assertion.
 */
inline MPICommunicator& as_mpi(ICommunicator* comm)
{
  MPICommunicator* mpi_comm = dynamic_cast<MPICommunicator*>(comm);
  assert(mpi_comm);
  return *mpi_comm;
}
111+
//! @}
112+
113+
//! @{
114+
virtual void post(ICommunicator* comm, int tag) override;
115+
virtual void recv(ICommunicator* comm, int tag, bool blocking) override;
116+
virtual void send(ICommunicator* comm, int tag, bool blocking) override;
117+
virtual void broadcast(ICommunicator* comm) override;
118+
//! @}
119+
#endif
120+
92121
};
93122

94123
/**

src/pfasst/encap/mpi_vector_impl.hpp

Lines changed: 0 additions & 103 deletions
This file was deleted.

src/pfasst/encap/vector_impl.hpp

Lines changed: 78 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -1,10 +1,7 @@
1-
#include "pfasst/encap/vector.hpp"
2-
31
#include <algorithm>
42
#include <cassert>
5-
#include <vector>
6-
using namespace std;
73

4+
#include "pfasst/encap/vector.hpp"
85

96
namespace pfasst
107
{
@@ -167,5 +164,82 @@ namespace pfasst
167164
return *y.get();
168165
}
169166

167+
#ifdef WITH_MPI
168+
template<typename scalar, typename time>
169+
void VectorEncapsulation<scalar, time>::post(ICommunicator* comm, int tag)
170+
{
171+
auto& mpi = as_mpi(comm);
172+
if (mpi.size() == 1) { return; }
173+
if (mpi.rank() == 0) { return; }
174+
175+
int err = MPI_Irecv(this->data(), sizeof(scalar) * this->size(), MPI_CHAR,
176+
(mpi.rank() - 1) % mpi.size(), tag, mpi.comm, &recv_request);
177+
if (err != MPI_SUCCESS) {
178+
throw MPIError();
179+
}
180+
}
181+
182+
template<typename scalar, typename time>
183+
void VectorEncapsulation<scalar, time>::recv(ICommunicator* comm, int tag, bool blocking)
184+
{
185+
auto& mpi = as_mpi(comm);
186+
if (mpi.size() == 1) { return; }
187+
if (mpi.rank() == 0) { return; }
188+
189+
int err;
190+
if (blocking) {
191+
MPI_Status stat;
192+
err = MPI_Recv(this->data(), sizeof(scalar) * this->size(), MPI_CHAR,
193+
(mpi.rank() - 1) % mpi.size(), tag, mpi.comm, &stat);
194+
} else {
195+
MPI_Status stat;
196+
err = MPI_Wait(&recv_request, &stat);
197+
}
198+
199+
if (err != MPI_SUCCESS) {
200+
throw MPIError();
201+
}
202+
}
203+
204+
template<typename scalar, typename time>
205+
void VectorEncapsulation<scalar, time>::send(ICommunicator* comm, int tag, bool blocking)
206+
{
207+
auto& mpi = as_mpi(comm);
208+
if (mpi.size() == 1) { return; }
209+
if (mpi.rank() == mpi.size() - 1) { return; }
210+
211+
int err = MPI_SUCCESS;
212+
if (blocking) {
213+
err = MPI_Send(this->data(), sizeof(scalar) * this->size(), MPI_CHAR,
214+
(mpi.rank() + 1) % mpi.size(), tag, mpi.comm);
215+
} else {
216+
MPI_Status stat;
217+
err = MPI_Wait(&send_request, &stat);
218+
if (err != MPI_SUCCESS) {
219+
throw MPIError();
220+
}
221+
222+
err = MPI_Isend(this->data(), sizeof(scalar) * this->size(), MPI_CHAR,
223+
(mpi.rank() + 1) % mpi.size(), tag, mpi.comm, &send_request);
224+
}
225+
226+
if (err != MPI_SUCCESS) {
227+
throw MPIError();
228+
}
229+
}
230+
231+
template<typename scalar, typename time>
232+
void VectorEncapsulation<scalar, time>::broadcast(ICommunicator* comm)
233+
{
234+
auto& mpi = as_mpi(comm);
235+
int err = MPI_Bcast(this->data(), sizeof(scalar) * this->size(), MPI_CHAR,
236+
comm->size()-1, mpi.comm);
237+
238+
if (err != MPI_SUCCESS) {
239+
throw MPIError();
240+
}
241+
}
242+
#endif
243+
170244
} // ::pfasst::encap
171245
} // ::pfasst

0 commit comments

Comments (0)