// C++ wrapper class around an MPI_Win memory window (extraction fragment:
// the embedded numbers 57, 73, ... are original-file line numbers, and the
// move-constructor signature between the class head and this initializer
// list is not visible in this chunk).
57 template <
class BaseType>
class window {
// Move constructor: steals the moved-from window's state and resets the
// source via std::exchange — handle to MPI_WIN_NULL, communicator to a
// null communicator, ownership flag to false, data pointer to nullptr,
// size to 0 — so the moved-from object destructs as an empty window.
73 : win_{std::exchange(other.win_, MPI_WIN_NULL)},
74 comm_{std::exchange(other.comm_,
communicator{MPI_COMM_NULL})},
75 owned_{std::exchange(other.owned_,
false)},
76 data_{std::exchange(other.data_,
nullptr)},
77 size_{std::exchange(other.size_, 0)} {}
// Move assignment (fragment: the operator signature and any release of the
// currently held window are on lines not visible in this chunk).
// Self-assignment guard, then transfer of all five members from rhs via
// std::exchange, leaving rhs as an empty window (MPI_WIN_NULL, null
// communicator, owned_ == false, data_ == nullptr, size_ == 0).
81 if (
this != std::addressof(rhs)) {
83 win_ = std::exchange(rhs.win_, MPI_WIN_NULL);
84 comm_ = std::exchange(rhs.comm_,
communicator{MPI_COMM_NULL});
85 owned_ = std::exchange(rhs.owned_,
false);
86 data_ = std::exchange(rhs.data_,
nullptr);
87 size_ = std::exchange(rhs.size_, 0);
// Construct an MPI window over an existing local memory buffer.
// @param c        communicator on which the window is created
// @param base_ptr start of the locally exposed memory (nullptr only legal for sz == 0)
// @param sz       number of elements of BaseType exposed through the window
// @param info     MPI info hints (MPI_INFO_NULL by default)
// The window is registered with MPI only when an MPI environment is detected
// (has_env); otherwise only the local members are set up.
// NOTE(review): fragment — the closing brace and any non-MPI branch are not
// visible in this chunk.
104 explicit window(
communicator const &c, BaseType *base_ptr, MPI_Aint sz, MPI_Info info = MPI_INFO_NULL)
105 : comm_(c.
get()), data_(base_ptr), size_(sz) {
// A null base pointer together with a positive size would expose invalid memory.
107 ASSERT(!(data_ ==
nullptr && size_ > 0))
108 if (
has_env)
// MPI_Win_create takes the size in bytes and a displacement unit of
// sizeof(BaseType), matching disp_unit() below.
check_mpi_call(MPI_Win_create(data_, size_ *
sizeof(BaseType),
sizeof(BaseType), info, c.
get(), &win_),
"MPI_Win_create");
// Fragment of the dynamically-allocating constructor: with an MPI environment
// the memory and window are obtained in one call via MPI_Win_allocate (size in
// bytes, displacement unit sizeof(BaseType), MPI fills in data_ and win_);
// the new[] below is presumably the non-MPI fallback branch — the surrounding
// if/else is on lines not visible in this chunk, confirm against the full file.
125 check_mpi_call(MPI_Win_allocate(size_ *
sizeof(BaseType),
sizeof(BaseType), info, c.
get(), &data_, &win_),
"MPI_Win_allocate");
127 data_ =
new BaseType[size_];
133 explicit operator MPI_Win()
const {
return win_; };
136 explicit operator MPI_Win *() {
return &win_; };
// Fragment: fence a non-null window (no-op when win_ is MPI_WIN_NULL).
155 if (win_ != MPI_WIN_NULL) {
// NOTE(review): the MPI_Win_fence return code is not routed through
// check_mpi_call here — if this sits inside the noexcept free() (see the
// member list), ignoring the error is deliberate; confirm against the file.
156 MPI_Win_fence(0, win_);
// Start a passive-target RMA access epoch.
// @param rank      target rank to lock; a negative rank presumably selects
//                  MPI_Win_lock_all over all ranks (the branch between the two
//                  calls below is on lines not visible in this chunk — confirm)
// @param lock_type MPI_LOCK_SHARED (default) or MPI_LOCK_EXCLUSIVE; used only
//                  by the single-rank MPI_Win_lock call
// @param assert    MPI assertion for optimization (0 = no assumptions)
219 void lock(
int rank = -1,
int lock_type = MPI_LOCK_SHARED,
int assert = 0)
const {
222 check_mpi_call(MPI_Win_lock_all(assert, win_),
"MPI_Win_lock_all");
224 check_mpi_call(MPI_Win_lock(lock_type, rank, assert, win_),
"MPI_Win_lock");
// Read data from a remote memory window into a local buffer.
// @tparam TargetType datatype interpretation on the target side (defaults to BaseType)
// @tparam OriginType element type of the local receive buffer
// @param origin_addr  local buffer to receive the data
// @param origin_count number of OriginType elements to receive (must be >= 0)
// @param target_rank  rank whose window memory is read
// @param target_disp  displacement into the target window (must be >= 0)
// @param target_count number of TargetType elements to read; a negative value
//                     defaults it to origin_count
// NOTE(review): fragment — the declarations of origin_datatype/target_datatype
// (original lines ~299-301) are not visible in this chunk.
294 template <
typename TargetType = BaseType,
typename OriginType>
296 void get(OriginType *origin_addr,
int origin_count,
int target_rank, MPI_Aint target_disp = 0,
int target_count = -1)
const {
297 ASSERT(origin_count >= 0 && target_disp >= 0);
298 target_count = target_count < 0 ? origin_count : target_count;
302 check_mpi_call(MPI_Get(origin_addr, origin_count, origin_datatype, target_rank, target_disp, target_count, target_datatype, win_),
"MPI_Get");
// Non-MPI fallback: only rank 0 exists, copy directly from the local buffer.
// NOTE(review): this copy starts at data_ without applying target_disp —
// confirm whether target_disp != 0 is supported on this path.
304 ASSERT(target_rank == 0);
305 std::copy(data_, data_ + target_count, origin_addr);
// Write data from a local buffer into a remote memory window.
// @tparam TargetType datatype interpretation on the target side (defaults to BaseType)
// @tparam OriginType element type of the local send buffer
// @param origin_addr  local buffer holding the data to write
// @param origin_count number of OriginType elements to send (must be >= 0)
// @param target_rank  rank whose window memory is written
// @param target_disp  displacement into the target window (must be >= 0)
// @param target_count number of TargetType elements to write; a negative value
//                     defaults it to origin_count
// NOTE(review): fragment — the origin_datatype/target_datatype declarations
// are on lines not visible in this chunk.
324 template <
typename TargetType = BaseType,
typename OriginType>
326 void put(OriginType *origin_addr,
int origin_count,
int target_rank, MPI_Aint target_disp = 0,
int target_count = -1)
const {
327 ASSERT(origin_count >= 0 && target_disp >= 0);
328 target_count = target_count < 0 ? origin_count : target_count;
332 check_mpi_call(MPI_Put(origin_addr, origin_count, origin_datatype, target_rank, target_disp, target_count, target_datatype, win_),
"MPI_Put");
// Non-MPI fallback: only rank 0 exists, copy directly into the local buffer.
// NOTE(review): copies origin_count elements to data_ without applying
// target_disp — confirm target_disp != 0 is supported on this path.
334 ASSERT(target_rank == 0);
335 std::copy(origin_addr, origin_addr + origin_count, data_);
340 [[nodiscard]] BaseType *
base()
const {
return data_; }
343 [[nodiscard]] MPI_Aint
size()
const {
return size_; }
346 [[nodiscard]]
int disp_unit()
const {
return sizeof(BaseType); }
// Wrapped MPI window handle; MPI_WIN_NULL for an empty or moved-from window.
352 MPI_Win win_{MPI_WIN_NULL};
// Communicator the window was created on.
353 communicator comm_{MPI_COMM_NULL};
// Base pointer of the window memory (owned_ and size_ are declared on lines
// not visible in this chunk).
355 BaseType *data_{
nullptr};
// Fragment of the shared_window allocating constructor: with an MPI
// environment, memory and window come from MPI_Win_allocate_shared (size in
// bytes, displacement unit sizeof(BaseType), MPI fills in data_ and win_);
// the new[] below is presumably the non-MPI fallback branch — the surrounding
// if/else is on lines not visible in this chunk, confirm against the full file.
386 check_mpi_call(MPI_Win_allocate_shared(size_ *
sizeof(BaseType),
sizeof(BaseType), info, c.
get(), &data_, &win_),
"MPI_Win_allocate_shared");
388 data_ =
new BaseType[size_];
// Query attributes of the shared memory window for a given rank.
// @param rank rank to query (MPI_PROC_NULL follows the MPI_Win_shared_query
//             convention for the default rank)
// @return tuple of (segment size, displacement unit, base pointer)
// NOTE(review): fragment — the declarations of sz/du and the has_env branch
// separating the two returns are on lines not visible in this chunk.
405 [[nodiscard]] std::tuple<MPI_Aint, int, void *>
query(
int rank = MPI_PROC_NULL)
const {
409 void *baseptr =
nullptr;
// MPI_Win_shared_query reports the segment size in BYTES.
410 check_mpi_call(MPI_Win_shared_query(win_, rank, &sz, &du, &baseptr),
"MPI_Win_shared_query");
411 return {sz, du, baseptr};
// NOTE(review): this (presumably non-MPI) fallback returns size_, which the
// accessors treat as an element count, while the MPI path above returns bytes
// and size(rank) divides by sizeof(BaseType) — looks inconsistent; confirm
// whether this should be size_ * sizeof(BaseType).
413 return {size_,
sizeof(BaseType), data_};
423 [[nodiscard]] BaseType *
base(
int rank = MPI_PROC_NULL)
const {
return static_cast<BaseType *
>(std::get<2>(
query(rank))); }
431 [[nodiscard]] MPI_Aint
size(
int rank = MPI_PROC_NULL)
const {
return std::get<0>(
query(rank)) /
sizeof(BaseType); }
439 [[nodiscard]]
int disp_unit(
int rank = MPI_PROC_NULL)
const {
return std::get<1>(
query(rank)); }
// Bring the members of the dependent base class window<BaseType> into this
// derived class template's scope, so unqualified name lookup finds them
// (dependent-base members are otherwise invisible without this-> or
// explicit qualification).
445 using window<BaseType>::win_;
446 using window<BaseType>::comm_;
447 using window<BaseType>::owned_;
448 using window<BaseType>::data_;
449 using window<BaseType>::size_;
C++ wrapper around MPI_Comm providing various convenience functions.
MPI_Comm get() const noexcept
Get the wrapped MPI_Comm object.
C++ wrapper around MPI_Group providing various convenience functions.
MPI_Group get() const noexcept
Get the wrapped MPI_Group object.
C++ wrapper around MPI_Comm that is a result of the mpi::communicator::split_shared operation.
shared_communicator get_communicator() const
Get the mpi::shared_communicator associated with the window.
shared_window()=default
Construct a shared memory window with MPI_WIN_NULL.
int disp_unit(int rank=MPI_PROC_NULL) const
Get the displacement unit of the shared memory region of a specific rank.
std::tuple< MPI_Aint, int, void * > query(int rank=MPI_PROC_NULL) const
Query attributes of a shared memory window.
shared_window(shared_communicator const &c, MPI_Aint sz, MPI_Info info=MPI_INFO_NULL)
Construct a shared memory window by dynamically allocating memory.
BaseType * base(int rank=MPI_PROC_NULL) const
Get a pointer to the beginning of the shared memory region of a specific rank.
MPI_Aint size(int rank=MPI_PROC_NULL) const
Get the size of the shared memory region of a specific rank.
A C++ wrapper around MPI_Win providing convenient memory window management.
int disp_unit() const
Get the displacement unit in bytes.
BaseType base_type
Type of the base pointer.
void lock(int rank=-1, int lock_type=MPI_LOCK_SHARED, int assert=0) const
Start an RMA access epoch.
window()=default
Construct a window with MPI_WIN_NULL.
void get(OriginType *origin_addr, int origin_count, int target_rank, MPI_Aint target_disp=0, int target_count=-1) const
Read data from a remote memory window.
window(communicator const &c, BaseType *base_ptr, MPI_Aint sz, MPI_Info info=MPI_INFO_NULL)
Construct an MPI window over an existing local memory buffer.
window(window const &)=delete
Deleted copy constructor.
void post(group const &grp, int assert=0) const
Start an RMA exposure epoch by calling MPI_Win_post (see also wait()).
void complete() const
Completes an RMA access epoch by calling MPI_Win_complete (see also start()).
virtual ~window()
Destructor calls free() to release the window.
MPI_Aint size() const
Get the size of the window in number of elements.
void put(OriginType *origin_addr, int origin_count, int target_rank, MPI_Aint target_disp=0, int target_count=-1) const
Write data to a remote memory window.
void start(group const &grp, int assert=0) const
Start an RMA access epoch by calling MPI_Win_start (see also complete()).
window(communicator const &c, MPI_Aint sz, MPI_Info info=MPI_INFO_NULL)
Construct an MPI window with dynamically allocated memory.
void fence(int assert=0) const
Synchronize all RMA operations within an access epoch by calling MPI_Win_fence.
void flush(int rank=-1) const
Ensure completion of all outstanding RMA operations.
void free() noexcept
Release allocated resources owned by the window.
BaseType * base() const
Get a pointer to the beginning of the window memory.
window & operator=(window &&rhs) noexcept
Move assignment operator takes ownership of the moved-from MPI window and leaves it with MPI_WIN_NULL.
void wait() const
Completes an RMA exposure epoch by calling MPI_Win_wait (see also post()).
window & operator=(window const &)=delete
Deleted copy assignment operator.
void unlock(int rank=-1) const
Complete an RMA access epoch started by lock().
void sync() const
Synchronize the public and private copies of the window.
window(window &&other) noexcept
Move constructor takes ownership of the moved-from MPI window and leaves it with MPI_WIN_NULL.
communicator get_communicator() const
Get the mpi::communicator associated with the window.
Provides a C++ wrapper class for an MPI_Comm object.
Provides utilities to map C++ datatypes to MPI datatypes.
Provides a C++ wrapper class for an MPI_Group object.
static const bool has_env
Boolean variable that is true if one of the MPI-related environment variables (e.g. OMPI_COMM_WORLD_RANK, PMI_RANK) is set, indicating that the program runs in an MPI environment.
constexpr bool has_mpi_type
Type trait to check if a type T has a corresponding MPI datatype, i.e. if mpi::mpi_type has been specialized for T.
void check_mpi_call(int errcode, const std::string &mpi_routine)
Check the success of an MPI call.
Macros used in the mpi library.
Map C++ datatypes to the corresponding MPI datatypes.
Provides general utilities related to MPI.