// NOTE(review): removed stray diff-viewer artifact lines ("Newer"/"Older")
// that were pasted into the file and broke compilation.
#include <array>
#include <iostream>
#include "Communicator.hpp"
/// Builds the halo-exchange communicator for one tile of a 2-D process grid.
///
/// @param env       MPI environment; only env.worldRank() is read here.
/// @param gridSize  Process-grid dimensions (Cols x Rows of ranks).
/// @param tileSize  Local tile dimensions in cells, excluding the 1-cell halo.
///
/// NOTE(review): the original body was truncated mid-file (missing array
/// terminators, the corner/row type commits, the generalSizes contents and
/// the loop's closing braces). The reconstruction below follows the visible
/// structure; confirm against version control.
Communicator::Communicator(const MpiEnvironment& env, const Size& gridSize, const Size& tileSize) {
    // Begin definition of basic types.
    // A corner halo is a single cell.
    MPI_Type_contiguous(1, MPI_CHAR, &haloCornerType_);
    MPI_Type_commit(&haloCornerType_);
    // A row halo is tileSize.Cols contiguous cells.
    MPI_Type_contiguous(static_cast<int>(tileSize.Cols), MPI_CHAR, &haloRowType_);
    MPI_Type_commit(&haloRowType_);
    // A column halo is tileSize.Rows cells strided by the padded row width
    // (+2 accounts for the left/right halo columns).
    MPI_Type_vector(static_cast<int>(tileSize.Rows), 1, static_cast<int>(tileSize.Cols + 2), MPI_CHAR,
                    &haloColumnType_);
    MPI_Type_commit(&haloColumnType_);
    // End definition of basic types

    // Begin definition of types/displacements for a general cell somewhere in
    // the middle of the procs grid. Neighbor order: NW, N, NE, W, E, SW, S, SE.
    const std::array<MPI_Datatype, NoNeighbors> generalSendTypes{{
        haloCornerType_, haloRowType_, haloCornerType_, //
        haloColumnType_, haloColumnType_,               //
        haloCornerType_, haloRowType_, haloCornerType_  //
    }};
    const auto tCols = tileSize.Cols;
    const auto tRows = tileSize.Rows;
    // character coordinates to displacement (x,y are in the padded tile,
    // whose row pitch is tCols + 2)
    const auto dp = [&](std::size_t x, std::size_t y) { return static_cast<MPI_Aint>(y * (tCols + 2) + x); };
    // Sends originate from the outermost ring of *owned* cells.
    const std::array<MPI_Aint, NoNeighbors> generalSendDisplacements{{
        dp(1, 1), dp(1, 1), dp(tCols, 1),            //
        dp(1, 1), dp(tCols, 1),                      //
        dp(1, tRows), dp(1, tRows), dp(tCols, tRows) //
    }};
    // Receives land in the halo ring surrounding the owned cells.
    const std::array<MPI_Aint, NoNeighbors> generalRecvDisplacements{{
        dp(0, 0), dp(1, 0), dp(tCols + 1, 0),                            //
        dp(0, 1), dp(tCols + 1, 1),                                      //
        dp(0, tRows + 1), dp(1, tRows + 1), dp(tCols + 1, tRows + 1)     //
    }};
    // One element of the corresponding datatype per neighbor.
    const std::array<int, NoNeighbors> generalSizes{{1, 1, 1, 1, 1, 1, 1, 1}};
    // End definition of datastructures for a general cell

    // Begin definition of datastructures for this particular cell (handle the
    // border cases)
    const auto rank2coord = [&](std::size_t rank) {
        return Coord{
            rank % gridSize.Cols, //
            rank / gridSize.Cols  //
        };
    };
    const auto coord2rank = [&](Coord c) { return static_cast<int>(gridSize.Cols * c.Y + c.X); };
    // Out-of-grid coordinates wrapped to huge unsigned values, so a single
    // '<' test rejects both the -1 underflow and the +1 overflow cases.
    const auto isInsideProcsGrid = [&](Coord c) { return c.X < gridSize.Cols && c.Y < gridSize.Rows; };
    const auto myCoord = rank2coord(env.worldRank());
    const std::array<Coord, NoNeighbors> generalNeighborCoords{{
        {myCoord.X - 1, myCoord.Y - 1}, // intentional unsigned wrap-around
        {myCoord.X + 0, myCoord.Y - 1}, //
        {myCoord.X + 1, myCoord.Y - 1}, //
        {myCoord.X - 1, myCoord.Y + 0}, //
        {myCoord.X + 1, myCoord.Y + 0}, //
        {myCoord.X - 1, myCoord.Y + 1}, //
        {myCoord.X + 0, myCoord.Y + 1}, //
        {myCoord.X + 1, myCoord.Y + 1}  //
    }};
    // Keep only the neighbors that actually exist for this (possibly border)
    // cell, compacting the general tables into the member vectors.
    for (std::size_t i{0}; i < NoNeighbors; ++i) {
        const auto nbrCoord = generalNeighborCoords[i];
        if (isInsideProcsGrid(nbrCoord)) {
            neighbors_.push_back(coord2rank(nbrCoord));
            sendTypes_.push_back(generalSendTypes[i]);
            sendDisplacements_.push_back(generalSendDisplacements[i]);
            recvDisplacements_.push_back(generalRecvDisplacements[i]);
            sizes_.push_back(generalSizes[i]);
        }
    }
    // The neighborhood is symmetric, so sources == destinations.
    MPI_Dist_graph_create_adjacent(MPI_COMM_WORLD,                      // comm_old
                                   static_cast<int>(neighbors_.size()), // indegree
                                   neighbors_.data(),                   // sources
                                   static_cast<int*>(MPI_UNWEIGHTED),   // sourceweights
                                   static_cast<int>(neighbors_.size()), // outdegree
                                   neighbors_.data(),                   // destinations
                                   static_cast<int*>(MPI_UNWEIGHTED),   // destweights
                                   MPI_INFO_NULL,                       // info
                                   0,                                   // reorder
                                   &commDistGraph_                      // comm_dist_graph
    );
    // End definition of datastructures for this particular cell
}
/// Releases the distributed-graph communicator and the committed halo
/// datatypes.
///
/// A moved-from Communicator is expected to hold MPI_COMM_NULL (the move
/// operations swap with a default-constructed state — confirm the member
/// initializers in the header), so all MPI cleanup is skipped in that case
/// to avoid freeing invalid handles.
Communicator::~Communicator() {
    if (commDistGraph_ != MPI_COMM_NULL) {
        MPI_Comm_free(&commDistGraph_);
        // Fix: haloCornerType_ is used for communication, so it is a
        // committed type; it was previously never freed (resource leak).
        MPI_Type_free(&haloCornerType_);
        MPI_Type_free(&haloColumnType_);
        MPI_Type_free(&haloRowType_);
    }
}
/// Exchanges the complete state of two Communicators, member by member.
///
/// Used to implement the move operations. `using std::swap` enables ADL so
/// member types with their own swap would be picked up; the handles here
/// fall back to std::swap.
///
/// Fix: the original was truncated and missing its closing brace.
void Communicator::swap(Communicator& first, Communicator& second) {
    using std::swap;
    swap(first.neighbors_, second.neighbors_);
    swap(first.sizes_, second.sizes_);
    swap(first.sendTypes_, second.sendTypes_);
    swap(first.sendDisplacements_, second.sendDisplacements_);
    swap(first.recvDisplacements_, second.recvDisplacements_);
    swap(first.commDistGraph_, second.commDistGraph_);
    swap(first.haloRowType_, second.haloRowType_);
    swap(first.haloColumnType_, second.haloColumnType_);
    swap(first.haloCornerType_, second.haloCornerType_);
}
/// Move constructor: steals `other`'s state via swap.
/// Presumably the in-class member initializers leave this object in an
/// empty state (MPI_COMM_NULL etc.) before the swap, so `other` ends up
/// safely destructible — confirm in the header.
Communicator::Communicator(Communicator&& other) noexcept {
    swap(*this, other);
}
/// Move assignment via the swap idiom: our old resources travel into
/// `other` and are released by its destructor. The self-assignment guard
/// is purely an optimization — swapping an object with itself is a no-op.
Communicator& Communicator::operator=(Communicator&& other) noexcept {
    if (this != &other) {
        swap(*this, other);
    }
    return *this;
}