#include <iostream>
#include <cmath>
#include <cstdlib>
#include <string>
#include <vector>

#include "../datastructures/Body.hpp"
#include "../datastructures/Box.hpp"
#include "MpiSimulation.hpp"
#include "../datastructures/Tree.hpp"
#include "../datastructures/Node.hpp"

namespace nbody {
	using namespace std;

	MpiSimulation::MpiSimulation(int& argc, char**& argv) {
		Body b[2];
		int bodyBlocklengths[7] = {1, 3, 3, 3, 1, 1, 1};
		MPI::Datatype bodyDatatypes[7] = {MPI::LB, MPI::DOUBLE, MPI::DOUBLE, MPI::DOUBLE, MPI::DOUBLE, MPI::BOOL, MPI::UB};
		MPI::Aint bodyDisplacements[7];
		Box box[2];
		int boxBlocklengths[4] = {1, 3, 3, 1};
		MPI::Datatype boxDatatypes[4] = {MPI::LB, MPI::DOUBLE, MPI::DOUBLE, MPI::UB};
		MPI::Aint boxDisplacements[4];

		this->bodyType = MPI::DATATYPE_NULL;
		this->boxType = MPI::DATATYPE_NULL;

		//describe the Body struct layout relative to its first element
		bodyDisplacements[0] = MPI::Get_address(&(b[0])) - MPI::Get_address(&(b[0]));
		bodyDisplacements[1] = MPI::Get_address(&(b[0].position[0])) - MPI::Get_address(&(b[0]));
		bodyDisplacements[2] = MPI::Get_address(&(b[0].velocity[0])) - MPI::Get_address(&(b[0]));
		bodyDisplacements[3] = MPI::Get_address(&(b[0].acceleration[0])) - MPI::Get_address(&(b[0]));
		bodyDisplacements[4] = MPI::Get_address(&(b[0].mass)) - MPI::Get_address(&(b[0]));
		bodyDisplacements[5] = MPI::Get_address(&(b[0].refinement)) - MPI::Get_address(&(b[0]));
		bodyDisplacements[6] = MPI::Get_address(&(b[1])) - MPI::Get_address(&(b[0]));
		this->bodyType = MPI::Datatype::Create_struct(7, bodyBlocklengths, bodyDisplacements, bodyDatatypes);
		this->bodyType.Commit();

		//describe the Box struct layout (min/max corners) the same way
		boxDisplacements[0] = MPI::Get_address(&(box[0])) - MPI::Get_address(&(box[0]));
		boxDisplacements[1] = MPI::Get_address(&(box[0].min[0])) - MPI::Get_address(&(box[0]));
		boxDisplacements[2] = MPI::Get_address(&(box[0].max[0])) - MPI::Get_address(&(box[0]));
		boxDisplacements[3] = MPI::Get_address(&(box[1])) - MPI::Get_address(&(box[0]));
		this->boxType = MPI::Datatype::Create_struct(4, boxBlocklengths, boxDisplacements, boxDatatypes);
		this->boxType.Commit();

		this->mpiSize = MPI::COMM_WORLD.Get_size();
		this->mpiRank = MPI::COMM_WORLD.Get_rank();
		this->domains = new Box[this->mpiSize];
		this->correctState = true;

		//only rank 0 reads the input file; its verdict is broadcast to all ranks
		if (argc == 2) {
			this->correctState = this->readInputData(string(argv[1]));
		} else {
			this->correctState = false;
		}
		MPI::COMM_WORLD.Bcast(&this->correctState, 1, MPI::BOOL, 0);
		if (!this->correctState) {
			cerr << "Error occurred: terminating ..." << endl;
			this->bodyType.Free();
			this->boxType.Free();
			MPI::Finalize();
			exit(-1);
		}

		//exercise the freshly committed body datatype by shipping two test
		//bodies from rank 0 to rank 1 (requires at least two processes)
		if (this->mpiRank == 0) {
			MPI::COMM_WORLD.Send(b, 2, this->bodyType, 1, 0);
		} else if (this->mpiRank == 1) {
			MPI::COMM_WORLD.Recv(b, 2, this->bodyType, 0, 0);
		}
	}
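	//Sketch, not used by the original code: the MPI::LB/MPI::UB markers above are
	//deprecated.  Assuming the MPI C API is visible through the headers included
	//above and the implementation provides MPI_CXX_BOOL (MPI-3), the same Body
	//layout could be described by resizing a struct type to sizeof(Body).  The
	//function name createBodyTypeSketch is made up for illustration only.
	static MPI_Datatype createBodyTypeSketch() {
		Body probe;
		int lengths[5] = {3, 3, 3, 1, 1};
		MPI_Datatype types[5] = {MPI_DOUBLE, MPI_DOUBLE, MPI_DOUBLE, MPI_DOUBLE, MPI_CXX_BOOL};
		MPI_Aint base, disps[5];

		MPI_Get_address(&probe, &base);
		MPI_Get_address(&probe.position[0], &disps[0]);
		MPI_Get_address(&probe.velocity[0], &disps[1]);
		MPI_Get_address(&probe.acceleration[0], &disps[2]);
		MPI_Get_address(&probe.mass, &disps[3]);
		MPI_Get_address(&probe.refinement, &disps[4]);
		for (int i = 0; i < 5; i++) {
			disps[i] -= base; //make displacements relative to the struct start
		}

		MPI_Datatype tmp, bodyTypeC;
		MPI_Type_create_struct(5, lengths, disps, types, &tmp);
		//resize so that consecutive Body elements are spaced sizeof(Body) apart
		MPI_Type_create_resized(tmp, 0, sizeof(Body), &bodyTypeC);
		MPI_Type_commit(&bodyTypeC);
		MPI_Type_free(&tmp);
		return bodyTypeC;
	}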
	bool MpiSimulation::readInputData(string filename) {
		if (this->mpiRank == 0) {
			this->bodies = Tree::dubinskiParse(filename);
			if (this->bodies.empty()) {
				return false;
			}
		}
		return true;
	}

	void MpiSimulation::cleanup() {
		while (!this->comms.empty()) {
			this->comms.back().cleanup();
			this->comms.pop_back();
		}
		this->bodyType.Free();
		this->boxType.Free(); //the box datatype is committed in the constructor as well
	}

	bool MpiSimulation::stateCorrect() {
		return this->correctState;
	}

	MpiSimulation::~MpiSimulation() {
		delete[] this->domains;
	}

	int MpiSimulation::getNumberOfProcesses() {
		return this->mpiSize;
	}

	int MpiSimulation::getProcessId() {
		return this->mpiRank;
	}

	MPI::Datatype* MpiSimulation::getDatatype() {
		return &this->bodyType;
	}

	void MpiSimulation::distributeBodies() {
		if (this->mpiRank == 0) {
			vector<Node> nodes;
			Box bb;

			//start with a single node holding all bodies
			nodes.push_back(Node(NULL));
			nodes.front().setBodies(this->bodies);
			bb = nodes.front().getBB();
			bb.extendForBodies(this->bodies);
			nodes.front().setBB(bb);

			//repeatedly bisect the node with the most bodies until there is
			//one domain per MPI process
			while (nodes.size() < this->mpiSize) {
				int mostBodiesIndex = 0;

				for (unsigned int i = 0; i < nodes.size(); i++) {
					if (nodes[i].getBodies().size() > nodes[mostBodiesIndex].getBodies().size()) {
						mostBodiesIndex = i;
					}
				}

				vector<Box> subdomains = nodes[mostBodiesIndex].getBB().splitLongestSide();
				vector<Body> buf = nodes[mostBodiesIndex].getBodies();
				Node n(NULL);
				Box bb;

				n.setBodies(subdomains[0].extractBodies(buf));
				bb.extendForBodies(n.getBodies());
				n.setBB(bb);
				nodes.insert(nodes.begin() + mostBodiesIndex, n);

				//note: bb is reused without being reset, so the second node's
				//bounding box also spans the first half's bodies
				n = Node(NULL);
				n.setBodies(subdomains[1].extractBodies(buf));
				bb.extendForBodies(n.getBodies());
				n.setBB(bb);
				nodes.insert(nodes.begin() + mostBodiesIndex, n);
				nodes.erase(nodes.begin() + mostBodiesIndex + 2);
			}

			//keep the first domain locally, send the others to the remote ranks
			this->bodies = nodes[0].getBodies();
			for (unsigned int i = 1; i < nodes.size(); i++) {
				this->comms.push_back(MpiBodyComm(&this->bodyType));
				this->comms.back().sendUnblocking(i, nodes[i].getBodies());
			}
		} else {
			this->comms.push_back(MpiBodyComm(&this->bodyType));
			this->comms[0].recvBlocking(0, this->bodies);
		}
		this->tree.build(this->bodies);
		this->domains[this->mpiRank] = this->tree.getRootBB();
		//this->tree.isCorrect();
	}

	void MpiSimulation::distributeDomains() {
		MPI::COMM_WORLD.Allgather(&this->domains[this->mpiRank], 1, this->boxType, &this->domains[0], 1, this->boxType);
	}
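	//Note (sketch, not the original call): the Allgather above uses a send buffer
	//that aliases part of the receive buffer, which the MPI standard disallows for
	//collectives.  An aliasing-free way to express the same exchange would be the
	//in-place form:
	//
	//    MPI::COMM_WORLD.Allgather(MPI::IN_PLACE, 0, MPI::DATATYPE_NULL,
	//                              this->domains, 1, this->boxType);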
	void MpiSimulation::distributeLETs() {
		//send out locally essential trees (local bodies needed by remote simulations)
		for (unsigned int i = 0; i < this->mpiSize; i++) {
			if (i != this->mpiRank) {
				vector<Body> refinements = this->tree.copyRefinements(this->domains[i]);

				//reuse an idle communicator if possible, otherwise create a new one
				vector<MpiBodyComm>::iterator it = this->comms.begin();
				while (it != this->comms.end() && !it->sendUnblocking(i, refinements)) {
					it++;
				}
				if (it == this->comms.end()) {
					this->comms.push_back(MpiBodyComm(&this->bodyType));
					this->comms.back().sendUnblocking(i, refinements);
				}
			}
		}

		//receive bodies and integrate them into local tree for simulation
		unsigned int received = 0;
		while (received < this->mpiSize - 1) {
			vector<Body> refinements;

			//integrate bodies in order of arrival to do communication/computation overlapping
			this->comms[0].recvBlocking(MPI::ANY_SOURCE, refinements);
			this->tree.mergeLET(refinements);
			received++;
		}
	}

	void MpiSimulation::runStep() {
		Box overallDomain;

		//perform three domain-exchange/rebuild/force/advance iterations per call
		for (int k = 0; k < 3; k++) {
			this->distributeDomains();
			//this->domains[this->mpiRank].print();
			for (unsigned int i = 0; i < this->mpiSize; i++) {
				overallDomain.extend(this->domains[i]);
			}
			/*
			if (this->mpiRank == 0) {
				overallDomain.print();
			}
			*/
			this->tree.rebuild(overallDomain);
			this->distributeLETs();
			this->tree.computeForces();
			this->tree.advance();
			this->domains[this->mpiRank] = this->tree.getLocalBB();
		}
	}
}
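
/*
 * Driver sketch (assumption, not part of the original sources): it only exercises
 * the member functions defined in this file.  Whether MPI::Init/Finalize are meant
 * to be called by the application or elsewhere is not visible here, so the calls
 * below are illustrative only.
 *
 *	int main(int argc, char** argv) {
 *		MPI::Init(argc, argv);
 *		{
 *			nbody::MpiSimulation sim(argc, argv);
 *
 *			sim.distributeBodies();
 *			sim.runStep();
 *			sim.cleanup();
 *		}
 *		MPI::Finalize();
 *		return 0;
 *	}
 */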