#pragma once

#include <mpi.h>

#include <algorithm>
#include <array>
#include <cstddef>
#include <cstdlib>
#include <iostream>
#include <sstream>
#include <string>
#include <vector>

#include "gsl/gsl"

#include "MpiSubarray.hpp"
#include "Util.hpp"
#include "state.hpp"

struct HeaderInfo {
    Size GlobalSize;
    std::size_t HeaderLength;
};

struct FileIO {
    static auto ReadHeader(const std::string& path) {
        // read the header into a buf (20 chars should be sufficient)
        MPI_File fh;
        MPI_File_open(MPI_COMM_SELF, path.c_str(),
                      MPI_MODE_RDONLY | MPI_MODE_UNIQUE_OPEN, MPI_INFO_NULL,
                      &fh);
        constexpr auto HeaderBufSize = 20;
        std::array<char, HeaderBufSize> buf;
        MPI_File_read_all(fh, buf.data(), buf.size(), MPI_CHAR,
                          MPI_STATUS_IGNORE);
        MPI_File_close(&fh);

        // make a stream out of buf
        std::istringstream input;
        input.rdbuf()->pubsetbuf(buf.data(), buf.size());

        // parse the stream
        std::size_t noCols{};
        std::size_t noRows{};
        input >> noCols >> noRows;
        if (noCols < 1 || noRows < 1) {
            std::cerr << "File header corrupt\n";
            MPI_Abort(MPI_COMM_WORLD, EXIT_FAILURE);
        }
        std::string dummy; // skip line break
        std::getline(input, dummy);
        const auto headerLength = input.tellg();

        return HeaderInfo{
            {noCols, noRows},
            static_cast<std::size_t>(headerLength) // fits (at most HeaderBufSize)
        };
    }

    static auto WriteHeader(const HeaderInfo& header, const std::string& path,
                            const MpiEnvironment& env) {
        if (!env.isMaster()) return;

        std::ostringstream ss;
        ss << header.GlobalSize.Cols << ' ' << header.GlobalSize.Rows << '\n';
        const auto buf = ss.str();

        MPI_File fh;
        MPI_File_open(MPI_COMM_SELF, path.c_str(),
                      MPI_MODE_WRONLY | MPI_MODE_CREATE, MPI_INFO_NULL, &fh);
        MPI_File_write(fh, buf.data(), buf.size(), MPI_CHAR,
                       MPI_STATUS_IGNORE);
        MPI_File_close(&fh);
    }

    // TODO: is not an IO concern
    static auto GetTileSize(Size globalSize, Size gridSize) {
        const auto tileSizeCols = globalSize.Cols / gridSize.Cols;
        const auto tileSizeRows = globalSize.Rows / gridSize.Rows;
        return Size{tileSizeCols, tileSizeRows};
    }

    class Tile {
        static constexpr std::size_t LF = 1; // linefeed chars

        const std::string& _path;
        const std::size_t _headerLength;
        const Size _srcSize;
        const Size _gridSize;
        const std::size_t _rank;
        gsl::span<char> _buf;

        const Size _tileSize;
        const std::size_t _tileX;
        const std::size_t _tileY;
        const MpiSubarray _tileType;
        const MpiSubarray _bufType;
        const std::size_t _displ;

      public:
        Tile(const std::string& path, HeaderInfo header, Size gridSize,
             std::size_t rank, gsl::span<char> buf)
            : _path(path),
              _headerLength(header.HeaderLength),
              _srcSize(header.GlobalSize),
              _gridSize(gridSize),
              _rank(rank),
              _buf(buf),
              _tileSize(FileIO::GetTileSize(header.GlobalSize, gridSize)), //
              _tileX(rank % gridSize.Cols),                                //
              _tileY(rank / gridSize.Cols),                                //
              _tileType(MpiSubarray(
                  {{header.GlobalSize.Rows, _tileSize.Rows, 0},
                   {header.GlobalSize.Cols + LF, _tileSize.Cols, 0}})),
              _bufType(MpiSubarray({{_tileSize.Rows + 2, _tileSize.Rows, 1},
                                    {_tileSize.Cols + 2, _tileSize.Cols, 1}})),
              _displ(header.HeaderLength +
                     (header.GlobalSize.Cols + LF) * _tileSize.Rows * _tileY +
                     _tileSize.Cols * _tileX) {}

        void Read() {
            MPI_File file;
            MPI_File_open(MPI_COMM_WORLD, _path.c_str(),
                          MPI_MODE_RDONLY | MPI_MODE_UNIQUE_OPEN,
                          MPI_INFO_NULL, &file);
            MPI_File_set_view(file, _displ, MPI_CHAR, _tileType.type(),
                              "native", MPI_INFO_NULL);
            MPI_File_read_all(file, _buf.data(), 1, _bufType.type(),
                              MPI_STATUS_IGNORE);
            MPI_File_close(&file);
        }

        void Write() const {
            MPI_File file;
            MPI_File_open(MPI_COMM_WORLD, _path.c_str(),
                          MPI_MODE_CREATE | MPI_MODE_WRONLY, MPI_INFO_NULL,
                          &file);
            MPI_File_set_view(file, _displ, MPI_CHAR, _tileType.type(),
                              "native", MPI_INFO_NULL);
            MPI_File_write_all(file, _buf.data(), 1, _bufType.type(),
                               MPI_STATUS_IGNORE);
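            // The view above maps the halo-free interior of our buffer
            // (_bufType skips the one-cell border) onto this rank's
            // rectangle in the file (_tileType, offset by _displ), so the
            // single collective write moves only the tile's own cells.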
            /// fix line feeds
            // this is done with a collective call, but only the rightmost
            // ranks actually write line feeds

            // are we a rightmost tile?
            const auto rightMost = _tileX == _gridSize.Cols - 1;
            const auto bottomMost = _tileY == _gridSize.Rows - 1;
            const auto noLfNeeded = rightMost ?        //
                                        (bottomMost ?  //
                                             _tileSize.Rows - 1 //
                                             : _tileSize.Rows)  //
                                        : 0;           //
            const auto lfType = MpiSubarray( // subsize must be > 0
                {{_srcSize.Rows, std::max<std::size_t>(noLfNeeded, 1), 0},
                 {_srcSize.Cols + LF, 1, 0}});
            const std::vector<char> lfs(noLfNeeded, '\n');
            const auto lfDisp = _headerLength +
                                (_srcSize.Cols + LF) * _tileSize.Rows * _tileY +
                                _srcSize.Cols;
            MPI_File_set_view(file, lfDisp, MPI_CHAR, lfType.type(), "native",
                              MPI_INFO_NULL);
            // lfs is empty for non-rightmost ranks
            MPI_File_write_all(file, lfs.data(), lfs.size(), MPI_CHAR,
                               MPI_STATUS_IGNORE);

            MPI_File_close(&file);
        }
    };
};
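// A minimal usage sketch (comment only; `env`, `rank`, and the path strings
// are placeholders, and Size / MpiEnvironment are assumed to come from
// state.hpp / Util.hpp as the includes suggest):
//
//   const std::string inPath = "board.txt";
//   const auto header = FileIO::ReadHeader(inPath);
//   const Size gridSize{2, 2}; // 2x2 process grid, i.e. 4 ranks
//   const auto tileSize = FileIO::GetTileSize(header.GlobalSize, gridSize);
//   // tile buffer including a one-cell halo on every side (see _bufType)
//   std::vector<char> storage((tileSize.Rows + 2) * (tileSize.Cols + 2));
//   FileIO::Tile tile{inPath, header, gridSize, rank, gsl::make_span(storage)};
//   tile.Read();
//   // ... compute on the tile ...
//   const std::string outPath = "out.txt"; // Tile keeps a reference to it,
//   FileIO::WriteHeader(header, outPath, env); // so it must outlive the Tile
//   FileIO::Tile out{outPath, header, gridSize, rank, gsl::make_span(storage)};
//   out.Write();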