// FileIO.cpp
#include "FileIO.hpp"

#include <algorithm>
#include <array>
#include <cstddef>
#include <fstream>
#include <iomanip>
#include <iostream>
#include <sstream>
#include <string>
#include <vector>

HeaderInfo FileIO::ReadHeader(const std::string& path) {
	// the header is "<cols> <rows>\n" (see WriteHeader);
	// 20 bytes is enough for two decimal integers plus separator and newline
	constexpr auto HeaderBufSize = 20;
	MPI_File fh;
	MPI_File_open(MPI_COMM_SELF, path.c_str(),
	              MPI_MODE_RDONLY | MPI_MODE_UNIQUE_OPEN, MPI_INFO_NULL, &fh);
	std::array<char, HeaderBufSize> buf;
	MPI_File_read_all(fh, buf.data(), static_cast<int>(buf.size()), MPI_CHAR,
	                  MPI_STATUS_IGNORE);
	MPI_File_close(&fh);

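	// a CR suggests Windows (CRLF) line endings, which would break the fixed
	// (Cols + 1)-character row stride that the tiled I/O below relies on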
	if (std::find(std::begin(buf), std::end(buf), '\r') != std::end(buf)) {
		MpiReportErrorAbort("Invalid newline character detected!");
	}

	// make a stream from the buffer (copy into a std::string; pubsetbuf on a
	// stringbuf is implementation-defined and may be a no-op)
	std::istringstream input{std::string(buf.data(), buf.size())};

	// parse the stream
	std::size_t noCols{};
	std::size_t noRows{};
	input >> noCols >> noRows;
	if (noCols < 1 || noRows < 1) {
		MpiReportErrorAbort("File header corrupt");
	}
	std::string dummy; // skip line break
	std::getline(input, dummy);
	const auto headerLength = input.tellg();
	return {
	    {noCols, noRows},
	    static_cast<std::size_t>(
	        headerLength) // fits: at most HeaderBufSize
	};
}

void FileIO::WriteHeader(const HeaderInfo& header, const std::string& path,
                         const MpiEnvironment& env) {
	if (!env.isMaster()) return;

	// write header into buffer
	std::ostringstream ss;
	ss << header.GlobalSize.Cols << ' ' << header.GlobalSize.Rows << '\n';
	const auto buf = ss.str();

	// write buffer
	MPI_File fh;
	MPI_File_open(MPI_COMM_SELF, path.c_str(),
	              MPI_MODE_WRONLY | MPI_MODE_CREATE, MPI_INFO_NULL, &fh);
	MPI_File_write(fh, buf.data(), static_cast<int>(buf.size()), MPI_CHAR,
	               MPI_STATUS_IGNORE);
	MPI_File_close(&fh);
}

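// Splits the global grid into procsSize.Cols x procsSize.Rows tiles. The
// integer arithmetic distributes remainders so tile extents differ by at most
// one cell, e.g. 10 columns over 3 ranks gives boundaries 0|3|6|10, i.e.
// widths 3, 3 and 4.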
TileInfo FileIO::GetTileInfo(Size globalSize, Size procsSize,
                             std::size_t rank) {
	const auto tileX = rank % procsSize.Cols;
	const auto tileY = rank / procsSize.Cols;
	const auto xBeg = (tileX + 0) * globalSize.Cols / procsSize.Cols;
	const auto xEnd = (tileX + 1) * globalSize.Cols / procsSize.Cols;
	const auto yBeg = (tileY + 0) * globalSize.Rows / procsSize.Rows;
	const auto yEnd = (tileY + 1) * globalSize.Rows / procsSize.Rows;

	const auto tileSizeCols = xEnd - xBeg;
	const auto tileSizeRows = yEnd - yBeg;
	return {{tileSizeCols, tileSizeRows}, {xBeg, yBeg}, {tileX, tileY}};
}
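
// A Tile describes one rank's rectangular patch of the file. tileType_ is the
// patch's footprint inside the on-disk character matrix of GlobalSize.Rows
// rows, each GlobalSize.Cols cells followed by LF line-feed character(s).
// bufType_ is the patch interior inside the in-memory buffer, which is padded
// by one cell on every side (presumably a ghost/halo layer). displ_ is the
// byte offset of the patch's upper-left cell: the header, plus Y full file
// rows, plus X characters into that row.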
FileIO::Tile::Tile(const std::string& path, HeaderInfo header, Size procsSize,
                   std::size_t rank, State* buf)
    : path_(path), headerLength_(header.HeaderLength),
      srcSize_(header.GlobalSize), procsSize_(procsSize), buf_(buf),
      tileInfo_(FileIO::GetTileInfo(header.GlobalSize, procsSize, rank)),
      tileSize_(tileInfo_.Size), tileCoord_(tileInfo_.GlobalCoord),
      tileType_(
          MpiSubarray({{header.GlobalSize.Rows, tileSize_.Rows, 0},
                       {header.GlobalSize.Cols + LF, tileSize_.Cols, 0}})),
      bufType_(MpiSubarray({{tileSize_.Rows + 2, tileSize_.Rows, 1},
                            {tileSize_.Cols + 2, tileSize_.Cols, 1}})),
      displ_(header.HeaderLength +
             (header.GlobalSize.Cols + LF) * tileCoord_.Y + tileCoord_.X) {}

void FileIO::Tile::Read() {
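	// collective read: the file view exposes only this tile's cells
	// (tileType_), which are received into the interior of the padded
	// buffer (bufType_)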
	MPI_File file;
	MPI_File_open(MPI_COMM_WORLD, path_.c_str(),
	              MPI_MODE_RDONLY | MPI_MODE_UNIQUE_OPEN, MPI_INFO_NULL, &file);
	MPI_File_set_view(file, static_cast<MPI_Offset>(displ_), MPI_CHAR,
	                  tileType_.type(), "native", MPI_INFO_NULL);
	MPI_File_read_all(file, buf_, 1, bufType_.type(), MPI_STATUS_IGNORE);
	MPI_File_close(&file);
}

void FileIO::Tile::Write() const {
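	// collective write: mirror of Read(), pushing the buffer interior into
	// this tile's footprint in the file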
	MPI_File file;
	MPI_File_open(MPI_COMM_WORLD, path_.c_str(),
	              MPI_MODE_CREATE | MPI_MODE_WRONLY, MPI_INFO_NULL, &file);
	MPI_File_set_view(file, static_cast<MPI_Offset>(displ_), MPI_CHAR,
	                  tileType_.type(), "native", MPI_INFO_NULL);
	MPI_File_write_all(file, buf_, 1, bufType_.type(), MPI_STATUS_IGNORE);

	// fix line feeds: this is done with a collective call, but only the
	// rightmost ranks actually write any line feeds

	// are we a rightMost tile?
	const auto rightMost = tileInfo_.ProcCoord.X == procsSize_.Cols - 1;
	const auto noLfNeeded = rightMost ? tileSize_.Rows : 0;
	const auto lfType = MpiSubarray( // subsize must be > 0
	    {{srcSize_.Rows, std::max<std::size_t>(noLfNeeded, 1), 0},
	     {srcSize_.Cols + LF, 1, 0}});
	const std::vector<char> lfs(noLfNeeded, '\n');

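	// byte offset of the line-feed column in this tile's first row: skip the
	// header, GlobalCoord.Y full file rows of (Cols + LF) characters, then
	// the Cols data characters of that row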
	const auto lfDisp = headerLength_ +
	                    tileInfo_.GlobalCoord.Y * (srcSize_.Cols + LF) +
	                    srcSize_.Cols;
	MPI_File_set_view(file, static_cast<MPI_Offset>(lfDisp), MPI_CHAR,
	                  lfType.type(), "native", MPI_INFO_NULL);

	// lfs is empty for non-rightmost ranks
	MPI_File_write_all(file, lfs.data(), static_cast<int>(lfs.size()), MPI_CHAR,
	                   MPI_STATUS_IGNORE);

	MPI_File_close(&file);
}