// FileIO.cpp
#include "FileIO.hpp"

#include <algorithm>
#include <array>
#include <fstream>
#include <iomanip>
#include <iostream>
#include <sstream>
#include <string>
#include <vector>

/// Reads the grid-file header ("<cols> <rows>\n") from the start of the file.
/// Opened on MPI_COMM_SELF, so every calling rank reads independently.
/// Aborts (via MpiReportErrorAbort) on CRLF line endings or a corrupt header.
/// Returns the parsed global size and the header length in bytes.
HeaderInfo FileIO::ReadHeader(const std::string& path) {
	// Read the first bytes of the file; the header is assumed to fit in 20.
	constexpr auto HeaderBufSize = 20;
	MPI_File fh;
	MPI_File_open(MPI_COMM_SELF, path.c_str(),
	              MPI_MODE_RDONLY | MPI_MODE_UNIQUE_OPEN, MPI_INFO_NULL, &fh);
	// Zero-initialize: if the file is shorter than HeaderBufSize, the tail
	// would otherwise hold indeterminate bytes and the '\r' scan below
	// would read uninitialized memory.
	std::array<char, HeaderBufSize> buf{};
	MPI_File_read_all(fh, buf.data(), static_cast<int>(buf.size()), MPI_CHAR,
	                  MPI_STATUS_IGNORE);
	MPI_File_close(&fh);

	// Reject Windows (CRLF) line endings; the file layout elsewhere assumes
	// exactly one '\n' per row.
	if (std::find(std::begin(buf), std::end(buf), '\r') != std::end(buf)) {
		MpiReportErrorAbort("Invalid newline character detected!");
	}

	// Parse from a copy of the buffer. (The previous pubsetbuf() approach is
	// implementation-defined for std::stringbuf and silently does nothing on
	// some standard libraries.)
	std::istringstream input(std::string(buf.data(), buf.size()));

	std::size_t noCols{};
	std::size_t noRows{};
	input >> noCols >> noRows;
	if (!input || noCols < 1 || noRows < 1) {
		MpiReportErrorAbort("File header corrupt");
	}
	std::string dummy; // skip the rest of the header line (the '\n')
	std::getline(input, dummy);
	const auto headerLength = input.tellg();
	return {
	    {noCols, noRows},
	    static_cast<std::size_t>(
	        headerLength) // should fit (max is HeaderBufSize)
	};
}

void FileIO::WriteHeader(const HeaderInfo& header, const std::string& path,
                         const MpiEnvironment& env) {
	if (!env.isMaster()) return;

	// write header into buffer
	std::ostringstream ss;
	ss << header.GlobalSize.Cols << ' ' << header.GlobalSize.Rows << '\n';
	const auto buf = ss.str();

	// write buffer
	MPI_File fh;
	MPI_File_open(MPI_COMM_SELF, path.c_str(),
	              MPI_MODE_WRONLY | MPI_MODE_CREATE, MPI_INFO_NULL, &fh);
	MPI_File_write(fh, buf.data(), static_cast<int>(buf.size()), MPI_CHAR,
	               MPI_STATUS_IGNORE);
	MPI_File_close(&fh);
}

/// Computes this rank's tile: its size, its cell offset in the global grid,
/// and its coordinate in the process grid.
/// globalSize: global grid extent; procsSize: process-grid extent;
/// rank: linear rank, laid out row-major over the process grid.
TileInfo FileIO::GetTileInfo(Size globalSize, Size procsSize,
                             std::size_t rank) {
	// Position of this rank in the 2D process grid (row-major).
	const auto tileX = rank % procsSize.Cols;
	const auto tileY = rank / procsSize.Cols;
	// Even block distribution: tile t covers [t*N/P, (t+1)*N/P) per axis, so
	// tile sizes differ by at most one and the whole domain is covered.
	const auto xBeg = (tileX + 0) * globalSize.Cols / procsSize.Cols;
	const auto xEnd = (tileX + 1) * globalSize.Cols / procsSize.Cols;
	const auto yBeg = (tileY + 0) * globalSize.Rows / procsSize.Rows;
	const auto yEnd = (tileY + 1) * globalSize.Rows / procsSize.Rows;

	const auto tileSizeCols = xEnd - xBeg;
	const auto tileSizeRows = yEnd - yBeg;
	return {{tileSizeCols, tileSizeRows}, {xBeg, yBeg}, {tileX, tileY}};
} // BUG FIX: this closing brace was missing in the original source
// Constructs per-rank tile bookkeeping for parallel grid-file I/O:
// derives this rank's tile geometry, builds the file-side and memory-side
// MPI subarray types, and precomputes the tile's byte offset in the file.
// `buf` is stored as a raw pointer and used by Read()/Write(); it is not
// copied (lifetime managed by the caller).
FileIO::Tile::Tile(const std::string& path, HeaderInfo header, Size procsSize,
                   std::size_t rank, State* buf)
    // NOTE(review): several initializers read earlier members (_tileSize and
    // _tileCoord use _tileInfo; _displ uses _tileCoord). Actual init order is
    // the member DECLARATION order in FileIO.hpp, not the order written here —
    // confirm the header declares them in this order.
    : _path(path), _headerLength(header.HeaderLength),
      _srcSize(header.GlobalSize), _procsSize(procsSize), _buf(buf),
      _tileInfo(FileIO::GetTileInfo(header.GlobalSize, procsSize, rank)),
      _tileSize(_tileInfo.Size), _tileCoord(_tileInfo.GlobalCoord),
      // File-side subarray: this tile within the global grid, where each
      // file row is GlobalSize.Cols cells plus LF byte(s) for the newline.
      _tileType(
          MpiSubarray({{header.GlobalSize.Rows, _tileSize.Rows, 0},
                       {header.GlobalSize.Cols + LF, _tileSize.Cols, 0}})),
      // Memory-side subarray: the interior of a buffer padded by a one-cell
      // halo on every side (hence the +2 / start-at-1).
      _bufType(MpiSubarray({{_tileSize.Rows + 2, _tileSize.Rows, 1},
                            {_tileSize.Cols + 2, _tileSize.Cols, 1}})),
      // Byte offset of the tile's first cell: header, then GlobalCoord.Y
      // full file rows, then GlobalCoord.X cells into that row.
      _displ(header.HeaderLength +

             (header.GlobalSize.Cols + LF) * _tileCoord.Y + _tileCoord.X) {}

void FileIO::Tile::Read() {
	MPI_File file;
	MPI_File_open(MPI_COMM_WORLD, _path.c_str(),
	              MPI_MODE_RDONLY | MPI_MODE_UNIQUE_OPEN, MPI_INFO_NULL, &file);
	MPI_File_set_view(file, static_cast<MPI_Offset>(_displ), MPI_CHAR,
	                  _tileType.type(), "native", MPI_INFO_NULL);
	MPI_File_read_all(file, _buf, 1, _bufType.type(), MPI_STATUS_IGNORE);
	MPI_File_close(&file);
}

// Collectively writes this rank's tile (the interior of the halo-padded
// buffer) into its slot in the shared grid file, then fixes up the row
// line feeds. All ranks must call this together: both MPI_File_write_all
// calls are collective.
void FileIO::Tile::Write() const {
	MPI_File file;
	MPI_File_open(MPI_COMM_WORLD, _path.c_str(),
	              MPI_MODE_CREATE | MPI_MODE_WRONLY, MPI_INFO_NULL, &file);
	MPI_File_set_view(file, static_cast<MPI_Offset>(_displ), MPI_CHAR,
	                  _tileType.type(), "native", MPI_INFO_NULL);
	MPI_File_write_all(file, _buf, 1, _bufType.type(), MPI_STATUS_IGNORE);

	/// fix line feeds
	// This is done with a collective call (so every rank participates), but
	// only the rightmost column of ranks actually writes line feeds.

	// are we a rightMost tile?
	const auto rightMost = _tileInfo.ProcCoord.X == _procsSize.Cols - 1;
	const auto noLfNeeded = rightMost ? _tileSize.Rows : 0;
	// The subarray subsize must stay > 0 even on ranks that write nothing,
	// hence max(...,1); those ranks then write 0 elements below.
	const auto lfType = MpiSubarray( // subsize must be > 0
	    {{_srcSize.Rows, std::max<std::size_t>(noLfNeeded, 1), 0},
	     {_srcSize.Cols + LF, 1, 0}});
	const std::vector<char> lfs(noLfNeeded, '\n');

	// Offset of the LF slot of this tile's first row: header, then
	// GlobalCoord.Y full file rows, then a complete row of data cells.
	const auto lfDisp = _headerLength +
	                    _tileInfo.GlobalCoord.Y * (_srcSize.Cols + LF) +
	                    _srcSize.Cols;
	MPI_File_set_view(file, static_cast<MPI_Offset>(lfDisp), MPI_CHAR,
	                  lfType.type(), "native", MPI_INFO_NULL);

	// lfs is empty for non-rightmost ranks, so they contribute a
	// zero-length (but still collective) write
	MPI_File_write_all(file, lfs.data(), static_cast<int>(lfs.size()), MPI_CHAR,
	                   MPI_STATUS_IGNORE);

	MPI_File_close(&file);
}