#include "FileIO.hpp"

#include <algorithm>
#include <fstream>
#include <iomanip>
#include <iostream>
#include <sstream>

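// On-disk layout (as implied by WriteHeader and the tile offsets below):
// a header line "<cols> <rows>\n", followed by <rows> rows of <cols>
// cell bytes, each row terminated by a line feed.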
HeaderInfo FileIO::ReadHeader(const std::string& path) {
	// read the header ("<cols> <rows>\n") into a small buffer; 20 bytes
	// should be sufficient
	MPI_File fh;
	MPI_File_open(MPI_COMM_SELF, path.c_str(),
	              MPI_MODE_RDONLY | MPI_MODE_UNIQUE_OPEN, MPI_INFO_NULL, &fh);
	constexpr auto HeaderBufSize = 20;
	std::array<char, HeaderBufSize> buf{}; // zero-init: the file may be shorter
	MPI_File_read_all(fh, buf.data(), buf.size(), MPI_CHAR, MPI_STATUS_IGNORE);
	MPI_File_close(&fh);

	// wrap the buffer in a stream
	// (copy it into a string: stringbuf::pubsetbuf is implementation-defined
	// and a no-op on some standard libraries)
	std::istringstream input{std::string{buf.data(), buf.size()}};

	// parse the stream
	std::size_t noCols{};
	std::size_t noRows{};
	input >> noCols >> noRows;
	if (noCols < 1 || noRows < 1) {
		std::cerr << "File header corrupt\n";
		MPI_Abort(MPI_COMM_WORLD, EXIT_FAILURE);
	}
	std::string dummy; // skip line break
	std::getline(input, dummy);
	const auto headerLength = input.tellg();
	return {
	    {noCols, noRows},
	    static_cast<std::size_t>(headerLength) // fits: at most HeaderBufSize
	};
}

void FileIO::WriteHeader(const HeaderInfo& header, const std::string& path,
                         const MpiEnvironment& env) {
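	// only the master rank writes the header, through a file opened on
	// MPI_COMM_SELF; all other ranks return immediately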
	if (!env.isMaster()) return;

	std::ostringstream ss;
	ss << header.GlobalSize.Cols << ' ' << header.GlobalSize.Rows << '\n';
	const auto buf = ss.str();

	MPI_File fh;
	MPI_File_open(MPI_COMM_SELF, path.c_str(),
	              MPI_MODE_WRONLY | MPI_MODE_CREATE, MPI_INFO_NULL, &fh);

	MPI_File_write(fh, buf.data(), buf.size(), MPI_CHAR, MPI_STATUS_IGNORE);
	MPI_File_close(&fh);
}

// TODO: this is not an I/O concern; it should move out of FileIO
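// note: integer division, so the global size is assumed to be evenly
// divisible by the grid size; any remainder cells would be dropped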
Size FileIO::GetTileSize(Size globalSize, Size gridSize) {
	const auto tileSizeCols = globalSize.Cols / gridSize.Cols;
	const auto tileSizeRows = globalSize.Rows / gridSize.Rows;
	return {tileSizeCols, tileSizeRows};
}

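// Derived members (ranks are laid out row-major across the process grid):
//  - _tileX/_tileY: this rank's column/row coordinates in the grid
//  - _tileType:     the tile's footprint inside the file, whose rows are
//                   GlobalSize.Cols + LF bytes wide (cells plus line feed)
//  - _bufType:      the tile's interior inside the local buffer, which is
//                   padded by a one-cell halo on every side
//  - _displ:        byte offset of the tile's upper-left cell in the file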
FileIO::Tile::Tile(const std::string& path, HeaderInfo header, Size gridSize,
                   std::size_t rank, gsl::span<State> buf)
    : _path(path), _headerLength(header.HeaderLength),
      _srcSize(header.GlobalSize), _gridSize(gridSize), _rank(rank), _buf(buf),
      _tileSize(FileIO::GetTileSize(header.GlobalSize, gridSize)), //
      _tileX(rank % gridSize.Cols),                                //
      _tileY(rank / gridSize.Cols),                                //
      _tileType(
          MpiSubarray({{header.GlobalSize.Rows, _tileSize.Rows, 0},
                       {header.GlobalSize.Cols + LF, _tileSize.Cols, 0}})),
      _bufType(MpiSubarray({{_tileSize.Rows + 2, _tileSize.Rows, 1},
                            {_tileSize.Cols + 2, _tileSize.Cols, 1}})),
      _displ(header.HeaderLength +
             (header.GlobalSize.Cols + LF) * _tileSize.Rows * _tileY +
             _tileSize.Cols * _tileX) {}

void FileIO::Tile::Read() {
	MPI_File file;
	MPI_File_open(MPI_COMM_WORLD, _path.c_str(),
	              MPI_MODE_RDONLY | MPI_MODE_UNIQUE_OPEN, MPI_INFO_NULL, &file);
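	// restrict this rank's view of the file to its own tile; from here on,
	// offsets are relative to _displ and filtered through _tileType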
	MPI_File_set_view(file, _displ, MPI_CHAR, _tileType.type(), "native",
	                  MPI_INFO_NULL);

	MPI_File_read_all(file, _buf.data(), 1, _bufType.type(), MPI_STATUS_IGNORE);
	MPI_File_close(&file);
}

void FileIO::Tile::Write() const {
	MPI_File file;
	MPI_File_open(MPI_COMM_WORLD, _path.c_str(),
	              MPI_MODE_CREATE | MPI_MODE_WRONLY, MPI_INFO_NULL, &file);
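	// same view as in Read(): each rank sees only its own tile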
	MPI_File_set_view(file, _displ, MPI_CHAR, _tileType.type(), "native",
	                  MPI_INFO_NULL);

	MPI_File_write_all(file, _buf.data(), 1, _bufType.type(),
	                   MPI_STATUS_IGNORE);

	// fix line feeds
	// this is done with a collective call, but only the rightmost
	// ranks actually write line feeds

	// are we a rightmost tile?
	const auto rightMost = _tileX == _gridSize.Cols - 1;
	const auto bottomMost = _tileY == _gridSize.Rows - 1;
	// rightmost tiles terminate each of their rows with a line feed; the
	// bottom-right tile writes one fewer, so the file's last row carries
	// no trailing newline
	const auto noLfNeeded =
	    rightMost ? (bottomMost ? _tileSize.Rows - 1 : _tileSize.Rows) : 0;
	// MPI subarray subsizes must be positive, so ranks that write no line
	// feeds still build a one-row type; they then write zero elements below
	const auto lfType = MpiSubarray(
	    {{_srcSize.Rows, std::max<std::size_t>(noLfNeeded, 1), 0},
	     {_srcSize.Cols + LF, 1, 0}});
	const std::vector<char> lfs(noLfNeeded, '\n');

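	// byte offset of the line-feed column (column _srcSize.Cols) in this
	// tile's first row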
	const auto lfDisp = _headerLength +
	                    (_srcSize.Cols + LF) * _tileSize.Rows * _tileY +
	                    _srcSize.Cols;

	MPI_File_set_view(file, lfDisp, MPI_CHAR, lfType.type(), "native",
	                  MPI_INFO_NULL);

	// lfs is empty for non-rightmost ranks
	MPI_File_write_all(file, lfs.data(), lfs.size(), MPI_CHAR,
	                   MPI_STATUS_IGNORE);

	MPI_File_close(&file);
}