GADGET-4
lightcone_particle_io.cc
Go to the documentation of this file.
1 /*******************************************************************************
2  * \copyright This file is part of the GADGET4 N-body/SPH code developed
3  * \copyright by Volker Springel. Copyright (C) 2014-2020 by Volker Springel
4  * \copyright (vspringel@mpa-garching.mpg.de) and all contributing authors.
5  *******************************************************************************/
6 
12 #include "gadgetconfig.h"
13 
14 #if defined(LIGHTCONE) && defined(LIGHTCONE_PARTICLES)
15 
16 #include <gsl/gsl_rng.h>
17 #include <hdf5.h>
18 #include <math.h>
19 #include <mpi.h>
20 #include <stdio.h>
21 #include <stdlib.h>
22 #include <string.h>
23 #include <sys/stat.h>
24 #include <sys/types.h>
25 #include <unistd.h>
26 
27 #include "../data/allvars.h"
28 #include "../data/dtypes.h"
29 #include "../data/mymalloc.h"
30 #include "../io/hdf5_util.h"
31 #include "../io/io.h"
32 #include "../lightcone/lightcone.h"
33 #include "../lightcone/lightcone_particle_io.h"
34 #include "../main/simulation.h"
35 #include "../mpi_utils/mpi_utils.h"
36 #include "../system/system.h"
37 
/* Constructor: registers every I/O field that makes up a particle lightcone
 * output and sets up the header / data-group bookkeeping for this file type.
 *
 * Lp_ptr        - lightcone particle storage the fields point into
 * LightCone_ptr - lightcone geometry object (used for cone-membership tests)
 * MergerTree_ptr- (MERGERTREE only) tree table written alongside reordered data
 * comm          - MPI communicator shared by the tasks doing the I/O
 * format        - file format code passed through to IO_Def
 */
#ifdef MERGERTREE
lightcone_particle_io::lightcone_particle_io(lcparticles *Lp_ptr, lightcone *LightCone_ptr, mergertree *MergerTree_ptr, MPI_Comm comm,
                                             int format)
    : IO_Def(comm, format)
#else
lightcone_particle_io::lightcone_particle_io(lcparticles *Lp_ptr, lightcone *LightCone_ptr, MPI_Comm comm, int format)
    : IO_Def(comm, format)
#endif
{
  Lp        = Lp_ptr;
  LightCone = LightCone_ptr;
#ifdef MERGERTREE
  MergerTree = MergerTree_ptr;
#endif

  this->N_IO_Fields  = 0;
  this->N_DataGroups = NTYPES + 2;
  /* the +1 data group is a tree table used only for storing reordered lightcone data,
     the +2 data group is a coarse healpix table used only for storing normal lightcone data */

  this->header_size  = sizeof(header);
  this->header_buf   = &header;
  this->type_of_file = FILE_IS_LIGHTCONE;
  sprintf(this->info, "LIGHTCONE: writing particle lightcone data");

  init_field("POS ", "Coordinates", MEM_MY_DOUBLE, FILE_MY_IO_FLOAT, READ_IF_PRESENT, 3, A_LC, NULL, io_func_pos, ALL_TYPES, 1, 1.,
             -1., 1., 0., 0., All.UnitLength_in_cm, true);

#ifdef OUTPUT_VELOCITIES_IN_HALF_PRECISION
  init_field("VEL ", "Velocities", MEM_MY_FLOAT, FILE_HALF, READ_IF_PRESENT, 3, A_LC, &Lp->P[0].Vel[0], NULL, ALL_TYPES, 1, 0.5, 0.,
             0., 0., 1., All.UnitVelocity_in_cm_per_s);
#else
  init_field("VEL ", "Velocities", MEM_MY_FLOAT, FILE_MY_IO_FLOAT, READ_IF_PRESENT, 3, A_LC, &Lp->P[0].Vel[0], NULL, ALL_TYPES, 1, 0.5,
             0., 0., 0., 1., All.UnitVelocity_in_cm_per_s);
#endif

#ifdef LIGHTCONE_OUTPUT_ACCELERATIONS
#ifdef OUTPUT_ACCELERATIONS_IN_HALF_PRECISION
  All.accel_normalize_fac = 10.0 * All.Hubble * (100.0 * 1.0e5 / All.UnitVelocity_in_cm_per_s);
  /* TODO(review): the trailing arguments of the two ACCE init_field() calls were
     truncated in this copy of the file and have been reconstructed — verify the
     unit-conversion factors against the upstream GADGET-4 sources */
  init_field("ACCE", "Acceleration", MEM_MY_FLOAT, FILE_HALF, READ_IF_PRESENT, 3, A_LC, NULL, io_func_accel, ALL_TYPES, 1, -2.0, 1, -1,
             0., 2., All.accel_normalize_fac * All.UnitVelocity_in_cm_per_s / All.UnitTime_in_s);
#else
  init_field("ACCE", "Acceleration", MEM_MY_FLOAT, FILE_MY_IO_FLOAT, READ_IF_PRESENT, 3, A_LC, &Lp->P[0].GravAccel[0], NULL, ALL_TYPES,
             1, -2.0, 1, -1, 0., 2., All.UnitVelocity_in_cm_per_s / All.UnitTime_in_s);
#endif
#endif

  init_field("ID  ", "ParticleIDs", MEM_MY_ID_TYPE, FILE_MY_ID_TYPE, READ_IF_PRESENT, 1, A_LC, NULL, io_func_id, ALL_TYPES, 0, 0, 0, 0,
             0, 0, 0, true);

#ifndef LEAN
  init_field("MASS", "Masses", MEM_MY_DOUBLE, FILE_MY_IO_FLOAT, READ_IF_PRESENT, 1, A_LC, NULL, io_func_mass, MASS_BLOCK, 1, 0., -1.,
             0., 1., 0., All.UnitMass_in_g);
#endif

#ifndef LEAN
  /* scale factor at which the particle crossed the lightcone */
  init_field("ASCL", "Ascale", MEM_MY_FLOAT, FILE_MY_IO_FLOAT, READ_IF_PRESENT, 1, A_LC, &Lp->P[0].Ascale, NULL, ALL_TYPES, 0, 0, 0, 0,
             0, 0, 0);
#endif

#ifdef REARRANGE_OPTION
  init_field("MTRI", "TreeID", MEM_INT64, FILE_INT64, SKIP_ON_READ, 1, A_LC, &Lp->P[0].TreeID, NULL, ALL_TYPES, 0, 0, 0, 0, 0, 0, 0);
#endif

#if defined(SUBFIND) && defined(SUBFIND_STORE_LOCAL_DENSITY)
  init_field("SFDE", "SubfindDensity", MEM_MY_FLOAT, All.RestartFlag != RST_CREATEICS ? FILE_MY_IO_FLOAT : FILE_NONE, SKIP_ON_READ, 1,
             A_PS, &Lp->PS[0].SubfindDensity, 0, ALL_TYPES, /* subfind density */
             1, -3., 2., -3., 1., 0., All.UnitDensity_in_cgs);

  init_field("SFHS", "SubfindHsml", MEM_MY_FLOAT, All.RestartFlag != RST_CREATEICS ? FILE_MY_IO_FLOAT : FILE_NONE, SKIP_ON_READ, 1,
             A_PS, &Lp->PS[0].SubfindHsml, 0, ALL_TYPES, /* subfind hsml */
             1, 1., -1., 1., 0., 0., All.UnitLength_in_cm);

  init_field("SFVD", "SubfindVelDisp", MEM_MY_FLOAT, All.RestartFlag != RST_CREATEICS ? FILE_MY_IO_FLOAT : FILE_NONE, SKIP_ON_READ, 1,
             A_PS, &Lp->PS[0].SubfindVelDisp, 0, ALL_TYPES, /* subfind velocity dispersion */
             1, 0., 0., 0., 0., 1., All.UnitVelocity_in_cm_per_s);
#endif

#ifdef MERGERTREE
  /* fields of the tree table data group (written only for reordered lightcones) */
  init_field("MTRL", "ParticleCount", MEM_INT, FILE_INT, SKIP_ON_READ, 1, A_TT, &MergerTree->TreeTable[0].HaloCount, NULL, TREETABLE,
             0, 0, 0, 0, 0, 0, 0, true);
  init_field("MTRS", "ParticleFirst", MEM_INT64, FILE_INT64, SKIP_ON_READ, 1, A_TT, &MergerTree->TreeTable[0].FirstHalo, NULL,
             TREETABLE, 0, 0, 0, 0, 0, 0, 0, true);
  init_field("MTRI", "TreeID", MEM_INT64, FILE_INT64, SKIP_ON_READ, 1, A_TT, &MergerTree->TreeTable[0].TreeID, NULL, TREETABLE, 0, 0,
             0, 0, 0, 0, 0, true);
#endif

  /* coarse healpix hash table (written only for normal, un-reordered lightcones) */
  init_field("HPHT", "ParticleCount", MEM_INT, FILE_INT, SKIP_ON_READ, 1, A_MM, &Lp->HealPixTab_PartCount[0], NULL, HEALPIXTAB, 0, 0,
             0, 0, 0, 0, 0, true);
}
134 
/* Read a particle lightcone back from disk.
 *
 * num    - output number of the lightcone snapshot to read
 * conenr - index of the lightcone the data belongs to
 *
 * The files are scanned twice: pass 0 only counts the particles per task so
 * that storage can be allocated, pass 1 actually fills the particle data.
 */
void lightcone_particle_io::lightcone_read(int num, int conenr)
{
  double t0 = Logs.second();

  Lp->TotNumPart = 0;

  char fname[2 * MAXLEN_PATH];

  /* snprintf instead of sprintf: an overlong OutputDir cannot overrun the path buffer */
  if(All.NumFilesPerSnapshot > 1)
    snprintf(fname, sizeof(fname), "%s/lightcone_%02d/conedir_%04d/%s_%04d", All.OutputDir, conenr, num, "conesnap", num);
  else
    snprintf(fname, sizeof(fname), "%s/lightcone_%02d/%s_%04d", All.OutputDir, conenr, "conesnap", num);

  int num_files = find_files(fname, fname);

  reset_io_byte_count();

  for(int rep = 0; rep < 2; rep++)
    {
      Lp->NumPart = 0;

      read_files_driver(fname, rep, num_files);

      /* now do the memory allocation */
      if(rep == 0)
        {
          Lp->MaxPart = Lp->NumPart;
          Lp->allocate_memory(); /* allocate lightcone particle storage */
        }
    }

  MPI_Barrier(Communicator);

  /* accumulate the bytes read by all tasks for the I/O-rate report */
  long long byte_count = get_io_byte_count(), byte_count_all;
  sumup_longs(1, &byte_count, &byte_count_all, Communicator);

  double t1 = Logs.second();

  mpi_printf("LIGHTCONE-READ: reading done. Took %g sec, total size %g MB, corresponds to effective I/O rate of %g MB/sec\n",
             Logs.timediff(t0, t1), byte_count_all / (1024.0 * 1024.0), byte_count_all / (1024.0 * 1024.0) / Logs.timediff(t0, t1));

  mpi_printf("\nLIGHTCONE-READ: Total number of particles : %lld\n\n", Lp->TotNumPart);
}
178 
/* Write the particles of lightcone 'conenr' as output number 'num'.
 *
 * reordered_flag selects between the two output flavours:
 *   false - "normal" lightcone: only particles that pass the cone-membership
 *           test are written, together with a coarse healpix hash table;
 *   true  - tree-ordered lightcone ("lightcone_treeorder"): all particles in
 *           Lp are written and no healpix table is produced.
 *
 * Side effects: sets the class members 'cone' and 'reorder_flag' (consumed by
 * fill_file_header() and get_type_of_element() during the write), and in the
 * normal case temporarily allocates Lp->HealPixTab_PartCount.
 */
void lightcone_particle_io::lightcone_save(int num, int conenr, bool reordered_flag)
{
  char buf[3 * MAXLEN_PATH];

  cone = conenr; /* note: cone is here a variable of the class, NOT a local variable */
  reorder_flag = reordered_flag;

  /* determine local and global particle numbers for current lightcone */

  int n_type[NTYPES];

  /* determine global particle numbers for file header */
  for(int n = 0; n < NTYPES; n++)
    n_type[n] = 0;

  for(int n = 0; n < Lp->NumPart; n++)
    {
      /* in reordered mode every particle counts; otherwise only cone members */
      if(reorder_flag || LightCone->lightcone_is_cone_member(n, cone))
        n_type[Lp->P[n].getType()]++;
    }

  /* ntot_type_all is a class member read later by fill_file_header() */
  sumup_large_ints(NTYPES, n_type, ntot_type_all, Communicator);

  if(!reorder_flag)
    {
      /* prepare healpix look-up table */
      Lp->Npix = nside2npix(LIGHTCONE_ORDER_NSIDE);

      /* each task owns a contiguous slab [FirstPix, FirstPix + NpixLoc) of pixels */
      subdivide_evenly(Lp->Npix, NTask, ThisTask, &Lp->FirstPix, &Lp->NpixLoc);

      Lp->HealPixTab_PartCount = (int *)Mem.mymalloc("HealPixTab_PartCount", Lp->NpixLoc * sizeof(int));

      /* histogram the local cone members over ALL pixels, then reduce globally
         and keep only this task's slab */
      int *tmp_PartCount = (int *)Mem.mymalloc_clear("tmp_PartCount", Lp->Npix * sizeof(int));
      for(int n = 0; n < Lp->NumPart; n++)
        {
          if(LightCone->lightcone_is_cone_member(n, cone))
            tmp_PartCount[Lp->P[n].ipnest]++;
        }

      MPI_Allreduce(MPI_IN_PLACE, tmp_PartCount, Lp->Npix, MPI_INT, MPI_SUM, Communicator);

      memcpy(Lp->HealPixTab_PartCount, tmp_PartCount + Lp->FirstPix, Lp->NpixLoc * sizeof(int));

      Mem.myfree(tmp_PartCount);
    }
  else
    Lp->Npix = Lp->NpixLoc = 0;

  mpi_printf("\nLIGHTCONE: writing cone=%d\n", conenr);

  char lname[MAXLEN_PATH];
  if(reordered_flag)
    sprintf(lname, "lightcone_treeorder");
  else
    sprintf(lname, "lightcone");

  /* create the per-cone parent directory (only needed for multi-file output);
     NOTE(review): the inner 'buf' declarations shadow the outer one — harmless
     here, but worth cleaning up */
  if(All.NumFilesPerSnapshot > 1)
    {
      if(ThisTask == 0)
        {
          char buf[3 * MAXLEN_PATH];
          sprintf(buf, "%s/%s_%02d", All.OutputDir, lname, cone);
          mkdir(buf, 02755);
        }
      MPI_Barrier(Communicator);
    }

  /* NOTE(review): the conedir_%04d directory is created unconditionally, but the
     single-file filename below does not use it — confirm against upstream whether
     this mkdir should also be guarded by NumFilesPerSnapshot > 1 */
  if(ThisTask == 0)
    {
      char buf[3 * MAXLEN_PATH];
      sprintf(buf, "%s/%s_%02d/conedir_%04d", All.OutputDir, lname, cone, num);
      mkdir(buf, 02755);
    }
  MPI_Barrier(Communicator); /* all tasks wait until the directories exist */

  if(All.NumFilesPerSnapshot > 1)
    sprintf(buf, "%s/%s_%02d/conedir_%04d/%s_%04d", All.OutputDir, lname, cone, num, "conesnap", num);
  else
    sprintf(buf, "%s/%s_%02d/%s_%04d", All.OutputDir, lname, cone, "conesnap", num);

  write_multiple_files(buf, All.NumFilesPerSnapshot);

  /* release the healpix table allocated above for the normal-lightcone case */
  if(!reorder_flag)
    {
      Mem.myfree(Lp->HealPixTab_PartCount);
      Lp->HealPixTab_PartCount = NULL;
    }
}
267 
/* Fill the file header for the file written jointly by tasks
 * [writeTask, lastTask].
 *
 * n_type    - out: this task's element counts per data group (NTYPES particle
 *             types + tree table + healpix table)
 * ntot_type - out: summed counts over all tasks contributing to this file
 *
 * The summation uses a hand-rolled gather/broadcast: every non-write task
 * sends its counts to writeTask (TAG_LOCALN) and receives the file totals
 * back (TAG_N). The send/recv order must match exactly on both sides.
 */
void lightcone_particle_io::fill_file_header(int writeTask, int lastTask, long long *n_type, long long *ntot_type)
{
  /* determine global and local particle numbers */
  for(int n = 0; n < NTYPES + 2; n++)
    n_type[n] = 0;

  /* count only the particles this task will actually write (all of them in
     reordered mode, cone members otherwise) */
  for(int n = 0; n < Lp->NumPart; n++)
    if(reorder_flag || LightCone->lightcone_is_cone_member(n, cone))
      if(Lp->P[n].getType() < NTYPES)
        n_type[Lp->P[n].getType()]++;

#ifdef MERGERTREE
  n_type[NTYPES + 0] = MergerTree->Ntrees;
#else
  n_type[NTYPES + 0] = 0;
#endif

  n_type[NTYPES + 1] = Lp->NpixLoc;

  /* determine particle numbers of each type in file */
  if(ThisTask == writeTask)
    {
      for(int n = 0; n < NTYPES + 2; n++)
        ntot_type[n] = n_type[n];

      /* collect counts from all other tasks writing into this file */
      for(int task = writeTask + 1; task <= lastTask; task++)
        {
          long long nn[NTYPES + 2];
          MPI_Recv(&nn[0], NTYPES + 2, MPI_LONG_LONG, task, TAG_LOCALN, Communicator, MPI_STATUS_IGNORE);
          for(int n = 0; n < NTYPES + 2; n++)
            ntot_type[n] += nn[n];
        }

      /* distribute the file totals back to those tasks */
      for(int task = writeTask + 1; task <= lastTask; task++)
        MPI_Send(&ntot_type[0], NTYPES + 2, MPI_LONG_LONG, task, TAG_N, Communicator);
    }
  else
    {
      MPI_Send(&n_type[0], NTYPES + 2, MPI_LONG_LONG, writeTask, TAG_LOCALN, Communicator);
      MPI_Recv(&ntot_type[0], NTYPES + 2, MPI_LONG_LONG, writeTask, TAG_N, Communicator, MPI_STATUS_IGNORE);
    }

  /* fill file header */

  for(int n = 0; n < NTYPES; n++)
    {
      header.npart[n]      = ntot_type[n]; /* this file */
      header.npartTotal[n] = ntot_type_all[n]; /* all files (set in lightcone_save) */
    }

  /* the tree table exists only in reordered output, the healpix table only in
     normal output — zero whichever does not apply */
  if(reorder_flag)
    {
      header.Ntrees = ntot_type[NTYPES];
#ifdef MERGERTREE
      header.TotNtrees = MergerTree->TotNtrees;
#else
      header.TotNtrees = 0;
#endif
      header.Npix    = 0;
      header.TotNpix = 0;
    }
  else
    {
      header.Ntrees    = 0;
      header.TotNtrees = 0;

      header.Npix    = ntot_type[NTYPES + 1];
      header.TotNpix = Lp->Npix;
    }

  header.num_files = All.NumFilesPerSnapshot;
}
340 
/* Write the header attributes of a lightcone file into the already opened
 * HDF5 group 'handle'. Tree-table and healpix attributes are emitted only
 * when the corresponding tables are present in this output flavour. */
void lightcone_particle_io::write_header_fields(hid_t handle)
{
  const bool have_trees  = (header.TotNtrees > 0);
  const bool have_healpix = (header.TotNpix > 0);

  /* particle bookkeeping, always present */
  write_vector_attribute(handle, "NumPart_ThisFile", header.npart, H5T_NATIVE_UINT64, NTYPES);
  write_vector_attribute(handle, "NumPart_Total", header.npartTotal, H5T_NATIVE_UINT64, NTYPES);
  write_scalar_attribute(handle, "NumFiles", &header.num_files, H5T_NATIVE_INT);

  if(have_trees)
    {
      write_scalar_attribute(handle, "Ntrees_ThisFile", &header.Ntrees, H5T_NATIVE_UINT64);
      write_scalar_attribute(handle, "Ntrees_Total", &header.TotNtrees, H5T_NATIVE_UINT64);
    }

  if(have_healpix)
    {
      write_scalar_attribute(handle, "Npix_ThisFile", &header.Npix, H5T_NATIVE_UINT32);
      write_scalar_attribute(handle, "Npix_Total", &header.TotNpix, H5T_NATIVE_UINT32);
    }
}
359 
360 void lightcone_particle_io::set_filenr_in_header(int numfiles) { header.num_files = numfiles; }
361 
362 void lightcone_particle_io::get_datagroup_name(int type, char *buf)
363 {
364  if(type < NTYPES)
365  sprintf(buf, "/PartType%d", type);
366  else if(type == NTYPES)
367  sprintf(buf, "/TreeTable");
368  else if(type == NTYPES + 1)
369  sprintf(buf, "/HealPixHashTable");
370  else
371  Terminate("wrong group");
372 }
373 
374 void *lightcone_particle_io::get_base_address_of_structure(enum arrays array, int index)
375 {
376  switch(array)
377  {
378  case A_LC:
379  return (void *)(Lp->P + index);
380 
381  case A_PS:
382  return (void *)(Lp->PS + index);
383 
384 #ifdef MERGERTREE
385  case A_TT:
386  return (void *)(MergerTree->TreeTable + index);
387 #endif
388 
389  case A_MM:
390  return (void *)(Lp->HealPixTab_PartCount + index);
391 
392  default:
393  Terminate("we don't expect to get here");
394  }
395 
396  return NULL;
397 }
398 
/* Process the header of lightcone file 'filenr', read collectively by tasks
 * [readTask, lastTask].
 *
 * n_type    - out: number of elements per data group this task will read
 * ntot_type - out: number of elements per data group in this file
 * nstart    - if non-NULL, existing particles are shifted to make room at the
 *             front of Lp->P and *nstart is set to the insertion offset
 */
void lightcone_particle_io::read_file_header(const char *fname, int filenr, int readTask, int lastTask, long long *n_type,
                                             long long *ntot_type, int *nstart)
{
  /* the extra data groups (tree table, healpix table) are never read back */
  n_type[NTYPES]        = 0;
  ntot_type[NTYPES]     = 0;
  n_type[NTYPES + 1]    = 0;
  ntot_type[NTYPES + 1] = 0;

  /* first file processed: derive the global particle count from the header */
  if(Lp->TotNumPart == 0)
    {
      /* for single-file outputs the per-file counts ARE the totals */
      if(header.num_files <= 1)
        for(int i = 0; i < NTYPES; i++)
          header.npartTotal[i] = header.npart[i];

      Lp->TotNumPart = 0;

      for(int i = 0; i < NTYPES; i++)
        Lp->TotNumPart += header.npartTotal[i];
    }

  if(ThisTask == readTask)
    {
      /* print the type breakdown once, for the first file of a fresh read */
      if(filenr == 0 && nstart == NULL)
        {
          for(int type = 0; type < NTYPES; type++)
            {
              mpi_printf("READ-LIGHTCONE: Type %d: %8lld (tot=%15lld)\n", type, (long long)header.npart[type],
                         (long long)header.npartTotal[type]);
            }
          mpi_printf("\n");
        }
    }

  /* to collect the gas particles all at the beginning (in case several
     snapshot files are read on the current CPU) we move the collisionless
     particles such that a gap of the right size is created */

  long long nall = 0;
  for(int type = 0; type < NTYPES; type++)
    {
      ntot_type[type] = header.npart[type];

      /* split this file's particles of each type evenly over the reading
         tasks; the first (n_in_file % ntask) tasks take one extra */
      long long n_in_file = header.npart[type];
      int ntask           = lastTask - readTask + 1;
      int n_for_this_task = n_in_file / ntask;
      if((ThisTask - readTask) < (n_in_file % ntask))
        n_for_this_task++;

      n_type[type] = n_for_this_task;

      nall += n_for_this_task;
    }

  if(nstart)
    {
      /* shift the already-present particles up by 'nall' slots so the new
         ones can be placed at the front, starting at index *nstart = 0 */
      memmove(&Lp->P[nall], &Lp->P[0], Lp->NumPart * sizeof(lightcone_particle_data));

      *nstart = 0;
    }
}
459 
/* Read the header attributes from lightcone file 'fname' into the class
 * member 'header'. Aborts if the file's NumPart_ThisFile attribute does not
 * have exactly NTYPES entries (i.e. was written with a different NTYPES). */
void lightcone_particle_io::read_header_fields(const char *fname)
{
  /* reset everything first; fields absent from the file stay zero */
  for(int i = 0; i < NTYPES; i++)
    {
      header.npart[i]      = 0;
      header.npartTotal[i] = 0;
    }

  header.Ntrees    = 0;
  header.TotNtrees = 0;

  int ntypes = NTYPES;

  hid_t hdf5_file = my_H5Fopen(fname, H5F_ACC_RDONLY, H5P_DEFAULT);
  hid_t handle    = my_H5Gopen(hdf5_file, "/Header");

  /* check if the file in question actually has this number of types */
  hid_t hdf5_attribute = my_H5Aopen_name(handle, "NumPart_ThisFile");
  hid_t space          = H5Aget_space(hdf5_attribute);
  hsize_t dims, len;
  H5Sget_simple_extent_dims(space, &dims, &len);
  H5Sclose(space);
  if(len != (size_t)ntypes)
    Terminate("Length of NumPart_ThisFile attribute (%d) does not match NTYPES(ICS) (%d)", (int)len, ntypes);
  my_H5Aclose(hdf5_attribute, "NumPart_ThisFile");

  /* now read the header fields */
  read_vector_attribute(handle, "NumPart_ThisFile", header.npart, H5T_NATIVE_UINT64, ntypes);
  read_vector_attribute(handle, "NumPart_Total", header.npartTotal, H5T_NATIVE_UINT64, ntypes);
  read_scalar_attribute(handle, "NumFiles", &header.num_files, H5T_NATIVE_INT);

  my_H5Gclose(handle, "/Header");
  my_H5Fclose(hdf5_file, fname);
}
494 
495 void lightcone_particle_io::read_increase_numbers(int type, int n_for_this_task) { Lp->NumPart += n_for_this_task; }
496 
497 int lightcone_particle_io::get_filenr_from_header(void) { return header.num_files; }
498 
499 int lightcone_particle_io::get_type_of_element(int index)
500 {
501  if(index < 0 || index >= Lp->NumPart)
502  Terminate("index = %d Lp->NumPart=%d", index, Lp->NumPart);
503 
504  if(reorder_flag || LightCone->lightcone_is_cone_member(index, cone))
505  return Lp->P[index].getType();
506  else
507  return -1; /* this will skip this particle */
508 }
509 
510 void lightcone_particle_io::set_type_of_element(int index, int type)
511 {
512  if(type < NTYPES)
513  Lp->P[index].setType(type);
514 }
515 
516 #endif
global_data_all_processes All
Definition: main.cc:40
Definition: io.h:129
double timediff(double t0, double t1)
Definition: logs.cc:488
double second(void)
Definition: logs.cc:471
#define NTYPES
Definition: constants.h:308
#define MAXLEN_PATH
Definition: constants.h:300
@ RST_CREATEICS
Definition: dtypes.h:319
void write_vector_attribute(hid_t handle, const char *attr_name, const void *buf, hid_t mem_type_id, int length)
Definition: hdf5_util.cc:636
hid_t my_H5Gopen(hid_t loc_id, const char *groupname)
Definition: hdf5_util.cc:311
hid_t my_H5Fopen(const char *fname, unsigned int flags, hid_t fapl_id)
Definition: hdf5_util.cc:297
herr_t my_H5Gclose(hid_t group_id, const char *groupname)
Definition: hdf5_util.cc:457
void read_vector_attribute(hid_t handle, const char *attr_name, void *buf, hid_t mem_type_id, int length)
Definition: hdf5_util.cc:614
herr_t my_H5Fclose(hid_t file_id, const char *fname)
Definition: hdf5_util.cc:482
hid_t my_H5Aopen_name(hid_t loc_id, const char *attr_name)
Definition: hdf5_util.cc:371
herr_t my_H5Aclose(hid_t attr_id, const char *attr_name)
Definition: hdf5_util.cc:429
void write_scalar_attribute(hid_t handle, const char *attr_name, const void *buf, hid_t mem_type_id)
Definition: hdf5_util.cc:621
void read_scalar_attribute(hid_t handle, const char *attr_name, void *buf, hid_t mem_type_id)
Definition: hdf5_util.cc:589
@ READ_IF_PRESENT
Definition: io.h:124
@ SKIP_ON_READ
Definition: io.h:125
@ ALL_TYPES
Definition: io.h:103
@ MASS_BLOCK
Definition: io.h:104
@ TREETABLE
Definition: io.h:116
@ HEALPIXTAB
Definition: io.h:119
arrays
Definition: io.h:30
@ A_MM
Definition: io.h:46
@ A_TT
Definition: io.h:42
@ A_PS
Definition: io.h:34
@ A_LC
Definition: io.h:45
@ FILE_HALF
Definition: io.h:70
@ FILE_NONE
Definition: io.h:66
@ FILE_MY_IO_FLOAT
Definition: io.h:69
@ FILE_INT64
Definition: io.h:68
@ FILE_MY_ID_TYPE
Definition: io.h:71
@ FILE_INT
Definition: io.h:67
@ MEM_INT
Definition: io.h:79
@ MEM_MY_FLOAT
Definition: io.h:85
@ MEM_MY_ID_TYPE
Definition: io.h:81
@ MEM_MY_DOUBLE
Definition: io.h:86
@ MEM_INT64
Definition: io.h:80
@ FILE_IS_LIGHTCONE
Definition: io.h:59
logs Logs
Definition: main.cc:43
#define Terminate(...)
Definition: macros.h:19
#define TAG_LOCALN
Definition: mpi_utils.h:52
void sumup_large_ints(int n, int *src, long long *res, MPI_Comm comm)
#define TAG_N
Definition: mpi_utils.h:25
void sumup_longs(int n, long long *src, long long *res, MPI_Comm comm)
memory Mem
Definition: main.cc:44
double UnitVelocity_in_cm_per_s
Definition: allvars.h:119
enum restart_options RestartFlag
Definition: allvars.h:68
char OutputDir[MAXLEN_PATH]
Definition: allvars.h:272
void subdivide_evenly(long long N, int pieces, int index_bin, long long *first, int *count)
Definition: system.cc:51