Skip to content
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions examples/parameter_example.yml
Original file line number Diff line number Diff line change
Expand Up @@ -226,6 +226,7 @@ Statistics:
InitialConditions:
file_name: SedovBlast/sedov.hdf5 # The file to read
periodic: 1 # Are we running with periodic ICs?
accept_snapshot: 0 # (Optional) Accept SWIFT snapshot field names when reading ICs.
generate_gas_in_ics: 0 # (Optional) Generate gas particles from the DM-only ICs (e.g. from panphasia).
cleanup_h_factors: 0 # (Optional) Clean up the h-factors used in the ICs (e.g. in Gadget files).
cleanup_velocity_factors: 0 # (Optional) Clean up the scale-factors used in the definition of the velocity variable in the ICs (e.g. in Gadget files).
Expand Down
16 changes: 16 additions & 0 deletions src/io_properties.h
Original file line number Diff line number Diff line change
Expand Up @@ -209,6 +209,22 @@ INLINE static void safe_strcpy(char *restrict dst, const char *restrict src,
dst[dst_len - 1] = '\0';
}

/**
 * @brief Map a legacy/singular IC field name onto its snapshot-style name.
 *
 * Snapshots written by SWIFT carry plural field names, whereas GADGET-style
 * initial conditions use the singular forms. Recognising the plural aliases
 * here lets a snapshot be fed straight back in as ICs without an external
 * conversion step.
 *
 * @param name The singular (IC-style) field name to look up.
 * @return The plural snapshot-style alias, or NULL if no alias is known.
 */
INLINE static const char *io_get_input_field_alias(const char *name) {

  /* Table of {IC name, snapshot name} pairs; extend here when new
   * singular/plural mismatches are discovered. */
  static const char *const aliases[][2] = {
      {"SmoothingLength", "SmoothingLengths"},
      {"InternalEnergy", "InternalEnergies"},
      {"Density", "Densities"},
  };

  const size_t num_aliases = sizeof(aliases) / sizeof(aliases[0]);
  for (size_t i = 0; i < num_aliases; ++i) {
    if (strcmp(name, aliases[i][0]) == 0) return aliases[i][1];
  }

  return NULL;
}

/**
* @brief Constructs an #io_props from its parameters
*
Expand Down
42 changes: 34 additions & 8 deletions src/parallel_io.c
Original file line number Diff line number Diff line change
Expand Up @@ -251,16 +251,40 @@ void read_array_parallel(hid_t grp, struct io_props props, size_t N,
long long N_total, int mpi_rank, long long offset,
const struct unit_system *internal_units,
const struct unit_system *ic_units, int cleanup_h,
int cleanup_sqrt_a, double h, double a) {
int cleanup_sqrt_a, double h, double a,
const int accept_snapshot) {

const size_t typeSize = io_sizeof_type(props.type);
const size_t copySize = typeSize * props.dimension;

const char *dataset_name = props.name;

/* Check whether the dataspace exists or not */
const htri_t exist = H5Lexists(grp, props.name, 0);
htri_t exist = H5Lexists(grp, dataset_name, 0);
if (exist < 0) {
error("Error while checking the existence of data set '%s'.", props.name);
} else if (exist == 0) {
error("Error while checking the existence of data set '%s'.", dataset_name);
}

if (exist == 0) {
const char *alias = io_get_input_field_alias(props.name);
if (alias != NULL) {
const htri_t alias_exist = H5Lexists(grp, alias, 0);
if (alias_exist < 0) {
error("Error while checking the existence of data set '%s'.", alias);
} else if (alias_exist > 0 && accept_snapshot) {
dataset_name = alias;
exist = alias_exist;
} else if (alias_exist > 0 && props.importance == COMPULSORY) {
error(
"Compulsory data set '%s' not present in the file. Found snapshot "
"field '%s' instead. Set InitialConditions:accept_snapshot to 1 "
"to accept snapshot field names.",
props.name, alias);
}
}
}

if (exist == 0) {
if (props.importance == COMPULSORY) {
error("Compulsory data set '%s' not present in the file.", props.name);
} else {
Expand All @@ -279,8 +303,8 @@ void read_array_parallel(hid_t grp, struct io_props props, size_t N,
}

/* Open data space in file */
const hid_t h_data = H5Dopen2(grp, props.name, H5P_DEFAULT);
if (h_data < 0) error("Error while opening data space '%s'.", props.name);
const hid_t h_data = H5Dopen2(grp, dataset_name, H5P_DEFAULT);
if (h_data < 0) error("Error while opening data space '%s'.", dataset_name);

/* Parallel-HDF5 1.10.2 incorrectly reads data that was compressed */
/* We detect this here and crash with an error message instead of */
Expand Down Expand Up @@ -796,7 +820,8 @@ void read_ic_parallel(char *fileName, const struct unit_system *internal_units,
const int cleanup_sqrt_a, const double h, const double a,
const int mpi_rank, const int mpi_size, MPI_Comm comm,
MPI_Info info, const int n_threads, const int dry_run,
const int remap_ids, struct ic_info *ics_metadata) {
const int remap_ids, const int accept_snapshot,
struct ic_info *ics_metadata) {

hid_t h_file = 0, h_grp = 0;
/* GADGET has only cubic boxes (in cosmological mode) */
Expand Down Expand Up @@ -1100,7 +1125,8 @@ void read_ic_parallel(char *fileName, const struct unit_system *internal_units,
/* Read array. */
read_array_parallel(h_grp, list[i], Nparticles, N_total[ptype],
mpi_rank, offset[ptype], internal_units, ic_units,
cleanup_h, cleanup_sqrt_a, h, a);
cleanup_h, cleanup_sqrt_a, h, a,
accept_snapshot);
}

/* Close particle group */
Expand Down
3 changes: 2 additions & 1 deletion src/parallel_io.h
Original file line number Diff line number Diff line change
Expand Up @@ -47,7 +47,8 @@ void read_ic_parallel(char *fileName, const struct unit_system *internal_units,
const int cleanup_sqrt_a, const double h, const double a,
const int mpi_rank, const int mpi_size, MPI_Comm comm,
MPI_Info info, const int nr_threads, const int dry_run,
const int remap_ids, struct ic_info *ics_metadata);
const int remap_ids, const int accept_snapshot,
struct ic_info *ics_metadata);

void write_output_parallel(struct engine *e,
const struct unit_system *internal_units,
Expand Down
44 changes: 35 additions & 9 deletions src/serial_io.c
Original file line number Diff line number Diff line change
Expand Up @@ -89,17 +89,41 @@ void read_array_serial(hid_t grp, const struct io_props props, size_t N,
long long N_total, long long offset,
const struct unit_system *internal_units,
const struct unit_system *ic_units, int cleanup_h,
int cleanup_sqrt_a, double h, double a) {
int cleanup_sqrt_a, double h, double a,
const int accept_snapshot) {

const size_t typeSize = io_sizeof_type(props.type);
const size_t copySize = typeSize * props.dimension;
const size_t num_elements = N * props.dimension;

const char *dataset_name = props.name;

/* Check whether the dataspace exists or not */
const htri_t exist = H5Lexists(grp, props.name, 0);
htri_t exist = H5Lexists(grp, dataset_name, 0);
if (exist < 0) {
error("Error while checking the existence of data set '%s'.", props.name);
} else if (exist == 0) {
error("Error while checking the existence of data set '%s'.", dataset_name);
}

if (exist == 0) {
const char *alias = io_get_input_field_alias(props.name);
if (alias != NULL) {
const htri_t alias_exist = H5Lexists(grp, alias, 0);
if (alias_exist < 0) {
error("Error while checking the existence of data set '%s'.", alias);
} else if (alias_exist > 0 && accept_snapshot) {
dataset_name = alias;
exist = alias_exist;
} else if (alias_exist > 0 && props.importance == COMPULSORY) {
error(
"Compulsory data set '%s' not present in the file. Found snapshot "
"field '%s' instead. Set InitialConditions:accept_snapshot to 1 "
"to accept snapshot field names.",
props.name, alias);
}
}
}

if (exist == 0) {
if (props.importance == COMPULSORY) {
error("Compulsory data set '%s' not present in the file.", props.name);
} else {
Expand All @@ -122,8 +146,8 @@ void read_array_serial(hid_t grp, const struct io_props props, size_t N,
/* fflush(stdout); */

/* Open data space */
const hid_t h_data = H5Dopen(grp, props.name, H5P_DEFAULT);
if (h_data < 0) error("Error while opening data space '%s'.", props.name);
const hid_t h_data = H5Dopen(grp, dataset_name, H5P_DEFAULT);
if (h_data < 0) error("Error while opening data space '%s'.", dataset_name);

/* Allocate temporary buffer */
void *temp = malloc(num_elements * typeSize);
Expand Down Expand Up @@ -158,7 +182,7 @@ void read_array_serial(hid_t grp, const struct io_props props, size_t N,
/* Using HDF5 dataspaces would be better */
const hid_t h_err = H5Dread(h_data, io_hdf5_type(props.type), h_memspace,
h_filespace, H5P_DEFAULT, temp);
if (h_err < 0) error("Error while reading data array '%s'.", props.name);
if (h_err < 0) error("Error while reading data array '%s'.", dataset_name);

/* Unit conversion if necessary */
const double factor =
Expand Down Expand Up @@ -569,7 +593,8 @@ void read_ic_serial(char *fileName, const struct unit_system *internal_units,
const int cleanup_sqrt_a, double h, double a,
const int mpi_rank, int mpi_size, MPI_Comm comm,
MPI_Info info, const int n_threads, const int dry_run,
const int remap_ids, struct ic_info *ics_metadata) {
const int remap_ids, const int accept_snapshot,
struct ic_info *ics_metadata) {

hid_t h_file = 0, h_grp = 0;
/* GADGET has only cubic boxes (in cosmological mode) */
Expand Down Expand Up @@ -905,7 +930,8 @@ void read_ic_serial(char *fileName, const struct unit_system *internal_units,
/* Read array. */
read_array_serial(h_grp, list[i], Nparticles, N_total[ptype],
offset[ptype], internal_units, ic_units,
cleanup_h, cleanup_sqrt_a, h, a);
cleanup_h, cleanup_sqrt_a, h, a,
accept_snapshot);
}

/* Close particle group */
Expand Down
3 changes: 2 additions & 1 deletion src/serial_io.h
Original file line number Diff line number Diff line change
Expand Up @@ -49,7 +49,8 @@ void read_ic_serial(char *fileName, const struct unit_system *internal_units,
const int cleanup_sqrt_a, const double h, const double a,
const int mpi_rank, int mpi_size, MPI_Comm comm,
MPI_Info info, const int n_threads, const int dry_run,
const int remap_ids, struct ic_info *ics_metadata);
const int remap_ids, const int accept_snapshot,
struct ic_info *ics_metadata);

void write_output_serial(struct engine *e,
const struct unit_system *internal_units,
Expand Down
42 changes: 33 additions & 9 deletions src/single_io.c
Original file line number Diff line number Diff line change
Expand Up @@ -86,17 +86,41 @@ static const int io_max_size_output_list = 100;
void read_array_single(hid_t h_grp, const struct io_props props, size_t N,
const struct unit_system *internal_units,
const struct unit_system *ic_units, int cleanup_h,
int cleanup_sqrt_a, double h, double a) {
int cleanup_sqrt_a, double h, double a,
const int accept_snapshot) {

const size_t typeSize = io_sizeof_type(props.type);
const size_t copySize = typeSize * props.dimension;
const size_t num_elements = N * props.dimension;

const char *dataset_name = props.name;

/* Check whether the dataspace exists or not */
const htri_t exist = H5Lexists(h_grp, props.name, 0);
htri_t exist = H5Lexists(h_grp, dataset_name, 0);
if (exist < 0) {
error("Error while checking the existence of data set '%s'.", props.name);
} else if (exist == 0) {
error("Error while checking the existence of data set '%s'.", dataset_name);
}

if (exist == 0) {
const char *alias = io_get_input_field_alias(props.name);
if (alias != NULL) {
const htri_t alias_exist = H5Lexists(h_grp, alias, 0);
if (alias_exist < 0) {
error("Error while checking the existence of data set '%s'.", alias);
} else if (alias_exist > 0 && accept_snapshot) {
dataset_name = alias;
exist = alias_exist;
} else if (alias_exist > 0 && props.importance == COMPULSORY) {
error(
"Compulsory data set '%s' not present in the file. Found snapshot "
"field '%s' instead. Set InitialConditions:accept_snapshot to 1 "
"to accept snapshot field names.",
props.name, alias);
}
}
}

if (exist == 0) {
if (props.importance == COMPULSORY) {
error("Compulsory data set '%s' not present in the file.", props.name);
} else {
Expand All @@ -119,8 +143,8 @@ void read_array_single(hid_t h_grp, const struct io_props props, size_t N,
/* props.name); */

/* Open data space */
const hid_t h_data = H5Dopen(h_grp, props.name, H5P_DEFAULT);
if (h_data < 0) error("Error while opening data space '%s'.", props.name);
const hid_t h_data = H5Dopen(h_grp, dataset_name, H5P_DEFAULT);
if (h_data < 0) error("Error while opening data space '%s'.", dataset_name);

/* Allocate temporary buffer */
void *temp = malloc(num_elements * typeSize);
Expand All @@ -131,7 +155,7 @@ void read_array_single(hid_t h_grp, const struct io_props props, size_t N,
/* Using HDF5 dataspaces would be better */
const hid_t h_err = H5Dread(h_data, io_hdf5_type(props.type), H5S_ALL,
H5S_ALL, H5P_DEFAULT, temp);
if (h_err < 0) error("Error while reading data array '%s'.", props.name);
if (h_err < 0) error("Error while reading data array '%s'.", dataset_name);

/* Unit conversion if necessary */
const double unit_factor =
Expand Down Expand Up @@ -466,7 +490,7 @@ void read_ic_single(
const int with_stars, const int with_black_holes, const int with_cosmology,
const int cleanup_h, const int cleanup_sqrt_a, const double h,
const double a, const int n_threads, const int dry_run, const int remap_ids,
struct ic_info *ics_metadata) {
const int accept_snapshot, struct ic_info *ics_metadata) {

hid_t h_file = 0, h_grp = 0;
/* GADGET has only cubic boxes (in cosmological mode) */
Expand Down Expand Up @@ -754,7 +778,7 @@ void read_ic_single(

/* Read array. */
read_array_single(h_grp, list[i], Nparticles, internal_units, ic_units,
cleanup_h, cleanup_sqrt_a, h, a);
cleanup_h, cleanup_sqrt_a, h, a, accept_snapshot);
}

/* Close particle group */
Expand Down
3 changes: 2 additions & 1 deletion src/single_io.h
Original file line number Diff line number Diff line change
Expand Up @@ -41,7 +41,8 @@ void read_ic_single(
const int with_stars, const int with_black_holes, const int with_cosmology,
const int cleanup_h, const int cleanup_sqrt_a, const double h,
const double a, const int nr_threads, const int dry_run,
const int remap_ids, struct ic_info *ics_metadata);
const int remap_ids, const int accept_snapshot,
struct ic_info *ics_metadata);

void write_output_single(struct engine *e,
const struct unit_system *internal_units,
Expand Down
10 changes: 7 additions & 3 deletions swift.c
Original file line number Diff line number Diff line change
Expand Up @@ -1095,6 +1095,8 @@ int main(int argc, char *argv[]) {
params, "InitialConditions:generate_gas_in_ics", 0);
const int remap_ids =
parser_get_opt_param_int(params, "InitialConditions:remap_ids", 0);
const int accept_snapshot =
parser_get_opt_param_int(params, "InitialConditions:accept_snapshot", 0);

/* Initialise the cosmology */
if (with_cosmology)
Expand Down Expand Up @@ -1281,23 +1283,25 @@ int main(int argc, char *argv[]) {
with_gravity, with_sinks, with_stars, with_black_holes,
with_cosmology, cleanup_h, cleanup_sqrt_a, cosmo.h,
cosmo.a, myrank, nr_nodes, MPI_COMM_WORLD, MPI_INFO_NULL,
nr_threads, dry_run, remap_ids, &ics_metadata);
nr_threads, dry_run, remap_ids, accept_snapshot,
&ics_metadata);
#else
read_ic_serial(ICfileName, &us, dim, &parts, &gparts, &sinks, &sparts,
&bparts, &Ngas, &Ngpart, &Ngpart_background, &Nnupart,
&Nsink, &Nspart, &Nbpart, &flag_entropy_ICs, with_hydro,
with_gravity, with_sinks, with_stars, with_black_holes,
with_cosmology, cleanup_h, cleanup_sqrt_a, cosmo.h, cosmo.a,
myrank, nr_nodes, MPI_COMM_WORLD, MPI_INFO_NULL, nr_threads,
dry_run, remap_ids, &ics_metadata);
dry_run, remap_ids, accept_snapshot, &ics_metadata);
#endif
#else
read_ic_single(ICfileName, &us, dim, &parts, &gparts, &sinks, &sparts,
&bparts, &Ngas, &Ngpart, &Ngpart_background, &Nnupart,
&Nsink, &Nspart, &Nbpart, &flag_entropy_ICs, with_hydro,
with_gravity, with_sinks, with_stars, with_black_holes,
with_cosmology, cleanup_h, cleanup_sqrt_a, cosmo.h, cosmo.a,
nr_threads, dry_run, remap_ids, &ics_metadata);
nr_threads, dry_run, remap_ids, accept_snapshot,
&ics_metadata);
#endif
#endif

Expand Down