author     Roland Haas <roland.haas@physics.gatech.edu>  2012-08-03 22:34:59 -0700
committer  Roland Haas <roland.haas@physics.gatech.edu>  2012-08-03 22:34:59 -0700
commit     6915174b23acdc4353d820e58298a11a55ee826d (patch)
tree       5526a3d3e346530b35cf5d55e5035b949492d438
parent     a54c76a4a07b2c28cdca4dd3767abab334257f8f (diff)
CarpetIOHDF5: support indices in sequential chunked output
This happens to be the output method used for single-process runs.
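The pattern the patch introduces is an optional HDF5 index-file handle that defaults to -1 ("no index file"): when a valid handle is passed, a placeholder dataset with extent 1 in every direction is created under the same name in the index file, so a reader can discover datasets and their attributes without opening the bulk data. The standalone sketch below only illustrates that pattern with plain HDF5 calls; the file names, the write_with_optional_index() helper and the sample data are invented for illustration and are not part of CarpetIOHDF5.

// Hedged, minimal sketch (not CarpetIOHDF5 code): an optional index-file handle
// that defaults to -1 ("no index file").  When a real handle is passed, a tiny
// dataset with extent 1 in every direction is created under the same name in the
// index file, so datasets can be enumerated without reading the large data file.
// File names, the helper name and the sample data are invented for illustration.
#include <hdf5.h>
#include <vector>

static void write_with_optional_index (hid_t datafile,
                                       hid_t indexfile,   // -1 means "no index file"
                                       const char *name,
                                       const std::vector<double> &data,
                                       const hsize_t shape[3])
{
  // full-size dataset in the data file
  hid_t dataspace = H5Screate_simple (3, shape, NULL);
  hid_t dataset   = H5Dcreate2 (datafile, name, H5T_NATIVE_DOUBLE, dataspace,
                                H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
  H5Dwrite (dataset, H5T_NATIVE_DOUBLE, H5S_ALL, H5S_ALL, H5P_DEFAULT, &data[0]);
  H5Dclose (dataset);
  H5Sclose (dataspace);

  // optional placeholder dataset in the index file: same name, extent 1 in every
  // direction, no bulk data written -- only the shape and attributes matter
  if (indexfile != -1) {
    const hsize_t ones[3] = {1, 1, 1};
    hid_t index_dataspace = H5Screate_simple (3, ones, NULL);
    hid_t index_dataset   = H5Dcreate2 (indexfile, name, H5T_NATIVE_DOUBLE,
                                        index_dataspace, H5P_DEFAULT, H5P_DEFAULT,
                                        H5P_DEFAULT);
    H5Dclose (index_dataset);
    H5Sclose (index_dataspace);
  }
}

int main ()
{
  const hsize_t shape[3] = {2, 2, 2};
  std::vector<double> data (8, 1.0);

  hid_t datafile  = H5Fcreate ("data.h5",     H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
  hid_t indexfile = H5Fcreate ("data.idx.h5", H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);

  write_with_optional_index (datafile, indexfile, "example dataset", data, shape);
  write_with_optional_index (datafile, -1, "dataset without index", data, shape);

  H5Fclose (indexfile);
  H5Fclose (datafile);
  return 0;
}

In the patch itself the same idea appears as the new hid_t index / indexfile parameter of WriteVarChunkedSequential, with -1 as the "no index file" default, mirroring what WriteVarChunkedParallel already does.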
-rw-r--r--  Carpet/CarpetIOHDF5/src/CarpetIOHDF5.cc  |  2
-rw-r--r--  Carpet/CarpetIOHDF5/src/CarpetIOHDF5.hh  |  3
-rw-r--r--  Carpet/CarpetIOHDF5/src/Output.cc        | 24
3 files changed, 25 insertions(+), 4 deletions(-)
diff --git a/Carpet/CarpetIOHDF5/src/CarpetIOHDF5.cc b/Carpet/CarpetIOHDF5/src/CarpetIOHDF5.cc
index 909764b46..85b10c229 100644
--- a/Carpet/CarpetIOHDF5/src/CarpetIOHDF5.cc
+++ b/Carpet/CarpetIOHDF5/src/CarpetIOHDF5.cc
@@ -871,7 +871,7 @@ static int OutputVarAs (const cGH* const cctkGH, const char* const fullname,
groupdata.disttype == CCTK_DISTRIB_CONSTANT) {
error_count += WriteVarUnchunked (cctkGH, file, io_bytes, r, false);
} else if (CCTK_EQUALS (out_mode, "onefile")) {
- error_count += WriteVarChunkedSequential (cctkGH, file, io_bytes, r, false);
+ error_count += WriteVarChunkedSequential (cctkGH, file, io_bytes, r, false, index_file);
} else {
error_count += WriteVarChunkedParallel (cctkGH, file, io_bytes, r, false, index_file);
}
diff --git a/Carpet/CarpetIOHDF5/src/CarpetIOHDF5.hh b/Carpet/CarpetIOHDF5/src/CarpetIOHDF5.hh
index 3cff7bf3b..42c7d6622 100644
--- a/Carpet/CarpetIOHDF5/src/CarpetIOHDF5.hh
+++ b/Carpet/CarpetIOHDF5/src/CarpetIOHDF5.hh
@@ -119,7 +119,8 @@ namespace CarpetIOHDF5
hid_t file,
CCTK_REAL & io_bytes,
const ioRequest* const request,
- bool called_from_checkpoint);
+ bool called_from_checkpoint,
+ hid_t index = -1);
int WriteVarChunkedParallel (const cGH* const cctkGH,
hid_t file,
CCTK_REAL & io_bytes,
diff --git a/Carpet/CarpetIOHDF5/src/Output.cc b/Carpet/CarpetIOHDF5/src/Output.cc
index f8c580335..1df156c16 100644
--- a/Carpet/CarpetIOHDF5/src/Output.cc
+++ b/Carpet/CarpetIOHDF5/src/Output.cc
@@ -307,7 +307,8 @@ int WriteVarChunkedSequential (const cGH* const cctkGH,
hid_t outfile,
CCTK_REAL & io_bytes,
const ioRequest* const request,
- bool called_from_checkpoint)
+ bool called_from_checkpoint,
+ hid_t indexfile)
{
DECLARE_CCTK_PARAMETERS;
@@ -353,11 +354,13 @@ int WriteVarChunkedSequential (const cGH* const cctkGH,
// Get the shape of the HDF5 dataset (in Fortran index order)
hsize_t shape[dim];
+ hsize_t index_shape[dim];
hsize_t num_elems = 1;
for (int d = 0; d < group.dim; ++d) {
assert (group.dim-1-d>=0 and group.dim-1-d<dim);
shape[group.dim-1-d] = (bbox.shape() / bbox.stride())[d];
num_elems *= shape[group.dim-1-d];
+ index_shape[group.dim-1-d] = 1;
}
// Don't create zero-sized components
@@ -426,6 +429,8 @@ int WriteVarChunkedSequential (const cGH* const cctkGH,
if (request->check_exist) {
H5E_BEGIN_TRY {
H5Gunlink (outfile, datasetname.str().c_str());
+ if (indexfile != -1)
+ H5Gunlink (indexfile, datasetname.str().c_str());
} H5E_END_TRY;
}
@@ -438,7 +443,7 @@ int WriteVarChunkedSequential (const cGH* const cctkGH,
}
// Write the component as an individual dataset
- hid_t plist, dataspace, dataset;
+ hid_t plist, dataspace, dataset, index_dataspace, index_dataset;
HDF5_ERROR (plist = H5Pcreate (H5P_DATASET_CREATE));
// enable compression if requested
const int compression_lvl = request->compression_level >= 0 ?
@@ -456,6 +461,14 @@ int WriteVarChunkedSequential (const cGH* const cctkGH,
HDF5_ERROR (dataspace = H5Screate_simple (group.dim, shape, NULL));
HDF5_ERROR (dataset = H5Dcreate (outfile, datasetname.str().c_str(),
filedatatype, dataspace, plist));
+
+ if (indexfile != -1) {
+ HDF5_ERROR (index_dataspace = H5Screate_simple (group.dim,
+ index_shape, NULL));
+ HDF5_ERROR (index_dataset = H5Dcreate (indexfile, datasetname.str().c_str(),
+ filedatatype, index_dataspace, H5P_DEFAULT));
+ }
+
io_bytes +=
H5Sget_simple_extent_npoints (dataspace) *
H5Tget_size (filedatatype);
@@ -466,6 +479,13 @@ int WriteVarChunkedSequential (const cGH* const cctkGH,
error_count += AddAttributes (cctkGH, fullname, group.dim,
refinementlevel, request, bbox,dataset);
HDF5_ERROR (H5Dclose (dataset));
+
+ if (indexfile != -1) {
+ HDF5_ERROR (H5Sclose (index_dataspace));
+ error_count += AddAttributes (cctkGH, fullname, group.dim,refinementlevel,
+ request, bbox, index_dataset, true);
+ HDF5_ERROR (H5Dclose (index_dataset));
+ }
}
} // if dist::rank() == 0