path: root/Carpet/CarpetIOHDF5
author     Roland Haas <roland.haas@physics.gatech.edu>  2012-09-05 10:13:52 -0400
committer  Barry Wardell <barry.wardell@gmail.com>       2012-09-11 18:23:36 +0100
commit     49044804c879b4b80b35737c63420b07f2a80916 (patch)
tree       d158f859d08f050e0ff337bd27413440a3fa9e4d /Carpet/CarpetIOHDF5
parent     75ee69144b59c74f4ebac5e356f4b7cf5d628b12 (diff)
CarpetIOHDF5: make index files sparse files with full datasets
rather than datasets of extent [1,1,1]. In doing so, this patch removes the separate shape arrays and separate dataspaces for the datasets in index files and creates the index-file datasets with the same dataspace as the "heavy" file datasets. It retains the h5shape attribute that was originally introduced for index files, even though it is now redundant (one could instead call H5Sget_simple_extent_dims on the datasets in the index files).
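
For illustration, the following minimal sketch (not part of the patch; the file name, dataset name, and the HDF5 1.8 API function names are assumptions) shows how a reader could now query the extent directly from an index-file dataset instead of consulting the h5shape attribute:

/* Sketch: read the extent of a dataset in an index file via
 * H5Sget_simple_extent_dims.  The file name and dataset name below are
 * placeholders, and the HDF5 1.8 function names are an assumption. */
#include <hdf5.h>
#include <cstdio>

int main ()
{
  hid_t file = H5Fopen ("index.example.h5", H5F_ACC_RDONLY, H5P_DEFAULT);
  if (file < 0) return 1;
  hid_t dataset = H5Dopen2 (file, "/some dataset", H5P_DEFAULT);
  if (dataset < 0) { H5Fclose (file); return 1; }
  hid_t dataspace = H5Dget_space (dataset);
  hsize_t shape[H5S_MAX_RANK];
  const int rank = H5Sget_simple_extent_dims (dataspace, shape, NULL);
  for (int d = 0; d < rank; ++d)
    std::printf ("extent[%d] = %llu\n", d, (unsigned long long) shape[d]);
  H5Sclose (dataspace);
  H5Dclose (dataset);
  H5Fclose (file);
  return 0;
}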
Diffstat (limited to 'Carpet/CarpetIOHDF5')
-rw-r--r--  Carpet/CarpetIOHDF5/src/Output.cc  18
1 file changed, 4 insertions(+), 14 deletions(-)
diff --git a/Carpet/CarpetIOHDF5/src/Output.cc b/Carpet/CarpetIOHDF5/src/Output.cc
index 1df156c16..44f9df478 100644
--- a/Carpet/CarpetIOHDF5/src/Output.cc
+++ b/Carpet/CarpetIOHDF5/src/Output.cc
@@ -354,13 +354,11 @@ int WriteVarChunkedSequential (const cGH* const cctkGH,
// Get the shape of the HDF5 dataset (in Fortran index order)
hsize_t shape[dim];
- hsize_t index_shape[dim];
hsize_t num_elems = 1;
for (int d = 0; d < group.dim; ++d) {
assert (group.dim-1-d>=0 and group.dim-1-d<dim);
shape[group.dim-1-d] = (bbox.shape() / bbox.stride())[d];
num_elems *= shape[group.dim-1-d];
- index_shape[group.dim-1-d] = 1;
}
// Don't create zero-sized components
@@ -443,7 +441,7 @@ int WriteVarChunkedSequential (const cGH* const cctkGH,
}
// Write the component as an individual dataset
- hid_t plist, dataspace, dataset, index_dataspace, index_dataset;
+ hid_t plist, dataspace, dataset, index_dataset;
HDF5_ERROR (plist = H5Pcreate (H5P_DATASET_CREATE));
// enable compression if requested
const int compression_lvl = request->compression_level >= 0 ?
@@ -463,10 +461,8 @@ int WriteVarChunkedSequential (const cGH* const cctkGH,
filedatatype, dataspace, plist));
if (indexfile != -1) {
- HDF5_ERROR (index_dataspace = H5Screate_simple (group.dim,
- index_shape, NULL));
HDF5_ERROR (index_dataset = H5Dcreate (indexfile, datasetname.str().c_str(),
- filedatatype, index_dataspace, H5P_DEFAULT));
+ filedatatype, dataspace, H5P_DEFAULT));
}
io_bytes +=
@@ -481,7 +477,6 @@ int WriteVarChunkedSequential (const cGH* const cctkGH,
HDF5_ERROR (H5Dclose (dataset));
if (indexfile != -1) {
- HDF5_ERROR (H5Sclose (index_dataspace));
error_count += AddAttributes (cctkGH, fullname, group.dim,refinementlevel,
request, bbox, index_dataset, true);
HDF5_ERROR (H5Dclose (index_dataset));
@@ -620,17 +615,15 @@ int WriteVarChunkedParallel (const cGH* const cctkGH,
// Get the shape of the HDF5 dataset (in Fortran index order)
hsize_t shape[dim];
- hsize_t index_shape[dim];
hssize_t origin[dim];
for (int d = 0; d < group.dim; ++d) {
assert (group.dim-1-d>=0 and group.dim-1-d<dim);
origin[group.dim-1-d] = (bbox.lower() / bbox.stride())[d];
shape[group.dim-1-d] = (bbox.shape() / bbox.stride())[d];
- index_shape[group.dim-1-d] = 1;
}
// Write the component as an individual dataset
- hid_t plist, dataspace, dataset, index_dataspace, index_dataset;
+ hid_t plist, dataspace, dataset, index_dataset;
HDF5_ERROR (plist = H5Pcreate (H5P_DATASET_CREATE));
// enable compression if requested
const int compression_lvl = request->compression_level >= 0 ?
@@ -650,10 +643,8 @@ int WriteVarChunkedParallel (const cGH* const cctkGH,
filedatatype, dataspace, plist));
if (indexfile != -1) {
- HDF5_ERROR (index_dataspace = H5Screate_simple (group.dim,
- index_shape, NULL));
HDF5_ERROR (index_dataset = H5Dcreate (indexfile, datasetname.str().c_str(),
- filedatatype, index_dataspace, H5P_DEFAULT));
+ filedatatype, dataspace, H5P_DEFAULT));
}
io_bytes +=
@@ -667,7 +658,6 @@ int WriteVarChunkedParallel (const cGH* const cctkGH,
HDF5_ERROR (H5Dclose (dataset));
if (indexfile != -1) {
- HDF5_ERROR (H5Sclose (index_dataspace));
error_count += AddAttributes (cctkGH, fullname, group.dim,refinementlevel,
request, bbox, index_dataset, true);
HDF5_ERROR (H5Dclose (index_dataset));
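
A note on why the full-size index datasets stay cheap (my reading of the "sparse files with full datasets" wording, not something stated in the patch): with the standard HDF5 defaults, contiguous datasets use late space allocation, so raw data storage is only allocated once the dataset is actually written. The sketch below (not from this patch; the file name, dataset name, and extent are made up) creates a full-extent dataset without ever writing it, so the resulting file holds only metadata:

/* Sketch: create an index-style dataset with the full dataspace but never
 * write it.  Assuming the default (late) allocation for contiguous layout,
 * the 128^3 doubles are never stored; only the shape, type, and any
 * attributes end up in the file. */
#include <hdf5.h>

int main ()
{
  const hsize_t shape[3] = {128, 128, 128};
  hid_t file = H5Fcreate ("index.sketch.h5", H5F_ACC_TRUNC,
                          H5P_DEFAULT, H5P_DEFAULT);
  hid_t dataspace = H5Screate_simple (3, shape, NULL);
  hid_t dataset = H5Dcreate2 (file, "/example dataset", H5T_NATIVE_DOUBLE,
                              dataspace, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
  /* no H5Dwrite: the dataset's raw data storage is never allocated */
  H5Dclose (dataset);
  H5Sclose (dataspace);
  H5Fclose (file);
  return 0;
}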