diff options
author | tradke <tradke@7842ec3a-9562-4be5-9c5b-06ba18f2b668> | 2000-10-24 22:26:50 +0000 |
---|---|---|
committer | tradke <tradke@7842ec3a-9562-4be5-9c5b-06ba18f2b668> | 2000-10-24 22:26:50 +0000 |
commit | eef36f49f2764e8ead7b9eab0991ffb4beee29ce (patch) | |
tree | 17e185fac9c1cabd2e1a6184f16a415d6897d240 | |
parent | 568461b3440d28004752786fcb6e1e6e22c71195 (diff) |
Check HDF5 write buffer size and increase it if necessary.
This fixes Markus' problems with writing large datasets
(> 1 MB, which is the default buffer size).
git-svn-id: http://svn.cactuscode.org/arrangements/CactusPUGHIO/IOHDF5Util/trunk@6 7842ec3a-9562-4be5-9c5b-06ba18f2b668
-rw-r--r-- | src/DumpVar.c | 34 |
1 file changed, 26 insertions, 8 deletions
diff --git a/src/DumpVar.c b/src/DumpVar.c index c08c281..126232f 100644 --- a/src/DumpVar.c +++ b/src/DumpVar.c @@ -777,10 +777,11 @@ static void IOHDF5Util_procDump (cGH *GH, int myproc; ioGH *ioUtilGH; ioHDF5UtilGH *myGH; - hid_t group, dataset, memspace, filespace; + hid_t group, dataset, memspace, filespace, xfer_plist; char *fullname, *datasetname, *chunkname; hssize_t *chunk_origin; hsize_t *chunk_dims, *file_dims; + hsize_t buffersize; int locpoints; @@ -853,11 +854,20 @@ static void IOHDF5Util_procDump (cGH *GH, IOHDF5_ERROR (dataset = H5Dopen (file, datasetname)); } + /* increase the buffer size if the default isn't sufficient */ + IOHDF5_ERROR (xfer_plist = H5Pcreate (H5P_DATASET_XFER)); + buffersize = H5Dget_storage_size (dataset); + if (buffersize > H5Pget_buffer (xfer_plist, NULL, NULL)) + { + IOHDF5_ERROR (H5Pset_buffer (xfer_plist, buffersize, NULL, NULL)); + } + /* write the data */ IOHDF5_ERROR (H5Dwrite (dataset, iohdf5_type, memspace, filespace, - H5P_DEFAULT, outme)); + xfer_plist, outme)); - /* and close the file dataspace */ + /* and close the transfer property list and the file dataspace */ + IOHDF5_ERROR (H5Pclose (xfer_plist)); IOHDF5_ERROR (H5Sclose (filespace)); } else @@ -962,10 +972,11 @@ static void IOHDF5Util_collectiveDump (cGH *GH, DECLARE_CCTK_PARAMETERS int i, dim; ioHDF5GH *myGH; - hid_t dataset, memspace, filespace; + hid_t dataset, memspace, filespace, xfer_plist; char *name, datasetname[128]; hssize_t *chunk_origin; hsize_t *chunk_dims, *file_dims; + hsize_t buffersize; myGH = (ioHDF5GH *) CCTK_GHExtension (GH, "IOHDF5Util"); @@ -1016,14 +1027,21 @@ static void IOHDF5Util_collectiveDump (cGH *GH, slab, dataset); } + /* increase the buffer size if the default isn't sufficient */ + IOHDF5_ERROR (xfer_plist = H5Pcreate (H5P_DATASET_XFER)); + buffersize = H5Dget_storage_size (dataset); + if (buffersize > H5Pget_buffer (xfer_plist, NULL, NULL)) + { + IOHDF5_ERROR (H5Pset_buffer (xfer_plist, buffersize, NULL, NULL)); + } + /* write 
the data */ IOHDF5_ERROR (H5Dwrite (dataset, hdf5io_type, memspace, - filespace, H5P_DEFAULT, outme)); + filespace, xfer_plist, outme)); - /* and close the file dataspace */ + /* close resources */ + IOHDF5_ERROR (H5Pclose (xfer_plist)); IOHDF5_ERROR (H5Sclose (filespace)); - - /* close the dataset and the memspace */ IOHDF5_ERROR (H5Dclose (dataset)); IOHDF5_ERROR (H5Sclose (memspace)); |