author    tradke <tradke@4825ed28-b72c-4eae-9704-e50c059e567d>  2003-05-12 15:43:26 +0000
committer tradke <tradke@4825ed28-b72c-4eae-9704-e50c059e567d>  2003-05-12 15:43:26 +0000
commit    6fbe03a65e0f4320ccb11538c4ac025085881248 (patch)
tree      7f6f5956e9cf4f1cb8d8f1bcbf3eb6c87d9cfacd
parent    5660ddb4bbeae40516c6c377fafca934718dd6a2 (diff)
Fix for the recombination of chunks starting at the same origin (but having
different extents). Also open the input file(s) with a sieve buffer size of
32 MB in order to reduce the number of low-level reads when reading in a chunk.

git-svn-id: http://svn.cactuscode.org/arrangements/CactusPUGHIO/IOHDF5/trunk@166 4825ed28-b72c-4eae-9704-e50c059e567d
-rw-r--r--  src/util/hdf5_recombiner.c  42
1 file changed, 31 insertions(+), 11 deletions(-)
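At the API level, the sieve-buffer part of this change amounts to creating a file-access property list, raising the sieving buffer via H5Pset_sieve_buf_size() above HDF5's 64 kB default, and passing that list to H5Fopen(). A minimal standalone sketch of just that step follows; the file name is illustrative, not taken from the recombiner:

#include <hdf5.h>
#include <stdio.h>

int main (void)
{
  hid_t fapl, file;

  /* create a file-access property list and raise the data sieving buffer
     from HDF5's 64 kB default to the 32 MB used in this commit */
  fapl = H5Pcreate (H5P_FILE_ACCESS);
  H5Pset_sieve_buf_size (fapl, 32 * 1024 * 1024);

  /* open a (hypothetical) chunked input file read-only with that property
     list; partial hyperslab reads are then served from the sieve buffer */
  file = H5Fopen ("chunked_input.h5", H5F_ACC_RDONLY, fapl);
  if (file < 0)
  {
    fprintf (stderr, "ERROR: Cannot open HDF5 input file 'chunked_input.h5'\n");
  }
  else
  {
    H5Fclose (file);
  }
  H5Pclose (fapl);

  return (0);
}

A larger sieve buffer lets HDF5 satisfy many small hyperslab reads from a single low-level read, which is what the commit message means by reducing low-level reads when reading in a chunk.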
diff --git a/src/util/hdf5_recombiner.c b/src/util/hdf5_recombiner.c
index 4f75bcb..0a3df97 100644
--- a/src/util/hdf5_recombiner.c
+++ b/src/util/hdf5_recombiner.c
@@ -33,6 +33,10 @@ CCTK_FILEVERSION(CactusPUGHIO_IOHDF5_util_hdf5_recombiner_c)
may need additional descriptors for internal use) */
#define RESERVED_FILE_DESCRIPTORS 5
+/* the size (in bytes) of the data sieving buffer for input files
+ (should be larger than HDF5's default of 64 kB) */
+#define SIEVE_BUFFERSIZE (32 * 1024 * 1024)
+
/* macro to do an HDF5 call, check its return code, and print a warning
in case of an error */
#define CHECK_ERROR(hdf5_call) \
@@ -61,6 +65,7 @@ CCTK_FILEVERSION(CactusPUGHIO_IOHDF5_util_hdf5_recombiner_c)
static char *pathname = NULL; /* pathname of the current object */
static hid_t *infiles = NULL; /* list of input file handles */
static hid_t outfile = -1 ; /* output file handle */
+static hid_t sieve_plist = -1 ; /* input file data access property list */
static int max_filedescriptors = 0;/* maximum number of open files */
static char **infilenames = NULL; /* list of input filenames */
static int nprocs = 0; /* total number of processors */
@@ -68,6 +73,7 @@ static int ioproc_every = 0; /* I/O was done on every N'th processor */
static int ninfiles = 0; /* number of input files */
static unsigned int nerrors = 0; /* global error counter */
+
/*****************************************************************************/
/* local function prototypes */
/*****************************************************************************/
@@ -126,10 +132,16 @@ int main (int argc, const char *const argv[])
return (0);
}
+ /* set the data sieving buffer to a much higher value than its default (64kB)
+ in order to minimize the number of low-level reads when reading individual
+ chunks as hyperslabs into the slice buffer in memory */
+ CHECK_ERROR (sieve_plist = H5Pcreate (H5P_FILE_ACCESS));
+ CHECK_ERROR (H5Pset_sieve_buf_size (sieve_plist, SIEVE_BUFFERSIZE));
+
/* open (first) input file */
H5E_BEGIN_TRY
{
- infile = H5Fopen (argv[1], H5F_ACC_RDONLY, H5P_DEFAULT);
+ infile = H5Fopen (argv[1], H5F_ACC_RDONLY, sieve_plist);
} H5E_END_TRY
if (infile < 0)
{
@@ -285,7 +297,7 @@ int main (int argc, const char *const argv[])
infilenames[infile] = malloc (strlen (template) + 10);
sprintf (infilenames[infile], template, infile);
infiles[infile] = H5Fopen (infilenames[infile], H5F_ACC_RDONLY,
- H5P_DEFAULT);
+ sieve_plist);
if (infiles[infile] < 0)
{
fprintf (stderr, "ERROR: Cannot open chunked HDF5 input file '%s' !\n",
@@ -674,7 +686,7 @@ static int RecombineGroupData (const char *groupname)
printf ("reopening input file '%s'\n", infilenames[infile]);
#endif
CHECK_ERROR (infiles[infile] = H5Fopen (infilenames[infile],
- H5F_ACC_RDONLY, H5P_DEFAULT));
+ H5F_ACC_RDONLY, sieve_plist));
}
/* loop over all chunks of this input file */
@@ -803,7 +815,8 @@ static int RecombineGroupData (const char *groupname)
}
printf ("]\n");
- if (chunk_origin[ndims - 1] == 0 &&
+ if (slice_dataspace < 0 &&
+ chunk_origin[ndims - 1] == 0 &&
(ndims == 1 || chunk_origin[ndims - 2] == 0))
{
memset (slice_origin, 0, ndims * sizeof (hssize_t));
@@ -835,14 +848,21 @@ static int RecombineGroupData (const char *groupname)
slice_origin[ndims - 2] = chunk_origin[ndims - 2];
}
- CHECK_ERROR (H5Sselect_hyperslab (slice_dataspace, H5S_SELECT_SET,
- slice_origin, NULL, chunk_dims, NULL));
- CHECK_ERROR (H5Dread (chunked_dataset, unchunked_datatype, slice_dataspace,
- H5S_ALL, H5P_DEFAULT, data));
+ if (slice_dataspace >= 0)
+ {
+ CHECK_ERROR (H5Sselect_hyperslab (slice_dataspace, H5S_SELECT_SET,
+ slice_origin, NULL, chunk_dims,NULL));
+ /* set the buffer size to read the full hyperslab */
+ CHECK_ERROR (H5Dread (chunked_dataset, unchunked_datatype,
+ slice_dataspace, H5S_ALL, H5P_DEFAULT, data));
+ }
+ CHECK_ERROR (H5Dclose (chunked_dataset));
+ CHECK_ERROR (H5Sclose (chunked_dataspace));
/* if this was the last chunk for the current z-slice
then write it back now */
- if (chunk_dims[ndims - 1] + chunk_origin[ndims - 1] == dims[ndims - 1] &&
+ if (slice_dataspace >= 0 &&
+ chunk_dims[ndims - 1] + chunk_origin[ndims - 1] == dims[ndims - 1] &&
(ndims == 1 ||
chunk_dims[ndims - 2] + chunk_origin[ndims - 2] == dims[ndims - 2]))
{
@@ -859,9 +879,9 @@ static int RecombineGroupData (const char *groupname)
slice_dataspace, unchunked_dataspace,
H5P_DEFAULT, data));
free (data);
+ CHECK_ERROR (H5Sclose (slice_dataspace));
+ slice_dataspace = -1;
}
- CHECK_ERROR (H5Dclose (chunked_dataset));
- CHECK_ERROR (H5Sclose (chunked_dataspace));
} /* end of loop over all chunks in this input file */
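The recombination fix itself is a handle-lifecycle change: the per-slice dataspace is created only when no slice is currently being assembled, every read and the final write-back are guarded on a valid handle, and the handle is closed and reset to -1 once the slice is written out. A second chunk starting at the same origin but with a different extent therefore no longer restarts the slice. A compressed, hypothetical sketch of that pattern is below; the placeholder arguments and elided H5Dread()/H5Dwrite() calls stand in for the real hyperslab logic shown in the hunks above:

#include <hdf5.h>

/* hedged sketch of the per-slice handle lifecycle used in the fix above;
   all parameter names here are illustrative, not the recombiner's interface */
static void assemble_slices (int nchunks, const int *at_slice_origin,
                             const int *completes_slice)
{
  hid_t slice_dataspace = -1;          /* invalid handle: no slice open yet */
  int chunk;

  for (chunk = 0; chunk < nchunks; chunk++)
  {
    /* open a new slice only if none is currently being assembled; a second
       chunk at the same origin (different extent) no longer restarts it */
    if (slice_dataspace < 0 && at_slice_origin[chunk])
    {
      hsize_t slice_dims[1] = { 1 };   /* placeholder slice extent */
      slice_dataspace = H5Screate_simple (1, slice_dims, NULL);
    }

    if (slice_dataspace >= 0)
    {
      /* ... select this chunk's hyperslab and H5Dread() it into the slice ... */
    }

    /* the per-chunk dataset/dataspace handles are closed unconditionally here */

    if (slice_dataspace >= 0 && completes_slice[chunk])
    {
      /* ... H5Dwrite() the assembled slice to the output file ... */
      H5Sclose (slice_dataspace);
      slice_dataspace = -1;            /* next origin chunk starts a fresh slice */
    }
  }
}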