aboutsummaryrefslogtreecommitdiff
path: root/src
diff options
context:
space:
mode:
authortradke <tradke@7842ec3a-9562-4be5-9c5b-06ba18f2b668>2002-12-05 13:45:04 +0000
committertradke <tradke@7842ec3a-9562-4be5-9c5b-06ba18f2b668>2002-12-05 13:45:04 +0000
commitcfb50136a02328ed4a0692e6b413a94367007d80 (patch)
treee4872a3338bbbe7dd75816921bda37973de8ac03 /src
parent8c8674265e79d43c722a017c9261cc370cb3e273 (diff)
Added missing includes and fixed structure name when compiling with parallel HDF5.
This fixes PR CactusPUGHIO/1329. git-svn-id: http://svn.cactuscode.org/arrangements/CactusPUGHIO/IOHDF5Util/trunk@83 7842ec3a-9562-4be5-9c5b-06ba18f2b668
Diffstat (limited to 'src')
-rw-r--r--src/DumpVar.c6
1 file changed, 4 insertions, 2 deletions
diff --git a/src/DumpVar.c b/src/DumpVar.c
index 08ce4bf..3284852 100644
--- a/src/DumpVar.c
+++ b/src/DumpVar.c
@@ -686,11 +686,12 @@ static void WriteData (const cGH *GH, const ioRequest *request,const char *name,
static void WriteDataCollective (const cGH *GH, const ioRequest *request,
const char *name, const void *data, hid_t file)
{
- int i, dim;
+ int i;
hid_t hdf5type, dataset, memspace, filespace, plist;
hssize_t *chunk_origin;
hsize_t *chunk_dims, *file_dims;
hsize_t buffersize;
+ const ioHDF5UtilGH *myGH;
DECLARE_CCTK_PARAMETERS
@@ -707,9 +708,9 @@ static void WriteDataCollective (const cGH *GH, const ioRequest *request,
file_dims = chunk_dims + request->hdim;
for (i = 0; i < request->hdim; i++)
{
+ file_dims [i] = request->hsize[request->hdim - 1 - i];
chunk_origin[i] = request->hoffset[request->hdim - 1 - i];
chunk_dims [i] = request->hsize_chunk[request->hdim - 1 - i];
- file_dims [i] = request->hsize_global[request->hdim - 1 - i];
}
/* create the memspace according to chunk dims */
@@ -722,6 +723,7 @@ static void WriteDataCollective (const cGH *GH, const ioRequest *request,
/* the I/O processor creates the dataset and adds the common attributes
when writing its own data, otherwise the dataset is reopened */
+ myGH = (const ioHDF5UtilGH *) CCTK_GHExtension (GH, "IOHDF5Util");
hdf5type = IOHDF5Util_DataType (myGH, request->hdatatype);
/* enable compression for chunked dataset if compression was requested */