From fca0957f83894e482ea17615ab4013d5cdb1bf0f Mon Sep 17 00:00:00 2001 From: tradke Date: Tue, 24 Aug 2004 17:10:54 +0000 Subject: Added utility program to convert SDF datafiles into HDF5 so that they can be imported into Cactus (using the filereader). Still a prototype version only, therefore no documentation yet. git-svn-id: http://svn.cactuscode.org/arrangements/CactusPUGHIO/IOHDF5/trunk@205 4825ed28-b72c-4eae-9704-e50c059e567d --- src/make.configuration.defn | 4 + src/make.configuration.deps | 20 ++++ src/util/hdf5_convert_from_sdf.c | 220 +++++++++++++++++++++++++++++++++++++++ 3 files changed, 244 insertions(+) create mode 100644 src/util/hdf5_convert_from_sdf.c diff --git a/src/make.configuration.defn b/src/make.configuration.defn index c52aca4..b44abf3 100644 --- a/src/make.configuration.defn +++ b/src/make.configuration.defn @@ -7,3 +7,7 @@ ifeq ($(findstring CactusExternal/FlexIO,$(THORNS)),CactusExternal/FlexIO) ALL_UTILS += hdf5_convert_from_ieeeio FLEXIO_INC_DIRS = $(CCTK_HOME)$(DIRSEP)arrangements/CactusExternal/FlexIO/src endif + +ifeq ($(findstring CactusIO/IOSDF,$(THORNS)),CactusIO/IOSDF) +ALL_UTILS += hdf5_convert_from_sdf +endif diff --git a/src/make.configuration.deps b/src/make.configuration.deps index 3f8f3f8..f9d61ae 100644 --- a/src/make.configuration.deps +++ b/src/make.configuration.deps @@ -31,3 +31,23 @@ $(BUILD_DIR)$(DIRSEP)IOHDF5$(DIRSEP)hdf5_convert_from_ieeeio.o: $(PACKAGE_DIR)$( if [ ! 
-d $(BUILD_DIR)$(DIRSEP)IOHDF5 ]; then $(MKDIR) $(MKDIRFLAGS) $(BUILD_DIR)$(DIRSEP)IOHDF5 ; fi cd $(BUILD_DIR)$(DIRSEP)IOHDF5 $(CC) $< -DCCODE $(CFLAGS) -DANSI -I$(CONFIG) -I$(BINDINGS_DIR)$(DIRSEP)include -I$(FLESH_DIR)$(DIRSEP)include -I$(CCTK_HOME)$(DIRSEP)arrangements $(HDF5_INC_DIRS:%=-I%) $(FLEXIO_INC_DIRS:%=-I%) $(SYS_INC_DIRS:%=-I%) $(CCOMPILEONLY)$(OPTIONSEP)$@ + + +# need to extract SDF includes and libs from the bindings makefiles +# (those are not included when building utilities) +SDF_INC_DIRS := $(shell grep INC_DIRS $(CONFIG)/../bindings/Configuration/IOSDF/make.configuration.defn | cut -f2 -d'=') +SDF_LIB_DIRS := $(shell grep LIBDIRS $(CONFIG)/../bindings/Configuration/make.link | cut -f2 -d'=') +SDF_LIBS := $(shell grep LIBS $(CONFIG)/../bindings/Configuration/make.link | cut -f2 -d'=') + +# linking rule for hdf5_convert_from_sdf +$(UTIL_DIR)$(DIRSEP)hdf5_convert_from_sdf: $(BUILD_DIR)$(DIRSEP)IOHDF5$(DIRSEP)hdf5_convert_from_sdf.o + @echo Creating hdf5_convert_from_sdf in $(UTIL_DIR) from $< + if [ ! -d $(UTIL_DIR) ]; then $(MKDIR) $(MKDIRFLAGS) $(UTIL_DIR) ; fi + $(LD) $(CREATEEXE)$(OPTIONSEP)$@ $(DEBUG_LD) $(LDFLAGS) $(EXTRAFLAGS) $< -L$(CCTK_LIBDIR) $(SDF_LIB_DIRS:%=-L%) $(SDF_LIBS:%=-l%) $(HDF5_LIB_DIRS:%=-L%) $(HDF5_LIBS:%=-l%) -lm + +# compile rule for hdf5_convert_from_sdf +$(BUILD_DIR)$(DIRSEP)IOHDF5$(DIRSEP)hdf5_convert_from_sdf.o: $(PACKAGE_DIR)$(DIRSEP)CactusPUGHIO$(DIRSEP)IOHDF5$(DIRSEP)src$(DIRSEP)util$(DIRSEP)hdf5_convert_from_sdf.c + @echo Compiling $< + if [ ! 
-d $(BUILD_DIR)$(DIRSEP)IOHDF5 ]; then $(MKDIR) $(MKDIRFLAGS) $(BUILD_DIR)$(DIRSEP)IOHDF5 ; fi
	cd $(BUILD_DIR)$(DIRSEP)IOHDF5
	$(CC) $< -DCCODE $(CFLAGS) -DANSI -I$(CONFIG) -I$(BINDINGS_DIR)$(DIRSEP)include -I$(FLESH_DIR)$(DIRSEP)include -I$(SDF_INC_DIRS) $(HDF5_INC_DIRS:%=-I%) $(FLEXIO_INC_DIRS:%=-I%) $(SYS_INC_DIRS:%=-I%) $(CCOMPILEONLY)$(OPTIONSEP)$@

diff --git a/src/util/hdf5_convert_from_sdf.c b/src/util/hdf5_convert_from_sdf.c
new file mode 100644
index 0000000..ef0ca79
--- /dev/null
+++ b/src/util/hdf5_convert_from_sdf.c
@@ -0,0 +1,220 @@
 /*@@
   @file      hdf5_convert_from_sdf.c
   @date      Tue 24 August 2004
   @author    Thomas Radke
   @desc
              Utility program to convert SDF datafiles into IOHDF5
              datafiles.
   @enddesc
   @version   $Id$
 @@*/

#define MAXDIM      3
#define MAXNAMESIZE 100

#include <stdio.h>
#include <stdlib.h>

/* Cactus includes (defines CCTK_FILEVERSION) */
#include "cctk.h"

/* SDF includes
   NOTE(review): the header names inside '<...>' were stripped by an HTML
   converter; bbhutil.h / sdf_priv.h declare gft_open_sdf_file() and
   low_read_sdf_stream() -- verify against the installed SDF library */
#include <bbhutil.h>
#include <sdf_priv.h>

/* HDF5 include */
#include <hdf5.h>

#define GLOBAL_ATTRIBUTES_GROUP "Global Attributes"

/* the rcs ID and its dummy function to use it */
static const char *rcsid = "$Header$";
CCTK_FILEVERSION(CactusPUGHIO_IOHDF5_util_hdf5_convert_from_sdf_c)


/*****************************************************************************/
/*                            macro definitions                              */
/*****************************************************************************/
/* uncomment the following to get some debugging output */
/* #define IOHDF5_DEBUG 1 */

/* macro to do an HDF5 call, check its return code, and print a warning
   in case of an error */
#define CHECK_ERROR(hdf5_call)                                                \
          do                                                                  \
          {                                                                   \
            int _error_code = hdf5_call;                                      \
                                                                              \
                                                                              \
            if (_error_code < 0)                                              \
            {                                                                 \
              fprintf (stderr, "WARNING: line %d: HDF5 call '%s' returned "   \
                               "error code %d\n",                             \
                               __LINE__, #hdf5_call, _error_code);            \
              exit (-1);                                                      \
            }                                                                 \
          } while (0)


 /*@@
   @routine    main
   @date       Tue 24 August 2004
   @author     Thomas Radke
   @desc
               Main routine of the SDF-to-HDF5 converter
   @enddesc

   @var
argc + @vdesc number of command line arguments + @vtype int + @vio in + @endvar + @var argv + @vdesc command line arguments + @vtype char *[] + @vio in + @endvar + + @returntype int + @returndesc + 0 for success, negative return values indicate an error + @endreturndesc +@@*/ +int main (int argc, char **argv) +{ + int i, j; + int version, rank, dsize, csize, grouptype; + int *dims; + double timestep; + char *varname, *groupname, *coordname, *tag; + gft_sdf_file_data *infile; + double *coords, *data, *bbox; + hid_t outfile, dataset, dataspace, attr, attrDataspace; + hid_t group, hdf5String; + hsize_t hdf5Dims[MAXDIM]; + int iteration, timelevel, ntimelevels; + char hdf5DatasetName[2 * MAXNAMESIZE]; + + + if (argc <= 2) + { + fprintf (stderr, "Usage: %s \n", argv[0]); + fprintf (stderr, " eg. %s foo.sdf bar.h5\n", argv[0]); + return (0); + } + + infile = gft_open_sdf_file (argv[1]); + if (! infile) + { + printf ("Could not open SDF input file '%s'\n", argv[1]); + return (-1); + } + + outfile = H5Fcreate (argv[2], H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT); + if (outfile < 0) + { + fprintf (stderr, "Could not create HDF5 output file '%s'\n", argv[2]); + return (-1); + } + + printf ("\n ----------------------------\n" + " Cactus SDF-to-HDF5 Converter\n" + " ----------------------------\n"); + + /* add a dummy GLOBAL_ATTRIBUTES_GROUP so that the HDF5 file is recognized as + unchunked Cactus data */ + CHECK_ERROR (group = H5Gcreate (outfile, GLOBAL_ATTRIBUTES_GROUP, 0)); + CHECK_ERROR (attrDataspace = H5Screate (H5S_SCALAR)); + + CHECK_ERROR (attr = H5Acreate (group, "nprocs", H5T_NATIVE_INT, + attrDataspace, H5P_DEFAULT)); + i = 1; + CHECK_ERROR (H5Awrite (attr, H5T_NATIVE_INT, &i)); + CHECK_ERROR (H5Aclose (attr)); + + CHECK_ERROR (attr = H5Acreate (group, "ioproc_every", H5T_NATIVE_INT, + attrDataspace, H5P_DEFAULT)); + i = 1; + CHECK_ERROR (H5Awrite (attr, H5T_NATIVE_INT, &i)); + CHECK_ERROR (H5Aclose (attr)); + + CHECK_ERROR (attr = H5Acreate (group, "unchunked", 
H5T_NATIVE_INT, + attrDataspace, H5P_DEFAULT)); + i = 1; + CHECK_ERROR (H5Awrite (attr, H5T_NATIVE_INT, &i)); + CHECK_ERROR (H5Aclose (attr)); + + CHECK_ERROR (H5Gclose (group)); + + CHECK_ERROR (hdf5String = H5Tcopy (H5T_C_S1)); + + /*** FIXME: get iteration number and timelevel information from user ***/ + iteration = timelevel = 0; + ntimelevels = 1; + grouptype = CCTK_GF; + groupname = "grid::coordinates"; + + while (low_read_sdf_stream (1, infile->fp, ×tep, &version, &rank, &dsize, + &csize, &varname, &coordname, &tag, &dims, &bbox, + &coords, &data)) + { + /*** FIXME: give the dataset a valid variable name ***/ + varname = strdup ("grid::x"); + + printf ("Processing dataset '%s' (timestep %f)\n", varname, timestep); + + /* convert from int to hsize_t */ + for (j = 0; j < rank; j++) + { + hdf5Dims[j] = dims[j]; + } + CHECK_ERROR (dataspace = H5Screate_simple (rank, hdf5Dims, NULL)); + + sprintf (hdf5DatasetName, "/%s timelevel %d at iteration %d", + varname, timelevel, iteration); + CHECK_ERROR (dataset = H5Dcreate (outfile, hdf5DatasetName, + H5T_NATIVE_DOUBLE,dataspace,H5P_DEFAULT)); + CHECK_ERROR (H5Dwrite (dataset, H5T_NATIVE_DOUBLE, H5S_ALL, H5S_ALL, + H5P_DEFAULT, data)); + + /* attach necessary attributes */ + CHECK_ERROR (H5Tset_size (hdf5String, strlen (groupname))); + CHECK_ERROR (attr = H5Acreate (dataset, "groupname", hdf5String, + attrDataspace, H5P_DEFAULT)); + CHECK_ERROR (H5Awrite (attr, hdf5String, groupname)); + CHECK_ERROR (H5Aclose (attr)); + + CHECK_ERROR (attr = H5Acreate (dataset, "grouptype", H5T_NATIVE_INT, + attrDataspace, H5P_DEFAULT)); + CHECK_ERROR (H5Awrite (attr, H5T_NATIVE_INT, &grouptype)); + CHECK_ERROR (H5Aclose (attr)); + + CHECK_ERROR (attr = H5Acreate (dataset, "ntimelevels", H5T_NATIVE_INT, + attrDataspace, H5P_DEFAULT)); + CHECK_ERROR (H5Awrite (attr, H5T_NATIVE_INT, &ntimelevels)); + CHECK_ERROR (H5Aclose (attr)); + + CHECK_ERROR (H5Dclose (dataset)); + + CHECK_ERROR (H5Sclose (dataspace)); + + free (varname); + free 
(coordname); + free (tag); + free (dims); + free (bbox); + free (coords); + free (data); + + /*** FIXME: increment iteration number to distinguish datasets ***/ + iteration++; + } + + CHECK_ERROR (H5Sclose (attrDataspace)); + CHECK_ERROR (H5Tclose (hdf5String)); + CHECK_ERROR (H5Fclose (outfile)); + gsfd_close (infile); + + return (0); +} -- cgit v1.2.3