about summary refs log tree commit diff
path: root/CarpetDev/CarpetIOF5/src/tensor_component.cc
diff options
context:
space:
mode:
Diffstat (limited to 'CarpetDev/CarpetIOF5/src/tensor_component.cc')
-rw-r--r-- CarpetDev/CarpetIOF5/src/tensor_component.cc 136
1 files changed, 102 insertions, 34 deletions
diff --git a/CarpetDev/CarpetIOF5/src/tensor_component.cc b/CarpetDev/CarpetIOF5/src/tensor_component.cc
index 5d4b281fc..6cd2b6486 100644
--- a/CarpetDev/CarpetIOF5/src/tensor_component.cc
+++ b/CarpetDev/CarpetIOF5/src/tensor_component.cc
@@ -1,7 +1,15 @@
#include <cassert>
#include <cstdlib>
+// force HDF5 1.8.x installations to use the new API
+#define H5Dcreate_vers 2
+
+#include <hdf5.h>
+
#include "cctk.h"
+#include "cctk_Parameters.h"
+
+#include "vect.hh"
#include "tensor_component.hh"
@@ -12,22 +20,65 @@ namespace CarpetIOF5 {
namespace F5 {
tensor_component_t::
- tensor_component_t (physical_quantity_t & physical_quantity,
+ tensor_component_t (data_region_t & data_region,
int const variable)
- : m_physical_quantity (physical_quantity),
+ : m_data_region (data_region),
m_variable (variable)
{
+ DECLARE_CCTK_PARAMETERS;
+
assert (variable >= 0 and variable < CCTK_NumVars());
char const * const name = CCTK_VarName (variable);
assert (name != 0);
m_name = string (name);
- m_hdf5_tensor_component
- = open_or_create_group (m_physical_quantity
- .get_hdf5_physical_quantity(),
- name);
- assert (m_hdf5_tensor_component >= 0);
+ int const vartype = CCTK_VarTypeI (variable);
+ assert (vartype >= 0);
+ hid_t const hdf5_datatype = hdf5_datatype_from_cactus_datatype (vartype);
+ assert (hdf5_datatype >= 0);
+
+ bbox<int, dim> const & region = m_data_region.get_region();
+
+ vect<hsize_t, dim> const dims
+ = (region.shape() / region.stride()).reverse();
+ m_dataspace = H5Screate_simple (dim, & dims [0], 0);
+ assert (m_dataspace >= 0);
+
+ m_properties = H5Pcreate (H5P_DATASET_CREATE);
+ assert (m_properties >= 0);
+ vect<int, dim> const user_chunk_size
+ (chunk_size_x, chunk_size_y, chunk_size_z);
+ bool const need_chunks
+ = any (user_chunk_size > 0) or compression_level > 0 or write_checksum;
+ if (need_chunks)
+ {
+ vect<hsize_t, dim> const chunk_size
+ = either (user_chunk_size > 0,
+ vect<hsize_t, dim> (user_chunk_size),
+ dims);
+ herr_t const herr = H5Pset_chunk (m_properties, dim, & chunk_size [0]);
+ assert (not herr);
+ }
+ if (compression_level > 0)
+ {
+ herr_t const herr = H5Pset_deflate (m_properties, compression_level);
+ assert (not herr);
+ }
+ if (write_checksum)
+ {
+ herr_t const herr = H5Pset_fletcher32 (m_properties);
+ assert (not herr);
+ }
+
+ m_dataset
+ = H5Dcreate (m_data_region.get_hdf5_data_region(), m_name.c_str(),
+ hdf5_datatype, m_dataspace,
+ H5P_DEFAULT, m_properties, H5P_DEFAULT);
+ assert (m_dataset >= 0);
+
+ write_or_check_attribute
+ (m_dataset, "iorigin", region.lower() / region.stride());
assert (invariant());
}
@@ -37,17 +88,25 @@ namespace CarpetIOF5 {
tensor_component_t::
~ tensor_component_t ()
{
- herr_t const herr = H5Gclose (m_hdf5_tensor_component);
+ herr_t herr;
+
+ herr = H5Dclose (m_dataset);
+ assert (not herr);
+
+ herr = H5Sclose (m_dataspace);
+ assert (not herr);
+
+ herr = H5Pclose (m_properties);
assert (not herr);
}
- physical_quantity_t & tensor_component_t::
- get_physical_quantity ()
+ data_region_t & tensor_component_t::
+ get_data_region ()
const
{
- return m_physical_quantity;
+ return m_data_region;
}
@@ -61,31 +120,38 @@ namespace CarpetIOF5 {
- hid_t tensor_component_t::
- get_hdf5_tensor_component ()
- const
- {
- return m_hdf5_tensor_component;
- }
-
-
-
+#warning "TODO: This assumes that the shape of data is the same as the shape of the region; this may not be so if not all of the data are written out"
void tensor_component_t::
- get_link_destination (string & filename,
- string & objectname)
+ write (void const * const data,
+ int const cactus_datatype)
const
{
- static bool initialised = false;
- static string l_filename;
- static string l_objectname;
- if (not initialised)
- {
- initialised = true;
- get_physical_quantity().get_link_destination (l_filename, l_objectname);
- l_objectname += string ("/") + m_name;
- }
- filename = l_filename;
- objectname = l_objectname;
+ hid_t const memory_hdf5_datatype
+ = hdf5_datatype_from_cactus_datatype (cactus_datatype);
+ assert (memory_hdf5_datatype >= 0);
+
+ bbox<int, dim> const & region = m_data_region.get_region();
+
+ vect<hsize_t, dim> const dims
+ = (region.shape() / region.stride()).reverse();
+ hid_t const memory_dataspace
+ = H5Screate_simple (dim, & dims [0], & dims [0]);
+ assert (memory_dataspace >= 0);
+
+ hid_t const transfer_properties = H5Pcreate (H5P_DATASET_XFER);
+ assert (transfer_properties >= 0);
+
+ herr_t herr;
+ herr
+ = H5Dwrite (m_dataset, memory_hdf5_datatype, memory_dataspace,
+ m_dataspace, transfer_properties, data);
+ assert (not herr);
+
+ herr = H5Pclose (transfer_properties);
+ assert (not herr);
+
+ herr = H5Sclose (memory_dataspace);
+ assert (not herr);
}
@@ -95,7 +161,9 @@ namespace CarpetIOF5 {
const
{
return (m_variable >= 0 and m_variable < CCTK_NumVars()
- and m_hdf5_tensor_component >= 0);
+ and m_properties >= 0
+ and m_dataset >= 0
+ and m_dataspace >= 0);
}
} // namespace F5