Commit 03c2d998 authored by David Moxey

Fix review comments: pass Array and string arguments by const reference, remove debug output and dead code, correct homogeneous data-size calculation in CheckFieldDefinition, and document the HDF5 field I/O format.

parent c86e0d6f
......@@ -13,4 +13,4 @@ AllowShortLoopsOnASingleLine: false
IndentCaseLabels: true
Standard: Cpp03
AccessModifierOffset: -4
BinPackParameters: false
BinPackParameters: true
......@@ -278,7 +278,7 @@ LIB_UTILITIES_EXPORT void Import(
std::vector<FieldDefinitionsSharedPtr> &fielddefs,
std::vector<std::vector<NekDouble> > &fielddata,
FieldMetaDataMap &fieldinfomap,
const Array<OneD, int> ElementIDs)
const Array<OneD, int> &ElementIDs)
{
#ifdef NEKTAR_USE_MPI
int size;
......@@ -300,7 +300,6 @@ LIB_UTILITIES_EXPORT void Import(
#endif
CommSharedPtr c = GetCommFactory().CreateInstance("Serial", 0, 0);
const std::string iofmt = FieldIO::GetFileType(infilename, c);
std::cout << "LOADING " << iofmt << std::endl;
FieldIOSharedPtr f = GetFieldIOFactory().CreateInstance(iofmt, c, false);
f->Import(infilename, fielddefs, fielddata, fieldinfomap, ElementIDs);
}
......@@ -509,8 +508,6 @@ int FieldIO::CheckFieldDefinition(const FieldDefinitionsSharedPtr &fielddefs)
{
return 0;
}
// ASSERTL0(fielddefs->m_elementIDs.size() > 0, "Fielddefs vector must
// contain at least one element of data .");
unsigned int numbasis = 0;
......@@ -563,12 +560,13 @@ int FieldIO::CheckFieldDefinition(const FieldDefinitionsSharedPtr &fielddefs)
int l = fielddefs->m_numModes[cnt++];
if (fielddefs->m_numHomogeneousDir == 1)
{
datasize += l * fielddefs->m_numModes[cnt++];
datasize += l * fielddefs->m_homogeneousZIDs.size();
cnt++;
}
else if (fielddefs->m_numHomogeneousDir == 2)
{
int m = fielddefs->m_numModes[cnt++];
datasize += l * m * fielddefs->m_numModes[cnt++];
datasize += l * fielddefs->m_homogeneousYIDs.size();
cnt += 2;
}
else
{
......@@ -656,91 +654,88 @@ int FieldIO::CheckFieldDefinition(const FieldDefinitionsSharedPtr &fielddefs)
case eSegment:
{
int l = fielddefs->m_numModes[cnt++];
if(fielddefs->m_numHomogeneousDir == 1)
if (fielddefs->m_numHomogeneousDir == 1)
{
datasize += l*fielddefs->m_homogeneousZIDs.size();
datasize += l * fielddefs->m_homogeneousZIDs.size();
cnt++;
}
else if(fielddefs->m_numHomogeneousDir == 2)
else if (fielddefs->m_numHomogeneousDir == 2)
{
datasize += l*fielddefs->m_homogeneousYIDs.size();
datasize += l * fielddefs->m_homogeneousYIDs.size();
cnt += 2;
}
else
{
datasize += l;
}
break;
}
break;
case eTriangle:
{
int l = fielddefs->m_numModes[cnt++];
int m = fielddefs->m_numModes[cnt++];
if(fielddefs->m_numHomogeneousDir == 1)
if (fielddefs->m_numHomogeneousDir == 1)
{
datasize += StdTriData::getNumberOfCoefficients(l,m)*
fielddefs->m_homogeneousZIDs.size();
datasize += StdTriData::getNumberOfCoefficients(l, m) *
fielddefs->m_homogeneousZIDs.size();
cnt++;
}
else
{
datasize += StdTriData::getNumberOfCoefficients(l,m);
datasize += StdTriData::getNumberOfCoefficients(l, m);
}
break;
}
break;
case eQuadrilateral:
{
int l = fielddefs->m_numModes[cnt++];
int m = fielddefs->m_numModes[cnt++];
if(fielddefs->m_numHomogeneousDir == 1)
if (fielddefs->m_numHomogeneousDir == 1)
{
datasize += l*m*fielddefs->m_homogeneousZIDs.size();
datasize += l * m * fielddefs->m_homogeneousZIDs.size();
cnt++;
}
else
{
datasize += l*m;
datasize += l * m;
}
break;
}
break;
case eTetrahedron:
{
int l = fielddefs->m_numModes[cnt++];
int m = fielddefs->m_numModes[cnt++];
int n = fielddefs->m_numModes[cnt++];
datasize += StdTetData::getNumberOfCoefficients(l, m, n);
break;
}
break;
case ePyramid:
{
int l = fielddefs->m_numModes[cnt++];
int m = fielddefs->m_numModes[cnt++];
int n = fielddefs->m_numModes[cnt++];
datasize += StdPyrData::getNumberOfCoefficients(l, m, n);
break;
}
break;
case ePrism:
{
int l = fielddefs->m_numModes[cnt++];
int m = fielddefs->m_numModes[cnt++];
int n = fielddefs->m_numModes[cnt++];
datasize += StdPrismData::getNumberOfCoefficients(l, m, n);
break;
}
break;
case eHexahedron:
{
int l = fielddefs->m_numModes[cnt++];
int m = fielddefs->m_numModes[cnt++];
int n = fielddefs->m_numModes[cnt++];
datasize += l * m * n;
break;
}
break;
default:
{
ASSERTL0(false, "Unsupported shape type.");
break;
}
}
}
}
......
......@@ -42,7 +42,6 @@
#include <LibUtilities/BasicUtils/ShapeType.hpp>
#include <LibUtilities/Foundations/Basis.h>
#include <LibUtilities/Foundations/Points.h>
#include <boost/assign/list_of.hpp>
#include <tinyxml.h>
#include <LibUtilities/BasicUtils/NekFactory.hpp>
......@@ -52,6 +51,9 @@ namespace Nektar
namespace LibUtilities
{
typedef std::map<std::string, std::string> FieldMetaDataMap;
static FieldMetaDataMap NullFieldMetaDataMap;
/**
* @brief Base class for writing hierarchical data (XML or HDF5).
*/
......@@ -68,15 +70,6 @@ protected:
};
typedef boost::shared_ptr<TagWriter> TagWriterSharedPtr;
static std::vector<NekDouble> NullNekDoubleVector;
static std::vector<LibUtilities::PointsType> NullPointsTypeVector;
static std::vector<unsigned int> NullUnsignedIntVector;
typedef std::map<std::string, std::string> FieldMetaDataMap;
static FieldMetaDataMap NullFieldMetaDataMap;
static std::vector<std::vector<NekDouble> > NullVectorNekDoubleVector =
boost::assign::list_of(NullNekDoubleVector);
/**
* @class A simple class encapsulating a data source. This allows us to pass
* around native file formats in virtual functions without resorting to using
......@@ -197,7 +190,7 @@ LIB_UTILITIES_EXPORT void Import(
std::vector<FieldDefinitionsSharedPtr> &fielddefs,
std::vector<std::vector<NekDouble> > &fielddata = NullVectorNekDoubleVector,
FieldMetaDataMap &fieldinfomap = NullFieldMetaDataMap,
const Array<OneD, int> ElementIDs = NullInt1DArray);
const Array<OneD, int> &ElementIDs = NullInt1DArray);
// Forward declare
class FieldIO;
......@@ -249,11 +242,11 @@ public:
std::vector<std::vector<NekDouble> > &fielddata =
NullVectorNekDoubleVector,
FieldMetaDataMap &fieldinfomap = NullFieldMetaDataMap,
const Array<OneD, int> ElementIDs = NullInt1DArray);
const Array<OneD, int> &ElementIDs = NullInt1DArray);
LIB_UTILITIES_EXPORT DataSourceSharedPtr ImportFieldMetaData(
std::string filename,
FieldMetaDataMap &fieldmetadatamap);
const std::string &filename,
FieldMetaDataMap &fieldmetadatamap);
LIB_UTILITIES_EXPORT static const std::string GetFileType(
const std::string &filename, CommSharedPtr comm);
......@@ -301,13 +294,13 @@ protected:
const std::string &infilename,
std::vector<FieldDefinitionsSharedPtr> &fielddefs,
std::vector<std::vector<NekDouble> >
&fielddata = NullVectorNekDoubleVector,
FieldMetaDataMap &fieldinfomap = NullFieldMetaDataMap,
const Array<OneD, int> ElementIDs = NullInt1DArray) = 0;
&fielddata = NullVectorNekDoubleVector,
FieldMetaDataMap &fieldinfomap = NullFieldMetaDataMap,
const Array<OneD, int> &ElementIDs = NullInt1DArray) = 0;
/// @copydoc FieldIO::ImportFieldMetaData
LIB_UTILITIES_EXPORT virtual DataSourceSharedPtr v_ImportFieldMetaData(
std::string filename, FieldMetaDataMap &fieldmetadatamap) = 0;
const std::string &filename, FieldMetaDataMap &fieldmetadatamap) = 0;
};
typedef boost::shared_ptr<FieldIO> FieldIOSharedPtr;
......@@ -346,7 +339,7 @@ inline void FieldIO::Import(const std::string &infilename,
std::vector<FieldDefinitionsSharedPtr> &fielddefs,
std::vector<std::vector<NekDouble> > &fielddata,
FieldMetaDataMap &fieldinfo,
const Array<OneD, int> ElementIDs)
const Array<OneD, int> &ElementIDs)
{
v_Import(infilename, fielddefs, fielddata, fieldinfo, ElementIDs);
}
......@@ -359,7 +352,7 @@ inline void FieldIO::Import(const std::string &infilename,
* filename.
*/
inline DataSourceSharedPtr FieldIO::ImportFieldMetaData(
std::string filename, FieldMetaDataMap &fieldmetadatamap)
const std::string &filename, FieldMetaDataMap &fieldmetadatamap)
{
return v_ImportFieldMetaData(filename, fieldmetadatamap);
}
......
......@@ -52,42 +52,124 @@ template <> inline DataTypeSharedPtr DataTypeTraits<BasisType>::GetType()
}
std::string FieldIOHdf5::className =
GetFieldIOFactory().RegisterCreatorFunction(
"Hdf5", FieldIOHdf5::create, "HDF5-based output of field data.");
/// Version of the Nektar++ HDF5 format, which is embedded into the main NEKTAR
/// group as an attribute.
const unsigned int FieldIOHdf5::FORMAT_VERSION = 1;
// The following definitions allow us to consistently refer to indexes pulled
// out of the various datasets.
/// A helper for FieldIOHdf5::v_Write and FieldIOHdf5::v_Import. Describes the
/// position of the number of elements in decomposition (i.e. field definition).
const unsigned int FieldIOHdf5::ELEM_DCMP_IDX = 0;
/// A helper for FieldIOHdf5::v_Write and FieldIOHdf5::v_Import. Describes the
/// position of the number of data points in decomposition (i.e. field
/// definition).
const unsigned int FieldIOHdf5::VAL_DCMP_IDX = 1;
/// A helper for FieldIOHdf5::v_Write and FieldIOHdf5::v_Import. Describes the
/// position of the number of elements multiplied by the dimension of the
/// element, giving number of modes when variable polynomial order is defined.
const unsigned int FieldIOHdf5::ORDER_DCMP_IDX = 2;
/// A helper for FieldIOHdf5::v_Write and FieldIOHdf5::v_Import. Describes the
/// position of the number of the number of y-planes for homogeneous
/// simulations.
const unsigned int FieldIOHdf5::HOMY_DCMP_IDX = 3;
/// A helper for FieldIOHdf5::v_Write and FieldIOHdf5::v_Import. Describes the
/// position of the number of the number of z-planes for homogeneous
/// simulations.
const unsigned int FieldIOHdf5::HOMZ_DCMP_IDX = 4;
/// A helper for FieldIOHdf5::v_Write and FieldIOHdf5::v_Import. Describes the
/// position of the number of the number of strips for homogeneous simulations.
const unsigned int FieldIOHdf5::HOMS_DCMP_IDX = 5;
/// The hash of the field definition information, which defines the name of the
/// attribute containing the field definition itself.
const unsigned int FieldIOHdf5::HASH_DCMP_IDX = 6;
/// A helper for FieldIOHdf5::v_Write. Describes the maximum number of items in
/// the decomposition per field definition.
const unsigned int FieldIOHdf5::MAX_DCMPS = FieldIOHdf5::HASH_DCMP_IDX + 1;
/// A helper for FieldIOHdf5::v_Write. Describes the position of the number of
/// elements in the cnt array.
const unsigned int FieldIOHdf5::ELEM_CNT_IDX = 0;
/// A helper for FieldIOHdf5::v_Write. Describes the position of the number of
/// data points in the cnt array.
const unsigned int FieldIOHdf5::VAL_CNT_IDX = 1;
/// A helper for FieldIOHdf5::v_Write. Describes the position of the number of
/// order points in the cnt array.
const unsigned int FieldIOHdf5::ORDER_CNT_IDX = 2;
/// A helper for FieldIOHdf5::v_Write. Describes the position of the number of
/// homogeneous y-planes in the cnt array.
const unsigned int FieldIOHdf5::HOMY_CNT_IDX = 3;
/// A helper for FieldIOHdf5::v_Write. Describes the position of the number of
/// homogeneous z-planes in the cnt array.
const unsigned int FieldIOHdf5::HOMZ_CNT_IDX = 4;
/// A helper for FieldIOHdf5::v_Write. Describes the position of the number of
/// homogeneous strips in the cnt array.
const unsigned int FieldIOHdf5::HOMS_CNT_IDX = 5;
/// A helper for FieldIOHdf5::v_Write. Describes the maximum number of items in
/// the cnt array per field definition.
const unsigned int FieldIOHdf5::MAX_CNTS = FieldIOHdf5::HOMS_CNT_IDX + 1;
/// A helper for FieldIOHdf5::v_Write. Describes the position of the element IDs
/// within the indexing set.
const unsigned int FieldIOHdf5::IDS_IDX_IDX = 0;
/// A helper for FieldIOHdf5::v_Write. Describes the position of the data size
/// within the indexing set.
const unsigned int FieldIOHdf5::DATA_IDX_IDX = 1;
/// A helper for FieldIOHdf5::v_Write. Describes the position of the element
/// order within the indexing set.
const unsigned int FieldIOHdf5::ORDER_IDX_IDX = 2;
/// A helper for FieldIOHdf5::v_Write. Describes the position of the number of
/// y-planes within the indexing set.
const unsigned int FieldIOHdf5::HOMY_IDX_IDX = 3;
/// A helper for FieldIOHdf5::v_Write. Describes the position of the number of
/// z-planes within the indexing set.
const unsigned int FieldIOHdf5::HOMZ_IDX_IDX = 4;
/// A helper for FieldIOHdf5::v_Write. Describes the position of the number of
/// homogeneous strips within the indexing set.
const unsigned int FieldIOHdf5::HOMS_IDX_IDX = 5;
/// A helper for FieldIOHdf5::v_Write. Describes the maximum number of items in
/// the indexing set.
const unsigned int FieldIOHdf5::MAX_IDXS = FieldIOHdf5::HOMS_IDX_IDX + 1;
/**
 * @brief Construct the FieldIO object for HDF5 output.
 *
 * All set-up is delegated to the base FieldIO constructor; this class adds
 * no additional state of its own at construction time.
 *
 * @param pComm            Communicator object.
 * @param sharedFilesystem True if this system has a shared filesystem.
 */
FieldIOHdf5::FieldIOHdf5(LibUtilities::CommSharedPtr pComm,
                         bool sharedFilesystem)
    : FieldIO(pComm, sharedFilesystem)
{
}
/**
* @brief Write a HDF5 file to @p outFile given the field definitions @p
* fielddefs, field data @p fielddata and metadata @p fieldmetadatamap.
*
* The writing strategy for HDF5 output is as follows:
*
* - Each rank determines the amount of data needed to be written into each
* dataset.
* - Each rank communicates its decomposition information to the root process.
* - The root processor initialises the output structure, writes the
* decomposition dataset and all the field definition information.
* - Other ranks may have field definitions that do not belong to the root
* process, in which case they open the file and append this (since
* attributes cannot be written in parallel).
* - Each of the other ranks writes their data contributions to the rest of
* the set.
*
* @param outFile Output filename.
* @param fielddefs Input field definitions.
* @param fielddata Input field data.
* @param fieldmetadatamap Field metadata.
*/
void FieldIOHdf5::v_Write(const std::string &outFile,
std::vector<FieldDefinitionsSharedPtr> &fielddefs,
std::vector<std::vector<NekDouble> > &fielddata,
......@@ -757,11 +839,23 @@ void FieldIOHdf5::v_Write(const std::string &outFile,
}
}
/**
* @brief Import a HDF5 format file.
*
 * @param infilename        Input filename
* @param fielddefs Field definitions of resulting field
* @param fielddata Field data of resulting field
* @param fieldinfomap Field metadata of resulting field
* @param ElementIDs If specified, contains the list of element IDs on
* this rank. The resulting field definitions will only
* contain data for the element IDs specified in this
* array.
*/
void FieldIOHdf5::v_Import(const std::string &infilename,
std::vector<FieldDefinitionsSharedPtr> &fielddefs,
std::vector<std::vector<NekDouble> > &fielddata,
FieldMetaDataMap &fieldinfomap,
const Array<OneD, int> ElementIDs)
const Array<OneD, int> &ElementIDs)
{
std::stringstream prfx;
int nRanks = m_comm->GetSize();
......@@ -1021,12 +1115,9 @@ void FieldIOHdf5::ImportFieldDef(
}
}
ASSERTL0(valid,
prfx.str() +
std::string(
ASSERTL0(valid, prfx.str() + std::string(
"unable to correctly parse the shape type: ")
.append(shapeString)
.c_str());
.append(shapeString).c_str());
}
else if (attrName == "BASIS")
{
......@@ -1160,9 +1251,9 @@ void FieldIOHdf5::ImportFieldDef(
* @param readPL Reading parameter list.
* @param data_dset Pointer to the `DATA` dataset.
* @param data_fspace Pointer to the `DATA` data space.
* @param data_i ...
* @param data_i Index in the `DATA` dataset to start reading from.
* @param decomps Information from the `DECOMPOSITION` dataset.
* @param decomp ...
* @param decomp Index of the decomposition.
* @param fielddef Field definitions for this file
* @param fielddata On return contains resulting field data.
*/
......@@ -1192,9 +1283,16 @@ void FieldIOHdf5::ImportFieldData(
"input data is not the same length as header information.");
}
/**
* @brief Import field metadata from @p filename and return the data source
* which wraps @p filename.
*
* @param filename Input filename.
* @param fieldmetadatamap Resulting field metadata from @p dataSource.
*/
DataSourceSharedPtr FieldIOHdf5::v_ImportFieldMetaData(
std::string filename,
FieldMetaDataMap &fieldmetadatamap)
const std::string &filename,
FieldMetaDataMap &fieldmetadatamap)
{
H5::PListSharedPtr parallelProps = H5::PList::Default();
DataSourceSharedPtr ans = H5DataSource::create(filename, parallelProps);
......@@ -1202,6 +1300,12 @@ DataSourceSharedPtr FieldIOHdf5::v_ImportFieldMetaData(
return ans;
}
/**
* @brief Import field metadata from @p dataSource.
*
* @param dataSource Input datasource, which should be a H5DataSource.
* @param fieldmetadatamap Resulting field metadata from @p dataSource.
*/
void FieldIOHdf5::ImportHDF5FieldMetaData(DataSourceSharedPtr dataSource,
FieldMetaDataMap &fieldmetadatamap)
{
......
......@@ -122,7 +122,8 @@ typedef boost::shared_ptr<H5TagWriter> H5TagWriterSharedPtr;
* This class implements a HDF5 reader/writer based on MPI/O that is designed to
* operate on a single file across all processors of a simulation. The
* definition follows vaguely similar lines to XML output but is stored somewhat
* differently. At a basic level metadata is organised as follows:
* differently to accommodate parallel reading and writing. At a basic level
* metadata is organised as follows:
*
* - Nektar++ data lies in the root `/NEKTAR` group.
* - The contents of a FieldDefinitions object is hashed to construct a unique
......@@ -130,30 +131,44 @@ typedef boost::shared_ptr<H5TagWriter> H5TagWriterSharedPtr;
* root group. We then use the H5TagWriter to assign the field definitions
* to each group.
* - In a similar fashion, we create a `Metadata` group to contain field
* metadata that is written
* metadata that is written.
*
* We then define two data sets to contain field data:
* We then define five data sets to contain field data:
*
* - The `DATA` dataset contains the double-precision modal coefficient data.
* - The `IDS` dataset contains the element IDs of the elements that are
* written out.
* - The `POLYORDERS` dataset is written if the field data contains variable
 * polynomial order, and contains the (possibly heterogeneous) mode orders in
* each direction for each of the elements.
* - The `HOMOGENEOUSZIDS` dataset contains the IDs of z-planes for
* homogeneous simulations, if the data are homogeneous.
* - The `HOMOGENEOUSYIDS` dataset contains the IDs of y-planes for
* homogeneous simulations, if the data are homogeneous.
* - The `HOMOGENEOUSSIDS` dataset contains the strip IDs for
* homogeneous simulations, if the data are homogeneous and use strips.
*
* The ordering is defined according to:
* The ordering is defined according to the `DECOMPOSITION` dataset. A
 * 'decomposition' in this class is essentially a single field definition with
* its accompanying data. Data are written into each dataset by the order of
* each decomposition. Each decomposition contains the following seven integers
* that define it per field definition per processor:
*
* - The `INDEXES` dataset, of size NPROCS * 2 contains the following
* information per processor:
* - Offset of the start of this block's element IDs
* (FieldIOHdf5::IDS_IDX_IDX)
* - Offset of the start of this block in the data array
* (FieldIOHdf5::DATA_IDX_IDX)
* - The `DECOMPOSITION` dataset contains the following three integers of
* information per field definition per processor:
* - Number of elements in this field definition
* (FieldIOHdf5::ELEM_DCMP_IDX)
* - Number of entries in the data array for this field definition
* (FieldIOHdf5::VAL_DCMP_IDX)
* - Hash of the field definition that these entries belong inside
* (FieldIOHdf5::HASH_DCMP_IDX).
* - Number of elements in this field definition (index #ELEM_DCMP_IDX).
* - Number of entries in the `DATA` array for this field definition
* (index #VAL_DCMP_IDX)
* - Number of entries in the `POLYORDERS` array for this field definition
* (index #ORDER_DCMP_IDX)
* - Number of entries in the `HOMOGENEOUSZIDS` array (index #HOMZ_DCMP_IDX).
* - Number of entries in the `HOMOGENEOUSYIDS` array (index #HOMY_DCMP_IDX).
* - Number of entries in the `HOMOGENEOUSSIDS` array (index #HOMS_DCMP_IDX).
* - Hash of the field definition, represented as a 32-bit integer, which
* describes the name of the attribute that contains the rest of the field
* definition information (e.g. field names, basis type, etc).
*
* The number of decompositions is therefore calculated as the field size
* divided by #MAX_DCMPS which allows us to calculate the offsets of the data
* for each field definition within the arrays.
*/
class FieldIOHdf5 : public FieldIO
{
......@@ -228,26 +243,28 @@ private:
std::vector<std::vector<NekDouble> > &fielddata,
const FieldMetaDataMap &fieldinfomap = NullFieldMetaDataMap);
LIB_UTILITIES_EXPORT virtual void v_Import(const std::string &infilename,
std::vector<FieldDefinitionsSharedPtr> &fielddefs,
std::vector<std::vector<NekDouble> > &fielddata =
NullVectorNekDoubleVector,
FieldMetaDataMap &fieldinfomap = NullFieldMetaDataMap,
const Array<OneD, int> ElementiDs = NullInt1DArray);
LIB_UTILITIES_EXPORT virtual void v_Import(
const std::string &infilename,
std::vector<FieldDefinitionsSharedPtr> &fielddefs,
std::vector<std::vector<NekDouble> > &fielddata =
NullVectorNekDoubleVector,
FieldMetaDataMap &fieldinfomap = NullFieldMetaDataMap,
const Array<OneD, int> &ElementIDs = NullInt1DArray);
LIB_UTILITIES_EXPORT virtual DataSourceSharedPtr v_ImportFieldMetaData(
std::string filename, FieldMetaDataMap &fieldmetadatamap);
const std::string &filename, FieldMetaDataMap &fieldmetadatamap);
LIB_UTILITIES_EXPORT void ImportHDF5FieldMetaData(
DataSourceSharedPtr dataSource, FieldMetaDataMap &fieldmetadatamap);
LIB_UTILITIES_EXPORT void ImportFieldDef(H5::PListSharedPtr readPL,
H5::GroupSharedPtr root,
std::vector<uint64_t> &decomps,
uint64_t decomp,
OffsetHelper offset,
std::string group,
FieldDefinitionsSharedPtr def);
LIB_UTILITIES_EXPORT void ImportFieldDef(
H5::PListSharedPtr readPL,
H5::GroupSharedPtr root,
std::vector<uint64_t> &decomps,
uint64_t decomp,
OffsetHelper offset,
std::string group,
FieldDefinitionsSharedPtr def);
LIB_UTILITIES_EXPORT void ImportFieldData(
H5::PListSharedPtr readPL,
......
......@@ -64,6 +64,26 @@ FieldIOXml::FieldIOXml(LibUtilities::CommSharedPtr pComm, bool sharedFilesystem)
{
}
/**
* @brief Write an XML file to @p outFile given the field definitions @p
* fielddefs, field data @p fielddata and metadata @p fieldmetadatamap.
*
* The writing strategy is as follows:
*
* - Use FieldIO::SetUpOutput to construct the directory to contain each
* partition.
* - The root processor writes an `Info.xml` file containing the field
* metadata and an index that describes which elements lie in which XML
* file.
* - Each processor then writes an XML file containing the field definitions
* for that processor and output data in base64-encoded zlib-compressed
* format.
*
* @param outFile Output filename.
* @param fielddefs Input field definitions.
* @param fielddata Input field data.
* @param fieldmetadatamap Field metadata.
*/
void FieldIOXml::v_Write(const std::string &outFile,
std::vector<FieldDefinitionsSharedPtr> &fielddefs,
std::vector<std::vector<NekDouble> > &fielddata,
......@@ -122,7 +142,9 @@ void FieldIOXml::v_Write(const std::string &outFile,
i++)
{
if (!first)
{
fieldsStringStream << ",";
}
fieldsStringStream << fielddefs[f]->m_fields[i];
first = false;
}
......@@ -163,7 +185,9 @@ void FieldIOXml::v_Write(const std::string &outFile,
i++)
{
if (!first)
{
basisStringStream << ",";
}
basisStringStream << BasisTypeMap[fielddefs[f]->m_basis[i]];
first = false;
}
......@@ -204,7 +228,9 @@ void FieldIOXml::v_Write(const std::string &outFile,
i++)
{
if (!first)
{
homoYIDsStringStream << ",";
}
homoYIDsStringStream
<< fielddefs[f]->m_homogeneousYIDs[i];
first = false;
......@@ -224,7 +250,9 @@ void FieldIOXml::v_Write(const std::string &outFile,
i++)
{
if (!first)
{
homoZIDsStringStream << ",";
}
homoZIDsStringStream
<< fielddefs[f]->m_homogeneousZIDs[i];
first = false;
......@@ -272,7 +300,9 @@ void FieldIOXml::v_Write(const std::string &outFile,
i++)
{
if (!first)
{
numModesStringStream << ",";
}
numModesStringStream << fielddefs[f]->m_numModes[i];
first = false;
}
......@@ -286,7 +316,9 @@ void FieldIOXml::v_Write(const std::string &outFile,
i++)
{
if (!first)
{
numModesStringStream << ",";
}
numModesStringStream << fielddefs[f]->m_numModes[i];
first = false;
}
......@@ -454,11 +486,23 @@ void FieldIOXml::ImportMultiFldFileIDs(
}
}
/**
* @brief Import an XML format file.
*