diff --git a/CMakeLists.txt b/CMakeLists.txt
index 2c73e2dbe1..963d0297b5 100644
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -127,8 +127,10 @@ else()
endif()
if(openPMD_HAVE_JSON)
add_library(openPMD::thirdparty::nlohmann_json INTERFACE IMPORTED)
- target_link_libraries(openPMD::thirdparty::nlohmann_json
- INTERFACE nlohmann_json::nlohmann_json)
+ #target_link_libraries(openPMD::thirdparty::nlohmann_json
+ # INTERFACE nlohmann_json::nlohmann_json)
+ get_target_property(lib_include_dirs nlohmann_json::nlohmann_json INTERFACE_INCLUDE_DIRECTORIES)
+ target_include_directories(openPMD::thirdparty::nlohmann_json SYSTEM INTERFACE ${lib_include_dirs})
endif()
# external library: HDF5 (optional)
@@ -304,7 +306,10 @@ set(IO_SOURCE
src/IO/AbstractIOHandlerHelper.cpp
src/IO/IOTask.cpp
src/IO/HDF5/HDF5IOHandler.cpp
- src/IO/HDF5/ParallelHDF5IOHandler.cpp)
+ src/IO/HDF5/ParallelHDF5IOHandler.cpp
+ src/IO/JSON/JSONIOHandler.cpp
+ src/IO/JSON/JSONIOHandlerImpl.cpp
+ src/IO/JSON/JSONFilePosition.cpp)
set(IO_ADIOS1_SEQUENTIAL_SOURCE
src/IO/AbstractIOHandler.cpp
src/IO/AbstractIOHandlerImpl.cpp
diff --git a/README.md b/README.md
index 2a1cfbdbb0..b5fcb0f562 100644
--- a/README.md
+++ b/README.md
@@ -100,7 +100,7 @@ Shipped internally in `share/openPMD/thirdParty/`:
* [NLohmann-JSON](https://github.com/nlohmann/json) 3.4.0+ ([MIT](https://github.com/nlohmann/json/blob/develop/LICENSE.MIT))
Optional I/O backends:
-* [JSON](https://en.wikipedia.org/wiki/JSON) (*not yet implemented*)
+* [JSON](https://en.wikipedia.org/wiki/JSON)
* [HDF5](https://support.hdfgroup.org/HDF5) 1.8.13+
* [ADIOS1](https://www.olcf.ornl.gov/center-projects/adios) 1.13.1+
* [ADIOS2](https://github.com/ornladios/ADIOS2) 2.1+ (*not yet implemented*)
@@ -175,7 +175,7 @@ CMake controls options with prefixed `-D`, e.g. `-DopenPMD_USE_MPI=OFF`:
| CMake Option | Values | Description |
|------------------------------|------------------|------------------------------------------------------------------------------|
| `openPMD_USE_MPI` | **AUTO**/ON/OFF | Enable MPI support |
-| `openPMD_USE_JSON` | **AUTO**/ON/OFF | Enable support for JSON 1 |
+| `openPMD_USE_JSON` | **AUTO**/ON/OFF | Enable support for JSON |
| `openPMD_USE_HDF5` | **AUTO**/ON/OFF | Enable support for HDF5 |
| `openPMD_USE_ADIOS1` | **AUTO**/ON/OFF | Enable support for ADIOS1 |
| `openPMD_USE_ADIOS2` | AUTO/ON/**OFF** | Enable support for ADIOS2 1 |
diff --git a/docs/source/backends/json.rst b/docs/source/backends/json.rst
new file mode 100644
index 0000000000..42b55844b7
--- /dev/null
+++ b/docs/source/backends/json.rst
@@ -0,0 +1,83 @@
+.. _backends-json:
+
+JSON Backend
+============
+
+openPMD supports writing to and reading from JSON files.
+For this, the installed copy of openPMD must have been built with support for the JSON backend.
+To build openPMD with support for JSON, use the CMake option ``-DopenPMD_USE_JSON=ON``.
+For further information, check out the :ref:`installation guide <install>`,
+:ref:`build dependencies <development-dependencies>` and the :ref:`build options <development-buildoptions>`.
+
+
+JSON File Format
+----------------
+A JSON file uses the file ending ``.json``. The JSON backend is chosen by creating
+a ``Series`` object with a filename that has this file ending.
+
+The top-level JSON object is a group representing the openPMD root group ``"/"``.
+Any **openPMD group** is represented in JSON as a JSON object with two reserved keys:
+
+ * ``attributes``: Attributes associated with the group. This key may be null or not be present
+ at all, thus indicating a group without attributes.
+ * ``platform_byte_widths`` (root group only): Byte widths specific to the writing platform.
+ Will be overwritten every time that a JSON value is stored to disk, hence this information
+ is only available about the last platform writing the JSON value.
+
+All datasets and subgroups contained in this group are represented as a further key of
+the group object. ``attributes`` and ``platform_byte_widths`` have
+hence the character of reserved keywords and cannot be used for group and dataset names
+when working with the JSON backend.
+Datasets and groups have the same namespace, meaning that there may not be a subgroup
+and a dataset with the same name contained in one group.
+
+Any **openPMD dataset** is a JSON object with three keys:
+
+ * ``attributes``: Attributes associated with the dataset. May be ``null`` or not present if no attributes are associated with the dataset.
+ * ``datatype``: A string describing the type of the stored data.
+ * ``data``: A nested array storing the actual data in row-major manner.
+ The data needs to be consistent with the fields ``datatype`` and ``extent``.
+ Checking whether this key points to an array can be (and is internally) used to distinguish groups from datasets.
+
+**Attributes** are stored as a JSON object with a key for each attribute.
+Every such attribute is itself a JSON object with two keys:
+
+ * ``datatype``: A string describing the type of the value.
+ * ``value``: The actual value of type ``datatype``.
+
+Restrictions
+------------
+For creation of JSON serializations (i.e. writing), the restrictions of the JSON backend are
+equivalent to those of the `JSON library by Niels Lohmann <https://github.com/nlohmann/json>`_
+used by the openPMD backend.
+
+Numerical values, integral as well as floating point, are supported up to a length of
+64 bits.
+Since JSON does not support special floating point values (i.e. NaN, Infinity, -Infinity),
+those values are rendered as ``null``.
+
+Instructing openPMD to write values of a datatype that is too wide for the JSON
+backend does *not* result in an error:
+ * If casting the value to the widest supported datatype of the same category (integer or floating point)
+ is possible without data loss, the cast is performed and the value is written.
+ As an example, on a platform with ``sizeof(double) == 8``, writing the value
+ ``static_cast<long double>(std::numeric_limits<double>::max())`` will work as expected
+ since it can be cast back to ``double``.
+ * Otherwise, a ``null`` value is written.
+
+Upon reading ``null`` when expecting a floating point number, a NaN value will be
+returned. Take notice that a NaN value returned from the deserialization process
+may have originally been +/-Infinity or beyond the supported value range.
+
+Upon reading ``null`` when expecting any other datatype, the JSON backend will
+propagate the exception thrown by Niels Lohmann's library.
+
+A parallel (i.e. MPI) implementation is *not* available.
+
+Example
+-------
+The example code in the :ref:`usage section ` will produce the following JSON serialization
+when picking the JSON backend:
+
+.. literalinclude:: json_example.json
+
diff --git a/docs/source/backends/json_example.json b/docs/source/backends/json_example.json
new file mode 100644
index 0000000000..2fad4fc451
--- /dev/null
+++ b/docs/source/backends/json_example.json
@@ -0,0 +1,142 @@
+{
+ "attributes": {
+ "basePath": {
+ "datatype": "STRING",
+ "value": "/data/%T/"
+ },
+ "iterationEncoding": {
+ "datatype": "STRING",
+ "value": "groupBased"
+ },
+ "iterationFormat": {
+ "datatype": "STRING",
+ "value": "/data/%T/"
+ },
+ "meshesPath": {
+ "datatype": "STRING",
+ "value": "meshes/"
+ },
+ "openPMD": {
+ "datatype": "STRING",
+ "value": "1.1.0"
+ },
+ "openPMDextension": {
+ "datatype": "UINT",
+ "value": 0
+ }
+ },
+ "data": {
+ "1": {
+ "attributes": {
+ "dt": {
+ "datatype": "DOUBLE",
+ "value": 1
+ },
+ "time": {
+ "datatype": "DOUBLE",
+ "value": 0
+ },
+ "timeUnitSI": {
+ "datatype": "DOUBLE",
+ "value": 1
+ }
+ },
+ "meshes": {
+ "rho": {
+ "attributes": {
+ "axisLabels": {
+ "datatype": "VEC_STRING",
+ "value": [
+ "x"
+ ]
+ },
+ "dataOrder": {
+ "datatype": "STRING",
+ "value": "C"
+ },
+ "geometry": {
+ "datatype": "STRING",
+ "value": "cartesian"
+ },
+ "gridGlobalOffset": {
+ "datatype": "VEC_DOUBLE",
+ "value": [
+ 0
+ ]
+ },
+ "gridSpacing": {
+ "datatype": "VEC_DOUBLE",
+ "value": [
+ 1
+ ]
+ },
+ "gridUnitSI": {
+ "datatype": "DOUBLE",
+ "value": 1
+ },
+ "position": {
+ "datatype": "VEC_DOUBLE",
+ "value": [
+ 0
+ ]
+ },
+ "timeOffset": {
+ "datatype": "FLOAT",
+ "value": 0
+ },
+ "unitDimension": {
+ "datatype": "ARR_DBL_7",
+ "value": [
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0
+ ]
+ },
+ "unitSI": {
+ "datatype": "DOUBLE",
+ "value": 1
+ }
+ },
+ "data": [
+ [
+ 0,
+ 1,
+ 2
+ ],
+ [
+ 3,
+ 4,
+ 5
+ ],
+ [
+ 6,
+ 7,
+ 8
+ ]
+ ],
+ "datatype": "DOUBLE"
+ }
+ }
+ }
+ },
+ "platform_byte_widths": {
+ "BOOL": 1,
+ "CHAR": 1,
+ "DOUBLE": 8,
+ "FLOAT": 4,
+ "INT": 4,
+ "LONG": 8,
+ "LONGLONG": 8,
+ "LONG_DOUBLE": 16,
+ "SHORT": 2,
+ "UCHAR": 1,
+ "UINT": 4,
+ "ULONG": 8,
+ "ULONGLONG": 8,
+ "USHORT": 2
+ }
+}
diff --git a/docs/source/dev/buildoptions.rst b/docs/source/dev/buildoptions.rst
index bcae829343..e016a38ef4 100644
--- a/docs/source/dev/buildoptions.rst
+++ b/docs/source/dev/buildoptions.rst
@@ -15,7 +15,7 @@ CMake controls options with prefixed ``-D``, e.g. ``-DopenPMD_USE_MPI=OFF``:
CMake Option Values Description
============================== =============== ========================================================================
``openPMD_USE_MPI`` **AUTO**/ON/OFF Enable MPI support
-``openPMD_USE_JSON`` **AUTO**/ON/OFF Enable support for JSON :sup:`1`
+``openPMD_USE_JSON`` **AUTO**/ON/OFF Enable support for JSON
``openPMD_USE_HDF5`` **AUTO**/ON/OFF Enable support for HDF5
``openPMD_USE_ADIOS1`` **AUTO**/ON/OFF Enable support for ADIOS1
``openPMD_USE_ADIOS2`` AUTO/ON/**OFF** Enable support for ADIOS2 :sup:`1`
diff --git a/docs/source/index.rst b/docs/source/index.rst
index 2c9f0a743a..6d8fecb00e 100644
--- a/docs/source/index.rst
+++ b/docs/source/index.rst
@@ -84,6 +84,17 @@ Development
dev/doxygen
dev/release
+********
+Backends
+********
+.. toctree::
+ :caption: BACKENDS
+ :maxdepth: 1
+ :hidden:
+
+ backends/json
+
+
*********
Utilities
*********
@@ -93,3 +104,4 @@ Utilities
:hidden:
utilities/benchmark.rst
+
diff --git a/docs/source/usage/firststeps.rst b/docs/source/usage/firststeps.rst
index be53d92d60..a799ca56ae 100644
--- a/docs/source/usage/firststeps.rst
+++ b/docs/source/usage/firststeps.rst
@@ -1,4 +1,4 @@
-.. usage-firststeps:
+.. _usage-firststeps:
First Steps
===========
diff --git a/docs/source/usage/parallel.rst b/docs/source/usage/parallel.rst
index 17c2c881bf..c98e362929 100644
--- a/docs/source/usage/parallel.rst
+++ b/docs/source/usage/parallel.rst
@@ -1,4 +1,4 @@
-.. usage-parallel:
+.. _usage-parallel:
Parallel API
============
diff --git a/docs/source/usage/serial.rst b/docs/source/usage/serial.rst
index fc92894d5d..fe77c2d446 100644
--- a/docs/source/usage/serial.rst
+++ b/docs/source/usage/serial.rst
@@ -1,4 +1,4 @@
-.. usage-serial:
+.. _usage-serial:
Serial API
==========
diff --git a/include/openPMD/Datatype.hpp b/include/openPMD/Datatype.hpp
index 2c38077271..0635f1c951 100644
--- a/include/openPMD/Datatype.hpp
+++ b/include/openPMD/Datatype.hpp
@@ -519,6 +519,11 @@ isSame( openPMD::Datatype const d, openPMD::Datatype const e )
return false;
}
+#if _MSC_VER && !__INTEL_COMPILER
+#define OPENPMD_TEMPLATE_OPERATOR operator
+#else
+#define OPENPMD_TEMPLATE_OPERATOR template operator
+#endif
/**
* Generalizes switching over an openPMD datatype.
*
@@ -536,12 +541,6 @@ isSame( openPMD::Datatype const d, openPMD::Datatype const e )
* the passed arguments and the template parameter type corresponding to the
* openPMD type.
*/
-
-#if _MSC_VER && !__INTEL_COMPILER
-#define OPENPMD_TEMPLATE_OPERATOR operator
-#else
-#define OPENPMD_TEMPLATE_OPERATOR template operator
-#endif
template<
typename ReturnType = void,
typename Action,
diff --git a/include/openPMD/IO/Format.hpp b/include/openPMD/IO/Format.hpp
index 4d48e315da..3b80a85327 100644
--- a/include/openPMD/IO/Format.hpp
+++ b/include/openPMD/IO/Format.hpp
@@ -30,6 +30,7 @@ enum class Format
HDF5,
ADIOS1,
ADIOS2,
+ JSON,
DUMMY
}; //Format
} // openPMD
diff --git a/include/openPMD/IO/JSON/JSONFilePosition.hpp b/include/openPMD/IO/JSON/JSONFilePosition.hpp
new file mode 100644
index 0000000000..206ec34a10
--- /dev/null
+++ b/include/openPMD/IO/JSON/JSONFilePosition.hpp
@@ -0,0 +1,49 @@
+/* Copyright 2017-2018 Franz Pöschel
+ *
+ * This file is part of openPMD-api.
+ *
+ * openPMD-api is free software: you can redistribute it and/or modify
+ * it under the terms of of either the GNU General Public License or
+ * the GNU Lesser General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * openPMD-api is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License and the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * and the GNU Lesser General Public License along with openPMD-api.
+ * If not, see <https://www.gnu.org/licenses/>.
+ */
+
+#pragma once
+
+
+#include "openPMD/IO/AbstractFilePosition.hpp"
+
+
+#if openPMD_HAVE_JSON
+#include <nlohmann/json.hpp>
+#endif
+
+namespace openPMD
+{
+
+
+ struct JSONFilePosition :
+ public AbstractFilePosition
+#if openPMD_HAVE_JSON
+ {
+ using json = nlohmann::json;
+ json::json_pointer id;
+
+ JSONFilePosition( json::json_pointer ptr = json::json_pointer( ) );
+ };
+#else
+ {};
+#endif
+
+} // openPMD
diff --git a/include/openPMD/IO/JSON/JSONIOHandler.hpp b/include/openPMD/IO/JSON/JSONIOHandler.hpp
new file mode 100644
index 0000000000..a6d0ba3ffd
--- /dev/null
+++ b/include/openPMD/IO/JSON/JSONIOHandler.hpp
@@ -0,0 +1,47 @@
+/* Copyright 2017-2018 Franz Pöschel
+ *
+ * This file is part of openPMD-api.
+ *
+ * openPMD-api is free software: you can redistribute it and/or modify
+ * it under the terms of of either the GNU General Public License or
+ * the GNU Lesser General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * openPMD-api is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License and the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * and the GNU Lesser General Public License along with openPMD-api.
+ * If not, see <https://www.gnu.org/licenses/>.
+ */
+
+#pragma once
+
+
+#include "openPMD/IO/AbstractIOHandler.hpp"
+#include "openPMD/IO/JSON/JSONIOHandlerImpl.hpp"
+
+
+namespace openPMD
+{
+ class JSONIOHandler :
+ public AbstractIOHandler
+ {
+ public:
+ JSONIOHandler(
+ std::string path,
+ AccessType at
+ );
+
+ virtual ~JSONIOHandler( );
+
+ std::future< void > flush( ) override;
+
+ private:
+ JSONIOHandlerImpl m_impl;
+ };
+} // openPMD
diff --git a/include/openPMD/IO/JSON/JSONIOHandlerImpl.hpp b/include/openPMD/IO/JSON/JSONIOHandlerImpl.hpp
new file mode 100644
index 0000000000..ef4e1a2154
--- /dev/null
+++ b/include/openPMD/IO/JSON/JSONIOHandlerImpl.hpp
@@ -0,0 +1,576 @@
+/* Copyright 2017-2018 Franz Pöschel
+ *
+ * This file is part of openPMD-api.
+ *
+ * openPMD-api is free software: you can redistribute it and/or modify
+ * it under the terms of of either the GNU General Public License or
+ * the GNU Lesser General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * openPMD-api is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License and the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * and the GNU Lesser General Public License along with openPMD-api.
+ * If not, see <https://www.gnu.org/licenses/>.
+ */
+
+#pragma once
+
+
+#include "openPMD/auxiliary/Filesystem.hpp"
+#include "openPMD/IO/AbstractIOHandler.hpp"
+#include "openPMD/IO/AbstractIOHandlerImpl.hpp"
+#include "openPMD/IO/AccessType.hpp"
+#include "openPMD/IO/JSON/JSONFilePosition.hpp"
+
+#include <fstream>
+#include <future>
+#include <memory>
+#include <string>
+#include <unordered_map>
+#include <unordered_set>
+
+
+#if openPMD_HAVE_JSON
+
+
+#include <nlohmann/json.hpp>
+
+
+#endif
+
+
+namespace openPMD
+{
+ // Wrapper around a shared pointer to:
+ // * a filename
+ // * and a boolean indicating whether the file still exists
+ // The wrapper adds no extra information, but some commodity functions.
+ // Invariant for JSONIOHandlerImpl:
+ // For any valid filename, there is at any time at most one
+ // such shared pointer (wrapper) in the HandlerImpl's data structures
+ // (counting by pointer equality)
+ // This means, that a file can be invalidated (i.e. deleted or overwritten)
+ // by simply searching for one instance of the file e.g. in m_files and
+ // invalidating this instance
+ // A new instance may hence only be created after making sure that there are
+ // no valid instances in the data structures.
+ struct File
+ {
+ explicit File( std::string s ) :
+ fileState { std::make_shared< FileState >( s ) }
+ {}
+
+
+ File( ) = default;
+
+
+ struct FileState
+ {
+ explicit FileState( std::string s ) :
+ name { std::move( s ) }
+ {}
+
+
+ std::string name;
+ bool valid = true;
+ };
+
+ std::shared_ptr< FileState > fileState;
+
+
+ void invalidate( )
+ {
+ fileState->valid = false;
+ }
+
+
+ bool valid( ) const
+ {
+ return fileState->valid;
+ }
+
+
+ File & operator=( std::string s )
+ {
+ if( fileState )
+ {
+ fileState->name = s;
+ }
+ else
+ {
+ fileState = std::make_shared< FileState >( s );
+ }
+ return *this;
+ }
+
+
+ bool operator==(
+ File const & f
+ ) const
+ {
+ return this->fileState == f.fileState;
+ }
+
+
+ std::string & operator*( ) const
+ {
+ return fileState->name;
+ }
+
+
+ std::string * operator->( ) const
+ {
+ return &fileState->name;
+ }
+
+
+ explicit operator bool( ) const
+ {
+ return fileState.operator bool( );
+ }
+ };
+}
+
+namespace std
+{
+ template< >
+ struct hash< openPMD::File >
+ {
+ typedef openPMD::File argument_type;
+ typedef std::size_t result_type;
+
+
+ result_type operator()( argument_type const & s ) const noexcept
+ {
+ return std::hash< shared_ptr< openPMD::File::FileState>> {}( s.fileState );
+ }
+ };
+}
+
+namespace openPMD
+{
+#if openPMD_HAVE_JSON
+
+ class JSONIOHandlerImpl :
+ public AbstractIOHandlerImpl
+ {
+ using json = nlohmann::json;
+
+ public:
+ explicit JSONIOHandlerImpl( AbstractIOHandler * );
+
+ ~JSONIOHandlerImpl( ) override;
+
+ void createFile(
+ Writable *,
+ Parameter< Operation::CREATE_FILE > const &
+ ) override;
+
+ void createPath(
+ Writable *,
+ Parameter< Operation::CREATE_PATH > const &
+ ) override;
+
+ void createDataset(
+ Writable *,
+ Parameter< Operation::CREATE_DATASET > const &
+ ) override;
+
+ void extendDataset(
+ Writable *,
+ Parameter< Operation::EXTEND_DATASET > const &
+ ) override;
+
+ void openFile(
+ Writable *,
+ Parameter< Operation::OPEN_FILE > const &
+ ) override;
+
+ void openPath(
+ Writable *,
+ Parameter< Operation::OPEN_PATH > const &
+ ) override;
+
+ void openDataset(
+ Writable *,
+ Parameter< Operation::OPEN_DATASET > &
+ ) override;
+
+ void deleteFile(
+ Writable *,
+ Parameter< Operation::DELETE_FILE > const &
+ ) override;
+
+ void deletePath(
+ Writable *,
+ Parameter< Operation::DELETE_PATH > const &
+ ) override;
+
+ void deleteDataset(
+ Writable *,
+ Parameter< Operation::DELETE_DATASET > const &
+ ) override;
+
+ void deleteAttribute(
+ Writable *,
+ Parameter< Operation::DELETE_ATT > const &
+ ) override;
+
+ void writeDataset(
+ Writable *,
+ Parameter< Operation::WRITE_DATASET > const &
+ ) override;
+
+ void writeAttribute(
+ Writable *,
+ Parameter< Operation::WRITE_ATT > const &
+ ) override;
+
+ void readDataset(
+ Writable *,
+ Parameter< Operation::READ_DATASET > &
+ ) override;
+
+ void readAttribute(
+ Writable *,
+ Parameter< Operation::READ_ATT > &
+ ) override;
+
+ void listPaths(
+ Writable *,
+ Parameter< Operation::LIST_PATHS > &
+ ) override;
+
+ void listDatasets(
+ Writable *,
+ Parameter< Operation::LIST_DATASETS > &
+ ) override;
+
+ void listAttributes(
+ Writable *,
+ Parameter< Operation::LIST_ATTS > &
+ ) override;
+
+ std::future< void > flush( ) override;
+
+
+ private:
+
+ using FILEHANDLE = std::fstream;
+
+ // map each Writable to its associated file
+ // contains only the filename, without the OS path
+ std::unordered_map<
+ Writable *,
+ File
+ > m_files;
+
+ std::unordered_map<
+ File,
+ std::shared_ptr< nlohmann::json >> m_jsonVals;
+
+ // files that have logically, but not physically been written to
+ std::unordered_set< File > m_dirty;
+
+
+ // HELPER FUNCTIONS
+
+
+ // will use the IOHandler to retrieve the correct directory
+ // shared pointer to circumvent the fact that c++ pre 17 does
+ // not enforce (only allow) copy elision in return statements
+ std::shared_ptr< FILEHANDLE > getFilehandle(
+ File,
+ AccessType accessType
+ ); //, AccessType accessType=this->m_handler->accessType);
+
+ // full operating system path of the given file
+ std::string fullPath( File );
+
+ std::string fullPath( std::string );
+
+ // from a path specification /a/b/c, remove the last
+ // "folder" (i.e. modify the string to equal /a/b)
+ static void parentDir( std::string & );
+
+ // Fileposition is assumed to have already been set,
+ // get it in string form
+ static std::string filepositionOf( Writable * w );
+
+ // Execute visitor on each pair of positions in the json value
+ // and the flattened multidimensional array.
+ // Used for writing from the data to JSON and for reading back into
+ // the array from JSON
+ template<
+ typename T,
+ typename Visitor
+ >
+ static void syncMultidimensionalJson(
+ nlohmann::json & j,
+ Offset const & offset,
+ Extent const & extent,
+ Extent const & multiplicator,
+ Visitor visitor,
+ T * data,
+ size_t currentdim = 0
+ );
+
+ // multiplicators: an array [m_0,...,m_n] s.t.
+ // data[i_0]...[i_n] = data[m_0*i_0+...+m_n*i_n]
+ // (m_n = 1)
+ // essentially: m_i = \prod_{j=0}^{i-1} extent_j
+ static Extent getMultiplicators( Extent const & extent );
+
+ static nlohmann::json initializeNDArray( Extent const & extent );
+
+ static Extent getExtent( nlohmann::json & j );
+
+
+ // remove single '/' in the beginning and end of a string
+ static std::string removeSlashes( std::string );
+
+ template< typename KeyT >
+ static bool hasKey(
+ nlohmann::json &,
+ KeyT && key
+ );
+
+ // make sure that the given path exists in proper form in
+ // the passed json value
+ static void ensurePath(
+ nlohmann::json * json,
+ std::string path
+ );
+
+ // In order not to insert the same file name into the data structures
+ // with a new pointer (e.g. when reopening), search for a possibly
+ // existing old pointer. Construct a new pointer only upon failure.
+ // The bool is true iff the pointer has been newly-created.
+ // The iterator is an iterator for m_files
+ std::tuple<
+ File,
+ std::unordered_map<
+ Writable *,
+ File
+ >::iterator,
+ bool
+ > getPossiblyExisting(
+ std::string file
+ );
+
+ // get the json value representing the whole file, possibly reading
+ // from disk
+ std::shared_ptr< nlohmann::json > obtainJsonContents( File );
+
+ // get the json value at the writable's fileposition
+ nlohmann::json & obtainJsonContents( Writable * writable );
+
+ // write to disk the json contents associated with the file
+ // remove from m_dirty if unsetDirty == true
+ void putJsonContents(
+ File,
+ bool unsetDirty = true
+ );
+
+ // figure out the file position of the writable
+ // (preferring the parent's file position) and extend it
+ // by extend. return the modified file position.
+ std::shared_ptr< JSONFilePosition > setAndGetFilePosition(
+ Writable *,
+ std::string extend
+ );
+
+ // figure out the file position of the writable
+ // (preferring the parent's file position)
+ // only modify the writable's fileposition when specified
+ std::shared_ptr< JSONFilePosition > setAndGetFilePosition(
+ Writable *,
+ bool write = true
+ );
+
+ // get the writable's containing file
+ // if the parent is associated with another file,
+ // associate the writable with that file and return it
+ File refreshFileFromParent( Writable * writable );
+
+ void associateWithFile(
+ Writable * writable,
+ File
+ );
+
+ // need to check the name too in order to exclude "attributes" key
+ static bool isGroup( nlohmann::json::const_iterator it );
+
+ static bool isDataset( nlohmann::json const & j );
+
+
+ // check whether the json reference contains a valid dataset
+ template< typename Param >
+ void verifyDataset(
+ Param const & parameters,
+ nlohmann::json &
+ );
+
+ static nlohmann::json platformSpecifics( );
+
+ struct DatasetWriter
+ {
+ template< typename T >
+ void operator()(
+ nlohmann::json & json,
+ const Parameter< Operation::WRITE_DATASET > & parameters
+ );
+
+ template< int n >
+ void operator()(
+ nlohmann::json & json,
+ const Parameter< Operation::WRITE_DATASET > & parameters
+ );
+
+ };
+
+ struct DatasetReader
+ {
+ template< typename T >
+ void operator()(
+ nlohmann::json & json,
+ Parameter< Operation::READ_DATASET > & parameters
+ );
+
+ template< int n >
+ void operator()(
+ nlohmann::json & json,
+ Parameter< Operation::READ_DATASET > & parameters
+ );
+ };
+
+ struct AttributeWriter
+ {
+ template< typename T >
+ void operator()(
+ nlohmann::json &,
+ Attribute::resource const &
+ );
+
+ template< int n >
+ void operator()(
+ nlohmann::json &,
+ Attribute::resource const &
+ );
+
+ };
+
+ struct AttributeReader
+ {
+ template< typename T >
+ void operator()(
+ nlohmann::json &,
+ Parameter< Operation::READ_ATT > &
+ );
+
+ template< int n >
+ void operator()(
+ nlohmann::json &,
+ Parameter< Operation::READ_ATT > &
+ );
+
+ };
+
+ template< typename T >
+ struct CppToJSON
+ {
+ nlohmann::json operator()( T const & );
+ };
+
+ template< typename T >
+ struct CppToJSON< std::vector< T>>
+ {
+ nlohmann::json operator()( std::vector< T > const & );
+ };
+
+ template< typename T, int n >
+ struct CppToJSON<
+ std::array<
+ T,
+ n>>
+ {
+ nlohmann::json operator()(
+ std::array<
+ T,
+ n
+ > const &
+ );
+ };
+
+ template<
+ typename T,
+ typename Enable = T
+ >
+ struct JsonToCpp
+ {
+ T operator()( nlohmann::json const & );
+ };
+
+ template< typename T >
+ struct JsonToCpp< std::vector< T > >
+ {
+ std::vector< T > operator()( nlohmann::json const & );
+ };
+
+ template< typename T, int n >
+ struct JsonToCpp<
+ std::array<
+ T,
+ n
+ >
+ >
+ {
+ std::array<
+ T,
+ n
+ > operator()( nlohmann::json const & );
+ };
+
+ template< typename T >
+ struct JsonToCpp<
+ T,
+ typename std::enable_if<
+ std::is_floating_point<
+ T
+ >::value
+ >::type
+ >
+ {
+ T operator()( nlohmann::json const & );
+ };
+ };
+
+#else
+
+ class JSONIOHandlerImpl
+ {
+ public:
+ JSONIOHandlerImpl( openPMD::AbstractIOHandler * )
+ {};
+
+
+ ~JSONIOHandlerImpl( )
+ {};
+
+
+ std::future< void > flush( )
+ {
+ return std::future< void >( );
+ }
+ };
+
+#endif
+
+
+} // openPMD
diff --git a/include/openPMD/auxiliary/StringManip.hpp b/include/openPMD/auxiliary/StringManip.hpp
index 78c7db6ac8..58f8cbf919 100644
--- a/include/openPMD/auxiliary/StringManip.hpp
+++ b/include/openPMD/auxiliary/StringManip.hpp
@@ -25,6 +25,7 @@
 #include <algorithm>
 #include <cctype>
 #include <string>
+#include <cassert>
namespace openPMD
@@ -109,11 +110,23 @@ replace_all(std::string s,
std::string const& target,
std::string const& replacement)
{
- std::string::size_type pos;
- while( (pos = s.find(target)) != std::string::npos )
- s.replace(pos, target.size(), replacement);
+ std::string::size_type pos = 0;
+ auto tsize = target.size();
+ assert(tsize > 0);
+ auto rsize = replacement.size();
+ while (true)
+ {
+ pos = s.find(target, pos);
+ if (pos == std::string::npos)
+ break;
+ s.replace(pos, tsize, replacement);
+ // Allow replacing recursively, but only if
+ // the next replaced substring overlaps with
+ // some parts of the original word.
+ // This avoids loops.
+ pos += rsize - std::min(tsize - 1, rsize);
+ }
s.shrink_to_fit();
-
return s;
}
diff --git a/include/openPMD/backend/Writable.hpp b/include/openPMD/backend/Writable.hpp
index af905d31d8..9c1b2627c9 100644
--- a/include/openPMD/backend/Writable.hpp
+++ b/include/openPMD/backend/Writable.hpp
@@ -69,6 +69,7 @@ class Writable
friend class ADIOS2IOHandlerImpl;
friend class HDF5IOHandlerImpl;
friend class ParallelHDF5IOHandlerImpl;
+ friend class JSONIOHandlerImpl;
friend struct test::TestHelper;
friend std::string concrete_h5_file_position(Writable*);
friend std::string concrete_bp1_file_position(Writable*);
diff --git a/src/IO/AbstractIOHandlerHelper.cpp b/src/IO/AbstractIOHandlerHelper.cpp
index 926bafd448..ab358c2bfa 100644
--- a/src/IO/AbstractIOHandlerHelper.cpp
+++ b/src/IO/AbstractIOHandlerHelper.cpp
@@ -24,6 +24,7 @@
#include "openPMD/IO/ADIOS/ParallelADIOS1IOHandler.hpp"
#include "openPMD/IO/HDF5/HDF5IOHandler.hpp"
#include "openPMD/IO/HDF5/ParallelHDF5IOHandler.hpp"
+#include "openPMD/IO/JSON/JSONIOHandler.hpp"
namespace openPMD
@@ -65,6 +66,8 @@ namespace openPMD
return std::make_shared< ADIOS1IOHandler >(path, accessType);
case Format::ADIOS2:
throw std::runtime_error("ADIOS2 backend not yet implemented");
+ case Format::JSON:
+ return std::make_shared< JSONIOHandler >(path, accessType);
default:
return std::make_shared< DummyIOHandler >(path, accessType);
}
diff --git a/src/IO/JSON/JSONFilePosition.cpp b/src/IO/JSON/JSONFilePosition.cpp
new file mode 100644
index 0000000000..f5019fd260
--- /dev/null
+++ b/src/IO/JSON/JSONFilePosition.cpp
@@ -0,0 +1,11 @@
+#include "openPMD/IO/JSON/JSONFilePosition.hpp"
+
+
+namespace openPMD {
+
+#if openPMD_HAVE_JSON
+ JSONFilePosition::JSONFilePosition( json::json_pointer ptr):
+ id( ptr )
+ {}
+#endif
+}
diff --git a/src/IO/JSON/JSONIOHandler.cpp b/src/IO/JSON/JSONIOHandler.cpp
new file mode 100644
index 0000000000..dde4b2df3b
--- /dev/null
+++ b/src/IO/JSON/JSONIOHandler.cpp
@@ -0,0 +1,47 @@
+/* Copyright 2017-2018 Franz Pöschel
+ *
+ * This file is part of openPMD-api.
+ *
+ * openPMD-api is free software: you can redistribute it and/or modify
+ * it under the terms of of either the GNU General Public License or
+ * the GNU Lesser General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * openPMD-api is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License and the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * and the GNU Lesser General Public License along with openPMD-api.
+ * If not, see <https://www.gnu.org/licenses/>.
+ */
+
+#include "openPMD/IO/JSON/JSONIOHandler.hpp"
+
+
+namespace openPMD
+{
+ JSONIOHandler::~JSONIOHandler( )
+ {}
+
+
+ JSONIOHandler::JSONIOHandler(
+ std::string path,
+ AccessType at
+ ) :
+ AbstractIOHandler {
+ path,
+ at
+ },
+ m_impl { JSONIOHandlerImpl { this } }
+ {}
+
+
+ std::future< void > JSONIOHandler::flush( )
+ {
+ return m_impl.flush( );
+ }
+} // openPMD
diff --git a/src/IO/JSON/JSONIOHandlerImpl.cpp b/src/IO/JSON/JSONIOHandlerImpl.cpp
new file mode 100644
index 0000000000..cb26694d26
--- /dev/null
+++ b/src/IO/JSON/JSONIOHandlerImpl.cpp
@@ -0,0 +1,1562 @@
+/* Copyright 2017-2018 Franz Pöschel
+ *
+ * This file is part of openPMD-api.
+ *
+ * openPMD-api is free software: you can redistribute it and/or modify
+ * it under the terms of of either the GNU General Public License or
+ * the GNU Lesser General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * openPMD-api is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License and the GNU Lesser General Public License
+ * for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * and the GNU Lesser General Public License along with openPMD-api.
+ * If not, see <http://www.gnu.org/licenses/>.
+ */
+
+#include "openPMD/auxiliary/Filesystem.hpp"
+#include "openPMD/auxiliary/Memory.hpp"
+#include "openPMD/auxiliary/StringManip.hpp"
+#include "openPMD/backend/Writable.hpp"
+#include "openPMD/Datatype.hpp"
+#include "openPMD/IO/JSON/JSONIOHandlerImpl.hpp"
+
+
+namespace openPMD
+{
+#if openPMD_USE_VERIFY
+# define VERIFY( CONDITION, TEXT ) { if(!(CONDITION)) throw std::runtime_error((TEXT)); }
+#else
+# define VERIFY( CONDITION, TEXT ) do{ (void)sizeof(CONDITION); } while( 0 );
+#endif
+
+#define VERIFY_ALWAYS( CONDITION, TEXT ) { if(!(CONDITION)) throw std::runtime_error((TEXT)); }
+
+#if openPMD_HAVE_JSON
+
+
+ JSONIOHandlerImpl::JSONIOHandlerImpl( AbstractIOHandler * handler ) :
+ AbstractIOHandlerImpl( handler )
+ {}
+
+
+ JSONIOHandlerImpl::~JSONIOHandlerImpl( )
+ {
+ flush( );
+ }
+
+
+ std::future< void > JSONIOHandlerImpl::flush( )
+ {
+
+ AbstractIOHandlerImpl::flush( );
+ for( auto const & file: m_dirty )
+ {
+ putJsonContents(
+ file,
+ false
+ );
+ }
+ m_dirty.clear( );
+ return std::future< void >( );
+ }
+
+
+ void JSONIOHandlerImpl::createFile(
+ Writable * writable,
+ Parameter< Operation::CREATE_FILE > const & parameters
+ )
+ {
+ VERIFY_ALWAYS( m_handler->accessType != AccessType::READ_ONLY,
+ "Creating a file in read-only mode is not possible." );
+
+ if( !writable->written )
+ {
+ std::string name = parameters.name;
+ if( !auxiliary::ends_with(
+ name,
+ ".json"
+ ) )
+ {
+ name += ".json";
+ }
+
+ auto res_pair = getPossiblyExisting( name );
+ File shared_name = File( name );
+ VERIFY_ALWAYS( !( m_handler->accessType == AccessType::READ_WRITE &&
+ ( !std::get< 2 >( res_pair ) ||
+ auxiliary::file_exists( fullPath( std::get< 0 >( res_pair ) ) ) ) ),
+ "Can only overwrite existing file in CREATE mode." );
+
+ if( !std::get< 2 >( res_pair ) )
+ {
+ auto file = std::get< 0 >( res_pair );
+ m_dirty.erase( file );
+ m_jsonVals.erase( file );
+ file.invalidate( );
+ }
+
+ std::string const dir( m_handler->directory );
+ if( !auxiliary::directory_exists( dir ) )
+ {
+ auto success = auxiliary::create_directories( dir );
+ VERIFY( success,
+ "Could not create directory." );
+ }
+
+ associateWithFile(
+ writable,
+ shared_name
+ );
+ this->m_dirty
+ .emplace( shared_name );
+ // make sure to overwrite!
+ this->m_jsonVals[shared_name] =
+ std::make_shared< nlohmann::json >( );
+
+
+ writable->written = true;
+ writable->abstractFilePosition =
+ std::make_shared< JSONFilePosition >( );
+ }
+ }
+
+
+ void JSONIOHandlerImpl::createPath(
+ Writable * writable,
+ Parameter< Operation::CREATE_PATH > const & parameter
+ )
+ {
+ std::string path = parameter.path;
+ /* Sanitize:
+ * The JSON API does not like to have slashes in the end.
+ */
+ if( auxiliary::ends_with(
+ path,
+ "/"
+ ) )
+ {
+ path = auxiliary::replace_last(
+ path,
+ "/",
+ ""
+ );
+ }
+
+ auto file = refreshFileFromParent( writable );
+
+ auto * jsonVal = &*obtainJsonContents( file );
+ if( !auxiliary::starts_with(
+ path,
+ "/"
+ ) )
+ { // path is relative
+ auto filepos = setAndGetFilePosition(
+ writable,
+ false
+ );
+
+ jsonVal = &( *jsonVal )[filepos->id];
+ ensurePath(
+ jsonVal,
+ path
+ );
+ path =
+ filepos->id
+ .to_string( ) + "/" + path;
+ }
+ else
+ {
+
+ ensurePath(
+ jsonVal,
+ path
+ );
+ }
+
+ m_dirty.emplace( file );
+ writable->written = true;
+ writable->abstractFilePosition =
+ std::make_shared< JSONFilePosition >( nlohmann::json::json_pointer( path ) );
+ }
+
+
+ void JSONIOHandlerImpl::createDataset(
+ Writable * writable,
+ Parameter< Operation::CREATE_DATASET > const & parameter
+ )
+ {
+ if( m_handler->accessType == AccessType::READ_ONLY )
+ {
+ throw std::runtime_error( "Creating a dataset in a file opened as read only is not possible." );
+ }
+ if( !writable->written )
+ {
+ /* Sanitize name */
+ std::string name = removeSlashes( parameter.name );
+
+ auto file = refreshFileFromParent( writable );
+ setAndGetFilePosition( writable );
+ auto & jsonVal = obtainJsonContents( writable );
+ // be sure to have a JSON object, not a list
+ if( jsonVal.empty( ) )
+ {
+ jsonVal = nlohmann::json::object( );
+ }
+ setAndGetFilePosition(
+ writable,
+ name
+ );
+ auto & dset = jsonVal[name];
+ dset["datatype"] = datatypeToString( parameter.dtype );
+ dset["data"] = initializeNDArray( parameter.extent );
+ writable->written = true;
+ m_dirty.emplace( file );
+ }
+ }
+
+
+ void JSONIOHandlerImpl::extendDataset(
+ Writable * writable,
+ Parameter< Operation::EXTEND_DATASET > const & parameters
+ )
+ {
+ VERIFY_ALWAYS( m_handler->accessType != AccessType::READ_ONLY,
+ "Cannot extend a dataset in read-only mode." )
+ refreshFileFromParent( writable );
+ setAndGetFilePosition( writable );
+ auto name = removeSlashes( parameters.name );
+ auto & j = obtainJsonContents( writable )[name];
+
+ try
+ {
+ auto datasetExtent = getExtent( j["data"] );
+ VERIFY_ALWAYS( datasetExtent.size( ) ==
+ parameters.extent
+ .size( ),
+ "Cannot change dimensionality of a dataset" )
+ for( size_t currentdim = 0;
+ currentdim <
+ parameters.extent
+ .size( );
+ currentdim++ )
+ {
+ VERIFY_ALWAYS( datasetExtent[currentdim] <=
+ parameters.extent[currentdim],
+ "Cannot shrink the extent of a dataset" )
+ }
+ } catch( json::basic_json::type_error & e )
+ {
+ throw std::runtime_error( "The specified location contains no valid dataset" );
+ }
+ j["data"] = initializeNDArray( parameters.extent );
+ writable->written = true;
+
+ }
+
+
+ void JSONIOHandlerImpl::openFile(
+ Writable * writable,
+ Parameter< Operation::OPEN_FILE > const & parameter
+ )
+ {
+ if( !auxiliary::directory_exists( m_handler->directory ) )
+ {
+ throw no_such_file_error(
+ "Supplied directory is not valid: " + m_handler->directory
+ );
+ }
+
+ std::string name = parameter.name;
+ if( !auxiliary::ends_with(
+ name,
+ ".json"
+ ) )
+ {
+ name += ".json";
+ }
+
+ auto file = std::get< 0 >( getPossiblyExisting( name ) );
+
+ associateWithFile(
+ writable,
+ file
+ );
+
+ writable->written = true;
+ writable->abstractFilePosition =
+ std::make_shared< JSONFilePosition >( );
+ }
+
+
+ void JSONIOHandlerImpl::openPath(
+ Writable * writable,
+ Parameter< Operation::OPEN_PATH > const & parameters
+ )
+ {
+ auto file = refreshFileFromParent( writable );
+
+ nlohmann::json * j = &obtainJsonContents( writable->parent );
+ auto path = removeSlashes( parameters.path );
+ path =
+ path.empty( )
+ ? filepositionOf( writable->parent )
+ : filepositionOf( writable->parent ) + "/" + path;
+
+ if( writable->abstractFilePosition )
+ {
+ *setAndGetFilePosition(
+ writable,
+ false
+ ) = JSONFilePosition( json::json_pointer( path ) );
+ }
+ else
+ {
+ writable->abstractFilePosition =
+ std::make_shared< JSONFilePosition >( json::json_pointer( path ) );
+ }
+
+ ensurePath(
+ j,
+ removeSlashes( parameters.path )
+ );
+
+ writable->written = true;
+ }
+
+
+ void JSONIOHandlerImpl::openDataset(
+ Writable * writable,
+ Parameter< Operation::OPEN_DATASET > & parameters
+ )
+ {
+ refreshFileFromParent( writable );
+ auto name = removeSlashes( parameters.name );
+ auto & datasetJson = obtainJsonContents( writable->parent )[name];
+ setAndGetFilePosition(
+ writable,
+ name
+ );
+
+ *parameters.dtype =
+ Datatype( stringToDatatype( datasetJson["datatype"].get< std::string >( ) ) );
+ *parameters.extent = getExtent( datasetJson["data"] );
+ writable->written = true;
+
+ }
+
+
+ void JSONIOHandlerImpl::deleteFile(
+ Writable * writable,
+ Parameter< Operation::DELETE_FILE > const & parameters
+ )
+ {
+ VERIFY_ALWAYS( m_handler->accessType != AccessType::READ_ONLY,
+ "Cannot delete files in read-only mode" )
+
+ if( !writable->written )
+ {
+ return;
+ }
+
+ auto filename = auxiliary::ends_with(
+ parameters.name,
+ ".json"
+ ) ? parameters.name : parameters.name + ".json";
+
+ auto tuple = getPossiblyExisting( filename );
+ if( !std::get< 2 >( tuple ) )
+ {
+ // file is already in the system
+ auto file = std::get< 0 >( tuple );
+ m_dirty.erase( file );
+ m_jsonVals.erase( file );
+ file.invalidate( );
+ }
+
+ std::remove( fullPath( filename ).c_str( ) );
+
+ writable->written = false;
+ }
+
+
+ void JSONIOHandlerImpl::deletePath(
+ Writable * writable,
+ Parameter< Operation::DELETE_PATH > const & parameters
+ )
+ {
+ VERIFY_ALWAYS( m_handler->accessType != AccessType::READ_ONLY,
+ "Cannot delete paths in read-only mode" )
+
+ if( !writable->written )
+ {
+ return;
+ }
+
+ VERIFY_ALWAYS( !auxiliary::starts_with(
+ parameters.path,
+ '/'
+ ),
+ "Paths passed for deletion should be relative, the given path is absolute (starts with '/')" )
+ auto file = refreshFileFromParent( writable );
+ auto filepos = setAndGetFilePosition(
+ writable,
+ false
+ );
+ auto path = removeSlashes( parameters.path );
+ VERIFY( !path.empty( ),
+ "No path passed for deletion." )
+ nlohmann::json * j;
+ if( path == "." )
+ {
+ auto
+ s =
+ filepos->id
+ .to_string( );
+ if( s == "/" )
+ {
+ throw std::runtime_error( "Cannot delete the root group" );
+ }
+
+ auto i = s.rfind( '/' );
+ path = s;
+ path.replace(
+ 0,
+ i + 1,
+ ""
+ );
+ // path should now be equal to the name of the current group
+ // go up one group
+
+ // go to parent directory
+ // parent exists since we have verified that the current
+ // directory is != root
+ parentDir( s );
+ j =
+ &( *obtainJsonContents( file ) )[nlohmann::json::json_pointer( s )];
+ }
+ else
+ {
+ if( auxiliary::starts_with(
+ path,
+ "./"
+ ) )
+ {
+ path = auxiliary::replace_first(
+ path,
+ "./",
+ ""
+ );
+ }
+ j = &obtainJsonContents( writable );
+ }
+ nlohmann::json * lastPointer = j;
+ bool needToDelete = true;
+ auto splitPath = auxiliary::split(
+ path,
+ "/"
+ );
+ // be careful not to create the group by accident
+ // the loop will execute at least once
+ for( auto folder: splitPath )
+ {
+ auto it = j->find( folder );
+ if( it == j->end( ) )
+ {
+ needToDelete = false;
+ break;
+ }
+ else
+ {
+ lastPointer = j;
+ j = &it.value( );
+ }
+ }
+ if( needToDelete )
+ {
+ lastPointer->erase(
+ splitPath[splitPath.size( ) - 1]
+ );
+ }
+
+ putJsonContents( file );
+ writable->abstractFilePosition
+ .reset( );
+ writable->written = false;
+ }
+
+
+ void JSONIOHandlerImpl::deleteDataset(
+ Writable * writable,
+ Parameter< Operation::DELETE_DATASET > const & parameters
+ )
+ {
+ VERIFY_ALWAYS( m_handler->accessType != AccessType::READ_ONLY,
+ "Cannot delete datasets in read-only mode" )
+
+ if( !writable->written )
+ {
+ return;
+ }
+
+ auto filepos = setAndGetFilePosition(
+ writable,
+ false
+ );
+
+ auto file = refreshFileFromParent( writable );
+ auto dataset = removeSlashes( parameters.name );
+ nlohmann::json * parent;
+ if( dataset == "." )
+ {
+ auto
+ s =
+ filepos->id
+ .to_string( );
+ if( s.empty( ) )
+ {
+ throw std::runtime_error( "Invalid position for a dataset in the JSON file." );
+ }
+ dataset = s;
+ auto i = dataset.rfind( '/' );
+ dataset.replace(
+ 0,
+ i + 1,
+ ""
+ );
+
+ parentDir( s );
+ parent =
+ &( *obtainJsonContents( file ) )[nlohmann::json::json_pointer( s )];
+ }
+ else
+ {
+ parent = &obtainJsonContents( writable );
+ }
+ parent->erase( dataset );
+ putJsonContents( file );
+ writable->written = false;
+ writable->abstractFilePosition
+ .reset( );
+ }
+
+
+ void JSONIOHandlerImpl::deleteAttribute(
+ Writable * writable,
+ Parameter< Operation::DELETE_ATT > const & parameters
+ )
+ {
+ VERIFY_ALWAYS( m_handler->accessType != AccessType::READ_ONLY,
+ "Cannot delete attributes in read-only mode" )
+ if( !writable->written )
+ {
+ return;
+ }
+ setAndGetFilePosition( writable );
+ auto file = refreshFileFromParent( writable );
+ auto & j = obtainJsonContents( writable );
+ j.erase( parameters.name );
+ putJsonContents( file );
+ }
+
+
+ void JSONIOHandlerImpl::writeDataset(
+ Writable * writable,
+ Parameter< Operation::WRITE_DATASET > const & parameters
+ )
+ {
+ VERIFY_ALWAYS( m_handler->accessType != AccessType::READ_ONLY,
+ "Cannot write data in read-only mode." );
+
+ auto pos = setAndGetFilePosition( writable );
+ auto file = refreshFileFromParent( writable );
+ auto & j = obtainJsonContents( writable );
+
+ verifyDataset(
+ parameters,
+ j
+ );
+
+
+ DatasetWriter dw;
+ switchType(
+ parameters.dtype,
+ dw,
+ j,
+ parameters
+ );
+
+ writable->written = true;
+ putJsonContents( file );
+ }
+
+
+ void JSONIOHandlerImpl::writeAttribute(
+ Writable * writable,
+ Parameter< Operation::WRITE_ATT > const & parameter
+ )
+ {
+ if( m_handler->accessType == AccessType::READ_ONLY )
+ {
+ throw std::runtime_error( "Creating a dataset in a file opened as read only is not possible." );
+ }
+
+ /* Sanitize name */
+ std::string name = removeSlashes( parameter.name );
+
+ auto file = refreshFileFromParent( writable );
+ auto jsonVal = obtainJsonContents( file );
+ auto filePosition = setAndGetFilePosition( writable );
+ if( ( *jsonVal )[filePosition->id]["attributes"].empty( ) )
+ {
+ ( *jsonVal )[filePosition->id]["attributes"] =
+ nlohmann::json::object( );
+ }
+ nlohmann::json value;
+ AttributeWriter aw;
+ switchType(
+ parameter.dtype,
+ aw,
+ value,
+ parameter.resource
+ );
+ ( *jsonVal )[filePosition->id]["attributes"][parameter.name] = {
+ {
+ "datatype",
+ datatypeToString( parameter.dtype )
+ },
+ {
+ "value",
+ value
+ }
+ };
+ writable->written = true;
+ m_dirty.emplace( file );
+ }
+
+
+ void JSONIOHandlerImpl::readDataset(
+ Writable * writable,
+ Parameter< Operation::READ_DATASET > & parameters
+ )
+ {
+ refreshFileFromParent( writable );
+ setAndGetFilePosition( writable );
+ auto & j = obtainJsonContents( writable );
+ verifyDataset(
+ parameters,
+ j
+ );
+
+ try
+ {
+ DatasetReader dr;
+ switchType(
+ parameters.dtype,
+ dr,
+ j["data"],
+ parameters
+ );
+ } catch( json::basic_json::type_error & )
+ {
+ throw std::runtime_error( "The given path does not contain a valid dataset." );
+ }
+ }
+
+
+ void JSONIOHandlerImpl::readAttribute(
+ Writable * writable,
+ Parameter< Operation::READ_ATT > & parameters
+ )
+ {
+ VERIFY_ALWAYS( writable->written,
+ "Attributes have to be written before reading." )
+ refreshFileFromParent( writable );
+ auto name = removeSlashes( parameters.name );
+ auto & jsonLoc = obtainJsonContents( writable )["attributes"];
+ setAndGetFilePosition( writable );
+ VERIFY_ALWAYS( hasKey(
+ jsonLoc,
+ name
+ ),
+ "No such attribute in the given location." )
+ auto & j = jsonLoc[name];
+ try
+ {
+ *parameters.dtype =
+ Datatype( stringToDatatype( j["datatype"].get< std::string >( ) ) );
+ AttributeReader ar;
+ switchType(
+ *parameters.dtype,
+ ar,
+ j["value"],
+ parameters
+ );
+ } catch( json::type_error & )
+ {
+ throw std::runtime_error( "The given location does not contain a properly formatted attribute" );
+ }
+ }
+
+
+ void JSONIOHandlerImpl::listPaths(
+ Writable * writable,
+ Parameter< Operation::LIST_PATHS > & parameters
+ )
+ {
+ VERIFY_ALWAYS( writable->written,
+ "Values have to be written before reading a directory" );
+ auto & j = obtainJsonContents( writable );
+ setAndGetFilePosition( writable );
+ refreshFileFromParent( writable );
+ parameters.paths
+ ->clear( );
+ for( auto it = j.begin( ); it != j.end( ); it++ )
+ {
+ if( isGroup( it ) )
+ {
+ parameters.paths
+ ->push_back( it.key( ) );
+ }
+ }
+ }
+
+
+ void JSONIOHandlerImpl::listDatasets(
+ Writable * writable,
+ Parameter< Operation::LIST_DATASETS > & parameters
+ )
+ {
+ VERIFY_ALWAYS( writable->written,
+ "Datasets have to be written before reading." )
+ refreshFileFromParent( writable );
+ auto filePosition = setAndGetFilePosition( writable );
+ auto & j = obtainJsonContents( writable );
+ parameters.datasets
+ ->clear( );
+ for( auto it = j.begin( ); it != j.end( ); it++ )
+ {
+ if( isDataset( it.value() ) )
+ {
+ parameters.datasets
+ ->push_back( it.key( ) );
+ }
+ }
+ }
+
+
+ void JSONIOHandlerImpl::listAttributes(
+ Writable * writable,
+ Parameter< Operation::LIST_ATTS > & parameters
+ )
+ {
+ VERIFY_ALWAYS( writable->written,
+ "Attributes have to be written before reading." )
+ refreshFileFromParent( writable );
+ auto filePosition = setAndGetFilePosition( writable );
+ auto & j = obtainJsonContents( writable )["attributes"];
+ for( auto it = j.begin( ); it != j.end( ); it++ )
+ {
+ parameters.attributes
+ ->push_back( it.key( ) );
+ }
+ }
+
+
+ std::shared_ptr< JSONIOHandlerImpl::FILEHANDLE >
+ JSONIOHandlerImpl::getFilehandle(
+ File fileName,
+ AccessType accessType
+ )
+ {
+ VERIFY_ALWAYS( fileName.valid( ),
+ "Tried opening a file that has been overwritten or deleted." )
+ auto path = fullPath( std::move( fileName ) );
+ auto fs = std::make_shared< std::fstream >( );
+ switch( accessType )
+ {
+ case AccessType::CREATE:
+ case AccessType::READ_WRITE:
+ fs->open(
+ path,
+ std::ios_base::out | std::ios_base::trunc
+ );
+ break;
+ case AccessType::READ_ONLY:
+ fs->open(
+ path,
+ std::ios_base::in
+ );
+ break;
+ }
+ VERIFY( fs->good( ),
+ "Failed opening a file" );
+ return fs;
+ }
+
+
+ std::string JSONIOHandlerImpl::fullPath( File fileName )
+ {
+ return fullPath( *fileName );
+ }
+
+
+ std::string JSONIOHandlerImpl::fullPath( std::string fileName )
+ {
+ if( auxiliary::ends_with(
+ m_handler->directory,
+ "/"
+ ) )
+ {
+ return m_handler->directory + fileName;
+ }
+ else
+ {
+ return m_handler->directory + "/" + fileName;
+ }
+ }
+
+
+ void JSONIOHandlerImpl::parentDir( std::string & s )
+ {
+ auto i = s.rfind( '/' );
+ if( i != std::string::npos )
+ {
+ s.replace(
+ i,
+ s.size( ) - i,
+ ""
+ );
+ s.shrink_to_fit( );
+ }
+ }
+
+
+ std::string JSONIOHandlerImpl::filepositionOf( Writable * writable )
+ {
+ return std::dynamic_pointer_cast< JSONFilePosition >( writable->abstractFilePosition )->id
+ .to_string( );
+ }
+
+
+ template<
+ typename T,
+ typename Visitor
+ >
+ void JSONIOHandlerImpl::syncMultidimensionalJson(
+ nlohmann::json & j,
+ Offset const & offset,
+ Extent const & extent,
+ Extent const & multiplicator,
+ Visitor visitor,
+ T * data,
+ size_t currentdim
+ )
+ {
+ // Offset only relevant for JSON, the array data is contiguous
+ auto off = offset[currentdim];
+ // maybe rewrite iteratively, using a stack that stores for each level the
+ // current iteration value i
+
+ if( currentdim == offset.size( ) - 1 )
+ {
+ for( std::size_t i = 0; i < extent[currentdim]; ++i )
+ {
+ visitor(
+ j[i + off],
+ data[i]
+ );
+ }
+ }
+ else
+ {
+ for( std::size_t i = 0; i < extent[currentdim]; ++i )
+ {
+ syncMultidimensionalJson<
+ T,
+ Visitor
+ >(
+ j[i + off],
+ offset,
+ extent,
+ multiplicator,
+ visitor,
+ data + i * multiplicator[currentdim],
+ currentdim + 1
+ );
+ }
+ }
+ }
+
+
+ // multiplicators: an array [m_0,...,m_n] s.t.
+ // data[i_0]...[i_n] = data[m_0*i_0+...+m_n*i_n]
+ // (m_n = 1)
+ Extent JSONIOHandlerImpl::getMultiplicators( Extent const & extent )
+ {
+ Extent res( extent );
+ Extent::value_type n = 1;
+ size_t i = extent.size( );
+ do
+ {
+ --i;
+ res[i] = n;
+ n *= extent[i];
+ }
+ while( i > 0 );
+ return res;
+ }
+
+
+ nlohmann::json JSONIOHandlerImpl::initializeNDArray( Extent const & extent )
+ {
+ // idea: begin from the innermost shale and copy the result into the
+ // outer shales
+ nlohmann::json accum;
+ nlohmann::json old;
+ auto * accum_ptr = & accum;
+ auto * old_ptr = & old;
+ for( auto it = extent.rbegin( ); it != extent.rend( ); it++ )
+ {
+ std::swap(old_ptr, accum_ptr);
+ *accum_ptr = nlohmann::json {};
+ for( Extent::value_type i = 0; i < *it; i++ )
+ {
+ (*accum_ptr)[i] = *old_ptr; // copy boi
+ }
+ }
+ return *accum_ptr;
+ }
+
+
+ Extent JSONIOHandlerImpl::getExtent( nlohmann::json & j )
+ {
+ Extent res;
+ nlohmann::json * ptr = &j;
+ while( ptr->is_array( ) )
+ {
+ res.push_back( ptr->size( ) );
+ ptr = &( *ptr )[0];
+ }
+ return res;
+ }
+
+
+ std::string JSONIOHandlerImpl::removeSlashes( std::string s )
+ {
+ if( auxiliary::starts_with(
+ s,
+ '/'
+ ) )
+ {
+ s = auxiliary::replace_first(
+ s,
+ "/",
+ ""
+ );
+ }
+ if( auxiliary::ends_with(
+ s,
+ '/'
+ ) )
+ {
+ s = auxiliary::replace_last(
+ s,
+ "/",
+ ""
+ );
+ }
+ return s;
+ }
+
+
+ template< typename KeyT >
+ bool JSONIOHandlerImpl::hasKey(
+ nlohmann::json & j,
+ KeyT && key
+ )
+ {
+ return j.find( std::forward< KeyT >( key ) ) != j.end( );
+ }
+
+
+ void JSONIOHandlerImpl::ensurePath(
+ nlohmann::json * jsonp,
+ std::string path
+ )
+ {
+ auto groups = auxiliary::split(
+ path,
+ "/"
+ );
+ for( std::string & group: groups )
+ {
+ // Enforce a JSON object
+ // the library will automatically create a list if the first
+ // key added to it is parseable as an int
+ jsonp = &( *jsonp )[group];
+ if (jsonp->is_null())
+ {
+ *jsonp = nlohmann::json::object();
+ }
+ }
+ }
+
+
+ std::tuple<
+ File,
+ std::unordered_map<
+ Writable *,
+ File
+ >::iterator,
+ bool
+ > JSONIOHandlerImpl::getPossiblyExisting( std::string file )
+ {
+
+ auto it = std::find_if(
+ m_files.begin( ),
+ m_files.end( ),
+ [file](
+ std::unordered_map<
+ Writable *,
+ File
+ >::value_type const & entry
+ )
+ {
+ return *entry.second == file &&
+ entry.second
+ .valid( );
+ }
+ );
+
+ bool newlyCreated;
+ File name;
+ if( it == m_files.end( ) )
+ {
+ name = file;
+ newlyCreated = true;
+ }
+ else
+ {
+ name = it->second;
+ newlyCreated = false;
+ }
+ return std::tuple<
+ File,
+ std::unordered_map<
+ Writable *,
+ File
+ >::iterator,
+ bool
+ >(
+ std::move( name ),
+ it,
+ newlyCreated
+ );
+ }
+
+
+ std::shared_ptr< nlohmann::json >
+ JSONIOHandlerImpl::obtainJsonContents( File file )
+ {
+ VERIFY_ALWAYS( file.valid( ),
+ "File has been overwritten or deleted before reading" );
+ auto it = m_jsonVals.find( file );
+ if( it != m_jsonVals.end( ) )
+ {
+ return it->second;
+ }
+ // read from file
+ auto fh = getFilehandle(
+ file,
+ AccessType::READ_ONLY
+ );
+ std::shared_ptr< nlohmann::json >
+ res = std::make_shared< nlohmann::json >( );
+ *fh >> *res;
+ VERIFY( fh->good( ),
+ "Failed reading from a file." );
+ m_jsonVals.emplace(
+ file,
+ res
+ );
+ return res;
+ }
+
+
+ nlohmann::json &
+ JSONIOHandlerImpl::obtainJsonContents( Writable * writable )
+ {
+ auto file = refreshFileFromParent( writable );
+ auto filePosition = setAndGetFilePosition(
+ writable,
+ false
+ );
+ return ( *obtainJsonContents( file ) )[filePosition->id];
+ }
+
+
+ void JSONIOHandlerImpl::putJsonContents(
+ File filename,
+ bool unsetDirty // = true
+ )
+ {
+ VERIFY_ALWAYS( filename.valid( ),
+ "File has been overwritten/deleted before writing" );
+ auto it = m_jsonVals.find( filename );
+ if( it != m_jsonVals.end( ) )
+ {
+ auto fh = getFilehandle(
+ filename,
+ AccessType::CREATE
+ );
+ ( *it->second )["platform_byte_widths"] = platformSpecifics( );
+ *fh << *it->second << std::endl;
+ VERIFY( fh->good( ),
+ "Failed writing data to disk." )
+ m_jsonVals.erase( it );
+ if( unsetDirty )
+ {
+ m_dirty.erase( filename );
+ }
+ }
+
+ }
+
+
+ std::shared_ptr< JSONFilePosition >
+ JSONIOHandlerImpl::setAndGetFilePosition(
+ Writable * writable,
+ std::string extend
+ )
+ {
+ std::string path;
+ if( writable->abstractFilePosition )
+ {
+ // do NOT reuse the old pointer, we want to change the file position
+ // only for the writable!
+ path = filepositionOf( writable ) + "/" + extend;
+ }
+ else if( writable->parent )
+ {
+ path = filepositionOf( writable->parent ) + "/" + extend;
+ }
+ else
+ { // we are root
+ path = extend;
+ if( !auxiliary::starts_with(
+ path,
+ "/"
+ ) )
+ {
+ path = "/" + path;
+ }
+ }
+ auto
+ res =
+ std::make_shared< JSONFilePosition >( json::json_pointer( path ) );
+
+ writable->abstractFilePosition = res;
+
+ return res;
+ }
+
+
+ std::shared_ptr< JSONFilePosition >
+ JSONIOHandlerImpl::setAndGetFilePosition(
+ Writable * writable,
+ bool write
+ )
+ {
+ std::shared_ptr< AbstractFilePosition > res;
+
+
+ if( writable->abstractFilePosition )
+ {
+ res = writable->abstractFilePosition;
+ }
+ else if( writable->parent )
+ {
+ res =
+ writable->parent
+ ->abstractFilePosition;
+ }
+ else
+ { // we are root
+ res = std::make_shared< JSONFilePosition >( );
+ }
+ if( write )
+ {
+ writable->abstractFilePosition = res;
+ }
+ return std::dynamic_pointer_cast< JSONFilePosition >( res );
+ }
+
+
+ File JSONIOHandlerImpl::refreshFileFromParent( Writable * writable )
+ {
+ if( writable->parent )
+ {
+ auto
+ file =
+ m_files.find( writable->parent )
+ ->second;
+ associateWithFile(
+ writable,
+ file
+ );
+ return file;
+ }
+ else
+ {
+ return m_files.find( writable )
+ ->second;
+ }
+ }
+
+
+ void JSONIOHandlerImpl::associateWithFile(
+ Writable * writable,
+ File file
+ )
+ {
+ // make sure to overwrite
+ m_files[writable] = std::move( file );
+ }
+
+
+ bool JSONIOHandlerImpl::isDataset( nlohmann::json const & j )
+ {
+ if( !j.is_object( ) )
+ {
+ return false;
+ }
+ auto i = j.find( "data" );
+ return i != j.end( ) && i.value( ).is_array();
+ }
+
+
+ bool JSONIOHandlerImpl::isGroup( nlohmann::json::const_iterator it )
+ {
+ auto & j = it.value();
+ if( it.key() == "attributes" || it.key() == "platform_byte_widths" || !j.is_object( ) )
+ {
+ return false;
+ }
+ auto i = j.find( "data" );
+ return i == j.end( ) || !i.value( ).is_array();
+ }
+
+
+ template< typename Param >
+ void JSONIOHandlerImpl::verifyDataset(
+ Param const & parameters,
+ nlohmann::json & j
+ )
+ {
+ VERIFY_ALWAYS( isDataset(j),
+ "Specified dataset does not exist or is not a dataset." );
+
+ try
+ {
+ auto datasetExtent = getExtent( j["data"] );
+ VERIFY_ALWAYS( datasetExtent.size( ) ==
+ parameters.extent
+ .size( ),
+ "Read/Write request does not fit the dataset's dimension" );
+ for( unsigned int dimension = 0;
+ dimension <
+ parameters.extent
+ .size( );
+ dimension++ )
+ {
+ VERIFY_ALWAYS( parameters.offset[dimension] +
+ parameters.extent[dimension] <=
+ datasetExtent[dimension],
+ "Read/Write request exceeds the dataset's size" );
+ }
+ Datatype
+ dt = stringToDatatype( j["datatype"].get< std::string >( ) );
+ VERIFY_ALWAYS( dt == parameters.dtype,
+ "Read/Write request does not fit the dataset's type" );
+ } catch( json::basic_json::type_error & e )
+ {
+ throw std::runtime_error( "The given path does not contain a valid dataset." );
+ }
+ }
+
+
+ nlohmann::json JSONIOHandlerImpl::platformSpecifics( )
+ {
+ nlohmann::json res;
+ static Datatype datatypes[] = {
+ Datatype::CHAR,
+ Datatype::UCHAR,
+ Datatype::SHORT,
+ Datatype::INT,
+ Datatype::LONG,
+ Datatype::LONGLONG,
+ Datatype::USHORT,
+ Datatype::UINT,
+ Datatype::ULONG,
+ Datatype::ULONGLONG,
+ Datatype::FLOAT,
+ Datatype::DOUBLE,
+ Datatype::LONG_DOUBLE,
+ Datatype::BOOL
+ };
+ for( auto it = std::begin( datatypes );
+ it != std::end( datatypes );
+ it++ )
+ {
+ res[datatypeToString( *it )] = toBytes( *it );
+ }
+ return res;
+ }
+
+
+ template< typename T >
+ void JSONIOHandlerImpl::DatasetWriter::operator()(
+ nlohmann::json & json,
+ const Parameter< Operation::WRITE_DATASET > & parameters
+ )
+ {
+ CppToJSON< T > ctj;
+ syncMultidimensionalJson(
+ json["data"],
+ parameters.offset,
+ parameters.extent,
+ getMultiplicators( parameters.extent ),
+ [&ctj](
+ nlohmann::json & j,
+ T const & data
+ )
+ {
+ j = ctj( data );
+ },
+            static_cast< T const * >(parameters.data
+                .get( ))
+ );
+ }
+
+
+ template< int n >
+ void JSONIOHandlerImpl::DatasetWriter::operator()(
+ nlohmann::json &,
+ const Parameter< Operation::WRITE_DATASET > &
+ )
+ {
+ throw std::runtime_error( "Unknown datatype given for writing." );
+ }
+
+
+ template< typename T >
+ void JSONIOHandlerImpl::DatasetReader::operator()(
+ nlohmann::json & json,
+ Parameter< Operation::READ_DATASET > & parameters
+ )
+ {
+ JsonToCpp<
+ T
+ > jtc;
+ syncMultidimensionalJson(
+ json,
+ parameters.offset,
+ parameters.extent,
+ getMultiplicators( parameters.extent ),
+ [&jtc](
+ nlohmann::json & j,
+ T & data
+ )
+ {
+ data = jtc( j );
+ },
+            static_cast< T * >(parameters.data
+                .get( ))
+ );
+ }
+
+
+ template< int n >
+ void JSONIOHandlerImpl::DatasetReader::operator()(
+ nlohmann::json &,
+ Parameter< Operation::READ_DATASET > &
+ )
+ {
+ throw std::runtime_error( "Unknown datatype while reading a dataset." );
+ }
+
+
+ template< typename T >
+ void JSONIOHandlerImpl::AttributeWriter::operator()(
+ nlohmann::json & value,
+ Attribute::resource const & resource
+ )
+ {
+ CppToJSON< T > ctj;
+ value = ctj( variantSrc::get< T >( resource ) );
+ }
+
+
+ template< int n >
+ void JSONIOHandlerImpl::AttributeWriter::operator()(
+ nlohmann::json &,
+ Attribute::resource const &
+ )
+ {
+ throw std::runtime_error( "Unknown datatype in attribute writing." );
+ }
+
+
+ template< int n >
+ void JSONIOHandlerImpl::AttributeReader::operator()(
+ nlohmann::json &,
+ Parameter< Operation::READ_ATT > &
+ )
+ {
+ throw std::runtime_error( "Unknown datatype while reading attribute." );
+ }
+
+
+ template< typename T >
+ void JSONIOHandlerImpl::AttributeReader::operator()(
+ nlohmann::json & json,
+ Parameter< Operation::READ_ATT > & parameters
+ )
+ {
+ JsonToCpp<
+ T
+ > jtc;
+ *parameters.resource = jtc(
+ json
+ );
+ }
+
+
+ template< typename T >
+ nlohmann::json
+ JSONIOHandlerImpl::CppToJSON< T >::operator()( const T & val )
+ {
+ return nlohmann::json( val );
+ }
+
+
+ template< typename T >
+ nlohmann::json
+ JSONIOHandlerImpl::CppToJSON< std::vector< T > >::operator()( const std::vector< T > & v )
+ {
+ nlohmann::json j;
+ CppToJSON< T > ctj;
+ for( auto a: v )
+ {
+ j.push_back( ctj( a ) );
+ }
+ return j;
+ }
+
+
+ template< typename T, int n >
+ nlohmann::json JSONIOHandlerImpl::CppToJSON<
+ std::array<
+ T,
+ n
+ >
+ >::operator()(
+ const std::array<
+ T,
+ n
+ > & v
+ )
+ {
+ nlohmann::json j;
+ CppToJSON< T > ctj;
+ for( auto a: v )
+ {
+ j.push_back( ctj( a ) );
+ }
+ return j;
+ }
+
+
+ template<
+ typename T,
+ typename Dummy
+ >
+ T JSONIOHandlerImpl::JsonToCpp<
+ T,
+ Dummy
+ >::operator()( nlohmann::json const & json )
+ { return json.get< T >( ); }
+
+
+ template< typename T >
+ std::vector< T >
+ JSONIOHandlerImpl::JsonToCpp< std::vector< T > >::operator()( nlohmann::json const & json )
+ {
+ std::vector< T > v;
+ JsonToCpp< T > jtp;
+ for( auto & j: json )
+ {
+ v.push_back( jtp( j ) );
+ }
+ return v;
+ }
+
+
+ template< typename T, int n >
+ std::array<
+ T,
+ n
+ > JSONIOHandlerImpl::JsonToCpp<
+ std::array<
+ T,
+ n
+ >
+ >::operator()( nlohmann::json const & json )
+ {
+ std::array<
+ T,
+ n
+ > a;
+ JsonToCpp< T > jtp;
+ size_t i = 0;
+ for( auto & j: json )
+ {
+ a[i] = jtp( j );
+ i++;
+ }
+ return a;
+ }
+
+
+ template<
+ typename T
+ >
+ T JSONIOHandlerImpl::JsonToCpp<
+ T,
+ typename std::enable_if<
+ std::is_floating_point<
+ T
+ >::value
+ >::type
+ >::operator()( nlohmann::json const & j )
+ {
+ try
+ {
+ return j.get< T >( );
+ } catch( ... )
+ {
+ return std::numeric_limits< T >::quiet_NaN( );
+ }
+ }
+
+
+#endif
+
+
+} // openPMD
diff --git a/src/Series.cpp b/src/Series.cpp
index b84a2560cf..161a5508a8 100644
--- a/src/Series.cpp
+++ b/src/Series.cpp
@@ -862,6 +862,8 @@ determineFormat(std::string const& filename)
return Format::HDF5;
if( auxiliary::ends_with(filename, ".bp") )
return Format::ADIOS1;
+ if( auxiliary::ends_with(filename, ".json") )
+ return Format::JSON;
if( std::string::npos != filename.find('.') /* extension is provided */ )
throw std::runtime_error("Unknown file format. Did you append a valid filename extension?");
@@ -879,6 +881,8 @@ suffix(Format f)
case Format::ADIOS1:
case Format::ADIOS2:
return ".bp";
+ case Format::JSON:
+ return ".json";
default:
return "";
}
@@ -892,6 +896,7 @@ cleanFilename(std::string const& filename, Format f)
case Format::HDF5:
case Format::ADIOS1:
case Format::ADIOS2:
+ case Format::JSON:
return auxiliary::replace_last(filename, suffix(f), "");
default:
return filename;
@@ -952,6 +957,16 @@ matcher(std::string const& prefix, int padding, std::string const& postfix, Form
nameReg += + ")" + postfix + ".bp$";
return buildMatcher(nameReg);
}
+ case Format::JSON:
+ {
+ std::string nameReg = "^" + prefix + "([[:digit:]]";
+ if( padding != 0 )
+ nameReg += "{" + std::to_string(padding) + "}";
+ else
+ nameReg += "+";
+ nameReg += + ")" + postfix + ".json$";
+ return buildMatcher(nameReg);
+ }
default:
return [](std::string const&) -> std::tuple< bool, int > { return std::tuple< bool, int >{false, 0}; };
}
diff --git a/src/auxiliary/Filesystem.cpp b/src/auxiliary/Filesystem.cpp
index 39c5500293..20405be98c 100644
--- a/src/auxiliary/Filesystem.cpp
+++ b/src/auxiliary/Filesystem.cpp
@@ -160,6 +160,7 @@ remove_file( std::string const& path )
#else
return (0 == remove(path.c_str()));
#endif
-}
+}
+
} // auxiliary
} // openPMD
diff --git a/test/SerialIOTest.cpp b/test/SerialIOTest.cpp
index fe2c98fba2..64e73dac15 100644
--- a/test/SerialIOTest.cpp
+++ b/test/SerialIOTest.cpp
@@ -19,6 +19,23 @@
using namespace openPMD;
+std::vector<std::tuple<std::string, bool>> getBackends() {
+ // first component: backend file ending
+ // second component: whether to test 128 bit values
+    std::vector<std::tuple<std::string, bool>> res;
+#if openPMD_HAVE_ADIOS1
+ res.emplace_back("bp", true);
+#endif
+#if openPMD_HAVE_HDF5
+ res.emplace_back("h5", true);
+#endif
+#if openPMD_HAVE_JSON
+ res.emplace_back("json", false);
+#endif
+ return res;
+}
+
+auto const backends = getBackends();
inline
void constant_scalar(std::string file_ending)
@@ -110,6 +127,15 @@ void constant_scalar(std::string file_ending)
}
}
+TEST_CASE( "constant_scalar", "[serial]" )
+{
+ for (auto const & t: backends)
+ {
+ constant_scalar(std::get<0>(t));
+ }
+}
+
+
inline
void particle_patches( std::string file_ending )
{
@@ -192,1932 +218,1721 @@ void particle_patches( std::string file_ending )
}
}
-#if openPMD_HAVE_HDF5
-TEST_CASE( "git_hdf5_sample_structure_test", "[serial][hdf5]" )
+TEST_CASE( "particle_patches", "[serial]" )
{
-#if openPMD_USE_INVASIVE_TESTS
- try
+ for (auto const & t: backends)
{
- Series o = Series("../samples/git-sample/data%T.h5", AccessType::READ_ONLY);
+ particle_patches(std::get<0>(t));
+ }
+}
- REQUIRE(!o.parent);
- REQUIRE(o.iterations.parent == getWritable(&o));
- REQUIRE_THROWS_AS(o.iterations[42], std::out_of_range);
- REQUIRE(o.iterations[100].parent == getWritable(&o.iterations));
- REQUIRE(o.iterations[100].meshes.parent == getWritable(&o.iterations[100]));
- REQUIRE(o.iterations[100].meshes["E"].parent == getWritable(&o.iterations[100].meshes));
- REQUIRE(o.iterations[100].meshes["E"]["x"].parent == getWritable(&o.iterations[100].meshes["E"]));
- REQUIRE(o.iterations[100].meshes["E"]["y"].parent == getWritable(&o.iterations[100].meshes["E"]));
- REQUIRE(o.iterations[100].meshes["E"]["z"].parent == getWritable(&o.iterations[100].meshes["E"]));
- REQUIRE(o.iterations[100].meshes["rho"].parent == getWritable(&o.iterations[100].meshes));
- REQUIRE(o.iterations[100].meshes["rho"][MeshRecordComponent::SCALAR].parent == getWritable(&o.iterations[100].meshes));
- REQUIRE_THROWS_AS(o.iterations[100].meshes["cherries"], std::out_of_range);
- REQUIRE(o.iterations[100].particles.parent == getWritable(&o.iterations[100]));
- REQUIRE(o.iterations[100].particles["electrons"].parent == getWritable(&o.iterations[100].particles));
- REQUIRE(o.iterations[100].particles["electrons"]["charge"].parent == getWritable(&o.iterations[100].particles["electrons"]));
- REQUIRE(o.iterations[100].particles["electrons"]["charge"][RecordComponent::SCALAR].parent == getWritable(&o.iterations[100].particles["electrons"]));
- REQUIRE(o.iterations[100].particles["electrons"]["mass"].parent == getWritable(&o.iterations[100].particles["electrons"]));
- REQUIRE(o.iterations[100].particles["electrons"]["mass"][RecordComponent::SCALAR].parent == getWritable(&o.iterations[100].particles["electrons"]));
- REQUIRE(o.iterations[100].particles["electrons"]["momentum"].parent == getWritable(&o.iterations[100].particles["electrons"]));
- REQUIRE(o.iterations[100].particles["electrons"]["momentum"]["x"].parent == getWritable(&o.iterations[100].particles["electrons"]["momentum"]));
- REQUIRE(o.iterations[100].particles["electrons"]["momentum"]["y"].parent == getWritable(&o.iterations[100].particles["electrons"]["momentum"]));
- REQUIRE(o.iterations[100].particles["electrons"]["momentum"]["z"].parent == getWritable(&o.iterations[100].particles["electrons"]["momentum"]));
- REQUIRE(o.iterations[100].particles["electrons"]["position"].parent == getWritable(&o.iterations[100].particles["electrons"]));
- REQUIRE(o.iterations[100].particles["electrons"]["position"]["x"].parent == getWritable(&o.iterations[100].particles["electrons"]["position"]));
- REQUIRE(o.iterations[100].particles["electrons"]["position"]["y"].parent == getWritable(&o.iterations[100].particles["electrons"]["position"]));
- REQUIRE(o.iterations[100].particles["electrons"]["position"]["z"].parent == getWritable(&o.iterations[100].particles["electrons"]["position"]));
- REQUIRE(o.iterations[100].particles["electrons"]["positionOffset"].parent == getWritable(&o.iterations[100].particles["electrons"]));
- REQUIRE(o.iterations[100].particles["electrons"]["positionOffset"]["x"].parent == getWritable(&o.iterations[100].particles["electrons"]["positionOffset"]));
- REQUIRE(o.iterations[100].particles["electrons"]["positionOffset"]["y"].parent == getWritable(&o.iterations[100].particles["electrons"]["positionOffset"]));
- REQUIRE(o.iterations[100].particles["electrons"]["positionOffset"]["z"].parent == getWritable(&o.iterations[100].particles["electrons"]["positionOffset"]));
- REQUIRE(o.iterations[100].particles["electrons"]["weighting"].parent == getWritable(&o.iterations[100].particles["electrons"]));
- REQUIRE(o.iterations[100].particles["electrons"]["weighting"][RecordComponent::SCALAR].parent == getWritable(&o.iterations[100].particles["electrons"]));
- REQUIRE_THROWS_AS(o.iterations[100].particles["electrons"]["numberOfLegs"], std::out_of_range);
- REQUIRE_THROWS_AS(o.iterations[100].particles["apples"], std::out_of_range);
+inline
+void dtype_test(const std::string & backend, bool test_128_bit = true)
+{
+ bool test_long_double = test_128_bit || sizeof (long double) <= 8;
+ bool test_long_long = test_128_bit || sizeof (long long) <= 8;
+ {
+ Series s = Series("../samples/dtype_test." + backend, AccessType::CREATE);
+ char c = 'c';
+ s.setAttribute("char", c);
+ unsigned char uc = 'u';
+ s.setAttribute("uchar", uc);
+ int16_t i16 = 16;
+ s.setAttribute("int16", i16);
int32_t i32 = 32;
- REQUIRE_THROWS(o.setAttribute("setAttributeFail", i32));
- } catch (no_such_file_error& e)
+ s.setAttribute("int32", i32);
+ int64_t i64 = 64;
+ s.setAttribute("int64", i64);
+ uint16_t u16 = 16u;
+ s.setAttribute("uint16", u16);
+ uint32_t u32 = 32u;
+ s.setAttribute("uint32", u32);
+ uint64_t u64 = 64u;
+ s.setAttribute("uint64", u64);
+ float f = 16.e10f;
+ s.setAttribute("float", f);
+ double d = 1.e64;
+ s.setAttribute("double", d);
+ if (test_long_double)
+ {
+ long double ld = 1.e80L;
+ s.setAttribute("longdouble", ld);
+ }
+ std::string str = "string";
+ s.setAttribute("string", str);
+ s.setAttribute("vecChar", std::vector< char >({'c', 'h', 'a', 'r'}));
+ s.setAttribute("vecInt16", std::vector< int16_t >({32766, 32767}));
+ s.setAttribute("vecInt32", std::vector< int32_t >({2147483646, 2147483647}));
+ s.setAttribute("vecInt64", std::vector< int64_t >({9223372036854775806, 9223372036854775807}));
+ s.setAttribute("vecUchar", std::vector< char >({'u', 'c', 'h', 'a', 'r'}));
+ s.setAttribute("vecUint16", std::vector< uint16_t >({65534u, 65535u}));
+ s.setAttribute("vecUint32", std::vector< uint32_t >({4294967294u, 4294967295u}));
+ s.setAttribute("vecUint64", std::vector< uint64_t >({18446744073709551614u, 18446744073709551615u}));
+ s.setAttribute("vecFloat", std::vector< float >({0.f, 3.40282e+38f}));
+ s.setAttribute("vecDouble", std::vector< double >({0., 1.79769e+308}));
+ if (test_long_double)
+ {
+        s.setAttribute("vecLongdouble", std::vector< long double >({0.L, std::numeric_limits< long double >::max()}));
+ }
+ s.setAttribute("vecString", std::vector< std::string >({"vector", "of", "strings"}));
+ s.setAttribute("bool", true);
+ s.setAttribute("boolF", false);
+
+ // non-fixed size integer types
+ short ss = 16;
+ s.setAttribute("short", ss);
+ int si = 32;
+ s.setAttribute("int", si);
+ long sl = 64;
+ s.setAttribute("long", sl);
+ if (test_long_long)
+ {
+ long long sll = 128;
+ s.setAttribute("longlong", sll);
+ }
+ unsigned short us = 16u;
+ s.setAttribute("ushort", us);
+ unsigned int ui = 32u;
+ s.setAttribute("uint", ui);
+ unsigned long ul = 64u;
+ s.setAttribute("ulong", ul);
+ if (test_long_long)
+ {
+ unsigned long long ull = 128u;
+ s.setAttribute("ulonglong", ull);
+ }
+ s.setAttribute("vecShort", std::vector< short >({32766, 32767}));
+ s.setAttribute("vecInt", std::vector< int >({32766, 32767}));
+ s.setAttribute("vecLong", std::vector< long >({2147483646, 2147483647}));
+ if (test_long_long)
+ {
+ s.setAttribute("vecLongLong", std::vector< long long >({2147483644, 2147483643}));
+ }
+ s.setAttribute("vecUShort", std::vector< unsigned short >({65534u, 65535u}));
+ s.setAttribute("vecUInt", std::vector< unsigned int >({65533u, 65531u}));
+ s.setAttribute("vecULong", std::vector< unsigned long >({65532u, 65530u}));
+ if (test_long_long)
+ {
+ s.setAttribute("vecULongLong", std::vector< unsigned long long >({65531u, 65529u}));
+ }
+ }
+
+ Series s = Series("../samples/dtype_test." + backend, AccessType::READ_ONLY);
+
+ REQUIRE(s.getAttribute("char").get< char >() == 'c');
+ REQUIRE(s.getAttribute("uchar").get< unsigned char >() == 'u');
+ REQUIRE(s.getAttribute("int16").get< int16_t >() == 16);
+ REQUIRE(s.getAttribute("int32").get< int32_t >() == 32);
+ REQUIRE(s.getAttribute("int64").get< int64_t >() == 64);
+ REQUIRE(s.getAttribute("uint16").get< uint16_t >() == 16u);
+ REQUIRE(s.getAttribute("uint32").get< uint32_t >() == 32u);
+ REQUIRE(s.getAttribute("uint64").get< uint64_t >() == 64u);
+ REQUIRE(s.getAttribute("float").get< float >() == 16.e10f);
+ REQUIRE(s.getAttribute("double").get< double >() == 1.e64);
+ if (test_long_double)
{
- std::cerr << "git sample not accessible. (" << e.what() << ")\n";
- return;
+ REQUIRE(s.getAttribute("longdouble").get< long double >() == 1.e80L);
+ }
+ REQUIRE(s.getAttribute("string").get< std::string >() == "string");
+ REQUIRE(s.getAttribute("vecChar").get< std::vector< char > >() == std::vector< char >({'c', 'h', 'a', 'r'}));
+ REQUIRE(s.getAttribute("vecInt16").get< std::vector< int16_t > >() == std::vector< int16_t >({32766, 32767}));
+ REQUIRE(s.getAttribute("vecInt32").get< std::vector< int32_t > >() == std::vector< int32_t >({2147483646, 2147483647}));
+ REQUIRE(s.getAttribute("vecInt64").get< std::vector< int64_t > >() == std::vector< int64_t >({9223372036854775806, 9223372036854775807}));
+ REQUIRE(s.getAttribute("vecUchar").get< std::vector< char > >() == std::vector< char >({'u', 'c', 'h', 'a', 'r'}));
+ REQUIRE(s.getAttribute("vecUint16").get< std::vector< uint16_t > >() == std::vector< uint16_t >({65534u, 65535u}));
+ REQUIRE(s.getAttribute("vecUint32").get< std::vector< uint32_t > >() == std::vector< uint32_t >({4294967294u, 4294967295u}));
+ REQUIRE(s.getAttribute("vecUint64").get< std::vector< uint64_t > >() == std::vector< uint64_t >({18446744073709551614u, 18446744073709551615u}));
+ REQUIRE(s.getAttribute("vecFloat").get< std::vector< float > >() == std::vector< float >({0.f, 3.40282e+38f}));
+ REQUIRE(s.getAttribute("vecDouble").get< std::vector< double > >() == std::vector< double >({0., 1.79769e+308}));
+ if (test_long_double)
+ {
+        REQUIRE(s.getAttribute("vecLongdouble").get< std::vector< long double > >() == std::vector< long double >({0.L, std::numeric_limits< long double >::max()}));
+ }
+ REQUIRE(s.getAttribute("vecString").get< std::vector< std::string > >() == std::vector< std::string >({"vector", "of", "strings"}));
+ REQUIRE(s.getAttribute("bool").get< bool >() == true);
+ REQUIRE(s.getAttribute("boolF").get< bool >() == false);
+
+ // same implementation types (not necessary aliases) detection
+#if !defined(_MSC_VER)
+ REQUIRE(s.getAttribute("short").dtype == Datatype::SHORT);
+ REQUIRE(s.getAttribute("int").dtype == Datatype::INT);
+ REQUIRE(s.getAttribute("long").dtype == Datatype::LONG);
+ REQUIRE(s.getAttribute("longlong").dtype == Datatype::LONGLONG);
+ REQUIRE(s.getAttribute("ushort").dtype == Datatype::USHORT);
+ REQUIRE(s.getAttribute("uint").dtype == Datatype::UINT);
+ REQUIRE(s.getAttribute("ulong").dtype == Datatype::ULONG);
+ if (test_long_long)
+ {
+ REQUIRE(s.getAttribute("ulonglong").dtype == Datatype::ULONGLONG);
+ }
+
+ REQUIRE(s.getAttribute("vecShort").dtype == Datatype::VEC_SHORT);
+ REQUIRE(s.getAttribute("vecInt").dtype == Datatype::VEC_INT);
+ REQUIRE(s.getAttribute("vecLong").dtype == Datatype::VEC_LONG);
+ REQUIRE(s.getAttribute("vecLongLong").dtype == Datatype::VEC_LONGLONG);
+ REQUIRE(s.getAttribute("vecUShort").dtype == Datatype::VEC_USHORT);
+ REQUIRE(s.getAttribute("vecUInt").dtype == Datatype::VEC_UINT);
+ REQUIRE(s.getAttribute("vecULong").dtype == Datatype::VEC_ULONG);
+ if (test_long_long)
+ {
+ REQUIRE(s.getAttribute("vecULongLong").dtype == Datatype::VEC_ULONGLONG);
}
-#else
- std::cerr << "Invasive tests not enabled. Hierarchy is not visible.\n";
#endif
+ REQUIRE(isSame(s.getAttribute("short").dtype, Datatype::SHORT));
+ REQUIRE(isSame(s.getAttribute("int").dtype, Datatype::INT));
+ REQUIRE(isSame(s.getAttribute("long").dtype, Datatype::LONG));
+ if (test_long_long)
+ {
+ REQUIRE(isSame(s.getAttribute("longlong").dtype, Datatype::LONGLONG));
+ }
+ REQUIRE(isSame(s.getAttribute("ushort").dtype, Datatype::USHORT));
+ REQUIRE(isSame(s.getAttribute("uint").dtype, Datatype::UINT));
+ REQUIRE(isSame(s.getAttribute("ulong").dtype, Datatype::ULONG));
+ if (test_long_long)
+ {
+ REQUIRE(isSame(s.getAttribute("ulonglong").dtype, Datatype::ULONGLONG));
+ }
+
+ REQUIRE(isSame(s.getAttribute("vecShort").dtype, Datatype::VEC_SHORT));
+ REQUIRE(isSame(s.getAttribute("vecInt").dtype, Datatype::VEC_INT));
+ REQUIRE(isSame(s.getAttribute("vecLong").dtype, Datatype::VEC_LONG));
+ if (test_long_long)
+ {
+ REQUIRE(isSame(s.getAttribute("vecLongLong").dtype, Datatype::VEC_LONGLONG));
+ }
+ REQUIRE(isSame(s.getAttribute("vecUShort").dtype, Datatype::VEC_USHORT));
+ REQUIRE(isSame(s.getAttribute("vecUInt").dtype, Datatype::VEC_UINT));
+ REQUIRE(isSame(s.getAttribute("vecULong").dtype, Datatype::VEC_ULONG));
+ if (test_long_long)
+ {
+ REQUIRE(isSame(s.getAttribute("vecULongLong").dtype, Datatype::VEC_ULONGLONG));
+ }
}
-TEST_CASE( "git_hdf5_sample_attribute_test", "[serial][hdf5]" )
+TEST_CASE( "dtype_test", "[serial]" )
{
- try
+ std::string backend;
+ bool test_128_bit;
+ for (auto const & t: backends)
{
- Series o = Series("../samples/git-sample/data%T.h5", AccessType::READ_ONLY);
-
- REQUIRE(o.openPMD() == "1.1.0");
- REQUIRE(o.openPMDextension() == 1);
- REQUIRE(o.basePath() == "/data/%T/");
- REQUIRE(o.meshesPath() == "fields/");
- REQUIRE(o.particlesPath() == "particles/");
- REQUIRE(o.iterationEncoding() == IterationEncoding::fileBased);
- REQUIRE(o.iterationFormat() == "data%T.h5");
- REQUIRE(o.name() == "data%T");
+ std::tie(backend, test_128_bit) = t;
+ dtype_test(backend, test_128_bit);
+ }
+}
- REQUIRE(o.iterations.size() == 5);
- REQUIRE(o.iterations.count(100) == 1);
+inline
+void write_test(const std::string & backend)
+{
+ Series o = Series("../samples/serial_write." + backend, AccessType::CREATE);
- Iteration& iteration_100 = o.iterations[100];
- REQUIRE(iteration_100.time< double >() == 3.2847121452090077e-14);
- REQUIRE(iteration_100.dt< double >() == 3.2847121452090093e-16);
- REQUIRE(iteration_100.timeUnitSI() == 1.0);
+ ParticleSpecies& e_1 = o.iterations[1].particles["e"];
- REQUIRE(iteration_100.meshes.size() == 2);
- REQUIRE(iteration_100.meshes.count("E") == 1);
- REQUIRE(iteration_100.meshes.count("rho") == 1);
+ std::vector< double > position_global(4);
+ double pos{0.};
+ std::generate(position_global.begin(), position_global.end(), [&pos]{ return pos++; });
+ std::shared_ptr< double > position_local_1(new double);
+ e_1["position"]["x"].resetDataset(Dataset(determineDatatype(position_local_1), {4}));
- std::vector< std::string > al{"x", "y", "z"};
- std::vector< double > gs{8.0000000000000007e-07,
- 8.0000000000000007e-07,
- 1.0000000000000001e-07};
- std::vector< double > ggo{-1.0000000000000001e-05,
- -1.0000000000000001e-05,
- -5.1999999999999993e-06};
- std::array< double, 7 > ud{{1., 1., -3., -1., 0., 0., 0.}};
- Mesh& E = iteration_100.meshes["E"];
- REQUIRE(E.geometry() == Mesh::Geometry::cartesian);
- REQUIRE(E.dataOrder() == Mesh::DataOrder::C);
- REQUIRE(E.axisLabels() == al);
- REQUIRE(E.gridSpacing< double >() == gs);
- REQUIRE(E.gridGlobalOffset() == ggo);
- REQUIRE(E.gridUnitSI() == 1.0);
- REQUIRE(E.unitDimension() == ud);
- REQUIRE(E.timeOffset< double >() == 0.0);
+ for( uint64_t i = 0; i < 4; ++i )
+ {
+ *position_local_1 = position_global[i];
+ e_1["position"]["x"].storeChunk(position_local_1, {i}, {1});
+ }
- REQUIRE(E.size() == 3);
- REQUIRE(E.count("x") == 1);
- REQUIRE(E.count("y") == 1);
- REQUIRE(E.count("z") == 1);
+ std::vector< uint64_t > positionOffset_global(4);
+ uint64_t posOff{0};
+ std::generate(positionOffset_global.begin(), positionOffset_global.end(), [&posOff]{ return posOff++; });
+ std::shared_ptr< uint64_t > positionOffset_local_1(new uint64_t);
+ e_1["positionOffset"]["x"].resetDataset(Dataset(determineDatatype(positionOffset_local_1), {4}));
- std::vector< double > p{0.5, 0., 0.};
- Extent e{26, 26, 201};
- MeshRecordComponent& E_x = E["x"];
- REQUIRE(E_x.unitSI() == 1.0);
- REQUIRE(E_x.position< double >() == p);
- REQUIRE(E_x.getDatatype() == Datatype::DOUBLE);
- REQUIRE(E_x.getExtent() == e);
- REQUIRE(E_x.getDimensionality() == 3);
+ for( uint64_t i = 0; i < 4; ++i )
+ {
+ *positionOffset_local_1 = positionOffset_global[i];
+ e_1["positionOffset"]["x"].storeChunk(positionOffset_local_1, {i}, {1});
+ }
- p = {0., 0.5, 0.};
- MeshRecordComponent& E_y = E["y"];
- REQUIRE(E_y.unitSI() == 1.0);
- REQUIRE(E_y.position< double >() == p);
- REQUIRE(E_y.getDatatype() == Datatype::DOUBLE);
- REQUIRE(E_y.getExtent() == e);
- REQUIRE(E_y.getDimensionality() == 3);
+ ParticleSpecies& e_2 = o.iterations[2].particles["e"];
- p = {0., 0., 0.5};
- MeshRecordComponent& E_z = E["z"];
- REQUIRE(E_z.unitSI() == 1.0);
- REQUIRE(E_z.position< double >() == p);
- REQUIRE(E_z.getDatatype() == Datatype::DOUBLE);
- REQUIRE(E_z.getExtent() == e);
- REQUIRE(E_z.getDimensionality() == 3);
+ std::generate(position_global.begin(), position_global.end(), [&pos]{ return pos++; });
+ std::shared_ptr< double > position_local_2(new double);
+ e_2["position"]["x"].resetDataset(Dataset(determineDatatype(position_local_2), {4}));
- gs = {8.0000000000000007e-07,
- 8.0000000000000007e-07,
- 1.0000000000000001e-07};
- ggo = {-1.0000000000000001e-05,
- -1.0000000000000001e-05,
- -5.1999999999999993e-06};
- ud = {{-3., 0., 1., 1., 0., 0., 0.}};
- Mesh& rho = iteration_100.meshes["rho"];
- REQUIRE(rho.geometry() == Mesh::Geometry::cartesian);
- REQUIRE(rho.dataOrder() == Mesh::DataOrder::C);
- REQUIRE(rho.axisLabels() == al);
- REQUIRE(rho.gridSpacing< double >() == gs);
- REQUIRE(rho.gridGlobalOffset() == ggo);
- REQUIRE(rho.gridUnitSI() == 1.0);
- REQUIRE(rho.unitDimension() == ud);
- REQUIRE(rho.timeOffset< double >() == 0.0);
+ for( uint64_t i = 0; i < 4; ++i )
+ {
+ *position_local_2 = position_global[i];
+ e_2["position"]["x"].storeChunk(position_local_2, {i}, {1});
+ }
- REQUIRE(rho.size() == 1);
- REQUIRE(rho.count(MeshRecordComponent::SCALAR) == 1);
+ std::generate(positionOffset_global.begin(), positionOffset_global.end(), [&posOff]{ return posOff++; });
+ std::shared_ptr< uint64_t > positionOffset_local_2(new uint64_t);
+ e_2["positionOffset"]["x"].resetDataset(Dataset(determineDatatype(positionOffset_local_2), {4}));
- p = {0., 0., 0.};
- e = {26, 26, 201};
- MeshRecordComponent& rho_scalar = rho[MeshRecordComponent::SCALAR];
- REQUIRE(rho_scalar.unitSI() == 1.0);
- REQUIRE(rho_scalar.position< double >() == p);
- REQUIRE(rho_scalar.getDatatype() == Datatype::DOUBLE);
- REQUIRE(rho_scalar.getExtent() == e);
- REQUIRE(rho_scalar.getDimensionality() == 3);
+ for( uint64_t i = 0; i < 4; ++i )
+ {
+ *positionOffset_local_2 = positionOffset_global[i];
+ e_2["positionOffset"]["x"].storeChunk(positionOffset_local_2, {i}, {1});
+ }
- REQUIRE(iteration_100.particles.size() == 1);
- REQUIRE(iteration_100.particles.count("electrons") == 1);
+ o.flush();
- ParticleSpecies& electrons = iteration_100.particles["electrons"];
+ ParticleSpecies& e_3 = o.iterations[3].particles["e"];
- REQUIRE(electrons.size() == 6);
- REQUIRE(electrons.count("charge") == 1);
- REQUIRE(electrons.count("mass") == 1);
- REQUIRE(electrons.count("momentum") == 1);
- REQUIRE(electrons.count("position") == 1);
- REQUIRE(electrons.count("positionOffset") == 1);
- REQUIRE(electrons.count("weighting") == 1);
+ std::generate(position_global.begin(), position_global.end(), [&pos]{ return pos++; });
+ std::shared_ptr< double > position_local_3(new double);
+ e_3["position"]["x"].resetDataset(Dataset(determineDatatype(position_local_3), {4}));
- ud = {{0., 0., 1., 1., 0., 0., 0.}};
- Record& charge = electrons["charge"];
- REQUIRE(charge.unitDimension() == ud);
- REQUIRE(charge.timeOffset< double >() == 0.0);
+ for( uint64_t i = 0; i < 4; ++i )
+ {
+ *position_local_3 = position_global[i];
+ e_3["position"]["x"].storeChunk(position_local_3, {i}, {1});
+ }
- REQUIRE(charge.size() == 1);
- REQUIRE(charge.count(RecordComponent::SCALAR) == 1);
+ std::generate(positionOffset_global.begin(), positionOffset_global.end(), [&posOff]{ return posOff++; });
+ std::shared_ptr< uint64_t > positionOffset_local_3(new uint64_t);
+ e_3["positionOffset"]["x"].resetDataset(Dataset(determineDatatype(positionOffset_local_3), {4}));
- e = {85625};
- RecordComponent& charge_scalar = charge[RecordComponent::SCALAR];
- REQUIRE(charge_scalar.unitSI() == 1.0);
- REQUIRE(charge_scalar.getDatatype() == Datatype::DOUBLE);
- REQUIRE(charge_scalar.getDimensionality() == 1);
- REQUIRE(charge_scalar.getExtent() == e);
+ for( uint64_t i = 0; i < 4; ++i )
+ {
+ *positionOffset_local_3 = positionOffset_global[i];
+ e_3["positionOffset"]["x"].storeChunk(positionOffset_local_3, {i}, {1});
+ }
- ud = {{1., 0., 0., 0., 0., 0., 0.}};
- Record& mass = electrons["mass"];
- REQUIRE(mass.unitDimension() == ud);
- REQUIRE(mass.timeOffset< double >() == 0.0);
+ o.flush();
+}
- REQUIRE(mass.size() == 1);
- REQUIRE(mass.count(RecordComponent::SCALAR) == 1);
+TEST_CASE( "write_test", "[serial]" )
+{
+ for (auto const & t: backends)
+ {
+ write_test(std::get<0>(t));
+ }
+}
- RecordComponent& mass_scalar = mass[RecordComponent::SCALAR];
- REQUIRE(mass_scalar.unitSI() == 1.0);
- REQUIRE(mass_scalar.getDatatype() == Datatype::DOUBLE);
- REQUIRE(mass_scalar.getDimensionality() == 1);
- REQUIRE(mass_scalar.getExtent() == e);
+inline
+void fileBased_write_empty_test(const std::string & backend)
+{
+ if( auxiliary::directory_exists("../samples/subdir") )
+ auxiliary::remove_directory("../samples/subdir");
- ud = {{1., 1., -1., 0., 0., 0., 0.}};
- Record& momentum = electrons["momentum"];
- REQUIRE(momentum.unitDimension() == ud);
- REQUIRE(momentum.timeOffset< double >() == 0.0);
+ Dataset dset = Dataset(Datatype::DOUBLE, {2});
+ {
+ Series o = Series("../samples/subdir/serial_fileBased_write%T." + backend, AccessType::CREATE);
- REQUIRE(momentum.size() == 3);
- REQUIRE(momentum.count("x") == 1);
- REQUIRE(momentum.count("y") == 1);
- REQUIRE(momentum.count("z") == 1);
+ ParticleSpecies& e_1 = o.iterations[1].particles["e"];
+ e_1["position"][RecordComponent::SCALAR].resetDataset(dset);
+ e_1["positionOffset"][RecordComponent::SCALAR].resetDataset(dset);
+ o.iterations[1].setTime(1.f);
+ ParticleSpecies& e_2 = o.iterations[2].particles["e"];
+ e_2["position"][RecordComponent::SCALAR].resetDataset(dset);
+ e_2["positionOffset"][RecordComponent::SCALAR].resetDataset(dset);
+ o.iterations[2].setTime(2.f);
+ ParticleSpecies& e_3 = o.iterations[3].particles["e"];
+ e_3["position"][RecordComponent::SCALAR].resetDataset(dset);
+ e_3["positionOffset"][RecordComponent::SCALAR].resetDataset(dset);
+ o.iterations[3].setTime(3.f);
+ }
- RecordComponent& momentum_x = momentum["x"];
- REQUIRE(momentum_x.unitSI() == 1.0);
- REQUIRE(momentum_x.getDatatype() == Datatype::DOUBLE);
- REQUIRE(momentum_x.getDimensionality() == 1);
- REQUIRE(momentum_x.getExtent() == e);
+ REQUIRE(auxiliary::directory_exists("../samples/subdir"));
+ REQUIRE(auxiliary::file_exists("../samples/subdir/serial_fileBased_write1." + backend));
+ REQUIRE(auxiliary::file_exists("../samples/subdir/serial_fileBased_write2." + backend));
+ REQUIRE(auxiliary::file_exists("../samples/subdir/serial_fileBased_write3." + backend));
- RecordComponent& momentum_y = momentum["y"];
- REQUIRE(momentum_y.unitSI() == 1.0);
- REQUIRE(momentum_y.getDatatype() == Datatype::DOUBLE);
- REQUIRE(momentum_y.getDimensionality() == 1);
- REQUIRE(momentum_y.getExtent() == e);
+ {
+ Series o = Series("../samples/subdir/serial_fileBased_write%T." + backend, AccessType::READ_ONLY);
- RecordComponent& momentum_z = momentum["z"];
- REQUIRE(momentum_z.unitSI() == 1.0);
- REQUIRE(momentum_z.getDatatype() == Datatype::DOUBLE);
- REQUIRE(momentum_z.getDimensionality() == 1);
- REQUIRE(momentum_z.getExtent() == e);
+ REQUIRE(o.iterations.size() == 3);
+ REQUIRE(o.iterations.count(1) == 1);
+ REQUIRE(o.iterations.count(2) == 1);
+ REQUIRE(o.iterations.count(3) == 1);
- ud = {{1., 0., 0., 0., 0., 0., 0.}};
- Record& position = electrons["position"];
- REQUIRE(position.unitDimension() == ud);
- REQUIRE(position.timeOffset< double >() == 0.0);
+ REQUIRE(o.iterations[1].time< float >() == 1.f);
+ REQUIRE(o.iterations[2].time< float >() == 2.f);
+ REQUIRE(o.iterations[3].time< float >() == 3.f);
- REQUIRE(position.size() == 3);
- REQUIRE(position.count("x") == 1);
- REQUIRE(position.count("y") == 1);
- REQUIRE(position.count("z") == 1);
+ REQUIRE(o.iterations[1].particles.size() == 1);
+ REQUIRE(o.iterations[1].particles.count("e") == 1);
+ REQUIRE(o.iterations[2].particles.size() == 1);
+ REQUIRE(o.iterations[2].particles.count("e") == 1);
+ REQUIRE(o.iterations[3].particles.size() == 1);
+ REQUIRE(o.iterations[3].particles.count("e") == 1);
- RecordComponent& position_x = position["x"];
- REQUIRE(position_x.unitSI() == 1.0);
- REQUIRE(position_x.getDatatype() == Datatype::DOUBLE);
- REQUIRE(position_x.getDimensionality() == 1);
- REQUIRE(position_x.getExtent() == e);
+ REQUIRE(o.iterations[1].particles["e"].size() == 2);
+ REQUIRE(o.iterations[1].particles["e"].count("position") == 1);
+ REQUIRE(o.iterations[1].particles["e"].count("positionOffset") == 1);
+ REQUIRE(o.iterations[2].particles["e"].size() == 2);
+ REQUIRE(o.iterations[2].particles["e"].count("position") == 1);
+ REQUIRE(o.iterations[2].particles["e"].count("positionOffset") == 1);
+ REQUIRE(o.iterations[3].particles["e"].size() == 2);
+ REQUIRE(o.iterations[3].particles["e"].count("position") == 1);
+ REQUIRE(o.iterations[3].particles["e"].count("positionOffset") == 1);
- RecordComponent& position_y = position["y"];
- REQUIRE(position_y.unitSI() == 1.0);
- REQUIRE(position_y.getDatatype() == Datatype::DOUBLE);
- REQUIRE(position_y.getDimensionality() == 1);
- REQUIRE(position_y.getExtent() == e);
+ REQUIRE(o.iterations[1].particles["e"]["position"].size() == 1);
+ REQUIRE(o.iterations[1].particles["e"]["position"].count(RecordComponent::SCALAR) == 1);
+ REQUIRE(o.iterations[1].particles["e"]["positionOffset"].size() == 1);
+ REQUIRE(o.iterations[1].particles["e"]["positionOffset"].count(RecordComponent::SCALAR) == 1);
+ REQUIRE(o.iterations[1].particles["e"]["position"][RecordComponent::SCALAR].getDatatype() == Datatype::DOUBLE);
+ REQUIRE(o.iterations[1].particles["e"]["position"][RecordComponent::SCALAR].getDimensionality() == 1);
+ REQUIRE(o.iterations[1].particles["e"]["position"][RecordComponent::SCALAR].getExtent() == Extent{2});
+ REQUIRE(o.iterations[2].particles["e"]["position"].size() == 1);
+ REQUIRE(o.iterations[2].particles["e"]["position"].count(RecordComponent::SCALAR) == 1);
+ REQUIRE(o.iterations[2].particles["e"]["positionOffset"].size() == 1);
+ REQUIRE(o.iterations[2].particles["e"]["positionOffset"].count(RecordComponent::SCALAR) == 1);
+ REQUIRE(o.iterations[2].particles["e"]["position"][RecordComponent::SCALAR].getDatatype() == Datatype::DOUBLE);
+ REQUIRE(o.iterations[2].particles["e"]["position"][RecordComponent::SCALAR].getDimensionality() == 1);
+ REQUIRE(o.iterations[2].particles["e"]["position"][RecordComponent::SCALAR].getExtent() == Extent{2});
+ REQUIRE(o.iterations[3].particles["e"]["position"].size() == 1);
+ REQUIRE(o.iterations[3].particles["e"]["position"].count(RecordComponent::SCALAR) == 1);
+ REQUIRE(o.iterations[3].particles["e"]["positionOffset"].size() == 1);
+ REQUIRE(o.iterations[3].particles["e"]["positionOffset"].count(RecordComponent::SCALAR) == 1);
+ REQUIRE(o.iterations[3].particles["e"]["position"][RecordComponent::SCALAR].getDatatype() == Datatype::DOUBLE);
+ REQUIRE(o.iterations[3].particles["e"]["position"][RecordComponent::SCALAR].getDimensionality() == 1);
+ REQUIRE(o.iterations[3].particles["e"]["position"][RecordComponent::SCALAR].getExtent() == Extent{2});
+ }
- RecordComponent& position_z = position["z"];
- REQUIRE(position_z.unitSI() == 1.0);
- REQUIRE(position_z.getDatatype() == Datatype::DOUBLE);
- REQUIRE(position_z.getDimensionality() == 1);
- REQUIRE(position_z.getExtent() == e);
+ {
+ Series o = Series("../samples/subdir/serial_fileBased_write%T." + backend, AccessType::READ_WRITE);
+ ParticleSpecies& e_4 = o.iterations[4].particles["e"];
+ e_4["position"][RecordComponent::SCALAR].resetDataset(dset);
+ e_4["positionOffset"][RecordComponent::SCALAR].resetDataset(dset);
+ o.iterations[4].setTime(4.f);
+ }
- Record& positionOffset = electrons["positionOffset"];
- REQUIRE(positionOffset.unitDimension() == ud);
- REQUIRE(positionOffset.timeOffset< double >() == 0.0);
+ {
+ Series o = Series("../samples/subdir/serial_fileBased_write%T." + backend, AccessType::READ_ONLY);
- REQUIRE(positionOffset.size() == 3);
- REQUIRE(positionOffset.count("x") == 1);
- REQUIRE(positionOffset.count("y") == 1);
- REQUIRE(positionOffset.count("z") == 1);
-
- RecordComponent& positionOffset_x = positionOffset["x"];
- REQUIRE(positionOffset_x.unitSI() == 1.0);
- REQUIRE(positionOffset_x.getDatatype() == Datatype::DOUBLE);
- REQUIRE(positionOffset_x.getDimensionality() == 1);
- REQUIRE(positionOffset_x.getExtent() == e);
+ REQUIRE(o.iterations.size() == 4);
+ REQUIRE(o.iterations.count(4) == 1);
- RecordComponent& positionOffset_y = positionOffset["y"];
- REQUIRE(positionOffset_y.unitSI() == 1.0);
- REQUIRE(positionOffset_y.getDatatype() == Datatype::DOUBLE);
- REQUIRE(positionOffset_y.getDimensionality() == 1);
- REQUIRE(positionOffset_y.getExtent() == e);
+ REQUIRE(o.iterations[4].time< float >() == 4.f);
- RecordComponent& positionOffset_z = positionOffset["z"];
- REQUIRE(positionOffset_z.unitSI() == 1.0);
- REQUIRE(positionOffset_z.getDatatype() == Datatype::DOUBLE);
- REQUIRE(positionOffset_z.getDimensionality() == 1);
- REQUIRE(positionOffset_z.getExtent() == e);
+ REQUIRE(o.iterations[4].particles.size() == 1);
+ REQUIRE(o.iterations[4].particles.count("e") == 1);
- ud = {{0., 0., 0., 0., 0., 0., 0.}};
- Record& weighting = electrons["weighting"];
- REQUIRE(weighting.unitDimension() == ud);
- REQUIRE(weighting.timeOffset< double >() == 0.0);
+ REQUIRE(o.iterations[4].particles["e"].size() == 2);
+ REQUIRE(o.iterations[4].particles["e"].count("position") == 1);
+ REQUIRE(o.iterations[4].particles["e"].count("positionOffset") == 1);
- REQUIRE(weighting.size() == 1);
- REQUIRE(weighting.count(RecordComponent::SCALAR) == 1);
+ REQUIRE(o.iterations[4].particles["e"]["position"].size() == 1);
+ REQUIRE(o.iterations[4].particles["e"]["position"].count(RecordComponent::SCALAR) == 1);
+ REQUIRE(o.iterations[4].particles["e"]["positionOffset"].size() == 1);
+ REQUIRE(o.iterations[4].particles["e"]["positionOffset"].count(RecordComponent::SCALAR) == 1);
+ REQUIRE(o.iterations[4].particles["e"]["position"][RecordComponent::SCALAR].getDatatype() == Datatype::DOUBLE);
+ REQUIRE(o.iterations[4].particles["e"]["position"][RecordComponent::SCALAR].getDimensionality() == 1);
+ REQUIRE(o.iterations[4].particles["e"]["position"][RecordComponent::SCALAR].getExtent() == Extent{2});
+ }
+}
- RecordComponent& weighting_scalar = weighting[RecordComponent::SCALAR];
- REQUIRE(weighting_scalar.unitSI() == 1.0);
- REQUIRE(weighting_scalar.getDatatype() == Datatype::DOUBLE);
- REQUIRE(weighting_scalar.getDimensionality() == 1);
- REQUIRE(weighting_scalar.getExtent() == e);
- } catch (no_such_file_error& e)
+TEST_CASE( "fileBased_write_empty_test", "[serial]" )
+{
+ for (auto const & t: backends)
{
- std::cerr << "git sample not accessible. (" << e.what() << ")\n";
- return;
+ fileBased_write_empty_test(std::get<0>(t));
}
}
-TEST_CASE( "git_hdf5_sample_content_test", "[serial][hdf5]" )
+inline
+void fileBased_write_test(const std::string & backend)
{
- try
+ if( auxiliary::directory_exists("../samples/subdir") )
+ auxiliary::remove_directory("../samples/subdir");
+
{
- Series o = Series("../samples/git-sample/data%T.h5", AccessType::READ_ONLY);
+ Series o = Series("../samples/subdir/serial_fileBased_write%08T." + backend, AccessType::CREATE);
- {
- double actual[3][3][3] = {{{-1.9080703683727052e-09, -1.5632650729457964e-10, 1.1497536256399599e-09},
- {-1.9979540244463578e-09, -2.5512036927466397e-10, 1.0402234629225404e-09},
- {-1.7353589676361025e-09, -8.0899198451334087e-10, -1.6443779671249104e-10}},
+ ParticleSpecies& e_1 = o.iterations[1].particles["e"];
- {{-2.0029988778702545e-09, -1.9543477947081556e-10, 1.0916454407094989e-09},
- {-2.3890367462087170e-09, -4.7158010829662089e-10, 9.0026075483251589e-10},
- {-1.9033881137886510e-09, -7.5192119197708962e-10, 5.0038861942880430e-10}},
+ std::vector< double > position_global(4);
+ double pos{0.};
+ std::generate(position_global.begin(), position_global.end(), [&pos]{ return pos++; });
+ std::shared_ptr< double > position_local_1(new double);
+ e_1["position"]["x"].resetDataset(Dataset(determineDatatype(position_local_1), {4}));
+ std::vector< uint64_t > positionOffset_global(4);
+ uint64_t posOff{0};
+ std::generate(positionOffset_global.begin(), positionOffset_global.end(), [&posOff]{ return posOff++; });
+ std::shared_ptr< uint64_t > positionOffset_local_1(new uint64_t);
+ e_1["positionOffset"]["x"].resetDataset(Dataset(determineDatatype(positionOffset_local_1), {4}));
- {{-1.3271805876513554e-09, -5.9243276950837753e-10, -2.2445734160214670e-10},
- {-7.4578609954301101e-10, -1.1995737736469891e-10, 2.5611823772919706e-10},
- {-9.4806251738077663e-10, -1.5472800818372434e-10, -3.6461900165818406e-10}}};
- MeshRecordComponent& rho = o.iterations[100].meshes["rho"][MeshRecordComponent::SCALAR];
- Offset offset{20, 20, 190};
- Extent extent{3, 3, 3};
- auto data = rho.loadChunk(offset, extent);
+ for( uint64_t i = 0; i < 4; ++i )
+ {
+ *position_local_1 = position_global[i];
+ e_1["position"]["x"].storeChunk(position_local_1, {i}, {1});
+ *positionOffset_local_1 = positionOffset_global[i];
+ e_1["positionOffset"]["x"].storeChunk(positionOffset_local_1, {i}, {1});
o.flush();
- double* raw_ptr = data.get();
-
- for( int i = 0; i < 3; ++i )
- for( int j = 0; j < 3; ++j )
- for( int k = 0; k < 3; ++k )
- REQUIRE(raw_ptr[((i*3) + j)*3 + k] == actual[i][j][k]);
}
+ o.iterations[1].setTime(static_cast< double >(1));
+
+ ParticleSpecies& e_2 = o.iterations[2].particles["e"];
+
+ std::generate(position_global.begin(), position_global.end(), [&pos]{ return pos++; });
+ e_2["position"]["x"].resetDataset(Dataset(determineDatatype< double >(), {4}));
+ std::generate(positionOffset_global.begin(), positionOffset_global.end(), [&posOff]{ return posOff++; });
+ std::shared_ptr< uint64_t > positionOffset_local_2(new uint64_t);
+ e_2["positionOffset"]["x"].resetDataset(Dataset(determineDatatype(positionOffset_local_2), {4}));
+
+ for( uint64_t i = 0; i < 4; ++i )
{
- double constant_value = 9.1093829099999999e-31;
- RecordComponent& electrons_mass = o.iterations[100].particles["electrons"]["mass"][RecordComponent::SCALAR];
- Offset offset{15};
- Extent extent{3};
- auto data = electrons_mass.loadChunk(offset, extent);
+ double const position_local_2 = position_global.at(i);
+ e_2["position"]["x"].storeChunk(shareRaw(&position_local_2), {i}, {1});
+ *positionOffset_local_2 = positionOffset_global[i];
+ e_2["positionOffset"]["x"].storeChunk(positionOffset_local_2, {i}, {1});
o.flush();
- double* raw_ptr = data.get();
-
- for( int i = 0; i < 3; ++i )
- REQUIRE(raw_ptr[i] == constant_value);
}
- } catch (no_such_file_error& e)
- {
- std::cerr << "git sample not accessible. (" << e.what() << ")\n";
- return;
- }
-}
-TEST_CASE( "git_hdf5_sample_fileBased_read_test", "[serial][hdf5]" )
-{
- try
- {
- Series o = Series("../samples/git-sample/data%T.h5", AccessType::READ_ONLY);
+ o.iterations[2].setTime(static_cast< double >(2));
- REQUIRE(o.iterations.size() == 5);
- REQUIRE(o.iterations.count(100) == 1);
- REQUIRE(o.iterations.count(200) == 1);
- REQUIRE(o.iterations.count(300) == 1);
- REQUIRE(o.iterations.count(400) == 1);
- REQUIRE(o.iterations.count(500) == 1);
+ ParticleSpecies& e_3 = o.iterations[3].particles["e"];
-#if openPMD_USE_INVASIVE_TESTS
- REQUIRE(*o.m_filenamePadding == 8);
-#endif
- } catch (no_such_file_error& e)
- {
- std::cerr << "git sample not accessible. (" << e.what() << ")\n";
- return;
+ std::generate(position_global.begin(), position_global.end(), [&pos]{ return pos++; });
+ std::shared_ptr< double > position_local_3(new double);
+ e_3["position"]["x"].resetDataset(Dataset(determineDatatype(position_local_3), {4}));
+ std::generate(positionOffset_global.begin(), positionOffset_global.end(), [&posOff]{ return posOff++; });
+ std::shared_ptr< uint64_t > positionOffset_local_3(new uint64_t);
+ e_3["positionOffset"]["x"].resetDataset(Dataset(determineDatatype(positionOffset_local_3), {4}));
+
+ for( uint64_t i = 0; i < 4; ++i )
+ {
+ *position_local_3 = position_global[i];
+ e_3["position"]["x"].storeChunk(position_local_3, {i}, {1});
+ *positionOffset_local_3 = positionOffset_global[i];
+ e_3["positionOffset"]["x"].storeChunk(positionOffset_local_3, {i}, {1});
+ o.flush();
+ }
+
+ o.setOpenPMDextension(1);
+ o.iterations[3].setTime(static_cast< double >(3));
}
+ REQUIRE(auxiliary::file_exists("../samples/subdir/serial_fileBased_write00000001." + backend));
+ REQUIRE(auxiliary::file_exists("../samples/subdir/serial_fileBased_write00000002." + backend));
+ REQUIRE(auxiliary::file_exists("../samples/subdir/serial_fileBased_write00000003." + backend));
- try
{
- Series o = Series("../samples/git-sample/data%08T.h5", AccessType::READ_ONLY);
+ Series o = Series("../samples/subdir/serial_fileBased_write%T." + backend, AccessType::READ_ONLY);
- REQUIRE(o.iterations.size() == 5);
- REQUIRE(o.iterations.count(100) == 1);
- REQUIRE(o.iterations.count(200) == 1);
- REQUIRE(o.iterations.count(300) == 1);
- REQUIRE(o.iterations.count(400) == 1);
- REQUIRE(o.iterations.count(500) == 1);
+ REQUIRE(o.iterations.size() == 3);
+ REQUIRE(o.iterations.count(1) == 1);
+ REQUIRE(o.iterations.count(2) == 1);
+ REQUIRE(o.iterations.count(3) == 1);
#if openPMD_USE_INVASIVE_TESTS
REQUIRE(*o.m_filenamePadding == 8);
#endif
- } catch (no_such_file_error& e)
- {
- std::cerr << "git sample not accessible. (" << e.what() << ")\n";
- return;
- }
-
- REQUIRE_THROWS_WITH(Series("../samples/git-sample/data%07T.h5", AccessType::READ_ONLY),
- Catch::Equals("No matching iterations found: data%07T"));
-
- try
- {
- std::vector< std::string > newFiles{"../samples/git-sample/data00000001.h5",
- "../samples/git-sample/data00000010.h5",
- "../samples/git-sample/data00001000.h5",
- "../samples/git-sample/data00010000.h5",
- "../samples/git-sample/data00100000.h5"};
-
- for( auto const& file : newFiles )
- if( auxiliary::file_exists(file) )
- auxiliary::remove_file(file);
+ REQUIRE(o.basePath() == "/data/%T/");
+ REQUIRE(o.iterationEncoding() == IterationEncoding::fileBased);
+ REQUIRE(o.iterationFormat() == "serial_fileBased_write%08T");
+ REQUIRE(o.openPMD() == "1.1.0");
+ REQUIRE(o.openPMDextension() == 1u);
+ REQUIRE(o.particlesPath() == "particles/");
+ REQUIRE_FALSE(o.containsAttribute("meshesPath"));
+ REQUIRE_THROWS_AS(o.meshesPath(), no_such_attribute_error);
+ std::array< double, 7 > udim{{1, 0, 0, 0, 0, 0, 0}};
+ Extent ext{4};
+ for( auto& entry : o.iterations )
{
- Series o = Series("../samples/git-sample/data%T.h5", AccessType::READ_WRITE);
-
-#if openPMD_USE_INVASIVE_TESTS
- REQUIRE(*o.m_filenamePadding == 8);
+ auto& it = entry.second;
+ REQUIRE(it.dt< double >() == 1.);
+ REQUIRE(it.time< double >() == static_cast< double >(entry.first));
+ REQUIRE(it.timeUnitSI() == 1.);
+ auto& pos = it.particles.at("e").at("position");
+ REQUIRE(pos.timeOffset< float >() == 0.f);
+ REQUIRE(pos.unitDimension() == udim);
+ auto& pos_x = pos.at("x");
+ REQUIRE(pos_x.unitSI() == 1.);
+ REQUIRE(pos_x.getExtent() == ext);
+ REQUIRE(pos_x.getDatatype() == Datatype::DOUBLE);
+ auto& posOff = it.particles.at("e").at("positionOffset");
+ REQUIRE(posOff.timeOffset< float >() == 0.f);
+ REQUIRE(posOff.unitDimension() == udim);
+ auto& posOff_x = posOff.at("x");
+ REQUIRE(posOff_x.unitSI() == 1.);
+ REQUIRE(posOff_x.getExtent() == ext);
+#if !defined(_MSC_VER)
+ REQUIRE(posOff_x.getDatatype() == determineDatatype< uint64_t >());
#endif
+ REQUIRE(isSame(posOff_x.getDatatype(), determineDatatype< uint64_t >()));
- o.iterations[1];
- o.iterations[10];
- o.iterations[1000];
- o.iterations[10000];
- o.iterations[100000];
+ auto position = pos_x.loadChunk< double >({0}, {4});
+ auto position_raw = position.get();
+ auto positionOffset = posOff_x.loadChunk< uint64_t >({0}, {4});
+ auto positionOffset_raw = positionOffset.get();
o.flush();
+ for( uint64_t j = 0; j < 4; ++j )
+ {
+ REQUIRE(position_raw[j] == static_cast< double >(j + (entry.first-1)*4));
+ REQUIRE(positionOffset_raw[j] == j + (entry.first-1)*4);
+ }
}
+ }
- for( auto const& file : newFiles )
- {
- REQUIRE(auxiliary::file_exists(file));
- auxiliary::remove_file(file);
- }
- } catch (no_such_file_error& e)
+ // extend existing series with new step and auto-detection of iteration padding
{
- std::cerr << "git sample not accessible. (" << e.what() << ")\n";
- return;
- }
-}
+ Series o = Series("../samples/subdir/serial_fileBased_write%T." + backend, AccessType::READ_WRITE);
-TEST_CASE( "hzdr_hdf5_sample_content_test", "[serial][hdf5]" )
-{
- // since this file might not be publicly available, gracefully handle errors
- try
- {
- /* HZDR: /bigdata/hplsim/development/huebl/lwfa-openPMD-062-smallLWFA-h5
- * DOI:10.14278/rodare.57 */
- Series o = Series("../samples/hzdr-sample/h5/simData_%T.h5", AccessType::READ_ONLY);
+ REQUIRE(o.iterations.size() == 3);
+ o.iterations[4];
+ REQUIRE(o.iterations.size() == 4);
+ }
+ REQUIRE(auxiliary::file_exists("../samples/subdir/serial_fileBased_write00000004." + backend));
- REQUIRE(o.openPMD() == "1.0.0");
- REQUIRE(o.openPMDextension() == 1);
- REQUIRE(o.basePath() == "/data/%T/");
- REQUIRE(o.meshesPath() == "fields/");
- REQUIRE(o.particlesPath() == "particles/");
- REQUIRE(o.author() == "Axel Huebl ");
- REQUIRE(o.software() == "PIConGPU");
- REQUIRE(o.softwareVersion() == "0.2.0");
- REQUIRE(o.date() == "2016-11-04 00:59:14 +0100");
- REQUIRE(o.iterationEncoding() == IterationEncoding::fileBased);
- REQUIRE(o.iterationFormat() == "h5/simData_%T.h5");
- REQUIRE(o.name() == "simData_%T");
+ // additional iteration with different iteration padding but similar content
+ {
+ Series o = Series("../samples/subdir/serial_fileBased_write%01T." + backend, AccessType::READ_WRITE);
- REQUIRE(o.iterations.size() >= 1);
- REQUIRE(o.iterations.count(0) == 1);
+ REQUIRE(o.iterations.empty());
- Iteration& i = o.iterations[0];
- REQUIRE(i.time< float >() == static_cast< float >(0.0f));
- REQUIRE(i.dt< float >() == static_cast< float >(1.0f));
- REQUIRE(i.timeUnitSI() == 1.3899999999999999e-16);
+ auto& it = o.iterations[10];
+ ParticleSpecies& e = it.particles["e"];
+ e["position"]["x"].resetDataset(Dataset(Datatype::DOUBLE, {42}));
+ e["positionOffset"]["x"].resetDataset(Dataset(Datatype::DOUBLE, {42}));
+ e["position"]["x"].makeConstant(1.23);
+ e["positionOffset"]["x"].makeConstant(1.23);
- REQUIRE(i.meshes.size() == 4);
- REQUIRE(i.meshes.count("B") == 1);
- REQUIRE(i.meshes.count("E") == 1);
- REQUIRE(i.meshes.count("e_chargeDensity") == 1);
- REQUIRE(i.meshes.count("e_energyDensity") == 1);
+ REQUIRE(o.iterations.size() == 1);
+ }
+ REQUIRE(auxiliary::file_exists("../samples/subdir/serial_fileBased_write10." + backend));
- std::vector< std::string > al{"z", "y", "x"};
- std::vector< float > gs{static_cast< float >(6.2393283843994141f),
- static_cast< float >(1.0630855560302734f),
- static_cast< float >(6.2393283843994141f)};
- std::vector< double > ggo{0., 0., 0.};
- std::array< double, 7 > ud{{0., 1., -2., -1., 0., 0., 0.}};
- Mesh& B = i.meshes["B"];
- REQUIRE(B.geometry() == Mesh::Geometry::cartesian);
- REQUIRE(B.dataOrder() == Mesh::DataOrder::C);
- REQUIRE(B.axisLabels() == al);
- REQUIRE(B.gridSpacing< float >() == gs);
- REQUIRE(B.gridGlobalOffset() == ggo);
- REQUIRE(B.gridUnitSI() == 4.1671151661999998e-08);
- REQUIRE(B.unitDimension() == ud);
- REQUIRE(B.timeOffset< float >() == static_cast< float >(0.0f));
+ // read back with auto-detection and non-fixed padding
+ {
+ Series s = Series("../samples/subdir/serial_fileBased_write%T." + backend, AccessType::READ_ONLY);
+ REQUIRE(s.iterations.size() == 5);
+ }
- REQUIRE(B.size() == 3);
- REQUIRE(B.count("x") == 1);
- REQUIRE(B.count("y") == 1);
- REQUIRE(B.count("z") == 1);
+ // write with auto-detection and inconsistent padding
+ {
+ REQUIRE_THROWS_WITH(Series("../samples/subdir/serial_fileBased_write%T." + backend, AccessType::READ_WRITE),
+ Catch::Equals("Cannot write to a series with inconsistent iteration padding. Please specify '%0T' or open as read-only."));
+ }
- std::vector< float > p{static_cast< float >(0.0f),
- static_cast< float >(0.5f),
- static_cast< float >(0.5f)};
- Extent e{80, 384, 80};
- MeshRecordComponent& B_x = B["x"];
- REQUIRE(B_x.unitSI() == 40903.822240601701);
- REQUIRE(B_x.position< float >() == p);
- REQUIRE(B_x.getDatatype() == Datatype::FLOAT);
- REQUIRE(B_x.getExtent() == e);
- REQUIRE(B_x.getDimensionality() == 3);
+ // read back with auto-detection and fixed padding
+ {
+ Series s = Series("../samples/subdir/serial_fileBased_write%08T." + backend, AccessType::READ_ONLY);
+ REQUIRE(s.iterations.size() == 4);
+ }
+}
- p = {static_cast< float >(0.5f),
- static_cast< float >(0.0f),
- static_cast< float >(0.5f)};
- MeshRecordComponent& B_y = B["y"];
- REQUIRE(B_y.unitSI() == 40903.822240601701);
- REQUIRE(B_y.position< float >() == p);
- REQUIRE(B_y.getDatatype() == Datatype::FLOAT);
- REQUIRE(B_y.getExtent() == e);
- REQUIRE(B_y.getDimensionality() == 3);
+TEST_CASE( "fileBased_write_test", "[serial]" )
+{
+ for (auto const & t: backends)
+ {
+ fileBased_write_test(std::get<0>(t));
+ }
+}
- p = {static_cast< float >(0.5f),
- static_cast< float >(0.5f),
- static_cast< float >(0.0f)};
- MeshRecordComponent& B_z = B["z"];
- REQUIRE(B_z.unitSI() == 40903.822240601701);
- REQUIRE(B_z.position< float >() == p);
- REQUIRE(B_z.getDatatype() == Datatype::FLOAT);
- REQUIRE(B_z.getExtent() == e);
- REQUIRE(B_z.getDimensionality() == 3);
+inline
+void bool_test(const std::string & backend)
+{
+ {
+ Series o = Series("../samples/serial_bool." + backend, AccessType::CREATE);
- ud = {{1., 1., -3., -1., 0., 0., 0.}};
- Mesh& E = i.meshes["E"];
- REQUIRE(E.geometry() == Mesh::Geometry::cartesian);
- REQUIRE(E.dataOrder() == Mesh::DataOrder::C);
- REQUIRE(E.axisLabels() == al);
- REQUIRE(E.gridSpacing< float >() == gs);
- REQUIRE(E.gridGlobalOffset() == ggo);
- REQUIRE(E.gridUnitSI() == 4.1671151661999998e-08);
- REQUIRE(E.unitDimension() == ud);
- REQUIRE(E.timeOffset< float >() == static_cast< float >(0.0f));
+ o.setAttribute("Bool attribute (true)", true);
+ o.setAttribute("Bool attribute (false)", false);
+ }
+ {
+ Series o = Series("../samples/serial_bool." + backend, AccessType::READ_ONLY);
- REQUIRE(E.size() == 3);
- REQUIRE(E.count("x") == 1);
- REQUIRE(E.count("y") == 1);
- REQUIRE(E.count("z") == 1);
+ auto attrs = o.attributes();
+ REQUIRE(std::count(attrs.begin(), attrs.end(), "Bool attribute (true)") == 1);
+ REQUIRE(std::count(attrs.begin(), attrs.end(), "Bool attribute (false)") == 1);
+ REQUIRE(o.getAttribute("Bool attribute (true)").get< bool >() == true);
+ REQUIRE(o.getAttribute("Bool attribute (false)").get< bool >() == false);
+ }
+}
- p = {static_cast< float >(0.5f),
- static_cast< float >(0.0f),
- static_cast< float >(0.0f)};
- e = {80, 384, 80};
- MeshRecordComponent& E_x = E["x"];
- REQUIRE(E_x.unitSI() == 12262657411105.049);
- REQUIRE(E_x.position< float >() == p);
- REQUIRE(E_x.getDatatype() == Datatype::FLOAT);
- REQUIRE(E_x.getExtent() == e);
- REQUIRE(E_x.getDimensionality() == 3);
+TEST_CASE( "bool_test", "[serial]" )
+{
+ for (auto const & t: backends)
+ {
+ bool_test(std::get<0>(t));
+ }
+}
- p = {static_cast< float >(0.0f),
- static_cast< float >(0.5f),
- static_cast< float >(0.0f)};
- MeshRecordComponent& E_y = E["y"];
- REQUIRE(E_y.unitSI() == 12262657411105.049);
- REQUIRE(E_y.position< float >() == p);
- REQUIRE(E_y.getDatatype() == Datatype::FLOAT);
- REQUIRE(E_y.getExtent() == e);
- REQUIRE(E_y.getDimensionality() == 3);
+inline
+void patch_test(const std::string & backend)
+{
+ Series o = Series("../samples/serial_patch." + backend, AccessType::CREATE);
- p = {static_cast< float >(0.0f),
- static_cast< float >(0.0f),
- static_cast< float >(0.5f)};
- MeshRecordComponent& E_z = E["z"];
- REQUIRE(E_z.unitSI() == 12262657411105.049);
- REQUIRE(E_z.position< float >() == p);
- REQUIRE(E_z.getDatatype() == Datatype::FLOAT);
- REQUIRE(E_z.getExtent() == e);
- REQUIRE(E_z.getDimensionality() == 3);
+ auto dset = Dataset(Datatype::DOUBLE, {1});
+ o.iterations[1].particles["e"].particlePatches["offset"]["x"].resetDataset(dset);
+ o.iterations[1].particles["e"].particlePatches["offset"]["x"].setUnitSI(42);
+ o.iterations[1].particles["e"]["position"][RecordComponent::SCALAR].resetDataset(dset);
+ o.iterations[1].particles["e"]["positionOffset"][RecordComponent::SCALAR].resetDataset(dset);
+}
- ud = {{-3., 0., 1., 1., 0., 0., 0.}};
- Mesh& e_chargeDensity = i.meshes["e_chargeDensity"];
- REQUIRE(e_chargeDensity.geometry() == Mesh::Geometry::cartesian);
- REQUIRE(e_chargeDensity.dataOrder() == Mesh::DataOrder::C);
- REQUIRE(e_chargeDensity.axisLabels() == al);
- REQUIRE(e_chargeDensity.gridSpacing< float >() == gs);
- REQUIRE(e_chargeDensity.gridGlobalOffset() == ggo);
- REQUIRE(e_chargeDensity.gridUnitSI() == 4.1671151661999998e-08);
- REQUIRE(e_chargeDensity.unitDimension() == ud);
- REQUIRE(e_chargeDensity.timeOffset< float >() == static_cast< float >(0.0f));
+TEST_CASE( "patch_test", "[serial]" )
+{
+ for (auto const & t: backends)
+ {
+ patch_test(std::get<0>(t));
+ }
+}
- REQUIRE(e_chargeDensity.size() == 1);
- REQUIRE(e_chargeDensity.count(MeshRecordComponent::SCALAR) == 1);
+inline
+void deletion_test(const std::string & backend)
+{
+ Series o = Series("../samples/serial_deletion." + backend, AccessType::CREATE);
- p = {static_cast< float >(0.f),
- static_cast< float >(0.f),
- static_cast< float >(0.f)};
- MeshRecordComponent& e_chargeDensity_scalar = e_chargeDensity[MeshRecordComponent::SCALAR];
- REQUIRE(e_chargeDensity_scalar.unitSI() == 66306201.002331272);
- REQUIRE(e_chargeDensity_scalar.position< float >() == p);
- REQUIRE(e_chargeDensity_scalar.getDatatype() == Datatype::FLOAT);
- REQUIRE(e_chargeDensity_scalar.getExtent() == e);
- REQUIRE(e_chargeDensity_scalar.getDimensionality() == 3);
- ud = {{-1., 1., -2., 0., 0., 0., 0.}};
- Mesh& e_energyDensity = i.meshes["e_energyDensity"];
- REQUIRE(e_energyDensity.geometry() == Mesh::Geometry::cartesian);
- REQUIRE(e_energyDensity.dataOrder() == Mesh::DataOrder::C);
- REQUIRE(e_energyDensity.axisLabels() == al);
- REQUIRE(e_energyDensity.gridSpacing< float >() == gs);
- REQUIRE(e_energyDensity.gridGlobalOffset() == ggo);
- REQUIRE(e_energyDensity.gridUnitSI() == 4.1671151661999998e-08);
- REQUIRE(e_energyDensity.unitDimension() == ud);
- REQUIRE(e_energyDensity.timeOffset< float >() == static_cast< float >(0.0f));
+ o.setAttribute("removed",
+ "this attribute will be removed after being written to disk");
+ o.flush();
- REQUIRE(e_energyDensity.size() == 1);
- REQUIRE(e_energyDensity.count(MeshRecordComponent::SCALAR) == 1);
+ o.deleteAttribute("removed");
+ o.flush();
- MeshRecordComponent& e_energyDensity_scalar = e_energyDensity[MeshRecordComponent::SCALAR];
- REQUIRE(e_energyDensity_scalar.unitSI() == 1.0146696675429705e+18);
- REQUIRE(e_energyDensity_scalar.position< float >() == p);
- REQUIRE(e_energyDensity_scalar.getDatatype() == Datatype::FLOAT);
- REQUIRE(e_energyDensity_scalar.getExtent() == e);
- REQUIRE(e_energyDensity_scalar.getDimensionality() == 3);
+ ParticleSpecies& e = o.iterations[1].particles["e"];
+ auto dset = Dataset(Datatype::DOUBLE, {1});
+ e["position"][RecordComponent::SCALAR].resetDataset(dset);
+ e["positionOffset"][RecordComponent::SCALAR].resetDataset(dset);
+ e.erase("deletion");
+ o.flush();
- REQUIRE(i.particles.size() == 1);
- REQUIRE(i.particles.count("e") == 1);
+ e["deletion_scalar"][RecordComponent::SCALAR].resetDataset(dset);
+ o.flush();
- ParticleSpecies& species_e = i.particles["e"];
+ e["deletion_scalar"].erase(RecordComponent::SCALAR);
+ e.erase("deletion_scalar");
+ o.flush();
- REQUIRE(species_e.size() == 6);
- REQUIRE(species_e.count("charge") == 1);
- REQUIRE(species_e.count("mass") == 1);
- REQUIRE(species_e.count("momentum") == 1);
- REQUIRE(species_e.count("particlePatches") == 0);
- REQUIRE(species_e.count("position") == 1);
- REQUIRE(species_e.count("positionOffset") == 1);
- REQUIRE(species_e.count("weighting") == 1);
+ double value = 0.;
+ e["deletion_scalar_constant"][RecordComponent::SCALAR].resetDataset(dset);
+ e["deletion_scalar_constant"][RecordComponent::SCALAR].makeConstant(value);
+ o.flush();
- ud = {{0., 0., 1., 1., 0., 0., 0.}};
- Record& e_charge = species_e["charge"];
- REQUIRE(e_charge.unitDimension() == ud);
- REQUIRE(e_charge.timeOffset< float >() == static_cast< float >(0.0f));
+ e["deletion_scalar_constant"].erase(RecordComponent::SCALAR);
+ e.erase("deletion_scalar_constant");
+ o.flush();
+}
- REQUIRE(e_charge.size() == 1);
- REQUIRE(e_charge.count(RecordComponent::SCALAR) == 1);
+TEST_CASE( "deletion_test", "[serial]" )
+{
+ for (auto const & t: backends)
+ {
+ if (std::get<0>(t) == "bp")
+ {
+ continue; // deletion not implemented in ADIOS1 backend
+ }
+ deletion_test(std::get<0>(t));
+ }
+}
- e = {2150400};
- RecordComponent& e_charge_scalar = e_charge[RecordComponent::SCALAR];
- REQUIRE(e_charge_scalar.unitSI() == 4.7980045488500004e-15);
- REQUIRE(e_charge_scalar.getDatatype() == Datatype::DOUBLE);
- REQUIRE(e_charge_scalar.getExtent() == e);
- REQUIRE(e_charge_scalar.getDimensionality() == 1);
+inline
+void optional_paths_110_test(const std::string & backend)
+{
+ try
+ {
+ {
+ Series s = Series("../samples/issue-sample/no_fields/data%T." + backend, AccessType::READ_ONLY);
+ auto attrs = s.attributes();
+ REQUIRE(std::count(attrs.begin(), attrs.end(), "meshesPath") == 1);
+ REQUIRE(std::count(attrs.begin(), attrs.end(), "particlesPath") == 1);
+ REQUIRE(s.iterations[400].meshes.empty());
+ REQUIRE(s.iterations[400].particles.size() == 1);
+ }
- ud = {{0., 1., 0., 0., 0., 0., 0.}};
- Record& e_mass = species_e["mass"];
- REQUIRE(e_mass.unitDimension() == ud);
- REQUIRE(e_mass.timeOffset< float >() == static_cast< float >(0.0f));
+ {
+ Series s = Series("../samples/issue-sample/no_particles/data%T." + backend, AccessType::READ_ONLY);
+ auto attrs = s.attributes();
+ REQUIRE(std::count(attrs.begin(), attrs.end(), "meshesPath") == 1);
+ REQUIRE(std::count(attrs.begin(), attrs.end(), "particlesPath") == 1);
+ REQUIRE(s.iterations[400].meshes.size() == 2);
+ REQUIRE(s.iterations[400].particles.empty());
+ }
+ } catch (no_such_file_error& e)
+ {
+ std::cerr << "issue sample not accessible. (" << e.what() << ")\n";
+ }
- REQUIRE(e_mass.size() == 1);
- REQUIRE(e_mass.count(RecordComponent::SCALAR) == 1);
+ {
+ Series s = Series("../samples/no_meshes_1.1.0_compliant." + backend, AccessType::CREATE);
+ auto foo = s.iterations[1].particles["foo"];
+ Dataset dset = Dataset(Datatype::DOUBLE, {1});
+ foo["position"][RecordComponent::SCALAR].resetDataset(dset);
+ foo["positionOffset"][RecordComponent::SCALAR].resetDataset(dset);
+ }
- RecordComponent& e_mass_scalar = e_mass[RecordComponent::SCALAR];
- REQUIRE(e_mass_scalar.unitSI() == 2.7279684799430467e-26);
- REQUIRE(e_mass_scalar.getDatatype() == Datatype::DOUBLE);
- REQUIRE(e_mass_scalar.getExtent() == e);
- REQUIRE(e_mass_scalar.getDimensionality() == 1);
+ {
+ Series s = Series("../samples/no_particles_1.1.0_compliant." + backend, AccessType::CREATE);
+ auto foo = s.iterations[1].meshes["foo"];
+ Dataset dset = Dataset(Datatype::DOUBLE, {1});
+ foo[RecordComponent::SCALAR].resetDataset(dset);
+ }
- ud = {{1., 1., -1., 0., 0., 0., 0.}};
- Record& e_momentum = species_e["momentum"];
- REQUIRE(e_momentum.unitDimension() == ud);
- REQUIRE(e_momentum.timeOffset< float >() == static_cast< float >(0.0f));
+ {
+ Series s = Series("../samples/no_meshes_1.1.0_compliant." + backend, AccessType::READ_ONLY);
+ auto attrs = s.attributes();
+ REQUIRE(std::count(attrs.begin(), attrs.end(), "meshesPath") == 0);
+ REQUIRE(std::count(attrs.begin(), attrs.end(), "particlesPath") == 1);
+ REQUIRE(s.iterations[1].meshes.empty());
+ REQUIRE(s.iterations[1].particles.size() == 1);
+ }
- REQUIRE(e_momentum.size() == 3);
- REQUIRE(e_momentum.count("x") == 1);
- REQUIRE(e_momentum.count("y") == 1);
- REQUIRE(e_momentum.count("z") == 1);
+ {
+ Series s = Series("../samples/no_particles_1.1.0_compliant." + backend, AccessType::READ_ONLY);
+ auto attrs = s.attributes();
+ REQUIRE(std::count(attrs.begin(), attrs.end(), "meshesPath") == 1);
+ REQUIRE(std::count(attrs.begin(), attrs.end(), "particlesPath") == 0);
+ REQUIRE(s.iterations[1].meshes.size() == 1);
+ REQUIRE(s.iterations[1].particles.empty());
+ }
+}
- RecordComponent& e_momentum_x = e_momentum["x"];
- REQUIRE(e_momentum_x.unitSI() == 8.1782437594864961e-18);
- REQUIRE(e_momentum_x.getDatatype() == Datatype::FLOAT);
- REQUIRE(e_momentum_x.getExtent() == e);
- REQUIRE(e_momentum_x.getDimensionality() == 1);
- RecordComponent& e_momentum_y = e_momentum["y"];
- REQUIRE(e_momentum_y.unitSI() == 8.1782437594864961e-18);
- REQUIRE(e_momentum_y.getDatatype() == Datatype::FLOAT);
- REQUIRE(e_momentum_y.getExtent() == e);
- REQUIRE(e_momentum_y.getDimensionality() == 1);
+#if openPMD_HAVE_HDF5
+TEST_CASE( "optional_paths_110_test", "[serial]" )
+{
+ optional_paths_110_test("h5"); // samples only present for hdf5
+}
- RecordComponent& e_momentum_z = e_momentum["z"];
- REQUIRE(e_momentum_z.unitSI() == 8.1782437594864961e-18);
- REQUIRE(e_momentum_z.getDatatype() == Datatype::FLOAT);
- REQUIRE(e_momentum_z.getExtent() == e);
- REQUIRE(e_momentum_z.getDimensionality() == 1);
+TEST_CASE( "git_hdf5_sample_structure_test", "[serial][hdf5]" )
+{
+#if openPMD_USE_INVASIVE_TESTS
+ try
+ {
+ Series o = Series("../samples/git-sample/data%T.h5", AccessType::READ_ONLY);
- ud = {{1., 0., 0., 0., 0., 0., 0.}};
- Record& e_position = species_e["position"];
- REQUIRE(e_position.unitDimension() == ud);
- REQUIRE(e_position.timeOffset< float >() == static_cast< float >(0.0f));
+ REQUIRE(!o.parent);
+ REQUIRE(o.iterations.parent == getWritable(&o));
+ REQUIRE_THROWS_AS(o.iterations[42], std::out_of_range);
+ REQUIRE(o.iterations[100].parent == getWritable(&o.iterations));
+ REQUIRE(o.iterations[100].meshes.parent == getWritable(&o.iterations[100]));
+ REQUIRE(o.iterations[100].meshes["E"].parent == getWritable(&o.iterations[100].meshes));
+ REQUIRE(o.iterations[100].meshes["E"]["x"].parent == getWritable(&o.iterations[100].meshes["E"]));
+ REQUIRE(o.iterations[100].meshes["E"]["y"].parent == getWritable(&o.iterations[100].meshes["E"]));
+ REQUIRE(o.iterations[100].meshes["E"]["z"].parent == getWritable(&o.iterations[100].meshes["E"]));
+ REQUIRE(o.iterations[100].meshes["rho"].parent == getWritable(&o.iterations[100].meshes));
+ REQUIRE(o.iterations[100].meshes["rho"][MeshRecordComponent::SCALAR].parent == getWritable(&o.iterations[100].meshes));
+ REQUIRE_THROWS_AS(o.iterations[100].meshes["cherries"], std::out_of_range);
+ REQUIRE(o.iterations[100].particles.parent == getWritable(&o.iterations[100]));
+ REQUIRE(o.iterations[100].particles["electrons"].parent == getWritable(&o.iterations[100].particles));
+ REQUIRE(o.iterations[100].particles["electrons"]["charge"].parent == getWritable(&o.iterations[100].particles["electrons"]));
+ REQUIRE(o.iterations[100].particles["electrons"]["charge"][RecordComponent::SCALAR].parent == getWritable(&o.iterations[100].particles["electrons"]));
+ REQUIRE(o.iterations[100].particles["electrons"]["mass"].parent == getWritable(&o.iterations[100].particles["electrons"]));
+ REQUIRE(o.iterations[100].particles["electrons"]["mass"][RecordComponent::SCALAR].parent == getWritable(&o.iterations[100].particles["electrons"]));
+ REQUIRE(o.iterations[100].particles["electrons"]["momentum"].parent == getWritable(&o.iterations[100].particles["electrons"]));
+ REQUIRE(o.iterations[100].particles["electrons"]["momentum"]["x"].parent == getWritable(&o.iterations[100].particles["electrons"]["momentum"]));
+ REQUIRE(o.iterations[100].particles["electrons"]["momentum"]["y"].parent == getWritable(&o.iterations[100].particles["electrons"]["momentum"]));
+ REQUIRE(o.iterations[100].particles["electrons"]["momentum"]["z"].parent == getWritable(&o.iterations[100].particles["electrons"]["momentum"]));
+ REQUIRE(o.iterations[100].particles["electrons"]["position"].parent == getWritable(&o.iterations[100].particles["electrons"]));
+ REQUIRE(o.iterations[100].particles["electrons"]["position"]["x"].parent == getWritable(&o.iterations[100].particles["electrons"]["position"]));
+ REQUIRE(o.iterations[100].particles["electrons"]["position"]["y"].parent == getWritable(&o.iterations[100].particles["electrons"]["position"]));
+ REQUIRE(o.iterations[100].particles["electrons"]["position"]["z"].parent == getWritable(&o.iterations[100].particles["electrons"]["position"]));
+ REQUIRE(o.iterations[100].particles["electrons"]["positionOffset"].parent == getWritable(&o.iterations[100].particles["electrons"]));
+ REQUIRE(o.iterations[100].particles["electrons"]["positionOffset"]["x"].parent == getWritable(&o.iterations[100].particles["electrons"]["positionOffset"]));
+ REQUIRE(o.iterations[100].particles["electrons"]["positionOffset"]["y"].parent == getWritable(&o.iterations[100].particles["electrons"]["positionOffset"]));
+ REQUIRE(o.iterations[100].particles["electrons"]["positionOffset"]["z"].parent == getWritable(&o.iterations[100].particles["electrons"]["positionOffset"]));
+ REQUIRE(o.iterations[100].particles["electrons"]["weighting"].parent == getWritable(&o.iterations[100].particles["electrons"]));
+ REQUIRE(o.iterations[100].particles["electrons"]["weighting"][RecordComponent::SCALAR].parent == getWritable(&o.iterations[100].particles["electrons"]));
+ REQUIRE_THROWS_AS(o.iterations[100].particles["electrons"]["numberOfLegs"], std::out_of_range);
+ REQUIRE_THROWS_AS(o.iterations[100].particles["apples"], std::out_of_range);
- REQUIRE(e_position.size() == 3);
- REQUIRE(e_position.count("x") == 1);
- REQUIRE(e_position.count("y") == 1);
- REQUIRE(e_position.count("z") == 1);
+ int32_t i32 = 32;
+ REQUIRE_THROWS(o.setAttribute("setAttributeFail", i32));
+ } catch (no_such_file_error& e)
+ {
+ std::cerr << "git sample not accessible. (" << e.what() << ")\n";
+ return;
+ }
+#else
+ std::cerr << "Invasive tests not enabled. Hierarchy is not visible.\n";
+#endif
+}
- RecordComponent& e_position_x = e_position["x"];
- REQUIRE(e_position_x.unitSI() == 2.599999993753294e-07);
- REQUIRE(e_position_x.getDatatype() == Datatype::FLOAT);
- REQUIRE(e_position_x.getExtent() == e);
- REQUIRE(e_position_x.getDimensionality() == 1);
+TEST_CASE( "git_hdf5_sample_attribute_test", "[serial][hdf5]" )
+{
+ try
+ {
+ Series o = Series("../samples/git-sample/data%T.h5", AccessType::READ_ONLY);
- RecordComponent& e_position_y = e_position["y"];
- REQUIRE(e_position_y.unitSI() == 4.4299999435019118e-08);
- REQUIRE(e_position_y.getDatatype() == Datatype::FLOAT);
- REQUIRE(e_position_y.getExtent() == e);
- REQUIRE(e_position_y.getDimensionality() == 1);
+ REQUIRE(o.openPMD() == "1.1.0");
+ REQUIRE(o.openPMDextension() == 1);
+ REQUIRE(o.basePath() == "/data/%T/");
+ REQUIRE(o.meshesPath() == "fields/");
+ REQUIRE(o.particlesPath() == "particles/");
+ REQUIRE(o.iterationEncoding() == IterationEncoding::fileBased);
+ REQUIRE(o.iterationFormat() == "data%T.h5");
+ REQUIRE(o.name() == "data%T");
- RecordComponent& e_position_z = e_position["z"];
- REQUIRE(e_position_z.unitSI() == 2.599999993753294e-07);
- REQUIRE(e_position_z.getDatatype() == Datatype::FLOAT);
- REQUIRE(e_position_z.getExtent() == e);
- REQUIRE(e_position_z.getDimensionality() == 1);
+ REQUIRE(o.iterations.size() == 5);
+ REQUIRE(o.iterations.count(100) == 1);
- ud = {{1., 0., 0., 0., 0., 0., 0.}};
- Record& e_positionOffset = species_e["positionOffset"];
- REQUIRE(e_positionOffset.unitDimension() == ud);
- REQUIRE(e_positionOffset.timeOffset< float >() == static_cast< float >(0.0f));
+ Iteration& iteration_100 = o.iterations[100];
+ REQUIRE(iteration_100.time< double >() == 3.2847121452090077e-14);
+ REQUIRE(iteration_100.dt< double >() == 3.2847121452090093e-16);
+ REQUIRE(iteration_100.timeUnitSI() == 1.0);
- REQUIRE(e_positionOffset.size() == 3);
- REQUIRE(e_positionOffset.count("x") == 1);
- REQUIRE(e_positionOffset.count("y") == 1);
- REQUIRE(e_positionOffset.count("z") == 1);
+ REQUIRE(iteration_100.meshes.size() == 2);
+ REQUIRE(iteration_100.meshes.count("E") == 1);
+ REQUIRE(iteration_100.meshes.count("rho") == 1);
- RecordComponent& e_positionOffset_x = e_positionOffset["x"];
- REQUIRE(e_positionOffset_x.unitSI() == 2.599999993753294e-07);
- REQUIRE(e_positionOffset_x.getDatatype() == determineDatatype< int32_t >());
- REQUIRE(e_positionOffset_x.getExtent() == e);
- REQUIRE(e_positionOffset_x.getDimensionality() == 1);
+ std::vector< std::string > al{"x", "y", "z"};
+ std::vector< double > gs{8.0000000000000007e-07,
+ 8.0000000000000007e-07,
+ 1.0000000000000001e-07};
+ std::vector< double > ggo{-1.0000000000000001e-05,
+ -1.0000000000000001e-05,
+ -5.1999999999999993e-06};
+ std::array< double, 7 > ud{{1., 1., -3., -1., 0., 0., 0.}};
+ Mesh& E = iteration_100.meshes["E"];
+ REQUIRE(E.geometry() == Mesh::Geometry::cartesian);
+ REQUIRE(E.dataOrder() == Mesh::DataOrder::C);
+ REQUIRE(E.axisLabels() == al);
+ REQUIRE(E.gridSpacing< double >() == gs);
+ REQUIRE(E.gridGlobalOffset() == ggo);
+ REQUIRE(E.gridUnitSI() == 1.0);
+ REQUIRE(E.unitDimension() == ud);
+ REQUIRE(E.timeOffset< double >() == 0.0);
- RecordComponent& e_positionOffset_y = e_positionOffset["y"];
- REQUIRE(e_positionOffset_y.unitSI() == 4.4299999435019118e-08);
- REQUIRE(e_positionOffset_y.getDatatype() == determineDatatype< int32_t >());
- REQUIRE(e_positionOffset_y.getExtent() == e);
- REQUIRE(e_positionOffset_y.getDimensionality() == 1);
+ REQUIRE(E.size() == 3);
+ REQUIRE(E.count("x") == 1);
+ REQUIRE(E.count("y") == 1);
+ REQUIRE(E.count("z") == 1);
- RecordComponent& e_positionOffset_z = e_positionOffset["z"];
- REQUIRE(e_positionOffset_z.unitSI() == 2.599999993753294e-07);
- REQUIRE(e_positionOffset_z.getDatatype() == determineDatatype< int32_t >());
- REQUIRE(e_positionOffset_z.getExtent() == e);
- REQUIRE(e_positionOffset_z.getDimensionality() == 1);
+ std::vector< double > p{0.5, 0., 0.};
+ Extent e{26, 26, 201};
+ MeshRecordComponent& E_x = E["x"];
+ REQUIRE(E_x.unitSI() == 1.0);
+ REQUIRE(E_x.position< double >() == p);
+ REQUIRE(E_x.getDatatype() == Datatype::DOUBLE);
+ REQUIRE(E_x.getExtent() == e);
+ REQUIRE(E_x.getDimensionality() == 3);
- ud = {{0., 0., 0., 0., 0., 0., 0.}};
- Record& e_weighting = species_e["weighting"];
- REQUIRE(e_weighting.unitDimension() == ud);
- REQUIRE(e_weighting.timeOffset< float >() == static_cast< float >(0.0f));
-
- REQUIRE(e_weighting.size() == 1);
- REQUIRE(e_weighting.count(RecordComponent::SCALAR) == 1);
+ p = {0., 0.5, 0.};
+ MeshRecordComponent& E_y = E["y"];
+ REQUIRE(E_y.unitSI() == 1.0);
+ REQUIRE(E_y.position< double >() == p);
+ REQUIRE(E_y.getDatatype() == Datatype::DOUBLE);
+ REQUIRE(E_y.getExtent() == e);
+ REQUIRE(E_y.getDimensionality() == 3);
- RecordComponent& e_weighting_scalar = e_weighting[RecordComponent::SCALAR];
- REQUIRE(e_weighting_scalar.unitSI() == 1.0);
- REQUIRE(e_weighting_scalar.getDatatype() == Datatype::FLOAT);
- REQUIRE(e_weighting_scalar.getExtent() == e);
- REQUIRE(e_weighting_scalar.getDimensionality() == 1);
+ p = {0., 0., 0.5};
+ MeshRecordComponent& E_z = E["z"];
+ REQUIRE(E_z.unitSI() == 1.0);
+ REQUIRE(E_z.position< double >() == p);
+ REQUIRE(E_z.getDatatype() == Datatype::DOUBLE);
+ REQUIRE(E_z.getExtent() == e);
+ REQUIRE(E_z.getDimensionality() == 3);
- ParticlePatches& e_patches = species_e.particlePatches;
- REQUIRE(e_patches.size() == 4); /* extent, numParticles, numParticlesOffset, offset */
- REQUIRE(e_patches.count("extent") == 1);
- REQUIRE(e_patches.count("numParticles") == 1);
- REQUIRE(e_patches.count("numParticlesOffset") == 1);
- REQUIRE(e_patches.count("offset") == 1);
- REQUIRE(e_patches.numPatches() == 4);
+ gs = {8.0000000000000007e-07,
+ 8.0000000000000007e-07,
+ 1.0000000000000001e-07};
+ ggo = {-1.0000000000000001e-05,
+ -1.0000000000000001e-05,
+ -5.1999999999999993e-06};
+ ud = {{-3., 0., 1., 1., 0., 0., 0.}};
+ Mesh& rho = iteration_100.meshes["rho"];
+ REQUIRE(rho.geometry() == Mesh::Geometry::cartesian);
+ REQUIRE(rho.dataOrder() == Mesh::DataOrder::C);
+ REQUIRE(rho.axisLabels() == al);
+ REQUIRE(rho.gridSpacing< double >() == gs);
+ REQUIRE(rho.gridGlobalOffset() == ggo);
+ REQUIRE(rho.gridUnitSI() == 1.0);
+ REQUIRE(rho.unitDimension() == ud);
+ REQUIRE(rho.timeOffset< double >() == 0.0);
- ud = {{1., 0., 0., 0., 0., 0., 0.}};
- PatchRecord& e_extent = e_patches["extent"];
- REQUIRE(e_extent.unitDimension() == ud);
+ REQUIRE(rho.size() == 1);
+ REQUIRE(rho.count(MeshRecordComponent::SCALAR) == 1);
- REQUIRE(e_extent.size() == 3);
- REQUIRE(e_extent.count("x") == 1);
- REQUIRE(e_extent.count("y") == 1);
- REQUIRE(e_extent.count("z") == 1);
+ p = {0., 0., 0.};
+ e = {26, 26, 201};
+ MeshRecordComponent& rho_scalar = rho[MeshRecordComponent::SCALAR];
+ REQUIRE(rho_scalar.unitSI() == 1.0);
+ REQUIRE(rho_scalar.position< double >() == p);
+ REQUIRE(rho_scalar.getDatatype() == Datatype::DOUBLE);
+ REQUIRE(rho_scalar.getExtent() == e);
+ REQUIRE(rho_scalar.getDimensionality() == 3);
- PatchRecordComponent& e_extent_x = e_extent["x"];
- REQUIRE(e_extent_x.unitSI() == 2.599999993753294e-07);
-#if !defined(_MSC_VER)
- REQUIRE(e_extent_x.getDatatype() == determineDatatype< uint64_t >());
-#endif
- REQUIRE(isSame(e_extent_x.getDatatype(), determineDatatype< uint64_t >()));
+ REQUIRE(iteration_100.particles.size() == 1);
+ REQUIRE(iteration_100.particles.count("electrons") == 1);
- PatchRecordComponent& e_extent_y = e_extent["y"];
- REQUIRE(e_extent_y.unitSI() == 4.429999943501912e-08);
-#if !defined(_MSC_VER)
- REQUIRE(e_extent_y.getDatatype() == determineDatatype< uint64_t >());
-#endif
- REQUIRE(isSame(e_extent_y.getDatatype(), determineDatatype< uint64_t >()));
+ ParticleSpecies& electrons = iteration_100.particles["electrons"];
- PatchRecordComponent& e_extent_z = e_extent["z"];
- REQUIRE(e_extent_z.unitSI() == 2.599999993753294e-07);
-#if !defined(_MSC_VER)
- REQUIRE(e_extent_z.getDatatype() == determineDatatype< uint64_t >());
-#endif
- REQUIRE(isSame(e_extent_z.getDatatype(), determineDatatype< uint64_t >()));
+ REQUIRE(electrons.size() == 6);
+ REQUIRE(electrons.count("charge") == 1);
+ REQUIRE(electrons.count("mass") == 1);
+ REQUIRE(electrons.count("momentum") == 1);
+ REQUIRE(electrons.count("position") == 1);
+ REQUIRE(electrons.count("positionOffset") == 1);
+ REQUIRE(electrons.count("weighting") == 1);
- std::vector< uint64_t > data( e_patches.size() );
- e_extent_z.load(shareRaw(data.data()));
- o.flush();
- REQUIRE(data.at(0) == static_cast< uint64_t >(80));
- REQUIRE(data.at(1) == static_cast< uint64_t >(80));
- REQUIRE(data.at(2) == static_cast< uint64_t >(80));
- REQUIRE(data.at(3) == static_cast< uint64_t >(80));
+ ud = {{0., 0., 1., 1., 0., 0., 0.}};
+ Record& charge = electrons["charge"];
+ REQUIRE(charge.unitDimension() == ud);
+ REQUIRE(charge.timeOffset< double >() == 0.0);
- PatchRecord& e_numParticles = e_patches["numParticles"];
- REQUIRE(e_numParticles.size() == 1);
- REQUIRE(e_numParticles.count(RecordComponent::SCALAR) == 1);
+ REQUIRE(charge.size() == 1);
+ REQUIRE(charge.count(RecordComponent::SCALAR) == 1);
- PatchRecordComponent& e_numParticles_scalar = e_numParticles[RecordComponent::SCALAR];
-#if !defined(_MSC_VER)
- REQUIRE(e_numParticles_scalar.getDatatype() == determineDatatype< uint64_t >());
-#endif
- REQUIRE(isSame(e_numParticles_scalar.getDatatype(), determineDatatype< uint64_t >()));
+ e = {85625};
+ RecordComponent& charge_scalar = charge[RecordComponent::SCALAR];
+ REQUIRE(charge_scalar.unitSI() == 1.0);
+ REQUIRE(charge_scalar.getDatatype() == Datatype::DOUBLE);
+ REQUIRE(charge_scalar.getDimensionality() == 1);
+ REQUIRE(charge_scalar.getExtent() == e);
- e_numParticles_scalar.load(shareRaw(data.data()));
- o.flush();
- REQUIRE(data.at(0) == static_cast< uint64_t >(512000));
- REQUIRE(data.at(1) == static_cast< uint64_t >(819200));
- REQUIRE(data.at(2) == static_cast< uint64_t >(819200));
- REQUIRE(data.at(3) == static_cast< uint64_t >(0));
+ ud = {{1., 0., 0., 0., 0., 0., 0.}};
+ Record& mass = electrons["mass"];
+ REQUIRE(mass.unitDimension() == ud);
+ REQUIRE(mass.timeOffset< double >() == 0.0);
- PatchRecord& e_numParticlesOffset = e_patches["numParticlesOffset"];
- REQUIRE(e_numParticlesOffset.size() == 1);
- REQUIRE(e_numParticlesOffset.count(RecordComponent::SCALAR) == 1);
+ REQUIRE(mass.size() == 1);
+ REQUIRE(mass.count(RecordComponent::SCALAR) == 1);
- PatchRecordComponent& e_numParticlesOffset_scalar = e_numParticlesOffset[RecordComponent::SCALAR];
-#if !defined(_MSC_VER)
- REQUIRE(e_numParticlesOffset_scalar.getDatatype() == determineDatatype< uint64_t >());
-#endif
- REQUIRE(isSame(e_numParticlesOffset_scalar.getDatatype(), determineDatatype< uint64_t >()));
+ RecordComponent& mass_scalar = mass[RecordComponent::SCALAR];
+ REQUIRE(mass_scalar.unitSI() == 1.0);
+ REQUIRE(mass_scalar.getDatatype() == Datatype::DOUBLE);
+ REQUIRE(mass_scalar.getDimensionality() == 1);
+ REQUIRE(mass_scalar.getExtent() == e);
- PatchRecord& e_offset = e_patches["offset"];
- REQUIRE(e_offset.unitDimension() == ud);
+ ud = {{1., 1., -1., 0., 0., 0., 0.}};
+ Record& momentum = electrons["momentum"];
+ REQUIRE(momentum.unitDimension() == ud);
+ REQUIRE(momentum.timeOffset< double >() == 0.0);
- REQUIRE(e_offset.size() == 3);
- REQUIRE(e_offset.count("x") == 1);
- REQUIRE(e_offset.count("y") == 1);
- REQUIRE(e_offset.count("z") == 1);
+ REQUIRE(momentum.size() == 3);
+ REQUIRE(momentum.count("x") == 1);
+ REQUIRE(momentum.count("y") == 1);
+ REQUIRE(momentum.count("z") == 1);
- PatchRecordComponent& e_offset_x = e_offset["x"];
- REQUIRE(e_offset_x.unitSI() == 2.599999993753294e-07);
-#if !defined(_MSC_VER)
- REQUIRE(e_offset_x.getDatatype() == determineDatatype< uint64_t >());
-#endif
- REQUIRE(isSame(e_offset_x.getDatatype(), determineDatatype< uint64_t >()));
+ RecordComponent& momentum_x = momentum["x"];
+ REQUIRE(momentum_x.unitSI() == 1.0);
+ REQUIRE(momentum_x.getDatatype() == Datatype::DOUBLE);
+ REQUIRE(momentum_x.getDimensionality() == 1);
+ REQUIRE(momentum_x.getExtent() == e);
- PatchRecordComponent& e_offset_y = e_offset["y"];
- REQUIRE(e_offset_y.unitSI() == 4.429999943501912e-08);
-#if !defined(_MSC_VER)
- REQUIRE(e_offset_y.getDatatype() == determineDatatype< uint64_t >());
-#endif
- REQUIRE(isSame(e_offset_y.getDatatype(), determineDatatype< uint64_t >()));
+ RecordComponent& momentum_y = momentum["y"];
+ REQUIRE(momentum_y.unitSI() == 1.0);
+ REQUIRE(momentum_y.getDatatype() == Datatype::DOUBLE);
+ REQUIRE(momentum_y.getDimensionality() == 1);
+ REQUIRE(momentum_y.getExtent() == e);
- e_offset_y.load(shareRaw(data.data()));
- o.flush();
- REQUIRE(data.at(0) == static_cast< uint64_t >(0));
- REQUIRE(data.at(1) == static_cast< uint64_t >(128));
- REQUIRE(data.at(2) == static_cast< uint64_t >(256));
- REQUIRE(data.at(3) == static_cast< uint64_t >(384));
+ RecordComponent& momentum_z = momentum["z"];
+ REQUIRE(momentum_z.unitSI() == 1.0);
+ REQUIRE(momentum_z.getDatatype() == Datatype::DOUBLE);
+ REQUIRE(momentum_z.getDimensionality() == 1);
+ REQUIRE(momentum_z.getExtent() == e);
- PatchRecordComponent& e_offset_z = e_offset["z"];
- REQUIRE(e_offset_z.unitSI() == 2.599999993753294e-07);
-#if !defined(_MSC_VER)
- REQUIRE(e_offset_z.getDatatype() == determineDatatype< uint64_t >());
-#endif
- REQUIRE(isSame(e_offset_z.getDatatype(), determineDatatype< uint64_t >()));
- } catch (no_such_file_error& e)
- {
- std::cerr << "HZDR sample not accessible. (" << e.what() << ")\n";
- return;
- }
-}
+ ud = {{1., 0., 0., 0., 0., 0., 0.}};
+ Record& position = electrons["position"];
+ REQUIRE(position.unitDimension() == ud);
+ REQUIRE(position.timeOffset< double >() == 0.0);
-TEST_CASE( "hdf5_dtype_test", "[serial][hdf5]" )
-{
- {
- Series s = Series("../samples/dtype_test.h5", AccessType::CREATE);
+ REQUIRE(position.size() == 3);
+ REQUIRE(position.count("x") == 1);
+ REQUIRE(position.count("y") == 1);
+ REQUIRE(position.count("z") == 1);
- char c = 'c';
- s.setAttribute("char", c);
- unsigned char uc = 'u';
- s.setAttribute("uchar", uc);
- int16_t i16 = 16;
- s.setAttribute("int16", i16);
- int32_t i32 = 32;
- s.setAttribute("int32", i32);
- int64_t i64 = 64;
- s.setAttribute("int64", i64);
- uint16_t u16 = 16u;
- s.setAttribute("uint16", u16);
- uint32_t u32 = 32u;
- s.setAttribute("uint32", u32);
- uint64_t u64 = 64u;
- s.setAttribute("uint64", u64);
- float f = 16.e10f;
- s.setAttribute("float", f);
- double d = 1.e64;
- s.setAttribute("double", d);
- long double ld = 1.e80L;
- s.setAttribute("longdouble", ld);
- std::string str = "string";
- s.setAttribute("string", str);
- s.setAttribute("vecChar", std::vector< char >({'c', 'h', 'a', 'r'}));
- s.setAttribute("vecInt16", std::vector< int16_t >({32766, 32767}));
- s.setAttribute("vecInt32", std::vector< int32_t >({2147483646, 2147483647}));
- s.setAttribute("vecInt64", std::vector< int64_t >({9223372036854775806, 9223372036854775807}));
- s.setAttribute("vecUchar", std::vector< char >({'u', 'c', 'h', 'a', 'r'}));
- s.setAttribute("vecUint16", std::vector< uint16_t >({65534u, 65535u}));
- s.setAttribute("vecUint32", std::vector< uint32_t >({4294967294u, 4294967295u}));
- s.setAttribute("vecUint64", std::vector< uint64_t >({18446744073709551614u, 18446744073709551615u}));
- s.setAttribute("vecFloat", std::vector< float >({0.f, 3.40282e+38f}));
- s.setAttribute("vecDouble", std::vector< double >({0., 1.79769e+308}));
- s.setAttribute("vecLongdouble", std::vector< long double >({0.L, std::numeric_limits::max()}));
- s.setAttribute("vecString", std::vector< std::string >({"vector", "of", "strings"}));
- s.setAttribute("bool", true);
- s.setAttribute("boolF", false);
+ RecordComponent& position_x = position["x"];
+ REQUIRE(position_x.unitSI() == 1.0);
+ REQUIRE(position_x.getDatatype() == Datatype::DOUBLE);
+ REQUIRE(position_x.getDimensionality() == 1);
+ REQUIRE(position_x.getExtent() == e);
- // non-fixed size integer types
- short ss = 16;
- s.setAttribute("short", ss);
- int si = 32;
- s.setAttribute("int", si);
- long sl = 64;
- s.setAttribute("long", sl);
- long long sll = 128;
- s.setAttribute("longlong", sll);
- unsigned short us = 16u;
- s.setAttribute("ushort", us);
- unsigned int ui = 32u;
- s.setAttribute("uint", ui);
- unsigned long ul = 64u;
- s.setAttribute("ulong", ul);
- unsigned long long ull = 128u;
- s.setAttribute("ulonglong", ull);
- s.setAttribute("vecShort", std::vector< short >({32766, 32767}));
- s.setAttribute("vecInt", std::vector< int >({32766, 32767}));
- s.setAttribute("vecLong", std::vector< long >({2147483646, 2147483647}));
- s.setAttribute("vecLongLong", std::vector< long long >({2147483644, 2147483643}));
- s.setAttribute("vecUShort", std::vector< unsigned short >({65534u, 65535u}));
- s.setAttribute("vecUInt", std::vector< unsigned int >({65533u, 65531u}));
- s.setAttribute("vecULong", std::vector< unsigned long >({65532u, 65530u}));
- s.setAttribute("vecULongLong", std::vector< unsigned long long >({65531u, 65529u}));
- }
+ RecordComponent& position_y = position["y"];
+ REQUIRE(position_y.unitSI() == 1.0);
+ REQUIRE(position_y.getDatatype() == Datatype::DOUBLE);
+ REQUIRE(position_y.getDimensionality() == 1);
+ REQUIRE(position_y.getExtent() == e);
- Series s = Series("../samples/dtype_test.h5", AccessType::READ_ONLY);
+ RecordComponent& position_z = position["z"];
+ REQUIRE(position_z.unitSI() == 1.0);
+ REQUIRE(position_z.getDatatype() == Datatype::DOUBLE);
+ REQUIRE(position_z.getDimensionality() == 1);
+ REQUIRE(position_z.getExtent() == e);
- REQUIRE(s.getAttribute("char").get< char >() == 'c');
- REQUIRE(s.getAttribute("uchar").get< unsigned char >() == 'u');
- REQUIRE(s.getAttribute("int16").get< int16_t >() == 16);
- REQUIRE(s.getAttribute("int32").get< int32_t >() == 32);
- REQUIRE(s.getAttribute("int64").get< int64_t >() == 64);
- REQUIRE(s.getAttribute("uint16").get< uint16_t >() == 16u);
- REQUIRE(s.getAttribute("uint32").get< uint32_t >() == 32u);
- REQUIRE(s.getAttribute("uint64").get< uint64_t >() == 64u);
- REQUIRE(s.getAttribute("float").get< float >() == 16.e10f);
- REQUIRE(s.getAttribute("double").get< double >() == 1.e64);
- REQUIRE(s.getAttribute("longdouble").get< long double >() == 1.e80L);
- REQUIRE(s.getAttribute("string").get< std::string >() == "string");
- REQUIRE(s.getAttribute("vecChar").get< std::vector< char > >() == std::vector< char >({'c', 'h', 'a', 'r'}));
- REQUIRE(s.getAttribute("vecInt16").get< std::vector< int16_t > >() == std::vector< int16_t >({32766, 32767}));
- REQUIRE(s.getAttribute("vecInt32").get< std::vector< int32_t > >() == std::vector< int32_t >({2147483646, 2147483647}));
- REQUIRE(s.getAttribute("vecInt64").get< std::vector< int64_t > >() == std::vector< int64_t >({9223372036854775806, 9223372036854775807}));
- REQUIRE(s.getAttribute("vecUchar").get< std::vector< char > >() == std::vector< char >({'u', 'c', 'h', 'a', 'r'}));
- REQUIRE(s.getAttribute("vecUint16").get< std::vector< uint16_t > >() == std::vector< uint16_t >({65534u, 65535u}));
- REQUIRE(s.getAttribute("vecUint32").get< std::vector< uint32_t > >() == std::vector< uint32_t >({4294967294u, 4294967295u}));
- REQUIRE(s.getAttribute("vecUint64").get< std::vector< uint64_t > >() == std::vector< uint64_t >({18446744073709551614u, 18446744073709551615u}));
- REQUIRE(s.getAttribute("vecFloat").get< std::vector< float > >() == std::vector< float >({0.f, 3.40282e+38f}));
- REQUIRE(s.getAttribute("vecDouble").get< std::vector< double > >() == std::vector< double >({0., 1.79769e+308}));
- REQUIRE(s.getAttribute("vecLongdouble").get< std::vector< long double > >() == std::vector< long double >({0.L, std::numeric_limits::max()}));
- REQUIRE(s.getAttribute("vecString").get< std::vector< std::string > >() == std::vector< std::string >({"vector", "of", "strings"}));
- REQUIRE(s.getAttribute("bool").get< bool >() == true);
- REQUIRE(s.getAttribute("boolF").get< bool >() == false);
+ Record& positionOffset = electrons["positionOffset"];
+ REQUIRE(positionOffset.unitDimension() == ud);
+ REQUIRE(positionOffset.timeOffset< double >() == 0.0);
- // same implementation types (not necessary aliases) detection
-#if !defined(_MSC_VER)
- REQUIRE(s.getAttribute("short").dtype == Datatype::SHORT);
- REQUIRE(s.getAttribute("int").dtype == Datatype::INT);
- REQUIRE(s.getAttribute("long").dtype == Datatype::LONG);
- REQUIRE(s.getAttribute("longlong").dtype == Datatype::LONGLONG);
- REQUIRE(s.getAttribute("ushort").dtype == Datatype::USHORT);
- REQUIRE(s.getAttribute("uint").dtype == Datatype::UINT);
- REQUIRE(s.getAttribute("ulong").dtype == Datatype::ULONG);
- REQUIRE(s.getAttribute("ulonglong").dtype == Datatype::ULONGLONG);
+ REQUIRE(positionOffset.size() == 3);
+ REQUIRE(positionOffset.count("x") == 1);
+ REQUIRE(positionOffset.count("y") == 1);
+ REQUIRE(positionOffset.count("z") == 1);
- REQUIRE(s.getAttribute("vecShort").dtype == Datatype::VEC_SHORT);
- REQUIRE(s.getAttribute("vecInt").dtype == Datatype::VEC_INT);
- REQUIRE(s.getAttribute("vecLong").dtype == Datatype::VEC_LONG);
- REQUIRE(s.getAttribute("vecLongLong").dtype == Datatype::VEC_LONGLONG);
- REQUIRE(s.getAttribute("vecUShort").dtype == Datatype::VEC_USHORT);
- REQUIRE(s.getAttribute("vecUInt").dtype == Datatype::VEC_UINT);
- REQUIRE(s.getAttribute("vecULong").dtype == Datatype::VEC_ULONG);
- REQUIRE(s.getAttribute("vecULongLong").dtype == Datatype::VEC_ULONGLONG);
-#endif
- REQUIRE(isSame(s.getAttribute("short").dtype, Datatype::SHORT));
- REQUIRE(isSame(s.getAttribute("int").dtype, Datatype::INT));
- REQUIRE(isSame(s.getAttribute("long").dtype, Datatype::LONG));
- REQUIRE(isSame(s.getAttribute("longlong").dtype, Datatype::LONGLONG));
- REQUIRE(isSame(s.getAttribute("ushort").dtype, Datatype::USHORT));
- REQUIRE(isSame(s.getAttribute("uint").dtype, Datatype::UINT));
- REQUIRE(isSame(s.getAttribute("ulong").dtype, Datatype::ULONG));
- REQUIRE(isSame(s.getAttribute("ulonglong").dtype, Datatype::ULONGLONG));
+ RecordComponent& positionOffset_x = positionOffset["x"];
+ REQUIRE(positionOffset_x.unitSI() == 1.0);
+ REQUIRE(positionOffset_x.getDatatype() == Datatype::DOUBLE);
+ REQUIRE(positionOffset_x.getDimensionality() == 1);
+ REQUIRE(positionOffset_x.getExtent() == e);
- REQUIRE(isSame(s.getAttribute("vecShort").dtype, Datatype::VEC_SHORT));
- REQUIRE(isSame(s.getAttribute("vecInt").dtype, Datatype::VEC_INT));
- REQUIRE(isSame(s.getAttribute("vecLong").dtype, Datatype::VEC_LONG));
- REQUIRE(isSame(s.getAttribute("vecLongLong").dtype, Datatype::VEC_LONGLONG));
- REQUIRE(isSame(s.getAttribute("vecUShort").dtype, Datatype::VEC_USHORT));
- REQUIRE(isSame(s.getAttribute("vecUInt").dtype, Datatype::VEC_UINT));
- REQUIRE(isSame(s.getAttribute("vecULong").dtype, Datatype::VEC_ULONG));
- REQUIRE(isSame(s.getAttribute("vecULongLong").dtype, Datatype::VEC_ULONGLONG));
-}
+ RecordComponent& positionOffset_y = positionOffset["y"];
+ REQUIRE(positionOffset_y.unitSI() == 1.0);
+ REQUIRE(positionOffset_y.getDatatype() == Datatype::DOUBLE);
+ REQUIRE(positionOffset_y.getDimensionality() == 1);
+ REQUIRE(positionOffset_y.getExtent() == e);
-TEST_CASE( "hdf5_write_test", "[serial][hdf5]" )
-{
- Series o = Series("../samples/serial_write.h5", AccessType::CREATE);
+ RecordComponent& positionOffset_z = positionOffset["z"];
+ REQUIRE(positionOffset_z.unitSI() == 1.0);
+ REQUIRE(positionOffset_z.getDatatype() == Datatype::DOUBLE);
+ REQUIRE(positionOffset_z.getDimensionality() == 1);
+ REQUIRE(positionOffset_z.getExtent() == e);
- o.setAuthor("Serial HDF5");
- ParticleSpecies& e = o.iterations[1].particles["e"];
+ ud = {{0., 0., 0., 0., 0., 0., 0.}};
+ Record& weighting = electrons["weighting"];
+ REQUIRE(weighting.unitDimension() == ud);
+ REQUIRE(weighting.timeOffset< double >() == 0.0);
- std::vector< double > position_global(4);
- double pos{0.};
- std::generate(position_global.begin(), position_global.end(), [&pos]{ return pos++; });
- std::vector< double > position_local = {0.};
- e["position"]["x"].resetDataset(Dataset(determineDatatype(), {4}));
- std::vector< uint64_t > positionOffset_global(4);
- uint64_t posOff{0};
- std::generate(positionOffset_global.begin(), positionOffset_global.end(), [&posOff]{ return posOff++; });
- std::array< uint64_t, 1 > positionOffset_local = {{ 0u }};
- auto dataset = Dataset(determineDatatype(), {4});
- REQUIRE_THROWS_AS(dataset.setCompression("zlib", 10), std::runtime_error);
- dataset.setCompression("zlib", 9);
- e["positionOffset"]["x"].resetDataset(dataset);
+ REQUIRE(weighting.size() == 1);
+ REQUIRE(weighting.count(RecordComponent::SCALAR) == 1);
- for( uint64_t i = 0; i < 4; ++i )
+ RecordComponent& weighting_scalar = weighting[RecordComponent::SCALAR];
+ REQUIRE(weighting_scalar.unitSI() == 1.0);
+ REQUIRE(weighting_scalar.getDatatype() == Datatype::DOUBLE);
+ REQUIRE(weighting_scalar.getDimensionality() == 1);
+ REQUIRE(weighting_scalar.getExtent() == e);
+ } catch (no_such_file_error& e)
{
- position_local.at(0) = position_global[i];
- e["position"]["x"].storeChunk(shareRaw(position_local), {i}, {1});
- positionOffset_local[0] = positionOffset_global[i];
- e["positionOffset"]["x"].storeChunk(shareRaw(positionOffset_local), {i}, {1});
- o.flush();
+ std::cerr << "git sample not accessible. (" << e.what() << ")\n";
+ return;
}
-
- //TODO close file, read back, verify
}
-TEST_CASE( "hdf5_fileBased_write_empty_test", "[serial][hdf5]" )
+TEST_CASE( "git_hdf5_sample_content_test", "[serial][hdf5]" )
{
- if( auxiliary::directory_exists("../samples/subdir") )
- auxiliary::remove_directory("../samples/subdir");
-
- Dataset dset = Dataset(Datatype::DOUBLE, {2});
+ try
{
- Series o = Series("../samples/subdir/serial_fileBased_write%T.h5", AccessType::CREATE);
+ Series o = Series("../samples/git-sample/data%T.h5", AccessType::READ_ONLY);
- ParticleSpecies& e_1 = o.iterations[1].particles["e"];
- e_1["position"][RecordComponent::SCALAR].resetDataset(dset);
- e_1["positionOffset"][RecordComponent::SCALAR].resetDataset(dset);
- o.iterations[1].setTime(1.f);
- ParticleSpecies& e_2 = o.iterations[2].particles["e"];
- e_2["position"][RecordComponent::SCALAR].resetDataset(dset);
- e_2["positionOffset"][RecordComponent::SCALAR].resetDataset(dset);
- o.iterations[2].setTime(2.f);
- ParticleSpecies& e_3 = o.iterations[3].particles["e"];
- e_3["position"][RecordComponent::SCALAR].resetDataset(dset);
- e_3["positionOffset"][RecordComponent::SCALAR].resetDataset(dset);
- o.iterations[3].setTime(3.f);
- }
+ {
+ double actual[3][3][3] = {{{-1.9080703683727052e-09, -1.5632650729457964e-10, 1.1497536256399599e-09},
+ {-1.9979540244463578e-09, -2.5512036927466397e-10, 1.0402234629225404e-09},
+ {-1.7353589676361025e-09, -8.0899198451334087e-10, -1.6443779671249104e-10}},
- REQUIRE(auxiliary::directory_exists("../samples/subdir"));
- REQUIRE(auxiliary::file_exists("../samples/subdir/serial_fileBased_write1.h5"));
- REQUIRE(auxiliary::file_exists("../samples/subdir/serial_fileBased_write2.h5"));
- REQUIRE(auxiliary::file_exists("../samples/subdir/serial_fileBased_write3.h5"));
+ {{-2.0029988778702545e-09, -1.9543477947081556e-10, 1.0916454407094989e-09},
+ {-2.3890367462087170e-09, -4.7158010829662089e-10, 9.0026075483251589e-10},
+ {-1.9033881137886510e-09, -7.5192119197708962e-10, 5.0038861942880430e-10}},
- {
- Series o = Series("../samples/subdir/serial_fileBased_write%T.h5", AccessType::READ_ONLY);
+ {{-1.3271805876513554e-09, -5.9243276950837753e-10, -2.2445734160214670e-10},
+ {-7.4578609954301101e-10, -1.1995737736469891e-10, 2.5611823772919706e-10},
+ {-9.4806251738077663e-10, -1.5472800818372434e-10, -3.6461900165818406e-10}}};
+ MeshRecordComponent& rho = o.iterations[100].meshes["rho"][MeshRecordComponent::SCALAR];
+ Offset offset{20, 20, 190};
+ Extent extent{3, 3, 3};
+ auto data = rho.loadChunk(offset, extent);
+ o.flush();
+ double* raw_ptr = data.get();
- REQUIRE(o.iterations.size() == 3);
- REQUIRE(o.iterations.count(1) == 1);
- REQUIRE(o.iterations.count(2) == 1);
- REQUIRE(o.iterations.count(3) == 1);
+ for( int i = 0; i < 3; ++i )
+ for( int j = 0; j < 3; ++j )
+ for( int k = 0; k < 3; ++k )
+ REQUIRE(raw_ptr[((i*3) + j)*3 + k] == actual[i][j][k]);
+ }
- REQUIRE(o.iterations[1].time< float >() == 1.f);
- REQUIRE(o.iterations[2].time< float >() == 2.f);
- REQUIRE(o.iterations[3].time< float >() == 3.f);
+ {
+ double constant_value = 9.1093829099999999e-31;
+ RecordComponent& electrons_mass = o.iterations[100].particles["electrons"]["mass"][RecordComponent::SCALAR];
+ Offset offset{15};
+ Extent extent{3};
+ auto data = electrons_mass.loadChunk(offset, extent);
+ o.flush();
+ double* raw_ptr = data.get();
- REQUIRE(o.iterations[1].particles.size() == 1);
- REQUIRE(o.iterations[1].particles.count("e") == 1);
- REQUIRE(o.iterations[2].particles.size() == 1);
- REQUIRE(o.iterations[2].particles.count("e") == 1);
- REQUIRE(o.iterations[3].particles.size() == 1);
- REQUIRE(o.iterations[3].particles.count("e") == 1);
+ for( int i = 0; i < 3; ++i )
+ REQUIRE(raw_ptr[i] == constant_value);
+ }
+ } catch (no_such_file_error& e)
+ {
+ std::cerr << "git sample not accessible. (" << e.what() << ")\n";
+ return;
+ }
+}
- REQUIRE(o.iterations[1].particles["e"].size() == 2);
- REQUIRE(o.iterations[1].particles["e"].count("position") == 1);
- REQUIRE(o.iterations[1].particles["e"].count("positionOffset") == 1);
- REQUIRE(o.iterations[2].particles["e"].size() == 2);
- REQUIRE(o.iterations[2].particles["e"].count("position") == 1);
- REQUIRE(o.iterations[2].particles["e"].count("positionOffset") == 1);
- REQUIRE(o.iterations[3].particles["e"].size() == 2);
- REQUIRE(o.iterations[3].particles["e"].count("position") == 1);
- REQUIRE(o.iterations[3].particles["e"].count("positionOffset") == 1);
+TEST_CASE( "git_hdf5_sample_fileBased_read_test", "[serial][hdf5]" )
+{
+ try
+ {
+ Series o = Series("../samples/git-sample/data%T.h5", AccessType::READ_ONLY);
- REQUIRE(o.iterations[1].particles["e"]["position"].size() == 1);
- REQUIRE(o.iterations[1].particles["e"]["position"].count(RecordComponent::SCALAR) == 1);
- REQUIRE(o.iterations[1].particles["e"]["positionOffset"].size() == 1);
- REQUIRE(o.iterations[1].particles["e"]["positionOffset"].count(RecordComponent::SCALAR) == 1);
- REQUIRE(o.iterations[1].particles["e"]["position"][RecordComponent::SCALAR].getDatatype() == Datatype::DOUBLE);
- REQUIRE(o.iterations[1].particles["e"]["position"][RecordComponent::SCALAR].getDimensionality() == 1);
- REQUIRE(o.iterations[1].particles["e"]["position"][RecordComponent::SCALAR].getExtent() == Extent{2});
- REQUIRE(o.iterations[2].particles["e"]["position"].size() == 1);
- REQUIRE(o.iterations[2].particles["e"]["position"].count(RecordComponent::SCALAR) == 1);
- REQUIRE(o.iterations[2].particles["e"]["positionOffset"].size() == 1);
- REQUIRE(o.iterations[2].particles["e"]["positionOffset"].count(RecordComponent::SCALAR) == 1);
- REQUIRE(o.iterations[2].particles["e"]["position"][RecordComponent::SCALAR].getDatatype() == Datatype::DOUBLE);
- REQUIRE(o.iterations[2].particles["e"]["position"][RecordComponent::SCALAR].getDimensionality() == 1);
- REQUIRE(o.iterations[2].particles["e"]["position"][RecordComponent::SCALAR].getExtent() == Extent{2});
- REQUIRE(o.iterations[3].particles["e"]["position"].size() == 1);
- REQUIRE(o.iterations[3].particles["e"]["position"].count(RecordComponent::SCALAR) == 1);
- REQUIRE(o.iterations[3].particles["e"]["positionOffset"].size() == 1);
- REQUIRE(o.iterations[3].particles["e"]["positionOffset"].count(RecordComponent::SCALAR) == 1);
- REQUIRE(o.iterations[3].particles["e"]["position"][RecordComponent::SCALAR].getDatatype() == Datatype::DOUBLE);
- REQUIRE(o.iterations[3].particles["e"]["position"][RecordComponent::SCALAR].getDimensionality() == 1);
- REQUIRE(o.iterations[3].particles["e"]["position"][RecordComponent::SCALAR].getExtent() == Extent{2});
- }
+ REQUIRE(o.iterations.size() == 5);
+ REQUIRE(o.iterations.count(100) == 1);
+ REQUIRE(o.iterations.count(200) == 1);
+ REQUIRE(o.iterations.count(300) == 1);
+ REQUIRE(o.iterations.count(400) == 1);
+ REQUIRE(o.iterations.count(500) == 1);
+#if openPMD_USE_INVASIVE_TESTS
+ REQUIRE(*o.m_filenamePadding == 8);
+#endif
+ } catch (no_such_file_error& e)
{
- Series o = Series("../samples/subdir/serial_fileBased_write%T.h5", AccessType::READ_WRITE);
- ParticleSpecies& e_4 = o.iterations[4].particles["e"];
- e_4["position"][RecordComponent::SCALAR].resetDataset(dset);
- e_4["positionOffset"][RecordComponent::SCALAR].resetDataset(dset);
- o.iterations[4].setTime(4.f);
+ std::cerr << "git sample not accessible. (" << e.what() << ")\n";
+ return;
}
+ try
{
- Series o = Series("../samples/subdir/serial_fileBased_write%T.h5", AccessType::READ_ONLY);
-
- REQUIRE(o.iterations.size() == 4);
- REQUIRE(o.iterations.count(4) == 1);
-
- REQUIRE(o.iterations[4].time< float >() == 4.f);
-
- REQUIRE(o.iterations[4].particles.size() == 1);
- REQUIRE(o.iterations[4].particles.count("e") == 1);
+ Series o = Series("../samples/git-sample/data%08T.h5", AccessType::READ_ONLY);
- REQUIRE(o.iterations[4].particles["e"].size() == 2);
- REQUIRE(o.iterations[4].particles["e"].count("position") == 1);
- REQUIRE(o.iterations[4].particles["e"].count("positionOffset") == 1);
+ REQUIRE(o.iterations.size() == 5);
+ REQUIRE(o.iterations.count(100) == 1);
+ REQUIRE(o.iterations.count(200) == 1);
+ REQUIRE(o.iterations.count(300) == 1);
+ REQUIRE(o.iterations.count(400) == 1);
+ REQUIRE(o.iterations.count(500) == 1);
- REQUIRE(o.iterations[4].particles["e"]["position"].size() == 1);
- REQUIRE(o.iterations[4].particles["e"]["position"].count(RecordComponent::SCALAR) == 1);
- REQUIRE(o.iterations[4].particles["e"]["positionOffset"].size() == 1);
- REQUIRE(o.iterations[4].particles["e"]["positionOffset"].count(RecordComponent::SCALAR) == 1);
- REQUIRE(o.iterations[4].particles["e"]["position"][RecordComponent::SCALAR].getDatatype() == Datatype::DOUBLE);
- REQUIRE(o.iterations[4].particles["e"]["position"][RecordComponent::SCALAR].getDimensionality() == 1);
- REQUIRE(o.iterations[4].particles["e"]["position"][RecordComponent::SCALAR].getExtent() == Extent{2});
+#if openPMD_USE_INVASIVE_TESTS
+ REQUIRE(*o.m_filenamePadding == 8);
+#endif
+ } catch (no_such_file_error& e)
+ {
+ std::cerr << "git sample not accessible. (" << e.what() << ")\n";
+ return;
}
-}
-TEST_CASE( "hdf5_fileBased_write_test", "[serial][hdf5]" )
-{
- if( auxiliary::directory_exists("../samples/subdir") )
- auxiliary::remove_directory("../samples/subdir");
+ REQUIRE_THROWS_WITH(Series("../samples/git-sample/data%07T.h5", AccessType::READ_ONLY),
+ Catch::Equals("No matching iterations found: data%07T"));
+ try
{
- Series o = Series("../samples/subdir/serial_fileBased_write%08T.h5", AccessType::CREATE);
-
- ParticleSpecies& e_1 = o.iterations[1].particles["e"];
+ std::vector< std::string > newFiles{"../samples/git-sample/data00000001.h5",
+ "../samples/git-sample/data00000010.h5",
+ "../samples/git-sample/data00001000.h5",
+ "../samples/git-sample/data00010000.h5",
+ "../samples/git-sample/data00100000.h5"};
- std::vector< double > position_global(4);
- double pos{0.};
- std::generate(position_global.begin(), position_global.end(), [&pos]{ return pos++; });
- std::shared_ptr< double > position_local_1(new double);
- e_1["position"]["x"].resetDataset(Dataset(determineDatatype(position_local_1), {4}));
- std::vector< uint64_t > positionOffset_global(4);
- uint64_t posOff{0};
- std::generate(positionOffset_global.begin(), positionOffset_global.end(), [&posOff]{ return posOff++; });
- std::shared_ptr< uint64_t > positionOffset_local_1(new uint64_t);
- e_1["positionOffset"]["x"].resetDataset(Dataset(determineDatatype(positionOffset_local_1), {4}));
+ for( auto const& file : newFiles )
+ if( auxiliary::file_exists(file) )
+ auxiliary::remove_file(file);
- for( uint64_t i = 0; i < 4; ++i )
{
- *position_local_1 = position_global[i];
- e_1["position"]["x"].storeChunk(position_local_1, {i}, {1});
- *positionOffset_local_1 = positionOffset_global[i];
- e_1["positionOffset"]["x"].storeChunk(positionOffset_local_1, {i}, {1});
- o.flush();
- }
-
- o.iterations[1].setTime(static_cast< double >(1));
-
- ParticleSpecies& e_2 = o.iterations[2].particles["e"];
+ Series o = Series("../samples/git-sample/data%T.h5", AccessType::READ_WRITE);
- std::generate(position_global.begin(), position_global.end(), [&pos]{ return pos++; });
- e_2["position"]["x"].resetDataset(Dataset(determineDatatype< double >(), {4}));
- std::generate(positionOffset_global.begin(), positionOffset_global.end(), [&posOff]{ return posOff++; });
- std::shared_ptr< uint64_t > positionOffset_local_2(new uint64_t);
- e_2["positionOffset"]["x"].resetDataset(Dataset(determineDatatype(positionOffset_local_2), {4}));
+#if openPMD_USE_INVASIVE_TESTS
+ REQUIRE(*o.m_filenamePadding == 8);
+#endif
- for( uint64_t i = 0; i < 4; ++i )
- {
- double const position_local_2 = position_global.at(i);
- e_2["position"]["x"].storeChunk(shareRaw(&position_local_2), {i}, {1});
- *positionOffset_local_2 = positionOffset_global[i];
- e_2["positionOffset"]["x"].storeChunk(positionOffset_local_2, {i}, {1});
+ o.iterations[1];
+ o.iterations[10];
+ o.iterations[1000];
+ o.iterations[10000];
+ o.iterations[100000];
o.flush();
}
- o.iterations[2].setTime(static_cast< double >(2));
-
- ParticleSpecies& e_3 = o.iterations[3].particles["e"];
-
- std::generate(position_global.begin(), position_global.end(), [&pos]{ return pos++; });
- std::shared_ptr< double > position_local_3(new double);
- e_3["position"]["x"].resetDataset(Dataset(determineDatatype(position_local_3), {4}));
- std::generate(positionOffset_global.begin(), positionOffset_global.end(), [&posOff]{ return posOff++; });
- std::shared_ptr< uint64_t > positionOffset_local_3(new uint64_t);
- e_3["positionOffset"]["x"].resetDataset(Dataset(determineDatatype(positionOffset_local_3), {4}));
-
- for( uint64_t i = 0; i < 4; ++i )
+ for( auto const& file : newFiles )
{
- *position_local_3 = position_global[i];
- e_3["position"]["x"].storeChunk(position_local_3, {i}, {1});
- *positionOffset_local_3 = positionOffset_global[i];
- e_3["positionOffset"]["x"].storeChunk(positionOffset_local_3, {i}, {1});
- o.flush();
+ REQUIRE(auxiliary::file_exists(file));
+ auxiliary::remove_file(file);
}
-
- o.setOpenPMDextension(1);
- o.iterations[3].setTime(static_cast< double >(3));
+ } catch (no_such_file_error& e)
+ {
+ std::cerr << "git sample not accessible. (" << e.what() << ")\n";
+ return;
}
- REQUIRE(auxiliary::file_exists("../samples/subdir/serial_fileBased_write00000001.h5"));
- REQUIRE(auxiliary::file_exists("../samples/subdir/serial_fileBased_write00000002.h5"));
- REQUIRE(auxiliary::file_exists("../samples/subdir/serial_fileBased_write00000003.h5"));
+}
+TEST_CASE( "hzdr_hdf5_sample_content_test", "[serial][hdf5]" )
+{
+ // since this file might not be publicly available, gracefully handle errors
+ try
{
- Series o = Series("../samples/subdir/serial_fileBased_write%T.h5", AccessType::READ_ONLY);
-
- REQUIRE(o.iterations.size() == 3);
- REQUIRE(o.iterations.count(1) == 1);
- REQUIRE(o.iterations.count(2) == 1);
- REQUIRE(o.iterations.count(3) == 1);
-
-#if openPMD_USE_INVASIVE_TESTS
- REQUIRE(*o.m_filenamePadding == 8);
-#endif
+ /* HZDR: /bigdata/hplsim/development/huebl/lwfa-openPMD-062-smallLWFA-h5
+ * DOI:10.14278/rodare.57 */
+ Series o = Series("../samples/hzdr-sample/h5/simData_%T.h5", AccessType::READ_ONLY);
+ REQUIRE(o.openPMD() == "1.0.0");
+ REQUIRE(o.openPMDextension() == 1);
REQUIRE(o.basePath() == "/data/%T/");
- REQUIRE(o.iterationEncoding() == IterationEncoding::fileBased);
- REQUIRE(o.iterationFormat() == "serial_fileBased_write%08T");
- REQUIRE(o.openPMD() == "1.1.0");
- REQUIRE(o.openPMDextension() == 1u);
+ REQUIRE(o.meshesPath() == "fields/");
REQUIRE(o.particlesPath() == "particles/");
- REQUIRE_FALSE(o.containsAttribute("meshesPath"));
- REQUIRE_THROWS_AS(o.meshesPath(), no_such_attribute_error);
- std::array< double, 7 > udim{{1, 0, 0, 0, 0, 0, 0}};
- Extent ext{4};
- for( auto& entry : o.iterations )
- {
- auto& it = entry.second;
- REQUIRE(it.dt< double >() == 1.);
- REQUIRE(it.time< double >() == static_cast< double >(entry.first));
- REQUIRE(it.timeUnitSI() == 1.);
- auto& pos = it.particles.at("e").at("position");
- REQUIRE(pos.timeOffset< float >() == 0.f);
- REQUIRE(pos.unitDimension() == udim);
- auto& pos_x = pos.at("x");
- REQUIRE(pos_x.unitSI() == 1.);
- REQUIRE(pos_x.getExtent() == ext);
- REQUIRE(pos_x.getDatatype() == Datatype::DOUBLE);
- auto& posOff = it.particles.at("e").at("positionOffset");
- REQUIRE(posOff.timeOffset< float >() == 0.f);
- REQUIRE(posOff.unitDimension() == udim);
- auto& posOff_x = posOff.at("x");
- REQUIRE(posOff_x.unitSI() == 1.);
- REQUIRE(posOff_x.getExtent() == ext);
-#if !defined(_MSC_VER)
- REQUIRE(posOff_x.getDatatype() == determineDatatype< uint64_t >());
-#endif
- REQUIRE(isSame(posOff_x.getDatatype(), determineDatatype< uint64_t >()));
+ REQUIRE(o.author() == "Axel Huebl <a.huebl@hzdr.de>");
+ REQUIRE(o.software() == "PIConGPU");
+ REQUIRE(o.softwareVersion() == "0.2.0");
+ REQUIRE(o.date() == "2016-11-04 00:59:14 +0100");
+ REQUIRE(o.iterationEncoding() == IterationEncoding::fileBased);
+ REQUIRE(o.iterationFormat() == "h5/simData_%T.h5");
+ REQUIRE(o.name() == "simData_%T");
- auto position = pos_x.loadChunk< double >({0}, {4});
- auto position_raw = position.get();
- auto positionOffset = posOff_x.loadChunk< uint64_t >({0}, {4});
- auto positionOffset_raw = positionOffset.get();
- o.flush();
- for( uint64_t j = 0; j < 4; ++j )
- {
- REQUIRE(position_raw[j] == static_cast< double >(j + (entry.first-1)*4));
- REQUIRE(positionOffset_raw[j] == j + (entry.first-1)*4);
- }
- }
- }
+ REQUIRE(o.iterations.size() >= 1);
+ REQUIRE(o.iterations.count(0) == 1);
- // extend existing series with new step and auto-detection of iteration padding
- {
- Series o = Series("../samples/subdir/serial_fileBased_write%T.h5", AccessType::READ_WRITE);
+ Iteration& i = o.iterations[0];
+ REQUIRE(i.time< float >() == static_cast< float >(0.0f));
+ REQUIRE(i.dt< float >() == static_cast< float >(1.0f));
+ REQUIRE(i.timeUnitSI() == 1.3899999999999999e-16);
- REQUIRE(o.iterations.size() == 3);
- o.iterations[4];
- REQUIRE(o.iterations.size() == 4);
- }
- REQUIRE(auxiliary::file_exists("../samples/subdir/serial_fileBased_write00000004.h5"));
+ REQUIRE(i.meshes.size() == 4);
+ REQUIRE(i.meshes.count("B") == 1);
+ REQUIRE(i.meshes.count("E") == 1);
+ REQUIRE(i.meshes.count("e_chargeDensity") == 1);
+ REQUIRE(i.meshes.count("e_energyDensity") == 1);
- // additional iteration with different iteration padding but similar content
- {
- Series o = Series("../samples/subdir/serial_fileBased_write%01T.h5", AccessType::READ_WRITE);
+ std::vector< std::string > al{"z", "y", "x"};
+ std::vector< float > gs{static_cast< float >(6.2393283843994141f),
+ static_cast< float >(1.0630855560302734f),
+ static_cast< float >(6.2393283843994141f)};
+ std::vector< double > ggo{0., 0., 0.};
+ std::array< double, 7 > ud{{0., 1., -2., -1., 0., 0., 0.}};
+ Mesh& B = i.meshes["B"];
+ REQUIRE(B.geometry() == Mesh::Geometry::cartesian);
+ REQUIRE(B.dataOrder() == Mesh::DataOrder::C);
+ REQUIRE(B.axisLabels() == al);
+ REQUIRE(B.gridSpacing< float >() == gs);
+ REQUIRE(B.gridGlobalOffset() == ggo);
+ REQUIRE(B.gridUnitSI() == 4.1671151661999998e-08);
+ REQUIRE(B.unitDimension() == ud);
+ REQUIRE(B.timeOffset< float >() == static_cast< float >(0.0f));
- REQUIRE(o.iterations.empty());
+ REQUIRE(B.size() == 3);
+ REQUIRE(B.count("x") == 1);
+ REQUIRE(B.count("y") == 1);
+ REQUIRE(B.count("z") == 1);
- auto& it = o.iterations[10];
- ParticleSpecies& e = it.particles["e"];
- e["position"]["x"].resetDataset(Dataset(Datatype::DOUBLE, {42}));
- e["positionOffset"]["x"].resetDataset(Dataset(Datatype::DOUBLE, {42}));
- e["position"]["x"].makeConstant(1.23);
- e["positionOffset"]["x"].makeConstant(1.23);
+ std::vector< float > p{static_cast< float >(0.0f),
+ static_cast< float >(0.5f),
+ static_cast< float >(0.5f)};
+ Extent e{80, 384, 80};
+ MeshRecordComponent& B_x = B["x"];
+ REQUIRE(B_x.unitSI() == 40903.822240601701);
+ REQUIRE(B_x.position< float >() == p);
+ REQUIRE(B_x.getDatatype() == Datatype::FLOAT);
+ REQUIRE(B_x.getExtent() == e);
+ REQUIRE(B_x.getDimensionality() == 3);
- REQUIRE(o.iterations.size() == 1);
- }
- REQUIRE(auxiliary::file_exists("../samples/subdir/serial_fileBased_write10.h5"));
+ p = {static_cast< float >(0.5f),
+ static_cast< float >(0.0f),
+ static_cast< float >(0.5f)};
+ MeshRecordComponent& B_y = B["y"];
+ REQUIRE(B_y.unitSI() == 40903.822240601701);
+ REQUIRE(B_y.position< float >() == p);
+ REQUIRE(B_y.getDatatype() == Datatype::FLOAT);
+ REQUIRE(B_y.getExtent() == e);
+ REQUIRE(B_y.getDimensionality() == 3);
- // read back with auto-detection and non-fixed padding
- {
- Series s = Series("../samples/subdir/serial_fileBased_write%T.h5", AccessType::READ_ONLY);
- REQUIRE(s.iterations.size() == 5);
- }
-
- // write with auto-detection and in-consistent padding
- {
- REQUIRE_THROWS_WITH(Series("../samples/subdir/serial_fileBased_write%T.h5", AccessType::READ_WRITE),
- Catch::Equals("Cannot write to a series with inconsistent iteration padding. Please specify '%0T' or open as read-only."));
- }
-
- // read back with auto-detection and fixed padding
- {
- Series s = Series("../samples/subdir/serial_fileBased_write%08T.h5", AccessType::READ_ONLY);
- REQUIRE(s.iterations.size() == 4);
- }
-}
-
-TEST_CASE( "hdf5_bool_test", "[serial][hdf5]" )
-{
- {
- Series o = Series("../samples/serial_bool.h5", AccessType::CREATE);
-
- o.setAttribute("Bool attribute (true)", true);
- o.setAttribute("Bool attribute (false)", false);
- }
- {
- Series o = Series("../samples/serial_bool.h5", AccessType::READ_ONLY);
-
- auto attrs = o.attributes();
- REQUIRE(std::count(attrs.begin(), attrs.end(), "Bool attribute (true)") == 1);
- REQUIRE(std::count(attrs.begin(), attrs.end(), "Bool attribute (false)") == 1);
- REQUIRE(o.getAttribute("Bool attribute (true)").get< bool >() == true);
- REQUIRE(o.getAttribute("Bool attribute (false)").get< bool >() == false);
- }
-}
-
-TEST_CASE( "hdf5_patch_test", "[serial][hdf5]" )
-{
- Series o = Series("../samples/serial_patch.h5", AccessType::CREATE);
-
- auto dset = Dataset(Datatype::DOUBLE, {1});
- o.iterations[1].particles["e"].particlePatches["offset"]["x"].resetDataset(dset);
- o.iterations[1].particles["e"].particlePatches["offset"]["x"].setUnitSI(42);
- o.iterations[1].particles["e"]["position"][RecordComponent::SCALAR].resetDataset(dset);
- o.iterations[1].particles["e"]["positionOffset"][RecordComponent::SCALAR].resetDataset(dset);
-}
-
-TEST_CASE( "hdf5_deletion_test", "[serial][hdf5]" )
-{
- Series o = Series("../samples/serial_deletion.h5", AccessType::CREATE);
-
-
- o.setAttribute("removed",
- "this attribute will be removed after being written to disk");
- o.flush();
-
- o.deleteAttribute("removed");
- o.flush();
-
- ParticleSpecies& e = o.iterations[1].particles["e"];
- auto dset = Dataset(Datatype::DOUBLE, {1});
- e["position"][RecordComponent::SCALAR].resetDataset(dset);
- e["positionOffset"][RecordComponent::SCALAR].resetDataset(dset);
- e.erase("deletion");
- o.flush();
-
- e["deletion_scalar"][RecordComponent::SCALAR].resetDataset(dset);
- o.flush();
-
- e["deletion_scalar"].erase(RecordComponent::SCALAR);
- e.erase("deletion_scalar");
- o.flush();
-
- double value = 0.;
- e["deletion_scalar_constant"][RecordComponent::SCALAR].resetDataset(dset);
- e["deletion_scalar_constant"][RecordComponent::SCALAR].makeConstant(value);
- o.flush();
-
- e["deletion_scalar_constant"].erase(RecordComponent::SCALAR);
- e.erase("deletion_scalar_constant");
- o.flush();
-}
-
-TEST_CASE( "hdf5_110_optional_paths", "[serial][hdf5]" )
-{
- try
- {
- {
- Series s = Series("../samples/issue-sample/no_fields/data%T.h5", AccessType::READ_ONLY);
- auto attrs = s.attributes();
- REQUIRE(std::count(attrs.begin(), attrs.end(), "meshesPath") == 1);
- REQUIRE(std::count(attrs.begin(), attrs.end(), "particlesPath") == 1);
- REQUIRE(s.iterations[400].meshes.empty());
- REQUIRE(s.iterations[400].particles.size() == 1);
- }
-
- {
- Series s = Series("../samples/issue-sample/no_particles/data%T.h5", AccessType::READ_ONLY);
- auto attrs = s.attributes();
- REQUIRE(std::count(attrs.begin(), attrs.end(), "meshesPath") == 1);
- REQUIRE(std::count(attrs.begin(), attrs.end(), "particlesPath") == 1);
- REQUIRE(s.iterations[400].meshes.size() == 2);
- REQUIRE(s.iterations[400].particles.empty());
- }
- } catch (no_such_file_error& e)
- {
- std::cerr << "issue sample not accessible. (" << e.what() << ")\n";
- }
-
- {
- Series s = Series("../samples/no_meshes_1.1.0_compliant.h5", AccessType::CREATE);
- auto foo = s.iterations[1].particles["foo"];
- Dataset dset = Dataset(Datatype::DOUBLE, {1});
- foo["position"][RecordComponent::SCALAR].resetDataset(dset);
- foo["positionOffset"][RecordComponent::SCALAR].resetDataset(dset);
- }
-
- {
- Series s = Series("../samples/no_particles_1.1.0_compliant.h5", AccessType::CREATE);
- auto foo = s.iterations[1].meshes["foo"];
- Dataset dset = Dataset(Datatype::DOUBLE, {1});
- foo[RecordComponent::SCALAR].resetDataset(dset);
- }
-
- {
- Series s = Series("../samples/no_meshes_1.1.0_compliant.h5", AccessType::READ_ONLY);
- auto attrs = s.attributes();
- REQUIRE(std::count(attrs.begin(), attrs.end(), "meshesPath") == 0);
- REQUIRE(std::count(attrs.begin(), attrs.end(), "particlesPath") == 1);
- REQUIRE(s.iterations[1].meshes.empty());
- REQUIRE(s.iterations[1].particles.size() == 1);
- }
-
- {
- Series s = Series("../samples/no_particles_1.1.0_compliant.h5", AccessType::READ_ONLY);
- auto attrs = s.attributes();
- REQUIRE(std::count(attrs.begin(), attrs.end(), "meshesPath") == 1);
- REQUIRE(std::count(attrs.begin(), attrs.end(), "particlesPath") == 0);
- REQUIRE(s.iterations[1].meshes.size() == 1);
- REQUIRE(s.iterations[1].particles.empty());
- }
-}
+ p = {static_cast< float >(0.5f),
+ static_cast< float >(0.5f),
+ static_cast< float >(0.0f)};
+ MeshRecordComponent& B_z = B["z"];
+ REQUIRE(B_z.unitSI() == 40903.822240601701);
+ REQUIRE(B_z.position< float >() == p);
+ REQUIRE(B_z.getDatatype() == Datatype::FLOAT);
+ REQUIRE(B_z.getExtent() == e);
+ REQUIRE(B_z.getDimensionality() == 3);
-TEST_CASE( "hdf5_constant_scalar", "[serial][hdf5]" )
-{
- constant_scalar("h5");
-}
+ ud = {{1., 1., -3., -1., 0., 0., 0.}};
+ Mesh& E = i.meshes["E"];
+ REQUIRE(E.geometry() == Mesh::Geometry::cartesian);
+ REQUIRE(E.dataOrder() == Mesh::DataOrder::C);
+ REQUIRE(E.axisLabels() == al);
+ REQUIRE(E.gridSpacing< float >() == gs);
+ REQUIRE(E.gridGlobalOffset() == ggo);
+ REQUIRE(E.gridUnitSI() == 4.1671151661999998e-08);
+ REQUIRE(E.unitDimension() == ud);
+ REQUIRE(E.timeOffset< float >() == static_cast< float >(0.0f));
-TEST_CASE( "hdf5_particle_patches", "[serial][hdf5]" )
-{
- particle_patches("h5");
-}
-#else
-TEST_CASE( "no_serial_hdf5", "[serial][hdf5]" )
-{
- REQUIRE(true);
-}
-#endif
-#if openPMD_HAVE_ADIOS1
-TEST_CASE( "adios1_dtype_test", "[serial][adios1]" )
-{
- {
- Series s = Series("../samples/dtype_test.bp", AccessType::CREATE);
+ REQUIRE(E.size() == 3);
+ REQUIRE(E.count("x") == 1);
+ REQUIRE(E.count("y") == 1);
+ REQUIRE(E.count("z") == 1);
- char c = 'c';
- s.setAttribute("char", c);
- unsigned char uc = 'u';
- s.setAttribute("uchar", uc);
- int16_t i16 = 16;
- s.setAttribute("int16", i16);
- int32_t i32 = 32;
- s.setAttribute("int32", i32);
- int64_t i64 = 64;
- s.setAttribute("int64", i64);
- uint16_t u16 = 16u;
- s.setAttribute("uint16", u16);
- uint32_t u32 = 32u;
- s.setAttribute("uint32", u32);
- uint64_t u64 = 64u;
- s.setAttribute("uint64", u64);
- float f = 16.e10f;
- s.setAttribute("float", f);
- double d = 1.e64;
- s.setAttribute("double", d);
- long double ld = 1.e80L;
- s.setAttribute("longdouble", ld);
- std::string str = "string";
- s.setAttribute("string", str);
- s.setAttribute("vecChar", std::vector< char >({'c', 'h', 'a', 'r'}));
- s.setAttribute("vecInt16", std::vector< int16_t >({32766, 32767}));
- s.setAttribute("vecInt32", std::vector< int32_t >({2147483646, 2147483647}));
- s.setAttribute("vecInt64", std::vector< int64_t >({9223372036854775806, 9223372036854775807}));
- s.setAttribute("vecUchar", std::vector< char >({'u', 'c', 'h', 'a', 'r'}));
- s.setAttribute("vecUint16", std::vector< uint16_t >({65534u, 65535u}));
- s.setAttribute("vecUint32", std::vector< uint32_t >({4294967294u, 4294967295u}));
- s.setAttribute("vecUint64", std::vector< uint64_t >({18446744073709551614u, 18446744073709551615u}));
- s.setAttribute("vecFloat", std::vector< float >({0.f, 3.40282e+38f}));
- s.setAttribute("vecDouble", std::vector< double >({0., 1.79769e+308}));
- s.setAttribute("vecLongdouble", std::vector< long double >({0.L, std::numeric_limits< long double >::max()}));
- s.setAttribute("vecString", std::vector< std::string >({"vector", "of", "strings"}));
- s.setAttribute("bool", true);
- s.setAttribute("boolF", false);
+ p = {static_cast< float >(0.5f),
+ static_cast< float >(0.0f),
+ static_cast< float >(0.0f)};
+ e = {80, 384, 80};
+ MeshRecordComponent& E_x = E["x"];
+ REQUIRE(E_x.unitSI() == 12262657411105.049);
+ REQUIRE(E_x.position< float >() == p);
+ REQUIRE(E_x.getDatatype() == Datatype::FLOAT);
+ REQUIRE(E_x.getExtent() == e);
+ REQUIRE(E_x.getDimensionality() == 3);
- // translation from non-fixed size integer types
- short rs = 8;
- s.setAttribute("short", rs);
- int i = 16;
- s.setAttribute("int", i);
- long l = 32;
- s.setAttribute("long", l);
- long long ll = 64;
- s.setAttribute("longlong", ll);
- unsigned short us = 8u;
- s.setAttribute("ushort", us);
- unsigned int ui = 16u;
- s.setAttribute("uint", ui);
- unsigned long ul = 32u;
- s.setAttribute("ulong", ul);
- unsigned long long ull = 64u;
- s.setAttribute("ulonglong", ull);
- s.setAttribute("vecShort", std::vector< short >({32766, 32767}));
- s.setAttribute("vecInt", std::vector< int >({32766, 32767}));
- s.setAttribute("vecLong", std::vector< long >({2147483646, 2147483647}));
- s.setAttribute("vecLongLong", std::vector< long long >({2147483644, 2147483643}));
- s.setAttribute("vecUShort", std::vector< unsigned short >({65534u, 65535u}));
- s.setAttribute("vecUInt", std::vector< unsigned int >({65533u, 65531u}));
- s.setAttribute("vecULong", std::vector< unsigned long >({65532u, 65530u}));
- s.setAttribute("vecULongLong", std::vector< unsigned long long >({65529u, 65528u}));
- }
+ p = {static_cast< float >(0.0f),
+ static_cast< float >(0.5f),
+ static_cast< float >(0.0f)};
+ MeshRecordComponent& E_y = E["y"];
+ REQUIRE(E_y.unitSI() == 12262657411105.049);
+ REQUIRE(E_y.position< float >() == p);
+ REQUIRE(E_y.getDatatype() == Datatype::FLOAT);
+ REQUIRE(E_y.getExtent() == e);
+ REQUIRE(E_y.getDimensionality() == 3);
- Series s = Series("../samples/dtype_test.bp", AccessType::READ_ONLY);
+ p = {static_cast< float >(0.0f),
+ static_cast< float >(0.0f),
+ static_cast< float >(0.5f)};
+ MeshRecordComponent& E_z = E["z"];
+ REQUIRE(E_z.unitSI() == 12262657411105.049);
+ REQUIRE(E_z.position< float >() == p);
+ REQUIRE(E_z.getDatatype() == Datatype::FLOAT);
+ REQUIRE(E_z.getExtent() == e);
+ REQUIRE(E_z.getDimensionality() == 3);
- REQUIRE(s.getAttribute("char").get< char >() == 'c');
- REQUIRE(s.getAttribute("uchar").get< unsigned char >() == 'u');
- REQUIRE(s.getAttribute("int16").get< int16_t >() == 16);
- REQUIRE(s.getAttribute("int32").get< int32_t >() == 32);
- REQUIRE(s.getAttribute("int64").get< int64_t >() == 64);
- REQUIRE(s.getAttribute("uint16").get< uint16_t >() == 16u);
- REQUIRE(s.getAttribute("uint32").get< uint32_t >() == 32u);
- REQUIRE(s.getAttribute("uint64").get< uint64_t >() == 64u);
- REQUIRE(s.getAttribute("float").get< float >() == 16.e10f);
- REQUIRE(s.getAttribute("double").get< double >() == 1.e64);
- REQUIRE(s.getAttribute("longdouble").get< long double >() == 1.e80L);
- REQUIRE(s.getAttribute("string").get< std::string >() == "string");
- REQUIRE(s.getAttribute("vecChar").get< std::vector< char > >() == std::vector< char >({'c', 'h', 'a', 'r'}));
- REQUIRE(s.getAttribute("vecInt16").get< std::vector< int16_t > >() == std::vector< int16_t >({32766, 32767}));
- REQUIRE(s.getAttribute("vecInt32").get< std::vector< int32_t > >() == std::vector< int32_t >({2147483646, 2147483647}));
- REQUIRE(s.getAttribute("vecInt64").get< std::vector< int64_t > >() == std::vector< int64_t >({9223372036854775806, 9223372036854775807}));
- REQUIRE(s.getAttribute("vecUchar").get< std::vector< char > >() == std::vector< char >({'u', 'c', 'h', 'a', 'r'}));
- REQUIRE(s.getAttribute("vecUint16").get< std::vector< uint16_t > >() == std::vector< uint16_t >({65534u, 65535u}));
- REQUIRE(s.getAttribute("vecUint32").get< std::vector< uint32_t > >() == std::vector< uint32_t >({4294967294u, 4294967295u}));
- REQUIRE(s.getAttribute("vecUint64").get< std::vector< uint64_t > >() == std::vector< uint64_t >({18446744073709551614u, 18446744073709551615u}));
- REQUIRE(s.getAttribute("vecFloat").get< std::vector< float > >() == std::vector< float >({0.f, 3.40282e+38f}));
- REQUIRE(s.getAttribute("vecDouble").get< std::vector< double > >() == std::vector< double >({0., 1.79769e+308}));
- REQUIRE(s.getAttribute("vecLongdouble").get< std::vector< long double > >() == std::vector< long double >({0.L, std::numeric_limits< long double >::max()}));
- REQUIRE(s.getAttribute("vecString").get< std::vector< std::string > >() == std::vector< std::string >({"vector", "of", "strings"}));
- REQUIRE(s.getAttribute("bool").get< bool >() == true);
- REQUIRE(s.getAttribute("boolF").get< bool >() == false);
+ ud = {{-3., 0., 1., 1., 0., 0., 0.}};
+ Mesh& e_chargeDensity = i.meshes["e_chargeDensity"];
+ REQUIRE(e_chargeDensity.geometry() == Mesh::Geometry::cartesian);
+ REQUIRE(e_chargeDensity.dataOrder() == Mesh::DataOrder::C);
+ REQUIRE(e_chargeDensity.axisLabels() == al);
+ REQUIRE(e_chargeDensity.gridSpacing< float >() == gs);
+ REQUIRE(e_chargeDensity.gridGlobalOffset() == ggo);
+ REQUIRE(e_chargeDensity.gridUnitSI() == 4.1671151661999998e-08);
+ REQUIRE(e_chargeDensity.unitDimension() == ud);
+ REQUIRE(e_chargeDensity.timeOffset< float >() == static_cast< float >(0.0f));
- // translation from non-fixed size integer types
- REQUIRE(s.getAttribute("short").get< short >() == 8);
- REQUIRE(s.getAttribute("int").get< int >() == 16);
- REQUIRE(s.getAttribute("long").get< long >() == 32);
- REQUIRE(s.getAttribute("longlong").get< long long >() == 64);
- REQUIRE(s.getAttribute("ushort").get< unsigned short >() == 8u);
- REQUIRE(s.getAttribute("uint").get< unsigned int >() == 16u);
- REQUIRE(s.getAttribute("ulong").get< unsigned long >() == 32u);
- REQUIRE(s.getAttribute("ulonglong").get< unsigned long long >() == 64u);
- REQUIRE(s.getAttribute("vecShort").get< std::vector< short > >() == std::vector< short >({32766, 32767}));
- REQUIRE(s.getAttribute("vecInt").get< std::vector< int > >() == std::vector< int >({32766, 32767}));
- REQUIRE(s.getAttribute("vecLong").get< std::vector< long > >() == std::vector< long >({2147483646, 2147483647}));
- REQUIRE(s.getAttribute("vecLongLong").get< std::vector< long long > >() == std::vector< long long >({2147483644, 2147483643}));
- REQUIRE(s.getAttribute("vecUShort").get< std::vector< unsigned short > >() == std::vector< unsigned short >({65534u, 65535u}));
- REQUIRE(s.getAttribute("vecUInt").get< std::vector< unsigned int > >() == std::vector< unsigned int >({65533u, 65531u}));
- REQUIRE(s.getAttribute("vecULong").get< std::vector< unsigned long > >() == std::vector< unsigned long >({65532u, 65530u}));
- REQUIRE(s.getAttribute("vecULongLong").get< std::vector< unsigned long long > >() == std::vector< unsigned long long >({65529u, 65528u}));
-}
+ REQUIRE(e_chargeDensity.size() == 1);
+ REQUIRE(e_chargeDensity.count(MeshRecordComponent::SCALAR) == 1);
-TEST_CASE( "adios1_write_test", "[serial][adios1]")
-{
- Series o = Series("../samples/serial_write.bp", AccessType::CREATE);
+ p = {static_cast< float >(0.f),
+ static_cast< float >(0.f),
+ static_cast< float >(0.f)};
+ MeshRecordComponent& e_chargeDensity_scalar = e_chargeDensity[MeshRecordComponent::SCALAR];
+ REQUIRE(e_chargeDensity_scalar.unitSI() == 66306201.002331272);
+ REQUIRE(e_chargeDensity_scalar.position< float >() == p);
+ REQUIRE(e_chargeDensity_scalar.getDatatype() == Datatype::FLOAT);
+ REQUIRE(e_chargeDensity_scalar.getExtent() == e);
+ REQUIRE(e_chargeDensity_scalar.getDimensionality() == 3);
- ParticleSpecies& e_1 = o.iterations[1].particles["e"];
+ ud = {{-1., 1., -2., 0., 0., 0., 0.}};
+ Mesh& e_energyDensity = i.meshes["e_energyDensity"];
+ REQUIRE(e_energyDensity.geometry() == Mesh::Geometry::cartesian);
+ REQUIRE(e_energyDensity.dataOrder() == Mesh::DataOrder::C);
+ REQUIRE(e_energyDensity.axisLabels() == al);
+ REQUIRE(e_energyDensity.gridSpacing< float >() == gs);
+ REQUIRE(e_energyDensity.gridGlobalOffset() == ggo);
+ REQUIRE(e_energyDensity.gridUnitSI() == 4.1671151661999998e-08);
+ REQUIRE(e_energyDensity.unitDimension() == ud);
+ REQUIRE(e_energyDensity.timeOffset< float >() == static_cast< float >(0.0f));
- std::vector< double > position_global(4);
- double pos{0.};
- std::generate(position_global.begin(), position_global.end(), [&pos]{ return pos++; });
- std::shared_ptr< double > position_local_1(new double);
- e_1["position"]["x"].resetDataset(Dataset(determineDatatype(position_local_1), {4}));
+ REQUIRE(e_energyDensity.size() == 1);
+ REQUIRE(e_energyDensity.count(MeshRecordComponent::SCALAR) == 1);
- for( uint64_t i = 0; i < 4; ++i )
- {
- *position_local_1 = position_global[i];
- e_1["position"]["x"].storeChunk(position_local_1, {i}, {1});
- }
+ MeshRecordComponent& e_energyDensity_scalar = e_energyDensity[MeshRecordComponent::SCALAR];
+ REQUIRE(e_energyDensity_scalar.unitSI() == 1.0146696675429705e+18);
+ REQUIRE(e_energyDensity_scalar.position< float >() == p);
+ REQUIRE(e_energyDensity_scalar.getDatatype() == Datatype::FLOAT);
+ REQUIRE(e_energyDensity_scalar.getExtent() == e);
+ REQUIRE(e_energyDensity_scalar.getDimensionality() == 3);
- std::vector< uint64_t > positionOffset_global(4);
- uint64_t posOff{0};
- std::generate(positionOffset_global.begin(), positionOffset_global.end(), [&posOff]{ return posOff++; });
- std::shared_ptr< uint64_t > positionOffset_local_1(new uint64_t);
- e_1["positionOffset"]["x"].resetDataset(Dataset(determineDatatype(positionOffset_local_1), {4}));
+ REQUIRE(i.particles.size() == 1);
+ REQUIRE(i.particles.count("e") == 1);
- for( uint64_t i = 0; i < 4; ++i )
- {
- *positionOffset_local_1 = positionOffset_global[i];
- e_1["positionOffset"]["x"].storeChunk(positionOffset_local_1, {i}, {1});
- }
+ ParticleSpecies& species_e = i.particles["e"];
- ParticleSpecies& e_2 = o.iterations[2].particles["e"];
+ REQUIRE(species_e.size() == 6);
+ REQUIRE(species_e.count("charge") == 1);
+ REQUIRE(species_e.count("mass") == 1);
+ REQUIRE(species_e.count("momentum") == 1);
+ REQUIRE(species_e.count("particlePatches") == 0);
+ REQUIRE(species_e.count("position") == 1);
+ REQUIRE(species_e.count("positionOffset") == 1);
+ REQUIRE(species_e.count("weighting") == 1);
- std::generate(position_global.begin(), position_global.end(), [&pos]{ return pos++; });
- std::shared_ptr< double > position_local_2(new double);
- e_2["position"]["x"].resetDataset(Dataset(determineDatatype(position_local_2), {4}));
+ ud = {{0., 0., 1., 1., 0., 0., 0.}};
+ Record& e_charge = species_e["charge"];
+ REQUIRE(e_charge.unitDimension() == ud);
+ REQUIRE(e_charge.timeOffset< float >() == static_cast< float >(0.0f));
- for( uint64_t i = 0; i < 4; ++i )
- {
- *position_local_2 = position_global[i];
- e_2["position"]["x"].storeChunk(position_local_2, {i}, {1});
- }
+ REQUIRE(e_charge.size() == 1);
+ REQUIRE(e_charge.count(RecordComponent::SCALAR) == 1);
- std::generate(positionOffset_global.begin(), positionOffset_global.end(), [&posOff]{ return posOff++; });
- std::shared_ptr< uint64_t > positionOffset_local_2(new uint64_t);
- e_2["positionOffset"]["x"].resetDataset(Dataset(determineDatatype(positionOffset_local_2), {4}));
+ e = {2150400};
+ RecordComponent& e_charge_scalar = e_charge[RecordComponent::SCALAR];
+ REQUIRE(e_charge_scalar.unitSI() == 4.7980045488500004e-15);
+ REQUIRE(e_charge_scalar.getDatatype() == Datatype::DOUBLE);
+ REQUIRE(e_charge_scalar.getExtent() == e);
+ REQUIRE(e_charge_scalar.getDimensionality() == 1);
- for( uint64_t i = 0; i < 4; ++i )
- {
- *positionOffset_local_2 = positionOffset_global[i];
- e_2["positionOffset"]["x"].storeChunk(positionOffset_local_2, {i}, {1});
- }
+ ud = {{0., 1., 0., 0., 0., 0., 0.}};
+ Record& e_mass = species_e["mass"];
+ REQUIRE(e_mass.unitDimension() == ud);
+ REQUIRE(e_mass.timeOffset< float >() == static_cast< float >(0.0f));
- o.flush();
+ REQUIRE(e_mass.size() == 1);
+ REQUIRE(e_mass.count(RecordComponent::SCALAR) == 1);
- ParticleSpecies& e_3 = o.iterations[3].particles["e"];
+ RecordComponent& e_mass_scalar = e_mass[RecordComponent::SCALAR];
+ REQUIRE(e_mass_scalar.unitSI() == 2.7279684799430467e-26);
+ REQUIRE(e_mass_scalar.getDatatype() == Datatype::DOUBLE);
+ REQUIRE(e_mass_scalar.getExtent() == e);
+ REQUIRE(e_mass_scalar.getDimensionality() == 1);
- std::generate(position_global.begin(), position_global.end(), [&pos]{ return pos++; });
- std::shared_ptr< double > position_local_3(new double);
- e_3["position"]["x"].resetDataset(Dataset(determineDatatype(position_local_3), {4}));
+ ud = {{1., 1., -1., 0., 0., 0., 0.}};
+ Record& e_momentum = species_e["momentum"];
+ REQUIRE(e_momentum.unitDimension() == ud);
+ REQUIRE(e_momentum.timeOffset< float >() == static_cast< float >(0.0f));
- for( uint64_t i = 0; i < 4; ++i )
- {
- *position_local_3 = position_global[i];
- e_3["position"]["x"].storeChunk(position_local_3, {i}, {1});
- }
+ REQUIRE(e_momentum.size() == 3);
+ REQUIRE(e_momentum.count("x") == 1);
+ REQUIRE(e_momentum.count("y") == 1);
+ REQUIRE(e_momentum.count("z") == 1);
- std::generate(positionOffset_global.begin(), positionOffset_global.end(), [&posOff]{ return posOff++; });
- std::shared_ptr< uint64_t > positionOffset_local_3(new uint64_t);
- e_3["positionOffset"]["x"].resetDataset(Dataset(determineDatatype(positionOffset_local_3), {4}));
+ RecordComponent& e_momentum_x = e_momentum["x"];
+ REQUIRE(e_momentum_x.unitSI() == 8.1782437594864961e-18);
+ REQUIRE(e_momentum_x.getDatatype() == Datatype::FLOAT);
+ REQUIRE(e_momentum_x.getExtent() == e);
+ REQUIRE(e_momentum_x.getDimensionality() == 1);
- for( uint64_t i = 0; i < 4; ++i )
- {
- *positionOffset_local_3 = positionOffset_global[i];
- e_3["positionOffset"]["x"].storeChunk(positionOffset_local_3, {i}, {1});
- }
+ RecordComponent& e_momentum_y = e_momentum["y"];
+ REQUIRE(e_momentum_y.unitSI() == 8.1782437594864961e-18);
+ REQUIRE(e_momentum_y.getDatatype() == Datatype::FLOAT);
+ REQUIRE(e_momentum_y.getExtent() == e);
+ REQUIRE(e_momentum_y.getDimensionality() == 1);
- o.flush();
-}
+ RecordComponent& e_momentum_z = e_momentum["z"];
+ REQUIRE(e_momentum_z.unitSI() == 8.1782437594864961e-18);
+ REQUIRE(e_momentum_z.getDatatype() == Datatype::FLOAT);
+ REQUIRE(e_momentum_z.getExtent() == e);
+ REQUIRE(e_momentum_z.getDimensionality() == 1);
-TEST_CASE( "adios1_fileBased_write_empty_test", "[serial][adios1]" )
-{
- if( auxiliary::directory_exists("../samples/subdir") )
- auxiliary::remove_directory("../samples/subdir");
+ ud = {{1., 0., 0., 0., 0., 0., 0.}};
+ Record& e_position = species_e["position"];
+ REQUIRE(e_position.unitDimension() == ud);
+ REQUIRE(e_position.timeOffset< float >() == static_cast< float >(0.0f));
- Dataset dset = Dataset(Datatype::DOUBLE, {2});
- {
- Series o = Series("../samples/subdir/serial_fileBased_write%T.bp", AccessType::CREATE);
+ REQUIRE(e_position.size() == 3);
+ REQUIRE(e_position.count("x") == 1);
+ REQUIRE(e_position.count("y") == 1);
+ REQUIRE(e_position.count("z") == 1);
- ParticleSpecies& e_1 = o.iterations[1].particles["e"];
- e_1["position"][RecordComponent::SCALAR].resetDataset(dset);
- e_1["positionOffset"][RecordComponent::SCALAR].resetDataset(dset);
- o.iterations[1].setTime(1.f);
- ParticleSpecies& e_2 = o.iterations[2].particles["e"];
- e_2["position"][RecordComponent::SCALAR].resetDataset(dset);
- e_2["positionOffset"][RecordComponent::SCALAR].resetDataset(dset);
- o.iterations[2].setTime(2.f);
- ParticleSpecies& e_3 = o.iterations[3].particles["e"];
- e_3["position"][RecordComponent::SCALAR].resetDataset(dset);
- e_3["positionOffset"][RecordComponent::SCALAR].resetDataset(dset);
- o.iterations[3].setTime(3.f);
- }
+ RecordComponent& e_position_x = e_position["x"];
+ REQUIRE(e_position_x.unitSI() == 2.599999993753294e-07);
+ REQUIRE(e_position_x.getDatatype() == Datatype::FLOAT);
+ REQUIRE(e_position_x.getExtent() == e);
+ REQUIRE(e_position_x.getDimensionality() == 1);
- REQUIRE(auxiliary::directory_exists("../samples/subdir"));
- REQUIRE(auxiliary::file_exists("../samples/subdir/serial_fileBased_write1.bp"));
- REQUIRE(auxiliary::file_exists("../samples/subdir/serial_fileBased_write2.bp"));
- REQUIRE(auxiliary::file_exists("../samples/subdir/serial_fileBased_write3.bp"));
+ RecordComponent& e_position_y = e_position["y"];
+ REQUIRE(e_position_y.unitSI() == 4.4299999435019118e-08);
+ REQUIRE(e_position_y.getDatatype() == Datatype::FLOAT);
+ REQUIRE(e_position_y.getExtent() == e);
+ REQUIRE(e_position_y.getDimensionality() == 1);
- {
- Series o = Series("../samples/subdir/serial_fileBased_write%T.bp", AccessType::READ_ONLY);
+ RecordComponent& e_position_z = e_position["z"];
+ REQUIRE(e_position_z.unitSI() == 2.599999993753294e-07);
+ REQUIRE(e_position_z.getDatatype() == Datatype::FLOAT);
+ REQUIRE(e_position_z.getExtent() == e);
+ REQUIRE(e_position_z.getDimensionality() == 1);
- REQUIRE(o.iterations.size() == 3);
- REQUIRE(o.iterations.count(1) == 1);
- REQUIRE(o.iterations.count(2) == 1);
- REQUIRE(o.iterations.count(3) == 1);
+ ud = {{1., 0., 0., 0., 0., 0., 0.}};
+ Record& e_positionOffset = species_e["positionOffset"];
+ REQUIRE(e_positionOffset.unitDimension() == ud);
+ REQUIRE(e_positionOffset.timeOffset< float >() == static_cast< float >(0.0f));
- REQUIRE(o.iterations[1].time< float >() == 1.f);
- REQUIRE(o.iterations[2].time< float >() == 2.f);
- REQUIRE(o.iterations[3].time< float >() == 3.f);
+ REQUIRE(e_positionOffset.size() == 3);
+ REQUIRE(e_positionOffset.count("x") == 1);
+ REQUIRE(e_positionOffset.count("y") == 1);
+ REQUIRE(e_positionOffset.count("z") == 1);
- REQUIRE(o.iterations[1].particles.size() == 1);
- REQUIRE(o.iterations[1].particles.count("e") == 1);
- REQUIRE(o.iterations[2].particles.size() == 1);
- REQUIRE(o.iterations[2].particles.count("e") == 1);
- REQUIRE(o.iterations[3].particles.size() == 1);
- REQUIRE(o.iterations[3].particles.count("e") == 1);
+ RecordComponent& e_positionOffset_x = e_positionOffset["x"];
+ REQUIRE(e_positionOffset_x.unitSI() == 2.599999993753294e-07);
+ REQUIRE(e_positionOffset_x.getDatatype() == determineDatatype< int32_t >());
+ REQUIRE(e_positionOffset_x.getExtent() == e);
+ REQUIRE(e_positionOffset_x.getDimensionality() == 1);
- REQUIRE(o.iterations[1].particles["e"].size() == 2);
- REQUIRE(o.iterations[1].particles["e"].count("position") == 1);
- REQUIRE(o.iterations[1].particles["e"].count("positionOffset") == 1);
- REQUIRE(o.iterations[2].particles["e"].size() == 2);
- REQUIRE(o.iterations[2].particles["e"].count("position") == 1);
- REQUIRE(o.iterations[2].particles["e"].count("positionOffset") == 1);
- REQUIRE(o.iterations[3].particles["e"].size() == 2);
- REQUIRE(o.iterations[3].particles["e"].count("position") == 1);
- REQUIRE(o.iterations[3].particles["e"].count("positionOffset") == 1);
+ RecordComponent& e_positionOffset_y = e_positionOffset["y"];
+ REQUIRE(e_positionOffset_y.unitSI() == 4.4299999435019118e-08);
+ REQUIRE(e_positionOffset_y.getDatatype() == determineDatatype< int32_t >());
+ REQUIRE(e_positionOffset_y.getExtent() == e);
+ REQUIRE(e_positionOffset_y.getDimensionality() == 1);
- REQUIRE(o.iterations[1].particles["e"]["position"].size() == 1);
- REQUIRE(o.iterations[1].particles["e"]["position"].count(RecordComponent::SCALAR) == 1);
- REQUIRE(o.iterations[1].particles["e"]["positionOffset"].size() == 1);
- REQUIRE(o.iterations[1].particles["e"]["positionOffset"].count(RecordComponent::SCALAR) == 1);
- REQUIRE(o.iterations[1].particles["e"]["position"][RecordComponent::SCALAR].getDatatype() == Datatype::DOUBLE);
- REQUIRE(o.iterations[1].particles["e"]["position"][RecordComponent::SCALAR].getDimensionality() == 1);
- REQUIRE(o.iterations[1].particles["e"]["position"][RecordComponent::SCALAR].getExtent() == Extent{2});
- REQUIRE(o.iterations[2].particles["e"]["position"].size() == 1);
- REQUIRE(o.iterations[2].particles["e"]["position"].count(RecordComponent::SCALAR) == 1);
- REQUIRE(o.iterations[2].particles["e"]["positionOffset"].size() == 1);
- REQUIRE(o.iterations[2].particles["e"]["positionOffset"].count(RecordComponent::SCALAR) == 1);
- REQUIRE(o.iterations[2].particles["e"]["position"][RecordComponent::SCALAR].getDatatype() == Datatype::DOUBLE);
- REQUIRE(o.iterations[2].particles["e"]["position"][RecordComponent::SCALAR].getDimensionality() == 1);
- REQUIRE(o.iterations[2].particles["e"]["position"][RecordComponent::SCALAR].getExtent() == Extent{2});
- REQUIRE(o.iterations[3].particles["e"]["position"].size() == 1);
- REQUIRE(o.iterations[3].particles["e"]["position"].count(RecordComponent::SCALAR) == 1);
- REQUIRE(o.iterations[3].particles["e"]["positionOffset"].size() == 1);
- REQUIRE(o.iterations[3].particles["e"]["positionOffset"].count(RecordComponent::SCALAR) == 1);
- REQUIRE(o.iterations[3].particles["e"]["position"][RecordComponent::SCALAR].getDatatype() == Datatype::DOUBLE);
- REQUIRE(o.iterations[3].particles["e"]["position"][RecordComponent::SCALAR].getDimensionality() == 1);
- REQUIRE(o.iterations[3].particles["e"]["position"][RecordComponent::SCALAR].getExtent() == Extent{2});
- }
+ RecordComponent& e_positionOffset_z = e_positionOffset["z"];
+ REQUIRE(e_positionOffset_z.unitSI() == 2.599999993753294e-07);
+ REQUIRE(e_positionOffset_z.getDatatype() == determineDatatype< int32_t >());
+ REQUIRE(e_positionOffset_z.getExtent() == e);
+ REQUIRE(e_positionOffset_z.getDimensionality() == 1);
- {
- Series o = Series("../samples/subdir/serial_fileBased_write%T.bp", AccessType::READ_WRITE);
- ParticleSpecies& e_4 = o.iterations[4].particles["e"];
- e_4["position"][RecordComponent::SCALAR].resetDataset(dset);
- e_4["positionOffset"][RecordComponent::SCALAR].resetDataset(dset);
- o.iterations[4].setTime(4.f);
- }
+ ud = {{0., 0., 0., 0., 0., 0., 0.}};
+ Record& e_weighting = species_e["weighting"];
+ REQUIRE(e_weighting.unitDimension() == ud);
+ REQUIRE(e_weighting.timeOffset< float >() == static_cast< float >(0.0f));
- {
- Series o = Series("../samples/subdir/serial_fileBased_write%T.bp", AccessType::READ_ONLY);
+ REQUIRE(e_weighting.size() == 1);
+ REQUIRE(e_weighting.count(RecordComponent::SCALAR) == 1);
- REQUIRE(o.iterations.size() == 4);
- REQUIRE(o.iterations.count(4) == 1);
+ RecordComponent& e_weighting_scalar = e_weighting[RecordComponent::SCALAR];
+ REQUIRE(e_weighting_scalar.unitSI() == 1.0);
+ REQUIRE(e_weighting_scalar.getDatatype() == Datatype::FLOAT);
+ REQUIRE(e_weighting_scalar.getExtent() == e);
+ REQUIRE(e_weighting_scalar.getDimensionality() == 1);
- REQUIRE(o.iterations[4].time< float >() == 4.f);
+ ParticlePatches& e_patches = species_e.particlePatches;
+ REQUIRE(e_patches.size() == 4); /* extent, numParticles, numParticlesOffset, offset */
+ REQUIRE(e_patches.count("extent") == 1);
+ REQUIRE(e_patches.count("numParticles") == 1);
+ REQUIRE(e_patches.count("numParticlesOffset") == 1);
+ REQUIRE(e_patches.count("offset") == 1);
+ REQUIRE(e_patches.numPatches() == 4);
- REQUIRE(o.iterations[4].particles.size() == 1);
- REQUIRE(o.iterations[4].particles.count("e") == 1);
+ ud = {{1., 0., 0., 0., 0., 0., 0.}};
+ PatchRecord& e_extent = e_patches["extent"];
+ REQUIRE(e_extent.unitDimension() == ud);
- REQUIRE(o.iterations[4].particles["e"].size() == 2);
- REQUIRE(o.iterations[4].particles["e"].count("position") == 1);
- REQUIRE(o.iterations[4].particles["e"].count("positionOffset") == 1);
+ REQUIRE(e_extent.size() == 3);
+ REQUIRE(e_extent.count("x") == 1);
+ REQUIRE(e_extent.count("y") == 1);
+ REQUIRE(e_extent.count("z") == 1);
- REQUIRE(o.iterations[4].particles["e"]["position"].size() == 1);
- REQUIRE(o.iterations[4].particles["e"]["position"].count(RecordComponent::SCALAR) == 1);
- REQUIRE(o.iterations[4].particles["e"]["positionOffset"].size() == 1);
- REQUIRE(o.iterations[4].particles["e"]["positionOffset"].count(RecordComponent::SCALAR) == 1);
- REQUIRE(o.iterations[4].particles["e"]["position"][RecordComponent::SCALAR].getDatatype() == Datatype::DOUBLE);
- REQUIRE(o.iterations[4].particles["e"]["position"][RecordComponent::SCALAR].getDimensionality() == 1);
- REQUIRE(o.iterations[4].particles["e"]["position"][RecordComponent::SCALAR].getExtent() == Extent{2});
- }
-}
+ PatchRecordComponent& e_extent_x = e_extent["x"];
+ REQUIRE(e_extent_x.unitSI() == 2.599999993753294e-07);
+#if !defined(_MSC_VER)
+ REQUIRE(e_extent_x.getDatatype() == determineDatatype< uint64_t >());
+#endif
+ REQUIRE(isSame(e_extent_x.getDatatype(), determineDatatype< uint64_t >()));
-TEST_CASE( "adios1_fileBased_write_test", "[serial][adios1]" )
-{
- {
- Series o = Series("../samples/serial_fileBased_write%T.bp", AccessType::CREATE);
+ PatchRecordComponent& e_extent_y = e_extent["y"];
+ REQUIRE(e_extent_y.unitSI() == 4.429999943501912e-08);
+#if !defined(_MSC_VER)
+ REQUIRE(e_extent_y.getDatatype() == determineDatatype< uint64_t >());
+#endif
+ REQUIRE(isSame(e_extent_y.getDatatype(), determineDatatype< uint64_t >()));
- ParticleSpecies& e_1 = o.iterations[1].particles["e"];
+ PatchRecordComponent& e_extent_z = e_extent["z"];
+ REQUIRE(e_extent_z.unitSI() == 2.599999993753294e-07);
+#if !defined(_MSC_VER)
+ REQUIRE(e_extent_z.getDatatype() == determineDatatype< uint64_t >());
+#endif
+ REQUIRE(isSame(e_extent_z.getDatatype(), determineDatatype< uint64_t >()));
- std::vector< double > position_global(4);
- double pos{0.};
- std::generate(position_global.begin(), position_global.end(), [&pos]{ return pos++; });
- std::shared_ptr< double > position_local_1(new double);
- e_1["position"]["x"].resetDataset(Dataset(determineDatatype(position_local_1), {4}));
- std::vector< uint64_t > positionOffset_global(4);
- uint64_t posOff{0};
- std::generate(positionOffset_global.begin(), positionOffset_global.end(), [&posOff]{ return posOff++; });
- std::shared_ptr< uint64_t > positionOffset_local_1(new uint64_t);
- e_1["positionOffset"]["x"].resetDataset(Dataset(determineDatatype(positionOffset_local_1), {4}));
+ std::vector< uint64_t > data( e_patches.size() );
+ e_extent_z.load(shareRaw(data.data()));
+ o.flush();
+ REQUIRE(data.at(0) == static_cast< uint64_t >(80));
+ REQUIRE(data.at(1) == static_cast< uint64_t >(80));
+ REQUIRE(data.at(2) == static_cast< uint64_t >(80));
+ REQUIRE(data.at(3) == static_cast< uint64_t >(80));
- for( uint64_t i = 0; i < 4; ++i )
- {
- *position_local_1 = position_global[i];
- e_1["position"]["x"].storeChunk(position_local_1, {i}, {1});
- *positionOffset_local_1 = positionOffset_global[i];
- e_1["positionOffset"]["x"].storeChunk(positionOffset_local_1, {i}, {1});
- o.flush();
- }
+ PatchRecord& e_numParticles = e_patches["numParticles"];
+ REQUIRE(e_numParticles.size() == 1);
+ REQUIRE(e_numParticles.count(RecordComponent::SCALAR) == 1);
- o.iterations[1].setTime(1.f);
+ PatchRecordComponent& e_numParticles_scalar = e_numParticles[RecordComponent::SCALAR];
+#if !defined(_MSC_VER)
+ REQUIRE(e_numParticles_scalar.getDatatype() == determineDatatype< uint64_t >());
+#endif
+ REQUIRE(isSame(e_numParticles_scalar.getDatatype(), determineDatatype< uint64_t >()));
- ParticleSpecies& e_2 = o.iterations[2].particles["e"];
+ e_numParticles_scalar.load(shareRaw(data.data()));
+ o.flush();
+ REQUIRE(data.at(0) == static_cast< uint64_t >(512000));
+ REQUIRE(data.at(1) == static_cast< uint64_t >(819200));
+ REQUIRE(data.at(2) == static_cast< uint64_t >(819200));
+ REQUIRE(data.at(3) == static_cast< uint64_t >(0));
- std::generate(position_global.begin(), position_global.end(), [&pos]{ return pos++; });
- e_2["position"]["x"].resetDataset(Dataset(determineDatatype(), {4}));
- std::generate(positionOffset_global.begin(), positionOffset_global.end(), [&posOff]{ return posOff++; });
- std::shared_ptr< uint64_t > positionOffset_local_2(new uint64_t);
- e_2["positionOffset"]["x"].resetDataset(Dataset(determineDatatype(positionOffset_local_2), {4}));
+ PatchRecord& e_numParticlesOffset = e_patches["numParticlesOffset"];
+ REQUIRE(e_numParticlesOffset.size() == 1);
+ REQUIRE(e_numParticlesOffset.count(RecordComponent::SCALAR) == 1);
- for( uint64_t i = 0; i < 4; ++i )
- {
- double const position_local_2 = position_global.at(i);
- e_2["position"]["x"].storeChunk(shareRaw(&position_local_2), {i}, {1});
- *positionOffset_local_2 = positionOffset_global[i];
- e_2["positionOffset"]["x"].storeChunk(positionOffset_local_2, {i}, {1});
- o.flush();
- }
+ PatchRecordComponent& e_numParticlesOffset_scalar = e_numParticlesOffset[RecordComponent::SCALAR];
+#if !defined(_MSC_VER)
+ REQUIRE(e_numParticlesOffset_scalar.getDatatype() == determineDatatype< uint64_t >());
+#endif
+ REQUIRE(isSame(e_numParticlesOffset_scalar.getDatatype(), determineDatatype< uint64_t >()));
- o.iterations[2].setTime(2.f);
+ PatchRecord& e_offset = e_patches["offset"];
+ REQUIRE(e_offset.unitDimension() == ud);
- ParticleSpecies& e_3 = o.iterations[3].particles["e"];
+ REQUIRE(e_offset.size() == 3);
+ REQUIRE(e_offset.count("x") == 1);
+ REQUIRE(e_offset.count("y") == 1);
+ REQUIRE(e_offset.count("z") == 1);
- std::generate(position_global.begin(), position_global.end(), [&pos]{ return pos++; });
- std::shared_ptr< double > position_local_3(new double);
- e_3["position"]["x"].resetDataset(Dataset(determineDatatype(position_local_3), {4}));
- std::generate(positionOffset_global.begin(), positionOffset_global.end(), [&posOff]{ return posOff++; });
- std::shared_ptr< uint64_t > positionOffset_local_3(new uint64_t);
- e_3["positionOffset"]["x"].resetDataset(Dataset(determineDatatype(positionOffset_local_3), {4}));
+ PatchRecordComponent& e_offset_x = e_offset["x"];
+ REQUIRE(e_offset_x.unitSI() == 2.599999993753294e-07);
+#if !defined(_MSC_VER)
+ REQUIRE(e_offset_x.getDatatype() == determineDatatype< uint64_t >());
+#endif
+ REQUIRE(isSame(e_offset_x.getDatatype(), determineDatatype< uint64_t >()));
- for( uint64_t i = 0; i < 4; ++i )
- {
- *position_local_3 = position_global[i];
- e_3["position"]["x"].storeChunk(position_local_3, {i}, {1});
- *positionOffset_local_3 = positionOffset_global[i];
- e_3["positionOffset"]["x"].storeChunk(positionOffset_local_3, {i}, {1});
- o.flush();
- }
+ PatchRecordComponent& e_offset_y = e_offset["y"];
+ REQUIRE(e_offset_y.unitSI() == 4.429999943501912e-08);
+#if !defined(_MSC_VER)
+ REQUIRE(e_offset_y.getDatatype() == determineDatatype< uint64_t >());
+#endif
+ REQUIRE(isSame(e_offset_y.getDatatype(), determineDatatype< uint64_t >()));
- o.setOpenPMDextension(1);
- o.iterations[3].setTime(3.f);
- }
- REQUIRE(auxiliary::file_exists("../samples/serial_fileBased_write1.bp"));
- REQUIRE(auxiliary::file_exists("../samples/serial_fileBased_write2.bp"));
- REQUIRE(auxiliary::file_exists("../samples/serial_fileBased_write3.bp"));
+ e_offset_y.load(shareRaw(data.data()));
+ o.flush();
+ REQUIRE(data.at(0) == static_cast< uint64_t >(0));
+ REQUIRE(data.at(1) == static_cast< uint64_t >(128));
+ REQUIRE(data.at(2) == static_cast< uint64_t >(256));
+ REQUIRE(data.at(3) == static_cast< uint64_t >(384));
+ PatchRecordComponent& e_offset_z = e_offset["z"];
+ REQUIRE(e_offset_z.unitSI() == 2.599999993753294e-07);
+#if !defined(_MSC_VER)
+ REQUIRE(e_offset_z.getDatatype() == determineDatatype< uint64_t >());
+#endif
+ REQUIRE(isSame(e_offset_z.getDatatype(), determineDatatype< uint64_t >()));
+ } catch (no_such_file_error& e)
{
- Series o = Series("../samples/serial_fileBased_write%T.bp", AccessType::READ_ONLY);
+ std::cerr << "HZDR sample not accessible. (" << e.what() << ")\n";
+ return;
+ }
+}
- REQUIRE(o.iterations.size() == 3);
- REQUIRE(o.iterations.count(1) == 1);
- REQUIRE(o.iterations.count(2) == 1);
- REQUIRE(o.iterations.count(3) == 1);
+TEST_CASE( "hdf5_dtype_test", "[serial][hdf5]" )
+{
+ dtype_test("h5");
+}
- REQUIRE(o.iterations.at(1).time< float >() == 1.f);
- REQUIRE(o.iterations.at(2).time< float >() == 2.f);
- REQUIRE(o.iterations.at(3).time< float >() == 3.f);
+TEST_CASE( "hdf5_write_test", "[serial][hdf5]" )
+{
+ write_test("h5");
+}
- REQUIRE(o.basePath() == "/data/%T/");
- REQUIRE(o.iterationEncoding() == IterationEncoding::fileBased);
- REQUIRE(o.iterationFormat() == "serial_fileBased_write%T");
- REQUIRE(o.openPMD() == "1.1.0");
- REQUIRE(o.openPMDextension() == 1u);
- REQUIRE(o.particlesPath() == "particles/");
- REQUIRE_FALSE(o.containsAttribute("meshesPath"));
- REQUIRE_THROWS_AS(o.meshesPath(), no_such_attribute_error);
+TEST_CASE( "hdf5_fileBased_write_empty_test", "[serial][hdf5]" )
+{
+ fileBased_write_empty_test("h5");
+}
- for( uint64_t i = 1; i <= 3; ++i )
- {
- Iteration iteration = o.iterations.at(i);
+TEST_CASE( "hdf5_fileBased_write_test", "[serial][hdf5]" )
+{
+ fileBased_write_test("h5");
+}
+
+TEST_CASE( "hdf5_bool_test", "[serial][hdf5]" )
+{
+ bool_test("h5");
+}
+
+TEST_CASE( "hdf5_patch_test", "[serial][hdf5]" )
+{
+ patch_test("h5");
+}
- REQUIRE(iteration.particles.size() == 1);
- REQUIRE(iteration.particles.count("e") == 1);
+TEST_CASE( "hdf5_deletion_test", "[serial][hdf5]" )
+{
+ deletion_test("h5");
+}
- ParticleSpecies& species = iteration.particles.at("e");
+TEST_CASE( "hdf5_110_optional_paths", "[serial][hdf5]" )
+{
+ optional_paths_110_test("h5");
+}
- REQUIRE(species.size() == 2);
- REQUIRE(species.count("position") == 1);
- REQUIRE(species.count("positionOffset") == 1);
+TEST_CASE( "hdf5_constant_scalar", "[serial][hdf5]" )
+{
+ constant_scalar("h5");
+}
- REQUIRE(species.at("position").size() == 1);
- REQUIRE(species.at("position").count("x") == 1);
- REQUIRE(species.at("position").at("x").getDatatype() == Datatype::DOUBLE);
- REQUIRE(species.at("position").at("x").getDimensionality() == 1);
- REQUIRE(species.at("position").at("x").getExtent() == Extent{4});
- REQUIRE(species.at("positionOffset").size() == 1);
- REQUIRE(species.at("positionOffset").count("x") == 1);
-#if !defined(_MSC_VER)
- REQUIRE(species.at("positionOffset").at("x").getDatatype() == determineDatatype< uint64_t >());
+TEST_CASE( "hdf5_particle_patches", "[serial][hdf5]" )
+{
+ particle_patches("h5");
+}
+#else
+TEST_CASE( "no_serial_hdf5", "[serial][hdf5]" )
+{
+ REQUIRE(true);
+}
#endif
- REQUIRE(isSame(species.at("positionOffset").at("x").getDatatype(), determineDatatype< uint64_t >()));
- REQUIRE(species.at("positionOffset").at("x").getDimensionality() == 1);
- REQUIRE(species.at("positionOffset").at("x").getExtent() == Extent{4});
+#if openPMD_HAVE_ADIOS1
+TEST_CASE( "adios1_dtype_test", "[serial][adios1]" )
+{
+ dtype_test(".bp");
+}
- auto position = species.at("position").at("x").loadChunk< double >({0}, {4});
- auto position_raw = position.get();
- auto positionOffset = species.at("positionOffset").at("x").loadChunk< uint64_t >({0}, {4});
- auto positionOffset_raw = positionOffset.get();
- o.flush();
- for( uint64_t j = 0; j < 4; ++j )
- {
- REQUIRE(position_raw[j] == static_cast< double >(j + (i-1)*4));
- REQUIRE(positionOffset_raw[j] == j + (i-1)*4);
- }
- }
- }
+TEST_CASE( "adios1_write_test", "[serial][adios1]")
+{
+ write_test("bp");
+}
+
+TEST_CASE( "adios1_fileBased_write_empty_test", "[serial][adios1]" )
+{
+ fileBased_write_empty_test("bp");
+}
+
+TEST_CASE( "adios1_fileBased_write_test", "[serial][adios1]" )
+{
+ fileBased_write_test("bp");
}
TEST_CASE( "hzdr_adios1_sample_content_test", "[serial][adios1]" )
@@ -2294,3 +2109,4 @@ TEST_CASE( "no_serial_adios1", "[serial][adios]")
REQUIRE(true);
}
#endif
+