From 6da2f4d9d03aa32cb8f3a4fd148dd35ca3316141 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Franz=20P=C3=B6schel?= Date: Mon, 13 Sep 2021 15:35:10 +0200 Subject: [PATCH 01/23] New error type: BackendConfigSchema --- include/openPMD/Error.hpp | 9 +++++++++ src/Error.cpp | 28 ++++++++++++++++++++++++++++ src/binding/python/Error.cpp | 2 ++ 3 files changed, 39 insertions(+) diff --git a/include/openPMD/Error.hpp b/include/openPMD/Error.hpp index 0afb69998a..eea5cd56ff 100644 --- a/include/openPMD/Error.hpp +++ b/include/openPMD/Error.hpp @@ -3,6 +3,7 @@ #include #include #include +#include namespace openPMD { @@ -62,5 +63,13 @@ namespace error public: WrongAPIUsage( std::string what ); }; + + class BackendConfigSchema : public Error + { + public: + std::vector< std::string > errorLocation; + + BackendConfigSchema( std::vector< std::string >, std::string what ); + }; } } diff --git a/src/Error.cpp b/src/Error.cpp index c91331f52c..e6cf850279 100644 --- a/src/Error.cpp +++ b/src/Error.cpp @@ -1,5 +1,7 @@ #include "openPMD/Error.hpp" +#include + namespace openPMD { const char * Error::what() const noexcept @@ -20,5 +22,31 @@ namespace error : Error( "Wrong API usage: " + what ) { } + + static std::string concatVector( + std::vector< std::string > const & vec, + std::string const & intersperse = "." ) + { + if( vec.empty() ) + { + return ""; + } + std::stringstream res; + res << vec[ 0 ]; + for( size_t i = 1; i < vec.size(); ++i ) + { + res << intersperse << vec[ i ]; + } + return res.str(); + } + + BackendConfigSchema::BackendConfigSchema( + std::vector< std::string > errorLocation_in, std::string what ) + : Error( + "Wrong JSON schema at index '" + + concatVector( errorLocation_in ) + "': " + std::move( what ) ) + , errorLocation( std::move( errorLocation_in ) ) + { + } } } diff --git a/src/binding/python/Error.cpp b/src/binding/python/Error.cpp index 6eb5c18aa3..056faab956 100644 --- a/src/binding/python/Error.cpp +++ b/src/binding/python/Error.cpp @@ -12,6 +12,8 @@ void init_Error( py::module & m ) m, "ErrorOperationUnsupportedInBackend", baseError ); py::register_exception< error::WrongAPIUsage >( m, "ErrorWrongAPIUsage", baseError ); + py::register_exception< error::BackendConfigSchema >( + m, "ErrorBackendConfigSchema", baseError ); #ifndef NDEBUG m.def( "test_throw", []( std::string description ) { From 2ce67dcb51a446453e172a0985bd0fc732732db3 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Franz=20P=C3=B6schel?= Date: Thu, 8 Jul 2021 14:51:42 +0200 Subject: [PATCH 02/23] Update to JSON.hpp auxiliary header (now JSON_internal.hpp) 1. Use new namespace for json stuff 2. Several API fixes, such as const methods 3. Lowercase parsing 4. API additions: warnGlobalUnusedOptions 5. 
Move JSON.hpp -> JSON_internal.hpp --- CMakeLists.txt | 3 + include/openPMD/IO/ADIOS/ADIOS2IOHandler.hpp | 14 +- include/openPMD/IO/HDF5/HDF5IOHandlerImpl.hpp | 4 +- .../auxiliary/{JSON.hpp => JSON_internal.hpp} | 38 +++- include/openPMD/auxiliary/StringManip.hpp | 12 +- src/IO/ADIOS/ADIOS2IOHandler.cpp | 7 +- src/IO/AbstractIOHandlerHelper.cpp | 2 +- src/IO/HDF5/HDF5IOHandler.cpp | 3 +- src/Series.cpp | 8 +- src/auxiliary/JSON.cpp | 194 ++++++++++++++---- test/AuxiliaryTest.cpp | 111 ++++++++++ test/SerialIOTest.cpp | 6 +- 12 files changed, 333 insertions(+), 69 deletions(-) rename include/openPMD/auxiliary/{JSON.hpp => JSON_internal.hpp} (82%) diff --git a/CMakeLists.txt b/CMakeLists.txt index 542601a70d..2cae96727f 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -857,6 +857,9 @@ if(openPMD_BUILD_TESTING) else() target_link_libraries(${testname}Tests PRIVATE CatchMain) endif() + + target_include_directories(${testname}Tests SYSTEM PRIVATE + $) endforeach() endif() diff --git a/include/openPMD/IO/ADIOS/ADIOS2IOHandler.hpp b/include/openPMD/IO/ADIOS/ADIOS2IOHandler.hpp index f35fe9883b..08e4603e84 100644 --- a/include/openPMD/IO/ADIOS/ADIOS2IOHandler.hpp +++ b/include/openPMD/IO/ADIOS/ADIOS2IOHandler.hpp @@ -28,7 +28,7 @@ #include "openPMD/IO/ADIOS/ADIOS2PreloadAttributes.hpp" #include "openPMD/IO/IOTask.hpp" #include "openPMD/IO/InvalidatableFile.hpp" -#include "openPMD/auxiliary/JSON.hpp" +#include "openPMD/auxiliary/JSON_internal.hpp" #include "openPMD/auxiliary/Option.hpp" #include "openPMD/backend/Writable.hpp" #include "openPMD/config.hpp" @@ -277,15 +277,15 @@ class ADIOS2IOHandlerImpl std::vector< ParameterizedOperator > defaultOperators; - auxiliary::TracingJSON m_config; - static auxiliary::TracingJSON nullvalue; + json::TracingJSON m_config; + static json::TracingJSON nullvalue; void init( nlohmann::json config ); template< typename Key > - auxiliary::TracingJSON - config( Key && key, auxiliary::TracingJSON & cfg ) + json::TracingJSON + config( Key && key, json::TracingJSON & cfg ) { if( cfg.json().is_object() && cfg.json().contains( key ) ) { @@ -298,7 +298,7 @@ class ADIOS2IOHandlerImpl } template< typename Key > - auxiliary::TracingJSON + json::TracingJSON config( Key && key ) { return config< Key >( std::forward< Key >( key ), m_config ); @@ -312,7 +312,7 @@ class ADIOS2IOHandlerImpl * operators have been configured */ auxiliary::Option< std::vector< ParameterizedOperator > > - getOperators( auxiliary::TracingJSON config ); + getOperators( json::TracingJSON config ); // use m_config auxiliary::Option< std::vector< ParameterizedOperator > > diff --git a/include/openPMD/IO/HDF5/HDF5IOHandlerImpl.hpp b/include/openPMD/IO/HDF5/HDF5IOHandlerImpl.hpp index 815c57516e..4116c21417 100644 --- a/include/openPMD/IO/HDF5/HDF5IOHandlerImpl.hpp +++ b/include/openPMD/IO/HDF5/HDF5IOHandlerImpl.hpp @@ -24,7 +24,7 @@ #if openPMD_HAVE_HDF5 # include "openPMD/IO/AbstractIOHandlerImpl.hpp" -# include "openPMD/auxiliary/JSON.hpp" +# include "openPMD/auxiliary/JSON_internal.hpp" # include "openPMD/auxiliary/Option.hpp" # include @@ -81,7 +81,7 @@ namespace openPMD hid_t m_H5T_CLONG_DOUBLE; private: - auxiliary::TracingJSON m_config; + json::TracingJSON m_config; std::string m_chunks = "auto"; struct File { diff --git a/include/openPMD/auxiliary/JSON.hpp b/include/openPMD/auxiliary/JSON_internal.hpp similarity index 82% rename from include/openPMD/auxiliary/JSON.hpp rename to include/openPMD/auxiliary/JSON_internal.hpp index 0cbdad175e..d6ef9dccc5 100644 --- a/include/openPMD/auxiliary/JSON.hpp 
+++ b/include/openPMD/auxiliary/JSON_internal.hpp @@ -23,6 +23,9 @@ #include "openPMD/config.hpp" +#include "openPMD/Error.hpp" +#include "openPMD/auxiliary/Option.hpp" + #include #if openPMD_HAVE_MPI @@ -34,7 +37,7 @@ namespace openPMD { -namespace auxiliary +namespace json { /** * @brief Extend nlohmann::json with tracing of which keys have been @@ -75,8 +78,7 @@ namespace auxiliary * * @return nlohmann::json const& */ - nlohmann::json const & - getShadow(); + nlohmann::json const & getShadow() const; /** * @brief Invert the "shadow", i.e. a copy of the original JSON value @@ -84,12 +86,14 @@ namespace auxiliary * * @return nlohmann::json */ - nlohmann::json - invertShadow(); + nlohmann::json invertShadow() const; /** * @brief Declare all keys of the current object read. * + * Rationale: This class does not (yet) trace array types (or anything + * contained in an array). Use this call to explicitly declare + * an array as read. */ void declareFullyRead(); @@ -126,8 +130,8 @@ namespace auxiliary nlohmann::json * m_positionInShadow; bool m_trace = true; - void - invertShadow( nlohmann::json & result, nlohmann::json const & shadow ); + void invertShadow( + nlohmann::json & result, nlohmann::json const & shadow ) const; TracingJSON( std::shared_ptr< nlohmann::json > originalJSON, @@ -165,17 +169,31 @@ namespace auxiliary * If yes, return the file content, if not just parse options directly. * * @param options as a parsed JSON object. + * @param considerFiles If yes, check if `options` refers to a file and read + * from there. */ - nlohmann::json parseOptions( std::string const & options ); + nlohmann::json + parseOptions( std::string const & options, bool considerFiles ); #if openPMD_HAVE_MPI /** * Parallel version of parseOptions(). MPI-collective. */ - nlohmann::json parseOptions( std::string const & options, MPI_Comm comm ); + nlohmann::json parseOptions( + std::string const & options, MPI_Comm comm, bool considerFiles ); #endif -} // namespace auxiliary + nlohmann::json & lowerCase( nlohmann::json & ); + + auxiliary::Option< std::string > asStringDynamic( nlohmann::json const & ); + + auxiliary::Option< std::string > + asLowerCaseStringDynamic( nlohmann::json const & ); + + extern std::vector< std::string > backendKeys; + + void warnGlobalUnusedOptions( TracingJSON const & config ); +} // namespace json } // namespace openPMD diff --git a/include/openPMD/auxiliary/StringManip.hpp b/include/openPMD/auxiliary/StringManip.hpp index c7a00eb634..92b12a058d 100644 --- a/include/openPMD/auxiliary/StringManip.hpp +++ b/include/openPMD/auxiliary/StringManip.hpp @@ -21,12 +21,12 @@ #pragma once #include +#include +#include // std::tolower #include #include #include #include -#include - namespace openPMD { @@ -261,5 +261,13 @@ removeSlashes( std::string s ) return s; } +template< typename S > +S && lowerCase( S && s ) +{ + std::transform( s.begin(), s.end(), s.begin(), []( unsigned char c ) { + return std::tolower( c ); + } ); + return std::forward< S >( s ); +} } // auxiliary } // openPMD diff --git a/src/IO/ADIOS/ADIOS2IOHandler.cpp b/src/IO/ADIOS/ADIOS2IOHandler.cpp index 46f306fd12..e4bcbbc608 100644 --- a/src/IO/ADIOS/ADIOS2IOHandler.cpp +++ b/src/IO/ADIOS/ADIOS2IOHandler.cpp @@ -159,7 +159,7 @@ ADIOS2IOHandlerImpl::init( nlohmann::json cfg ) } auxiliary::Option< std::vector< ADIOS2IOHandlerImpl::ParameterizedOperator > > -ADIOS2IOHandlerImpl::getOperators( auxiliary::TracingJSON cfg ) +ADIOS2IOHandlerImpl::getOperators( json::TracingJSON cfg ) { using ret_t = auxiliary::Option< std::vector< 
ParameterizedOperator > >; std::vector< ParameterizedOperator > res; @@ -353,9 +353,10 @@ void ADIOS2IOHandlerImpl::createDataset( std::vector< ParameterizedOperator > operators; nlohmann::json options = nlohmann::json::parse( parameters.options ); + json::lowerCase( options ); if( options.contains( "adios2" ) ) { - auxiliary::TracingJSON datasetConfig( options[ "adios2" ] ); + json::TracingJSON datasetConfig( options[ "adios2" ] ); auto datasetOperators = getOperators( datasetConfig ); operators = datasetOperators ? std::move( datasetOperators.get() ) @@ -1080,7 +1081,7 @@ ADIOS2IOHandlerImpl::adios2AccessMode( std::string const & fullPath ) } } -auxiliary::TracingJSON ADIOS2IOHandlerImpl::nullvalue = nlohmann::json(); +json::TracingJSON ADIOS2IOHandlerImpl::nullvalue = nlohmann::json(); std::string ADIOS2IOHandlerImpl::filePositionToString( diff --git a/src/IO/AbstractIOHandlerHelper.cpp b/src/IO/AbstractIOHandlerHelper.cpp index 25e2bfcd01..dbd9e49c72 100644 --- a/src/IO/AbstractIOHandlerHelper.cpp +++ b/src/IO/AbstractIOHandlerHelper.cpp @@ -27,7 +27,7 @@ #include "openPMD/IO/HDF5/HDF5IOHandler.hpp" #include "openPMD/IO/HDF5/ParallelHDF5IOHandler.hpp" #include "openPMD/IO/JSON/JSONIOHandler.hpp" -#include "openPMD/auxiliary/JSON.hpp" +#include "openPMD/auxiliary/JSON_internal.hpp" namespace openPMD { diff --git a/src/IO/HDF5/HDF5IOHandler.cpp b/src/IO/HDF5/HDF5IOHandler.cpp index af0e59c4d4..245324bb3d 100644 --- a/src/IO/HDF5/HDF5IOHandler.cpp +++ b/src/IO/HDF5/HDF5IOHandler.cpp @@ -295,6 +295,7 @@ HDF5IOHandlerImpl::createDataset(Writable* writable, name = auxiliary::replace_last(name, "/", ""); auto config = nlohmann::json::parse( parameters.options ); + json::lowerCase( config ); // general bool is_resizable_dataset = false; @@ -307,7 +308,7 @@ HDF5IOHandlerImpl::createDataset(Writable* writable, if( config.contains( "hdf5" ) && config[ "hdf5" ].contains( "dataset" ) ) { - auxiliary::TracingJSON datasetConfig{ + json::TracingJSON datasetConfig{ config[ "hdf5" ][ "dataset" ] }; /* diff --git a/src/Series.cpp b/src/Series.cpp index 91bcc12f48..c65c35477e 100644 --- a/src/Series.cpp +++ b/src/Series.cpp @@ -20,7 +20,7 @@ */ #include "openPMD/auxiliary/Date.hpp" #include "openPMD/auxiliary/Filesystem.hpp" -#include "openPMD/auxiliary/JSON.hpp" +#include "openPMD/auxiliary/JSON_internal.hpp" #include "openPMD/auxiliary/StringManip.hpp" #include "openPMD/IO/AbstractIOHandler.hpp" #include "openPMD/IO/AbstractIOHandlerHelper.hpp" @@ -1531,7 +1531,8 @@ Series::Series( { Attributable::setData( m_series ); iterations = m_series->iterations; - nlohmann::json optionsJson = auxiliary::parseOptions( options, comm ); + nlohmann::json optionsJson = json::parseOptions( + options, comm, /* considerFiles = */ true ); parseJsonOptions( get(), optionsJson ); auto input = parseInput( filepath ); auto handler = createIOHandler( @@ -1547,7 +1548,8 @@ Series::Series( { Attributable::setData( m_series ); iterations = m_series->iterations; - nlohmann::json optionsJson = auxiliary::parseOptions( options ); + nlohmann::json optionsJson = + json::parseOptions( options, /* considerFiles = */ true ); parseJsonOptions( get(), optionsJson ); auto input = parseInput( filepath ); auto handler = createIOHandler( diff --git a/src/auxiliary/JSON.cpp b/src/auxiliary/JSON.cpp index d0e844c525..5732a2e00d 100644 --- a/src/auxiliary/JSON.cpp +++ b/src/auxiliary/JSON.cpp @@ -19,20 +19,24 @@ * If not, see . 
*/ -#include "openPMD/auxiliary/JSON.hpp" +#include "openPMD/auxiliary/JSON_internal.hpp" #include "openPMD/auxiliary/Filesystem.hpp" #include "openPMD/auxiliary/Option.hpp" #include "openPMD/auxiliary/StringManip.hpp" +#include #include // std::isspace #include +#include // std::cerr +#include #include +#include // std::forward #include namespace openPMD { -namespace auxiliary +namespace json { TracingJSON::TracingJSON() : TracingJSON( nlohmann::json() ) { @@ -47,24 +51,20 @@ namespace auxiliary { } - nlohmann::json const & - TracingJSON::getShadow() + nlohmann::json const & TracingJSON::getShadow() const { return *m_positionInShadow; } - nlohmann::json - TracingJSON::invertShadow() + nlohmann::json TracingJSON::invertShadow() const { nlohmann::json inverted = *m_positionInOriginal; invertShadow( inverted, *m_positionInShadow ); return inverted; } - void - TracingJSON::invertShadow( - nlohmann::json & result, - nlohmann::json const & shadow ) + void TracingJSON::invertShadow( + nlohmann::json & result, nlohmann::json const & shadow ) const { if( !shadow.is_object() ) { @@ -123,7 +123,7 @@ namespace auxiliary { std::string trimmed = auxiliary::trim( unparsed, []( char c ) { return std::isspace( c ); } ); - if( trimmed.at( 0 ) == '@' ) + if( !trimmed.empty() && trimmed.at( 0 ) == '@' ) { trimmed = trimmed.substr( 1 ); trimmed = auxiliary::trim( @@ -138,44 +138,164 @@ namespace auxiliary } nlohmann::json - parseOptions( std::string const & options ) + parseOptions( std::string const & options, bool considerFiles ) { - auto filename = extractFilename( options ); - if( filename.has_value() ) - { - std::fstream handle; - handle.open( filename.get(), std::ios_base::in ); - nlohmann::json res; - handle >> res; - if( !handle.good() ) + if( considerFiles ) + { + auto filename = extractFilename( options ); + if( filename.has_value() ) { - throw std::runtime_error( - "Failed reading JSON config from file " + filename.get() + - "." ); + std::fstream handle; + handle.open( filename.get(), std::ios_base::in ); + nlohmann::json res; + handle >> res; + if( !handle.good() ) + { + throw std::runtime_error( + "Failed reading JSON config from file " + + filename.get() + "." 
); + } + lowerCase( res ); + return res; } - return res; } - else + auto res = nlohmann::json::parse( options ); + lowerCase( res ); + return res; + } + +#if openPMD_HAVE_MPI + nlohmann::json parseOptions( + std::string const & options, MPI_Comm comm, bool considerFiles ) + { + if( considerFiles ) { - return nlohmann::json::parse( options ); + auto filename = extractFilename( options ); + if( filename.has_value() ) + { + auto res = nlohmann::json::parse( + auxiliary::collective_file_read( filename.get(), comm ) ); + lowerCase( res ); + return res; + } } + auto res = nlohmann::json::parse( options ); + lowerCase( res ); + return res; } +#endif -#if openPMD_HAVE_MPI - nlohmann::json - parseOptions( std::string const & options, MPI_Comm comm ) + static nlohmann::json & + lowerCase( nlohmann::json & json, std::vector< std::string > & currentPath ) { - auto filename = extractFilename( options ); - if( filename.has_value() ) + if( json.is_object() ) { - return nlohmann::json::parse( - auxiliary::collective_file_read( filename.get(), comm ) ); + auto & val = json.get_ref< nlohmann::json::object_t & >(); + // somekey -> SomeKey + std::map< std::string, std::string > originalKeys; + for( auto & pair : val ) + { + std::string lower = + auxiliary::lowerCase( std::string( pair.first ) ); + auto findEntry = originalKeys.find( lower ); + if( findEntry != originalKeys.end() ) + { + // double entry found + std::vector< std::string > copyCurrentPath{ currentPath }; + copyCurrentPath.push_back( lower ); + throw error::BackendConfigSchema( + std::move( copyCurrentPath ), + "JSON config: duplicate keys." ); + } + originalKeys.emplace_hint( + findEntry, std::move( lower ), pair.first ); + } + + nlohmann::json::object_t newObject; + for( auto & pair : originalKeys ) + { + newObject[ pair.first ] = std::move( val[ pair.second ] ); + } + val = newObject; + + // now recursively + for( auto & pair : val ) + { + currentPath.push_back( pair.first ); + lowerCase( pair.second, currentPath ); + currentPath.pop_back(); + } } - else + else if( json.is_array() ) { - return nlohmann::json::parse( options ); + for( auto & val : json ) + { + lowerCase( val ); + } } + return json; } -#endif -} // namespace auxiliary + + nlohmann::json & lowerCase( nlohmann::json & json ) + { + std::vector< std::string > currentPath; + // that's as deep as our config currently goes, +1 for good measure + currentPath.reserve( 6 ); + return lowerCase( json, currentPath ); + } + + auxiliary::Option< std::string > + asStringDynamic( nlohmann::json const & value ) + { + if( value.is_string() ) + { + return value.get< std::string >(); + } + else if( value.is_number_integer() ) + { + return std::to_string( value.get< long long >() ); + } + else if( value.is_number_float() ) + { + return std::to_string( value.get< long double >() ); + } + else if( value.is_boolean() ) + { + return std::string( value.get< bool >() ? 
"1" : "0" ); + } + return auxiliary::Option< std::string >{}; + } + + auxiliary::Option< std::string > + asLowerCaseStringDynamic( nlohmann::json const & value ) + { + auto maybeString = asStringDynamic( value ); + if( maybeString.has_value() ) + { + auxiliary::lowerCase( maybeString.get() ); + } + return maybeString; + } + + std::vector< std::string > backendKeys{ + "adios1", "adios2", "json", "hdf5" }; + + void warnGlobalUnusedOptions( TracingJSON const & config ) + { + auto shadow = config.invertShadow(); + // The backends are supposed to deal with this + // Only global options here + for( auto const & backendKey : json::backendKeys ) + { + shadow.erase( backendKey ); + } + if( shadow.size() > 0 ) + { + std::cerr + << "[Series] The following parts of the global JSON config " + "remains unused:\n" + << shadow.dump() << std::endl; + } + } +} // namespace json } // namespace openPMD diff --git a/test/AuxiliaryTest.cpp b/test/AuxiliaryTest.cpp index aaadcc9bd0..bf2c6c2c06 100644 --- a/test/AuxiliaryTest.cpp +++ b/test/AuxiliaryTest.cpp @@ -9,6 +9,7 @@ #include "openPMD/backend/Container.hpp" #include "openPMD/auxiliary/DerefDynamicCast.hpp" #include "openPMD/auxiliary/Filesystem.hpp" +#include "openPMD/auxiliary/JSON_internal.hpp" #include "openPMD/auxiliary/Option.hpp" #include "openPMD/auxiliary/StringManip.hpp" #include "openPMD/auxiliary/Variant.hpp" @@ -42,6 +43,116 @@ struct TestHelper : public Attributable } // test } // openPMD +TEST_CASE( "json_parsing", "[auxiliary]" ) +{ + std::string wrongValue = R"END( +{ + "ADIOS2": { + "duplicate key": 1243, + "DUPLICATE KEY": 234 + } +})END"; + REQUIRE_THROWS_WITH( + json::parseOptions( wrongValue, false ), + error::BackendConfigSchema( + { "adios2", "duplicate key" }, "JSON config: duplicate keys." ) + .what() ); + std::string same1 = R"( +{ + "ADIOS2": { + "type": "nullcore", + "engine": { + "type": "bp4", + "usesteps": true + } + } +})"; + std::string same2 = R"( +{ + "adios2": { + "type": "nullcore", + "ENGINE": { + "type": "bp4", + "usesteps": true + } + } +})"; + std::string different = R"( +{ + "adios2": { + "type": "NULLCORE", + "ENGINE": { + "type": "bp4", + "usesteps": true + } + } +})"; + REQUIRE( + json::parseOptions( same1, false ).dump() == + json::parseOptions( same2, false ).dump() ); + // Only keys should be transformed to lower case, values must stay the same + REQUIRE( + json::parseOptions( same1, false ).dump() != + json::parseOptions( different, false ).dump() ); + + std::string upper = R"END( +{ + "ADIOS2": { + "ENGINE": { + "TYPE": "BP3", + "UNUSED": "PARAMETER", + "PARAMETERS": { + "BUFFERGROWTHFACTOR": "2.0", + "PROFILE": "ON" + } + }, + "UNUSED": "AS WELL", + "DATASET": { + "OPERATORS": [ + { + "TYPE": "BLOSC", + "PARAMETERS": { + "CLEVEL": "1", + "DOSHUFFLE": "BLOSC_BITSHUFFLE" + } + } + ] + } + } +} +)END"; + std::string lower = R"END( +{ + "adios2": { + "engine": { + "type": "BP3", + "unused": "PARAMETER", + "parameters": { + "buffergrowthfactor": "2.0", + "profile": "ON" + } + }, + "unused": "AS WELL", + "dataset": { + "operators": [ + { + "type": "BLOSC", + "parameters": { + "clevel": "1", + "doshuffle": "BLOSC_BITSHUFFLE" + } + } + ] + } + } +} +)END"; + nlohmann::json jsonUpper = nlohmann::json::parse( upper ); + nlohmann::json jsonLower = nlohmann::json::parse( lower ); + REQUIRE( jsonUpper.dump() != jsonLower.dump() ); + json::lowerCase( jsonUpper ); + REQUIRE( jsonUpper.dump() == jsonLower.dump() ); +} TEST_CASE( "optional", "[auxiliary]" ) { using namespace auxiliary; diff --git a/test/SerialIOTest.cpp 
b/test/SerialIOTest.cpp index fff67243e2..1a4536562c 100644 --- a/test/SerialIOTest.cpp +++ b/test/SerialIOTest.cpp @@ -3547,7 +3547,7 @@ TEST_CASE( "bp4_steps", "[serial][adios2]" ) { std::string useSteps = R"( { - "adios2": { + "ADIOS2": { "engine": { "type": "bp4", "usesteps": true @@ -3559,7 +3559,7 @@ TEST_CASE( "bp4_steps", "[serial][adios2]" ) { "adios2": { "type": "nullcore", - "engine": { + "ENGINE": { "type": "bp4", "usesteps": true } @@ -3571,7 +3571,7 @@ TEST_CASE( "bp4_steps", "[serial][adios2]" ) "adios2": { "engine": { "type": "bp4", - "usesteps": false + "UseSteps": false } } } From b0b31b9375c592d290138a4f2094ee1ce7b0eba9 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Franz=20P=C3=B6schel?= Date: Thu, 19 Aug 2021 14:48:21 +0200 Subject: [PATCH 03/23] Use TracingJSON from the start already --- include/openPMD/IO/ADIOS/ADIOS2IOHandler.hpp | 10 ++--- include/openPMD/IO/HDF5/HDF5IOHandler.hpp | 5 +-- include/openPMD/IO/HDF5/HDF5IOHandlerImpl.hpp | 2 +- .../openPMD/IO/HDF5/ParallelHDF5IOHandler.hpp | 5 +-- .../IO/HDF5/ParallelHDF5IOHandlerImpl.hpp | 4 +- src/IO/ADIOS/ADIOS2IOHandler.cpp | 18 ++++---- src/IO/AbstractIOHandlerHelper.cpp | 13 +++--- src/IO/HDF5/HDF5IOHandler.cpp | 10 ++--- src/IO/HDF5/ParallelHDF5IOHandler.cpp | 4 +- src/Series.cpp | 41 ++++++++++++------- 10 files changed, 62 insertions(+), 50 deletions(-) diff --git a/include/openPMD/IO/ADIOS/ADIOS2IOHandler.hpp b/include/openPMD/IO/ADIOS/ADIOS2IOHandler.hpp index 08e4603e84..9cdcbf1fec 100644 --- a/include/openPMD/IO/ADIOS/ADIOS2IOHandler.hpp +++ b/include/openPMD/IO/ADIOS/ADIOS2IOHandler.hpp @@ -130,14 +130,14 @@ class ADIOS2IOHandlerImpl ADIOS2IOHandlerImpl( AbstractIOHandler *, MPI_Comm, - nlohmann::json config, + json::TracingJSON config, std::string engineType ); #endif // openPMD_HAVE_MPI explicit ADIOS2IOHandlerImpl( AbstractIOHandler *, - nlohmann::json config, + json::TracingJSON config, std::string engineType ); @@ -281,7 +281,7 @@ class ADIOS2IOHandlerImpl static json::TracingJSON nullvalue; void - init( nlohmann::json config ); + init( json::TracingJSON config ); template< typename Key > json::TracingJSON @@ -1398,7 +1398,7 @@ friend class ADIOS2IOHandlerImpl; std::string path, Access, MPI_Comm, - nlohmann::json options, + json::TracingJSON options, std::string engineType ); #endif @@ -1406,7 +1406,7 @@ friend class ADIOS2IOHandlerImpl; ADIOS2IOHandler( std::string path, Access, - nlohmann::json options, + json::TracingJSON options, std::string engineType ); std::string backendName() const override { return "ADIOS2"; } diff --git a/include/openPMD/IO/HDF5/HDF5IOHandler.hpp b/include/openPMD/IO/HDF5/HDF5IOHandler.hpp index 3dfc1f6e1b..77dbf26c37 100644 --- a/include/openPMD/IO/HDF5/HDF5IOHandler.hpp +++ b/include/openPMD/IO/HDF5/HDF5IOHandler.hpp @@ -20,10 +20,9 @@ */ #pragma once +#include "openPMD/auxiliary/JSON_internal.hpp" #include "openPMD/IO/AbstractIOHandler.hpp" -#include - #include #include #include @@ -36,7 +35,7 @@ class HDF5IOHandlerImpl; class HDF5IOHandler : public AbstractIOHandler { public: - HDF5IOHandler(std::string path, Access, nlohmann::json config); + HDF5IOHandler(std::string path, Access, json::TracingJSON config); ~HDF5IOHandler() override; std::string backendName() const override { return "HDF5"; } diff --git a/include/openPMD/IO/HDF5/HDF5IOHandlerImpl.hpp b/include/openPMD/IO/HDF5/HDF5IOHandlerImpl.hpp index 4116c21417..ba5247284a 100644 --- a/include/openPMD/IO/HDF5/HDF5IOHandlerImpl.hpp +++ b/include/openPMD/IO/HDF5/HDF5IOHandlerImpl.hpp @@ -39,7 +39,7 @@ namespace openPMD 
class HDF5IOHandlerImpl : public AbstractIOHandlerImpl { public: - HDF5IOHandlerImpl(AbstractIOHandler*, nlohmann::json config); + HDF5IOHandlerImpl(AbstractIOHandler*, json::TracingJSON config); ~HDF5IOHandlerImpl() override; void createFile(Writable*, Parameter< Operation::CREATE_FILE > const&) override; diff --git a/include/openPMD/IO/HDF5/ParallelHDF5IOHandler.hpp b/include/openPMD/IO/HDF5/ParallelHDF5IOHandler.hpp index 70cb681f0d..fd115c94b7 100644 --- a/include/openPMD/IO/HDF5/ParallelHDF5IOHandler.hpp +++ b/include/openPMD/IO/HDF5/ParallelHDF5IOHandler.hpp @@ -21,10 +21,9 @@ #pragma once #include "openPMD/config.hpp" +#include "openPMD/auxiliary/JSON_internal.hpp" #include "openPMD/IO/AbstractIOHandler.hpp" -#include - #include #include #include @@ -39,7 +38,7 @@ namespace openPMD public: #if openPMD_HAVE_MPI ParallelHDF5IOHandler( - std::string path, Access, MPI_Comm, nlohmann::json config); + std::string path, Access, MPI_Comm, json::TracingJSON config); #else ParallelHDF5IOHandler(std::string path, Access, nlohmann::json config); #endif diff --git a/include/openPMD/IO/HDF5/ParallelHDF5IOHandlerImpl.hpp b/include/openPMD/IO/HDF5/ParallelHDF5IOHandlerImpl.hpp index d0e18dc85c..843280fc55 100644 --- a/include/openPMD/IO/HDF5/ParallelHDF5IOHandlerImpl.hpp +++ b/include/openPMD/IO/HDF5/ParallelHDF5IOHandlerImpl.hpp @@ -27,7 +27,7 @@ # include # if openPMD_HAVE_HDF5 # include "openPMD/IO/HDF5/HDF5IOHandlerImpl.hpp" -# include +# include "openPMD/auxiliary/JSON_internal.hpp" # endif #endif @@ -39,7 +39,7 @@ namespace openPMD { public: ParallelHDF5IOHandlerImpl( - AbstractIOHandler*, MPI_Comm, nlohmann::json config); + AbstractIOHandler*, MPI_Comm, json::TracingJSON config); ~ParallelHDF5IOHandlerImpl() override; MPI_Comm m_mpiComm; diff --git a/src/IO/ADIOS/ADIOS2IOHandler.cpp b/src/IO/ADIOS/ADIOS2IOHandler.cpp index e4bcbbc608..53c868cd1d 100644 --- a/src/IO/ADIOS/ADIOS2IOHandler.cpp +++ b/src/IO/ADIOS/ADIOS2IOHandler.cpp @@ -67,7 +67,7 @@ namespace openPMD ADIOS2IOHandlerImpl::ADIOS2IOHandlerImpl( AbstractIOHandler * handler, MPI_Comm communicator, - nlohmann::json cfg, + json::TracingJSON cfg, std::string engineType ) : AbstractIOHandlerImplCommon( handler ) , m_ADIOS{ communicator, ADIOS2_DEBUG_MODE } @@ -80,7 +80,7 @@ ADIOS2IOHandlerImpl::ADIOS2IOHandlerImpl( ADIOS2IOHandlerImpl::ADIOS2IOHandlerImpl( AbstractIOHandler * handler, - nlohmann::json cfg, + json::TracingJSON cfg, std::string engineType ) : AbstractIOHandlerImplCommon( handler ) , m_ADIOS{ ADIOS2_DEBUG_MODE } @@ -120,11 +120,11 @@ ADIOS2IOHandlerImpl::~ADIOS2IOHandlerImpl() } void -ADIOS2IOHandlerImpl::init( nlohmann::json cfg ) +ADIOS2IOHandlerImpl::init( json::TracingJSON cfg ) { - if( cfg.contains( "adios2" ) ) + if( cfg.json().contains( "adios2" ) ) { - m_config = std::move( cfg[ "adios2" ] ); + m_config = cfg[ "adios2" ]; if( m_config.json().contains( "schema" ) ) { @@ -2915,7 +2915,7 @@ ADIOS2IOHandler::ADIOS2IOHandler( std::string path, openPMD::Access at, MPI_Comm comm, - nlohmann::json options, + json::TracingJSON options, std::string engineType ) : AbstractIOHandler( std::move( path ), at, comm ) , m_impl{ this, comm, std::move( options ), std::move( engineType ) } @@ -2927,7 +2927,7 @@ ADIOS2IOHandler::ADIOS2IOHandler( ADIOS2IOHandler::ADIOS2IOHandler( std::string path, Access at, - nlohmann::json options, + json::TracingJSON options, std::string engineType ) : AbstractIOHandler( std::move( path ), at ) , m_impl{ this, std::move( options ), std::move( engineType ) } @@ -2947,7 +2947,7 @@ 
ADIOS2IOHandler::ADIOS2IOHandler( std::string path, Access at, MPI_Comm comm, - nlohmann::json, + json::TracingJSON, std::string ) : AbstractIOHandler( std::move( path ), at, comm ) { @@ -2958,7 +2958,7 @@ ADIOS2IOHandler::ADIOS2IOHandler( ADIOS2IOHandler::ADIOS2IOHandler( std::string path, Access at, - nlohmann::json, + json::TracingJSON, std::string ) : AbstractIOHandler( std::move( path ), at ) { diff --git a/src/IO/AbstractIOHandlerHelper.cpp b/src/IO/AbstractIOHandlerHelper.cpp index dbd9e49c72..43d2b331c6 100644 --- a/src/IO/AbstractIOHandlerHelper.cpp +++ b/src/IO/AbstractIOHandlerHelper.cpp @@ -34,12 +34,12 @@ namespace openPMD #if openPMD_HAVE_MPI template<> std::shared_ptr< AbstractIOHandler > - createIOHandler< nlohmann::json >( + createIOHandler< json::TracingJSON >( std::string path, Access access, Format format, MPI_Comm comm, - nlohmann::json options ) + json::TracingJSON options ) { (void) options; switch( format ) @@ -71,11 +71,11 @@ namespace openPMD template<> std::shared_ptr< AbstractIOHandler > - createIOHandler< nlohmann::json >( + createIOHandler< json::TracingJSON >( std::string path, Access access, Format format, - nlohmann::json options ) + json::TracingJSON options ) { (void) options; switch( format ) @@ -112,6 +112,9 @@ namespace openPMD createIOHandler( std::string path, Access access, Format format ) { return createIOHandler( - std::move( path ), access, format, nlohmann::json::object() ); + std::move( path ), + access, + format, + json::TracingJSON( nlohmann::json::object() )); } } // namespace openPMD diff --git a/src/IO/HDF5/HDF5IOHandler.cpp b/src/IO/HDF5/HDF5IOHandler.cpp index 245324bb3d..23168b8121 100644 --- a/src/IO/HDF5/HDF5IOHandler.cpp +++ b/src/IO/HDF5/HDF5IOHandler.cpp @@ -53,7 +53,7 @@ namespace openPMD # endif HDF5IOHandlerImpl::HDF5IOHandlerImpl( - AbstractIOHandler* handler, nlohmann::json config) + AbstractIOHandler* handler, json::TracingJSON config) : AbstractIOHandlerImpl(handler), m_datasetTransferProperty{H5P_DEFAULT}, m_fileAccessProperty{H5P_DEFAULT}, @@ -87,9 +87,9 @@ HDF5IOHandlerImpl::HDF5IOHandlerImpl( m_chunks = auxiliary::getEnvString( "OPENPMD_HDF5_CHUNKS", "auto" ); // JSON option can overwrite env option: - if( config.contains( "hdf5" ) ) + if( config.json().contains( "hdf5" ) ) { - m_config = std::move( config[ "hdf5" ] ); + m_config = config[ "hdf5" ]; // check for global dataset configs if( m_config.json().contains( "dataset" ) ) @@ -2019,7 +2019,7 @@ HDF5IOHandlerImpl::getFile( Writable * writable ) #endif #if openPMD_HAVE_HDF5 -HDF5IOHandler::HDF5IOHandler(std::string path, Access at, nlohmann::json config) +HDF5IOHandler::HDF5IOHandler(std::string path, Access at, json::TracingJSON config) : AbstractIOHandler(std::move(path), at), m_impl{new HDF5IOHandlerImpl(this, std::move(config))} { } @@ -2032,7 +2032,7 @@ HDF5IOHandler::flush() return m_impl->flush(); } #else -HDF5IOHandler::HDF5IOHandler(std::string path, Access at, nlohmann::json /* config */) +HDF5IOHandler::HDF5IOHandler(std::string path, Access at, json::TracingJSON /* config */) : AbstractIOHandler(std::move(path), at) { throw std::runtime_error("openPMD-api built without HDF5 support"); diff --git a/src/IO/HDF5/ParallelHDF5IOHandler.cpp b/src/IO/HDF5/ParallelHDF5IOHandler.cpp index 19c9621393..fde421d8d6 100644 --- a/src/IO/HDF5/ParallelHDF5IOHandler.cpp +++ b/src/IO/HDF5/ParallelHDF5IOHandler.cpp @@ -40,7 +40,7 @@ namespace openPMD # endif ParallelHDF5IOHandler::ParallelHDF5IOHandler( - std::string path, Access at, MPI_Comm comm, nlohmann::json config ) + 
std::string path, Access at, MPI_Comm comm, json::TracingJSON config ) : AbstractIOHandler(std::move(path), at, comm), m_impl{new ParallelHDF5IOHandlerImpl(this, comm, std::move(config))} { } @@ -54,7 +54,7 @@ ParallelHDF5IOHandler::flush() } ParallelHDF5IOHandlerImpl::ParallelHDF5IOHandlerImpl( - AbstractIOHandler* handler, MPI_Comm comm, nlohmann::json config ) + AbstractIOHandler* handler, MPI_Comm comm, json::TracingJSON config ) : HDF5IOHandlerImpl{handler, std::move(config)}, m_mpiComm{comm}, m_mpiInfo{MPI_INFO_NULL} /* MPI 3.0+: MPI_INFO_ENV */ diff --git a/src/Series.cpp b/src/Series.cpp index c65c35477e..4aa411aa44 100644 --- a/src/Series.cpp +++ b/src/Series.cpp @@ -1458,21 +1458,32 @@ void Series::openIteration( uint64_t index, Iteration iteration ) namespace { -template< typename T > -void getJsonOption( - nlohmann::json const & config, std::string const & key, T & dest ) -{ - if( config.contains( key ) ) + template< typename From, typename Dest = From > + void getJsonOption( + json::TracingJSON & config, std::string const & key, Dest & dest ) { - dest = config.at( key ).get< T >(); + if( config.json().contains( key ) ) + { + dest = config[ key ].json().get< From >(); + } } -} -void parseJsonOptions( - internal::SeriesData & series, nlohmann::json const & options ) -{ - getJsonOption( options, "defer_iteration_parsing", series.m_parseLazily ); -} + template< typename Dest = std::string > + void getJsonOptionLowerCase( + json::TracingJSON & config, std::string const & key, Dest & dest ) + { + if( config.json().contains( key ) ) + { + dest = json::asLowerCaseStringDynamic( config[ key ].json() ); + } + } + + void parseJsonOptions( + internal::SeriesData & series, json::TracingJSON & options ) + { + getJsonOption< bool >( + options, "defer_iteration_parsing", series.m_parseLazily ); + } } namespace internal @@ -1531,7 +1542,7 @@ Series::Series( { Attributable::setData( m_series ); iterations = m_series->iterations; - nlohmann::json optionsJson = json::parseOptions( + json::TracingJSON optionsJson = json::parseOptions( options, comm, /* considerFiles = */ true ); parseJsonOptions( get(), optionsJson ); auto input = parseInput( filepath ); @@ -1548,8 +1559,8 @@ Series::Series( { Attributable::setData( m_series ); iterations = m_series->iterations; - nlohmann::json optionsJson = - json::parseOptions( options, /* considerFiles = */ true ); + json::TracingJSON optionsJson = json::parseOptions( + options, /* considerFiles = */ true ); parseJsonOptions( get(), optionsJson ); auto input = parseInput( filepath ); auto handler = createIOHandler( From af40454016e343f5876f0d858fcbc14bb28d2b87 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Franz=20P=C3=B6schel?= Date: Mon, 13 Sep 2021 15:59:49 +0200 Subject: [PATCH 04/23] Use JSON to set dataset transform in ADIOS1 --- CMakeLists.txt | 5 ++ include/openPMD/IO/ADIOS/ADIOS1IOHandler.hpp | 5 +- .../openPMD/IO/ADIOS/ADIOS1IOHandlerImpl.hpp | 2 +- .../IO/ADIOS/CommonADIOS1IOHandler.hpp | 5 ++ .../IO/ADIOS/ParallelADIOS1IOHandler.hpp | 5 +- .../IO/ADIOS/ParallelADIOS1IOHandlerImpl.hpp | 3 +- src/IO/ADIOS/ADIOS1IOHandler.cpp | 12 +-- src/IO/ADIOS/CommonADIOS1IOHandler.cpp | 75 +++++++++++++++++-- src/IO/ADIOS/ParallelADIOS1IOHandler.cpp | 9 ++- src/IO/AbstractIOHandlerHelper.cpp | 6 +- 10 files changed, 107 insertions(+), 20 deletions(-) diff --git a/CMakeLists.txt b/CMakeLists.txt index 2cae96727f..443482ccd4 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -581,6 +581,11 @@ if(openPMD_HAVE_ADIOS1) target_compile_definitions(openPMD.ADIOS1.Parallel 
PRIVATE openPMD_HAVE_MPI=0) endif() + target_include_directories(openPMD.ADIOS1.Serial SYSTEM PRIVATE + $) + target_include_directories(openPMD.ADIOS1.Parallel SYSTEM PRIVATE + $) + set_target_properties(openPMD.ADIOS1.Serial PROPERTIES POSITION_INDEPENDENT_CODE ON CXX_VISIBILITY_PRESET hidden diff --git a/include/openPMD/IO/ADIOS/ADIOS1IOHandler.hpp b/include/openPMD/IO/ADIOS/ADIOS1IOHandler.hpp index d80d6ddf24..4b2ff08e4a 100644 --- a/include/openPMD/IO/ADIOS/ADIOS1IOHandler.hpp +++ b/include/openPMD/IO/ADIOS/ADIOS1IOHandler.hpp @@ -22,6 +22,7 @@ #include "openPMD/config.hpp" #include "openPMD/auxiliary/Export.hpp" +#include "openPMD/auxiliary/JSON_internal.hpp" #include "openPMD/IO/AbstractIOHandler.hpp" #include @@ -42,7 +43,7 @@ namespace openPMD friend class ADIOS1IOHandlerImpl; public: - ADIOS1IOHandler(std::string path, Access); + ADIOS1IOHandler(std::string path, Access, json::TracingJSON ); ~ADIOS1IOHandler() override; std::string backendName() const override { return "ADIOS1"; } @@ -61,7 +62,7 @@ namespace openPMD friend class ADIOS1IOHandlerImpl; public: - ADIOS1IOHandler(std::string path, Access); + ADIOS1IOHandler(std::string path, Access, json::TracingJSON ); ~ADIOS1IOHandler() override; std::string backendName() const override { return "DUMMY_ADIOS1"; } diff --git a/include/openPMD/IO/ADIOS/ADIOS1IOHandlerImpl.hpp b/include/openPMD/IO/ADIOS/ADIOS1IOHandlerImpl.hpp index 4b06b83d6a..9b7d1e48a4 100644 --- a/include/openPMD/IO/ADIOS/ADIOS1IOHandlerImpl.hpp +++ b/include/openPMD/IO/ADIOS/ADIOS1IOHandlerImpl.hpp @@ -46,7 +46,7 @@ namespace openPMD private: using Base_t = CommonADIOS1IOHandlerImpl< ADIOS1IOHandlerImpl >; public: - ADIOS1IOHandlerImpl(AbstractIOHandler*); + ADIOS1IOHandlerImpl(AbstractIOHandler*, json::TracingJSON); virtual ~ADIOS1IOHandlerImpl(); virtual void init(); diff --git a/include/openPMD/IO/ADIOS/CommonADIOS1IOHandler.hpp b/include/openPMD/IO/ADIOS/CommonADIOS1IOHandler.hpp index 2dd096fc3f..8d102247ca 100644 --- a/include/openPMD/IO/ADIOS/CommonADIOS1IOHandler.hpp +++ b/include/openPMD/IO/ADIOS/CommonADIOS1IOHandler.hpp @@ -27,6 +27,7 @@ #include "openPMD/IO/AbstractIOHandler.hpp" #include "openPMD/auxiliary/Filesystem.hpp" #include "openPMD/auxiliary/DerefDynamicCast.hpp" +#include "openPMD/auxiliary/JSON_internal.hpp" #include "openPMD/auxiliary/Memory.hpp" #include "openPMD/auxiliary/StringManip.hpp" #include "openPMD/IO/AbstractIOHandlerImpl.hpp" @@ -89,11 +90,15 @@ namespace openPMD std::unordered_map< std::shared_ptr< std::string >, ADIOS_FILE* > m_openReadFileHandles; std::unordered_map< ADIOS_FILE*, std::vector< ADIOS_SELECTION* > > m_scheduledReads; std::unordered_map< int64_t, std::unordered_map< std::string, Attribute > > m_attributeWrites; + // config options + std::string m_defaultTransform; /** * Call this function to get adios file id for a Writable. Will create one if does not exist * @return returns an adios file id. 
*/ int64_t GetFileHandle(Writable*); + + void initJson( json::TracingJSON ); }; // ParallelADIOS1IOHandlerImpl } // openPMD diff --git a/include/openPMD/IO/ADIOS/ParallelADIOS1IOHandler.hpp b/include/openPMD/IO/ADIOS/ParallelADIOS1IOHandler.hpp index 817b6f56df..9eeaefcce0 100644 --- a/include/openPMD/IO/ADIOS/ParallelADIOS1IOHandler.hpp +++ b/include/openPMD/IO/ADIOS/ParallelADIOS1IOHandler.hpp @@ -22,6 +22,7 @@ #include "openPMD/config.hpp" #include "openPMD/auxiliary/Export.hpp" +#include "openPMD/auxiliary/JSON_internal.hpp" #include "openPMD/IO/AbstractIOHandler.hpp" #include @@ -42,9 +43,9 @@ namespace openPMD public: # if openPMD_HAVE_MPI - ParallelADIOS1IOHandler(std::string path, Access, MPI_Comm); + ParallelADIOS1IOHandler(std::string path, Access, json::TracingJSON , MPI_Comm); # else - ParallelADIOS1IOHandler(std::string path, Access); + ParallelADIOS1IOHandler(std::string path, Access, json::TracingJSON); # endif ~ParallelADIOS1IOHandler() override; diff --git a/include/openPMD/IO/ADIOS/ParallelADIOS1IOHandlerImpl.hpp b/include/openPMD/IO/ADIOS/ParallelADIOS1IOHandlerImpl.hpp index f1c2a6eb0e..27ad5fae3b 100644 --- a/include/openPMD/IO/ADIOS/ParallelADIOS1IOHandlerImpl.hpp +++ b/include/openPMD/IO/ADIOS/ParallelADIOS1IOHandlerImpl.hpp @@ -22,6 +22,7 @@ #include "openPMD/config.hpp" #include "openPMD/auxiliary/Export.hpp" +#include "openPMD/auxiliary/JSON_internal.hpp" #include "openPMD/IO/AbstractIOHandler.hpp" #if openPMD_HAVE_ADIOS1 && openPMD_HAVE_MPI @@ -46,7 +47,7 @@ namespace openPMD private: using Base_t = CommonADIOS1IOHandlerImpl< ParallelADIOS1IOHandlerImpl >; public: - ParallelADIOS1IOHandlerImpl(AbstractIOHandler*, MPI_Comm); + ParallelADIOS1IOHandlerImpl(AbstractIOHandler*, json::TracingJSON, MPI_Comm); virtual ~ParallelADIOS1IOHandlerImpl(); virtual void init(); diff --git a/src/IO/ADIOS/ADIOS1IOHandler.cpp b/src/IO/ADIOS/ADIOS1IOHandler.cpp index 4134bcd1df..e6bce3835d 100644 --- a/src/IO/ADIOS/ADIOS1IOHandler.cpp +++ b/src/IO/ADIOS/ADIOS1IOHandler.cpp @@ -43,9 +43,11 @@ namespace openPMD # define VERIFY(CONDITION, TEXT) do{ (void)sizeof(CONDITION); } while( 0 ) # endif -ADIOS1IOHandlerImpl::ADIOS1IOHandlerImpl(AbstractIOHandler* handler) +ADIOS1IOHandlerImpl::ADIOS1IOHandlerImpl(AbstractIOHandler* handler, json::TracingJSON json) : Base_t(handler) -{ } +{ + initJson( std::move( json ) ); +} ADIOS1IOHandlerImpl::~ADIOS1IOHandlerImpl() { @@ -220,9 +222,9 @@ ADIOS1IOHandlerImpl::init() #endif #if openPMD_HAVE_ADIOS1 -ADIOS1IOHandler::ADIOS1IOHandler(std::string path, Access at) +ADIOS1IOHandler::ADIOS1IOHandler(std::string path, Access at, json::TracingJSON json) : AbstractIOHandler(std::move(path), at), - m_impl{new ADIOS1IOHandlerImpl(this)} + m_impl{new ADIOS1IOHandlerImpl(this, std::move(json))} { m_impl->init(); } @@ -317,7 +319,7 @@ ADIOS1IOHandlerImpl::initialize_group(std::string const &name) } #else -ADIOS1IOHandler::ADIOS1IOHandler(std::string path, Access at) +ADIOS1IOHandler::ADIOS1IOHandler(std::string path, Access at, json::TracingJSON) : AbstractIOHandler(std::move(path), at) { throw std::runtime_error("openPMD-api built without ADIOS1 support"); diff --git a/src/IO/ADIOS/CommonADIOS1IOHandler.cpp b/src/IO/ADIOS/CommonADIOS1IOHandler.cpp index aeef40e0d8..e9938ed139 100644 --- a/src/IO/ADIOS/CommonADIOS1IOHandler.cpp +++ b/src/IO/ADIOS/CommonADIOS1IOHandler.cpp @@ -23,6 +23,8 @@ #if openPMD_HAVE_ADIOS1 +#include "openPMD/auxiliary/JSON_internal.hpp" +#include "openPMD/Error.hpp" #include "openPMD/IO/ADIOS/ADIOS1IOHandlerImpl.hpp" #include 
"openPMD/IO/ADIOS/ParallelADIOS1IOHandlerImpl.hpp" @@ -466,6 +468,33 @@ CommonADIOS1IOHandlerImpl< ChildClass >::createPath(Writable* writable, } } +static auxiliary::Option< std::string > datasetTransform( + json::TracingJSON config ) +{ + using ret_t = auxiliary::Option< std::string >; + if( !config.json().contains( "dataset" ) ) + { + return ret_t{}; + } + config = config[ "dataset" ]; + if( !config.json().contains( "transform" ) ) + { + return ret_t{}; + } + config = config[ "transform" ]; + auto maybeRes = json::asStringDynamic( config.json() ); + if( maybeRes.has_value() ) + { + return std::move( maybeRes.get() ); + } + else + { + throw error::BackendConfigSchema( + { "adios1", "dataset", "transform" }, + "Key must convertible to type string." ); + } +} + template< typename ChildClass > void CommonADIOS1IOHandlerImpl< ChildClass >::createDataset(Writable* writable, @@ -519,14 +548,35 @@ CommonADIOS1IOHandlerImpl< ChildClass >::createDataset(Writable* writable, chunkOffsetParam.c_str()); VERIFY(id != 0, "[ADIOS1] Internal error: Failed to define ADIOS variable during Dataset creation"); - if( !parameters.compression.empty() ) - std::cerr << "Custom compression not compatible with ADIOS1 backend. Use transform instead." - << std::endl; + std::string transform = ""; + { + json::TracingJSON options = json::parseOptions( + parameters.options, /* considerFiles = */ false ); + auto maybeTransform = datasetTransform( options ); + if( maybeTransform.has_value() ) + { + transform = maybeTransform.get(); + } + + auto shadow = options.invertShadow(); + if( shadow.size() > 0 ) + { + std::cerr << "Warning: parts of the JSON configuration for " + "ADIOS1 dataset '" + << name << "' remain unused:\n" + << shadow << std::endl; + } + } + // Fallback: global option + if( transform.empty() ) + { + transform = m_defaultTransform; + } - if( !parameters.transform.empty() ) + if( !transform.empty() ) { int status; - status = adios_set_transform(id, parameters.transform.c_str()); + status = adios_set_transform(id, transform.c_str()); VERIFY(status == err_no_error, "[ADIOS1] Internal error: Failed to set ADIOS transform during Dataset cretaion"); } @@ -1699,6 +1749,21 @@ CommonADIOS1IOHandlerImpl< ChildClass >::listAttributes(Writable* writable, } } +template< typename ChildClass > +void CommonADIOS1IOHandlerImpl< ChildClass >::initJson( + json::TracingJSON config ) +{ + if( !config.json().contains( "adios1" ) ) + { + return; + } + auto maybeTransform = datasetTransform( config[ "adios1" ] ); + if( maybeTransform.has_value() ) + { + m_defaultTransform = std::move( maybeTransform.get() ); + } +} + template class CommonADIOS1IOHandlerImpl< ADIOS1IOHandlerImpl >; #if openPMD_HAVE_MPI template class CommonADIOS1IOHandlerImpl< ParallelADIOS1IOHandlerImpl >; diff --git a/src/IO/ADIOS/ParallelADIOS1IOHandler.cpp b/src/IO/ADIOS/ParallelADIOS1IOHandler.cpp index 91d6fcc63e..bfcc13675d 100644 --- a/src/IO/ADIOS/ParallelADIOS1IOHandler.cpp +++ b/src/IO/ADIOS/ParallelADIOS1IOHandler.cpp @@ -41,6 +41,7 @@ namespace openPMD # endif ParallelADIOS1IOHandlerImpl::ParallelADIOS1IOHandlerImpl(AbstractIOHandler* handler, + json::TracingJSON json, MPI_Comm comm) : Base_t{handler}, m_mpiInfo{MPI_INFO_NULL} @@ -48,6 +49,7 @@ ParallelADIOS1IOHandlerImpl::ParallelADIOS1IOHandlerImpl(AbstractIOHandler* hand int status = MPI_SUCCESS; status = MPI_Comm_dup(comm, &m_mpiComm); VERIFY(status == MPI_SUCCESS, "[ADIOS1] Internal error: Failed to duplicate MPI communicator"); + initJson( std::move( json ) ); } 
ParallelADIOS1IOHandlerImpl::~ParallelADIOS1IOHandlerImpl() @@ -240,9 +242,10 @@ ParallelADIOS1IOHandlerImpl::init() ParallelADIOS1IOHandler::ParallelADIOS1IOHandler(std::string path, Access at, + json::TracingJSON json, MPI_Comm comm) : AbstractIOHandler(std::move(path), at, comm), - m_impl{new ParallelADIOS1IOHandlerImpl(this, comm)} + m_impl{new ParallelADIOS1IOHandlerImpl(this, std::move(json), comm)} { m_impl->init(); } @@ -347,6 +350,7 @@ ParallelADIOS1IOHandlerImpl::initialize_group(std::string const &name) # if openPMD_HAVE_MPI ParallelADIOS1IOHandler::ParallelADIOS1IOHandler(std::string path, Access at, + json::TracingJSON, MPI_Comm comm) : AbstractIOHandler(std::move(path), at, comm) { @@ -354,7 +358,8 @@ ParallelADIOS1IOHandler::ParallelADIOS1IOHandler(std::string path, } # else ParallelADIOS1IOHandler::ParallelADIOS1IOHandler(std::string path, - Access at) + Access at, + json::TracingJSON) : AbstractIOHandler(std::move(path), at) { throw std::runtime_error("openPMD-api built without parallel ADIOS1 support"); diff --git a/src/IO/AbstractIOHandlerHelper.cpp b/src/IO/AbstractIOHandlerHelper.cpp index 43d2b331c6..f4b542254a 100644 --- a/src/IO/AbstractIOHandlerHelper.cpp +++ b/src/IO/AbstractIOHandlerHelper.cpp @@ -49,7 +49,8 @@ namespace openPMD path, access, comm, std::move( options ) ); case Format::ADIOS1: # if openPMD_HAVE_ADIOS1 - return std::make_shared< ParallelADIOS1IOHandler >( path, access, comm ); + return std::make_shared< ParallelADIOS1IOHandler >( + path, access, std::move( options ), comm ); # else throw std::runtime_error("openPMD-api built without ADIOS1 support"); # endif @@ -85,7 +86,8 @@ namespace openPMD path, access, std::move( options ) ); case Format::ADIOS1: #if openPMD_HAVE_ADIOS1 - return std::make_shared< ADIOS1IOHandler >( path, access ); + return std::make_shared< ADIOS1IOHandler >( + path, access, std::move( options ) ); #else throw std::runtime_error("openPMD-api built without ADIOS1 support"); #endif From 65624fe1e3ee7f87b3431655ee4fc18111a0bd32 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Franz=20P=C3=B6schel?= Date: Fri, 3 Sep 2021 16:32:54 +0200 Subject: [PATCH 05/23] Remove Dataset::compression, ::transform and ::chunksize --- examples/7_extended_write_serial.cpp | 23 +++++++++- examples/7_extended_write_serial.py | 23 +++++++++- examples/8_benchmark_parallel.cpp | 4 +- include/openPMD/Dataset.hpp | 6 --- include/openPMD/IO/IOTask.hpp | 6 +-- include/openPMD/RecordComponent.tpp | 3 -- .../openPMD/benchmark/mpi/MPIBenchmark.hpp | 44 +------------------ .../benchmark/mpi/MPIBenchmarkReport.hpp | 18 -------- src/Dataset.cpp | 38 ---------------- src/IO/ADIOS/ADIOS2IOHandler.cpp | 11 ----- src/IO/HDF5/HDF5IOHandler.cpp | 7 +-- src/RecordComponent.cpp | 3 -- src/backend/PatchRecordComponent.cpp | 3 -- src/binding/python/Dataset.cpp | 6 --- 14 files changed, 48 insertions(+), 147 deletions(-) diff --git a/examples/7_extended_write_serial.cpp b/examples/7_extended_write_serial.cpp index 5fa62add1e..02831dc11d 100644 --- a/examples/7_extended_write_serial.cpp +++ b/examples/7_extended_write_serial.cpp @@ -92,8 +92,27 @@ main() // this describes the datatype and shape of data as it should be written to disk io::Datatype dtype = io::determineDatatype(partial_mesh); auto d = io::Dataset(dtype, io::Extent{2, 5}); - d.setCompression("zlib", 9); - d.setCustomTransform("blosc:compressor=zlib,shuffle=bit,lvl=1;nometa"); + std::string datasetConfig = R"END( +{ + "adios1": { + "dataset": { + "transform": "blosc:compressor=zlib,shuffle=bit,lvl=1;nometa" + } + }, + 
"adios2": { + "dataset": { + "operators": [ + { + "type": "zlib", + "parameters": { + "clevel": 9 + } + } + ] + } + } +})END"; + d.options = datasetConfig; mesh["x"].resetDataset(d); io::ParticleSpecies electrons = cur_it.particles["electrons"]; diff --git a/examples/7_extended_write_serial.py b/examples/7_extended_write_serial.py index dc23d8d427..6f939f9c75 100755 --- a/examples/7_extended_write_serial.py +++ b/examples/7_extended_write_serial.py @@ -102,8 +102,27 @@ # component this describes the datatype and shape of data as it should be # written to disk d = Dataset(partial_mesh.dtype, extent=[2, 5]) - d.set_compression("zlib", 9) - d.set_custom_transform("blosc:compressor=zlib,shuffle=bit,lvl=1;nometa") + dataset_config = """ +{ + "adios1": { + "dataset": { + "transform": "blosc:compressor=zlib,shuffle=bit,lvl=1;nometa" + } + }, + "adios2": { + "dataset": { + "operators": [ + { + "type": "zlib", + "parameters": { + "clevel": 9 + } + } + ] + } + } +}""" + d.options = dataset_config mesh["x"].reset_dataset(d) electrons = cur_it.particles["electrons"] diff --git a/examples/8_benchmark_parallel.cpp b/examples/8_benchmark_parallel.cpp index 235576f3f0..c1fd68c7ae 100644 --- a/examples/8_benchmark_parallel.cpp +++ b/examples/8_benchmark_parallel.cpp @@ -148,10 +148,10 @@ int main( // * The number of iterations. Effectively, the benchmark will be repeated for this many // times. #if openPMD_HAVE_ADIOS1 || openPMD_HAVE_ADIOS2 - benchmark.addConfiguration("", 0, "bp", dt, 10); + benchmark.addConfiguration("bp", dt, 10); #endif #if openPMD_HAVE_HDF5 - benchmark.addConfiguration("", 0, "h5", dt, 10); + benchmark.addConfiguration("h5", dt, 10); #endif // Execute all previously configured benchmarks. Will return a MPIBenchmarkReport object diff --git a/include/openPMD/Dataset.hpp b/include/openPMD/Dataset.hpp index 88fa5a4e49..bfe41d6b63 100644 --- a/include/openPMD/Dataset.hpp +++ b/include/openPMD/Dataset.hpp @@ -49,16 +49,10 @@ class Dataset Dataset( Extent ); Dataset& extend(Extent newExtent); - Dataset& setChunkSize(Extent const&); - Dataset& setCompression(std::string const&, uint8_t const); - Dataset& setCustomTransform(std::string const&); Extent extent; Datatype dtype; uint8_t rank; - Extent chunkSize; - std::string compression; - std::string transform; std::string options = "{}"; //!< backend-dependent JSON configuration }; } // namespace openPMD diff --git a/include/openPMD/IO/IOTask.hpp b/include/openPMD/IO/IOTask.hpp index 6fb4be636d..f556e08c81 100644 --- a/include/openPMD/IO/IOTask.hpp +++ b/include/openPMD/IO/IOTask.hpp @@ -268,8 +268,7 @@ struct OPENPMDAPI_EXPORT Parameter< Operation::CREATE_DATASET > : public Abstrac Parameter() = default; Parameter(Parameter const & p) : AbstractParameter(), name(p.name), extent(p.extent), dtype(p.dtype), - chunkSize(p.chunkSize), compression(p.compression), - transform(p.transform), options(p.options) {} + options(p.options) {} std::unique_ptr< AbstractParameter > clone() const override @@ -281,9 +280,6 @@ struct OPENPMDAPI_EXPORT Parameter< Operation::CREATE_DATASET > : public Abstrac std::string name = ""; Extent extent = {}; Datatype dtype = Datatype::UNDEFINED; - Extent chunkSize = {}; - std::string compression = ""; - std::string transform = ""; std::string options = "{}"; }; diff --git a/include/openPMD/RecordComponent.tpp b/include/openPMD/RecordComponent.tpp index 89a7100b56..e73b59a5ae 100644 --- a/include/openPMD/RecordComponent.tpp +++ b/include/openPMD/RecordComponent.tpp @@ -298,9 +298,6 @@ RecordComponent::storeChunk( Offset o, 
Extent e, F && createBuffer ) dCreate.name = rc.m_name; dCreate.extent = getExtent(); dCreate.dtype = getDatatype(); - dCreate.chunkSize = rc.m_dataset.chunkSize; - dCreate.compression = rc.m_dataset.compression; - dCreate.transform = rc.m_dataset.transform; dCreate.options = rc.m_dataset.options; IOHandler()->enqueue(IOTask(this, dCreate)); } diff --git a/include/openPMD/benchmark/mpi/MPIBenchmark.hpp b/include/openPMD/benchmark/mpi/MPIBenchmark.hpp index 0f789d57c5..cea5f00726 100644 --- a/include/openPMD/benchmark/mpi/MPIBenchmark.hpp +++ b/include/openPMD/benchmark/mpi/MPIBenchmark.hpp @@ -93,9 +93,8 @@ namespace openPMD MPI_Comm comm = MPI_COMM_WORLD ); + // @TODO replace former compression operator with JSON config /** - * @param compression Compression string, leave empty to disable commpression. - * @param compressionLevel Compression level. * @param backend Backend to use, specified by filename extension (eg "bp" or "h5"). * @param dt Type of data to write and read. * @param iterations The number of iterations to write and read for each @@ -104,8 +103,6 @@ namespace openPMD * @param threadSize Number of threads to use. */ void addConfiguration( - std::string compression, - uint8_t compressionLevel, std::string backend, Datatype dt, typename decltype( Series::iterations )::key_type iterations, @@ -115,8 +112,6 @@ namespace openPMD /** * Version of addConfiguration() that automatically sets the number of used * threads to the MPI size. - * @param compression Compression string, leave empty to disable commpression. - * @param compressionLevel Compression level. * @param backend Backend to use, specified by filename extension (eg "bp" or "h5"). * @param dt Type of data to write and read. * @param iterations The number of iterations to write and read for each @@ -124,8 +119,6 @@ namespace openPMD * iteration, so it should create sufficient data for one iteration. */ void addConfiguration( - std::string compression, - uint8_t compressionLevel, std::string backend, Datatype dt, typename decltype( Series::iterations)::key_type iterations @@ -151,8 +144,6 @@ namespace openPMD std::string m_basePath; std::vector< std::tuple< - std::string, - uint8_t, std::string, int, Datatype, @@ -161,9 +152,7 @@ namespace openPMD enum Config { - COMPRESSION = 0, - COMPRESSION_LEVEL, - BACKEND, + BACKEND = 0, NRANKS, DTYPE, ITERATIONS @@ -194,8 +183,6 @@ namespace openPMD /** * Execute a single read benchmark. * @tparam T Type of the dataset to write. - * @param compression Compression to use. - * @param level Compression level to use. * @param offset Local offset of the chunk to write. * @param extent Local extent of the chunk to write. * @param extension File extension to control the openPMD backend. 
@@ -207,8 +194,6 @@ namespace openPMD typename T > typename Clock::duration writeBenchmark( - std::string const & compression, - uint8_t level, Offset & offset, Extent & extent, std::string const & extension, @@ -331,8 +316,6 @@ namespace openPMD template< typename DatasetFillerProvider > void MPIBenchmark< DatasetFillerProvider >::addConfiguration( - std::string compression, - uint8_t compressionLevel, std::string backend, Datatype dt, typename decltype( Series::iterations)::key_type iterations, @@ -341,8 +324,6 @@ namespace openPMD { this->m_configurations .emplace_back( - compression, - compressionLevel, backend, threadSize, dt, @@ -353,8 +334,6 @@ namespace openPMD template< typename DatasetFillerProvider > void MPIBenchmark< DatasetFillerProvider >::addConfiguration( - std::string compression, - uint8_t compressionLevel, std::string backend, Datatype dt, typename decltype( Series::iterations)::key_type iterations @@ -366,8 +345,6 @@ namespace openPMD &size ); addConfiguration( - compression, - compressionLevel, backend, dt, iterations, @@ -389,8 +366,6 @@ namespace openPMD template< typename T > typename Clock::duration MPIBenchmark< DatasetFillerProvider >::BenchmarkExecution< Clock >::writeBenchmark( - std::string const & compression, - uint8_t level, Offset & offset, Extent & extent, std::string const & extension, @@ -424,13 +399,6 @@ namespace openPMD datatype, m_benchmark->totalExtent ); - if( !compression.empty( ) ) - { - dataset.setCompression( - compression, - level - ); - } id.resetDataset( dataset ); @@ -521,15 +489,11 @@ namespace openPMD ); for( auto const & config: exec.m_benchmark->m_configurations ) { - std::string compression; - uint8_t compressionLevel; std::string backend; int size; Datatype dt2; typename decltype( Series::iterations)::key_type iterations; std::tie( - compression, - compressionLevel, backend, size, dt2, @@ -551,8 +515,6 @@ namespace openPMD dsf->setNumberOfItems( blockSize ); auto writeTime = exec.writeBenchmark< T >( - compression, - compressionLevel, localCuboid.first, localCuboid.second, backend, @@ -567,8 +529,6 @@ namespace openPMD ); report.addReport( rootThread, - compression, - compressionLevel, backend, size, dt2, diff --git a/include/openPMD/benchmark/mpi/MPIBenchmarkReport.hpp b/include/openPMD/benchmark/mpi/MPIBenchmarkReport.hpp index eb43458bc2..0cc6fb348f 100644 --- a/include/openPMD/benchmark/mpi/MPIBenchmarkReport.hpp +++ b/include/openPMD/benchmark/mpi/MPIBenchmarkReport.hpp @@ -53,8 +53,6 @@ namespace openPMD std::map< std::tuple< int, // rank - std::string, // compression - uint8_t, // compression level std::string, // extension int, // thread size Datatype, @@ -81,8 +79,6 @@ namespace openPMD * Add results for a certain compression strategy and level. * * @param rootThread The MPI rank which will collect the data. - * @param compression Compression strategy. - * @param level Compression level * @param extension The openPMD filename extension. * @param threadSize The MPI size. * @param dt The openPMD datatype. @@ -91,8 +87,6 @@ namespace openPMD */ void addReport( int rootThread, - std::string compression, - uint8_t level, std::string extension, int threadSize, Datatype dt, @@ -106,8 +100,6 @@ namespace openPMD /** Retrieve the time measured for a certain compression strategy. * * @param rank Which MPI rank's duration results to retrieve. - * @param compression Compression strategy. - * @param level Compression level * @param extension The openPMD filename extension. * @param threadSize The MPI size. 
* @param dt The openPMD datatype. @@ -119,8 +111,6 @@ namespace openPMD Duration > getReport( int rank, - std::string compression, - uint8_t level, std::string extension, int threadSize, Datatype dt, @@ -244,8 +234,6 @@ namespace openPMD template< typename Duration > void MPIBenchmarkReport< Duration >::addReport( int rootThread, - std::string compression, - uint8_t level, std::string extension, int threadSize, Datatype dt, @@ -316,8 +304,6 @@ namespace openPMD .emplace( std::make_tuple( i, - compression, - level, extension, threadSize, dt, @@ -348,8 +334,6 @@ namespace openPMD Duration > MPIBenchmarkReport< Duration >::getReport( int rank, - std::string compression, - uint8_t level, std::string extension, int threadSize, Datatype dt, @@ -362,8 +346,6 @@ namespace openPMD .find( std::make_tuple( rank, - compression, - level, extension, threadSize, dt, diff --git a/src/Dataset.cpp b/src/Dataset.cpp index 21be4fa3f2..bb89f76423 100644 --- a/src/Dataset.cpp +++ b/src/Dataset.cpp @@ -30,7 +30,6 @@ Dataset::Dataset(Datatype d, Extent e, std::string options_in) : extent{e}, dtype{d}, rank{static_cast(e.size())}, - chunkSize{e}, options{std::move(options_in)} { } @@ -50,41 +49,4 @@ Dataset::extend( Extent newExtents ) extent = newExtents; return *this; } - -Dataset& -Dataset::setChunkSize(Extent const& cs) -{ - if( extent.size() != rank ) - throw std::runtime_error("Dimensionality of extended Dataset must match the original dimensionality"); - for( size_t i = 0; i < cs.size(); ++i ) - if( cs[i] > extent[i] ) - throw std::runtime_error("Dataset chunk size must be equal or smaller than Extent"); - - chunkSize = cs; - return *this; -} - -Dataset& -Dataset::setCompression(std::string const& format, uint8_t const level) -{ - if(format == "zlib" || format == "gzip" || format == "deflate") - { - if(level > 9) - throw std::runtime_error("Compression level out of range for " + format); - } - else - std::cerr << "Unknown compression format " << format - << ". This might mean that compression will not be enabled." - << std::endl; - - compression = format + ':' + std::to_string(static_cast< int >(level)); - return *this; -} - -Dataset& -Dataset::setCustomTransform(std::string const& parameter) -{ - transform = parameter; - return *this; -} } // openPMD diff --git a/src/IO/ADIOS/ADIOS2IOHandler.cpp b/src/IO/ADIOS/ADIOS2IOHandler.cpp index 53c868cd1d..638fdfb577 100644 --- a/src/IO/ADIOS/ADIOS2IOHandler.cpp +++ b/src/IO/ADIOS/ADIOS2IOHandler.cpp @@ -376,17 +376,6 @@ void ADIOS2IOHandlerImpl::createDataset( operators = defaultOperators; } - if( !parameters.compression.empty() ) - { - auxiliary::Option< adios2::Operator > adiosOperator = - getCompressionOperator( parameters.compression ); - if( adiosOperator ) - { - operators.push_back( ParameterizedOperator{ - adiosOperator.get(), adios2::Params() } ); - } - } - // cast from openPMD::Extent to adios2::Dims adios2::Dims const shape( parameters.extent.begin(), parameters.extent.end() ); diff --git a/src/IO/HDF5/HDF5IOHandler.cpp b/src/IO/HDF5/HDF5IOHandler.cpp index 23168b8121..c581d9c37b 100644 --- a/src/IO/HDF5/HDF5IOHandler.cpp +++ b/src/IO/HDF5/HDF5IOHandler.cpp @@ -386,7 +386,7 @@ HDF5IOHandlerImpl::createDataset(Writable* writable, VERIFY(status == 0, "[HDF5] Internal error: Failed to set chunk size during dataset creation"); } - std::string const& compression = parameters.compression; + std::string const& compression = ""; // @todo read from JSON if( !compression.empty() ) std::cerr << "[HDF5] Compression not yet implemented in HDF5 backend." 
<< std::endl; @@ -410,11 +410,6 @@ HDF5IOHandlerImpl::createDataset(Writable* writable, } */ - std::string const& transform = parameters.transform; - if( !transform.empty() ) - std::cerr << "[HDF5] Custom transform not yet implemented in HDF5 backend." - << std::endl; - GetH5DataType getH5DataType({ { typeid(bool).name(), m_H5T_BOOL_ENUM }, { typeid(std::complex< float >).name(), m_H5T_CFLOAT }, diff --git a/src/RecordComponent.cpp b/src/RecordComponent.cpp index b1727b8c7d..305a0cab4b 100644 --- a/src/RecordComponent.cpp +++ b/src/RecordComponent.cpp @@ -256,9 +256,6 @@ RecordComponent::flush(std::string const& name) dCreate.name = name; dCreate.extent = getExtent(); dCreate.dtype = getDatatype(); - dCreate.chunkSize = rc.m_dataset.chunkSize; - dCreate.compression = rc.m_dataset.compression; - dCreate.transform = rc.m_dataset.transform; dCreate.options = rc.m_dataset.options; IOHandler()->enqueue(IOTask(this, dCreate)); } diff --git a/src/backend/PatchRecordComponent.cpp b/src/backend/PatchRecordComponent.cpp index ed705f1cc5..9331e65b90 100644 --- a/src/backend/PatchRecordComponent.cpp +++ b/src/backend/PatchRecordComponent.cpp @@ -102,9 +102,6 @@ PatchRecordComponent::flush(std::string const& name) dCreate.name = name; dCreate.extent = getExtent(); dCreate.dtype = getDatatype(); - dCreate.chunkSize = getExtent(); - dCreate.compression = rc.m_dataset.compression; - dCreate.transform = rc.m_dataset.transform; dCreate.options = rc.m_dataset.options; IOHandler()->enqueue(IOTask(this, dCreate)); } diff --git a/src/binding/python/Dataset.cpp b/src/binding/python/Dataset.cpp index 3979244e45..16d2ef4cf3 100644 --- a/src/binding/python/Dataset.cpp +++ b/src/binding/python/Dataset.cpp @@ -61,12 +61,6 @@ void init_Dataset(py::module &m) { .def_readonly("extent", &Dataset::extent) .def("extend", &Dataset::extend) - .def_readonly("chunk_size", &Dataset::chunkSize) - .def("set_chunk_size", &Dataset::setChunkSize) - .def_readonly("compression", &Dataset::compression) - .def("set_compression", &Dataset::setCompression) - .def_readonly("transform", &Dataset::transform) - .def("set_custom_transform", &Dataset::setCustomTransform) .def_readonly("rank", &Dataset::rank) .def_property_readonly("dtype", [](const Dataset &d) { return dtype_to_numpy( d.dtype ); From 46b2b56e0b7989984aa104c4983e1b4a27e97fa8 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Franz=20P=C3=B6schel?= Date: Tue, 14 Sep 2021 13:53:20 +0200 Subject: [PATCH 06/23] Use JSON config in MPI benchmarks --- examples/8_benchmark_parallel.cpp | 8 +++++-- .../openPMD/benchmark/mpi/MPIBenchmark.hpp | 23 ++++++++++++++++--- .../benchmark/mpi/MPIBenchmarkReport.hpp | 9 ++++++++ 3 files changed, 35 insertions(+), 5 deletions(-) diff --git a/examples/8_benchmark_parallel.cpp b/examples/8_benchmark_parallel.cpp index c1fd68c7ae..8fef3e86b4 100644 --- a/examples/8_benchmark_parallel.cpp +++ b/examples/8_benchmark_parallel.cpp @@ -148,10 +148,14 @@ int main( // * The number of iterations. Effectively, the benchmark will be repeated for this many // times. #if openPMD_HAVE_ADIOS1 || openPMD_HAVE_ADIOS2 - benchmark.addConfiguration("bp", dt, 10); + benchmark.addConfiguration( + R"({"adios2": {"dataset":{"operators":[{"type": "blosc"}]}}})", + "bp", + dt, + 10 ); #endif #if openPMD_HAVE_HDF5 - benchmark.addConfiguration("h5", dt, 10); + benchmark.addConfiguration( "{}", "h5", dt, 10 ); #endif // Execute all previously configured benchmarks. 
Will return a MPIBenchmarkReport object diff --git a/include/openPMD/benchmark/mpi/MPIBenchmark.hpp b/include/openPMD/benchmark/mpi/MPIBenchmark.hpp index cea5f00726..8f48f8879b 100644 --- a/include/openPMD/benchmark/mpi/MPIBenchmark.hpp +++ b/include/openPMD/benchmark/mpi/MPIBenchmark.hpp @@ -93,8 +93,8 @@ namespace openPMD MPI_Comm comm = MPI_COMM_WORLD ); - // @TODO replace former compression operator with JSON config /** + * @param jsonConfig Backend-specific configuration. * @param backend Backend to use, specified by filename extension (eg "bp" or "h5"). * @param dt Type of data to write and read. * @param iterations The number of iterations to write and read for each @@ -103,6 +103,7 @@ namespace openPMD * @param threadSize Number of threads to use. */ void addConfiguration( + std::string jsonConfig, std::string backend, Datatype dt, typename decltype( Series::iterations )::key_type iterations, @@ -112,6 +113,7 @@ namespace openPMD /** * Version of addConfiguration() that automatically sets the number of used * threads to the MPI size. + * @param jsonConfig Backend-specific configuration. * @param backend Backend to use, specified by filename extension (eg "bp" or "h5"). * @param dt Type of data to write and read. * @param iterations The number of iterations to write and read for each @@ -119,6 +121,7 @@ namespace openPMD * iteration, so it should create sufficient data for one iteration. */ void addConfiguration( + std::string jsonConfig, std::string backend, Datatype dt, typename decltype( Series::iterations)::key_type iterations @@ -144,6 +147,7 @@ namespace openPMD std::string m_basePath; std::vector< std::tuple< + std::string, std::string, int, Datatype, @@ -152,7 +156,8 @@ namespace openPMD enum Config { - BACKEND = 0, + JSON_CONFIG = 0, + BACKEND, NRANKS, DTYPE, ITERATIONS @@ -183,6 +188,7 @@ namespace openPMD /** * Execute a single read benchmark. * @tparam T Type of the dataset to write. + * @param jsonConfig Backend-specific config. * @param offset Local offset of the chunk to write. * @param extent Local extent of the chunk to write. * @param extension File extension to control the openPMD backend. @@ -194,6 +200,7 @@ namespace openPMD typename T > typename Clock::duration writeBenchmark( + std::string const & jsonConfig, Offset & offset, Extent & extent, std::string const & extension, @@ -316,6 +323,7 @@ namespace openPMD template< typename DatasetFillerProvider > void MPIBenchmark< DatasetFillerProvider >::addConfiguration( + std::string jsonConfig, std::string backend, Datatype dt, typename decltype( Series::iterations)::key_type iterations, @@ -324,6 +332,7 @@ namespace openPMD { this->m_configurations .emplace_back( + std::move( jsonConfig ), backend, threadSize, dt, @@ -334,6 +343,7 @@ namespace openPMD template< typename DatasetFillerProvider > void MPIBenchmark< DatasetFillerProvider >::addConfiguration( + std::string jsonConfig, std::string backend, Datatype dt, typename decltype( Series::iterations)::key_type iterations @@ -345,6 +355,7 @@ namespace openPMD &size ); addConfiguration( + std::move( jsonConfig ), backend, dt, iterations, @@ -366,6 +377,7 @@ namespace openPMD template< typename T > typename Clock::duration MPIBenchmark< DatasetFillerProvider >::BenchmarkExecution< Clock >::writeBenchmark( + std::string const & jsonConfig, Offset & offset, Extent & extent, std::string const & extension, @@ -380,7 +392,8 @@ namespace openPMD Series series = Series( m_benchmark->m_basePath + "." 
+ extension, Access::CREATE, - m_benchmark->communicator + m_benchmark->communicator, + jsonConfig ); for( typename decltype( Series::iterations)::key_type i = 0; @@ -489,11 +502,13 @@ namespace openPMD ); for( auto const & config: exec.m_benchmark->m_configurations ) { + std::string jsonConfig; std::string backend; int size; Datatype dt2; typename decltype( Series::iterations)::key_type iterations; std::tie( + jsonConfig, backend, size, dt2, @@ -515,6 +530,7 @@ namespace openPMD dsf->setNumberOfItems( blockSize ); auto writeTime = exec.writeBenchmark< T >( + jsonConfig, localCuboid.first, localCuboid.second, backend, @@ -529,6 +545,7 @@ namespace openPMD ); report.addReport( rootThread, + jsonConfig, backend, size, dt2, diff --git a/include/openPMD/benchmark/mpi/MPIBenchmarkReport.hpp b/include/openPMD/benchmark/mpi/MPIBenchmarkReport.hpp index 0cc6fb348f..3ee01dd948 100644 --- a/include/openPMD/benchmark/mpi/MPIBenchmarkReport.hpp +++ b/include/openPMD/benchmark/mpi/MPIBenchmarkReport.hpp @@ -53,6 +53,7 @@ namespace openPMD std::map< std::tuple< int, // rank + std::string, // jsonConfig std::string, // extension int, // thread size Datatype, @@ -79,6 +80,7 @@ namespace openPMD * Add results for a certain compression strategy and level. * * @param rootThread The MPI rank which will collect the data. + * @param jsonConfig Compression strategy. * @param extension The openPMD filename extension. * @param threadSize The MPI size. * @param dt The openPMD datatype. @@ -87,6 +89,7 @@ namespace openPMD */ void addReport( int rootThread, + std::string jsonConfig, std::string extension, int threadSize, Datatype dt, @@ -100,6 +103,7 @@ namespace openPMD /** Retrieve the time measured for a certain compression strategy. * * @param rank Which MPI rank's duration results to retrieve. + * @param jsonConfig Compression strategy. * @param extension The openPMD filename extension. * @param threadSize The MPI size. * @param dt The openPMD datatype. @@ -111,6 +115,7 @@ namespace openPMD Duration > getReport( int rank, + std::string jsonConfig, std::string extension, int threadSize, Datatype dt, @@ -234,6 +239,7 @@ namespace openPMD template< typename Duration > void MPIBenchmarkReport< Duration >::addReport( int rootThread, + std::string jsonConfig, std::string extension, int threadSize, Datatype dt, @@ -304,6 +310,7 @@ namespace openPMD .emplace( std::make_tuple( i, + jsonConfig, extension, threadSize, dt, @@ -334,6 +341,7 @@ namespace openPMD Duration > MPIBenchmarkReport< Duration >::getReport( int rank, + std::string jsonConfig, std::string extension, int threadSize, Datatype dt, @@ -346,6 +354,7 @@ namespace openPMD .find( std::make_tuple( rank, + jsonConfig, extension, threadSize, dt, From e9ec75f56374503cc4282083ec62ddfdca58d8a0 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Franz=20P=C3=B6schel?= Date: Mon, 13 Sep 2021 16:15:29 +0200 Subject: [PATCH 07/23] Improvements to ADIOS2 and HDF5 These are the backends that are already configurable via JSON. New additions: 1. Use of error::BackendConfigSchema 2. Lower case string value reading where applicable 3. Warning when global Dataset options are unused 4. 
Lax datatypes for string values --- include/openPMD/IO/IOTask.hpp | 10 ++++ src/IO/ADIOS/ADIOS2IOHandler.cpp | 70 +++++++++++++++++++-------- src/IO/HDF5/HDF5IOHandler.cpp | 40 +++++++++------ src/IO/HDF5/ParallelHDF5IOHandler.cpp | 4 +- src/IO/IOTask.cpp | 35 ++++++++++++++ test/ParallelIOTest.cpp | 8 ++- test/SerialIOTest.cpp | 20 +++++--- 7 files changed, 141 insertions(+), 46 deletions(-) diff --git a/include/openPMD/IO/IOTask.hpp b/include/openPMD/IO/IOTask.hpp index f556e08c81..808f62d9f9 100644 --- a/include/openPMD/IO/IOTask.hpp +++ b/include/openPMD/IO/IOTask.hpp @@ -281,6 +281,16 @@ struct OPENPMDAPI_EXPORT Parameter< Operation::CREATE_DATASET > : public Abstrac Extent extent = {}; Datatype dtype = Datatype::UNDEFINED; std::string options = "{}"; + + // template parameter so we don't have to include the JSON lib here + // this function is useful for the createDataset() methods in, + // IOHandlerImpl's, so putting that here is the simplest way to make it + // available for them + template< typename TracingJSON > + static void warnUnusedParameters( + TracingJSON &, + std::string const & currentBackendName, + std::string const & warningMessage ); }; template<> diff --git a/src/IO/ADIOS/ADIOS2IOHandler.cpp b/src/IO/ADIOS/ADIOS2IOHandler.cpp index 638fdfb577..2d042e2661 100644 --- a/src/IO/ADIOS/ADIOS2IOHandler.cpp +++ b/src/IO/ADIOS/ADIOS2IOHandler.cpp @@ -22,6 +22,7 @@ #include "openPMD/IO/ADIOS/ADIOS2IOHandler.hpp" #include "openPMD/Datatype.hpp" +#include "openPMD/Error.hpp" #include "openPMD/IO/ADIOS/ADIOS2FilePosition.hpp" #include "openPMD/IO/ADIOS/ADIOS2IOHandler.hpp" #include "openPMD/auxiliary/Environment.hpp" @@ -140,12 +141,18 @@ ADIOS2IOHandlerImpl::init( json::TracingJSON cfg ) if( !engineTypeConfig.is_null() ) { // convert to string - m_engineType = engineTypeConfig; - std::transform( - m_engineType.begin(), - m_engineType.end(), - m_engineType.begin(), - []( unsigned char c ) { return std::tolower( c ); } ); + auto maybeEngine = + json::asLowerCaseStringDynamic( engineTypeConfig ); + if( maybeEngine.has_value() ) + { + m_engineType = std::move( maybeEngine.get() ); + } + else + { + throw error::BackendConfigSchema( + {"adios2", "engine", "type"}, + "Must be convertible to string type." ); + } } } auto operators = getOperators(); @@ -188,8 +195,22 @@ ADIOS2IOHandlerImpl::getOperators( json::TracingJSON cfg ) paramIterator != params.end(); ++paramIterator ) { - adiosParams[ paramIterator.key() ] = - paramIterator.value().get< std::string >(); + auto maybeString = + json::asStringDynamic( paramIterator.value() ); + if( maybeString.has_value() ) + { + adiosParams[ paramIterator.key() ] = + std::move( maybeString.get() ); + } + else + { + throw error::BackendConfigSchema( + { "adios2", + "dataset", + "operators", + paramIterator.key() }, + "Must be convertible to string type." ); + } } } auxiliary::Option< adios2::Operator > adiosOperator = @@ -352,29 +373,25 @@ void ADIOS2IOHandlerImpl::createDataset( auto const varName = nameOfVariable( writable ); std::vector< ParameterizedOperator > operators; - nlohmann::json options = nlohmann::json::parse( parameters.options ); - json::lowerCase( options ); - if( options.contains( "adios2" ) ) + json::TracingJSON options = json::parseOptions( + parameters.options, /* considerFiles = */ false ); + if( options.json().contains( "adios2" ) ) { json::TracingJSON datasetConfig( options[ "adios2" ] ); auto datasetOperators = getOperators( datasetConfig ); operators = datasetOperators ? 
std::move( datasetOperators.get() ) : defaultOperators; - - auto shadow = datasetConfig.invertShadow(); - if( shadow.size() > 0 ) - { - std::cerr << "Warning: parts of the JSON configuration for " - "ADIOS2 dataset '" - << varName << "' remain unused:\n" - << shadow << std::endl; - } } else { operators = defaultOperators; } + parameters.warnUnusedParameters( + options, + "adios2", + "Warning: parts of the JSON configuration for ADIOS2 dataset '" + + varName + "' remain unused:\n" ); // cast from openPMD::Extent to adios2::Dims adios2::Dims const shape( parameters.extent.begin(), parameters.extent.end() ); @@ -2315,7 +2332,18 @@ namespace detail for( auto it = params.json().begin(); it != params.json().end(); it++ ) { - m_IO.SetParameter( it.key(), it.value() ); + auto maybeString = json::asStringDynamic( it.value() ); + if( maybeString.has_value() ) + { + m_IO.SetParameter( + it.key(), std::move( maybeString.get() ) ); + } + else + { + throw error::BackendConfigSchema( + {"adios2", "engine", "parameters", it.key() }, + "Must be convertible to string type." ); + } alreadyConfigured.emplace( it.key() ); } } diff --git a/src/IO/HDF5/HDF5IOHandler.cpp b/src/IO/HDF5/HDF5IOHandler.cpp index c581d9c37b..04c5993293 100644 --- a/src/IO/HDF5/HDF5IOHandler.cpp +++ b/src/IO/HDF5/HDF5IOHandler.cpp @@ -24,6 +24,7 @@ #if openPMD_HAVE_HDF5 # include "openPMD/Datatype.hpp" +# include "openPMD/Error.hpp" # include "openPMD/auxiliary/Filesystem.hpp" # include "openPMD/auxiliary/StringManip.hpp" # include "openPMD/backend/Attribute.hpp" @@ -97,7 +98,18 @@ HDF5IOHandlerImpl::HDF5IOHandlerImpl( auto datasetConfig = m_config[ "dataset" ]; if( datasetConfig.json().contains( "chunks" ) ) { - m_chunks = datasetConfig[ "chunks" ].json().get< std::string >(); + auto maybeChunks = json::asLowerCaseStringDynamic( + datasetConfig[ "chunks" ].json() ); + if( maybeChunks.has_value() ) + { + m_chunks = std::move( maybeChunks.get() ); + } + else + { + throw error::BackendConfigSchema( + {"hdf5", "dataset", "chunks"}, + "Must be convertible to string type." ); + } } } if( m_chunks != "auto" && m_chunks != "none" ) @@ -294,19 +306,19 @@ HDF5IOHandlerImpl::createDataset(Writable* writable, if( auxiliary::ends_with(name, '/') ) name = auxiliary::replace_last(name, "/", ""); - auto config = nlohmann::json::parse( parameters.options ); - json::lowerCase( config ); + json::TracingJSON config = json::parseOptions( + parameters.options, /* considerFiles = */ false ); // general bool is_resizable_dataset = false; - if( config.contains( "resizable" ) ) + if( config.json().contains( "resizable" ) ) { - is_resizable_dataset = config.at( "resizable" ).get< bool >(); + is_resizable_dataset = config[ "resizable" ].json().get< bool >(); } // HDF5 specific - if( config.contains( "hdf5" ) && - config[ "hdf5" ].contains( "dataset" ) ) + if( config.json().contains( "hdf5" ) && + config[ "hdf5" ].json().contains( "dataset" ) ) { json::TracingJSON datasetConfig{ config[ "hdf5" ][ "dataset" ] }; @@ -314,15 +326,13 @@ HDF5IOHandlerImpl::createDataset(Writable* writable, /* * @todo Read more options from config here. 
*/ - auto shadow = datasetConfig.invertShadow(); - if( shadow.size() > 0 ) - { - std::cerr << "Warning: parts of the JSON configuration for " - "HDF5 dataset '" - << name << "' remain unused:\n" - << shadow << std::endl; - } + ( void )datasetConfig; } + parameters.warnUnusedParameters( + config, + "hdf5", + "Warning: parts of the JSON configuration for HDF5 dataset '" + + name + "' remain unused:\n" ); hid_t gapl = H5Pcreate(H5P_GROUP_ACCESS); #if H5_VERSION_GE(1,10,0) && openPMD_HAVE_MPI diff --git a/src/IO/HDF5/ParallelHDF5IOHandler.cpp b/src/IO/HDF5/ParallelHDF5IOHandler.cpp index fde421d8d6..59178825c5 100644 --- a/src/IO/HDF5/ParallelHDF5IOHandler.cpp +++ b/src/IO/HDF5/ParallelHDF5IOHandler.cpp @@ -150,7 +150,7 @@ ParallelHDF5IOHandlerImpl::~ParallelHDF5IOHandlerImpl() ParallelHDF5IOHandler::ParallelHDF5IOHandler(std::string path, Access at, MPI_Comm comm, - nlohmann::json /* config */) + json::TracingJSON /* config */) : AbstractIOHandler(std::move(path), at, comm) { throw std::runtime_error("openPMD-api built without HDF5 support"); @@ -158,7 +158,7 @@ ParallelHDF5IOHandler::ParallelHDF5IOHandler(std::string path, # else ParallelHDF5IOHandler::ParallelHDF5IOHandler(std::string path, Access at, - nlohmann::json /* config */) + json::TracingJSON /* config */) : AbstractIOHandler(std::move(path), at) { throw std::runtime_error("openPMD-api built without parallel support and without HDF5 support"); diff --git a/src/IO/IOTask.cpp b/src/IO/IOTask.cpp index 5ff2a31b8d..b7b54ab860 100644 --- a/src/IO/IOTask.cpp +++ b/src/IO/IOTask.cpp @@ -19,12 +19,47 @@ * If not, see . */ #include "openPMD/IO/IOTask.hpp" +#include "openPMD/auxiliary/JSON_internal.hpp" #include "openPMD/backend/Attributable.hpp" +#include // std::cerr + namespace openPMD { Writable* getWritable(Attributable* a) { return &a->writable(); } + +template<> +void Parameter< Operation::CREATE_DATASET >::warnUnusedParameters< + json::TracingJSON >( + json::TracingJSON & config, + std::string const & currentBackendName, + std::string const & warningMessage ) +{ + /* + * Fake-read non-backend-specific options. Some backends don't read those + * and we don't want to have warnings for them. + */ + for( std::string const & key : { "resizable" } ) + { + config[ key ]; + } + + auto shadow = config.invertShadow(); + // The backends are supposed to deal with this + // Only global options here + for( auto const & backendKey : json::backendKeys ) + { + if( backendKey != currentBackendName ) + { + shadow.erase( backendKey ); + } + } + if( shadow.size() > 0 ) + { + std::cerr << warningMessage << shadow.dump() << std::endl; + } +} } // openPMD diff --git a/test/ParallelIOTest.cpp b/test/ParallelIOTest.cpp index da93297358..6c38ba7b73 100644 --- a/test/ParallelIOTest.cpp +++ b/test/ParallelIOTest.cpp @@ -804,11 +804,15 @@ hipace_like_write( std::string file_ending ) bool const isHDF5 = file_ending == "h5"; std::string options = "{}"; if( isHDF5 ) + /* + * some keys and values capitalized randomly to check whether + * capitalization-insensitivity is working. 
+ */ options = R"( { - "hdf5": { + "HDF5": { "dataset": { - "chunks": "none" + "chunks": "NONE" } } })"; diff --git a/test/SerialIOTest.cpp b/test/SerialIOTest.cpp index 1a4536562c..a5c2bbe893 100644 --- a/test/SerialIOTest.cpp +++ b/test/SerialIOTest.cpp @@ -3321,7 +3321,7 @@ TEST_CASE( "serial_adios2_json_config", "[serial][adios2]" ) "type": "bp3", "unused": "parameter", "parameters": { - "BufferGrowthFactor": "2.0", + "BufferGrowthFactor": 2, "Profile": "On" } }, @@ -3347,7 +3347,7 @@ TEST_CASE( "serial_adios2_json_config", "[serial][adios2]" ) "type": "bp4", "unused": "parameter", "parameters": { - "BufferGrowthFactor": "2.0", + "BufferGrowthFactor": 2.0, "Profile": "On" } }, @@ -3357,7 +3357,7 @@ TEST_CASE( "serial_adios2_json_config", "[serial][adios2]" ) { "type": "blosc", "parameters": { - "clevel": "1", + "clevel": 1, "doshuffle": "BLOSC_BITSHUFFLE" } } @@ -3394,6 +3394,8 @@ TEST_CASE( "serial_adios2_json_config", "[serial][adios2]" ) )END"; std::string datasetConfig = R"END( { + "resizable": true, + "asdf": "asdf", "adios2": { "unused": "dataset parameter", "dataset": { @@ -3402,12 +3404,15 @@ TEST_CASE( "serial_adios2_json_config", "[serial][adios2]" ) { "type": "blosc", "parameters": { - "clevel": "3", - "doshuffle": "BLOSC_BITSHUFFLE" + "clevel": 3, + "doshuffle": "BLOSC_BITSHUFFLE" } } ] } + }, + "hdf5": { + "this": "should not warn" } } )END"; @@ -4361,7 +4366,10 @@ extendDataset( std::string const & ext ) // only one iteration written anyway write.setIterationEncoding( IterationEncoding::variableBased ); - Dataset ds1{ Datatype::INT, { 5, 5 }, "{ \"resizable\": true }" }; + Dataset ds1{ + Datatype::INT, + { 5, 5 }, + "{ \"resizable\": true, \"resizeble\": \"typo\" }" }; Dataset ds2{ Datatype::INT, { 10, 5 } }; // array record component -> array record component From 399686b7e7a198cca840048f644fa5c4cd9c7b07 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Franz=20P=C3=B6schel?= Date: Mon, 13 Sep 2021 16:29:56 +0200 Subject: [PATCH 08/23] Series global keys: backend and iteration_encoding Also, warn on unused global keys --- include/openPMD/Series.hpp | 3 ++ src/Format.cpp | 3 +- src/Series.cpp | 80 +++++++++++++++++++++++++++---- test/CoreTest.cpp | 98 ++++++++++++++++++++++++++++++++++++-- test/SerialIOTest.cpp | 13 ++++- 5 files changed, 181 insertions(+), 16 deletions(-) diff --git a/include/openPMD/Series.hpp b/include/openPMD/Series.hpp index b4488656b5..c117c9bd01 100644 --- a/include/openPMD/Series.hpp +++ b/include/openPMD/Series.hpp @@ -408,6 +408,9 @@ class Series : public Attributable } } std::unique_ptr< ParsedInput > parseInput(std::string); + // template parameter so we don't have to include the JSON lib here + template< typename TracingJSON > + void parseJsonOptions( TracingJSON & options, ParsedInput & ); bool hasExpansionPattern( std::string filenameWithExtension ); bool reparseExpansionPattern( std::string filenameWithExtension ); void init(std::shared_ptr< AbstractIOHandler >, std::unique_ptr< ParsedInput >); diff --git a/src/Format.cpp b/src/Format.cpp index 98defa14b3..04d68cb2bf 100644 --- a/src/Format.cpp +++ b/src/Format.cpp @@ -59,9 +59,8 @@ namespace openPMD { return Format::ADIOS2_SSC; if (auxiliary::ends_with(filename, ".json")) return Format::JSON; - if (std::string::npos != filename.find('.') /* extension is provided */ ) - throw std::runtime_error("Unknown file format. 
Did you append a valid filename extension?"); + // Format might still be specified via JSON return Format::DUMMY; } diff --git a/src/Series.cpp b/src/Series.cpp index 4aa411aa44..816ec7a24e 100644 --- a/src/Series.cpp +++ b/src/Series.cpp @@ -1474,15 +1474,75 @@ namespace { if( config.json().contains( key ) ) { - dest = json::asLowerCaseStringDynamic( config[ key ].json() ); + auto maybeString = + json::asLowerCaseStringDynamic( config[ key ].json() ); + if( maybeString.has_value() ) + { + dest = std::move( maybeString.get() ); + } + else + { + throw error::BackendConfigSchema( + { key }, "Must be convertible to string type." ); + } } } +} - void parseJsonOptions( - internal::SeriesData & series, json::TracingJSON & options ) +template< typename TracingJSON > +void Series::parseJsonOptions( + TracingJSON & options, ParsedInput & input ) +{ + auto & series = get(); + getJsonOption< bool >( + options, "defer_iteration_parsing", series.m_parseLazily ); + // backend key + { + std::map< std::string, Format > const backendDescriptors{ + { "hdf5", Format::HDF5 }, + { "adios1", Format::ADIOS1 }, + { "adios2", Format::ADIOS2 }, + { "json", Format::JSON } }; + std::string backend; + getJsonOptionLowerCase( options, "backend", backend ); + if( !backend.empty() ) + { + auto it = backendDescriptors.find( backend ); + if( it != backendDescriptors.end() ) + { + input.format = it->second; + } + else + { + throw error::BackendConfigSchema( + { "backend" }, "Unknown backend specified: " + backend ); + } + } + } + // iteration_encoding key { - getJsonOption< bool >( - options, "defer_iteration_parsing", series.m_parseLazily ); + std::map< std::string, IterationEncoding > const ieDescriptors{ + { "file_based", IterationEncoding::fileBased }, + { "group_based", IterationEncoding::groupBased }, + { "variable_based", IterationEncoding::variableBased } }; + std::string iterationEncoding; + getJsonOptionLowerCase( + options, "iteration_encoding", iterationEncoding ); + if( !iterationEncoding.empty() ) + { + auto it = ieDescriptors.find( iterationEncoding ); + if( it != ieDescriptors.end() ) + { + input.iterationEncoding = it->second; + } + else + { + throw error::BackendConfigSchema( + { "iteration_encoding" }, + "Unknown iteration encoding specified: " + + iterationEncoding ); + } + } } } @@ -1544,11 +1604,12 @@ Series::Series( iterations = m_series->iterations; json::TracingJSON optionsJson = json::parseOptions( options, comm, /* considerFiles = */ true ); - parseJsonOptions( get(), optionsJson ); auto input = parseInput( filepath ); + parseJsonOptions( optionsJson, *input ); auto handler = createIOHandler( - input->path, at, input->format, comm, std::move( optionsJson ) ); + input->path, at, input->format, comm, optionsJson ); init( handler, std::move( input ) ); + json::warnGlobalUnusedOptions( optionsJson ); } #endif @@ -1561,11 +1622,12 @@ Series::Series( iterations = m_series->iterations; json::TracingJSON optionsJson = json::parseOptions( options, /* considerFiles = */ true ); - parseJsonOptions( get(), optionsJson ); auto input = parseInput( filepath ); + parseJsonOptions( optionsJson, *input ); auto handler = createIOHandler( - input->path, at, input->format, std::move( optionsJson ) ); + input->path, at, input->format, optionsJson ); init( handler, std::move( input ) ); + json::warnGlobalUnusedOptions( optionsJson ); } Series::operator bool() const diff --git a/test/CoreTest.cpp b/test/CoreTest.cpp index 63aa300800..7148fb374d 100644 --- a/test/CoreTest.cpp +++ b/test/CoreTest.cpp @@ -5,17 +5,23 @@ 
#endif #include "openPMD/openPMD.hpp" +#include "openPMD/auxiliary/Filesystem.hpp" +#include "openPMD/auxiliary/JSON.hpp" + #include +#include #include -#include -#include #include #include #include #include -#include #include +#include +// cstdlib does not have setenv +#include // NOLINT(modernize-deprecated-headers) +#include +#include using namespace openPMD; @@ -866,6 +872,92 @@ TEST_CASE( "no_file_ending", "[core]" ) Catch::Equals("Unknown file format! Did you specify a file ending?")); REQUIRE_THROWS_WITH(Series("./new_openpmd_output_%05T", Access::CREATE), Catch::Equals("Unknown file format! Did you specify a file ending?")); + { + Series( + "../samples/no_extension_specified", + Access::CREATE, + "{\"backend\": \"json\"}" ); + } + REQUIRE( + auxiliary::file_exists( "../samples/no_extension_specified.json" ) ); +} + +TEST_CASE( "backend_via_json", "[core]" ) +{ + std::string encodingVariableBased = + "{\"backend\": \"json\", \"iteration_encoding\": \"variable_based\"}"; + { + Series series( + "../samples/optionsViaJson", + Access::CREATE, + encodingVariableBased ); + REQUIRE( series.backend() == "JSON" ); + REQUIRE( + series.iterationEncoding() == IterationEncoding::variableBased ); + } +#if openPMD_HAVE_ADIOS2 + { + Series series( + "../samples/optionsViaJson.bp", + Access::CREATE, + encodingVariableBased ); + REQUIRE( series.backend() == "JSON" ); + REQUIRE( + series.iterationEncoding() == IterationEncoding::variableBased ); + } + + { + Series series( + "../samples/optionsViaJsonOverwritesAutomaticDetection.sst", + Access::CREATE, + "{\"adios2\": {\"engine\": {\"type\": \"bp4\"}}}" ); + } + REQUIRE( auxiliary::directory_exists( + "../samples/optionsViaJsonOverwritesAutomaticDetection.bp" ) ); + +#if openPMD_HAVE_ADIOS1 + setenv( "OPENPMD_BP_BACKEND", "ADIOS1", 1 ); + { + // JSON option should overwrite environment variable + Series series( + "../samples/optionsPreferJsonOverEnvVar.bp", + Access::CREATE, + "{\"backend\": \"ADIOS2\"}" ); + REQUIRE( series.backend() == "ADIOS2" ); + } + // unset again + unsetenv( "OPENPMD_BP_BACKEND" ); + REQUIRE( auxiliary::directory_exists( + "../samples/optionsPreferJsonOverEnvVar.bp" ) ); +#endif +#endif + std::string encodingFileBased = + "{\"backend\": \"json\", \"iteration_encoding\": \"file_based\"}"; + { + /* + * Should we add JSON options to set the filebased expansion pattern? + * For now, let's require setting that as part of the filename. 
+ */ + REQUIRE_THROWS_AS( + [ & ]() { + Series series( + "../samples/optionsViaJson", + Access::CREATE, + encodingFileBased ); + }(), + error::WrongAPIUsage ); + } + std::string encodingGroupBased = + "{\"backend\": \"json\", \"iteration_encoding\": \"group_based\"}"; + { + Series series( + "../samples/optionsViaJsonPseudoFilebased%T.json", + Access::CREATE, + encodingGroupBased ); + REQUIRE( series.iterationEncoding() == IterationEncoding::groupBased ); + } + REQUIRE( auxiliary::file_exists( + "../samples/optionsViaJsonPseudoFilebased%T.json" ) ); } TEST_CASE( "custom_geometries", "[core]" ) diff --git a/test/SerialIOTest.cpp b/test/SerialIOTest.cpp index a5c2bbe893..60d5e80b7e 100644 --- a/test/SerialIOTest.cpp +++ b/test/SerialIOTest.cpp @@ -3316,6 +3316,10 @@ TEST_CASE( "serial_adios2_json_config", "[serial][adios2]" ) } std::string writeConfigBP3 = R"END( { + "unused": "global parameter", + "hdf5": { + "unused": "hdf5 parameter please dont warn" + }, "adios2": { "engine": { "type": "bp3", @@ -3678,8 +3682,13 @@ variableBasedSingleIteration( std::string const & file ) { constexpr Extent::value_type extent = 1000; { - Series writeSeries( file, Access::CREATE ); - writeSeries.setIterationEncoding( IterationEncoding::variableBased ); + Series writeSeries( + file, + Access::CREATE, + "{\"iteration_encoding\": \"variable_based\"}" ); + REQUIRE( + writeSeries.iterationEncoding() == + IterationEncoding::variableBased ); auto iterations = writeSeries.writeIterations(); auto iteration = writeSeries.iterations[ 0 ]; auto E_x = iteration.meshes[ "E" ][ "x" ]; From 241f9db1c99852b668e0623ff3a31b3dfab9f64f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Franz=20P=C3=B6schel?= Date: Mon, 13 Sep 2021 16:30:54 +0200 Subject: [PATCH 09/23] Documentation --- docs/source/details/adios1.json | 7 +++++++ docs/source/details/backendconfig.rst | 29 ++++++++++++++++++++++++++- 2 files changed, 35 insertions(+), 1 deletion(-) create mode 100644 docs/source/details/adios1.json diff --git a/docs/source/details/adios1.json b/docs/source/details/adios1.json new file mode 100644 index 0000000000..5d2cb4df71 --- /dev/null +++ b/docs/source/details/adios1.json @@ -0,0 +1,7 @@ +{ + "adios2": { + "dataset": { + "transform": "blosc:compressor=zlib,shuffle=bit,lvl=1;nometa" + } + } +} diff --git a/docs/source/details/backendconfig.rst b/docs/source/details/backendconfig.rst index 43bed65b82..cbbfda4a9a 100644 --- a/docs/source/details/backendconfig.rst +++ b/docs/source/details/backendconfig.rst @@ -14,7 +14,18 @@ The fundamental structure of this JSON configuration string is given as follows: This structure allows keeping one configuration string for several backends at once, with the concrete backend configuration being chosen upon choosing the backend itself. -The configuration is read in a case-sensitive manner. +Options that can be configured via JSON are often also accessible via other means, e.g. environment variables. +The following list specifies the priority of these means, beginning with the lowest priority: + +1. Default values +2. Automatically detected options, e.g. the backend being detected by inspection of the file extension +3. Environment variables +4. JSON configuration. For JSON, a dataset-specific configuration overwrites a global, Series-wide configuration. +5. Explicit API calls such as ``setIterationEncoding()`` + +The configuration is read in a case-insensitive manner, keys as well as values. +An exception to this are string values which are forwarded to other libraries such as ADIOS1 and ADIOS2. 
+Those are read "as-is" and interpreted by the backend library. Generally, keys of the configuration are *lower case*. Parameters that are directly passed through to an external library and not interpreted within openPMD API (e.g. ``adios2.engine.parameters``) are unaffected by this and follow the respective library's conventions. @@ -36,6 +47,11 @@ For a consistent user interface, backends shall follow the following rules: Backend-independent JSON configuration -------------------------------------- +The openPMD backend can be chosen via the JSON key ``backend`` which recognizes the alternatives ``["hdf5", "adios1", "adios2", "json"]``. + +The iteration encoding can be chosen via the JSON key ``iteration_encoding`` which recognizes the alternatives ``["file_based", "group_based", "variable_based"]``. +Note that for file-based iteration encoding, specification of the expansion pattern in the file name (e.g. ``data_%T.json``) remains mandatory. + The key ``defer_iteration_parsing`` can be used to optimize the process of opening an openPMD Series (deferred/lazy parsing). By default, a Series is parsed eagerly, i.e. opening a Series implies reading all available iterations. Especially when a Series has many iterations, this can be a costly operation and users may wish to defer parsing of iterations to a later point adding ``{"defer_iteration_parsing": true}`` to their JSON configuration. @@ -100,6 +116,17 @@ Explanation of the single keys: ``"none"`` can be used to disable chunking. Chunking generally improves performance and only needs to be disabled in corner-cases, e.g. when heavily relying on independent, parallel I/O that non-collectively declares data records. +ADIOS1 +^^^^^^ + +ADIOS1 allows configuring custom dataset transforms via JSON: + +.. literalinclude:: adios1.json + :language: json + +This configuration can be passed globally (i.e. for the ``Series`` object) to apply for all datasets. +Alternatively, it can also be passed for single ``Dataset`` objects to only apply for single datasets. 
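As a rough usage sketch (assuming the configuration above is held in a string ``cfg``), the global variant hands it to the ``Series`` constructor, while the per-dataset variant passes it as the ``Dataset`` options parameter::

    Series series( "data_%T.bp", Access::CREATE, cfg ); // applies to all datasets
    Dataset ds( Datatype::DOUBLE, { 100 }, cfg );        // applies to this dataset only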
+ Other backends ^^^^^^^^^^^^^^ From 9d2ffccc2a6e95f7fae2b13441acc39dcb865be7 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Franz=20P=C3=B6schel?= Date: Mon, 13 Sep 2021 19:28:54 +0200 Subject: [PATCH 10/23] Use fancy C++ strings The CI doesn't like having too many escaped characters in a normal string --- test/CoreTest.cpp | 12 ++++++------ test/SerialIOTest.cpp | 4 ++-- 2 files changed, 8 insertions(+), 8 deletions(-) diff --git a/test/CoreTest.cpp b/test/CoreTest.cpp index 7148fb374d..d1b4ba8cd7 100644 --- a/test/CoreTest.cpp +++ b/test/CoreTest.cpp @@ -876,7 +876,7 @@ TEST_CASE( "no_file_ending", "[core]" ) Series( "../samples/no_extension_specified", Access::CREATE, - "{\"backend\": \"json\"}" ); + R"({"backend": "json"})" ); } REQUIRE( auxiliary::file_exists( "../samples/no_extension_specified.json" ) ); @@ -885,7 +885,7 @@ TEST_CASE( "no_file_ending", "[core]" ) TEST_CASE( "backend_via_json", "[core]" ) { std::string encodingVariableBased = - "{\"backend\": \"json\", \"iteration_encoding\": \"variable_based\"}"; + R"({"backend": "json", "iteration_encoding": "variable_based"})"; { Series series( "../samples/optionsViaJson", @@ -910,7 +910,7 @@ TEST_CASE( "backend_via_json", "[core]" ) Series series( "../samples/optionsViaJsonOverwritesAutomaticDetection.sst", Access::CREATE, - "{\"adios2\": {\"engine\": {\"type\": \"bp4\"}}}" ); + R"({"adios2": {"engine": {"type": "bp4"}}})" ); } REQUIRE( auxiliary::directory_exists( "../samples/optionsViaJsonOverwritesAutomaticDetection.bp" ) ); @@ -922,7 +922,7 @@ TEST_CASE( "backend_via_json", "[core]" ) Series series( "../samples/optionsPreferJsonOverEnvVar.bp", Access::CREATE, - "{\"backend\": \"ADIOS2\"}" ); + R"({"backend": "ADIOS2"})" ); REQUIRE( series.backend() == "ADIOS2" ); } // unset again @@ -932,7 +932,7 @@ TEST_CASE( "backend_via_json", "[core]" ) #endif #endif std::string encodingFileBased = - "{\"backend\": \"json\", \"iteration_encoding\": \"file_based\"}"; + R"({"backend": "json", "iteration_encoding": "file_based"})"; { /* * Should we add JSON options to set the filebased expansion pattern? @@ -948,7 +948,7 @@ TEST_CASE( "backend_via_json", "[core]" ) error::WrongAPIUsage ); } std::string encodingGroupBased = - "{\"backend\": \"json\", \"iteration_encoding\": \"group_based\"}"; + R"({"backend": "json", "iteration_encoding": "group_based"})"; { Series series( "../samples/optionsViaJsonPseudoFilebased%T.json", diff --git a/test/SerialIOTest.cpp b/test/SerialIOTest.cpp index 60d5e80b7e..48151b83d2 100644 --- a/test/SerialIOTest.cpp +++ b/test/SerialIOTest.cpp @@ -3685,7 +3685,7 @@ variableBasedSingleIteration( std::string const & file ) Series writeSeries( file, Access::CREATE, - "{\"iteration_encoding\": \"variable_based\"}" ); + R"({"iteration_encoding": "variable_based"})" ); REQUIRE( writeSeries.iterationEncoding() == IterationEncoding::variableBased ); @@ -4378,7 +4378,7 @@ extendDataset( std::string const & ext ) Dataset ds1{ Datatype::INT, { 5, 5 }, - "{ \"resizable\": true, \"resizeble\": \"typo\" }" }; + R"({ "resizable": true, "resizeble": "typo" })" }; Dataset ds2{ Datatype::INT, { 10, 5 } }; // array record component -> array record component From 49a980d17880e94172eb14b5edacc5e02d31467b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Franz=20P=C3=B6schel?= Date: Mon, 11 Oct 2021 11:14:36 +0200 Subject: [PATCH 11/23] Correct precedence in ADIOS2: env var vs. JSON param JSON params may be specified in arbitrary capitalization. 
The backend should recognize this and not read from environment variables if the parameter has already been set via JSON. --- src/IO/ADIOS/ADIOS2IOHandler.cpp | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/src/IO/ADIOS/ADIOS2IOHandler.cpp b/src/IO/ADIOS/ADIOS2IOHandler.cpp index 2d042e2661..f32ac50bb7 100644 --- a/src/IO/ADIOS/ADIOS2IOHandler.cpp +++ b/src/IO/ADIOS/ADIOS2IOHandler.cpp @@ -2344,7 +2344,8 @@ namespace detail {"adios2", "engine", "parameters", it.key() }, "Must be convertible to string type." ); } - alreadyConfigured.emplace( it.key() ); + alreadyConfigured.emplace( + auxiliary::lowerCase( std::string( it.key() ) ) ); } } auto _useAdiosSteps = @@ -2371,8 +2372,9 @@ namespace detail << shadow << std::endl; } auto notYetConfigured = - [&alreadyConfigured]( std::string const & param ) { - auto it = alreadyConfigured.find( param ); + [ &alreadyConfigured ]( std::string const & param ) { + auto it = alreadyConfigured.find( + auxiliary::lowerCase( std::string( param ) ) ); return it == alreadyConfigured.end(); }; From 67ca6616b4941664c26881466b509e791bec8e2a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Franz=20P=C3=B6schel?= Date: Mon, 11 Oct 2021 16:38:32 +0200 Subject: [PATCH 12/23] Lower case transformation: Ignore some paths in JSON Currently those objects that contain keys forwarded to ADIOS2 Can be extended as needed --- src/auxiliary/JSON.cpp | 54 ++++++++++++++++++++++++++++++++++-------- test/AuxiliaryTest.cpp | 9 +++---- 2 files changed, 49 insertions(+), 14 deletions(-) diff --git a/src/auxiliary/JSON.cpp b/src/auxiliary/JSON.cpp index 5732a2e00d..357c534e02 100644 --- a/src/auxiliary/JSON.cpp +++ b/src/auxiliary/JSON.cpp @@ -185,12 +185,14 @@ namespace json } #endif - static nlohmann::json & - lowerCase( nlohmann::json & json, std::vector< std::string > & currentPath ) + template< typename F > + static nlohmann::json & lowerCase( + nlohmann::json & json, + std::vector< std::string > & currentPath, + F const & ignoreCurrentPath ) { - if( json.is_object() ) - { - auto & val = json.get_ref< nlohmann::json::object_t & >(); + auto transFormCurrentObject = [ ¤tPath ]( + nlohmann::json::object_t & val ) { // somekey -> SomeKey std::map< std::string, std::string > originalKeys; for( auto & pair : val ) @@ -217,12 +219,25 @@ namespace json newObject[ pair.first ] = std::move( val[ pair.second ] ); } val = newObject; + }; + + if( json.is_object() ) + { + auto & val = json.get_ref< nlohmann::json::object_t & >(); + + if( !ignoreCurrentPath( currentPath ) ) + { + transFormCurrentObject( val ); + } // now recursively for( auto & pair : val ) { - currentPath.push_back( pair.first ); - lowerCase( pair.second, currentPath ); + // ensure that the path consists only of lowercase strings, + // even if ignoreCurrentPath() was true + currentPath.push_back( + auxiliary::lowerCase( std::string( pair.first ) ) ); + lowerCase( pair.second, currentPath, ignoreCurrentPath ); currentPath.pop_back(); } } @@ -230,7 +245,9 @@ namespace json { for( auto & val : json ) { - lowerCase( val ); + currentPath.emplace_back( "\vnum" ); + lowerCase( val, currentPath, ignoreCurrentPath ); + currentPath.pop_back(); } } return json; @@ -240,8 +257,25 @@ namespace json { std::vector< std::string > currentPath; // that's as deep as our config currently goes, +1 for good measure - currentPath.reserve( 6 ); - return lowerCase( json, currentPath ); + currentPath.reserve( 7 ); + return lowerCase( + json, currentPath, []( std::vector< std::string > const & path ) { + std::vector< std::string > 
const ignoredPaths[] = { + { "adios2", "engine", "parameters" }, + { "adios2", + "dataset", + "operators", + "\vnum", + "parameters" } }; + for( auto const & ignored : ignoredPaths ) + { + if( ignored == path ) + { + return true; + } + } + return false; + } ); } auxiliary::Option< std::string > diff --git a/test/AuxiliaryTest.cpp b/test/AuxiliaryTest.cpp index bf2c6c2c06..fa10f4c3f7 100644 --- a/test/AuxiliaryTest.cpp +++ b/test/AuxiliaryTest.cpp @@ -95,6 +95,7 @@ TEST_CASE( "json_parsing", "[auxiliary]" ) json::parseOptions( same1, false ).dump() != json::parseOptions( different, false ).dump() ); + // Keys forwarded to ADIOS2 should remain untouched std::string upper = R"END( { "ADIOS2": { @@ -128,8 +129,8 @@ TEST_CASE( "json_parsing", "[auxiliary]" ) "type": "BP3", "unused": "PARAMETER", "parameters": { - "buffergrowthfactor": "2.0", - "profile": "ON" + "BUFFERGROWTHFACTOR": "2.0", + "PROFILE": "ON" } }, "unused": "AS WELL", @@ -138,8 +139,8 @@ TEST_CASE( "json_parsing", "[auxiliary]" ) { "type": "BLOSC", "parameters": { - "clevel": "1", - "doshuffle": "BLOSC_BITSHUFFLE" + "CLEVEL": "1", + "DOSHUFFLE": "BLOSC_BITSHUFFLE" } } ] From f2fac759512f41198432db0f457ebec1a37d7f04 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Franz=20P=C3=B6schel?= Date: Thu, 21 Oct 2021 11:40:59 +0200 Subject: [PATCH 13/23] Add json::merge, including test --- include/openPMD/auxiliary/JSON.hpp | 63 +++++++++++++++++++ include/openPMD/auxiliary/JSON_internal.hpp | 3 + include/openPMD/openPMD.hpp | 1 + src/auxiliary/JSON.cpp | 47 ++++++++++++++ test/AuxiliaryTest.cpp | 69 ++++++++++++++++++--- 5 files changed, 176 insertions(+), 7 deletions(-) create mode 100644 include/openPMD/auxiliary/JSON.hpp diff --git a/include/openPMD/auxiliary/JSON.hpp b/include/openPMD/auxiliary/JSON.hpp new file mode 100644 index 0000000000..eace4191d7 --- /dev/null +++ b/include/openPMD/auxiliary/JSON.hpp @@ -0,0 +1,63 @@ +/* Copyright 2021 Franz Poeschel + * + * This file is part of openPMD-api. + * + * openPMD-api is free software: you can redistribute it and/or modify + * it under the terms of of either the GNU General Public License or + * the GNU Lesser General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * openPMD-api is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License and the GNU Lesser General Public License + * for more details. + * + * You should have received a copy of the GNU General Public License + * and the GNU Lesser General Public License along with openPMD-api. + * If not, see . + */ + +#pragma once + +#include + +namespace openPMD +{ +namespace json +{ + /** + * @brief Merge two JSON datasets into one. + * + * Merging rules: + * 1. If both `defaultValue` and `overwrite` are JSON objects, then the + * resulting JSON object will contain the union of both objects' keys. + * If a key is specified in both objects, the values corresponding to the + * key are merged recursively. + * Keys that point to a null value after this procedure will be pruned. + * 2. In any other case, the JSON dataset `defaultValue` is replaced in its + * entirety with the JSON dataset `overwrite`. + * + * Note that item 2 means that datasets of different type will replace each + * other without error. 
+ * It also means that array types will replace each other without any notion + * of appending or merging. + * + * Possible use case: + * An application uses openPMD-api and wants to do the following: + * 1. Set some default backend options as JSON parameters. + * 2. Let its users specify custom backend options additionally. + * + * By using the json::merge() function, this application can then allow + * users to overwrite default options, while keeping any other ones. + * + * @param defaultValue + * @param overwrite + * @return std::string + */ + std::string merge( + std::string const & defaultValue, + std::string const & overwrite ); +} +} diff --git a/include/openPMD/auxiliary/JSON_internal.hpp b/include/openPMD/auxiliary/JSON_internal.hpp index d6ef9dccc5..8d02c62704 100644 --- a/include/openPMD/auxiliary/JSON_internal.hpp +++ b/include/openPMD/auxiliary/JSON_internal.hpp @@ -195,5 +195,8 @@ namespace json extern std::vector< std::string > backendKeys; void warnGlobalUnusedOptions( TracingJSON const & config ); + + nlohmann::json & + merge( nlohmann::json & defaultVal, nlohmann::json const & overwrite ); } // namespace json } // namespace openPMD diff --git a/include/openPMD/openPMD.hpp b/include/openPMD/openPMD.hpp index b54a1810c8..55a39cf2bb 100644 --- a/include/openPMD/openPMD.hpp +++ b/include/openPMD/openPMD.hpp @@ -55,6 +55,7 @@ namespace openPMD {} #include "openPMD/auxiliary/Date.hpp" #include "openPMD/auxiliary/DerefDynamicCast.hpp" +#include "openPMD/auxiliary/JSON.hpp" #include "openPMD/auxiliary/Option.hpp" #include "openPMD/auxiliary/OutOfRangeMsg.hpp" #include "openPMD/auxiliary/ShareRaw.hpp" diff --git a/src/auxiliary/JSON.cpp b/src/auxiliary/JSON.cpp index 357c534e02..05816483bb 100644 --- a/src/auxiliary/JSON.cpp +++ b/src/auxiliary/JSON.cpp @@ -19,6 +19,7 @@ * If not, see . */ +#include "openPMD/auxiliary/JSON.hpp" #include "openPMD/auxiliary/JSON_internal.hpp" #include "openPMD/auxiliary/Filesystem.hpp" @@ -331,5 +332,51 @@ namespace json << shadow.dump() << std::endl; } } + + nlohmann::json & + merge( nlohmann::json & defaultVal, nlohmann::json const & overwrite ) + { + if( defaultVal.is_object() && overwrite.is_object() ) + { + std::vector< std::string > prunedKeys; + for( auto it = overwrite.begin(); it != overwrite.end(); ++it ) + { + auto & valueInDefault = defaultVal[ it.key() ]; + merge( valueInDefault, it.value() ); + if( valueInDefault.is_null() ) + { + prunedKeys.emplace_back( it.key() ); + } + } + for( auto const & key : prunedKeys ) + { + defaultVal.erase( key ); + } + } + else + { + /* + * Anything else, just overwrite. + * Note: There's no clear generic way to merge arrays: + * Should we concatenate? Or should we merge at the same indices? + * From the user side, this means: + * An application can specify a number of default compression + * operators, e.g. in adios2.dataset.operators, but a user can + * overwrite the operators. Neither appending nor pointwise update + * are quite useful here. 
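 * As an illustration (mirroring the json_merging unit test): overwriting the
 * array [1, 2, 3, 4, 5] with [5, 6, 7] simply yields [5, 6, 7].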
+ */ + defaultVal = overwrite; + } + return defaultVal; + } + + std::string merge( + std::string const & defaultValue, + std::string const & overwrite ) + { + auto res = parseOptions( defaultValue, /* considerFiles = */ false ); + merge( res, parseOptions( overwrite, /* considerFiles = */ false ) ); + return res.dump(); + } } // namespace json } // namespace openPMD diff --git a/test/AuxiliaryTest.cpp b/test/AuxiliaryTest.cpp index fa10f4c3f7..9ccb3ed227 100644 --- a/test/AuxiliaryTest.cpp +++ b/test/AuxiliaryTest.cpp @@ -3,19 +3,20 @@ # define OPENPMD_private public # define OPENPMD_protected public #endif -#include "openPMD/config.hpp" -#include "openPMD/backend/Writable.hpp" -#include "openPMD/backend/Attributable.hpp" -#include "openPMD/backend/Container.hpp" +#include "openPMD/Dataset.hpp" +#include "openPMD/IO/AbstractIOHandler.hpp" +#include "openPMD/IO/AbstractIOHandlerHelper.hpp" #include "openPMD/auxiliary/DerefDynamicCast.hpp" #include "openPMD/auxiliary/Filesystem.hpp" +#include "openPMD/auxiliary/JSON.hpp" #include "openPMD/auxiliary/JSON_internal.hpp" #include "openPMD/auxiliary/Option.hpp" #include "openPMD/auxiliary/StringManip.hpp" #include "openPMD/auxiliary/Variant.hpp" -#include "openPMD/IO/AbstractIOHandler.hpp" -#include "openPMD/IO/AbstractIOHandlerHelper.hpp" -#include "openPMD/Dataset.hpp" +#include "openPMD/backend/Attributable.hpp" +#include "openPMD/backend/Container.hpp" +#include "openPMD/backend/Writable.hpp" +#include "openPMD/config.hpp" #include @@ -155,6 +156,60 @@ TEST_CASE( "json_parsing", "[auxiliary]" ) REQUIRE( jsonUpper.dump() == jsonLower.dump() ); } +TEST_CASE( "json_merging", "auxiliary" ) +{ + std::string defaultVal = R"END( +{ + "mergeRecursively": { + "changed": 43, + "unchanged": true, + "delete_me": "adsf" + }, + "dontmergearrays": [ + 1, + 2, + 3, + 4, + 5 + ], + "delete_me": [345,2345,36] +} +)END"; + + std::string overwrite = R"END( +{ + "mergeRecursively": { + "changed": "new value", + "newValue": "44", + "delete_me": null + }, + "dontmergearrays": [ + 5, + 6, + 7 + ], + "delete_me": null +} +)END"; + + std::string expect = R"END( +{ + "mergeRecursively": { + "changed": "new value", + "unchanged": true, + "newValue": "44" + }, + "dontmergearrays": [ + 5, + 6, + 7 + ] +})END"; + REQUIRE( + json::merge( defaultVal, overwrite ) == + json::parseOptions( expect, false ).dump() ); +} + TEST_CASE( "optional", "[auxiliary]" ) { using namespace auxiliary; From 3273b0cc395c42fab73be2b5e4ba4230f76af65d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Franz=20P=C3=B6schel?= Date: Thu, 21 Oct 2021 12:22:37 +0200 Subject: [PATCH 14/23] Use {"backend": } in tests --- test/ParallelIOTest.cpp | 10 ++-- test/SerialIOTest.cpp | 121 +++++++++++++++++++++++++++------------- 2 files changed, 88 insertions(+), 43 deletions(-) diff --git a/test/ParallelIOTest.cpp b/test/ParallelIOTest.cpp index 6c38ba7b73..62968fa7e4 100644 --- a/test/ParallelIOTest.cpp +++ b/test/ParallelIOTest.cpp @@ -374,9 +374,11 @@ TEST_CASE( "available_chunks_test", "[parallel][adios]" ) { available_chunks_test( "bp" ); } +#endif +#if openPMD_HAVE_ADIOS2 && openPMD_HAVE_MPI void -extendDataset( std::string const & ext ) +extendDataset( std::string const & ext, std::string const & jsonConfig ) { std::string filename = "../samples/parallelExtendDataset." 
+ ext; int r_mpi_rank{ -1 }, r_mpi_size{ -1 }; @@ -389,7 +391,7 @@ extendDataset( std::string const & ext ) std::iota( data1.begin(), data1.end(), 0 ); std::iota( data2.begin(), data2.end(), 25 ); { - Series write( filename, Access::CREATE, MPI_COMM_WORLD ); + Series write( filename, Access::CREATE, MPI_COMM_WORLD, jsonConfig ); if( ext == "bp" && write.backend() != "ADIOS2" ) { // dataset resizing unsupported in ADIOS1 @@ -413,7 +415,7 @@ extendDataset( std::string const & ext ) MPI_Barrier( MPI_COMM_WORLD ); { - Series read( filename, Access::READ_ONLY ); + Series read( filename, Access::READ_ONLY, jsonConfig ); auto E_x = read.iterations[ 0 ].meshes[ "E" ][ "x" ]; REQUIRE( E_x.getExtent() == Extent{ mpi_size, 50 } ); auto chunk = E_x.loadChunk< int >( { 0, 0 }, { mpi_size, 50 } ); @@ -430,7 +432,7 @@ extendDataset( std::string const & ext ) TEST_CASE( "extend_dataset", "[parallel]" ) { - extendDataset( "bp" ); + extendDataset( "bp", R"({"backend": "adios2"})" ); } #endif diff --git a/test/SerialIOTest.cpp b/test/SerialIOTest.cpp index 48151b83d2..2903864298 100644 --- a/test/SerialIOTest.cpp +++ b/test/SerialIOTest.cpp @@ -32,6 +32,42 @@ using namespace openPMD; +struct BackendSelection +{ + std::string backendName; + std::string extension; + + inline std::string jsonBaseConfig() const + { + return R"({"backend": ")" + backendName + "\"}"; + } +}; + +std::vector< BackendSelection > testedBackends() +{ + auto variants = getVariants(); + std::map< std::string, std::string > extensions{ + { "json", "json" }, + { "adios1", "bp1" }, + { "adios2", "bp" }, + { "hdf5", "h5" } }; + std::vector< BackendSelection > res; + for( auto const & pair : variants ) + { + if( pair.second ) + { + auto lookup = extensions.find( pair.first ); + if( lookup != extensions.end() ) + { + std::string extension = lookup->second; + res.push_back( + { std::move( pair.first ), std::move( extension ) } ); + } + } + } + return res; +} + std::vector< std::string > testedFileExtensions() { auto allExtensions = getFileExtensions(); @@ -2057,14 +2093,16 @@ void optional_paths_110_test(const std::string & backend) void git_early_chunk_query( std::string const filename, std::string const species, - int const step + int const step, + std::string const & jsonConfig = "{}" ) { try { Series s = Series( filename, - Access::READ_ONLY + Access::READ_ONLY, + jsonConfig ); auto electrons = s.iterations[step].particles[species]; @@ -3304,7 +3342,11 @@ TEST_CASE( "no_serial_adios1", "[serial][adios]") #if openPMD_HAVE_ADIOS2 TEST_CASE( "git_adios2_early_chunk_query", "[serial][adios2]" ) { - git_early_chunk_query("../samples/git-sample/3d-bp4/example-3d-bp4_%T.bp", "e", 600); + git_early_chunk_query( + "../samples/git-sample/3d-bp4/example-3d-bp4_%T.bp", + "e", + 600, + R"({"backend": "adios2"})" ); } TEST_CASE( "serial_adios2_json_config", "[serial][adios2]" ) @@ -3778,7 +3820,7 @@ TEST_CASE( "git_adios2_sample_test", "[serial][adios2]" ) << samplePath << "' not accessible \n"; return; } - Series o( samplePath, Access::READ_ONLY ); + Series o( samplePath, Access::READ_ONLY, R"({"backend": "adios2"})" ); REQUIRE( o.openPMD() == "1.1.0" ); REQUIRE( o.openPMDextension() == 0 ); REQUIRE( o.basePath() == "/data/%T/" ); @@ -4032,16 +4074,13 @@ TEST_CASE( "git_adios2_sample_test", "[serial][adios2]" ) void variableBasedSeries( std::string const & file ) { + std::string selectADIOS2 = R"({"backend": "adios2"})"; constexpr Extent::value_type extent = 1000; { - Series writeSeries( file, Access::CREATE ); + Series writeSeries( file, 
Access::CREATE, selectADIOS2 ); writeSeries.setIterationEncoding( IterationEncoding::variableBased ); REQUIRE( writeSeries.iterationEncoding() == IterationEncoding::variableBased ); - if( writeSeries.backend() == "ADIOS1" ) - { - return; - } auto iterations = writeSeries.writeIterations(); for( size_t i = 0; i < 10; ++i ) { @@ -4079,9 +4118,10 @@ void variableBasedSeries( std::string const & file ) REQUIRE( auxiliary::directory_exists( file ) ); - auto testRead = [ &file, &extent ]( std::string const & jsonConfig ) - { - Series readSeries( file, Access::READ_ONLY, jsonConfig ); + auto testRead = [ &file, &extent, &selectADIOS2 ]( + std::string const & jsonConfig ) { + Series readSeries( + file, Access::READ_ONLY, json::merge( selectADIOS2, jsonConfig ) ); size_t last_iteration_index = 0; for( auto iteration : readSeries.readIterations() ) @@ -4139,7 +4179,9 @@ void variableBasedSeries( std::string const & file ) TEST_CASE( "variableBasedSeries", "[serial][adios2]" ) { +#if openPMD_HAVE_ADIOS2 variableBasedSeries( "../samples/variableBasedSeries.bp" ); +#endif } void variableBasedParticleData() @@ -4224,19 +4266,14 @@ TEST_CASE( "variableBasedParticleData", "[serial][adios2]" ) #endif // @todo Upon switching to ADIOS2 2.7.0, test this the other way around also -void -iterate_nonstreaming_series( - std::string const & file, bool variableBasedLayout ) +void iterate_nonstreaming_series( + std::string const & file, bool variableBasedLayout, std::string jsonConfig ) { constexpr size_t extent = 100; { - Series writeSeries( file, Access::CREATE ); + Series writeSeries( file, Access::CREATE, jsonConfig ); if( variableBasedLayout ) { - if( writeSeries.backend() != "ADIOS2" ) - { - return; - } writeSeries.setIterationEncoding( IterationEncoding::variableBased ); } @@ -4310,7 +4347,10 @@ iterate_nonstreaming_series( } } - Series readSeries( file, Access::READ_ONLY, "{\"defer_iteration_parsing\": true}" ); + Series readSeries( + file, + Access::READ_ONLY, + json::merge( jsonConfig, R"({"defer_iteration_parsing": true})" ) ); size_t last_iteration_index = 0; // conventionally written Series must be readable with streaming-aware API! @@ -4346,19 +4386,28 @@ iterate_nonstreaming_series( TEST_CASE( "iterate_nonstreaming_series", "[serial][adios2]" ) { - for( auto const & t : testedFileExtensions() ) + for( auto const & backend : testedBackends() ) { iterate_nonstreaming_series( - "../samples/iterate_nonstreaming_series_filebased_%T." + t, false ); + "../samples/iterate_nonstreaming_series_filebased_%T." + + backend.extension, + false, + backend.jsonBaseConfig() ); iterate_nonstreaming_series( - "../samples/iterate_nonstreaming_series_groupbased." + t, false ); - iterate_nonstreaming_series( - "../samples/iterate_nonstreaming_series_variablebased." + t, true ); + "../samples/iterate_nonstreaming_series_groupbased." + + backend.extension, + false, + backend.jsonBaseConfig() ); } +#if openPMD_HAVE_ADIOS2 + iterate_nonstreaming_series( + "../samples/iterate_nonstreaming_series_variablebased.bp", + true, + R"({"backend": "adios2"})" ); +#endif } -void -extendDataset( std::string const & ext ) +void extendDataset( std::string const & ext, std::string const & jsonConfig ) { std::string filename = "../samples/extendDataset." 
+ ext; std::vector< int > data1( 25 ); @@ -4366,12 +4415,7 @@ extendDataset( std::string const & ext ) std::iota( data1.begin(), data1.end(), 0 ); std::iota( data2.begin(), data2.end(), 25 ); { - Series write( filename, Access::CREATE ); - if( ext == "bp" && write.backend() != "ADIOS2" ) - { - // dataset resizing unsupported in ADIOS1 - return; - } + Series write( filename, Access::CREATE, jsonConfig ); // only one iteration written anyway write.setIterationEncoding( IterationEncoding::variableBased ); @@ -4460,7 +4504,7 @@ extendDataset( std::string const & ext ) } { - Series read( filename, Access::READ_ONLY ); + Series read( filename, Access::READ_ONLY, jsonConfig ); auto E_x = read.iterations[ 0 ].meshes[ "E" ][ "x" ]; REQUIRE( E_x.getExtent() == Extent{ 10, 5 } ); auto chunk = E_x.loadChunk< int >( { 0, 0 }, { 10, 5 } ); @@ -4490,21 +4534,20 @@ extendDataset( std::string const & ext ) TEST_CASE( "extend_dataset", "[serial]" ) { - extendDataset( "json" ); + extendDataset( "json", R"({"backend": "json"})" ); #if openPMD_HAVE_ADIOS2 - extendDataset( "bp" ); + extendDataset( "bp", R"({"backend": "adios2"})" ); #endif #if openPMD_HAVE_HDF5 // extensible datasets require chunking // skip this test for if chunking is disabled if( auxiliary::getEnvString( "OPENPMD_HDF5_CHUNKS", "auto" ) != "none" ) { - extendDataset("h5"); + extendDataset( "h5", R"({"backend": "hdf5"})" ); } #endif } - void deferred_parsing( std::string const & extension ) { if( auxiliary::directory_exists( "../samples/lazy_parsing" ) ) From 276c50ea7233512d309984feca08339eaa3c31c0 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Franz=20P=C3=B6schel?= Date: Thu, 21 Oct 2021 15:13:47 +0200 Subject: [PATCH 15/23] Warn if using contradicting filename extension to backend key --- src/Series.cpp | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/src/Series.cpp b/src/Series.cpp index 816ec7a24e..4f0f1f318b 100644 --- a/src/Series.cpp +++ b/src/Series.cpp @@ -1510,6 +1510,15 @@ void Series::parseJsonOptions( auto it = backendDescriptors.find( backend ); if( it != backendDescriptors.end() ) { + if( input.format != Format::DUMMY && + suffix( input.format ) != suffix( it->second ) ) + { + std::cerr << "[Warning] Supplied filename extension '" + << suffix( input.format ) + << "' contradicts the backend specified via the " + "'backend' key. Will go on with backend " + << it->first << "." 
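
The scenario this new warning targets can be triggered deliberately: the filename suggests one backend while the "backend" key requests another, and the key wins. A sketch with a made-up path, assuming a build with both HDF5 and ADIOS2 enabled:

    #include <openPMD/openPMD.hpp>

    int main()
    {
        // ".h5" suggests HDF5, the JSON config requests ADIOS2: a warning is
        // printed to stderr and the Series continues with the ADIOS2 backend.
        openPMD::Series s(
            "../samples/contradicting.h5",
            openPMD::Access::CREATE,
            R"({"backend": "adios2"})" );
        s.iterations[ 0 ];
        s.flush();
    }
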
<< std::endl; + } input.format = it->second; } else From 99a307cf28ca4e5af9a8dc695de1476f60037927 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Franz=20P=C3=B6schel?= Date: Thu, 16 Dec 2021 11:55:44 +0100 Subject: [PATCH 16/23] Move JSON test to separate binary --- CMakeLists.txt | 7 +- test/AuxiliaryTest.cpp | 181 ++----------------------------------- test/CoreTest.cpp | 1 - test/JSONTest.cpp | 198 +++++++++++++++++++++++++++++++++++++++++ 4 files changed, 210 insertions(+), 177 deletions(-) create mode 100644 test/JSONTest.cpp diff --git a/CMakeLists.txt b/CMakeLists.txt index 443482ccd4..d09355ea7a 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -777,6 +777,7 @@ set(openPMD_TEST_NAMES Auxiliary SerialIO ParallelIO + JSON ) # command line tools set(openPMD_CLI_TOOL_NAMES @@ -863,8 +864,10 @@ if(openPMD_BUILD_TESTING) target_link_libraries(${testname}Tests PRIVATE CatchMain) endif() - target_include_directories(${testname}Tests SYSTEM PRIVATE - $) + if(${testname} STREQUAL JSON) + target_include_directories(${testname}Tests SYSTEM PRIVATE + $) + endif() endforeach() endif() diff --git a/test/AuxiliaryTest.cpp b/test/AuxiliaryTest.cpp index 9ccb3ed227..aaadcc9bd0 100644 --- a/test/AuxiliaryTest.cpp +++ b/test/AuxiliaryTest.cpp @@ -3,20 +3,18 @@ # define OPENPMD_private public # define OPENPMD_protected public #endif -#include "openPMD/Dataset.hpp" -#include "openPMD/IO/AbstractIOHandler.hpp" -#include "openPMD/IO/AbstractIOHandlerHelper.hpp" +#include "openPMD/config.hpp" +#include "openPMD/backend/Writable.hpp" +#include "openPMD/backend/Attributable.hpp" +#include "openPMD/backend/Container.hpp" #include "openPMD/auxiliary/DerefDynamicCast.hpp" #include "openPMD/auxiliary/Filesystem.hpp" -#include "openPMD/auxiliary/JSON.hpp" -#include "openPMD/auxiliary/JSON_internal.hpp" #include "openPMD/auxiliary/Option.hpp" #include "openPMD/auxiliary/StringManip.hpp" #include "openPMD/auxiliary/Variant.hpp" -#include "openPMD/backend/Attributable.hpp" -#include "openPMD/backend/Container.hpp" -#include "openPMD/backend/Writable.hpp" -#include "openPMD/config.hpp" +#include "openPMD/IO/AbstractIOHandler.hpp" +#include "openPMD/IO/AbstractIOHandlerHelper.hpp" +#include "openPMD/Dataset.hpp" #include @@ -44,171 +42,6 @@ struct TestHelper : public Attributable } // test } // openPMD -TEST_CASE( "json_parsing", "[auxiliary]" ) -{ - std::string wrongValue = R"END( -{ - "ADIOS2": { - "duplicate key": 1243, - "DUPLICATE KEY": 234 - } -})END"; - REQUIRE_THROWS_WITH( - json::parseOptions( wrongValue, false ), - error::BackendConfigSchema( - { "adios2", "duplicate key" }, "JSON config: duplicate keys." 
) - .what() ); - std::string same1 = R"( -{ - "ADIOS2": { - "type": "nullcore", - "engine": { - "type": "bp4", - "usesteps": true - } - } -})"; - std::string same2 = R"( -{ - "adios2": { - "type": "nullcore", - "ENGINE": { - "type": "bp4", - "usesteps": true - } - } -})"; - std::string different = R"( -{ - "adios2": { - "type": "NULLCORE", - "ENGINE": { - "type": "bp4", - "usesteps": true - } - } -})"; - REQUIRE( - json::parseOptions( same1, false ).dump() == - json::parseOptions( same2, false ).dump() ); - // Only keys should be transformed to lower case, values must stay the same - REQUIRE( - json::parseOptions( same1, false ).dump() != - json::parseOptions( different, false ).dump() ); - - // Keys forwarded to ADIOS2 should remain untouched - std::string upper = R"END( -{ - "ADIOS2": { - "ENGINE": { - "TYPE": "BP3", - "UNUSED": "PARAMETER", - "PARAMETERS": { - "BUFFERGROWTHFACTOR": "2.0", - "PROFILE": "ON" - } - }, - "UNUSED": "AS WELL", - "DATASET": { - "OPERATORS": [ - { - "TYPE": "BLOSC", - "PARAMETERS": { - "CLEVEL": "1", - "DOSHUFFLE": "BLOSC_BITSHUFFLE" - } - } - ] - } - } -} -)END"; - std::string lower = R"END( -{ - "adios2": { - "engine": { - "type": "BP3", - "unused": "PARAMETER", - "parameters": { - "BUFFERGROWTHFACTOR": "2.0", - "PROFILE": "ON" - } - }, - "unused": "AS WELL", - "dataset": { - "operators": [ - { - "type": "BLOSC", - "parameters": { - "CLEVEL": "1", - "DOSHUFFLE": "BLOSC_BITSHUFFLE" - } - } - ] - } - } -} -)END"; - nlohmann::json jsonUpper = nlohmann::json::parse( upper ); - nlohmann::json jsonLower = nlohmann::json::parse( lower ); - REQUIRE( jsonUpper.dump() != jsonLower.dump() ); - json::lowerCase( jsonUpper ); - REQUIRE( jsonUpper.dump() == jsonLower.dump() ); -} - -TEST_CASE( "json_merging", "auxiliary" ) -{ - std::string defaultVal = R"END( -{ - "mergeRecursively": { - "changed": 43, - "unchanged": true, - "delete_me": "adsf" - }, - "dontmergearrays": [ - 1, - 2, - 3, - 4, - 5 - ], - "delete_me": [345,2345,36] -} -)END"; - - std::string overwrite = R"END( -{ - "mergeRecursively": { - "changed": "new value", - "newValue": "44", - "delete_me": null - }, - "dontmergearrays": [ - 5, - 6, - 7 - ], - "delete_me": null -} -)END"; - - std::string expect = R"END( -{ - "mergeRecursively": { - "changed": "new value", - "unchanged": true, - "newValue": "44" - }, - "dontmergearrays": [ - 5, - 6, - 7 - ] -})END"; - REQUIRE( - json::merge( defaultVal, overwrite ) == - json::parseOptions( expect, false ).dump() ); -} TEST_CASE( "optional", "[auxiliary]" ) { using namespace auxiliary; diff --git a/test/CoreTest.cpp b/test/CoreTest.cpp index d1b4ba8cd7..468285dcdc 100644 --- a/test/CoreTest.cpp +++ b/test/CoreTest.cpp @@ -9,7 +9,6 @@ #include "openPMD/auxiliary/JSON.hpp" #include -#include #include #include diff --git a/test/JSONTest.cpp b/test/JSONTest.cpp new file mode 100644 index 0000000000..592f390733 --- /dev/null +++ b/test/JSONTest.cpp @@ -0,0 +1,198 @@ +#include "openPMD/auxiliary/JSON.hpp" +#include "openPMD/auxiliary/JSON_internal.hpp" + +#include + +using namespace openPMD; + +TEST_CASE( "json_parsing", "[auxiliary]" ) +{ + std::string wrongValue = R"END( +{ + "ADIOS2": { + "duplicate key": 1243, + "DUPLICATE KEY": 234 + } +})END"; + REQUIRE_THROWS_WITH( + json::parseOptions( wrongValue, false ), + error::BackendConfigSchema( + { "adios2", "duplicate key" }, "JSON config: duplicate keys." 
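
Outside of Catch2, the duplicate-key check exercised here surfaces as a catchable exception: keys are compared after lower-casing, so two spellings of the same key collide and parseOptions() reports an error::BackendConfigSchema. A sketch calling the internal helper directly (JSON_internal.hpp is not part of the installed API, so this is mainly relevant inside the library's own tests; the key names are made up):

    #include "openPMD/Error.hpp"
    #include "openPMD/auxiliary/JSON_internal.hpp" // internal: json::parseOptions()

    #include <iostream>

    int main()
    {
        try
        {
            openPMD::json::parseOptions(
                R"({"ADIOS2": {"unused": 1, "UNUSED": 2}})",
                /* considerFiles = */ false );
        }
        catch( openPMD::error::BackendConfigSchema const & e )
        {
            // errorLocation holds the offending path, e.g. {"adios2", "unused"}
            std::cerr << e.what() << std::endl;
        }
    }
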
) + .what() ); + std::string same1 = R"( +{ + "ADIOS2": { + "type": "nullcore", + "engine": { + "type": "bp4", + "usesteps": true + } + } +})"; + std::string same2 = R"( +{ + "adios2": { + "type": "nullcore", + "ENGINE": { + "type": "bp4", + "usesteps": true + } + } +})"; + std::string different = R"( +{ + "adios2": { + "type": "NULLCORE", + "ENGINE": { + "type": "bp4", + "usesteps": true + } + } +})"; + REQUIRE( + json::parseOptions( same1, false ).dump() == + json::parseOptions( same2, false ).dump() ); + // Only keys should be transformed to lower case, values must stay the same + REQUIRE( + json::parseOptions( same1, false ).dump() != + json::parseOptions( different, false ).dump() ); + + // Keys forwarded to ADIOS2 should remain untouched + std::string upper = R"END( +{ + "ADIOS2": { + "ENGINE": { + "TYPE": "BP3", + "UNUSED": "PARAMETER", + "PARAMETERS": { + "BUFFERGROWTHFACTOR": "2.0", + "PROFILE": "ON" + } + }, + "UNUSED": "AS WELL", + "DATASET": { + "OPERATORS": [ + { + "TYPE": "BLOSC", + "PARAMETERS": { + "CLEVEL": "1", + "DOSHUFFLE": "BLOSC_BITSHUFFLE" + } + } + ] + } + } +} +)END"; + std::string lower = R"END( +{ + "adios2": { + "engine": { + "type": "BP3", + "unused": "PARAMETER", + "parameters": { + "BUFFERGROWTHFACTOR": "2.0", + "PROFILE": "ON" + } + }, + "unused": "AS WELL", + "dataset": { + "operators": [ + { + "type": "BLOSC", + "parameters": { + "CLEVEL": "1", + "DOSHUFFLE": "BLOSC_BITSHUFFLE" + } + } + ] + } + } +} +)END"; + nlohmann::json jsonUpper = nlohmann::json::parse( upper ); + nlohmann::json jsonLower = nlohmann::json::parse( lower ); + REQUIRE( jsonUpper.dump() != jsonLower.dump() ); + json::lowerCase( jsonUpper ); + REQUIRE( jsonUpper.dump() == jsonLower.dump() ); +} + +TEST_CASE( "json_merging", "auxiliary" ) +{ + std::string defaultVal = R"END( +{ + "mergeRecursively": { + "changed": 43, + "unchanged": true, + "delete_me": "adsf" + }, + "dontmergearrays": [ + 1, + 2, + 3, + 4, + 5 + ], + "delete_me": [345,2345,36] +} +)END"; + + std::string overwrite = R"END( +{ + "mergeRecursively": { + "changed": "new value", + "newValue": "44", + "delete_me": null + }, + "dontmergearrays": [ + 5, + 6, + 7 + ], + "delete_me": null +} +)END"; + + std::string expect = R"END( +{ + "mergeRecursively": { + "changed": "new value", + "unchanged": true, + "newValue": "44" + }, + "dontmergearrays": [ + 5, + 6, + 7 + ] +})END"; + REQUIRE( + json::merge( defaultVal, overwrite ) == + json::parseOptions( expect, false ).dump() ); +} + +TEST_CASE( "optional", "[auxiliary]" ) { + using namespace auxiliary; + + Option opt; + + REQUIRE_THROWS_AS(opt.get(), variantSrc::bad_variant_access); + REQUIRE_THROWS_AS(opt.get() = 42, variantSrc::bad_variant_access); + REQUIRE(!opt); + REQUIRE(!opt.has_value()); + + opt = 43; + REQUIRE(opt); + REQUIRE(opt.has_value()); + REQUIRE(opt.get() == 43); + + Option opt2{ opt }; + REQUIRE(opt2); + REQUIRE(opt2.has_value()); + REQUIRE(opt2.get() == 43); + + Option opt3 = makeOption( 3 ); + REQUIRE(opt3); + REQUIRE(opt3.has_value()); + REQUIRE(opt3.get() == 3); +} From 2508a78d52b64c48d3369108ebcfe0a5df1c5df6 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Franz=20P=C3=B6schel?= Date: Thu, 16 Dec 2021 12:26:31 +0100 Subject: [PATCH 17/23] Apply suggestions from code review --- examples/7_extended_write_serial.py | 36 ++++++++++----------- include/openPMD/Series.hpp | 10 +++++- include/openPMD/auxiliary/JSON_internal.hpp | 33 +++++++++++++++++++ src/Series.cpp | 10 ++++++ src/auxiliary/JSON.cpp | 3 ++ test/SerialIOTest.cpp | 21 ++++++++++-- 6 files changed, 90 
insertions(+), 23 deletions(-) diff --git a/examples/7_extended_write_serial.py b/examples/7_extended_write_serial.py index 6f939f9c75..a9cdcd291e 100755 --- a/examples/7_extended_write_serial.py +++ b/examples/7_extended_write_serial.py @@ -8,6 +8,7 @@ """ from openpmd_api import Series, Access, Dataset, Mesh_Record_Component, \ Unit_Dimension +import json import numpy as np @@ -102,27 +103,24 @@ # component this describes the datatype and shape of data as it should be # written to disk d = Dataset(partial_mesh.dtype, extent=[2, 5]) - dataset_config = """ -{ - "adios1": { - "dataset": { - "transform": "blosc:compressor=zlib,shuffle=bit,lvl=1;nometa" - } - }, - "adios2": { - "dataset": { - "operators": [ - { - "type": "zlib", - "parameters": { - "clevel": 9 - } + dataset_config = { + "adios1": { + "dataset": { + "transform": "blosc:compressor=zlib,shuffle=bit,lvl=1;nometa" + } + }, + "adios2": { + "dataset": { + "operators": [{ + "type": "zlib", + "parameters": { + "clevel": 9 + } + }] + } } - ] } - } -}""" - d.options = dataset_config + d.options = json.dumps(dataset_config) mesh["x"].reset_dataset(d) electrons = cur_it.particles["electrons"] diff --git a/include/openPMD/Series.hpp b/include/openPMD/Series.hpp index c117c9bd01..d56ad8ecb9 100644 --- a/include/openPMD/Series.hpp +++ b/include/openPMD/Series.hpp @@ -408,7 +408,15 @@ class Series : public Attributable } } std::unique_ptr< ParsedInput > parseInput(std::string); - // template parameter so we don't have to include the JSON lib here + /** + * @brief Parse non-backend-specific configuration in JSON config. + * + * Currently this parses the keys defer_iteration_parsing, backend and + * iteration_encoding. + * + * @tparam TracingJSON template parameter so we don't have + * to include the JSON lib here + */ template< typename TracingJSON > void parseJsonOptions( TracingJSON & options, ParsedInput & ); bool hasExpansionPattern( std::string filenameWithExtension ); diff --git a/include/openPMD/auxiliary/JSON_internal.hpp b/include/openPMD/auxiliary/JSON_internal.hpp index 8d02c62704..7cc6256d46 100644 --- a/include/openPMD/auxiliary/JSON_internal.hpp +++ b/include/openPMD/auxiliary/JSON_internal.hpp @@ -185,17 +185,50 @@ namespace json #endif + /** + * Recursively transform all keys in a JSON dataset to lower case. + * String values are unaffected. + * JSON objects at the following openPMD-defined locations are not affected: + * * adios2.engine.parameters + * * adios2.dataset.operators..parameters + * This helps us forward configurations from these locations to ADIOS2 + * "as-is". + */ nlohmann::json & lowerCase( nlohmann::json & ); + /** + * Read a JSON literal as a string. + * If the literal is a number, convert that number to its string + * representation. + * If it is a bool, convert it to either "0" or "1". + * If it is not a literal, return an empty option. + */ auxiliary::Option< std::string > asStringDynamic( nlohmann::json const & ); + /** + * Like asStringDynamic(), but convert the string to lowercase afterwards. + */ auxiliary::Option< std::string > asLowerCaseStringDynamic( nlohmann::json const & ); + /** + * Vector containing the lower-case keys to the single backends' + * configurations. + */ extern std::vector< std::string > backendKeys; + /** + * Function that can be called after reading all global options from the + * JSON configuration (i.e. all the options that are not handled by the + * single backends). + * If any unread value persists, a warning is printed to stderr. 
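
A user-facing way to run into this warning is a top-level key that neither the Series itself nor any backend reads; the serial_adios2_json_config test exercises the same mechanism with its "unused": "global parameter" entry. A sketch with a made-up path and key name:

    #include <openPMD/openPMD.hpp>

    int main()
    {
        // "not_a_real_option" is consumed by nobody, so a warning listing the
        // leftover JSON is expected on stderr once parsing has finished.
        openPMD::Series s(
            "../samples/unusedOption.json",
            openPMD::Access::CREATE,
            R"({"backend": "json", "not_a_real_option": "whoops"})" );
        s.iterations[ 0 ];
        s.flush();
    }
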
+ */ void warnGlobalUnusedOptions( TracingJSON const & config ); + /** + * Like merge() as defined in JSON.hpp, but this overload works directly + * on nlohmann::json values. + */ nlohmann::json & merge( nlohmann::json & defaultVal, nlohmann::json const & overwrite ); } // namespace json diff --git a/src/Series.cpp b/src/Series.cpp index 4f0f1f318b..519710aa3f 100644 --- a/src/Series.cpp +++ b/src/Series.cpp @@ -38,6 +38,7 @@ #include #include #include +#include namespace openPMD @@ -1458,6 +1459,10 @@ void Series::openIteration( uint64_t index, Iteration iteration ) namespace { + /** + * Look up if the specified key is contained in the JSON dataset. + * If yes, read it into the specified location. + */ template< typename From, typename Dest = From > void getJsonOption( json::TracingJSON & config, std::string const & key, Dest & dest ) @@ -1468,6 +1473,11 @@ namespace } } + /** + * Like getJsonOption(), but for string types. + * Numbers and booleans are converted to their string representation. + * The string is converted to lower case. + */ template< typename Dest = std::string > void getJsonOptionLowerCase( json::TracingJSON & config, std::string const & key, Dest & dest ) diff --git a/src/auxiliary/JSON.cpp b/src/auxiliary/JSON.cpp index 05816483bb..ae3e1dfc58 100644 --- a/src/auxiliary/JSON.cpp +++ b/src/auxiliary/JSON.cpp @@ -266,6 +266,9 @@ namespace json { "adios2", "dataset", "operators", + /* + * We use "\vnum" to indicate "any array index". + */ "\vnum", "parameters" } }; for( auto const & ignored : ignoredPaths ) diff --git a/test/SerialIOTest.cpp b/test/SerialIOTest.cpp index 2903864298..0f9a70f846 100644 --- a/test/SerialIOTest.cpp +++ b/test/SerialIOTest.cpp @@ -3360,7 +3360,7 @@ TEST_CASE( "serial_adios2_json_config", "[serial][adios2]" ) { "unused": "global parameter", "hdf5": { - "unused": "hdf5 parameter please dont warn" + "unused": "hdf5 parameter please do not warn" }, "adios2": { "engine": { @@ -3438,6 +3438,21 @@ TEST_CASE( "serial_adios2_json_config", "[serial][adios2]" ) } } )END"; + /* + * Notes on the upcoming dataset JSON configuration: + * * The resizable key is needed by some backends (HDF5) so the backend can + * create a dataset that can later be resized. + * * The asdf key should lead to a warning about unused parameters. + * * Inside the hdf5 configuration, there are unused keys ("this"). + * However, since this configuration is used by the ADIOS2 backend, there + * will be no warning for it. 
+ * + * In the end, this config should lead to a warning similar to: + * > Warning: parts of the JSON configuration for ADIOS2 dataset + * > '/data/0/meshes/E/y' remain unused: + * > {"adios2":{"dataset":{"unused":"too"},"unused":"dataset parameter"}, + * > "asdf":"asdf"} + */ std::string datasetConfig = R"END( { "resizable": true, @@ -4177,12 +4192,12 @@ void variableBasedSeries( std::string const & file ) testRead( "{\"defer_iteration_parsing\": false}" ); } +#if openPMD_HAVE_ADIOS2 TEST_CASE( "variableBasedSeries", "[serial][adios2]" ) { -#if openPMD_HAVE_ADIOS2 variableBasedSeries( "../samples/variableBasedSeries.bp" ); -#endif } +#endif void variableBasedParticleData() { From b7312eb1dedf07912e6d2695a1d662633cd5e4c2 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Franz=20P=C3=B6schel?= Date: Thu, 16 Dec 2021 16:01:07 +0100 Subject: [PATCH 18/23] Remove duplicate friend declarations Fix after rebase --- include/openPMD/backend/Attributable.hpp | 1 - include/openPMD/backend/BaseRecord.hpp | 2 -- include/openPMD/backend/Container.hpp | 1 - 3 files changed, 4 deletions(-) diff --git a/include/openPMD/backend/Attributable.hpp b/include/openPMD/backend/Attributable.hpp index ded6aabd37..fe6f82b4a0 100644 --- a/include/openPMD/backend/Attributable.hpp +++ b/include/openPMD/backend/Attributable.hpp @@ -131,7 +131,6 @@ class Attributable friend struct traits::GenerationPolicy; friend class Iteration; friend class Series; - friend class Series; friend class Writable; friend class WriteIterations; diff --git a/include/openPMD/backend/BaseRecord.hpp b/include/openPMD/backend/BaseRecord.hpp index dd4328e047..e32b0b6af1 100644 --- a/include/openPMD/backend/BaseRecord.hpp +++ b/include/openPMD/backend/BaseRecord.hpp @@ -56,9 +56,7 @@ class BaseRecord : public Container< T_elem > friend class ParticleSpecies; friend class PatchRecord; friend class Record; - friend class Mesh; - friend class ParticleSpecies; std::shared_ptr< internal::BaseRecordData< T_elem > > m_baseRecordData{ new internal::BaseRecordData< T_elem >() }; diff --git a/include/openPMD/backend/Container.hpp b/include/openPMD/backend/Container.hpp index 72abb64d58..74f4a65883 100644 --- a/include/openPMD/backend/Container.hpp +++ b/include/openPMD/backend/Container.hpp @@ -135,7 +135,6 @@ class Container : public Attributable friend class ParticlePatches; friend class internal::SeriesData; friend class Series; - friend class Series; template< typename > friend class internal::EraseStaleEntries; protected: From 6065d569189905fab4da3673ae614d8b9bdffe67 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Franz=20P=C3=B6schel?= Date: Thu, 16 Dec 2021 16:10:17 +0100 Subject: [PATCH 19/23] HDF5 fix (to be rebased) --- include/openPMD/IO/HDF5/ParallelHDF5IOHandler.hpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/include/openPMD/IO/HDF5/ParallelHDF5IOHandler.hpp b/include/openPMD/IO/HDF5/ParallelHDF5IOHandler.hpp index fd115c94b7..71e1a13a25 100644 --- a/include/openPMD/IO/HDF5/ParallelHDF5IOHandler.hpp +++ b/include/openPMD/IO/HDF5/ParallelHDF5IOHandler.hpp @@ -40,7 +40,7 @@ namespace openPMD ParallelHDF5IOHandler( std::string path, Access, MPI_Comm, json::TracingJSON config); #else - ParallelHDF5IOHandler(std::string path, Access, nlohmann::json config); + ParallelHDF5IOHandler(std::string path, Access, json::TracingJSON config); #endif ~ParallelHDF5IOHandler() override; From 965bc07eece7d77fd28bdf91d84b5efae00f3987 Mon Sep 17 00:00:00 2001 From: Axel Huebl Date: Thu, 16 Dec 2021 18:09:53 -0800 Subject: [PATCH 20/23] Fix verbatim chevrons 
in Doxygen --- include/openPMD/auxiliary/JSON_internal.hpp | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/include/openPMD/auxiliary/JSON_internal.hpp b/include/openPMD/auxiliary/JSON_internal.hpp index 7cc6256d46..b6b6538b4e 100644 --- a/include/openPMD/auxiliary/JSON_internal.hpp +++ b/include/openPMD/auxiliary/JSON_internal.hpp @@ -189,8 +189,8 @@ namespace json * Recursively transform all keys in a JSON dataset to lower case. * String values are unaffected. * JSON objects at the following openPMD-defined locations are not affected: - * * adios2.engine.parameters - * * adios2.dataset.operators..parameters + * * `adios2.engine.parameters` + * * `adios2.dataset.operators..parameters` * This helps us forward configurations from these locations to ADIOS2 * "as-is". */ From bd9978993d6b35e78b909b16437b7b48d4dee7b2 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Franz=20P=C3=B6schel?= Date: Fri, 17 Dec 2021 12:17:01 +0100 Subject: [PATCH 21/23] Some commenting on backend_via_json test --- test/CoreTest.cpp | 32 +++++++++++++++++++++++++++++--- 1 file changed, 29 insertions(+), 3 deletions(-) diff --git a/test/CoreTest.cpp b/test/CoreTest.cpp index 468285dcdc..9488ba8008 100644 --- a/test/CoreTest.cpp +++ b/test/CoreTest.cpp @@ -896,6 +896,10 @@ TEST_CASE( "backend_via_json", "[core]" ) } #if openPMD_HAVE_ADIOS2 { + /* + * JSON backend should be chosen even if ending .bp is given + * {"backend": "json"} overwrites automatic detection + */ Series series( "../samples/optionsViaJson.bp", Access::CREATE, @@ -906,6 +910,9 @@ TEST_CASE( "backend_via_json", "[core]" ) } { + /* + * BP4 engine should be selected even if ending .sst is given + */ Series series( "../samples/optionsViaJsonOverwritesAutomaticDetection.sst", Access::CREATE, @@ -917,7 +924,11 @@ TEST_CASE( "backend_via_json", "[core]" ) #if openPMD_HAVE_ADIOS1 setenv( "OPENPMD_BP_BACKEND", "ADIOS1", 1 ); { - // JSON option should overwrite environment variable + /* + * ADIOS2 backend should be selected even if OPENPMD_BP_BACKEND is set + * as ADIOS1 + * JSON config overwrites environment variables + */ Series series( "../samples/optionsPreferJsonOverEnvVar.bp", Access::CREATE, @@ -934,8 +945,8 @@ TEST_CASE( "backend_via_json", "[core]" ) R"({"backend": "json", "iteration_encoding": "file_based"})"; { /* - * Should we add JSON options to set the filebased expansion pattern? - * For now, let's require setting that as part of the filename. + * File-based iteration encoding can only be chosen if an expansion + * pattern is detected in the filename. */ REQUIRE_THROWS_AS( [ & ]() { @@ -946,9 +957,24 @@ TEST_CASE( "backend_via_json", "[core]" ) }(), error::WrongAPIUsage ); } + { + /* + * … but specifying both the pattern and the option in JSON should work. + */ + Series series( + "../samples/optionsViaJson%06T", + Access::CREATE, + encodingFileBased ); + series.iterations[1456]; + } std::string encodingGroupBased = R"({"backend": "json", "iteration_encoding": "group_based"})"; { + /* + * … and if a pattern is detected, but the JSON config says to use + * an iteration encoding that is not file-based, the pattern should + * be ignored. 
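
Taken together, the encoding behaviour documented in these comments looks as follows from user code; a sketch with made-up paths (the JSON backend is chosen so the snippet needs no optional dependencies):

    #include <openPMD/openPMD.hpp>

    int main()
    {
        // file_based encoding requested via JSON works as long as the filename
        // carries an expansion pattern such as %T or %06T ...
        openPMD::Series ok(
            "../samples/encodingViaJson_%06T.json",
            openPMD::Access::CREATE,
            R"({"backend": "json", "iteration_encoding": "file_based"})" );
        ok.iterations[ 42 ];
        ok.flush();

        // ... while the same option without a pattern is rejected.
        try
        {
            openPMD::Series bad(
                "../samples/encodingViaJson.json",
                openPMD::Access::CREATE,
                R"({"backend": "json", "iteration_encoding": "file_based"})" );
        }
        catch( openPMD::error::WrongAPIUsage const & )
        {
            // expected: file-based encoding needs an expansion pattern
        }
    }
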
+ */ Series series( "../samples/optionsViaJsonPseudoFilebased%T.json", Access::CREATE, From f9c2f8c21d155c86568dc4634c29dda95abef93d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Franz=20P=C3=B6schel?= Date: Fri, 17 Dec 2021 12:36:32 +0100 Subject: [PATCH 22/23] Add breaking changes to NEWS.rst --- NEWS.rst | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/NEWS.rst b/NEWS.rst index f8d7f10906..cf23818ae5 100644 --- a/NEWS.rst +++ b/NEWS.rst @@ -9,6 +9,10 @@ Upgrade Guide Python 3.10 is now supported. openPMD-api now depends on `toml11 `__ 3.7.0+. +The following backend-specific members of the ``Dataset`` class have been removed: ``Dataset::setChunkSize()``, ``Dataset::setCompression()``, ``Dataset::setCustomTransform()``, ``Dataset::chunkSize``, ``Dataset::compression``, ``Dataset::transform``. +They are replaced by backend-specific options in the JSON-based backend configuration. +This can be passed in ``Dataset::options``. + 0.14.0 ------ From a69af6ca0f1699f69f67132f825a9b1efcbe2ef8 Mon Sep 17 00:00:00 2001 From: Axel Huebl Date: Fri, 17 Dec 2021 15:16:56 -0800 Subject: [PATCH 23/23] Doxygen: Warn Unused JSON Params --- include/openPMD/IO/IOTask.hpp | 10 ++++++---- test/CoreTest.cpp | 4 ++-- 2 files changed, 8 insertions(+), 6 deletions(-) diff --git a/include/openPMD/IO/IOTask.hpp b/include/openPMD/IO/IOTask.hpp index 808f62d9f9..650a9c9bbb 100644 --- a/include/openPMD/IO/IOTask.hpp +++ b/include/openPMD/IO/IOTask.hpp @@ -282,10 +282,12 @@ struct OPENPMDAPI_EXPORT Parameter< Operation::CREATE_DATASET > : public Abstrac Datatype dtype = Datatype::UNDEFINED; std::string options = "{}"; - // template parameter so we don't have to include the JSON lib here - // this function is useful for the createDataset() methods in, - // IOHandlerImpl's, so putting that here is the simplest way to make it - // available for them + /** Warn about unused JSON paramters + * + * Template parameter so we don't have to include the JSON lib here. + * This function is useful for the createDataset() methods in, + * IOHandlerImpl's, so putting that here is the simplest way to make it + * available for them. */ template< typename TracingJSON > static void warnUnusedParameters( TracingJSON &, diff --git a/test/CoreTest.cpp b/test/CoreTest.cpp index 9488ba8008..93205a8acc 100644 --- a/test/CoreTest.cpp +++ b/test/CoreTest.cpp @@ -959,7 +959,7 @@ TEST_CASE( "backend_via_json", "[core]" ) } { /* - * … but specifying both the pattern and the option in JSON should work. + * ... but specifying both the pattern and the option in JSON should work. */ Series series( "../samples/optionsViaJson%06T", @@ -971,7 +971,7 @@ TEST_CASE( "backend_via_json", "[core]" ) R"({"backend": "json", "iteration_encoding": "group_based"})"; { /* - * … and if a pattern is detected, but the JSON config says to use + * ... and if a pattern is detected, but the JSON config says to use * an iteration encoding that is not file-based, the pattern should * be ignored. */
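
For readers migrating along the NEWS entry above: the removed Dataset members map onto per-backend sections of the dataset-level JSON configuration, passed via Dataset::options, mirroring the Python example in 7_extended_write_serial.py. A hedged C++ sketch (sample path and compression settings are illustrative only, and the chosen operators must be available in the local ADIOS build):

    #include <openPMD/openPMD.hpp>

    int main()
    {
        openPMD::Series series(
            "../samples/datasetOptions.bp",
            openPMD::Access::CREATE,
            R"({"backend": "adios2"})" );
        auto E_x = series.iterations[ 0 ].meshes[ "E" ][ "x" ];

        openPMD::Dataset d{ openPMD::Datatype::DOUBLE, { 2, 5 } };
        // Replaces the removed setCompression()/setCustomTransform()/
        // setChunkSize() calls with backend-specific JSON:
        d.options = R"(
        {
            "adios1": {
                "dataset": {
                    "transform": "blosc:compressor=zlib,shuffle=bit,lvl=1;nometa"
                }
            },
            "adios2": {
                "dataset": {
                    "operators": [
                        { "type": "zlib", "parameters": { "clevel": 9 } }
                    ]
                }
            }
        })";
        E_x.resetDataset( d );
        series.flush();
    }
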