diff --git a/.github/workflows/linux.yml b/.github/workflows/linux.yml index d90fc6c863..adcc7d5535 100644 --- a/.github/workflows/linux.yml +++ b/.github/workflows/linux.yml @@ -257,7 +257,7 @@ jobs: - name: Install run: | sudo apt-get update - sudo apt-get install g++ libopenmpi-dev libhdf5-openmpi-dev python3 python3-numpy python3-mpi4py python3-pandas + sudo apt-get install g++ libopenmpi-dev libhdf5-openmpi-dev python3 python3-numpy python3-mpi4py python3-pandas python3-pip # TODO ADIOS2 - name: Build env: {CXXFLAGS: -Werror, PKG_CONFIG_PATH: /usr/lib/x86_64-linux-gnu/pkgconfig} @@ -272,6 +272,22 @@ jobs: cd build ctest --output-on-failure + python3 -m pip install jsonschema + cd ../share/openPMD/json_schema + PATH="../../../build/bin:$PATH" make -j 2 + # We need to exclude the thetaMode example since that has a different + # meshesPath and the JSON schema needs to hardcode that. + find ../../../build/samples/ \ + ! -path '*thetaMode*' \ + ! -path '/*many_iterations/*' \ + ! -name 'profiling.json' \ + ! 
-name '*config.json' \ + -iname '*.json' \ + | while read i; do + echo "Checking $i" + ./check.py "$i" + done + musllinux_py10: runs-on: ubuntu-20.04 if: github.event.pull_request.draft == false diff --git a/CMakeLists.txt b/CMakeLists.txt index c17631c6cb..807dcb774d 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -747,11 +747,12 @@ set(openPMD_TEST_NAMES # command line tools set(openPMD_CLI_TOOL_NAMES ls + convert-json-toml ) set(openPMD_PYTHON_CLI_TOOL_NAMES pipe ) -set(openPMD_PYTHON_CLI_MODULE_NAMES ${openPMD_CLI_TOOL_NAMES}) +set(openPMD_PYTHON_CLI_MODULE_NAMES ls) # examples set(openPMD_EXAMPLE_NAMES 1_structure @@ -920,6 +921,9 @@ if(openPMD_BUILD_CLI_TOOLS) endif() target_link_libraries(openpmd-${toolname} PRIVATE openPMD) + target_include_directories(openpmd-${toolname} SYSTEM PRIVATE + $ + $) endforeach() endif() diff --git a/include/openPMD/auxiliary/JSON_internal.hpp b/include/openPMD/auxiliary/JSON_internal.hpp index dc72cffbdc..36774cebd8 100644 --- a/include/openPMD/auxiliary/JSON_internal.hpp +++ b/include/openPMD/auxiliary/JSON_internal.hpp @@ -190,16 +190,25 @@ namespace json * @param options as a parsed JSON object. * @param considerFiles If yes, check if `options` refers to a file and read * from there. + * @param convertLowercase If yes, lowercase conversion is applied + * recursively to keys and values, except for some hardcoded places + * that should be left untouched. */ - ParsedConfig parseOptions(std::string const &options, bool considerFiles); + ParsedConfig parseOptions( + std::string const &options, + bool considerFiles, + bool convertLowercase = true); #if openPMD_HAVE_MPI /** * Parallel version of parseOptions(). MPI-collective. 
     */
-    ParsedConfig
-    parseOptions(std::string const &options, MPI_Comm comm, bool considerFiles);
+    ParsedConfig parseOptions(
+        std::string const &options,
+        MPI_Comm comm,
+        bool considerFiles,
+        bool convertLowercase = true);
 #endif
diff --git a/share/openPMD/json_schema/Makefile b/share/openPMD/json_schema/Makefile
new file mode 100644
index 0000000000..dcbc1584d3
--- /dev/null
+++ b/share/openPMD/json_schema/Makefile
@@ -0,0 +1,13 @@
+convert := openpmd-convert-json-toml
+
+json_files = attribute_defs.json attributes.json dataset_defs.json iteration.json mesh.json mesh_record_component.json particle_patches.json particle_species.json patch_record.json record.json record_component.json series.json
+
+.PHONY: all
+all: $(json_files)
+
+$(json_files): %.json: %.toml
+	$(convert) @$^ > $@
+
+.PHONY: clean
+clean:
+	-rm $(json_files)
diff --git a/share/openPMD/json_schema/README.md b/share/openPMD/json_schema/README.md
new file mode 100644
index 0000000000..ae9c641e77
--- /dev/null
+++ b/share/openPMD/json_schema/README.md
@@ -0,0 +1,47 @@
+# JSON Validation
+
+This folder contains a JSON schema for validation of openPMD files written as `.json` files.
+
+## Usage
+
+### Generating the JSON schema
+
+For improved readability, maintainability and documentation purposes, the JSON schema is written in `.toml` format and needs to be "compiled" to `.json` files first before usage.
+To do this, the openPMD-api installs a tool named `openpmd-convert-json-toml` which can be used to convert between JSON and TOML files in both directions, e.g.:
+
+```bash
+openpmd-convert-json-toml @series.toml > series.json
+```
+
+A `Makefile` is provided in this folder to simplify the application of this conversion tool.
+
+### Verifying a file against the JSON schema
+
+In theory, the JSON schema should be applicable by any JSON validator.
This JSON schema is written in terms of multiple files however, and most validators require special care to properly set up the links between the single files. A Python script `check.py` is provided in this folder which sets up the [Python jsonschema](https://python-jsonschema.readthedocs.io) library and verifies a file against it, e.g.: + +```bash +./check.py path/to/my/dataset.json +``` + +For further usage notes check the documentation of the script itself `./check.py --help`. + +## Caveats + +The openPMD standard is not entirely expressible in terms of a JSON schema: + +* Many semantic dependencies, e.g. that the `position/x` and `position/y` vector of a particle species be of the same size, or that the `axisLabels` have the same dimensionality as the dataset itself, will go unchecked. +* The `meshesPath` is assumed to be `meshes/` and the `particlesPath` is assumed to be `particles/`. This dependency cannot be expressed. + +While a large part of the openPMD standard can indeed be verified by checking against a JSON schema, the standard is generally large enough to make this approach come to its limits. Verification of a JSON schema is similar to the use of a naive recursive-descent parser. Error messages will often be unexpectedly verbose and not very informative. +A challenge for the JSON validator are disjunctive statements such as "A Record is either a scalar Record Component or a vector of non-scalar Record Components". If there is even a tiny mistake somewhere down in the hierarchy, the entire disjunctive branch will fail evaluating. + +The layout of attributes is assumed to be that which is created by the JSON backend of the openPMD-api, e.g.: + +```json +"meshesPath": { + "datatype": "STRING", + "value": "meshes/" +} +``` + +Support for an abbreviated notation such as `"meshesPath": "meshes/"` is currently not (yet) available. 
diff --git a/share/openPMD/json_schema/attribute_defs.toml b/share/openPMD/json_schema/attribute_defs.toml new file mode 100644 index 0000000000..70f3b9ec16 --- /dev/null +++ b/share/openPMD/json_schema/attribute_defs.toml @@ -0,0 +1,236 @@ + +["$defs"] + +###################### +# Vectors of strings # +###################### + +[["$defs".vec_string_attribute.oneOf]] +title = "Shorthand notation" +anyOf = [ + { type = "string" }, + { type = "array", items = { "type" = "string" } }, +] + +[["$defs".vec_string_attribute.oneOf]] +title = "Long notation" +type = "object" +required = ["value", "datatype"] + +["$defs".vec_string_attribute.oneOf.properties] + +value.anyOf = [ + { type = "string" }, + { type = "array", items = { "type" = "string" } }, +] + +datatype.enum = [ + "STRING", + "CHAR", + "SCHAR", + "UCHAR", + "VEC_STRING", + "VEC_CHAR", + "VEC_SCHAR", + "VEC_UCHAR", +] + +################## +# Vectors of int # +################## + +[["$defs".vec_int_attribute.oneOf]] +title = "Shorthand notation" +anyOf = [ + { type = "integer" }, + { type = "array", items = { "type" = "integer" } }, +] + +[["$defs".vec_int_attribute.oneOf]] +title = "Long notation" +type = "object" +required = ["value", "datatype"] + +["$defs".vec_int_attribute.oneOf.properties] + +value.anyOf = [ + { type = "integer" }, + { type = "array", items = { "type" = "integer" } }, +] + +datatype.enum = [ + "SHORT", + "INT", + "LONG", + "LONGLONG", + "USHORT", + "UINT", + "ULONG", + "ULONGLONG", + "VEC_SHORT", + "VEC_INT", + "VEC_LONG", + "VEC_LONGLONG", + "VEC_USHORT", + "VEC_UINT", + "VEC_ULONG", + "VEC_ULONGLONG", +] + +#################### +# Vectors of float # +#################### + +[["$defs".vec_float_attribute.oneOf]] +title = "Shorthand notation" +anyOf = [ + { type = "number" }, + { type = "array", items = { "type" = "number" } }, +] + +[["$defs".vec_float_attribute.oneOf]] +title = "Long notation" +type = "object" +required = ["value", "datatype"] + 
+["$defs".vec_float_attribute.oneOf.properties] + +value.anyOf = [ + { type = "number" }, + { type = "array", items = { "type" = "number" } }, +] + +datatype.enum = [ + "CHAR", + "UCHAR", + "SCHAR", + "SHORT", + "INT", + "LONG", + "LONGLONG", + "USHORT", + "UINT", + "ULONG", + "ULONGLONG", + "FLOAT", + "DOUBLE", + "LONG_DOUBLE", + "CFLOAT", + "CDOUBLE", + "CLONG_DOUBLE", + "VEC_SHORT", + "VEC_INT", + "VEC_LONG", + "VEC_LONGLONG", + "VEC_USHORT", + "VEC_UINT", + "VEC_ULONG", + "VEC_ULONGLONG", + "VEC_FLOAT", + "VEC_DOUBLE", + "VEC_LONG_DOUBLE", + "VEC_CFLOAT", + "VEC_CDOUBLE", + "VEC_CLONG_DOUBLE", +] + +########################### +# Special case: # +# unitDimension attribute # +########################### + +[["$defs".unitDimension.oneOf]] +title = "Shorthand notation" +type = "array" +items.type = "number" + +[["$defs".unitDimension.oneOf]] +title = "Long notation" +type = "object" +required = ["value", "datatype"] + +["$defs".unitDimension.oneOf.properties] + +value = { type = "array", items = { type = "number" } } +datatype.const = "ARR_DBL_7" + +##################### +# string attributes # +##################### + +[["$defs".string_attribute.oneOf]] +title = "Shorthand notation" +type = "string" + +[["$defs".string_attribute.oneOf]] +title = "Long notation" +type = "object" +required = ["value", "datatype"] + +["$defs".string_attribute.oneOf.properties] + +value.type = "string" +datatype.enum = ["STRING", "CHAR", "SCHAR", "UCHAR"] + +################## +# int attributes # +################## + +[["$defs".int_attribute.oneOf]] +title = "Shorthand notation" +type = "integer" + +[["$defs".int_attribute.oneOf]] +title = "Long notation" +type = "object" +required = ["value", "datatype"] + +["$defs".int_attribute.oneOf.properties] + +value.type = "integer" +datatype.enum = [ + "SHORT", + "INT", + "LONG", + "LONGLONG", + "USHORT", + "UINT", + "ULONG", + "ULONGLONG", +] + +#################### +# float attributes # +#################### + 
+[["$defs".float_attribute.oneOf]] +title = "Shorthand notation" +type = "number" + +[["$defs".float_attribute.oneOf]] +title = "Long notation" +type = "object" +required = ["value", "datatype"] + +["$defs".float_attribute.oneOf.properties] + +value.type = "number" +datatype.enum = [ + "CHAR", + "UCHAR", + "SCHAR", + "SHORT", + "INT", + "LONG", + "LONGLONG", + "USHORT", + "UINT", + "ULONG", + "ULONGLONG", + "FLOAT", + "DOUBLE", + "LONG_DOUBLE", + "CFLOAT", + "CDOUBLE", + "CLONG_DOUBLE", +] diff --git a/share/openPMD/json_schema/attributes.toml b/share/openPMD/json_schema/attributes.toml new file mode 100644 index 0000000000..18cfc36101 --- /dev/null +++ b/share/openPMD/json_schema/attributes.toml @@ -0,0 +1,89 @@ +title = "Attribute layout" + +[[oneOf]] +type = "null" +title = "No attributes" + +[[oneOf]] +type = "object" +title = "Dictionary of attributes" +description = "Generic layout of an attributes object." + +[[oneOf.patternProperties.".*".oneOf]] +title = "A generic attribute - short form" +anyOf = [ + # Any primitive value + { not = { anyOf = [ + { type = "object", title = "An object" }, + { type = "array", title = "An array" }, + ] }, title = "No complex type" }, + # Or an array of any primitive value + { type = "array", items = { not = { anyOf = [ + { type = "object", title = "An object" }, + { type = "array", title = "An array" }, + ] } }, title = "An array of non-complex types" }, +] + +[[oneOf.patternProperties.".*".oneOf]] +title = "A generic attribute - long form" +type = "object" + +[oneOf.patternProperties.".*".oneOf.properties] + +value.anyOf = [ + # Any primitive value + { not = { anyOf = [ + { type = "object", title = "An object" }, + { type = "array", title = "An array" }, + ] }, title = "No complex type" }, + # Or an array of any primitive value + { type = "array", items = { not = { anyOf = [ + { type = "object", title = "An object" }, + { type = "array", title = "An array" }, + ] } }, title = "An array of non-complex types" }, +] + 
+datatype.type = "string" +datatype.enum = [ + "CHAR", + "UCHAR", + "SCHAR", + "SHORT", + "INT", + "LONG", + "LONGLONG", + "USHORT", + "UINT", + "ULONG", + "ULONGLONG", + "FLOAT", + "DOUBLE", + "LONG_DOUBLE", + "CFLOAT", + "CDOUBLE", + "CLONG_DOUBLE", + "STRING", + "VEC_CHAR", + "VEC_SHORT", + "VEC_INT", + "VEC_LONG", + "VEC_LONGLONG", + "VEC_UCHAR", + "VEC_USHORT", + "VEC_UINT", + "VEC_ULONG", + "VEC_ULONGLONG", + "VEC_FLOAT", + "VEC_DOUBLE", + "VEC_LONG_DOUBLE", + "VEC_CFLOAT", + "VEC_CDOUBLE", + "VEC_CLONG_DOUBLE", + "VEC_SCHAR", + "VEC_STRING", + "ARR_DBL_7", + "BOOL", +] + +[oneOf.propertyNames] +pattern = "^\\w*$" diff --git a/share/openPMD/json_schema/check.py b/share/openPMD/json_schema/check.py new file mode 100755 index 0000000000..07f76b6ce0 --- /dev/null +++ b/share/openPMD/json_schema/check.py @@ -0,0 +1,70 @@ +#!/usr/bin/env python + +import argparse +import json +import os +from pathlib import Path +import sys + +import jsonschema.validators + + +def parse_args(program_name): + script_path = Path(os.path.dirname(os.path.realpath(sys.argv[0]))) + parser = argparse.ArgumentParser( + # we need this for line breaks + formatter_class=argparse.RawDescriptionHelpFormatter, + description=""" +Check JSON files against the openPMD JSON schema. + +This tool validates an openPMD-formatted JSON file against the openPMD JSON +schema, using the jsonschema Python library as a backend. +Please use this script instead of the jsonschema directly since the openPMD +schema consists of several JSON files and this script ensures that +cross-referencing is set up correctly. + +Note that the JSON schema is shipped in form of .toml files for ease +of reading, maintenance and documentation. +In order to perform a check, the .toml files need to be converted to .json +first. +The openPMD-api install a tool openpmd-convert-json-toml for this purpose. 
+Additionally, there is a Makefile shipped in the same folder as this Python +script which can be directly applied to generate the JSON schema. + + +Examples: + {0} --help + {0} --schema_root={1} +""".format(os.path.basename(program_name), script_path / "series.json")) + + parser.add_argument( + '--schema_root', + default=script_path / 'series.json', + help="""\ +The .json file describing the root file of the schema to validate against. +""" + ) + parser.add_argument('openpmd_file', + metavar='file', + nargs=1, + help="The file which to validate.") + + return parser.parse_args() + + +args = parse_args(sys.argv[0]) + +path = Path(os.path.dirname(os.path.realpath(args.schema_root))) +resolver = jsonschema.validators.RefResolver( + base_uri=f"{path.as_uri()}/", + referrer=True, +) + +with open(args.openpmd_file[0], "r") as instance: + jsonschema.validate( + instance=json.load(instance), + schema={"$ref": "./series.json"}, + resolver=resolver, + ) + print("File {} was validated successfully against schema {}.".format( + instance.name, args.schema_root)) diff --git a/share/openPMD/json_schema/dataset_defs.toml b/share/openPMD/json_schema/dataset_defs.toml new file mode 100644 index 0000000000..32ac56a63c --- /dev/null +++ b/share/openPMD/json_schema/dataset_defs.toml @@ -0,0 +1,131 @@ +["$defs"] + +###################################### +# n-dimensional datasets of any type # +###################################### + +[["$defs".any_type_recursive_array.anyOf]] +title = "A numeric type" +type = "array" +items.anyOf = [{ "type" = "number" }, { "type" = "null" }] + +[["$defs".any_type_recursive_array.anyOf]] +title = "A recursive array of numeric types" +type = "array" +items."$ref" = "#/$defs/any_type_recursive_array" + + +["$defs".any_type_dataset_properties.properties] +datatype.enum = [ + "CHAR", + "UCHAR", + "SCHAR", + "SHORT", + "INT", + "LONG", + "LONGLONG", + "USHORT", + "UINT", + "ULONG", + "ULONGLONG", + "FLOAT", + "DOUBLE", + "LONG_DOUBLE", + "CFLOAT", + 
"CDOUBLE", + "CLONG_DOUBLE", + "BOOL", +] +data."$ref" = "#/$defs/any_type_recursive_array" +extent = { type = "array", items = { type = "integer" } } + + +################################################### +# Either `datatype` and `data` must be defined... # +################################################### + +[["$defs".any_type_dataset.anyOf]] +title = "A dataset of any numeric type" +allOf = [ + { required = [ + "datatype", + "data", + ], title = "Required properties" }, + { "$ref" = "#/$defs/any_type_dataset_properties", title = "Property definitions" }, +] + +######################################################### +# ...or `datatype` and `extent` must be (template form) # +######################################################### + +[["$defs".any_type_dataset.anyOf]] +title = "A template dataset of any numeric type" +allOf = [ + { required = [ + "datatype", + "extent", + ], title = "Required properties" }, + { "$ref" = "#/$defs/any_type_dataset_properties", title = "Property definitions" }, +] + +###################################### +# n-dimensional datasets of int type # +###################################### + +[["$defs".int_type_recursive_array.anyOf]] +title = "An integer type" +type = "array" +items.anyOf = [{ "type" = "integer" }, { "type" = "null" }] + +[["$defs".int_type_recursive_array.anyOf]] +title = "A recursive array of integer types" +type = "array" +items."$ref" = "#/$defs/int_type_recursive_array" + + +["$defs".int_type_dataset_properties.properties] +datatype.enum = [ + "CHAR", + "UCHAR", + "SCHAR", + "SHORT", + "INT", + "LONG", + "LONGLONG", + "USHORT", + "UINT", + "ULONG", + "ULONGLONG", + "BOOL", +] +data."$ref" = "#/$defs/int_type_recursive_array" +extent = { type = "array", items = { type = "integer" } } + + +################################################### +# Either `datatype` and `data` must be defined... 
# +################################################### + +[["$defs".int_type_dataset.anyOf]] +title = "A dataset of integer type" +allOf = [ + { required = [ + "datatype", + "data", + ], title = "Required properties" }, + { "$ref" = "#/$defs/int_type_dataset_properties", title = "Property definitions" }, +] + +######################################################### +# ...or `datatype` and `extent` must be (template form) # +######################################################### + +[["$defs".int_type_dataset.anyOf]] +title = "A dataset template of integer type" +allOf = [ + { required = [ + "datatype", + "extent", + ], title = "Required properties" }, + { "$ref" = "#/$defs/int_type_dataset_properties", title = "Property definitions" }, +] diff --git a/share/openPMD/json_schema/iteration.toml b/share/openPMD/json_schema/iteration.toml new file mode 100644 index 0000000000..31a6c74417 --- /dev/null +++ b/share/openPMD/json_schema/iteration.toml @@ -0,0 +1,77 @@ +type = "object" +required = ["attributes"] +title = "Iteration" +description = "One iteration/snapshot." + +[properties] + +################# +# Particle data # +################# + +[properties.particles] +type = "object" +title = "Particles" +description = "Dict of particle species types." + +[properties.particles.properties.attributes] +title = "Attribute layout" +description = "Custom attributes allowed, no required attributes defined." +"$ref" = "attributes.json" + +##################################### +# Particle data -> Particle Species # +##################################### + +[properties.particles.patternProperties."^(?!attributes).*"] +title = "Particle Species" +"$ref" = "particle_species.json" + +############# +# Mesh data # +############# + +[properties.meshes] +type = "object" +title = "Meshes" +description = "Dict of meshes." + +[properties.meshes.properties.attributes] +title = "Attribute layout" +description = "Custom attributes allowed, no required attributes defined." 
+"$ref" = "attributes.json" + + +########################### +# Mesh data -> Mesh types # +########################### + +[properties.meshes.patternProperties."^(?!attributes).*"] +title = "Mesh" +"$ref" = "mesh.json" + +######################## +# Iteration attributes # +######################## + + +[properties.attributes] +title = "Attributes" + +# First requirement: standard-defined attributes + +[[properties.attributes.allOf]] +required = ["dt", "time", "timeUnitSI"] +title = "Iteration attributes" +description = "Standard-defined attributes at the Iteration level." + +[properties.attributes.allOf.properties] +timeUnitSI."$ref" = "attribute_defs.json#/$defs/float_attribute" +time."$ref" = "attribute_defs.json#/$defs/float_attribute" +dt."$ref" = "attribute_defs.json#/$defs/float_attribute" + +# Second condition: General layout of attributes + +[[properties.attributes.allOf]] +title = "Attribute layout" +"$ref" = "attributes.json" diff --git a/share/openPMD/json_schema/mesh.toml b/share/openPMD/json_schema/mesh.toml new file mode 100644 index 0000000000..f7c82cec9a --- /dev/null +++ b/share/openPMD/json_schema/mesh.toml @@ -0,0 +1,85 @@ +######################################################### +# Requirement 1: Mesh-specific structure and attributes # +######################################################### + +[[allOf]] + +type = "object" +required = ["attributes"] +title = "Mesh" +description = "A mesh/grid of cells." + +[allOf.properties.attributes] +title = "Attributes" + +#################################### +# Requirement 1.1: Mesh attributes # +#################################### + +[[allOf.properties.attributes.allOf]] + +title = "Mesh attributes" +description = "Standard-defined attributes at the Mesh level." 
+required = [ + "axisLabels", + "geometry", + "gridGlobalOffset", + "gridSpacing", + "gridUnitSI", + "timeOffset", + "unitDimension", +] + +[allOf.properties.attributes.allOf.properties] + +gridUnitSI."$ref" = "attribute_defs.json#/$defs/float_attribute" +gridSpacing."$ref" = "attribute_defs.json#/$defs/vec_float_attribute" +gridGlobalOffset."$ref" = "attribute_defs.json#/$defs/vec_float_attribute" +timeOffset."$ref" = "attribute_defs.json#/$defs/float_attribute" +geometryParameters."$ref" = "attribute_defs.json#/$defs/string_attribute" +unitDimension."$ref" = "attribute_defs.json#/$defs/unitDimension" +geometry."$ref" = "attribute_defs.json#/$defs/string_attribute" +dataOrder."$ref" = "attribute_defs.json#/$defs/string_attribute" +axisLabels."$ref" = "attribute_defs.json#/$defs/vec_string_attribute" + +################################################ +# Requirement 1.2: Generic attribute structure # +################################################ + +[[allOf.properties.attributes.allOf]] +title = "Attribute layout" +"$ref" = "attributes.json" + +###################################### +# Requirement 2: Contains components # +###################################### + +[[allOf]] + +title = "Contains components" + +#################################################### +# Requirement 2.1: Either this is a scalar mesh... # +#################################################### + +[[allOf.oneOf]] +title = "Scalar mesh component" +"$ref" = "mesh_record_component.json" + +################################################# +# Requirement 2.2: ... or it's a vector mesh. # +# Note that exactly one of these two conditions # +# must be true, not both at once (oneOf). # +################################################# + +[[allOf.oneOf]] +title = "Vector component" +description = "Additionally to the attributes, at least one component must be contained" +# The attributes are contained in this dict, and at least one further +# non-scalar component. 
Hence, we require at least two entries. +minProperties = 2 +propertyNames.pattern = "^\\w*$" + +[allOf.oneOf.patternProperties."^(?!attributes).*"] +title = "Vector mesh component" +"$ref" = "mesh_record_component.json" diff --git a/share/openPMD/json_schema/mesh_record_component.toml b/share/openPMD/json_schema/mesh_record_component.toml new file mode 100644 index 0000000000..6622ea2335 --- /dev/null +++ b/share/openPMD/json_schema/mesh_record_component.toml @@ -0,0 +1,27 @@ +title = "Mesh Record Component" + +############################################# +# Requirement 1: This is a record component # +############################################# + +[[allOf]] +title = "Record Component" +"$ref" = "record_component.json" + +################################################## +# Requirement 2: Mesh Record Component Specifics # +################################################## + +[[allOf]] +title = "Mesh Record Component" +description = "Single component in a mesh record." +type = "object" +required = ["attributes"] + +[allOf.properties.attributes] +required = ["position"] +title = "Record Component attributes" +description = "Standard-defined attributes at the Record Component level." + +[allOf.properties.attributes.properties] +position."$ref" = "attribute_defs.json#/$defs/vec_float_attribute" diff --git a/share/openPMD/json_schema/particle_patches.toml b/share/openPMD/json_schema/particle_patches.toml new file mode 100644 index 0000000000..0144acb5bd --- /dev/null +++ b/share/openPMD/json_schema/particle_patches.toml @@ -0,0 +1,48 @@ +type = "object" +title = "Particle Patches" +description = "Recommended group for post-processing. It logically orders the 1D arrays of attributes into local patches of particles that can be read and processed in parallel." 
+
+required = ["numParticles", "numParticlesOffset", "offset", "extent"]
+
+################################
+# General layout of attributes #
+################################
+
+[properties.attributes]
+title = "Attribute layout"
+description = "Custom attributes allowed, no required attributes defined."
+"$ref" = "attributes.json"
+
+#######################################################
+# Definition of particle patches via                  #
+# numParticles, numParticlesOffset, offset and extent #
+#######################################################
+
+[properties.numParticles]
+title = "numParticles"
+description = "number of particles in this patch"
+allOf = [
+    { "$ref" = "record_component.json" },
+    { "$ref" = "dataset_defs.json#/$defs/int_type_dataset" },
+]
+
+
+[properties.numParticlesOffset]
+title = "numParticlesOffset"
+description = "offset within the one-dimensional records of the particle species where the first particle in this patch is stored"
+allOf = [
+    { "$ref" = "record_component.json" },
+    { "$ref" = "dataset_defs.json#/$defs/int_type_dataset" },
+]
+
+
+[properties.offset]
+title = "Offset"
+description = "absolute position (position + positionOffset as defined above) where the particle patch begins: defines the (inclusive) lower bound with positions that are associated with the patch; the same requirements as for regular record components apply"
+"$ref" = "patch_record.json"
+
+
+[properties.extent]
+title = "Extent"
+description = "extent of the particle patch; the offset + extent must be larger than the maximum absolute position of particles in the patch as the exact upper bound of position offset + extent is excluded from the patch; the same requirements as for regular record components apply"
+"$ref" = "patch_record.json"
diff --git a/share/openPMD/json_schema/particle_species.toml b/share/openPMD/json_schema/particle_species.toml
new file mode 100644
index 0000000000..43dc33444f
--- /dev/null
+++ b/share/openPMD/json_schema/particle_species.toml
@@
-0,0 +1,40 @@ +type = "object" +title = "Particle Species" +description = "Dict of particle quantities." +propertyNames.pattern = "^\\w*$" + +# A particle species requires at least a "position" record +required = ["position"] + +[properties.attributes] + +################################################## +# First requirement: standard-defined attributes # +################################################## + +[[properties.attributes.allOf]] +title = "Particle Species attributes" +description = "Standard-defined attributes at the Particle Species level." +# No required attributes +required = [] + +[properties.attributes.allOf.properties] +id."$ref" = "attribute_defs.json#/$defs/int_attribute" + +#################################################### +# Second requirement: General layout of attributes # +#################################################### + +[[properties.attributes.allOf]] +title = "Attribute layout" +description = "Custom attributes allowed, no required attributes defined." +"$ref" = "attributes.json" + +[properties.particlePatches] +title = "Particle Patches" +"$ref" = "particle_patches.json" + + +[patternProperties."^(?!(attributes|particlePatches)).*"] +title = "Record" +"$ref" = "record.json" diff --git a/share/openPMD/json_schema/patch_record.toml b/share/openPMD/json_schema/patch_record.toml new file mode 100644 index 0000000000..1494410c57 --- /dev/null +++ b/share/openPMD/json_schema/patch_record.toml @@ -0,0 +1,37 @@ +######################################################### +# Requirement 1: Mesh-specific structure and attributes # +######################################################### + +[[allOf]] + +type = "object" +required = ["attributes"] +title = "Record" +description = "A list of particle quantities." 
+ +############################################## +# Requirement 1: Generic attribute structure # +############################################## + + +[allOf.properties.attributes] +title = "Attribute layout" +"$ref" = "attributes.json" + +####################################################### +# Requirement 2: Contains components of a vector mesh # +####################################################### + +[[allOf]] +title = "Contains vector components" +description = "Additionally to the attributes, at least one component must be contained" +# The attributes are contained in this dict, and at least one further +# non-scalar component. Hence, we require at least two entries. +minProperties = 2 +propertyNames.pattern = "^\\w*$" + +[allOf.patternProperties] + +[allOf.patternProperties."^(?!attributes).*"] +title = "Scalar component" +"$ref" = "record_component.json" diff --git a/share/openPMD/json_schema/record.toml b/share/openPMD/json_schema/record.toml new file mode 100644 index 0000000000..60209fd5a2 --- /dev/null +++ b/share/openPMD/json_schema/record.toml @@ -0,0 +1,72 @@ +######################################################### +# Requirement 1: Mesh-specific structure and attributes # +######################################################### + +[[allOf]] + +type = "object" +required = ["attributes"] +title = "Record" +description = "A list of particle quantities." + +[allOf.properties.attributes] +title = "Attributes" + +#################################### +# Requirement 1.1: Mesh attributes # +#################################### + +[[allOf.properties.attributes.allOf]] + +title = "Particle Record attributes" +description = "Standard-defined attributes at the Particle Record level." 
+required = ["timeOffset", "unitDimension"] + +[allOf.properties.attributes.allOf.properties] + +timeOffset."$ref" = "attribute_defs.json#/$defs/float_attribute" +unitDimension."$ref" = "attribute_defs.json#/$defs/unitDimension" +dataOrder."$ref" = "attribute_defs.json#/$defs/string_attribute" + +################################################ +# Requirement 1.2: Generic attribute structure # +################################################ + +[[allOf.properties.attributes.allOf]] +title = "Attribute layout" +"$ref" = "attributes.json" + +###################################### +# Requirement 2: Contains components # +###################################### + +[[allOf]] +title = "Contains components" + +#################################################### +# Requirement 2.1: Either this is a scalar mesh... # +#################################################### + +[[allOf.oneOf]] +title = "Scalar component" +"$ref" = "record_component.json" + +################################################# +# Requirement 2.2: ... or it's a vector mesh. # +# Note that exactly one of these two conditions # +# must be true, not both at once (oneOf). # +################################################# + +[[allOf.oneOf]] +title = "Vector component" +description = "Additionally to the attributes, at least one component must be contained" +# The attributes are contained in this dict, and at least one further +# non-scalar component. Hence, we require at least two entries. 
+minProperties = 2 +propertyNames.pattern = "^\\w*$" + +[allOf.oneOf.patternProperties] + +[allOf.oneOf.patternProperties."^(?!attributes).*"] +title = "Scalar component" +"$ref" = "record_component.json" diff --git a/share/openPMD/json_schema/record_component.toml b/share/openPMD/json_schema/record_component.toml new file mode 100644 index 0000000000..b280db94b5 --- /dev/null +++ b/share/openPMD/json_schema/record_component.toml @@ -0,0 +1,65 @@ +############################################## +# Requirement 1: Record Component attributes # +############################################## + +[[allOf]] +title = "Record Component" +description = "Single component in a record." +type = "object" +required = ["attributes"] + +[allOf.properties.attributes] +title = "Attributes" + +################################################ +# Requirement 1.1: Standard-defined attributes # +################################################ + +[[allOf.properties.attributes.allOf]] +title = "Record Component attributes" +description = "Standard-defined attributes at the Record Component level." +required = ["unitSI"] + +[allOf.properties.attributes.allOf.properties] +unitSI."$ref" = "attribute_defs.json#/$defs/float_attribute" + +############################################# +# Requirement 1.2: Generic attribute layout # +############################################# + +[[allOf.properties.attributes.allOf]] +title = "Attribute layout" +"$ref" = "attributes.json" + +########################################### +# Requirement 2: Either array or constant # +########################################### + +[[allOf]] +title = "Either array or constant" + +##################### +# Option 2.1: Array # +##################### + +[[allOf.oneOf]] +description = "An n-dimensional dataset containing the payload." 
+title = "Array dataset" + +"$ref" = "dataset_defs.json#/$defs/any_type_dataset" + +######################## +# Option 2.2: Constant # +######################## + +[[allOf.oneOf]] +title = "Constant dataset" +description = "A dataset represented by two attributes: The constant value and its shape." +required = ["attributes"] + +[allOf.oneOf.properties.attributes] +required = ["shape", "value"] + +[allOf.oneOf.properties.attributes.properties] +value."$ref" = "attribute_defs.json#/$defs/float_attribute" +shape."$ref" = "attribute_defs.json#/$defs/vec_int_attribute" diff --git a/share/openPMD/json_schema/series.toml b/share/openPMD/json_schema/series.toml new file mode 100644 index 0000000000..ea5b4b1218 --- /dev/null +++ b/share/openPMD/json_schema/series.toml @@ -0,0 +1,122 @@ +type = "object" +required = ["attributes", "data"] +title = "Series" +description = "The root group in the hierarchical openPMD standard." + +######################################################### +# Requirement 1: Basic layout of the root path (Series) # +######################################################### + +[[allOf]] +title = "Basic Series layout" + +##################### +# Series attributes # +##################### + +[allOf.properties.attributes] +title = "Attributes" + +################################################ +# Requirement 1.1: standard-defined attributes # +################################################ + +[[allOf.properties.attributes.allOf]] +required = [ + "openPMD", + "openPMDextension", + "basePath", + "iterationEncoding", + "iterationFormat", +] +title = "Series attributes" +description = "Standard-defined attributes at the Series level." 
+ +[allOf.properties.attributes.allOf.properties] + +author."$ref" = "attribute_defs.json#/$defs/string_attribute" +comment."$ref" = "attribute_defs.json#/$defs/string_attribute" +date."$ref" = "attribute_defs.json#/$defs/string_attribute" +openPMD."$ref" = "attribute_defs.json#/$defs/string_attribute" +iterationEncoding."$ref" = "attribute_defs.json#/$defs/string_attribute" +softwareVersion."$ref" = "attribute_defs.json#/$defs/string_attribute" +basePath."$ref" = "attribute_defs.json#/$defs/string_attribute" +iterationFormat."$ref" = "attribute_defs.json#/$defs/string_attribute" +openPMDextension."$ref" = "attribute_defs.json#/$defs/int_attribute" +software."$ref" = "attribute_defs.json#/$defs/string_attribute" +machine."$ref" = "attribute_defs.json#/$defs/string_attribute" +softwareDependencies."$ref" = "attribute_defs.json#/$defs/string_attribute" + +meshesPath.description = "Note that the meshesPath is hardcoded as its semantics are impossible to model in a JSON schema." +meshesPath.oneOf = [ + { const = { value = "meshes/", datatype = "STRING" } }, + { const = "meshes/" }, +] + +particlesPath.description = "Note that the particlesPath is hardcoded as its semantics are impossible to model in a JSON schema." 
+particlesPath.oneOf = [ + { const = { value = "particles/", datatype = "STRING" } }, + { const = "particles/" }, +] + +################################################# +# Requirement 1.2: General layout of attributes # +################################################# + +[[allOf.properties.attributes.allOf]] +title = "Attribute layout" +"$ref" = "attributes.json" + +################################################# +# Requirement 2: The Series contains iterations # +################################################# + +[[allOf]] +title = "Contains iterations" + +######################################################################### +# Requirement 2.1: Either a single iteration in variable-based encoding # +######################################################################### + +[[allOf.oneOf]] +title = "Variable-based encoding" +properties.attributes.properties.iterationEncoding.properties.value = { const = "variableBased" } + +[allOf.oneOf.properties.data] +type = "object" +title = "An iteration" +description = "A single iteration." +"$ref" = "iteration.json" + +############################################################################ +# Requirement 2.2: Or multiple iterations in group- or file-based encoding # +############################################################################ + +[[allOf.oneOf]] +title = "Group-based (or file-based) encoding" +properties.attributes.properties.iterationEncoding.properties.value = { oneOf = [ + { const = "groupBased" }, + { const = "fileBased" }, +] } + +# Base Path + +[allOf.oneOf.properties.data] +type = "object" +title = "Base path" +description = "A map of all iterations/snapshots in the Series." + +propertyNames.pattern = "^(-?[0-9]*|attributes)$" + +[allOf.oneOf.properties.data.properties] + +[allOf.oneOf.properties.data.properties.attributes] +title = "Attribute layout" +description = "Custom attributes allowed, no required attributes defined." 
+"$ref" = "attributes.json" + +# Base Path -> Iterations + +[allOf.oneOf.properties.data.patternProperties."^-?[0-9]*$"] +title = "Iteration" +"$ref" = "iteration.json" diff --git a/src/Series.cpp b/src/Series.cpp index d587575b44..57e8940de1 100644 --- a/src/Series.cpp +++ b/src/Series.cpp @@ -1292,8 +1292,13 @@ void Series::flushFileBased( bool flushIOHandler) { auto &series = get(); - if (end == begin) - throw std::runtime_error( + if (end == begin && + /* + * At parsing time, this might happen since iterations might contain + * errors and be deleted. + */ + IOHandler()->m_seriesStatus != internal::SeriesStatus::Parsing) + throw error::WrongAPIUsage( "fileBased output can not be written with no iterations."); switch (IOHandler()->m_frontendAccess) @@ -1410,8 +1415,30 @@ void Series::flushGorVBased( internal::FlushParams const &flushParams, bool flushIOHandler) { - auto &series = get(); + if (iterationEncoding() == IterationEncoding::variableBased && + /* + * At parsing time, this might happen since iterations might contain + * errors and be deleted. + */ + IOHandler()->m_seriesStatus != internal::SeriesStatus::Parsing && + iterations.empty()) + { + /* + * Note: Unlike flushFileBased, it's ok if `begin == end` since this + * method may be called without an explicit iteration. + * But since in variable-based encoding the base path is the same as the + * path to the (currently active) iteration, there must be at least one + * iteration present since the openPMD standard requires mandatory + * attributes. + * In group-based encoding, any number of iterations might be included + * in the base path, in variable-based encoding there must be exactly + * one iteration currently active. 
+ */ + throw error::WrongAPIUsage( + "variableBased output can not be written with no iterations."); + } + auto &series = get(); if (access::readOnly(IOHandler()->m_frontendAccess)) { for (auto it = begin; it != end; ++it) diff --git a/src/auxiliary/JSON.cpp b/src/auxiliary/JSON.cpp index 7c96221026..7401fcba94 100644 --- a/src/auxiliary/JSON.cpp +++ b/src/auxiliary/JSON.cpp @@ -289,7 +289,8 @@ toml::value jsonToToml(nlohmann::json const &val) namespace { - ParsedConfig parseInlineOptions(std::string const &options) + ParsedConfig + parseInlineOptions(std::string const &options, bool convertLowercase) { // speed up default options ParsedConfig res; @@ -325,12 +326,16 @@ namespace res.config = json::tomlToJson(tomlVal); res.originallySpecifiedAs = SupportedLanguages::TOML; } - lowerCase(res.config); + if (convertLowercase) + { + lowerCase(res.config); + } return res; } } // namespace -ParsedConfig parseOptions(std::string const &options, bool considerFiles) +ParsedConfig parseOptions( + std::string const &options, bool considerFiles, bool convertLowercase) { if (considerFiles) { @@ -340,6 +345,12 @@ ParsedConfig parseOptions(std::string const &options, bool considerFiles) std::fstream handle; handle.open( filename.value(), std::ios_base::binary | std::ios_base::in); + if (!handle.good()) + { + throw std::runtime_error( + "Failed opening '" + filename.value() + + "': " + strerror(errno)); + } ParsedConfig res; if (auxiliary::ends_with(filename.value(), ".toml")) { @@ -359,16 +370,22 @@ ParsedConfig parseOptions(std::string const &options, bool considerFiles) "Failed reading JSON config from file " + filename.value() + "."); } - lowerCase(res.config); + if (convertLowercase) + { + lowerCase(res.config); + } return res; } } - return parseInlineOptions(options); + return parseInlineOptions(options, convertLowercase); } #if openPMD_HAVE_MPI -ParsedConfig -parseOptions(std::string const &options, MPI_Comm comm, bool considerFiles) +ParsedConfig parseOptions( + std::string 
const &options,
+    MPI_Comm comm,
+    bool considerFiles,
+    bool convertLowercase)
 {
     if (considerFiles)
     {
@@ -392,11 +409,14 @@ parseOptions(std::string const &options, MPI_Comm comm, bool considerFiles)
                 res.config = nlohmann::json::parse(fileContent);
                 res.originallySpecifiedAs = SupportedLanguages::JSON;
             }
-            lowerCase(res.config);
+            if (convertLowercase)
+            {
+                lowerCase(res.config);
+            }
             return res;
         }
     }
-    return parseInlineOptions(options);
+    return parseInlineOptions(options, convertLowercase);
 }
 #endif
diff --git a/src/cli/convert-json-toml.cpp b/src/cli/convert-json-toml.cpp
new file mode 100644
index 0000000000..f13317947d
--- /dev/null
+++ b/src/cli/convert-json-toml.cpp
@@ -0,0 +1,72 @@
+#include <openPMD/auxiliary/JSON.hpp>
+#include <openPMD/auxiliary/JSON_internal.hpp>
+#include <openPMD/auxiliary/Variant.hpp>
+
+#include <cstring>
+#include <iostream>
+#include <string>
+
+namespace json = openPMD::json;
+
+void parsed_main(std::string jsonOrToml)
+{
+    auto [config, originallySpecifiedAs] = json::parseOptions(
+        jsonOrToml, /* considerFiles = */ true, /* convertLowercase = */ false);
+    {
+        [[maybe_unused]] auto _ = std::move(jsonOrToml);
+    }
+    switch (originallySpecifiedAs)
+    {
+        using SL = json::SupportedLanguages;
+    case SL::JSON: {
+        auto asToml = json::jsonToToml(config);
+        std::cout << json::format_toml(asToml);
+    }
+    break;
+    case SL::TOML:
+        std::cout << config << '\n';
+        break;
+    }
+}
+
+int main(int argc, char const **argv)
+{
+    std::string jsonOrToml;
+    switch (argc)
+    {
+    case 0:
+    case 1:
+        // Just read the whole stream into memory
+        // Not very elegant, but we'll hold the entire JSON/TOML dataset
+        // in memory at some point anyway, so it doesn't really matter
+        {
+            std::stringbuf readEverything;
+            std::cin >> &readEverything;
+            jsonOrToml = readEverything.str();
+        }
+        break;
+    case 2:
+        if (strcmp(argv[1], "--help") == 0 || strcmp(argv[1], "-h") == 0)
+        {
+            std::cout << "Usage: " << std::string(argv[0]) << R"( [json_or_toml]
+'json_or_toml' can be a JSON or TOML dataset specified inline or a reference
+to a file prepended by an '@'.
+Inline datasets will be interpreted as JSON if they start with an '{', as TOML +otherwise. Datasets from a file will be interpreted as JSON or TOML depending +on the file ending '.json' or '.toml' respectively. +Inline dataset specifications can be replaced by input read from stdin. + +If the input is JSON, then it will be converted to TOML and written to stdout, +equivalently from TOML to JSON. +)"; + exit(0); + } + jsonOrToml = argv[1]; + break; + default: + throw std::runtime_error( + std::string("Usage: ") + argv[0] + + " [file location or inline JSON/TOML]"); + } + parsed_main(std::move(jsonOrToml)); +} diff --git a/test/CoreTest.cpp b/test/CoreTest.cpp index 17739e0b28..ddb1bdb1bb 100644 --- a/test/CoreTest.cpp +++ b/test/CoreTest.cpp @@ -202,7 +202,8 @@ TEST_CASE("myPath", "[core]") REQUIRE( pathOf(scalarMesh) == vec_t{"iterations", "1234", "meshes", "e_chargeDensity"}); - auto scalarMeshComponent = scalarMesh[RecordComponent::SCALAR]; + auto scalarMeshComponent = scalarMesh[RecordComponent::SCALAR].resetDataset( + {Datatype::FLOAT, {10}}); REQUIRE( pathOf(scalarMeshComponent) == vec_t{"iterations", "1234", "meshes", "e_chargeDensity"}); @@ -210,7 +211,8 @@ TEST_CASE("myPath", "[core]") auto vectorMesh = iteration.meshes["E"]; REQUIRE(pathOf(vectorMesh) == vec_t{"iterations", "1234", "meshes", "E"}); - auto vectorMeshComponent = vectorMesh["x"]; + auto vectorMeshComponent = + vectorMesh["x"].resetDataset({Datatype::FLOAT, {10}}); REQUIRE( pathOf(vectorMeshComponent) == vec_t{"iterations", "1234", "meshes", "E", "x"}); @@ -227,7 +229,8 @@ TEST_CASE("myPath", "[core]") pathOf(speciesPosition) == vec_t{"iterations", "1234", "particles", "e", "position"}); - auto speciesPositionX = speciesPosition["x"]; + auto speciesPositionX = + speciesPosition["x"].resetDataset({Datatype::FLOAT, {10}}); REQUIRE( pathOf(speciesPositionX) == vec_t{"iterations", "1234", "particles", "e", "position", "x"}); @@ -238,7 +241,9 @@ TEST_CASE("myPath", "[core]") 
pathOf(speciesWeighting) == vec_t{"iterations", "1234", "particles", "e", "weighting"}); - auto speciesWeightingX = speciesWeighting[RecordComponent::SCALAR]; + auto speciesWeightingX = + speciesWeighting[RecordComponent::SCALAR].resetDataset( + {Datatype::FLOAT, {10}}); REQUIRE( pathOf(speciesWeightingX) == vec_t{"iterations", "1234", "particles", "e", "weighting"}); @@ -259,7 +264,7 @@ TEST_CASE("myPath", "[core]") "particlePatches", "extent"}); - auto patchExtentX = patchExtent["x"]; + auto patchExtentX = patchExtent["x"].resetDataset({Datatype::INT, {10}}); REQUIRE( pathOf(patchExtentX) == vec_t{ @@ -283,7 +288,8 @@ TEST_CASE("myPath", "[core]") "numParticles"}); auto patchNumParticlesComponent = - patchNumParticles[RecordComponent::SCALAR]; + patchNumParticles[RecordComponent::SCALAR].resetDataset( + {Datatype::INT, {10}}); REQUIRE( pathOf(patchNumParticlesComponent) == vec_t{ @@ -293,6 +299,10 @@ TEST_CASE("myPath", "[core]") "e", "particlePatches", "numParticles"}); + + speciesE.particlePatches["offset"]["x"].resetDataset({Datatype::INT, {10}}); + speciesE.particlePatches["numParticlesOffset"][RecordComponent::SCALAR] + .resetDataset({Datatype::INT, {10}}); #endif } @@ -1096,6 +1106,7 @@ TEST_CASE("backend_via_json", "[core]") { Series series( "../samples/optionsViaJson", Access::CREATE, encodingVariableBased); + series.iterations[0]; // v-based encoding requires at least 1 iteration REQUIRE(series.backend() == "JSON"); REQUIRE(series.iterationEncoding() == IterationEncoding::variableBased); } @@ -1109,6 +1120,7 @@ TEST_CASE("backend_via_json", "[core]") "../samples/optionsViaJson.bp", Access::CREATE, encodingVariableBased); + series.iterations[0]; // v-based encoding requires at least 1 iteration REQUIRE(series.backend() == "JSON"); REQUIRE(series.iterationEncoding() == IterationEncoding::variableBased); } diff --git a/test/SerialIOTest.cpp b/test/SerialIOTest.cpp index 7323a32582..dc8ae6afce 100644 --- a/test/SerialIOTest.cpp +++ b/test/SerialIOTest.cpp @@ 
-2333,8 +2333,8 @@ inline void bool_test(const std::string &backend)
 {
     Series o = Series("../samples/serial_bool." + backend, Access::CREATE);
 
-    o.setAttribute("Bool attribute true", true);
-    o.setAttribute("Bool attribute false", false);
+    o.setAttribute("Bool_attribute_true", true);
+    o.setAttribute("Bool_attribute_false", false);
 }
 {
     Series o =
@@ -2342,12 +2342,12 @@ inline void bool_test(const std::string &backend)
     auto attrs = o.attributes();
 
     REQUIRE(
-        std::count(attrs.begin(), attrs.end(), "Bool attribute true") == 1);
+        std::count(attrs.begin(), attrs.end(), "Bool_attribute_true") == 1);
     REQUIRE(
-        std::count(attrs.begin(), attrs.end(), "Bool attribute false") ==
+        std::count(attrs.begin(), attrs.end(), "Bool_attribute_false") ==
         1);
-    REQUIRE(o.getAttribute("Bool attribute true").get<bool>() == true);
-    REQUIRE(o.getAttribute("Bool attribute false").get<bool>() == false);
+    REQUIRE(o.getAttribute("Bool_attribute_true").get<bool>() == true);
+    REQUIRE(o.getAttribute("Bool_attribute_false").get<bool>() == false);
 }
 {
     Series list{"../samples/serial_bool." + backend, Access::READ_ONLY};