Skip to content

Commit

Permalink
Add test for old HDF5-plugin written openPMD dataset from PIConGPU
Browse files Browse the repository at this point in the history
  • Loading branch information
franzpoeschel committed Jul 24, 2023
1 parent fa77a72 commit 4c24ef7
Showing 1 changed file with 92 additions and 0 deletions.
92 changes: 92 additions & 0 deletions test/SerialIOTest.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -2828,6 +2828,98 @@ TEST_CASE("git_hdf5_sample_structure_test", "[serial][hdf5]")
#endif
}

namespace
{
/**
 * Dispatch functor for switchNonVectorType():
 * loads a record component's data as the concrete type T and flushes.
 * The point of the call is that the load/flush succeeds at all on a
 * legacy dataset written directly via HDF5 (custom enum datatype);
 * the loaded values themselves are not inspected.
 */
struct LoadDataset
{
    template <typename T>
    static void call(RecordComponent &rc)
    {
        auto chunk = rc.loadChunk<T>();
        rc.seriesFlush();
        // chunk is intentionally only loaded, not inspected;
        // silence the unused-variable warning.
        (void)chunk;
    }

    // Used by switchNonVectorType() to build its error message when the
    // runtime datatype matches no supported type.
    static constexpr char const *errorMsg = "LoadDataset";
};
} // namespace

// Regression test for reading an openPMD dataset that was written by the
// old (pre-openPMD-api) HDF5 plugin of PIConGPU.
TEST_CASE("git_hdf5_legacy_picongpu", "[serial][hdf5]")
{
    try
    {
        Series o = Series(
            "../samples/git-sample/legacy/simData_%T.h5", Access::READ_ONLY);

        /*
         * That dataset was written directly via HDF5 (not the openPMD-api)
         * and had two issues:
         *
         * 1) No unitSI defined for numParticles and numParticlesOffset.
         *    unitSI does not really make sense there, but the openPMD-standard
         *    is not quite clear if it is required, so the API writes it and
         *    also required it. We will keep writing it, but we don't require
         *    it any longer.
         * 2) A custom enum was used for writing a boolean dataset.
         *    At the least, the dataset should be skipped in parsing instead
         *    of failing the entire procedure. Ideally, the custom datatype
         *    should be upcasted to char type and treated as such.
         */

        // Issue 2): the radiationMask dataset was written with a custom
        // HDF5 enum type; loading it must not fail.
        auto radiationMask =
            o.iterations[200]
                .particles["e"]["radiationMask"][RecordComponent::SCALAR];
        switchNonVectorType<LoadDataset>(
            radiationMask.getDatatype(), radiationMask);

        // Issue 1): particle patches must parse even though unitSI is
        // missing on numParticles/numParticlesOffset.
        auto particlePatches = o.iterations[200].particles["e"].particlePatches;
        REQUIRE(particlePatches.size() == 4);
        for (auto key : {"extent", "offset"})
        {
            REQUIRE(particlePatches.contains(key));
            REQUIRE(particlePatches.at(key).size() == 3);
            for (auto subkey : {"x", "y", "z"})
            {
                REQUIRE(particlePatches.at(key).contains(subkey));
                // unitSI is present in those records
                particlePatches.at(key).at(subkey).unitSI();
            }
        }
        for (auto key : {"numParticles", "numParticlesOffset"})
        {
            REQUIRE(particlePatches.contains(key));
            REQUIRE(particlePatches.at(key).contains(RecordComponent::SCALAR));
            // unitSI is not present in those records
            REQUIRE_THROWS_AS(
                particlePatches.at(key).at(RecordComponent::SCALAR).unitSI(),
                no_such_attribute_error);
        }

        helper::listSeries(o, true, std::cout);
    }
    catch (error::ReadError &e)
    {
        // The git sample files are an optional download; skip the test
        // gracefully when they are not present.
        if (e.reason == error::Reason::Inaccessible)
        {
            std::cerr << "git sample not accessible. (" << e.what() << ")\n";
            return;
        }
        throw;
    }
}

TEST_CASE("git_hdf5_sample_attribute_test", "[serial][hdf5]")
{
try
Expand Down

0 comments on commit 4c24ef7

Please sign in to comment.