Skip to content

Commit

Permalink
Merge pull request #38626 from rosswhitfield/remove_hdf5.h
Browse files Browse the repository at this point in the history
Modify code that uses hdf5.h directly (other than napi5.h) to use H5Cpp and H5Util
  • Loading branch information
peterfpeterson authored Jan 21, 2025
2 parents caf334b + 3359aa9 commit fb29b11
Show file tree
Hide file tree
Showing 19 changed files with 128 additions and 382 deletions.
12 changes: 6 additions & 6 deletions Framework/DataHandling/inc/MantidDataHandling/LoadSassena.h
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,7 @@
#include "MantidDataObjects/Workspace2D.h"
#include "MantidKernel/NexusDescriptor.h"

#include <hdf5.h>
#include <H5Cpp.h>

namespace Mantid {

Expand Down Expand Up @@ -62,17 +62,17 @@ class MANTID_DATAHANDLING_DLL LoadSassena : public API::IFileLoader<Kernel::Nexu
void registerWorkspace(const API::WorkspaceGroup_sptr &gws, const std::string &wsName,
const DataObjects::Workspace2D_sptr &ws, const std::string &description);
/// Read info about one HDF5 dataset, log if error
herr_t dataSetInfo(const hid_t &h5file, const std::string &setName, hsize_t *dims) const;
void dataSetInfo(const H5::H5File &h5file, const std::string &setName, hsize_t *dims) const;
/// Read dataset data to a buffer of type double
herr_t dataSetDouble(const hid_t &h5file, const std::string &setName, std::vector<double> &buf);
void dataSetDouble(const H5::H5File &h5file, const std::string &setName, std::vector<double> &buf);
/// Load qvectors dataset, calculate modulus of vectors
HistogramData::Points loadQvectors(const hid_t &h5file, const API::WorkspaceGroup_sptr &gws,
HistogramData::Points loadQvectors(const H5::H5File &h5file, const API::WorkspaceGroup_sptr &gws,
std::vector<int> &sorting_indexes);
/// Load structure factor as a function of q-vector modulus
void loadFQ(const hid_t &h5file, const API::WorkspaceGroup_sptr &gws, const std::string &setName,
void loadFQ(const H5::H5File &h5file, const API::WorkspaceGroup_sptr &gws, const std::string &setName,
const HistogramData::Points &qvmod, const std::vector<int> &sorting_indexes);
/// Load time-dependent structure factor
void loadFQT(const hid_t &h5file, const API::WorkspaceGroup_sptr &gws, const std::string &setName,
void loadFQT(const H5::H5File &h5file, const API::WorkspaceGroup_sptr &gws, const std::string &setName,
const HistogramData::Points &qvmod, const std::vector<int> &sorting_indexes);

private:
Expand Down
8 changes: 0 additions & 8 deletions Framework/DataHandling/src/LoadNXcanSAS.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -483,15 +483,7 @@ void loadTransmissionData(H5::Group &transmission, const Mantid::API::MatrixWork
else if (lambda.size() == workspace->blocksize() + 1)
workspace->setBinEdges(0, std::move(lambda));
else {
#if defined(H5_USE_18_API)
const std::string objectName{transmission.getObjName()};
#else
const size_t nchars = H5Iget_name(transmission.getId(), nullptr, 0);
std::string objectName;
objectName.resize(nchars);
H5Iget_name(transmission.getId(), objectName.data(),
nchars + 1); // +1 for null terminator
#endif
throw std::runtime_error("Unexpected array size for lambda in transmission group '" + objectName +
"'. Expected length=" + std::to_string(workspace->blocksize()) +
", found length=" + std::to_string(lambda.size()));
Expand Down
72 changes: 31 additions & 41 deletions Framework/DataHandling/src/LoadSassena.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -17,8 +17,7 @@
#include "MantidKernel/Exception.h"
#include "MantidKernel/Unit.h"
#include "MantidKernel/UnitFactory.h"

#include <hdf5_hl.h>
#include "MantidNexus/H5Util.h"

#include <utility>

Expand Down Expand Up @@ -61,14 +60,8 @@ void LoadSassena::registerWorkspace(const API::WorkspaceGroup_sptr &gws, const s
* @param dims storing dimensionality
*/

herr_t LoadSassena::dataSetInfo(const hid_t &h5file, const std::string &setName, hsize_t *dims) const {
H5T_class_t class_id;
size_t type_size;
herr_t errorcode = H5LTget_dataset_info(h5file, setName.c_str(), dims, &class_id, &type_size);
if (errorcode < 0) {
g_log.error("Unable to read " + setName + " dataset info");
}
return errorcode;
void LoadSassena::dataSetInfo(const H5::H5File &h5file, const std::string &setName, hsize_t *dims) const {
h5file.openDataSet(setName).getSpace().getSimpleExtentDims(dims);
}

/**
Expand All @@ -77,12 +70,8 @@ herr_t LoadSassena::dataSetInfo(const hid_t &h5file, const std::string &setName,
* @param setName string name of dataset
* @param buf storing dataset
*/
herr_t LoadSassena::dataSetDouble(const hid_t &h5file, const std::string &setName, std::vector<double> &buf) {
herr_t errorcode = H5LTread_dataset_double(h5file, setName.c_str(), buf.data());
if (errorcode < 0) {
this->g_log.error("Cannot read " + setName + " dataset");
}
return errorcode;
void LoadSassena::dataSetDouble(const H5::H5File &h5file, const std::string &setName, std::vector<double> &buf) {
Mantid::NeXus::H5Util::readArray1DCoerce(h5file.openDataSet(setName), buf);
}

/* Helper object and function to sort modulus of Q-vectors
Expand All @@ -100,7 +89,7 @@ bool compare(const mypair &left, const mypair &right) { return left.first < righ
* @param sorting_indexes permutation of qvmod indexes to render it in
* increasing order of momemtum transfer
*/
HistogramData::Points LoadSassena::loadQvectors(const hid_t &h5file, const API::WorkspaceGroup_sptr &gws,
HistogramData::Points LoadSassena::loadQvectors(const H5::H5File &h5file, const API::WorkspaceGroup_sptr &gws,
std::vector<int> &sorting_indexes) {

// store the modulus of the vector
Expand All @@ -110,14 +99,18 @@ HistogramData::Points LoadSassena::loadQvectors(const hid_t &h5file, const API::
const std::string setName("qvectors");

hsize_t dims[3];
if (dataSetInfo(h5file, setName, dims) < 0) {
try {
this->dataSetInfo(h5file, setName, dims);
} catch (H5::Exception &) {
throw Kernel::Exception::FileError("Unable to read " + setName + " dataset info:", m_filename);
}

auto nq = static_cast<int>(dims[0]); // number of q-vectors
std::vector<double> buf(nq * 3);

herr_t errorcode = this->dataSetDouble(h5file, "qvectors", buf);
if (errorcode < 0) {
try {
this->dataSetDouble(h5file, "qvectors", buf);
} catch (H5::Exception &) {
this->g_log.error("LoadSassena::loadQvectors cannot proceed");
qvmod.resize(0);
return HistogramData::Points(std::move(qvmod));
Expand Down Expand Up @@ -169,13 +162,14 @@ HistogramData::Points LoadSassena::loadQvectors(const hid_t &h5file, const API::
* @param sorting_indexes permutation of qvmod indexes to render it in
* increasing order of momentum transfer
*/
void LoadSassena::loadFQ(const hid_t &h5file, const API::WorkspaceGroup_sptr &gws, const std::string &setName,
void LoadSassena::loadFQ(const H5::H5File &h5file, const API::WorkspaceGroup_sptr &gws, const std::string &setName,
const HistogramData::Points &qvmod, const std::vector<int> &sorting_indexes) {

auto nq = static_cast<int>(qvmod.size()); // number of q-vectors
std::vector<double> buf(nq * 2);
herr_t errorcode = this->dataSetDouble(h5file, setName, buf);
if (errorcode < 0) {
try {
this->dataSetDouble(h5file, setName, buf);
} catch (H5::Exception &) {
this->g_log.error("LoadSassena::loadFQ cannot proceed");
return;
}
Expand Down Expand Up @@ -217,11 +211,13 @@ void LoadSassena::loadFQ(const hid_t &h5file, const API::WorkspaceGroup_sptr &gw
* @param sorting_indexes permutation of qvmod indexes to render it in
* increasing order of momentum transfer
*/
void LoadSassena::loadFQT(const hid_t &h5file, const API::WorkspaceGroup_sptr &gws, const std::string &setName,
void LoadSassena::loadFQT(const H5::H5File &h5file, const API::WorkspaceGroup_sptr &gws, const std::string &setName,
const HistogramData::Points &qvmod, const std::vector<int> &sorting_indexes) {

hsize_t dims[3];
if (dataSetInfo(h5file, setName, dims) < 0) {
try {
this->dataSetInfo(h5file, setName, dims);
} catch (H5::Exception &) {
this->g_log.error("Unable to read " + setName + " dataset info");
this->g_log.error("LoadSassena::loadFQT cannot proceed");
return;
Expand All @@ -231,8 +227,9 @@ void LoadSassena::loadFQT(const hid_t &h5file, const API::WorkspaceGroup_sptr &g

auto nq = static_cast<int>(qvmod.size()); // number of q-vectors
std::vector<double> buf(nq * nnt * 2);
herr_t errorcode = this->dataSetDouble(h5file, setName, buf);
if (errorcode < 0) {
try {
this->dataSetDouble(h5file, setName, buf);
} catch (H5::Exception &) {
this->g_log.error("LoadSassena::loadFQT cannot proceed");
return;
}
Expand Down Expand Up @@ -356,34 +353,27 @@ void LoadSassena::exec() {

// open the HDF5 file for reading
m_filename = this->getPropertyValue("Filename");
hid_t h5file = H5Fopen(m_filename.c_str(), H5F_ACC_RDONLY, H5P_DEFAULT);
if (h5file < 0) {
H5::H5File h5file;
try {
h5file = H5::H5File(m_filename.c_str(), H5F_ACC_RDONLY);
} catch (H5::FileIException &) {
this->g_log.error("Cannot open " + m_filename);
throw Kernel::Exception::FileError("Unable to open:", m_filename);
}

// find out the sassena version used
char cversion[16];
if (H5LTget_attribute_string(h5file, "/", "sassena_version", cversion) < 0) {
this->g_log.error("Unable to read Sassena version");
}
// const std::string version(cversion);
// determine which loader protocol to use based on the version
// to be done at a later time, maybe implement a Version class

// Block to read the Q-vectors
std::vector<int> sorting_indexes;
const auto qvmod = this->loadQvectors(h5file, gws, sorting_indexes);
if (qvmod.empty()) {
this->g_log.error("No Q-vectors read. Unable to proceed");
H5Fclose(h5file);
h5file.close();
return;
}

// iterate over the valid sets
for (std::vector<std::string>::const_iterator it = this->m_validSets.begin(); it != this->m_validSets.end(); ++it) {
std::string setName = *it;
if (H5Lexists(h5file, setName.c_str(), H5P_DEFAULT)) {
if (h5file.nameExists(setName)) {
if (setName == "fq" || setName == "fq0" || setName == "fq2")
this->loadFQ(h5file, gws, setName, qvmod, sorting_indexes);
else if (setName == "fqt")
Expand All @@ -392,7 +382,7 @@ void LoadSassena::exec() {
this->g_log.information("Dataset " + setName + " not present in file");
} // end of iterate over the valid sets

H5Fclose(h5file);
h5file.close();
} // end of LoadSassena::exec()

} // namespace Mantid::DataHandling
21 changes: 7 additions & 14 deletions Framework/DataHandling/test/LoadNexusProcessedTest.h
Original file line number Diff line number Diff line change
Expand Up @@ -27,13 +27,14 @@
#include "MantidGeometry/Instrument.h"
#include "MantidGeometry/Instrument/InstrumentDefinitionParser.h"
#include "MantidHistogramData/Histogram.h"
#include "MantidNexus/H5Util.h"

#include <H5Cpp.h>

#include "SaveNexusProcessedTest.h"

#include <cxxtest/TestSuite.h>

#include <hdf5.h>

#include <Poco/File.h>

#include <string>
Expand All @@ -45,6 +46,7 @@ using namespace Mantid::Kernel;
using namespace Mantid::DataObjects;
using namespace Mantid::API;
using namespace Mantid::HistogramData;
using namespace Mantid::NeXus;
using Mantid::detid_t;

// Note that this suite tests an old version of Nexus processed files that we
Expand Down Expand Up @@ -797,18 +799,9 @@ class LoadNexusProcessedTest : public CxxTest::TestSuite {

// Remove the coordinate_system entry so it falls back on the log. NeXus
// can't do this so use the HDF5 API directly
auto fid = H5Fopen(filePath.c_str(), H5F_ACC_RDWR, H5P_DEFAULT);
auto mantid_id = H5Gopen(fid, "mantid_workspace_1", H5P_DEFAULT);
auto peaks_id = H5Gopen(mantid_id, "peaks_workspace", H5P_DEFAULT);
if (peaks_id > 0) {
H5Ldelete(peaks_id, "coordinate_system", H5P_DEFAULT);
H5Gclose(peaks_id);
H5Gclose(mantid_id);
} else {
TS_FAIL("Cannot unlink coordinate_system group. Test file has unexpected "
"structure.");
}
H5Fclose(fid);
H5::H5File h5file(filePath.c_str(), H5F_ACC_RDWR);
H5Util::deleteObjectLink(h5file, "/mantid_workspace_1/peaks_workspace/coordinate_system");
h5file.close();

LoadNexusProcessed loadAlg;
loadAlg.setChild(true);
Expand Down
44 changes: 19 additions & 25 deletions Framework/DataHandling/test/SaveNXSPETest.h
Original file line number Diff line number Diff line change
Expand Up @@ -23,8 +23,8 @@
#include "boost/tuple/tuple.hpp"
#include <memory>

#include <hdf5.h>
#include <hdf5_hl.h>
#include "MantidNexus/H5Util.h"
#include <H5Cpp.h>

#include <Poco/File.h>

Expand Down Expand Up @@ -261,53 +261,47 @@ class SaveNXSPETest : public CxxTest::TestSuite {
std::vector<double>());
}

auto h5file = H5Fopen(outputFile.c_str(), H5F_ACC_RDWR, H5P_DEFAULT);
H5::H5File h5file(outputFile, H5F_ACC_RDONLY);
const char *dset = "/mantid_workspace/data/data";
int rank(0);
herr_t status = H5LTget_dataset_ndims(h5file, dset, &rank);
TS_ASSERT_EQUALS(0, status);
H5::DataSet dataset = h5file.openDataSet(dset);
rank = dataset.getSpace().getSimpleExtentNdims();
TS_ASSERT_EQUALS(2, rank);

std::vector<hsize_t> dims(rank);
H5T_class_t classId(H5T_NO_CLASS);
size_t typeSize(0);
status = H5LTget_dataset_info(h5file, dset, dims.data(), &classId, &typeSize);
TS_ASSERT_EQUALS(0, status);
TS_ASSERT_EQUALS(H5T_FLOAT, classId);
TS_ASSERT_EQUALS(8, typeSize);
dataset.getSpace().getSimpleExtentDims(dims.data());
H5::DataType dataType = dataset.getDataType();
TS_ASSERT_EQUALS(H5T_FLOAT, dataType.getClass());
TS_ASSERT_EQUALS(8, dataType.getSize());

size_t bufferSize(dims[0] * dims[1]);
std::vector<double> signal(bufferSize), error(bufferSize);
status = H5LTread_dataset_double(h5file, dset, signal.data());
TS_ASSERT_EQUALS(0, status);
Mantid::NeXus::H5Util::readArray1DCoerce(dataset, signal);

const char *dsetErr = "/mantid_workspace/data/error";
status = H5LTread_dataset_double(h5file, dsetErr, error.data());
TS_ASSERT_EQUALS(0, status);
Mantid::NeXus::H5Util::readArray1DCoerce(h5file.openDataSet(dsetErr), error);
//---------------------------------------------------------------
// check efixed
const char *efixed_dset = "/mantid_workspace/NXSPE_info/fixed_energy";
status = H5LTget_dataset_ndims(h5file, efixed_dset, &rank);
TS_ASSERT_EQUALS(0, status);
H5::DataSet efixed_dataset = h5file.openDataSet(efixed_dset);
rank = efixed_dataset.getSpace().getSimpleExtentNdims();
TS_ASSERT_EQUALS(1, rank);

std::vector<hsize_t> efix_dims(rank);
status = H5LTget_dataset_info(h5file, efixed_dset, efix_dims.data(), &classId, &typeSize);
TS_ASSERT_EQUALS(0, status);
TS_ASSERT_EQUALS(H5T_FLOAT, classId);
TS_ASSERT_EQUALS(8, typeSize);
efixed_dataset.getSpace().getSimpleExtentDims(efix_dims.data());
H5::DataType efixed_dataType = efixed_dataset.getDataType();
TS_ASSERT_EQUALS(H5T_FLOAT, efixed_dataType.getClass());
TS_ASSERT_EQUALS(8, efixed_dataType.getSize());

size_t EnBuffer(efix_dims[0]);
std::vector<double> efixed(EnBuffer);
status = H5LTread_dataset_double(h5file, efixed_dset, efixed.data());
TS_ASSERT_EQUALS(0, status);
Mantid::NeXus::H5Util::readArray1DCoerce(efixed_dataset, efixed);
if (set_efixed) {
TS_ASSERT_EQUALS(EnBuffer, 1);
TS_ASSERT_DELTA(efixed[0], efix_value, 1.e-8);
}

H5Fclose(h5file);
// Poco::File(outputFile).remove();
h5file.close();

return boost::make_tuple(dims, signal, error, efixed);
}
Expand Down
1 change: 0 additions & 1 deletion Framework/Kernel/CMakeLists.txt
Original file line number Diff line number Diff line change
Expand Up @@ -499,7 +499,6 @@ target_link_libraries(
GSL::gsl
OpenSSL::SSL
${HDF5_LIBRARIES}
${HDF5_HL_LIBRARIES}
PRIVATE ${NETWORK_LIBRARIES}
)
if(WIN32)
Expand Down
Loading

0 comments on commit fb29b11

Please sign in to comment.