3 changes: 3 additions & 0 deletions .github/ci/spack-envs/clang15_py311_nompi_h5_ad2/spack.yaml
@@ -8,8 +8,11 @@ spack:
   specs:
   - adios2
   - hdf5
+  - py-h5py
 
   packages:
+    py-h5py:
+      variants: ~mpi
     hdf5:
       variants: ~mpi
     adios2:
@@ -9,6 +9,7 @@ spack:
   - [email protected]
   - hdf5
   - openmpi
+  - py-h5py
 
   packages:
     adios2:
1 change: 1 addition & 0 deletions .github/ci/spack-envs/gcc12_py36_ompi_h5_ad2/spack.yaml
@@ -9,6 +9,7 @@ spack:
   - [email protected]
   - hdf5
   - openmpi
+  - py-h5py
 
   packages:
     adios2:
4 changes: 2 additions & 2 deletions .github/workflows/linux.yml
@@ -260,7 +260,7 @@ jobs:
     - name: Install
       run: |
         sudo apt-get update
-        sudo apt-get install g++ libopenmpi-dev libhdf5-openmpi-dev python3 python3-numpy python3-mpi4py python3-pandas
+        sudo apt-get install g++ libopenmpi-dev libhdf5-openmpi-dev python3 python3-numpy python3-mpi4py python3-pandas python3-h5py-mpi
         # TODO ADIOS2
     - name: Build
       env: {CXXFLAGS: -Werror, PKG_CONFIG_PATH: /usr/lib/x86_64-linux-gnu/pkgconfig}
@@ -286,7 +286,7 @@
       run: |
         apk update
         apk add hdf5-dev
-        python3.10 -m pip install numpy
+        python3.10 -m pip install numpy h5py
     - name: Build
       env: {CXXFLAGS: -Werror}
       run: |
1 change: 1 addition & 0 deletions conda.yml
@@ -24,6 +24,7 @@ dependencies:
   - doxygen
   - git
   - hdf5=*=mpi_openmpi_*
+  - h5py
   - mamba
   - make
   - mpi4py
161 changes: 111 additions & 50 deletions src/IO/HDF5/HDF5IOHandler.cpp
@@ -1294,15 +1294,24 @@ void HDF5IOHandlerImpl::openDataset(
     *dtype = d;
 
     int ndims = H5Sget_simple_extent_ndims(dataset_space);
-    std::vector<hsize_t> dims(ndims, 0);
-    std::vector<hsize_t> maxdims(ndims, 0);
-
-    H5Sget_simple_extent_dims(dataset_space, dims.data(), maxdims.data());
-    Extent e;
-    for (auto const &val : dims)
-        e.push_back(val);
-    auto extent = parameters.extent;
-    *extent = e;
+    if (ndims == 0)
+    {
+        // Is a scalar. Since the openPMD-api frontend supports no scalar
+        // datasets, return the extent as {1}
+        *parameters.extent = {1};
+    }
+    else
+    {
+        std::vector<hsize_t> dims(ndims, 0);
+        std::vector<hsize_t> maxdims(ndims, 0);
+
+        H5Sget_simple_extent_dims(dataset_space, dims.data(), maxdims.data());
+        Extent e;
+        for (auto const &val : dims)
+            e.push_back(val);
+        auto &extent = parameters.extent;
+        *extent = e;
+    }
 
     herr_t status;
     status = H5Sclose(dataset_space);
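
For context on the new `ndims == 0` branch: a scalar (rank-0) HDF5 dataset has an empty shape, which is a different thing from a one-element 1D dataset. A minimal h5py sketch of the distinction (illustrative file and dataset names, not part of this diff):

```python
import h5py
import numpy as np

# A rank-0 (scalar) dataset vs. a one-element 1D dataset: openDataset now
# reports the former with extent {1} so the frontend can address it at all.
with h5py.File("shapes.h5", "w") as f:
    f.create_dataset("scalar", data=np.int64(44))      # shape (), ndims == 0
    f.create_dataset("one_elem", data=np.array([44]))  # shape (1,), ndims == 1

with h5py.File("shapes.h5", "r") as f:
    print(f["scalar"].shape)    # ()
    print(f["one_elem"].shape)  # (1,)
```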
@@ -1555,28 +1564,54 @@ void HDF5IOHandlerImpl::writeDataset(
         "[HDF5] Internal error: Failed to open HDF5 dataset during dataset "
         "write");
 
-    std::vector<hsize_t> start;
-    for (auto const &val : parameters.offset)
-        start.push_back(static_cast<hsize_t>(val));
-    std::vector<hsize_t> stride(start.size(), 1); /* contiguous region */
-    std::vector<hsize_t> count(start.size(), 1); /* single region */
-    std::vector<hsize_t> block;
-    for (auto const &val : parameters.extent)
-        block.push_back(static_cast<hsize_t>(val));
-    memspace =
-        H5Screate_simple(static_cast<int>(block.size()), block.data(), nullptr);
     filespace = H5Dget_space(dataset_id);
-    status = H5Sselect_hyperslab(
-        filespace,
-        H5S_SELECT_SET,
-        start.data(),
-        stride.data(),
-        count.data(),
-        block.data());
-    VERIFY(
-        status == 0,
-        "[HDF5] Internal error: Failed to select hyperslab during dataset "
-        "write");
+    int ndims = H5Sget_simple_extent_ndims(filespace);
+
+    if (ndims == 0)
+    {
+        if (parameters.offset != Offset{0} || parameters.extent != Extent{1})
+        {
+            std::stringstream errorMessage;
+            errorMessage
+                << "HDF5 dataset '" << concrete_h5_file_position(writable)
+                << "' is scalar (dimensionality 0) and must be accessed with "
+                   "offset [0] and extent [1]. Was accessed with offset ";
+            auxiliary::write_vec_to_stream(errorMessage, parameters.offset)
+                << " and extent ";
+            auxiliary::write_vec_to_stream(errorMessage, parameters.extent)
+                << ".";
+            throw error::WrongAPIUsage(errorMessage.str());
+        }
+        memspace = H5Screate_simple(0, nullptr, nullptr);
+        VERIFY(
+            memspace > 0,
+            "[HDF5] Internal error: Failed to create memspace during dataset "
+            "write");
+    }
+    else
+    {
+        std::vector<hsize_t> start;
+        for (auto const &val : parameters.offset)
+            start.push_back(static_cast<hsize_t>(val));
+        std::vector<hsize_t> stride(start.size(), 1); /* contiguous region */
+        std::vector<hsize_t> count(start.size(), 1); /* single region */
+        std::vector<hsize_t> block;
+        for (auto const &val : parameters.extent)
+            block.push_back(static_cast<hsize_t>(val));
+        memspace = H5Screate_simple(
+            static_cast<int>(block.size()), block.data(), nullptr);
+        status = H5Sselect_hyperslab(
+            filespace,
+            H5S_SELECT_SET,
+            start.data(),
+            stride.data(),
+            count.data(),
+            block.data());
+        VERIFY(
+            status == 0,
+            "[HDF5] Internal error: Failed to select hyperslab during dataset "
+            "write");
+    }
 
     void const *data = parameters.data.get();

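The rank-0 memspace created here by `H5Screate_simple(0, nullptr, nullptr)` corresponds to what h5py exposes as empty-tuple indexing. A small sketch of that semantics (illustrative names, assuming a freshly created file):

```python
import h5py
import numpy as np

with h5py.File("scalar.h5", "w") as f:
    ds = f.create_dataset("x", data=np.int64(44))  # rank-0 dataset
    ds[()] = 45     # write via the empty-tuple selection (a rank-0 dataspace)
    value = ds[()]  # read back the single scalar value
    # ds[0] would raise an error: a rank-0 dataset has no axis to slice
```
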
@@ -2013,28 +2048,54 @@ void HDF5IOHandlerImpl::readDataset(
         "[HDF5] Internal error: Failed to open HDF5 dataset during dataset "
         "read");
 
-    std::vector<hsize_t> start;
-    for (auto const &val : parameters.offset)
-        start.push_back(static_cast<hsize_t>(val));
-    std::vector<hsize_t> stride(start.size(), 1); /* contiguous region */
-    std::vector<hsize_t> count(start.size(), 1); /* single region */
-    std::vector<hsize_t> block;
-    for (auto const &val : parameters.extent)
-        block.push_back(static_cast<hsize_t>(val));
-    memspace =
-        H5Screate_simple(static_cast<int>(block.size()), block.data(), nullptr);
     filespace = H5Dget_space(dataset_id);
-    status = H5Sselect_hyperslab(
-        filespace,
-        H5S_SELECT_SET,
-        start.data(),
-        stride.data(),
-        count.data(),
-        block.data());
-    VERIFY(
-        status == 0,
-        "[HDF5] Internal error: Failed to select hyperslab during dataset "
-        "read");
+    int ndims = H5Sget_simple_extent_ndims(filespace);
+
+    if (ndims == 0)
+    {
+        if (parameters.offset != Offset{0} || parameters.extent != Extent{1})
+        {
+            std::stringstream errorMessage;
+            errorMessage
+                << "HDF5 dataset '" << concrete_h5_file_position(writable)
+                << "' is scalar (dimensionality 0) and must be accessed with "
+                   "offset [0] and extent [1]. Was accessed with offset ";
+            auxiliary::write_vec_to_stream(errorMessage, parameters.offset)
+                << " and extent ";
+            auxiliary::write_vec_to_stream(errorMessage, parameters.extent)
+                << ".";
+            throw error::WrongAPIUsage(errorMessage.str());
+        }
+        memspace = H5Screate_simple(0, nullptr, nullptr);
+        VERIFY(
+            memspace > 0,
+            "[HDF5] Internal error: Failed to create memspace during dataset "
+            "read");
+    }
+    else
+    {
+        std::vector<hsize_t> start;
+        for (auto const &val : parameters.offset)
+            start.push_back(static_cast<hsize_t>(val));
+        std::vector<hsize_t> stride(start.size(), 1); /* contiguous region */
+        std::vector<hsize_t> count(start.size(), 1); /* single region */
+        std::vector<hsize_t> block;
+        for (auto const &val : parameters.extent)
+            block.push_back(static_cast<hsize_t>(val));
+        memspace = H5Screate_simple(
+            static_cast<int>(block.size()), block.data(), nullptr);
+        status = H5Sselect_hyperslab(
+            filespace,
+            H5S_SELECT_SET,
+            start.data(),
+            stride.data(),
+            count.data(),
+            block.data());
+        VERIFY(
+            status == 0,
+            "[HDF5] Internal error: Failed to select hyperslab during dataset "
+            "read");
+    }
 
     void *data = parameters.data.get();

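From the frontend, both new branches enforce the single valid selection offset=[0], extent=[1]. A hedged usage sketch (it mirrors the new test below and assumes the sample file already holds a scalar dataset at /data/0/meshes/E/x):

```python
import openpmd_api as io

series = io.Series("../samples/scalar_hdf5.h5", io.Access.read_only)
E_x = series.iterations[0].meshes["E"]["x"]
chunk = E_x.load_chunk([0], [1])  # the only valid selection for a scalar
series.flush()                    # chunk holds valid data after the flush
print(chunk)                      # e.g. [44], given the file from the test
# Any other selection, e.g. load_chunk([0], [2]), triggers the new
# WrongAPIUsage error from readDataset.
series.close()
```
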
52 changes: 52 additions & 0 deletions test/python/unittest/API/APITest.py
@@ -2239,6 +2239,58 @@ def testSeriesConstructors(self):
             s = io.Series(f, io.Access.create, c)
             s.close()
 
+    def testScalarHdf5Fields(self):
+        if "hdf5" not in io.variants:
+            return
+        try:
+            import h5py
+        except ImportError:
+            return
+
+        # While the openPMD-api (currently) does not create scalar HDF5
+        # datasets, we should at least try reading and modifying them in files
+        # that were created elsewhere. Scalar here refers to a dataset without
+        # dimension. Interacting with them in the openPMD-api is possible by
+        # specifying a single element, i.e. offset=[0], extent=[1].
+        # For testing this, create a dataset, then use h5py to create a scalar
+        # dataset in the file. Then, open first for reading, then for
+        # modifying.
+
+        file = "../samples/scalar_hdf5.h5"
+        series_write = io.Series(file, io.Access.create)
+        E_x = series_write.write_iterations()[0].meshes["E"]["x"]
+        E_x.reset_dataset(io.Dataset(np.dtype(np.int_), [1]))
+        E_x[:] = np.array([43])
+        series_write.close()
+
+        # Now turn E_x into a scalar
+        with h5py.File(file, "r+") as f:
+            E = f["data"]["0"]["meshes"]["E"]
+            reapply_attributes = \
+                {key: val for key, val in E["x"].attrs.items()}
+            del E["x"]
+            E["x"] = 44
+            for key, val in reapply_attributes.items():
+                E["x"].attrs[key] = val
+
+        series_read = io.Series(file, io.Access.read_only)
+        loaded_from_scalar = series_read.iterations[0].meshes["E"]["x"][:]
+        series_read.flush()
+        self.assertEqual(loaded_from_scalar, np.array([44]))
+        series_read.close()
+
+        series_read_write = io.Series(file, io.Access.read_write)
+        E_x = series_read_write.iterations[0].meshes["E"]["x"]
+        E_x[:] = np.array([45])
+        series_read_write.close()
+
+        series_read_again = io.Series(file, io.Access.read_only)
+        loaded_from_scalar = \
+            series_read_again.iterations[0].meshes["E"]["x"][:]
+        series_read_again.flush()
+        self.assertEqual(loaded_from_scalar, np.array([45]))
+        series_read_again.close()
+
 
 if __name__ == '__main__':
     unittest.main()