Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 4 additions & 0 deletions doc/whats-new.rst
Original file line number Diff line number Diff line change
Expand Up @@ -23,6 +23,10 @@ New Features
:py:class:`~xarray.indexes.PandasIndex` to perform the selection
(:issue:`9703`, :pull:`11029`).
By `Ian Hunt-Isaak <https://github.com/ianhi>`_.
- The minimum supported version of ``h5netcdf`` is now 1.4. Version 1.4.0
  brings improved alignment between h5netcdf and libnetcdf (netCDF-C) in the
  storage of complex numbers (:pull:`11068`). By `Mark Harfouche
  <https://github.com/hmaarrfk>`_.


Breaking Changes
Expand Down
2 changes: 1 addition & 1 deletion pixi.toml
Original file line number Diff line number Diff line change
Expand Up @@ -117,7 +117,7 @@ cftime = "1.6.*"
dask-core = "2024.6.*"
distributed = "2024.6.*"
flox = "0.9.*"
h5netcdf = "1.3.*"
h5netcdf = "1.4.*"
# h5py and hdf5 tend to cause conflicts
# for e.g. hdf5 1.12 conflicts with h5py=3.1
# prioritize bumping other packages instead
Expand Down
2 changes: 1 addition & 1 deletion pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -37,7 +37,7 @@ accel = [
complete = ["xarray[accel,etc,io,parallel,viz]"]
io = [
"netCDF4>=1.6.0",
"h5netcdf",
"h5netcdf>=1.4.0",
"pydap",
"scipy>=1.13",
"zarr>=2.18",
Expand Down
4 changes: 0 additions & 4 deletions xarray/tests/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -230,10 +230,6 @@ def _importorskip_h5netcdf_ros3(has_h5netcdf: bool):
"netCDF4", "1.6.2"
)

has_h5netcdf_1_4_0_or_above, requires_h5netcdf_1_4_0_or_above = _importorskip(
"h5netcdf", "1.4.0.dev"
)

has_h5netcdf_1_7_0_or_above, requires_h5netcdf_1_7_0_or_above = _importorskip(
"h5netcdf", "1.7.0.dev"
)
Expand Down
74 changes: 5 additions & 69 deletions xarray/tests/test_backends.py
Original file line number Diff line number Diff line change
Expand Up @@ -74,7 +74,6 @@
assert_identical,
assert_no_warnings,
has_dask,
has_h5netcdf_1_4_0_or_above,
has_netCDF4,
has_numpy_2,
has_scipy,
Expand All @@ -89,7 +88,6 @@
requires_dask,
requires_fsspec,
requires_h5netcdf,
requires_h5netcdf_1_4_0_or_above,
requires_h5netcdf_1_7_0_or_above,
requires_h5netcdf_or_netCDF4,
requires_h5netcdf_ros3,
Expand Down Expand Up @@ -2124,20 +2122,14 @@ def test_encoding_enum__no_fill_value(self, recwarn):
)
v[:] = 1
with open_dataset(tmp_file, engine="netcdf4") as original:
save_kwargs = {}
# We don't expect any errors.
# This is effectively a void context manager
expected_warnings = 0
if self.engine == "h5netcdf":
if not has_h5netcdf_1_4_0_or_above:
save_kwargs["invalid_netcdf"] = True
expected_warnings = 1
expected_msg = "You are writing invalid netcdf features to file"
else:
expected_warnings = 1
expected_msg = "Creating variable with default fill_value 0 which IS defined in enum type"

with self.roundtrip(original, save_kwargs=save_kwargs) as actual:
expected_warnings = 1
expected_msg = "Creating variable with default fill_value 0 which IS defined in enum type"

with self.roundtrip(original) as actual:
assert len(recwarn) == expected_warnings
if expected_warnings:
assert issubclass(recwarn[0].category, UserWarning)
Expand All @@ -2147,14 +2139,6 @@ def test_encoding_enum__no_fill_value(self, recwarn):
actual.clouds.encoding["dtype"].metadata["enum"]
== cloud_type_dict
)
if not (
self.engine == "h5netcdf" and not has_h5netcdf_1_4_0_or_above
):
# not implemented in h5netcdf yet
assert (
actual.clouds.encoding["dtype"].metadata["enum_name"]
== "cloud_type"
)

@requires_netCDF4
def test_encoding_enum__multiple_variable_with_enum(self):
Expand All @@ -2176,10 +2160,7 @@ def test_encoding_enum__multiple_variable_with_enum(self):
fill_value=255,
)
with open_dataset(tmp_file, engine="netcdf4") as original:
save_kwargs = {}
if self.engine == "h5netcdf" and not has_h5netcdf_1_4_0_or_above:
save_kwargs["invalid_netcdf"] = True
with self.roundtrip(original, save_kwargs=save_kwargs) as actual:
with self.roundtrip(original) as actual:
assert_equal(original, actual)
assert (
actual.clouds.encoding["dtype"] == actual.tifa.encoding["dtype"]
Expand All @@ -2192,14 +2173,6 @@ def test_encoding_enum__multiple_variable_with_enum(self):
actual.clouds.encoding["dtype"].metadata["enum"]
== cloud_type_dict
)
if not (
self.engine == "h5netcdf" and not has_h5netcdf_1_4_0_or_above
):
# not implemented in h5netcdf yet
assert (
actual.clouds.encoding["dtype"].metadata["enum_name"]
== "cloud_type"
)

@requires_netCDF4
def test_encoding_enum__error_multiple_variable_with_changing_enum(self):
Expand Down Expand Up @@ -2235,17 +2208,6 @@ def test_encoding_enum__error_multiple_variable_with_changing_enum(self):
"u1",
metadata={"enum": modified_enum, "enum_name": "cloud_type"},
)
if not (self.engine == "h5netcdf" and not has_h5netcdf_1_4_0_or_above):
# not implemented yet in h5netcdf
with pytest.raises(
ValueError,
match=(
r"Cannot save variable .*"
r" because an enum `cloud_type` already exists in the Dataset .*"
),
):
with self.roundtrip(original):
pass

@pytest.mark.parametrize("create_default_indexes", [True, False])
def test_create_default_indexes(self, tmp_path, create_default_indexes) -> None:
Expand Down Expand Up @@ -4927,31 +4889,6 @@ def create_store(self):
with create_tmp_file() as tmp_file:
yield backends.H5NetCDFStore.open(tmp_file, "w")

@pytest.mark.skipif(
has_h5netcdf_1_4_0_or_above, reason="only valid for h5netcdf < 1.4.0"
)
def test_complex(self) -> None:
expected = Dataset({"x": ("y", np.ones(5) + 1j * np.ones(5))})
save_kwargs = {"invalid_netcdf": True}
with pytest.warns(UserWarning, match="You are writing invalid netcdf features"):
with self.roundtrip(expected, save_kwargs=save_kwargs) as actual:
assert_equal(expected, actual)

@pytest.mark.skipif(
has_h5netcdf_1_4_0_or_above, reason="only valid for h5netcdf < 1.4.0"
)
@pytest.mark.parametrize("invalid_netcdf", [None, False])
def test_complex_error(self, invalid_netcdf) -> None:
import h5netcdf

expected = Dataset({"x": ("y", np.ones(5) + 1j * np.ones(5))})
save_kwargs = {"invalid_netcdf": invalid_netcdf}
with pytest.raises(
h5netcdf.CompatibilityError, match="are not a supported NetCDF feature"
):
with self.roundtrip(expected, save_kwargs=save_kwargs) as actual:
assert_equal(expected, actual)

def test_numpy_bool_(self) -> None:
# h5netcdf loads booleans as numpy.bool_, this type needs to be supported
# when writing invalid_netcdf datasets in order to support a roundtrip
Expand Down Expand Up @@ -5105,7 +5042,6 @@ def test_byte_attrs(self, byte_attrs_dataset: dict[str, Any]) -> None:
with pytest.raises(ValueError, match=byte_attrs_dataset["h5netcdf_error"]):
super().test_byte_attrs(byte_attrs_dataset)

@requires_h5netcdf_1_4_0_or_above
def test_roundtrip_complex(self):
expected = Dataset({"x": ("y", np.ones(5) + 1j * np.ones(5))})
with self.roundtrip(expected) as actual:
Expand Down
Loading