From ff9b13e0d2d7dcb328956cd4fcb3af98f9d9329d Mon Sep 17 00:00:00 2001 From: DanRyanIrish Date: Sat, 14 Aug 2021 00:51:26 +0100 Subject: [PATCH 01/60] First draft of a sliceable meta obect. --- ndcube/meta.py | 172 ++++++++++++++++++++++++++++++++++++++ ndcube/tests/helpers.py | 23 ++++- ndcube/tests/test_meta.py | 172 ++++++++++++++++++++++++++++++++++++++ 3 files changed, 364 insertions(+), 3 deletions(-) create mode 100644 ndcube/meta.py create mode 100644 ndcube/tests/test_meta.py diff --git a/ndcube/meta.py b/ndcube/meta.py new file mode 100644 index 000000000..3f2d8ee2d --- /dev/null +++ b/ndcube/meta.py @@ -0,0 +1,172 @@ +import collections.abc +import copy +import numbers + +import numpy as np + +__all__ = ["Meta"] + + +class Meta(dict): + def __init__(self, header=None, comments=None, axes=None, data_shape=None): + self.original_header = header + + # Sanitize inputs + if header is None: + header = {} + else: + header = dict(header) + if comments is None: + comments = {} + else: + comments = dict(comments) + if axes is None: + axes = {} + self._data_shape = None + else: + # Verify data_shape is set if axes is set. + if not (isinstance(data_shape, collections.abc.Iterable) and + all([isinstance(i, numbers.Integral) for i in data_shape])): + raise TypeError("If axes is set, data_shape must be an iterable giving " + "the length of each axis of the assocated cube.") + self._data_shape = np.asarray(data_shape) + + axes = dict(axes) + for key, axis in axes.items(): + axes[key] = self._sanitize_entry_with_axis(key, header[key], axis) + + # Build meta entries and instantiate class. + entries = [(key, (header[key], comments.get(key), axes.get(key))) for key in header] + super().__init__(entries) + + def _sanitize_entry_with_axis(self, key, value, axis): + # Verify each entry in axes is an iterable of ints. + if isinstance(axis, numbers.Integral): + axis = (axis,) + if not (isinstance(axis, collections.abc.Iterable) and + all([isinstance(i, numbers.Integral) for i in axis])): + raise TypeError("Values in axes must be an int or tuple of ints giving " + "the data axis/axes associated with the metadata.") + axis = np.asarray(axis) + + # Confirm each axis-associated piece of metadata has the same shape + # as its associated axes. + shape_error_msg = (f"{key} must have shape {tuple(self.shape[axis])} " + f"as it is associated with axes {axis}") + if len(axis) == 1: + if not hasattr(value, "__len__"): + raise TypeError(shape_error_msg) + meta_shape = (len(value),) + else: + if not hasattr(value, "shape"): + raise TypeError(shape_error_msg) + meta_shape = value.shape + if not all(meta_shape == self.shape[axis]): + raise ValueError(shape_error_msg) + + return axis + + @property + def meta_values(self): + return [value[0] for value in self.values()] + + @property + def comments(self): + return dict([(key, value[1]) for key, value in self.items() if value[1] is not None]) + + @property + def axes(self): + return dict([(key, value[2]) for key, value in self.items() if value[2] is not None]) + + @property + def shape(self): + return self._data_shape + + def add(self, name, value, comment=None, axis=None): + if name in self.keys(): + raise KeyError(f"'{name}' already exists. 
" + "To edit and existing entry, first delete and then re-add it " + "with the update parameters.") + self.__setitem__(name, value, comment, axis) + + def __setitem__(self, name, value, **kwargs): + if + if axis is not None: + if self.shape is None: + raise TypeError("Meta instance does not have a shape so new metadata " + "cannot be assigned to an axis.") + axis = self._sanitize_entry_with_axis(name, value, axis) + super().__setitem__(name, (value, comment, axis)) + + def __getitem__(self, item): + # There are two ways to slice: + # by key, or + # by typical python numeric slicing API, + # i.e. slice the each piece of metadata associated with an axes. + + # If item is single string, slicing is simple. + if isinstance(item, str): + return super().__getitem__(item)[0] + + # Else, the item is assumed to be a typical slicing item. + elif self.shape is None: + raise TypeError("Meta object does not have a shape and so cannot be sliced.") + + else: + new_meta = copy.deepcopy(self) + # Convert item to array of ints and slices for consistent behaviour. + if isinstance(item, (numbers.Integral, slice)): + item = [item] + item = np.array(list(item) + [slice(None)] * (len(self.shape) - len(item)), + dtype=object) + + # Edit data shape and calculate which axis will be dropped. + dropped_axes = np.zeros(len(self.shape), dtype=bool) + new_shape = new_meta.shape + j = 0 + for i, axis_item in enumerate(item): + if isinstance(axis_item, numbers.Integral): + new_shape = np.delete(new_shape, i) + dropped_axes[i] = True + elif isinstance(axis_item, slice): + start = axis_item.start + if start is None: + start = 0 + if start < 0: + start = self.shape[i] - start + stop = axis_item.stop + if stop is None: + stop = self.shape[i] + # Mustn't use new_shape here as indexing will be misaligned + # if an axis was deleted above. + if stop < 0: + stop = self.shape[i] - stop + new_shape[i - dropped_axes[:i].sum()] = stop - start + else: + raise TypeError("Unrecognized slice type. " + "Must be an int, slice and tuple of the same.") + new_meta._data_shape = new_shape + + # Calculate the cumulative number of dropped axis. + cumul_dropped_axes = np.cumsum(dropped_axes) + + # Slice all metadata associated with axes. 
+ for key, (value, comment, axis) in new_meta.items(): + if axis is None: + new_value, comment, new_axis = value + else: + print(key, value, comment, axis) + new_item = tuple(item[axis]) + if len(new_item) == 1: + new_value = value[new_item[0]] + else: + new_value = value[new_item] + new_axis = np.array([-1 if isinstance(i, numbers.Integral) else a + for i, a in zip(new_item, axis)]) + new_axis -= cumul_dropped_axes[axis] + new_axis = new_axis[new_axis >= 0] + if len(new_axis) == 0: + new_axis = None + new_meta.__setitem__(key, new_value, comment=comment, axis=new_axis) + + return new_meta diff --git a/ndcube/tests/helpers.py b/ndcube/tests/helpers.py index 0cb9f3b35..0e23b7483 100644 --- a/ndcube/tests/helpers.py +++ b/ndcube/tests/helpers.py @@ -18,6 +18,7 @@ from numpy.testing import assert_equal from ndcube import NDCube, NDCubeSequence +from ndcube.meta import Meta __all__ = ['figure_test', 'get_hash_library_name', @@ -66,15 +67,31 @@ def test_wrapper(*args, **kwargs): def assert_extra_coords_equal(test_input, extra_coords): assert test_input.keys() == extra_coords.keys() - for key in list(test_input.keys()): + for key in test_input: assert test_input[key]['axis'] == extra_coords[key]['axis'] assert (test_input[key]['value'] == extra_coords[key]['value']).all() def assert_metas_equal(test_input, expected_output): - if not (test_input is None and expected_output is None): + if type(test_input) is not type(expected_output): + raise AssertionError( + "input and expected are of different type. " + f"Input: {type(test_input)}; Expected: {type(expected_output)}") + if isinstance(test_input, Meta) and isinstance(expected_output, Meta): + if test_input.shape is None or expected_output.shape is None: + assert test_input.shape == expected_output.shape + else: + assert all(test_input.shape == expected_output.shape) + for test_value, expected_value in zip(test_input.values(), expected_output.values()): + try: + assert test_value[0] == expected_value[0] + except ValueError as err: + if "more than one element is ambiguous" in err.args[0]: + assert all(test_value[0] == expected_value[0]) + assert all(test_value[2] == expected_value[2]) + elif isinstance(test_input, dict) and isinstance(expected_output, dict): assert test_input.keys() == expected_output.keys() - for key in list(test_input.keys()): + for key in test_input: assert test_input[key] == expected_output[key] diff --git a/ndcube/tests/test_meta.py b/ndcube/tests/test_meta.py new file mode 100644 index 000000000..c5fad570e --- /dev/null +++ b/ndcube/tests/test_meta.py @@ -0,0 +1,172 @@ +import numpy as np +import pytest + +from ndcube.meta import Meta +from .helpers import assert_metas_equal + + +# Fixtures + +@pytest.fixture +def basic_meta_values(): + return {"a": "hello", + "b": list(range(10, 25, 10)), + "c": np.array([[1, 2, 3, 4], [10, 20, 30, 40], [100, 200, 300, 400]]), + "d": list(range(3, 13, 3)) + } + + +@pytest.fixture +def basic_comments(): + return {"a": "Comment A", + "b": "Comment B", + "c": "Comment C", + } + + +@pytest.fixture +def basic_axes(): + return {"b": 0, + "c": (1, 2), + "d": (2,), + } + + +@pytest.fixture +def basic_data_shape(): + return (2, 3, 4, 5) + + +@pytest.fixture +def basic_meta(basic_meta_values, basic_comments, basic_axes, basic_data_shape): + return Meta(basic_meta_values, basic_comments, basic_axes, basic_data_shape) + + +@pytest.fixture +def no_shape_meta(): + return Meta({"a": "hello"}) + + +def test_meta_values(basic_meta, basic_meta_values): + meta = basic_meta + expected_values = 
list(basic_meta_values.values()) + assert meta.meta_values == expected_values + + +def test_comments(basic_meta, basic_comments): + meta = basic_meta + comments = basic_comments + assert list(meta.comments.keys()) == list(comments.keys()) + assert list(meta.comments.values()) == list(comments.values()) + + +def test_axes(basic_meta, basic_axes): + meta = basic_meta + axes = basic_axes + axes["b"] = np.array([0]) + axes["c"] = np.asarray(axes["c"]) + axes["d"] = np.asarray(axes["d"]) + assert list(meta.axes.keys()) == list(axes.keys()) + for output_axis, expected_axis in zip(meta.axes.values(), axes.values()): + assert all(output_axis == expected_axis) + + +def test_shape(basic_meta, basic_data_shape): + meta = basic_meta + shape = np.asarray(basic_data_shape) + assert all(meta.shape == shape) + + +def test_slice_axis_with_no_meta(basic_meta): + meta = basic_meta + output = meta[:, :, :, 0] + assert_metas_equal(output, meta) + + +def test_slice_away_independent_axis(basic_meta): + meta = basic_meta + # Get output + sliced_axis = 0 + item = 0 + output = meta[item] + # Build expected result. + values = dict([(key, value[0]) for key, value in meta.items()]) + values["b"] = values["b"][0] + comments = meta.comments + axes = dict([(key, axis) for key, axis in meta.axes.items()]) + del axes["b"] + axes["c"] -= 1 + axes["d"] -= 1 + shape = meta.shape[1:] + print(values, comments, axes, shape) + expected = Meta(values, comments, axes, shape) + # Compare output and expected. + assert_metas_equal(output, expected) + + +def test_slice_dependent_axes(basic_meta): + meta = basic_meta + print(meta["a"]) + # Get output + output = meta[:, 1:3, 1] + print(meta["a"]) + # Build expected result. + values = dict([(key, value[0]) for key, value in meta.items()]) + values["c"] = values["c"][1:3, 1] + values["d"] = values["d"][1] + comments = meta.comments + axes = dict([(key, axis) for key, axis in meta.axes.items()]) + axes["c"] = 1 + del axes["d"] + shape = np.array([2, 2, 5]) + expected = Meta(values, comments, axes, shape) + # Compare output and expected. + assert_metas_equal(output, expected) + + +@pytest.mark.parametrize("meta, item, expected", + ( + ("basic_meta", "a", "hello"), + ("basic_meta", "b", list(range(10, 25, 10))), + ), + indirect=("meta",)) +def test_slice_by_str(meta, item, expected): + meta = basic_meta + assert meta["a"] == "hello" + assert meta["b"] == list(range(10, 25, 10)) + + +def test_add1(basic_meta): + meta = basic_meta + name = "z" + value = 100 + comment = "Comment E" + meta.add(name, value, comment=comment) + assert name in meta.keys() + assert meta[name] == value + assert meta.comments[name] == comment + assert name not in meta.axes.keys() + + +def test_add2(basic_meta): + meta = basic_meta + name = "z" + value = list(range(2)) + axis = 0 + meta.add(name, value, axis=axis) + assert name in meta.keys() + assert meta[name] == value + assert meta.axes[name] == np.array([axis]) + assert name not in meta.comments.keys() + + +def test_add_overwrite_error(basic_meta): + meta = basic_meta + with pytest.raises(KeyError): + meta.add("a", "world") + + +def test_add_axis_without_shape(no_shape_meta): + meta = no_shape_meta + with pytest.raises(TypeError): + meta.add("z", [100], axis=0) From 6d0d25496d27df5aebe844b9afdce3809afc62b3 Mon Sep 17 00:00:00 2001 From: DanRyanIrish Date: Sat, 14 Aug 2021 02:33:04 +0100 Subject: [PATCH 02/60] Refactor Meta. Tests now passing. Refactor involved moving comments and axes out of the core values and putting them into their own dictionaries. 
This removed cascading consequences for self.values(), self.__setitem__, etc. --- ndcube/meta.py | 76 ++++++++++++++++++--------------------- ndcube/tests/helpers.py | 33 +++++++++++------ ndcube/tests/test_meta.py | 46 ++++++------------------ 3 files changed, 68 insertions(+), 87 deletions(-) diff --git a/ndcube/meta.py b/ndcube/meta.py index 3f2d8ee2d..c4a2acc74 100644 --- a/ndcube/meta.py +++ b/ndcube/meta.py @@ -11,17 +11,24 @@ class Meta(dict): def __init__(self, header=None, comments=None, axes=None, data_shape=None): self.original_header = header - # Sanitize inputs + # Sanitize metadata values and instantiate class. if header is None: header = {} else: header = dict(header) + super().__init__(header.items()) + header_keys = header.keys() + + # Generate dictionary for comments. if comments is None: - comments = {} + self.comments = dict(zip(header.keys(), [None] * len(header_keys))) else: comments = dict(comments) + self.comments = dict([(key, comments.get(key)) for key in header]) + + # Generate dictionary for axes. if axes is None: - axes = {} + self.axes = dict(zip(header.keys(), [None] * len(header_keys))) self._data_shape = None else: # Verify data_shape is set if axes is set. @@ -30,16 +37,16 @@ def __init__(self, header=None, comments=None, axes=None, data_shape=None): raise TypeError("If axes is set, data_shape must be an iterable giving " "the length of each axis of the assocated cube.") self._data_shape = np.asarray(data_shape) - axes = dict(axes) - for key, axis in axes.items(): - axes[key] = self._sanitize_entry_with_axis(key, header[key], axis) - - # Build meta entries and instantiate class. - entries = [(key, (header[key], comments.get(key), axes.get(key))) for key in header] - super().__init__(entries) - - def _sanitize_entry_with_axis(self, key, value, axis): + self.axes = dict([(key, self._sanitize_axis_value(axes.get(key), header[key], key)) + for key in header_keys]) + + def _sanitize_axis_value(self, axis, value, key): + if axis is None: + return None + if self.shape is None: + raise TypeError("Meta instance does not have a shape so new metadata " + "cannot be assigned to an axis.") # Verify each entry in axes is an iterable of ints. if isinstance(axis, numbers.Integral): axis = (axis,) @@ -66,37 +73,26 @@ def _sanitize_entry_with_axis(self, key, value, axis): return axis - @property - def meta_values(self): - return [value[0] for value in self.values()] - - @property - def comments(self): - return dict([(key, value[1]) for key, value in self.items() if value[1] is not None]) - - @property - def axes(self): - return dict([(key, value[2]) for key, value in self.items() if value[2] is not None]) - @property def shape(self): return self._data_shape def add(self, name, value, comment=None, axis=None): + """Need docstring!""" if name in self.keys(): raise KeyError(f"'{name}' already exists. 
" "To edit and existing entry, first delete and then re-add it " "with the update parameters.") - self.__setitem__(name, value, comment, axis) - - def __setitem__(self, name, value, **kwargs): - if if axis is not None: - if self.shape is None: - raise TypeError("Meta instance does not have a shape so new metadata " - "cannot be assigned to an axis.") - axis = self._sanitize_entry_with_axis(name, value, axis) - super().__setitem__(name, (value, comment, axis)) + axis = self._sanitize_axis_value(axis, value, name) + self[name] = value + self.comments[name] = comment + self.axes[name] = axis + + def __del__(self, name): + del self[name] + del self.comments[name] + del self.axes[name] def __getitem__(self, item): # There are two ways to slice: @@ -106,7 +102,7 @@ def __getitem__(self, item): # If item is single string, slicing is simple. if isinstance(item, str): - return super().__getitem__(item)[0] + return super().__getitem__(item) # Else, the item is assumed to be a typical slicing item. elif self.shape is None: @@ -147,15 +143,12 @@ def __getitem__(self, item): "Must be an int, slice and tuple of the same.") new_meta._data_shape = new_shape - # Calculate the cumulative number of dropped axis. + # Calculate the cumulative number of dropped axes. cumul_dropped_axes = np.cumsum(dropped_axes) # Slice all metadata associated with axes. - for key, (value, comment, axis) in new_meta.items(): - if axis is None: - new_value, comment, new_axis = value - else: - print(key, value, comment, axis) + for (key, value), axis in zip(self.items(), self.axes.values()): + if axis is not None: new_item = tuple(item[axis]) if len(new_item) == 1: new_value = value[new_item[0]] @@ -167,6 +160,7 @@ def __getitem__(self, item): new_axis = new_axis[new_axis >= 0] if len(new_axis) == 0: new_axis = None - new_meta.__setitem__(key, new_value, comment=comment, axis=new_axis) + del new_meta[key] + new_meta.add(key, new_value, self.comments[key], new_axis) return new_meta diff --git a/ndcube/tests/helpers.py b/ndcube/tests/helpers.py index 0e23b7483..4ee687074 100644 --- a/ndcube/tests/helpers.py +++ b/ndcube/tests/helpers.py @@ -67,7 +67,7 @@ def test_wrapper(*args, **kwargs): def assert_extra_coords_equal(test_input, extra_coords): assert test_input.keys() == extra_coords.keys() - for key in test_input: + for key in list(test_input.keys()): assert test_input[key]['axis'] == extra_coords[key]['axis'] assert (test_input[key]['value'] == extra_coords[key]['value']).all() @@ -77,22 +77,35 @@ def assert_metas_equal(test_input, expected_output): raise AssertionError( "input and expected are of different type. " f"Input: {type(test_input)}; Expected: {type(expected_output)}") + multi_element_msg = "more than one element is ambiguous" if isinstance(test_input, Meta) and isinstance(expected_output, Meta): + # Check keys are the same. + assert test_input.keys() == expected_output.keys() + + # Check shapes are the same. if test_input.shape is None or expected_output.shape is None: assert test_input.shape == expected_output.shape else: - assert all(test_input.shape == expected_output.shape) + assert np.allclose(test_input.shape, expected_output.shape) + + # Check values and axes are the same. 
for test_value, expected_value in zip(test_input.values(), expected_output.values()): try: - assert test_value[0] == expected_value[0] + assert test_value == expected_value except ValueError as err: - if "more than one element is ambiguous" in err.args[0]: - assert all(test_value[0] == expected_value[0]) - assert all(test_value[2] == expected_value[2]) - elif isinstance(test_input, dict) and isinstance(expected_output, dict): - assert test_input.keys() == expected_output.keys() - for key in test_input: - assert test_input[key] == expected_output[key] + if multi_element_msg in err.args[0]: + assert np.allclose(test_value, expected_value) + + # Check axes are the same. + for test_axis, expected_axis in zip(test_input.axes.values(), + expected_output.axes.values()): + assert ((test_axis is None and expected_axis is None) or + all(test_axis == expected_axis)) + else: + if not (test_input is None and expected_output is None): + assert test_input.keys() == expected_output.keys() + for key in list(test_input.keys()): + assert test_input[key] == expected_output[key] def assert_cubes_equal(test_input, expected_cube): diff --git a/ndcube/tests/test_meta.py b/ndcube/tests/test_meta.py index c5fad570e..51c5b0f5a 100644 --- a/ndcube/tests/test_meta.py +++ b/ndcube/tests/test_meta.py @@ -1,3 +1,5 @@ +import copy + import numpy as np import pytest @@ -47,30 +49,6 @@ def no_shape_meta(): return Meta({"a": "hello"}) -def test_meta_values(basic_meta, basic_meta_values): - meta = basic_meta - expected_values = list(basic_meta_values.values()) - assert meta.meta_values == expected_values - - -def test_comments(basic_meta, basic_comments): - meta = basic_meta - comments = basic_comments - assert list(meta.comments.keys()) == list(comments.keys()) - assert list(meta.comments.values()) == list(comments.values()) - - -def test_axes(basic_meta, basic_axes): - meta = basic_meta - axes = basic_axes - axes["b"] = np.array([0]) - axes["c"] = np.asarray(axes["c"]) - axes["d"] = np.asarray(axes["d"]) - assert list(meta.axes.keys()) == list(axes.keys()) - for output_axis, expected_axis in zip(meta.axes.values(), axes.values()): - assert all(output_axis == expected_axis) - - def test_shape(basic_meta, basic_data_shape): meta = basic_meta shape = np.asarray(basic_data_shape) @@ -80,7 +58,9 @@ def test_shape(basic_meta, basic_data_shape): def test_slice_axis_with_no_meta(basic_meta): meta = basic_meta output = meta[:, :, :, 0] - assert_metas_equal(output, meta) + expected = copy.deepcopy(meta) + expected._data_shape = meta._data_shape[:-1] + assert_metas_equal(output, expected) def test_slice_away_independent_axis(basic_meta): @@ -90,7 +70,7 @@ def test_slice_away_independent_axis(basic_meta): item = 0 output = meta[item] # Build expected result. - values = dict([(key, value[0]) for key, value in meta.items()]) + values = dict([(key, value) for key, value in meta.items()]) values["b"] = values["b"][0] comments = meta.comments axes = dict([(key, axis) for key, axis in meta.axes.items()]) @@ -111,7 +91,7 @@ def test_slice_dependent_axes(basic_meta): output = meta[:, 1:3, 1] print(meta["a"]) # Build expected result. 
- values = dict([(key, value[0]) for key, value in meta.items()]) + values = dict([(key, value) for key, value in meta.items()]) values["c"] = values["c"][1:3, 1] values["d"] = values["d"][1] comments = meta.comments @@ -124,13 +104,7 @@ def test_slice_dependent_axes(basic_meta): assert_metas_equal(output, expected) -@pytest.mark.parametrize("meta, item, expected", - ( - ("basic_meta", "a", "hello"), - ("basic_meta", "b", list(range(10, 25, 10))), - ), - indirect=("meta",)) -def test_slice_by_str(meta, item, expected): +def test_slice_by_str(basic_meta): meta = basic_meta assert meta["a"] == "hello" assert meta["b"] == list(range(10, 25, 10)) @@ -145,7 +119,7 @@ def test_add1(basic_meta): assert name in meta.keys() assert meta[name] == value assert meta.comments[name] == comment - assert name not in meta.axes.keys() + assert meta.axes[name] is None def test_add2(basic_meta): @@ -156,8 +130,8 @@ def test_add2(basic_meta): meta.add(name, value, axis=axis) assert name in meta.keys() assert meta[name] == value + assert meta.comments[name] is None assert meta.axes[name] == np.array([axis]) - assert name not in meta.comments.keys() def test_add_overwrite_error(basic_meta): From 9166378eae7b08aaf395a6879b91f55f32ca88e2 Mon Sep 17 00:00:00 2001 From: DanRyanIrish Date: Sat, 14 Aug 2021 14:56:59 +0100 Subject: [PATCH 03/60] Add overwrite option to Meta.add. Also put in overwrite error check to Meta.__setitem__ if axis is not None. This way the value shape and axes cannot easily be corrupted accidentally. --- ndcube/meta.py | 54 +++++++++++++++++++++++++++------------ ndcube/tests/helpers.py | 4 +-- ndcube/tests/test_meta.py | 12 ++++++--- 3 files changed, 48 insertions(+), 22 deletions(-) diff --git a/ndcube/meta.py b/ndcube/meta.py index c4a2acc74..437e49d4c 100644 --- a/ndcube/meta.py +++ b/ndcube/meta.py @@ -21,14 +21,14 @@ def __init__(self, header=None, comments=None, axes=None, data_shape=None): # Generate dictionary for comments. if comments is None: - self.comments = dict(zip(header.keys(), [None] * len(header_keys))) + self._comments = dict(zip(header.keys(), [None] * len(header_keys))) else: comments = dict(comments) - self.comments = dict([(key, comments.get(key)) for key in header]) + self._comments = dict([(key, comments.get(key)) for key in header]) # Generate dictionary for axes. if axes is None: - self.axes = dict(zip(header.keys(), [None] * len(header_keys))) + self._axes = dict(zip(header.keys(), [None] * len(header_keys))) self._data_shape = None else: # Verify data_shape is set if axes is set. @@ -38,8 +38,8 @@ def __init__(self, header=None, comments=None, axes=None, data_shape=None): "the length of each axis of the assocated cube.") self._data_shape = np.asarray(data_shape) axes = dict(axes) - self.axes = dict([(key, self._sanitize_axis_value(axes.get(key), header[key], key)) - for key in header_keys]) + self._axes = dict([(key, self._sanitize_axis_value(axes.get(key), header[key], key)) + for key in header_keys]) def _sanitize_axis_value(self, axis, value, key): if axis is None: @@ -73,26 +73,49 @@ def _sanitize_axis_value(self, axis, value, key): return axis + @property + def comments(self): + return self._comments + + @property + def axes(self): + return self._axes + @property def shape(self): return self._data_shape - def add(self, name, value, comment=None, axis=None): + def add(self, name, value, comment, axis, overwrite=False): """Need docstring!""" - if name in self.keys(): + if name in self.keys() and overwrite is not True: raise KeyError(f"'{name}' already exists. 
" - "To edit and existing entry, first delete and then re-add it " - "with the update parameters.") + "To update an existing metadata entry set overwrite=True.") if axis is not None: axis = self._sanitize_axis_value(axis, value, name) - self[name] = value - self.comments[name] = comment - self.axes[name] = axis + self._comments[name] = comment + self._axes[name] = axis + self.__setitem__(name, value) # This must be done after updating self._axes otherwise it may error. def __del__(self, name): + del self._comments[name] + del self._axes[name] del self[name] - del self.comments[name] - del self.axes[name] + + def __setitem__(self, key, val): + axis = self.axes[key] + if axis is not None: + recommendation = "We recommend using the 'add' method to set values." + if len(axis) == 1: + if not (hasattr(val, "__len__") and len(val) == self.shape[axis[0]]): + raise TypeError(f"{key} must have same length as associated axis, " + f"i.e. axis {axis[0]}: {self.shape[axis[0]]}\n" + f"{recommendation}") + else: + if not (hasattr(val, "shape") and all(val.shape == self.shape[axis])): + raise TypeError(f"{key} must have same shape as associated axes, " + f"i.e axes {axis}: {self.shape[axis]}\n" + f"{recommendation}") + super().__setitem__(key, val) def __getitem__(self, item): # There are two ways to slice: @@ -160,7 +183,6 @@ def __getitem__(self, item): new_axis = new_axis[new_axis >= 0] if len(new_axis) == 0: new_axis = None - del new_meta[key] - new_meta.add(key, new_value, self.comments[key], new_axis) + new_meta.add(key, new_value, self.comments[key], new_axis, overwrite=True) return new_meta diff --git a/ndcube/tests/helpers.py b/ndcube/tests/helpers.py index 4ee687074..4462f0e49 100644 --- a/ndcube/tests/helpers.py +++ b/ndcube/tests/helpers.py @@ -71,12 +71,11 @@ def assert_extra_coords_equal(test_input, extra_coords): assert test_input[key]['axis'] == extra_coords[key]['axis'] assert (test_input[key]['value'] == extra_coords[key]['value']).all() - def assert_metas_equal(test_input, expected_output): if type(test_input) is not type(expected_output): raise AssertionError( "input and expected are of different type. " - f"Input: {type(test_input)}; Expected: {type(expected_output)}") + f"input: {type(test_input)}; expected: {type(expected_output)}") multi_element_msg = "more than one element is ambiguous" if isinstance(test_input, Meta) and isinstance(expected_output, Meta): # Check keys are the same. @@ -95,7 +94,6 @@ def assert_metas_equal(test_input, expected_output): except ValueError as err: if multi_element_msg in err.args[0]: assert np.allclose(test_value, expected_value) - # Check axes are the same. 
for test_axis, expected_axis in zip(test_input.axes.values(), expected_output.axes.values()): diff --git a/ndcube/tests/test_meta.py b/ndcube/tests/test_meta.py index 51c5b0f5a..cbe4ab645 100644 --- a/ndcube/tests/test_meta.py +++ b/ndcube/tests/test_meta.py @@ -115,7 +115,7 @@ def test_add1(basic_meta): name = "z" value = 100 comment = "Comment E" - meta.add(name, value, comment=comment) + meta.add(name, value, comment, None) assert name in meta.keys() assert meta[name] == value assert meta.comments[name] == comment @@ -127,17 +127,23 @@ def test_add2(basic_meta): name = "z" value = list(range(2)) axis = 0 - meta.add(name, value, axis=axis) + meta.add(name, value, None, axis) assert name in meta.keys() assert meta[name] == value assert meta.comments[name] is None assert meta.axes[name] == np.array([axis]) +def test_add_overwrite(basic_meta): + meta = basic_meta + name = "a" + value = "goodbye" + meta.add(name, value, None, None, overwrite=True) + assert meta[name] == value def test_add_overwrite_error(basic_meta): meta = basic_meta with pytest.raises(KeyError): - meta.add("a", "world") + meta.add("a", "world", None, None) def test_add_axis_without_shape(no_shape_meta): From a2248c71b3584f7883f746132b7171619118aa1b Mon Sep 17 00:00:00 2001 From: DanRyanIrish Date: Sat, 14 Aug 2021 15:37:51 +0100 Subject: [PATCH 04/60] Fixes codestyle. --- ndcube/meta.py | 5 ++--- ndcube/tests/test_meta.py | 5 ++--- 2 files changed, 4 insertions(+), 6 deletions(-) diff --git a/ndcube/meta.py b/ndcube/meta.py index 437e49d4c..60f4e73b0 100644 --- a/ndcube/meta.py +++ b/ndcube/meta.py @@ -1,6 +1,6 @@ -import collections.abc import copy import numbers +import collections.abc import numpy as np @@ -18,7 +18,7 @@ def __init__(self, header=None, comments=None, axes=None, data_shape=None): header = dict(header) super().__init__(header.items()) header_keys = header.keys() - + # Generate dictionary for comments. if comments is None: self._comments = dict(zip(header.keys(), [None] * len(header_keys))) @@ -142,7 +142,6 @@ def __getitem__(self, item): # Edit data shape and calculate which axis will be dropped. dropped_axes = np.zeros(len(self.shape), dtype=bool) new_shape = new_meta.shape - j = 0 for i, axis_item in enumerate(item): if isinstance(axis_item, numbers.Integral): new_shape = np.delete(new_shape, i) diff --git a/ndcube/tests/test_meta.py b/ndcube/tests/test_meta.py index cbe4ab645..9ff571e98 100644 --- a/ndcube/tests/test_meta.py +++ b/ndcube/tests/test_meta.py @@ -4,8 +4,8 @@ import pytest from ndcube.meta import Meta -from .helpers import assert_metas_equal +from .helpers import assert_metas_equal # Fixtures @@ -37,7 +37,7 @@ def basic_axes(): @pytest.fixture def basic_data_shape(): return (2, 3, 4, 5) - + @pytest.fixture def basic_meta(basic_meta_values, basic_comments, basic_axes, basic_data_shape): @@ -66,7 +66,6 @@ def test_slice_axis_with_no_meta(basic_meta): def test_slice_away_independent_axis(basic_meta): meta = basic_meta # Get output - sliced_axis = 0 item = 0 output = meta[item] # Build expected result. From 02cdcbb1954b557e1c4dd11662092c71b8233a75 Mon Sep 17 00:00:00 2001 From: DanRyanIrish Date: Sat, 14 Aug 2021 15:39:01 +0100 Subject: [PATCH 05/60] Adds #455 changelog. 
--- changelog/455.feature.rst | 1 + 1 file changed, 1 insertion(+) create mode 100644 changelog/455.feature.rst diff --git a/changelog/455.feature.rst b/changelog/455.feature.rst new file mode 100644 index 000000000..0b4dcfcb3 --- /dev/null +++ b/changelog/455.feature.rst @@ -0,0 +1 @@ +Add Sliceable `~ndcube.meta.Meta` class for axis-associated metadata. From eb57c05c2f4040cf6b5e07eb473f974bb7631cbc Mon Sep 17 00:00:00 2001 From: DanRyanIrish Date: Sat, 14 Aug 2021 17:02:25 +0100 Subject: [PATCH 06/60] Codestyle fixes. --- ndcube/meta.py | 4 ++-- ndcube/tests/helpers.py | 3 ++- ndcube/tests/test_meta.py | 5 ++++- 3 files changed, 8 insertions(+), 4 deletions(-) diff --git a/ndcube/meta.py b/ndcube/meta.py index 60f4e73b0..c01af3984 100644 --- a/ndcube/meta.py +++ b/ndcube/meta.py @@ -50,8 +50,8 @@ def _sanitize_axis_value(self, axis, value, key): # Verify each entry in axes is an iterable of ints. if isinstance(axis, numbers.Integral): axis = (axis,) - if not (isinstance(axis, collections.abc.Iterable) and - all([isinstance(i, numbers.Integral) for i in axis])): + if not (isinstance(axis, collections.abc.Iterable) and all([isinstance(i, numbers.Integral) + for i in axis])): raise TypeError("Values in axes must be an int or tuple of ints giving " "the data axis/axes associated with the metadata.") axis = np.asarray(axis) diff --git a/ndcube/tests/helpers.py b/ndcube/tests/helpers.py index 4462f0e49..4eb3859a6 100644 --- a/ndcube/tests/helpers.py +++ b/ndcube/tests/helpers.py @@ -50,7 +50,7 @@ def figure_test(test_function): All such decorated tests are marked with `pytest.mark.mpl_image` for convenient filtering. """ hash_library_name = get_hash_library_name() - hash_library_file = Path(__file__).parent / ".." / "visualization" / "tests" / hash_library_name + hash_library_file = Path(__file__).parent / ".." / "visualization" / "tests" / hash_library_name @pytest.mark.remote_data @pytest.mark.mpl_image_compare(hash_library=hash_library_file.resolve(), @@ -71,6 +71,7 @@ def assert_extra_coords_equal(test_input, extra_coords): assert test_input[key]['axis'] == extra_coords[key]['axis'] assert (test_input[key]['value'] == extra_coords[key]['value']).all() + def assert_metas_equal(test_input, expected_output): if type(test_input) is not type(expected_output): raise AssertionError( diff --git a/ndcube/tests/test_meta.py b/ndcube/tests/test_meta.py index 9ff571e98..31d93ba66 100644 --- a/ndcube/tests/test_meta.py +++ b/ndcube/tests/test_meta.py @@ -9,6 +9,7 @@ # Fixtures + @pytest.fixture def basic_meta_values(): return {"a": "hello", @@ -31,7 +32,7 @@ def basic_axes(): return {"b": 0, "c": (1, 2), "d": (2,), - } + } @pytest.fixture @@ -132,6 +133,7 @@ def test_add2(basic_meta): assert meta.comments[name] is None assert meta.axes[name] == np.array([axis]) + def test_add_overwrite(basic_meta): meta = basic_meta name = "a" @@ -139,6 +141,7 @@ def test_add_overwrite(basic_meta): meta.add(name, value, None, None, overwrite=True) assert meta[name] == value + def test_add_overwrite_error(basic_meta): meta = basic_meta with pytest.raises(KeyError): From 96b8ab959a9a36b6d3337d608d71a45bef47b789 Mon Sep 17 00:00:00 2001 From: DanRyanIrish Date: Sat, 14 Aug 2021 18:13:00 +0100 Subject: [PATCH 07/60] Slice meta if possible when slicing NDCube. 
--- ndcube/mixins/ndslicing.py | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/ndcube/mixins/ndslicing.py b/ndcube/mixins/ndslicing.py index 7efbc9635..cd0f37767 100644 --- a/ndcube/mixins/ndslicing.py +++ b/ndcube/mixins/ndslicing.py @@ -25,4 +25,13 @@ def __getitem__(self, item): sliced_cube._global_coords._internal_coords = self.global_coords._internal_coords sliced_cube._extra_coords = self.extra_coords[item] + # Slice metadata if possible. + try: + sliced_cube.meta = self.meta[item] + except TypeError as err: + if "unhashable type" not in err.args[0]: + raise err + except KeyError: + pass + return sliced_cube From 303320a34c7dd2d42ada3832f548e528ba4c0b4e Mon Sep 17 00:00:00 2001 From: DanRyanIrish Date: Sat, 14 Aug 2021 19:13:57 +0100 Subject: [PATCH 08/60] Add docstrings to Meta object. --- ndcube/meta.py | 49 ++++++++++++++++++++++++++++++++++++++++++++++++- 1 file changed, 48 insertions(+), 1 deletion(-) diff --git a/ndcube/meta.py b/ndcube/meta.py index c01af3984..f38d22af0 100644 --- a/ndcube/meta.py +++ b/ndcube/meta.py @@ -8,6 +8,33 @@ class Meta(dict): + """ + A sliceable object for storing metadata. + + Metdata can be linked to a data axis which causes it to be sliced when the + standard Python numeric slicing API is applied to the object. + Specific pieces of metadata can be obtain using the dict-like str slicing API. + Metadata associated with an axis/axes must have the same length/shape as those axes. + + Parameters + ---------- + header: dict-like + The names and values of metadata. + + comments: dict-like (optional) + Comments associated with any of the above pieces of metadata. + + axes: dict-like (optional) + The axis/axes associated with the above metadata values. + Each axis value must be None (for no axis association), and `int` + or an iterable of `int` if the metadata is associated with multiple axes. + Metadata in header without a corresponding entry here are assumed to not + be associated with an axis. + + data_shape: iterable of `int` (optional) + The shape of the data with which this metadata is associated. + Must be set if axes input is set. + """ def __init__(self, header=None, comments=None, axes=None, data_shape=None): self.original_header = header @@ -86,7 +113,27 @@ def shape(self): return self._data_shape def add(self, name, value, comment, axis, overwrite=False): - """Need docstring!""" + """Add a new piece of metadata to instance. + + Parameters + ---------- + name: `str` + The name/label of the metadata. + + value: + The value of the metadata. If axes input is not None, this must have the + same length/shape as those axes as defined by self.shape. + + comment: `str` or `None` + Any comment associated with this metadata. Set to None if no comment desired. + + axis: `int`, iterable of `int`, or `None` + The axis/axes with which the metadata is linked. If not associated with any + axis, set this to None. + + overwrite: `bool` (optional) + If True, overwrites the entry of the name name if already present. + """ if name in self.keys() and overwrite is not True: raise KeyError(f"'{name}' already exists. " "To update an existing metadata entry set overwrite=True.") From 3cc208d052085ab337bc2774af92087f478930ac Mon Sep 17 00:00:00 2001 From: DanRyanIrish Date: Mon, 16 Aug 2021 09:58:56 +0100 Subject: [PATCH 09/60] Create remove method for Meta. 
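
A minimal sketch of the new method (values are illustrative): removing an
entry also drops its comment and axis association.

    from ndcube.meta import Meta

    meta = Meta({"a": 1, "b": [10, 20]},
                comments={"b": "per-row value"},
                axes={"b": 0}, data_shape=(2, 3))
    meta.remove("b")
    assert "b" not in meta
    assert "b" not in meta.comments
    assert "b" not in meta.axes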
--- ndcube/meta.py | 2 +- ndcube/tests/test_meta.py | 9 +++++++++ 2 files changed, 10 insertions(+), 1 deletion(-) diff --git a/ndcube/meta.py b/ndcube/meta.py index f38d22af0..172e0aa14 100644 --- a/ndcube/meta.py +++ b/ndcube/meta.py @@ -143,7 +143,7 @@ def add(self, name, value, comment, axis, overwrite=False): self._axes[name] = axis self.__setitem__(name, value) # This must be done after updating self._axes otherwise it may error. - def __del__(self, name): + def remove(self, name): del self._comments[name] del self._axes[name] del self[name] diff --git a/ndcube/tests/test_meta.py b/ndcube/tests/test_meta.py index 31d93ba66..eb9b4b442 100644 --- a/ndcube/tests/test_meta.py +++ b/ndcube/tests/test_meta.py @@ -152,3 +152,12 @@ def test_add_axis_without_shape(no_shape_meta): meta = no_shape_meta with pytest.raises(TypeError): meta.add("z", [100], axis=0) + + +def test_remove(basic_meta): + meta = basic_meta + name = "b" + meta.remove(name) + assert name not in meta.keys() + assert name not in meta.comments.keys() + assert name not in meta.axes.keys() From 041cd1b2469791a33bf678e57da2f68c34010afa Mon Sep 17 00:00:00 2001 From: DanRyanIrish Date: Mon, 23 Aug 2021 01:38:29 +0100 Subject: [PATCH 10/60] Make Meta more memory efficient. Only store entries in the comments and axes dicts that aren't None. This saves a lot of space and requires the get method be used when looking for comment and axes values to avoid errors is that key doesn't have a comment or axis. --- ndcube/meta.py | 37 +++++++++++++++++++++++++------------ ndcube/tests/test_meta.py | 4 ++-- 2 files changed, 27 insertions(+), 14 deletions(-) diff --git a/ndcube/meta.py b/ndcube/meta.py index 172e0aa14..1d6b29c99 100644 --- a/ndcube/meta.py +++ b/ndcube/meta.py @@ -48,14 +48,17 @@ def __init__(self, header=None, comments=None, axes=None, data_shape=None): # Generate dictionary for comments. if comments is None: - self._comments = dict(zip(header.keys(), [None] * len(header_keys))) + self._comments = dict() else: comments = dict(comments) - self._comments = dict([(key, comments.get(key)) for key in header]) + if not set(comments.keys()).issubset(set(header_keys)): + raise ValueError( + "All comments must correspond to a value in header under the same key.") + self._comments = comments # Generate dictionary for axes. if axes is None: - self._axes = dict(zip(header.keys(), [None] * len(header_keys))) + self._axes = dict() self._data_shape = None else: # Verify data_shape is set if axes is set. @@ -65,8 +68,11 @@ def __init__(self, header=None, comments=None, axes=None, data_shape=None): "the length of each axis of the assocated cube.") self._data_shape = np.asarray(data_shape) axes = dict(axes) - self._axes = dict([(key, self._sanitize_axis_value(axes.get(key), header[key], key)) - for key in header_keys]) + if not set(axes.keys()).issubset(set(header_keys)): + raise ValueError( + "All axes must correspond to a value in header under the same key.") + self._axes = dict([(key, self._sanitize_axis_value(axis, header[key], key)) + for key, axis in axes.items()]) def _sanitize_axis_value(self, axis, value, key): if axis is None: @@ -137,19 +143,24 @@ def add(self, name, value, comment, axis, overwrite=False): if name in self.keys() and overwrite is not True: raise KeyError(f"'{name}' already exists. 
" "To update an existing metadata entry set overwrite=True.") + if comment is not None: + self._comments[name] = comment if axis is not None: axis = self._sanitize_axis_value(axis, value, name) - self._comments[name] = comment - self._axes[name] = axis + self._axes[name] = axis + elif name in self._axes: + del self._axes[name] self.__setitem__(name, value) # This must be done after updating self._axes otherwise it may error. def remove(self, name): - del self._comments[name] - del self._axes[name] + if name in self._comments: + del self._comments[name] + if name in self._axes: + del self._axes[name] del self[name] def __setitem__(self, key, val): - axis = self.axes[key] + axis = self.axes.get(key, None) if axis is not None: recommendation = "We recommend using the 'add' method to set values." if len(axis) == 1: @@ -216,7 +227,8 @@ def __getitem__(self, item): cumul_dropped_axes = np.cumsum(dropped_axes) # Slice all metadata associated with axes. - for (key, value), axis in zip(self.items(), self.axes.values()): + for key, value in self.items(): + axis = self.axes.get(key, None) if axis is not None: new_item = tuple(item[axis]) if len(new_item) == 1: @@ -229,6 +241,7 @@ def __getitem__(self, item): new_axis = new_axis[new_axis >= 0] if len(new_axis) == 0: new_axis = None - new_meta.add(key, new_value, self.comments[key], new_axis, overwrite=True) + new_meta.add(key, new_value, self.comments.get(key, None), new_axis, + overwrite=True) return new_meta diff --git a/ndcube/tests/test_meta.py b/ndcube/tests/test_meta.py index eb9b4b442..dcc50597a 100644 --- a/ndcube/tests/test_meta.py +++ b/ndcube/tests/test_meta.py @@ -119,7 +119,7 @@ def test_add1(basic_meta): assert name in meta.keys() assert meta[name] == value assert meta.comments[name] == comment - assert meta.axes[name] is None + assert meta.axes.get(name, None) is None def test_add2(basic_meta): @@ -130,7 +130,7 @@ def test_add2(basic_meta): meta.add(name, value, None, axis) assert name in meta.keys() assert meta[name] == value - assert meta.comments[name] is None + assert meta.comments.get(name, None) is None assert meta.axes[name] == np.array([axis]) From cd13518be1a3ba1f459eebac7ede4ad9f810904f Mon Sep 17 00:00:00 2001 From: DanRyanIrish Date: Wed, 25 Aug 2021 11:49:38 +0100 Subject: [PATCH 11/60] Add Meta to top-level init imports. --- ndcube/__init__.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/ndcube/__init__.py b/ndcube/__init__.py index fd3edc4aa..2b53a2f7f 100644 --- a/ndcube/__init__.py +++ b/ndcube/__init__.py @@ -26,9 +26,10 @@ class UnsupportedPythonError(Exception): from .extra_coords import ExtraCoords from .global_coords import GlobalCoords +from .meta import Meta from .ndcollection import NDCollection from .ndcube import NDCube from .ndcube_sequence import NDCubeSequence from .version import version as __version__ -__all__ = ['NDCube', 'NDCubeSequence', "NDCollection", "ExtraCoords", "GlobalCoords"] +__all__ = ["ExtraCoords", "GlobalCoords", "Meta", "NDCollection", "NDCube", "NDCubeSequence"] From ad88f5f2b5ac42569d3d67ecfe04bca89d55502d Mon Sep 17 00:00:00 2001 From: DanRyanIrish Date: Wed, 25 Aug 2021 22:57:07 +0100 Subject: [PATCH 12/60] Enable Meta object's shape to be set without axes being set. 
--- ndcube/meta.py | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/ndcube/meta.py b/ndcube/meta.py index 1d6b29c99..efd1fb4ab 100644 --- a/ndcube/meta.py +++ b/ndcube/meta.py @@ -56,17 +56,21 @@ def __init__(self, header=None, comments=None, axes=None, data_shape=None): "All comments must correspond to a value in header under the same key.") self._comments = comments + # Define data shape. + if data_shape is None: + self._data_shape = data_shape + else: + self._data_shape = np.asarray(data_shape, dtype=int) + # Generate dictionary for axes. if axes is None: self._axes = dict() - self._data_shape = None else: # Verify data_shape is set if axes is set. if not (isinstance(data_shape, collections.abc.Iterable) and all([isinstance(i, numbers.Integral) for i in data_shape])): raise TypeError("If axes is set, data_shape must be an iterable giving " "the length of each axis of the assocated cube.") - self._data_shape = np.asarray(data_shape) axes = dict(axes) if not set(axes.keys()).issubset(set(header_keys)): raise ValueError( From 4611e1e1dbebaf83d328f85be0160222ab2fe917 Mon Sep 17 00:00:00 2001 From: DanRyanIrish Date: Thu, 4 Nov 2021 10:58:59 +0100 Subject: [PATCH 13/60] Avoid possible indexing error when dropping axes during slicing of Meta object. --- ndcube/meta.py | 7 ++----- 1 file changed, 2 insertions(+), 5 deletions(-) diff --git a/ndcube/meta.py b/ndcube/meta.py index efd1fb4ab..baad509ef 100644 --- a/ndcube/meta.py +++ b/ndcube/meta.py @@ -206,7 +206,6 @@ def __getitem__(self, item): new_shape = new_meta.shape for i, axis_item in enumerate(item): if isinstance(axis_item, numbers.Integral): - new_shape = np.delete(new_shape, i) dropped_axes[i] = True elif isinstance(axis_item, slice): start = axis_item.start @@ -217,15 +216,13 @@ def __getitem__(self, item): stop = axis_item.stop if stop is None: stop = self.shape[i] - # Mustn't use new_shape here as indexing will be misaligned - # if an axis was deleted above. if stop < 0: stop = self.shape[i] - stop - new_shape[i - dropped_axes[:i].sum()] = stop - start + new_shape[i] = stop - start else: raise TypeError("Unrecognized slice type. " "Must be an int, slice and tuple of the same.") - new_meta._data_shape = new_shape + new_meta._data_shape = new_shape[np.invert(dropped_axes)] # Calculate the cumulative number of dropped axes. cumul_dropped_axes = np.cumsum(dropped_axes) From cf923f9ce9d45ea9921a544b33b05262ab56a313 Mon Sep 17 00:00:00 2001 From: DanRyanIrish Date: Thu, 18 Nov 2021 15:31:11 +0100 Subject: [PATCH 14/60] Make NDCube slice meta if it has a magic attribute. This gives more control over slicing the meta correctly. --- ndcube/meta.py | 1 + ndcube/mixins/ndslicing.py | 7 +------ 2 files changed, 2 insertions(+), 6 deletions(-) diff --git a/ndcube/meta.py b/ndcube/meta.py index baad509ef..093fdc24e 100644 --- a/ndcube/meta.py +++ b/ndcube/meta.py @@ -36,6 +36,7 @@ class Meta(dict): Must be set if axes input is set. """ def __init__(self, header=None, comments=None, axes=None, data_shape=None): + self.__ndcube_can_slice__ = True self.original_header = header # Sanitize metadata values and instantiate class. diff --git a/ndcube/mixins/ndslicing.py b/ndcube/mixins/ndslicing.py index cd0f37767..b2832e21c 100644 --- a/ndcube/mixins/ndslicing.py +++ b/ndcube/mixins/ndslicing.py @@ -26,12 +26,7 @@ def __getitem__(self, item): sliced_cube._extra_coords = self.extra_coords[item] # Slice metadata if possible. 
- try: + if hasattr(self.meta, "__ndcube_can_slice__") and self.meta.__ndcube_can_slice__: sliced_cube.meta = self.meta[item] - except TypeError as err: - if "unhashable type" not in err.args[0]: - raise err - except KeyError: - pass return sliced_cube From bbe6c12e7e7f8dd2bec074e822b56789949d5cba Mon Sep 17 00:00:00 2001 From: DanRyanIrish Date: Tue, 7 May 2024 15:26:45 +0200 Subject: [PATCH 15/60] Fixes codestyle. --- ndcube/__init__.py | 2 +- ndcube/meta.py | 4 ++-- ndcube/tests/test_meta.py | 1 - 3 files changed, 3 insertions(+), 4 deletions(-) diff --git a/ndcube/__init__.py b/ndcube/__init__.py index 3cbefd5b5..eecd49ebe 100644 --- a/ndcube/__init__.py +++ b/ndcube/__init__.py @@ -23,7 +23,7 @@ class UnsupportedPythonError(Exception): raise UnsupportedPythonError( "sunpy does not support Python < {}".format(__minimum_python_version__)) - + from .extra_coords.extra_coords import ExtraCoords, ExtraCoordsABC from .global_coords import GlobalCoords, GlobalCoordsABC from .meta import Meta diff --git a/ndcube/meta.py b/ndcube/meta.py index 093fdc24e..e510456a4 100644 --- a/ndcube/meta.py +++ b/ndcube/meta.py @@ -11,7 +11,7 @@ class Meta(dict): """ A sliceable object for storing metadata. - Metdata can be linked to a data axis which causes it to be sliced when the + Metadata can be linked to a data axis which causes it to be sliced when the standard Python numeric slicing API is applied to the object. Specific pieces of metadata can be obtain using the dict-like str slicing API. Metadata associated with an axis/axes must have the same length/shape as those axes. @@ -71,7 +71,7 @@ def __init__(self, header=None, comments=None, axes=None, data_shape=None): if not (isinstance(data_shape, collections.abc.Iterable) and all([isinstance(i, numbers.Integral) for i in data_shape])): raise TypeError("If axes is set, data_shape must be an iterable giving " - "the length of each axis of the assocated cube.") + "the length of each axis of the associated cube.") axes = dict(axes) if not set(axes.keys()).issubset(set(header_keys)): raise ValueError( diff --git a/ndcube/tests/test_meta.py b/ndcube/tests/test_meta.py index dcc50597a..97f1f583b 100644 --- a/ndcube/tests/test_meta.py +++ b/ndcube/tests/test_meta.py @@ -4,7 +4,6 @@ import pytest from ndcube.meta import Meta - from .helpers import assert_metas_equal # Fixtures From 86dc36699d3f874b988142a1727b15aec1daac57 Mon Sep 17 00:00:00 2001 From: DanRyanIrish Date: Tue, 7 May 2024 17:13:47 +0200 Subject: [PATCH 16/60] Reverting unintented changes to __init__.py --- ndcube/__init__.py | 17 ----------------- 1 file changed, 17 deletions(-) diff --git a/ndcube/__init__.py b/ndcube/__init__.py index eecd49ebe..e241ed2a1 100644 --- a/ndcube/__init__.py +++ b/ndcube/__init__.py @@ -7,23 +7,6 @@ * Homepage: https://github.com/sunpy/ndcube * Documentation: https://docs.sunpy.org/projects/ndcube/ """ -import sys - -# Enforce Python version check during package import. -# Must be done before any ndcube imports -__minimum_python_version__ = "3.7" - - -class UnsupportedPythonError(Exception): - """Running on an unsupported version of Python.""" - - -if sys.version_info < tuple(int(val) for val in __minimum_python_version__.split('.')): - # This has to be .format to keep backwards compatibly. 
- raise UnsupportedPythonError( - "sunpy does not support Python < {}".format(__minimum_python_version__)) - - from .extra_coords.extra_coords import ExtraCoords, ExtraCoordsABC from .global_coords import GlobalCoords, GlobalCoordsABC from .meta import Meta From 7160dc303fc5de6eba7f28a4c52c13d044f8e186 Mon Sep 17 00:00:00 2001 From: DanRyanIrish Date: Wed, 8 May 2024 18:47:33 +0200 Subject: [PATCH 17/60] Apply suggestions from code review Co-authored-by: Nabil Freij --- ndcube/meta.py | 23 ++++++++++++----------- 1 file changed, 12 insertions(+), 11 deletions(-) diff --git a/ndcube/meta.py b/ndcube/meta.py index e510456a4..9eb1c8fb9 100644 --- a/ndcube/meta.py +++ b/ndcube/meta.py @@ -13,7 +13,7 @@ class Meta(dict): Metadata can be linked to a data axis which causes it to be sliced when the standard Python numeric slicing API is applied to the object. - Specific pieces of metadata can be obtain using the dict-like str slicing API. + Specific pieces of metadata can be obtain using the dict-like string slicing API. Metadata associated with an axis/axes must have the same length/shape as those axes. Parameters @@ -21,17 +21,17 @@ class Meta(dict): header: dict-like The names and values of metadata. - comments: dict-like (optional) + comments: dict-like, optional Comments associated with any of the above pieces of metadata. - axes: dict-like (optional) + axes: dict-like, optional The axis/axes associated with the above metadata values. Each axis value must be None (for no axis association), and `int` or an iterable of `int` if the metadata is associated with multiple axes. Metadata in header without a corresponding entry here are assumed to not be associated with an axis. - data_shape: iterable of `int` (optional) + data_shape: `iterable` of `int`, optional The shape of the data with which this metadata is associated. Must be set if axes input is set. """ @@ -90,7 +90,7 @@ def _sanitize_axis_value(self, axis, value, key): axis = (axis,) if not (isinstance(axis, collections.abc.Iterable) and all([isinstance(i, numbers.Integral) for i in axis])): - raise TypeError("Values in axes must be an int or tuple of ints giving " + raise TypeError("Values in axes must be an integer or iterable of integers giving " "the data axis/axes associated with the metadata.") axis = np.asarray(axis) @@ -131,18 +131,18 @@ def add(self, name, value, comment, axis, overwrite=False): name: `str` The name/label of the metadata. - value: - The value of the metadata. If axes input is not None, this must have the - same length/shape as those axes as defined by self.shape. + value: Any + The value of the metadata. If axes input is not None, this must have the + same length/shape as those axes as defined by ``self.shape``. comment: `str` or `None` Any comment associated with this metadata. Set to None if no comment desired. axis: `int`, iterable of `int`, or `None` - The axis/axes with which the metadata is linked. If not associated with any + The axis/axes with which the metadata is linked. If not associated with any axis, set this to None. - overwrite: `bool` (optional) + overwrite: `bool`, optional If True, overwrites the entry of the name name if already present. """ if name in self.keys() and overwrite is not True: @@ -155,7 +155,8 @@ def add(self, name, value, comment, axis, overwrite=False): self._axes[name] = axis elif name in self._axes: del self._axes[name] - self.__setitem__(name, value) # This must be done after updating self._axes otherwise it may error. 
+ # This must be done after updating self._axes otherwise it may error. + self.__setitem__(name, value) def remove(self, name): if name in self._comments: From b3d439c0636028e936aabe26a6fc96ca15b19d7e Mon Sep 17 00:00:00 2001 From: DanRyanIrish Date: Wed, 8 May 2024 19:07:53 +0200 Subject: [PATCH 18/60] Adds nitpicky exceptions for Meta docstring. --- docs/nitpick-exceptions | 2 ++ 1 file changed, 2 insertions(+) diff --git a/docs/nitpick-exceptions b/docs/nitpick-exceptions index b87dce735..60019c2c1 100644 --- a/docs/nitpick-exceptions +++ b/docs/nitpick-exceptions @@ -16,3 +16,5 @@ py:class a set-like object providing a view on D's keys py:class an object providing a view on D's values py:class None. Remove all items from D. py:class a shallow copy of D +py:class v, remove specified key and return the corresponding value. +py:class None. Update D from dict/iterable E and F. From 1f04d623b33e7c85d290d759ed01174f06ea1cc3 Mon Sep 17 00:00:00 2001 From: DanRyanIrish Date: Wed, 8 May 2024 19:09:18 +0200 Subject: [PATCH 19/60] Try fixing hyperlink to "iterable" Co-authored-by: Nabil Freij --- ndcube/meta.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ndcube/meta.py b/ndcube/meta.py index 9eb1c8fb9..ec609ddc7 100644 --- a/ndcube/meta.py +++ b/ndcube/meta.py @@ -31,7 +31,7 @@ class Meta(dict): Metadata in header without a corresponding entry here are assumed to not be associated with an axis. - data_shape: `iterable` of `int`, optional + data_shape: `iterator` of `int`, optional The shape of the data with which this metadata is associated. Must be set if axes input is set. """ From c27d904f04c3cb191565f3c4b88b3d4bc59e8d7f Mon Sep 17 00:00:00 2001 From: Nabil Freij Date: Wed, 8 May 2024 13:53:16 -0700 Subject: [PATCH 20/60] Apply suggestions from code review --- ndcube/meta.py | 8 +------- 1 file changed, 1 insertion(+), 7 deletions(-) diff --git a/ndcube/meta.py b/ndcube/meta.py index ec609ddc7..95f5a7cc8 100644 --- a/ndcube/meta.py +++ b/ndcube/meta.py @@ -31,7 +31,7 @@ class Meta(dict): Metadata in header without a corresponding entry here are assumed to not be associated with an axis. - data_shape: `iterator` of `int`, optional + data_shape: iterator of `int`, optional The shape of the data with which this metadata is associated. Must be set if axes input is set. """ @@ -39,7 +39,6 @@ def __init__(self, header=None, comments=None, axes=None, data_shape=None): self.__ndcube_can_slice__ = True self.original_header = header - # Sanitize metadata values and instantiate class. if header is None: header = {} else: @@ -47,7 +46,6 @@ def __init__(self, header=None, comments=None, axes=None, data_shape=None): super().__init__(header.items()) header_keys = header.keys() - # Generate dictionary for comments. if comments is None: self._comments = dict() else: @@ -57,7 +55,6 @@ def __init__(self, header=None, comments=None, axes=None, data_shape=None): "All comments must correspond to a value in header under the same key.") self._comments = comments - # Define data shape. if data_shape is None: self._data_shape = data_shape else: @@ -85,7 +82,6 @@ def _sanitize_axis_value(self, axis, value, key): if self.shape is None: raise TypeError("Meta instance does not have a shape so new metadata " "cannot be assigned to an axis.") - # Verify each entry in axes is an iterable of ints. 
if isinstance(axis, numbers.Integral): axis = (axis,) if not (isinstance(axis, collections.abc.Iterable) and all([isinstance(i, numbers.Integral) @@ -197,7 +193,6 @@ def __getitem__(self, item): else: new_meta = copy.deepcopy(self) - # Convert item to array of ints and slices for consistent behaviour. if isinstance(item, (numbers.Integral, slice)): item = [item] item = np.array(list(item) + [slice(None)] * (len(self.shape) - len(item)), @@ -226,7 +221,6 @@ def __getitem__(self, item): "Must be an int, slice and tuple of the same.") new_meta._data_shape = new_shape[np.invert(dropped_axes)] - # Calculate the cumulative number of dropped axes. cumul_dropped_axes = np.cumsum(dropped_axes) # Slice all metadata associated with axes. From 829118973e412786ee8bd81d9c91959a21bcd56b Mon Sep 17 00:00:00 2001 From: Nabil Freij Date: Wed, 8 May 2024 13:56:23 -0700 Subject: [PATCH 21/60] Apply suggestions from code review --- ndcube/meta.py | 6 ------ 1 file changed, 6 deletions(-) diff --git a/ndcube/meta.py b/ndcube/meta.py index 95f5a7cc8..16b25bdf9 100644 --- a/ndcube/meta.py +++ b/ndcube/meta.py @@ -60,11 +60,9 @@ def __init__(self, header=None, comments=None, axes=None, data_shape=None): else: self._data_shape = np.asarray(data_shape, dtype=int) - # Generate dictionary for axes. if axes is None: self._axes = dict() else: - # Verify data_shape is set if axes is set. if not (isinstance(data_shape, collections.abc.Iterable) and all([isinstance(i, numbers.Integral) for i in data_shape])): raise TypeError("If axes is set, data_shape must be an iterable giving " @@ -90,8 +88,6 @@ def _sanitize_axis_value(self, axis, value, key): "the data axis/axes associated with the metadata.") axis = np.asarray(axis) - # Confirm each axis-associated piece of metadata has the same shape - # as its associated axes. shape_error_msg = (f"{key} must have shape {tuple(self.shape[axis])} " f"as it is associated with axes {axis}") if len(axis) == 1: @@ -183,11 +179,9 @@ def __getitem__(self, item): # by typical python numeric slicing API, # i.e. slice the each piece of metadata associated with an axes. - # If item is single string, slicing is simple. if isinstance(item, str): return super().__getitem__(item) - # Else, the item is assumed to be a typical slicing item. elif self.shape is None: raise TypeError("Meta object does not have a shape and so cannot be sliced.") From 5604de011c38ab0fe29572123981171ff1e72eb5 Mon Sep 17 00:00:00 2001 From: Nabil Freij Date: Wed, 8 May 2024 13:57:13 -0700 Subject: [PATCH 22/60] Apply suggestions from code review --- ndcube/tests/helpers.py | 4 ---- 1 file changed, 4 deletions(-) diff --git a/ndcube/tests/helpers.py b/ndcube/tests/helpers.py index 700d16f87..2d79f8516 100644 --- a/ndcube/tests/helpers.py +++ b/ndcube/tests/helpers.py @@ -97,23 +97,19 @@ def assert_metas_equal(test_input, expected_output): f"input: {type(test_input)}; expected: {type(expected_output)}") multi_element_msg = "more than one element is ambiguous" if isinstance(test_input, Meta) and isinstance(expected_output, Meta): - # Check keys are the same. assert test_input.keys() == expected_output.keys() - # Check shapes are the same. if test_input.shape is None or expected_output.shape is None: assert test_input.shape == expected_output.shape else: assert np.allclose(test_input.shape, expected_output.shape) - # Check values and axes are the same. 
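# Note (illustrative, not part of this patch): the try/except below handles
# array-valued metadata, where == compares element-wise and a bare assert raises
# "ValueError: The truth value of an array with more than one element is ambiguous", e.g.
#     assert np.array([1, 2]) == np.array([1, 2])   # raises ValueError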
for test_value, expected_value in zip(test_input.values(), expected_output.values()): try: assert test_value == expected_value except ValueError as err: if multi_element_msg in err.args[0]: assert np.allclose(test_value, expected_value) - # Check axes are the same. for test_axis, expected_axis in zip(test_input.axes.values(), expected_output.axes.values()): assert ((test_axis is None and expected_axis is None) or From cdece8272aaccb9cd8ef5bdce0782b7474af9f72 Mon Sep 17 00:00:00 2001 From: Nabil Freij Date: Wed, 8 May 2024 13:58:58 -0700 Subject: [PATCH 23/60] Apply suggestions from code review --- ndcube/tests/test_meta.py | 9 --------- 1 file changed, 9 deletions(-) diff --git a/ndcube/tests/test_meta.py b/ndcube/tests/test_meta.py index 97f1f583b..e7769243e 100644 --- a/ndcube/tests/test_meta.py +++ b/ndcube/tests/test_meta.py @@ -65,10 +65,8 @@ def test_slice_axis_with_no_meta(basic_meta): def test_slice_away_independent_axis(basic_meta): meta = basic_meta - # Get output item = 0 output = meta[item] - # Build expected result. values = dict([(key, value) for key, value in meta.items()]) values["b"] = values["b"][0] comments = meta.comments @@ -77,19 +75,13 @@ def test_slice_away_independent_axis(basic_meta): axes["c"] -= 1 axes["d"] -= 1 shape = meta.shape[1:] - print(values, comments, axes, shape) expected = Meta(values, comments, axes, shape) - # Compare output and expected. assert_metas_equal(output, expected) def test_slice_dependent_axes(basic_meta): meta = basic_meta - print(meta["a"]) - # Get output output = meta[:, 1:3, 1] - print(meta["a"]) - # Build expected result. values = dict([(key, value) for key, value in meta.items()]) values["c"] = values["c"][1:3, 1] values["d"] = values["d"][1] @@ -99,7 +91,6 @@ def test_slice_dependent_axes(basic_meta): del axes["d"] shape = np.array([2, 2, 5]) expected = Meta(values, comments, axes, shape) - # Compare output and expected. assert_metas_equal(output, expected) From 51617e63a035fc53f0555a3837e38f09c29c8468 Mon Sep 17 00:00:00 2001 From: Nabil Freij Date: Wed, 8 May 2024 13:59:08 -0700 Subject: [PATCH 24/60] Update ndcube/tests/test_meta.py --- ndcube/tests/test_meta.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/ndcube/tests/test_meta.py b/ndcube/tests/test_meta.py index e7769243e..ac99527b8 100644 --- a/ndcube/tests/test_meta.py +++ b/ndcube/tests/test_meta.py @@ -6,8 +6,6 @@ from ndcube.meta import Meta from .helpers import assert_metas_equal -# Fixtures - @pytest.fixture def basic_meta_values(): From 72bccb01c021f68f9dbf416324552746062f8480 Mon Sep 17 00:00:00 2001 From: DanRyanIrish Date: Thu, 9 May 2024 19:22:40 +0200 Subject: [PATCH 25/60] Change denotation of metadata not associated with axes in Meta class from None to empty tuple. --- ndcube/meta.py | 20 ++++++++++---------- ndcube/tests/helpers.py | 3 +-- 2 files changed, 11 insertions(+), 12 deletions(-) diff --git a/ndcube/meta.py b/ndcube/meta.py index ec609ddc7..fcc84d071 100644 --- a/ndcube/meta.py +++ b/ndcube/meta.py @@ -25,11 +25,11 @@ class Meta(dict): Comments associated with any of the above pieces of metadata. axes: dict-like, optional - The axis/axes associated with the above metadata values. - Each axis value must be None (for no axis association), and `int` - or an iterable of `int` if the metadata is associated with multiple axes. - Metadata in header without a corresponding entry here are assumed to not - be associated with an axis. + The axis/axes associated with the metadata denoted by the keys. 
+ Metadata not included are considered not to be associated with any axis. + Each axis value must be an iterable of `int`. An `int` itself is also + acceptable if the metadata is associated with a single axis. An empty + iterable also means the metadata is not associated with any axes. data_shape: `iterator` of `int`, optional The shape of the data with which this metadata is associated. @@ -80,14 +80,14 @@ def __init__(self, header=None, comments=None, axes=None, data_shape=None): for key, axis in axes.items()]) def _sanitize_axis_value(self, axis, value, key): - if axis is None: - return None + if isinstance(axis, numbers.Integral): + axis = (axis,) + if len(axis) == 0: + return tuple() if self.shape is None: raise TypeError("Meta instance does not have a shape so new metadata " "cannot be assigned to an axis.") # Verify each entry in axes is an iterable of ints. - if isinstance(axis, numbers.Integral): - axis = (axis,) if not (isinstance(axis, collections.abc.Iterable) and all([isinstance(i, numbers.Integral) for i in axis])): raise TypeError("Values in axes must be an integer or iterable of integers giving " @@ -123,7 +123,7 @@ def axes(self): def shape(self): return self._data_shape - def add(self, name, value, comment, axis, overwrite=False): + def add(self, name, value, comment=None, axis=None, overwrite=False): """Add a new piece of metadata to instance. Parameters diff --git a/ndcube/tests/helpers.py b/ndcube/tests/helpers.py index 700d16f87..75ff45790 100644 --- a/ndcube/tests/helpers.py +++ b/ndcube/tests/helpers.py @@ -116,8 +116,7 @@ def assert_metas_equal(test_input, expected_output): # Check axes are the same. for test_axis, expected_axis in zip(test_input.axes.values(), expected_output.axes.values()): - assert ((test_axis is None and expected_axis is None) or - all(test_axis == expected_axis)) + assert all(test_axis == expected_axis) else: if not (test_input is None and expected_output is None): assert test_input.keys() == expected_output.keys() From 120596d3310f0845fd43d6aaefc0957079b6c8cb Mon Sep 17 00:00:00 2001 From: DanRyanIrish Date: Thu, 9 May 2024 19:23:41 +0200 Subject: [PATCH 26/60] Make NDCube.rebin remove axis-awareness from metadata associated with rebinned axes. --- ndcube/ndcube.py | 13 ++++++++++++- 1 file changed, 12 insertions(+), 1 deletion(-) diff --git a/ndcube/ndcube.py b/ndcube/ndcube.py index bd6642e15..d38e635f8 100644 --- a/ndcube/ndcube.py +++ b/ndcube/ndcube.py @@ -24,7 +24,7 @@ from astropy.wcs.wcsapi import BaseHighLevelWCS, HighLevelWCSWrapper from astropy.wcs.wcsapi.high_level_api import values_to_high_level_objects -from ndcube import utils +from ndcube import utils, Meta from ndcube.extra_coords.extra_coords import ExtraCoords, ExtraCoordsABC from ndcube.global_coords import GlobalCoords, GlobalCoordsABC from ndcube.mixins import NDCubeSlicingMixin @@ -1207,6 +1207,17 @@ def my_propagate(uncertainty, data, mask, **kwargs): # Resample WCS new_wcs = ResampledLowLevelWCS(self.wcs.low_level_wcs, bin_shape[::-1]) + # If meta is axis-aware, drop axis-awareness for metadata associated with rebinned axes. + if isinstance(self.meta, Meta): + rebinned_axes, = set(np.where(np.asarray(bin_shape) != 1)) + new_meta = copy.deepcopy(self.meta) + null_set = set() + for name, axes in new_meta._axes.item(): + if set(axes).intersection(rebinned_axes) != null_set: + del new_meta._axes[name] + else: + new_meta = self.meta + # Reform NDCube. 
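# Note (editorial observation, grounded in a later commit in this series): new_meta
# is computed above but the constructor below still receives meta=self.meta at this
# point; a subsequent patch switches it to meta=new_meta.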
new_cube = type(self)( data=new_data, From ad39c81297b75dcf1b3d06d0c34ea8566189616b Mon Sep 17 00:00:00 2001 From: DanRyanIrish Date: Thu, 9 May 2024 19:32:30 +0200 Subject: [PATCH 27/60] Fix codestyle --- ndcube/ndcube.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/ndcube/ndcube.py b/ndcube/ndcube.py index d38e635f8..b4dfa760d 100644 --- a/ndcube/ndcube.py +++ b/ndcube/ndcube.py @@ -24,7 +24,7 @@ from astropy.wcs.wcsapi import BaseHighLevelWCS, HighLevelWCSWrapper from astropy.wcs.wcsapi.high_level_api import values_to_high_level_objects -from ndcube import utils, Meta +from ndcube import Meta, utils from ndcube.extra_coords.extra_coords import ExtraCoords, ExtraCoordsABC from ndcube.global_coords import GlobalCoords, GlobalCoordsABC from ndcube.mixins import NDCubeSlicingMixin @@ -1210,7 +1210,7 @@ def my_propagate(uncertainty, data, mask, **kwargs): # If meta is axis-aware, drop axis-awareness for metadata associated with rebinned axes. if isinstance(self.meta, Meta): rebinned_axes, = set(np.where(np.asarray(bin_shape) != 1)) - new_meta = copy.deepcopy(self.meta) + new_meta = deepcopy(self.meta) null_set = set() for name, axes in new_meta._axes.item(): if set(axes).intersection(rebinned_axes) != null_set: From d01f6f659f8f5a327953d96a57b89602ca56bc28 Mon Sep 17 00:00:00 2001 From: DanRyanIrish Date: Fri, 10 May 2024 16:06:35 +0200 Subject: [PATCH 28/60] Add test of meta handling by NDCube.rebin. --- ndcube/conftest.py | 20 +++++++++++++++++++- ndcube/ndcube.py | 6 +++--- ndcube/tests/helpers.py | 6 +++--- ndcube/tests/test_ndcube.py | 16 ++++++++++++++++ 4 files changed, 41 insertions(+), 7 deletions(-) diff --git a/ndcube/conftest.py b/ndcube/conftest.py index 130941e6a..36e63c008 100644 --- a/ndcube/conftest.py +++ b/ndcube/conftest.py @@ -15,7 +15,7 @@ from astropy.time import Time, TimeDelta from astropy.wcs import WCS -from ndcube import ExtraCoords, GlobalCoords, NDCube, NDCubeSequence +from ndcube import ExtraCoords, GlobalCoords, Meta, NDCube, NDCubeSequence # Force MPL to use non-gui backends for testing. try: @@ -334,6 +334,24 @@ def ndcube_4d_ln_lt_l_t(wcs_4d_t_l_lt_ln): return NDCube(data_cube, wcs=wcs_4d_t_l_lt_ln) +@pytest.fixture +def ndcube_4d_axis_aware_meta(wcs_4d_t_l_lt_ln): + shape = (5, 8, 10, 12) + wcs_4d_t_l_lt_ln.array_shape = shape + data_cube = data_nd(shape, dtype=int) + meta = Meta({"a": "scalar", + "slit position": np.arange(shape[0], dtype=int), + "pixel label": np.arange(np.prod(shape[:2])).reshape(shape[:2]), + "line": ["Si IV"] * shape[2], + "exposure time": ([2] * shape[-1]) * u.s}, + data_shape=shape, + axes={"slit position": 0, + "pixel label": (0, 1), + "line": (2,), + "exposure time": 3}) + return NDCube(data_cube, wcs=wcs_4d_t_l_lt_ln, meta=meta) + + @pytest.fixture def ndcube_4d_uncertainty(wcs_4d_t_l_lt_ln): shape = (5, 8, 10, 12) diff --git a/ndcube/ndcube.py b/ndcube/ndcube.py index b4dfa760d..617ded15c 100644 --- a/ndcube/ndcube.py +++ b/ndcube/ndcube.py @@ -1209,10 +1209,10 @@ def my_propagate(uncertainty, data, mask, **kwargs): # If meta is axis-aware, drop axis-awareness for metadata associated with rebinned axes. 
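# Illustrative sketch (assumed, based on the fixture and test added in this patch):
# for a (5, 8, 10, 12) cube rebinned with bin_shape (1, 2, 5, 1), rebinned_axes
# becomes {1, 2}, so "pixel label" (axes (0, 1)) and "line" (axis 2) lose their
# axis-awareness, while "slit position" (axis 0) and "exposure time" (axis 3)
# keep theirs.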
if isinstance(self.meta, Meta): - rebinned_axes, = set(np.where(np.asarray(bin_shape) != 1)) + rebinned_axes = set(np.where(np.asarray(bin_shape) != 1)[0]) new_meta = deepcopy(self.meta) null_set = set() - for name, axes in new_meta._axes.item(): + for name, axes in self.meta.axes.items(): if set(axes).intersection(rebinned_axes) != null_set: del new_meta._axes[name] else: @@ -1224,7 +1224,7 @@ def my_propagate(uncertainty, data, mask, **kwargs): wcs=new_wcs, uncertainty=new_uncertainty, mask=new_mask, - meta=self.meta, + meta=new_meta, unit=new_unit ) new_cube._global_coords = self._global_coords diff --git a/ndcube/tests/helpers.py b/ndcube/tests/helpers.py index 57bc0a107..cffd3aedd 100644 --- a/ndcube/tests/helpers.py +++ b/ndcube/tests/helpers.py @@ -110,9 +110,9 @@ def assert_metas_equal(test_input, expected_output): except ValueError as err: if multi_element_msg in err.args[0]: assert np.allclose(test_value, expected_value) - for test_axis, expected_axis in zip(test_input.axes.values(), - expected_output.axes.values()): - assert all(test_axis == expected_axis) + print(test_input.axes, expected_output.axes) + for key in test_input.axes.keys(): + assert all(test_input.axes[key] == expected_output.axes[key]) else: if not (test_input is None and expected_output is None): assert test_input.keys() == expected_output.keys() diff --git a/ndcube/tests/test_ndcube.py b/ndcube/tests/test_ndcube.py index e85a6c156..461148a47 100644 --- a/ndcube/tests/test_ndcube.py +++ b/ndcube/tests/test_ndcube.py @@ -1,3 +1,4 @@ +import copy from inspect import signature from textwrap import dedent @@ -948,6 +949,21 @@ def test_rebin_no_propagate(ndcube_2d_ln_lt_mask_uncert): assert output.uncertainty is None +def test_rebin_axis_aware_meta(ndcube_4d_axis_aware_meta): + # Execute rebin. + cube = ndcube_4d_axis_aware_meta + bin_shape = (1, 2, 5, 1) + output = cube.rebin(bin_shape, operation=np.sum) + + # Build expected meta + expected_meta = copy.deepcopy(cube.meta) + del expected_meta._axes["pixel label"] + del expected_meta._axes["line"] + + # Confirm output meta is as expected. + helpers.assert_metas_equal(output.meta, expected_meta) + + def test_reproject_adaptive(ndcube_2d_ln_lt, wcs_2d_lt_ln): shape_out = (10, 12) resampled_cube = ndcube_2d_ln_lt.reproject_to(wcs_2d_lt_ln, algorithm='adaptive', From d5ae2ef2aba69641f6277e4b4a6e41cbb79b9527 Mon Sep 17 00:00:00 2001 From: DanRyanIrish Date: Mon, 13 May 2024 11:13:55 +0200 Subject: [PATCH 29/60] Add rebin capability to Meta. --- ndcube/meta.py | 40 +++++++++++++++++++++++++++++++++++++ ndcube/ndcube.py | 9 ++------- ndcube/tests/helpers.py | 1 - ndcube/tests/test_ndcube.py | 1 + 4 files changed, 43 insertions(+), 8 deletions(-) diff --git a/ndcube/meta.py b/ndcube/meta.py index 534f922cf..60d205feb 100644 --- a/ndcube/meta.py +++ b/ndcube/meta.py @@ -37,6 +37,7 @@ class Meta(dict): """ def __init__(self, header=None, comments=None, axes=None, data_shape=None): self.__ndcube_can_slice__ = True + self.__ndcube_can_rebin__ = True self.original_header = header if header is None: @@ -239,3 +240,42 @@ def __getitem__(self, item): overwrite=True) return new_meta + + def rebin(self, bin_shape): + """ + Adjusts axis-aware metadata to stay consistent with a rebinned `~ndcube.NDCube`. + + This is done by simply removing the axis-awareness of metadata associated with + rebinned axes. The metadata itself is not changed or removed. This operation + does not remove axis-awareness from metadata only associated with non-rebinned + axes, i.e. 
axes whose corresponding entries in ``bin_shape`` are 1. + + Parameters + ---------- + bin_shape: `tuple` or `int` + The new lengths of each axis of the associated data. + """ + # Sanitize input. + data_shape = self.shape + if len(bin_shape) != len(data_shape): + raise ValueError(f"bin_shape must be same length as data shape: " + f"{len(bin_shape)} != {len(self.shape)}") + if not all([isinstance(dim, numbers.Integral) for dim in bin_shape]): + raise TypeError("bin_shape must contain only integer types.") + # Convert bin_shape to array. Do this after checking types of elements to avoid + # floats being incorrectly rounded down. + bin_shape = np.asarray(bin_shape, dtype=int) + if any(data_shape % bin_shape): + raise ValueError( + "All elements in bin_shape must be a factor of corresponding element" + f" of data shape: data_shape mod bin_shape = {self.shape % bin_shape}") + # Remove axis-awareness from metadata associated with rebinned axes. + rebinned_axes = set(np.where(bin_shape != 1)[0]) + new_meta = copy.deepcopy(self) + null_set = set() + for name, axes in self.axes.items(): + if set(axes).intersection(rebinned_axes) != null_set: + del new_meta._axes[name] + # Update data shape. + new_meta._data_shape = (data_shape / bin_shape).astype(int) + return new_meta diff --git a/ndcube/ndcube.py b/ndcube/ndcube.py index 617ded15c..4fcc6d049 100644 --- a/ndcube/ndcube.py +++ b/ndcube/ndcube.py @@ -1208,13 +1208,8 @@ def my_propagate(uncertainty, data, mask, **kwargs): new_wcs = ResampledLowLevelWCS(self.wcs.low_level_wcs, bin_shape[::-1]) # If meta is axis-aware, drop axis-awareness for metadata associated with rebinned axes. - if isinstance(self.meta, Meta): - rebinned_axes = set(np.where(np.asarray(bin_shape) != 1)[0]) - new_meta = deepcopy(self.meta) - null_set = set() - for name, axes in self.meta.axes.items(): - if set(axes).intersection(rebinned_axes) != null_set: - del new_meta._axes[name] + if hasattr(self.meta, "__ndcube_can_rebin__") and self.meta.__ndcube_can_rebin__: + new_meta = self.meta.rebin(bin_shape) else: new_meta = self.meta diff --git a/ndcube/tests/helpers.py b/ndcube/tests/helpers.py index cffd3aedd..c9c71b3d1 100644 --- a/ndcube/tests/helpers.py +++ b/ndcube/tests/helpers.py @@ -110,7 +110,6 @@ def assert_metas_equal(test_input, expected_output): except ValueError as err: if multi_element_msg in err.args[0]: assert np.allclose(test_value, expected_value) - print(test_input.axes, expected_output.axes) for key in test_input.axes.keys(): assert all(test_input.axes[key] == expected_output.axes[key]) else: diff --git a/ndcube/tests/test_ndcube.py b/ndcube/tests/test_ndcube.py index 461148a47..5622bdc4a 100644 --- a/ndcube/tests/test_ndcube.py +++ b/ndcube/tests/test_ndcube.py @@ -959,6 +959,7 @@ def test_rebin_axis_aware_meta(ndcube_4d_axis_aware_meta): expected_meta = copy.deepcopy(cube.meta) del expected_meta._axes["pixel label"] del expected_meta._axes["line"] + expected_meta._data_shape = np.array([5, 4, 2, 12], dtype=int) # Confirm output meta is as expected. helpers.assert_metas_equal(output.meta, expected_meta) From 181d86d9c9bb2e8110799aa63105e62cdf2b5768 Mon Sep 17 00:00:00 2001 From: DanRyanIrish Date: Mon, 13 May 2024 11:17:25 +0200 Subject: [PATCH 30/60] Fix codestyle. 
--- ndcube/ndcube.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ndcube/ndcube.py b/ndcube/ndcube.py index 4fcc6d049..e8baef20f 100644 --- a/ndcube/ndcube.py +++ b/ndcube/ndcube.py @@ -24,7 +24,7 @@ from astropy.wcs.wcsapi import BaseHighLevelWCS, HighLevelWCSWrapper from astropy.wcs.wcsapi.high_level_api import values_to_high_level_objects -from ndcube import Meta, utils +from ndcube import utils from ndcube.extra_coords.extra_coords import ExtraCoords, ExtraCoordsABC from ndcube.global_coords import GlobalCoords, GlobalCoordsABC from ndcube.mixins import NDCubeSlicingMixin From f17ba0b1881f7171f10b47b9eb856319a0ea0c17 Mon Sep 17 00:00:00 2001 From: DanRyanIrish Date: Mon, 13 May 2024 11:42:50 +0200 Subject: [PATCH 31/60] Adds tests for Meta.rebin. --- ndcube/tests/test_meta.py | 32 +++++++++++++++++++++++++++++++- 1 file changed, 31 insertions(+), 1 deletion(-) diff --git a/ndcube/tests/test_meta.py b/ndcube/tests/test_meta.py index ac99527b8..e14c5cf26 100644 --- a/ndcube/tests/test_meta.py +++ b/ndcube/tests/test_meta.py @@ -12,7 +12,8 @@ def basic_meta_values(): return {"a": "hello", "b": list(range(10, 25, 10)), "c": np.array([[1, 2, 3, 4], [10, 20, 30, 40], [100, 200, 300, 400]]), - "d": list(range(3, 13, 3)) + "d": list(range(3, 13, 3)), + "e": list(range(2, 8, 2)) } @@ -29,6 +30,7 @@ def basic_axes(): return {"b": 0, "c": (1, 2), "d": (2,), + "e": 1 } @@ -72,6 +74,7 @@ def test_slice_away_independent_axis(basic_meta): del axes["b"] axes["c"] -= 1 axes["d"] -= 1 + axes["e"] -= 1 shape = meta.shape[1:] expected = Meta(values, comments, axes, shape) assert_metas_equal(output, expected) @@ -83,6 +86,7 @@ def test_slice_dependent_axes(basic_meta): values = dict([(key, value) for key, value in meta.items()]) values["c"] = values["c"][1:3, 1] values["d"] = values["d"][1] + values["e"] = values["e"][1:3] comments = meta.comments axes = dict([(key, axis) for key, axis in meta.axes.items()]) axes["c"] = 1 @@ -149,3 +153,29 @@ def test_remove(basic_meta): assert name not in meta.keys() assert name not in meta.comments.keys() assert name not in meta.axes.keys() + + +def test_rebin(basic_meta): + meta = basic_meta + bin_shape = (2, 1, 2, 1) + output = meta.rebin(bin_shape) + # Build expected result. + expected = copy.deepcopy(meta) + del expected._axes["b"] + del expected._axes["c"] + del expected._axes["d"] + expected._data_shape = np.array([1, 3, 2, 5], dtype=int) + assert_metas_equal(output, expected) + + +def test_rebin_wrong_len(basic_meta): + with pytest.raises(ValueError): + basic_meta.rebin((1,)) + +def test_rebin_not_ints(basic_meta): + with pytest.raises(TypeError): + basic_meta.rebin((1, 3.9, 1, 1)) + +def test_rebin_not_factors(basic_meta): + with pytest.raises(ValueError): + basic_meta.rebin((1, 2, 1, 1)) From fb45151944bbd1c9970d7cd74e77391a34d25b7d Mon Sep 17 00:00:00 2001 From: DanRyanIrish Date: Sun, 19 May 2024 09:59:10 +0200 Subject: [PATCH 32/60] Support scalar axis-aware metadata. --- ndcube/meta.py | 57 ++++++++++++++++++++++----------------- ndcube/tests/test_meta.py | 11 ++++++-- 2 files changed, 42 insertions(+), 26 deletions(-) diff --git a/ndcube/meta.py b/ndcube/meta.py index 60d205feb..77f2c405d 100644 --- a/ndcube/meta.py +++ b/ndcube/meta.py @@ -83,7 +83,7 @@ def _sanitize_axis_value(self, axis, value, key): if self.shape is None: raise TypeError("Meta instance does not have a shape so new metadata " "cannot be assigned to an axis.") - # Verify each entry in axes is an iterable of ints. 
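# Illustrative sketch (assumed, not part of this patch): with scalar support, a
# single string or number can be tied to an axis without matching its length, e.g.
#     Meta({"f": "world"}, axes={"f": 0}, data_shape=(2, 3, 4, 5))
# is assumed to validate, while a length-3 list on the length-2 axis 0 would still raise.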
+ # Verify each entry in axes is an iterable of ints or a scalar. if isinstance(axis, numbers.Integral): axis = (axis,) if not (isinstance(axis, collections.abc.Iterable) and all([isinstance(i, numbers.Integral) @@ -94,16 +94,17 @@ def _sanitize_axis_value(self, axis, value, key): shape_error_msg = (f"{key} must have shape {tuple(self.shape[axis])} " f"as it is associated with axes {axis}") - if len(axis) == 1: - if not hasattr(value, "__len__"): - raise TypeError(shape_error_msg) - meta_shape = (len(value),) - else: - if not hasattr(value, "shape"): - raise TypeError(shape_error_msg) - meta_shape = value.shape - if not all(meta_shape == self.shape[axis]): - raise ValueError(shape_error_msg) + if not _isscalar(value): + if len(axis) == 1: + if not hasattr(value, "__len__"): + raise TypeError(shape_error_msg) + meta_shape = (len(value),) + else: + if not hasattr(value, "shape"): + raise TypeError(shape_error_msg) + meta_shape = value.shape + if not all(meta_shape == self.shape[axis]): + raise ValueError(shape_error_msg) return axis @@ -165,16 +166,17 @@ def __setitem__(self, key, val): axis = self.axes.get(key, None) if axis is not None: recommendation = "We recommend using the 'add' method to set values." - if len(axis) == 1: - if not (hasattr(val, "__len__") and len(val) == self.shape[axis[0]]): - raise TypeError(f"{key} must have same length as associated axis, " - f"i.e. axis {axis[0]}: {self.shape[axis[0]]}\n" - f"{recommendation}") - else: - if not (hasattr(val, "shape") and all(val.shape == self.shape[axis])): - raise TypeError(f"{key} must have same shape as associated axes, " - f"i.e axes {axis}: {self.shape[axis]}\n" - f"{recommendation}") + if not _isscalar(val): + if len(axis) == 1: + if not (hasattr(val, "__len__") and len(val) == self.shape[axis[0]]): + raise TypeError(f"{key} must have same length as associated axis, " + f"i.e. axis {axis[0]}: {self.shape[axis[0]]}\n" + f"{recommendation}") + else: + if not (hasattr(val, "shape") and all(val.shape == self.shape[axis])): + raise TypeError(f"{key} must have same shape as associated axes, " + f"i.e axes {axis}: {self.shape[axis]}\n" + f"{recommendation}") super().__setitem__(key, val) def __getitem__(self, item): @@ -226,7 +228,9 @@ def __getitem__(self, item): axis = self.axes.get(key, None) if axis is not None: new_item = tuple(item[axis]) - if len(new_item) == 1: + if _isscalar(value): + new_value = value + elif len(new_item) == 1: new_value = value[new_item[0]] else: new_value = value[new_item] @@ -269,13 +273,18 @@ def rebin(self, bin_shape): raise ValueError( "All elements in bin_shape must be a factor of corresponding element" f" of data shape: data_shape mod bin_shape = {self.shape % bin_shape}") - # Remove axis-awareness from metadata associated with rebinned axes. + # Remove axis-awareness from metadata associated with rebinned axes, + # unless the value is scalar. rebinned_axes = set(np.where(bin_shape != 1)[0]) new_meta = copy.deepcopy(self) null_set = set() for name, axes in self.axes.items(): - if set(axes).intersection(rebinned_axes) != null_set: + if set(axes).intersection(rebinned_axes) != null_set and not _isscalar(self[name]): del new_meta._axes[name] # Update data shape. 
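# Note (illustrative, not part of this patch): _isscalar (defined below) is assumed
# to treat plain Python scalars, strings and scalar Quantities alike, e.g.
#     _isscalar(5)          # True via np.isscalar
#     _isscalar("Si IV")    # True via np.isscalar
#     _isscalar(2 * u.s)    # True via the .isscalar attribute
#     _isscalar([2] * u.s)  # False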
new_meta._data_shape = (data_shape / bin_shape).astype(int) return new_meta + + +def _isscalar(value): + return ((hasattr(value, "isscalar") and value.isscalar) or np.isscalar(value)) diff --git a/ndcube/tests/test_meta.py b/ndcube/tests/test_meta.py index e14c5cf26..b6e555876 100644 --- a/ndcube/tests/test_meta.py +++ b/ndcube/tests/test_meta.py @@ -13,7 +13,9 @@ def basic_meta_values(): "b": list(range(10, 25, 10)), "c": np.array([[1, 2, 3, 4], [10, 20, 30, 40], [100, 200, 300, 400]]), "d": list(range(3, 13, 3)), - "e": list(range(2, 8, 2)) + "e": list(range(2, 8, 2)), + "f": "world", + "g": "!" } @@ -30,7 +32,9 @@ def basic_axes(): return {"b": 0, "c": (1, 2), "d": (2,), - "e": 1 + "e": 1, + "f": 0, + "g": (1, 2) } @@ -72,9 +76,11 @@ def test_slice_away_independent_axis(basic_meta): comments = meta.comments axes = dict([(key, axis) for key, axis in meta.axes.items()]) del axes["b"] + del axes["f"] axes["c"] -= 1 axes["d"] -= 1 axes["e"] -= 1 + axes["g"] -= 1 shape = meta.shape[1:] expected = Meta(values, comments, axes, shape) assert_metas_equal(output, expected) @@ -90,6 +96,7 @@ def test_slice_dependent_axes(basic_meta): comments = meta.comments axes = dict([(key, axis) for key, axis in meta.axes.items()]) axes["c"] = 1 + axes["g"] = 1 del axes["d"] shape = np.array([2, 2, 5]) expected = Meta(values, comments, axes, shape) From 9726b65878407cbd013c92d65a5240295d041f5b Mon Sep 17 00:00:00 2001 From: DanRyanIrish Date: Mon, 20 May 2024 01:58:04 +0200 Subject: [PATCH 33/60] Add support for axis-aware metadata that give a scalar per axis. --- ndcube/meta.py | 100 ++++++++++++++++++++++++++------------ ndcube/tests/test_meta.py | 35 +++++++++++-- 2 files changed, 99 insertions(+), 36 deletions(-) diff --git a/ndcube/meta.py b/ndcube/meta.py index 77f2c405d..572fbecfe 100644 --- a/ndcube/meta.py +++ b/ndcube/meta.py @@ -93,19 +93,19 @@ def _sanitize_axis_value(self, axis, value, key): axis = np.asarray(axis) shape_error_msg = (f"{key} must have shape {tuple(self.shape[axis])} " - f"as it is associated with axes {axis}") - if not _isscalar(value): - if len(axis) == 1: - if not hasattr(value, "__len__"): - raise TypeError(shape_error_msg) + f"as its associated axes {axis}, ", + f"or same length as number of associated axes ({len(axis)}). " + f"Has shape {value.shape if hasattr(value, 'shape') else len(value)}") + if _not_scalar(value): + if hasattr(value, "shape"): + meta_shape = value.shape + elif hasattr(value, "__len__"): meta_shape = (len(value),) else: - if not hasattr(value, "shape"): - raise TypeError(shape_error_msg) - meta_shape = value.shape - if not all(meta_shape == self.shape[axis]): + raise TypeError(shape_error_msg) + data_shape = tuple(self.shape[axis]) + if not (meta_shape == data_shape or (len(axis) > 1 and meta_shape == (len(data_shape),))): raise ValueError(shape_error_msg) - return axis @property @@ -166,14 +166,16 @@ def __setitem__(self, key, val): axis = self.axes.get(key, None) if axis is not None: recommendation = "We recommend using the 'add' method to set values." - if not _isscalar(val): + if _not_scalar(val): + data_shape = tuple(self.shape[axis]) if len(axis) == 1: - if not (hasattr(val, "__len__") and len(val) == self.shape[axis[0]]): + if not (hasattr(val, "__len__") and (len(val),) == data_shape): raise TypeError(f"{key} must have same length as associated axis, " f"i.e. 
axis {axis[0]}: {self.shape[axis[0]]}\n" f"{recommendation}") else: - if not (hasattr(val, "shape") and all(val.shape == self.shape[axis])): + if ((not (hasattr(val, "shape") and val.shape == data_shape)) + and (not (hasattr(val, "__len__") and len(val) == len(data_shape)))): raise TypeError(f"{key} must have same shape as associated axes, " f"i.e axes {axis}: {self.shape[axis]}\n" f"{recommendation}") @@ -195,11 +197,12 @@ def __getitem__(self, item): new_meta = copy.deepcopy(self) if isinstance(item, (numbers.Integral, slice)): item = [item] - item = np.array(list(item) + [slice(None)] * (len(self.shape) - len(item)), + naxes = len(self.shape) + item = np.array(list(item) + [slice(None)] * (naxes - len(item)), dtype=object) # Edit data shape and calculate which axis will be dropped. - dropped_axes = np.zeros(len(self.shape), dtype=bool) + dropped_axes = np.zeros(naxes, dtype=bool) new_shape = new_meta.shape for i, axis_item in enumerate(item): if isinstance(axis_item, numbers.Integral): @@ -219,27 +222,53 @@ def __getitem__(self, item): else: raise TypeError("Unrecognized slice type. " "Must be an int, slice and tuple of the same.") - new_meta._data_shape = new_shape[np.invert(dropped_axes)] - - cumul_dropped_axes = np.cumsum(dropped_axes) + kept_axes = np.invert(dropped_axes) + new_meta._data_shape = new_shape[kept_axes] # Slice all metadata associated with axes. for key, value in self.items(): axis = self.axes.get(key, None) if axis is not None: - new_item = tuple(item[axis]) - if _isscalar(value): + val_is_scalar = not _not_scalar(value) + if val_is_scalar: new_value = value - elif len(new_item) == 1: - new_value = value[new_item[0]] else: - new_value = value[new_item] - new_axis = np.array([-1 if isinstance(i, numbers.Integral) else a - for i, a in zip(new_item, axis)]) - new_axis -= cumul_dropped_axes[axis] - new_axis = new_axis[new_axis >= 0] + scalar_per_axis = (len(axis) > 1 + and not (hasattr(value, "shape") + and value.shape == tuple(self.shape[axis])) + and len(value) == len(axis)) + if scalar_per_axis: + # If shape/len of metadata value equals number of axes, + # the metadata represents a single value per axis. + # Change item so values for dropped axes are dropped. + new_item = kept_axes[axis] + else: + new_item = tuple(item[axis]) + # Slice metadata value. + try: + new_value = value[new_item] + except: + # If value cannot be sliced by fancy slicing, convert it + # it to an array, slice it, and then if necessary, convert + # it back to its original type. + new_value = (np.asanyarray(value)[new_item]) + if hasattr(new_value, "__len__"): + new_value = type(value)(new_value) + if scalar_per_axis and len(new_value) == 1: + # If value gives a scalar for each axis, the value itself must + # be scalar if it only applies to one axis. Therefore, if + # slice down length is one, extract value out of iterable. + new_value = new_value[0] + # Update axis indices. + new_axis = np.asarray(list( + set(axis).intersection(set(np.arange(naxes)[kept_axes])) + )) if len(new_axis) == 0: new_axis = None + else: + cumul_dropped_axes = np.cumsum(dropped_axes)[new_axis] + new_axis -= cumul_dropped_axes + # Overwrite metadata value with newly sliced version. 
new_meta.add(key, new_value, self.comments.get(key, None), new_axis, overwrite=True) @@ -274,17 +303,26 @@ def rebin(self, bin_shape): "All elements in bin_shape must be a factor of corresponding element" f" of data shape: data_shape mod bin_shape = {self.shape % bin_shape}") # Remove axis-awareness from metadata associated with rebinned axes, - # unless the value is scalar. + # unless the value is scalar or gives a single value for each axis. rebinned_axes = set(np.where(bin_shape != 1)[0]) new_meta = copy.deepcopy(self) null_set = set() for name, axes in self.axes.items(): - if set(axes).intersection(rebinned_axes) != null_set and not _isscalar(self[name]): + value = self[name] + if _not_scalar(value) and set(axes).intersection(rebinned_axes) != null_set: del new_meta._axes[name] # Update data shape. new_meta._data_shape = (data_shape / bin_shape).astype(int) return new_meta -def _isscalar(value): - return ((hasattr(value, "isscalar") and value.isscalar) or np.isscalar(value)) +def _not_scalar(value): + return ( + ( + hasattr(value, "shape") + or hasattr(value, "__len__") + ) + and not + ( + isinstance(value, str) + )) diff --git a/ndcube/tests/test_meta.py b/ndcube/tests/test_meta.py index b6e555876..edfb7a22d 100644 --- a/ndcube/tests/test_meta.py +++ b/ndcube/tests/test_meta.py @@ -15,7 +15,7 @@ def basic_meta_values(): "d": list(range(3, 13, 3)), "e": list(range(2, 8, 2)), "f": "world", - "g": "!" + "g": ["hello", "world", "!"] } @@ -34,7 +34,7 @@ def basic_axes(): "d": (2,), "e": 1, "f": 0, - "g": (1, 2) + "g": (0, 1, 2) } @@ -73,6 +73,7 @@ def test_slice_away_independent_axis(basic_meta): output = meta[item] values = dict([(key, value) for key, value in meta.items()]) values["b"] = values["b"][0] + values["g"] = ["world", "!"] comments = meta.comments axes = dict([(key, axis) for key, axis in meta.axes.items()]) del axes["b"] @@ -80,12 +81,34 @@ def test_slice_away_independent_axis(basic_meta): axes["c"] -= 1 axes["d"] -= 1 axes["e"] -= 1 - axes["g"] -= 1 + axes["g"] = (0, 1) shape = meta.shape[1:] expected = Meta(values, comments, axes, shape) assert_metas_equal(output, expected) +def test_slice_away_independent_and_dependent_axis(basic_meta): + meta = basic_meta + item = (0, 1) + output = meta[item] + values = dict([(key, value) for key, value in meta.items()]) + values["b"] = values["b"][0] + values["c"] = values["c"][1] + values["e"] = values["e"][1] + values["g"] = "!" 
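# Note (illustrative, not part of this patch): "g" is axis-aligned over axes
# (0, 1, 2), so slicing with item (0, 1) drops axes 0 and 1 and keeps only the
# entry for the surviving axis, leaving the scalar "!".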
+ comments = meta.comments + axes = dict([(key, axis) for key, axis in meta.axes.items()]) + del axes["b"] + del axes["e"] + del axes["f"] + axes["c"] = 0 + axes["d"] = 0 + axes["g"] = 0 + shape = meta.shape[2:] + expected = Meta(values, comments, axes, shape) + assert_metas_equal(output, expected) + + def test_slice_dependent_axes(basic_meta): meta = basic_meta output = meta[:, 1:3, 1] @@ -93,11 +116,12 @@ def test_slice_dependent_axes(basic_meta): values["c"] = values["c"][1:3, 1] values["d"] = values["d"][1] values["e"] = values["e"][1:3] + values["g"] = values["g"][:2] comments = meta.comments axes = dict([(key, axis) for key, axis in meta.axes.items()]) - axes["c"] = 1 - axes["g"] = 1 del axes["d"] + axes["c"] = 1 + axes["g"] = (0, 1) shape = np.array([2, 2, 5]) expected = Meta(values, comments, axes, shape) assert_metas_equal(output, expected) @@ -171,6 +195,7 @@ def test_rebin(basic_meta): del expected._axes["b"] del expected._axes["c"] del expected._axes["d"] + del expected._axes["g"] expected._data_shape = np.array([1, 3, 2, 5], dtype=int) assert_metas_equal(output, expected) From a6266027cc6cfc3cc50a25f1f7b4777976eb2fbb Mon Sep 17 00:00:00 2001 From: DanRyanIrish Date: Mon, 20 May 2024 13:01:38 +0200 Subject: [PATCH 34/60] Clarify behavior of axis-aligned and grid-aligned metadata and fix code to reflect newly defined behavior. --- ndcube/meta.py | 170 ++++++++++++++++++++++++-------------- ndcube/tests/test_meta.py | 3 +- 2 files changed, 108 insertions(+), 65 deletions(-) diff --git a/ndcube/meta.py b/ndcube/meta.py index 572fbecfe..9c28c48e8 100644 --- a/ndcube/meta.py +++ b/ndcube/meta.py @@ -28,12 +28,49 @@ class Meta(dict): The axis/axes associated with the metadata denoted by the keys. Metadata not included are considered not to be associated with any axis. Each axis value must be an iterable of `int`. An `int` itself is also - acceptable if the metadata is associated with a single axis. An empty - iterable also means the metadata is not associated with any axes. + acceptable if the metadata is associated with a single axis. + The value of axis-assigned metadata in header must be same length as + number of associated axes (axis-aligned), or same shape as the associated + data array's axes (grid-aligned). data_shape: iterator of `int`, optional The shape of the data with which this metadata is associated. Must be set if axes input is set. + + Notes + ----- + **Axis-aware Metadata** + There are two valid types of axis-aware metadata: axis-aligned and grid-aligned. + Axis-aligned metadata gives one value per associated axis, while grid-aligned + metadata gives a value for each data array element in the associated axes. + Consequently, axis-aligned metadata has the same length as the number of + associated axes, while grid-aligned metadata has the same shape as the associated + axes. To avoid confusion, axis-aligned metadata that is only associated with one + axis must be scalar or a string. Length-1 objects (excluding strings) are assumed + to be grid-aligned and associated with a length-1 axis. + + **Slicing and Rebinning Axis-aware Metadata** + Axis-aligned metadata is only considered valid if the associated axes are present. + Therefore, axis-aligned metadata is only changed if an associated axis is dropped + by an operation, e.g. slicing. In such a case, the value associated with the + dropped axes is also dropped and hence lost. If the axis of a 1-axis-aligned + metadata value (scalar) is slicing away, the metadata key is entirely removed + from the Meta object. 
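# Illustrative example (assumed, not part of the patch docstring): for
# data_shape (3, 4), {"g": ["a", "b"]} with axes (0, 1) is axis-aligned (one
# value per associated axis), whereas {"c": np.zeros((3, 4))} with axes (0, 1)
# is grid-aligned (one value per data array element).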
+ + Grid-aligned metadata is mirrors the data array, it is sliced following + the same rules with one exception. If an axis is dropped by slicing, the metadata + name is kept, but its value is set to the value at the row/column where the + axis/axes was sliced away, and the metadata axis-awareness is removed. This is + similar to how coordinate values are transferred to ``global_coords`` when their + associated axes are sliced away. + + Note that because rebinning does not drop axes, axis-aligned metadata is unaltered + by rebinning. By contrast, grid-aligned metadata must necessarily by affected by + rebinning. However, how it is affected depends on the nature of the metadata and + there is no generalized solution. Therefore, this class does not alter the shape + or values of grid-aligned metadata during rebinning, but simply removes its + axis-awareness. If specific pieces of metadata have a known way to behave during + rebinning, this can be handled by subclasses or mixins. """ def __init__(self, header=None, comments=None, axes=None, data_shape=None): self.__ndcube_can_slice__ = True @@ -76,36 +113,28 @@ def __init__(self, header=None, comments=None, axes=None, data_shape=None): for key, axis in axes.items()]) def _sanitize_axis_value(self, axis, value, key): + axis_err_msg = ("Values in axes must be an integer or iterable of integers giving " + f"the data axis/axes associated with the metadata. axis = {axis}.") if isinstance(axis, numbers.Integral): axis = (axis,) if len(axis) == 0: - return tuple() + return ValueError(axis_err_msg) if self.shape is None: raise TypeError("Meta instance does not have a shape so new metadata " "cannot be assigned to an axis.") # Verify each entry in axes is an iterable of ints or a scalar. - if isinstance(axis, numbers.Integral): - axis = (axis,) if not (isinstance(axis, collections.abc.Iterable) and all([isinstance(i, numbers.Integral) for i in axis])): - raise TypeError("Values in axes must be an integer or iterable of integers giving " - "the data axis/axes associated with the metadata.") + return ValueError(axis_err_msg) axis = np.asarray(axis) - - shape_error_msg = (f"{key} must have shape {tuple(self.shape[axis])} " - f"as its associated axes {axis}, ", - f"or same length as number of associated axes ({len(axis)}). " - f"Has shape {value.shape if hasattr(value, 'shape') else len(value)}") if _not_scalar(value): - if hasattr(value, "shape"): - meta_shape = value.shape - elif hasattr(value, "__len__"): - meta_shape = (len(value),) - else: - raise TypeError(shape_error_msg) - data_shape = tuple(self.shape[axis]) - if not (meta_shape == data_shape or (len(axis) > 1 and meta_shape == (len(data_shape),))): - raise ValueError(shape_error_msg) + axis_shape = tuple(self.shape[axis]) + if not _is_grid_aligned(value, axis_shape) and not _is_axis_aligned(value, axis_shape): + raise ValueError( + f"{key} must have shape {tuple(self.shape[axis])} " + f"as its associated axes {axis}, ", + f"or same length as number of associated axes ({len(axis)}). " + f"Has shape {value.shape if hasattr(value, 'shape') else len(value)}") return axis @property @@ -165,20 +194,15 @@ def remove(self, name): def __setitem__(self, key, val): axis = self.axes.get(key, None) if axis is not None: - recommendation = "We recommend using the 'add' method to set values." 
if _not_scalar(val): - data_shape = tuple(self.shape[axis]) - if len(axis) == 1: - if not (hasattr(val, "__len__") and (len(val),) == data_shape): - raise TypeError(f"{key} must have same length as associated axis, " - f"i.e. axis {axis[0]}: {self.shape[axis[0]]}\n" - f"{recommendation}") - else: - if ((not (hasattr(val, "shape") and val.shape == data_shape)) - and (not (hasattr(val, "__len__") and len(val) == len(data_shape)))): - raise TypeError(f"{key} must have same shape as associated axes, " - f"i.e axes {axis}: {self.shape[axis]}\n" - f"{recommendation}") + axis_shape = tuple(self.shape[axis]) + if not _is_grid_aligned(val, axis_shape) and not _is_axis_aligned(val, axis_shape): + raise TypeError( + f"{key} is already associated with axis/axes {axis}. val must therefore " + f"must either have same length as number associated axes ({len(axis)}), " + f"or the same shape as associated data axes {tuple(self.shape[axis])}. " + f"val shape = {val.shape if hasattr(val, 'shape') else (len(val),)}\n" + "We recommend using the 'add' method to set values.") super().__setitem__(key, val) def __getitem__(self, item): @@ -228,19 +252,28 @@ def __getitem__(self, item): # Slice all metadata associated with axes. for key, value in self.items(): axis = self.axes.get(key, None) + drop_key = False if axis is not None: - val_is_scalar = not _not_scalar(value) - if val_is_scalar: + # Calculate new axis indices. + new_axis = np.asarray(list( + set(axis).intersection(set(np.arange(naxes)[kept_axes])) + )) + if len(new_axis) == 0: + new_axis = None + else: + cumul_dropped_axes = np.cumsum(dropped_axes)[new_axis] + new_axis -= cumul_dropped_axes + + # Calculate sliced metadata values. + axis_shape = tuple(self.shape[axis]) + if _is_scalar(value): new_value = value + # If scalar metadata's axes have been dropped, mark metadata to be dropped. + if new_axis is None: + drop_key = True else: - scalar_per_axis = (len(axis) > 1 - and not (hasattr(value, "shape") - and value.shape == tuple(self.shape[axis])) - and len(value) == len(axis)) - if scalar_per_axis: - # If shape/len of metadata value equals number of axes, - # the metadata represents a single value per axis. - # Change item so values for dropped axes are dropped. + value_is_axis_aligned = _is_axis_aligned(value, axis_shape) + if value_is_axis_aligned: new_item = kept_axes[axis] else: new_item = tuple(item[axis]) @@ -254,23 +287,15 @@ def __getitem__(self, item): new_value = (np.asanyarray(value)[new_item]) if hasattr(new_value, "__len__"): new_value = type(value)(new_value) - if scalar_per_axis and len(new_value) == 1: - # If value gives a scalar for each axis, the value itself must - # be scalar if it only applies to one axis. Therefore, if - # slice down length is one, extract value out of iterable. - new_value = new_value[0] - # Update axis indices. - new_axis = np.asarray(list( - set(axis).intersection(set(np.arange(naxes)[kept_axes])) - )) - if len(new_axis) == 0: - new_axis = None - else: - cumul_dropped_axes = np.cumsum(dropped_axes)[new_axis] - new_axis -= cumul_dropped_axes + # If axis-aligned metadata sliced down to length 1, convert to scalar. + if value_is_axis_aligned and len(new_value) == 1: + new_value = new_value[0] # Overwrite metadata value with newly sliced version. 
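# Note (illustrative, not part of this patch): drop_key handles scalar metadata
# whose only associated axes are sliced away; e.g. "f" = "world" tied solely to
# axis 0 is removed entirely by meta[0], mirroring the updated tests, which
# delete values["f"] from the expected result.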
- new_meta.add(key, new_value, self.comments.get(key, None), new_axis, - overwrite=True) + if drop_key: + new_meta.remove(key) + else: + new_meta.add(key, new_value, self.comments.get(key, None), new_axis, + overwrite=True) return new_meta @@ -302,14 +327,13 @@ def rebin(self, bin_shape): raise ValueError( "All elements in bin_shape must be a factor of corresponding element" f" of data shape: data_shape mod bin_shape = {self.shape % bin_shape}") - # Remove axis-awareness from metadata associated with rebinned axes, - # unless the value is scalar or gives a single value for each axis. + # Remove axis-awareness from grid-aligned metadata associated with rebinned axes. rebinned_axes = set(np.where(bin_shape != 1)[0]) new_meta = copy.deepcopy(self) null_set = set() for name, axes in self.axes.items(): - value = self[name] - if _not_scalar(value) and set(axes).intersection(rebinned_axes) != null_set: + if (_is_grid_aligned(self[name], tuple(self.shape[axes])) + and set(axes).intersection(rebinned_axes) != null_set): del new_meta._axes[name] # Update data shape. new_meta._data_shape = (data_shape / bin_shape).astype(int) @@ -326,3 +350,21 @@ def _not_scalar(value): ( isinstance(value, str) )) + + +def _is_scalar(value): + return not _not_scalar(value) + + +def _is_grid_aligned(value, axis_shape): + if _is_scalar(value): + return False + value_shape = value.shape if hasattr(value, "shape") else (len(value),) + if value_shape != axis_shape: + return False + return True + + +def _is_axis_aligned(value, axis_shape): + len_value = len(value) if _not_scalar(value) else 1 + return not _is_grid_aligned(value, axis_shape) and len_value == len(axis_shape) diff --git a/ndcube/tests/test_meta.py b/ndcube/tests/test_meta.py index edfb7a22d..a87947fbc 100644 --- a/ndcube/tests/test_meta.py +++ b/ndcube/tests/test_meta.py @@ -74,6 +74,7 @@ def test_slice_away_independent_axis(basic_meta): values = dict([(key, value) for key, value in meta.items()]) values["b"] = values["b"][0] values["g"] = ["world", "!"] + del values["f"] comments = meta.comments axes = dict([(key, axis) for key, axis in meta.axes.items()]) del axes["b"] @@ -92,6 +93,7 @@ def test_slice_away_independent_and_dependent_axis(basic_meta): item = (0, 1) output = meta[item] values = dict([(key, value) for key, value in meta.items()]) + del values["f"] values["b"] = values["b"][0] values["c"] = values["c"][1] values["e"] = values["e"][1] @@ -195,7 +197,6 @@ def test_rebin(basic_meta): del expected._axes["b"] del expected._axes["c"] del expected._axes["d"] - del expected._axes["g"] expected._data_shape = np.array([1, 3, 2, 5], dtype=int) assert_metas_equal(output, expected) From e848e9fd88d13f2f9381283a2a0a8558869b062a Mon Sep 17 00:00:00 2001 From: DanRyanIrish Date: Mon, 20 May 2024 23:45:11 +0200 Subject: [PATCH 35/60] Make API of Meta.rebin more flexible. --- ndcube/meta.py | 35 +++++++++++++++++++---------------- ndcube/ndcube.py | 4 +++- ndcube/tests/test_meta.py | 18 +++--------------- 3 files changed, 25 insertions(+), 32 deletions(-) diff --git a/ndcube/meta.py b/ndcube/meta.py index 9c28c48e8..e7ef95732 100644 --- a/ndcube/meta.py +++ b/ndcube/meta.py @@ -299,7 +299,7 @@ def __getitem__(self, item): return new_meta - def rebin(self, bin_shape): + def rebin(self, rebinned_axes, new_shape): """ Adjusts axis-aware metadata to stay consistent with a rebinned `~ndcube.NDCube`. @@ -310,25 +310,28 @@ def rebin(self, bin_shape): Parameters ---------- - bin_shape: `tuple` or `int` - The new lengths of each axis of the associated data. 
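# Illustrative sketch of the revised call convention (assumed, not part of this
# patch): for a meta instance with shape (2, 3, 4, 5), the old call
#     meta.rebin((2, 1, 2, 1))
# is assumed to become
#     meta.rebin(rebinned_axes={0, 2}, new_shape=(1, 3, 2, 5))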
+ rebinned_axes: `set` of `int` + Set of array indices of axes that are rebinned. + new_shape: `tuple` of `int` + The new shape of the rebinned data. """ # Sanitize input. data_shape = self.shape - if len(bin_shape) != len(data_shape): - raise ValueError(f"bin_shape must be same length as data shape: " - f"{len(bin_shape)} != {len(self.shape)}") - if not all([isinstance(dim, numbers.Integral) for dim in bin_shape]): - raise TypeError("bin_shape must contain only integer types.") - # Convert bin_shape to array. Do this after checking types of elements to avoid - # floats being incorrectly rounded down. - bin_shape = np.asarray(bin_shape, dtype=int) - if any(data_shape % bin_shape): + if not isinstance(rebinned_axes, set): + raise TypeError( + f"rebinned_axes must be a set. type of rebinned_axes is {type(rebinned_axes)}") + if not all([isinstance(dim, numbers.Integral) for dim in rebinned_axes]): + raise ValueError("All elements of rebinned_axes must be ints.") + list_axes = list(rebinned_axes) + if min(list_axes) < 0 or max(list_axes) >= len(data_shape): raise ValueError( - "All elements in bin_shape must be a factor of corresponding element" - f" of data shape: data_shape mod bin_shape = {self.shape % bin_shape}") + f"Elements in rebinned_axes must be in range 0--{len(data_shape)-1} inclusive.") + if len(new_shape) != len(data_shape): + raise ValueError(f"new_shape must be a tuple of same length as data shape: " + f"{len(new_shape)} != {len(self.shape)}") + if not all([isinstance(dim, numbers.Integral) for dim in new_shape]): + raise TypeError("bin_shape must contain only integer types.") # Remove axis-awareness from grid-aligned metadata associated with rebinned axes. - rebinned_axes = set(np.where(bin_shape != 1)[0]) new_meta = copy.deepcopy(self) null_set = set() for name, axes in self.axes.items(): @@ -336,7 +339,7 @@ def rebin(self, bin_shape): and set(axes).intersection(rebinned_axes) != null_set): del new_meta._axes[name] # Update data shape. - new_meta._data_shape = (data_shape / bin_shape).astype(int) + new_meta._data_shape = np.asarray(new_shape).astype(int) return new_meta diff --git a/ndcube/ndcube.py b/ndcube/ndcube.py index 1f2bb30a9..16a6a2f0d 100644 --- a/ndcube/ndcube.py +++ b/ndcube/ndcube.py @@ -1200,7 +1200,9 @@ def my_propagate(uncertainty, data, mask, **kwargs): # If meta is axis-aware, drop axis-awareness for metadata associated with rebinned axes. if hasattr(self.meta, "__ndcube_can_rebin__") and self.meta.__ndcube_can_rebin__: - new_meta = self.meta.rebin(bin_shape) + new_shape = (np.asarray(self.shape) / np.asarray(bin_shape)).astype(int) + rebinned_axes = set(np.where(bin_shape != 1)[0]) + new_meta = self.meta.rebin(rebinned_axes, new_shape) else: new_meta = self.meta diff --git a/ndcube/tests/test_meta.py b/ndcube/tests/test_meta.py index a87947fbc..1680034b4 100644 --- a/ndcube/tests/test_meta.py +++ b/ndcube/tests/test_meta.py @@ -190,8 +190,9 @@ def test_remove(basic_meta): def test_rebin(basic_meta): meta = basic_meta - bin_shape = (2, 1, 2, 1) - output = meta.rebin(bin_shape) + rebinned_axes = {0, 2} + new_shape = (1, 3, 2, 5) + output = meta.rebin(rebinned_axes, new_shape) # Build expected result. 
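# Note (illustrative, not part of this patch): NDCube.rebin is assumed to derive
# these arguments from a bin_shape array as
#     rebinned_axes = set(np.where(bin_shape != 1)[0])
#     new_shape = (np.asarray(self.shape) / np.asarray(bin_shape)).astype(int)
# before delegating to meta.rebin(rebinned_axes, new_shape).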
expected = copy.deepcopy(meta) del expected._axes["b"] @@ -199,16 +200,3 @@ def test_rebin(basic_meta): del expected._axes["d"] expected._data_shape = np.array([1, 3, 2, 5], dtype=int) assert_metas_equal(output, expected) - - -def test_rebin_wrong_len(basic_meta): - with pytest.raises(ValueError): - basic_meta.rebin((1,)) - -def test_rebin_not_ints(basic_meta): - with pytest.raises(TypeError): - basic_meta.rebin((1, 3.9, 1, 1)) - -def test_rebin_not_factors(basic_meta): - with pytest.raises(ValueError): - basic_meta.rebin((1, 2, 1, 1)) From 754385e23f0f26660ac4bd133c579cefbb2ea7e5 Mon Sep 17 00:00:00 2001 From: DanRyanIrish Date: Tue, 2 Jul 2024 11:19:31 +0100 Subject: [PATCH 36/60] change name for metadata used by Meta class from header to meta --- ndcube/meta.py | 28 ++++++++++++++-------------- 1 file changed, 14 insertions(+), 14 deletions(-) diff --git a/ndcube/meta.py b/ndcube/meta.py index e7ef95732..f61e4eb36 100644 --- a/ndcube/meta.py +++ b/ndcube/meta.py @@ -18,7 +18,7 @@ class Meta(dict): Parameters ---------- - header: dict-like + meta: dict-like The names and values of metadata. comments: dict-like, optional @@ -29,7 +29,7 @@ class Meta(dict): Metadata not included are considered not to be associated with any axis. Each axis value must be an iterable of `int`. An `int` itself is also acceptable if the metadata is associated with a single axis. - The value of axis-assigned metadata in header must be same length as + The value of axis-assigned metadata in meta must be same length as number of associated axes (axis-aligned), or same shape as the associated data array's axes (grid-aligned). @@ -72,25 +72,25 @@ class Meta(dict): axis-awareness. If specific pieces of metadata have a known way to behave during rebinning, this can be handled by subclasses or mixins. 
""" - def __init__(self, header=None, comments=None, axes=None, data_shape=None): + def __init__(self, meta=None, comments=None, axes=None, data_shape=None): self.__ndcube_can_slice__ = True self.__ndcube_can_rebin__ = True - self.original_header = header + self.original_meta = meta - if header is None: - header = {} + if meta is None: + meta = {} else: - header = dict(header) - super().__init__(header.items()) - header_keys = header.keys() + meta = dict(header) + super().__init__(meta.items()) + meta_keys = meta.keys() if comments is None: self._comments = dict() else: comments = dict(comments) - if not set(comments.keys()).issubset(set(header_keys)): + if not set(comments.keys()).issubset(set(meta_keys)): raise ValueError( - "All comments must correspond to a value in header under the same key.") + "All comments must correspond to a value in meta under the same key.") self._comments = comments if data_shape is None: @@ -106,10 +106,10 @@ def __init__(self, header=None, comments=None, axes=None, data_shape=None): raise TypeError("If axes is set, data_shape must be an iterable giving " "the length of each axis of the associated cube.") axes = dict(axes) - if not set(axes.keys()).issubset(set(header_keys)): + if not set(axes.keys()).issubset(set(meta_keys)): raise ValueError( - "All axes must correspond to a value in header under the same key.") - self._axes = dict([(key, self._sanitize_axis_value(axis, header[key], key)) + "All axes must correspond to a value in meta under the same key.") + self._axes = dict([(key, self._sanitize_axis_value(axis, meta[key], key)) for key, axis in axes.items()]) def _sanitize_axis_value(self, axis, value, key): From e8f95350c042e92818611cf8844ea037a38f2c03 Mon Sep 17 00:00:00 2001 From: DanRyanIrish Date: Tue, 2 Jul 2024 11:20:39 +0100 Subject: [PATCH 37/60] Update ndcube/meta.py Co-authored-by: Stuart Mumford --- ndcube/meta.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/ndcube/meta.py b/ndcube/meta.py index f61e4eb36..2d9fb609d 100644 --- a/ndcube/meta.py +++ b/ndcube/meta.py @@ -150,7 +150,8 @@ def shape(self): return self._data_shape def add(self, name, value, comment=None, axis=None, overwrite=False): - """Add a new piece of metadata to instance. + """ + Add a new piece of metadata to instance. Parameters ---------- From fef0de63042ec07c536f9ebe8986f280ac64a221 Mon Sep 17 00:00:00 2001 From: DanRyanIrish Date: Tue, 2 Jul 2024 11:35:19 +0100 Subject: [PATCH 38/60] Rename Meta to NDMeta. Also fix bugs introduced by changin header to meta as NDMeta's internal name for the metadata. 
--- ndcube/__init__.py | 2 +- ndcube/conftest.py | 4 ++-- ndcube/meta.py | 12 ++++++------ ndcube/tests/helpers.py | 4 ++-- ndcube/tests/test_meta.py | 12 ++++++------ 5 files changed, 17 insertions(+), 17 deletions(-) diff --git a/ndcube/__init__.py b/ndcube/__init__.py index e241ed2a1..a9051923f 100644 --- a/ndcube/__init__.py +++ b/ndcube/__init__.py @@ -9,7 +9,7 @@ """ from .extra_coords.extra_coords import ExtraCoords, ExtraCoordsABC from .global_coords import GlobalCoords, GlobalCoordsABC -from .meta import Meta +from .meta import NDMeta from .ndcollection import NDCollection from .ndcube import NDCube, NDCubeBase from .ndcube_sequence import NDCubeSequence, NDCubeSequenceBase diff --git a/ndcube/conftest.py b/ndcube/conftest.py index 36e63c008..878f20b8a 100644 --- a/ndcube/conftest.py +++ b/ndcube/conftest.py @@ -15,7 +15,7 @@ from astropy.time import Time, TimeDelta from astropy.wcs import WCS -from ndcube import ExtraCoords, GlobalCoords, Meta, NDCube, NDCubeSequence +from ndcube import ExtraCoords, GlobalCoords, NDMeta, NDCube, NDCubeSequence # Force MPL to use non-gui backends for testing. try: @@ -339,7 +339,7 @@ def ndcube_4d_axis_aware_meta(wcs_4d_t_l_lt_ln): shape = (5, 8, 10, 12) wcs_4d_t_l_lt_ln.array_shape = shape data_cube = data_nd(shape, dtype=int) - meta = Meta({"a": "scalar", + meta = NDMeta({"a": "scalar", "slit position": np.arange(shape[0], dtype=int), "pixel label": np.arange(np.prod(shape[:2])).reshape(shape[:2]), "line": ["Si IV"] * shape[2], diff --git a/ndcube/meta.py b/ndcube/meta.py index 2d9fb609d..73701aaf8 100644 --- a/ndcube/meta.py +++ b/ndcube/meta.py @@ -4,10 +4,10 @@ import numpy as np -__all__ = ["Meta"] +__all__ = ["NDMeta"] -class Meta(dict): +class NDMeta(dict): """ A sliceable object for storing metadata. @@ -55,7 +55,7 @@ class Meta(dict): by an operation, e.g. slicing. In such a case, the value associated with the dropped axes is also dropped and hence lost. If the axis of a 1-axis-aligned metadata value (scalar) is slicing away, the metadata key is entirely removed - from the Meta object. + from the NDMeta object. Grid-aligned metadata is mirrors the data array, it is sliced following the same rules with one exception. If an axis is dropped by slicing, the metadata @@ -80,7 +80,7 @@ def __init__(self, meta=None, comments=None, axes=None, data_shape=None): if meta is None: meta = {} else: - meta = dict(header) + meta = dict(meta) super().__init__(meta.items()) meta_keys = meta.keys() @@ -120,7 +120,7 @@ def _sanitize_axis_value(self, axis, value, key): if len(axis) == 0: return ValueError(axis_err_msg) if self.shape is None: - raise TypeError("Meta instance does not have a shape so new metadata " + raise TypeError("NDMeta instance does not have a shape so new metadata " "cannot be assigned to an axis.") # Verify each entry in axes is an iterable of ints or a scalar. 
if not (isinstance(axis, collections.abc.Iterable) and all([isinstance(i, numbers.Integral) @@ -216,7 +216,7 @@ def __getitem__(self, item): return super().__getitem__(item) elif self.shape is None: - raise TypeError("Meta object does not have a shape and so cannot be sliced.") + raise TypeError("NDMeta object does not have a shape and so cannot be sliced.") else: new_meta = copy.deepcopy(self) diff --git a/ndcube/tests/helpers.py b/ndcube/tests/helpers.py index c9c71b3d1..6ba325ed5 100644 --- a/ndcube/tests/helpers.py +++ b/ndcube/tests/helpers.py @@ -19,7 +19,7 @@ from astropy.wcs.wcsapi.wrappers.sliced_wcs import sanitize_slices from ndcube import NDCube, NDCubeSequence -from ndcube.meta import Meta +from ndcube.meta import NDMeta __all__ = ['figure_test', 'get_hash_library_name', @@ -96,7 +96,7 @@ def assert_metas_equal(test_input, expected_output): "input and expected are of different type. " f"input: {type(test_input)}; expected: {type(expected_output)}") multi_element_msg = "more than one element is ambiguous" - if isinstance(test_input, Meta) and isinstance(expected_output, Meta): + if isinstance(test_input, NDMeta) and isinstance(expected_output, NDMeta): assert test_input.keys() == expected_output.keys() if test_input.shape is None or expected_output.shape is None: diff --git a/ndcube/tests/test_meta.py b/ndcube/tests/test_meta.py index 1680034b4..313f8575f 100644 --- a/ndcube/tests/test_meta.py +++ b/ndcube/tests/test_meta.py @@ -3,7 +3,7 @@ import numpy as np import pytest -from ndcube.meta import Meta +from ndcube.meta import NDMeta from .helpers import assert_metas_equal @@ -45,12 +45,12 @@ def basic_data_shape(): @pytest.fixture def basic_meta(basic_meta_values, basic_comments, basic_axes, basic_data_shape): - return Meta(basic_meta_values, basic_comments, basic_axes, basic_data_shape) + return NDMeta(basic_meta_values, basic_comments, basic_axes, basic_data_shape) @pytest.fixture def no_shape_meta(): - return Meta({"a": "hello"}) + return NDMeta({"a": "hello"}) def test_shape(basic_meta, basic_data_shape): @@ -84,7 +84,7 @@ def test_slice_away_independent_axis(basic_meta): axes["e"] -= 1 axes["g"] = (0, 1) shape = meta.shape[1:] - expected = Meta(values, comments, axes, shape) + expected = NDMeta(values, comments, axes, shape) assert_metas_equal(output, expected) @@ -107,7 +107,7 @@ def test_slice_away_independent_and_dependent_axis(basic_meta): axes["d"] = 0 axes["g"] = 0 shape = meta.shape[2:] - expected = Meta(values, comments, axes, shape) + expected = NDMeta(values, comments, axes, shape) assert_metas_equal(output, expected) @@ -125,7 +125,7 @@ def test_slice_dependent_axes(basic_meta): axes["c"] = 1 axes["g"] = (0, 1) shape = np.array([2, 2, 5]) - expected = Meta(values, comments, axes, shape) + expected = NDMeta(values, comments, axes, shape) assert_metas_equal(output, expected) From 28eafbbf5d6e86dcad6056d7129aaf20710cd4fa Mon Sep 17 00:00:00 2001 From: DanRyanIrish Date: Tue, 2 Jul 2024 11:39:58 +0100 Subject: [PATCH 39/60] Move ndcube_can_slice and ndcube_can_rebin to class attributes of NDMeta. --- ndcube/meta.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/ndcube/meta.py b/ndcube/meta.py index 73701aaf8..a5a7ec1be 100644 --- a/ndcube/meta.py +++ b/ndcube/meta.py @@ -72,9 +72,10 @@ class NDMeta(dict): axis-awareness. If specific pieces of metadata have a known way to behave during rebinning, this can be handled by subclasses or mixins. 
""" + __ndcube_can_slice__ = True + __ndcube_can_rebin__ = True + def __init__(self, meta=None, comments=None, axes=None, data_shape=None): - self.__ndcube_can_slice__ = True - self.__ndcube_can_rebin__ = True self.original_meta = meta if meta is None: From 40b8b261e053ffb36b76bef796d1d68c28a29626 Mon Sep 17 00:00:00 2001 From: DanRyanIrish Date: Tue, 2 Jul 2024 14:38:25 +0100 Subject: [PATCH 40/60] Create NDMetaABC. --- ndcube/meta.py | 85 ++++++++++++++++++++++++++++++++++++-------------- 1 file changed, 62 insertions(+), 23 deletions(-) diff --git a/ndcube/meta.py b/ndcube/meta.py index a5a7ec1be..879d59cd3 100644 --- a/ndcube/meta.py +++ b/ndcube/meta.py @@ -1,3 +1,4 @@ +import abc import copy import numbers import collections.abc @@ -7,7 +8,7 @@ __all__ = ["NDMeta"] -class NDMeta(dict): +class NDMetaABC(collections.abc.Mapping): """ A sliceable object for storing metadata. @@ -72,6 +73,65 @@ class NDMeta(dict): axis-awareness. If specific pieces of metadata have a known way to behave during rebinning, this can be handled by subclasses or mixins. """ + @property + @abc.abstractmethod + def axes(self): + """ + Mapping from metadata keys to axes with which they are associated. + + Metadata not associated with any axes need not be represented here. + """ + + @property + @abc.abstractmethod + def comments(self): + """ + Mapping from metadata keys to associated comments. + + Metadata without a comment need not be represented here. + """ + + @property + @abc.abstractmethod + def shape(self): + """ + The shape of the data with which the metadata is associated. + """ + + @abc.abstractmethod + def add(self, name, value, comment=None, axis=None, overwrite=False): + """ + Add a new piece of metadata to instance. + + Parameters + ---------- + name: `str` + The name/label of the metadata. + + value: Any + The value of the metadata. If axes input is not None, this must have the + same length/shape as those axes as defined by ``self.shape``. + + comment: `str` or `None` + Any comment associated with this metadata. Set to None if no comment desired. + + axis: `int`, iterable of `int`, or `None` + The axis/axes with which the metadata is linked. If not associated with any + axis, set this to None. + + overwrite: `bool`, optional + If True, overwrites the entry of the name name if already present. + """ + + @abc.abstractmethod + def rebin(self, rebinned_axes, new_shape): + """ + Adjusts grid-aware metadata to stay consistent with rebinned data. + """ + + +class NDMeta(dict, NDMetaABC): + # Docstring in ABC __ndcube_can_slice__ = True __ndcube_can_rebin__ = True @@ -151,28 +211,7 @@ def shape(self): return self._data_shape def add(self, name, value, comment=None, axis=None, overwrite=False): - """ - Add a new piece of metadata to instance. - - Parameters - ---------- - name: `str` - The name/label of the metadata. - - value: Any - The value of the metadata. If axes input is not None, this must have the - same length/shape as those axes as defined by ``self.shape``. - - comment: `str` or `None` - Any comment associated with this metadata. Set to None if no comment desired. - - axis: `int`, iterable of `int`, or `None` - The axis/axes with which the metadata is linked. If not associated with any - axis, set this to None. - - overwrite: `bool`, optional - If True, overwrites the entry of the name name if already present. - """ + # Docstring in ABC. if name in self.keys() and overwrite is not True: raise KeyError(f"'{name}' already exists. 
" "To update an existing metadata entry set overwrite=True.") From 3ba2d9c2a2062425b9c934e53e5c571ec157ac4b Mon Sep 17 00:00:00 2001 From: DanRyanIrish Date: Tue, 2 Jul 2024 14:45:42 +0100 Subject: [PATCH 41/60] Move slicing by data item in NDMeta to new slice method. --- ndcube/meta.py | 206 ++++++++++++++++++++----------------- ndcube/mixins/ndslicing.py | 2 +- ndcube/tests/test_meta.py | 8 +- 3 files changed, 118 insertions(+), 98 deletions(-) diff --git a/ndcube/meta.py b/ndcube/meta.py index 879d59cd3..32093760d 100644 --- a/ndcube/meta.py +++ b/ndcube/meta.py @@ -123,6 +123,17 @@ def add(self, name, value, comment=None, axis=None, overwrite=False): If True, overwrites the entry of the name name if already present. """ + @property + @abc.abstractmethod + def slice(self): + """ + A helper class which, when sliced, returns a new NDMeta with axis- and grid-aligned metadata sliced. + + Example + ------- + >>> sliced_meta = meta.slice[0:3, :, 2] # doctest: +SKIP + """ + @abc.abstractmethod def rebin(self, rebinned_axes, new_shape): """ @@ -246,99 +257,10 @@ def __setitem__(self, key, val): "We recommend using the 'add' method to set values.") super().__setitem__(key, val) - def __getitem__(self, item): - # There are two ways to slice: - # by key, or - # by typical python numeric slicing API, - # i.e. slice the each piece of metadata associated with an axes. - - if isinstance(item, str): - return super().__getitem__(item) - - elif self.shape is None: - raise TypeError("NDMeta object does not have a shape and so cannot be sliced.") - - else: - new_meta = copy.deepcopy(self) - if isinstance(item, (numbers.Integral, slice)): - item = [item] - naxes = len(self.shape) - item = np.array(list(item) + [slice(None)] * (naxes - len(item)), - dtype=object) - - # Edit data shape and calculate which axis will be dropped. - dropped_axes = np.zeros(naxes, dtype=bool) - new_shape = new_meta.shape - for i, axis_item in enumerate(item): - if isinstance(axis_item, numbers.Integral): - dropped_axes[i] = True - elif isinstance(axis_item, slice): - start = axis_item.start - if start is None: - start = 0 - if start < 0: - start = self.shape[i] - start - stop = axis_item.stop - if stop is None: - stop = self.shape[i] - if stop < 0: - stop = self.shape[i] - stop - new_shape[i] = stop - start - else: - raise TypeError("Unrecognized slice type. " - "Must be an int, slice and tuple of the same.") - kept_axes = np.invert(dropped_axes) - new_meta._data_shape = new_shape[kept_axes] - - # Slice all metadata associated with axes. - for key, value in self.items(): - axis = self.axes.get(key, None) - drop_key = False - if axis is not None: - # Calculate new axis indices. - new_axis = np.asarray(list( - set(axis).intersection(set(np.arange(naxes)[kept_axes])) - )) - if len(new_axis) == 0: - new_axis = None - else: - cumul_dropped_axes = np.cumsum(dropped_axes)[new_axis] - new_axis -= cumul_dropped_axes - - # Calculate sliced metadata values. - axis_shape = tuple(self.shape[axis]) - if _is_scalar(value): - new_value = value - # If scalar metadata's axes have been dropped, mark metadata to be dropped. - if new_axis is None: - drop_key = True - else: - value_is_axis_aligned = _is_axis_aligned(value, axis_shape) - if value_is_axis_aligned: - new_item = kept_axes[axis] - else: - new_item = tuple(item[axis]) - # Slice metadata value. - try: - new_value = value[new_item] - except: - # If value cannot be sliced by fancy slicing, convert it - # it to an array, slice it, and then if necessary, convert - # it back to its original type. 
- new_value = (np.asanyarray(value)[new_item]) - if hasattr(new_value, "__len__"): - new_value = type(value)(new_value) - # If axis-aligned metadata sliced down to length 1, convert to scalar. - if value_is_axis_aligned and len(new_value) == 1: - new_value = new_value[0] - # Overwrite metadata value with newly sliced version. - if drop_key: - new_meta.remove(key) - else: - new_meta.add(key, new_value, self.comments.get(key, None), new_axis, - overwrite=True) - - return new_meta + @property + def slice(self): + # Docstring in ABC. + return _NDMetaSlicer(self) def rebin(self, rebinned_axes, new_shape): """ @@ -384,6 +306,104 @@ def rebin(self, rebinned_axes, new_shape): return new_meta +class _NDMetaSlicer: + """ + Helper class to slice an NDMeta instance using a slicing item. + + Parameters + ---------- + meta: `NDMetaABC` + The metadata object to slice. + """ + def __init__(self, meta): + self.meta = meta + + def __getitem__(self, item): + if self.meta.shape is None: + raise TypeError("NDMeta object does not have a shape and so cannot be sliced.") + + new_meta = copy.deepcopy(self.meta) + if isinstance(item, (numbers.Integral, slice)): + item = [item] + naxes = len(self.meta.shape) + item = np.array(list(item) + [slice(None)] * (naxes - len(item)), + dtype=object) + + # Edit data shape and calculate which axis will be dropped. + dropped_axes = np.zeros(naxes, dtype=bool) + new_shape = new_meta.shape + for i, axis_item in enumerate(item): + if isinstance(axis_item, numbers.Integral): + dropped_axes[i] = True + elif isinstance(axis_item, slice): + start = axis_item.start + if start is None: + start = 0 + if start < 0: + start = self.meta.shape[i] - start + stop = axis_item.stop + if stop is None: + stop = self.meta.shape[i] + if stop < 0: + stop = self.meta.shape[i] - stop + new_shape[i] = stop - start + else: + raise TypeError("Unrecognized slice type. " + "Must be an int, slice and tuple of the same.") + kept_axes = np.invert(dropped_axes) + new_meta._data_shape = new_shape[kept_axes] + + # Slice all metadata associated with axes. + for key, value in self.meta.items(): + axis = self.meta.axes.get(key, None) + drop_key = False + if axis is not None: + # Calculate new axis indices. + new_axis = np.asarray(list( + set(axis).intersection(set(np.arange(naxes)[kept_axes])) + )) + if len(new_axis) == 0: + new_axis = None + else: + cumul_dropped_axes = np.cumsum(dropped_axes)[new_axis] + new_axis -= cumul_dropped_axes + + # Calculate sliced metadata values. + axis_shape = tuple(self.meta.shape[axis]) + if _is_scalar(value): + new_value = value + # If scalar metadata's axes have been dropped, mark metadata to be dropped. + if new_axis is None: + drop_key = True + else: + value_is_axis_aligned = _is_axis_aligned(value, axis_shape) + if value_is_axis_aligned: + new_item = kept_axes[axis] + else: + new_item = tuple(item[axis]) + # Slice metadata value. + try: + new_value = value[new_item] + except: + # If value cannot be sliced by fancy slicing, convert it + # it to an array, slice it, and then if necessary, convert + # it back to its original type. + new_value = (np.asanyarray(value)[new_item]) + if hasattr(new_value, "__len__"): + new_value = type(value)(new_value) + # If axis-aligned metadata sliced down to length 1, convert to scalar. + if value_is_axis_aligned and len(new_value) == 1: + new_value = new_value[0] + # Overwrite metadata value with newly sliced version. 
+ if drop_key: + new_meta.remove(key) + else: + new_meta.add(key, new_value, self.meta.comments.get(key, None), new_axis, + overwrite=True) + + return new_meta + + def _not_scalar(value): return ( ( diff --git a/ndcube/mixins/ndslicing.py b/ndcube/mixins/ndslicing.py index cdb086d0d..b1cbde57e 100644 --- a/ndcube/mixins/ndslicing.py +++ b/ndcube/mixins/ndslicing.py @@ -27,6 +27,6 @@ def __getitem__(self, item): # Slice metadata if possible. if hasattr(self.meta, "__ndcube_can_slice__") and self.meta.__ndcube_can_slice__: - sliced_cube.meta = self.meta[item] + sliced_cube.meta = self.meta.slice(item) return sliced_cube diff --git a/ndcube/tests/test_meta.py b/ndcube/tests/test_meta.py index 313f8575f..015cb8a30 100644 --- a/ndcube/tests/test_meta.py +++ b/ndcube/tests/test_meta.py @@ -61,7 +61,7 @@ def test_shape(basic_meta, basic_data_shape): def test_slice_axis_with_no_meta(basic_meta): meta = basic_meta - output = meta[:, :, :, 0] + output = meta.slice[:, :, :, 0] expected = copy.deepcopy(meta) expected._data_shape = meta._data_shape[:-1] assert_metas_equal(output, expected) @@ -70,7 +70,7 @@ def test_slice_axis_with_no_meta(basic_meta): def test_slice_away_independent_axis(basic_meta): meta = basic_meta item = 0 - output = meta[item] + output = meta.slice[item] values = dict([(key, value) for key, value in meta.items()]) values["b"] = values["b"][0] values["g"] = ["world", "!"] @@ -91,7 +91,7 @@ def test_slice_away_independent_axis(basic_meta): def test_slice_away_independent_and_dependent_axis(basic_meta): meta = basic_meta item = (0, 1) - output = meta[item] + output = meta.slice[item] values = dict([(key, value) for key, value in meta.items()]) del values["f"] values["b"] = values["b"][0] @@ -113,7 +113,7 @@ def test_slice_away_independent_and_dependent_axis(basic_meta): def test_slice_dependent_axes(basic_meta): meta = basic_meta - output = meta[:, 1:3, 1] + output = meta.slice[:, 1:3, 1] values = dict([(key, value) for key, value in meta.items()]) values["c"] = values["c"][1:3, 1] values["d"] = values["d"][1] From f723445cfd29803120184d14451f9a90598351da Mon Sep 17 00:00:00 2001 From: DanRyanIrish Date: Tue, 2 Jul 2024 15:16:14 +0100 Subject: [PATCH 42/60] Change NDMeta.shape to data_shape. --- ndcube/meta.py | 36 ++++++++++++++++++------------------ ndcube/tests/helpers.py | 6 +++--- ndcube/tests/test_meta.py | 6 +++--- 3 files changed, 24 insertions(+), 24 deletions(-) diff --git a/ndcube/meta.py b/ndcube/meta.py index 32093760d..8d88c0984 100644 --- a/ndcube/meta.py +++ b/ndcube/meta.py @@ -93,7 +93,7 @@ def comments(self): @property @abc.abstractmethod - def shape(self): + def data_shape(self): """ The shape of the data with which the metadata is associated. """ @@ -110,7 +110,7 @@ def add(self, name, value, comment=None, axis=None, overwrite=False): value: Any The value of the metadata. If axes input is not None, this must have the - same length/shape as those axes as defined by ``self.shape``. + same length/shape as those axes as defined by ``self.data_shape``. comment: `str` or `None` Any comment associated with this metadata. Set to None if no comment desired. @@ -191,7 +191,7 @@ def _sanitize_axis_value(self, axis, value, key): axis = (axis,) if len(axis) == 0: return ValueError(axis_err_msg) - if self.shape is None: + if self.data_shape is None: raise TypeError("NDMeta instance does not have a shape so new metadata " "cannot be assigned to an axis.") # Verify each entry in axes is an iterable of ints or a scalar. 
@@ -200,10 +200,10 @@ def _sanitize_axis_value(self, axis, value, key): return ValueError(axis_err_msg) axis = np.asarray(axis) if _not_scalar(value): - axis_shape = tuple(self.shape[axis]) + axis_shape = tuple(self.data_shape[axis]) if not _is_grid_aligned(value, axis_shape) and not _is_axis_aligned(value, axis_shape): raise ValueError( - f"{key} must have shape {tuple(self.shape[axis])} " + f"{key} must have shape {tuple(self.data_shape[axis])} " f"as its associated axes {axis}, ", f"or same length as number of associated axes ({len(axis)}). " f"Has shape {value.shape if hasattr(value, 'shape') else len(value)}") @@ -218,7 +218,7 @@ def axes(self): return self._axes @property - def shape(self): + def data_shape(self): return self._data_shape def add(self, name, value, comment=None, axis=None, overwrite=False): @@ -247,12 +247,12 @@ def __setitem__(self, key, val): axis = self.axes.get(key, None) if axis is not None: if _not_scalar(val): - axis_shape = tuple(self.shape[axis]) + axis_shape = tuple(self.data_shape[axis]) if not _is_grid_aligned(val, axis_shape) and not _is_axis_aligned(val, axis_shape): raise TypeError( f"{key} is already associated with axis/axes {axis}. val must therefore " f"must either have same length as number associated axes ({len(axis)}), " - f"or the same shape as associated data axes {tuple(self.shape[axis])}. " + f"or the same shape as associated data axes {tuple(self.data_shape[axis])}. " f"val shape = {val.shape if hasattr(val, 'shape') else (len(val),)}\n" "We recommend using the 'add' method to set values.") super().__setitem__(key, val) @@ -279,7 +279,7 @@ def rebin(self, rebinned_axes, new_shape): The new shape of the rebinned data. """ # Sanitize input. - data_shape = self.shape + data_shape = self.data_shape if not isinstance(rebinned_axes, set): raise TypeError( f"rebinned_axes must be a set. type of rebinned_axes is {type(rebinned_axes)}") @@ -291,14 +291,14 @@ def rebin(self, rebinned_axes, new_shape): f"Elements in rebinned_axes must be in range 0--{len(data_shape)-1} inclusive.") if len(new_shape) != len(data_shape): raise ValueError(f"new_shape must be a tuple of same length as data shape: " - f"{len(new_shape)} != {len(self.shape)}") + f"{len(new_shape)} != {len(self.data_shape)}") if not all([isinstance(dim, numbers.Integral) for dim in new_shape]): raise TypeError("bin_shape must contain only integer types.") # Remove axis-awareness from grid-aligned metadata associated with rebinned axes. new_meta = copy.deepcopy(self) null_set = set() for name, axes in self.axes.items(): - if (_is_grid_aligned(self[name], tuple(self.shape[axes])) + if (_is_grid_aligned(self[name], tuple(self.data_shape[axes])) and set(axes).intersection(rebinned_axes) != null_set): del new_meta._axes[name] # Update data shape. @@ -319,19 +319,19 @@ def __init__(self, meta): self.meta = meta def __getitem__(self, item): - if self.meta.shape is None: + if self.meta.data_shape is None: raise TypeError("NDMeta object does not have a shape and so cannot be sliced.") new_meta = copy.deepcopy(self.meta) if isinstance(item, (numbers.Integral, slice)): item = [item] - naxes = len(self.meta.shape) + naxes = len(self.meta.data_shape) item = np.array(list(item) + [slice(None)] * (naxes - len(item)), dtype=object) # Edit data shape and calculate which axis will be dropped. 
dropped_axes = np.zeros(naxes, dtype=bool) - new_shape = new_meta.shape + new_shape = new_meta.data_shape for i, axis_item in enumerate(item): if isinstance(axis_item, numbers.Integral): dropped_axes[i] = True @@ -340,12 +340,12 @@ def __getitem__(self, item): if start is None: start = 0 if start < 0: - start = self.meta.shape[i] - start + start = self.meta.data_shape[i] - start stop = axis_item.stop if stop is None: - stop = self.meta.shape[i] + stop = self.meta.data_shape[i] if stop < 0: - stop = self.meta.shape[i] - stop + stop = self.meta.data_shape[i] - stop new_shape[i] = stop - start else: raise TypeError("Unrecognized slice type. " @@ -369,7 +369,7 @@ def __getitem__(self, item): new_axis -= cumul_dropped_axes # Calculate sliced metadata values. - axis_shape = tuple(self.meta.shape[axis]) + axis_shape = tuple(self.meta.data_shape[axis]) if _is_scalar(value): new_value = value # If scalar metadata's axes have been dropped, mark metadata to be dropped. diff --git a/ndcube/tests/helpers.py b/ndcube/tests/helpers.py index 6ba325ed5..9b81cbccb 100644 --- a/ndcube/tests/helpers.py +++ b/ndcube/tests/helpers.py @@ -99,10 +99,10 @@ def assert_metas_equal(test_input, expected_output): if isinstance(test_input, NDMeta) and isinstance(expected_output, NDMeta): assert test_input.keys() == expected_output.keys() - if test_input.shape is None or expected_output.shape is None: - assert test_input.shape == expected_output.shape + if test_input.data_shape is None or expected_output.data_shape is None: + assert test_input.data_shape == expected_output.data_shape else: - assert np.allclose(test_input.shape, expected_output.shape) + assert np.allclose(test_input.data_shape, expected_output.data_shape) for test_value, expected_value in zip(test_input.values(), expected_output.values()): try: diff --git a/ndcube/tests/test_meta.py b/ndcube/tests/test_meta.py index 015cb8a30..e7186730b 100644 --- a/ndcube/tests/test_meta.py +++ b/ndcube/tests/test_meta.py @@ -56,7 +56,7 @@ def no_shape_meta(): def test_shape(basic_meta, basic_data_shape): meta = basic_meta shape = np.asarray(basic_data_shape) - assert all(meta.shape == shape) + assert all(meta.data_shape == shape) def test_slice_axis_with_no_meta(basic_meta): @@ -83,7 +83,7 @@ def test_slice_away_independent_axis(basic_meta): axes["d"] -= 1 axes["e"] -= 1 axes["g"] = (0, 1) - shape = meta.shape[1:] + shape = meta.data_shape[1:] expected = NDMeta(values, comments, axes, shape) assert_metas_equal(output, expected) @@ -106,7 +106,7 @@ def test_slice_away_independent_and_dependent_axis(basic_meta): axes["c"] = 0 axes["d"] = 0 axes["g"] = 0 - shape = meta.shape[2:] + shape = meta.data_shape[2:] expected = NDMeta(values, comments, axes, shape) assert_metas_equal(output, expected) From 6d84e5632fb4b88fb49c17b23229c8405bb2f200 Mon Sep 17 00:00:00 2001 From: DanRyanIrish Date: Tue, 2 Jul 2024 23:09:28 +0100 Subject: [PATCH 43/60] Make NDMeta infer data_shape if not provided. Currently creates a bug in NDMeta.rebin. 
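As a rough sketch of the new behaviour (the keys and lengths below are hypothetical): when no data_shape is supplied, the shape is now inferred from the lengths/shapes of the axis-assigned metadata values, and any axis with no associated metadata is reported as 0.

    from ndcube import NDMeta

    # Hypothetical metadata; no data_shape is passed in.
    meta = NDMeta({"line": ["Si IV"] * 3, "exposure time": [2, 2, 2, 2]},
                  axes={"line": 0, "exposure time": 1})
    print(meta.data_shape)  # -> [3 4], inferred from the metadata values
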
--- ndcube/conftest.py | 1 - ndcube/meta.py | 86 +++++++++++++++++++++++++-------------- ndcube/tests/test_meta.py | 47 ++++++++++----------- 3 files changed, 77 insertions(+), 57 deletions(-) diff --git a/ndcube/conftest.py b/ndcube/conftest.py index 878f20b8a..3378aeb3f 100644 --- a/ndcube/conftest.py +++ b/ndcube/conftest.py @@ -344,7 +344,6 @@ def ndcube_4d_axis_aware_meta(wcs_4d_t_l_lt_ln): "pixel label": np.arange(np.prod(shape[:2])).reshape(shape[:2]), "line": ["Si IV"] * shape[2], "exposure time": ([2] * shape[-1]) * u.s}, - data_shape=shape, axes={"slit position": 0, "pixel label": (0, 1), "line": (2,), diff --git a/ndcube/meta.py b/ndcube/meta.py index 8d88c0984..2ce75aba7 100644 --- a/ndcube/meta.py +++ b/ndcube/meta.py @@ -36,7 +36,6 @@ class NDMetaABC(collections.abc.Mapping): data_shape: iterator of `int`, optional The shape of the data with which this metadata is associated. - Must be set if axes input is set. Notes ----- @@ -148,6 +147,7 @@ class NDMeta(dict, NDMetaABC): def __init__(self, meta=None, comments=None, axes=None, data_shape=None): self.original_meta = meta + self._data_shape = np.array([], dtype=int) if data_shape is None else np.asarray(data_shape, dtype=int) if meta is None: meta = {} @@ -165,18 +165,9 @@ def __init__(self, meta=None, comments=None, axes=None, data_shape=None): "All comments must correspond to a value in meta under the same key.") self._comments = comments - if data_shape is None: - self._data_shape = data_shape - else: - self._data_shape = np.asarray(data_shape, dtype=int) - if axes is None: self._axes = dict() else: - if not (isinstance(data_shape, collections.abc.Iterable) and - all([isinstance(i, numbers.Integral) for i in data_shape])): - raise TypeError("If axes is set, data_shape must be an iterable giving " - "the length of each axis of the associated cube.") axes = dict(axes) if not set(axes.keys()).issubset(set(meta_keys)): raise ValueError( @@ -191,22 +182,37 @@ def _sanitize_axis_value(self, axis, value, key): axis = (axis,) if len(axis) == 0: return ValueError(axis_err_msg) - if self.data_shape is None: - raise TypeError("NDMeta instance does not have a shape so new metadata " - "cannot be assigned to an axis.") # Verify each entry in axes is an iterable of ints or a scalar. - if not (isinstance(axis, collections.abc.Iterable) and all([isinstance(i, numbers.Integral) - for i in axis])): + if not (isinstance(axis, collections.abc.Iterable) + and all([isinstance(i, numbers.Integral) for i in axis])): return ValueError(axis_err_msg) + # If metadata's axis/axes include axis beyond current data shape, extend it. + data_shape = copy.deepcopy(self.data_shape) + if max(axis) >= len(data_shape): + data_shape = np.concatenate((data_shape, + np.zeros(max(axis) + 1 - len(data_shape), dtype=int))) + # Check whether metadata is compatible with data shape based on shapes + # of metadata already present. axis = np.asarray(axis) if _not_scalar(value): - axis_shape = tuple(self.data_shape[axis]) - if not _is_grid_aligned(value, axis_shape) and not _is_axis_aligned(value, axis_shape): - raise ValueError( - f"{key} must have shape {tuple(self.data_shape[axis])} " - f"as its associated axes {axis}, ", - f"or same length as number of associated axes ({len(axis)}). " - f"Has shape {value.shape if hasattr(value, 'shape') else len(value)}") + axis_shape = data_shape[axis] + if not _is_axis_aligned(value, axis_shape): + # If metadata corresponds to previously unconstrained axis, update data_shape. 
+ idx0 = axis_shape == 0 + if idx0.any(): + axis_shape[idx0] = np.array(_get_metadata_shape(value))[idx0] + data_shape[axis] = axis_shape + # Confirm metadata is compatible with data shape. + if not _is_grid_aligned(value, axis_shape): + raise ValueError( + f"{key} must have same shape {tuple(data_shape[axis])} " + f"as its associated axes {axis}, ", + f"or same length as number of associated axes ({len(axis)}). " + f"Has shape {value.shape if hasattr(value, 'shape') else len(value)}") + elif len(axis) != 1: + raise ValueError("Scalar and str metadata can only be assigned to one axis. " + f"key = {key}; value = {value}; axes = {axes}") + self._data_shape = data_shape return axis @property @@ -319,15 +325,29 @@ def __init__(self, meta): self.meta = meta def __getitem__(self, item): - if self.meta.data_shape is None: + data_shape = self.meta.data_shape + if len(data_shape) == 0: raise TypeError("NDMeta object does not have a shape and so cannot be sliced.") new_meta = copy.deepcopy(self.meta) + naxes = len(data_shape) if isinstance(item, (numbers.Integral, slice)): item = [item] - naxes = len(self.meta.data_shape) - item = np.array(list(item) + [slice(None)] * (naxes - len(item)), - dtype=object) + if len(item) < naxes: + item = np.array(list(item) + [slice(None)] * (naxes - len(item)), dtype=object) + elif len(item) > naxes: + # If item applies to more axes than have been defined in NDMeta, + # ignore items applying to those additional axes. + item = np.array(item[:naxes]) + else: + item = np.asarray(item) + # Replace non-int item elements corresponding to length-0 axes + # with slice(None) so data shape is not altered. + idx = [not isinstance(i, numbers.Integral) and s == 0 for i, s in zip(item, data_shape)] + idx = np.arange(len(idx))[idx] + print("original item = ", item) + item[idx] = np.array([slice(None)] * len(idx)) + print("updated item = ", item) # Edit data shape and calculate which axis will be dropped. dropped_axes = np.zeros(naxes, dtype=bool) @@ -340,12 +360,12 @@ def __getitem__(self, item): if start is None: start = 0 if start < 0: - start = self.meta.data_shape[i] - start + start = data_shape[i] - start stop = axis_item.stop if stop is None: - stop = self.meta.data_shape[i] + stop = data_shape[i] if stop < 0: - stop = self.meta.data_shape[i] - stop + stop = data_shape[i] - stop new_shape[i] = stop - start else: raise TypeError("Unrecognized slice type. " @@ -382,6 +402,7 @@ def __getitem__(self, item): else: new_item = tuple(item[axis]) # Slice metadata value. 
+ print(new_item) try: new_value = value[new_item] except: @@ -420,11 +441,14 @@ def _is_scalar(value): return not _not_scalar(value) +def _get_metadata_shape(value): + return value.shape if hasattr(value, "shape") else (len(value),) + def _is_grid_aligned(value, axis_shape): if _is_scalar(value): return False - value_shape = value.shape if hasattr(value, "shape") else (len(value),) - if value_shape != axis_shape: + value_shape = _get_metadata_shape(value) + if value_shape != tuple(axis_shape): return False return True diff --git a/ndcube/tests/test_meta.py b/ndcube/tests/test_meta.py index e7186730b..42aef7610 100644 --- a/ndcube/tests/test_meta.py +++ b/ndcube/tests/test_meta.py @@ -30,22 +30,22 @@ def basic_comments(): @pytest.fixture def basic_axes(): return {"b": 0, - "c": (1, 2), - "d": (2,), + "c": (1, 3), + "d": (3,), "e": 1, "f": 0, - "g": (0, 1, 2) + "g": (0, 1, 3) } @pytest.fixture def basic_data_shape(): - return (2, 3, 4, 5) + return (2, 3, 0, 4) @pytest.fixture -def basic_meta(basic_meta_values, basic_comments, basic_axes, basic_data_shape): - return NDMeta(basic_meta_values, basic_comments, basic_axes, basic_data_shape) +def basic_meta(basic_meta_values, basic_comments, basic_axes): + return NDMeta(basic_meta_values, basic_comments, basic_axes) @pytest.fixture @@ -61,9 +61,12 @@ def test_shape(basic_meta, basic_data_shape): def test_slice_axis_with_no_meta(basic_meta): meta = basic_meta - output = meta.slice[:, :, :, 0] + output = meta.slice[:, :, 0] expected = copy.deepcopy(meta) - expected._data_shape = meta._data_shape[:-1] + expected._data_shape = meta._data_shape[[0, 1, 3]] + expected._axes["c"] = (1, 2) + expected._axes["d"] = (2,) + expected._axes["g"] = (0, 1, 2) assert_metas_equal(output, expected) @@ -82,9 +85,9 @@ def test_slice_away_independent_axis(basic_meta): axes["c"] -= 1 axes["d"] -= 1 axes["e"] -= 1 - axes["g"] = (0, 1) + axes["g"] = (0, 2) shape = meta.data_shape[1:] - expected = NDMeta(values, comments, axes, shape) + expected = NDMeta(values, comments, axes) assert_metas_equal(output, expected) @@ -103,17 +106,17 @@ def test_slice_away_independent_and_dependent_axis(basic_meta): del axes["b"] del axes["e"] del axes["f"] - axes["c"] = 0 - axes["d"] = 0 - axes["g"] = 0 + axes["c"] = 1 + axes["d"] = 1 + axes["g"] = 1 shape = meta.data_shape[2:] - expected = NDMeta(values, comments, axes, shape) + expected = NDMeta(values, comments, axes) assert_metas_equal(output, expected) def test_slice_dependent_axes(basic_meta): meta = basic_meta - output = meta.slice[:, 1:3, 1] + output = meta.slice[:, 1:3, :, 1] values = dict([(key, value) for key, value in meta.items()]) values["c"] = values["c"][1:3, 1] values["d"] = values["d"][1] @@ -124,8 +127,8 @@ def test_slice_dependent_axes(basic_meta): del axes["d"] axes["c"] = 1 axes["g"] = (0, 1) - shape = np.array([2, 2, 5]) - expected = NDMeta(values, comments, axes, shape) + shape = np.array([2, 2, 0]) + expected = NDMeta(values, comments, axes, data_shape=shape) assert_metas_equal(output, expected) @@ -173,12 +176,6 @@ def test_add_overwrite_error(basic_meta): meta.add("a", "world", None, None) -def test_add_axis_without_shape(no_shape_meta): - meta = no_shape_meta - with pytest.raises(TypeError): - meta.add("z", [100], axis=0) - - def test_remove(basic_meta): meta = basic_meta name = "b" @@ -191,12 +188,12 @@ def test_remove(basic_meta): def test_rebin(basic_meta): meta = basic_meta rebinned_axes = {0, 2} - new_shape = (1, 3, 2, 5) + new_shape = (1, 3, 5, 2) output = meta.rebin(rebinned_axes, new_shape) # Build 
expected result. expected = copy.deepcopy(meta) del expected._axes["b"] del expected._axes["c"] del expected._axes["d"] - expected._data_shape = np.array([1, 3, 2, 5], dtype=int) + expected._data_shape = np.array([1, 3, 5, 2], dtype=int) assert_metas_equal(output, expected) From bf078bdc4143b0624a9170b4ce3ce91d560786fa Mon Sep 17 00:00:00 2001 From: DanRyanIrish Date: Tue, 2 Jul 2024 23:45:00 +0100 Subject: [PATCH 44/60] Refactor NDMeta.rebin to take bin_shape as input. --- ndcube/meta.py | 36 +++++++++++------------------------- ndcube/ndcube.py | 4 +--- ndcube/tests/test_meta.py | 8 +++----- 3 files changed, 15 insertions(+), 33 deletions(-) diff --git a/ndcube/meta.py b/ndcube/meta.py index 2ce75aba7..1f6eeaec5 100644 --- a/ndcube/meta.py +++ b/ndcube/meta.py @@ -268,7 +268,7 @@ def slice(self): # Docstring in ABC. return _NDMetaSlicer(self) - def rebin(self, rebinned_axes, new_shape): + def rebin(self, bin_shape): """ Adjusts axis-aware metadata to stay consistent with a rebinned `~ndcube.NDCube`. @@ -279,36 +279,25 @@ def rebin(self, rebinned_axes, new_shape): Parameters ---------- - rebinned_axes: `set` of `int` - Set of array indices of axes that are rebinned. - new_shape: `tuple` of `int` - The new shape of the rebinned data. + bin_shape : array-like + The number of pixels in a bin in each dimension. """ - # Sanitize input. + # Sanitize input + bin_shape = np.round(bin_shape).astype(int) data_shape = self.data_shape - if not isinstance(rebinned_axes, set): - raise TypeError( - f"rebinned_axes must be a set. type of rebinned_axes is {type(rebinned_axes)}") - if not all([isinstance(dim, numbers.Integral) for dim in rebinned_axes]): - raise ValueError("All elements of rebinned_axes must be ints.") - list_axes = list(rebinned_axes) - if min(list_axes) < 0 or max(list_axes) >= len(data_shape): - raise ValueError( - f"Elements in rebinned_axes must be in range 0--{len(data_shape)-1} inclusive.") - if len(new_shape) != len(data_shape): - raise ValueError(f"new_shape must be a tuple of same length as data shape: " - f"{len(new_shape)} != {len(self.data_shape)}") - if not all([isinstance(dim, numbers.Integral) for dim in new_shape]): - raise TypeError("bin_shape must contain only integer types.") + bin_shape = bin_shape[:len(data_shape)] # Drop info on axes not defined by NDMeta. + if (np.mod(data_shape, bin_shape) != 0).any(): + raise ValueError("bin_shape must be integer factors of their associated axes.") # Remove axis-awareness from grid-aligned metadata associated with rebinned axes. + rebinned_axes = set(np.where(bin_shape != 1)[0]) new_meta = copy.deepcopy(self) null_set = set() for name, axes in self.axes.items(): - if (_is_grid_aligned(self[name], tuple(self.data_shape[axes])) + if (_is_grid_aligned(self[name], data_shape[axes]) and set(axes).intersection(rebinned_axes) != null_set): del new_meta._axes[name] # Update data shape. - new_meta._data_shape = np.asarray(new_shape).astype(int) + new_meta._data_shape = new_meta._data_shape // bin_shape return new_meta @@ -345,9 +334,7 @@ def __getitem__(self, item): # with slice(None) so data shape is not altered. idx = [not isinstance(i, numbers.Integral) and s == 0 for i, s in zip(item, data_shape)] idx = np.arange(len(idx))[idx] - print("original item = ", item) item[idx] = np.array([slice(None)] * len(idx)) - print("updated item = ", item) # Edit data shape and calculate which axis will be dropped. 
dropped_axes = np.zeros(naxes, dtype=bool) @@ -402,7 +389,6 @@ def __getitem__(self, item): else: new_item = tuple(item[axis]) # Slice metadata value. - print(new_item) try: new_value = value[new_item] except: diff --git a/ndcube/ndcube.py b/ndcube/ndcube.py index 7f2a345ea..cd7f05120 100644 --- a/ndcube/ndcube.py +++ b/ndcube/ndcube.py @@ -1210,9 +1210,7 @@ def my_propagate(uncertainty, data, mask, **kwargs): # If meta is axis-aware, drop axis-awareness for metadata associated with rebinned axes. if hasattr(self.meta, "__ndcube_can_rebin__") and self.meta.__ndcube_can_rebin__: - new_shape = (np.asarray(self.shape) / np.asarray(bin_shape)).astype(int) - rebinned_axes = set(np.where(bin_shape != 1)[0]) - new_meta = self.meta.rebin(rebinned_axes, new_shape) + new_meta = self.meta.rebin(bin_shape) else: new_meta = self.meta diff --git a/ndcube/tests/test_meta.py b/ndcube/tests/test_meta.py index 42aef7610..bbb6a9bd8 100644 --- a/ndcube/tests/test_meta.py +++ b/ndcube/tests/test_meta.py @@ -42,7 +42,6 @@ def basic_axes(): def basic_data_shape(): return (2, 3, 0, 4) - @pytest.fixture def basic_meta(basic_meta_values, basic_comments, basic_axes): return NDMeta(basic_meta_values, basic_comments, basic_axes) @@ -187,13 +186,12 @@ def test_remove(basic_meta): def test_rebin(basic_meta): meta = basic_meta - rebinned_axes = {0, 2} - new_shape = (1, 3, 5, 2) - output = meta.rebin(rebinned_axes, new_shape) + bin_shape = (2, 1, 1, 2) + output = meta.rebin(bin_shape) # Build expected result. expected = copy.deepcopy(meta) del expected._axes["b"] del expected._axes["c"] del expected._axes["d"] - expected._data_shape = np.array([1, 3, 5, 2], dtype=int) + expected._data_shape = np.array([1, 3, 0, 2], dtype=int) assert_metas_equal(output, expected) From 81b84e5451618df47e31a6e1f1ffaa612555b7e9 Mon Sep 17 00:00:00 2001 From: DanRyanIrish Date: Tue, 2 Jul 2024 23:55:24 +0100 Subject: [PATCH 45/60] Remove data_shape kwarg from NDMeta init. --- ndcube/meta.py | 7 ++----- ndcube/tests/test_meta.py | 4 ++-- 2 files changed, 4 insertions(+), 7 deletions(-) diff --git a/ndcube/meta.py b/ndcube/meta.py index 1f6eeaec5..92c438125 100644 --- a/ndcube/meta.py +++ b/ndcube/meta.py @@ -34,9 +34,6 @@ class NDMetaABC(collections.abc.Mapping): number of associated axes (axis-aligned), or same shape as the associated data array's axes (grid-aligned). - data_shape: iterator of `int`, optional - The shape of the data with which this metadata is associated. 
- Notes ----- **Axis-aware Metadata** @@ -145,9 +142,9 @@ class NDMeta(dict, NDMetaABC): __ndcube_can_slice__ = True __ndcube_can_rebin__ = True - def __init__(self, meta=None, comments=None, axes=None, data_shape=None): + def __init__(self, meta=None, comments=None, axes=None): self.original_meta = meta - self._data_shape = np.array([], dtype=int) if data_shape is None else np.asarray(data_shape, dtype=int) + self._data_shape = np.array([], dtype=int) if meta is None: meta = {} diff --git a/ndcube/tests/test_meta.py b/ndcube/tests/test_meta.py index bbb6a9bd8..bef465bde 100644 --- a/ndcube/tests/test_meta.py +++ b/ndcube/tests/test_meta.py @@ -126,8 +126,8 @@ def test_slice_dependent_axes(basic_meta): del axes["d"] axes["c"] = 1 axes["g"] = (0, 1) - shape = np.array([2, 2, 0]) - expected = NDMeta(values, comments, axes, data_shape=shape) + expected = NDMeta(values, comments, axes) + expected._data_shape = np.array([2, 2, 0]) assert_metas_equal(output, expected) From b526e3ed0c260674957d1c556161837f66377294 Mon Sep 17 00:00:00 2001 From: DanRyanIrish Date: Wed, 3 Jul 2024 00:06:22 +0100 Subject: [PATCH 46/60] Fix codestyle. --- ndcube/conftest.py | 18 +++++----- ndcube/meta.py | 85 +++++++++++++++++++++++----------------------- 2 files changed, 52 insertions(+), 51 deletions(-) diff --git a/ndcube/conftest.py b/ndcube/conftest.py index 3378aeb3f..91166eae0 100644 --- a/ndcube/conftest.py +++ b/ndcube/conftest.py @@ -15,7 +15,7 @@ from astropy.time import Time, TimeDelta from astropy.wcs import WCS -from ndcube import ExtraCoords, GlobalCoords, NDMeta, NDCube, NDCubeSequence +from ndcube import ExtraCoords, GlobalCoords, NDCube, NDCubeSequence, NDMeta # Force MPL to use non-gui backends for testing. try: @@ -340,14 +340,14 @@ def ndcube_4d_axis_aware_meta(wcs_4d_t_l_lt_ln): wcs_4d_t_l_lt_ln.array_shape = shape data_cube = data_nd(shape, dtype=int) meta = NDMeta({"a": "scalar", - "slit position": np.arange(shape[0], dtype=int), - "pixel label": np.arange(np.prod(shape[:2])).reshape(shape[:2]), - "line": ["Si IV"] * shape[2], - "exposure time": ([2] * shape[-1]) * u.s}, - axes={"slit position": 0, - "pixel label": (0, 1), - "line": (2,), - "exposure time": 3}) + "slit position": np.arange(shape[0], dtype=int), + "pixel label": np.arange(np.prod(shape[:2])).reshape(shape[:2]), + "line": ["Si IV"] * shape[2], + "exposure time": ([2] * shape[-1]) * u.s}, + axes={"slit position": 0, + "pixel label": (0, 1), + "line": (2,), + "exposure time": 3}) return NDCube(data_cube, wcs=wcs_4d_t_l_lt_ln, meta=meta) diff --git a/ndcube/meta.py b/ndcube/meta.py index 92c438125..c51e51b5e 100644 --- a/ndcube/meta.py +++ b/ndcube/meta.py @@ -361,49 +361,50 @@ def __getitem__(self, item): for key, value in self.meta.items(): axis = self.meta.axes.get(key, None) drop_key = False - if axis is not None: - # Calculate new axis indices. - new_axis = np.asarray(list( - set(axis).intersection(set(np.arange(naxes)[kept_axes])) - )) - if len(new_axis) == 0: - new_axis = None - else: - cumul_dropped_axes = np.cumsum(dropped_axes)[new_axis] - new_axis -= cumul_dropped_axes - - # Calculate sliced metadata values. - axis_shape = tuple(self.meta.data_shape[axis]) - if _is_scalar(value): - new_value = value - # If scalar metadata's axes have been dropped, mark metadata to be dropped. - if new_axis is None: - drop_key = True - else: - value_is_axis_aligned = _is_axis_aligned(value, axis_shape) - if value_is_axis_aligned: - new_item = kept_axes[axis] - else: - new_item = tuple(item[axis]) - # Slice metadata value. 
- try: - new_value = value[new_item] - except: - # If value cannot be sliced by fancy slicing, convert it - # it to an array, slice it, and then if necessary, convert - # it back to its original type. - new_value = (np.asanyarray(value)[new_item]) - if hasattr(new_value, "__len__"): - new_value = type(value)(new_value) - # If axis-aligned metadata sliced down to length 1, convert to scalar. - if value_is_axis_aligned and len(new_value) == 1: - new_value = new_value[0] - # Overwrite metadata value with newly sliced version. - if drop_key: - new_meta.remove(key) + if axis is None: + continue + # Calculate new axis indices. + new_axis = np.asarray(list( + set(axis).intersection(set(np.arange(naxes)[kept_axes])) + )) + if len(new_axis) == 0: + new_axis = None + else: + cumul_dropped_axes = np.cumsum(dropped_axes)[new_axis] + new_axis -= cumul_dropped_axes + + # Calculate sliced metadata values. + axis_shape = tuple(self.meta.data_shape[axis]) + if _is_scalar(value): + new_value = value + # If scalar metadata's axes have been dropped, mark metadata to be dropped. + if new_axis is None: + drop_key = True + else: + value_is_axis_aligned = _is_axis_aligned(value, axis_shape) + if value_is_axis_aligned: + new_item = kept_axes[axis] else: - new_meta.add(key, new_value, self.meta.comments.get(key, None), new_axis, - overwrite=True) + new_item = tuple(item[axis]) + # Slice metadata value. + try: + new_value = value[new_item] + except: + # If value cannot be sliced by fancy slicing, convert it + # it to an array, slice it, and then if necessary, convert + # it back to its original type. + new_value = (np.asanyarray(value)[new_item]) + if hasattr(new_value, "__len__"): + new_value = type(value)(new_value) + # If axis-aligned metadata sliced down to length 1, convert to scalar. + if value_is_axis_aligned and len(new_value) == 1: + new_value = new_value[0] + # Overwrite metadata value with newly sliced version. + if drop_key: + new_meta.remove(key) + else: + new_meta.add(key, new_value, self.meta.comments.get(key, None), new_axis, + overwrite=True) return new_meta From cdc3f8f44551edf8e23e5af53f1fe74bdd7fdeb7 Mon Sep 17 00:00:00 2001 From: DanRyanIrish Date: Wed, 3 Jul 2024 00:28:37 +0100 Subject: [PATCH 47/60] Add setter for NDMeta.data_shape. --- ndcube/meta.py | 24 ++++++++++++++++++++++++ ndcube/tests/test_meta.py | 13 +++++++++++++ 2 files changed, 37 insertions(+) diff --git a/ndcube/meta.py b/ndcube/meta.py index c51e51b5e..16bc6ffc5 100644 --- a/ndcube/meta.py +++ b/ndcube/meta.py @@ -224,6 +224,30 @@ def axes(self): def data_shape(self): return self._data_shape + @data_shape.setter + def data_shape(self, new_shape): + """ + Set data shape to new shape. + + Must agree with shpaes of any axes already associated with metadata + + Parameters + ---------- + new_shape: array-like + The new shape of the data. Elements must of of type `int`. + """ + new_shape = np.round(new_shape).astype(int) + if (new_shape < 0).any(): + raise ValueError("new_shape cannot include negative numbers.") + # Confirm input shape agrees with shapes of pre-existin metadata. + old_shape = self.data_shape + idx, = np.where(old_shape > 0) + print(idx, old_shape, new_shape) + if len(idx) > 0 and (old_shape[idx] != new_shape[idx]).any(): + raise ValueError("new_shape not compatible with pre-existing metadata. " + f"old shape = {old_shape}, new_shape = {new_shape}") + self._data_shape = new_shape + def add(self, name, value, comment=None, axis=None, overwrite=False): # Docstring in ABC. 
if name in self.keys() and overwrite is not True: diff --git a/ndcube/tests/test_meta.py b/ndcube/tests/test_meta.py index bef465bde..face17e11 100644 --- a/ndcube/tests/test_meta.py +++ b/ndcube/tests/test_meta.py @@ -195,3 +195,16 @@ def test_rebin(basic_meta): del expected._axes["d"] expected._data_shape = np.array([1, 3, 0, 2], dtype=int) assert_metas_equal(output, expected) + + +def test_data_shape_setter(basic_meta): + meta = basic_meta + new_shape = np.array([2, 3, 5, 4], dtype=int) + meta.data_shape = new_shape + assert tuple(meta.data_shape) == tuple(new_shape) + + +def test_data_shape_setter_error(basic_meta): + meta = basic_meta + with pytest.raises(ValueError): + meta.data_shape = (3, 3, 0, 4) From 23732091a065061f0edab42067709447ad50e012 Mon Sep 17 00:00:00 2001 From: DanRyanIrish Date: Wed, 3 Jul 2024 00:35:44 +0100 Subject: [PATCH 48/60] Replace NDMeta.remove with __delitem__. --- ndcube/meta.py | 6 +++--- ndcube/tests/test_meta.py | 4 ++-- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/ndcube/meta.py b/ndcube/meta.py index 16bc6ffc5..2353750df 100644 --- a/ndcube/meta.py +++ b/ndcube/meta.py @@ -263,12 +263,12 @@ def add(self, name, value, comment=None, axis=None, overwrite=False): # This must be done after updating self._axes otherwise it may error. self.__setitem__(name, value) - def remove(self, name): + def __delitem__(self, name): if name in self._comments: del self._comments[name] if name in self._axes: del self._axes[name] - del self[name] + super().__delitem__(name) def __setitem__(self, key, val): axis = self.axes.get(key, None) @@ -425,7 +425,7 @@ def __getitem__(self, item): new_value = new_value[0] # Overwrite metadata value with newly sliced version. if drop_key: - new_meta.remove(key) + del new_meta[key] else: new_meta.add(key, new_value, self.meta.comments.get(key, None), new_axis, overwrite=True) diff --git a/ndcube/tests/test_meta.py b/ndcube/tests/test_meta.py index face17e11..099db56f5 100644 --- a/ndcube/tests/test_meta.py +++ b/ndcube/tests/test_meta.py @@ -175,10 +175,10 @@ def test_add_overwrite_error(basic_meta): meta.add("a", "world", None, None) -def test_remove(basic_meta): +def test_del(basic_meta): meta = basic_meta name = "b" - meta.remove(name) + del meta[name] assert name not in meta.keys() assert name not in meta.comments.keys() assert name not in meta.axes.keys() From 6d38c6758239e73345e00b602ed909f58dadeeef Mon Sep 17 00:00:00 2001 From: DanRyanIrish Date: Wed, 3 Jul 2024 00:44:10 +0100 Subject: [PATCH 49/60] Make NDCube.meta have same shape as NDCube if meta is axis-aware. 
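A rough sketch of the effect (the data array, WCS and metadata key here are placeholders, not taken from the test suite): when an axis-aware NDMeta is attached to an NDCube, the cube now fills in the meta's data_shape from its own shape on construction.

    import numpy as np
    from astropy.wcs import WCS
    from ndcube import NDCube, NDMeta

    # Placeholder cube; "exposure time" is a hypothetical key tied to the last axis.
    data = np.ones((4, 5))
    meta = NDMeta({"exposure time": [2] * 5}, axes={"exposure time": 1})
    cube = NDCube(data, wcs=WCS(naxis=2), meta=meta)
    print(cube.meta.data_shape)  # -> [4 5], i.e. the same shape as the cube
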
--- ndcube/meta.py | 2 +- ndcube/ndcube.py | 5 +++++ 2 files changed, 6 insertions(+), 1 deletion(-) diff --git a/ndcube/meta.py b/ndcube/meta.py index 2353750df..457520c9f 100644 --- a/ndcube/meta.py +++ b/ndcube/meta.py @@ -5,7 +5,7 @@ import numpy as np -__all__ = ["NDMeta"] +__all__ = ["NDMeta", "NDMetaABC"] class NDMetaABC(collections.abc.Mapping): diff --git a/ndcube/ndcube.py b/ndcube/ndcube.py index cd7f05120..f4f7c31ab 100644 --- a/ndcube/ndcube.py +++ b/ndcube/ndcube.py @@ -27,6 +27,7 @@ from ndcube import utils from ndcube.extra_coords.extra_coords import ExtraCoords, ExtraCoordsABC from ndcube.global_coords import GlobalCoords, GlobalCoordsABC +from ndcube.meta import NDMetaABC from ndcube.mixins import NDCubeSlicingMixin from ndcube.ndcube_sequence import NDCubeSequence from ndcube.utils.exceptions import warn_deprecated, warn_user @@ -399,6 +400,10 @@ def __init__(self, data, wcs=None, uncertainty=None, mask=None, meta=None, global_coords = deepcopy(global_coords) self._global_coords = global_coords + # If meta is axis-aware, make it to have same shape as cube. + if isinstance(self.meta, NDMetaABC): + self.meta.data_shape = self.shape + @property def extra_coords(self): # Docstring in NDCubeABC. From fb788f73c16ee9b1376ef262bbc040c766e07adb Mon Sep 17 00:00:00 2001 From: DanRyanIrish Date: Wed, 3 Jul 2024 09:53:36 +0100 Subject: [PATCH 50/60] Fix codestyle. --- ndcube/meta.py | 2 +- ndcube/tests/test_meta.py | 2 -- 2 files changed, 1 insertion(+), 3 deletions(-) diff --git a/ndcube/meta.py b/ndcube/meta.py index 457520c9f..2c08425b4 100644 --- a/ndcube/meta.py +++ b/ndcube/meta.py @@ -208,7 +208,7 @@ def _sanitize_axis_value(self, axis, value, key): f"Has shape {value.shape if hasattr(value, 'shape') else len(value)}") elif len(axis) != 1: raise ValueError("Scalar and str metadata can only be assigned to one axis. " - f"key = {key}; value = {value}; axes = {axes}") + f"key = {key}; value = {value}; axes = {axis}") self._data_shape = data_shape return axis diff --git a/ndcube/tests/test_meta.py b/ndcube/tests/test_meta.py index 099db56f5..f5935c349 100644 --- a/ndcube/tests/test_meta.py +++ b/ndcube/tests/test_meta.py @@ -85,7 +85,6 @@ def test_slice_away_independent_axis(basic_meta): axes["d"] -= 1 axes["e"] -= 1 axes["g"] = (0, 2) - shape = meta.data_shape[1:] expected = NDMeta(values, comments, axes) assert_metas_equal(output, expected) @@ -108,7 +107,6 @@ def test_slice_away_independent_and_dependent_axis(basic_meta): axes["c"] = 1 axes["d"] = 1 axes["g"] = 1 - shape = meta.data_shape[2:] expected = NDMeta(values, comments, axes) assert_metas_equal(output, expected) From f315ca6d48fe425755ee54c2816f9f7ce8904df2 Mon Sep 17 00:00:00 2001 From: DanRyanIrish Date: Thu, 4 Jul 2024 14:29:20 +0100 Subject: [PATCH 51/60] Apply suggestions from code review Co-authored-by: Stuart Mumford --- ndcube/meta.py | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/ndcube/meta.py b/ndcube/meta.py index 2c08425b4..c3284952d 100644 --- a/ndcube/meta.py +++ b/ndcube/meta.py @@ -69,6 +69,7 @@ class NDMetaABC(collections.abc.Mapping): axis-awareness. If specific pieces of metadata have a known way to behave during rebinning, this can be handled by subclasses or mixins. """ + @property @abc.abstractmethod def axes(self): @@ -242,7 +243,6 @@ def data_shape(self, new_shape): # Confirm input shape agrees with shapes of pre-existin metadata. 
old_shape = self.data_shape idx, = np.where(old_shape > 0) - print(idx, old_shape, new_shape) if len(idx) > 0 and (old_shape[idx] != new_shape[idx]).any(): raise ValueError("new_shape not compatible with pre-existing metadata. " f"old shape = {old_shape}, new_shape = {new_shape}") @@ -383,10 +383,9 @@ def __getitem__(self, item): # Slice all metadata associated with axes. for key, value in self.meta.items(): - axis = self.meta.axes.get(key, None) - drop_key = False - if axis is None: + if (axis := self.meta.axes.get(key, None)) is None: continue + drop_key = False # Calculate new axis indices. new_axis = np.asarray(list( set(axis).intersection(set(np.arange(naxes)[kept_axes])) From d81db9ee5f357555aea5167149c83caa6f43a7e0 Mon Sep 17 00:00:00 2001 From: DanRyanIrish Date: Thu, 4 Jul 2024 14:31:52 +0100 Subject: [PATCH 52/60] Change NDMeta.comments to key_comments. And other minor changes suggested by code review. --- ndcube/meta.py | 41 ++++++++++++++++++--------------------- ndcube/ndcube.py | 2 +- ndcube/tests/test_meta.py | 28 +++++++++++++------------- 3 files changed, 34 insertions(+), 37 deletions(-) diff --git a/ndcube/meta.py b/ndcube/meta.py index 2c08425b4..25a61d84f 100644 --- a/ndcube/meta.py +++ b/ndcube/meta.py @@ -22,7 +22,7 @@ class NDMetaABC(collections.abc.Mapping): meta: dict-like The names and values of metadata. - comments: dict-like, optional + key_comments: dict-like, optional Comments associated with any of the above pieces of metadata. axes: dict-like, optional @@ -80,7 +80,7 @@ def axes(self): @property @abc.abstractmethod - def comments(self): + def key_comments(self): """ Mapping from metadata keys to associated comments. @@ -95,7 +95,7 @@ def data_shape(self): """ @abc.abstractmethod - def add(self, name, value, comment=None, axis=None, overwrite=False): + def add(self, name, value, key_comment=None, axis=None, overwrite=False): """ Add a new piece of metadata to instance. @@ -108,7 +108,7 @@ def add(self, name, value, comment=None, axis=None, overwrite=False): The value of the metadata. If axes input is not None, this must have the same length/shape as those axes as defined by ``self.data_shape``. - comment: `str` or `None` + key_comment: `str` or `None` Any comment associated with this metadata. Set to None if no comment desired. axis: `int`, iterable of `int`, or `None` @@ -142,25 +142,22 @@ class NDMeta(dict, NDMetaABC): __ndcube_can_slice__ = True __ndcube_can_rebin__ = True - def __init__(self, meta=None, comments=None, axes=None): + def __init__(self, meta=None, key_comments=None, axes=None): self.original_meta = meta self._data_shape = np.array([], dtype=int) if meta is None: meta = {} - else: - meta = dict(meta) super().__init__(meta.items()) meta_keys = meta.keys() - if comments is None: - self._comments = dict() + if key_comments is None: + self._key_comments = dict() else: - comments = dict(comments) - if not set(comments.keys()).issubset(set(meta_keys)): + if not set(key_comments.keys()).issubset(set(meta_keys)): raise ValueError( "All comments must correspond to a value in meta under the same key.") - self._comments = comments + self._key_comments = key_comments if axes is None: self._axes = dict() @@ -184,7 +181,7 @@ def _sanitize_axis_value(self, axis, value, key): and all([isinstance(i, numbers.Integral) for i in axis])): return ValueError(axis_err_msg) # If metadata's axis/axes include axis beyond current data shape, extend it. 
- data_shape = copy.deepcopy(self.data_shape) + data_shape = self.data_shape if max(axis) >= len(data_shape): data_shape = np.concatenate((data_shape, np.zeros(max(axis) + 1 - len(data_shape), dtype=int))) @@ -213,8 +210,8 @@ def _sanitize_axis_value(self, axis, value, key): return axis @property - def comments(self): - return self._comments + def key_comments(self): + return self._key_comments @property def axes(self): @@ -229,7 +226,7 @@ def data_shape(self, new_shape): """ Set data shape to new shape. - Must agree with shpaes of any axes already associated with metadata + Must agree with shapes of any axes already associated with metadata Parameters ---------- @@ -248,13 +245,13 @@ def data_shape(self, new_shape): f"old shape = {old_shape}, new_shape = {new_shape}") self._data_shape = new_shape - def add(self, name, value, comment=None, axis=None, overwrite=False): + def add(self, name, value, key_comment=None, axis=None, overwrite=False): # Docstring in ABC. if name in self.keys() and overwrite is not True: raise KeyError(f"'{name}' already exists. " "To update an existing metadata entry set overwrite=True.") - if comment is not None: - self._comments[name] = comment + if key_comment is not None: + self._key_comments[name] = key_comment if axis is not None: axis = self._sanitize_axis_value(axis, value, name) self._axes[name] = axis @@ -264,8 +261,8 @@ def add(self, name, value, comment=None, axis=None, overwrite=False): self.__setitem__(name, value) def __delitem__(self, name): - if name in self._comments: - del self._comments[name] + if name in self._key_comments: + del self._key_comments[name] if name in self._axes: del self._axes[name] super().__delitem__(name) @@ -427,7 +424,7 @@ def __getitem__(self, item): if drop_key: del new_meta[key] else: - new_meta.add(key, new_value, self.meta.comments.get(key, None), new_axis, + new_meta.add(key, new_value, self.meta.key_comments.get(key, None), new_axis, overwrite=True) return new_meta diff --git a/ndcube/ndcube.py b/ndcube/ndcube.py index f4f7c31ab..a7642d32f 100644 --- a/ndcube/ndcube.py +++ b/ndcube/ndcube.py @@ -1217,7 +1217,7 @@ def my_propagate(uncertainty, data, mask, **kwargs): if hasattr(self.meta, "__ndcube_can_rebin__") and self.meta.__ndcube_can_rebin__: new_meta = self.meta.rebin(bin_shape) else: - new_meta = self.meta + new_meta = deepcopy(self.meta) # Reform NDCube. 
new_cube = type(self)( diff --git a/ndcube/tests/test_meta.py b/ndcube/tests/test_meta.py index f5935c349..b1e8aaba7 100644 --- a/ndcube/tests/test_meta.py +++ b/ndcube/tests/test_meta.py @@ -20,7 +20,7 @@ def basic_meta_values(): @pytest.fixture -def basic_comments(): +def basic_key_comments(): return {"a": "Comment A", "b": "Comment B", "c": "Comment C", @@ -43,8 +43,8 @@ def basic_data_shape(): return (2, 3, 0, 4) @pytest.fixture -def basic_meta(basic_meta_values, basic_comments, basic_axes): - return NDMeta(basic_meta_values, basic_comments, basic_axes) +def basic_meta(basic_meta_values, basic_key_comments, basic_axes): + return NDMeta(basic_meta_values, basic_key_comments, basic_axes) @pytest.fixture @@ -77,7 +77,7 @@ def test_slice_away_independent_axis(basic_meta): values["b"] = values["b"][0] values["g"] = ["world", "!"] del values["f"] - comments = meta.comments + key_comments = meta.key_comments axes = dict([(key, axis) for key, axis in meta.axes.items()]) del axes["b"] del axes["f"] @@ -85,7 +85,7 @@ def test_slice_away_independent_axis(basic_meta): axes["d"] -= 1 axes["e"] -= 1 axes["g"] = (0, 2) - expected = NDMeta(values, comments, axes) + expected = NDMeta(values, key_comments, axes) assert_metas_equal(output, expected) @@ -99,7 +99,7 @@ def test_slice_away_independent_and_dependent_axis(basic_meta): values["c"] = values["c"][1] values["e"] = values["e"][1] values["g"] = "!" - comments = meta.comments + key_comments = meta.key_comments axes = dict([(key, axis) for key, axis in meta.axes.items()]) del axes["b"] del axes["e"] @@ -107,7 +107,7 @@ def test_slice_away_independent_and_dependent_axis(basic_meta): axes["c"] = 1 axes["d"] = 1 axes["g"] = 1 - expected = NDMeta(values, comments, axes) + expected = NDMeta(values, key_comments, axes) assert_metas_equal(output, expected) @@ -119,12 +119,12 @@ def test_slice_dependent_axes(basic_meta): values["d"] = values["d"][1] values["e"] = values["e"][1:3] values["g"] = values["g"][:2] - comments = meta.comments + key_comments = meta.key_comments axes = dict([(key, axis) for key, axis in meta.axes.items()]) del axes["d"] axes["c"] = 1 axes["g"] = (0, 1) - expected = NDMeta(values, comments, axes) + expected = NDMeta(values, key_comments, axes) expected._data_shape = np.array([2, 2, 0]) assert_metas_equal(output, expected) @@ -139,11 +139,11 @@ def test_add1(basic_meta): meta = basic_meta name = "z" value = 100 - comment = "Comment E" - meta.add(name, value, comment, None) + key_comment = "Comment E" + meta.add(name, value, key_comment, None) assert name in meta.keys() assert meta[name] == value - assert meta.comments[name] == comment + assert meta.key_comments[name] == key_comment assert meta.axes.get(name, None) is None @@ -155,7 +155,7 @@ def test_add2(basic_meta): meta.add(name, value, None, axis) assert name in meta.keys() assert meta[name] == value - assert meta.comments.get(name, None) is None + assert meta.key_comments.get(name, None) is None assert meta.axes[name] == np.array([axis]) @@ -178,7 +178,7 @@ def test_del(basic_meta): name = "b" del meta[name] assert name not in meta.keys() - assert name not in meta.comments.keys() + assert name not in meta.key_comments.keys() assert name not in meta.axes.keys() From 5776dcabd93294d6dd9407b9a22e5634c8501f79 Mon Sep 17 00:00:00 2001 From: DanRyanIrish Date: Thu, 4 Jul 2024 14:42:50 +0100 Subject: [PATCH 53/60] Make NDMeta.original_header read-only. 
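The diff below stores the metadata passed to the constructor privately and exposes it through ``types.MappingProxyType``, giving callers a live, read-only view of the original mapping. A minimal sketch of that pattern, using only the standard library (the ``_Example`` class and its attribute names are illustrative, not ndcube's API):

.. code-block:: python

    from types import MappingProxyType

    class _Example:
        def __init__(self, meta):
            # Keep the mapping handed to us; never return it directly.
            self._original_meta = meta

        @property
        def original_meta(self):
            # A live, read-only view of the underlying dict.
            return MappingProxyType(self._original_meta)

    obj = _Example({"salutation": "hello", "name": "world"})
    obj.original_meta["name"]            # 'world'
    try:
        obj.original_meta["name"] = "mars"
    except TypeError:
        pass                             # mappingproxy forbids item assignment

One caveat: ``MappingProxyType(None)`` raises a ``TypeError``, so as written in the diff, accessing ``original_meta`` on an instance created with ``meta=None`` would fail unless guarded elsewhere.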
--- ndcube/meta.py | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/ndcube/meta.py b/ndcube/meta.py index a84c57dee..4e2e21394 100644 --- a/ndcube/meta.py +++ b/ndcube/meta.py @@ -2,6 +2,7 @@ import copy import numbers import collections.abc +from types import MappingProxyType import numpy as np @@ -144,7 +145,7 @@ class NDMeta(dict, NDMetaABC): __ndcube_can_rebin__ = True def __init__(self, meta=None, key_comments=None, axes=None): - self.original_meta = meta + self._original_meta = meta self._data_shape = np.array([], dtype=int) if meta is None: @@ -281,6 +282,10 @@ def __setitem__(self, key, val): "We recommend using the 'add' method to set values.") super().__setitem__(key, val) + @property + def original_meta(self): + return MappingProxyType(self._original_meta) + @property def slice(self): # Docstring in ABC. From 50763847bd435ed5b7332f971f377a4cb05da2c1 Mon Sep 17 00:00:00 2001 From: DanRyanIrish Date: Mon, 8 Jul 2024 20:16:33 +0100 Subject: [PATCH 54/60] First incomplete version of docs on NDMeta. --- docs/explaining_ndcube/metadata.rst | 73 +++++++++++++++++++++++++++++ 1 file changed, 73 insertions(+) create mode 100644 docs/explaining_ndcube/metadata.rst diff --git a/docs/explaining_ndcube/metadata.rst b/docs/explaining_ndcube/metadata.rst new file mode 100644 index 000000000..a6d00517e --- /dev/null +++ b/docs/explaining_ndcube/metadata.rst @@ -0,0 +1,73 @@ +.. _ndmeta: + +***************** +Handling Metadata +***************** + +`ndcube`'s data objects do not enforce any requirements on the object assigned to their ``.meta`` attributes. +However, it does provide an optional object for handling metadata, `~ndcube.NDMeta`, with capabilities beyond a plain `dict`. +Chief among these is the ability to associate metadata with data array axes. +This enables `~ndcube.NDMeta` to update itself through certain operations, e.g. slicing, so that the metadata remains consistent with the associated ND object. +In this section, we explain the needs that `~ndcube.NDMeta` serves, the concepts underpinning it, and the functionalities it provides. + +.. _meta_concepts: + +Key Concepts +============ + +.. _coords_vs_meta: + +Coordinates vs. Axis-aware Metadata +----------------------------------- + +The difference between coordinates and axis-aware metadata is a subtle but important one. +Formally, a coordinate is a physical space sampled by one or more data axis, whereas axis-aware metadata is information describing the data that can alter along one or more physical dimension. +An informative example is the difference between time and exposure time. +The temporal axis of a 3-D image cube samples the physical dimension of time in a strictly increasing monotonic way. +The times along the temporal axis are therefore coordinate values, not metadata. +Additionally, a scalar timestamp of a 2-D image is also considered a coordinate in the `ndcube` framework. +This is because it describes where in the physical dimension of time the data has been sampled. +The fact that it's not associated with an array/pixel axis of the data does not change this. +it does, however, determine that the scalar coordinate is stored in `~ndcube.GlobalCoords`, rather than the WCS or `~ndcube.ExtraCoords`. +(See the :ref:`global_coords` section for more discussion on the difference between global and other coordinates.) +By contrast, exposure time describes the interval over which each image was accumulated. 
+Exposure time can remain constant, increase or decrease with time, and may switch between these during the time extent of the image cube. +Like a coordinate, it should be associated with the image cube's temporal axis. +However, exposure time is reflective of the telescope's operational mode, not a sampling of a physical dimension. +Exposure time is therefore metadata, not a coordinate. + +One reason why it is important for `ndcube` to distinguish between coordinates and axis-aware metadata is its dependence on WCS. +Most WCS implementations require that there be a unique invertible mapping between pixel and world coordinates, i.e., there is only one pixel value that corresponds to a specific real world value (or combination of such if the coordinate is multi-dimensionsal), and vice versa. +Therefore, while there may be exceptions for rare and exotic WCS implementations, a good rule of thumb for deciding whether something is a coordinate is: +coordinates are numeric and strictly monotonic. +If either of these characteristics do not apply, you have metadata. + +.. _axis_and_grid_aligned_meta: + +Types of Axis-aware Metadata: Axis-aligned vs. Grid-aligned +----------------------------------------------------------- + +There are two types of axis-aware metadata: axis-aligned and grid-aligned. +Axis-aligned metadata assigned a scalar or string to each of mutliple array azes. +For example, the data produced by a scanning slit spectrograph is associated with real world values. +But each axis also corresponds to features of the instrument: dispersion (spectral), pixels along the slit (spatial), position of the slit in the rastering sequence (spatial and short timescales), and the raster number (longer timescales). +The axis-aligned metadata concept allows us to avoid ambiguity by assigning each axis with a label (e.g. ``("dispersion", "slit", "slit step", "raster")``). + +By contrast, grid aligned metadata assigns a value to each pixel along axes. +The exposure time discussion above is an example of 1-D grid-aligned metadata. +However, grid-aligned metadata can also be multi-dimensional. +For example, a pixel-dependent response function could be represented as grid-aligned metadata associated with 2 spatial axes. + +`~ndcube.NDMeta` supports both axis-aligned and grid-aligned metadata with the same API, which will be discussed in the next section. + +.. _ndmeta: + + +NDMeta +====== +`~ndcube.NDMeta` is a `dict`-based object for handling metadata that provides a additional functionalities beyond those of a plain `dict`. +Chief among these are the ability to support axis-aware metadata (see :ref:`key_concepts`) and assigning comments to individual pieces of metadata. + +Initializing an NDMeta +---------------------- + From 8127514bf9feb0fd9ccb64f0a2149fc164935c90 Mon Sep 17 00:00:00 2001 From: DanRyanIrish Date: Sun, 14 Jul 2024 11:48:53 +0100 Subject: [PATCH 55/60] First complete draft of metadata narrative docs. --- docs/explaining_ndcube/metadata.rst | 244 ++++++++++++++++++++++++++-- docs/explaining_ndcube/slicing.rst | 107 ++++++++++++ 2 files changed, 338 insertions(+), 13 deletions(-) diff --git a/docs/explaining_ndcube/metadata.rst b/docs/explaining_ndcube/metadata.rst index a6d00517e..d4cd3f95e 100644 --- a/docs/explaining_ndcube/metadata.rst +++ b/docs/explaining_ndcube/metadata.rst @@ -5,8 +5,8 @@ Handling Metadata ***************** `ndcube`'s data objects do not enforce any requirements on the object assigned to their ``.meta`` attributes. 
-However, it does provide an optional object for handling metadata, `~ndcube.NDMeta`, with capabilities beyond a plain `dict`. -Chief among these is the ability to associate metadata with data array axes. +However, it does provide an optional object for handling metadata, `~ndcube.NDMeta`. +This class inherits from `dict`, but provides additional functionalities, chief among which is the ability to associate metadata with data array axes. This enables `~ndcube.NDMeta` to update itself through certain operations, e.g. slicing, so that the metadata remains consistent with the associated ND object. In this section, we explain the needs that `~ndcube.NDMeta` serves, the concepts underpinning it, and the functionalities it provides. @@ -26,29 +26,35 @@ An informative example is the difference between time and exposure time. The temporal axis of a 3-D image cube samples the physical dimension of time in a strictly increasing monotonic way. The times along the temporal axis are therefore coordinate values, not metadata. Additionally, a scalar timestamp of a 2-D image is also considered a coordinate in the `ndcube` framework. -This is because it describes where in the physical dimension of time the data has been sampled. -The fact that it's not associated with an array/pixel axis of the data does not change this. -it does, however, determine that the scalar coordinate is stored in `~ndcube.GlobalCoords`, rather than the WCS or `~ndcube.ExtraCoords`. +This is because it describes where in the physical dimension of time the data has been sampled, even though it does not correspond to a data axis. +Because of this, such scalar coordinates are stored in `~ndcube.GlobalCoords`, while coordinates associated with array/pixel axes are stored in the WCS or `~ndcube.ExtraCoords`. (See the :ref:`global_coords` section for more discussion on the difference between global and other coordinates.) + By contrast, exposure time describes the interval over which each image was accumulated. -Exposure time can remain constant, increase or decrease with time, and may switch between these during the time extent of the image cube. +Exposure time can remain constant, increase or decrease with time, and may switch between these regimes during the time extent of the image cube. Like a coordinate, it should be associated with the image cube's temporal axis. However, exposure time is reflective of the telescope's operational mode, not a sampling of a physical dimension. Exposure time is therefore metadata, not a coordinate. -One reason why it is important for `ndcube` to distinguish between coordinates and axis-aware metadata is its dependence on WCS. +One reason why it is important to distinguish between coordinates and axis-aware metadata is `ndcube`'s dependence on WCS. Most WCS implementations require that there be a unique invertible mapping between pixel and world coordinates, i.e., there is only one pixel value that corresponds to a specific real world value (or combination of such if the coordinate is multi-dimensionsal), and vice versa. Therefore, while there may be exceptions for rare and exotic WCS implementations, a good rule of thumb for deciding whether something is a coordinate is: -coordinates are numeric and strictly monotonic. -If either of these characteristics do not apply, you have metadata. +coordinates are numeric and strictly monotonic. Otherwise you have metadata. 
+ +The keen-eyed reader may have realised of the above framework that, while not all axis-aligned metadata can be treated as coordinates, all coordinates can be treated like axis-aware metadata. +This raises the question of why not dispense with coordinates altogether and only have axis-aligned metadata? +The reason is that the stricter requirements on coordinates have led to a host of powerful coordinate infrastructures that are not valid for generalised axis-aware metadata. +These include functional WCS implementations which save memory as well as saving compute time through operations such as interpolation, and `~astropy.visualization.wcsaxes.WCSAxes`, which make complicated coordinate-aware plotting easy. +Therefore, where appropriate, it is beneficial to store coordinates separately from axis-aware metadata. -.. _axis_and_grid_aligned_meta: +.. _axis_and_grid_aligned_metadata: Types of Axis-aware Metadata: Axis-aligned vs. Grid-aligned ----------------------------------------------------------- There are two types of axis-aware metadata: axis-aligned and grid-aligned. -Axis-aligned metadata assigned a scalar or string to each of mutliple array azes. +Axis-aligned metadata associates a scalar or string with an array axis. +It can also assign an array of scalars or strings to mutliple array axes, so long as there is one value per associated axis. For example, the data produced by a scanning slit spectrograph is associated with real world values. But each axis also corresponds to features of the instrument: dispersion (spectral), pixels along the slit (spatial), position of the slit in the rastering sequence (spatial and short timescales), and the raster number (longer timescales). The axis-aligned metadata concept allows us to avoid ambiguity by assigning each axis with a label (e.g. ``("dispersion", "slit", "slit step", "raster")``). @@ -65,9 +71,221 @@ For example, a pixel-dependent response function could be represented as grid-al NDMeta ====== -`~ndcube.NDMeta` is a `dict`-based object for handling metadata that provides a additional functionalities beyond those of a plain `dict`. -Chief among these are the ability to support axis-aware metadata (see :ref:`key_concepts`) and assigning comments to individual pieces of metadata. + +.. _initializing_ndmeta: Initializing an NDMeta ---------------------- +To initialize an `~ndcube.NDMeta`, simply provide it with a `~collections.abc.Mapping` object, e.g. a `dict` or `astropy.io.fits.header.Header`. + +.. code-block:: python + + >>> from ndcube import NDMeta + >>> raw_meta = {"salutation": "hello", "name": "world"} + >>> meta = NDMeta(raw_meta) + +We can now access each piece of metadata by indexing ``meta`` as if it were a `dict`: + +.. code-block:: python + + >>> meta["name"] + "world" + +In this example we have provided a very simple set of metadata. +In fact, it is so simple that there is no practical difference between ``meta`` and a simple `dict`. +To demonstrate one of the additional functionalities of `~ndcube.NDMeta`, let reinstantiate ``meta``, adding some comments to the metadata. +To do this, we provide another `~collections.abc.Mapping`, e.g. a `dict`, with the same keys as the main metadata keys, or a subset of them, to the ``key_comments`` kwarg. + +.. code-block:: python + + + >>> key_comments = {"name": "Each planet in the solar system has a name."} + >>> meta = NDMeta(raw_meta, key_comments=key_comments) + +We can now access the comments by indexing the `~ndcube.NDMeta.key_comments` property: + +.. 
code-block:: python + + >>> meta.key_comments["name"] + "Each planet in the solar system has a name." + +Now let's discuss how to initialize how to `~ndcube.NDMeta` with axis-aware metadata. +(Here, we will specifically consider grid-aligned metadata. Axis-aligned metadata is assigned in the same way. But see the :ref:`assigning_axis_aligned_metadata` section for more details.) +Similar to ``key_comments``, we assign metadata to axes by providing a `~collections.abc.Mapping`, e.g. a `dict`, via its ``axes`` kwarg. +And like with ``key_comments``, the keys of ``axes`` must be the same, or a subset of, the main metadata keys. +The axis value must be an `int` or `tuple` of `int` giving the array axes of the data that correspond to the axes of the metadata. +Note that this means that metadata can be multidimensional. +Let's say we want to add exposure time that varies with the 1st (temporal) axis of that data, and a pixel response that varies with time and pixel column (1st and 3rd axes). + +.. code-block:: python + + >>> import astropy.units as u + >>> import numpy as np + >>> raw_meta["exposure time"] = [1.9, 2.1, 5, 2, 2] * u.s + >>> raw_meta["pixel response"] = np.array([[100., 100., 100., 90., 100.], [85., 100., 90., 100., 100.]]) * u.percent + >>> axes = {"exposure time": 0, "pixel response": (0, 2)} + >>> meta = NDMeta(raw_meta, axes=axes) + +It is easy to see which axes a piece of metadata corresponds to by indexing the `~ndcube.NDMeta.axes` property: + +.. code-block:: python + + >>> meta.axes["exposure time"] + (0,) + >>> meta.axes["pixel response"] + (0, 2) + +Finally, it is possible to attach the shape of the associated data to the `~ndcube.NDMeta` instance via the ``data_shape`` kwarg: + +.. code-block:: python + + >>> meta = NDMeta(raw_meta, axes=axes, key_comments=key_comments, data_shape=(5, 1, 2)) + +Or by directly setting the ``~ndcube.NDMeta.data_shape`` property after instantiation: + +.. code-block:: python + + >>> meta = NDMeta(raw_meta, axes=axes, key_comments=key_comments) + >>> meta.data_shape = (5, 1, 2) + +Note that the ``data_shape`` must be compatible with the shapes and associated axes of any axis-aware metadata. +For example, we couldn't set the length of the first axis to ``6``, because ``meta["exposure time"]`` is associated with the first axis and has a length of ``5``. +If no ``data_shape`` is provided, it is determined from the axis-aware metadata, if any is provided. +See the :ref:`data_shape` section for more details. + +.. _adding_removing_metadata: + +Adding and Removing Metadata +---------------------------- + +Because `~ndcube.NDMeta` is a subclass of `dict`, it is possible to add new metadata via the simple ``__setitem__`` API, e.g ``meta[new_key] = new_value``. +However, this API is not sufficient if we want to add axis-aware or commented metadata. +This is why `~ndcube.NDMeta` provides an `~ndcube.NDMeta.add` method. +This method requires the key and value of the new metadata, an optionally accepts a comment and/or axes. +Let's use this method to add a voltage that varies with time, i.e. the first data axis. + +.. code-block:: python + + >>> meta.add("voltage", u.Quantity([1.]*5, unit=u.V), key_comment="detector bias voltage can vary with time and pixel column.", axes=(0,)) + >>> meta["voltage"] + + +If you try to add metadata with a pre-existing key, `~ndcube.NDMeta.add` will error. +To replace the value, comment, or axes values of pre-existing metadata, set the ``overwrite`` kwarg to ``True``. + +.. 
code-block:: python + + >>> meta.add("voltage", u.Quantity([-300.]*5, unit=u.V), comment="detector bias voltage", axes=(0,), overwrite=True) + >>> meta["voltage"] + + +Unwanted metadata can be removing by employing the `del` operator. + +.. code-block:: python + + >>> del meta["voltage"] + >>> meta.get("voltage", "deleted") + "deleted" + +Note that the `del` operator also removes associated comments and axes. + +.. code-block:: python + + >>> del meta["voltage"] + >>> meta.key_comments.get("voltage", "deleted") + "deleted" + >>> meta.axes.get("voltage", "deleted") + "deleted" + +Data Shape +---------- + +The `~ndcube.NDMeta.data_shape` property tracks the shape of the data with which the metadata is associated. +We have already seen in the :ref:`initialzing_ndmeta` section, that it can be assigned during initialization or by subsequently setting the `~ndcube.NDMeta.data_shape` property directly. +However, if the ``data_shape`` is not provided, it is inferred from the shapes of axis-aware metadata. +If no axis-aware metadata is present, `~ndcube.NDMeta.data_shape` is empty: + +.. code-block:: python + + >>> from ndcube import NDMeta + >>> raw_meta = {"salutation": "hello", "name": "world"} + >>> meta = NDMeta(raw_meta) + >>> meta.data_shape + array([], dtype=int64) + +If we now add the ``"pixel response"`` metadata that we used, earlier the `~ndcube.NDMeta.data_shape` will be updated. + +.. code-block:: python + + >>> meta.add("pixel response", np.array([[100., 100., 100., 90., 100.], [85., 100., 90., 100., 100.]]) * u.percent, axes=(0, 2)) + >>> meta.data_shape + array([5, 0, 2]) + +Note that since ``"pixel response"`` is associated with the 1st and 3rd axes, those axes now have the same shape as ``"pixel response"``. +The existence of a 3rd axis, implies the presence of a 2nd. +However, we have no metadata associated with it, and hence no knowledge of its length. +It has therefore been assigned a length of ``0``. + +Now that the shape has been set for the 1st and 3rd axes, subsequently added grid-aligned metadata associated with those axes must be compatible with those axis lengths. +For example, if we add a 1-D ``"exposure time"`` and associate it with the 1st axis, it must have a length of of ``5``, otherwise an error will be raised: + +.. code-block:: python + + >>> meta.add("exposure time", [1.9, 2.1, 5, 2, 2] * u.s, axes=0) + +Moreover, if we now directly set the `~ndcube.NDMeta.data_shape` via ``meta.data_shape = new_shape``, we cannot change the length of axes already associated with grid-aligned metadata, without first removing or altering that metadata. +However, these restrictions do not apply if we want to change the shape of the 2nd axis, or add new metadata to it, because its length is ``0``, and hence considered undefined. + +.. code-block:: python + + >>> meta.add("row temperature", [-10, -11, -12] * u.deg_C, axes=1) + >>> meta.data_shape + array([5, 3, 2]) + +.. _assigning_axis_aligned_metadata + +Assigning Axis-aligned Metadata +------------------------------- + +So far, we have only dealt with grid-aligned metadata, i.e. axis-aware metadata which provides a value for each pixel. +To provide axis-aligned metadata, i.e. where each axis has a single value (see :ref:`axis_and_grid_aligned_metadata`), provide a scalar or string for a single axis, or a 1-D array-like with the same length as the number of associated axes for multi-axis-aligned metadata. + +.. 
code-block:: python + + >>> meta.add("axis name", np.array(["a", "b", "c", "d"]), axes=(0, 1, 2, 3)) + +Note that the length of ``"axis name"`` is the same as the number of its associated axes. +Also note that we have now indicated that there is 4th axis. +``meta.data_shape`` has therefore been automatically updated accordingly. + +.. code-block:: python + + >>> meta.data_shape + array([5, 3, 2, 0]) + +However, because axis-aligned metadata does not tell us about the length of the axes, the 4th axis has been assigned a length of zero. + +Original_meta +------------- + +As metadata is added, removed, and altered through certain operations, it may still be desirable to refer back to the initial state of the metadata. +This is the purpose of the `ndcube.NDMeta.original_meta` property. +It stores the metadata that was originally passed to the `~ndcube.NDMeta` constructor, and it never altered. + +.. code-block:: python + + >>> raw_meta = {"salutation": "hello", "name": "world"} + >>> meta = NDMeta(raw_meta) + >>> del meta["name"] + >>> meta.add("exclamation", "!") + >>> meta + ??? + >>> meta.original_meta + +Note that, ``meta.original_meta`` does not contain ``"exclamation"``, but still contains ``"name"``. +This is because these were added and removed after initialzation. +Also note that the type of the original metadata object is maintained. + +The `~ndcube.NDMeta.original_shape` property is a useful reference back to the original metadata, even after it has been altered via a complex sequence of operations. +In the :ref:`meta_slicing` section, we discuss the most common of these, slicing. diff --git a/docs/explaining_ndcube/slicing.rst b/docs/explaining_ndcube/slicing.rst index 57daf848f..6aecd3a05 100644 --- a/docs/explaining_ndcube/slicing.rst +++ b/docs/explaining_ndcube/slicing.rst @@ -416,3 +416,110 @@ Let's say we axis order of the ``linewidths`` cube was reversed. array([2, 1], dtype=object) The same result is obtained. + +.. _meta_slicing: + +Slicing NDMeta +-------------- + +Slicing is arguably the most important of `~ndcube.NDMeta`'s functionalities that sets it apart from a simple `dict`. +It allows axis-aware metadata to be kept consistent with its associated data. + +Because `~ndcube.NDMeta` is a subclass of `dict`, the standard Python slicing API is reserved for accessing the values of keys. +Therefore, slicing is achieved by applying Python's slicing API to `~ndcube.NDMeta`'s `~ndcube.NDMeta.slice` attribute. + +.. code-block:: python + + >>> # Instantiate an NDMeta object with axis-aware and axis-agnostic metadata. + >>> import astropy.units as u + >>> import numpy as np + >>> from ndcube import NDMeta + >>> raw_meta = {"salutation": "hello", "name": "world", + ... "exposure time": u.Quantity([2] * 4, unit=u.s), + ... "pixel response", np.ones((4, 5))} + >>> axes = {"exposure time": 0, "pixel response": (1, 2)} + >>> meta = NDMeta(raw_meta, axes=axes, data_shape=(4, 4, 5)) + + >>> # Slice NDMeta object. + >>> sliced_meta = meta.slice[0, 1:3] + >>> sliced_meta.data_shape + array([2, 5]) + +Note that by applying the slice item ``[0, 1:3]`` to ``meta``, the shape of the ``sliced_meta`` has been altered accordingly. +The first axis has been sliced away, the second has been truncated to a length of 2, and the third remains unchanged. +The shape of ``"pixel response"`` has been altered: + +.. code-block:: python + + >>> sliced_meta["pixel response"].shape + (2, 5) + +while ``"exposure time"`` has been reduced to a scalar: + +.. 
code-block:: python + + >>> sliced_meta["exposure time"] + + +Moreover, because the first axis has been sliced away, ``"exposure time"`` is no longer associated with a data array axis, and so is no longer present in the ``axes`` property: + +.. code-block:: python + + >>> list(sliced_meta.axes.keys()) + ["pixel response"] + +Finally, note that axis-agnostic metadata is unaltered by the slicing process. + +.. code-block:: python + + >>> sliced_meta["salultation"] + "hello" + >>> sliced_meta["name"] + "world" + + +Automatically Slicing NDMeta Attached to Other ND Objects +********************************************************* + +`~ndcube.NDMeta` has a dunder property called ``__ndcube_can_slice__``, which, by default, is set to ``True``. +When attached to `~ndcube.NDCube`, `~ndcube.NDCubeSequence`, and `~ndcube.NDCollection`, this property lets those classes know that that the metadata object is sliceable. +Hence, when their ``__getitem__`` methods are called (i.e. when they are sliced), they apply the slice item to ``self.meta.slice`` and attach the result as the meta attribute of the newly sliced ND object. +To demonstrate this, let's reinstantiate the same metadata object as in the above section and attach it to a new instantiation of ``my_cube``. + +.. code-block:: python + + >>> meta = NDMeta(raw_meta, axes=axes) + >>> my_cube = NDCube(data, wcs=wcs, meta=meta) + >>> my_cube.shape + (4, 4, 5) + >>> my_cube.meta.data_shape + array([4, 4, 5]) + +Now let's apply the same slice item to the cube as we applied to ``meta`` in the above section. +Note that shape of the resultant `~ndcube.NDCube` and its associated `~ndcube.NDMeta` object now have the same new shape consistent with the slice item. + +.. code-block:: python + + >>> sliced_cube = my_cube[0, 1:3] + >>> sliced_cube.shape + (2, 5) + >>> sliced_cube.meta.data_shape + array([2, 5]) + +Furthermore, the metadata's values, axis-awareness, etc., have also been altered in line with the slice item. +In fact, ``sliced_cube.meta`` is equivalent to ``sliced_meta`` from the previous section, because we have applied the same slice item to two equivalent `~ndcube.NDMeta` objects. + +.. code-block:: python + + >>> sliced_cube.meta.data_shape + array([2, 5]) + >>> sliced_cube.meta["pixel response"].shape + (2, 5) + >>> sliced_cube.meta["exposure time"] + + >>> list(sliced_cube.meta.axes.keys()) + ["pixel response"] + >>> sliced_cube.meta["salultation"] + "hello" + >>> sliced_cube.meta["name"] + "world" From c4a83aa2d6f8ee8fdc24b9ba00922fd4d606b470 Mon Sep 17 00:00:00 2001 From: DanRyanIrish Date: Sun, 14 Jul 2024 11:59:12 +0100 Subject: [PATCH 56/60] Enable NDMeta to accept data shape on instantiation. 
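The diff below adds a ``data_shape`` keyword to ``NDMeta.__init__`` and coerces it to an integer array. A short usage sketch, modelled on the examples in the new ``metadata.rst`` page; the metadata values here are illustrative and chosen to be self-consistent with the declared axes:

.. code-block:: python

    import astropy.units as u
    import numpy as np

    from ndcube import NDMeta

    raw_meta = {
        "salutation": "hello",
        "exposure time": u.Quantity([2.] * 4, unit=u.s),  # varies along axis 0
        "pixel response": np.ones((4, 5)) * u.percent,    # varies along axes 0 and 2
    }
    axes = {"exposure time": 0, "pixel response": (0, 2)}

    # With this commit the shape can be supplied at instantiation...
    meta = NDMeta(raw_meta, axes=axes, data_shape=(4, 3, 5))
    meta.data_shape                      # array([4, 3, 5])

    # ...which is equivalent to setting the property afterwards.
    meta2 = NDMeta(raw_meta, axes=axes)
    meta2.data_shape = (4, 3, 5)

Either way, the supplied shape must agree with the lengths implied by any axis-aware metadata (here, 4 along axis 0 and 5 along axis 2).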
--- ndcube/meta.py | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/ndcube/meta.py b/ndcube/meta.py index 4e2e21394..f65d1e088 100644 --- a/ndcube/meta.py +++ b/ndcube/meta.py @@ -144,9 +144,12 @@ class NDMeta(dict, NDMetaABC): __ndcube_can_slice__ = True __ndcube_can_rebin__ = True - def __init__(self, meta=None, key_comments=None, axes=None): + def __init__(self, meta=None, key_comments=None, axes=None, data_shape=None): self._original_meta = meta - self._data_shape = np.array([], dtype=int) + if data_shape is None: + self._data_shape = np.array([], dtype=int) + else: + self._data_shape = np.asarray(data_shape).astype(int) if meta is None: meta = {} From d79a5e6cb24a68a09f0de6625bf682890b7d1d33 Mon Sep 17 00:00:00 2001 From: DanRyanIrish Date: Sun, 14 Jul 2024 11:59:53 +0100 Subject: [PATCH 57/60] Enable NDCubeSequence and NDCollection to support sliceable metadata. --- ndcube/ndcollection.py | 4 +++- ndcube/ndcube_sequence.py | 4 +++- 2 files changed, 6 insertions(+), 2 deletions(-) diff --git a/ndcube/ndcollection.py b/ndcube/ndcollection.py index 0b37e4e91..e60b49417 100644 --- a/ndcube/ndcollection.py +++ b/ndcube/ndcollection.py @@ -168,9 +168,11 @@ def __getitem__(self, item): # Since item is not strings, no cube in collection is dropped. # Therefore the collection keys remain unchanged. new_keys = list(self.keys()) + # Slice meta if sliceable + new_meta = self.meta.slice[item] if self.meta.__ndcube_can_slice else copy.deepcopy(self.meta) return self.__class__(list(zip(new_keys, new_data)), aligned_axes=new_aligned_axes, - meta=self.meta, sanitize_inputs=False) + meta=new_meta, sanitize_inputs=False) def _generate_collection_getitems(self, item): # There are 3 supported cases of the slice item: int, slice, tuple of ints and/or slices. diff --git a/ndcube/ndcube_sequence.py b/ndcube/ndcube_sequence.py index 35d541c7c..30f8c6c08 100644 --- a/ndcube/ndcube_sequence.py +++ b/ndcube/ndcube_sequence.py @@ -122,8 +122,10 @@ def cube_like_array_axis_physical_types(self): def __getitem__(self, item): if isinstance(item, numbers.Integral): return self.data[item] + # Determine whether meta attribute should be sliced. + new_meta = self.meta.slice[item] if self.meta.__ndcube_can_slice else copy.deepcopy(self.meta) # Create an empty sequence in which to place the sliced cubes. - result = type(self)([], meta=self.meta, common_axis=self._common_axis) + result = type(self)([], meta=new_meta, common_axis=self._common_axis) if isinstance(item, slice): result.data = self.data[item] else: From 7f9f04266648d300bbe54851547b8cc9b1550f2d Mon Sep 17 00:00:00 2001 From: DanRyanIrish Date: Tue, 16 Jul 2024 12:10:20 +0100 Subject: [PATCH 58/60] Add tests for meta slicing via NDCubeSequence and NDCollection. --- ndcube/conftest.py | 5 +++-- ndcube/tests/helpers.py | 6 ++++++ ndcube/tests/test_ndcollection.py | 17 +++++++++-------- ndcube/tests/test_ndcubesequence.py | 12 +++++++++++- 4 files changed, 29 insertions(+), 11 deletions(-) diff --git a/ndcube/conftest.py b/ndcube/conftest.py index 91166eae0..897ff6237 100644 --- a/ndcube/conftest.py +++ b/ndcube/conftest.py @@ -16,6 +16,7 @@ from astropy.wcs import WCS from ndcube import ExtraCoords, GlobalCoords, NDCube, NDCubeSequence, NDMeta +from . import helpers # Force MPL to use non-gui backends for testing. 
try: @@ -313,7 +314,6 @@ def extra_coords_sharing_axis(): ) ) - ################################################################################ # NDCube Fixtures ################################################################################ @@ -616,7 +616,8 @@ def ndcubesequence_4c_ln_lt_l_cax1(ndcube_3d_ln_lt_l): cube2.data[:] *= 2 cube3.data[:] *= 3 cube4.data[:] *= 4 - return NDCubeSequence([cube1, cube2, cube3, cube4], common_axis=1) + meta = helpers.ndmeta_et0_pr02((4, 2, 3, 4)) + return NDCubeSequence([cube1, cube2, cube3, cube4], common_axis=1, meta=meta) @pytest.fixture diff --git a/ndcube/tests/helpers.py b/ndcube/tests/helpers.py index 9b81cbccb..34330fb56 100644 --- a/ndcube/tests/helpers.py +++ b/ndcube/tests/helpers.py @@ -195,3 +195,9 @@ def assert_collections_equal(collection1, collection2, check_data=True): assert_cubesequences_equal(cube1, cube2, check_data=check_data) else: raise TypeError(f"Unsupported Type in NDCollection: {type(cube1)}") + +def ndmeta_et0_pr02(shape): + return NDMeta({"salutation": "hello", + "exposure time": u.Quantity([2.] * shape[0], unit=u.s) + "pixel response": (100 * np.ones((shape[0], shape[2]), dtype=float)) * u.percent}, + axes={"exposure time": 0, "pixel response": (0, 2)}) diff --git a/ndcube/tests/test_ndcollection.py b/ndcube/tests/test_ndcollection.py index 8ec12630f..9510324aa 100644 --- a/ndcube/tests/test_ndcollection.py +++ b/ndcube/tests/test_ndcollection.py @@ -37,7 +37,8 @@ # Define collections aligned_axes = ((1, 2), (2, 0), (1, 2)) keys = ("cube0", "cube1", "cube2") -cube_collection = NDCollection([("cube0", cube0), ("cube1", cube1), ("cube2", cube2)], aligned_axes) +cube_coll_meta = helpers.ndmeta_et0_pr02((4, 5)) +cube_collection = NDCollection([("cube0", cube0), ("cube1", cube1), ("cube2", cube2)], aligned_axes, meta=cube_coll_meta) unaligned_collection = NDCollection([("cube0", cube0), ("cube1", cube1), ("cube2", cube2)], aligned_axes=None) seq_collection = NDCollection([("seq0", sequence02), ("seq1", sequence20)], aligned_axes="all") @@ -45,30 +46,30 @@ @pytest.mark.parametrize("item,collection,expected", [ (0, cube_collection, NDCollection([("cube0", cube0[:, 0]), ("cube1", cube1[:, :, 0]), ("cube2", cube2[:, 0])], - aligned_axes=((1,), (0,), (1,)))), + aligned_axes=((1,), (0,), (1,)), meta=cube_coll_meta.slice[0])), (slice(1, 3), cube_collection, NDCollection( [("cube0", cube0[:, 1:3]), ("cube1", cube1[:, :, 1:3]), ("cube2", cube2[:, 1:3])], - aligned_axes=aligned_axes)), + aligned_axes=aligned_axes, meta=cube_coll_meta.slice[1:3]))), (slice(-3, -1), cube_collection, NDCollection( [("cube0", cube0[:, -3:-1]), ("cube1", cube1[:, :, -3:-1]), ("cube2", cube2[:, -3:-1])], - aligned_axes=aligned_axes)), + aligned_axes=aligned_axes, meta=cube_coll_meta.slice[-3:-1]))), ((slice(None), slice(1, 2)), cube_collection, NDCollection( [("cube0", cube0[:, :, 1:2]), ("cube1", cube1[1:2]), ("cube2", cube2[:, :, 1:2])], - aligned_axes=aligned_axes)), + aligned_axes=aligned_axes, meta=cube_coll_meta.slice[:, 1:2]))), ((slice(2, 4), slice(-3, -1)), cube_collection, NDCollection( [("cube0", cube0[:, 2:4, -3:-1]), ("cube1", cube1[-3:-1, :, 2:4]), - ("cube2", cube2[:, 2:4, -3:-1])], aligned_axes=aligned_axes)), + ("cube2", cube2[:, 2:4, -3:-1])], aligned_axes=aligned_axes, meta=cube_coll_meta.slice[2:4, -3:-1]))), ((0, 0), cube_collection, NDCollection( [("cube0", cube0[:, 0, 0]), ("cube1", cube1[0, :, 0]), ("cube2", cube2[:, 0, 0])], - aligned_axes=None)), + aligned_axes=None, meta=cube_coll_meta.slice[0, 0]))), (("cube0", "cube2"), 
cube_collection, NDCollection( - [("cube0", cube0), ("cube2", cube2)], aligned_axes=(aligned_axes[0], aligned_axes[2]))), + [("cube0", cube0), ("cube2", cube2)], aligned_axes=(aligned_axes[0], aligned_axes[2]), meta=cube_coll_meta)), (0, seq_collection, NDCollection([("seq0", sequence02[0]), ("seq1", sequence20[0])], aligned_axes=((0, 1, 2), (0, 1, 2)))), diff --git a/ndcube/tests/test_ndcubesequence.py b/ndcube/tests/test_ndcubesequence.py index 1dee3e096..e8b80113e 100644 --- a/ndcube/tests/test_ndcubesequence.py +++ b/ndcube/tests/test_ndcubesequence.py @@ -5,7 +5,7 @@ import astropy.units as u from astropy.time import Time, TimeDelta -from ndcube import NDCube, NDCubeSequence +from ndcube import NDCube, NDCubeSequence, NDMeta from ndcube.tests import helpers @@ -195,3 +195,13 @@ def test_crop_by_values(ndcubesequence_4c_ln_lt_l): expected = seq[:, 1:3, 0:2, 0:3] output = seq.crop_by_values(lower_corner, upper_corner) helpers.assert_cubesequences_equal(output, expected) + + +def test_slice_meta(ndcubesequence_4c_ln_lt_l_cax1): + seq = ndcubesequence_4c_ln_lt_l_cax1 + sliced_seq = seq[:, :, 0] + expected_meta = NDMeta({"salutation": "hello", + "exposure time": u.Quantity([2] * 4, unit=u.s), + "pixel response": u.Quantity([100] * 4, unit=u.percent)}, + axes={"exposure time": 0, "pixel response": 0}) + helpers.assert_metas_equal(sliced_seq.meta, expected_meta) From a58c7053a29691cbd3a325166393815db6ab68fb Mon Sep 17 00:00:00 2001 From: DanRyanIrish Date: Sat, 20 Jul 2024 17:51:05 -0600 Subject: [PATCH 59/60] Fixes to make NDMeta slicing work, especially as part of an NDCollection. --- docs/explaining_ndcube/metadata.rst | 26 ++++++++++++------------ docs/explaining_ndcube/slicing.rst | 12 +++++------ ndcube/conftest.py | 2 +- ndcube/ndcollection.py | 31 ++++++++++++++++++++++++++++- ndcube/ndcube_sequence.py | 2 +- ndcube/tests/helpers.py | 12 +++++++++-- ndcube/tests/test_ndcollection.py | 12 +++++------ ndcube/tests/test_ndcube.py | 2 +- ndcube/tests/test_ndcubesequence.py | 2 +- 9 files changed, 69 insertions(+), 32 deletions(-) diff --git a/docs/explaining_ndcube/metadata.rst b/docs/explaining_ndcube/metadata.rst index d4cd3f95e..a26eddd3c 100644 --- a/docs/explaining_ndcube/metadata.rst +++ b/docs/explaining_ndcube/metadata.rst @@ -54,7 +54,7 @@ Types of Axis-aware Metadata: Axis-aligned vs. Grid-aligned There are two types of axis-aware metadata: axis-aligned and grid-aligned. Axis-aligned metadata associates a scalar or string with an array axis. -It can also assign an array of scalars or strings to mutliple array axes, so long as there is one value per associated axis. +It can also assign an array of scalars or strings to multiple array axes, so long as there is one value per associated axis. For example, the data produced by a scanning slit spectrograph is associated with real world values. But each axis also corresponds to features of the instrument: dispersion (spectral), pixels along the slit (spatial), position of the slit in the rastering sequence (spatial and short timescales), and the raster number (longer timescales). The axis-aligned metadata concept allows us to avoid ambiguity by assigning each axis with a label (e.g. ``("dispersion", "slit", "slit step", "raster")``). @@ -102,14 +102,14 @@ To do this, we provide another `~collections.abc.Mapping`, e.g. 
a `dict`, with t >>> key_comments = {"name": "Each planet in the solar system has a name."} >>> meta = NDMeta(raw_meta, key_comments=key_comments) - + We can now access the comments by indexing the `~ndcube.NDMeta.key_comments` property: .. code-block:: python >>> meta.key_comments["name"] "Each planet in the solar system has a name." - + Now let's discuss how to initialize how to `~ndcube.NDMeta` with axis-aware metadata. (Here, we will specifically consider grid-aligned metadata. Axis-aligned metadata is assigned in the same way. But see the :ref:`assigning_axis_aligned_metadata` section for more details.) Similar to ``key_comments``, we assign metadata to axes by providing a `~collections.abc.Mapping`, e.g. a `dict`, via its ``axes`` kwarg. @@ -135,13 +135,13 @@ It is easy to see which axes a piece of metadata corresponds to by indexing the (0,) >>> meta.axes["pixel response"] (0, 2) - + Finally, it is possible to attach the shape of the associated data to the `~ndcube.NDMeta` instance via the ``data_shape`` kwarg: .. code-block:: python >>> meta = NDMeta(raw_meta, axes=axes, key_comments=key_comments, data_shape=(5, 1, 2)) - + Or by directly setting the ``~ndcube.NDMeta.data_shape`` property after instantiation: .. code-block:: python @@ -170,7 +170,7 @@ Let's use this method to add a voltage that varies with time, i.e. the first dat >>> meta.add("voltage", u.Quantity([1.]*5, unit=u.V), key_comment="detector bias voltage can vary with time and pixel column.", axes=(0,)) >>> meta["voltage"] - + If you try to add metadata with a pre-existing key, `~ndcube.NDMeta.add` will error. To replace the value, comment, or axes values of pre-existing metadata, set the ``overwrite`` kwarg to ``True``. @@ -179,7 +179,7 @@ To replace the value, comment, or axes values of pre-existing metadata, set the >>> meta.add("voltage", u.Quantity([-300.]*5, unit=u.V), comment="detector bias voltage", axes=(0,), overwrite=True) >>> meta["voltage"] - + Unwanted metadata can be removing by employing the `del` operator. .. code-block:: python @@ -187,7 +187,7 @@ Unwanted metadata can be removing by employing the `del` operator. >>> del meta["voltage"] >>> meta.get("voltage", "deleted") "deleted" - + Note that the `del` operator also removes associated comments and axes. .. code-block:: python @@ -213,7 +213,7 @@ If no axis-aware metadata is present, `~ndcube.NDMeta.data_shape` is empty: >>> meta = NDMeta(raw_meta) >>> meta.data_shape array([], dtype=int64) - + If we now add the ``"pixel response"`` metadata that we used, earlier the `~ndcube.NDMeta.data_shape` will be updated. .. code-block:: python @@ -233,7 +233,7 @@ For example, if we add a 1-D ``"exposure time"`` and associate it with the 1st a .. code-block:: python >>> meta.add("exposure time", [1.9, 2.1, 5, 2, 2] * u.s, axes=0) - + Moreover, if we now directly set the `~ndcube.NDMeta.data_shape` via ``meta.data_shape = new_shape``, we cannot change the length of axes already associated with grid-aligned metadata, without first removing or altering that metadata. However, these restrictions do not apply if we want to change the shape of the 2nd axis, or add new metadata to it, because its length is ``0``, and hence considered undefined. @@ -254,7 +254,7 @@ To provide axis-aligned metadata, i.e. where each axis has a single value (see : .. code-block:: python >>> meta.add("axis name", np.array(["a", "b", "c", "d"]), axes=(0, 1, 2, 3)) - + Note that the length of ``"axis name"`` is the same as the number of its associated axes. 
Also note that we have now indicated that there is 4th axis. ``meta.data_shape`` has therefore been automatically updated accordingly. @@ -282,9 +282,9 @@ It stores the metadata that was originally passed to the `~ndcube.NDMeta` constr >>> meta ??? >>> meta.original_meta - + Note that, ``meta.original_meta`` does not contain ``"exclamation"``, but still contains ``"name"``. -This is because these were added and removed after initialzation. +This is because these were added and removed after initialization. Also note that the type of the original metadata object is maintained. The `~ndcube.NDMeta.original_shape` property is a useful reference back to the original metadata, even after it has been altered via a complex sequence of operations. diff --git a/docs/explaining_ndcube/slicing.rst b/docs/explaining_ndcube/slicing.rst index 6aecd3a05..1873e43d8 100644 --- a/docs/explaining_ndcube/slicing.rst +++ b/docs/explaining_ndcube/slicing.rst @@ -444,7 +444,7 @@ Therefore, slicing is achieved by applying Python's slicing API to `~ndcube.NDMe >>> sliced_meta = meta.slice[0, 1:3] >>> sliced_meta.data_shape array([2, 5]) - + Note that by applying the slice item ``[0, 1:3]`` to ``meta``, the shape of the ``sliced_meta`` has been altered accordingly. The first axis has been sliced away, the second has been truncated to a length of 2, and the third remains unchanged. The shape of ``"pixel response"`` has been altered: @@ -453,21 +453,21 @@ The shape of ``"pixel response"`` has been altered: >>> sliced_meta["pixel response"].shape (2, 5) - + while ``"exposure time"`` has been reduced to a scalar: .. code-block:: python >>> sliced_meta["exposure time"] - + Moreover, because the first axis has been sliced away, ``"exposure time"`` is no longer associated with a data array axis, and so is no longer present in the ``axes`` property: .. code-block:: python >>> list(sliced_meta.axes.keys()) ["pixel response"] - + Finally, note that axis-agnostic metadata is unaltered by the slicing process. .. code-block:: python @@ -494,7 +494,7 @@ To demonstrate this, let's reinstantiate the same metadata object as in the abov (4, 4, 5) >>> my_cube.meta.data_shape array([4, 4, 5]) - + Now let's apply the same slice item to the cube as we applied to ``meta`` in the above section. Note that shape of the resultant `~ndcube.NDCube` and its associated `~ndcube.NDMeta` object now have the same new shape consistent with the slice item. @@ -505,7 +505,7 @@ Note that shape of the resultant `~ndcube.NDCube` and its associated `~ndcube.ND (2, 5) >>> sliced_cube.meta.data_shape array([2, 5]) - + Furthermore, the metadata's values, axis-awareness, etc., have also been altered in line with the slice item. In fact, ``sliced_cube.meta`` is equivalent to ``sliced_meta`` from the previous section, because we have applied the same slice item to two equivalent `~ndcube.NDMeta` objects. diff --git a/ndcube/conftest.py b/ndcube/conftest.py index 897ff6237..566df925f 100644 --- a/ndcube/conftest.py +++ b/ndcube/conftest.py @@ -16,7 +16,7 @@ from astropy.wcs import WCS from ndcube import ExtraCoords, GlobalCoords, NDCube, NDCubeSequence, NDMeta -from . import helpers +from ndcube.tests import helpers # Force MPL to use non-gui backends for testing. 
try: diff --git a/ndcube/ndcollection.py b/ndcube/ndcollection.py index e60b49417..077f847bc 100644 --- a/ndcube/ndcollection.py +++ b/ndcube/ndcollection.py @@ -1,3 +1,5 @@ +import copy +import numbers import textwrap import collections.abc @@ -152,6 +154,7 @@ def __getitem__(self, item): new_data = [self[_item] for _item in item] new_keys = item new_aligned_axes = tuple([self.aligned_axes[item_] for item_ in item]) + new_meta = copy.deepcopy(self.meta) # Else, the item is assumed to be a typical slicing item. # Slice each cube in collection using information in this item. @@ -169,7 +172,33 @@ def __getitem__(self, item): # Therefore the collection keys remain unchanged. new_keys = list(self.keys()) # Slice meta if sliceable - new_meta = self.meta.slice[item] if self.meta.__ndcube_can_slice else copy.deepcopy(self.meta) + if hasattr(self.meta, "__ndcube_can_slice__") and self.meta.__ndcube_can_slice__: + # Convert negative indices to positive indices as they are not supported by NDMeta.slice + sanitized_item = copy.deepcopy(item) + aligned_shape = self.aligned_dimensions + if isinstance(item, numbers.Integral): + if item < 0: + sanitized_item = int(self.aligned_dimensions[0] + item) + elif isinstance(item, slice): + if (item.start is not None and item.start < 0) or (item.stop is not None and item.stop < 0): + new_start = aligned_shape[0] + item.start if item.start < 0 else item.start + new_stop = aligned_shape[0] + item.stop if item.stop < 0 else item.stop + sanitized_item = slice(new_start, new_stop) + else: + sanitized_item = list(sanitized_item) + for i, ax_it in enumerate(item): + if isinstance(ax_it, numbers.Integral) and ax_it < 0: + sanitized_item[i] = aligned_shape[i] + ax_it + elif isinstance(ax_it, slice): + if (ax_it.start is not None and ax_it.start < 0) or (ax_it.stop is not None and ax_it.stop < 0): + new_start = aligned_shape[i] + ax_it.start if ax_it.start < 0 else ax_it.start + new_stop = aligned_shape[i] + ax_it.stop if ax_it.stop < 0 else ax_it.stop + sanitized_item[i] = slice(new_start, new_stop) + sanitized_item = tuple(sanitized_item) + # Use sanitized item to slice meta. + new_meta = self.meta.slice[sanitized_item] + else: + new_meta = copy.deepcopy(self.meta) return self.__class__(list(zip(new_keys, new_data)), aligned_axes=new_aligned_axes, meta=new_meta, sanitize_inputs=False) diff --git a/ndcube/ndcube_sequence.py b/ndcube/ndcube_sequence.py index 30f8c6c08..75c9ed0b2 100644 --- a/ndcube/ndcube_sequence.py +++ b/ndcube/ndcube_sequence.py @@ -123,7 +123,7 @@ def __getitem__(self, item): if isinstance(item, numbers.Integral): return self.data[item] # Determine whether meta attribute should be sliced. - new_meta = self.meta.slice[item] if self.meta.__ndcube_can_slice else copy.deepcopy(self.meta) + new_meta = self.meta.slice[item] if (hasattr(self.meta, "__ndcube_can_slice__") and self.meta.__ndcube_can_slice__) else copy.deepcopy(self.meta) # Create an empty sequence in which to place the sliced cubes. 
result = type(self)([], meta=new_meta, common_axis=self._common_axis) if isinstance(item, slice): diff --git a/ndcube/tests/helpers.py b/ndcube/tests/helpers.py index 34330fb56..031eb2253 100644 --- a/ndcube/tests/helpers.py +++ b/ndcube/tests/helpers.py @@ -13,6 +13,7 @@ from numpy.testing import assert_equal import astropy +import astropy.units as u from astropy.wcs.wcsapi import BaseHighLevelWCS from astropy.wcs.wcsapi.fitswcs import SlicedFITSWCS from astropy.wcs.wcsapi.low_level_api import BaseLowLevelWCS @@ -196,8 +197,15 @@ def assert_collections_equal(collection1, collection2, check_data=True): else: raise TypeError(f"Unsupported Type in NDCollection: {type(cube1)}") +def ndmeta_et0_pr01(shape): + return NDMeta({"salutation": "hello", + "exposure time": u.Quantity([2.] * shape[0], unit=u.s), + "pixel response": (100 * np.ones((shape[0], shape[1]), dtype=float)) * u.percent}, + axes={"exposure time": 0, "pixel response": (0, 1)}, data_shape=shape) + + def ndmeta_et0_pr02(shape): return NDMeta({"salutation": "hello", - "exposure time": u.Quantity([2.] * shape[0], unit=u.s) + "exposure time": u.Quantity([2.] * shape[0], unit=u.s), "pixel response": (100 * np.ones((shape[0], shape[2]), dtype=float)) * u.percent}, - axes={"exposure time": 0, "pixel response": (0, 2)}) + axes={"exposure time": 0, "pixel response": (0, 2)}, data_shape=shape) diff --git a/ndcube/tests/test_ndcollection.py b/ndcube/tests/test_ndcollection.py index 9510324aa..6eb2aa26f 100644 --- a/ndcube/tests/test_ndcollection.py +++ b/ndcube/tests/test_ndcollection.py @@ -37,7 +37,7 @@ # Define collections aligned_axes = ((1, 2), (2, 0), (1, 2)) keys = ("cube0", "cube1", "cube2") -cube_coll_meta = helpers.ndmeta_et0_pr02((4, 5)) +cube_coll_meta = helpers.ndmeta_et0_pr01((4, 5)) cube_collection = NDCollection([("cube0", cube0), ("cube1", cube1), ("cube2", cube2)], aligned_axes, meta=cube_coll_meta) unaligned_collection = NDCollection([("cube0", cube0), ("cube1", cube1), ("cube2", cube2)], aligned_axes=None) seq_collection = NDCollection([("seq0", sequence02), ("seq1", sequence20)], aligned_axes="all") @@ -50,23 +50,23 @@ (slice(1, 3), cube_collection, NDCollection( [("cube0", cube0[:, 1:3]), ("cube1", cube1[:, :, 1:3]), ("cube2", cube2[:, 1:3])], - aligned_axes=aligned_axes, meta=cube_coll_meta.slice[1:3]))), + aligned_axes=aligned_axes, meta=cube_coll_meta.slice[1:3])), (slice(-3, -1), cube_collection, NDCollection( [("cube0", cube0[:, -3:-1]), ("cube1", cube1[:, :, -3:-1]), ("cube2", cube2[:, -3:-1])], - aligned_axes=aligned_axes, meta=cube_coll_meta.slice[-3:-1]))), + aligned_axes=aligned_axes, meta=cube_coll_meta.slice[1:3])), ((slice(None), slice(1, 2)), cube_collection, NDCollection( [("cube0", cube0[:, :, 1:2]), ("cube1", cube1[1:2]), ("cube2", cube2[:, :, 1:2])], - aligned_axes=aligned_axes, meta=cube_coll_meta.slice[:, 1:2]))), + aligned_axes=aligned_axes, meta=cube_coll_meta.slice[:, 1:2])), ((slice(2, 4), slice(-3, -1)), cube_collection, NDCollection( [("cube0", cube0[:, 2:4, -3:-1]), ("cube1", cube1[-3:-1, :, 2:4]), - ("cube2", cube2[:, 2:4, -3:-1])], aligned_axes=aligned_axes, meta=cube_coll_meta.slice[2:4, -3:-1]))), + ("cube2", cube2[:, 2:4, -3:-1])], aligned_axes=aligned_axes, meta=cube_coll_meta.slice[2:4, 2:4])), ((0, 0), cube_collection, NDCollection( [("cube0", cube0[:, 0, 0]), ("cube1", cube1[0, :, 0]), ("cube2", cube2[:, 0, 0])], - aligned_axes=None, meta=cube_coll_meta.slice[0, 0]))), + aligned_axes=None, meta=cube_coll_meta.slice[0, 0])), (("cube0", "cube2"), cube_collection, NDCollection( 
[("cube0", cube0), ("cube2", cube2)], aligned_axes=(aligned_axes[0], aligned_axes[2]), meta=cube_coll_meta)), diff --git a/ndcube/tests/test_ndcube.py b/ndcube/tests/test_ndcube.py index 34fb4db31..ece9b5cbc 100644 --- a/ndcube/tests/test_ndcube.py +++ b/ndcube/tests/test_ndcube.py @@ -1,5 +1,5 @@ -import copy import re +import copy from inspect import signature from textwrap import dedent diff --git a/ndcube/tests/test_ndcubesequence.py b/ndcube/tests/test_ndcubesequence.py index e8b80113e..daaf93075 100644 --- a/ndcube/tests/test_ndcubesequence.py +++ b/ndcube/tests/test_ndcubesequence.py @@ -203,5 +203,5 @@ def test_slice_meta(ndcubesequence_4c_ln_lt_l_cax1): expected_meta = NDMeta({"salutation": "hello", "exposure time": u.Quantity([2] * 4, unit=u.s), "pixel response": u.Quantity([100] * 4, unit=u.percent)}, - axes={"exposure time": 0, "pixel response": 0}) + axes={"exposure time": 0, "pixel response": 0}, data_shape=(4, 2, 4)) helpers.assert_metas_equal(sliced_seq.meta, expected_meta) From 944b0df21bf9977bc34f66bc83505bc05399785d Mon Sep 17 00:00:00 2001 From: DanRyanIrish Date: Sat, 20 Jul 2024 18:10:27 -0600 Subject: [PATCH 60/60] Some docs bugfixes relating to NDMeta. --- docs/explaining_ndcube/metadata.rst | 14 +++++++------- docs/explaining_ndcube/slicing.rst | 16 ++++++++-------- ndcube/__init__.py | 2 +- 3 files changed, 16 insertions(+), 16 deletions(-) diff --git a/docs/explaining_ndcube/metadata.rst b/docs/explaining_ndcube/metadata.rst index a26eddd3c..02bc60b13 100644 --- a/docs/explaining_ndcube/metadata.rst +++ b/docs/explaining_ndcube/metadata.rst @@ -90,7 +90,7 @@ We can now access each piece of metadata by indexing ``meta`` as if it were a `d .. code-block:: python >>> meta["name"] - "world" + 'world' In this example we have provided a very simple set of metadata. In fact, it is so simple that there is no practical difference between ``meta`` and a simple `dict`. @@ -108,7 +108,7 @@ We can now access the comments by indexing the `~ndcube.NDMeta.key_comments` pro .. code-block:: python >>> meta.key_comments["name"] - "Each planet in the solar system has a name." + 'Each planet in the solar system has a name.' Now let's discuss how to initialize how to `~ndcube.NDMeta` with axis-aware metadata. (Here, we will specifically consider grid-aligned metadata. Axis-aligned metadata is assigned in the same way. But see the :ref:`assigning_axis_aligned_metadata` section for more details.) @@ -132,9 +132,9 @@ It is easy to see which axes a piece of metadata corresponds to by indexing the .. code-block:: python >>> meta.axes["exposure time"] - (0,) + array([0]) >>> meta.axes["pixel response"] - (0, 2) + array([0, 2]) Finally, it is possible to attach the shape of the associated data to the `~ndcube.NDMeta` instance via the ``data_shape`` kwarg: @@ -186,7 +186,7 @@ Unwanted metadata can be removing by employing the `del` operator. >>> del meta["voltage"] >>> meta.get("voltage", "deleted") - "deleted" + 'deleted' Note that the `del` operator also removes associated comments and axes. @@ -194,9 +194,9 @@ Note that the `del` operator also removes associated comments and axes. 
>>> del meta["voltage"] >>> meta.key_comments.get("voltage", "deleted") - "deleted" + 'deleted' >>> meta.axes.get("voltage", "deleted") - "deleted" + 'deleted' Data Shape ---------- diff --git a/docs/explaining_ndcube/slicing.rst b/docs/explaining_ndcube/slicing.rst index 1873e43d8..7712893ea 100644 --- a/docs/explaining_ndcube/slicing.rst +++ b/docs/explaining_ndcube/slicing.rst @@ -436,7 +436,7 @@ Therefore, slicing is achieved by applying Python's slicing API to `~ndcube.NDMe >>> from ndcube import NDMeta >>> raw_meta = {"salutation": "hello", "name": "world", ... "exposure time": u.Quantity([2] * 4, unit=u.s), - ... "pixel response", np.ones((4, 5))} + ... "pixel response": np.ones((4, 5))} >>> axes = {"exposure time": 0, "pixel response": (1, 2)} >>> meta = NDMeta(raw_meta, axes=axes, data_shape=(4, 4, 5)) @@ -466,16 +466,16 @@ Moreover, because the first axis has been sliced away, ``"exposure time"`` is no .. code-block:: python >>> list(sliced_meta.axes.keys()) - ["pixel response"] + ['pixel response'] Finally, note that axis-agnostic metadata is unaltered by the slicing process. .. code-block:: python - >>> sliced_meta["salultation"] - "hello" + >>> sliced_meta["salutation"] + 'hello' >>> sliced_meta["name"] - "world" + 'world' Automatically Slicing NDMeta Attached to Other ND Objects @@ -518,8 +518,8 @@ In fact, ``sliced_cube.meta`` is equivalent to ``sliced_meta`` from the previous >>> sliced_cube.meta["exposure time"] >>> list(sliced_cube.meta.axes.keys()) - ["pixel response"] + ['pixel response'] >>> sliced_cube.meta["salultation"] - "hello" + 'hello' >>> sliced_cube.meta["name"] - "world" + 'world' diff --git a/ndcube/__init__.py b/ndcube/__init__.py index a9051923f..b4f740497 100644 --- a/ndcube/__init__.py +++ b/ndcube/__init__.py @@ -15,4 +15,4 @@ from .ndcube_sequence import NDCubeSequence, NDCubeSequenceBase from .version import version as __version__ -__all__ = ['NDCube', 'NDCubeSequence', "Meta", "NDCollection", "ExtraCoords", "GlobalCoords", "ExtraCoordsABC", "GlobalCoordsABC", "NDCubeBase", "NDCubeSequenceBase", "__version__"] +__all__ = ['NDCube', 'NDCubeSequence', "NDCollection", "NDMeta", "ExtraCoords", "GlobalCoords", "ExtraCoordsABC", "GlobalCoordsABC", "NDCubeBase", "NDCubeSequenceBase", "__version__"]
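Taken together, the changes in this series make axis-aware metadata follow its parent ND object through slicing. A short end-to-end sketch of the behaviour described in the new ``metadata.rst`` and ``slicing.rst`` pages; the data array and WCS below are illustrative stand-ins, not fixtures from the test suite:

.. code-block:: python

    import astropy.units as u
    import numpy as np
    from astropy.wcs import WCS

    from ndcube import NDCube, NDMeta

    # A simple 3-D cube with a throwaway WCS.
    data = np.random.rand(4, 4, 5)
    wcs = WCS(naxis=3)
    wcs.wcs.ctype = ["WAVE", "DEC--TAN", "RA---TAN"]
    wcs.wcs.cunit = ["m", "deg", "deg"]
    wcs.wcs.cdelt = [2e-11, 0.5, 0.4]
    wcs.wcs.crpix = [0, 2, 2]
    wcs.wcs.crval = [1e-9, 0.5, 1.0]

    # Axis-aware metadata, as in the slicing.rst examples.
    meta = NDMeta(
        {"salutation": "hello",
         "exposure time": u.Quantity([2] * 4, unit=u.s),
         "pixel response": np.ones((4, 5))},
        axes={"exposure time": 0, "pixel response": (1, 2)},
    )

    cube = NDCube(data, wcs=wcs, meta=meta)
    sliced = cube[0, 1:3]

    sliced.meta.data_shape               # array([2, 5])
    sliced.meta["pixel response"].shape  # (2, 5)
    "exposure time" in sliced.meta.axes  # False: axis 0 was sliced away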