|
14 | 14 | from zarr.storage import DirectoryStore |
15 | 15 |
|
16 | 16 | import parcels |
17 | | -from parcels._constants import DATATYPES_TO_FILL_VALUES |
18 | 17 | from parcels.particle import _SAME_AS_FIELDSET_TIME_INTERVAL, ParticleClass |
19 | 18 | from parcels.tools._helpers import timedelta_to_float |
20 | 19 |
|
|
25 | 24 |
|
26 | 25 | __all__ = ["ParticleFile"] |
27 | 26 |
|
| 27 | +_DATATYPES_TO_FILL_VALUES = { |
| 28 | + np.dtype(np.float16): np.nan, |
| 29 | + np.dtype(np.float32): np.nan, |
| 30 | + np.dtype(np.float64): np.nan, |
| 31 | + np.dtype(np.bool_): np.iinfo(np.int8).max, |
| 32 | + np.dtype(np.int8): np.iinfo(np.int8).max, |
| 33 | + np.dtype(np.int16): np.iinfo(np.int16).max, |
| 34 | + np.dtype(np.int32): np.iinfo(np.int32).max, |
| 35 | + np.dtype(np.int64): np.iinfo(np.int64).max, |
| 36 | + np.dtype(np.uint8): np.iinfo(np.uint8).max, |
| 37 | + np.dtype(np.uint16): np.iinfo(np.uint16).max, |
| 38 | + np.dtype(np.uint32): np.iinfo(np.uint32).max, |
| 39 | + np.dtype(np.uint64): np.iinfo(np.uint64).max, |
| 40 | +} |
| 41 | + |
28 | 42 |
|
29 | 43 | class ParticleFile: |
30 | 44 | """Initialise trajectory output. |
@@ -109,16 +123,16 @@ def _convert_varout_name(self, var): |
109 | 123 |
|
    def _extend_zarr_dims(self, Z, store, dtype, axis):
        """Grow the zarr array ``Z`` along ``axis``, padding new cells with the dtype's fill value.

        ``axis == 1`` appends one chunk (``self.chunks[1]`` columns) along the
        obs dimension; any other axis grows the trajectory dimension so the
        array holds ``self._maxids`` rows. Consolidated metadata is refreshed
        afterwards so the new shape is visible to readers of the store.
        """
        if axis == 1:
            # One full chunk of fill values along the obs dimension.
            a = np.full((Z.shape[0], self.chunks[1]), _DATATYPES_TO_FILL_VALUES[dtype], dtype=dtype)
            obs = zarr.group(store=store, overwrite=False)["obs"]
            # Extend the shared "obs" coordinate array only when it still matches
            # Z's current width — i.e. only the first variable to grow extends it,
            # continuing the running index from its last value.
            if len(obs) == Z.shape[1]:
                obs.append(np.arange(self.chunks[1]) + obs[-1] + 1)
        else:
            # Grow the trajectory dimension up to the current maximum id count.
            extra_trajs = self._maxids - Z.shape[0]
            if len(Z.shape) == 2:
                a = np.full((extra_trajs, Z.shape[1]), _DATATYPES_TO_FILL_VALUES[dtype], dtype=dtype)
            else:
                # 1-D arrays (presumably write-once variables — confirm) only
                # have the trajectory dimension to grow.
                a = np.full((extra_trajs,), _DATATYPES_TO_FILL_VALUES[dtype], dtype=dtype)
        Z.append(a, axis=axis)
        zarr.consolidate_metadata(store)
124 | 138 |
|
@@ -194,13 +208,13 @@ def _write_particle_data(self, *, particle_data, pclass, time_interval, time, in |
194 | 208 | if var.to_write == "once": |
195 | 209 | data = np.full( |
196 | 210 | (arrsize[0],), |
197 | | - DATATYPES_TO_FILL_VALUES[dtype], |
| 211 | + _DATATYPES_TO_FILL_VALUES[dtype], |
198 | 212 | dtype=dtype, |
199 | 213 | ) |
200 | 214 | data[ids_once] = particle_data[var.name][indices_to_write_once] |
201 | 215 | dims = ["trajectory"] |
202 | 216 | else: |
203 | | - data = np.full(arrsize, DATATYPES_TO_FILL_VALUES[dtype], dtype=dtype) |
| 217 | + data = np.full(arrsize, _DATATYPES_TO_FILL_VALUES[dtype], dtype=dtype) |
204 | 218 | data[ids, 0] = particle_data[var.name][indices_to_write] |
205 | 219 | dims = ["trajectory", "obs"] |
206 | 220 | ds[varout] = xr.DataArray(data=data, dims=dims, attrs=attrs[var.name]) |
@@ -269,7 +283,7 @@ def _create_variables_attribute_dict(particle: ParticleClass, time_interval: Tim |
269 | 283 | for var in vars: |
270 | 284 | fill_value = {} |
271 | 285 | if var.dtype is not _SAME_AS_FIELDSET_TIME_INTERVAL.VALUE: |
272 | | - fill_value = {"_FillValue": DATATYPES_TO_FILL_VALUES[var.dtype]} |
| 286 | + fill_value = {"_FillValue": _DATATYPES_TO_FILL_VALUES[var.dtype]} |
273 | 287 |
|
274 | 288 | attrs[var.name] = {**var.attrs, **fill_value} |
275 | 289 |
|
|
0 commit comments