Skip to content

Commit ce31aec

Browse files
committed
Remove netcdf_engine from Field and FieldSet
1 parent 97eff43 commit ce31aec

File tree

3 files changed

+8
-22
lines changed

3 files changed

+8
-22
lines changed

parcels/field.py

Lines changed: 4 additions & 13 deletions
Original file line numberDiff line numberDiff line change
@@ -228,7 +228,6 @@ def __init__(
228228

229229
self._dimensions = kwargs.pop("dimensions", None)
230230
self._dataFiles = kwargs.pop("dataFiles", None)
231-
self._netcdf_engine = kwargs.pop("netcdf_engine", "netcdf4")
232231
self._creation_log = kwargs.pop("creation_log", "")
233232

234233
# data_full_zdim is the vertical dimension of the complete field data, ignoring the indices.
@@ -278,10 +277,6 @@ def interp_method(self, value):
278277
def gridindexingtype(self):
279278
return self._gridindexingtype
280279

281-
@property
282-
def netcdf_engine(self):
283-
return self._netcdf_engine
284-
285280
@classmethod
286281
def _get_dim_filenames(cls, filenames, dim):
287282
if isinstance(filenames, str) or not isinstance(filenames, collections.abc.Iterable):
@@ -297,11 +292,11 @@ def _get_dim_filenames(cls, filenames, dim):
297292
return filenames
298293

299294
@staticmethod
300-
def _collect_timeslices(data_filenames, dimensions, indices, netcdf_engine):
295+
def _collect_timeslices(data_filenames, dimensions, indices):
301296
timeslices = []
302297
dataFiles = []
303298
for fname in data_filenames:
304-
with NetcdfFileBuffer(fname, dimensions, indices, netcdf_engine=netcdf_engine) as filebuffer:
299+
with NetcdfFileBuffer(fname, dimensions, indices) as filebuffer:
305300
ftime = filebuffer.time
306301
timeslices.append(ftime)
307302
dataFiles.append([fname] * len(ftime))
@@ -387,7 +382,6 @@ def from_netcdf(
387382
raise NotImplementedError("Vertically adaptive meshes not implemented for from_netcdf()")
388383
depth_filename = depth_filename[0]
389384

390-
netcdf_engine = kwargs.pop("netcdf_engine", "netcdf4")
391385
gridindexingtype = kwargs.get("gridindexingtype", "nemo")
392386

393387
indices: dict[str, npt.NDArray] = {} # TODO Nick: Cleanup
@@ -442,9 +436,7 @@ def from_netcdf(
442436
# Concatenate time variable to determine overall dimension
443437
# across multiple files
444438
if "time" in dimensions:
445-
time, time_origin, timeslices, dataFiles = cls._collect_timeslices(
446-
data_filenames, dimensions, indices, netcdf_engine
447-
)
439+
time, time_origin, timeslices, dataFiles = cls._collect_timeslices(data_filenames, dimensions, indices)
448440
grid = Grid.create_grid(lon, lat, depth, time, time_origin=time_origin, mesh=mesh)
449441
kwargs["dataFiles"] = dataFiles
450442
else: # e.g. for the CROCO CS_w field, see https://github.com/OceanParcels/Parcels/issues/1831
@@ -453,7 +445,7 @@ def from_netcdf(
453445
elif grid is not None and ("dataFiles" not in kwargs or kwargs["dataFiles"] is None):
454446
# ==== means: the field has a shared grid, but may have different data files, so we need to collect the
455447
# ==== correct file time series again.
456-
_, _, _, dataFiles = cls._collect_timeslices(data_filenames, dimensions, indices, netcdf_engine)
448+
_, _, _, dataFiles = cls._collect_timeslices(data_filenames, dimensions, indices)
457449
kwargs["dataFiles"] = dataFiles
458450

459451
if "time" in indices:
@@ -488,7 +480,6 @@ def from_netcdf(
488480
allow_time_extrapolation = False if "time" in dimensions else True
489481

490482
kwargs["dimensions"] = dimensions.copy()
491-
kwargs["netcdf_engine"] = netcdf_engine
492483

493484
return cls(
494485
variable,

parcels/fieldfilebuffer.py

Lines changed: 4 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -19,7 +19,6 @@ def __init__(
1919
interp_method: InterpMethodOption = "linear",
2020
data_full_zdim=None,
2121
gridindexingtype="nemo",
22-
netcdf_engine="netcdf4",
2322
):
2423
self.filename: PathLike | list[PathLike] = filename
2524
self.dimensions = dimensions # Dict with dimension keys for file data
@@ -28,10 +27,9 @@ def __init__(
2827
self.interp_method = interp_method
2928
self.gridindexingtype = gridindexingtype
3029
self.data_full_zdim = data_full_zdim
31-
self.netcdf_engine = netcdf_engine
3230

3331
def __enter__(self):
34-
self.dataset = open_xarray_dataset(self.filename, self.netcdf_engine)
32+
self.dataset = open_xarray_dataset(self.filename)
3533
return self
3634

3735
def __exit__(self, type, value, traceback):
@@ -159,12 +157,12 @@ def time_access(self):
159157
return time
160158

161159

162-
def open_xarray_dataset(filename: Path | str, netcdf_engine: str) -> xr.Dataset:
160+
def open_xarray_dataset(filename: Path | str) -> xr.Dataset:
163161
try:
164162
# Unfortunately we need to do if-else here, cause the lock-parameter is either False or a Lock-object
165163
# (which we would rather want to have being auto-managed).
166164
# If 'lock' is not specified, the Lock-object is auto-created and managed by xarray internally.
167-
ds = xr.open_mfdataset(filename, decode_cf=True, engine=netcdf_engine)
165+
ds = xr.open_mfdataset(filename, decode_cf=True)
168166
ds["decoded"] = True
169167
except:
170168
warnings.warn( # TODO: Is this warning necessary? What cases does this except block get triggered - is it to do with the bare except???
@@ -174,7 +172,7 @@ def open_xarray_dataset(filename: Path | str, netcdf_engine: str) -> xr.Dataset:
174172
stacklevel=2,
175173
)
176174

177-
ds = xr.open_mfdataset(filename, decode_cf=False, engine=netcdf_engine)
175+
ds = xr.open_mfdataset(filename, decode_cf=False)
178176
ds["decoded"] = False
179177
return ds
180178

parcels/fieldset.py

Lines changed: 0 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -356,9 +356,6 @@ def from_netcdf(
356356
gridindexingtype : str
357357
The type of gridindexing. Either 'nemo' (default), 'mitgcm', 'mom5', 'pop', or 'croco' are supported.
358358
See also the Grid indexing documentation on oceanparcels.org
359-
netcdf_engine :
360-
engine to use for netcdf reading in xarray. Default is 'netcdf',
361-
but in cases where this doesn't work, setting netcdf_engine='scipy' could help. Accepted options are the same as the ``engine`` parameter in ``xarray.open_dataset()``.
362359
**kwargs :
363360
Keyword arguments passed to the :class:`parcels.Field` constructor.
364361

0 commit comments

Comments (0)