Commit 740dc25

Remove netcdf_engine from Field and FieldSet
1 parent eff0efe commit 740dc25

File tree

3 files changed: +8 −22 lines


parcels/field.py

Lines changed: 4 additions & 13 deletions
@@ -226,7 +226,6 @@ def __init__(
 
         self._dimensions = kwargs.pop("dimensions", None)
         self._dataFiles = kwargs.pop("dataFiles", None)
-        self._netcdf_engine = kwargs.pop("netcdf_engine", "netcdf4")
         self._creation_log = kwargs.pop("creation_log", "")
 
         # data_full_zdim is the vertical dimension of the complete field data, ignoring the indices.
@@ -276,10 +275,6 @@ def interp_method(self, value):
     def gridindexingtype(self):
         return self._gridindexingtype
 
-    @property
-    def netcdf_engine(self):
-        return self._netcdf_engine
-
     @classmethod
     def _get_dim_filenames(cls, filenames, dim):
         if isinstance(filenames, str) or not isinstance(filenames, collections.abc.Iterable):
@@ -295,11 +290,11 @@ def _get_dim_filenames(cls, filenames, dim):
         return filenames
 
     @staticmethod
-    def _collect_timeslices(data_filenames, dimensions, indices, netcdf_engine):
+    def _collect_timeslices(data_filenames, dimensions, indices):
         timeslices = []
         dataFiles = []
         for fname in data_filenames:
-            with NetcdfFileBuffer(fname, dimensions, indices, netcdf_engine=netcdf_engine) as filebuffer:
+            with NetcdfFileBuffer(fname, dimensions, indices) as filebuffer:
                 ftime = filebuffer.time
                 timeslices.append(ftime)
                 dataFiles.append([fname] * len(ftime))
@@ -385,7 +380,6 @@ def from_netcdf(
             raise NotImplementedError("Vertically adaptive meshes not implemented for from_netcdf()")
         depth_filename = depth_filename[0]
 
-        netcdf_engine = kwargs.pop("netcdf_engine", "netcdf4")
         gridindexingtype = kwargs.get("gridindexingtype", "nemo")
 
         indices = {}  # TODO Nick: Cleanup
@@ -440,9 +434,7 @@ def from_netcdf(
             # Concatenate time variable to determine overall dimension
            # across multiple files
            if "time" in dimensions:
-                time, time_origin, timeslices, dataFiles = cls._collect_timeslices(
-                    data_filenames, dimensions, indices, netcdf_engine
-                )
+                time, time_origin, timeslices, dataFiles = cls._collect_timeslices(data_filenames, dimensions, indices)
                grid = Grid.create_grid(lon, lat, depth, time, time_origin=time_origin, mesh=mesh)
                kwargs["dataFiles"] = dataFiles
            else:  # e.g. for the CROCO CS_w field, see https://github.com/OceanParcels/Parcels/issues/1831
@@ -451,7 +443,7 @@ def from_netcdf(
         elif grid is not None and ("dataFiles" not in kwargs or kwargs["dataFiles"] is None):
             # ==== means: the field has a shared grid, but may have different data files, so we need to collect the
             # ==== correct file time series again.
-            _, _, _, dataFiles = cls._collect_timeslices(data_filenames, dimensions, indices, netcdf_engine)
+            _, _, _, dataFiles = cls._collect_timeslices(data_filenames, dimensions, indices)
             kwargs["dataFiles"] = dataFiles
 
         if "time" in indices:
@@ -486,7 +478,6 @@ def from_netcdf(
         allow_time_extrapolation = False if "time" in dimensions else True
 
         kwargs["dimensions"] = dimensions.copy()
-        kwargs["netcdf_engine"] = netcdf_engine
 
         return cls(
             variable,
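
For context, a short usage sketch of the slimmed-down file buffer as it is now driven from `_collect_timeslices`; the import path, filename, and dimension/index mappings below are assumptions for illustration only, not part of this commit:

from parcels.fieldfilebuffer import NetcdfFileBuffer  # assumed import path

# Placeholder mappings; the real keys depend on the NetCDF file layout.
dimensions = {"lon": "nav_lon", "lat": "nav_lat", "time": "time_counter"}
indices = {}

# The buffer no longer accepts a netcdf_engine keyword; the backend choice is
# deferred to xarray when the dataset is opened on __enter__.
with NetcdfFileBuffer("ocean_U_2000.nc", dimensions, indices) as filebuffer:
    ftime = filebuffer.time  # time values contributed by this file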

parcels/fieldfilebuffer.py

Lines changed: 4 additions & 6 deletions
@@ -19,7 +19,6 @@ def __init__(
         interp_method: InterpMethodOption = "linear",
         data_full_zdim=None,
         gridindexingtype="nemo",
-        netcdf_engine="netcdf4",
     ):
         self.filename: PathLike | list[PathLike] = filename
         self.dimensions = dimensions  # Dict with dimension keys for file data
@@ -28,10 +27,9 @@ def __init__(
         self.interp_method = interp_method
         self.gridindexingtype = gridindexingtype
         self.data_full_zdim = data_full_zdim
-        self.netcdf_engine = netcdf_engine
 
     def __enter__(self):
-        self.dataset = open_xarray_dataset(self.filename, self.netcdf_engine)
+        self.dataset = open_xarray_dataset(self.filename)
         return self
 
     def __exit__(self, type, value, traceback):
@@ -159,12 +157,12 @@ def time_access(self):
         return time
 
 
-def open_xarray_dataset(filename: Path | str, netcdf_engine: str) -> xr.Dataset:
+def open_xarray_dataset(filename: Path | str) -> xr.Dataset:
     try:
         # Unfortunately we need to do if-else here, cause the lock-parameter is either False or a Lock-object
         # (which we would rather want to have being auto-managed).
         # If 'lock' is not specified, the Lock-object is auto-created and managed by xarray internally.
-        ds = xr.open_mfdataset(filename, decode_cf=True, engine=netcdf_engine)
+        ds = xr.open_mfdataset(filename, decode_cf=True)
         ds["decoded"] = True
     except:
         warnings.warn(  # TODO: Is this warning necessary? What cases does this except block get triggered - is it to do with the bare except???
@@ -174,7 +172,7 @@ def open_xarray_dataset(filename: Path | str, netcdf_engine: str) -> xr.Dataset:
             stacklevel=2,
         )
 
-        ds = xr.open_mfdataset(filename, decode_cf=False, engine=netcdf_engine)
+        ds = xr.open_mfdataset(filename, decode_cf=False)
         ds["decoded"] = False
         return ds
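
On the xarray side, dropping the explicit engine argument simply means the backend is auto-selected; a minimal sketch with a placeholder file pattern, showing the old forced-backend behaviour for comparison:

import xarray as xr

# Without an engine argument, xarray picks an installed backend
# (netcdf4, h5netcdf, scipy, ...) based on the file format.
ds = xr.open_mfdataset("ocean_*.nc", decode_cf=True)

# Forcing a specific backend is still possible directly through xarray,
# which is what the removed netcdf_engine keyword used to forward.
ds = xr.open_mfdataset("ocean_*.nc", decode_cf=True, engine="scipy")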

parcels/fieldset.py

Lines changed: 0 additions & 3 deletions
@@ -356,9 +356,6 @@ def from_netcdf(
         gridindexingtype : str
             The type of gridindexing. Either 'nemo' (default), 'mitgcm', 'mom5', 'pop', or 'croco' are supported.
             See also the Grid indexing documentation on oceanparcels.org
-        netcdf_engine :
-            engine to use for netcdf reading in xarray. Default is 'netcdf',
-            but in cases where this doesn't work, setting netcdf_engine='scipy' could help. Accepted options are the same as the ``engine`` parameter in ``xarray.open_dataset()``.
         **kwargs :
             Keyword arguments passed to the :class:`parcels.Field` constructor.
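
For users, the visible effect is only at call sites that passed the removed keyword; a hedged before/after sketch, in which the filenames, variables, and dimension names are placeholders rather than part of this commit:

from parcels import FieldSet

filenames = {"U": "ocean_u_*.nc", "V": "ocean_v_*.nc"}
variables = {"U": "uo", "V": "vo"}
dimensions = {"lon": "longitude", "lat": "latitude", "time": "time"}

# Previously the backend could be overridden via the documented keyword:
# fieldset = FieldSet.from_netcdf(filenames, variables, dimensions, netcdf_engine="scipy")

# After this commit the keyword is gone; xarray selects the backend itself.
fieldset = FieldSet.from_netcdf(filenames, variables, dimensions)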
