@@ -177,10 +177,6 @@ def __init__(
         self.filebuffername = name[1]
         self.data = data
         if grid:
-            if grid.defer_load and isinstance(data, np.ndarray):
-                raise ValueError(
-                    "Cannot combine Grid from defer_loaded Field with np.ndarray data. please specify lon, lat, depth and time dimensions separately"
-                )
             self._grid = grid
         else:
             if (time is not None) and isinstance(time[0], np.datetime64):
@@ -225,14 +221,12 @@ def __init__(
         else:
             self.allow_time_extrapolation = allow_time_extrapolation

-        if not self.grid.defer_load:
-            self.data = self._reshape(self.data)
-            self._loaded_time_indices = range(self.grid.tdim)
-
-            # Hack around the fact that NaN and ridiculously large values
-            # propagate in SciPy's interpolators
-            self.data[np.isnan(self.data)] = 0.0
+        self.data = self._reshape(self.data)
+        self._loaded_time_indices = range(self.grid.tdim)

+        # Hack around the fact that NaN and ridiculously large values
+        # propagate in SciPy's interpolators
+        self.data[np.isnan(self.data)] = 0.0
         self._scaling_factor = None

         self._dimensions = kwargs.pop("dimensions", None)
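With deferred loading removed, __init__ now always reshapes the data and zeroes out NaNs at construction time. A minimal NumPy sketch of that NaN hack in isolation; the array is a hypothetical stand-in for Field.data after _reshape:

    import numpy as np

    # Hypothetical stand-in for Field.data, shape (tdim, zdim, ydim, xdim)
    data = np.array([[[[1.0, np.nan], [2.0, 3.0]]]])

    # Same hack as in __init__: zero NaNs so they don't propagate
    # through SciPy's interpolators
    data[np.isnan(data)] = 0.0
    assert data[0, 0, 0, 1] == 0.0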
@@ -355,7 +349,6 @@ def from_netcdf(
         mesh: Mesh = "spherical",
         timestamps=None,
         allow_time_extrapolation: bool | None = None,
-        deferred_load: bool = True,
         **kwargs,
     ) -> "Field":
         """Create field from netCDF file.
@@ -388,11 +381,6 @@ def from_netcdf(
             boolean whether to allow for extrapolation in time
             (i.e. beyond the last available time snapshot)
             Default is False if dimensions includes time, else True
-        deferred_load : bool
-            boolean whether to only pre-load data (in deferred mode) or
-            fully load them (default: True). It is advised to deferred load the data, since in
-            that case Parcels deals with a better memory management during particle set execution.
-            deferred_load=False is however sometimes necessary for plotting the fields.
         gridindexingtype : str
             The type of gridindexing. Either 'nemo' (default), 'mitgcm', 'mom5', 'pop', or 'croco' are supported.
             See also the Grid indexing documentation on oceanparcels.org
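With deferred_load gone, every from_netcdf call now reads the full dataset eagerly. A hedged usage sketch against the signature shown above; the file name, variable tuple, and dimension names below are placeholders, not values from this repository:

    # Sketch only: "ocean_u.nc", "uo", and the dimension names are hypothetical.
    field = Field.from_netcdf(
        "ocean_u.nc",
        variable=("U", "uo"),  # (field name, name of the variable in the file)
        dimensions={"lon": "longitude", "lat": "latitude", "time": "time"},
        allow_time_extrapolation=False,
    )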
@@ -551,56 +539,29 @@ def from_netcdf(
551539 "time dimension in indices is not necessary anymore. It is then ignored." , FieldSetWarning , stacklevel = 2
552540 )
553541
554- if grid .time .size <= 2 :
555- deferred_load = False
556-
557- if not deferred_load :
558- # Pre-allocate data before reading files into buffer
559- data_list = []
560- ti = 0
561- for tslice , fname in zip (grid .timeslices , data_filenames , strict = True ):
562- with NetcdfFileBuffer ( # type: ignore[operator]
563- fname ,
564- dimensions ,
565- indices ,
566- netcdf_engine ,
567- interp_method = interp_method ,
568- data_full_zdim = data_full_zdim ,
569- ) as filebuffer :
570- # If Field.from_netcdf is called directly, it may not have a 'data' dimension
571- # In that case, assume that 'name' is the data dimension
572- filebuffer .name = variable [1 ]
573- buffer_data = filebuffer .data
574- if len (buffer_data .shape ) == 4 :
575- errormessage = (
576- f"Field { filebuffer .name } expecting a data shape of [tdim={ grid .tdim } , zdim={ grid .zdim } , "
577- f"ydim={ grid .ydim } , xdim={ grid .xdim } ] "
578- f"but got shape { buffer_data .shape } ."
579- )
580- assert buffer_data .shape [0 ] == grid .tdim , errormessage
581- assert buffer_data .shape [2 ] == grid .ydim , errormessage
582- assert buffer_data .shape [3 ] == grid .xdim , errormessage
583-
584- if len (buffer_data .shape ) == 2 :
585- data_list .append (buffer_data .reshape (sum (((len (tslice ), 1 ), buffer_data .shape ), ())))
586- elif len (buffer_data .shape ) == 3 :
587- if len (filebuffer .indices ["depth" ]) > 1 :
588- data_list .append (buffer_data .reshape (sum (((1 ,), buffer_data .shape ), ())))
589- else :
590- if type (tslice ) not in [list , np .ndarray , xr .DataArray ]:
591- tslice = [tslice ]
592- data_list .append (buffer_data .reshape (sum (((len (tslice ), 1 ), buffer_data .shape [1 :]), ())))
593- else :
594- data_list .append (buffer_data )
595- if type (tslice ) not in [list , np .ndarray , xr .DataArray ]:
596- tslice = [tslice ]
597- ti += len (tslice )
598- data = np .concatenate (data_list , axis = 0 )
599- else :
600- grid ._defer_load = True
601- grid ._ti = - 1
602- data = DeferredArray ()
603- data .compute_shape (grid .xdim , grid .ydim , grid .zdim , grid .tdim , len (grid .timeslices ))
542+ with NetcdfFileBuffer ( # type: ignore[operator]
543+ data_filenames ,
544+ dimensions ,
545+ indices ,
546+ netcdf_engine ,
547+ interp_method = interp_method ,
548+ data_full_zdim = data_full_zdim ,
549+ ) as filebuffer :
550+ # If Field.from_netcdf is called directly, it may not have a 'data' dimension
551+ # In that case, assume that 'name' is the data dimension
552+ filebuffer .name = variable [1 ]
553+ buffer_data = filebuffer .data
554+ if len (buffer_data .shape ) == 4 :
555+ errormessage = (
556+ f"Field { filebuffer .name } expecting a data shape of [tdim={ grid .tdim } , zdim={ grid .zdim } , "
557+ f"ydim={ grid .ydim } , xdim={ grid .xdim } ] "
558+ f"but got shape { buffer_data .shape } ."
559+ )
560+ assert buffer_data .shape [0 ] == grid .tdim , errormessage
561+ assert buffer_data .shape [2 ] == grid .ydim , errormessage
562+ assert buffer_data .shape [3 ] == grid .xdim , errormessage
563+
564+ data = buffer_data
604565
605566 if allow_time_extrapolation is None :
606567 allow_time_extrapolation = False if "time" in dimensions else True
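The replacement code makes a single NetcdfFileBuffer pass over all data_filenames and keeps the buffer's 4-D layout [tdim, zdim, ydim, xdim]. A small NumPy sketch of the same shape check (grid sizes hypothetical); note that dimension 1 is not asserted, presumably because the vertical axis can differ from the grid's zdim depending on interp_method and data_full_zdim:

    import numpy as np

    tdim, zdim, ydim, xdim = 3, 2, 4, 5  # hypothetical grid sizes
    buffer_data = np.zeros((tdim, zdim, ydim, xdim))

    # Mirrors the asserts above: time and horizontal dims must match the grid
    assert buffer_data.shape[0] == tdim
    assert buffer_data.shape[2] == ydim
    assert buffer_data.shape[3] == xdim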
@@ -727,8 +688,7 @@ def set_scaling_factor(self, factor):
         if self._scaling_factor:
             raise NotImplementedError(f"Scaling factor for field {self.name} already defined.")
         self._scaling_factor = factor
-        if not self.grid.defer_load:
-            self.data *= factor
+        self.data *= factor

     def set_depth_from_field(self, field):
         """Define the depth dimensions from another (time-varying) field.
@@ -913,69 +873,6 @@ def _rescale_and_set_minmax(self, data):
             data *= self._scaling_factor
         return data

-    def _data_concatenate(self, data, data_to_concat, tindex):
-        if data[tindex] is not None:
-            if isinstance(data, np.ndarray):
-                data[tindex] = None
-            elif isinstance(data, list):
-                del data[tindex]
-        if tindex == 0:
-            data = np.concatenate([data_to_concat, data[tindex + 1 :, :]], axis=0)
-        elif tindex == 1:
-            data = np.concatenate([data[:tindex, :], data_to_concat], axis=0)
-        else:
-            raise ValueError("data_concatenate is used for computeTimeChunk, with tindex in [0, 1]")
-        return data
-
-    def computeTimeChunk(self, data, tindex):
-        g = self.grid
-        timestamp = self.timestamps
-        if timestamp is not None:
-            summedlen = np.cumsum([len(ls) for ls in self.timestamps])
-            if g._ti + tindex >= summedlen[-1]:
-                ti = g._ti + tindex - summedlen[-1]
-            else:
-                ti = g._ti + tindex
-            timestamp = self.timestamps[np.where(ti < summedlen)[0][0]]
-
-        filebuffer = NetcdfFileBuffer(
-            self._dataFiles[g._ti + tindex],
-            self.dimensions,
-            self.indices,
-            netcdf_engine=self.netcdf_engine,
-            timestamp=timestamp,
-            interp_method=self.interp_method,
-            data_full_zdim=self.data_full_zdim,
-        )
-        filebuffer.__enter__()
-        time_data = filebuffer.time
-        time_data = g.time_origin.reltime(time_data)
-        filebuffer.ti = (time_data <= g.time[tindex]).argmin() - 1
-        if self.netcdf_engine != "xarray":
-            filebuffer.name = self.filebuffername
-        buffer_data = filebuffer.data
-        if len(buffer_data.shape) == 2:
-            buffer_data = np.reshape(buffer_data, sum(((1, 1), buffer_data.shape), ()))
-        elif len(buffer_data.shape) == 3 and g.zdim > 1:
-            buffer_data = np.reshape(buffer_data, sum(((1,), buffer_data.shape), ()))
-        elif len(buffer_data.shape) == 3:
-            buffer_data = np.reshape(
-                buffer_data,
-                sum(
-                    (
-                        (
-                            buffer_data.shape[0],
-                            1,
-                        ),
-                        buffer_data.shape[1:],
-                    ),
-                    (),
-                ),
-            )
-        data = self._data_concatenate(data, buffer_data, tindex)
-        self.filebuffers[tindex] = filebuffer
-        return data
-
     def ravel_index(self, zi, yi, xi):
         """Return the flat index of the given grid points.

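ravel_index returns the flat index of a (zi, yi, xi) grid point. A hedged NumPy equivalent, assuming the usual C-order (z, y, x) flattening; the grid sizes here are made up:

    import numpy as np

    zdim, ydim, xdim = 2, 3, 4  # hypothetical grid sizes
    zi, yi, xi = 1, 2, 3

    # Presumed equivalent of Field.ravel_index under C-order flattening
    flat = np.ravel_multi_index((zi, yi, xi), (zdim, ydim, xdim))
    assert flat == xi + xdim * (yi + ydim * zi)  # 23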
@@ -1560,32 +1457,6 @@ def __getitem__(self, key):
             return _deal_with_errors(error, key, vector_type=self.vector_type)


-class DeferredArray:
-    """Class used for throwing error when Field.data is not read in deferred loading mode."""
-
-    data_shape = ()
-
-    def __init__(self):
-        self.data_shape = (1,)
-
-    def compute_shape(self, xdim, ydim, zdim, tdim, tslices):
-        if zdim == 1 and tdim == 1:
-            self.data_shape = (tslices, 1, ydim, xdim)
-        elif zdim > 1 or tdim > 1:
-            if zdim > 1:
-                self.data_shape = (1, zdim, ydim, xdim)
-            else:
-                self.data_shape = (max(tdim, tslices), 1, ydim, xdim)
-        else:
-            self.data_shape = (tdim, zdim, ydim, xdim)
-        return self.data_shape
-
-    def __getitem__(self, key):
-        raise RuntimeError(
-            "Field is in deferred_load mode, so can't be accessed. Use .computeTimeChunk() method to force loading of data"
-        )
-
-
 class NestedField(list):
     """NestedField is a class that allows for interpolation of fields on different grids of potentially varying resolution.
