
Commit 37a8970

ammedd, pre-commit-ci[bot], and erikvansebille authored
add drifter + argo data dowload (quick fix, can be made nicer) (#122)
* ugly add drifter + argo data dowload

* [pre-commit.ci] auto fixes from pre-commit.com hooks

  for more information, see https://pre-commit.ci

* Changing default maximum_depth

* Only downloading default fields if needed

* Fixing some copernicusmarine commands

---------

Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com>
Co-authored-by: Erik van Sebille <[email protected]>
1 parent ee17b6c commit 37a8970
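
In short, this commit derives the list of instruments from the schedule's waypoints and only triggers each group of Copernicus Marine downloads when a matching instrument is present. A minimal sketch of that gating, using only names that appear in the diff below (the surrounding fetch() context, including schedule and the download blocks themselves, is assumed):

# Sketch of the instrument gating added in src/virtualship/cli/commands.py
# (see the diff below); schedule and the download blocks are assumed to
# already exist inside fetch().
instruments_in_schedule = [
    waypoint.instrument.name for waypoint in schedule.waypoints
]

if {"XBT", "CTD", "SHIP_UNDERWATER_ST"} & set(instruments_in_schedule):
    ...  # download bathymetry plus the default UV, S and T fields
if "DRIFTER" in instruments_in_schedule:
    ...  # download near-surface UV and T fields for drifters
if "ARGO_FLOAT" in instruments_in_schedule:
    ...  # download UV, S and T fields over the full depth range for argo floats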

File tree

2 files changed, +149 -52 lines changed

src/virtualship/cli/commands.py

Lines changed: 148 additions & 51 deletions
@@ -107,6 +107,9 @@ def fetch(path: str | Path, username: str | None, password: str | None) -> None:
     time_range = schedule.space_time_region.time_range
     start_datetime = time_range.start_time
     end_datetime = time_range.end_time
+    instruments_in_schedule = [
+        waypoint.instrument.name for waypoint in schedule.waypoints
+    ]
 
     # Create download folder and set download metadata
     download_folder = data_folder / hash_to_filename(space_time_region_hash)
@@ -116,57 +119,151 @@ def fetch(path: str | Path, username: str | None, password: str | None) -> None:
     )
     shutil.copyfile(path / SCHEDULE, download_folder / SCHEDULE)
 
-    # Define all datasets to download, including bathymetry
-    download_dict = {
-        "Bathymetry": {
-            "dataset_id": "cmems_mod_glo_phy_my_0.083deg_static",
-            "variables": ["deptho"],
-            "output_filename": "bathymetry.nc",
-        },
-        "UVdata": {
-            "dataset_id": "cmems_mod_glo_phy-cur_anfc_0.083deg_PT6H-i",
-            "variables": ["uo", "vo"],
-            "output_filename": "default_uv.nc",
-        },
-        "Sdata": {
-            "dataset_id": "cmems_mod_glo_phy-so_anfc_0.083deg_PT6H-i",
-            "variables": ["so"],
-            "output_filename": "default_s.nc",
-        },
-        "Tdata": {
-            "dataset_id": "cmems_mod_glo_phy-thetao_anfc_0.083deg_PT6H-i",
-            "variables": ["thetao"],
-            "output_filename": "default_t.nc",
-        },
-    }
-
-    # Iterate over all datasets and download each based on space_time_region
-    try:
-        for dataset in download_dict.values():
-            copernicusmarine.subset(
-                dataset_id=dataset["dataset_id"],
-                variables=dataset["variables"],
-                minimum_longitude=spatial_range.minimum_longitude,
-                maximum_longitude=spatial_range.maximum_longitude,
-                minimum_latitude=spatial_range.minimum_latitude,
-                maximum_latitude=spatial_range.maximum_latitude,
-                start_datetime=start_datetime,
-                end_datetime=end_datetime,
-                minimum_depth=abs(spatial_range.minimum_depth),
-                maximum_depth=abs(spatial_range.maximum_depth),
-                output_filename=dataset["output_filename"],
-                output_directory=download_folder,
-                username=username,
-                password=password,
-                force_download=True,
-                overwrite=True,
-            )
-    except InvalidUsernameOrPassword as e:
-        shutil.rmtree(download_folder)
-        raise e
-
-    complete_download(download_folder)
-    click.echo("Data download based on space-time region completed.")
+    if set(["XBT", "CTD", "SHIP_UNDERWATER_ST"]) & set(instruments_in_schedule):
+        print("Ship data will be downloaded")
+
+        # Define all ship datasets to download, including bathymetry
+        download_dict = {
+            "Bathymetry": {
+                "dataset_id": "cmems_mod_glo_phy_my_0.083deg_static",
+                "variables": ["deptho"],
+                "output_filename": "bathymetry.nc",
+            },
+            "UVdata": {
+                "dataset_id": "cmems_mod_glo_phy-cur_anfc_0.083deg_PT6H-i",
+                "variables": ["uo", "vo"],
+                "output_filename": "default_uv.nc",
+            },
+            "Sdata": {
+                "dataset_id": "cmems_mod_glo_phy-so_anfc_0.083deg_PT6H-i",
+                "variables": ["so"],
+                "output_filename": "default_s.nc",
+            },
+            "Tdata": {
+                "dataset_id": "cmems_mod_glo_phy-thetao_anfc_0.083deg_PT6H-i",
+                "variables": ["thetao"],
+                "output_filename": "default_t.nc",
+            },
+        }
+
+        # Iterate over all datasets and download each based on space_time_region
+        try:
+            for dataset in download_dict.values():
+                copernicusmarine.subset(
+                    dataset_id=dataset["dataset_id"],
+                    variables=dataset["variables"],
+                    minimum_longitude=spatial_range.minimum_longitude,
+                    maximum_longitude=spatial_range.maximum_longitude,
+                    minimum_latitude=spatial_range.minimum_latitude,
+                    maximum_latitude=spatial_range.maximum_latitude,
+                    start_datetime=start_datetime,
+                    end_datetime=end_datetime,
+                    minimum_depth=abs(spatial_range.minimum_depth),
+                    maximum_depth=abs(spatial_range.maximum_depth),
+                    output_filename=dataset["output_filename"],
+                    output_directory=download_folder,
+                    username=username,
+                    password=password,
+                    overwrite=True,
+                    coordinates_selection_method="outside",
+                )
+        except InvalidUsernameOrPassword as e:
+            shutil.rmtree(download_folder)
+            raise e
+
+        complete_download(download_folder)
+        click.echo("Ship data download based on space-time region completed.")
+
+    if "DRIFTER" in instruments_in_schedule:
+        print("Drifter data will be downloaded")
+        drifter_download_dict = {
+            "UVdata": {
+                "dataset_id": "cmems_mod_glo_phy-cur_anfc_0.083deg_PT6H-i",
+                "variables": ["uo", "vo"],
+                "output_filename": "drifter_uv.nc",
+            },
+            "Tdata": {
+                "dataset_id": "cmems_mod_glo_phy-thetao_anfc_0.083deg_PT6H-i",
+                "variables": ["thetao"],
+                "output_filename": "drifter_t.nc",
+            },
+        }
+
+        # Iterate over all datasets and download each based on space_time_region
+        try:
+            for dataset in drifter_download_dict.values():
+                copernicusmarine.subset(
+                    dataset_id=dataset["dataset_id"],
+                    variables=dataset["variables"],
+                    minimum_longitude=spatial_range.minimum_longitude + 3.0,
+                    maximum_longitude=spatial_range.maximum_longitude + 3.0,
+                    minimum_latitude=spatial_range.minimum_latitude + 3.0,
+                    maximum_latitude=spatial_range.maximum_latitude + 3.0,
+                    start_datetime=start_datetime,
+                    end_datetime=end_datetime,
+                    minimum_depth=abs(1),
+                    maximum_depth=abs(1),
+                    output_filename=dataset["output_filename"],
+                    output_directory=download_folder,
+                    username=username,
+                    password=password,
+                    overwrite=True,
+                    coordinates_selection_method="outside",
+                )
+        except InvalidUsernameOrPassword as e:
+            shutil.rmtree(download_folder)
+            raise e
+
+        complete_download(download_folder)
+        click.echo("Drifter data download based on space-time region completed.")
+
+    if "ARGO_FLOAT" in instruments_in_schedule:
+        print("Argo float data will be downloaded")
+        argo_download_dict = {
+            "UVdata": {
+                "dataset_id": "cmems_mod_glo_phy-cur_anfc_0.083deg_PT6H-i",
+                "variables": ["uo", "vo"],
+                "output_filename": "argo_float_uv.nc",
+            },
+            "Sdata": {
+                "dataset_id": "cmems_mod_glo_phy-so_anfc_0.083deg_PT6H-i",
+                "variables": ["so"],
+                "output_filename": "argo_float_s.nc",
+            },
+            "Tdata": {
+                "dataset_id": "cmems_mod_glo_phy-thetao_anfc_0.083deg_PT6H-i",
+                "variables": ["thetao"],
+                "output_filename": "argo_float_t.nc",
+            },
+        }
+
+        # Iterate over all datasets and download each based on space_time_region
+        try:
+            for dataset in argo_download_dict.values():
+                copernicusmarine.subset(
+                    dataset_id=dataset["dataset_id"],
+                    variables=dataset["variables"],
+                    minimum_longitude=spatial_range.minimum_longitude + 3.0,
+                    maximum_longitude=spatial_range.maximum_longitude + 3.0,
+                    minimum_latitude=spatial_range.minimum_latitude + 3.0,
+                    maximum_latitude=spatial_range.maximum_latitude + 3.0,
+                    start_datetime=start_datetime,
+                    end_datetime=end_datetime,
+                    minimum_depth=abs(0),
+                    maximum_depth=abs(spatial_range.maximum_depth),
+                    output_filename=dataset["output_filename"],
+                    output_directory=download_folder,
+                    username=username,
+                    password=password,
+                    overwrite=True,
+                    coordinates_selection_method="outside",
+                )
+        except InvalidUsernameOrPassword as e:
+            shutil.rmtree(download_folder)
+            raise e
+
+        complete_download(download_folder)
+        click.echo("Argo_float data download based on space-time region completed.")
 
 
 @click.command()
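
The commit title flags this as a quick fix that "can be made nicer": the three download blocks above differ only in their dataset dictionary, the 3-degree offset applied to the bounding box, and the depth range. One possible follow-up, not part of this commit, would be a small helper around copernicusmarine.subset, roughly as sketched here (assumed to be defined inside fetch() so it can close over spatial_range, start_datetime, end_datetime, download_folder, username and password):

# Hypothetical refactor, not in this commit: one helper for the three
# near-identical download loops. Keyword arguments mirror the
# copernicusmarine.subset calls in the diff above.
def download_datasets(download_dict, lon_lat_offset=0.0,
                      min_depth=None, max_depth=None):
    try:
        for dataset in download_dict.values():
            copernicusmarine.subset(
                dataset_id=dataset["dataset_id"],
                variables=dataset["variables"],
                minimum_longitude=spatial_range.minimum_longitude + lon_lat_offset,
                maximum_longitude=spatial_range.maximum_longitude + lon_lat_offset,
                minimum_latitude=spatial_range.minimum_latitude + lon_lat_offset,
                maximum_latitude=spatial_range.maximum_latitude + lon_lat_offset,
                start_datetime=start_datetime,
                end_datetime=end_datetime,
                minimum_depth=abs(spatial_range.minimum_depth) if min_depth is None else min_depth,
                maximum_depth=abs(spatial_range.maximum_depth) if max_depth is None else max_depth,
                output_filename=dataset["output_filename"],
                output_directory=download_folder,
                username=username,
                password=password,
                overwrite=True,
                coordinates_selection_method="outside",
            )
    except InvalidUsernameOrPassword:
        shutil.rmtree(download_folder)
        raise
    complete_download(download_folder)

# The drifter block, for example, would then reduce to:
if "DRIFTER" in instruments_in_schedule:
    download_datasets(drifter_download_dict, lon_lat_offset=3.0,
                      min_depth=1, max_depth=1)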

src/virtualship/static/schedule.yaml

Lines changed: 1 addition & 1 deletion
@@ -5,7 +5,7 @@ space_time_region:
     minimum_latitude: -5
     maximum_latitude: 5
     minimum_depth: 0
-    maximum_depth: 1500
+    maximum_depth: 2000
   time_range:
     start_time: 2023-01-01 00:00:00
     end_time: 2023-02-01 00:00:00
