-
Notifications
You must be signed in to change notification settings - Fork 10
Converting MFP CSV to YAML schedule #111
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
Changes from 21 commits
5c5bf9a
40c7ff9
366dede
d046444
e3199fa
d9fe46a
7dc9bd7
a79433c
11332f8
ad54992
66adb18
2672afa
a370641
b87d944
eba08b8
c0a52ac
526d2af
c51043d
f3daaa7
4c59420
b67b15d
6f63cd4
222df85
c94567b
File filter
Filter by extension
Conversations
Jump to
Diff view
Diff view
There are no files selected for viewing
| Original file line number | Diff line number | Diff line change |
|---|---|---|
|
|
@@ -12,6 +12,7 @@ dependencies: | |
| - pip | ||
| - pyyaml | ||
| - copernicusmarine >= 2 | ||
| - openpyxl | ||
|
|
||
| # linting | ||
| - pre-commit | ||
|
|
||
| Original file line number | Diff line number | Diff line change |
|---|---|---|
|
|
@@ -9,3 +9,4 @@ class InstrumentType(Enum): | |
| CTD = "CTD" | ||
| DRIFTER = "DRIFTER" | ||
| ARGO_FLOAT = "ARGO_FLOAT" | ||
| XBT = "XBT" | ||
| Original file line number | Diff line number | Diff line change |
|---|---|---|
|
|
@@ -42,13 +42,14 @@ def _check_lon_lat_domain(self) -> Self: | |
| class TimeRange(BaseModel): | ||
| """Defines the temporal boundaries for a space-time region.""" | ||
|
|
||
| start_time: datetime | ||
| end_time: datetime | ||
| start_time: datetime | None = None | ||
| end_time: datetime | None = None | ||
|
|
||
| @model_validator(mode="after") | ||
| def _check_time_range(self) -> Self: | ||
| if not self.start_time < self.end_time: | ||
| raise ValueError("start_time must be before end_time") | ||
| if self.start_time and self.end_time: | ||
| if not self.start_time < self.end_time: | ||
| raise ValueError("start_time must be before end_time") | ||
| return self | ||
|
Collaborator
Author
There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. @VeckoTheGecko @ammedd
Collaborator
There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. I see. I thought that pydantic would have a way of disabling validation during the initialisation of the object, but looking further at the documentation it's looking like that isn't possible. Long-term it would be good to have |
||
|
|
||
|
|
||
|
|
||
| Original file line number | Diff line number | Diff line change |
|---|---|---|
| @@ -1,16 +1,28 @@ | ||
| """Waypoint class.""" | ||
|
|
||
| from dataclasses import dataclass | ||
| from datetime import datetime | ||
|
|
||
| from pydantic import BaseModel, field_serializer | ||
|
|
||
| from ..location import Location | ||
| from .instrument_type import InstrumentType | ||
|
|
||
|
|
||
| @dataclass | ||
| class Waypoint: | ||
| class Waypoint(BaseModel): | ||
| """A Waypoint to sail to with an optional time and an optional instrument.""" | ||
|
|
||
| location: Location | ||
| time: datetime | None = None | ||
| instrument: InstrumentType | list[InstrumentType] | None = None | ||
|
|
||
| @field_serializer("instrument") | ||
| def serialize_instrument(self, instrument): | ||
| """Ensure InstrumentType is serialized as a string (or list of strings).""" | ||
| if isinstance(instrument, list): | ||
| return [inst.value for inst in instrument] | ||
| return instrument.value if instrument else None | ||
|
|
||
| @field_serializer("time") | ||
| def serialize_time(self, time): | ||
| """Ensure datetime is formatted properly in YAML.""" | ||
| return time.strftime("%Y-%m-%d %H:%M:%S") if time else None | ||
|
||
iuryt marked this conversation as resolved.
Show resolved
Hide resolved
|
| Original file line number | Diff line number | Diff line change |
|---|---|---|
| @@ -0,0 +1,102 @@ | ||
| from unittest.mock import patch | ||
|
|
||
| import pandas as pd | ||
| import pytest | ||
|
|
||
| from virtualship.expedition.instrument_type import InstrumentType | ||
| from virtualship.expedition.schedule import Schedule | ||
| from virtualship.utils import mfp_to_yaml | ||
|
|
||
| # Sample correct MFP data | ||
| VALID_MFP_DATA = pd.DataFrame( | ||
| { | ||
| "Station Type": ["A", "B", "C"], | ||
| "Name": ["Station1", "Station2", "Station3"], | ||
| "Latitude": [30, 31, 32], | ||
| "Longitude": [-44, -45, -46], | ||
| "Instrument": ["CTD, DRIFTER", "ARGO_FLOAT", "XBT, CTD, DRIFTER"], | ||
ammedd marked this conversation as resolved.
Show resolved
Hide resolved
|
||
| } | ||
| ) | ||
|
|
||
| # Missing required columns | ||
| MISSING_HEADERS_DATA = pd.DataFrame( | ||
| {"Station Type": ["A"], "Name": ["Station1"], "Latitude": [10.5]} | ||
| ) | ||
|
|
||
| # Extra unexpected columns | ||
| EXTRA_HEADERS_DATA = VALID_MFP_DATA.copy() | ||
| EXTRA_HEADERS_DATA["Unexpected Column"] = ["Extra1", "Extra2", "Extra3"] | ||
|
|
||
|
|
||
| @patch("pandas.read_excel", return_value=VALID_MFP_DATA) | ||
| def test_mfp_to_yaml_success(mock_read_excel, tmp_path): | ||
| """Test that mfp_to_yaml correctly processes a valid MFP Excel file.""" | ||
| yaml_output_path = tmp_path / "schedule.yaml" | ||
|
|
||
| # Run function (No need to mock open() for YAML, real file is created) | ||
| mfp_to_yaml("mock_file.xlsx", yaml_output_path) | ||
|
|
||
| # Ensure the YAML file was written | ||
| assert yaml_output_path.exists() | ||
|
|
||
| # Load YAML and validate contents | ||
| data = Schedule.from_yaml(yaml_output_path) | ||
|
|
||
| assert len(data.waypoints) == 3 | ||
| assert data.waypoints[0].instrument == [InstrumentType.CTD, InstrumentType.DRIFTER] | ||
| assert data.waypoints[1].instrument == [InstrumentType.ARGO_FLOAT] | ||
| assert data.waypoints[2].instrument == [ | ||
| InstrumentType.XBT, | ||
| InstrumentType.CTD, | ||
| InstrumentType.DRIFTER, | ||
| ] | ||
|
|
||
|
|
||
| @patch("pandas.read_excel", return_value=MISSING_HEADERS_DATA) | ||
| def test_mfp_to_yaml_missing_headers(mock_read_excel, tmp_path): | ||
| """Test that mfp_to_yaml raises an error when required columns are missing.""" | ||
| yaml_output_path = tmp_path / "schedule.yaml" | ||
|
|
||
| with pytest.raises( | ||
| ValueError, match="Error: Found columns .* but expected columns .*" | ||
| ): | ||
| mfp_to_yaml("mock_file.xlsx", yaml_output_path) | ||
|
|
||
|
|
||
| @patch("pandas.read_excel", return_value=EXTRA_HEADERS_DATA) | ||
| @patch("builtins.print") # Capture printed warnings | ||
| def test_mfp_to_yaml_extra_headers(mock_print, mock_read_excel, tmp_path): | ||
| """Test that mfp_to_yaml prints a warning when extra columns are found.""" | ||
| yaml_output_path = tmp_path / "schedule.yaml" | ||
|
|
||
| # Run function | ||
| mfp_to_yaml("mock_file.xlsx", yaml_output_path) | ||
|
|
||
| # Ensure a warning message was printed | ||
| mock_print.assert_any_call( | ||
| "Warning: Found additional unexpected columns ['Unexpected Column']. " | ||
| "Manually added columns have no effect. " | ||
| "If the MFP export format changed, please submit an issue: " | ||
| "https://github.com/OceanParcels/virtualship/issues." | ||
| ) | ||
|
|
||
|
|
||
| @patch("pandas.read_excel", return_value=VALID_MFP_DATA) | ||
| def test_mfp_to_yaml_instrument_conversion(mock_read_excel, tmp_path): | ||
| """Test that instruments are correctly converted into InstrumentType enums.""" | ||
| yaml_output_path = tmp_path / "schedule.yaml" | ||
|
|
||
| # Run function | ||
| mfp_to_yaml("mock_file.xlsx", yaml_output_path) | ||
|
|
||
| # Load the generated YAML | ||
| data = Schedule.from_yaml(yaml_output_path) | ||
|
|
||
| assert isinstance(data.waypoints[0].instrument, list) | ||
| assert data.waypoints[0].instrument == [InstrumentType.CTD, InstrumentType.DRIFTER] | ||
| assert data.waypoints[1].instrument == [InstrumentType.ARGO_FLOAT] | ||
| assert data.waypoints[2].instrument == [ | ||
| InstrumentType.XBT, | ||
| InstrumentType.CTD, | ||
| InstrumentType.DRIFTER, | ||
| ] | ||
Uh oh!
There was an error while loading. Please reload this page.