Commit 940348a
Addressed review comments
bourque committed Sep 3, 2024
1 parent ba75ec4 commit 940348a
Showing 2 changed files with 50 additions and 58 deletions.
34 changes: 13 additions & 21 deletions imap_processing/codice/codice_l1a.py
@@ -22,7 +22,6 @@

from imap_processing import imap_module_directory
from imap_processing.cdf.imap_cdf_manager import ImapCdfAttributes
-from imap_processing.cdf.utils import met_to_j2000ns
from imap_processing.codice import constants
from imap_processing.codice.decompress import decompress
from imap_processing.codice.utils import CODICEAPID
@@ -100,16 +99,18 @@ def configure_data_products(self, apid: int) -> None:
self.dataset_name = config["dataset_name"]
self.instrument = config["instrument"]

-def create_science_dataset(self, met: np.int64, data_version: str) -> xr.Dataset:
+def create_science_dataset(
+    self, packet: xr.Dataset, data_version: str
+) -> xr.Dataset:
"""
Create an ``xarray`` dataset for the unpacked science data.
The dataset can then be written to a CDF file.
Parameters
----------
-met : numpy.int64
-    The mission elapsed time of the packet, used to determine epoch data.
+packet : xarray.Dataset
+    The packet to process.
data_version : str
Version of the data product being created.
@@ -126,7 +127,7 @@ def create_science_dataset(self, met: np.int64, data_version: str) -> xr.Dataset

# Define coordinates
epoch = xr.DataArray(
-[met_to_j2000ns(met)],
+packet.epoch,
name="epoch",
dims=["epoch"],
attrs=cdf_attrs.get_variable_attributes("epoch"),
@@ -371,9 +372,6 @@ def create_event_dataset(
elif apid == CODICEAPID.COD_HI_PHA:
dataset_name = "imap_codice_l1a_hi_pha"

-# Determine the start time of the packet
-met = packet.acq_start_seconds.data[0]

# Extract the data
# event_data = packet.event_data.data (Currently turned off, see TODO)

@@ -384,7 +382,7 @@

# Define coordinates
epoch = xr.DataArray(
-met_to_j2000ns([met]),
+packet.epoch,
name="epoch",
dims=["epoch"],
attrs=cdf_attrs.get_variable_attributes("epoch"),
@@ -426,10 +424,7 @@ def create_hskp_dataset(
cdf_attrs.add_global_attribute("Data_version", data_version)

epoch = xr.DataArray(
-met_to_j2000ns(
-    packet.shcoarse.data,
-    reference_epoch=np.datetime64("2010-01-01T00:01:06.184", "ns"),
-),
+packet.epoch,
name="epoch",
dims=["epoch"],
attrs=cdf_attrs.get_variable_attributes("epoch"),
@@ -493,10 +488,10 @@ def get_params(packet: xr.Dataset) -> tuple[int, int, int, int]:
view_id : int
Provides information about how data was collapsed and/or compressed.
"""
-table_id = packet.table_id.data[0]
-plan_id = packet.plan_id.data[0]
-plan_step = packet.plan_step.data[0]
-view_id = packet.view_id.data[0]
+table_id = int(packet.table_id.data)
+plan_id = int(packet.plan_id.data)
+plan_step = int(packet.plan_step.data)
+view_id = int(packet.view_id.data)

return table_id, plan_id, plan_step, view_id

@@ -534,9 +529,6 @@ def process_codice_l1a(file_path: Path, data_version: str) -> xr.Dataset:
dataset = create_event_dataset(apid, packet, data_version)

elif apid in constants.APIDS_FOR_SCIENCE_PROCESSING:
-# Determine the start time of the packet
-met = packet.acq_start_seconds.data[0]

# Extract the data
science_values = packet.data.data[0]

@@ -547,7 +539,7 @@
pipeline = CoDICEL1aPipeline(table_id, plan_id, plan_step, view_id)
pipeline.configure_data_products(apid)
pipeline.unpack_science_data(science_values)
-dataset = pipeline.create_science_dataset(met, data_version)
+dataset = pipeline.create_science_dataset(packet, data_version)

logger.info(f"\nFinal data product:\n{dataset}\n")

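Across the science, event, and housekeeping dataset builders in this file, the epoch coordinate is now taken directly from the packet's epoch values rather than converted from mission elapsed time with met_to_j2000ns. A minimal sketch of that shared pattern, assuming a decommutated packet dataset that already carries an epoch variable and a configured CDF attribute manager (the helper name below is hypothetical, not part of the commit):

import xarray as xr

def build_epoch_coordinate(packet: xr.Dataset, cdf_attrs) -> xr.DataArray:
    # Epoch values come straight from the packet dataset; no conversion
    # from mission elapsed time (MET) is performed here.
    return xr.DataArray(
        packet.epoch,
        name="epoch",
        dims=["epoch"],
        attrs=cdf_attrs.get_variable_attributes("epoch"),
    )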
74 changes: 37 additions & 37 deletions imap_processing/tests/codice/test_codice_l1a.py
@@ -34,23 +34,6 @@
(1, 1, 1, 128), # lo-nsw-species
(1, 128), # lo-pha
]
-EXPECTED_ARRAY_SIZES = [
-    129,  # hskp
-    1,  # hi-counters-aggregated
-    3,  # hi-counters-singles
-    8,  # hi-omni
-    4,  # hi-sectored
-    0,  # hi-pha
-    3,  # lo-counters-aggregated
-    3,  # lo-counters-singles
-    6,  # lo-sw-angular
-    3,  # lo-nsw-angular
-    7,  # lo-sw-priority
-    4,  # lo-nsw-priority
-    18,  # lo-sw-species
-    10,  # lo-nsw-species
-    0,  # lo-pha
-]
EXPECTED_LOGICAL_SOURCE = [
"imap_codice_l1a_hskp",
"imap_codice_l1a_hi-counters-aggregated",
@@ -68,6 +51,23 @@
"imap_codice_l1a_lo-nsw-species",
"imap_codice_l1a_lo-pha",
]
+EXPECTED_NUM_VARIABLES = [
+    129,  # hskp
+    1,  # hi-counters-aggregated
+    3,  # hi-counters-singles
+    8,  # hi-omni
+    4,  # hi-sectored
+    0,  # hi-pha
+    3,  # lo-counters-aggregated
+    3,  # lo-counters-singles
+    6,  # lo-sw-angular
+    3,  # lo-nsw-angular
+    7,  # lo-sw-priority
+    4,  # lo-nsw-priority
+    18,  # lo-sw-species
+    10,  # lo-nsw-species
+    0,  # lo-pha
+]


@pytest.fixture(params=TEST_PACKETS)
@@ -134,26 +134,6 @@ def test_l1a_data_array_shape(test_l1a_data: xr.Dataset, expected_shape: tuple):
assert dataset[variable].data.shape == expected_shape


-@pytest.mark.parametrize(
-    "test_l1a_data, expected_size",
-    list(zip(TEST_PACKETS, EXPECTED_ARRAY_SIZES)),
-    indirect=["test_l1a_data"],
-)
-def test_l1a_data_array_size(test_l1a_data: xr.Dataset, expected_size: int):
-    """Tests that the data arrays in the generated CDFs have the expected size.
-    Parameters
-    ----------
-    test_l1a_data : xarray.Dataset
-        A ``xarray`` dataset containing the test data
-    expected_size : int
-        The expected size of the data array
-    """

-    dataset = test_l1a_data
-    assert len(dataset) == expected_size


@pytest.mark.skip("Awaiting validation data")
@pytest.mark.parametrize(
"test_l1a_data, validation_data",
@@ -185,3 +165,23 @@ def test_l1a_data_array_values(test_l1a_data: xr.Dataset, validation_data: Path)
np.testing.assert_array_equal(
validation_data[variable].data, generated_dataset[variable].data[0]
)


+@pytest.mark.parametrize(
+    "test_l1a_data, expected_num_variables",
+    list(zip(TEST_PACKETS, EXPECTED_NUM_VARIABLES)),
+    indirect=["test_l1a_data"],
+)
+def test_l1a_num_variables(test_l1a_data: xr.Dataset, expected_num_variables: int):
+    """Tests that the data arrays in the generated CDFs have the expected size.
+    Parameters
+    ----------
+    test_l1a_data : xarray.Dataset
+        A ``xarray`` dataset containing the test data
+    expected_num_variables : int
+        The expected number of data variables in the CDF
+    """

+    dataset = test_l1a_data
+    assert len(dataset) == expected_num_variables

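The new test's assertion hinges on the fact that len() of an xarray.Dataset counts its data variables (coordinates excluded), which is what each entry in EXPECTED_NUM_VARIABLES encodes. A standalone sketch of that behavior, independent of the CoDICE fixtures (the toy variable names are invented for illustration):

import numpy as np
import xarray as xr

# Toy dataset: two data variables defined on an "epoch" coordinate.
ds = xr.Dataset(
    data_vars={
        "counts": ("epoch", np.zeros(3)),
        "rates": ("epoch", np.ones(3)),
    },
    coords={"epoch": np.arange(3)},
)

# len() on a Dataset returns the number of data variables, not coordinates.
assert len(ds) == 2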