diff --git a/CHANGELOG.rst b/CHANGELOG.rst
index 3f0520aa..46354684 100644
--- a/CHANGELOG.rst
+++ b/CHANGELOG.rst
@@ -9,8 +9,8 @@ CHANGELOG
 * Fix a bug related to the Probabilistic History Matching result reader. The shape of the result was wrong.
 * Add the heat transfer mechanism for fluid materials composed of a parcel of radiation and convection.
 * Changed the constructor of result metadata objects in ``alfasim_sdk.result_reader.aggregator`` from ``attr`` to ``dataclasses.dataclass`` to make them more easily integrated into the (de)serialization engine of popular packages such as ``pyserde`` and ``pydantic``.
-* Changed function ``read_global_sensitivity_coefficients`` to accept multiple keys so it can perform bulk reads without having to open the result file every time.
-* Added ``GlobalSensitivityAnalysisResults``, ``HistoryMatchingDeterministicResults`` and ``HistoryMatchingProbabilisticResults``, which are objects to read and interact with the Uncertainty Quantification analyses results in a more user-friendly way.
+* Changed the functions ``read_global_sensitivity_coefficients`` and ``read_uncertainty_propagation_results`` to accept multiple keys so they can perform bulk reads without having to open the result file every time.
+* Added ``GlobalSensitivityAnalysisResults``, ``HistoryMatchingDeterministicResults``, ``HistoryMatchingProbabilisticResults`` and ``UncertaintyPropagationResults``, which are objects to read and interact with the Uncertainty Quantification analyses results in a more user-friendly way.
 
 2024.2 (2024-09-10)
 ===================
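
Example: a minimal sketch of the bulk-read behaviour described in the changelog entry above for the uncertainty propagation reader, assuming an ``alfasim_sdk`` build that contains this change; the result directory path and the output keys below are illustrative, not taken from a real project:

    from pathlib import Path

    from alfasim_sdk.result_reader.aggregator import (
        UPOutputKey,
        read_uncertainty_propagation_analyses_meta_data,
        read_uncertainty_propagation_results,
    )

    # Hypothetical directory holding an Uncertainty Propagation result file.
    result_dir = Path("my_study.data/uncertainty_propagation")

    metadata = read_uncertainty_propagation_analyses_meta_data(result_directory=result_dir)
    if metadata is not None:
        # A single call now reads several outputs, so the result file is opened only once.
        results = read_uncertainty_propagation_results(
            metadata,
            result_keys=[
                UPOutputKey("absolute_pressure", "trend_id_1"),
                UPOutputKey("temperature", "trend_id_1"),
            ],
        )
        # Passing result_keys=None (the default) reads every entry found in the metadata.
        for key, up_result in results.items():
            print(key, up_result.mean_result, up_result.std_result)
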
- """ - - category: str = attr.ib( - validator=attr.validators.optional(attr.validators.instance_of(str)) - ) - unit: str = attr.ib( - validator=attr.validators.optional(attr.validators.instance_of(str)) - ) - realization_output: List[np.ndarray] = attr.ib(default=attr.Factory(List)) - std_result: np.ndarray = attr.ib(default=attr.Factory(lambda: np.array([]))) - mean_result: np.ndarray = attr.ib(default=attr.Factory(lambda: np.array([]))) - - MetadataClassType = TypeVar("MetadataClassType", bound=UQMetadataClass) @@ -1960,45 +1955,66 @@ def read_uncertainty_propagation_analyses_meta_data( ) +@attr.s(frozen=True) +class UPResult: + """ + Holder for each uncertainty propagation result. + """ + + realization_output: List[np.ndarray] = attr.ib(default=attr.Factory(List)) + std_result: np.ndarray = attr.ib(default=attr.Factory(lambda: np.array([]))) + mean_result: np.ndarray = attr.ib(default=attr.Factory(lambda: np.array([]))) + + def read_uncertainty_propagation_results( - metadata: UncertaintyPropagationAnalysesMetaData, results_key: str -) -> Optional[UPResult]: + metadata: UncertaintyPropagationAnalysesMetaData, + result_keys: Sequence[UPOutputKey] | None = None, +) -> dict[UPOutputKey, UPResult]: """ Get the uncertainty propagation results. :param metadata: The uncertainty propagation metadata previously read. - :param results_key: - The result key as follows: + :param result_keys: + A sequence of result key in the form of "@". If None, will read the + result of all entries found in the metadata. """ - meta = metadata.items.get(results_key) - if not meta: - return None - with open_result_file(metadata.result_directory) as file: + if file is None: + return {} + up_group = file[UNCERTAINTY_PROPAGATION_GROUP_NAME] realization_output_samples = up_group[ UNCERTAINTY_PROPAGATION_DSET_REALIZATION_OUTPUTS ] - realization_outputs = [ - realization_output_samples[sample_index][qoi_index] - for qoi_index, sample_index in meta.sample_indexes - ] - mean_result = up_group[UNCERTAINTY_PROPAGATION_DSET_MEAN_RESULT][ - meta.result_index - ] - std_result = up_group[UNCERTAINTY_PROPAGATION_DSET_STD_RESULT][ - meta.result_index - ] - return UPResult( - realization_output=realization_outputs, - mean_result=mean_result, - std_result=std_result, - category=meta.category, - unit=meta.unit, - ) + result_keys = result_keys if result_keys else list(metadata.items.keys()) + items_meta = { + r_key: meta + for r_key, meta in metadata.items.items() + if r_key in result_keys + } + + result: dict[UPOutputKey, UPResult] = {} + for key, meta in items_meta.items(): + realization_outputs = [ + realization_output_samples[sample_index][qoi_index] + for qoi_index, sample_index in meta.sample_indexes + ] + mean_result = up_group[UNCERTAINTY_PROPAGATION_DSET_MEAN_RESULT][ + meta.result_index + ] + std_result = up_group[UNCERTAINTY_PROPAGATION_DSET_STD_RESULT][ + meta.result_index + ] + result[key] = UPResult( + realization_output=realization_outputs, + mean_result=mean_result, + std_result=std_result, + ) + + return result def read_uq_time_set(result_directory: Path, group_name: str) -> Optional[numpy.array]: diff --git a/src/alfasim_sdk/result_reader/reader.py b/src/alfasim_sdk/result_reader/reader.py index 6e92d747..b682402c 100644 --- a/src/alfasim_sdk/result_reader/reader.py +++ b/src/alfasim_sdk/result_reader/reader.py @@ -36,11 +36,21 @@ from alfasim_sdk.result_reader.aggregator import read_profiles_domain_data from alfasim_sdk.result_reader.aggregator import read_time_sets from alfasim_sdk.result_reader.aggregator import 
diff --git a/src/alfasim_sdk/result_reader/reader.py b/src/alfasim_sdk/result_reader/reader.py
index 6e92d747..b682402c 100644
--- a/src/alfasim_sdk/result_reader/reader.py
+++ b/src/alfasim_sdk/result_reader/reader.py
@@ -36,11 +36,21 @@
 from alfasim_sdk.result_reader.aggregator import read_profiles_domain_data
 from alfasim_sdk.result_reader.aggregator import read_time_sets
 from alfasim_sdk.result_reader.aggregator import read_trends_data
+from alfasim_sdk.result_reader.aggregator import (
+    read_uncertainty_propagation_analyses_meta_data,
+)
+from alfasim_sdk.result_reader.aggregator import read_uncertainty_propagation_results
 from alfasim_sdk.result_reader.aggregator import read_uq_time_set
+from alfasim_sdk.result_reader.aggregator import UncertaintyPropagationAnalysesMetaData
+from alfasim_sdk.result_reader.aggregator import UPOutputKey
+from alfasim_sdk.result_reader.aggregator import UPResult
 from alfasim_sdk.result_reader.aggregator_constants import (
     GLOBAL_SENSITIVITY_ANALYSIS_GROUP_NAME,
 )
 from alfasim_sdk.result_reader.aggregator_constants import RESULTS_FOLDER_NAME
+from alfasim_sdk.result_reader.aggregator_constants import (
+    UNCERTAINTY_PROPAGATION_GROUP_NAME,
+)
 
 
 @define(frozen=True)
@@ -448,3 +458,26 @@ def _read_curves_data(
         domain = Array("time", raw_curve[1], info.domain_unit)
         result[curve_id] = (info, Curve(image, domain))
     return result
+
+
+@define(frozen=True)
+class UncertaintyPropagationResults:
+    timeset: np.ndarray = attr.field(validator=attr.validators.min_len(1))
+    results: dict[UPOutputKey, UPResult] = attr.field(
+        validator=_non_empty_dict_validator(UPResult)
+    )
+    metadata: UncertaintyPropagationAnalysesMetaData = attr.field(
+        validator=_non_empty_attr_validator("items")
+    )
+
+    @classmethod
+    def from_directory(cls, result_dir: Path) -> Self | None:
+        metadata = read_uncertainty_propagation_analyses_meta_data(result_dir)
+        if metadata is None:
+            return None
+
+        return cls(
+            timeset=read_uq_time_set(result_dir, UNCERTAINTY_PROPAGATION_GROUP_NAME),
+            results=read_uncertainty_propagation_results(metadata),
+            metadata=metadata,
+        )
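
Example: a minimal sketch of the higher-level ``UncertaintyPropagationResults`` reader added above, assuming a directory produced by an uncertainty propagation analysis; the path and the output key are illustrative:

    from pathlib import Path

    from alfasim_sdk.result_reader.aggregator import UPOutputKey
    from alfasim_sdk.result_reader.reader import UncertaintyPropagationResults

    up_results_dir = Path("my_study.data/uncertainty_propagation")  # hypothetical path

    results = UncertaintyPropagationResults.from_directory(up_results_dir)
    if results is None:
        # No metadata was found, e.g. the analysis has not produced a result file yet.
        print("no uncertainty propagation results available")
    else:
        key = UPOutputKey(property_name="absolute_pressure", element_name="trend_id_1")
        up_result = results.results[key]
        # Each entry holds the per-realization outputs plus the mean and standard
        # deviation curves; the analysis time set is exposed on the reader itself.
        print(results.timeset, up_result.mean_result, up_result.std_result)
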
diff --git a/tests/results/test_aggregator.py b/tests/results/test_aggregator.py
index 7e1f20ea..87c8197a 100644
--- a/tests/results/test_aggregator.py
+++ b/tests/results/test_aggregator.py
@@ -40,6 +40,7 @@
 )
 from alfasim_sdk.result_reader.aggregator import ResultsNeedFullReloadError
 from alfasim_sdk.result_reader.aggregator import TimeSetInfoItem
+from alfasim_sdk.result_reader.aggregator import UPOutputKey
 from alfasim_sdk.result_reader.aggregator_constants import (
     GLOBAL_SENSITIVITY_ANALYSIS_GROUP_NAME,
 )
@@ -597,22 +598,17 @@ def test_read_uncertainty_propagation_results(
     empty_metadata = read_uncertainty_propagation_analyses_meta_data(
         result_directory=datadir
     )
-    assert empty_metadata.items == {}
-    result = read_uncertainty_propagation_results(
-        metadata=empty_metadata, results_key="absolute_pressure@trend_id_1"
-    )
-    assert result is None
+    assert empty_metadata is None
 
     metadata = read_uncertainty_propagation_analyses_meta_data(
         result_directory=up_results_dir
     )
-    assert list(metadata.items.keys()) == [
-        "temperature@trend_id_1",
-        "absolute_pressure@trend_id_1",
-    ]
+    temp_key = UPOutputKey("temperature", "trend_id_1")
+    pressure_key = UPOutputKey("absolute_pressure", "trend_id_1")
+    assert list(metadata.items.keys()) == [temp_key, pressure_key]
 
-    assert metadata.items["temperature@trend_id_1"].result_index == 0
-    assert metadata.items["temperature@trend_id_1"].sample_indexes == [
+    assert metadata.items[temp_key].result_index == 0
+    assert metadata.items[temp_key].sample_indexes == [
         [0, 0],
         [0, 1],
         [0, 2],
@@ -620,8 +616,8 @@
         [0, 4],
     ]
 
-    assert metadata.items["absolute_pressure@trend_id_1"].result_index == 1
-    assert metadata.items["absolute_pressure@trend_id_1"].sample_indexes == [
+    assert metadata.items[pressure_key].result_index == 1
+    assert metadata.items[pressure_key].sample_indexes == [
         [1, 0],
         [1, 1],
         [1, 2],
@@ -630,22 +626,25 @@
     ]
 
     result = read_uncertainty_propagation_results(
-        metadata=metadata, results_key="temperature@trend_id_1"
+        metadata=metadata, result_keys=[temp_key]
     )
+    assert len(result) == 1
     dict_1 = {
-        "sample_0": result.realization_output[0],
-        "sample_1": result.realization_output[-1],
-        "mean_result": result.mean_result,
-        "std_result": result.std_result,
+        "sample_0": result[temp_key].realization_output[0],
+        "sample_1": result[temp_key].realization_output[-1],
+        "mean_result": result[temp_key].mean_result,
+        "std_result": result[temp_key].std_result,
     }
-    num_regression.check(dict_1, basename="temperature@trend_id_1")
+    num_regression.check(dict_1, basename=str(temp_key))
+
     result = read_uncertainty_propagation_results(
-        metadata=metadata, results_key="absolute_pressure@trend_id_1"
+        metadata=metadata, result_keys=[pressure_key]
     )
+    assert len(result) == 1
     dict_2 = {
-        "sample_0": result.realization_output[0],
-        "sample_1": result.realization_output[-1],
-        "mean_result": result.mean_result,
-        "std_result": result.std_result,
+        "sample_0": result[pressure_key].realization_output[0],
+        "sample_1": result[pressure_key].realization_output[-1],
+        "mean_result": result[pressure_key].mean_result,
+        "std_result": result[pressure_key].std_result,
     }
-    num_regression.check(dict_2, basename="absolute_pressure@trend_id_1")
+    num_regression.check(dict_2, basename=str(pressure_key))
network_element_name="Conexao 1", + position=100.0, + position_unit="m", + unit="K", + samples=5, + result_index=0, + sample_indexes=[[0, 0], [0, 1], [0, 2], [0, 3], [0, 4]], + ) + + # Ensure the reader can handle a nonexistent result file. + reader = UncertaintyPropagationResults.from_directory(Path("foo")) + assert reader is None