From b84fc2c7cf585e46b5da7ce0215aa6b451eab263 Mon Sep 17 00:00:00 2001
From: Anirban Chaudhuri <75496534+anirban-chaudhuri@users.noreply.github.com>
Date: Mon, 15 Apr 2024 15:59:40 -0400
Subject: [PATCH] lint

---
 pyciemss/integration_utils/result_processing.py | 12 +++++++-----
 pyciemss/interfaces.py                          |  4 +++-
 2 files changed, 10 insertions(+), 6 deletions(-)

diff --git a/pyciemss/integration_utils/result_processing.py b/pyciemss/integration_utils/result_processing.py
index 2b34a9319..918882662 100644
--- a/pyciemss/integration_utils/result_processing.py
+++ b/pyciemss/integration_utils/result_processing.py
@@ -294,11 +294,11 @@ def cdc_format(
     )
     # Number of days for which data is available
     number_data_days = max(
-        q_ensemble_data[q_ensemble_data["Forecast_Backcast"].str.contains("Backcast")][
-            f"number_{time_unit}"
-        ]
+        q_ensemble_data[
+            q_ensemble_data["Forecast_Backcast"].str.contains("Backcast")
+        ][f"number_{time_unit}"]
     )
-    drop_column_names.extend(["Forecast_Backcast"])
+    drop_column_names.extend(["Forecast_Backcast"])
     # Subtracting number of backast days from number_days
     q_ensemble_data[f"number_{time_unit}"] = (
         q_ensemble_data[f"number_{time_unit}"] - number_data_days
@@ -311,7 +311,9 @@ def cdc_format(
     if solution_string_mapping:
         # Drop rows that are not present in the solution_string_mapping keys
         q_ensemble_data = q_ensemble_data[
-            q_ensemble_data["output"].str.contains("|".join(solution_string_mapping.keys()))
+            q_ensemble_data["output"].str.contains(
+                "|".join(solution_string_mapping.keys())
+            )
         ]
         for k, v in solution_string_mapping.items():
             q_ensemble_data["output"] = q_ensemble_data["output"].replace(k, v)
diff --git a/pyciemss/interfaces.py b/pyciemss/interfaces.py
index 33adb90a4..301842cb8 100644
--- a/pyciemss/interfaces.py
+++ b/pyciemss/interfaces.py
@@ -149,7 +149,9 @@ def ensemble_sample(
     # logging_times = torch.arange(
     #     start_time + logging_step_size, end_time, logging_step_size
     # )
-    logging_times = torch.arange(start_time, end_time + logging_step_size, logging_step_size)
+    logging_times = torch.arange(
+        start_time, end_time + logging_step_size, logging_step_size
+    )
 
     # Check that num_samples is a positive integer
     if not (isinstance(num_samples, int) and num_samples > 0):