Commit

Merge pull request #52 from MetaSys-LISBP/dev
Dev
llegregam committed Mar 4, 2024
2 parents 4979be3 + 187270c commit 0fc192b
Showing 3 changed files with 883 additions and 886 deletions.
71 changes: 36 additions & 35 deletions physiofit/base/io.py
@@ -11,9 +11,6 @@
import matplotlib.pyplot as plt
import numpy as np

# Switch matplotlib logger to higher level to not get debug logs in root logger
logging.getLogger("matplotlib").setLevel(logging.WARNING)

from matplotlib.backends.backend_pdf import PdfPages
from pandas import DataFrame, read_csv, concat
import yaml
@@ -22,9 +19,13 @@
from physiofit.base.fitter import PhysioFitter
from physiofit.models.base_model import StandardDevs, Bounds

# Switch matplotlib logger to higher level to not get debug logs in root logger
logging.getLogger("matplotlib").setLevel(logging.WARNING)

logger = logging.getLogger(__name__)
logger.setLevel(logging.DEBUG)


class IoHandler:
"""
Input/Output class that handles the former and initializes the
@@ -68,7 +69,7 @@ def read_data(data: str) -> DataFrame:
try:
if isinstance(data, str):
data_path = Path(data).resolve()
# .dat file type for galaxy implementation
# .dat file type for galaxy implementation
if data_path.suffix in [".txt", ".tsv", ".dat"]:
data = read_csv(str(data_path), sep="\t")
elif data_path.suffix == ".csv":
@@ -129,7 +130,6 @@ def read_model(self, model_file):

return model_class


@staticmethod
def _verify_data(data: DataFrame):
"""
@@ -163,8 +163,11 @@ def _verify_data(data: DataFrame):
)
if all(data[x].isnull()) or all(data[x].isna()):
raise ValueError(
f"The column {x} contains only null or NA values"
)
f"The column {x} contains only null or NA values"
)

# To avoid errors when concatenating dataframes for the final summary
data["experiments"] = data["experiments"].str.replace(pat=" ", repl="_")

@staticmethod
def get_model_list():
@@ -182,7 +185,7 @@ def get_model_list():
model = model_class(df)
print(model.model_name)
return

def get_models(self, data=None):
"""
Read modules containing the different models and add them to models attribute
@@ -237,7 +240,7 @@ def read_yaml(yaml_file: str | bytes) -> ConfigParser:
raise IOError(
f"Error while reading yaml configuration file {yaml_file}. "
f"\nTraceback:\n\n{e}"
)
)
return config_parser

def initialize_fitter(self, data: pd.DataFrame, **kwargs) -> PhysioFitter:
@@ -263,11 +266,11 @@ def initialize_fitter(self, data: pd.DataFrame, **kwargs) -> PhysioFitter:

if "sd" not in kwargs:
fitter.sd.update(
{"X" : 0.2}
{"X": 0.2}
)
for col in self.data.columns[2:]:
fitter.sd.update(
{col : 0.2}
{col: 0.2}
)

fitter.initialize_sd_matrix()
@@ -278,7 +281,6 @@ def initialize_fitter(self, data: pd.DataFrame, **kwargs) -> PhysioFitter:

return fitter


def output_pdf(self, fitter: PhysioFitter, export_path: str | Path = None):
"""
Handle the creation and output of a pdf file containing fit results as
@@ -340,13 +342,13 @@ def output_recap(self, export_path: str, galaxy=False):
else:
final_df.to_csv(f"{str(Path(export_path))}/summary.csv")


def output_report(self, fitter, export_path: str |list = None):
def output_report(self, fitter, export_path: str | list = None):
"""
Handle creation and export of the report containing stats from monte
carlo analysis of optimization parameters
:param export_paths: list of paths to export the stats and fluxes. [0]
:param fitter: PhysioFitter object containing results from the optimization of parameters
:param export_path: list of paths to export the stats and fluxes. [0]
is for stats and [1] for fluxes.
"""

@@ -456,6 +458,7 @@ def plot_data(self, fitter, display: bool = False):
"""
Plot the data
:param fitter: PhysioFitter object after optimization of parameters has been executed
:param display: should plots be displayed
"""

@@ -533,7 +536,6 @@ def _add_sd_area(self, element: str, ax: plt.Axes):


class ConfigParser:

allowed_keys = ["model", "sds", "mc", "iterations"]

def __init__(
@@ -561,7 +563,6 @@ def __init__(
f"Number of iterations must be an integer: Detected input: {self.mc}, type: {type(self.iterations)}"
)


@classmethod
def from_file(cls, yaml_file):

@@ -581,9 +582,9 @@ def from_file(cls, yaml_file):
return ConfigParser(
path_to_data=data["path_to_data"],
selected_model=data["model"],
sds = data["sds"],
mc = data["mc"],
iterations = data["iterations"]
sds=data["sds"],
mc=data["mc"],
iterations=data["iterations"]
)
except KeyError:
return ConfigParser(
@@ -598,13 +599,13 @@ def from_galaxy(cls, galaxy_yaml):
pass

def get_kwargs(self):
return {
"path_to_data" : self.path_to_data,
"model" : self.model,
"mc" : self.mc,
"iterations" : self.iterations,
"sd" : self.sds
}
return {
"path_to_data": self.path_to_data,
"model": self.model,
"mc": self.mc,
"iterations": self.iterations,
"sd": self.sds
}

def update_model(self, model):

@@ -618,15 +619,15 @@ def export_config(self, export_path):

with open(fr"{export_path}/config_file.yml", "w") as file:
data = {
"model" : {
"model_name" : self.model.model_name,
"parameters_to_estimate" : self.model.parameters_to_estimate,
"bounds" : {name : f"{bounds[0], bounds[1]}" for name, bounds in self.model.bounds.items()}
"model": {
"model_name": self.model.model_name,
"parameters_to_estimate": self.model.parameters_to_estimate,
"bounds": {name: f"{bounds[0], bounds[1]}" for name, bounds in self.model.bounds.items()}
},
"sds" : dict(self.sds),
"mc" : self.mc,
"iterations" : self.iterations,
"path_to_data" : str(self.path_to_data)
"sds": dict(self.sds),
"mc": self.mc,
"iterations": self.iterations,
"path_to_data": str(self.path_to_data)
}
yaml.safe_dump(
data,
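
For orientation, here is a minimal, hypothetical usage sketch of IoHandler as it stands after this commit. The data file name, the no-argument IoHandler() constructor and the bare call to initialize_fitter() are illustrative assumptions, not part of the changeset.

from physiofit.base.io import IoHandler

# read_data() is a staticmethod: .txt, .tsv and .dat files are parsed as
# tab-separated, .csv as comma-separated, and a pandas DataFrame is returned.
data = IoHandler.read_data("data.tsv")  # hypothetical path

io_handler = IoHandler()  # assumed no-argument constructor
# If no "sd" keyword is supplied, initialize_fitter() falls back to a standard
# deviation of 0.2 for "X" and for every data column after the first two.
fitter = io_handler.initialize_fitter(data)  # further kwargs may be required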
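
Similarly, a sketch of the ConfigParser round-trip implemented by from_file() and get_kwargs(); the YAML path is an assumption, and the file is expected to carry the keys visible in export_config() above (model, sds, mc, iterations, path_to_data).

from physiofit.base.io import ConfigParser

# from_file() loads the YAML run configuration and instantiates the parser;
# get_kwargs() re-exposes its contents as keyword arguments
# (path_to_data, model, mc, iterations, sd).
config = ConfigParser.from_file("config_file.yml")  # hypothetical path
print(config.get_kwargs())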