Skip to content

Commit

Permalink
clean codes
Browse files Browse the repository at this point in the history
  • Loading branch information
Beforerr committed Jun 1, 2024
1 parent 8a4175d commit 012924e
Show file tree
Hide file tree
Showing 26 changed files with 92 additions and 806 deletions.
2 changes: 1 addition & 1 deletion .envrc
Original file line number Diff line number Diff line change
@@ -1,3 +1,3 @@
watch_file pixi.lock
watch_file pyproject.toml
eval "$(pixi shell-hook)"
export PATH="$PATH:$HOME/Library/TinyTeX/bin/universal-darwin"
1 change: 1 addition & 0 deletions .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -178,5 +178,6 @@ checklink/cookies.txt
/.luarc.json
# pixi environments
.pixi
pixi.lock
*.egg-info

19 changes: 3 additions & 16 deletions discontinuitypy/__init__.py
Original file line number Diff line number Diff line change
@@ -1,21 +1,8 @@
__version__ = "0.1.0"
# AUTOGENERATED! DO NOT EDIT! File to edit: ../notebooks/__init__.ipynb.

# %% auto 0
__all__ = ['ROOT_DIR']
__all__ = []

# %% ../notebooks/__init__.ipynb 1
__version__ = "0.0.1"

# %% ../notebooks/__init__.ipynb 2
from .datasets import IDsDataset

# %% ../notebooks/__init__.ipynb 3
from pathlib import Path

# %% ../notebooks/__init__.ipynb 4
#| eval: false
ROOT_DIR = Path(__file__).parent.parent

# %% ../notebooks/__init__.ipynb 7
# from discontinuitypy.utils.basic import load_params
# PARAMS = load_params()
from .datasets import IdsEvents, IDsDataset
34 changes: 2 additions & 32 deletions discontinuitypy/_modidx.py
Original file line number Diff line number Diff line change
Expand Up @@ -202,9 +202,7 @@
'discontinuitypy/utils/analysis.py'),
'discontinuitypy.utils.analysis.n2_normalize': ( 'utils/analysis_utils.html#n2_normalize',
'discontinuitypy/utils/analysis.py')},
'discontinuitypy.utils.basic': { 'discontinuitypy.utils.basic.DataConfig': ( 'utils/basic.html#dataconfig',
'discontinuitypy/utils/basic.py'),
'discontinuitypy.utils.basic._expand_selectors': ( 'utils/basic.html#_expand_selectors',
'discontinuitypy.utils.basic': { 'discontinuitypy.utils.basic._expand_selectors': ( 'utils/basic.html#_expand_selectors',
'discontinuitypy/utils/basic.py'),
'discontinuitypy.utils.basic.calc_vec_mag': ( 'utils/basic.html#calc_vec_mag',
'discontinuitypy/utils/basic.py'),
Expand Down Expand Up @@ -232,39 +230,11 @@
'discontinuitypy/utils/basic.py'),
'discontinuitypy.utils.basic.pl.DataFrame.plot': ( 'utils/basic.html#pl.dataframe.plot',
'discontinuitypy/utils/basic.py'),
'discontinuitypy.utils.basic.pl_norm': ( 'utils/basic.html#pl_norm',
'discontinuitypy/utils/basic.py'),
'discontinuitypy.utils.basic.resample': ( 'utils/basic.html#resample',
'discontinuitypy/utils/basic.py')},
'discontinuitypy.utils.kedro': { 'discontinuitypy.utils.kedro.load_context': ( 'utils/kedro.html#load_context',
'discontinuitypy/utils/kedro.py')},
'discontinuitypy.utils.lbl': { 'discontinuitypy.utils.lbl.LblDataset': ( 'utils/lbl.html#lbldataset',
'discontinuitypy/utils/lbl.py'),
'discontinuitypy.utils.lbl.LblDataset.__init__': ( 'utils/lbl.html#lbldataset.__init__',
'discontinuitypy/utils/lbl.py'),
'discontinuitypy.utils.lbl.LblDataset._describe': ( 'utils/lbl.html#lbldataset._describe',
'discontinuitypy/utils/lbl.py'),
'discontinuitypy.utils.lbl.LblDataset._load': ( 'utils/lbl.html#lbldataset._load',
'discontinuitypy/utils/lbl.py'),
'discontinuitypy.utils.lbl.LblDataset._save': ( 'utils/lbl.html#lbldataset._save',
'discontinuitypy/utils/lbl.py'),
'discontinuitypy.utils.lbl.load_lbl': ( 'utils/lbl.html#load_lbl',
'discontinuitypy/utils/lbl.py')},
'discontinuitypy.utils.plot': { 'discontinuitypy.utils.plot.plot_candidate': ( 'utils/plotting.html#plot_candidate',
'discontinuitypy/utils/plot.py'),
'discontinuitypy.utils.plot.setup_mva_plot': ( 'utils/plotting.html#setup_mva_plot',
'discontinuitypy/utils/plot.py'),
'discontinuitypy.utils.plot.time_stamp': ( 'utils/plotting.html#time_stamp',
'discontinuitypy/utils/plot.py')},
'discontinuitypy.utils.polars': { 'discontinuitypy.utils.polars._expand_selectors': ( 'utils/polars.html#_expand_selectors',
'discontinuitypy/utils/polars.py'),
'discontinuitypy.utils.polars.convert_to_pd_dataframe': ( 'utils/polars.html#convert_to_pd_dataframe',
'discontinuitypy/utils/polars.py'),
'discontinuitypy.utils.polars.create_partitions': ( 'utils/polars.html#create_partitions',
'discontinuitypy/utils/polars.py'),
'discontinuitypy.utils.polars.decompose_vector': ( 'utils/polars.html#decompose_vector',
'discontinuitypy/utils/polars.py'),
'discontinuitypy.utils.polars.pl_norm': ( 'utils/polars.html#pl_norm',
'discontinuitypy/utils/polars.py'),
'discontinuitypy.utils.polars.sort': ( 'utils/polars.html#sort',
'discontinuitypy/utils/polars.py')}}}
'discontinuitypy/utils/plot.py')}}}
4 changes: 2 additions & 2 deletions discontinuitypy/core/propeties.py
Original file line number Diff line number Diff line change
Expand Up @@ -246,7 +246,7 @@ def calc_normal_direction(data, name="normal_direction", **kwargs):
)

# %% ../../notebooks/02_ids_properties.ipynb 21
from ..utils.polars import convert_to_pd_dataframe, decompose_vector # noqa: E402
from beforerr.polars import convert_to_pd_dataframe, decompose_vector

# %% ../../notebooks/02_ids_properties.ipynb 22
from typing import Literal
Expand All @@ -262,7 +262,7 @@ def process_events(
) -> pl.DataFrame:
"Process candidates DataFrame"

candidates = convert_to_pd_dataframe(candidates_pl, modin=modin)
candidates = pd.DataFrame(convert_to_pd_dataframe(candidates_pl, modin=modin))

if method == "fit":
duration_method = "distance"
Expand Down
3 changes: 1 addition & 2 deletions discontinuitypy/detection/variance.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,8 +11,7 @@
from datetime import timedelta

# %% ../../notebooks/detection/01_variance.ipynb 4
from ..utils.polars import pl_norm

from beforerr.polars import pl_norm

def compute_std(
df: pl.LazyFrame,
Expand Down
2 changes: 1 addition & 1 deletion discontinuitypy/integration.py
Original file line number Diff line number Diff line change
Expand Up @@ -238,7 +238,7 @@ def calc_plasma_parameter_change(
)

# %% ../notebooks/03_mag_plasma.ipynb 15
from .utils.polars import decompose_vector
from beforerr.polars import decompose_vector
from space_analysis.ds.meta import PlasmaMeta

J_FACTOR = ((u.nT / u.s) * (1 / mu0 / (u.km / u.s))).to(u.nA / u.m**2)
Expand Down
58 changes: 13 additions & 45 deletions discontinuitypy/utils/basic.py
Original file line number Diff line number Diff line change
@@ -1,13 +1,11 @@
# AUTOGENERATED! DO NOT EDIT! File to edit: ../../notebooks/utils/00_basic.ipynb.

# %% auto 0
__all__ = ['DF_TYPE', 'DataConfig', 'filter_tranges', 'filter_tranges_df', 'pl_norm', 'partition_data_by_ts',
'partition_data_by_year', 'partition_data_by_year_month', 'partition_data_by_time', 'concat_df',
'concat_partitions', 'format_timedelta', 'resample', 'df2ts', 'calc_vec_mag', 'check_fgm']
__all__ = ['DF_TYPE', 'filter_tranges', 'filter_tranges_df', 'partition_data_by_ts', 'partition_data_by_year',
'partition_data_by_year_month', 'partition_data_by_time', 'concat_df', 'concat_partitions',
'format_timedelta', 'resample', 'df2ts', 'calc_vec_mag', 'check_fgm']

# %% ../../notebooks/utils/00_basic.ipynb 1
from functools import partial

from typing import overload

# %% ../../notebooks/utils/00_basic.ipynb 3
Expand All @@ -30,25 +28,10 @@
from typing import Any, Dict


# %% ../../notebooks/utils/00_basic.ipynb 4
from beforerr.basics import pmap

# %% ../../notebooks/utils/00_basic.ipynb 7
from pydantic import BaseModel
from datetime import datetime, timedelta
from pandas import Timedelta

class DataConfig(BaseModel):
    """Configuration for a satellite data request window.

    All fields are optional. The original annotations (`str = None`, etc.) used
    implicit Optional, which is disallowed by PEP 484 and rejected by pydantic v2;
    the `T | None` form makes the intent explicit without changing accepted values.
    """

    sat_id: str | None = None        # satellite identifier
    start: datetime | None = None    # start of the time range
    end: datetime | None = None      # end of the time range
    ts: timedelta | None = None      # time resolution / sampling step — assumed; TODO confirm against callers
    coord: str | None = None         # coordinate system name

# %% ../../notebooks/utils/00_basic.ipynb 9
# %% ../../notebooks/utils/00_basic.ipynb 6
from fastcore.utils import patch

# %% ../../notebooks/utils/00_basic.ipynb 10
# %% ../../notebooks/utils/00_basic.ipynb 7
def filter_tranges(time: pl.Series, tranges: Tuple[list, list]):
"""
- Filter data by time ranges, return the indices of the time that are in the time ranges (left inclusive, right exclusive)
Expand Down Expand Up @@ -76,12 +59,12 @@ def filter_tranges_df(df: pl.DataFrame, tranges: Tuple[list, list], time_col: st
filtered_indices = filter_tranges(time, tranges)
return df[filtered_indices]

# %% ../../notebooks/utils/00_basic.ipynb 11
# %% ../../notebooks/utils/00_basic.ipynb 8
@patch
def plot(self: pl.DataFrame, *args, **kwargs):
    """Plot this polars DataFrame by delegating to pandas' ``DataFrame.plot``.

    All positional and keyword arguments are forwarded unchanged.
    """
    as_pandas = self.to_pandas()
    return as_pandas.plot(*args, **kwargs)

# %% ../../notebooks/utils/00_basic.ipynb 12
# %% ../../notebooks/utils/00_basic.ipynb 9
def _expand_selectors(items: Any, *more_items: Any) -> list[Any]:
"""
See `_expand_selectors` in `polars`.
Expand All @@ -98,22 +81,7 @@ def _expand_selectors(items: Any, *more_items: Any) -> list[Any]:
expanded.append(item)
return expanded

def pl_norm(columns, *more_columns) -> pl.Expr:
    """Build an expression for the Euclidean norm of the given columns.

    Args:
        columns: Column name(s) or selector(s) to include.
        *more_columns: Additional column names or selectors.

    Returns:
        pl.Expr: The square root of the sum of squared column values.
    """
    selected = _expand_selectors(columns, *more_columns)
    sum_of_squares = sum(pl.col(name).pow(2) for name in selected)
    return sum_of_squares.sqrt()

# %% ../../notebooks/utils/00_basic.ipynb 14
# %% ../../notebooks/utils/00_basic.ipynb 11
def partition_data_by_ts(df: pl.DataFrame, ts: timedelta) -> Dict[str, pl.DataFrame]:
"""Partition the dataset by time
Expand Down Expand Up @@ -179,7 +147,7 @@ def partition_data_by_time(df: pl.LazyFrame | pl.DataFrame, method) -> Dict[str,
ts = pd.Timedelta(method)
return partition_data_by_ts(df, ts)

# %% ../../notebooks/utils/00_basic.ipynb 15
# %% ../../notebooks/utils/00_basic.ipynb 12
DF_TYPE = Union[pl.DataFrame, pl.LazyFrame, pd.DataFrame]
def concat_df(dfs: list[DF_TYPE]) -> DF_TYPE:
"""Concatenate a list of DataFrames into one DataFrame.
Expand Down Expand Up @@ -208,7 +176,7 @@ def concat_partitions(partitioned_input: Dict[str, Callable]):
result = concat_df(partitions_data)
return result

# %% ../../notebooks/utils/00_basic.ipynb 17
# %% ../../notebooks/utils/00_basic.ipynb 14
def format_timedelta(time):
"""Format timedelta to `timedelta`"""
if isinstance(time, timedelta):
Expand All @@ -220,7 +188,7 @@ def format_timedelta(time):
else:
raise TypeError(f"Unsupported type: {type(time)}")

# %% ../../notebooks/utils/00_basic.ipynb 18
# %% ../../notebooks/utils/00_basic.ipynb 15
@overload
def resample(
df: pl.DataFrame,
Expand Down Expand Up @@ -263,7 +231,7 @@ def resample(
.with_columns((pl.col(time_column) + shift))
)

# %% ../../notebooks/utils/00_basic.ipynb 19
# %% ../../notebooks/utils/00_basic.ipynb 16
def df2ts(
df: Union[pandas.DataFrame, pl.DataFrame, pl.LazyFrame],
cols=None,
Expand Down Expand Up @@ -303,7 +271,7 @@ def df2ts(
def calc_vec_mag(vec) -> DataArray:
    """Return the magnitude of *vec*, reducing over its ``v_dim`` dimension."""
    magnitude = linalg.norm(vec, dims="v_dim")
    return magnitude

# %% ../../notebooks/utils/00_basic.ipynb 20
# %% ../../notebooks/utils/00_basic.ipynb 17
def check_fgm(vec: xr.DataArray):
# check if time is monotonic increasing
logger.info("Check if time is monotonic increasing")
Expand Down
30 changes: 0 additions & 30 deletions discontinuitypy/utils/kedro.py

This file was deleted.

75 changes: 0 additions & 75 deletions discontinuitypy/utils/lbl.py

This file was deleted.

Loading

0 comments on commit 012924e

Please sign in to comment.