Commit
tooling update (bye black)
AKuederle committed Mar 27, 2024
1 parent d7e1e15 commit ce60e9b
Showing 151 changed files with 1,453 additions and 1,495 deletions.
141 changes: 141 additions & 0 deletions .ruff.toml
@@ -0,0 +1,141 @@
line-length = 120
target-version = "py38"

[lint]
select = [
# pyflakes
"F",
# pycodestyle
"E",
"W",
# mccabe
"C90",
# isort
"I",
# pydocstyle
"D",
# pyupgrade
"UP",
# pep8-naming
"N",
# flake8-blind-except
"BLE",
# flake8-2020
"YTT",
# flake8-builtins
"A",
# flake8-comprehensions
"C4",
# flake8-debugger
"T10",
# flake8-errmsg
"EM",
# flake8-implicit-str-concat
"ISC",
# flake8-pytest-style
"PT",
# flake8-return
"RET",
# flake8-simplify
"SIM",
# flake8-unused-arguments
"ARG",
# pandas-vet
"PD",
# pygrep-hooks
"PGH",
# flake8-bugbear
"B",
# flake8-quotes
"Q",
# pylint
"PL",
# flake8-pie
"PIE",
# flake8-type-checking
"TCH",
# tryceratops
"TRY",
# flake8-use-pathlib
"PTH",
"RUF",
# Numpy rules
"NPY",
# Implicit namespace packages
"INP",
# No relative imports
"TID252",
# f-strings over string concatenation
"FLY",
# Annotations
# No enforced annotations
# "ANN"


]

ignore = [
# controversial
"B006",
# controversial
"B008",
"B010",
# Magic constants
"PLR2004",
# Strings in error messages
"EM101",
"EM102",
"EM103",
# Exception strings
"TRY003",
# Variables before return
"RET504",
# Abstract raise into inner function
"TRY301",
# df as variable name
"PD901",
# melt over stack
"PD013",
# No Any annotations
"ANN401",
# Self annotation
"ANN101",
# Too many arguments
"PLR0913",
# Class attribute shadows builtin
"A003",
# No typing for `cls`
"ANN102",
# Ignore because of formatting
"ISC001",
# Use type-checking block
"TCH001",
"TCH002",
"TCH003",
# No stacklevel
"B028",
# Overwriting loop variable
"PLW2901"

]


exclude = [
"doc/sphinxext/*.py",
"doc/build/*.py",
"doc/temp/*.py",
".eggs/*.py",
"example_data",
]


[lint.per-file-ignores]
# https://github.com/astral-sh/ruff/issues/8925
"examples/**/*.py" = ["D400"]


[lint.pydocstyle]
convention = "numpy"

[format]
docstring-code-format = true
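The config above makes ruff the single tool for both linting and formatting, replacing the previous black-based setup. Below is a minimal sketch, not part of this commit, of how the two ruff entry points could be driven from a task script in the same subprocess style as _tasks.py; it assumes ruff is installed in the project environment and picks up the .ruff.toml from the repository root automatically.

import subprocess
from pathlib import Path

HERE = Path(__file__).parent

# "ruff check" applies the lint rules selected above; "ruff format" replaces black.
subprocess.run(["ruff", "check", "--fix", str(HERE)], shell=False, check=True)
subprocess.run(["ruff", "format", str(HERE)], shell=False, check=True)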
22 changes: 11 additions & 11 deletions _tasks.py
@@ -1,5 +1,5 @@
import platform
import re
import shutil
import subprocess
import sys
from pathlib import Path
@@ -9,18 +9,18 @@
HERE = Path(__file__).parent


def task_docs():
def task_docs(clean=False, builder="html") -> None:
"""Build the html docs using Sphinx."""
# Delete Autogenerated files from previous run
# shutil.rmtree(str(HERE / "docs/modules/generated"), ignore_errors=True)
if clean:
shutil.rmtree(str(HERE / "docs/modules/generated"), ignore_errors=True)
shutil.rmtree(str(HERE / "docs/_build"), ignore_errors=True)
shutil.rmtree(str(HERE / "docs/auto_examples"), ignore_errors=True)

if platform.system() == "Windows":
subprocess.run([HERE / "docs/make.bat", "html"], shell=False, check=True)
else:
subprocess.run(["make", "-C", HERE / "docs", "html"], shell=False, check=True)
subprocess.run(f"sphinx-build -b {builder} -j auto -d docs/_build docs docs/_build/html", shell=True, check=True)


def update_version_strings(file_path, new_version):
def update_version_strings(file_path, new_version) -> None:
# taken from:
# https://stackoverflow.com/questions/57108712/replace-updated-version-strings-in-files-via-python
version_regex = re.compile(r"(^_*?version_*?\s*=\s*\")(\d+\.\d+\.\d+-?\S*)\"", re.M)
@@ -37,7 +37,7 @@ def update_version_strings(file_path, new_version):
f.truncate()


def update_version(version):
def update_version(version) -> None:
subprocess.run(["poetry", "version", version], shell=False, check=True)
new_version = (
subprocess.run(["poetry", "version"], shell=False, check=True, capture_output=True)
@@ -51,11 +51,11 @@ def update_version(version):
update_version_strings(HERE / "gaitmap_mad/gaitmap_mad/__init__.py", new_version)


def task_update_version():
def task_update_version() -> None:
update_version(sys.argv[1])


def task_bump_all_dev():
def task_bump_all_dev() -> None:
"""Bump all dev dependencies."""
pyproject = toml.load(HERE.joinpath("pyproject.toml"))
try:
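With the change above, task_docs only wipes the generated folders when asked to and hands the build to sphinx-build directly. A hedged usage sketch, not from this commit, assuming _tasks.py is importable from the repository root and keeping the defaults shown in the new signature:

from _tasks import task_docs

# Default: incremental HTML build.
task_docs()

# Wipe docs/_build, docs/auto_examples, and the generated API docs first.
task_docs(clean=True)

# Any other Sphinx builder can be passed through, e.g. the link checker.
task_docs(builder="linkcheck")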
13 changes: 7 additions & 6 deletions docs/conf.py
@@ -16,7 +16,7 @@
from datetime import datetime
from inspect import getsourcefile
from pathlib import Path
from typing import List
from typing import List, Optional

import toml
from sphinx_gallery.sorting import ExplicitOrder
@@ -32,7 +32,7 @@
def replace_gitlab_links(base_url, text):
regex = base_url + r"-/(merge_requests|issues|commit)/(\w+)"

def substitute(matchobj):
def substitute(matchobj) -> str:
tokens = {"merge_requests": "!", "issues": "#"}
if matchobj.group(1) == "commit":
return f"[mad-gitlab: {matchobj.group(2)[:5]}]({matchobj.group(0)})"
@@ -45,7 +45,7 @@ def substitute(matchobj):
def convert_github_links(base_url, text):
regex = base_url + r"(pull|issues|commit)/(\w+)"

def substitute(matchobj):
def substitute(matchobj) -> str:
if matchobj.group(1) == "commit":
return f"[{matchobj.group(2)[:5]}]({matchobj.group(0)})"
return f"[#{matchobj.group(2)}]({matchobj.group(0)})"
@@ -229,10 +229,11 @@ def get_nested_attr(obj, attr):
)


def skip_properties(app, what, name, obj, skip, options):
def skip_properties(app, what, name, obj, skip, options) -> Optional[bool]:
"""This removes all properties from the documentation as they are expected to be documented in the docstring."""
if isinstance(obj, property):
return True
return None


GAITMAP_MAD_TEST = """
@@ -244,7 +245,7 @@ def skip_properties(app, what, name, obj, skip, options):
"""


def add_info_about_origin(app, what, name, obj, options, lines: List[str]):
def add_info_about_origin(app, what, name, obj, options, lines: List[str]) -> None:
"""Add a short info text to all algorithms that are only available via gaitmap_mad."""
if what != "class":
return
@@ -259,6 +260,6 @@ def add_info_about_origin(app, what, name, obj, options, lines: List[str]):
lines.insert(2, l)


def setup(app):
def setup(app) -> None:
app.connect("autodoc-skip-member", skip_properties)
app.connect("autodoc-process-docstring", add_info_about_origin)
7 changes: 4 additions & 3 deletions docs/image_src/create_coordinate_definition_template_plots.py
@@ -32,8 +32,9 @@

colors = [docs_red, docs_green, docs_blue]


# helper to plot different coordinate frames
def plot_stride(data, column_names, sensor_id, stride_id, export_name):
def plot_stride(data, column_names, sensor_id, stride_id, export_name) -> None:
fig, axs = plt.subplots(2, figsize=(7, 7))
start = dtw.stride_list_[sensor_id].iloc[stride_id].start
end = dtw.stride_list_[sensor_id].iloc[stride_id].end
@@ -61,12 +62,12 @@ def plot_stride(data, column_names, sensor_id, stride_id, export_name):
fig.savefig(sensor_id + col[3:] + ".pdf", bbox_inches="tight")


#%%
# %%
# Plot "Stride-Template" in Sensor Frame
plot_stride(dataset_sf, SF_COLS, "left_sensor", 5, "left_sensor_sensor_frame_template.pdf")
plot_stride(dataset_sf, SF_COLS, "right_sensor", 18, "right_sensor_sensor_frame_template.pdf")

#%%
# %%
# Plot "Stride-Template" in Body Frame
plot_stride(dataset_bf, BF_COLS, "left_sensor", 5, "left_sensor_body_frame_template.pdf")
plot_stride(dataset_bf, BF_COLS, "right_sensor", 18, "right_sensor_body_frame_template.pdf")
17 changes: 8 additions & 9 deletions docs/sphinxext/githublink.py
@@ -30,12 +30,13 @@ def _linkcode_resolve(domain, info, package, url_fmt, revision):
This is called by sphinx.ext.linkcode
An example with a long-untouched module that everyone has
>>> _linkcode_resolve('py', {'module': 'tty',
... 'fullname': 'setraw'},
... package='tty',
... url_fmt='http://hg.python.org/cpython/file/'
... '{revision}/Lib/{package}/{path}#L{lineno}',
... revision='xxxx')
>>> _linkcode_resolve(
... "py",
... {"module": "tty", "fullname": "setraw"},
... package="tty",
... url_fmt="http://hg.python.org/cpython/file/" "{revision}/Lib/{package}/{path}#L{lineno}",
... revision="xxxx",
... )
'http://hg.python.org/cpython/file/xxxx/Lib/tty/tty.py#L18'
"""
if revision is None:
@@ -85,6 +86,4 @@ def make_linkcode_resolve(package, url_fmt):
'{path}#L{lineno}')
"""
revision = _get_git_revision()
return partial(
_linkcode_resolve, revision=revision, package=package, url_fmt=url_fmt
)
return partial(_linkcode_resolve, revision=revision, package=package, url_fmt=url_fmt)
19 changes: 8 additions & 11 deletions example_data/extract_example_data.py
@@ -73,10 +73,7 @@ def normalize(v: np.ndarray) -> np.ndarray:
If a 2D array is provided, each row is considered a vector, which is normalized independently.
"""
v = np.array(v)
if len(v.shape) == 1:
ax = 0
else:
ax = 1
ax = 0 if len(v.shape) == 1 else 1
return (v.T / np.linalg.norm(v, axis=ax)).T


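For reference, a small self-contained sketch (not part of the commit) that mirrors the refactored normalize above and shows what the one-line conditional does for 1D versus 2D input:

import numpy as np

def normalize(v: np.ndarray) -> np.ndarray:
    v = np.array(v)
    # 1D input: treat v as a single vector; 2D input: normalize each row independently.
    ax = 0 if len(v.shape) == 1 else 1
    return (v.T / np.linalg.norm(v, axis=ax)).T

print(normalize(np.array([3.0, 4.0])))                # [0.6 0.8]
print(normalize(np.array([[3.0, 4.0], [0.0, 2.0]])))  # [[0.6 0.8] [0. 1.]]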
@@ -130,8 +127,8 @@ def normalize(v: np.ndarray) -> np.ndarray:
rotation_from_angle(np.array([0, 0, 1]), np.deg2rad(-90))
* rotation_from_angle(np.array([1, 0, 0]), np.deg2rad(-90))
).inv()
rotations = dict(left_sensor=left_rot, right_sensor=right_rot)
test_df = test_df.rename(columns={"l_{}".format(sensor): "left_sensor", "r_{}".format(sensor): "right_sensor"})
rotations = {"left_sensor": left_rot, "right_sensor": right_rot}
test_df = test_df.rename(columns={f"l_{sensor}": "left_sensor", f"r_{sensor}": "right_sensor"})
test_df.columns = test_df.columns.set_names(("sensor", "axis"))
test_df.sort_index(axis=1).to_csv("./imu_sample_not_rotated.csv")

@@ -147,7 +144,7 @@ def normalize(v: np.ndarray) -> np.ndarray:
test_df.to_csv("./imu_sample.csv")

# Example events
test_events = test_borders = pd.read_csv(get_subject_mocap_folder(subject) / "{}_steps.csv".format(test), index_col=0)
test_events = test_borders = pd.read_csv(get_subject_mocap_folder(subject) / f"{test}_steps.csv", index_col=0)
test_events = test_events.rename(columns={"hs": "ic", "to": "tc", "ms": "min_vel"})
# convert to 204.8 Hz
test_events[["ic", "tc", "min_vel"]] *= 204.8 / 100
@@ -172,8 +169,8 @@ def normalize(v: np.ndarray) -> np.ndarray:
# Back to 100 Hz
test_events[["start", "end"]] *= 100 / 204.8

test_orientation = dict()
test_position = dict()
test_orientation = {}
test_position = {}
for sensor, short in [("left_sensor", "L"), ("right_sensor", "R")]:
normal_vectors = find_plane_from_points(
test_mocap[f"{short}_FCC"], test_mocap[f"{short}_TOE"], test_mocap[f"{short}_FM5"]
@@ -182,8 +179,8 @@ def normalize(v: np.ndarray) -> np.ndarray:
sidewards = np.cross(normal_vectors, forward_vector, axis=1)
rot_mat = np.hstack([forward_vector, sidewards, normal_vectors]).reshape((-1, 3, 3))
ori = pd.DataFrame(Rotation.from_matrix(rot_mat).inv().as_quat(), columns=["q_x", "q_y", "q_z", "q_w"])
ori_per_stride = dict()
pos_per_stride = dict()
ori_per_stride = {}
pos_per_stride = {}
for _, s in test_events[test_events["foot"] == sensor.split("_")[0]].iterrows():
ori_per_stride[s["s_id"]] = ori.iloc[int(s["start"]) : int(s["end"])].reset_index(drop=True)
pos = test_mocap[short + "_FCC"].iloc[int(s["start"]) : int(s["end"])].reset_index(drop=True)
1 change: 1 addition & 0 deletions examples/advanced_features/algo_serialize.py
@@ -17,6 +17,7 @@
reproducibility.
This means you should save the exact library version together with the json version of the used algorithms.
"""

from pprint import pprint

# %%
1 change: 0 additions & 1 deletion examples/advanced_features/caching.py
@@ -32,7 +32,6 @@
# ----------------
# We will simply copy the stride segmentation example to have some data to work with.
from gaitmap.example_data import get_healthy_example_imu_data
from gaitmap.stride_segmentation import BarthOriginalTemplate
from gaitmap.utils.coordinate_conversion import convert_to_fbf

data = get_healthy_example_imu_data().iloc[:2000]
1 change: 1 addition & 0 deletions examples/advanced_features/multi_process.py
@@ -30,6 +30,7 @@
Other Python helpers to spawn multiple processes will of course work as well.
"""

from pprint import pprint
from typing import Any, Dict
