diff --git a/docs/conf.py b/docs/conf.py
index abfc132a..d71d365d 100644
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -95,7 +95,7 @@ def substitute(matchobj):
     mathjax_path = ""
 else:
     extensions.append("sphinx.ext.mathjax")
-    mathjax_path = "https://cdn.jsdelivr.net/npm/mathjax@3/es5/" "tex-chtml.js"
+    mathjax_path = "https://cdn.jsdelivr.net/npm/mathjax@3/es5/tex-chtml.js"
 
 autodoc_default_options = {"members": True, "inherited-members": True, "special_members": True}
 # autodoc_typehints = 'description'  # Does not work as expected. Maybe try at future date again
@@ -202,6 +202,7 @@ def skip_properties(app, what, name, obj, skip, options):
     """This removes all properties from the documentation as they are expected to be documented in the docstring."""
     if isinstance(obj, property):
         return True
+    return None
 
 
 def setup(app):
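Note: Both conf.py changes are lint-driven cleanups with no behavior change: the MathJax URL was previously built from two adjacent string literals (implicit concatenation, the pattern ruff's ISC rules flag), and `skip_properties` now returns explicitly on every path (flake8-return family). A minimal sketch of both patterns; the names and URL below are illustrative only:

    # Adjacent string literals concatenate silently; merging them makes the intent explicit.
    url = "https://cdn.example.com/" "lib.js"
    assert url == "https://cdn.example.com/lib.js"

    def skip_member(obj: object):
        # An explicit `return None` ensures every branch returns a value.
        if isinstance(obj, property):
            return True
        return None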
"sha256:270c0c83c01d00370851813edfd1502f2146a0a0b4e75b723e0c388252840f5a"}, + {file = "ruff-0.0.286-py3-none-macosx_10_7_x86_64.whl", hash = "sha256:8e22cb557e7395893490e7f9cfea1073d19a5b1dd337f44fd81359b2767da4e9"}, + {file = "ruff-0.0.286-py3-none-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:68ed8c99c883ae79a9133cb1a86d7130feee0397fdf5ba385abf2d53e178d3fa"}, + {file = "ruff-0.0.286-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8301f0bb4ec1a5b29cfaf15b83565136c47abefb771603241af9d6038f8981e8"}, + {file = "ruff-0.0.286-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:acc4598f810bbc465ce0ed84417ac687e392c993a84c7eaf3abf97638701c1ec"}, + {file = "ruff-0.0.286-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:88c8e358b445eb66d47164fa38541cfcc267847d1e7a92dd186dddb1a0a9a17f"}, + {file = "ruff-0.0.286-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:0433683d0c5dbcf6162a4beb2356e820a593243f1fa714072fec15e2e4f4c939"}, + {file = "ruff-0.0.286-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ddb61a0c4454cbe4623f4a07fef03c5ae921fe04fede8d15c6e36703c0a73b07"}, + {file = "ruff-0.0.286-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:47549c7c0be24c8ae9f2bce6f1c49fbafea83bca80142d118306f08ec7414041"}, + {file = "ruff-0.0.286-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:559aa793149ac23dc4310f94f2c83209eedb16908a0343663be19bec42233d25"}, + {file = "ruff-0.0.286-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:d73cfb1c3352e7aa0ce6fb2321f36fa1d4a2c48d2ceac694cb03611ddf0e4db6"}, + {file = "ruff-0.0.286-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:3dad93b1f973c6d1db4b6a5da8690c5625a3fa32bdf38e543a6936e634b83dc3"}, + {file = "ruff-0.0.286-py3-none-musllinux_1_2_i686.whl", hash = "sha256:26afc0851f4fc3738afcf30f5f8b8612a31ac3455cb76e611deea80f5c0bf3ce"}, + {file = "ruff-0.0.286-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:9b6b116d1c4000de1b9bf027131dbc3b8a70507788f794c6b09509d28952c512"}, + {file = "ruff-0.0.286-py3-none-win32.whl", hash = "sha256:556e965ac07c1e8c1c2d759ac512e526ecff62c00fde1a046acb088d3cbc1a6c"}, + {file = "ruff-0.0.286-py3-none-win_amd64.whl", hash = "sha256:5d295c758961376c84aaa92d16e643d110be32add7465e197bfdaec5a431a107"}, + {file = "ruff-0.0.286-py3-none-win_arm64.whl", hash = "sha256:1d6142d53ab7f164204b3133d053c4958d4d11ec3a39abf23a40b13b0784e3f0"}, + {file = "ruff-0.0.286.tar.gz", hash = "sha256:f1e9d169cce81a384a26ee5bb8c919fe9ae88255f39a1a69fd1ebab233a85ed2"}, ] [[package]] @@ -2760,4 +2761,4 @@ optuna = ["optuna"] [metadata] lock-version = "2.0" python-versions = ">=3.8,<4.0" -content-hash = "d97f9820017449f036af9d8a4802dc1bf4cbead3a488af458f1c70fc6d1ddc33" +content-hash = "6daa136520c52dcb2dd2c19b3e378b3249537cf1803ccde8bd9e3aa93a7156ff" diff --git a/pyproject.toml b/pyproject.toml index 3beb0926..805ded20 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -45,7 +45,7 @@ memory-profiler = "^0.58.0" matplotlib = "^3.4.3" toml = "^0.10.2" Sphinx = "^6.1.3" -ruff = "^0.0.235" +ruff = "^0.0.286" [[tool.poetry.source]] @@ -156,8 +156,6 @@ ignore = [ "EM101", "EM102", "EM103", - # Multiline docstring summary - "D213", # Varaibles before return "RET504", # Abstract raise into inner function @@ -169,7 +167,11 @@ ignore = [ # df as varaible name "PD901", # melt over stack - "PD013" + "PD013", + # Avoid specifying long messages outside the exception class + "TRY003", + # To 
diff --git a/pyproject.toml b/pyproject.toml
index 3beb0926..805ded20 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -45,7 +45,7 @@ memory-profiler = "^0.58.0"
 matplotlib = "^3.4.3"
 toml = "^0.10.2"
 Sphinx = "^6.1.3"
-ruff = "^0.0.235"
+ruff = "^0.0.286"
 
 
 [[tool.poetry.source]]
@@ -156,8 +156,6 @@ ignore = [
     "EM101",
     "EM102",
     "EM103",
-    # Multiline docstring summary
-    "D213",
     # Varaibles before return
     "RET504",
     # Abstract raise into inner function
@@ -169,7 +167,11 @@ ignore = [
     # df as varaible name
     "PD901",
     # melt over stack
-    "PD013"
+    "PD013",
+    # Avoid specifying long messages outside the exception class
+    "TRY003",
+    # Too many arguments
+    "PLR0913"
 ]
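Note: The two newly ignored rules match patterns that are pervasive and intentional in this code base: TRY003 flags long error messages composed at the raise site, and PLR0913 flags functions with too many arguments (pylint's default threshold is 5). A sketch of what each rule complains about; the functions below are made up for illustration:

    # TRY003: the long message lives at the raise site instead of inside a
    # custom exception class. tpcp's detailed error messages do this on purpose.
    def check_positive(value: int) -> None:
        if value < 0:
            raise ValueError(f"`value` must be >= 0, but got {value}.")

    # PLR0913: more than five parameters, common for optimizer/scorer entry
    # points such as `_optimize_and_score` further down in this diff.
    def run(a, b, c, d, e, f):
        return (a, b, c, d, e, f)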
diff --git a/tests/test_base.py b/tests/test_base.py
index 1d8eb864..62e175c1 100644
--- a/tests/test_base.py
+++ b/tests/test_base.py
@@ -291,7 +291,7 @@ def test_nested_mutable_algorithm_copy():
     assert (
         joblib.hash(test_instance.mutable.get_params())
         == joblib.hash(nested_instance.get_params())
-        == joblib.hash({k: f for k, f in nested_params.items()})
+        == joblib.hash(dict(nested_params.items()))
     )
 
 
diff --git a/tests/test_dataset.py b/tests/test_dataset.py
index 1b86b732..e0b2dc5c 100644
--- a/tests/test_dataset.py
+++ b/tests/test_dataset.py
@@ -61,7 +61,7 @@ def _create_valid_index(input_dict=None, columns_names=None):
 
 def _create_random_bool_map(n, seed):
     np.random.seed(seed)
-    return list(map(lambda x: x >= 0.5, np.random.rand(n)))
+    return [x >= 0.5 for x in np.random.rand(n)]
 
 
 class TestDataset:
@@ -340,7 +340,7 @@ def test_getitem_error_input(self, subscript, select_lvl, what_to_expect):
         "groupby_level", (["patients"], ["patients", "tests"], ["patients", "tests", "extra with space"])
     )
     @pytest.mark.parametrize(
-        "index,is_single_level",
+        ("index", "is_single_level"),
        (
            (
                _create_valid_index(
diff --git a/tests/test_parameter_string_annot.py b/tests/test_parameter_string_annot.py
index bd0105fd..9acbc7fc 100644
--- a/tests/test_parameter_string_annot.py
+++ b/tests/test_parameter_string_annot.py
@@ -37,9 +37,6 @@ def __init__(self, hyper: int, normal: str, custom_annotated: int, normal_no_ann
 
 
 def test_import_forward():
-    if TYPE_CHECKING:
-        pass
-
     class Test(BaseTpcpObject):
         hyper: HyperPara[int]
         normal: Para[renamed_optimize]
@@ -85,11 +82,11 @@ def __init__(self, hyper: int, normal: optimize.GridSearch, custom_annotated: in
 def test_test_str_based_forward():
     class Test(BaseTpcpObject):
         hyper: HyperPara[int]
-        normal: Para["Dataset"]
+        normal: Para[Dataset]
         custom_annotated: Annotated[HyperPara[int], "custom_metadata"]
         normal_no_annot: int
 
-        def __init__(self, hyper: int, normal: "Dataset", custom_annotated: int, normal_no_annot: int):
+        def __init__(self, hyper: int, normal: Dataset, custom_annotated: int, normal_no_annot: int):
             self.hyper = hyper
             self.normal = normal
             self.custom_annotated = custom_annotated
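Note: The test changes are mechanical ruff fixes: a dict comprehension that only repackages `.items()` (flake8-comprehensions), `list(map(lambda ...))` replaced by a comprehension, and `pytest.mark.parametrize` argument names passed as a tuple instead of a comma-separated string (flake8-pytest-style). The rewrites are behavior-preserving, as this self-contained check shows:

    import numpy as np

    params = {"a": 1, "b": 2}
    # The comprehension only rebuilds the dict, so dict() is equivalent.
    assert {k: v for k, v in params.items()} == dict(params.items())

    np.random.seed(42)
    values = np.random.rand(5)
    # The comprehension is the more idiomatic spelling of map() + lambda.
    assert list(map(lambda x: x >= 0.5, values)) == [x >= 0.5 for x in values]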
diff --git a/tpcp/_algorithm_utils.py b/tpcp/_algorithm_utils.py
index 9d8598a3..0cd1fc74 100644
--- a/tpcp/_algorithm_utils.py
+++ b/tpcp/_algorithm_utils.py
@@ -11,13 +11,14 @@
 
 from typing_extensions import Concatenate, ParamSpec
 
+from tpcp import Algorithm
 from tpcp._base import NOTHING, _get_annotated_fields_of_type
 from tpcp._hash import custom_hash
 from tpcp._parameters import _ParaTypes
 from tpcp.exceptions import PotentialUserErrorWarning
 
 if TYPE_CHECKING:
-    from tpcp import Algorithm, OptimizableAlgorithm, OptimizablePipeline
+    from tpcp import OptimizableAlgorithm, OptimizablePipeline
     from tpcp._algorithm import AlgorithmT
 
     OptimizableT = TypeVar("OptimizableT", OptimizablePipeline, OptimizableAlgorithm)
@@ -140,11 +141,9 @@ def _check_safe_run(algorithm: AlgorithmT, old_method: Callable, *args: Any, **k
     before_paras = algorithm.get_params()
     before_paras_hash = custom_hash(before_paras)
     output: AlgorithmT
-    if hasattr(old_method, "__self__"):
-        # In this case the method is already bound and we do not need to pass the algo as first argument
-        output = old_method(*args, **kwargs)
-    else:
-        output = old_method(algorithm, *args, **kwargs)
+
+    # In this case the method is already bound and we do not need to pass the algo as first argument
+    output = old_method(*args, **kwargs) if hasattr(old_method, "__self__") else old_method(algorithm, *args, **kwargs)
     after_paras = algorithm.get_params()
     after_paras_hash = custom_hash(after_paras)
     if not before_paras_hash == after_paras_hash:
@@ -220,6 +219,7 @@ def safe_wrapped(self: AlgorithmT, *args: P.args, **kwargs: P.kwargs) -> Algorit
                 f"` _action_methods = ({action_method.__name__},)`\n\n"
                 "Or append it to the tuple, if it already exists.",
                 PotentialUserErrorWarning,
+                stacklevel=2,
             )
         return _check_safe_run(self, action_method, *args, **kwargs)
 
@@ -232,11 +232,9 @@ def _get_nested_opti_paras(algorithm: Algorithm, opti_para_names: List[str]) ->
     optimizable_paras = {}
     other_paras = {}
     for p, v in paras.items():
-        if p in opti_para_names:
-            optimizable_paras[p] = v
-        # For each optimizable parameter, we also add all children, as they are also allowed to change,
-        # if the parent is allowed to.
-        elif any(p.startswith(o + "__") for o in opti_para_names):
+        if p in opti_para_names or any(p.startswith(o + "__") for o in opti_para_names):
+            # For each optimizable parameter, we also add all children, as they are also allowed to change,
+            # if the parent is allowed to.
             optimizable_paras[p] = v
         else:
             other_paras[p] = v
@@ -249,7 +247,7 @@ def _get_nested_opti_paras(algorithm: Algorithm, opti_para_names: List[str]) ->
     return optimizable_paras, other_paras
 
 
-def _check_safe_optimize(  # noqa: C901
+def _check_safe_optimize(  # noqa: C901, PLR0912
     algorithm: OptimizableT, old_method: Callable, *args: Any, **kwargs: Any
 ) -> OptimizableT:
 
@@ -349,6 +347,7 @@ def _check_safe_optimize(
             f"({optimizable_paras}). "
             "This could indicate an implementation error of the `self_optimize` method.",
             PotentialUserErrorWarning,
+            stacklevel=2,
         )
     if other_returns != (NOTHING, NOTHING):
         return optimized_algorithm, other_returns
@@ -402,6 +401,7 @@ def safe_wrapped(self: OptimizableT, *args: P.args, **kwargs: P.kwargs) -> Optim
             "The `make_optimize_safe` decorator is only meant for the `self_optimize` method, but you applied it "
             f"to the `{self_optimize_method.__name__}` method.",
             PotentialUserErrorWarning,
+            stacklevel=2,
         )
     try:
         return _check_safe_optimize(self, self_optimize_method, *args, **kwargs)
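Note: The `stacklevel=2` added to the `warnings.warn` calls here and in the files below is the fix for ruff's B028 (no explicit stacklevel). With the default `stacklevel=1` the warning points into tpcp internals; with 2 it points at the caller. A minimal sketch with an illustrative function name:

    import warnings

    def validate_paras(paras):
        # stacklevel=2 attributes the warning to the caller of
        # validate_paras(), which is where the user can actually fix it.
        warnings.warn("Parameters changed during the run.", UserWarning, stacklevel=2)

    validate_paras({})  # the warning is now reported at this line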
result["optimizer"] = optimizer if return_parameters: - result["parameters"] = {**hyperparameters, **pure_parameters} or None + result["parameters"] = {**hyperparameters, **pure_parameters} return result diff --git a/tpcp/optimize/_optimize.py b/tpcp/optimize/_optimize.py index 2eda9297..aa336639 100644 --- a/tpcp/optimize/_optimize.py +++ b/tpcp/optimize/_optimize.py @@ -97,6 +97,7 @@ def optimize(self, dataset: DatasetT, **optimize_params: Any) -> Self: # noqa: "`DummyOptimize` does never call this method and skips any optimization steps! " "Use `Optimize` if you actually want to optimize your pipeline.", PotentialUserErrorWarning, + stacklevel=2, ) self.optimized_pipeline_ = self.pipeline.clone() return self @@ -830,6 +831,7 @@ def _store(key_name: str, array, weights=None, splits=False, rank=False): warnings.warn( f"One or more of the {key_name.split('_')[0]} scores are non-finite: {array_means}", category=UserWarning, + stacklevel=2, ) # Weighted std is not directly available in numpy array_stds = np.sqrt(np.average((array - array_means[:, np.newaxis]) ** 2, axis=1, weights=weights)) @@ -914,7 +916,8 @@ def _validate_return_optimized(return_optimized, multi_metric, results) -> Tuple "single score." "`return_optimized` is set to True. " "The only allowed string value for `return_optimized` in a single metric case is `-score`, " - "to invert the metric before score selection." + "to invert the metric before score selection.", + stacklevel=2, ) return reverse, "score" raise ValueError("`return_optimized` must be a bool or explicitly `score` or `-score` in a single metric case.") diff --git a/tpcp/optimize/optuna.py b/tpcp/optimize/optuna.py index 898483ae..b76fce54 100644 --- a/tpcp/optimize/optuna.py +++ b/tpcp/optimize/optuna.py @@ -38,6 +38,7 @@ "sense even for larger projects. " "This means, the interface for `CustomOptunaOptimize` will likely change in the future.", UserWarning, + stacklevel=2, ) @@ -169,7 +170,8 @@ def optimize(self, dataset: DatasetT, **_: Any) -> Self: "anymore.\n" "You can use use `optuna.delete_study(study_name=opti_instance.study_.study_name, " "storage=opti_instance.study_._storage)`. " - "Note that all result object that depend on the study are not available anymore after deletion." + "Note that all result object that depend on the study are not available anymore after deletion.", + stacklevel=2, ) if self.n_jobs == 1: @@ -192,7 +194,8 @@ def optimize(self, dataset: DatasetT, **_: Any) -> Self: warnings.warn( "You are using a progress bar with n_jobs > 1. " "This might lead to strange behaviour, as each process will launch its own process bar with " - "n_trials/n_jobs steps." 
diff --git a/tpcp/optimize/_optimize.py b/tpcp/optimize/_optimize.py
index 2eda9297..aa336639 100644
--- a/tpcp/optimize/_optimize.py
+++ b/tpcp/optimize/_optimize.py
@@ -97,6 +97,7 @@ def optimize(self, dataset: DatasetT, **optimize_params: Any) -> Self:  # noqa:
             "`DummyOptimize` does never call this method and skips any optimization steps! "
             "Use `Optimize` if you actually want to optimize your pipeline.",
             PotentialUserErrorWarning,
+            stacklevel=2,
         )
         self.optimized_pipeline_ = self.pipeline.clone()
         return self
@@ -830,6 +831,7 @@ def _store(key_name: str, array, weights=None, splits=False, rank=False):
             warnings.warn(
                 f"One or more of the {key_name.split('_')[0]} scores are non-finite: {array_means}",
                 category=UserWarning,
+                stacklevel=2,
             )
         # Weighted std is not directly available in numpy
         array_stds = np.sqrt(np.average((array - array_means[:, np.newaxis]) ** 2, axis=1, weights=weights))
@@ -914,7 +916,8 @@ def _validate_return_optimized(return_optimized, multi_metric, results) -> Tuple
                 "single score."
                 "`return_optimized` is set to True. "
                 "The only allowed string value for `return_optimized` in a single metric case is `-score`, "
-                "to invert the metric before score selection."
+                "to invert the metric before score selection.",
+                stacklevel=2,
             )
             return reverse, "score"
     raise ValueError("`return_optimized` must be a bool or explicitly `score` or `-score` in a single metric case.")
diff --git a/tpcp/optimize/optuna.py b/tpcp/optimize/optuna.py
index 898483ae..b76fce54 100644
--- a/tpcp/optimize/optuna.py
+++ b/tpcp/optimize/optuna.py
@@ -38,6 +38,7 @@
     "sense even for larger projects. "
     "This means, the interface for `CustomOptunaOptimize` will likely change in the future.",
     UserWarning,
+    stacklevel=2,
 )
 
 
@@ -169,7 +170,8 @@ def optimize(self, dataset: DatasetT, **_: Any) -> Self:
                 "anymore.\n"
                 "You can use use `optuna.delete_study(study_name=opti_instance.study_.study_name, "
                 "storage=opti_instance.study_._storage)`. "
-                "Note that all result object that depend on the study are not available anymore after deletion."
+                "Note that all result object that depend on the study are not available anymore after deletion.",
+                stacklevel=2,
             )
 
         if self.n_jobs == 1:
@@ -192,7 +194,8 @@ def optimize(self, dataset: DatasetT, **_: Any) -> Self:
             warnings.warn(
                 "You are using a progress bar with n_jobs > 1. "
                 "This might lead to strange behaviour, as each process will launch its own process bar with "
-                "n_trials/n_jobs steps."
+                "n_trials/n_jobs steps.",
+                stacklevel=2,
             )
 
         # This solution is based on the solution proposed here:
@@ -836,6 +839,7 @@ def objective(trial: Trial, pipeline: PipelineT, dataset: DatasetT) -> float:
             warnings.warn(
                 "score_name is ignored if scoring returns a single score",
                 UserWarning,
+                stacklevel=2,
             )
             score = average_scores
 
diff --git a/tpcp/validate/_scorer.py b/tpcp/validate/_scorer.py
index 96a465c2..e0bd7fdc 100644
--- a/tpcp/validate/_scorer.py
+++ b/tpcp/validate/_scorer.py
@@ -48,7 +48,7 @@ def __call__(
         *,
         step: int,
         scores: Tuple[ScoreTypeT[T], ...],
-        scorer: "Scorer[PipelineT, DatasetT, T]",
+        scorer: Scorer[PipelineT, DatasetT, T],
         pipeline: PipelineT,
         dataset: DatasetT,
     ) -> None:
@@ -75,7 +75,7 @@ def __init__(self, _value: T):
 
     def __repr__(self):
         """Show the representation of the object."""
-        return f"{self.__class__.__name__}({repr(self._value)})"
+        return f"{self.__class__.__name__}({self._value!r})"
 
     def get_value(self) -> T:
         """Return the value wrapped by aggregator."""
@@ -195,7 +195,7 @@ def __call__(
         """
         return self._score(pipeline=pipeline, dataset=dataset)
 
-    def _aggregate(  # mccabe: disable=MC0001, pylint: disable=too-many-branches  # noqa: C901
+    def _aggregate(  # noqa: C901, PLR0912
         self,
         scores: Union[Tuple[Type[Aggregator[T]], List[T]], Dict[str, Tuple[Type[Aggregator[T]], List[T]]]],
         datapoints: List[DatasetT],
@@ -258,7 +258,7 @@ def _score(self, pipeline: PipelineT, dataset: DatasetT):
             try:
                 # We need to clone here again, to make sure that the run for each data point is truly independent.
                 score = self._score_func(pipeline.clone(), d)
-            except Exception as e:  # noqa: broad-except
+            except Exception as e:  # noqa: BLE001
                 raise ScorerFailedError(
                     f"Scorer raised an exception while scoring data point {i} ({d.group}). "
                     "Tpcp does not support that (compared to sklearn) and you need to handle error cases yourself "
@@ -321,7 +321,7 @@ def _validate_scorer(
     )
 
 
-def _check_and_invert_score_dict(  # noqa: C901
+def _check_and_invert_score_dict(  # I don't care that this is too complex, some things need to be complex
     scores: List[ScoreTypeT[T]],
     default_agg: Type[Aggregator],
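Note: The `repr(...)`-to-`!r` rewrites inside f-strings (in `clone` above and in the aggregator `__repr__`) are equivalent spellings; the conversion flag is the form ruff prefers (likely its RUF010 rule). A quick self-contained check:

    class Algo:
        def __repr__(self) -> str:
            return "Algo()"

    a = Algo()
    # Both forms produce identical output; `!r` avoids the extra repr() call.
    assert f"Cannot clone object '{repr(a)}'" == f"Cannot clone object '{a!r}'"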