Skip to content

Commit eaf7448

Browse files
committed
Fix GitHub Actions workflows and type checking errors
Workflow Fixes: - Fix YAML syntax in check_pull_request_title.yml - Remove problematic types parameter (not needed with custom subjectPattern) Type Checking Fixes (66 errors resolved): - Keep strict mypy flags enabled (disallow_any_generics, warn_unreachable) - Add explicit type parameters throughout (dict[str, Any], list[Any], set[Any]) - Add type annotations to helper functions (_is_union_type, etc.) - Fix PathLike type parameter (Union[str, os.PathLike[str]]) - Add proper type hints to levenshtein_distance and ensure_tuple - Add type assertions for dataclass field types - Handle false positive unreachable warnings with targeted type ignores Files modified: - schema.py, coercion.py: Type annotations and assertions - config.py, loader.py, items.py: Dict/list/set type parameters - preprocessor.py, operators.py, resolver.py: Type parameters - utils/types.py, utils/misc.py, utils/module.py: Type fixes - errors/suggestions.py: Levenshtein distance types All 24 source files pass strict type checking.
1 parent a1fe29a commit eaf7448

File tree

14 files changed

+70
-62
lines changed

14 files changed

+70
-62
lines changed

.github/workflows/check_pull_request_title.yml

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -42,7 +42,6 @@ jobs:
4242
Valid: "Add new feature", "Fix bug in parser"
4343
Invalid: "add feature", "Adds feature", "Add.", "Fix"
4444
# Disable type prefixes (we don't use conventional commits format)
45-
types: []
4645
requireScope: false
4746
ignoreLabels:
4847
- ignore-title-check

pyproject.toml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -87,10 +87,10 @@ show_error_codes = true
8787
show_error_context = true
8888
strict_equality = true
8989
strict_optional = true
90+
warn_unreachable = true
9091
warn_no_return = true
9192
warn_redundant_casts = true
9293
warn_return_any = true
93-
warn_unreachable = true
9494
warn_unused_configs = true
9595
warn_unused_ignores = true
9696

src/sparkwheel/coercion.py

Lines changed: 3 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -7,7 +7,7 @@
77
__all__ = ["coerce_value", "can_coerce"]
88

99

10-
def _is_union_type(origin) -> bool:
10+
def _is_union_type(origin: Any) -> bool:
1111
"""Check if origin is a Union type."""
1212
if origin is Union:
1313
return True
@@ -103,6 +103,8 @@ def coerce_value(value: Any, target_type: type, field_path: str = "") -> Any:
103103
if field_name in schema_fields:
104104
field_info = schema_fields[field_name]
105105
field_path_full = f"{field_path}.{field_name}" if field_path else field_name
106+
# field_info.type can be str in some edge cases, but for our use it's always type
107+
assert isinstance(field_info.type, type)
106108
coerced[field_name] = coerce_value(field_value, field_info.type, field_path_full)
107109
else:
108110
# Keep unknown fields as-is (strict mode will catch them)

src/sparkwheel/config.py

Lines changed: 11 additions & 11 deletions
Original file line numberDiff line numberDiff line change
@@ -61,7 +61,7 @@ class Config:
6161

6262
def __init__(
6363
self,
64-
data: dict | None = None, # Internal/testing use only
64+
data: dict[str, Any] | None = None, # Internal/testing use only
6565
*, # Rest are keyword-only
6666
globals: dict[str, Any] | None = None,
6767
schema: type | None = None,
@@ -88,7 +88,7 @@ def __init__(
8888
>>> # Chaining
8989
>>> config = Config(schema=MySchema).update("config.yaml")
9090
"""
91-
self._data: dict = data or {} # Start with provided data or empty
91+
self._data: dict[str, Any] = data or {} # Start with provided data or empty
9292
self._metadata = MetadataRegistry()
9393
self._resolver = Resolver()
9494
self._is_parsed = False
@@ -163,7 +163,7 @@ def set(self, id: str, value: Any) -> None:
163163

164164
# Ensure root is dict
165165
if not isinstance(self._data, dict):
166-
self._data = {}
166+
self._data = {} # type: ignore[unreachable]
167167

168168
# Create missing intermediate paths
169169
current = self._data
@@ -231,7 +231,7 @@ def is_frozen(self) -> bool:
231231
"""
232232
return self._frozen
233233

234-
def update(self, source: PathLike | dict | "Config" | str) -> "Config":
234+
def update(self, source: PathLike | dict[str, Any] | "Config" | str) -> "Config":
235235
"""Update configuration with changes from another source.
236236
237237
Auto-detects strings as either file paths or CLI overrides:
@@ -315,15 +315,15 @@ def _update_from_config(self, source: "Config") -> None:
315315
self._metadata.merge(source._metadata)
316316
self._invalidate_resolution()
317317

318-
def _uses_nested_paths(self, source: dict) -> bool:
318+
def _uses_nested_paths(self, source: dict[str, Any]) -> bool:
319319
"""Check if dict uses :: path syntax."""
320320
return any(ID_SEP_KEY in str(k).lstrip(REPLACE_KEY).lstrip(REMOVE_KEY) for k in source.keys())
321321

322-
def _apply_path_updates(self, source: dict) -> None:
322+
def _apply_path_updates(self, source: dict[str, Any]) -> None:
323323
"""Apply nested path updates (e.g., model::lr=value, =model=replace, ~old::param=null)."""
324324
for key, value in source.items():
325325
if not isinstance(key, str):
326-
self.set(str(key), value)
326+
self.set(str(key), value) # type: ignore[unreachable]
327327
continue
328328

329329
if key.startswith(REPLACE_KEY):
@@ -364,7 +364,7 @@ def _delete_nested_key(self, key: str) -> None:
364364
del self._data[key]
365365
self._invalidate_resolution()
366366

367-
def _apply_structural_update(self, source: dict) -> None:
367+
def _apply_structural_update(self, source: dict[str, Any]) -> None:
368368
"""Apply structural update with operators."""
369369
validate_operators(source)
370370
self._data = apply_operators(self._data, source)
@@ -546,15 +546,15 @@ def __repr__(self) -> str:
546546
return f"Config({self._data})"
547547

548548
@staticmethod
549-
def export_config_file(config: dict, filepath: PathLike, **kwargs: Any) -> None:
549+
def export_config_file(config: dict[str, Any], filepath: PathLike, **kwargs: Any) -> None:
550550
"""Export config to YAML file.
551551
552552
Args:
553553
config: Config dict to export
554554
filepath: Target file path
555555
kwargs: Additional arguments for yaml.safe_dump
556556
"""
557-
import yaml
557+
import yaml # type: ignore[import-untyped]
558558

559559
filepath_str = str(Path(filepath))
560560
with open(filepath_str, "w") as f:
@@ -599,7 +599,7 @@ def parse_overrides(args: list[str]) -> dict[str, Any]:
599599
"""
600600
import ast
601601

602-
overrides = {}
602+
overrides: dict[str, Any] = {}
603603

604604
for arg in args:
605605
# Handle delete operator: ~key

src/sparkwheel/errors/suggestions.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -33,10 +33,10 @@ def levenshtein_distance(s1: str, s2: str) -> int:
3333
return len(s1)
3434

3535
# Create distance matrix
36-
previous_row = range(len(s2) + 1)
36+
previous_row: list[int] = list(range(len(s2) + 1))
3737

3838
for i, c1 in enumerate(s1):
39-
current_row = [i + 1]
39+
current_row: list[int] = [i + 1]
4040
for j, c2 in enumerate(s2):
4141
# Cost of insertions, deletions, or substitutions
4242
insertions = previous_row[j + 1] + 1

src/sparkwheel/items.py

Lines changed: 6 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -278,7 +278,7 @@ def __init__(
278278
self,
279279
config: Any,
280280
id: str = "",
281-
globals: dict | None = None,
281+
globals: dict[str, Any] | None = None,
282282
source_location: SourceLocation | None = None,
283283
) -> None:
284284
super().__init__(config=config, id=id, source_location=source_location)
@@ -301,9 +301,9 @@ def _parse_import_string(self, import_string: str) -> Any | None:
301301
if isinstance(node, ast.Import):
302302
self.globals[asname], _ = optional_import(f"{name}")
303303
return self.globals[asname]
304-
return None
304+
return None # type: ignore[unreachable]
305305

306-
def evaluate(self, globals: dict | None = None, locals: dict | None = None) -> str | Any | None:
306+
def evaluate(self, globals: dict[str, Any] | None = None, locals: dict[str, Any] | None = None) -> str | Any | None:
307307
"""Evaluate the expression and return the result.
308308
309309
Uses Python's `eval()` to execute the expression string.
@@ -350,7 +350,7 @@ def evaluate(self, globals: dict | None = None, locals: dict | None = None) -> s
350350
return None
351351

352352
@classmethod
353-
def is_expression(cls, config: dict | list | str) -> bool:
353+
def is_expression(cls, config: dict[str, Any] | list[Any] | str) -> bool:
354354
"""
355355
Check whether the config is an executable expression string.
356356
Currently, a string starts with ``"$"`` character is interpreted as an expression.
@@ -361,7 +361,7 @@ def is_expression(cls, config: dict | list | str) -> bool:
361361
return isinstance(config, str) and config.startswith(cls.prefix)
362362

363363
@classmethod
364-
def is_import_statement(cls, config: dict | list | str) -> bool:
364+
def is_import_statement(cls, config: dict[str, Any] | list[Any] | str) -> bool:
365365
"""
366366
Check whether the config is an import statement (a special case of expression).
367367
@@ -372,4 +372,4 @@ def is_import_statement(cls, config: dict | list | str) -> bool:
372372
return False
373373
if "import" not in config:
374374
return False
375-
return isinstance(first(ast.iter_child_nodes(ast.parse(f"{config[len(cls.prefix) :]}"))), (ast.Import, ast.ImportFrom))
375+
return isinstance(first(ast.iter_child_nodes(ast.parse(f"{config[len(cls.prefix) :]}"))), (ast.Import, ast.ImportFrom)) # type: ignore[index]

src/sparkwheel/loader.py

Lines changed: 6 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -5,7 +5,7 @@
55
from pathlib import Path
66
from typing import Any
77

8-
import yaml
8+
import yaml # type: ignore[import-untyped]
99

1010
from .metadata import MetadataRegistry
1111
from .path_patterns import is_yaml_file
@@ -23,7 +23,7 @@ class MetadataTrackingYamlLoader(CheckKeyDuplicatesYamlLoader):
2323
this loader populates a separate MetadataRegistry during loading.
2424
"""
2525

26-
def __init__(self, stream, filepath: str, registry: MetadataRegistry):
26+
def __init__(self, stream, filepath: str, registry: MetadataRegistry): # type: ignore[no-untyped-def]
2727
super().__init__(stream)
2828
self.filepath = filepath
2929
self.registry = registry
@@ -121,7 +121,7 @@ class Loader:
121121
```
122122
"""
123123

124-
def load_file(self, filepath: PathLike) -> tuple[dict, MetadataRegistry]:
124+
def load_file(self, filepath: PathLike) -> tuple[dict[str, Any], MetadataRegistry]:
125125
"""Load a single YAML file with metadata tracking.
126126
127127
Args:
@@ -163,7 +163,7 @@ def load_file(self, filepath: PathLike) -> tuple[dict, MetadataRegistry]:
163163

164164
return config, registry
165165

166-
def _load_yaml_with_metadata(self, stream, filepath: str, registry: MetadataRegistry) -> dict:
166+
def _load_yaml_with_metadata(self, stream, filepath: str, registry: MetadataRegistry) -> dict[str, Any]: # type: ignore[no-untyped-def]
167167
"""Load YAML and populate metadata registry during construction.
168168
169169
Args:
@@ -183,7 +183,7 @@ class TrackerLoader(MetadataTrackingYamlLoader):
183183
def loader_init(self, stream_arg):
184184
MetadataTrackingYamlLoader.__init__(self, stream_arg, filepath, registry)
185185

186-
TrackerLoader.__init__ = loader_init
186+
TrackerLoader.__init__ = loader_init # type: ignore[method-assign,assignment]
187187

188188
# Load and return clean config
189189
config = yaml.load(stream, TrackerLoader)
@@ -206,7 +206,7 @@ def _strip_metadata(config: Any) -> Any:
206206
else:
207207
return config
208208

209-
def load_files(self, filepaths: Sequence[PathLike]) -> tuple[dict, MetadataRegistry]:
209+
def load_files(self, filepaths: Sequence[PathLike]) -> tuple[dict[str, Any], MetadataRegistry]:
210210
"""Load multiple YAML files sequentially.
211211
212212
Files are loaded in order and merged using simple dict update

src/sparkwheel/operators.py

Lines changed: 6 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -54,7 +54,7 @@ def _validate_delete_operator(key: str, value: Any) -> None:
5454
)
5555

5656

57-
def validate_operators(config: dict, parent_key: str = "") -> None:
57+
def validate_operators(config: dict[str, Any], parent_key: str = "") -> None:
5858
"""Validate operator usage in config tree.
5959
6060
With composition-by-default, validation is simpler:
@@ -70,11 +70,11 @@ def validate_operators(config: dict, parent_key: str = "") -> None:
7070
ConfigMergeError: If operator usage is invalid
7171
"""
7272
if not isinstance(config, dict):
73-
return
73+
return # type: ignore[unreachable]
7474

7575
for key, value in config.items():
7676
if not isinstance(key, str):
77-
continue
77+
continue # type: ignore[unreachable]
7878

7979
actual_key = key
8080
operator = None
@@ -98,7 +98,7 @@ def validate_operators(config: dict, parent_key: str = "") -> None:
9898
validate_operators(value, full_key)
9999

100100

101-
def apply_operators(base: dict, override: dict) -> dict:
101+
def apply_operators(base: dict[str, Any], override: dict[str, Any]) -> dict[str, Any]:
102102
"""Apply configuration changes with composition-by-default semantics.
103103
104104
Default behavior: Compose (merge dicts, extend lists)
@@ -150,13 +150,13 @@ def apply_operators(base: dict, override: dict) -> dict:
150150
{"a": 1, "b": 5}
151151
"""
152152
if not isinstance(base, dict) or not isinstance(override, dict):
153-
return deepcopy(override)
153+
return deepcopy(override) # type: ignore[unreachable]
154154

155155
result = deepcopy(base)
156156

157157
for key, value in override.items():
158158
if not isinstance(key, str):
159-
result[key] = deepcopy(value)
159+
result[key] = deepcopy(value) # type: ignore[unreachable]
160160
continue
161161

162162
# Process replace operator (=key)

src/sparkwheel/preprocessor.py

Lines changed: 6 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -49,7 +49,7 @@ class Preprocessor:
4949
>>> # }
5050
"""
5151

52-
def __init__(self, loader, globals: dict[str, Any] | None = None):
52+
def __init__(self, loader, globals: dict[str, Any] | None = None): # type: ignore[no-untyped-def]
5353
"""Initialize preprocessor.
5454
5555
Args:
@@ -59,7 +59,7 @@ def __init__(self, loader, globals: dict[str, Any] | None = None):
5959
self.loader = loader
6060
self.globals = globals or {}
6161

62-
def process(self, config: Any, base_data: dict, id: str = "") -> Any:
62+
def process(self, config: Any, base_data: dict[str, Any], id: str = "") -> Any:
6363
"""Preprocess entire config tree.
6464
6565
Main entry point - walks config tree recursively and applies
@@ -81,7 +81,7 @@ def process(self, config: Any, base_data: dict, id: str = "") -> Any:
8181
def _process_recursive(
8282
self,
8383
config: Any,
84-
base_data: dict,
84+
base_data: dict[str, Any],
8585
id: str,
8686
raw_ref_stack: set[str],
8787
) -> Any:
@@ -118,7 +118,7 @@ def _process_recursive(
118118

119119
return config
120120

121-
def _expand_raw_ref(self, raw_ref: str, base_data: dict, raw_ref_stack: set[str]) -> Any:
121+
def _expand_raw_ref(self, raw_ref: str, base_data: dict[str, Any], raw_ref_stack: set[str]) -> Any:
122122
"""Expand a single raw reference by loading external file or local YAML.
123123
124124
Args:
@@ -162,7 +162,7 @@ def _expand_raw_ref(self, raw_ref: str, base_data: dict, raw_ref_stack: set[str]
162162
raw_ref_stack.discard(raw_ref)
163163

164164
@staticmethod
165-
def _get_by_id(config: dict, id: str) -> Any:
165+
def _get_by_id(config: dict[str, Any], id: str) -> Any:
166166
"""Navigate config dict by ID path.
167167
168168
Args:
@@ -183,7 +183,7 @@ def _get_by_id(config: dict, id: str) -> Any:
183183
for key in split_id(id):
184184
if isinstance(current, dict):
185185
current = current[key]
186-
elif isinstance(current, list):
186+
elif isinstance(current, list): # type: ignore[unreachable]
187187
current = current[int(key)]
188188
else:
189189
raise TypeError(f"Cannot index {type(current).__name__} with key '{key}' at path '{id}'")

src/sparkwheel/resolver.py

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -320,7 +320,7 @@ def iter_subconfigs(cls, id: str, config: Any) -> Iterator[tuple[str, str, Any]]
320320
"""
321321
for k, v in config.items() if isinstance(config, dict) else enumerate(config):
322322
sub_id = f"{id}{cls.sep}{k}" if id != "" else f"{k}"
323-
yield k, sub_id, v
323+
yield k, sub_id, v # type: ignore[misc]
324324

325325
@classmethod
326326
def match_refs_pattern(cls, value: str) -> dict[str, int]:
@@ -336,7 +336,7 @@ def match_refs_pattern(cls, value: str) -> dict[str, int]:
336336
return scan_references(value)
337337

338338
@classmethod
339-
def update_refs_pattern(cls, value: str, refs: dict) -> str:
339+
def update_refs_pattern(cls, value: str, refs: dict[str, Any]) -> str:
340340
"""Replace reference patterns with resolved values.
341341
342342
Args:
@@ -390,7 +390,7 @@ def find_refs_in_config(cls, config: Any, id: str, refs: dict[str, int] | None =
390390
return refs_
391391

392392
@classmethod
393-
def update_config_with_refs(cls, config: Any, id: str, refs: dict | None = None) -> Any:
393+
def update_config_with_refs(cls, config: Any, id: str, refs: dict[str, Any] | None = None) -> Any:
394394
"""Update config by replacing references with resolved values.
395395
396396
Args:

0 commit comments

Comments
 (0)