From 73e0d6b87e93ccde8cf309b491336ade6ee3fb42 Mon Sep 17 00:00:00 2001 From: Chris Green Date: Tue, 2 Dec 2025 16:58:17 -0600 Subject: [PATCH 1/4] Resolve Python linting errors - Ignore `D10[0-3]` (missing docstring) --- scripts/check_codeql_alerts.py | 70 ++++++++++++++++++++++++---------- test/python/registry.py | 2 +- test/python/test_phlex.py | 4 +- 3 files changed, 52 insertions(+), 24 deletions(-) diff --git a/scripts/check_codeql_alerts.py b/scripts/check_codeql_alerts.py index f469941f7..df43cd9c9 100644 --- a/scripts/check_codeql_alerts.py +++ b/scripts/check_codeql_alerts.py @@ -140,7 +140,9 @@ def parse_args(argv: collections.abc.Sequence[str] | None = None) -> argparse.Na parser.add_argument( "--ref", required=False, - help="Optional Git ref to compare (e.g. refs/pull/104/merge). When provided the script will query the Code Scanning API instead of relying only on SARIF baselineState", + help="Optional Git ref to compare (e.g. refs/pull/104/merge). " + "When provided the script will query the Code Scanning API " + "instead of relying only on SARIF baselineState", ) parser.add_argument( "--min-level", @@ -244,7 +246,10 @@ def _paginate_alerts_api( def _format_physical_location(phys: dict[str, Any]) -> str | None: - """Return a formatted location string `path[:line[:col]]` from a SARIF physicalLocation dict, or None.""" + """Return a formatted location string `path[:line[:col]]`. + + Input: SARIF physicalLocation dict, or None. 
+ """ if not phys: return None artifact = phys.get("artifactLocation", {}) @@ -540,7 +545,8 @@ def collect_alerts( "relatedLocations": result.get("relatedLocations"), } _log( - f"Unknown SARIF location for result: {json.dumps(snippet, default=str)[:4000]}" + f"Unknown SARIF location for result: " + f"{json.dumps(snippet, default=str)[:4000]}" ) except (TypeError, OSError) as exc: if DEBUG: @@ -575,11 +581,13 @@ def _format_section( ) lines.append( - f"- {bullet_prefix} **{alert.level_title()}**{severity_note} {prefix}{alert.rule_display()}{dismissed_note} at `{alert.location}` — {alert.message}" + f"- {bullet_prefix} **{alert.level_title()}**{severity_note} {prefix}" + f"{alert.rule_display()}{dismissed_note} at `{alert.location}` — {alert.message}" ) if remaining: lines.append( - f"- {bullet_prefix} …and {remaining} more alerts (see Code Scanning for the full list)." + f"- {bullet_prefix} …and {remaining} more alerts " + "(see Code Scanning for the full list)." ) return lines @@ -605,14 +613,16 @@ def _highest_severity(alerts: collections.abc.Sequence[Alert]) -> str | None: if new_alerts: sev_note = f" — Highest severity: {highest_new}" if highest_new else "" lines.append( - f"## ❌ {len(new_alerts)} new CodeQL alert{'s' if len(new_alerts) != 1 else ''} (level ≥ {threshold}){sev_note}" + f"## ❌ {len(new_alerts)} new CodeQL alert" + f"{'s' if len(new_alerts) != 1 else ''} (level ≥ {threshold}){sev_note}" ) lines.extend(_format_section(new_alerts, max_results=max_results, bullet_prefix=":x:")) lines.append("") if fixed_alerts: lines.append( - f"## ✅ {len(fixed_alerts)} CodeQL alert{'s' if len(fixed_alerts) != 1 else ''} resolved since the previous run" + f"## ✅ {len(fixed_alerts)} CodeQL alert" + f"{'s' if len(fixed_alerts) != 1 else ''} resolved since the previous run" ) lines.extend( _format_section( @@ -650,14 +660,16 @@ def write_summary( handle.write("## CodeQL Alerts\n\n") if new_alerts: handle.write( - f"❌ {len(new_alerts)} new alert{'s' if len(new_alerts) != 
1 else ''} (level ≥ {threshold}).\n"
+            f"❌ {len(new_alerts)} new alert{'s' if len(new_alerts) != 1 else ''} "
+            f"(level ≥ {threshold}).\n"
         )
         for line in _format_section(new_alerts, max_results=max_results, bullet_prefix=":x:"):
             handle.write(f"{line}\n")
         handle.write("\n")
     if fixed_alerts:
         handle.write(
-            f"✅ {len(fixed_alerts)} alert{'s' if len(fixed_alerts) != 1 else ''} resolved since the previous run.\n"
+            f"✅ {len(fixed_alerts)} alert{'s' if len(fixed_alerts) != 1 else ''} "
+            "resolved since the previous run.\n"
         )
         for line in _format_section(
             fixed_alerts,
@@ -682,14 +694,17 @@ def _print_summary(
 
     print("CodeQL Summary:")
     print(
-        f"- New (unfiltered): {len(new_alerts)}{f' (Highest: {highest_new})' if highest_new else ''}"
+        f"- New (unfiltered): {len(new_alerts)}"
+        f"{f' (Highest: {highest_new})' if highest_new else ''}"
     )
     print(
-        f"- Fixed (unfiltered): {len(fixed_alerts)}{f' (Highest: {highest_fixed})' if highest_fixed else ''}"
+        f"- Fixed (unfiltered): {len(fixed_alerts)}"
+        f"{f' (Highest: {highest_fixed})' if highest_fixed else ''}"
    )
     if matched_available:
         print(
-            f"- Matched (preexisting): {len(matched_alerts)}{f' (Highest: {highest_matched})' if highest_matched else ''}"
+            f"- Matched (preexisting): {len(matched_alerts)}"
+            f"{f' (Highest: {highest_matched})' if highest_matched else ''}"
         )
     else:
         print("- Matched (preexisting): N/A (no API comparison)")
@@ -857,18 +872,24 @@ def _build_multi_section_comment(
     # Matching summary (always include in PR comment)
     if api_comp.new_vs_base or api_comp.fixed_vs_base:
         lines.append(
-            f"{len(api_comp.fixed_vs_base)} fixed, {len(api_comp.new_vs_base)} new since branch point ({api_comp.base_sha[:7] if api_comp.base_sha else 'unknown'})"
+            f"{len(api_comp.fixed_vs_base)} fixed, {len(api_comp.new_vs_base)} "
+            "new since branch point ("
+            f"{api_comp.base_sha[:7] if api_comp.base_sha else 'unknown'})"
         )
     if api_comp.new_vs_prev or api_comp.fixed_vs_prev:
         lines.append(
-            f"{len(api_comp.fixed_vs_prev)} fixed, 
{len(api_comp.new_vs_prev)} new since previous report on PR ({api_comp.prev_commit_ref[:7] if api_comp.prev_commit_ref else 'unknown'})" + f"{len(api_comp.fixed_vs_prev)} fixed, {len(api_comp.new_vs_prev)} " + "new since previous report on PR (" + f"{api_comp.prev_commit_ref[:7] if api_comp.prev_commit_ref else 'unknown'})" ) lines.append("") # Add previous-commit comparison if available if api_comp.new_vs_prev: lines.append( - f"## ❌ {len(api_comp.new_vs_prev)} new CodeQL alert{'s' if len(api_comp.new_vs_prev) != 1 else ''} since the previous PR commit" + f"## ❌ {len(api_comp.new_vs_prev)} new CodeQL alert" + f"{'s' if len(api_comp.new_vs_prev) != 1 else ''} " + "since the previous PR commit" ) lines.extend( _format_section(api_comp.new_vs_prev, max_results=max_results, bullet_prefix=":x:") @@ -876,7 +897,9 @@ def _build_multi_section_comment( lines.append("") if api_comp.fixed_vs_prev: lines.append( - f"## ✅ {len(api_comp.fixed_vs_prev)} CodeQL alert{'s' if len(api_comp.fixed_vs_prev) != 1 else ''} resolved since the previous PR commit" + f"## ✅ {len(api_comp.fixed_vs_prev)} CodeQL alert" + f"{'s' if len(api_comp.fixed_vs_prev) != 1 else ''} " + "resolved since the previous PR commit" ) lines.extend( _format_section( @@ -888,7 +911,8 @@ def _build_multi_section_comment( # Add branch-point comparison if available if api_comp.new_vs_base: lines.append( - f"## ❌ {len(api_comp.new_vs_base)} new CodeQL alert{'s' if len(api_comp.new_vs_base) != 1 else ''} since the branch point" + f"## ❌ {len(api_comp.new_vs_base)} new CodeQL alert" + f"{'s' if len(api_comp.new_vs_base) != 1 else ''} since the branch point" ) lines.extend( _format_section(api_comp.new_vs_base, max_results=max_results, bullet_prefix=":x:") @@ -896,7 +920,8 @@ def _build_multi_section_comment( lines.append("") if api_comp.fixed_vs_base: lines.append( - f"## ✅ {len(api_comp.fixed_vs_base)} CodeQL alert{'s' if len(api_comp.fixed_vs_base) != 1 else ''} resolved since the branch point" + f"## ✅ 
{len(api_comp.fixed_vs_base)} CodeQL alert" + f"{'s' if len(api_comp.fixed_vs_base) != 1 else ''} resolved since the branch point" ) lines.extend( _format_section( @@ -958,7 +983,8 @@ def main(argv: collections.abc.Sequence[str] | None = None) -> int: return 2 # Compute unfiltered new/fixed and matched summaries for stdout - # If API-mode was used, pr_alerts/main_alerts dicts are available; otherwise fall back to SARIF unfiltered buckets + # If API-mode was used, pr_alerts/main_alerts dicts are available; + # otherwise fall back to SARIF unfiltered buckets if api_comp is not None: new_all = api_comp.new_alerts fixed_all = api_comp.fixed_alerts @@ -971,9 +997,11 @@ def main(argv: collections.abc.Sequence[str] | None = None) -> int: new_all = buckets_all.get("new", []) fixed_all = buckets_all.get("absent", []) except (ValueError, KeyError, TypeError, json.JSONDecodeError) as exc: - # If re-collection fails (malformed SARIF or unexpected structure), fall back to thresholded lists + # If re-collection fails (malformed SARIF or unexpected structure), + # fall back to thresholded lists _debug( - f"Re-collecting SARIF without threshold failed: {exc}; falling back to thresholded lists" + f"Re-collecting SARIF without threshold failed: {exc}; " + "falling back to thresholded lists" ) new_all = new_alerts fixed_all = fixed_alerts diff --git a/test/python/registry.py b/test/python/registry.py index 6d6db51bc..d267d3eac 100644 --- a/test/python/registry.py +++ b/test/python/registry.py @@ -7,7 +7,7 @@ cppyy.include("Python.h") -_registered_modules = dict() +_registered_modules = {} def register(m, config): diff --git a/test/python/test_phlex.py b/test/python/test_phlex.py index 52d964e28..f9ad8e689 100644 --- a/test/python/test_phlex.py +++ b/test/python/test_phlex.py @@ -3,10 +3,10 @@ class TestPYPHLEX: def setup_class(cls): import pyphlex # noqa: F401 - __all__ = ["pyphlex"] # For CodeQL + __all__ = ["pyphlex"] # noqa: F841 # For CodeQL def test01_phlex_existence(self): - 
"""Test existence of the phlex namespace""" + """Test existence of the phlex namespace.""" import cppyy assert cppyy.gbl.phlex From 6c176fca50d4bfee4c674bc871d0f40916707bef Mon Sep 17 00:00:00 2001 From: "google-labs-jules[bot]" <161369871+google-labs-jules[bot]@users.noreply.github.com> Date: Wed, 3 Dec 2025 00:01:13 +0000 Subject: [PATCH 2/4] docs: Add missing docstrings to Python files Adds Google-style docstrings to public modules, classes, and functions in the 'scripts/' and 'test/python/' directories to resolve CI errors. --- scripts/check_codeql_alerts.py | 129 +++++++++++++++++++++++ scripts/codeql_reset_dismissed_alerts.py | 26 +++++ scripts/create_coverage_symlinks.py | 38 +++++++ scripts/export_llvm_lcov.py | 13 +++ scripts/sarif-alerts.py | 9 ++ test/python/adder.py | 1 + test/python/pyphlex.py | 6 ++ test/python/registry.py | 1 + test/python/test_phlex.py | 6 ++ 9 files changed, 229 insertions(+) diff --git a/scripts/check_codeql_alerts.py b/scripts/check_codeql_alerts.py index df43cd9c9..a1366e2c5 100644 --- a/scripts/check_codeql_alerts.py +++ b/scripts/check_codeql_alerts.py @@ -88,17 +88,21 @@ class Alert: analysis_key: str | None = None def icon(self) -> str: + """Returns an icon for the alert's level.""" return LEVEL_ICONS.get(self.level, ":grey_question:") def level_title(self) -> str: + """Returns a title-cased version of the alert's level.""" return self.level.capitalize() def rule_display(self) -> str: + """Returns a formatted string for the rule ID, including a link if available.""" if self.help_uri: return f"[{self.rule_id}]({self.help_uri})" return f"`{self.rule_id}`" def severity_suffix(self) -> str: + """Returns a string indicating the security severity, if available.""" if self.security_severity: return f" ({self.security_severity})" return "" @@ -120,6 +124,14 @@ class APIAlertComparison: def parse_args(argv: collections.abc.Sequence[str] | None = None) -> argparse.Namespace: + """Parses command-line arguments. 
+ + Args: + argv: The command-line arguments to parse. + + Returns: + The parsed arguments. + """ parser = argparse.ArgumentParser(description=__doc__) parser.add_argument( "--sarif", @@ -362,6 +374,14 @@ def _load_sarif_file(path: Path) -> dict[str, Any]: def load_sarif(path: Path) -> dict[str, Any]: + """Loads a SARIF file, handling single files and directories. + + Args: + path: The path to the SARIF file or directory. + + Returns: + The loaded SARIF data. + """ if path.is_dir(): sarif_files = sorted(p for p in path.rglob("*.sarif") if p.is_file()) if not sarif_files: @@ -380,6 +400,14 @@ def load_sarif(path: Path) -> dict[str, Any]: def severity(level: str | None) -> str: + """Normalizes a severity level string. + + Args: + level: The severity level string. + + Returns: + The normalized severity level. + """ if not level: return "warning" normalized = level.lower() @@ -389,10 +417,27 @@ def severity(level: str | None) -> str: def severity_reaches_threshold(level: str, threshold: str) -> bool: + """Checks if a severity level meets a threshold. + + Args: + level: The severity level to check. + threshold: The threshold to compare against. + + Returns: + True if the severity level meets the threshold, False otherwise. + """ return LEVEL_ORDER.get(level, 0) >= LEVEL_ORDER.get(threshold, 0) def sanitize_message(message: str | None) -> str: + """Sanitizes and truncates a message string. + + Args: + message: The message to sanitize. + + Returns: + The sanitized message. + """ if not message: return "(no message provided)" flattened = " ".join(message.split()) @@ -402,6 +447,14 @@ def sanitize_message(message: str | None) -> str: def extract_message(result: dict[str, Any]) -> str: + """Extracts the message from a SARIF result. + + Args: + result: The SARIF result. + + Returns: + The extracted message. 
+ """ message = result.get("message") or {} text = message.get("markdown") or message.get("text") if not text and isinstance(message, dict): @@ -412,6 +465,14 @@ def extract_message(result: dict[str, Any]) -> str: def extract_location(result: dict[str, Any]) -> str: + """Extracts the location from a SARIF result. + + Args: + result: The SARIF result. + + Returns: + The extracted location. + """ locations: collections.abc.Iterable[dict[str, Any]] = result.get("locations") or [] for location in locations: phys = location.get("physicalLocation") or {} @@ -485,6 +546,14 @@ def extract_location(result: dict[str, Any]) -> str: def rule_lookup_map(run: dict[str, Any]) -> dict[str, dict[str, Any]]: + """Creates a map of rule IDs to rule objects. + + Args: + run: The SARIF run object. + + Returns: + A dictionary mapping rule IDs to rule objects. + """ rules: dict[str, dict[str, Any]] = {} tool = run.get("tool") or {} driver = tool.get("driver") or {} @@ -496,6 +565,14 @@ def rule_lookup_map(run: dict[str, Any]) -> dict[str, dict[str, Any]]: def extract_security_severity(result: dict[str, Any]) -> str | None: + """Extracts the security severity from a SARIF result. + + Args: + result: The SARIF result. + + Returns: + The security severity, or None if not found. + """ props = result.get("properties") or {} for key in ("security-severity", "problem.severity", "problemSeverity"): value = props.get(key) @@ -509,6 +586,15 @@ def collect_alerts( *, min_level: str, ) -> dict[str, list[Alert]]: + """Collects new and absent alerts from a SARIF report. + + Args: + sarif: The SARIF report. + min_level: The minimum severity level to include. + + Returns: + A dictionary of new and absent alerts. + """ buckets: dict[str, list[Alert]] = {"new": [], "absent": []} runs: collections.abc.Iterable[dict[str, Any]] = sarif.get("runs") or [] for run in runs: @@ -600,6 +686,18 @@ def build_comment( max_results: int, threshold: str, ) -> str: + """Builds a comment body for a pull request. 
+ + Args: + new_alerts: A list of new alerts. + fixed_alerts: A list of fixed alerts. + repo: The repository name. + max_results: The maximum number of results to include. + threshold: The severity threshold. + + Returns: + The formatted comment body. + """ lines: list[str] = [] def _highest_severity(alerts: collections.abc.Sequence[Alert]) -> str | None: @@ -640,6 +738,14 @@ def _highest_severity(alerts: collections.abc.Sequence[Alert]) -> str | None: def highest_severity_level_title(alerts: collections.abc.Sequence[Alert]) -> str | None: + """Finds the highest severity level in a list of alerts. + + Args: + alerts: A list of alerts. + + Returns: + The title of the highest severity level, or None if the list is empty. + """ if not alerts: return None best = max(alerts, key=lambda a: LEVEL_ORDER.get(a.level, 0)) @@ -653,6 +759,14 @@ def write_summary( max_results: int, threshold: str, ) -> None: + """Writes a summary of the alerts to the GitHub step summary. + + Args: + new_alerts: A list of new alerts. + fixed_alerts: A list of fixed alerts. + max_results: The maximum number of results to include. + threshold: The severity threshold. + """ summary_path = os.environ.get("GITHUB_STEP_SUMMARY") if not summary_path or (not new_alerts and not fixed_alerts): return @@ -717,6 +831,13 @@ def set_outputs( fixed_alerts: collections.abc.Sequence[Alert], comment_path: Path | None, ) -> None: + """Sets the GitHub action outputs. + + Args: + new_alerts: A list of new alerts. + fixed_alerts: A list of fixed alerts. + comment_path: The path to the comment file. + """ output_path = os.environ.get("GITHUB_OUTPUT") if not output_path: return @@ -941,6 +1062,14 @@ def _build_multi_section_comment( def main(argv: collections.abc.Sequence[str] | None = None) -> int: + """The main entry point of the script. + + Args: + argv: The command-line arguments. + + Returns: + The exit code. 
+ """ args = parse_args(argv) # set global debug flag global DEBUG diff --git a/scripts/codeql_reset_dismissed_alerts.py b/scripts/codeql_reset_dismissed_alerts.py index f5c5a8d7b..74af40d5f 100644 --- a/scripts/codeql_reset_dismissed_alerts.py +++ b/scripts/codeql_reset_dismissed_alerts.py @@ -99,6 +99,8 @@ def _paginate_alerts(owner: str, repo: str) -> Iterator[dict]: @dataclass class Alert: + """Represents a dismissed CodeQL alert.""" + number: int html_url: str rule_id: str @@ -123,6 +125,14 @@ def _to_alert(raw: dict) -> Alert: def reopen_alert(owner: str, repo: str, alert: Alert, *, dry_run: bool) -> None: + """Reopens a dismissed CodeQL alert. + + Args: + owner: The GitHub organization or user. + repo: The repository name. + alert: The alert to reopen. + dry_run: If True, print the action without executing it. + """ if dry_run: print(f"DRY RUN: would reopen alert #{alert.number} ({alert.rule_id})") return @@ -135,6 +145,14 @@ def reopen_alert(owner: str, repo: str, alert: Alert, *, dry_run: bool) -> None: def parse_args(argv: Optional[List[str]] = None) -> argparse.Namespace: + """Parses command-line arguments. + + Args: + argv: The command-line arguments to parse. + + Returns: + The parsed arguments. + """ parser = argparse.ArgumentParser(description=__doc__) parser.add_argument("--owner", required=True, help="GitHub organization or user") parser.add_argument("--repo", required=True, help="Repository name") @@ -147,6 +165,14 @@ def parse_args(argv: Optional[List[str]] = None) -> argparse.Namespace: def main(argv: Optional[List[str]] = None) -> int: + """The main entry point of the script. + + Args: + argv: The command-line arguments. + + Returns: + The exit code. 
+ """ args = parse_args(argv) try: alerts = [_to_alert(raw) for raw in _paginate_alerts(args.owner, args.repo)] diff --git a/scripts/create_coverage_symlinks.py b/scripts/create_coverage_symlinks.py index a0ab7297c..67b38e228 100644 --- a/scripts/create_coverage_symlinks.py +++ b/scripts/create_coverage_symlinks.py @@ -34,6 +34,14 @@ def should_link(path: pathlib.Path) -> bool: + """Checks if a file should be symlinked for coverage analysis. + + Args: + path: The path to the file. + + Returns: + True if the file should be symlinked, False otherwise. + """ if not path.is_file(): return False suffix = path.suffix @@ -49,6 +57,14 @@ def should_link(path: pathlib.Path) -> bool: def iter_source_files(build_root: pathlib.Path) -> Iterable[pathlib.Path]: + """Iterates over all source files in a directory. + + Args: + build_root: The root directory to search. + + Yields: + Paths to the source files. + """ for root, _dirs, files in os.walk(build_root): root_path = pathlib.Path(root) for filename in files: @@ -58,6 +74,12 @@ def iter_source_files(build_root: pathlib.Path) -> Iterable[pathlib.Path]: def create_symlinks(build_root: pathlib.Path, output_root: pathlib.Path) -> None: + """Creates symlinks for all source files in a directory. + + Args: + build_root: The root directory containing the source files. + output_root: The directory where the symlinks will be created. + """ if output_root.exists(): shutil.rmtree(output_root) output_root.mkdir(parents=True, exist_ok=True) @@ -79,6 +101,14 @@ def create_symlinks(build_root: pathlib.Path, output_root: pathlib.Path) -> None def parse_args(argv: Iterable[str]) -> argparse.Namespace: + """Parses command-line arguments. + + Args: + argv: The command-line arguments to parse. + + Returns: + The parsed arguments. 
+ """ parser = argparse.ArgumentParser(description="Create coverage symlink tree") parser.add_argument("--build-root", required=True) parser.add_argument("--output-root", required=True) @@ -86,6 +116,14 @@ def parse_args(argv: Iterable[str]) -> argparse.Namespace: def main(argv: Iterable[str]) -> int: + """The main entry point of the script. + + Args: + argv: The command-line arguments. + + Returns: + The exit code. + """ args = parse_args(argv) build_root = pathlib.Path(args.build_root).resolve() output_root = pathlib.Path(args.output_root).resolve() diff --git a/scripts/export_llvm_lcov.py b/scripts/export_llvm_lcov.py index 9815485bc..6538c4785 100644 --- a/scripts/export_llvm_lcov.py +++ b/scripts/export_llvm_lcov.py @@ -11,6 +11,11 @@ def build_parser() -> argparse.ArgumentParser: + """Builds the command-line argument parser. + + Returns: + The argument parser. + """ parser = argparse.ArgumentParser( description="Run llvm-cov export and write the LCOV data to a file." ) @@ -28,6 +33,14 @@ def build_parser() -> argparse.ArgumentParser: def main(argv: list[str] | None = None) -> int: + """The main entry point of the script. + + Args: + argv: The command-line arguments. + + Returns: + The exit code. + """ parser = build_parser() args = parser.parse_args(argv) diff --git a/scripts/sarif-alerts.py b/scripts/sarif-alerts.py index 151e1eda9..f421598c4 100644 --- a/scripts/sarif-alerts.py +++ b/scripts/sarif-alerts.py @@ -1,3 +1,4 @@ +"""A simple tool to print SARIF results from one or more SARIF files.""" import argparse import json from pathlib import Path @@ -26,6 +27,14 @@ def _process_sarif(path: Path) -> Iterable[str]: def main(argv=None) -> int: + """The main entry point of the script. + + Args: + argv: The command-line arguments. + + Returns: + The exit code. 
+ """ parser = argparse.ArgumentParser( description="Print SARIF results from one or more SARIF files" ) diff --git a/test/python/adder.py b/test/python/adder.py index a37b2c136..e5bc7717e 100644 --- a/test/python/adder.py +++ b/test/python/adder.py @@ -1,3 +1,4 @@ +"""This module defines a simple C++ function using cppyy.""" import cppyy cppyy.cppdef("""\ diff --git a/test/python/pyphlex.py b/test/python/pyphlex.py index 99a82acd2..2ab4bc8be 100644 --- a/test/python/pyphlex.py +++ b/test/python/pyphlex.py @@ -28,6 +28,12 @@ # `with` is a keyword in Python, so can not be the name of a method; fix this # by renaming it to `with_` for all phlex classes def fixwith(klass, name): + """A cppyy pythonization to rename the 'with' method to 'with_'. + + Args: + klass: The class to pythonize. + name: The name of the class. + """ try: klass.with_ = getattr(klass, "with") except AttributeError: diff --git a/test/python/registry.py b/test/python/registry.py index d267d3eac..c3559cac4 100644 --- a/test/python/registry.py +++ b/test/python/registry.py @@ -1,3 +1,4 @@ +"""This module provides a function to register Python algorithms with the Phlex framework.""" import cppyy import pyphlex # noqa: F401 diff --git a/test/python/test_phlex.py b/test/python/test_phlex.py index f9ad8e689..92f78697e 100644 --- a/test/python/test_phlex.py +++ b/test/python/test_phlex.py @@ -1,6 +1,12 @@ +"""This module contains tests for the pyphlex module.""" + + class TestPYPHLEX: + """Tests for the pyphlex module.""" + @classmethod def setup_class(cls): + """Set up the test class.""" import pyphlex # noqa: F401 __all__ = ["pyphlex"] # noqa: F841 # For CodeQL From 657e60e51c0b42233de6d3944057f5a2f5038ad9 Mon Sep 17 00:00:00 2001 From: "google-labs-jules[bot]" <161369871+google-labs-jules[bot]@users.noreply.github.com> Date: Wed, 3 Dec 2025 00:15:04 +0000 Subject: [PATCH 3/4] fix: Resolve mypy type-checking errors Resolves mypy errors in Python scripts by adding missing type annotations, handling 
potential `None` values, and correcting import statements. --- scripts/check_codeql_alerts.py | 8 ++++---- scripts/codeql_reset_dismissed_alerts.py | 4 ++-- scripts/normalize_coverage_xml.py | 15 ++++++++++----- test/python/registry.py | 4 +++- 4 files changed, 19 insertions(+), 12 deletions(-) diff --git a/scripts/check_codeql_alerts.py b/scripts/check_codeql_alerts.py index a1366e2c5..0df23570f 100644 --- a/scripts/check_codeql_alerts.py +++ b/scripts/check_codeql_alerts.py @@ -10,7 +10,7 @@ import urllib.parse import urllib.request from dataclasses import dataclass -from datetime import datetime +from datetime import datetime, timezone from pathlib import Path from typing import Any @@ -212,7 +212,7 @@ def _log(msg: str) -> None: try: _LOG_PATH.parent.mkdir(parents=True, exist_ok=True) # Use timezone-aware UTC timestamp to avoid deprecation warnings. - ts = datetime.now(datetime.timezone.utc).isoformat() + ts = datetime.now(timezone.utc).isoformat() with open(_LOG_PATH, "a", encoding="utf-8") as fh: fh.write(f"{ts} {msg}\n") except Exception: @@ -230,7 +230,7 @@ def _init_log(log_path: Path) -> None: _LOG_PATH = log_path try: _LOG_PATH.parent.mkdir(parents=True, exist_ok=True) - ts = datetime.now(datetime.timezone.utc).isoformat() + ts = datetime.now(timezone.utc).isoformat() with open(_LOG_PATH, "w", encoding="utf-8") as fh: fh.write(f"{ts} CodeQL alerts helper log (truncated for new run)\n") except Exception: @@ -331,7 +331,7 @@ def _to_alert_api(raw: dict) -> Alert: security_severity = str(inst_props.get(key)) break alert = Alert( - number=(int(raw.get("number")) if raw.get("number") is not None else None), + number=int(raw["number"]) if "number" in raw and raw["number"] is not None else None, html_url=raw.get("html_url"), rule_id=rule_id, level=(raw.get("severity") or "warning"), diff --git a/scripts/codeql_reset_dismissed_alerts.py b/scripts/codeql_reset_dismissed_alerts.py index 74af40d5f..9a37f8012 100644 --- a/scripts/codeql_reset_dismissed_alerts.py +++ 
b/scripts/codeql_reset_dismissed_alerts.py @@ -22,7 +22,7 @@ import urllib.parse import urllib.request from dataclasses import dataclass -from typing import Iterator, List, Optional +from typing import Any, Iterator, List, Optional API_ROOT = "https://api.github.com" API_VERSION = "2022-11-28" @@ -45,7 +45,7 @@ def _request( *, params: Optional[dict] = None, payload: Optional[dict] = None, -) -> dict: +) -> Any: url = urllib.parse.urljoin(API_ROOT, path) if params: query = urllib.parse.urlencode(params) diff --git a/scripts/normalize_coverage_xml.py b/scripts/normalize_coverage_xml.py index cb1053748..5ae230021 100644 --- a/scripts/normalize_coverage_xml.py +++ b/scripts/normalize_coverage_xml.py @@ -208,12 +208,17 @@ def _usable_prefix(prefix: Path | None) -> Path | None: absolute_candidate = (repo_root_resolved / relative).resolve() - cls.set("filename", relative.as_posix()) + if relative is not None: + cls.set("filename", relative.as_posix()) - candidate = repo_root / relative - candidate_resolved = absolute_candidate - if not candidate.exists() and not candidate_resolved.exists(): - missing.append(relative.as_posix()) + candidate = repo_root / relative + if absolute_candidate is not None: + candidate_resolved = absolute_candidate + else: + candidate_resolved = candidate.resolve() + + if not candidate.exists() and not candidate_resolved.exists(): + missing.append(relative.as_posix()) tree.write(report_path) return missing, external diff --git a/test/python/registry.py b/test/python/registry.py index c3559cac4..6e7c17b46 100644 --- a/test/python/registry.py +++ b/test/python/registry.py @@ -6,9 +6,11 @@ cpp = cppyy.gbl phlex = cpp.phlex.experimental +import types + cppyy.include("Python.h") -_registered_modules = {} +_registered_modules: dict[str, types.ModuleType] = {} def register(m, config): From 7a7be2b379d47b6c0eb210d01355ad01568e6547 Mon Sep 17 00:00:00 2001 From: Chris Green Date: Tue, 2 Dec 2025 18:30:15 -0600 Subject: [PATCH 4/4] Fix minor `ruff check` 
regression from `mypy` fixes --- test/python/registry.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/test/python/registry.py b/test/python/registry.py index 6e7c17b46..6352db06d 100644 --- a/test/python/registry.py +++ b/test/python/registry.py @@ -1,4 +1,6 @@ """This module provides a function to register Python algorithms with the Phlex framework.""" +import types + import cppyy import pyphlex # noqa: F401 @@ -6,8 +8,6 @@ cpp = cppyy.gbl phlex = cpp.phlex.experimental -import types - cppyy.include("Python.h") _registered_modules: dict[str, types.ModuleType] = {}