diff --git a/util/check_pr_rebase_status.py b/util/check_pr_rebase_status.py new file mode 100755 index 000000000..46e480246 --- /dev/null +++ b/util/check_pr_rebase_status.py @@ -0,0 +1,165 @@ +#!/usr/bin/env python3 +""" +Script to list open PRs that need a rebase but don't have the "needs rebase" label. +Uses the GitHub CLI (gh) tool. +""" + +import subprocess +import sys +import json +import time + +# Configuration +REPO_OWNER = 'OISF' +REPO_NAME = 'suricata-verify' +NEEDS_REBASE_LABEL = 'needs rebase' + +def run_gh_command(args): + """Run a gh CLI command and return the output.""" + try: + result = subprocess.run( + ['gh'] + args, + capture_output=True, + text=True, + check=True + ) + return result.stdout + except subprocess.CalledProcessError as e: + print(f"Error running gh command: {e}", file=sys.stderr) + print(f"stderr: {e.stderr}", file=sys.stderr) + sys.exit(1) + except FileNotFoundError: + print("Error: 'gh' CLI tool not found. Please install it first:", file=sys.stderr) + print(" https://cli.github.com/", file=sys.stderr) + sys.exit(1) + +def get_open_prs(): + """Fetch all open pull requests with mergeable status using gh CLI.""" + output = run_gh_command([ + 'pr', 'list', + '--repo', f'{REPO_OWNER}/{REPO_NAME}', + '--state', 'open', + '--json', 'number,title,url,author,labels,mergeable,mergeStateStatus', + '--limit', '1000' + ]) + + return json.loads(output) + +def needs_rebase(pr): + """Check if a PR needs a rebase by checking its mergeable status. + + Returns: + True: PR needs rebase (has conflicts) + False: PR does not need rebase + None: Status is unknown (GitHub still computing) + """ + # mergeable can be: MERGEABLE, CONFLICTING, UNKNOWN + # mergeStateStatus can be: DIRTY, UNSTABLE, BLOCKED, BEHIND, CLEAN, DRAFT, etc. + + mergeable = pr.get('mergeable', '').upper() + merge_state = pr.get('mergeStateStatus', '').upper() + + # If GitHub hasn't computed it yet, return None to indicate unknown status + if mergeable == 'UNKNOWN': + return None + + # Return True if there are actual conflicts + return mergeable == 'CONFLICTING' or merge_state == 'DIRTY' + +def has_needs_rebase_label(pr): + """Check if PR has the 'needs rebase' label.""" + labels = [label['name'].lower() for label in pr.get('labels', [])] + return NEEDS_REBASE_LABEL.lower() in labels + +def main(): + """Main function to find PRs that need rebase but don't have the label.""" + print(f"Fetching open PRs for {REPO_OWNER}/{REPO_NAME}...") + prs = get_open_prs() + print(f"Found {len(prs)} open PRs.\n") + + print("Checking rebase status for each PR...") + prs_needing_label = [] + prs_with_unknown_status = [] + + for i, pr in enumerate(prs, 1): + pr_number = pr['number'] + pr_title = pr['title'] + pr_url = pr['url'] + + print(f"[{i}/{len(prs)}] Checking PR #{pr_number}...", end=' ') + + has_label = has_needs_rebase_label(pr) + + if has_label: + print("already labeled") + continue + + rebase_status = needs_rebase(pr) + + if rebase_status is None: + print("status unknown (will retry)") + prs_with_unknown_status.append(pr) + elif rebase_status: + print("NEEDS REBASE!") + prs_needing_label.append({ + 'number': pr_number, + 'title': pr_title, + 'url': pr_url, + 'author': pr['author']['login'] + }) + else: + print("OK") + + # Retry PRs with unknown status after a short delay + if prs_with_unknown_status: + print(f"\n{len(prs_with_unknown_status)} PR(s) had unknown status. 
Waiting 3 seconds and retrying...") + time.sleep(3) + + # Fetch fresh data for unknown PRs + for pr in prs_with_unknown_status: + pr_number = pr['number'] + pr_title = pr['title'] + pr_url = pr['url'] + + print(f"Retrying PR #{pr_number}...", end=' ') + + # Fetch updated status + output = run_gh_command([ + 'pr', 'view', str(pr_number), + '--repo', f'{REPO_OWNER}/{REPO_NAME}', + '--json', 'mergeable,mergeStateStatus' + ]) + updated_pr = json.loads(output) + rebase_status = needs_rebase(updated_pr) + + if rebase_status is None: + print("still unknown (skipping)") + elif rebase_status: + print("NEEDS REBASE!") + prs_needing_label.append({ + 'number': pr_number, + 'title': pr_title, + 'url': pr_url, + 'author': pr['author']['login'] + }) + else: + print("OK") + + print("\n" + "="*80) + print("SUMMARY") + print("="*80) + + if prs_needing_label: + print(f"\nFound {len(prs_needing_label)} PR(s) that need rebase but don't have the '{NEEDS_REBASE_LABEL}' label:\n") + for pr in prs_needing_label: + print(f" PR #{pr['number']}: {pr['title']}") + print(f" Author: {pr['author']}") + print(f" URL: {pr['url']}") + print() + else: + print(f"\nAll PRs are properly labeled! No PRs need the '{NEEDS_REBASE_LABEL}' label.") + + return 0 if not prs_needing_label else 1 + +if __name__ == '__main__': + sys.exit(main()) diff --git a/util/check_redmine_backport_subtasks.py b/util/check_redmine_backport_subtasks.py new file mode 100644 index 000000000..c876c7f33 --- /dev/null +++ b/util/check_redmine_backport_subtasks.py @@ -0,0 +1,279 @@ +#!/usr/bin/env python3 +""" +Script to find open Redmine issues that have subtasks with "backport" in the title. +Displays the status of the main issue and its subtasks. +""" + +import requests +import sys +import os +import json +import argparse +from typing import Optional + +# Configuration +REDMINE_URL = 'https://redmine.openinfosecfoundation.org' +REDMINE_API_KEY = os.environ.get('REDMINE_API_KEY') +BACKPORT_KEYWORD = 'backport' + +def get_headers(): + """Get headers for Redmine API requests.""" + headers = { + 'Content-Type': 'application/json', + } + if REDMINE_API_KEY: + headers['X-Redmine-API-Key'] = REDMINE_API_KEY + return headers + +def get_open_backport_issues(limit: int = 100) -> list: + """Fetch all open issues with 'backport' in the title.""" + url = f'{REDMINE_URL}/issues.json' + all_issues = [] + offset = 0 + + while True: + params = { + 'status_id': 'open', + 'limit': limit, + 'offset': offset, + 'subject': '~backport', + } + + response = requests.get(url, headers=get_headers(), params=params) + + if response.status_code != 200: + print(f"Error fetching issues: {response.status_code}", file=sys.stderr) + print(response.text, file=sys.stderr) + sys.exit(1) + + data = response.json() + issues = data.get('issues', []) + + if not issues: + break + + # Filter for issues with backport in the subject + for issue in issues: + if BACKPORT_KEYWORD in issue.get('subject', '').lower(): + all_issues.append(issue) + + # Check if we've fetched all issues + total = data.get('total_count', 0) + if offset + limit >= total: + break + print("Fetching issues : %d / %d" % (offset + limit, total)) + + offset += limit + + return all_issues + +def get_last_suricata_pr(issue_id: int) -> Optional[str]: + """Fetch the last referenced Suricata PR URL from the issue journals.""" + url = f'{REDMINE_URL}/issues/{issue_id}.json?include=journals' + response = requests.get(url, headers=get_headers()) + if response.status_code != 200: + return None + issue = response.json().get('issue', {}) + 
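+    # Walk the journals in order, keeping the last matching URL in each
+    # journal's notes, so the most recently referenced suricata PR wins.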
journals = issue.get('journals', []) + pr_url = None + for journal in journals: + notes = journal.get('notes', '') + # Look for Suricata PR URLs + if notes is not None: + urls = [u for u in notes.split() if u.startswith('https://github.com/OISF/suricata/pull/')] + if urls: + pr_url = urls[-1] # last one in this journal + return pr_url + +def get_issue_details(issue_id: int) -> Optional[dict]: + """Fetch detailed information for a specific issue.""" + url = f'{REDMINE_URL}/issues/{issue_id}.json' + + response = requests.get(url, headers=get_headers()) + + if response.status_code != 200: + print(f"Error fetching issue #{issue_id}: {response.status_code}", file=sys.stderr) + return None + + return response.json().get('issue') + +def main(): + """Main function to find backport issues and group them by parent issue.""" + parser = argparse.ArgumentParser( + description='Find open Redmine issues with backport subtasks' + ) + parser.add_argument( + '-j', '--json', + metavar='FILE', + help='Write results as JSON to FILE for later processing' + ) + args = parser.parse_args() + + if not REDMINE_API_KEY: + print("Warning: REDMINE_API_KEY environment variable not set.", file=sys.stderr) + print("This may limit access to private issues.", file=sys.stderr) + print("Set REDMINE_API_KEY to use API authentication.\n", file=sys.stderr) + + print("Fetching open issues with 'backport' in the title from Redmine...") + + backport_issues = get_open_backport_issues() + + print(f"Found {len(backport_issues)} open backport issue(s).\n") + + # Group backport issues by parent + issues_by_parent = {} + standalone_issues = [] + + for issue in backport_issues: + parent_id = issue.get('parent', {}).get('id') + + if parent_id: + if parent_id not in issues_by_parent: + issues_by_parent[parent_id] = { + 'parent': None, + 'children': [] + } + issues_by_parent[parent_id]['children'].append(issue) + else: + standalone_issues.append(issue) + + # Fetch parent issue details for each group + for parent_id in issues_by_parent: + parent_issue = get_issue_details(parent_id) + if parent_issue: + issues_by_parent[parent_id]['parent'] = parent_issue + + # Write JSON output to file if requested + if args.json: + try: + with open(args.json, 'w') as f: + result = { + 'parent_issues': [], + 'standalone_issues': [] + } + + # Add parent issues and their children + for parent_id in sorted(issues_by_parent.keys()): + group = issues_by_parent[parent_id] + parent_issue = group['parent'] + children = group['children'] + + # TODO add a field being the last referenced suricata PR (in case of in review/resolved) and rewrite filter_backport_main_issues.py so that it does not need redmine access anymore + parent_entry = { + 'id': parent_id, + 'subject': parent_issue['subject'] if parent_issue else None, + 'status': parent_issue['status']['name'] if parent_issue else None, + 'url': f"{REDMINE_URL}/issues/{parent_id}", + 'last_suricata_pr': None, + 'backport_subtasks': [] + } + # Add last_suricata_pr for parent if status is in review/resolved + if parent_issue and parent_issue['status']['name'] in ('In Review', 'Resolved'): + parent_entry['last_suricata_pr'] = get_last_suricata_pr(parent_id) + for child in children: + child_entry = { + 'id': child['id'], + 'subject': child['subject'], + 'status': child['status']['name'], + 'url': f"{REDMINE_URL}/issues/{child['id']}", + 'last_suricata_pr': None + } + # Add last_suricata_pr for child if status is in review/resolved + if child['status']['name'] in ('In Review', 'Resolved'): + child_entry['last_suricata_pr'] 
= get_last_suricata_pr(child['id']) + parent_entry['backport_subtasks'].append(child_entry) + + result['parent_issues'].append(parent_entry) + + # Add standalone issues + for issue in standalone_issues: + standalone_entry = { + 'id': issue['id'], + 'subject': issue['subject'], + 'status': issue['status']['name'], + 'url': f"{REDMINE_URL}/issues/{issue['id']}", + 'last_suricata_pr': None + } + if issue['status']['name'] in ('In Review', 'Resolved'): + standalone_entry['last_suricata_pr'] = get_last_suricata_pr(issue['id']) + result['standalone_issues'].append(standalone_entry) + + json.dump(result, f, indent=2) + print(f"JSON output written to {args.json}\n") + except IOError as e: + print(f"Error writing JSON to {args.json}: {e}", file=sys.stderr) + return 1 + + print("="*80) + print("BACKPORT ISSUES GROUPED BY PARENT ISSUE") + print("="*80) + + if issues_by_parent: + print(f"\nFound {len(issues_by_parent)} parent issue(s) with backport subtask(s):\n") + + for parent_id in sorted(issues_by_parent.keys()): + group = issues_by_parent[parent_id] + parent_issue = group['parent'] + children = group['children'] + + if parent_issue: + parent_subject = parent_issue['subject'] + parent_status = parent_issue['status']['name'] + parent_url = f"{REDMINE_URL}/issues/{parent_id}" + + print(f"Parent Issue #{parent_id}: {parent_subject}") + print(f" Status: {parent_status}") + print(f" URL: {parent_url}") + print(f" Backport subtasks ({len(children)}):") + + for child in children: + child_id = child['id'] + child_subject = child['subject'] + child_status = child['status']['name'] + child_url = f"{REDMINE_URL}/issues/{child_id}" + + print(f" • #{child_id}: {child_subject}") + print(f" Status: {child_status}") + print(f" URL: {child_url}") + + print() + else: + print(f"Parent Issue #{parent_id} (details could not be fetched)") + print(f" Backport subtasks ({len(children)}):") + for child in children: + child_id = child['id'] + child_subject = child['subject'] + child_status = child['status']['name'] + child_url = f"{REDMINE_URL}/issues/{child_id}" + + print(f" • #{child_id}: {child_subject}") + print(f" Status: {child_status}") + print(f" URL: {child_url}") + print() + + if standalone_issues: + print("\n" + "="*80) + print("STANDALONE BACKPORT ISSUES (No parent issue)") + print("="*80 + "\n") + + print(f"Found {len(standalone_issues)} standalone backport issue(s):\n") + + for issue in standalone_issues: + issue_id = issue['id'] + issue_subject = issue['subject'] + issue_status = issue['status']['name'] + issue_url = f"{REDMINE_URL}/issues/{issue_id}" + + print(f"Backport Issue #{issue_id}: {issue_subject}") + print(f" Status: {issue_status}") + print(f" URL: {issue_url}") + print() + + total_found = len(issues_by_parent) + len(standalone_issues) + if not total_found: + print("\nNo open issues with 'backport' in the title found.") + + return 0 if total_found > 0 else 1 + +if __name__ == '__main__': + sys.exit(main()) diff --git a/util/check_requires_suricata_pr.py b/util/check_requires_suricata_pr.py new file mode 100644 index 000000000..bc0d2fcec --- /dev/null +++ b/util/check_requires_suricata_pr.py @@ -0,0 +1,164 @@ +#!/usr/bin/env python3 +""" +List open PRs in suricata-verify labeled "requires suricata pr" +that are NOT referenced by any open PR in the suricata repository. + +Uses the GitHub CLI (gh). 
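+
+Cross-references are recognized both as full URLs
+(https://github.com/OISF/suricata-verify/pull/NNNN) and as the shorthand
+OISF/suricata-verify#NNNN. Exits non-zero when an unreferenced PR is found.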
+""" + +import json +import re +import subprocess +import sys +from collections import defaultdict + +VERIFY_REPO_OWNER = "OISF" +VERIFY_REPO_NAME = "suricata-verify" +SURICATA_REPO_OWNER = "OISF" +SURICATA_REPO_NAME = "suricata" +REQUIRED_LABEL = "requires suricata pr" + + +def run_gh_command(args): + """Run a gh CLI command and return stdout.""" + try: + result = subprocess.run( + ["gh"] + args, + capture_output=True, + text=True, + check=True, + ) + return result.stdout + except subprocess.CalledProcessError as error: + print("Error running gh command:", file=sys.stderr) + print(error.stderr, file=sys.stderr) + sys.exit(1) + except FileNotFoundError: + print("Error: 'gh' CLI tool not found. Please install it:", file=sys.stderr) + print(" https://cli.github.com/", file=sys.stderr) + sys.exit(1) + + +def get_open_verify_prs_with_label(): + """Fetch open PRs from suricata-verify with the required label.""" + output = run_gh_command( + [ + "pr", + "list", + "--repo", + f"{VERIFY_REPO_OWNER}/{VERIFY_REPO_NAME}", + "--state", + "open", + "--label", + REQUIRED_LABEL, + "--json", + "number,title,url,author,labels,body", + "--limit", + "1000", + ] + ) + return json.loads(output) + + +def get_open_suricata_prs(): + """Fetch open PRs from suricata repo.""" + output = run_gh_command( + [ + "pr", + "list", + "--repo", + f"{SURICATA_REPO_OWNER}/{SURICATA_REPO_NAME}", + "--state", + "open", + "--json", + "number,title,url,author,body", + "--limit", + "1000", + ] + ) + return json.loads(output) + + +def build_verify_mentions_index(suricata_prs): + """Return mapping of verify PR numbers -> list of suricata PRs that mention them.""" + patterns = re.compile( + r"(?:github\.com/)?(?:OISF/)?suricata-verify/pull/(\d+)" + r"|(?:OISF/)?suricata-verify#(\d+)", + re.IGNORECASE, + ) + + mentions = defaultdict(list) + + for pr in suricata_prs: + text = f"{pr.get('title', '')}\n{pr.get('body', '') or ''}" + for match in patterns.finditer(text): + number = match.group(1) or match.group(2) + if number: + mentions[int(number)].append(pr) + + return mentions + + +def main(): + print( + f"Fetching open PRs labeled '{REQUIRED_LABEL}' from " + f"{VERIFY_REPO_OWNER}/{VERIFY_REPO_NAME}..." + ) + verify_prs = get_open_verify_prs_with_label() + + print( + f"Fetching open PRs from {SURICATA_REPO_OWNER}/{SURICATA_REPO_NAME}..." 
+ ) + suricata_prs = get_open_suricata_prs() + + if not verify_prs: + print("No open PRs with the required label.") + return 0 + + mentions_index = build_verify_mentions_index(suricata_prs) + + missing_references = [] + + print("\nChecking references...") + for pr in verify_prs: + pr_number = pr["number"] + pr_title = pr["title"] + pr_url = pr["url"] + + if pr_number in mentions_index: + print(f"PR #{pr_number}: referenced") + continue + + print(f"PR #{pr_number}: NOT referenced") + missing_references.append( + { + "number": pr_number, + "title": pr_title, + "url": pr_url, + "author": pr["author"]["login"], + } + ) + + print("\n" + "=" * 80) + print("SUMMARY") + print("=" * 80) + + if missing_references: + print( + f"\nFound {len(missing_references)} PR(s) with label " + f"'{REQUIRED_LABEL}' that are NOT referenced by any open " + f"PR in {SURICATA_REPO_OWNER}/{SURICATA_REPO_NAME}:\n" + ) + for pr in missing_references: + print(f" PR #{pr['number']}: {pr['title']}") + print(f" Author: {pr['author']}") + print(f" URL: {pr['url']}") + print() + return 1 + + print("\nAll labeled PRs are referenced by an open suricata PR.") + return 0 + + +if __name__ == "__main__": + sys.exit(main()) diff --git a/util/create_backport_pr.py b/util/create_backport_pr.py new file mode 100755 index 000000000..717192993 --- /dev/null +++ b/util/create_backport_pr.py @@ -0,0 +1,793 @@ +#!/usr/bin/env python3 +# +# Script to automate creation of backport PRs. +# Reads filtered Redmine issues, resolves PR commits, creates backport branch, +# and cherry-picks commits. + +import sys +import os +import argparse +import subprocess +import re +import json +import urllib.error +import urllib.parse +# urllib.request only for Redmine, not GitHub +import urllib.request + +REDMINE_URL = "https://redmine.openinfosecfoundation.org" +REDMINE_API_KEY = os.environ.get("REDMINE_API_KEY") +REMOTE_GIT = "catena" +GITHUB_USER = "catenacyber" + +class BackportError(Exception): + """Raised when backport workflow fails.""" + pass + +def run_command(cmd, dry_run=False, capture=False): + """Execute a shell command, optionally in dry-run mode.""" + if dry_run: + print(f"[DRY-RUN] {cmd}") + return "" + else: + if capture: + result = subprocess.run(cmd, shell=True, capture_output=True, text=True) + if result.returncode != 0: + raise BackportError(f"Command failed: {cmd}\n{result.stderr}") + return result.stdout.strip() + else: + print(f"$ {cmd}") + result = subprocess.run(cmd, shell=True) + if result.returncode != 0: + raise BackportError(f"Command failed with exit code {result.returncode}") + return "" + +def resolve_pr_to_commits(pr_url, parent_id, dry_run=False, from_staging=False): + """Extract PR number and return commits in PR not in main, plus SV PR URL if present. + + Strategy: + 1. List commits present in pr/XXXXX but not in main + 2. Return (commit_hashes, sv_pr_url) tuple where sv_pr_url is extracted from PR body if present + 3. 
Return ([], None) for staging PRs or if validation fails
+    """
+    match = re.search(r'/pull/(\d+)$', pr_url)
+    if not match:
+        raise BackportError(f"Invalid PR URL: {pr_url}")
+    pr_number = match.group(1)
+    pr_branch = f"pr/{pr_number}"
+
+    # Validate that the PR is linked to a Redmine ticket: extract ticket
+    # references from the PR body and comments via the gh CLI.
+    sv_pr_url = None
+    try:
+        pr_json = run_command(f'gh pr view {pr_number} --repo OISF/suricata --json body', dry_run=False, capture=True)
+        pr_data = json.loads(pr_json)
+        pr_body = pr_data.get("body", "")
+    except Exception as exc:
+        raise BackportError(f"Could not fetch GitHub PR #{pr_number} via gh: {exc}")
+
+    # Extract the SV_BRANCH URL from the PR body if present
+    sv_match = re.search(r'SV_BRANCH=(https://github\.com/OISF/suricata-verify/pull/\d+)', pr_body)
+    if sv_match:
+        sv_pr_url = sv_match.group(1)
+
+    # Accept ticket links like https://redmine.openinfosecfoundation.org/issues/XXXXX
+    redmine_ticket_pattern = re.compile(r'https://redmine\.openinfosecfoundation\.org/issues/(\d+)')
+    linked_tickets = set(redmine_ticket_pattern.findall(pr_body))
+    # Fetch PR comments for more links
+    try:
+        # TODO: fetch body and comments with a single 'gh pr view' call
+        comments_json = run_command(f'gh pr view {pr_number} --repo OISF/suricata --json comments', dry_run=False, capture=True)
+        comments_data = json.loads(comments_json)
+        for comment in comments_data.get("comments", []):
+            body = comment.get("body", "")
+            linked_tickets.update(redmine_ticket_pattern.findall(body))
+    except Exception:
+        pass
+    # Validate against the parent/child ticket
+    if parent_id not in linked_tickets:
+        if from_staging:
+            return [], None
+        # TODO: warn or abort when a directly requested PR does not
+        # reference the expected ticket
+
+    # TODO: check first whether this is a staging PR (title starting with
+    # "next/"), and otherwise treat it as a PR whose commit hashes are
+    # already in main.
+    # Get commit hashes and subjects from the PR branch that are not in main
+    cmd = f'git log origin/main..{pr_branch} --format=%H:%s --reverse'
+    pr_commit_hashes = run_command(cmd, dry_run=False, capture=True)
+
+    if not pr_commit_hashes:
+        # Fallback: check if the PR is a next/staging PR
+        try:
+            pr_json = run_command(f'gh pr view {pr_number} --repo OISF/suricata --json title,body', dry_run=False, capture=True)
+            pr_data = json.loads(pr_json)
+            pr_title = pr_data.get("title", "")
+            pr_body = pr_data.get("body", "")
+        except Exception as exc:
+            raise BackportError(f"Could not fetch GitHub PR #{pr_number} via gh: {exc}")
+        if pr_title.startswith("next/"):
+            # Look for sub-PR references such as "- #NNN" in the body (first comment)
+            pr_urls = re.findall(r'- #(\d+)', pr_body)
+            all_hashes = []
+            print(f"Trying to find sub PRs in staging: {pr_urls}")
+            for sub_pr in pr_urls:
+                sub_url = f"https://github.com/OISF/suricata/pull/{sub_pr}"
+                try:
+                    hashes, _ = resolve_pr_to_commits(sub_url, parent_id, dry_run=dry_run, from_staging=True)
+                    if len(hashes) > 0:
+                        print(f"Found some in: {sub_url}")
+                        all_hashes.extend(hashes)
+                except Exception as exc:
+                    print(f"Warning: Could not resolve sub-PR {sub_url}: {exc}", file=sys.stderr)
+            if all_hashes:
+                return all_hashes, sv_pr_url
+            else:
+                raise BackportError(f"No commits found for next/staging PR {pr_url} or its listed PRs {pr_body}.")
+        else:
+            # TODO: the 10-commit window is arbitrary; make it configurable
+            cmd = f'git log -10 {pr_branch} --format="%H:%s|%d"'
+            pr_commit_hashes = run_command(cmd, dry_run=False, capture=True)
+            print(f"Inspecting first hashes {pr_commit_hashes}")
+
+    # Each line is "<hash>:<subject>" (or "<hash>:<subject>|<refs>" from the
+    # fallback above); map each subject back to the matching commit on
+    # origin/main so cherry-picks use hashes present in the target tree.
+    commit_hashes = []
+    for line in pr_commit_hashes.split('\n'):
+        if not line.strip():
+            continue
+        if ':' not in line:
+            continue
+        commit_hash, subject = line.split(':', 1)
+        subject_prs = re.findall(r'\bpr/(\d+)\b', subject)
+        if subject_prs and pr_number not in subject_prs:
+            break
+        # TODO: track earlier whether we already hold main hashes instead of
+        # re-resolving them here
+        title = subject.split('|')[0]
+        print(f"Getting main commit for {title}")
+        # FIXME: --grep on the subject is ambiguous if several commits on
+        # main share the same title
+        cmd = f'git log -1 origin/main --grep="{title}" --format="%H"'
+        main_commit_hash = run_command(cmd, dry_run=False, capture=True)
+        if main_commit_hash.strip():
+            commit_hashes.append(main_commit_hash.strip())
+        else:
+            print(f"Warning: no main commit found for '{title}'", file=sys.stderr)
+    if not commit_hashes:
+        raise BackportError(f"No commits found in {pr_branch} that are not already in main")
+
+    return commit_hashes, sv_pr_url
+
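+# Usage sketch (hypothetical PR and ticket numbers): resolves a merged PR to
+# hashes that exist on origin/main, plus any SV_BRANCH link from its body:
+#   hashes, sv_url = resolve_pr_to_commits(
+#       "https://github.com/OISF/suricata/pull/12345", "67890")
+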
+# TODO: import filter_backport_main_issues as a module instead of spawning
+# a new process
+def get_filter_script_path():
+    """Locate the filter_backport_main_issues.py script."""
+    # Assume a layout where the suricata and suricata-verify checkouts are
+    # siblings: this file's directory, up one level, then suricata-verify/util
+    repo_root = os.path.dirname(os.path.abspath(__file__))  # Directory containing this script
+    parent_dir = os.path.dirname(repo_root)  # Parent directory holding both checkouts
+    filter_script = os.path.join(parent_dir, "suricata-verify", "util", "filter_backport_main_issues.py")
+
+    if not os.path.exists(filter_script):
+        raise BackportError(f"Filter script not found: {filter_script}")
+
+    return filter_script
+
+def run_filter_script(target, json_file, dry_run=False):
+    """Run filter_backport_main_issues.py and return a list of (parent_id, child_id, pr_url) tuples."""
+    filter_script = get_filter_script_path()
+    cmd = f"python3 {filter_script} {json_file} --target {target}"
+
+    output = run_command(cmd, dry_run=False, capture=True)
+
+    results = []
+    for line in output.strip().split('\n'):
+        if not line:
+            continue
+        parts = line.split()
+        if len(parts) == 3:
+            parent_id, child_id, pr_url = parts
+            results.append((parent_id, child_id, pr_url))
+
+    return results
+
+def create_backport_branch(target, issue_ids, dry_run=False):
+    """Create a new backport branch named backport{7|8}-{id1}-{id2}-v{N}.
+
+    Auto-increments the version number if previous versions already exist:
+    for example, if backport7-1234-5678-v1 exists, v2 is created instead.
+    """
+    base_name = f"backport{target}-{'-'.join(issue_ids)}"
+
+    # Find the next available version number
+    version = 1
+    while True:
+        branch_name = f"{base_name}-v{version}"
+        # Check whether the branch already exists locally
+        check_cmd = f"git rev-parse --verify {branch_name}"
+        result = subprocess.run(check_cmd, shell=True, capture_output=True, text=True)
+
+        if result.returncode != 0:
+            # Branch doesn't exist; use this version
+            break
+
+        version += 1
+
+    cmd = f"git checkout -b {branch_name}"
+    run_command(cmd, dry_run=dry_run)
+    return branch_name
+
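+# Example (hypothetical): if backport7-1234-5678-v1 and -v2 already exist
+# locally, create_backport_branch("7", ["1234", "5678"]) creates and checks
+# out backport7-1234-5678-v3.
+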
+def cherry_pick_commit(commit_hash, dry_run=False):
+    if dry_run:
+        print(f"[DRY-RUN] git cherry-pick -x {commit_hash}")
+        return "clean"
+
+    print(f"$ git cherry-pick -x {commit_hash}")
+    result = subprocess.run(f"git cherry-pick -x {commit_hash}", shell=True)
+    if result.returncode == 0:
+        return "clean"
+    # TODO: handle empty cherry-picks specifically (the commit was already
+    # merged but the ticket was not closed)
+
+    head_check = subprocess.run("git rev-parse --verify CHERRY_PICK_HEAD", shell=True, capture_output=True, text=True)
+    if head_check.returncode == 0:
+        print("Cherry-pick conflict detected. Resolve conflicts, then stage changes.")
+        input("Press Enter to continue with 'git cherry-pick --continue'...")
+        continue_result = subprocess.run("git cherry-pick --continue", shell=True)
+        if continue_result.returncode != 0:
+            raise BackportError("Cherry-pick continue failed. Resolve conflicts and run 'git cherry-pick --continue' manually.")
+        return "unclean"
+
+    raise BackportError("Cherry-pick failed. Run 'git status' for details.")
+
+def get_redmine_headers():
+    headers = {
+        "Content-Type": "application/json",
+    }
+    if REDMINE_API_KEY:
+        headers["X-Redmine-API-Key"] = REDMINE_API_KEY
+    return headers
+
+def get_redmine_status_id(name):
+    url = f"{REDMINE_URL}/issue_statuses.json"
+    request = urllib.request.Request(url, headers=get_redmine_headers())
+    try:
+        with urllib.request.urlopen(request, timeout=20) as response:
+            payload = response.read().decode("utf-8")
+            statuses = json.loads(payload).get("issue_statuses", [])
+    except (urllib.error.URLError, urllib.error.HTTPError, json.JSONDecodeError) as exc:
+        raise BackportError(f"Failed to fetch Redmine statuses: {exc}")
+    for status in statuses:
+        if status.get("name") == name:
+            return status.get("id")
+    raise BackportError(f"Redmine status not found: {name}")
+
+def update_redmine_issue(issue_id, status_id, note, dry_run=False):
+    url = f"{REDMINE_URL}/issues/{issue_id}.json"
+    payload = json.dumps({"issue": {"status_id": status_id, "notes": note}}).encode("utf-8")
+    if dry_run:
+        print(f"[DRY-RUN] PUT {url} status_id={status_id} note={note}")
+        return
+    # TODO: log the request and response to ease debugging
+    request = urllib.request.Request(url, data=payload, headers=get_redmine_headers(), method="PUT")
+    try:
+        with urllib.request.urlopen(request, timeout=20) as response:
+            if response.status not in (200, 204):
+                raise BackportError(f"Redmine update failed for issue #{issue_id}: {response.status}")
+    except (urllib.error.URLError, urllib.error.HTTPError) as exc:
+        raise BackportError(f"Redmine update failed for issue #{issue_id}: {exc}")
+
+def get_commit_subject(commit_hash, dry_run=False):
+    return run_command(f"git log -n 1 --format=%s {commit_hash}", dry_run=False, capture=True)
+
+def build_pr_title(target, issue_ids):
+    # TODO: derive the version suffix from the branch name instead of
+    # hardcoding v1
+    return f"Backport{target} {' '.join(issue_ids)} v1"
+
+def build_pr_body(child_ids, pr_entries, sv_pr_url=None):
+    lines = []
+    lines.append("Link to ticket: https://redmine.openinfosecfoundation.org/issues/")
+    for child_id in child_ids:
+        lines.append(f"https://redmine.openinfosecfoundation.org/issues/{child_id}")
+    lines.append("")
+    lines.append("Describe changes:")
+    for pr_url, status, nb_hashes in pr_entries:
+        plural = "s" if nb_hashes > 1 else ""
+        lines.append(f"- backport of {pr_url} {status} cherry-pick{plural}")
+    if sv_pr_url:
+        lines.append("")
+        lines.append(f"SV_BRANCH={sv_pr_url}")
+    return "\n".join(lines)
+
+def create_github_pr(title, body, base_branch, head_branch, dry_run=False, labels=None):
+    print("[DRY-RUN] gh pr create --base {} --head {} --title