From c9adc0d8c395732df244f1ec7a46223d83f8279d Mon Sep 17 00:00:00 2001 From: Ryan Krattiger Date: Mon, 30 Jun 2025 15:24:50 -0500 Subject: [PATCH 1/3] Quick fix to spackbot to enable usage on both core and packages In lieu of a more thorough update to spackbot a quick fix to simply enable the usage of spackbot from both of the repos maintained by the spack organization and not an update to make spackbot configurable to support workflows on other forks that wish to utilize the spackbot tools. The configurations for the two repos are hard-coded, similarly to before, for the "spack" and "spack-packages" projects in the PROJECTS variable. Labeling and package maintainer pinging have been dropped. These are now handled by GitHub actions that run on pull request automatically as part of the "triage" workflow. ref. https://github.com/spack/spack-packages/pull/507 Style is handled by configuring the tool to run for each project. This is hardcoded based on the project. For "spack" project the tool is the develop version of `spack style --fix` and for the "spack-packages" project the tool is simply `black`. 
--- spackbot/handlers/__init__.py | 3 - spackbot/handlers/gitlab.py | 3 +- spackbot/handlers/labels.py | 185 ---------------------- spackbot/handlers/mirrors.py | 3 +- spackbot/handlers/reviewers.py | 281 --------------------------------- spackbot/helpers.py | 37 +++-- spackbot/routes.py | 47 +++--- spackbot/workers.py | 59 +++++-- 8 files changed, 93 insertions(+), 525 deletions(-) delete mode 100644 spackbot/handlers/labels.py delete mode 100644 spackbot/handlers/reviewers.py diff --git a/spackbot/handlers/__init__.py b/spackbot/handlers/__init__.py index 26fa1e7..e117f2e 100644 --- a/spackbot/handlers/__init__.py +++ b/spackbot/handlers/__init__.py @@ -3,8 +3,5 @@ run_pipeline_rebuild_all, close_pr_gitlab_branch, ) -from .labels import add_labels # noqa -from .reviewers import add_reviewers, add_issue_maintainers # noqa -from .reviewers import add_reviewers # noqa from .style import style_comment, fix_style # noqa from .mirrors import close_pr_mirror # noqa diff --git a/spackbot/handlers/gitlab.py b/spackbot/handlers/gitlab.py index fe5f17b..9e5fd9a 100644 --- a/spackbot/handlers/gitlab.py +++ b/spackbot/handlers/gitlab.py @@ -65,7 +65,8 @@ async def close_pr_gitlab_branch(event, gh): pr_branch = payload["pull_request"]["head"]["ref"] pr_branch_name = f"pr{pr_number}_{pr_branch}" - url = helpers.gitlab_spack_project_url + event_project = payload["repository"]["name"] + url = helpers.PROJECTS[event_project].gitlab_project_url url = f"{url}/repository/branches/{pr_branch_name}" GITLAB_TOKEN = os.environ.get("GITLAB_TOKEN") diff --git a/spackbot/handlers/labels.py b/spackbot/handlers/labels.py deleted file mode 100644 index 2b29ae7..0000000 --- a/spackbot/handlers/labels.py +++ /dev/null @@ -1,185 +0,0 @@ -# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other -# Spack Project Developers. See the top-level COPYRIGHT file for details. 
-# -# SPDX-License-Identifier: (Apache-2.0 OR MIT) - -import re - -import spackbot.helpers as helpers - -logger = helpers.get_logger(__name__) - - -#: ``label_patterns`` maps labels to patterns that tell us to apply the labels. -#: -#: Entries in the ``dict`` are of the form: -#: -#: ```python -#: { -#: "label": { -#: "attr1": [r"regex1.1", r"regex1.2"], -#: "attr2": [r"regex2.1", r"regex2.2", r"regex2.3"], -#: "attr3": r"regex3.1", -#: ... -#: }, -#: ... -#: } -#: ``` -#: -#: ``attr1``, ``attr2``, etc. are attributes on files in the PR (e.g., ``status``, -#: ``filename``, etc). If all attrs for a label have at least one regex match, -#: then that label will be added to the PR. -label_patterns = { - # - # Package types - # - "intel": {"package": r"intel"}, - "python": {"package": [r"^python$", r"^py_"]}, - "R": {"package": [r"^r$", r"^r_"]}, - # - # Package status - # - "new-package": { - "filename": r"^var/spack/repos/spack_repo/builtin/packages/[^/]+/package.py$", - "status": r"^added$", - }, - "update-package": { - "filename": r"^var/spack/repos/spack_repo/builtin/packages/[^/]+/package.py$", - "status": [r"^modified$", r"^renamed$"], - }, - # - # Variables - # - "maintainers": {"patch": r"[+-] +maintainers +="}, - # - # Directives - # - "new-version": {"patch": r"\+ +version\("}, - "conflicts": {"patch": r"\+ +conflicts\("}, - "dependencies": {"patch": r"\+ +depends_on\("}, - "extends": {"patch": r"\+ +extends\("}, - "virtual-dependencies": {"patch": r"\+ +provides\("}, - "patch": {"patch": r"\+ +patch\("}, - "new-variant": {"patch": r"\+ +variant\("}, - "resources": {"patch": r"\+ +resource\("}, - # - # Functions - # - "external-packages": {"patch": r"[+-] +def determine_spec_details\("}, - "libraries": {"patch": r"[+-] +def libs\("}, - "headers": {"patch": r"[+-] +def headers\("}, - "stand-alone-tests": {"patch": r"[+-] +def test[_]?.*\("}, - # - # Core spack - # - "core": {"filename": r"^(?!var)"}, - "architecture": { - "filename": 
r"^lib/spack/spack/(architecture|operating_systems|platforms)" - }, - "binary-packages": {"filename": r"^lib/spack/spack/binary_distribution"}, - "build-environment": {"filename": r"^lib/spack/spack/build_environment"}, - "build-systems": {"filename": r"^lib/spack/spack/build_systems"}, - "new-command": { - "filename": r"^lib/spack/spack/cmd/[^/]+.py$", - "status": r"^added$", - }, - "commands": { - "filename": [ - r"^lib/spack/spack/cmd/[^/]+.py$", - r"^lib/spack/spack/test/cmd/[^/]+.py$", - r"^lib/spack/spack/test/(cmd_extension|commands).py$", - ], - }, - "compilers": {"filename": r"^lib/spack/spack/compiler"}, - "directives": {"filename": r"^lib/spack/spack/directives"}, - "environments": {"filename": r"^lib/spack/spack/environment"}, - "fetching": {"filename": r"^lib/spack/spack/(fetch|url|util/url|util/web)"}, - "locking": {"filename": r"^lib/spack/(spack|llnl)/util/lock"}, - "modules": {"filename": r"^lib/spack/spack/modules"}, - "stage": {"filename": r"^lib/spack/spack/stage"}, - "tests": {"filename": r"^lib/spack/spack/test"}, - "utilities": {"filename": [r"^lib/spack/spack/util", r"^lib/spack/llnl"]}, - "versions": {"filename": r"^lib/spack/spack/version"}, - # - # Documentation - # - "documentation": {"filename": r"^lib/spack/docs"}, - # - # GitHub - # - "actions": {"filename": r"^\.github/actions"}, - "workflow": {"filename": r"^\.github/workflows"}, - "git": {"filename": r"^\.gitignore"}, - "flake8": {"filename": r"^\.flake8"}, - "licenses": {"filename": r"^LICENSE"}, - "gitlab": {"filename": r"^share/spack/gitlab"}, - # - # Other - # - "defaults": {"filename": r"^etc/spack/defaults"}, - "vendored-dependencies": {"filename": r"^lib/spack/external"}, - "sbang": {"filename": r"sbang"}, - "docker": {"filename": [r"[Dd]ockerfile$", r"^share/spack/docker"]}, - "shell-support": {"filename": r"^share/spack/.*\.(sh|csh|fish)$"}, -} - - -# compile all the regexes above, and ensure that all pattern dict values are lists -for label, pattern_dict in 
label_patterns.items(): - for attr in pattern_dict.keys(): - patterns = pattern_dict[attr] - if not isinstance(patterns, list): - patterns = [patterns] - pattern_dict[attr] = [re.compile(s) for s in patterns] - - -async def add_labels(event, gh): - """ - Add labels to a pull request - """ - pull_request = event.data["pull_request"] - number = event.data["number"] - logger.info(f"Labeling PR #{number}...") - - # Iterate over modified files and create a list of labels - # https://developer.github.com/v3/pulls/#list-pull-requests-files - labels = set() - async for file in gh.getiter(pull_request["url"] + "/files"): - filename = file["filename"] - status = file["status"] - logger.info(f"Filename: {filename}") - logger.info(f"Status: {status}") - - # Add our own "package" attribute to the file, if it's a package - match = re.match( - r"var/spack/repos/spack_repo/builtin/packages/([^/]+)/package.py$", filename - ) - file["package"] = match.group(1) if match else "" - - # If the file's attributes match any patterns in label_patterns, add - # the corresponding labels. - for label, pattern_dict in label_patterns.items(): - attr_matches = [] - # Pattern matches for for each attribute are or'd together - for attr, patterns in pattern_dict.items(): - # 'patch' is an example of an attribute that is not required to - # appear in response when listing pull request files. See here: - # - # https://docs.github.com/en/rest/pulls/pulls#list-pull-requests-files - # - # If we don't get some attribute in the response, no labels that - # depend on finding a match in that attribute should be added. 
- attr_matches.append( - any(p.search(file[attr]) for p in patterns) - if attr in file - else False - ) - # If all attributes have at least one pattern match, we add the label - if all(attr_matches): - labels.add(label) - - logger.info(f"Adding the following labels: {labels}") - - # https://developer.github.com/v3/issues/labels/#add-labels-to-an-issue - if labels: - await gh.post(pull_request["issue_url"] + "/labels", data=list(labels)) diff --git a/spackbot/handlers/mirrors.py b/spackbot/handlers/mirrors.py index 40b77fb..319cac7 100644 --- a/spackbot/handlers/mirrors.py +++ b/spackbot/handlers/mirrors.py @@ -50,7 +50,8 @@ async def close_pr_mirror(event, gh): "pr_branch": pr_branch, } - if is_merged and base_branch == pr_expected_base: + # PR Graduation Mirror is disabled + if False and is_merged and base_branch == pr_expected_base: logger.info( f"PR {pr_number}/{pr_branch} merged to develop, graduating binaries" ) diff --git a/spackbot/handlers/reviewers.py b/spackbot/handlers/reviewers.py deleted file mode 100644 index 5596697..0000000 --- a/spackbot/handlers/reviewers.py +++ /dev/null @@ -1,281 +0,0 @@ -# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other -# Spack Project Developers. See the top-level COPYRIGHT file for details. -# -# SPDX-License-Identifier: (Apache-2.0 OR MIT) - -import os -import re - -import sh -from sh.contrib import git -import spackbot.helpers as helpers -import spackbot.comments as comments -from gidgethub import BadRequest - -logger = helpers.get_logger(__name__) - - -async def parse_maintainers_from_patch(gh, pull_request): - """ - Get any new or removed maintainers from the patch data in the PR. - - We parse this from the patch because running the spack from the PR as this - bot is unsafe; the bot is privileged and we do not trust code from PRs. 
- - """ - maintainers = {} - async for file in gh.getiter(pull_request["url"] + "/files"): - filename = file["filename"] - if not filename.endswith("package.py"): - continue - - pkg = re.search(r"/([^/]+)/package.py", filename).group(1) - - code = file["patch"] - arrays = re.findall(r"maintainers(?:\(|\s*=\s*\[)[^\]\)]*(?:\)|\])", code) - for array in arrays: - file_maintainers = re.findall("['\"][^'\"]*['\"]", array) - for m in file_maintainers: - maintainers.setdefault(pkg, set()).add(m.strip("'\"")) - - return maintainers - - -async def find_maintainers(gh, packages, repository, pull_request, number): - """ - Return an array of packages with maintainers, an array of packages - without maintainers, and a set of maintainers. - - Ignore the author of the PR, as they don't need to review their own PR. - """ - author = pull_request["user"]["login"] - - # lists of packages - with_maintainers = [] - without_maintainers = [] - - # parse any added/removed maintainers from the PR. Do NOT run spack from the PR - patch_maintainers = await parse_maintainers_from_patch(gh, pull_request) - logger.info(f"Maintainers from patch: {patch_maintainers}") - - all_maintainers = set() - with helpers.temp_dir() as cwd: - # Clone spack develop (shallow clone for speed) - # WARNING: We CANNOT run spack from the PR, as it is untrusted code. - # WARNING: If we run that, an attacker could run anything as this bot. 
- git("clone", "--depth", "1", helpers.spack_develop_url) - - # Get spack executable - spack = sh.Command(f"{cwd}/spack/bin/spack") - - for package in packages: - logger.info(f"Package: {package}") - - # Query maintainers from develop - maintainers = spack("maintainers", package, _ok_code=(0, 1)).split() - maintainers = set(maintainers) - - # add in maintainers from the PR patch - maintainers |= patch_maintainers.get(package, set()) - - logger.info("Maintainers: %s" % ", ".join(sorted(maintainers))) - - if not maintainers: - without_maintainers.append(package) - continue - - # No need to ask the author to review their own PR - if author in maintainers: - maintainers.remove(author) - - if maintainers: - with_maintainers.append(package) - all_maintainers |= maintainers - - return with_maintainers, without_maintainers, all_maintainers - - -async def add_issue_maintainers(event, gh, package_list): - """ - Assign maintainers of packages based on issue title. - - **Important** this handler is currently not in use, but the logic - is kept here to preserve for the future. We can edit the issue template - to explicitly name a list of one or more packages, and then use that - instead of parsing the title, as we do here. 
- """ - # Add extra space to start and end of title so we catch matches at ends - title = " " + event.data["issue"]["title"].lower() + " " - - # Replace special characters with spaces - title = re.sub(r"[!#%^*(){}:_&$+@\/\[\]]+", " ", title) - - # Does the title have a known package (must have space before and after) - package_regex = "( %s )" % " | ".join(package_list) - packages = re.findall(package_regex, title) - - # If we match a package in the title, look for maintainers to ping - if packages: - # Remove extra spacing that helped search - packages = [x.strip() for x in packages] - - # Look for maintainers of the package - messages = [] - with helpers.temp_dir() as cwd: - git("clone", "--depth", "1", helpers.spack_develop_url) - - # Add `spack` to PATH - os.environ["PATH"] = f"{cwd}/spack/bin:" + os.environ["PATH"] - from sh import spack - - for package in packages: - # Query maintainers from develop - found_maintainers = spack( - "maintainers", package, _ok_code=(0, 1) - ).split() - if found_maintainers: - found_maintainers = " ".join( - ["@%s," % m for m in found_maintainers] - ) - messages.append( - "- Hey %s, it looks like you might know about the %s package. Can you help with this issue?" - % (found_maintainers, package) - ) - - # If we have maintainers, ping them for help in the issue - if messages: - comment = comments.maintainer_request.format( - maintainers="\n".join(messages) - ) - await gh.post( - event.data["issue"]["comments_url"], {}, data={"body": comment} - ) - - -async def add_reviewers(event, gh): - """ - Add a comment on a PR to ping maintainers to review the PR. - - If a package does not have any maintainers yet, request them. 
- """ - # If it's sent from a comment, the PR needs to be retrieved - if "pull_request" in event.data: - pull_request = event.data["pull_request"] - number = event.data["number"] - else: - pr_url = event.data["issue"]["pull_request"]["url"] - pull_request = await gh.getitem(pr_url) - number = pull_request["number"] - - repository = event.data["repository"] - - logger.info(f"Looking for reviewers for PR #{number}...") - - packages = await helpers.changed_packages(gh, pull_request) - - # Don't ask maintainers for review if hundreds of packages are modified, - # it's probably just a license or Spack API change, not a package change. - if len(packages) > 100: - return - - maintained_pkgs, unmaintained_pkgs, maintainers = await find_maintainers( - gh, packages, repository, pull_request, number - ) - - # Ask people to maintain packages that don't have maintainers. - if unmaintained_pkgs: - # Ask for maintainers - # https://docs.github.com/en/rest/reference/issues#create-an-issue-comment - unmaintained_pkgs = sorted(unmaintained_pkgs) - comment_body = comments.no_maintainers_comment.format( - author=pull_request["user"]["login"], - packages_without_maintainers="\n* ".join(unmaintained_pkgs), - first_package_without_maintainer=unmaintained_pkgs[0], - ) - await gh.post(pull_request["comments_url"], {}, data={"body": comment_body}) - - # for packages that *do* have maintainers listed - if maintainers: - # See which maintainers have permission to be requested for review - # Requires at least "read" permission. - reviewers = [] - non_reviewers = [] - for user in maintainers: - logger.info(f"User: {user}") - - # https://api.github.com/repos/spack/spack/collaborators/{user} - # will return 404 if the user is not a collaborator, BUT - # https://api.github.com/repos/spack/spack/collaborators/{user}/permission - # will show read for pretty much anyone for public repos. So we have to - # check the first URL first. 
- collaborators_url = repository["collaborators_url"] - if not await helpers.found( - gh.getitem(collaborators_url, {"collaborator": user}) - ): - logger.info(f"Not found: {user}") - non_reviewers.append(user) - continue - - # only check permission once we know they're a collaborator - result = await gh.getitem( - collaborators_url + "/permission", - {"collaborator": user}, - ) - level = result["permission"] - logger.info(f"Permission level: {level}") - reviewers.append(user) - - # If they have permission, add them - # https://docs.github.com/en/rest/reference/pulls#request-reviewers-for-a-pull-request - if reviewers: - logger.info(f"Requesting review from: {reviewers}") - - # There is a limit of 15 reviewers, so take the first 15 - await gh.post( - pull_request["url"] + "/requested_reviewers", - {}, - data={"reviewers": reviewers[:15]}, - ) - - # If not, try to make them collaborators and comment - if non_reviewers: - # If the repository has a team called "maintainers", we'll try to - # add the non-reviewers to it. That team determines what - # permissions the maintainers get on the repo. - teams_url = repository["teams_url"] - members_url = None - async for team in gh.getiter(teams_url): - if team["name"] == "maintainers": - # This URL will auto-invite the user if possible. It's not - # the same as the members_url in the teams_url response, - # and it seems like we have to construct it manually. 
- members_url = team["html_url"].replace( - "/github.com/", "/api.github.com/" - ) - members_url += "/memberships{/member}" - logger.info(f"made members_url: {members_url}") - break - - if not members_url: - logger.info("No 'maintainers' team; not adding collaborators") - else: - logger.info(f"Adding collaborators: {non_reviewers}") - for user in non_reviewers: - try: - await gh.put( - members_url, {"member": user}, data={"role": "member"} - ) - except BadRequest as e: - if e.status_code == 404: - logger.warning( - f"Skipping adding member {user}, likely already added: {e}" - ) - else: - raise e - - # https://docs.github.com/en/rest/reference/issues#create-an-issue-comment - comment_body = comments.non_reviewers_comment.format( - packages_with_maintainers="\n* ".join(sorted(maintained_pkgs)), - non_reviewers=" @".join(sorted(non_reviewers)), - ) - await gh.post(pull_request["comments_url"], {}, data={"body": comment_body}) diff --git a/spackbot/helpers.py b/spackbot/helpers.py index a5c95ae..b28bd75 100644 --- a/spackbot/helpers.py +++ b/spackbot/helpers.py @@ -16,6 +16,7 @@ from datetime import datetime from io import StringIO from sh import ErrorReturnCode +from typing import Dict from urllib.request import HTTPHandler, Request, build_opener from urllib.parse import urlparse @@ -23,16 +24,34 @@ """Shared function helpers that can be used across routes" """ -spack_develop_url = "https://github.com/spack/spack" -spack_gitlab_url = "https://gitlab.spack.io" -spack_upstream = "git@github.com:spack/spack" -# Spack has project ID 2 -gitlab_spack_project_url = os.environ.get( - "GITLAB_SPACK_PROJECT_URL", "https://gitlab.spack.io/api/v4/projects/2" -) +class SpackProject: + gitlab_url: str + gitlab_project_url: str + upstream_url: str + develop_url: str + + def __init__(self, project: str, default_gitlab_project_id: int): + self.gitlab_url = os.environ.get("GITLAB_INSTANCE", "https://gitlab.spack.io") + project_clean = project.replace("-", "_").upper() + 
self.gitlab_project_url = os.environ.get( + f"GITLAB_{project_clean}_PROJECT_URL", + f"{self.gitlab_url}/api/v4/projects/{default_gitlab_project_id}", + ) + self.upstream_url = f"git@github.com:spack/{project}" + self.develop_url = f"https://github.com/spack/{project}" + + +def init_spack_projects() -> Dict[str, SpackProject]: + return { + "spack": SpackProject("spack", 2), + "spack-packages": SpackProject("spack-packages", 57), + } + + +PROJECTS: Dict[str, SpackProject] = init_spack_projects() -package_path = r"^var/spack/repos/spack_repo/builtin/packages/(\w[\w-]*)/package.py$" +package_path = r"^repos/spack_repo/builtin/packages/(\w[\w-]*)/package.py$" # Bot name can be modified in the environment botname = os.environ.get("SPACKBOT_NAME", "@spackbot") @@ -61,7 +80,7 @@ def get_logger(name): global __spackbot_log_level if not __spackbot_log_level: - __spackbot_log_level = os.environ.get("SPACKBOT_LOG_LEVEL", "INFO").upper() + __spackbot_log_level = os.environ.get("SPACKBOT_LOG_LEVEL", "DEBUG").upper() if __spackbot_log_level not in __supported_log_levels: # Logging not yet configured, so just print this warning diff --git a/spackbot/routes.py b/spackbot/routes.py index fe7d335..0731030 100644 --- a/spackbot/routes.py +++ b/spackbot/routes.py @@ -61,14 +61,6 @@ async def add_style_comments(event, gh, *args, session, **kwargs): await handlers.style_comment(event, gh) -@router.register("pull_request", action="opened") -async def on_pull_request(event, gh, *args, session, **kwargs): - """ - Respond to the pull request being opened - """ - await handlers.add_reviewers(event, gh) - - @router.register("issue_comment", action="created") async def add_comments(event, gh, *args, session, **kwargs): """ @@ -80,6 +72,7 @@ async def add_comments(event, gh, *args, session, **kwargs): # Respond with appropriate messages comment = event.data["comment"]["body"] + event_project = event.data["repository"]["name"] # @spackbot hello message = None @@ -101,41 +94,39 @@ async def 
add_comments(event, gh, *args, session, **kwargs): logger.debug("Responding to request for help commands.") message = comments.commands_message - # @spackbot maintainers or @spackbot request review - elif re.search( - f"{helpers.botname} (maintainers|request review)", comment, re.IGNORECASE - ): - logger.debug("Responding to request to assign maintainers for review.") - await handlers.add_reviewers(event, gh) - # @spackbot run pipeline | @spackbot re-run pipeline elif re.search(f"{helpers.botname} (re-?)?run pipeline", comment, re.IGNORECASE): - logger.info("Responding to request to re-run pipeline...") - await handlers.run_pipeline(event, gh, **kwargs) + if event_project == "spack-packages": + logger.info("Responding to request to re-run pipeline...") + await handlers.run_pipeline(event, gh, **kwargs) + else: + message = ( + f'Ignoring request, "{event_project}" does not have pipelines enabled.' + ) # @spackbot rebuild everything elif re.search(f"{helpers.botname} rebuild everything", comment, re.IGNORECASE): - logger.info("Responding to request to rebuild everthing...") - await handlers.run_pipeline_rebuild_all(event, gh, **kwargs) + if event_project == "spack-packages": + logger.info("Responding to request to rebuild everthing...") + await handlers.run_pipeline_rebuild_all(event, gh, **kwargs) + else: + message = ( + f'Ignoring request, "{event_project}" does not have pipelines enabled.' + ) if message: await gh.post(event.data["issue"]["comments_url"], {}, data={"body": message}) -@router.register("pull_request", action="opened") -@router.register("pull_request", action="synchronize") -async def label_pull_requests(event, gh, *args, session, **kwargs): - """ - Add labels to PRs based on which files were modified. 
- """ - await handlers.add_labels(event, gh) - - @router.register("pull_request", action="closed") async def on_closed_pull_request(event, gh, *args, session, **kwargs): """ Respond to the pull request closed """ + event_project = event.data["repository"]["name"] + if not event_project == "spack-packages": + return + await handlers.close_pr_gitlab_branch(event, gh) await handlers.close_pr_mirror(event, gh) diff --git a/spackbot/workers.py b/spackbot/workers.py index ac2a0be..1ffee3e 100644 --- a/spackbot/workers.py +++ b/spackbot/workers.py @@ -56,7 +56,9 @@ def is_up_to_date(output): return "nothing to commit" in output -async def check_gitlab_has_latest(branch_name, pr_head_sha, gh, comments_url): +async def check_gitlab_has_latest( + gitlab_project_url, branch_name, pr_head_sha, gh, comments_url +): """ Given the name of the branch supposedly pushed to gitlab, check if it is the latest revision found on github. If gitlab doesn't have the @@ -73,7 +75,7 @@ async def check_gitlab_has_latest(branch_name, pr_head_sha, gh, comments_url): """ # Get the commit for the PR branch from GitLab to see what's been pushed there headers = {"PRIVATE-TOKEN": GITLAB_TOKEN} - commit_url = f"{helpers.gitlab_spack_project_url}/repository/commits/{branch_name}" + commit_url = f"{gitlab_project_url}/repository/commits/{branch_name}" gitlab_commit = await helpers.get(commit_url, headers) error_msg = comments.cannot_run_pipeline_comment @@ -146,6 +148,7 @@ async def run_pipeline_task(event): job = get_current_job() token = job.meta["token"] rebuild_everything = job.meta.get("rebuild_everything") + event_project = event.data["repository"]["name"] async with aiohttp.ClientSession() as session: gh = gh_aiohttp.GitHubAPI(session, REQUESTER, oauth_token=token) @@ -194,10 +197,16 @@ async def run_pipeline_task(event): # If gitlab doesn't have the latest PR head sha from GitHub, we can't run the # pipeline. 
head_sha = pr["head"]["sha"] - if not await check_gitlab_has_latest(branch, head_sha, gh, comments_url): + if not await check_gitlab_has_latest( + helpers.PROJECTS[event_project].gitlab_project_url, + branch, + head_sha, + gh, + comments_url, + ): return - url = f"{helpers.gitlab_spack_project_url}/pipeline?ref={branch}" + url = f"{helpers.PROJECTS[event_project].gitlab_project_url}/pipeline?ref={branch}" if rebuild_everything: # Rebuild everything is accomplished by telling spack pipeline generation @@ -239,7 +248,8 @@ async def run_pipeline_task(event): detailed_status = result.get("detailed_status", {}) if "details_path" in detailed_status: url = urllib.parse.urljoin( - helpers.spack_gitlab_url, detailed_status["details_path"] + helpers.PROJECTS[event_project].gitlab_url, + detailed_status["details_path"], ) logger.info(f"Triggering pipeline on {branch}: {url}") msg = f"I've started that [pipeline]({url}) for you!" @@ -305,30 +315,45 @@ async def fix_style_task(event): # We need to use the git url with ssh remote_branch = pr["head"]["ref"] local_branch = "spackbot-style-check-working-branch" - full_name = pr["head"]["repo"]["full_name"] - fork_url = f"git@github.com:{full_name}.git" + repo_name = pr["head"]["repo"]["name"] + ssh_url = pr["head"]["repo"]["ssh_url"] + fork_url = pr["head"]["repo"]["ssh_url"] logger.info( f"fix_style_task, user = {user}, email = {email}, fork = {fork_url}, branch = {remote_branch}\n" ) + # Style tool is run from the root dir expressed as {0} + upstream_url = helpers.PROJECTS[repo_name].upstream_url + if repo_name == "spack": + style_tool = ( + "bin/spack", + ["--color", "never", "style", "--fix", "--root", "{0}"], + ) + elif repo_name == "spack-packages": + # Packages calls black directly per changed file + style_tool = ("black", []) + # At this point, we can clone the repository and make the change with helpers.temp_dir() as cwd: # Clone a fresh spack develop to use for spack style - git.clone(helpers.spack_upstream, 
"spack-develop") + git.clone(upstream_url, "develop") - spack = sh.Command(f"{cwd}/spack-develop/bin/spack") + if os.path.exists(f"{cwd}/develop/{style_tool[0]}"): + fix_style_command = sh.Command(f"{cwd}/develop/{style_tool[0]}") + else: + fix_style_command = sh.Command(f"{style_tool[0]}") # clone the develop repository to another folder for our PR - git.clone("spack-develop", "spack") + git.clone("develop", "fork") - os.chdir("spack") + os.chdir("fork") git.config("user.name", user) git.config("user.email", email) # This will authenticate the push with the added ssh credentials - git.remote("add", "upstream", helpers.spack_upstream) + git.remote("add", "upstream", ssh_url) git.remote("set-url", "origin", fork_url) # we're on upstream/develop. Fetch just the PR branch @@ -342,11 +367,11 @@ async def fix_style_task(event): # Run the style check and save the message for the user check_dir = os.getcwd() res, err = helpers.run_command( - spack, ["--color", "never", "style", "--fix", "--root", check_dir] + fix_style_command, [arg.format(check_dir) for arg in style_tool[1]] ) - logger.debug("spack style [output]") + logger.debug("style [output]") logger.debug(res) - logger.debug("spack style [error]") + logger.debug("style [error]") logger.debug(err) message = comments.get_style_message(res) @@ -530,7 +555,7 @@ async def prune_mirror_duplicates(shared_pr_mirror_url, publish_mirror_url): helpers.pr_expected_base, "--depth", 1, - helpers.spack_upstream, + helpers.PROJECTS["spack-packages"].upstream_url, "spack", ) @@ -641,7 +666,7 @@ async def update_mirror_index(base_mirror_url): helpers.pr_expected_base, "--depth", 1, - helpers.spack_upstream, + helpers.PROJECTS["spack-packages"].upstream_url, "spack", ) spack = sh.Command(f"{cwd}/spack/bin/spack") From bd4b4e33292c3b825288e0ed63a5941f752d8e12 Mon Sep 17 00:00:00 2001 From: Ryan Krattiger Date: Thu, 3 Jul 2025 16:56:29 -0500 Subject: [PATCH 2/3] Update to use python3 version of awscli --- requirements.txt | 2 +- 1 file 
changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index 4895f60..98f5938 100644 --- a/requirements.txt +++ b/requirements.txt @@ -4,4 +4,4 @@ gidgethub python_dotenv rq sh -aws +awscli From 83ba569970c55b65198eb6cbbcf06a3895624894 Mon Sep 17 00:00:00 2001 From: Ryan Krattiger Date: Thu, 3 Jul 2025 16:56:51 -0500 Subject: [PATCH 3/3] Update spackbotdev redeploy script --- redeploy-spackbot-dev.sh | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/redeploy-spackbot-dev.sh b/redeploy-spackbot-dev.sh index 2c0830d..56b1c66 100755 --- a/redeploy-spackbot-dev.sh +++ b/redeploy-spackbot-dev.sh @@ -14,6 +14,6 @@ docker push ghcr.io/${gh_user}/spackbot:${image_tag} docker push ghcr.io/${gh_user}/spackbot-workers:${image_tag} # Rollout with the new containers -kubectl rollout restart -n spack deployments/spackbotdev-spack-io -kubectl rollout restart -n spack deployments/spackbotdev-workers -kubectl rollout restart -n spack deployments/spackbotdev-lworkers +kubectl -n spack rollout restart deployment spackbotdev-spack-io +kubectl -n spack rollout restart deployment spackbotdev-workers +kubectl -n spack rollout restart deployment spackbotdev-lworkers