18 changes: 16 additions & 2 deletions .env-dummy
@@ -7,8 +7,22 @@ REDIS_HOST=rq-server
# Optionally customize redis port
REDIS_PORT=6379

# Optionally customize the name of the task queue
TASK_QUEUE_NAME=devtasks
# Base URL of the PR binaries mirror
PR_BINARIES_MIRROR_BASE_URL=s3://spack-binaries-prs/testing

# Name of expected base branch (we react to PRs merged to this branch)
PR_BINARIES_BASE_BRANCH=develop

# Number of days to keep shared PR binaries in the mirror
SHARED_PR_MIRROR_RETIRE_AFTER_DAYS=7

# Optionally customize the name of the task queues
TASK_QUEUE_SHORT=devtasks
TASK_QUEUE_LONG=devtasks_long

# Set the worker task queue name; it should match either TASK_QUEUE_SHORT or TASK_QUEUE_LONG.
# This is set per service in docker-compose.yml.
# WORKER_TASK_QUEUE=devtasks

# Optionally configure the time before a job is killed and marked failed (in seconds; default 180)
WORKER_JOB_TIMEOUT=21600
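
The two queue names above back separate RQ queues for short- and long-running tasks, and WORKER_TASK_QUEUE picks which one a given worker listens on. A minimal sketch of how spackbot/workers.py (not shown in this excerpt) might consume these variables; only the names come from the imports used in the handlers below, the implementation details here are assumptions:

import os

from redis import Redis
from rq import Queue, Worker

# Names match the handler imports; defaults here are illustrative only.
TASK_QUEUE_SHORT = os.environ.get("TASK_QUEUE_SHORT", "tasks")
TASK_QUEUE_LONG = os.environ.get("TASK_QUEUE_LONG", "tasks_long")
WORKER_JOB_TIMEOUT = int(os.environ.get("WORKER_JOB_TIMEOUT", 180))


def get_queue(name):
    # One Redis-backed RQ queue per name (short- vs. long-running tasks)
    redis = Redis(
        host=os.environ.get("REDIS_HOST", "localhost"),
        port=int(os.environ.get("REDIS_PORT", 6379)),
    )
    return Queue(name=name, connection=redis)


def start_worker():
    # Each worker container listens only on the queue named by WORKER_TASK_QUEUE
    queue = get_queue(os.environ.get("WORKER_TASK_QUEUE", TASK_QUEUE_SHORT))
    Worker([queue], connection=queue.connection).work()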
20 changes: 20 additions & 0 deletions docker-compose.yml
@@ -4,6 +4,9 @@ services:
restart: always
env_file:
- ./.env
environment:
- TASK_QUEUE_SHORT=devtasks
- TASK_QUEUE_LONG=devtasks_long
build:
context: .
dockerfile: smee/Dockerfile
@@ -12,6 +15,23 @@
build:
context: .
dockerfile: workers/Dockerfile
environment:
- TASK_QUEUE_SHORT=devtasks
- TASK_QUEUE_LONG=devtasks_long
- WORKER_TASK_QUEUE=devtasks
env_file:
- ./.env
deploy:
replicas: 1

rq-long-worker:
build:
context: .
dockerfile: workers/Dockerfile
environment:
- TASK_QUEUE_SHORT=devtasks
- TASK_QUEUE_LONG=devtasks_long
- WORKER_TASK_QUEUE=devtasks_long
env_file:
- ./.env
deploy:
19 changes: 19 additions & 0 deletions redeploy-spackbot-dev.sh
@@ -0,0 +1,19 @@
#!/bin/bash
set -e
set -x

project_root=$(pwd)

gh_user=kwryankrattiger
image_tag=0.0.1

# Rebuild images
docker build -f ${project_root}/Dockerfile -t ghcr.io/${gh_user}/spackbot:${image_tag} ${project_root}
docker build -f ${project_root}/workers/Dockerfile -t ghcr.io/${gh_user}/spackbot-workers:${image_tag} ${project_root}
docker push ghcr.io/${gh_user}/spackbot:${image_tag}
docker push ghcr.io/${gh_user}/spackbot-workers:${image_tag}

# Roll out the new containers
kubectl rollout restart -n spack deployments/spackbotdev-spack-io
kubectl rollout restart -n spack deployments/spackbotdev-workers
kubectl rollout restart -n spack deployments/spackbotdev-lworkers
1 change: 1 addition & 0 deletions requirements.txt
@@ -4,3 +4,4 @@ gidgethub
python_dotenv
rq
sh
aws
7 changes: 6 additions & 1 deletion spackbot/handlers/__init__.py
@@ -1,5 +1,10 @@
from .pipelines import run_pipeline, run_pipeline_rebuild_all # noqa
from .gitlab import ( # noqa
run_pipeline,
run_pipeline_rebuild_all,
close_pr_gitlab_branch,
)
from .labels import add_labels # noqa
from .reviewers import add_reviewers, add_issue_maintainers # noqa
from .reviewers import add_reviewers # noqa
from .style import style_comment, fix_style # noqa
from .mirrors import close_pr_mirror # noqa
24 changes: 21 additions & 3 deletions spackbot/handlers/pipelines.py → spackbot/handlers/gitlab.py
@@ -4,11 +4,13 @@
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

import spackbot.helpers as helpers
import os

from spackbot.workers import (
run_pipeline_task,
report_rebuild_failure,
work_queue,
get_queue,
TASK_QUEUE_SHORT,
WORKER_JOB_TIMEOUT,
)

@@ -25,7 +27,7 @@ async def run_pipeline_rebuild_all(event, gh, **kwargs):
"token": kwargs["token"],
}

task_q = work_queue.get_queue()
task_q = get_queue(TASK_QUEUE_SHORT)
scheduled_job = task_q.enqueue(
run_pipeline_task,
event,
@@ -45,7 +47,7 @@ async def run_pipeline(event, gh, **kwargs):
"token": kwargs["token"],
}

task_q = work_queue.get_queue()
task_q = get_queue(TASK_QUEUE_SHORT)
scheduled_job = task_q.enqueue(
run_pipeline_task,
event,
@@ -54,3 +56,19 @@
on_failure=report_rebuild_failure,
)
logger.info(f"Run pipeline job enqueued: {scheduled_job.id}")


async def close_pr_gitlab_branch(event, gh):
payload = event.data

pr_number = payload["number"]
pr_branch = payload["pull_request"]["head"]["ref"]
pr_branch_name = f"pr{pr_number}_{pr_branch}"

url = helpers.gitlab_spack_project_url
url = f"{url}/repository/branches/{pr_branch_name}"

GITLAB_TOKEN = os.environ.get("GITLAB_TOKEN")
headers = {"PRIVATE-TOKEN": GITLAB_TOKEN}

await helpers.delete(url, headers=headers)
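
For illustration, the DELETE above targets GitLab's branch-deletion endpoint (DELETE /projects/:id/repository/branches/:branch). With a hypothetical project URL and a hypothetical PR #123 whose head branch is fix-foo, the request would look roughly like:

# Illustrative only; the real base comes from helpers.gitlab_spack_project_url
#   DELETE https://gitlab.example.com/api/v4/projects/<id>/repository/branches/pr123_fix-foo
#   PRIVATE-TOKEN: <GITLAB_TOKEN>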
108 changes: 108 additions & 0 deletions spackbot/handlers/mirrors.py
@@ -0,0 +1,108 @@
# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

import spackbot.helpers as helpers
from spackbot.helpers import (
pr_expected_base,
pr_mirror_base_url,
pr_shared_mirror,
publish_mirror_base_url,
)
from spackbot.workers import (
copy_pr_mirror,
prune_mirror_duplicates,
update_mirror_index,
delete_pr_mirror,
get_queue,
TASK_QUEUE_LONG,
)

# If we don't provide a timeout, the default in RQ is 180 seconds
WORKER_JOB_TIMEOUT = 6 * 60 * 60

logger = helpers.get_logger(__name__)


async def close_pr_mirror(event, gh):
payload = event.data

# This should only be called when a PR is closed
if not payload["pull_request"]["state"] == "closed":
return

# Get PR event info
base_branch = payload["pull_request"]["base"]["ref"]
is_merged = payload["pull_request"]["merged"]
pr_number = payload["number"]
pr_branch = payload["pull_request"]["head"]["ref"]

pr_mirror_url = f"{pr_mirror_base_url}/pr{pr_number}_{pr_branch}"
shared_pr_mirror_url = f"{pr_mirror_base_url}/{pr_shared_mirror}"

# Get task queue info
ltask_q = get_queue(TASK_QUEUE_LONG)
copy_job = None
job_metadata = {
"type": None,
"pr_number": pr_number,
"pr_branch": pr_branch,
}

if is_merged and base_branch == pr_expected_base:
logger.info(
f"PR {pr_number}/{pr_branch} merged to {base_branch}, graduating binaries"
)

# Copy all of the stack binaries from the PR to the shared PR
# mirror.
job_metadata.update({"type": "copy"})
copy_job = ltask_q.enqueue(
copy_pr_mirror,
pr_mirror_url,
shared_pr_mirror_url,
meta=job_metadata,
job_timeout=WORKER_JOB_TIMEOUT,
)
logger.info(f"Copy job queued: {copy_job.id}")

# After the copy, prune binaries that already exist in the published
# mirror, since the copy may have introduced such duplicates
job_metadata.update({"type": "prune"})
shared_stack_pr_mirror_url = f"{shared_pr_mirror_url}/{{stack}}"
publish_stack_mirror_url = (
f"{publish_mirror_base_url}/{{stack}}/{pr_expected_base}"
)
prune_job = ltask_q.enqueue(
prune_mirror_duplicates,
shared_stack_pr_mirror_url,
publish_stack_mirror_url,
job_timeout=WORKER_JOB_TIMEOUT,
depends_on=copy_job,
meta=job_metadata,
)
logger.info(f"Pruning job queued: {prune_job.id}")

# Queue a reindex of the shared PR stack mirror, to run after the prune
job_metadata.update({"type": "reindex"})
update_job = ltask_q.enqueue(
update_mirror_index,
shared_stack_pr_mirror_url,
job_timeout=WORKER_JOB_TIMEOUT,
depends_on=prune_job,
meta=job_metadata,
)
logger.info(f"Reindex job queued: {update_job.id}")

# Delete the per-PR mirror once the copy has finished
job_metadata.update({"type": "delete"})
del_job = ltask_q.enqueue(
delete_pr_mirror,
pr_mirror_url,
meta=job_metadata,
job_timeout=WORKER_JOB_TIMEOUT,
depends_on=copy_job,
)
logger.info(f"Delete job queued: {del_job.id}")
5 changes: 3 additions & 2 deletions spackbot/handlers/style.py
@@ -9,8 +9,9 @@
from spackbot.workers import (
fix_style_task,
report_style_failure,
work_queue,
get_queue,
WORKER_JOB_TIMEOUT,
TASK_QUEUE_SHORT,
)


@@ -46,7 +47,7 @@ async def fix_style(event, gh, *args, **kwargs):
"token": kwargs["token"],
}

task_q = work_queue.get_queue()
task_q = get_queue(TASK_QUEUE_SHORT)
fix_style_job = task_q.enqueue(
fix_style_task,
event,
42 changes: 41 additions & 1 deletion spackbot/helpers.py
@@ -17,6 +17,7 @@
from io import StringIO
from sh import ErrorReturnCode
from urllib.request import HTTPHandler, Request, build_opener
from urllib.parse import urlparse


"""Shared function helpers that can be used across routes"
@@ -37,7 +38,16 @@
botname = os.environ.get("SPACKBOT_NAME", "@spackbot")

# Base URL where PR binary mirrors live
pr_mirror_bucket = "spack-binaries-prs"
pr_mirror_base_url = os.environ.get(
"PR_BINARIES_MIRROR_BASE_URL", "s3://spack-binaries-prs"
)
shared_pr_mirror_retire_after_days = int(
os.environ.get("SHARED_PR_MIRROR_RETIRE_AFTER_DAYS", 7)
)
pr_shared_mirror = "shared_pr_mirror"
pr_expected_base = os.environ.get("PR_BINARIES_BASE_BRANCH", "develop")

publish_mirror_base_url = "s3://spack-binaries"

# Aliases for spackbot so spackbot doesn't respond to himself
aliases = ["spack-bot", "spackbot", "spack-bot-develop", botname]
@@ -182,6 +192,16 @@ async def post(url, headers):
return await response.json()


async def delete(url, headers):
"""
Convenience method to create a new session and make a one-off
delete request, given a url and headers to include in the request.
"""
async with aiohttp.ClientSession() as session:
async with session.delete(url, headers=headers) as response:
return await response.json()


def synchronous_http_request(url, data=None, token=None):
"""
Makes synchronous http request to the provided url, using the token for
@@ -232,3 +252,23 @@ def synchronous_http_request(url, data=None, token=None):
)

return response


def s3_parse_url(url, default_bucket="spack-binaries-prs", default_prefix="dummy"):
parsed = {
"bucket": default_bucket,
"prefix": default_prefix,
}

if isinstance(url, str):
url = urlparse(url)

if url.scheme == "s3":
parsed.update(
{
"bucket": url.netloc,
"prefix": url.path.strip("/"),
}
)

return parsed
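
A quick usage sketch of s3_parse_url with hypothetical URLs (the bucket and prefix values are illustrative):

s3_parse_url("s3://spack-binaries-prs/testing/pr123_fix-foo")
# -> {"bucket": "spack-binaries-prs", "prefix": "testing/pr123_fix-foo"}

s3_parse_url("https://example.com/not-s3")
# Non-s3 URLs fall back to the defaults:
# -> {"bucket": "spack-binaries-prs", "prefix": "dummy"}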
10 changes: 10 additions & 0 deletions spackbot/routes.py
@@ -130,3 +130,13 @@ async def label_pull_requests(event, gh, *args, session, **kwargs):
Add labels to PRs based on which files were modified.
"""
await handlers.add_labels(event, gh)


@router.register("pull_request", action="closed")
async def on_closed_pull_request(event, gh, *args, session, **kwargs):
"""
Respond to a pull request being closed: delete its GitLab mirror branch
and retire its PR binary mirror.
"""
await handlers.close_pr_gitlab_branch(event, gh)

await handlers.close_pr_mirror(event, gh)
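
For reference, a minimal, hypothetical slice of the pull_request "closed" webhook payload that these two handlers read (all values are illustrative):

event_data = {
    "number": 123,                     # used to build the pr123_<branch> names
    "pull_request": {
        "state": "closed",
        "merged": True,                # binaries graduate only when merged...
        "base": {"ref": "develop"},    # ...into the expected base branch
        "head": {"ref": "fix-foo"},
    },
}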