diff --git a/src/lando/api/legacy/workers/landing_worker.py b/src/lando/api/legacy/workers/landing_worker.py
index b0382a6a6..9bc850961 100644
--- a/src/lando/api/legacy/workers/landing_worker.py
+++ b/src/lando/api/legacy/workers/landing_worker.py
@@ -36,6 +36,7 @@
)
from lando.pushlog.pushlog import PushLog, PushLogForRepo
from lando.utils.config import read_lando_config
+from lando.utils.github import GitHubAPIClient
from lando.utils.landing_checks import LandingChecks
from lando.utils.tasks import phab_trigger_repo_update
@@ -120,6 +121,15 @@ def run_job(self, job: LandingJob) -> bool:
job.set_landed_commit_ids()
job.transition_status(JobAction.LAND, commit_id=commit_id)
+ if job.is_pull_request_job:
+ # TODO: move this to different method, and retry if needed.
+ # NOTE: This may need to happen on the revision-level when stack support is added.
+ pull_number = job.revisions.first().pull_number
+ message = f"Pull request closed by commit {commit_id}"
+ client = GitHubAPIClient(job.target_repo.url)
+ client.add_comment_to_pull_request(pull_number, message)
+ client.close_pull_request(pull_number)
+
mots_path = Path(repo.path) / "mots.yaml"
if mots_path.exists():
logger.info(f"{mots_path} found, setting reviewer data.")
@@ -153,6 +163,30 @@ def run_job(self, job: LandingJob) -> bool:
return True
+ def convert_patches_to_diff(self, scm: AbstractSCM, job: LandingJob):
+ """Generate a unified diff from multiple patches stored in a revision."""
+ # NOTE: this only applies to git patches that are downloaded from GitHub
+ # at this time. In theory this would work for any provided patches in a
+ # standard format.
+
+ # NOTE: this is only supported for jobs with a single revision at this time.
+ # See bug 2001185.
+
+        if job.revisions.count() > 1:
+            raise NotImplementedError(
+                "This method is not supported when job has more than 1 revision."
+            )
+        if job.revisions.count() == 0:
+            raise ValueError("No revisions found in job.")
+
+        revision = job.revisions.first()
+ if not revision.patches:
+ raise ValueError("Revision is missing patches.")
+
+ diff = scm.get_diff_from_patches(revision.patches)
+ revision.set_patch(f"{diff}\r\n")
+ revision.save()
+
def apply_and_push(
self,
job: LandingJob,
@@ -164,7 +198,6 @@ def apply_and_push(
Returns a tuple of bug_ids and tip commit_id.
"""
- self.update_repo(repo, job, scm, job.target_commit_hash)
def apply_patch(revision: Revision):
logger.debug(f"Landing {revision} ...")
@@ -175,6 +208,12 @@ def apply_patch(revision: Revision):
revision.timestamp,
)
+ self.update_repo(repo, job, scm, job.target_commit_hash)
+
+ if job.is_pull_request_job:
+ self.convert_patches_to_diff(scm, job)
+ self.update_repo(repo, job, scm, job.target_commit_hash)
+
# Run through the patches one by one and try to apply them.
logger.debug(
f"About to land {job.revisions.count()} revisions: {job.revisions.all()} ..."
diff --git a/src/lando/api/tests/conftest.py b/src/lando/api/tests/conftest.py
index 1455dfe64..804e4f569 100644
--- a/src/lando/api/tests/conftest.py
+++ b/src/lando/api/tests/conftest.py
@@ -379,7 +379,7 @@ def mock_permissions():
@pytest.fixture
-def proxy_client(monkeypatch, fake_request):
+def proxy_client(monkeypatch, fake_request, mock_response):
"""A client that bridges tests designed to work with the API.
Most tests that use the API no longer need to access those endpoints through
@@ -391,18 +391,6 @@ def proxy_client(monkeypatch, fake_request):
reimplemented to not need a response or response-like object.
"""
- class MockResponse:
- """Mock response class to satisfy some requirements of tests."""
-
- # NOTE: The methods tested that rely on this class should be reimplemented
- # to no longer need the structure of a response to function.
- def __init__(self, status_code=200, json=None):
- self.json = json or {}
- self.status_code = status_code
- self.content_type = (
- "application/json" if status_code < 400 else "application/problem+json"
- )
-
class ProxyClient:
request = fake_request()
@@ -419,7 +407,8 @@ def _handle__get__stacks__id(self, path):
# and isn't required in the proxy client tests.
json_response.pop("stack")
- return MockResponse(json=json.loads(json.dumps(json_response)))
+ # The double encode/decode is to coerce Python tuples to lists.
+ return mock_response(json_dict=json.loads(json.dumps(json_response)))
def _handle__get__transplants__id(self, path):
stack_revision_id = path.removeprefix("/transplants?stack_revision_id=")
@@ -430,8 +419,8 @@ def _handle__get__transplants__id(self, path):
# For these endpoints, some responses contain different status codes
# which are represented as the second item in a tuple.
json_response, status_code = result
- return MockResponse(
- json=json.loads(json.dumps(json_response)),
+ return mock_response(
+ json_dict=json.loads(json.dumps(json_response)),
status_code=status_code,
)
# In the rest of the cases, the returned result is a response object.
@@ -439,7 +428,7 @@ def _handle__get__transplants__id(self, path):
def _handle__post__transplants__dryrun(self, **kwargs):
json_response = legacy_api_transplants.dryrun(self.request, kwargs["json"])
- return MockResponse(json=json.loads(json.dumps(json_response)))
+ return mock_response(json_dict=json.loads(json.dumps(json_response)))
def _handle__post__transplants(self, path, **kwargs):
try:
@@ -449,23 +438,23 @@ def _handle__post__transplants(self, path, **kwargs):
except LegacyAPIException as e:
# Handle exceptions and pass along the status code to the response object.
if e.extra:
- return MockResponse(json=e.extra, status_code=e.status)
+ return mock_response(json_dict=e.extra, status_code=e.status)
if e.json_detail:
- return MockResponse(json=e.json_detail, status_code=e.status)
- return MockResponse(json=e.args, status_code=e.status)
+ return mock_response(json_dict=e.json_detail, status_code=e.status)
+ return mock_response(json_dict=e.args, status_code=e.status)
except Exception as e:
# TODO: double check that this is a thing in legacy?
# Added this due to a validation error (test_transplant_wrong_landing_path_format)
- return MockResponse(json=[f"error ({e})"], status_code=400)
- return MockResponse(
- json=json.loads(json.dumps(json_response)), status_code=status_code
+ return mock_response(json_dict=[f"error ({e})"], status_code=400)
+ return mock_response(
+ json_dict=json.loads(json.dumps(json_response)), status_code=status_code
)
def _handle__put__landing_jobs__id(self, path, **kwargs):
job_id = int(path.removeprefix("/landing_jobs/"))
landing_job_api = LandingJobApiView()
response = landing_job_api.put(self.request, job_id)
- return MockResponse(json=json.loads(response.content))
+ return mock_response(json_dict=json.loads(response.content))
def get(self, path, *args, **kwargs):
"""Handle various get endpoints."""
diff --git a/src/lando/api/tests/test_hooks.py b/src/lando/api/tests/test_hooks.py
index fb9c0eba6..225b8ecb9 100644
--- a/src/lando/api/tests/test_hooks.py
+++ b/src/lando/api/tests/test_hooks.py
@@ -65,8 +65,8 @@ def treestatus_exception2():
response = client.get("__testing__/treestatus_exception1")
assert response.status_code == 500
- assert response.json["title"] == "Tree Status Error"
+ assert response.json()["title"] == "Tree Status Error"
response = client.get("__testing__/treestatus_exception2")
assert response.status_code == 500
- assert response.json["title"] == "Tree Status Error"
+ assert response.json()["title"] == "Tree Status Error"
diff --git a/src/lando/api/tests/test_revisions.py b/src/lando/api/tests/test_revisions.py
index bdbab98fd..f45b47544 100644
--- a/src/lando/api/tests/test_revisions.py
+++ b/src/lando/api/tests/test_revisions.py
@@ -42,7 +42,7 @@ def test_secure_api_flag_on_public_revision_is_false(
response = proxy_client.get("/stacks/D{}".format(revision["id"]))
assert response.status_code == 200
- response_revision = response.json["revisions"].pop()
+ response_revision = response.json()["revisions"].pop()
assert not response_revision["is_secure"]
@@ -61,7 +61,7 @@ def test_secure_api_flag_on_secure_revision_is_true(
response = proxy_client.get("/stacks/D{}".format(revision["id"]))
assert response.status_code == 200
- response_revision = response.json["revisions"].pop()
+ response_revision = response.json()["revisions"].pop()
assert response_revision["is_secure"]
diff --git a/src/lando/api/tests/test_stacks.py b/src/lando/api/tests/test_stacks.py
index 57a9bd35d..fecff0ade 100644
--- a/src/lando/api/tests/test_stacks.py
+++ b/src/lando/api/tests/test_stacks.py
@@ -808,18 +808,18 @@ def test_integrated_stack_endpoint_simple(
response = proxy_client.get("/stacks/D{}".format(r3["id"]))
assert response.status_code == 200
- assert len(response.json["edges"]) == 4
- assert [r2["phid"], r1["phid"]] in response.json["edges"]
- assert [r3["phid"], r1["phid"]] in response.json["edges"]
- assert [r4["phid"], r2["phid"]] in response.json["edges"]
- assert [r4["phid"], r3["phid"]] in response.json["edges"]
-
- assert len(response.json["landable_paths"]) == 2
- assert [r1["phid"], r2["phid"]] in response.json["landable_paths"]
- assert [r1["phid"], r3["phid"]] in response.json["landable_paths"]
-
- assert len(response.json["revisions"]) == 4
- revisions = {r["phid"]: r for r in response.json["revisions"]}
+ assert len(response.json()["edges"]) == 4
+ assert [r2["phid"], r1["phid"]] in response.json()["edges"]
+ assert [r3["phid"], r1["phid"]] in response.json()["edges"]
+ assert [r4["phid"], r2["phid"]] in response.json()["edges"]
+ assert [r4["phid"], r3["phid"]] in response.json()["edges"]
+
+ assert len(response.json()["landable_paths"]) == 2
+ assert [r1["phid"], r2["phid"]] in response.json()["landable_paths"]
+ assert [r1["phid"], r3["phid"]] in response.json()["landable_paths"]
+
+ assert len(response.json()["revisions"]) == 4
+ revisions = {r["phid"]: r for r in response.json()["revisions"]}
assert r1["phid"] in revisions
assert r2["phid"] in revisions
assert r3["phid"] in revisions
@@ -849,9 +849,9 @@ def test_integrated_stack_endpoint_repos(
response = proxy_client.get("/stacks/D{}".format(r4["id"]))
assert response.status_code == 200
- assert len(response.json["repositories"]) == 2
+ assert len(response.json()["repositories"]) == 2
- repositories = {r["phid"]: r for r in response.json["repositories"]}
+ repositories = {r["phid"]: r for r in response.json()["repositories"]}
assert repo["phid"] in repositories
assert unsupported_repo["phid"] in repositories
assert repositories[repo["phid"]]["landing_supported"]
@@ -881,7 +881,7 @@ def test_integrated_stack_has_revision_security_status(
response = proxy_client.get("/stacks/D{}".format(secure_revision["id"]))
assert response.status_code == 200
- revisions = {r["phid"]: r for r in response.json["revisions"]}
+ revisions = {r["phid"]: r for r in response.json()["revisions"]}
assert not revisions[public_revision["phid"]]["is_secure"]
assert revisions[secure_revision["phid"]]["is_secure"]
@@ -905,8 +905,8 @@ def test_integrated_stack_response_mismatch_returns_404(
response = proxy_client.get("/stacks/D{}".format(r1["id"]))
assert response.status_code == 200
- assert len(response.json["edges"]) == 1
- assert len(response.json["revisions"]) == 2
+ assert len(response.json()["edges"]) == 1
+ assert len(response.json()["revisions"]) == 2
# Remove r2 from the response.
phabdouble._revisions = [
@@ -921,8 +921,8 @@ def test_integrated_stack_response_mismatch_returns_404(
response = proxy_client.get("/stacks/D{}".format(r1["id"]))
assert response.status_code == 200
- assert len(response.json["edges"]) == 0
- assert len(response.json["revisions"]) == 1
+ assert len(response.json()["edges"]) == 0
+ assert len(response.json()["revisions"]) == 1
def test_revisionstack_single():
diff --git a/src/lando/api/tests/test_transplants.py b/src/lando/api/tests/test_transplants.py
index fb2eaa5af..561cf110e 100644
--- a/src/lando/api/tests/test_transplants.py
+++ b/src/lando/api/tests/test_transplants.py
@@ -94,7 +94,7 @@ def test_dryrun_no_warnings_or_blockers(
assert 200 == response.status_code
assert "application/json" == response.content_type
expected_json = {"confirmation_token": None, "warnings": [], "blocker": None}
- assert response.json == expected_json
+ assert response.json() == expected_json
@pytest.mark.django_db(transaction=True)
@@ -130,7 +130,7 @@ def test_dryrun_invalid_path_blocks(
assert "application/json" == response.content_type
assert (
"Depends on D1 which is open and has a different repository"
- in response.json["blocker"]
+ in response.json()["blocker"]
)
@@ -170,7 +170,7 @@ def test_dryrun_published_parent(
assert 200 == response.status_code
assert "application/json" == response.content_type
- assert response.json["blocker"] is None
+ assert response.json()["blocker"] is None
@pytest.mark.django_db
@@ -211,7 +211,7 @@ def test_dryrun_open_parent(
assert 200 == response.status_code
assert "application/json" == response.content_type
assert (
- "The requested set of revisions are not landable." in response.json["blocker"]
+ "The requested set of revisions are not landable." in response.json()["blocker"]
), "Landing should be blocked due to r1 still being open and part of the stack."
@@ -263,7 +263,7 @@ def test_dryrun_in_progress_transplant_blocks(
assert 200 == response.status_code
assert "application/json" == response.content_type
- assert response.json["blocker"] == (
+ assert response.json()["blocker"] == (
"A landing for revisions in this stack is already in progress."
)
@@ -295,9 +295,9 @@ def test_dryrun_reviewers_warns(
assert 200 == response.status_code
assert "application/json" == response.content_type
- assert response.json["warnings"]
- assert response.json["warnings"][0]["id"] == 0
- assert response.json["confirmation_token"] is not None
+ assert response.json()["warnings"]
+ assert response.json()["warnings"][0]["id"] == 0
+ assert response.json()["confirmation_token"] is not None
@pytest.mark.django_db(transaction=True)
@@ -352,13 +352,13 @@ def test_dryrun_codefreeze_warn(
assert response.status_code == 200
assert response.content_type == "application/json"
- assert response.json[
+ assert response.json()[
"warnings"
], "warnings should not be empty for a repo under code freeze"
assert (
- response.json["warnings"][0]["id"] == 8
+ response.json()["warnings"][0]["id"] == 8
), "the warning ID should match the ID for warning_code_freeze"
- assert response.json["confirmation_token"] is not None
+ assert response.json()["confirmation_token"] is not None
@pytest.mark.django_db(transaction=True)
@@ -412,7 +412,7 @@ def test_dryrun_outside_codefreeze(
assert response.status_code == 200
assert response.content_type == "application/json"
- assert not response.json["warnings"]
+ assert not response.json()["warnings"]
# auth related issue, blockers empty.
@@ -454,7 +454,7 @@ def test_integrated_dryrun_blocks_for_bad_userinfo(
)
assert response.status_code == status
- assert blocker in response.json["blocker"]
+ assert blocker in response.json()["blocker"]
@pytest.mark.django_db(transaction=True)
@@ -817,8 +817,8 @@ def test_integrated_transplant_simple_stack_saves_data_in_db(
)
assert response.status_code == 202
assert response.content_type == "application/json"
- assert "id" in response.json
- job_id = response.json["id"]
+ assert "id" in response.json()
+ job_id = response.json()["id"]
# Get LandingJob object by its id
job = LandingJob.objects.get(pk=job_id)
@@ -873,8 +873,8 @@ def test_integrated_transplant_simple_partial_stack_saves_data_in_db(
)
assert response.status_code == 202
assert response.content_type == "application/json"
- assert "id" in response.json
- job_id = response.json["id"]
+ assert "id" in response.json()
+ job_id = response.json()["id"]
# Get LandingJob object by its id
job = LandingJob.objects.get(pk=job_id)
@@ -945,8 +945,8 @@ def test_integrated_transplant_records_approvers_peers_and_owners(
)
assert response.status_code == 202
assert response.content_type == "application/json"
- assert "id" in response.json
- job_id = response.json["id"]
+ assert "id" in response.json()
+ job_id = response.json()["id"]
# Get LandingJob object by its id
job = LandingJob.objects.get(pk=job_id)
@@ -1027,8 +1027,8 @@ def test_integrated_transplant_updated_diff_id_reflected_in_landed_phabricator_r
)
assert response.status_code == 202
assert response.content_type == "application/json"
- assert "id" in response.json
- job_1_id = response.json["id"]
+ assert "id" in response.json()
+ job_1_id = response.json()["id"]
# Get LandingJob object by its id.
job = LandingJob.objects.get(pk=job_1_id)
@@ -1078,7 +1078,7 @@ def test_integrated_transplant_updated_diff_id_reflected_in_landed_phabricator_r
permissions=mock_permissions,
)
- job_2_id = response.json["id"]
+ job_2_id = response.json()["id"]
# Get LandingJob objects by their ids.
job_1 = LandingJob.objects.get(pk=job_1_id)
@@ -1288,7 +1288,7 @@ def test_integrated_transplant_without_auth0_permissions(
assert (
"You have insufficient permissions to land or your access has expired. "
"main.scm_level_3 is required. See the FAQ for help."
- ) in response.json["blocker"]
+ ) in response.json()["blocker"]
@pytest.mark.django_db(transaction=True)
@@ -1340,7 +1340,7 @@ def test_integrated_transplant_diff_not_in_revision(
permissions=mock_permissions,
)
assert response.status_code == 400
- assert "A requested diff is not the latest." in response.json["blocker"]
+ assert "A requested diff is not the latest." in response.json()["blocker"]
@pytest.mark.django_db(transaction=True)
@@ -1358,7 +1358,7 @@ def test_transplant_nonexisting_revision_returns_404(
)
assert response.status_code == 404
assert response.content_type == "application/problem+json"
- assert response.json["detail"] == "Stack Not Found"
+ assert response.json()["detail"] == "Stack Not Found"
@pytest.mark.django_db(transaction=True)
@@ -1382,7 +1382,9 @@ def test_integrated_transplant_revision_with_no_repo(
permissions=mock_permissions,
)
assert response.status_code == 400
- assert "Landing repository is missing for this landing." in response.json["blocker"]
+ assert (
+ "Landing repository is missing for this landing." in response.json()["blocker"]
+ )
@pytest.mark.django_db(transaction=True)
@@ -1407,7 +1409,9 @@ def test_integrated_transplant_revision_with_unmapped_repo(
permissions=mock_permissions,
)
assert response.status_code == 400
- assert "Landing repository is missing for this landing." in response.json["blocker"]
+ assert (
+ "Landing repository is missing for this landing." in response.json()["blocker"]
+ )
@pytest.mark.django_db(transaction=True)
@@ -1510,7 +1514,7 @@ def test_unresolved_comment_warn(
assert response.status_code == 200
assert response.content_type == "application/json"
- assert not response.json[
+ assert not response.json()[
"warnings"
], "warnings should be empty for a revision without unresolved comments"
@@ -1533,11 +1537,11 @@ def test_unresolved_comment_warn(
assert response.status_code == 200
assert response.content_type == "application/json"
- assert response.json[
+ assert response.json()[
"warnings"
], "warnings should not be empty for a revision with unresolved comments"
assert (
- response.json["warnings"][0]["id"] == 9
+ response.json()["warnings"][0]["id"] == 9
), "the warning ID should match the ID for warning_unresolved_comments"
@@ -1607,11 +1611,11 @@ def test_unresolved_comment_stack(
assert response.status_code == 200
assert response.content_type == "application/json"
- assert response.json[
+ assert response.json()[
"warnings"
], "warnings should not be empty for a stack with unresolved comments"
assert (
- response.json["warnings"][0]["id"] == 9
+ response.json()["warnings"][0]["id"] == 9
), "the warning ID should match the ID for warning_unresolved_comments"
@@ -2069,8 +2073,8 @@ def test_transplant_on_linked_legacy_repo(
)
assert response.status_code == 202
assert response.content_type == "application/json"
- assert "id" in response.json
- job_id = response.json["id"]
+ assert "id" in response.json()
+ job_id = response.json()["id"]
# Get LandingJob object by its id
job = LandingJob.objects.get(pk=job_id)
diff --git a/src/lando/api/tests/test_try.py b/src/lando/api/tests/test_try.py
index 1a5920218..9606f590c 100644
--- a/src/lando/api/tests/test_try.py
+++ b/src/lando/api/tests/test_try.py
@@ -120,7 +120,7 @@ def test_try_api_patch_decode_error(
)
assert response.status_code == 400, "Improperly encoded patch should return 400."
assert (
- response.json["title"] == "Patch decoding error."
+ response.json()["title"] == "Patch decoding error."
), "Response should indicate the patch could not be decoded."
@@ -161,7 +161,7 @@ def test_try_api_patch_format_mismatch(
response.status_code == 400
), "A patch which does not match the passed format should return 400."
assert (
- response.json["title"] == "Improper patch format."
+ response.json()["title"] == "Improper patch format."
), "Response should indicate the patch could not be decoded."
@@ -257,8 +257,8 @@ def test_symlink_diff_inspect(
response.status_code == 400
), "Try push which fails diff checks should return 400."
- assert response.json["title"] == "Errors found in pre-submission patch checks."
- assert response.json["detail"] == (
+ assert response.json()["title"] == "Errors found in pre-submission patch checks."
+ assert response.json()["detail"] == (
"Patch failed checks:\n\n"
" - Revision introduces symlinks in the files `blahfile_symlink`."
), "Details message should indicate an introduced symlink."
@@ -345,7 +345,7 @@ def test_try_api_success_hgexport(
)
assert response.status_code == 201, "Successful try push should return 201."
assert (
- "id" in response.json
+ "id" in response.json()
), "Response should include the ID of the new landing job."
queue_items = LandingJob.job_queue_query(
@@ -432,7 +432,7 @@ def test_try_api_success_gitformatpatch(
)
assert response.status_code == 201, "Successful try push should return 201."
assert (
- "id" in response.json
+ "id" in response.json()
), "Response should include the ID of the new landing job."
queue_items = LandingJob.job_queue_query(
diff --git a/src/lando/api/views.py b/src/lando/api/views.py
index 995a86ac3..35a1390cd 100644
--- a/src/lando/api/views.py
+++ b/src/lando/api/views.py
@@ -1,23 +1,47 @@
import json
+from collections import defaultdict
+from datetime import datetime
from functools import wraps
from typing import Callable
from django import forms
from django.core.handlers.wsgi import WSGIRequest
-from django.http import JsonResponse
+from django.http import HttpRequest, JsonResponse
from django.utils.decorators import method_decorator
from django.views import View
from django.views.decorators.csrf import csrf_exempt
-from lando.main.models import CommitMap
+from lando.main.auth import require_authenticated_user
+from lando.main.models import (
+ CommitMap,
+ JobStatus,
+ LandingJob,
+ Repo,
+ Revision,
+ add_revisions_to_job,
+)
+from lando.main.models.landing_job import get_jobs_for_pull
from lando.main.models.revision import DiffWarning, DiffWarningStatus
from lando.main.scm import (
SCM_TYPE_GIT,
SCM_TYPE_HG,
)
+from lando.utils.github import GitHubAPIClient, PullRequest, PullRequestPatchHelper
+from lando.utils.github_checks import (
+ ALL_PULL_REQUEST_BLOCKERS,
+ ALL_PULL_REQUEST_WARNINGS,
+ PullRequestChecks,
+)
+from lando.utils.landing_checks import LandingChecks
from lando.utils.phabricator import get_phabricator_client
+class APIView(View):
+ """A base class for API views."""
+
+ pass
+
+
def phabricator_api_key_required(func: callable) -> Callable:
"""A simple wrapper that checks for a valid Phabricator API token."""
@@ -39,6 +63,28 @@ def _wrapper(self, request, *args, **kwargs): # noqa: ANN001
return _wrapper
+def generate_warnings_and_blockers(
+ target_repo: Repo, pull_request: PullRequest, request: HttpRequest
+) -> dict[str, list[str]]:
+ """Run checks on a pull request and return blockers and warnings."""
+ # PullRequestPatchHelper.diff doesn't include binary changes.
+ # This is not considered an issue for checks at the moment, but may need to be kept in
+ # mind for the future.
+ patch_helper = PullRequestPatchHelper(pull_request)
+ author_email = pull_request.author[1]
+ landing_checks = LandingChecks(author_email)
+ blockers = landing_checks.run(
+ target_repo.hooks,
+ [patch_helper],
+ )
+ pr_checks = PullRequestChecks(pull_request.client, target_repo, request)
+ pr_blockers = [chk.name() for chk in ALL_PULL_REQUEST_BLOCKERS]
+ blockers += pr_checks.run(pr_blockers, pull_request)
+ pr_warnings = [chk.name() for chk in ALL_PULL_REQUEST_WARNINGS]
+ warnings = pr_checks.run(pr_warnings, pull_request)
+ return {"warnings": warnings, "blockers": blockers}
+
+
@method_decorator(csrf_exempt, name="dispatch")
class LegacyDiffWarningView(View):
"""
@@ -148,3 +194,96 @@ class hg2gitCommitMapView(CommitMapBaseView):
"""Return corresponding CommitMap given an hg hash."""
scm = SCM_TYPE_HG
+
+
+class LandingJobPullRequestAPIView(View):
+ """Handle pull request landing jobs in the API."""
+
+ def get(
+ self, request: WSGIRequest, repo_name: int, pull_number: int
+ ) -> JsonResponse:
+ """Return the status of a pull request based on landing job counts."""
+
+ target_repo = Repo.objects.get(name=repo_name)
+ landing_jobs = get_jobs_for_pull(target_repo, pull_number)
+ landing_jobs_by_status = defaultdict(list)
+ for landing_job in landing_jobs:
+ landing_jobs_by_status[landing_job.status].append(landing_job.id)
+
+ status = None
+ # Return the first encountered status in this list.
+ for _status in [
+ JobStatus.LANDED,
+ JobStatus.CREATED,
+ JobStatus.SUBMITTED,
+ JobStatus.IN_PROGRESS,
+ JobStatus.FAILED,
+ ]:
+ if landing_jobs_by_status[_status]:
+ status = str(_status).lower()
+ break
+
+ return JsonResponse({"status": status}, status=200)
+
+ @method_decorator(require_authenticated_user)
+ def post(
+ self, request: WSGIRequest, repo_name: int, pull_number: int
+ ) -> JsonResponse:
+ """Create a new landing job for a pull request."""
+
+ class Form(forms.Form):
+ """Simple form to get clean some fields."""
+
+ head_sha = forms.CharField()
+ # TODO: use this for verification later, see bug 1996571.
+ # base_ref = forms.CharField()
+
+ target_repo = Repo.objects.get(name=repo_name)
+ client = GitHubAPIClient(target_repo.url)
+ ldap_username = request.user.email
+ pull_request = client.build_pull_request(pull_number)
+
+ blockers = generate_warnings_and_blockers(target_repo, pull_request, request)[
+ "blockers"
+ ]
+
+ if blockers:
+ # Pull request has blockers that prevent it from landing.
+ return JsonResponse({"errors": blockers}, status=400)
+
+ form = Form(json.loads(request.body))
+
+        if not form.is_valid():
+            return JsonResponse(form.errors, status=400)
+
+ job = LandingJob.objects.create(
+ target_repo=target_repo, requester_email=ldap_username
+ )
+ author_name, author_email = pull_request.author
+ patch_data = {
+ "author_name": author_name,
+ "author_email": author_email,
+ "commit_message": pull_request.commit_message,
+ "timestamp": int(datetime.now().timestamp()),
+ }
+ revision = Revision.objects.create(
+ pull_number=pull_request.number,
+ patches=pull_request.patch,
+ patch_data=patch_data,
+ )
+ add_revisions_to_job([revision], job)
+ job.status = JobStatus.SUBMITTED
+ job.save()
+
+ return JsonResponse({"id": job.id}, status=201)
+
+
+class PullRequestChecksAPIView(APIView):
+ def get(self, request: WSGIRequest, repo_name: str, number: int) -> JsonResponse:
+ target_repo = Repo.objects.get(name=repo_name)
+ client = GitHubAPIClient(target_repo.url)
+ pull_request = client.build_pull_request(number)
+ warnings_and_blockers = generate_warnings_and_blockers(
+ target_repo, pull_request, request
+ )
+ return JsonResponse(warnings_and_blockers)
diff --git a/src/lando/conftest.py b/src/lando/conftest.py
index 916fb2f0f..e1117209f 100644
--- a/src/lando/conftest.py
+++ b/src/lando/conftest.py
@@ -1,3 +1,4 @@
+import json
import os
import pathlib
import re
@@ -15,6 +16,7 @@
from django.conf import settings
from django.contrib.auth.models import Permission, User
from django.contrib.contenttypes.models import ContentType
+from requests.models import HTTPError
from lando.api.legacy.stacks import (
RevisionStack,
@@ -655,7 +657,6 @@ def factory(
name: str = "",
push_target: str = "",
) -> Repo:
-
# The BMO reference check 1) requires access to a BMO instance to test with and
# 2) is only needed for Try. We disable it here to be closer to a normal MC
# repo.
@@ -749,7 +750,6 @@ def hg_test_bundle() -> pathlib.Path:
@pytest.fixture
def hg_server(hg_test_bundle: pathlib.Path, tmpdir: os.PathLike):
-
# TODO: Select open port.
port = "8000"
hg_url = "http://localhost:" + port
@@ -1187,3 +1187,76 @@ def _active_mock(obj: object, method: str) -> mock.MagicMock:
return mock_method
return _active_mock
+
+
+class MockResponse:
+ """Mock response class to satisfy some requirements of tests.
+
+ Headers keys will be normalised to all-lowercase.
+ """
+
+ _json: dict | list | None = None
+
+ def __init__(
+ self,
+ *,
+ json_dict: dict | None = None,
+ text: str | None = None,
+ status_code: int = 200,
+ headers: dict | None = None,
+ ):
+ if json_dict and text:
+ raise Exception("MockResponse can't specify json and text at the same time")
+
+ self.status_code = status_code
+
+ headers = headers or {}
+ self.headers = {}
+ # Lower case all provided headers.
+ for hkey in headers:
+ self.headers[hkey.lower()] = headers[hkey]
+
+        # Set a reasonable content-type header value.
+        if not self.headers.get("content-type"):
+            self.headers["content-type"] = (
+                "text/plain"
+                if text
+                else (
+                    "application/json"
+                    if status_code < 400
+                    else "application/problem+json"
+                )
+            )
+
+        try:
+            self._json = json_dict if json_dict is not None else json.loads(text or "")
+        except json.JSONDecodeError:
+            pass
+        self.text = text if text is not None else json.dumps(json_dict)
+
+    def json(self) -> dict | list:
+        return self._json
+
+    @property
+    def content_type(self):
+        return self.headers["content-type"]
+
+ def raise_for_status(self):
+ if self.status_code >= 400:
+ raise HTTPError(f"Status code {self.status_code} in MockResponse")
+
+
+@pytest.fixture
+def mock_response() -> Callable:
+ def _mock_response(
+ *,
+ json_dict: dict | None = None,
+ text: str | None = None,
+ status_code: int = 200,
+ headers: dict | None = None,
+ ) -> MockResponse:
+ return MockResponse(
+ json_dict=json_dict, text=text, status_code=status_code, headers=headers
+ )
+
+ return _mock_response
diff --git a/src/lando/main/migrations/0035_revision_pull_number.py b/src/lando/main/migrations/0035_revision_pull_number.py
new file mode 100644
index 000000000..494e2ce8e
--- /dev/null
+++ b/src/lando/main/migrations/0035_revision_pull_number.py
@@ -0,0 +1,18 @@
+# Generated by Django 5.2.5 on 2025-11-06 13:59
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ ("main", "0034_repo_hooks"),
+ ]
+
+ operations = [
+ migrations.AddField(
+ model_name="revision",
+ name="pull_number",
+ field=models.IntegerField(blank=True, null=True),
+ ),
+ ]
diff --git a/src/lando/main/migrations/0036_revision_patches.py b/src/lando/main/migrations/0036_revision_patches.py
new file mode 100644
index 000000000..452154861
--- /dev/null
+++ b/src/lando/main/migrations/0036_revision_patches.py
@@ -0,0 +1,18 @@
+# Generated by Django 5.2.5 on 2025-11-13 18:38
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ ("main", "0035_revision_pull_number"),
+ ]
+
+ operations = [
+ migrations.AddField(
+ model_name="revision",
+ name="patches",
+ field=models.TextField(blank=True, default=""),
+ ),
+ ]
diff --git a/src/lando/main/models/landing_job.py b/src/lando/main/models/landing_job.py
index 8eff83156..f6bf043bd 100644
--- a/src/lando/main/models/landing_job.py
+++ b/src/lando/main/models/landing_job.py
@@ -11,6 +11,7 @@
from mots.directory import Directory
from lando.main.models.jobs import BaseJob
+from lando.main.models.repo import Repo
from lando.main.models.revision import Revision, RevisionLandingJob
logger = logging.getLogger(__name__)
@@ -45,6 +46,11 @@ class LandingJob(BaseJob):
Revision, through="RevisionLandingJob", related_name="landing_jobs"
)
+    @property
+    def is_pull_request_job(self) -> bool:
+        """Return True if all revisions in the landing job have a pull_number set."""
+        # NOTE(review): this is vacuously True for a job with no revisions —
+        # confirm callers never reach this with an empty revision set.
+        return not self.revisions.filter(pull_number__isnull=True).exists()
+
@property
def landed_phabricator_revisions(self) -> dict:
"""Return a mapping associating Phabricator revision IDs with the ID of the landed Diff."""
@@ -263,3 +269,13 @@ def add_revisions_to_job(revisions: list[Revision], job: LandingJob):
"""Given an existing job, add and sort provided revisions."""
job.add_revisions(revisions)
job.sort_revisions(revisions)
+
+
+def get_jobs_for_pull(target_repo: Repo, pull_number: int) -> QuerySet[LandingJob]:
+    """Return all landing jobs for the given repo and pull number, newest first.
+
+    Jobs are matched through their revisions' ``pull_number`` field.
+    """
+    # NOTE(review): assumes `QuerySet` is already imported in this module —
+    # this diff only adds the `Repo` import; confirm.
+    revisions = Revision.objects.filter(
+        landing_jobs__target_repo=target_repo, pull_number=pull_number
+    )
+    # distinct() guards against duplicate job rows when a job joins through
+    # more than one matching revision (e.g. once stack support lands).
+    return (
+        LandingJob.objects.filter(unsorted_revisions__in=revisions)
+        .distinct()
+        .order_by("-created_at")
+    )
diff --git a/src/lando/main/models/repo.py b/src/lando/main/models/repo.py
index 3e842e4ef..5bdd9dbfe 100644
--- a/src/lando/main/models/repo.py
+++ b/src/lando/main/models/repo.py
@@ -327,6 +327,11 @@ def _github_repo_url(self) -> str | None:
if self.is_github:
return self.url.removesuffix(".git")
+    @property
+    def _github_repo_org(self) -> str | None:
+        """Return the owner/org segment of the GitHub repo URL, or None if not GitHub.
+
+        E.g. for ``https://github.com/mozilla/repo`` this returns ``mozilla``.
+        """
+        if self.is_github:
+            # _github_repo_url has the trailing ".git" removed; the owner is
+            # the second-to-last path segment.
+            return self._github_repo_url.split("/")[-2]
+
@property
def git_repo_name(self) -> str:
"""Provide the bare name of the Git repo."""
diff --git a/src/lando/main/models/revision.py b/src/lando/main/models/revision.py
index 0ca3ca973..8955e8b57 100644
--- a/src/lando/main/models/revision.py
+++ b/src/lando/main/models/revision.py
@@ -37,7 +37,7 @@ class Revision(BaseModel):
"""
A representation of a revision in the database.
- Includes a reference to the related Phabricator revision and diff ID if one exists.
+ Includes references to Phabricator revisions or GitHub pull requests if they exist.
"""
# revision_id and diff_id map to Phabricator IDs (integers).
@@ -47,9 +47,17 @@ class Revision(BaseModel):
# does not track all diffs.
diff_id = models.IntegerField(blank=True, null=True)
- # The actual patch with Mercurial metadata format.
+ # GitHub pull request number, if this is a pull request.
+ pull_number = models.IntegerField(blank=True, null=True)
+
+ # The generated patch with Mercurial metadata format.
+ # This patch is generated by combining a diff and patch metadata.
patch = models.TextField(blank=True, default="")
+ # Raw patch data that could contain multiple patches.
+ # These patches are fetched and stored directly (e.g., from GitHub).
+ patches = models.TextField(blank=True, default="")
+
# Patch metadata, such as
# - author_name
# - author_email
@@ -124,9 +132,9 @@ def new_from_patch(cls, raw_diff: str, patch_data: dict[str, str]) -> Self:
rev.save()
return rev
-    def set_patch(self, raw_diff: str, patch_data: dict[str, str]):
+    def set_patch(self, raw_diff: str, patch_data: dict[str, str] | None = None):
         """Given a raw_diff and patch data, build the patch and store it."""
-        self.patch_data = patch_data
+        # NOTE(review): an explicitly-passed empty dict is falsy and will be
+        # ignored in favour of the stored patch_data — confirm this is intended.
+        self.patch_data = patch_data or self.patch_data
         patch = build_patch_for_revision(raw_diff, **self.patch_data)
         self.patch = patch
diff --git a/src/lando/main/scm/git.py b/src/lando/main/scm/git.py
index c35d1e7a4..b6406dfd8 100644
--- a/src/lando/main/scm/git.py
+++ b/src/lando/main/scm/git.py
@@ -1,4 +1,3 @@
-import asyncio
import io
import logging
import os
@@ -11,8 +10,6 @@
from pathlib import Path
from typing import Any
-from django.conf import settings
-from simple_github import AppAuth, AppInstallationAuth
from typing_extensions import override
from lando.main.scm.commit import CommitData
@@ -25,6 +22,8 @@
)
from lando.main.scm.helpers import GitPatchHelper, PatchHelper
from lando.settings import LANDO_USER_EMAIL, LANDO_USER_NAME
+from lando.utils.const import URL_USERINFO_RE
+from lando.utils.github import GitHub
from .abstract_scm import AbstractSCM
@@ -36,24 +35,6 @@
ENV_COMMITTER_NAME = "GIT_COMMITTER_NAME"
ENV_COMMITTER_EMAIL = "GIT_COMMITTER_EMAIL"
-# From RFC-3986 [0]:
-#
-# userinfo = *( unreserved / pct-encoded / sub-delims / ":" )
-#
-# unreserved = ALPHA / DIGIT / "-" / "." / "_" / "~"
-# pct-encoded = "%" HEXDIG HEXDIG
-# sub-delims = "!" / "$" / "&" / "'" / "(" / ")"
-# / "*" / "+" / "," / ";" / "=
-#
-# [0] https://www.rfc-editor.org/rfc/rfc3986
-URL_USERINFO_RE = re.compile(
- "(?P[-A-Za-z0-9:._~%!$&'*()*+;=]*:[-A-Za-z0-9:._~%!$&'*()*+;=]*@)",
- flags=re.MULTILINE,
-)
-GITHUB_URL_RE = re.compile(
- f"https://{URL_USERINFO_RE.pattern}?github.com/(?P[-A-Za-z0-9]+)/(?P[^/]+)"
-)
-
class GitSCM(AbstractSCM):
"""An implementation of the AbstractVCS for Git, for use by the Repo and LandingWorkers."""
@@ -112,31 +93,14 @@ def push(
tags: list[str] | None = None,
):
"""Push local code to the remote repository."""
+
push_command = ["push"]
if force_push:
push_command += ["--force"]
- if match := re.match(GITHUB_URL_RE, push_path):
- # We only fetch a token if no authentication is explicitly specified in
- # the push_url.
- if not match["userinfo"]:
- logger.info(
- "Obtaining fresh GitHub token repo",
- extra={
- "push_path": push_path,
- "repo_name": match["repo"],
- "repo_owner": match["owner"],
- },
- )
-
- owner = match["owner"]
- repo = match["repo"]
- repo_name = repo.removesuffix(".git")
-
- token = self._get_github_token(owner, repo_name)
- if token:
- push_path = f"https://git:{token}@github.com/{owner}/{repo}"
+ if GitHub.is_supported_url(push_path):
+ push_path = GitHub(push_path).authenticated_url
push_command += [push_path]
@@ -152,32 +116,6 @@ def push(
self._git_run(*push_command, cwd=self.path)
- @staticmethod
- def _get_github_token(repo_owner: str, repo_name: str) -> str | None:
- """Obtain a fresh GitHub token to push to the specified repo.
-
- This relies on GITHUB_APP_ID and GITHUB_APP_PRIVKEY to be set in the
- settings. Returns None if those are missing.
-
- The app with ID GITHUB_APP_ID needs to be enabled for the target repo.
-
- """
- app_id = settings.GITHUB_APP_ID
- private_key = settings.GITHUB_APP_PRIVKEY
-
- if not app_id or not private_key:
- logger.warning(
- f"Missing GITHUB_APP_ID or GITHUB_APP_PRIVKEY to authenticate against GitHub repo {repo_owner}/{repo_name}",
- )
- return None
-
- app_auth = AppAuth(
- app_id,
- private_key,
- )
- session = AppInstallationAuth(app_auth, repo_owner, repositories=[repo_name])
- return asyncio.run(session.get_token())
-
def last_commit_for_path(self, path: str) -> str:
"""Find last commit to touch a path."""
command = ["log", "--max-count=1", "--format=%H", "--", path]
@@ -251,6 +189,22 @@ def apply_patch_git(self, patch_bytes: bytes):
# Re-raise the exception from the failed `git am`.
raise exc
+ def get_diff_from_patches(self, patches: str) -> str:
+ """Apply multiple patches and return the diff output."""
+ # TODO: add error handling so that if something goes wrong here,
+ # a meaningful error is stored in the landing job. This would be
+ # the same as what is done when actually applying the patches.
+ # See bug 2000268.
+ with tempfile.NamedTemporaryFile(
+ encoding="utf-8", mode="w+", suffix=".patch"
+ ) as patch_file:
+ patch_file.write(patches)
+ patch_file.flush()
+
+ self._git_run("apply", "--reject", patch_file.name, cwd=self.path)
+ self._git_run("add", "-A", "-f", cwd=self.path)
+ return self._git_run("diff", "--staged", "--binary", cwd=self.path)
+
@override
def get_patch(self, revision_id: str) -> str | None:
"""Return a complete patch for the given revision, in the git extended diff format.
diff --git a/src/lando/main/scm/helpers.py b/src/lando/main/scm/helpers.py
index 81cd98180..ba53825ee 100644
--- a/src/lando/main/scm/helpers.py
+++ b/src/lando/main/scm/helpers.py
@@ -111,6 +111,10 @@ def get_timestamp_from_hg_date_header(date_header: str) -> str:
class PatchHelper(ABC):
"""Base class for parsing patches/exports."""
+ # Expected headers (all lowercase!):
+ # - date
+ # - from
+ # - subject
headers: dict[str, str]
@classmethod
@@ -442,7 +446,6 @@ def get_diff(self) -> str:
"""Return the patch diff."""
return self.diff
- @override
def get_diff_bytes(self) -> bytes:
"""Return the patch diff."""
return self.diff.encode("utf-8", errors="surrogateescape")
diff --git a/src/lando/main/tests/test_git.py b/src/lando/main/tests/test_git.py
index e56d28a5a..9b5ceee53 100644
--- a/src/lando/main/tests/test_git.py
+++ b/src/lando/main/tests/test_git.py
@@ -6,7 +6,8 @@
from collections.abc import Callable
from pathlib import Path
from textwrap import dedent
-from unittest.mock import MagicMock
+from unittest import mock
+from unittest.mock import MagicMock, PropertyMock
import pytest
@@ -674,21 +675,36 @@ def test_GitSCM_push(
)
-def test_GitSCM_push_get_github_token(git_repo: Path):
+@pytest.fixture
+def mock_github_authenticated_url(monkeypatch: pytest.MonkeyPatch):
+ mock_authenticated_url = PropertyMock()
+
+ mock_authenticated_url.return_value = (
+ "ssh+git:ghs_yolo@github.com/some-org/some-repo"
+ )
+
+ monkeypatch.setattr(
+ "lando.utils.github.GitHub.authenticated_url", mock_authenticated_url
+ )
+
+ return mock_authenticated_url
+
+
+def test_GitSCM_push_github_authenticated_url(
+ git_repo: Path, mock_github_authenticated_url: mock.Mock
+):
scm = GitSCM(str(git_repo))
scm._git_run = MagicMock()
- scm._get_github_token = MagicMock()
- scm._get_github_token.side_effect = ["ghs_yolo"]
scm.push("https://github.com/some/repo")
- assert scm._git_run.call_count == 1, "_git_run wasn't called when pushing"
+ assert scm._git_run.call_count >= 1, "_git_run wasn't called when pushing"
assert (
- scm._get_github_token.call_count == 1
- ), "_get_github_token wasn't called when pushing to a github-like URL"
+ mock_github_authenticated_url.call_count == 1
+ ), "GitHub.authenticated_url wasn't accessed when pushing to a github-like URL"
assert (
"git:ghs_yolo@github.com" in scm._git_run.call_args[0][1]
- ), "github token not found in rewritten push_path"
+ ), "GitHub authenticated_url was not found in rewritten push_path"
@pytest.mark.parametrize(
diff --git a/src/lando/static_src/legacy/css/pages/StackPage.scss b/src/lando/static_src/legacy/css/pages/StackPage.scss
index 6e2b3fa32..70ecd088c 100644
--- a/src/lando/static_src/legacy/css/pages/StackPage.scss
+++ b/src/lando/static_src/legacy/css/pages/StackPage.scss
@@ -327,6 +327,10 @@ ul.StackPage-blockers {
background-color: #FFFFFF;
}
}
+ p {
+ margin-top: 0;
+ margin-bottom: 0;
+ }
}
%error-message {
@@ -352,3 +356,7 @@ ul.StackPage-blockers {
margin-top: 0.5em;
width: 123ch;
}
+
+p.acknowledge-warnings-section {
+ display: none;
+}
diff --git a/src/lando/static_src/legacy/js/components/Stack.js b/src/lando/static_src/legacy/js/components/Stack.js
index 4881e1259..129eb15cc 100644
--- a/src/lando/static_src/legacy/js/components/Stack.js
+++ b/src/lando/static_src/legacy/js/components/Stack.js
@@ -33,5 +33,137 @@ $.fn.stack = function() {
$('.link-assessment-close').on("click", function () {
$('.uplift-assessment-link-modal').removeClass("is-active");
});
+
+ // Simple check for time being. If the button exists, assume this is a pull request page.
+ // This should be cleaned up as part of bug 1995754.
+ var is_pull_request_page = Boolean($('button.post-landing-job').length);
+ if (is_pull_request_page) {
+
+ $('#acknowledge-warnings').on("click", function () {
+ if (this.checked) {
+ pull_request_button.prop("disabled", false);
+ pull_request_button.html("Request landing despite warnings");
+ } else {
+ pull_request_button.prop("disabled", true);
+ pull_request_button.html("Acknowledge warnings to continue");
+ }
+ });
+
+ var pull_request_button = $('button.post-landing-job');
+ if (pull_request_button.data("anonymous") == 1) {
+ pull_request_button.prop("disabled", true);
+ pull_request_button.removeClass("is-loading").addClass("is-danger");
+ pull_request_button.html("Log in to request landing");
+ return;
+ }
+
+ var pull_number = pull_request_button.data("pull-number");
+ var head_sha = pull_request_button.data("head-sha");
+ var repo_name = pull_request_button.data("repo-name");
+ var csrf_token = pull_request_button.data("csrf-token");
+
+ fetch(`/api/pulls/${repo_name}/${pull_number}/landing_jobs`, {
+ method: 'GET',
+ headers: {
+ 'Accept': 'application/json',
+ 'Content-Type': 'application/json',
+ 'X-CSRFToken': csrf_token
+ },
+ }).then(async response => {
+ if (response.status == 200) {
+ var result = await response.json();
+ if (result.status == "landed") {
+ var message = "Pull request landed"
+ pull_request_button.prop("disabled", true);
+ pull_request_button.removeClass("is-loading").addClass("is-danger");
+ pull_request_button.html(message);
+ $("#blockers").html(`${message}.`);
+ $("#warnings").html(`${message}.`);
+ } else if (["created", "submitted", "in_progress"].includes(result.status)) {
+ var message = "Landing job submitted"
+ pull_request_button.prop("disabled", true);
+ pull_request_button.removeClass("is-loading");
+ pull_request_button.html(message);
+ $("#blockers").html(`${message}.`);
+ $("#warnings").html(`${message}.`);
+ } else {
+ fetch(`/api/pulls/${repo_name}/${pull_number}/checks`, {
+ method: 'GET',
+ }).then(async response => {
+ if (response.status == 200) {
+ var result = await response.json();
+ var blockers = result.blockers;
+ var warnings = result.warnings;
+
+ var has_blockers = blockers.length !== 0;
+ var has_warnings = warnings.length !== 0;
+
+ if (!has_blockers) {
+ $("#blockers").html("None found.");
+ } else {
+ $("#blockers").html("");
+ for (var blocker of blockers) {
+ $("#blockers").append(`${blocker}`);
+ }
+ }
+
+ if (!has_warnings) {
+ $("#warnings").html("None found.");
+ } else {
+ $("#warnings").html("");
+ for (var warning of warnings) {
+ $("#warnings").append(`${warning}`);
+ }
+ }
+
+ if (!has_blockers && !has_warnings) {
+ pull_request_button.prop("disabled", false);
+ pull_request_button.removeClass("is-loading").addClass("is-success");;
+ pull_request_button.html("Request landing");
+ } else if (has_blockers) {
+ pull_request_button.prop("disabled", true);
+ pull_request_button.removeClass("is-loading").addClass("is-danger");
+ pull_request_button.html("Landing is blocked");
+ } else if (has_warnings) {
+ $('.acknowledge-warnings-section').show();
+ pull_request_button.prop("disabled", true);
+ pull_request_button.removeClass("is-loading").addClass("is-warning");
+ pull_request_button.html("Acknowledge warnings to continue");
+ }
+ } else {
+ // TODO: handle this case. See bug 1996000.
+ }
+ });
+ }
+ } else {
+ // TODO: handle this case. See bug 1996000.
+ }
+ });
+
+ pull_request_button.on('click', function(e) {
+ pull_request_button.addClass("is-loading");
+ fetch(`/api/pulls/${repo_name}/${pull_number}/landing_jobs`, {
+ method: 'POST',
+ body: JSON.stringify({"head_sha": head_sha}),
+ headers: {
+ 'Accept': 'application/json',
+ 'Content-Type': 'application/json',
+ 'X-CSRFToken': csrf_token
+ },
+ }).then(response => {
+ if (response.status == 201) {
+ window.location.reload();
+ } else if (response.status == 400) {
+ pull_request_button.prop("disabled", true);
+ pull_request_button.removeClass("is-danger").removeClass("is-loading").addClass("is-warning");
+ pull_request_button.html("Could not create landing job");
+ } else {
+ pull_request_button.prop("disabled", true);
+ pull_request_button.removeClass("is-danger").removeClass("is-loading").addClass("is-warning");
+ pull_request_button.html("An unknown error occurred");
+ }
+ });
+ });
+ };
});
};
diff --git a/src/lando/ui/jinja2/partials/job.html b/src/lando/ui/jinja2/partials/job.html
index df2525aa0..1eda351a0 100644
--- a/src/lando/ui/jinja2/partials/job.html
+++ b/src/lando/ui/jinja2/partials/job.html
@@ -21,7 +21,7 @@
{% endif %}
{% endif %}
- {% if job.revisions.exists() %}
+ {% if job.revisions.exists() and not job.is_pull_request_job %}
Revisions:
{% for i in job.serialized_landing_path -%}
diff --git a/src/lando/ui/jinja2/stack/pull_request.html b/src/lando/ui/jinja2/stack/pull_request.html
new file mode 100644
index 000000000..96913279c
--- /dev/null
+++ b/src/lando/ui/jinja2/stack/pull_request.html
@@ -0,0 +1,74 @@
+{% extends "partials/layout.html" %}
+{% block page_title %}{{ pull_request }}{% endblock %}
+{% block main %}
+
+
+ {{ pull_request.title }} #{{ pull_request.number }} ({{ target_repo.name }}@{{ target_repo.default_branch }})
+
+
+
+
+
+
+
+
+
+
+ | Warnings |
+
+
+ |
+
+
+ | Blockers |
+
+
+ |
+
+
+ | Branches |
+ {{ pull_request.head_ref }} -> {{ pull_request.base_ref }} |
+
+
+ | Repo |
+ {{ target_repo }} ({{ pull_request.head_repo_git_url }}) |
+
+
+ | Author |
+ {{ pull_request.user_login }} |
+
+
+ | State |
+ {{ pull_request.state }} |
+
+
+ | Title |
+ {{ pull_request.title }} |
+
+
+ | Description |
+ {{ pull_request.body }} |
+
+
+
+ Landings
+ {% include "stack/partials/timeline.html" %}
+
+{% endblock %}
diff --git a/src/lando/ui/pull_requests.py b/src/lando/ui/pull_requests.py
new file mode 100644
index 000000000..2513510bc
--- /dev/null
+++ b/src/lando/ui/pull_requests.py
@@ -0,0 +1,36 @@
+import logging
+
+from django.core.handlers.wsgi import WSGIRequest
+from django.template.response import TemplateResponse
+
+from lando.main.models import Repo
+from lando.main.models.landing_job import get_jobs_for_pull
+from lando.ui.views import LandoView
+from lando.utils.github import GitHubAPIClient
+
+logger = logging.getLogger(__name__)
+
+
+class PullRequestView(LandoView):
+    """A class-based view to handle pull requests in the Lando UI."""
+
+    def get(
+        self, request: WSGIRequest, repo_name: str, number: int, *args, **kwargs
+    ) -> TemplateResponse:
+        """Render the pull-request page for ``repo_name`` / PR ``number``.
+
+        Fetches PR data from the GitHub API and associated landing jobs from
+        the local database.
+        """
+        # NOTE(review): Repo.objects.get raises Repo.DoesNotExist (-> 500) for
+        # an unknown repo_name — consider get_object_or_404 here.
+        target_repo = Repo.objects.get(name=repo_name)
+        client = GitHubAPIClient(target_repo.url)
+        # Network round-trip to GitHub to build the PullRequest object.
+        pull_request = client.build_pull_request(number)
+        landing_jobs = get_jobs_for_pull(target_repo, number)
+
+        context = {
+            "target_repo": target_repo,
+            "pull_request": pull_request,
+            "landing_jobs": landing_jobs,
+        }
+
+        return TemplateResponse(
+            request=request,
+            template="stack/pull_request.html",
+            context=context,
+        )
diff --git a/src/lando/urls.py b/src/lando/urls.py
index 493345fe0..675292cd5 100644
--- a/src/lando/urls.py
+++ b/src/lando/urls.py
@@ -20,7 +20,9 @@
from lando.api.legacy.api import landing_jobs
from lando.api.views import (
+ LandingJobPullRequestAPIView,
LegacyDiffWarningView,
+ PullRequestChecksAPIView,
git2hgCommitMapView,
hg2gitCommitMapView,
)
@@ -38,7 +40,7 @@
from lando.try_api.api import (
api as try_api,
)
-from lando.ui import jobs
+from lando.ui import jobs, pull_requests
from lando.ui.legacy import pages, revisions, user_settings
urlpatterns = [
@@ -52,6 +54,11 @@
path(
"D/", revisions.RevisionView.as_view(), name="revisions-page"
),
+    path(
+        # Converters match PullRequestView.get(request, repo_name, number).
+        "pulls/<str:repo_name>/<int:number>/",
+        pull_requests.PullRequestView.as_view(),
+        name="pull-request",
+    ),
path("manage_api_key/", user_settings.manage_api_key, name="user-settings"),
path("uplift/", revisions.UpliftRequestView.as_view(), name="uplift-page"),
path(
@@ -90,6 +97,19 @@
),
]
+urlpatterns += [
+    # NOTE(review): converter names reconstructed to match the frontend fetch
+    # URLs (`/api/pulls/${repo_name}/${pull_number}/...`) — confirm against
+    # the API view signatures.
+    path(
+        "api/pulls/<str:repo_name>/<int:pull_number>/landing_jobs",
+        LandingJobPullRequestAPIView.as_view(),
+        name="api-landing-job-pull-request",
+    ),
+    path(
+        "api/pulls/<str:repo_name>/<int:pull_number>/checks",
+        PullRequestChecksAPIView.as_view(),
+        name="api-pull-request-checks",
+    ),
+]
+
# "API" endpoints ported from legacy API app.
urlpatterns += [
path(
diff --git a/src/lando/utils/const.py b/src/lando/utils/const.py
new file mode 100644
index 000000000..128ed0695
--- /dev/null
+++ b/src/lando/utils/const.py
@@ -0,0 +1,16 @@
+import re
+
+# From RFC-3986 [0]:
+#
+#    userinfo    = *( unreserved / pct-encoded / sub-delims / ":" )
+#
+#    unreserved  = ALPHA / DIGIT / "-" / "." / "_" / "~"
+#    pct-encoded = "%" HEXDIG HEXDIG
+#    sub-delims  = "!" / "$" / "&" / "'" / "(" / ")"
+#                / "*" / "+" / "," / ";" / "="
+#
+# [0] https://www.rfc-editor.org/rfc/rfc3986
+# The group MUST be named "userinfo": GitHub.authenticated_url reads
+# parse results via that name.
+URL_USERINFO_RE = re.compile(
+    "(?P<userinfo>[-A-Za-z0-9:._~%!$&'*()*+;=]*:[-A-Za-z0-9:._~%!$&'*()*+;=]*@)",
+    flags=re.MULTILINE,
+)
diff --git a/src/lando/utils/github.py b/src/lando/utils/github.py
new file mode 100644
index 000000000..25b83cf35
--- /dev/null
+++ b/src/lando/utils/github.py
@@ -0,0 +1,666 @@
+import asyncio
+import io
+import logging
+import math
+import re
+from collections import Counter
+from datetime import datetime
+from enum import Enum
+
+import requests
+from django.conf import settings
+from simple_github import AppAuth, AppInstallationAuth
+from typing_extensions import override
+
+from lando.main.scm.helpers import PatchHelper
+from lando.utils.cache import cache_method
+from lando.utils.const import URL_USERINFO_RE
+
+logger = logging.getLogger(__name__)
+
+
+class GitHub:
+ """Work with authentication to GitHub repositories."""
+
+    # Named groups "owner"/"repo" are required: parse_url results are read as
+    # parsed_url_data["owner"], ["repo"] and ["userinfo"] in __init__.
+    GITHUB_URL_RE = re.compile(
+        rf"https://{URL_USERINFO_RE.pattern}?github.com/(?P<owner>[-A-Za-z0-9]+)/(?P<repo>[^/]+?)(?:\.git)?(?:/|$)"
+    )
+
+ repo_url: str
+ repo_owner: str
+ repo_name: str
+ userinfo: str
+
+ def __init__(self, repo_url: str):
+ self.repo_url = repo_url
+
+ parsed_url_data = self.parse_url(self.repo_url)
+
+ if parsed_url_data is None:
+ raise ValueError(f"Cannot parse URL as GitHub repo: {repo_url}")
+
+ self.repo_owner = parsed_url_data["owner"]
+ self.repo_name = parsed_url_data["repo"]
+ self.userinfo = parsed_url_data["userinfo"]
+
+ @classmethod
+ def is_supported_url(cls, url: str) -> bool:
+ """Determine whether the passed URL is a supported GitHub URL."""
+ return cls.parse_url(url) is not None
+
+ @classmethod
+ def parse_url(cls, url: str) -> re.Match[str] | None:
+ """Parse GitHub data from URL, or return None if not Github.
+
+ Note: no normalisation is performed on the URL
+ """
+ return re.match(cls.GITHUB_URL_RE, url)
+
+ @property
+ def authenticated_url(self) -> str:
+ """Return an authenticated URL, suitable for use with `git` to push and pull.
+
+ If the URL already has authentication parameters, it is returned verbatim. If
+ not, a token is fetched by the GitHub app, and inserted into the USERINFO part of
+ the URL, without any other changes (e.g., in the REST path or Query String).
+ """
+ if self.userinfo:
+ # We only fetch a token if no authentication is explicitly specified in
+ # the repo_url.
+ return self.repo_url
+
+ logger.info(
+ f"Obtaining fresh GitHub token for GitHub repo at {self.repo_url}",
+ )
+
+ token = self._fetch_token()
+
+ if token:
+ return self.repo_url.replace(
+ "https://github.com", f"https://git:{token}@github.com"
+ )
+
+ # We didn't get a token
+ logger.warning(f"Couldn't obtain a token for GitHub repo at {self.repo_url}")
+ return self.repo_url
+
+ def _fetch_token(self) -> str | None:
+ """Obtain a fresh GitHub token to push to the specified repo.
+
+ This relies on GITHUB_APP_ID and GITHUB_APP_PRIVKEY to be set in the
+ environment. Returns None if those are missing.
+
+ The app with ID GITHUB_APP_ID needs to be enabled for the target repo.
+
+ """
+ app_id = settings.GITHUB_APP_ID
+ private_key = settings.GITHUB_APP_PRIVKEY
+
+ if not app_id or not private_key:
+ logger.warning(
+ f"Missing GITHUB_APP_ID or GITHUB_APP_PRIVKEY to authenticate against GitHub repo at {self.repo_url}",
+ )
+ return None
+
+ app_auth = AppAuth(
+ app_id,
+ private_key,
+ )
+ session = AppInstallationAuth(
+ app_auth, self.repo_owner, repositories=[self.repo_name]
+ )
+ return asyncio.run(session.get_token())
+
+
+class GitHubAPI(GitHub):
+ """A simple wrapper that authenticates with and communicates with the GitHub API."""
+
+ session: requests.Session
+
+ GITHUB_BASE_URL = "https://api.github.com"
+
+ def __init__(self, repo_url: str):
+ super().__init__(repo_url)
+
+ self.session = requests.Session()
+ self.session.headers.update(
+ {
+ "Authorization": f"Bearer {self._fetch_token()}",
+ "User-Agent": settings.HTTP_USER_AGENT,
+ "Accept": "application/vnd.github+json",
+ "X-GitHub-Api-Version": "2022-11-28",
+ }
+ )
+
+ def get(self, path: str, *args, **kwargs) -> requests.Response:
+ """Send a GET request to the GitHub API with given args and kwargs."""
+ url = f"{self.GITHUB_BASE_URL}/{path}"
+ return self.session.get(url, *args, **kwargs)
+
+ def post(self, path: str, *args, **kwargs) -> requests.Response:
+ """Send a POST request to the GitHub API with given args and kwargs."""
+ url = f"{self.GITHUB_BASE_URL}/{path}"
+ return self.session.post(url, *args, **kwargs)
+
+
+class GitHubAPIClient:
+ """A convenience client that provides various methods to interact with the GitHub API."""
+
+ _api: GitHubAPI
+
+ class UpstreamError(Exception):
+ pass
+
+ def __init__(self, repo_url: str):
+ self._api = GitHubAPI(repo_url)
+ self.repo_base_url = f"repos/{self.repo_owner}/{self.repo_name}"
+
+ @property
+ def session(self) -> requests.Session:
+ """An authenticated requests Session."""
+ return self._api.session
+
+ @property
+ def repo_owner(self) -> str:
+ return self._api.repo_owner
+
+ @property
+ def repo_name(self) -> str:
+ return self._api.repo_name
+
+ def _repo_get(self, subpath: str, *args, **kwargs) -> dict | list:
+ """Get API endpoint scoped to the repo_base_url.
+
+ Parameters:
+
+ subpath: str
+ Relative path without leading `/`.
+
+ Return:
+ dist | list: decoded JSON from the response
+ """
+ return self._get(f"{self.repo_base_url}/{subpath}", *args, **kwargs)
+
+    def _get(self, path: str, *args, **kwargs) -> dict | list | str | None:
+        """GET ``path`` and decode the response based on its content type.
+
+        Returns parsed JSON for JSON responses and raw text for GitHub
+        diff/patch responses.
+        """
+        result = self._api.get(path, *args, **kwargs)
+        content_type = result.headers["content-type"]
+        if content_type == "application/json; charset=utf-8":
+            return result.json()
+        elif content_type == "application/vnd.github.patch; charset=utf-8":
+            return result.text
+        elif content_type == "application/vnd.github.diff; charset=utf-8":
+            return result.text
+        # NOTE(review): silently falls through to None for any other content
+        # type (including error responses) — confirm callers handle None.
+
+ def _post(self, path: str, *args, **kwargs):
+ result = self._api.post(path, *args, **kwargs)
+ return result.json()
+
+ def build_pull_request(self, pull_number: int) -> "PullRequest":
+ """Build a PullRequest object.
+
+ This does the necessary network requests to collect the data."""
+ data = self.get_pull_request(pull_number)
+ return PullRequest(self, data)
+
+ def list_pull_requests(self) -> list:
+ """List all pull requests in the repo."""
+ return self._repo_get("pulls")
+
+ def get_pull_request(self, pull_number: int) -> dict:
+ """Get a specific pull request from the repo."""
+ return self._repo_get(f"pulls/{pull_number}")
+
+ def get_diff(self, pull_number: int) -> str:
+ """Fetch a diff, given a pull request number."""
+ return self._get(
+ f"{self.repo_base_url}/pulls/{pull_number}",
+ headers={"Accept": "application/vnd.github.diff"},
+ )
+
+ def get_patch(self, pull_number: int) -> str:
+ """Fetch a patch, given a pull request number."""
+ return self._get(
+ f"{self.repo_base_url}/pulls/{pull_number}",
+ headers={"Accept": "application/vnd.github.patch"},
+ )
+
+ def get_pull_request_comments(self, pull_number: int) -> list:
+ """Return a list of comments on the whole PR."""
+ # `issues` is correct here, using `pull` instead would return comments on diffs.
+ return self._repo_get(f"issues/{pull_number}/comments")
+
+ def get_pull_request_commits(self, pull_number: int) -> list[dict]:
+ """Get all commits from specific pull request from the repo."""
+ return self._repo_get(f"pulls/{pull_number}/commits")
+
+ def get_pull_request_commits_comments(self, pull_number: int) -> list:
+ """Return a list of comments on specific changes of the PR."""
+ # NOTE: We use the GraphQL API for this one, as the comment-resolution
+ # information is not available via the REST API [0].
+ #
+ # NOTE: While there are many comment fields accessible. It seems the only one
+ # that reliably return data is pullRequest.reviews[].comments[] [1]. Most
+ # notably, pullRequest.commits[].comments[] seems to always be empty.
+ #
+ # But that's not what we need anyway. What we're after is reviewThreads,
+ # which are resolvable.
+ #
+ # [0] https://github.com/orgs/community/discussions/9175#discussioncomment-9008230
+ # [1] https://github.com/orgs/community/discussions/24666#discussioncomment-3244969
+ #
+ comments_query = """
+ query($owner: String!, $repo: String!, $number: Int!) {
+ repository(owner: $owner, name: $repo) {
+ pullRequest(number: $number) {
+ reviewThreads(first: 100) {
+ nodes {
+ comments(first: 1) {
+ nodes {
+ id
+ body
+ url
+ updatedAt
+ }
+ }
+ isResolved
+ }
+ }
+ updatedAt
+ }
+ }
+ }
+ """
+ comments_response = self.session.post(
+ "https://api.github.com/graphql",
+ json={
+ "query": comments_query,
+ "variables": {
+ "owner": self.repo_owner,
+ "repo": self.repo_name,
+ "number": pull_number,
+ },
+ },
+ )
+ comments_response.raise_for_status()
+ comments_response_json = comments_response.json()
+ if "errors" in comments_response_json:
+ raise self.UpstreamError(
+ f"Error from GitHub GraphQL: {comments_response_json}"
+ )
+
+ comments_dict = comments_response_json["data"]["repository"]["pullRequest"][
+ "reviewThreads"
+ ]["nodes"]
+
+ comments = []
+
+ for thread in comments_dict:
+ # We only grab the first comment of each thread.
+ comment = thread["comments"]["nodes"][0]
+ comment["updated_at"] = comment["updatedAt"]
+ del comment["updatedAt"]
+ comment["is_resolved"] = thread["isResolved"]
+
+ comments.append(comment)
+
+ return comments
+
+ def get_pull_request_labels(self, pull_number: int) -> list:
+ """Return a list of labels for the PR."""
+ # `issues` is correct here
+ labels = self._repo_get(f"issues/{pull_number}/labels")
+
+ return labels
+
+ def get_pull_request_reviews(self, pull_number: int) -> list:
+ """Return a list of reviews for the PR."""
+ return self._repo_get(f"pulls/{pull_number}/reviews")
+
+ def open_pull_request(self, pull_number: int) -> dict:
+ """Open the given pull request."""
+ return self._post(
+ f"{self.repo_base_url}/pulls/{pull_number}", json={"state": "open"}
+ )
+
+    def close_pull_request(self, pull_number: int) -> dict:
+        """Close the given pull request.
+
+        NOTE(review): GitHub documents "update a pull request" as a PATCH
+        request; this client only issues POST — confirm GitHub accepts POST
+        on this endpoint.
+        """
+        return self._post(
+            f"{self.repo_base_url}/pulls/{pull_number}", json={"state": "closed"}
+        )
+
+ def add_comment_to_pull_request(self, pull_number: int, comment: str) -> dict:
+ """Add a comment to the given pull request."""
+ return self._post(
+ f"{self.repo_base_url}/issues/{pull_number}/comments",
+ json={"body": comment},
+ )
+
+ @classmethod
+ def convert_timestamp_from_github(cls, timestamp: str) -> str:
+ timestamp_datetime = datetime.fromisoformat(timestamp)
+ return str(math.floor(timestamp_datetime.timestamp()))
+
+
+def pr_cache_key(self: "PullRequest", *args, **kwargs) -> str:
+ """Provide a cache key for PR methods that fetch data from GitHub.
+
+ This method-like function cannot be part of the PullRequest, as it is used by method
+ decorators when declaring the class.
+ """
+ return f"{self.id}{self.updated_at}"
+
+
+# Specialised decorator which embeds the PR-specific cache-key builder.
+pr_cache_method = cache_method(pr_cache_key)
+
+
class PullRequest:
    """A class that parses data returned from the GitHub API for pull requests."""

    class StaleMetadataException(Exception):
        # Raised when data fetched from GitHub post-dates this PR's
        # `updated_at`, i.e. the PR changed while metadata was being collected.
        pass

    class Mergeability(str, Enum):
        """Mergeability of a PR.

        This is not documented for the REST API, but the GraphQL doc has some details
        [0].

        [0] https://docs.github.com/en/graphql/reference/enums#mergestatestatus
        """

        BEHIND = "behind"  # The head ref is out of date.
        BLOCKED = "blocked"  # The merge is blocked.
        CLEAN = "clean"  # Mergeable and passing commit status.
        DIRTY = "dirty"  # The merge commit cannot be cleanly created.
        DRAFT = "draft"  # The merge is blocked due to the pull request being a draft.
        HAS_HOOKS = (
            "has_hooks"  # Mergeable with passing commit status and pre-receive hooks.
        )
        UNKNOWN = "unknown"  # The state cannot currently be determined.
        UNSTABLE = "unstable"  # Mergeable with non-passing commit status.

    class State(str, Enum):
        """State of a PR."""

        OPEN = "open"
        CLOSED = "closed"

    class Review(str, Enum):
        """Type of a review on a PR."""

        APPROVED = "APPROVED"
        CHANGES_REQUESTED = "CHANGES_REQUESTED"
        DISMISSED = "DISMISSED"

    # API client used by the lazy properties below (diff, commits, reviews, ...).
    client: GitHubAPIClient

    def __repr__(self) -> str:
        return f"Pull request #{self.number} ({self.head_repo_git_url})"

    def __init__(self, client: GitHubAPIClient, data: dict):
        """Initialise from a GitHub "get a pull request" API payload.

        Args:
            client: API client used for follow-up requests on this PR.
            data: decoded JSON body of the GitHub `pulls` endpoint response.
        """
        self.client = client

        self.url = data["url"]
        self.base_ref = data["base"]["ref"]  # "target" branch name
        self.base_sha = data["base"]["sha"]  # "target" branch sha
        self.head_ref = data["head"]["ref"]  # "working" branch name
        self.head_sha = data["head"]["sha"]  # "working" branch sha

        self.base_user_login = data["base"]["user"]["login"]
        self.base_user_id = data["base"]["user"]["id"]
        self.created_at = data["created_at"]
        self.updated_at = data["updated_at"]
        self.closed_at = data["closed_at"]
        self.merged_at = data["merged_at"]
        self.diff_url = data["diff_url"]
        self.patch_url = data["patch_url"]
        self.body = data["body"]  # description
        self.is_draft = data["draft"]
        self.comments_url = data["comments_url"]
        self.commits_url = data["commits_url"]

        self.head_repo_git_url = data["head"]["repo"][
            "git_url"
        ]  # e.g., git://github.com/mozilla-conduit/test-repo.git
        self.html_url = data["html_url"]
        self.id = data["id"]
        self.labels = data["labels"]
        self.mergeable_state = data["mergeable_state"]
        self.number = data["number"]
        # Keep only the reviewer fields Lando actually consumes.
        self.requested_reviewers = [
            {"id": r["id"], "html_url": r["html_url"], "login": r["login"]}
            for r in data["requested_reviewers"]
        ]
        self.requested_teams = [
            {
                "id": r["id"],
                "html_url": r["html_url"],
                "name": r["name"],
                "slug": r["slug"],
                "description": r["description"],
            }
            for r in data["requested_teams"]
        ]

        self.state = data["state"]  # e.g., "open"
        self.title = data["title"]

        self.user_id = data["user"]["id"]
        self.user_html_url = data["user"]["html_url"]
        self.user_login = data["user"]["login"]

    def _select_commit_author(
        self, commits: list[dict]
    ) -> tuple[str | None, str | None]:
        """Select the most common author in commits."""
        # This method is ported from lando.api.legacy.revisions.select_diff_author.
        commits = [commit["commit"] for commit in commits]
        if not commits:
            return None, None

        # Below is copied verbatim from the legacy method.
        authors = [c.get("author", {}) for c in commits]
        authors = Counter((a.get("name"), a.get("email")) for a in authors)
        authors = authors.most_common(1)
        return authors[0][0] if authors else (None, None)

    @property
    @pr_cache_method
    def author(self) -> tuple[str | None, str | None]:
        """Return the (name, email) of the most common commit author on the PR."""
        return self._select_commit_author(self.commits)

    @property
    def diff(self) -> str:
        """Return the PR's unified diff, fetched from GitHub (not cached here)."""
        return self.client.get_diff(self.number)

    @property
    @pr_cache_method
    def comments(self) -> list:
        """Return the PR's discussion comments, verifying they are not stale."""
        comments = self.client.get_pull_request_comments(self.number)
        if any(
            self.client.convert_timestamp_from_github(comment["updated_at"])
            > self.client.convert_timestamp_from_github(self.updated_at)
            for comment in comments
        ):
            raise self.StaleMetadataException(
                "Comments were changed while collecting PR information."
            )

        return comments

    @property
    @pr_cache_method
    def commits(self) -> list[dict]:
        """Return the PR's commits, verifying the head has not moved meanwhile."""
        commits = self.client.get_pull_request_commits(self.number)

        if commits[-1]["sha"] != self.head_sha:
            raise self.StaleMetadataException(
                "Head commit changed while collecting PR information."
            )

        # XXX: What happens if a commit has been committed in the past, but has only
        # been pushed now?
        if any(
            self.client.convert_timestamp_from_github(
                commit["commit"]["committer"]["date"]
            )
            > self.client.convert_timestamp_from_github(self.updated_at)
            for commit in commits
        ):
            raise self.StaleMetadataException(
                "Commits were added while collecting PR information."
            )

        return commits

    @property
    @pr_cache_method
    def commit_comments(self) -> list:
        """Return a list of comments on specific changes of the PR."""
        commits_comments = self.client.get_pull_request_commits_comments(self.number)

        if any(
            self.client.convert_timestamp_from_github(comment["updated_at"])
            > self.client.convert_timestamp_from_github(self.updated_at)
            for comment in commits_comments
        ):
            raise self.StaleMetadataException(
                "Comments were changed while collecting PR information."
            )

        return commits_comments

    @property
    def patch(self) -> str:
        """Return the PR in `git format-patch` form, fetched from GitHub."""
        return self.client.get_patch(self.number)

    @property
    @pr_cache_method
    def reviews(self) -> list:
        """Return a list of reviews for the PR."""
        reviews = self.client.get_pull_request_reviews(self.number)

        if any(
            self.client.convert_timestamp_from_github(review["submitted_at"])
            > self.client.convert_timestamp_from_github(self.updated_at)
            for review in reviews
        ):
            raise self.StaleMetadataException(
                "Reviews were added while collecting PR information."
            )

        return reviews

    @property
    def commit_message(self) -> str:
        """Return a string combining the pull request title, description, and URL."""
        lines = [self.title, ""]
        if self.body:
            lines += [self.body, ""]
        lines.append(f"Pull request: {self.html_url}")
        return "\n".join(lines)

    def serialize(self) -> dict[str, str]:
        """Return a dictionary with various pull request data."""
        return {
            "url": self.url,
            "base_ref": self.base_ref,
            "base_sha": self.base_sha,
            "base_user_login": self.base_user_login,
            "base_user_id": self.base_user_id,
            "created_at": self.created_at,
            "updated_at": self.updated_at,
            "closed_at": self.closed_at,
            "merged_at": self.merged_at,
            "diff_url": self.diff_url,
            "patch_url": self.patch_url,
            "body": self.body,
            "is_draft": self.is_draft,
            "comments_url": self.comments_url,
            "commits_url": self.commits_url,
            "head_ref": self.head_ref,
            "head_sha": self.head_sha,
            "head_repo_git_url": self.head_repo_git_url,
            "html_url": self.html_url,
            "id": self.id,
            "number": self.number,
            "requested_reviewers": self.requested_reviewers,
            "requested_teams": self.requested_teams,
            "state": self.state,
            "title": self.title,
            "user_id": self.user_id,
            "user_html_url": self.user_html_url,
            "user_login": self.user_login,
        }
+
+
class PullRequestPatchHelper(PatchHelper):
    """A PatchHelper-like wrapper for GitHub pull requests.

    Due to the nature of pull requests, it only implements the data-getting
    functionality, and doesn't implement the input and output methods.
    """

    _diff: str

    _author_name: str
    _author_email: str
    _pr: PullRequest

    def __init__(self, pr: PullRequest):
        """Build patch headers and cache the diff from the given PullRequest."""
        super().__init__()

        self._pr = pr

        self._diff = pr.diff

        # Cache the author pair on the declared attributes; previously the
        # values were only held in locals, leaving `_author_name`/`_author_email`
        # declared but never assigned.
        self._author_name, self._author_email = self._pr.author

        self.headers = {
            "date": self._get_timestamp_from_github_timestamp(pr.updated_at),
            "from": f"{self._author_name} <{self._author_email}>",
            "subject": pr.title,
        }

    @classmethod
    def _get_timestamp_from_github_timestamp(cls, timestamp: str) -> str:
        """Convert an ISO-8601 GitHub timestamp to Unix-epoch seconds (string).

        NOTE: duplicates GitHubAPIClient.convert_timestamp_from_github;
        consider sharing a single implementation.
        """
        timestamp_datetime = datetime.fromisoformat(timestamp)
        return str(math.floor(timestamp_datetime.timestamp()))

    @classmethod
    def from_string_io(cls, string_io: io.StringIO) -> "PatchHelper":
        """Implement the PatchHelper interface; not relevant for GitHub PRs."""
        raise NotImplementedError("`from_string_io` not implemented.")

    @classmethod
    def from_bytes_io(cls, bytes_io: io.BytesIO) -> "PatchHelper":
        """Implement the PatchHelper interface; not relevant for GitHub PRs."""
        raise NotImplementedError("`from_bytes_io` not implemented.")

    def get_commit_description(self) -> str:
        """Returns the commit description."""
        return self.get_header("subject")

    @override
    def get_diff(self) -> str:
        """Return the patch diff.

        WARNING: As of 2025-10-13, this doesn't include any binary data.
        """
        return self._diff

    @override
    def write(self, f: io.StringIO):
        """Implement the PatchHelper interface; not relevant for GitHub PRs."""
        # The message previously (incorrectly) referred to `from_bytes_io`.
        raise NotImplementedError("`write` not implemented.")

    @override
    def parse_author_information(self) -> tuple[str, str]:
        """Return the author name and email from the patch."""
        return self._author_name, self._author_email

    @override
    def get_timestamp(self) -> str:
        """Return an `hg export` formatted timestamp."""
        return self.get_header("date")
diff --git a/src/lando/utils/github_checks.py b/src/lando/utils/github_checks.py
new file mode 100644
index 000000000..ad96af288
--- /dev/null
+++ b/src/lando/utils/github_checks.py
@@ -0,0 +1,650 @@
+import logging
+from abc import ABC, abstractmethod
+from datetime import datetime, timezone
+from typing import Iterable
+
+import requests
+from django.http import HttpRequest
+from typing_extensions import override
+
+from lando.main.models.jobs import JobStatus
+from lando.main.models.landing_job import get_jobs_for_pull
+from lando.main.models.repo import Repo
+from lando.utils.github import GitHubAPIClient, PullRequest
+from lando.utils.landing_checks import Check
+
+logger = logging.getLogger("__name__")
+
+
class PullRequestCheck(Check, ABC):
    """Abstract base for checks that assess a GitHub pull request."""

    @classmethod
    @abstractmethod
    def run(
        cls,
        pull_request: PullRequest,
        target_repo: Repo,
        request: HttpRequest,
    ) -> list[str]:
        """Inspect the PR and return a list of issue messages (empty when clean)."""
+
+
+#
+# BLOCKERS
+#
+
+
class PullRequestBlocker(PullRequestCheck, ABC):
    """Base class for checks whose failure blocks landing outright."""
+
+
class PullRequestUserSCMLevelBlocker(PullRequestBlocker):
    """You have insufficient permissions to land or your access has expired."""

    @override
    @classmethod
    def name(cls) -> str:
        return "PullRequestUserSCMLevelBlocker"

    @override
    @classmethod
    def description(cls) -> str:
        return "You have insufficient permissions to land or your access has expired."

    @override
    @classmethod
    def run(
        cls,
        pull_request: PullRequest,
        target_repo: Repo,
        request: HttpRequest,
    ) -> list[str]:
        # Deliberately inspect the user's *direct* permissions only, ignoring
        # anything inherited from groups or other roles (e.g., admin).
        direct_permissions = request.user.get_user_permissions()
        if target_repo.required_permission not in direct_permissions:
            return [cls.description()]
        return []
+
+
class PullRequestClosedBlocker(PullRequestBlocker):
    """Revision is closed."""

    @override
    @classmethod
    def name(cls) -> str:
        return "PullRequestClosedBlocker"

    @override
    @classmethod
    def description(cls) -> str:
        return "Revision is closed."

    @override
    @classmethod
    def run(
        cls,
        pull_request: PullRequest,
        target_repo: Repo,
        request: HttpRequest,
    ) -> list[str]:
        # A closed PR can never be landed.
        if pull_request.state != pull_request.State.CLOSED:
            return []
        return [cls.description()]
+
+
class PullRequestDiffAuthorIsKnownBlocker(PullRequestBlocker):
    """Commit does not have proper author information."""

    # (Removed a leftover commented-out docstring inherited from the
    # Phabricator version of this check.)

    @override
    @classmethod
    def name(cls) -> str:
        return "PullRequestDiffAuthorIsKnownBlocker"

    @override
    @classmethod
    def description(cls) -> str:
        return "Commit does not have proper author information."

    @override
    @classmethod
    def run(
        cls,
        pull_request: PullRequest,
        target_repo: Repo,
        request: HttpRequest,
    ) -> list[str]:
        """Flag every commit on the PR that lacks an author name or email."""
        messages = []

        for commit in pull_request.commits:
            author = commit["commit"]["author"]
            if not author["name"] or not author["email"]:
                messages.append(
                    f"{cls.description()} {commit['sha']}: {commit['commit']['message']} ({commit['html_url']})"
                )

        return messages
+
+
class PullRequestAuthorPlannedChangesBlocker(PullRequestBlocker):
    """The author has indicated they are planning changes to this revision."""

    @override
    @classmethod
    def name(cls) -> str:
        return "PullRequestAuthorPlannedChangesBlocker"

    @override
    @classmethod
    def description(cls) -> str:
        return "The author has indicated they are planning changes to this revision."

    @override
    @classmethod
    def run(
        cls,
        pull_request: PullRequest,
        target_repo: Repo,
        request: HttpRequest,
    ) -> list[str]:
        # GitHub's "draft" flag is the PR equivalent of "planned changes".
        return [cls.description()] if pull_request.is_draft else []
+
+
class PullRequestRevisionDataClassificationBlocker(PullRequestBlocker):
    """Revision makes changes to data collection and should have its data classification assessed before landing."""

    @override
    @classmethod
    def name(cls) -> str:
        return "PullRequestRevisionDataClassificationBlocker"

    @override
    @classmethod
    def description(cls) -> str:
        return "Revision makes changes to data collection and should have its data classification assessed before landing."

    @override
    @classmethod
    def run(
        cls,
        pull_request: PullRequest,
        target_repo: Repo,
        request: HttpRequest,
    ) -> list[str]:
        # Block while the dedicated label is present on the PR.
        needs_classification = any(
            label["name"] == "needs-data-classification"
            for label in pull_request.labels
        )
        return [cls.description()] if needs_classification else []
+
+
+# GITHUB-SPECIFIC CHECKS
+
+
class PullRequestBaseBranchDoesNotMatchTree(PullRequestBlocker):
    """The base branch for this PR doesn't match this Tree."""

    @override
    @classmethod
    def name(cls) -> str:
        return "PullRequestBaseBranchDoesNotMatchTree"

    @override
    @classmethod
    def description(cls) -> str:
        return "The base branch for this PR doesn't match this Tree."

    @override
    @classmethod
    def run(
        cls,
        pull_request: PullRequest,
        target_repo: Repo,
        request: HttpRequest,
    ) -> list[str]:
        # The PR must target the repo's configured default branch.
        if pull_request.base_ref == target_repo.default_branch:
            return []
        return [cls.description()]
+
+
class PullRequestConflictWithBaseBranch(PullRequestBlocker):
    """This Pull Request has conflicts that must be resolved."""

    @override
    @classmethod
    def name(cls) -> str:
        return "PullRequestConflictWithBaseBranch"

    @override
    @classmethod
    def description(cls) -> str:
        return "This Pull Request has conflicts that must be resolved."

    @override
    @classmethod
    def run(
        cls,
        pull_request: PullRequest,
        target_repo: Repo,
        request: HttpRequest,
    ) -> list[str]:
        # "dirty" means GitHub cannot create the merge commit cleanly.
        is_conflicted = pull_request.mergeable_state == pull_request.Mergeability.DIRTY
        return [cls.description()] if is_conflicted else []
+
+
class PullRequestFailingCheck(PullRequestBlocker):
    """This Pull Request has some failing checks."""

    @override
    @classmethod
    def name(cls) -> str:
        return "PullRequestFailingCheck"

    @override
    @classmethod
    def description(cls) -> str:
        # Fixed the duplicated "has has" in the user-facing message.
        return "This Pull Request has some failing checks."

    @override
    @classmethod
    def run(
        cls,
        pull_request: PullRequest,
        target_repo: Repo,
        request: HttpRequest,
    ) -> list[str]:
        # If we need more details on which tests are failing, we could use the commit
        # statuses endpoint instead [0].
        #
        # [0] https://docs.github.com/en/rest/commits/statuses?apiVersion=2022-11-28
        if pull_request.mergeable_state == pull_request.Mergeability.UNSTABLE:
            return [cls.description()]

        return []
+
+
+#
+# WARNINGS
+#
+
+
class PullRequestWarning(PullRequestCheck, ABC):
    """Base class for checks whose failure warns but does not block landing."""
+
+
class PullRequestBlockingReviewsWarning(PullRequestWarning):
    """Has a review intended to block landing."""

    @override
    @classmethod
    def name(cls) -> str:
        return "PullRequestBlockingReviewsWarning"

    @override
    @classmethod
    def description(cls) -> str:
        return "Has a review intended to block landing."

    @override
    @classmethod
    def run(
        cls,
        pull_request: PullRequest,
        target_repo: Repo,
        request: HttpRequest,
    ) -> list[str]:
        """Warn for each CHANGES_REQUESTED review on the PR."""
        messages = []

        for review in pull_request.reviews:
            if review["state"] != pull_request.Review.CHANGES_REQUESTED:
                continue

            # A review body may be null or empty; previously
            # `review['body'].splitlines()[0]` crashed in both cases.
            body = review.get("body") or ""
            first_line = body.splitlines()[0] if body else ""
            messages.append(
                # Also balanced the parentheses around the review URL.
                f"{cls.description()} {first_line}… ({review['html_url']})"
            )

        return messages
+
+
class PullRequestPreviouslyLandedWarning(PullRequestWarning):
    """Has previously landed."""

    @override
    @classmethod
    def name(cls) -> str:
        return "PullRequestPreviouslyLandedWarning"

    @override
    @classmethod
    def description(cls) -> str:
        return "Has previously landed."

    @override
    @classmethod
    def run(
        cls,
        pull_request: PullRequest,
        target_repo: Repo,
        request: HttpRequest,
    ) -> list[str]:
        # Look for any prior landing job for this PR that already landed.
        previous_jobs = get_jobs_for_pull(target_repo, pull_request.number)
        has_landed = any(job.status == JobStatus.LANDED for job in previous_jobs)
        return [cls.description()] if has_landed else []
+
+
class PullRequestNotAcceptedWarning(PullRequestWarning):
    """Is not Accepted."""

    @override
    @classmethod
    def name(cls) -> str:
        return "PullRequestNotAcceptedWarning"

    @override
    @classmethod
    def description(cls) -> str:
        return "Is not Accepted."

    @override
    @classmethod
    def run(
        cls,
        pull_request: PullRequest,
        target_repo: Repo,
        request: HttpRequest,
    ) -> list[str]:
        # At least one APPROVED review is required to clear this warning.
        for review in pull_request.reviews:
            if review["state"] == pull_request.Review.APPROVED:
                return []

        return [cls.description()]
+
+
class PullRequestReviewsNotCurrentWarning(PullRequestWarning):
    """No reviewer has accepted the current diff."""

    @override
    @classmethod
    def name(cls) -> str:
        return "PullRequestReviewsNotCurrentWarning"

    @override
    @classmethod
    def description(cls) -> str:
        return "No reviewer has accepted the current diff."

    @override
    @classmethod
    def run(
        cls,
        pull_request: PullRequest,
        target_repo: Repo,
        request: HttpRequest,
    ) -> list[str]:
        # Collect the commit SHAs that carry an approval, and require the
        # current head SHA to be among them.
        approved_shas = {
            review["commit_id"]
            for review in pull_request.reviews
            if review["state"] == pull_request.Review.APPROVED
        }

        if pull_request.head_sha in approved_shas:
            return []

        return [cls.description()]
+
+
class PullRequestMissingTestingTagWarning(PullRequestWarning):
    """Pull request is missing a Testing Policy Project Tag."""

    @override
    @classmethod
    def name(cls) -> str:
        return "PullRequestMissingTestingTagWarning"

    @override
    @classmethod
    def description(cls) -> str:
        return "Pull request is missing a Testing Policy Project Tag."

    @override
    @classmethod
    def run(
        cls,
        pull_request: PullRequest,
        target_repo: Repo,
        request: HttpRequest,
    ) -> list[str]:
        # Exactly one "testing*" label must be present; zero or several both warn.
        testing_tag_count = sum(
            1 for label in pull_request.labels if label["name"].startswith("testing")
        )

        if testing_tag_count != 1:
            return [cls.description()]

        return []
+
+
class PullRequestWIPWarning(PullRequestWarning):
    """Pull request is marked as WIP."""

    @override
    @classmethod
    def name(cls) -> str:
        return "PullRequestWIPWarning"

    @override
    @classmethod
    def description(cls) -> str:
        return "Pull request is marked as WIP."

    @override
    @classmethod
    def run(
        cls,
        pull_request: PullRequest,
        target_repo: Repo,
        request: HttpRequest,
    ) -> list[str]:
        # Case-insensitive match on a leading "wip:" marker in the title.
        is_wip = pull_request.title.lower().startswith("wip:")
        return [cls.description()] if is_wip else []
+
+
class PullRequestCodeFreezeWarning(PullRequestWarning):
    """Repository is under a soft code freeze."""

    # XXX: This code is duplicated from transplants.warning_code_freeze. See bug 2001021.

    # The code freeze dates generally correspond to PST work days.
    CODE_FREEZE_OFFSET = "-0800"

    @override
    @classmethod
    def name(cls) -> str:
        return "PullRequestCodeFreezeWarning"

    @override
    @classmethod
    def description(cls) -> str:
        return "Repository is under a soft code freeze."

    @override
    @classmethod
    def run(
        cls,
        pull_request: PullRequest,
        target_repo: Repo,
        request: HttpRequest,
    ) -> list[str]:
        # No product-details feed configured: check is not applicable.
        if not target_repo.product_details_url:
            return []

        # NOTE(review): no timeout on this request — a stalled server would
        # hang the check; consider `requests.get(..., timeout=...)`.
        try:
            product_details = requests.get(target_repo.product_details_url).json()
        except requests.exceptions.RequestException as e:
            logger.exception(e)
            return [
                f"Could not retrieve repository's code freeze status from {target_repo.product_details_url}."
            ]

        freeze_date_str = product_details.get("NEXT_SOFTFREEZE_DATE")
        merge_date_str = product_details.get("NEXT_MERGE_DATE")
        # If the JSON doesn't have these keys, this warning isn't applicable
        if not freeze_date_str or not merge_date_str:
            return []

        today = datetime.now(tz=timezone.utc)
        # NOTE(review): the date is parsed with the -0800 offset but
        # `.replace(tzinfo=timezone.utc)` then relabels the same wall-clock
        # time as UTC, discarding the offset (an 8-hour shift of the actual
        # instant). This mirrors the duplicated transplants code (bug 2001021)
        # — confirm it is intentional before changing.
        freeze_date = datetime.strptime(
            f"{freeze_date_str} {cls.CODE_FREEZE_OFFSET}",
            "%Y-%m-%d %z",
        ).replace(tzinfo=timezone.utc)
        if today < freeze_date:
            return []

        merge_date = datetime.strptime(
            f"{merge_date_str} {cls.CODE_FREEZE_OFFSET}",
            "%Y-%m-%d %z",
        ).replace(tzinfo=timezone.utc)

        if freeze_date <= today <= merge_date:
            return [f"Repository is under a soft code freeze (ends {merge_date_str})."]

        return []
+
+
class PullRequestUnresolvedCommentsWarning(PullRequestWarning):
    """Pull request has unresolved comments."""

    @override
    @classmethod
    def name(cls) -> str:
        return "PullRequestUnresolvedCommentsWarning"

    @override
    @classmethod
    def description(cls) -> str:
        return "Pull request has unresolved comments."

    @override
    @classmethod
    def run(
        cls,
        pull_request: PullRequest,
        target_repo: Repo,
        request: HttpRequest,
    ) -> list[str]:
        # One warning per unresolved change comment on the PR.
        return [
            f"{cls.description()} {comment['body']} ({comment['url']})"
            for comment in pull_request.commit_comments
            if not comment["is_resolved"]
        ]
+
+
class PullRequestMultipleAuthorsWarning(PullRequestWarning):
    """Pull request has multiple authors."""

    @override
    @classmethod
    def name(cls) -> str:
        return "PullRequestMultipleAuthorsWarning"

    @override
    @classmethod
    def description(cls) -> str:
        return "Pull request has multiple authors."

    @override
    @classmethod
    def run(
        cls,
        pull_request: PullRequest,
        target_repo: Repo,
        request: HttpRequest,
    ) -> list[str]:
        # Set comprehension: each "Name <email>" author string appears once.
        unique_authors = {
            f"{commit['commit']['author']['name']} <{commit['commit']['author']['email']}>"
            for commit in pull_request.commits
        }

        # Warn whenever the PR has anything other than exactly one author.
        if len(unique_authors) != 1:
            return [cls.description() + " " + cls._authors_str(unique_authors)]

        return []

    @classmethod
    def _authors_str(cls, authors: Iterable[str]) -> str:
        """Render the author strings as a comma-separated list."""
        return ", ".join(authors)
+
+
# Registries of available PR checks, discovered from the class hierarchy.
# NOTE: __subclasses__() only returns *direct* subclasses, so checks defined
# by subclassing a concrete check (rather than the bases) won't be picked up.
ALL_PULL_REQUEST_BLOCKERS = PullRequestBlocker.__subclasses__()
ALL_PULL_REQUEST_WARNINGS = PullRequestWarning.__subclasses__()
ALL_PULL_REQUEST_CHECKS = ALL_PULL_REQUEST_BLOCKERS + ALL_PULL_REQUEST_WARNINGS
+
+
class PullRequestChecks:
    """Utility class to check a GitHub pull request for a given list of issues."""

    _client: GitHubAPIClient
    _request: HttpRequest
    _target_repo: Repo

    def __init__(
        self,
        client: GitHubAPIClient,
        target_repo: Repo,
        request: HttpRequest,
    ):
        """Store the API client, target repo, and current HTTP request."""
        self._client = client
        self._target_repo = target_repo
        self._request = request

    def run(self, checks_list: list[str], pull_request: PullRequest) -> list[str]:
        """Run every registered check named in `checks_list` against the PR.

        Returns the concatenated issue messages; a check that raises is
        reported as a message rather than aborting the whole run.
        """
        messages: list[str] = []

        selected_checks = (
            chk for chk in ALL_PULL_REQUEST_CHECKS if chk.name() in checks_list
        )

        for check in selected_checks:
            try:
                outcome = check.run(pull_request, self._target_repo, self._request)
            except NotImplementedError:
                messages.append(f"{check.name()} is not implemented")
                continue
            except Exception as exc:
                logger.exception(exc)
                messages.append(f"{check.name()} failed to run with error: {exc}")
                continue

            if outcome:
                messages.extend(outcome)

        return messages
diff --git a/src/lando/utils/landing_checks.py b/src/lando/utils/landing_checks.py
index d295a0eef..9a6e18bf4 100644
--- a/src/lando/utils/landing_checks.py
+++ b/src/lando/utils/landing_checks.py
@@ -1,5 +1,3 @@
-from __future__ import annotations
-
import re
from abc import ABC, abstractmethod
from collections.abc import Iterable
@@ -22,14 +20,6 @@
)
from lando.main.scm.helpers import PatchHelper
-# Decimal notation for the `symlink` file mode.
-SYMLINK_MODE = 40960
-
-# WPTSync bot is restricted to paths matching this regex.
-WPTSYNC_ALLOWED_PATHS_RE = re.compile(
- r"testing/web-platform/(?:moz\.build|meta/.*|tests/.*)$"
-)
-
def wrap_filenames(filenames: list[str]) -> str:
"""Convert a list of filenames to a string with names wrapped in backticks."""
@@ -83,6 +73,9 @@ def result(self) -> str | None:
class PreventSymlinksCheck(PatchCheck):
"""Check for symlinks introduced in the diff."""
+ # Decimal notation for the `symlink` file mode.
+ SYMLINK_MODE = 40960 # == 0120000
+
@override
@classmethod
def name(cls) -> str:
@@ -101,7 +94,7 @@ def next_diff(self, diff: dict):
# Check the file mode on each file and ensure the file is not a symlink.
# `rs_parsepatch` has a `new` and `old` mode key, we are interested in
# only the newly introduced modes.
- if "new" in modes and modes["new"] == SYMLINK_MODE:
+ if "new" in modes and modes["new"] == self.SYMLINK_MODE:
self.symlinked_files.append(diff["filename"])
def result(self) -> str | None:
@@ -383,6 +376,11 @@ def result(self) -> str | None:
class WPTSyncCheck(PatchCollectionCheck):
"""Check the WPTSync bot is only pushing changes to relevant subset of the tree."""
+ # WPTSync bot is restricted to paths matching this regex.
+ WPTSYNC_ALLOWED_PATHS_RE = re.compile(
+ r"testing/web-platform/(?:moz\.build|meta/.*|tests/.*)$"
+ )
+
@override
@classmethod
def name(cls) -> str:
@@ -403,7 +401,7 @@ def next_diff(self, patch_helper: PatchHelper):
diffs = rs_parsepatch.get_diffs(patch_helper.get_diff())
for parsed_diff in diffs:
filename = parsed_diff["filename"]
- if not WPTSYNC_ALLOWED_PATHS_RE.match(filename):
+ if not self.WPTSYNC_ALLOWED_PATHS_RE.match(filename):
self.wpt_disallowed_files.append(filename)
def result(self) -> str | None:
@@ -569,14 +567,18 @@ def run(
hook_names: Iterable[str]
a list of strings of check names
+ Generally, use something like
+
+ [chk.name() for chk in ALL_CHECKS]
+
patches: Iterable[PatchHelper]
a list of patches to check
Returns:
list[str]: a list of error messages.
"""
- commit_checks = [chk for chk in ALL_COMMIT_CHECKS if chk.__name__ in hook_names]
- stack_checks = [chk for chk in ALL_STACK_CHECKS if chk.__name__ in hook_names]
+ commit_checks = [chk for chk in ALL_COMMIT_CHECKS if chk.name() in hook_names]
+ stack_checks = [chk for chk in ALL_STACK_CHECKS if chk.name() in hook_names]
assessor = PatchCollectionAssessor(
patches, push_user_email=self.requester_email
diff --git a/src/lando/utils/tests/data/github_api_response_pull.json b/src/lando/utils/tests/data/github_api_response_pull.json
new file mode 100644
index 000000000..f6b154042
--- /dev/null
+++ b/src/lando/utils/tests/data/github_api_response_pull.json
@@ -0,0 +1 @@
+{"url":"https://api.github.com/repos/mozilla-conduit/test-repo/pulls/1","id":2782816395,"node_id":"PR_kwDONhJ9as6l3miL","html_url":"https://github.com/mozilla-conduit/test-repo/pull/1","diff_url":"https://github.com/mozilla-conduit/test-repo/pull/1.diff","patch_url":"https://github.com/mozilla-conduit/test-repo/pull/1.patch","issue_url":"https://api.github.com/repos/mozilla-conduit/test-repo/issues/1","number":1,"state":"open","locked":false,"title":"WIP: test pull request with multiple commits","user":{"login":"zzzeid","id":2043828,"node_id":"MDQ6VXNlcjIwNDM4Mjg=","avatar_url":"https://avatars.githubusercontent.com/u/2043828?v=4","gravatar_id":"","url":"https://api.github.com/users/zzzeid","html_url":"https://github.com/zzzeid","followers_url":"https://api.github.com/users/zzzeid/followers","following_url":"https://api.github.com/users/zzzeid/following{/other_user}","gists_url":"https://api.github.com/users/zzzeid/gists{/gist_id}","starred_url":"https://api.github.com/users/zzzeid/starred{/owner}{/repo}","subscriptions_url":"https://api.github.com/users/zzzeid/subscriptions","organizations_url":"https://api.github.com/users/zzzeid/orgs","repos_url":"https://api.github.com/users/zzzeid/repos","events_url":"https://api.github.com/users/zzzeid/events{/privacy}","received_events_url":"https://api.github.com/users/zzzeid/received_events","type":"User","user_view_type":"public","site_admin":false},"body":"test 
description","created_at":"2025-08-28T19:49:55Z","updated_at":"2025-10-21T03:30:19Z","closed_at":null,"merged_at":null,"merge_commit_sha":"76d87c626e1c109867e89c87a099938c10a1caac","assignee":null,"assignees":[],"requested_reviewers":[],"requested_teams":[],"labels":[{"id":9491938338,"node_id":"LA_kwDONhJ9as8AAAACNcN8Ig","url":"https://api.github.com/repos/mozilla-conduit/test-repo/labels/needs-data-classification","name":"needs-data-classification","color":"207987","default":false,"description":""}],"milestone":null,"draft":true,"commits_url":"https://api.github.com/repos/mozilla-conduit/test-repo/pulls/1/commits","review_comments_url":"https://api.github.com/repos/mozilla-conduit/test-repo/pulls/1/comments","review_comment_url":"https://api.github.com/repos/mozilla-conduit/test-repo/pulls/comments{/number}","comments_url":"https://api.github.com/repos/mozilla-conduit/test-repo/issues/1/comments","statuses_url":"https://api.github.com/repos/mozilla-conduit/test-repo/statuses/79250dceba7ff53b9e7e813262b6162c3a1c776a","head":{"label":"mozilla-conduit:branch_b","ref":"branch_b","sha":"79250dceba7ff53b9e7e813262b6162c3a1c776a","user":{"login":"mozilla-conduit","id":25333391,"node_id":"MDEyOk9yZ2FuaXphdGlvbjI1MzMzMzkx","avatar_url":"https://avatars.githubusercontent.com/u/25333391?v=4","gravatar_id":"","url":"https://api.github.com/users/mozilla-conduit","html_url":"https://github.com/mozilla-conduit","followers_url":"https://api.github.com/users/mozilla-conduit/followers","following_url":"https://api.github.com/users/mozilla-conduit/following{/other_user}","gists_url":"https://api.github.com/users/mozilla-conduit/gists{/gist_id}","starred_url":"https://api.github.com/users/mozilla-conduit/starred{/owner}{/repo}","subscriptions_url":"https://api.github.com/users/mozilla-conduit/subscriptions","organizations_url":"https://api.github.com/users/mozilla-conduit/orgs","repos_url":"https://api.github.com/users/mozilla-conduit/repos","events_url":"https://api.github.com/users/
mozilla-conduit/events{/privacy}","received_events_url":"https://api.github.com/users/mozilla-conduit/received_events","type":"Organization","user_view_type":"public","site_admin":false},"repo":{"id":907181418,"node_id":"R_kgDONhJ9ag","name":"test-repo","full_name":"mozilla-conduit/test-repo","private":false,"owner":{"login":"mozilla-conduit","id":25333391,"node_id":"MDEyOk9yZ2FuaXphdGlvbjI1MzMzMzkx","avatar_url":"https://avatars.githubusercontent.com/u/25333391?v=4","gravatar_id":"","url":"https://api.github.com/users/mozilla-conduit","html_url":"https://github.com/mozilla-conduit","followers_url":"https://api.github.com/users/mozilla-conduit/followers","following_url":"https://api.github.com/users/mozilla-conduit/following{/other_user}","gists_url":"https://api.github.com/users/mozilla-conduit/gists{/gist_id}","starred_url":"https://api.github.com/users/mozilla-conduit/starred{/owner}{/repo}","subscriptions_url":"https://api.github.com/users/mozilla-conduit/subscriptions","organizations_url":"https://api.github.com/users/mozilla-conduit/orgs","repos_url":"https://api.github.com/users/mozilla-conduit/repos","events_url":"https://api.github.com/users/mozilla-conduit/events{/privacy}","received_events_url":"https://api.github.com/users/mozilla-conduit/received_events","type":"Organization","user_view_type":"public","site_admin":false},"html_url":"https://github.com/mozilla-conduit/test-repo","description":"This is just a test 
repo.","fork":true,"url":"https://api.github.com/repos/mozilla-conduit/test-repo","forks_url":"https://api.github.com/repos/mozilla-conduit/test-repo/forks","keys_url":"https://api.github.com/repos/mozilla-conduit/test-repo/keys{/key_id}","collaborators_url":"https://api.github.com/repos/mozilla-conduit/test-repo/collaborators{/collaborator}","teams_url":"https://api.github.com/repos/mozilla-conduit/test-repo/teams","hooks_url":"https://api.github.com/repos/mozilla-conduit/test-repo/hooks","issue_events_url":"https://api.github.com/repos/mozilla-conduit/test-repo/issues/events{/number}","events_url":"https://api.github.com/repos/mozilla-conduit/test-repo/events","assignees_url":"https://api.github.com/repos/mozilla-conduit/test-repo/assignees{/user}","branches_url":"https://api.github.com/repos/mozilla-conduit/test-repo/branches{/branch}","tags_url":"https://api.github.com/repos/mozilla-conduit/test-repo/tags","blobs_url":"https://api.github.com/repos/mozilla-conduit/test-repo/git/blobs{/sha}","git_tags_url":"https://api.github.com/repos/mozilla-conduit/test-repo/git/tags{/sha}","git_refs_url":"https://api.github.com/repos/mozilla-conduit/test-repo/git/refs{/sha}","trees_url":"https://api.github.com/repos/mozilla-conduit/test-repo/git/trees{/sha}","statuses_url":"https://api.github.com/repos/mozilla-conduit/test-repo/statuses/{sha}","languages_url":"https://api.github.com/repos/mozilla-conduit/test-repo/languages","stargazers_url":"https://api.github.com/repos/mozilla-conduit/test-repo/stargazers","contributors_url":"https://api.github.com/repos/mozilla-conduit/test-repo/contributors","subscribers_url":"https://api.github.com/repos/mozilla-conduit/test-repo/subscribers","subscription_url":"https://api.github.com/repos/mozilla-conduit/test-repo/subscription","commits_url":"https://api.github.com/repos/mozilla-conduit/test-repo/commits{/sha}","git_commits_url":"https://api.github.com/repos/mozilla-conduit/test-repo/git/commits{/sha}","comments_url":"https://api.github
.com/repos/mozilla-conduit/test-repo/comments{/number}","issue_comment_url":"https://api.github.com/repos/mozilla-conduit/test-repo/issues/comments{/number}","contents_url":"https://api.github.com/repos/mozilla-conduit/test-repo/contents/{+path}","compare_url":"https://api.github.com/repos/mozilla-conduit/test-repo/compare/{base}...{head}","merges_url":"https://api.github.com/repos/mozilla-conduit/test-repo/merges","archive_url":"https://api.github.com/repos/mozilla-conduit/test-repo/{archive_format}{/ref}","downloads_url":"https://api.github.com/repos/mozilla-conduit/test-repo/downloads","issues_url":"https://api.github.com/repos/mozilla-conduit/test-repo/issues{/number}","pulls_url":"https://api.github.com/repos/mozilla-conduit/test-repo/pulls{/number}","milestones_url":"https://api.github.com/repos/mozilla-conduit/test-repo/milestones{/number}","notifications_url":"https://api.github.com/repos/mozilla-conduit/test-repo/notifications{?since,all,participating}","labels_url":"https://api.github.com/repos/mozilla-conduit/test-repo/labels{/name}","releases_url":"https://api.github.com/repos/mozilla-conduit/test-repo/releases{/id}","deployments_url":"https://api.github.com/repos/mozilla-conduit/test-repo/deployments","created_at":"2024-12-23T02:41:51Z","updated_at":"2025-10-24T19:27:33Z","pushed_at":"2025-10-24T19:27:27Z","git_url":"git://github.com/mozilla-conduit/test-repo.git","ssh_url":"git@github.com:mozilla-conduit/test-repo.git","clone_url":"https://github.com/mozilla-conduit/test-repo.git","svn_url":"https://github.com/mozilla-conduit/test-repo","homepage":null,"size":37,"stargazers_count":0,"watchers_count":0,"language":null,"has_issues":false,"has_projects":false,"has_downloads":true,"has_wiki":false,"has_pages":false,"has_discussions":false,"forks_count":1,"mirror_url":null,"archived":false,"disabled":false,"open_issues_count":3,"license":null,"allow_forking":true,"is_template":false,"web_commit_signoff_required":false,"topics":[],"visibility":"public","fork
s":1,"open_issues":3,"watchers":0,"default_branch":"main"}},"base":{"label":"mozilla-conduit:branch_a","ref":"branch_a","sha":"61635cec955077dafcf1bc18be380e037368a8da","user":{"login":"mozilla-conduit","id":25333391,"node_id":"MDEyOk9yZ2FuaXphdGlvbjI1MzMzMzkx","avatar_url":"https://avatars.githubusercontent.com/u/25333391?v=4","gravatar_id":"","url":"https://api.github.com/users/mozilla-conduit","html_url":"https://github.com/mozilla-conduit","followers_url":"https://api.github.com/users/mozilla-conduit/followers","following_url":"https://api.github.com/users/mozilla-conduit/following{/other_user}","gists_url":"https://api.github.com/users/mozilla-conduit/gists{/gist_id}","starred_url":"https://api.github.com/users/mozilla-conduit/starred{/owner}{/repo}","subscriptions_url":"https://api.github.com/users/mozilla-conduit/subscriptions","organizations_url":"https://api.github.com/users/mozilla-conduit/orgs","repos_url":"https://api.github.com/users/mozilla-conduit/repos","events_url":"https://api.github.com/users/mozilla-conduit/events{/privacy}","received_events_url":"https://api.github.com/users/mozilla-conduit/received_events","type":"Organization","user_view_type":"public","site_admin":false},"repo":{"id":907181418,"node_id":"R_kgDONhJ9ag","name":"test-repo","full_name":"mozilla-conduit/test-repo","private":false,"owner":{"login":"mozilla-conduit","id":25333391,"node_id":"MDEyOk9yZ2FuaXphdGlvbjI1MzMzMzkx","avatar_url":"https://avatars.githubusercontent.com/u/25333391?v=4","gravatar_id":"","url":"https://api.github.com/users/mozilla-conduit","html_url":"https://github.com/mozilla-conduit","followers_url":"https://api.github.com/users/mozilla-conduit/followers","following_url":"https://api.github.com/users/mozilla-conduit/following{/other_user}","gists_url":"https://api.github.com/users/mozilla-conduit/gists{/gist_id}","starred_url":"https://api.github.com/users/mozilla-conduit/starred{/owner}{/repo}","subscriptions_url":"https://api.github.com/users/mozilla-conduit
/subscriptions","organizations_url":"https://api.github.com/users/mozilla-conduit/orgs","repos_url":"https://api.github.com/users/mozilla-conduit/repos","events_url":"https://api.github.com/users/mozilla-conduit/events{/privacy}","received_events_url":"https://api.github.com/users/mozilla-conduit/received_events","type":"Organization","user_view_type":"public","site_admin":false},"html_url":"https://github.com/mozilla-conduit/test-repo","description":"This is just a test repo.","fork":true,"url":"https://api.github.com/repos/mozilla-conduit/test-repo","forks_url":"https://api.github.com/repos/mozilla-conduit/test-repo/forks","keys_url":"https://api.github.com/repos/mozilla-conduit/test-repo/keys{/key_id}","collaborators_url":"https://api.github.com/repos/mozilla-conduit/test-repo/collaborators{/collaborator}","teams_url":"https://api.github.com/repos/mozilla-conduit/test-repo/teams","hooks_url":"https://api.github.com/repos/mozilla-conduit/test-repo/hooks","issue_events_url":"https://api.github.com/repos/mozilla-conduit/test-repo/issues/events{/number}","events_url":"https://api.github.com/repos/mozilla-conduit/test-repo/events","assignees_url":"https://api.github.com/repos/mozilla-conduit/test-repo/assignees{/user}","branches_url":"https://api.github.com/repos/mozilla-conduit/test-repo/branches{/branch}","tags_url":"https://api.github.com/repos/mozilla-conduit/test-repo/tags","blobs_url":"https://api.github.com/repos/mozilla-conduit/test-repo/git/blobs{/sha}","git_tags_url":"https://api.github.com/repos/mozilla-conduit/test-repo/git/tags{/sha}","git_refs_url":"https://api.github.com/repos/mozilla-conduit/test-repo/git/refs{/sha}","trees_url":"https://api.github.com/repos/mozilla-conduit/test-repo/git/trees{/sha}","statuses_url":"https://api.github.com/repos/mozilla-conduit/test-repo/statuses/{sha}","languages_url":"https://api.github.com/repos/mozilla-conduit/test-repo/languages","stargazers_url":"https://api.github.com/repos/mozilla-conduit/test-repo/stargazers","
contributors_url":"https://api.github.com/repos/mozilla-conduit/test-repo/contributors","subscribers_url":"https://api.github.com/repos/mozilla-conduit/test-repo/subscribers","subscription_url":"https://api.github.com/repos/mozilla-conduit/test-repo/subscription","commits_url":"https://api.github.com/repos/mozilla-conduit/test-repo/commits{/sha}","git_commits_url":"https://api.github.com/repos/mozilla-conduit/test-repo/git/commits{/sha}","comments_url":"https://api.github.com/repos/mozilla-conduit/test-repo/comments{/number}","issue_comment_url":"https://api.github.com/repos/mozilla-conduit/test-repo/issues/comments{/number}","contents_url":"https://api.github.com/repos/mozilla-conduit/test-repo/contents/{+path}","compare_url":"https://api.github.com/repos/mozilla-conduit/test-repo/compare/{base}...{head}","merges_url":"https://api.github.com/repos/mozilla-conduit/test-repo/merges","archive_url":"https://api.github.com/repos/mozilla-conduit/test-repo/{archive_format}{/ref}","downloads_url":"https://api.github.com/repos/mozilla-conduit/test-repo/downloads","issues_url":"https://api.github.com/repos/mozilla-conduit/test-repo/issues{/number}","pulls_url":"https://api.github.com/repos/mozilla-conduit/test-repo/pulls{/number}","milestones_url":"https://api.github.com/repos/mozilla-conduit/test-repo/milestones{/number}","notifications_url":"https://api.github.com/repos/mozilla-conduit/test-repo/notifications{?since,all,participating}","labels_url":"https://api.github.com/repos/mozilla-conduit/test-repo/labels{/name}","releases_url":"https://api.github.com/repos/mozilla-conduit/test-repo/releases{/id}","deployments_url":"https://api.github.com/repos/mozilla-conduit/test-repo/deployments","created_at":"2024-12-23T02:41:51Z","updated_at":"2025-10-24T19:27:33Z","pushed_at":"2025-10-24T19:27:27Z","git_url":"git://github.com/mozilla-conduit/test-repo.git","ssh_url":"git@github.com:mozilla-conduit/test-repo.git","clone_url":"https://github.com/mozilla-conduit/test-repo.git","svn
_url":"https://github.com/mozilla-conduit/test-repo","homepage":null,"size":37,"stargazers_count":0,"watchers_count":0,"language":null,"has_issues":false,"has_projects":false,"has_downloads":true,"has_wiki":false,"has_pages":false,"has_discussions":false,"forks_count":1,"mirror_url":null,"archived":false,"disabled":false,"open_issues_count":3,"license":null,"allow_forking":true,"is_template":false,"web_commit_signoff_required":false,"topics":[],"visibility":"public","forks":1,"open_issues":3,"watchers":0,"default_branch":"main"}},"_links":{"self":{"href":"https://api.github.com/repos/mozilla-conduit/test-repo/pulls/1"},"html":{"href":"https://github.com/mozilla-conduit/test-repo/pull/1"},"issue":{"href":"https://api.github.com/repos/mozilla-conduit/test-repo/issues/1"},"comments":{"href":"https://api.github.com/repos/mozilla-conduit/test-repo/issues/1/comments"},"review_comments":{"href":"https://api.github.com/repos/mozilla-conduit/test-repo/pulls/1/comments"},"review_comment":{"href":"https://api.github.com/repos/mozilla-conduit/test-repo/pulls/comments{/number}"},"commits":{"href":"https://api.github.com/repos/mozilla-conduit/test-repo/pulls/1/commits"},"statuses":{"href":"https://api.github.com/repos/mozilla-conduit/test-repo/statuses/79250dceba7ff53b9e7e813262b6162c3a1c776a"}},"author_association":"NONE","auto_merge":null,"active_lock_reason":null,"merged":false,"mergeable":true,"rebaseable":true,"mergeable_state":"clean","merged_by":null,"comments":3,"review_comments":6,"maintainer_can_modify":false,"commits":7,"additions":4,"deletions":0,"changed_files":8}
\ No newline at end of file
diff --git a/src/lando/utils/tests/data/github_api_response_pull_commits.json b/src/lando/utils/tests/data/github_api_response_pull_commits.json
new file mode 100644
index 000000000..7319d9b00
--- /dev/null
+++ b/src/lando/utils/tests/data/github_api_response_pull_commits.json
@@ -0,0 +1 @@
+[{"sha":"ce9fe5d05e5d56a4756019654e3c2b424cd937b2","node_id":"C_kwDONhJ9atoAKGNlOWZlNWQwNWU1ZDU2YTQ3NTYwMTk2NTRlM2MyYjQyNGNkOTM3YjI","commit":{"author":{"name":"Zeid","email":"zeid@mozilla.com","date":"2025-08-28T19:46:57Z"},"committer":{"name":"Zeid","email":"zeid@mozilla.com","date":"2025-08-28T19:46:57Z"},"message":"second commit","tree":{"sha":"a9debae872efdf54c58b013160357ce3f304eaa0","url":"https://api.github.com/repos/mozilla-conduit/test-repo/git/trees/a9debae872efdf54c58b013160357ce3f304eaa0"},"url":"https://api.github.com/repos/mozilla-conduit/test-repo/git/commits/ce9fe5d05e5d56a4756019654e3c2b424cd937b2","comment_count":0,"verification":{"verified":false,"reason":"unsigned","signature":null,"payload":null,"verified_at":null}},"url":"https://api.github.com/repos/mozilla-conduit/test-repo/commits/ce9fe5d05e5d56a4756019654e3c2b424cd937b2","html_url":"https://github.com/mozilla-conduit/test-repo/commit/ce9fe5d05e5d56a4756019654e3c2b424cd937b2","comments_url":"https://api.github.com/repos/mozilla-conduit/test-repo/commits/ce9fe5d05e5d56a4756019654e3c2b424cd937b2/comments","author":{"login":"zzzeid","id":2043828,"node_id":"MDQ6VXNlcjIwNDM4Mjg=","avatar_url":"https://avatars.githubusercontent.com/u/2043828?v=4","gravatar_id":"","url":"https://api.github.com/users/zzzeid","html_url":"https://github.com/zzzeid","followers_url":"https://api.github.com/users/zzzeid/followers","following_url":"https://api.github.com/users/zzzeid/following{/other_user}","gists_url":"https://api.github.com/users/zzzeid/gists{/gist_id}","starred_url":"https://api.github.com/users/zzzeid/starred{/owner}{/repo}","subscriptions_url":"https://api.github.com/users/zzzeid/subscriptions","organizations_url":"https://api.github.com/users/zzzeid/orgs","repos_url":"https://api.github.com/users/zzzeid/repos","events_url":"https://api.github.com/users/zzzeid/events{/privacy}","received_events_url":"https://api.github.com/users/zzzeid/received_events","type":"User","user_view_type":"public","site_
admin":false},"committer":{"login":"zzzeid","id":2043828,"node_id":"MDQ6VXNlcjIwNDM4Mjg=","avatar_url":"https://avatars.githubusercontent.com/u/2043828?v=4","gravatar_id":"","url":"https://api.github.com/users/zzzeid","html_url":"https://github.com/zzzeid","followers_url":"https://api.github.com/users/zzzeid/followers","following_url":"https://api.github.com/users/zzzeid/following{/other_user}","gists_url":"https://api.github.com/users/zzzeid/gists{/gist_id}","starred_url":"https://api.github.com/users/zzzeid/starred{/owner}{/repo}","subscriptions_url":"https://api.github.com/users/zzzeid/subscriptions","organizations_url":"https://api.github.com/users/zzzeid/orgs","repos_url":"https://api.github.com/users/zzzeid/repos","events_url":"https://api.github.com/users/zzzeid/events{/privacy}","received_events_url":"https://api.github.com/users/zzzeid/received_events","type":"User","user_view_type":"public","site_admin":false},"parents":[{"sha":"61635cec955077dafcf1bc18be380e037368a8da","url":"https://api.github.com/repos/mozilla-conduit/test-repo/commits/61635cec955077dafcf1bc18be380e037368a8da","html_url":"https://github.com/mozilla-conduit/test-repo/commit/61635cec955077dafcf1bc18be380e037368a8da"}]},{"sha":"c27b7d14cb3ddd3ec6a16459156208674b429991","node_id":"C_kwDONhJ9atoAKGMyN2I3ZDE0Y2IzZGRkM2VjNmExNjQ1OTE1NjIwODY3NGI0Mjk5OTE","commit":{"author":{"name":"Zeid","email":"zeid@mozilla.com","date":"2025-10-07T15:24:30Z"},"committer":{"name":"Zeid","email":"zeid@mozilla.com","date":"2025-10-07T15:24:30Z"},"message":"third commit\n\nadd 
image","tree":{"sha":"dd81ba6ec9715ba2e7c5cb23befec2569c38663f","url":"https://api.github.com/repos/mozilla-conduit/test-repo/git/trees/dd81ba6ec9715ba2e7c5cb23befec2569c38663f"},"url":"https://api.github.com/repos/mozilla-conduit/test-repo/git/commits/c27b7d14cb3ddd3ec6a16459156208674b429991","comment_count":0,"verification":{"verified":false,"reason":"unsigned","signature":null,"payload":null,"verified_at":null}},"url":"https://api.github.com/repos/mozilla-conduit/test-repo/commits/c27b7d14cb3ddd3ec6a16459156208674b429991","html_url":"https://github.com/mozilla-conduit/test-repo/commit/c27b7d14cb3ddd3ec6a16459156208674b429991","comments_url":"https://api.github.com/repos/mozilla-conduit/test-repo/commits/c27b7d14cb3ddd3ec6a16459156208674b429991/comments","author":{"login":"zzzeid","id":2043828,"node_id":"MDQ6VXNlcjIwNDM4Mjg=","avatar_url":"https://avatars.githubusercontent.com/u/2043828?v=4","gravatar_id":"","url":"https://api.github.com/users/zzzeid","html_url":"https://github.com/zzzeid","followers_url":"https://api.github.com/users/zzzeid/followers","following_url":"https://api.github.com/users/zzzeid/following{/other_user}","gists_url":"https://api.github.com/users/zzzeid/gists{/gist_id}","starred_url":"https://api.github.com/users/zzzeid/starred{/owner}{/repo}","subscriptions_url":"https://api.github.com/users/zzzeid/subscriptions","organizations_url":"https://api.github.com/users/zzzeid/orgs","repos_url":"https://api.github.com/users/zzzeid/repos","events_url":"https://api.github.com/users/zzzeid/events{/privacy}","received_events_url":"https://api.github.com/users/zzzeid/received_events","type":"User","user_view_type":"public","site_admin":false},"committer":{"login":"zzzeid","id":2043828,"node_id":"MDQ6VXNlcjIwNDM4Mjg=","avatar_url":"https://avatars.githubusercontent.com/u/2043828?v=4","gravatar_id":"","url":"https://api.github.com/users/zzzeid","html_url":"https://github.com/zzzeid","followers_url":"https://api.github.com/users/zzzeid/followers","followin
g_url":"https://api.github.com/users/zzzeid/following{/other_user}","gists_url":"https://api.github.com/users/zzzeid/gists{/gist_id}","starred_url":"https://api.github.com/users/zzzeid/starred{/owner}{/repo}","subscriptions_url":"https://api.github.com/users/zzzeid/subscriptions","organizations_url":"https://api.github.com/users/zzzeid/orgs","repos_url":"https://api.github.com/users/zzzeid/repos","events_url":"https://api.github.com/users/zzzeid/events{/privacy}","received_events_url":"https://api.github.com/users/zzzeid/received_events","type":"User","user_view_type":"public","site_admin":false},"parents":[{"sha":"ce9fe5d05e5d56a4756019654e3c2b424cd937b2","url":"https://api.github.com/repos/mozilla-conduit/test-repo/commits/ce9fe5d05e5d56a4756019654e3c2b424cd937b2","html_url":"https://github.com/mozilla-conduit/test-repo/commit/ce9fe5d05e5d56a4756019654e3c2b424cd937b2"}]},{"sha":"6d13ee6f941eb565909c4dfbae73055ef2247144","node_id":"C_kwDONhJ9atoAKDZkMTNlZTZmOTQxZWI1NjU5MDljNGRmYmFlNzMwNTVlZjIyNDcxNDQ","commit":{"author":{"name":"Olivier Mehani","email":"omehani@mozilla.com","date":"2025-10-08T06:51:16Z"},"committer":{"name":"Olivier Mehani","email":"omehani@mozilla.com","date":"2025-10-08T06:51:16Z"},"message":"add naughty try task config","tree":{"sha":"56f6a5b8f36e3c64d9ad6f7bd8bc667b4ec9ebf0","url":"https://api.github.com/repos/mozilla-conduit/test-repo/git/trees/56f6a5b8f36e3c64d9ad6f7bd8bc667b4ec9ebf0"},"url":"https://api.github.com/repos/mozilla-conduit/test-repo/git/commits/6d13ee6f941eb565909c4dfbae73055ef2247144","comment_count":0,"verification":{"verified":true,"reason":"valid","signature":"-----BEGIN SSH 
SIGNATURE-----\nU1NIU0lHAAAAAQAAARcAAAAHc3NoLXJzYQAAAAMBAAEAAAEBAMm7viy0nMNf9BD5s/rW9N\nmzq9jMVtjjGNb2QheNccgZ5njlEcbCCdlBXQA4RosJ0EjZo8qPHe5zixANIE+WpokAGVBI\nsTZ7EK9BNlLZCx3cqA/gDS66SHUGfytrM4xygjKsEHGyA/Od2yaIc5adCFYiPDJDe4wMr+\n6dAsGBb7zZZ4KxX6T3B0Df90JW46A25LPh0Y2ObPuvD79pzQmwQsoMQG2dPbJZlwGcqHJv\nDpZKZd3Tql5ZqCPYwTidT4xy8W4OOPPTtCl9eR39L5tf5f0Ai4iuG989VzaweaB4v3urz6\n90AnGRHKBPRF6L0uRkCnTfw+/zpjwnUpWOt8wOyg0AAAADZ2l0AAAAAAAAAAZzaGE1MTIA\nAAEUAAAADHJzYS1zaGEyLTUxMgAAAQCtJFaXtNbdVPlXBsaLIQ6fB99qYMBquGNrE1Dpbq\nu9PUFkgE7FZm1QnybaTOeK2xDJcluZUbxSCtoqBruNXIOLGvm3MZ57r+Fr8dJOnuKPm2LM\nS30ucybE5gMPy8ALpHPwMwBnSF19aPlxlUlPBQeqkTVsNtVmxpEac3xs1y1POw74ac3Xdz\n1Smcvg0uxxCPKn2ny6NpXMpCPLpgWr/N3a1k7SLNbl6XuYbzRG56dNds/9ynSQEQlhyJC1\nBLqo/a+0IcMjSh8Qw5qXd+lQ0oKNsLQMPszuCe4XNOfbDlsBYVMVVfN6pNAT6BJ1rqwRDf\nidfDoS9Hrt9uCNdvTrtEI0\n-----END SSH SIGNATURE-----","payload":"tree 56f6a5b8f36e3c64d9ad6f7bd8bc667b4ec9ebf0\nparent c27b7d14cb3ddd3ec6a16459156208674b429991\nauthor Olivier Mehani 1759906276 +1100\ncommitter Olivier Mehani 1759906276 +1100\n\nadd naughty try task 
config\n","verified_at":"2025-10-08T06:51:21Z"}},"url":"https://api.github.com/repos/mozilla-conduit/test-repo/commits/6d13ee6f941eb565909c4dfbae73055ef2247144","html_url":"https://github.com/mozilla-conduit/test-repo/commit/6d13ee6f941eb565909c4dfbae73055ef2247144","comments_url":"https://api.github.com/repos/mozilla-conduit/test-repo/commits/6d13ee6f941eb565909c4dfbae73055ef2247144/comments","author":{"login":"shtrom","id":160280,"node_id":"MDQ6VXNlcjE2MDI4MA==","avatar_url":"https://avatars.githubusercontent.com/u/160280?v=4","gravatar_id":"","url":"https://api.github.com/users/shtrom","html_url":"https://github.com/shtrom","followers_url":"https://api.github.com/users/shtrom/followers","following_url":"https://api.github.com/users/shtrom/following{/other_user}","gists_url":"https://api.github.com/users/shtrom/gists{/gist_id}","starred_url":"https://api.github.com/users/shtrom/starred{/owner}{/repo}","subscriptions_url":"https://api.github.com/users/shtrom/subscriptions","organizations_url":"https://api.github.com/users/shtrom/orgs","repos_url":"https://api.github.com/users/shtrom/repos","events_url":"https://api.github.com/users/shtrom/events{/privacy}","received_events_url":"https://api.github.com/users/shtrom/received_events","type":"User","user_view_type":"public","site_admin":false},"committer":{"login":"shtrom","id":160280,"node_id":"MDQ6VXNlcjE2MDI4MA==","avatar_url":"https://avatars.githubusercontent.com/u/160280?v=4","gravatar_id":"","url":"https://api.github.com/users/shtrom","html_url":"https://github.com/shtrom","followers_url":"https://api.github.com/users/shtrom/followers","following_url":"https://api.github.com/users/shtrom/following{/other_user}","gists_url":"https://api.github.com/users/shtrom/gists{/gist_id}","starred_url":"https://api.github.com/users/shtrom/starred{/owner}{/repo}","subscriptions_url":"https://api.github.com/users/shtrom/subscriptions","organizations_url":"https://api.github.com/users/shtrom/orgs","repos_url":"https://api.githu
b.com/users/shtrom/repos","events_url":"https://api.github.com/users/shtrom/events{/privacy}","received_events_url":"https://api.github.com/users/shtrom/received_events","type":"User","user_view_type":"public","site_admin":false},"parents":[{"sha":"c27b7d14cb3ddd3ec6a16459156208674b429991","url":"https://api.github.com/repos/mozilla-conduit/test-repo/commits/c27b7d14cb3ddd3ec6a16459156208674b429991","html_url":"https://github.com/mozilla-conduit/test-repo/commit/c27b7d14cb3ddd3ec6a16459156208674b429991"}]},{"sha":"1d9881143c8288d6d230869c8d5e2b26d12862cc","node_id":"C_kwDONhJ9atoAKDFkOTg4MTE0M2M4Mjg4ZDZkMjMwODY5YzhkNWUyYjI2ZDEyODYyY2M","commit":{"author":{"name":"Olivier Mehani","email":"omehani@mozilla.com","date":"2025-10-08T07:22:38Z"},"committer":{"name":"Olivier Mehani","email":"omehani@mozilla.com","date":"2025-10-08T07:22:38Z"},"message":"add non-empty b","tree":{"sha":"2087b0f783782881f3b1e8a180addd0f4c4ef4f6","url":"https://api.github.com/repos/mozilla-conduit/test-repo/git/trees/2087b0f783782881f3b1e8a180addd0f4c4ef4f6"},"url":"https://api.github.com/repos/mozilla-conduit/test-repo/git/commits/1d9881143c8288d6d230869c8d5e2b26d12862cc","comment_count":0,"verification":{"verified":true,"reason":"valid","signature":"-----BEGIN SSH 
SIGNATURE-----\nU1NIU0lHAAAAAQAAARcAAAAHc3NoLXJzYQAAAAMBAAEAAAEBAMm7viy0nMNf9BD5s/rW9N\nmzq9jMVtjjGNb2QheNccgZ5njlEcbCCdlBXQA4RosJ0EjZo8qPHe5zixANIE+WpokAGVBI\nsTZ7EK9BNlLZCx3cqA/gDS66SHUGfytrM4xygjKsEHGyA/Od2yaIc5adCFYiPDJDe4wMr+\n6dAsGBb7zZZ4KxX6T3B0Df90JW46A25LPh0Y2ObPuvD79pzQmwQsoMQG2dPbJZlwGcqHJv\nDpZKZd3Tql5ZqCPYwTidT4xy8W4OOPPTtCl9eR39L5tf5f0Ai4iuG989VzaweaB4v3urz6\n90AnGRHKBPRF6L0uRkCnTfw+/zpjwnUpWOt8wOyg0AAAADZ2l0AAAAAAAAAAZzaGE1MTIA\nAAEUAAAADHJzYS1zaGEyLTUxMgAAAQAEqQble0cPQc3avvsWWJO5VrqOCyXVYnRPM3bDvO\nf9pRpyFZ45Yw7/OwtPNmhMgOvhuSTHfHJ3B6/H+6fOqdneOieo//TsviKgUiwRWYctYXyb\nWATS2CAgWv8Va4fvbPMl/kkTGAJj9FO9nS3iVeeanhXLTWjXl7qVzGWxNwS/Xf5cSQ5LSB\nnICH7WyvrAyV4W9usP6UTmfmPjtyyNTfO1Hh3rXBLnDPjLAkfl9LdkngzydsupNt2E9U2r\nrHREkXopiQUXPM7Ao/JCcDZxXu9Q/qrLzhWbtc5qp29os1g4ZzpOkuvY6ZXcHjrOmjXN80\nY8vN8ooSnud+kUDldgaPnG\n-----END SSH SIGNATURE-----","payload":"tree 2087b0f783782881f3b1e8a180addd0f4c4ef4f6\nparent 6d13ee6f941eb565909c4dfbae73055ef2247144\nauthor Olivier Mehani 1759908158 +1100\ncommitter Olivier Mehani 1759908158 +1100\n\nadd non-empty 
b\n","verified_at":"2025-10-08T07:22:43Z"}},"url":"https://api.github.com/repos/mozilla-conduit/test-repo/commits/1d9881143c8288d6d230869c8d5e2b26d12862cc","html_url":"https://github.com/mozilla-conduit/test-repo/commit/1d9881143c8288d6d230869c8d5e2b26d12862cc","comments_url":"https://api.github.com/repos/mozilla-conduit/test-repo/commits/1d9881143c8288d6d230869c8d5e2b26d12862cc/comments","author":{"login":"shtrom","id":160280,"node_id":"MDQ6VXNlcjE2MDI4MA==","avatar_url":"https://avatars.githubusercontent.com/u/160280?v=4","gravatar_id":"","url":"https://api.github.com/users/shtrom","html_url":"https://github.com/shtrom","followers_url":"https://api.github.com/users/shtrom/followers","following_url":"https://api.github.com/users/shtrom/following{/other_user}","gists_url":"https://api.github.com/users/shtrom/gists{/gist_id}","starred_url":"https://api.github.com/users/shtrom/starred{/owner}{/repo}","subscriptions_url":"https://api.github.com/users/shtrom/subscriptions","organizations_url":"https://api.github.com/users/shtrom/orgs","repos_url":"https://api.github.com/users/shtrom/repos","events_url":"https://api.github.com/users/shtrom/events{/privacy}","received_events_url":"https://api.github.com/users/shtrom/received_events","type":"User","user_view_type":"public","site_admin":false},"committer":{"login":"shtrom","id":160280,"node_id":"MDQ6VXNlcjE2MDI4MA==","avatar_url":"https://avatars.githubusercontent.com/u/160280?v=4","gravatar_id":"","url":"https://api.github.com/users/shtrom","html_url":"https://github.com/shtrom","followers_url":"https://api.github.com/users/shtrom/followers","following_url":"https://api.github.com/users/shtrom/following{/other_user}","gists_url":"https://api.github.com/users/shtrom/gists{/gist_id}","starred_url":"https://api.github.com/users/shtrom/starred{/owner}{/repo}","subscriptions_url":"https://api.github.com/users/shtrom/subscriptions","organizations_url":"https://api.github.com/users/shtrom/orgs","repos_url":"https://api.github.com
/users/shtrom/repos","events_url":"https://api.github.com/users/shtrom/events{/privacy}","received_events_url":"https://api.github.com/users/shtrom/received_events","type":"User","user_view_type":"public","site_admin":false},"parents":[{"sha":"6d13ee6f941eb565909c4dfbae73055ef2247144","url":"https://api.github.com/repos/mozilla-conduit/test-repo/commits/6d13ee6f941eb565909c4dfbae73055ef2247144","html_url":"https://github.com/mozilla-conduit/test-repo/commit/6d13ee6f941eb565909c4dfbae73055ef2247144"}]},{"sha":"1849bb7efb9b26b77b9a2e057a5a929df13518ba","node_id":"C_kwDONhJ9atoAKDE4NDliYjdlZmI5YjI2Yjc3YjlhMmUwNTdhNWE5MjlkZjEzNTE4YmE","commit":{"author":{"name":"o","email":"","date":"2025-10-17T08:09:27Z"},"committer":{"name":"Olivier Mehani","email":"omehani@mozilla.com","date":"2025-10-17T08:09:27Z"},"message":"add two more files","tree":{"sha":"d9c5a98880631afe295373a5c3c2f2664b3d6524","url":"https://api.github.com/repos/mozilla-conduit/test-repo/git/trees/d9c5a98880631afe295373a5c3c2f2664b3d6524"},"url":"https://api.github.com/repos/mozilla-conduit/test-repo/git/commits/1849bb7efb9b26b77b9a2e057a5a929df13518ba","comment_count":0,"verification":{"verified":true,"reason":"valid","signature":"-----BEGIN PGP SIGNATURE-----\n\niQEzBAABCgAdFiEES4zBgrJt6mjyrNs5QyquhRg2VioFAmjx+bkACgkQQyquhRg2\nVipePAf8CwMhgWeXG4ic74akhnbqAUpg6tC5C5TZRiHpDJGz4FgSO8k6De/ZpxSQ\nI4tT10ngD4vPkSBWs6LLA98Xp8qwe4sVXrtAwyWGw+dXdEbzK55HTLT0loOsHJ+B\nDNqtn51SGBNBvqsGWpE39Mix2GxgAOC2eBmSR+OdBsoPUmKXFL1d4kEu6VggsVjh\nEAyFI0UrH5EbGohMtV6BTCLFPbN7qFUtnM323QI28kNvpo2drF1QN2HczdKAGA1t\nkcwqzLFqBaBYwq/Nu9scE2+hze9fYFdtnrtuUJLFi2DOelX31CtX8ctUUnN+jW7J\nx6fPi/G9PQZdRhkTdsoW5c/6FrJM2A==\n=W5KM\n-----END PGP SIGNATURE-----","payload":"tree d9c5a98880631afe295373a5c3c2f2664b3d6524\nparent 1d9881143c8288d6d230869c8d5e2b26d12862cc\nauthor o <> 1760688567 +1100\ncommitter Olivier Mehani 1760688567 +1100\n\nadd two more 
files\n","verified_at":"2025-10-17T08:09:37Z"}},"url":"https://api.github.com/repos/mozilla-conduit/test-repo/commits/1849bb7efb9b26b77b9a2e057a5a929df13518ba","html_url":"https://github.com/mozilla-conduit/test-repo/commit/1849bb7efb9b26b77b9a2e057a5a929df13518ba","comments_url":"https://api.github.com/repos/mozilla-conduit/test-repo/commits/1849bb7efb9b26b77b9a2e057a5a929df13518ba/comments","author":{},"committer":{"login":"shtrom","id":160280,"node_id":"MDQ6VXNlcjE2MDI4MA==","avatar_url":"https://avatars.githubusercontent.com/u/160280?v=4","gravatar_id":"","url":"https://api.github.com/users/shtrom","html_url":"https://github.com/shtrom","followers_url":"https://api.github.com/users/shtrom/followers","following_url":"https://api.github.com/users/shtrom/following{/other_user}","gists_url":"https://api.github.com/users/shtrom/gists{/gist_id}","starred_url":"https://api.github.com/users/shtrom/starred{/owner}{/repo}","subscriptions_url":"https://api.github.com/users/shtrom/subscriptions","organizations_url":"https://api.github.com/users/shtrom/orgs","repos_url":"https://api.github.com/users/shtrom/repos","events_url":"https://api.github.com/users/shtrom/events{/privacy}","received_events_url":"https://api.github.com/users/shtrom/received_events","type":"User","user_view_type":"public","site_admin":false},"parents":[{"sha":"1d9881143c8288d6d230869c8d5e2b26d12862cc","url":"https://api.github.com/repos/mozilla-conduit/test-repo/commits/1d9881143c8288d6d230869c8d5e2b26d12862cc","html_url":"https://github.com/mozilla-conduit/test-repo/commit/1d9881143c8288d6d230869c8d5e2b26d12862cc"}]},{"sha":"b7d2c82b47efcc0b095a5c3b1f7453450b04d865","node_id":"C_kwDONhJ9atoAKGI3ZDJjODJiNDdlZmNjMGIwOTVhNWMzYjFmNzQ1MzQ1MGIwNGQ4NjU","commit":{"author":{"name":"Olivier Mehani","email":"omehani@mozilla.com","date":"2025-10-20T23:53:28Z"},"committer":{"name":"Olivier Mehani","email":"omehani@mozilla.com","date":"2025-10-20T23:53:28Z"},"message":"add 
c","tree":{"sha":"3d3d95983e53018919997b88711b195942dadb2c","url":"https://api.github.com/repos/mozilla-conduit/test-repo/git/trees/3d3d95983e53018919997b88711b195942dadb2c"},"url":"https://api.github.com/repos/mozilla-conduit/test-repo/git/commits/b7d2c82b47efcc0b095a5c3b1f7453450b04d865","comment_count":0,"verification":{"verified":true,"reason":"valid","signature":"-----BEGIN PGP SIGNATURE-----\n\niQEzBAABCgAdFiEES4zBgrJt6mjyrNs5QyquhRg2VioFAmj2y3gACgkQQyquhRg2\nViqFZwgAl9VCnO/RrMA8BwdQ23P00PKi2y0OmpXB+jV33bMLX/WmdSBfbaxREK23\nt5MquLIuQX0ocZg5pbJ+avZwhFTLNTdFhZPflgG0GSq8UpgULK0UACbN41QzFUBm\nYTae8MV+xIVomID/fC1amoHpBFrLHJ0JQ1oK0MNt6ualyddfr0Oe+gO2A7HW3jGK\nZJuM/6q6RGUT8F6XdAWsxrlLy0gHBCAQBdQraBsfQAjBtKOCxq59sepR7DKkFMrW\nZGsqnKwvDjQiMFyfPcT3xoSGu10rEO9H+YXbkQa8V37idqc4MMFqNOa8UPRaW+l+\n/v6gxLHc0IagwTkD85JEUPHOGAelOQ==\n=7bd1\n-----END PGP SIGNATURE-----","payload":"tree 3d3d95983e53018919997b88711b195942dadb2c\nparent 1849bb7efb9b26b77b9a2e057a5a929df13518ba\nauthor Olivier Mehani 1761004408 +1100\ncommitter Olivier Mehani 1761004408 +1100\n\nadd 
c\n","verified_at":"2025-10-20T23:53:33Z"}},"url":"https://api.github.com/repos/mozilla-conduit/test-repo/commits/b7d2c82b47efcc0b095a5c3b1f7453450b04d865","html_url":"https://github.com/mozilla-conduit/test-repo/commit/b7d2c82b47efcc0b095a5c3b1f7453450b04d865","comments_url":"https://api.github.com/repos/mozilla-conduit/test-repo/commits/b7d2c82b47efcc0b095a5c3b1f7453450b04d865/comments","author":{"login":"shtrom","id":160280,"node_id":"MDQ6VXNlcjE2MDI4MA==","avatar_url":"https://avatars.githubusercontent.com/u/160280?v=4","gravatar_id":"","url":"https://api.github.com/users/shtrom","html_url":"https://github.com/shtrom","followers_url":"https://api.github.com/users/shtrom/followers","following_url":"https://api.github.com/users/shtrom/following{/other_user}","gists_url":"https://api.github.com/users/shtrom/gists{/gist_id}","starred_url":"https://api.github.com/users/shtrom/starred{/owner}{/repo}","subscriptions_url":"https://api.github.com/users/shtrom/subscriptions","organizations_url":"https://api.github.com/users/shtrom/orgs","repos_url":"https://api.github.com/users/shtrom/repos","events_url":"https://api.github.com/users/shtrom/events{/privacy}","received_events_url":"https://api.github.com/users/shtrom/received_events","type":"User","user_view_type":"public","site_admin":false},"committer":{"login":"shtrom","id":160280,"node_id":"MDQ6VXNlcjE2MDI4MA==","avatar_url":"https://avatars.githubusercontent.com/u/160280?v=4","gravatar_id":"","url":"https://api.github.com/users/shtrom","html_url":"https://github.com/shtrom","followers_url":"https://api.github.com/users/shtrom/followers","following_url":"https://api.github.com/users/shtrom/following{/other_user}","gists_url":"https://api.github.com/users/shtrom/gists{/gist_id}","starred_url":"https://api.github.com/users/shtrom/starred{/owner}{/repo}","subscriptions_url":"https://api.github.com/users/shtrom/subscriptions","organizations_url":"https://api.github.com/users/shtrom/orgs","repos_url":"https://api.github.com
/users/shtrom/repos","events_url":"https://api.github.com/users/shtrom/events{/privacy}","received_events_url":"https://api.github.com/users/shtrom/received_events","type":"User","user_view_type":"public","site_admin":false},"parents":[{"sha":"1849bb7efb9b26b77b9a2e057a5a929df13518ba","url":"https://api.github.com/repos/mozilla-conduit/test-repo/commits/1849bb7efb9b26b77b9a2e057a5a929df13518ba","html_url":"https://github.com/mozilla-conduit/test-repo/commit/1849bb7efb9b26b77b9a2e057a5a929df13518ba"}]},{"sha":"79250dceba7ff53b9e7e813262b6162c3a1c776a","node_id":"C_kwDONhJ9atoAKDc5MjUwZGNlYmE3ZmY1M2I5ZTdlODEzMjYyYjYxNjJjM2ExYzc3NmE","commit":{"author":{"name":"Olivier Mehani","email":"omehani@mozilla.com","date":"2025-10-21T03:30:13Z"},"committer":{"name":"Olivier Mehani","email":"omehani@mozilla.com","date":"2025-10-21T03:30:13Z"},"message":"","tree":{"sha":"2490b83a49ee4201f272e88adf4d9acacc1708dc","url":"https://api.github.com/repos/mozilla-conduit/test-repo/git/trees/2490b83a49ee4201f272e88adf4d9acacc1708dc"},"url":"https://api.github.com/repos/mozilla-conduit/test-repo/git/commits/79250dceba7ff53b9e7e813262b6162c3a1c776a","comment_count":0,"verification":{"verified":true,"reason":"valid","signature":"-----BEGIN PGP SIGNATURE-----\n\niQEzBAABCgAdFiEES4zBgrJt6mjyrNs5QyquhRg2VioFAmj2/kUACgkQQyquhRg2\nVioexgf+If3k6svNuLdf0eoeflx77rnIcniplw4hQxdZgLQIzHOgVBc7J4MjF2Zx\nDRRbtPQsXxiLM+FzKSRlfMboIdY2Q2vnGSwsStqEw7l1mAO3WfacKKz6jqYC/MgS\nPm8TQfokPNo3FEPw73xsPW00LjEvpjlW9mE+FuDJ/j/3bVk9BYJHok9e79VgJezg\n8NroIrNglIDofW1H5r7eW1RzypMF8O5gk+za/tgQH1e/DlMFQOQ+r03kliepOsId\nx10R6tVsHK4/TmpC7ghJZY7HcrTlu690NNxZOk/P6RXrC5vsy2D9r4ez/K3Ijm7g\nZiHoOGFqKSvmYm1BcCySxvs2gg3FOQ==\n=kO1j\n-----END PGP SIGNATURE-----","payload":"tree 2490b83a49ee4201f272e88adf4d9acacc1708dc\nparent b7d2c82b47efcc0b095a5c3b1f7453450b04d865\nauthor Olivier Mehani 1761017413 +1100\ncommitter Olivier Mehani 1761017413 
+1100\n\n","verified_at":"2025-10-21T03:30:18Z"}},"url":"https://api.github.com/repos/mozilla-conduit/test-repo/commits/79250dceba7ff53b9e7e813262b6162c3a1c776a","html_url":"https://github.com/mozilla-conduit/test-repo/commit/79250dceba7ff53b9e7e813262b6162c3a1c776a","comments_url":"https://api.github.com/repos/mozilla-conduit/test-repo/commits/79250dceba7ff53b9e7e813262b6162c3a1c776a/comments","author":{"login":"shtrom","id":160280,"node_id":"MDQ6VXNlcjE2MDI4MA==","avatar_url":"https://avatars.githubusercontent.com/u/160280?v=4","gravatar_id":"","url":"https://api.github.com/users/shtrom","html_url":"https://github.com/shtrom","followers_url":"https://api.github.com/users/shtrom/followers","following_url":"https://api.github.com/users/shtrom/following{/other_user}","gists_url":"https://api.github.com/users/shtrom/gists{/gist_id}","starred_url":"https://api.github.com/users/shtrom/starred{/owner}{/repo}","subscriptions_url":"https://api.github.com/users/shtrom/subscriptions","organizations_url":"https://api.github.com/users/shtrom/orgs","repos_url":"https://api.github.com/users/shtrom/repos","events_url":"https://api.github.com/users/shtrom/events{/privacy}","received_events_url":"https://api.github.com/users/shtrom/received_events","type":"User","user_view_type":"public","site_admin":false},"committer":{"login":"shtrom","id":160280,"node_id":"MDQ6VXNlcjE2MDI4MA==","avatar_url":"https://avatars.githubusercontent.com/u/160280?v=4","gravatar_id":"","url":"https://api.github.com/users/shtrom","html_url":"https://github.com/shtrom","followers_url":"https://api.github.com/users/shtrom/followers","following_url":"https://api.github.com/users/shtrom/following{/other_user}","gists_url":"https://api.github.com/users/shtrom/gists{/gist_id}","starred_url":"https://api.github.com/users/shtrom/starred{/owner}{/repo}","subscriptions_url":"https://api.github.com/users/shtrom/subscriptions","organizations_url":"https://api.github.com/users/shtrom/orgs","repos_url":"https://api.gith
ub.com/users/shtrom/repos","events_url":"https://api.github.com/users/shtrom/events{/privacy}","received_events_url":"https://api.github.com/users/shtrom/received_events","type":"User","user_view_type":"public","site_admin":false},"parents":[{"sha":"b7d2c82b47efcc0b095a5c3b1f7453450b04d865","url":"https://api.github.com/repos/mozilla-conduit/test-repo/commits/b7d2c82b47efcc0b095a5c3b1f7453450b04d865","html_url":"https://github.com/mozilla-conduit/test-repo/commit/b7d2c82b47efcc0b095a5c3b1f7453450b04d865"}]}]
diff --git a/src/lando/utils/tests/test_github.py b/src/lando/utils/tests/test_github.py
new file mode 100644
index 000000000..04f07fd3e
--- /dev/null
+++ b/src/lando/utils/tests/test_github.py
@@ -0,0 +1,486 @@
+import json
+from textwrap import dedent
+from typing import Callable
+from unittest import mock
+
+import pytest
+from django.conf import settings
+from requests import Response
+
+from lando.utils.github import (
+ GitHub,
+ GitHubAPI,
+ GitHubAPIClient,
+ PullRequestPatchHelper,
+)
+
+
+@pytest.mark.parametrize(
+ "url, expected_support",
+ (
+ ("https://github.com/mozilla-firefox/firefox", True),
+ ("https://github.com/mozilla-firefox/firefox/", True),
+ ("https://someuser:somepass@github.com/owner/repo.git/", True),
+ ("http://git.test/test-repo/", False),
+ ("https://hg.mozilla.org/mozilla-central/", False),
+ ),
+)
+def test_github_is_supported(url: str, expected_support: bool):
+ assert (
+ GitHub.is_supported_url(url) == expected_support
+ ), f"Support for {url} incorrectly determined"
+
+
+@pytest.mark.parametrize(
+ "url, expected_repo_owner, expected_repo_name",
+ (
+ ("https://github.com/mozilla-firefox/firefox", "mozilla-firefox", "firefox"),
+ ("https://github.com/mozilla-firefox/firefox/", "mozilla-firefox", "firefox"),
+ ("https://someuser:somepass@github.com/owner/repo.git", "owner", "repo"),
+ ("https://someuser:somepass@github.com/owner/repo.git/", "owner", "repo"),
+ ),
+)
+def test_github_parsed_url(url: str, expected_repo_owner: str, expected_repo_name: str):
+ github = GitHub(url)
+
+ assert github.repo_owner == expected_repo_owner, "Repo owner mismatch"
+ assert github.repo_name == expected_repo_name, "Repo name mismatch"
+
+
+def test_github_parsed_url_not_github():
+ with pytest.raises(ValueError):
+ GitHub("https://hg.mozilla.org/mozilla-central/")
+
+
+@pytest.fixture
+def mock_github_fetch_token(monkeypatch: pytest.MonkeyPatch) -> mock.Mock:
+ mock_fetch_token = mock.MagicMock()
+ mock_fetch_token.return_value = "mock_token"
+ monkeypatch.setattr("lando.utils.github.GitHub._fetch_token", mock_fetch_token)
+ return mock_fetch_token
+
+
+@pytest.fixture
+def mock_github_api_get(
+ monkeypatch: pytest.MonkeyPatch, mock_response: Callable
+) -> Callable:
+ def _github_api_get(
+ repo: str,
+ pr_response: dict,
+ pr_commits_response: str,
+ github_pr_patch: str,
+ github_pr_diff: str,
+ ) -> mock.Mock:
+ pr_no = "1"
+
+ response_map = {
+ pr_response["diff_url"]: mock_response(text=github_pr_diff),
+ pr_response["patch_url"]: mock_response(text=github_pr_patch),
+ f"repos/{repo}/pulls/{pr_no}": {
+ "application/vnd.github.patch": mock_response(
+ text=github_pr_patch,
+ headers={
+ "content-type": "application/vnd.github.patch; charset=utf-8"
+ },
+ ),
+ "application/vnd.github.diff": mock_response(
+ text=github_pr_diff,
+ headers={
+ "content-type": "application/vnd.github.diff; charset=utf-8"
+ },
+ ),
+ },
+ f"repos/{repo}/pulls/{pr_no}/commits": mock_response(
+ text=pr_commits_response,
+ headers={
+ "content-type": "application/json; charset=utf-8",
+ },
+ ),
+ }
+
+ def _mock_api_get(url: str, headers: dict = dict, **kwargs) -> Response:
+ # We don't use 'get' here, as we'd rather it failed loudly if something's
+ # missing.
+ response = response_map[url]
+
+ if isinstance(response, dict) and (content_type := headers.get("Accept")):
+ response = response.get(content_type)
+
+ if "content-type" not in response.headers:
+ response.headers["content-type"] = "application/x-whatever"
+
+ return response
+
+ mock_api_get = mock.Mock(side_effect=_mock_api_get)
+ monkeypatch.setattr("lando.utils.github.GitHubAPI.get", mock_api_get)
+
+ return mock_api_get
+
+ return _github_api_get
+
+
+@pytest.mark.parametrize(
+ "url, expected_authenticated_url",
+ (
+ (
+ "https://github.com/mozilla-firefox/firefox/",
+ "https://git:mock_token@github.com/mozilla-firefox/firefox/",
+ ),
+ (
+ "https://github.com/mozilla-firefox/firefox.git/",
+ "https://git:mock_token@github.com/mozilla-firefox/firefox.git/",
+ ),
+ (
+ "https://github.com/mozilla-firefox/firefox.git/some?other#path",
+ "https://git:mock_token@github.com/mozilla-firefox/firefox.git/some?other#path",
+ ),
+ (
+ "https://someuser:somepass@github.com/owner/repo.git/",
+ "https://someuser:somepass@github.com/owner/repo.git/",
+ ),
+ ),
+)
+def test_github_authenticated_url(
+ mock_github_fetch_token: mock.Mock, url: str, expected_authenticated_url: str
+):
+ assert GitHub(url).authenticated_url == expected_authenticated_url
+
+
+def test_github_authenticated_url_no_token(
+ mock_github_fetch_token: mock.Mock, caplog: pytest.LogCaptureFixture
+):
+ mock_github_fetch_token.return_value = None
+
+ url = "https://github.com/mozilla-firefox/firefox/"
+
+ assert GitHub(url).authenticated_url == url
+ assert "Couldn't obtain a token" in caplog.text
+
+
+def test_github_api_init(mock_github_fetch_token: mock.Mock):
+ api_client = GitHubAPI("https://github.com/o/r")
+
+ assert api_client.session.headers.get("Authorization") == "Bearer mock_token"
+
+
+def test_github_api_client_init(mock_github_fetch_token: mock.Mock):
+ api_client = GitHubAPIClient("https://github.com/o/r")
+
+ assert api_client.repo_base_url == "repos/o/r"
+
+
+@pytest.fixture
+def github_pr_response() -> str:
+ """Return the raw response from a GitHub API request about a PR.
+
+ Data created with
+
+ # curl --user-agent 'shtrom' \
+ -H 'Accept: application/vnd.github+json' \
+ -H 'X-GitHub-Api-Version: 2022-11-28' \
+ https://api.github.com/repos/mozilla-conduit/test-repo/pulls/1 \
+ > src/lando/utils/tests/data/github_api_response_pull.json
+ """
+ json_data_path = (
+ settings.BASE_DIR / "utils" / "tests" / "data" / "github_api_response_pull.json"
+ )
+ with open(json_data_path) as f:
+ return f.read()
+
+
+@pytest.fixture
+def github_pr_commits_response() -> str:
+ """Return the raw response from a GitHub API request about a PR.
+
+ Data created with
+
+ # curl --user-agent 'shtrom' \
+ -H 'Accept: application/vnd.github+json' \
+ -H 'X-GitHub-Api-Version: 2022-11-28' \
+ https://api.github.com/repos/mozilla-conduit/test-repo/pulls/1/commits \
+ > src/lando/utils/tests/data/github_api_response_pull_commits.json
+ """
+ json_data_path = (
+ settings.BASE_DIR
+ / "utils"
+ / "tests"
+ / "data"
+ / "github_api_response_pull_commits.json"
+ )
+ with open(json_data_path) as f:
+ return f.read()
+
+
+@pytest.fixture
+def github_pr_patch() -> str:
+ # curl -LO https://github.com/mozilla-conduit/test-repo/pull/1.patch
+ return dedent(
+ """
+ From ce9fe5d05e5d56a4756019654e3c2b424cd937b2 Mon Sep 17 00:00:00 2001
+ From: User
+ Date: Thu, 28 Aug 2025 15:46:57 -0400
+ Subject: [PATCH 1/5] second commit
+
+ ---
+ test | 1 +
+ 1 file changed, 1 insertion(+)
+
+ diff --git a/test b/test
+ index 89b24ec..7bba8c8 100644
+ --- a/test
+ +++ b/test
+ @@ -1 +1,2 @@
+ line 1
+ +line 2
+
+ From c27b7d14cb3ddd3ec6a16459156208674b429991 Mon Sep 17 00:00:00 2001
+ From: User
+ Date: Tue, 7 Oct 2025 11:24:30 -0400
+ Subject: [PATCH 2/5] third commit
+
+ add image
+ ---
+ favico.ico | Bin 0 -> 4286 bytes
+ 1 file changed, 0 insertions(+), 0 deletions(-)
+ create mode 100644 favico.ico
+
+ diff --git a/favico.ico b/favico.ico
+ new file mode 100644
+ index 0000000000000000000000000000000000000000..7ae814461d408ed9414e6d76ab8540a3d37d134c
+ GIT binary patch
+ literal 4286
+ zcmc(iF-}}D5Qa?v(Wau_?h$bbDvDeJ7ES>LN}r07B2^kHq}(AXbAfazk!U!<;_qpH
+ zn4Qe{c~^^KWb9{;=bIUi{oaTI^vBYkL@(%zW7W(&F^YVMnGv
+ z8^>-N--z>de!8H3ySg0rAd=A-x_V>LV#E6L`{#o4{PZ}ouRkA;d~BgGPhV`cJvL-)
+ z81Pi+i!1%&OD3{o%7!TB54Sf``PxSQdA@|bER1O1qa2Ue_$8lhoAIOkAg`?r#NFzm
+ zAF=T%(ue#yeB^CDM!wtqRxD|~oqArEuX7pYT;_Wg`%V034ST?SDIe5kzW@61eQW-Y
+ ztmSSsuR!e`Igz{)znL9qr%K
+ zD{{+k#g(yo2C^Jz-M?N3&r@el??Ar?{(FPit2F(oOxdA4%5oN__-8};=l#pNsAT%Y
+ zrL{Sf$-@+%Hu~0;57rLeO_t^RK6Vk``o3zz+iwc#jckZ?O5WaI(RU(e&N6MEnE3k$
+ zz4$bx7ddMyIqY)<-9GM?Pd~E({p>4t;1?AhW3rL4h|7ErTeh@pK$m#1TYDkdb=b0j
+ o)}Kr1Tc`Ekx>kQrpYKELi1MOk2WzJGx`%IN-s$&uMf|_=0)^}s4gdfE
+
+ literal 0
+ HcmV?d00001
+
+
+ From 6d13ee6f941eb565909c4dfbae73055ef2247144 Mon Sep 17 00:00:00 2001
+ From: User2
+ Date: Wed, 8 Oct 2025 17:51:16 +1100
+ Subject: [PATCH 3/5] add naughty try task config
+
+ ---
+ try_task_config.json | 0
+ 1 file changed, 0 insertions(+), 0 deletions(-)
+ create mode 100644 try_task_config.json
+
+ diff --git a/try_task_config.json b/try_task_config.json
+ new file mode 100644
+ index 0000000..e69de29
+
+ From 1d9881143c8288d6d230869c8d5e2b26d12862cc Mon Sep 17 00:00:00 2001
+ From: User2
+ Date: Wed, 8 Oct 2025 18:22:38 +1100
+ Subject: [PATCH 4/5] add non-empty b
+
+ ---
+ b | 1 +
+ 1 file changed, 1 insertion(+)
+ create mode 100644 b
+
+ diff --git a/b b/b
+ new file mode 100644
+ index 0000000..e0b3f1b
+ --- /dev/null
+ +++ b/b
+ @@ -0,0 +1 @@
+ +bb
+
+ From d1adda9a692e3f362435b4f80ea19aa41c555e69 Mon Sep 17 00:00:00 2001
+ From: User2
+ Date: Wed, 8 Oct 2025 18:26:29 +1100
+ Subject: [PATCH 5/5] add two more files
+
+ ---
+ 1 | 1 +
+ 2 | 1 +
+ 2 files changed, 2 insertions(+)
+ create mode 100644 1
+ create mode 100644 2
+
+ diff --git a/1 b/1
+ new file mode 100644
+ index 0000000..d00491f
+ --- /dev/null
+ +++ b/1
+ @@ -0,0 +1 @@
+ +1
+ diff --git a/2 b/2
+ new file mode 100644
+ index 0000000..0cfbf08
+ --- /dev/null
+ +++ b/2
+ @@ -0,0 +1 @@
+ +2
+ """
+ ).lstrip()
+
+
+@pytest.fixture
+def github_pr_diff() -> str:
+ # curl -LO https://github.com/mozilla-conduit/test-repo/pull/1.diff
+ return dedent(
+ """
+ diff --git a/1 b/1
+ new file mode 100644
+ index 0000000..d00491f
+ --- /dev/null
+ +++ b/1
+ @@ -0,0 +1 @@
+ +1
+ diff --git a/2 b/2
+ new file mode 100644
+ index 0000000..0cfbf08
+ --- /dev/null
+ +++ b/2
+ @@ -0,0 +1 @@
+ +2
+ diff --git a/b b/b
+ new file mode 100644
+ index 0000000..e0b3f1b
+ --- /dev/null
+ +++ b/b
+ @@ -0,0 +1 @@
+ +bb
+ diff --git a/favico.ico b/favico.ico
+ new file mode 100644
+ index 0000000..7ae8144
+ Binary files /dev/null and b/favico.ico differ
+ diff --git a/test b/test
+ index 89b24ec..7bba8c8 100644
+ --- a/test
+ +++ b/test
+ @@ -1 +1,2 @@
+ line 1
+ +line 2
+ diff --git a/try_task_config.json b/try_task_config.json
+ new file mode 100644
+ index 0000000..e69de29
+ """
+ ).lstrip()
+
+
+def test_api_client_build_pr(
+ github_pr_response: str,
+ github_pr_commits_response: str,
+ github_pr_diff: str,
+ github_pr_patch: str,
+):
+ api_client = GitHubAPIClient("https://github.com/mozilla-conduit/test-repo")
+
+ api_client.get_pull_request = mock.MagicMock()
+ api_client.get_pull_request.return_value = json.loads(github_pr_response)
+
+ api_client.get_diff = mock.MagicMock()
+ api_client.get_diff.return_value = github_pr_diff
+
+ api_client.get_patch = mock.MagicMock()
+ api_client.get_patch.return_value = github_pr_patch
+
+ pr = api_client.build_pull_request(1)
+
+ assert api_client.get_pull_request.call_count == 1
+ assert pr.number == 1
+
+ assert pr.diff == github_pr_diff
+ assert api_client.get_diff.call_count == 1
+ assert api_client.get_diff.call_args.args == (1,)
+
+ assert pr.patch == github_pr_patch
+ assert api_client.get_patch.call_count == 1
+ assert api_client.get_patch.call_args.args == (1,)
+
+
+@pytest.fixture
+def github_api_client(
+ mock_github_fetch_token: mock.Mock, # pyright: ignore[reportUnusedParameter]
+ mock_github_api_get: Callable,
+ mock_response: Callable,
+ monkeypatch: pytest.MonkeyPatch,
+) -> Callable:
+ def _github_api_client(
+ github_pr_response: str,
+ github_pr_commits_response: str,
+ *,
+ github_pr_list_response: str = "null",
+ github_pr_patch: str = "",
+ github_pr_diff: str = "",
+ ) -> GitHubAPIClient:
+ repo = "mozilla-conduit/test-repo"
+ client_mock = GitHubAPIClient(f"https://github.com/{repo}/")
+
+ client_mock.list_pull_request = mock.Mock(
+ return_value=json.loads(github_pr_list_response)
+ )
+
+ pr_response = json.loads(github_pr_response)
+ client_mock.get_pull_request = mock.Mock(return_value=pr_response)
+
+        # Prime the GitHub API object to fake network interaction with coherent
+        # responses.
+ mock_github_api_get(
+ repo,
+ pr_response,
+ github_pr_commits_response,
+ github_pr_patch,
+ github_pr_diff,
+ )
+
+ return client_mock
+
+ return _github_api_client
+
+
+@pytest.fixture
+def github_api_client_pr(
+ github_api_client: Callable,
+ github_pr_response: str,
+ github_pr_commits_response: str,
+ github_pr_patch: str,
+ github_pr_diff: str,
+) -> mock.Mock:
+ return github_api_client(
+ github_pr_response,
+ github_pr_commits_response,
+ github_pr_patch=github_pr_patch,
+ github_pr_diff=github_pr_diff,
+ )
+
+
+def test_PullRequestPatchHelper(github_api_client_pr: mock.Mock):
+ # This should match the github_pr_response fixture.
+ pr_url = "https://api.github.com/repos/mozilla-conduit/test-repo/pulls/1"
+
+ pr = github_api_client_pr.build_pull_request(1)
+
+ assert pr.url == pr_url
+
+ # Serialisation
+ serialised_pr = pr.serialize()
+
+ assert serialised_pr["url"] == pr_url
+
+ # PatchHelper
+ pr_patch_helper = PullRequestPatchHelper(pr)
+
+ assert (
+ pr_patch_helper.get_commit_description()
+ == "WIP: test pull request with multiple commits"
+ )
+ assert pr_patch_helper.get_timestamp() == "1761017419"
+ assert pr_patch_helper.parse_author_information() == (
+ "Olivier Mehani",
+ "omehani@mozilla.com",
+ )