diff --git a/public/index.html b/public/index.html
index ff0b215..55a03a1 100644
--- a/public/index.html
+++ b/public/index.html
@@ -1919,21 +1919,43 @@
Error<
/**
* Check for PR updates by fetching lightweight timestamp data
- * Compare with local cache to detect changes
+ * Compare with local cache to detect changes.
+ *
+ * The /api/prs/updates endpoint is paginated (cursor-based, after_id)
+ * to avoid D1 storage-operation timeouts on large datasets.
+ * We fetch every page here before any change processing because removal
+ * detection compares the cached ids against the complete set of PR ids.
*/
async function checkForPrUpdates() {
if (!autoRefreshEnabled) return;
try {
- const response = await fetch('/api/prs/updates');
- const data = await response.json();
+ // Collect all pages from the paginated updates endpoint.
+ const allUpdates = [];
+ let afterId = null;
+ let hasMore = true;
+ while (hasMore) {
+ const url = afterId
+ ? `/api/prs/updates?after_id=${afterId}`
+ : '/api/prs/updates';
+ const response = await fetch(url);
+ const data = await response.json();
+
+ if (data.error) {
+ console.error('Error checking for updates:', data.error);
+ return;
+ }
- if (data.error) {
- console.error('Error checking for updates:', data.error);
- return;
+ const page = data.updates || [];
+ allUpdates.push(...page);
+ hasMore = data.has_more === true;
+ if (hasMore && page.length > 0) {
+ afterId = page[page.length - 1].id;
+ } else {
+ hasMore = false;
+ }
}
- const updates = data.updates || [];
const changedPrIds = [];
const newPrIds = [];
const removedPrIds = [];
@@ -1942,7 +1964,7 @@ Error<
const isFirstCheck = Object.keys(prUpdateTimestamps).length === 0;
// Detect changes and new PRs
- updates.forEach(update => {
+ allUpdates.forEach(update => {
const oldTimestamp = prUpdateTimestamps[update.id];
if (oldTimestamp === undefined) {
// PR not seen before - it's newly added
@@ -1955,7 +1977,7 @@ Error<
// Detect removals (PRs that were in cache but not in update)
// Use Set for O(1) lookup instead of O(n) find operation
- const currentPrIds = new Set(updates.map(u => u.id));
+ const currentPrIds = new Set(allUpdates.map(u => u.id));
Object.keys(prUpdateTimestamps).forEach(prId => {
const prIdNum = parseInt(prId);
if (!currentPrIds.has(prIdNum)) {
@@ -2001,7 +2023,7 @@ Error<
// were detected, the view may be stuck in an empty state due to a prior bug.
// Trigger a reload (skipped on the first check to avoid conflicting with
// the initial loadPrs() call that runs concurrently at page startup).
- if (!isFirstCheck && allPrs.length === 0 && updates.length > 0 &&
+ if (!isFirstCheck && allPrs.length === 0 && allUpdates.length > 0 &&
changedPrIds.length === 0 && removedPrIds.length === 0) {
await loadPrs(true);
}
diff --git a/src/handlers.py b/src/handlers.py
index 372bfad..7831bc8 100644
--- a/src/handlers.py
+++ b/src/handlers.py
@@ -34,6 +34,10 @@
# Maximum PRs to import/discover per bulk operation to prevent timeouts on large orgs
_MAX_PRS_PER_BULK_OP = 1000
+# Page size for the lightweight /api/prs/updates endpoint.
+# Keeps each D1 query well within the storage-operation timeout limit.
+_PR_UPDATES_PAGE_SIZE = 500
+
def _is_caller_scoped_token(token_info):
"""Return True when the request uses a caller-provided token."""
@@ -960,38 +964,58 @@ def _row_to_dict(r):
}
}), {'headers': {'Content-Type': 'application/json'}})
-async def handle_pr_updates_check(env):
+async def handle_pr_updates_check(env, after_id=None):
"""
- GET /api/prs/updates
+ GET /api/prs/updates[?after_id=]
Lightweight endpoint to check for PR updates.
Returns only PR IDs and their updated_at timestamps for change detection.
-
+
+ Results are paginated using cursor-based pagination (after_id) to keep
+    each D1 query within the storage-operation timeout limit. When a
+    response carries ``"has_more": true``, the caller should issue the
+    next request with ``after_id`` set to the last ``id`` in ``updates``.
+
This allows the frontend to poll efficiently without fetching full PR data.
"""
try:
db = get_db(env)
-
- # Fetch only IDs and timestamps - minimal data transfer
- stmt = db.prepare('SELECT id, updated_at FROM prs ORDER BY id')
+
+ # Cursor-based pagination: fetch the next page starting after after_id.
+ # We request one extra row (LIMIT + 1) so we can reliably detect whether
+ # another page exists without a separate COUNT query.
+ fetch_limit = _PR_UPDATES_PAGE_SIZE + 1
+ if after_id is not None:
+ stmt = db.prepare(
+ 'SELECT id, updated_at FROM prs WHERE id > ? ORDER BY id LIMIT ?'
+ ).bind(after_id, fetch_limit)
+ else:
+ stmt = db.prepare(
+ 'SELECT id, updated_at FROM prs ORDER BY id LIMIT ?'
+ ).bind(fetch_limit)
+
result = await stmt.all()
-
+
if not result or not result.results:
return Response.new(
- json.dumps({'updates': []}),
+ json.dumps({'updates': [], 'has_more': False}),
{'headers': {'Content-Type': 'application/json'}}
)
-
- # Convert to lightweight format
+
+ # Convert to lightweight format; trim to the real page size
+ rows = list(result.results)
+ has_more = len(rows) > _PR_UPDATES_PAGE_SIZE
+ rows = rows[:_PR_UPDATES_PAGE_SIZE]
+
updates = []
- for row in result.results:
+ for row in rows:
row_dict = row.to_py()
updates.append({
'id': row_dict.get('id'),
'updated_at': row_dict.get('updated_at')
})
-
+
return Response.new(
- json.dumps({'updates': updates}),
+ json.dumps({'updates': updates, 'has_more': has_more}),
{'headers': {'Content-Type': 'application/json'}}
)
except Exception as e:
diff --git a/src/index.py b/src/index.py
index 146c0f5..57368c8 100644
--- a/src/index.py
+++ b/src/index.py
@@ -70,7 +70,14 @@ async def on_fetch(request, env):
response = None
if path == '/api/prs/updates' and request.method == 'GET':
- response = await handle_pr_updates_check(env)
+ after_id_str = url.searchParams.get('after_id')
+ after_id = None
+ if after_id_str:
+ try:
+ after_id = int(after_id_str)
+ except (ValueError, TypeError):
+ after_id = None
+ response = await handle_pr_updates_check(env, after_id=after_id)
elif path == '/api/prs':
if request.method == 'GET':
repo = url.searchParams.get('repo')