From edfbc181ef0af5089273e2d90dafb1fbcede826b Mon Sep 17 00:00:00 2001
From: Yutaka Yamashita
Date: Sun, 3 May 2026 11:01:29 +0900
Subject: [PATCH] fix(web-ui): surface agent-level errors in chat UI

Previously, agent-level errors were silently swallowed by the SSE parser
try-catch, leaving users with an infinite loading spinner.

Changes:
- strandsParser.js: handle {"status":"error"} SSE events with
  user-friendly messages for each error category:
  - Conversation limit (Too much media / input too long)
  - Throttling (rate limit exceeded)
  - Model timeout
  - Model not ready
  - Service unavailable
  - Generic errors (show raw message with warning icon)
- ChatPanel.tsx: detect retryable vs conversation-limit errors in catch
  block and show appropriate guidance
---
 web-ui/src/components/chat/ChatPanel.tsx | 12 +++++++++++-
 web-ui/src/services/strandsParser.js     | 22 ++++++++++++++++++++++
 2 files changed, 33 insertions(+), 1 deletion(-)

diff --git a/web-ui/src/components/chat/ChatPanel.tsx b/web-ui/src/components/chat/ChatPanel.tsx
index e38c50ba..4272fd81 100644
--- a/web-ui/src/components/chat/ChatPanel.tsx
+++ b/web-ui/src/components/chat/ChatPanel.tsx
@@ -892,11 +892,21 @@ export const ChatPanel = forwardRef(function Ch
       if (err instanceof DOMException && err.name === "AbortError") {
         // Keep partial response as-is
       } else {
+        const errorMessage = err instanceof Error ? err.message : String(err)
+        const isRetryable = errorMessage.includes("ThrottlingException") || errorMessage.includes("throttl")
+          || errorMessage.includes("timed out") || errorMessage.includes("timeout")
+          || errorMessage.includes("not ready") || errorMessage.includes("ServiceUnavailable")
+        const isConversationLimit = errorMessage.includes("Too much media") || errorMessage.includes("too long")
+        const displayMessage = isConversationLimit
+          ? "This conversation is too long for the model to process. Please start a new chat to continue."
+          : isRetryable
+            ? "The service is temporarily busy or timed out. Please wait a moment and try again."
+            : "Sorry, something went wrong. Please try again."
         setMessages((prev) => {
           const updated = [...prev]
           updated[updated.length - 1] = {
             ...updated[updated.length - 1],
-            content: "Sorry, something went wrong. Please try again.",
+            content: displayMessage,
           }
           return updated
         })
diff --git a/web-ui/src/services/strandsParser.js b/web-ui/src/services/strandsParser.js
index 8b2e0288..f5598c3a 100644
--- a/web-ui/src/services/strandsParser.js
+++ b/web-ui/src/services/strandsParser.js
@@ -50,6 +50,28 @@ export const parseStreamingChunk = (line, currentCompletion, updateCallback, too
     // Keep-alive
     if (json.keepalive) return currentCompletion;
 
+    // Agent-level error (e.g. Bedrock ValidationException, image/token limit exceeded)
+    if (json.status === 'error' && json.error) {
+      const msg = json.error;
+      let errorMessage;
+      if (msg.includes("Too much media") || msg.includes("too long")) {
+        errorMessage = "⚠️ This conversation is too long for the model to process. Please start a new chat to continue.";
+      } else if (msg.includes("ThrottlingException") || msg.includes("throttl")) {
+        errorMessage = "⚠️ The service is temporarily busy. Please wait a moment and try again.";
+      } else if (msg.includes("ModelTimeoutException") || msg.includes("timed out") || msg.includes("timeout")) {
+        errorMessage = "⚠️ The model took too long to respond. Please try again.";
+      } else if (msg.includes("ModelNotReadyException") || msg.includes("not ready")) {
+        errorMessage = "⚠️ The model is not ready yet. Please wait a moment and try again.";
+      } else if (msg.includes("ServiceUnavailable")) {
+        errorMessage = "⚠️ The service is temporarily unavailable. Please try again later.";
+      } else {
+        errorMessage = `⚠️ ${msg}`;
+      }
+      const newCompletion = currentCompletion + errorMessage;
+      updateCallback(newCompletion);
+      return newCompletion;
+    }
+
     // MCP status event — pass to toolCallback with special type
     if (json.mcp_status) {
       if (toolCallback) toolCallback('__mcp_status__', { mcpStatus: json.mcp_status });