diff --git a/packages/opencode/src/cli/cmd/tui/ui/dialog.tsx b/packages/opencode/src/cli/cmd/tui/ui/dialog.tsx
index 4477d301562..79bca42406a 100644
--- a/packages/opencode/src/cli/cmd/tui/ui/dialog.tsx
+++ b/packages/opencode/src/cli/cmd/tui/ui/dialog.tsx
@@ -62,7 +62,6 @@ function init() {
       current.onClose?.()
       setStore("stack", store.stack.slice(0, -1))
       evt.preventDefault()
-      evt.stopPropagation()
       refocus()
     }
   })
diff --git a/packages/opencode/src/session/llm.ts b/packages/opencode/src/session/llm.ts
index 0db453a2229..baf601b3e73 100644
--- a/packages/opencode/src/session/llm.ts
+++ b/packages/opencode/src/session/llm.ts
@@ -121,6 +121,19 @@ export namespace LLM {
 
     const tools = await resolveTools(input)
 
+    // Warn about potential context window issues for local models with tools
+    // See: https://github.com/anomalyco/opencode/issues/7185
+    const MINIMUM_CONTEXT_FOR_TOOLS = 16_384
+    const toolCount = Object.keys(tools).filter((x) => x !== "invalid").length
+    if (toolCount > 0 && input.model.limit.context > 0 && input.model.limit.context < MINIMUM_CONTEXT_FOR_TOOLS) {
+      l.warn("low context window for tool calling", {
+        contextLimit: input.model.limit.context,
+        minimumRecommended: MINIMUM_CONTEXT_FOR_TOOLS,
+        toolCount,
+        hint: "Tool calling may fail with context windows below 16K tokens. For vLLM/Ollama, increase max-model-len or num_ctx.",
+      })
+    }
+
     return streamText({
       onError(error) {
         l.error("stream error", {
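
For context on the llm.ts change, here is a minimal standalone sketch of the new guard as a pure function. The names ModelLimits and shouldWarnLowContext are hypothetical, introduced only for illustration; the actual patch reads input.model.limit.context inline and counts the keys of the resolved tool map, skipping the "invalid" sentinel entry.

// Illustrative sketch only: ModelLimits and shouldWarnLowContext are
// hypothetical names, not part of the opencode codebase.
const MINIMUM_CONTEXT_FOR_TOOLS = 16_384

interface ModelLimits {
  context: number // advertised context window in tokens; 0 means unknown
}

function shouldWarnLowContext(limit: ModelLimits, tools: Record<string, unknown>): boolean {
  // Mirror the patch: the "invalid" key is a sentinel, not a real tool.
  const toolCount = Object.keys(tools).filter((name) => name !== "invalid").length
  return toolCount > 0 && limit.context > 0 && limit.context < MINIMUM_CONTEXT_FOR_TOOLS
}

// A local model advertising an 8K window with two tools registered triggers the warning:
console.log(shouldWarnLowContext({ context: 8_192 }, { read: {}, write: {} })) // true
// An unknown (0) context limit stays silent, matching the patch's context > 0 check,
// so models that do not report a context length are never flagged:
console.log(shouldWarnLowContext({ context: 0 }, { read: {} })) // false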