diff --git a/src/lib/models/providers/ollama/ollamaLLM.ts b/src/lib/models/providers/ollama/ollamaLLM.ts index 3bcd3ccf1..7de1c1044 100644 --- a/src/lib/models/providers/ollama/ollamaLLM.ts +++ b/src/lib/models/providers/ollama/ollamaLLM.ts @@ -12,6 +12,7 @@ import { parse } from 'partial-json'; import crypto from 'crypto'; import { Message } from '@/lib/types'; import { repairJson } from '@toolsycc/json-repair'; +import { stripMarkdownFences } from '@/lib/utils/parseJson'; type OllamaConfig = { baseURL: string; @@ -249,7 +250,7 @@ class OllamaLLM extends BaseLLM { recievedObj += chunk.message.content; try { - yield parse(recievedObj) as T; + yield parse(stripMarkdownFences(recievedObj)) as T; } catch (err) { console.log('Error parsing partial object from Ollama:', err); yield {} as T; diff --git a/src/lib/models/providers/openai/openaiLLM.ts b/src/lib/models/providers/openai/openaiLLM.ts index 5ae1538a0..6aa664995 100644 --- a/src/lib/models/providers/openai/openaiLLM.ts +++ b/src/lib/models/providers/openai/openaiLLM.ts @@ -19,6 +19,7 @@ import { } from 'openai/resources/index.mjs'; import { Message } from '@/lib/types'; import { repairJson } from '@toolsycc/json-repair'; +import { safeParseJson, stripMarkdownFences } from '@/lib/utils/parseJson'; type OpenAIConfig = { apiKey: string; @@ -110,7 +111,7 @@ class OpenAILLM extends BaseLLM { return { name: tc.function.name, id: tc.id, - arguments: JSON.parse(tc.function.arguments), + arguments: safeParseJson(tc.function.arguments), }; } }) @@ -256,14 +257,14 @@ class OpenAILLM extends BaseLLM { recievedObj += chunk.delta; try { - yield parse(recievedObj) as T; + yield parse(stripMarkdownFences(recievedObj)) as T; } catch (err) { console.log('Error parsing partial object from OpenAI:', err); yield {} as T; } } else if (chunk.type === 'response.output_text.done' && chunk.text) { try { - yield parse(chunk.text) as T; + yield parse(stripMarkdownFences(chunk.text)) as T; } catch (err) { throw new Error(`Error parsing 
response from OpenAI: ${err}`); } diff --git a/src/lib/utils/parseJson.ts b/src/lib/utils/parseJson.ts new file mode 100644 index 000000000..bfe0ab4fc --- /dev/null +++ b/src/lib/utils/parseJson.ts @@ -0,0 +1,21 @@ +/** + * Strips markdown code fences that some LLM providers (Claude, models via + * LiteLLM/OpenRouter) wrap around JSON output. + * + * Handles ```json ... ```, ``` ... ```, and raw JSON (no-op). + */ +export function stripMarkdownFences(text: string): string { + return text + .trim() + .replace(/^```(?:json)?\s*/i, '') + .replace(/\s*```$/, '') + .trim(); +} + +/** + * Strips markdown code fences from LLM output before JSON.parse. + * Fixes issue #959: Claude/LiteLLM models wrap JSON in markdown blocks. + */ +export function safeParseJson<T>(text: string): T { + return JSON.parse(stripMarkdownFences(text)) as T; +}