diff --git a/scripts/test-deepseek.ts b/scripts/test-deepseek.ts new file mode 100644 index 0000000..2f11460 --- /dev/null +++ b/scripts/test-deepseek.ts @@ -0,0 +1,88 @@ +import { DeepSeekProvider } from "../src/services/ai/providers/deepseek.js"; +import type { ChatCompletionTool } from "../src/services/ai/tools/tool-schema.js"; + +const apiKey = process.env.DEEPSEEK_API_KEY; +if (!apiKey) { + console.error("Error: DEEPSEEK_API_KEY is not set."); + process.exit(1); +} + +const model = process.env.DEEPSEEK_MODEL ?? "deepseek-chat"; +const apiUrl = process.env.DEEPSEEK_API_URL; + +class FakeSessionManager { + private readonly session = { id: "test-session-1" }; + private readonly messages: any[] = []; + + getSession(): any { + return null; + } + createSession(): any { + return this.session; + } + getMessages(): any[] { + return this.messages; + } + getLastSequence(): number { + return this.messages.length - 1; + } + addMessage(message: any): void { + this.messages.push(message); + } +} + +const echoTool: ChatCompletionTool = { + type: "function", + function: { + name: "echo_greeting", + description: "Return a greeting message", + parameters: { + type: "object", + properties: { + message: { + type: "string", + description: "The greeting message to return", + }, + }, + required: ["message"], + }, + }, +}; + +const config: Record<string, unknown> = { + model, + apiKey, + ...(apiUrl ? { apiUrl } : {}), + maxIterations: 3, +}; + +const provider = new DeepSeekProvider(config as any, new FakeSessionManager() as any); + +console.log(`Provider : ${provider.getProviderName()}`); +console.log(`Model : ${model}`); +console.log(`API URL : ${apiUrl ?? "https://api.deepseek.com"}${apiUrl ? "" : " (default)"}`); +console.log("---"); +console.log("Calling DeepSeek API...\n"); + +const systemPrompt = + "You are a helpful assistant. 
When asked to greet, call the echo_greeting tool."; +const userPrompt = + "Please greet me by calling the echo_greeting tool with message 'Hello from DeepSeek!'"; + +const result = await provider.executeToolCall( + systemPrompt, + userPrompt, + echoTool, + "integration-test" +); + +if (result.success) { + console.log("SUCCESS"); + console.log(`Iterations : ${result.iterations}`); + console.log(`Result :`, JSON.stringify((result as any).data, null, 2)); +} else { + console.error("FAILED"); + console.error(`Error : ${result.error}`); + console.error(`Iterations : ${result.iterations}`); + process.exit(1); +} diff --git a/src/config.ts b/src/config.ts index f19be09..ebbf447 100644 --- a/src/config.ts +++ b/src/config.ts @@ -36,7 +36,7 @@ interface OpenCodeMemConfig { autoCaptureMaxIterations?: number; autoCaptureIterationTimeout?: number; autoCaptureLanguage?: string; - memoryProvider?: "openai-chat" | "openai-responses" | "anthropic"; + memoryProvider?: "openai-chat" | "openai-responses" | "anthropic" | "deepseek"; memoryModel?: string; memoryApiUrl?: string; memoryApiKey?: string; @@ -99,7 +99,7 @@ const DEFAULTS: Required< memoryModel?: string; memoryApiUrl?: string; memoryApiKey?: string; - memoryProvider?: "openai-chat" | "openai-responses" | "anthropic"; + memoryProvider?: "openai-chat" | "openai-responses" | "anthropic" | "deepseek"; memoryTemperature?: number | false; memoryExtraParams?: Record<string, unknown>; opencodeProvider?: string; @@ -269,7 +269,7 @@ const CONFIG_TEMPLATE = `{ "autoCaptureEnabled": true, - // Provider type: "openai-chat" | "openai-responses" | "anthropic" + // Provider type: "openai-chat" | "openai-responses" | "anthropic" | "deepseek" "memoryProvider": "openai-chat", // REQUIRED for auto-capture (all 3 must be set): @@ -307,6 +307,12 @@ const CONFIG_TEMPLATE = `{ // "memoryApiUrl": "https://api.groq.com/openai/v1" // "memoryApiKey": "gsk_..." 
+ // DeepSeek (with session support): + // "memoryProvider": "deepseek" + // "memoryModel": "deepseek-chat" + // "memoryApiUrl": "https://api.deepseek.com" + // "memoryApiKey": "sk-..." + // Maximum iterations for multi-turn AI analysis (for openai-responses and anthropic) "autoCaptureMaxIterations": 5, @@ -477,7 +483,8 @@ export const CONFIG = { memoryProvider: (fileConfig.memoryProvider ?? "openai-chat") as | "openai-chat" | "openai-responses" - | "anthropic", + | "anthropic" + | "deepseek", memoryModel: fileConfig.memoryModel, memoryApiUrl: fileConfig.memoryApiUrl, memoryApiKey: resolveSecretValue(fileConfig.memoryApiKey), diff --git a/src/services/ai/ai-provider-factory.ts b/src/services/ai/ai-provider-factory.ts index 2935927..ebb0c79 100644 --- a/src/services/ai/ai-provider-factory.ts +++ b/src/services/ai/ai-provider-factory.ts @@ -3,6 +3,7 @@ import { OpenAIChatCompletionProvider } from "./providers/openai-chat-completion import { OpenAIResponsesProvider } from "./providers/openai-responses.js"; import { AnthropicMessagesProvider } from "./providers/anthropic-messages.js"; import { GoogleGeminiProvider } from "./providers/google-gemini.js"; +import { DeepSeekProvider } from "./providers/deepseek.js"; import { aiSessionManager } from "./session/ai-session-manager.js"; import type { AIProviderType } from "./session/session-types.js"; @@ -21,13 +22,16 @@ export class AIProviderFactory { case "google-gemini": return new GoogleGeminiProvider(config, aiSessionManager); + case "deepseek": + return new DeepSeekProvider(config, aiSessionManager); + default: throw new Error(`Unknown provider type: ${providerType}`); } } static getSupportedProviders(): AIProviderType[] { - return ["openai-chat", "openai-responses", "anthropic", "google-gemini"]; + return ["openai-chat", "openai-responses", "anthropic", "google-gemini", "deepseek"]; } static cleanupExpiredSessions(): number { diff --git a/src/services/ai/providers/deepseek.ts b/src/services/ai/providers/deepseek.ts new 
file mode 100644 index 0000000..c34496e --- /dev/null +++ b/src/services/ai/providers/deepseek.ts @@ -0,0 +1,384 @@ +import { BaseAIProvider, type ToolCallResult, applySafeExtraParams } from "./base-provider.js"; +import { AISessionManager } from "../session/ai-session-manager.js"; +import type { ChatCompletionTool } from "../tools/tool-schema.js"; +import { log } from "../../logger.js"; +import { UserProfileValidator } from "../validators/user-profile-validator.js"; + +interface DeepSeekToolCall { + id: string; + type: string; + function: { + name: string; + arguments: string; + }; +} + +interface DeepSeekErrorResponse { + status?: unknown; + msg?: unknown; +} + +interface DeepSeekResponse { + choices: Array<{ + message: { + content?: string; + tool_calls?: DeepSeekToolCall[]; + }; + finish_reason?: string; + }>; +} + +export class DeepSeekProvider extends BaseAIProvider { + private aiSessionManager: AISessionManager; + + constructor(config: any, aiSessionManager: AISessionManager) { + super(config); + this.aiSessionManager = aiSessionManager; + } + + getProviderName(): string { + return "deepseek"; + } + + supportsSession(): boolean { + return true; + } + + private addToolResponse( + sessionId: string, + messages: any[], + toolCallId: string, + content: string + ): void { + const sequence = this.aiSessionManager.getLastSequence(sessionId) + 1; + this.aiSessionManager.addMessage({ + aiSessionId: sessionId, + sequence, + role: "tool", + content, + toolCallId, + }); + messages.push({ + role: "tool", + tool_call_id: toolCallId, + content, + }); + } + + private filterIncompleteToolCallSequences(messages: any[]): any[] { + const result: any[] = []; + let i = 0; + + while (i < messages.length) { + const msg = messages[i]; + + if (msg.role === "assistant" && msg.toolCalls && msg.toolCalls.length > 0) { + const toolCallIds = new Set(msg.toolCalls.map((tc: any) => tc.id)); + const toolResponses: any[] = []; + let j = i + 1; + + while (j < messages.length && 
messages[j].role === "tool") { + if (toolCallIds.has(messages[j].toolCallId)) { + toolResponses.push(messages[j]); + toolCallIds.delete(messages[j].toolCallId); + } + j++; + } + + if (toolCallIds.size === 0) { + result.push(msg); + toolResponses.forEach((tr) => result.push(tr)); + i = j; + } else { + break; + } + } else { + result.push(msg); + i++; + } + } + + return result; + } + + async executeToolCall( + systemPrompt: string, + userPrompt: string, + toolSchema: ChatCompletionTool, + sessionId: string + ): Promise<ToolCallResult> { + let session = this.aiSessionManager.getSession(sessionId, "deepseek"); + + if (!session) { + session = this.aiSessionManager.createSession({ + provider: "deepseek", + sessionId, + }); + } + + const existingMessages = this.aiSessionManager.getMessages(session.id); + const messages: any[] = []; + + const validatedMessages = this.filterIncompleteToolCallSequences(existingMessages); + + for (const msg of validatedMessages) { + const apiMsg: any = { + role: msg.role, + content: msg.content, + }; + + if (msg.toolCalls) { + apiMsg.tool_calls = msg.toolCalls; + } + + if (msg.toolCallId) { + apiMsg.tool_call_id = msg.toolCallId; + } + + messages.push(apiMsg); + } + + if (messages.length === 0) { + const sequence = this.aiSessionManager.getLastSequence(session.id) + 1; + this.aiSessionManager.addMessage({ + aiSessionId: session.id, + sequence, + role: "system", + content: systemPrompt, + }); + + messages.push({ role: "system", content: systemPrompt }); + } + + const userSequence = this.aiSessionManager.getLastSequence(session.id) + 1; + this.aiSessionManager.addMessage({ + aiSessionId: session.id, + sequence: userSequence, + role: "user", + content: userPrompt, + }); + + messages.push({ role: "user", content: userPrompt }); + + let iterations = 0; + const maxIterations = this.config.maxIterations ?? 5; + const iterationTimeout = this.config.iterationTimeout ?? 
30000; + + while (iterations < maxIterations) { + iterations++; + + const controller = new AbortController(); + const timeout = setTimeout(() => controller.abort(), iterationTimeout); + + try { + const requestBody: any = { + model: this.config.model, + messages, + tools: [toolSchema], + tool_choice: "auto", + }; + + if (this.config.memoryTemperature !== false) { + requestBody.temperature = this.config.memoryTemperature ?? 0.3; + } + + if (this.config.extraParams) { + applySafeExtraParams(requestBody, this.config.extraParams); + } + + const headers: Record<string, string> = { + "Content-Type": "application/json", + }; + + if (this.config.apiKey) { + headers.Authorization = `Bearer ${this.config.apiKey}`; + } + + const response = await fetch(`${this.config.apiUrl}/chat/completions`, { + method: "POST", + headers, + body: JSON.stringify(requestBody), + signal: controller.signal, + }); + + clearTimeout(timeout); + + if (!response.ok) { + const errorText = await response.text().catch(() => response.statusText); + log("DeepSeek API error", { + provider: this.getProviderName(), + model: this.config.model, + status: response.status, + error: errorText, + iteration: iterations, + }); + + let errorMessage = `API error: ${response.status} - ${errorText}`; + + if ( + response.status === 400 && + errorText.includes("unsupported_value") && + errorText.includes("temperature") + ) { + errorMessage = + 'Your model does not support the temperature parameter. 
Add "memoryTemperature": false to your config file to disable it.'; + } + + return { + success: false, + error: errorMessage, + iterations, + }; + } + + const data = (await response.json()) as DeepSeekErrorResponse & Partial<DeepSeekResponse>; + + if (data.status && data.msg) { + log("DeepSeek API returned error in response body", { + provider: this.getProviderName(), + model: this.config.model, + status: data.status, + msg: data.msg, + }); + return { + success: false, + error: `API error: ${data.status} - ${data.msg}`, + iterations, + }; + } + + if (!data.choices || !data.choices[0]) { + log("Invalid DeepSeek API response format", { + provider: this.getProviderName(), + model: this.config.model, + response: JSON.stringify(data).slice(0, 1000), + hasChoices: !!data.choices, + choicesLength: data.choices?.length, + }); + return { + success: false, + error: "Invalid API response format", + iterations, + }; + } + + const response_data = data as DeepSeekResponse; + const choice = response_data.choices[0] as DeepSeekResponse["choices"][0]; + + const assistantSequence = this.aiSessionManager.getLastSequence(session.id) + 1; + const assistantMsg: any = { + aiSessionId: session.id, + sequence: assistantSequence, + role: "assistant", + content: choice.message.content || "", + }; + + if (choice.message.tool_calls) { + assistantMsg.toolCalls = choice.message.tool_calls; + } + + this.aiSessionManager.addMessage(assistantMsg); + messages.push(choice.message); + + if (choice.message.tool_calls && choice.message.tool_calls.length > 0) { + for (const toolCall of choice.message.tool_calls) { + const toolCallId = toolCall.id; + + if (toolCall.function.name === toolSchema.function.name) { + try { + const parsed = JSON.parse(toolCall.function.arguments); + const result = UserProfileValidator.validate(parsed); + if (!result.valid) { + throw new Error(result.errors.join(", ")); + } + + this.addToolResponse( + session.id, + messages, + toolCallId, + JSON.stringify({ success: true }) + ); + + return { + 
success: true, + data: result.data, + iterations, + }; + } catch (validationError) { + const errorStack = + validationError instanceof Error ? validationError.stack : undefined; + log("DeepSeek tool response validation failed", { + error: String(validationError), + stack: errorStack, + errorType: + validationError instanceof Error + ? validationError.constructor.name + : typeof validationError, + toolName: toolSchema.function.name, + iteration: iterations, + rawArguments: toolCall.function.arguments.slice(0, 500), + }); + + const errorMessage = `Validation failed: ${String(validationError)}`; + this.addToolResponse( + session.id, + messages, + toolCallId, + JSON.stringify({ success: false, error: errorMessage }) + ); + + return { + success: false, + error: errorMessage, + iterations, + }; + } + } + + const wrongToolMessage = `Wrong tool called. Please use ${toolSchema.function.name} instead.`; + this.addToolResponse( + session.id, + messages, + toolCallId, + JSON.stringify({ success: false, error: wrongToolMessage }) + ); + + break; + } + } + + const retrySequence = this.aiSessionManager.getLastSequence(session.id) + 1; + const retryPrompt = `Please use the ${toolSchema.function.name} tool to extract and save the memories from the conversation as instructed.`; + + this.aiSessionManager.addMessage({ + aiSessionId: session.id, + sequence: retrySequence, + role: "user", + content: retryPrompt, + }); + + messages.push({ role: "user", content: retryPrompt }); + } catch (error) { + clearTimeout(timeout); + if (error instanceof Error && error.name === "AbortError") { + return { + success: false, + error: `API request timeout (${iterationTimeout}ms)`, + iterations, + }; + } + return { + success: false, + error: String(error), + iterations, + }; + } + } + + return { + success: false, + error: `Max iterations (${maxIterations}) reached without tool call`, + iterations, + }; + } +} diff --git a/src/services/ai/session/session-types.ts 
b/src/services/ai/session/session-types.ts index 64c6cd0..072c6b0 100644 --- a/src/services/ai/session/session-types.ts +++ b/src/services/ai/session/session-types.ts @@ -1,4 +1,9 @@ -export type AIProviderType = "openai-chat" | "openai-responses" | "anthropic" | "google-gemini"; +export type AIProviderType = + | "openai-chat" + | "openai-responses" + | "anthropic" + | "google-gemini" + | "deepseek"; export interface AIMessage { id?: number; diff --git a/tests/deepseek-provider.test.ts b/tests/deepseek-provider.test.ts new file mode 100644 index 0000000..60efab9 --- /dev/null +++ b/tests/deepseek-provider.test.ts @@ -0,0 +1,330 @@ +import { afterEach, describe, expect, it } from "bun:test"; +import { DeepSeekProvider } from "../src/services/ai/providers/deepseek.js"; +import type { ChatCompletionTool } from "../src/services/ai/tools/tool-schema.js"; + +const toolSchema: ChatCompletionTool = { + type: "function", + function: { + name: "save_memories", + description: "Save memories", + parameters: { + type: "object", + properties: {}, + required: [], + }, + }, +}; + +class FakeSessionManager { + private readonly session = { id: "session-1" }; + private readonly messages: any[] = []; + + getSession(): any { + return null; + } + + createSession(): any { + return this.session; + } + + getMessages(): any[] { + return this.messages; + } + + getLastSequence(): number { + return this.messages.length - 1; + } + + addMessage(message: any): void { + this.messages.push(message); + } +} + +function makeProvider(config: Record<string, unknown> = {}) { + return new DeepSeekProvider( + { model: "deepseek-chat", apiKey: "test-key", ...config }, + new FakeSessionManager() as any + ); +} + +function makeFetch(response: { + ok?: boolean; + status?: number; + statusText?: string; + body?: unknown; +}) { + const textBody = + typeof response.body === "string" ? response.body : JSON.stringify(response.body ?? "error"); + const jsonBody = typeof response.body === "string" ? {} : (response.body ?? 
{}); + return (async (_input: RequestInfo | URL, _init?: RequestInit) => { + return { + ok: response.ok ?? false, + status: response.status ?? 400, + statusText: response.statusText ?? "Bad Request", + text: async () => textBody, + json: async () => jsonBody, + } as Response; + }) as typeof fetch; +} + +describe("DeepSeekProvider", () => { + const originalFetch = globalThis.fetch; + + afterEach(() => { + globalThis.fetch = originalFetch; + }); + + it("getProviderName returns deepseek", () => { + expect(makeProvider().getProviderName()).toBe("deepseek"); + }); + + it("supportsSession returns true", () => { + expect(makeProvider().supportsSession()).toBe(true); + }); + + it("uses provided apiUrl for the request", async () => { + let capturedUrl = ""; + globalThis.fetch = (async (input: RequestInfo | URL, _init?: RequestInit) => { + capturedUrl = String(input); + return { ok: false, status: 400, statusText: "Bad", text: async () => "err" } as Response; + }) as typeof fetch; + + await makeProvider({ apiUrl: "https://api.deepseek.com" }).executeToolCall( + "system", + "user", + toolSchema, + "session-id" + ); + + expect(capturedUrl).toBe("https://api.deepseek.com/chat/completions"); + }); + + it("respects custom apiUrl when provided", async () => { + let capturedUrl = ""; + globalThis.fetch = (async (input: RequestInfo | URL, _init?: RequestInit) => { + capturedUrl = String(input); + return { ok: false, status: 400, statusText: "Bad", text: async () => "err" } as Response; + }) as typeof fetch; + + await makeProvider({ apiUrl: "https://custom.example.com/v1" }).executeToolCall( + "system", + "user", + toolSchema, + "session-id" + ); + + expect(capturedUrl).toBe("https://custom.example.com/v1/chat/completions"); + }); + + it("sends Authorization Bearer header", async () => { + let capturedHeaders: Record<string, string> | undefined; + globalThis.fetch = (async (_input: RequestInfo | URL, init?: RequestInit) => { + capturedHeaders = init?.headers as Record<string, string>; + return { ok: false, status: 
400, statusText: "Bad", text: async () => "err" } as Response; + }) as typeof fetch; + + await makeProvider({ apiKey: "sk-mykey" }).executeToolCall( + "system", + "user", + toolSchema, + "session-id" + ); + + expect(capturedHeaders?.Authorization).toBe("Bearer sk-mykey"); + }); + + it("omits Authorization header when apiKey is not set", async () => { + let capturedHeaders: Record<string, string> | undefined; + globalThis.fetch = (async (_input: RequestInfo | URL, init?: RequestInit) => { + capturedHeaders = init?.headers as Record<string, string>; + return { ok: false, status: 400, statusText: "Bad", text: async () => "err" } as Response; + }) as typeof fetch; + + await makeProvider({ apiKey: undefined }).executeToolCall( + "system", + "user", + toolSchema, + "session-id" + ); + + expect(capturedHeaders?.Authorization).toBeUndefined(); + }); + + it("sends model, messages, tools, tool_choice in request body", async () => { + let capturedBody: Record<string, unknown> | undefined; + globalThis.fetch = (async (_input: RequestInfo | URL, init?: RequestInit) => { + capturedBody = JSON.parse(String(init?.body ?? "{}")); + return { ok: false, status: 400, statusText: "Bad", text: async () => "err" } as Response; + }) as typeof fetch; + + await makeProvider({ model: "deepseek-reasoner" }).executeToolCall( + "system", + "user", + toolSchema, + "session-id" + ); + + expect(capturedBody?.model).toBe("deepseek-reasoner"); + expect(Array.isArray(capturedBody?.messages)).toBe(true); + expect(Array.isArray(capturedBody?.tools)).toBe(true); + expect(capturedBody?.tool_choice).toBe("auto"); + }); + + it("includes temperature 0.3 by default", async () => { + let capturedBody: Record<string, unknown> | undefined; + globalThis.fetch = (async (_input: RequestInfo | URL, init?: RequestInit) => { + capturedBody = JSON.parse(String(init?.body ?? 
"{}")); + return { ok: false, status: 400, statusText: "Bad", text: async () => "err" } as Response; + }) as typeof fetch; + + await makeProvider().executeToolCall("system", "user", toolSchema, "session-id"); + + expect(capturedBody?.temperature).toBe(0.3); + }); + + it("omits temperature when memoryTemperature is false", async () => { + let capturedBody: Record<string, unknown> | undefined; + globalThis.fetch = (async (_input: RequestInfo | URL, init?: RequestInit) => { + capturedBody = JSON.parse(String(init?.body ?? "{}")); + return { ok: false, status: 400, statusText: "Bad", text: async () => "err" } as Response; + }) as typeof fetch; + + await makeProvider({ memoryTemperature: false }).executeToolCall( + "system", + "user", + toolSchema, + "session-id" + ); + + expect(capturedBody?.temperature).toBeUndefined(); + }); + + it("returns success: false with error message on API error response", async () => { + globalThis.fetch = makeFetch({ ok: false, status: 401, body: "Unauthorized" }); + + const result = await makeProvider().executeToolCall("system", "user", toolSchema, "session-id"); + + expect(result.success).toBe(false); + expect(result.error).toContain("401"); + }); + + it("returns friendly message on temperature unsupported error", async () => { + globalThis.fetch = makeFetch({ + ok: false, + status: 400, + body: '{"error": {"type": "unsupported_value", "param": "temperature"}}', + }); + + const result = await makeProvider().executeToolCall("system", "user", toolSchema, "session-id"); + + expect(result.success).toBe(false); + expect(result.error).toContain("memoryTemperature"); + }); + + it("returns success: false when response has no choices", async () => { + globalThis.fetch = makeFetch({ ok: true, body: { choices: [] } } as any); + + const result = await makeProvider().executeToolCall("system", "user", toolSchema, "session-id"); + + expect(result.success).toBe(false); + expect(result.error).toContain("Invalid API response format"); + }); + + it("returns success: false 
when API returns error in response body", async () => { + globalThis.fetch = makeFetch({ + ok: true, + body: { status: "error", msg: "quota exceeded" }, + } as any); + + const result = await makeProvider().executeToolCall("system", "user", toolSchema, "session-id"); + + expect(result.success).toBe(false); + expect(result.error).toContain("quota exceeded"); + }); + + it("returns success: false after max iterations with no tool call", async () => { + globalThis.fetch = makeFetch({ + ok: true, + body: { + choices: [{ message: { content: "I will not use a tool", tool_calls: undefined } }], + }, + } as any); + + const result = await makeProvider({ maxIterations: 2 }).executeToolCall( + "system", + "user", + toolSchema, + "session-id" + ); + + expect(result.success).toBe(false); + expect(result.error).toContain("Max iterations"); + expect(result.iterations).toBe(2); + }); + + it("returns success: true when model calls the correct tool", async () => { + const validArguments = JSON.stringify({ + preferences: [], + patterns: [], + workflows: [], + codingStyle: {}, + domainKnowledge: [], + }); + + globalThis.fetch = makeFetch({ + ok: true, + body: { + choices: [ + { + message: { + content: null, + tool_calls: [ + { + id: "call-1", + type: "function", + function: { name: "save_memories", arguments: validArguments }, + }, + ], + }, + }, + ], + }, + } as any); + + const result = await makeProvider().executeToolCall("system", "user", toolSchema, "session-id"); + + expect(result.success).toBe(true); + expect(result.iterations).toBe(1); + }); + + it("returns success: false when model calls wrong tool name", async () => { + globalThis.fetch = makeFetch({ + ok: true, + body: { + choices: [ + { + message: { + content: null, + tool_calls: [ + { + id: "call-1", + type: "function", + function: { name: "wrong_tool", arguments: "{}" }, + }, + ], + }, + }, + ], + }, + } as any); + + const result = await makeProvider({ maxIterations: 1 }).executeToolCall( + "system", + "user", + toolSchema, 
+ "session-id" + ); + + expect(result.success).toBe(false); + }); +});