Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions index.ts
Original file line number Diff line number Diff line change
Expand Up @@ -1759,6 +1759,7 @@ const memoryLanceDBProPlugin = {
oauthPath: llmOauthPath,
timeoutMs: llmTimeoutMs,
log: (msg: string) => api.logger.debug(msg),
warnLog: (msg: string) => api.logger.warn(msg),
});

// Initialize embedding-based noise prototype bank (async, non-blocking)
Expand Down
15 changes: 9 additions & 6 deletions src/llm-client.ts
Original file line number Diff line number Diff line change
Expand Up @@ -23,6 +23,8 @@ export interface LlmClientConfig {
oauthProvider?: string;
timeoutMs?: number;
log?: (msg: string) => void;
/** Warn-level logger for user-visible failures (timeouts, retries, network errors). */
warnLog?: (msg: string) => void;
}

export interface LlmClient {
Expand Down Expand Up @@ -172,7 +174,7 @@ function createTimeoutSignal(timeoutMs?: number): { signal: AbortSignal; dispose
};
}

function createApiKeyClient(config: LlmClientConfig, log: (msg: string) => void): LlmClient {
function createApiKeyClient(config: LlmClientConfig, log: (msg: string) => void, warnLog?: (msg: string) => void): LlmClient {
if (!config.apiKey) {
throw new Error("LLM api-key mode requires llm.apiKey or embedding.apiKey");
}
Expand Down Expand Up @@ -249,7 +251,7 @@ function createApiKeyClient(config: LlmClientConfig, log: (msg: string) => void)
} catch (err) {
lastError =
`memory-lancedb-pro: llm-client [${label}] request failed for model ${config.model}: ${err instanceof Error ? err.message : String(err)}`;
log(lastError);
(warnLog ?? log)(lastError);
return null;
}
},
Expand All @@ -259,7 +261,7 @@ function createApiKeyClient(config: LlmClientConfig, log: (msg: string) => void)
};
}

function createOauthClient(config: LlmClientConfig, log: (msg: string) => void): LlmClient {
function createOauthClient(config: LlmClientConfig, log: (msg: string) => void, warnLog?: (msg: string) => void): LlmClient {
if (!config.oauthPath) {
throw new Error("LLM oauth mode requires llm.oauthPath");
}
Expand Down Expand Up @@ -400,7 +402,7 @@ function createOauthClient(config: LlmClientConfig, log: (msg: string) => void):
} catch (err) {
lastError =
`memory-lancedb-pro: llm-client [${label}] OAuth request failed for model ${config.model}: ${err instanceof Error ? err.message : String(err)}`;
log(lastError);
(warnLog ?? log)(lastError);
return null;
}
},
Expand All @@ -412,10 +414,11 @@ function createOauthClient(config: LlmClientConfig, log: (msg: string) => void):

/**
 * Build an LlmClient from the given config.
 *
 * Dispatches on `config.auth`: "oauth" yields an OAuth-token client,
 * anything else yields an API-key client. The debug logger defaults to
 * a no-op when the caller supplies none; the optional warn-level logger
 * is threaded through so request failures can surface at warn severity.
 */
export function createLlmClient(config: LlmClientConfig): LlmClient {
  const debugLog = config.log ?? (() => {});
  const { warnLog } = config;
  return config.auth === "oauth"
    ? createOauthClient(config, debugLog, warnLog)
    : createApiKeyClient(config, debugLog, warnLog);
}

export { extractJsonFromResponse, repairCommonJson };
Loading