diff --git a/README.md b/README.md
index f8d5c05..5082ab8 100644
--- a/README.md
+++ b/README.md
@@ -83,6 +83,7 @@ To use Internet search you need a [Tavily API key](https://app.tavily.com/home).
 
 ## DONE
 
+- [x] Ollama base URL as settings
 - [x] OpenAI base URL as settings
 - [x] DALL-E as tool
 - [x] Google Gemini API
diff --git a/defaults/settings.json b/defaults/settings.json
index 1d2d79e..26667e1 100644
--- a/defaults/settings.json
+++ b/defaults/settings.json
@@ -94,7 +94,7 @@
     }
   },
   "ollama": {
-    "baseURL": "https://api.ollama.com",
+    "baseURL": "http://127.0.0.1:11434",
     "models": {
       "chat": [],
       "image": []
diff --git a/src/services/ollama.ts b/src/services/ollama.ts
index 5418c3c..87e444f 100644
--- a/src/services/ollama.ts
+++ b/src/services/ollama.ts
@@ -3,7 +3,7 @@ import { Message } from '../types/index.d'
 import { LLmCompletionPayload, LlmChunk, LlmCompletionOpts, LlmResponse, LlmStream, LlmEventCallback } from '../types/llm.d'
 import { EngineConfig, Configuration } from '../types/config.d'
 import LlmEngine from './engine'
-import ollama from 'ollama/dist/browser.mjs'
+import { Ollama } from 'ollama/dist/browser.mjs'
 import { ChatResponse, ProgressResponse } from 'ollama'
 
 export const isOllamaReady = (engineConfig: EngineConfig): boolean => {
@@ -24,7 +24,9 @@
 
   constructor(config: Configuration) {
     super(config)
-    this.client = ollama
+    this.client = new Ollama({
+      host: config.engines.ollama.baseURL,
+    })
   }
 
   getName(): string {
diff --git a/src/settings/SettingsOllama.vue b/src/settings/SettingsOllama.vue
index aef6471..d79c32a 100644
--- a/src/settings/SettingsOllama.vue
+++ b/src/settings/SettingsOllama.vue
@@ -28,6 +28,10 @@