diff --git a/README.md b/README.md
index f8d5c05..5082ab8 100644
--- a/README.md
+++ b/README.md
@@ -83,6 +83,7 @@ To use Internet search you need a [Tavily API key](https://app.tavily.com/home).
 
 ## DONE
 
+- [x] Ollama base URL as settings
 - [x] OpenAI base URL as settings
 - [x] DALL-E as tool
 - [x] Google Gemini API
diff --git a/defaults/settings.json b/defaults/settings.json
index 1d2d79e..26667e1 100644
--- a/defaults/settings.json
+++ b/defaults/settings.json
@@ -94,7 +94,7 @@
     }
   },
   "ollama": {
-    "baseURL": "https://api.ollama.com",
+    "baseURL": "http://127.0.0.1:11434",
     "models": {
       "chat": [],
       "image": []
diff --git a/src/services/ollama.ts b/src/services/ollama.ts
index 5418c3c..87e444f 100644
--- a/src/services/ollama.ts
+++ b/src/services/ollama.ts
@@ -3,7 +3,7 @@ import { Message } from '../types/index.d'
 import { LLmCompletionPayload, LlmChunk, LlmCompletionOpts, LlmResponse, LlmStream, LlmEventCallback } from '../types/llm.d'
 import { EngineConfig, Configuration } from '../types/config.d'
 import LlmEngine from './engine'
-import ollama from 'ollama/dist/browser.mjs'
+import { Ollama } from 'ollama/dist/browser.mjs'
 import { ChatResponse, ProgressResponse } from 'ollama'
 
 export const isOllamaReady = (engineConfig: EngineConfig): boolean => {
@@ -24,7 +24,9 @@ export default class extends LlmEngine {
 
   constructor(config: Configuration) {
     super(config)
-    this.client = ollama
+    this.client = new Ollama({
+      host: config.engines.ollama.baseURL,
+    })
   }
 
   getName(): string {
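The `src/services/ollama.ts` change above is the heart of the patch: the default export of `ollama/dist/browser.mjs` is a shared singleton client pinned to the library's default endpoint, so honoring a user-supplied base URL means constructing an `Ollama` instance explicitly. A minimal sketch of that pattern, assuming the `ollama` JavaScript library; the fallback constant and the `listModels` helper are illustrative, not part of the patch:

```ts
// Sketch of the client pattern the patch adopts: build a dedicated Ollama
// instance against whatever host the user saved, instead of relying on the
// library's default singleton. DEFAULT_OLLAMA_HOST and listModels are
// illustrative helpers, not code from the patch.
import { Ollama } from 'ollama/dist/browser.mjs'

const DEFAULT_OLLAMA_HOST = 'http://127.0.0.1:11434'

function buildClient(baseURL?: string): Ollama {
  return new Ollama({ host: baseURL || DEFAULT_OLLAMA_HOST })
}

// Quick connectivity check against the configured host
async function listModels(baseURL?: string): Promise<string[]> {
  const client = buildClient(baseURL)
  const response = await client.list()
  return response.models.map((model) => model.name)
}
```

Note that the new default in `defaults/settings.json`, `http://127.0.0.1:11434`, is the standard address a local Ollama server listens on, which is why it replaces the previous remote-style URL.
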
diff --git a/src/settings/SettingsOllama.vue b/src/settings/SettingsOllama.vue
index aef6471..d79c32a 100644
--- a/src/settings/SettingsOllama.vue
+++ b/src/settings/SettingsOllama.vue
@@ -28,6 +28,10 @@
         <div class="progress" v-if="pull_progress">{{ pull_progress }}</div>
       </div>
     </div>
+    <div class="group">
+      <label>API Base URL</label>
+      <input v-model="baseURL" :placeholder="defaults.engines.ollama.baseURL" @keydown.enter.prevent="save" @change="save"/>
+    </div>
   </div>
 </template>
 
@@ -37,7 +41,9 @@ import { ref, nextTick } from 'vue'
 import { store } from '../services/store'
 import { loadOllamaModels } from '../services/llm'
 import Ollama, { getPullableModels } from '../services/ollama'
+import defaults from '../../defaults/settings.json'
 
+const baseURL = ref(null)
 const refreshLabel = ref('Refresh')
 const chat_model = ref(null)
 const chat_models = ref([])
@@ -50,7 +56,8 @@ const pullStream = ref(null)
 let ollama = new Ollama(store.config)
 
 const load = () => {
-  chat_models.value = store.config.engines.ollama.models.chat || []
+  baseURL.value = store.config.engines.ollama?.baseURL || ''
+  chat_models.value = store.config.engines.ollama?.models.chat || []
   chat_model.value = store.config.engines.ollama?.model?.chat || ''
   pull_models.value = getPullableModels
 }
@@ -132,6 +139,7 @@ const onStop = async () => {
 }
 
 const save = () => {
+  store.config.engines.ollama.baseURL = baseURL.value
   store.config.engines.ollama.model.chat = chat_model.value
   store.saveSettings()
 }
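Because the component now rebuilds its client from the saved base URL, everything that streams through it, including the model-pull progress shown via `pull_progress` and `pullStream`, targets the configured host. A hedged sketch of consuming such a pull stream with the `ollama` library; the model name and percentage formatting are illustrative choices, not values from the patch:

```ts
// Illustrative only: streams pull progress from whichever host the settings
// point at. The model name and log formatting are examples, not patch code.
import { Ollama } from 'ollama/dist/browser.mjs'

async function pullWithProgress(baseURL: string, model: string): Promise<void> {
  const client = new Ollama({ host: baseURL })
  const stream = await client.pull({ model, stream: true })
  for await (const progress of stream) {
    if (progress.total) {
      // ProgressResponse carries completed/total byte counts while downloading
      const pct = Math.floor((100 * (progress.completed ?? 0)) / progress.total)
      console.log(`${progress.status}: ${pct}%`)
    } else {
      console.log(progress.status)
    }
  }
}
```

The client's `abort()` method cancels an in-flight stream like this one, which is one natural way to back a stop button such as the component's `onStop` handler.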