From 4a0f084450c853dda6fe03c03d71231709d16cf1 Mon Sep 17 00:00:00 2001
From: Nicolas Bonamy
Date: Fri, 28 Jun 2024 21:13:09 -0500
Subject: [PATCH] test fixes

---
 src/services/ollama.ts           |  2 +-
 tests/unit/config.test.ts        |  2 ++
 tests/unit/engine_ollama.test.ts | 44 ++++++++++++++++++++++----------------
 3 files changed, 25 insertions(+), 23 deletions(-)

diff --git a/src/services/ollama.ts b/src/services/ollama.ts
index 87e444f..b7858a7 100644
--- a/src/services/ollama.ts
+++ b/src/services/ollama.ts
@@ -95,7 +95,7 @@ export default class extends LlmEngine {
   }
 
   async stop() {
-    await ollama.abort()
+    await this.client.abort()
   }
 
   // eslint-disable-next-line @typescript-eslint/no-unused-vars
diff --git a/tests/unit/config.test.ts b/tests/unit/config.test.ts
index d7d8154..f08c613 100644
--- a/tests/unit/config.test.ts
+++ b/tests/unit/config.test.ts
@@ -16,6 +16,8 @@ vi.mock('fs', async (importOriginal) => {
 test('Load default settings', () => {
   const loaded = config.loadSettings('')
   delete loaded.getActiveModel
+  loaded.engines.openai.baseURL = defaultSettings.engines.openai.baseURL
+  loaded.engines.ollama.baseURL = defaultSettings.engines.ollama.baseURL
   expect(loaded).toStrictEqual(defaultSettings)
   expect(loaded.general.language).toBe('')
   expect(loaded.engines.openai.models.chat).toStrictEqual([])
diff --git a/tests/unit/engine_ollama.test.ts b/tests/unit/engine_ollama.test.ts
index 6baf2d6..e596791 100644
--- a/tests/unit/engine_ollama.test.ts
+++ b/tests/unit/engine_ollama.test.ts
@@ -10,27 +10,27 @@ import { loadOllamaModels } from '../../src/services/llm'
 import { Model } from '../../src/types/config.d'
 
 vi.mock('ollama/browser', async() => {
-  return { default : {
-    list: vi.fn(() => {
-      return { models: [
-        { model: 'model2', name: 'model2' },
-        { model: 'model1', name: 'model1' },
-      ] }
-    }),
-    chat: vi.fn((opts) => {
-      if (opts.stream) {
-        return {
-          controller: {
-            abort: vi.fn()
-          }
+  const Ollama = vi.fn()
+  Ollama.prototype.list = vi.fn(() => {
+    return { models: [
+      { model: 'model2', name: 'model2' },
+      { model: 'model1', name: 'model1' },
+    ] }
+  })
+  Ollama.prototype.chat = vi.fn((opts) => {
+    if (opts.stream) {
+      return {
+        controller: {
+          abort: vi.fn()
         }
       }
-      else {
-        return { message: { content: 'response' } }
-      }
-    }),
-    abort: vi.fn(),
-  }}
+    }
+    else {
+      return { message: { content: 'response' } }
+    }
+  })
+  Ollama.prototype.abort = vi.fn()
+  return { Ollama: Ollama }
 })
 
 beforeEach(() => {
@@ -61,7 +61,7 @@ test('Ollama completion', async () => {
     new Message('system', 'instruction'),
     new Message('user', 'prompt'),
   ], null)
-  expect(_ollama.default.chat).toHaveBeenCalled()
+  expect(_ollama.Ollama.prototype.chat).toHaveBeenCalled()
   expect(response).toStrictEqual({
     type: 'text',
     content: 'response'
@@ -74,10 +74,10 @@ test('Ollama stream', async () => {
     new Message('system', 'instruction'),
     new Message('user', 'prompt'),
   ], null)
-  expect(_ollama.default.chat).toHaveBeenCalled()
+  expect(_ollama.Ollama.prototype.chat).toHaveBeenCalled()
   expect(response.controller).toBeDefined()
   await ollama.stop()
-  expect(_ollama.default.abort).toHaveBeenCalled()
+  expect(_ollama.Ollama.prototype.abort).toHaveBeenCalled()
 })
 
 test('Ollama image', async () => {