From a14bca21ef353b5ee383040b0d2b50c9e580e802 Mon Sep 17 00:00:00 2001
From: Nicolas Bonamy
Date: Wed, 1 May 2024 15:46:22 -0500
Subject: [PATCH] assistant errors tests

---
 src/services/assistant.ts    |  3 +++
 tests/mocks/llm.ts           | 22 ++++++++++++++++++++++
 tests/unit/assistant.test.ts | 14 +++++++++++++-
 3 files changed, 38 insertions(+), 1 deletion(-)

diff --git a/src/services/assistant.ts b/src/services/assistant.ts
index fd5e0b0..e3a7999 100644
--- a/src/services/assistant.ts
+++ b/src/services/assistant.ts
@@ -180,10 +180,13 @@ export default class {
       if (error.name !== 'AbortError') {
         if (error.status === 401 || error.message.includes('401') || error.message.toLowerCase().includes('apikey')) {
           message.setText('You need to enter your API key in the Models tab of Settings in order to chat.')
+          callback?.call(null, { done: true })
         } else if (error.status === 400 && (error.message.includes('credit') || error.message.includes('balance'))) {
           message.setText('Sorry, it seems you have run out of credits. Check the balance of your LLM provider account.')
+          callback?.call(null, { done: true })
         } else if (message.content === '') {
           message.setText('Sorry, I could not generate text for that prompt.')
+          callback?.call(null, { done: true })
         } else {
           message.appendText({ text: '\n\nSorry, I am not able to continue here.', done: true })
         }
diff --git a/tests/mocks/llm.ts b/tests/mocks/llm.ts
index d8e96ab..4b54768 100644
--- a/tests/mocks/llm.ts
+++ b/tests/mocks/llm.ts
@@ -6,6 +6,20 @@
 import { Configuration } from '../../src/types/config.d'
 import LlmEngine from '../../src/services/engine'
 import RandomChunkStream from './stream'
+class LlmError extends Error {
+
+  name: string
+  status: number
+  message: string
+
+  constructor(name: string, status: number, message: string) {
+    super()
+    this.name = name
+    this.status = status
+    this.message = message
+  }
+}
+
 export default class LlmMock extends LlmEngine {
 
   constructor(config: Configuration) {
@@ -53,6 +67,14 @@ export default class LlmMock extends LlmEngine {
 
   async stream(thread: Message[], opts: LlmCompletionOpts): Promise<LlmStream> {
 
+    // errors
+    if (thread[thread.length-1].content.includes('no api key')) {
+      throw new LlmError('NoApiKeyError', 401, 'Missing apiKey')
+    }
+    if (thread[thread.length-1].content.includes('no credit')) {
+      throw new LlmError('LowBalanceError', 400, 'Your balance is too low')
+    }
+
     // model: switch to vision if needed
     const model = this.selectModel(thread, opts?.model || this.getChatModel())
 
diff --git a/tests/unit/assistant.test.ts b/tests/unit/assistant.test.ts
index cf2b3b4..4f8ffb6 100644
--- a/tests/unit/assistant.test.ts
+++ b/tests/unit/assistant.test.ts
@@ -128,4 +128,16 @@ test('Conversation language', async () => {
 
   const instructions = await assistant.chat.messages[0].content
   expect(instructions).toMatch(/French/)
-})
\ No newline at end of file
+})
+
+test('No API Key', async () => {
+  await prompt('no api key')
+  const content = assistant.chat.lastMessage().content
+  expect(content).toBe('You need to enter your API key in the Models tab of Settings in order to chat.')
+})
+
+test('Low balance', async () => {
+  await prompt('no credit left')
+  const content = assistant.chat.lastMessage().content
+  expect(content).toBe('Sorry, it seems you have run out of credits. Check the balance of your LLM provider account.')
+})