-
Notifications
You must be signed in to change notification settings - Fork 375
/
SimpleChatEngine.ts
72 lines (64 loc) · 2.15 KB
/
SimpleChatEngine.ts
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
import type { ChatHistory } from "../../ChatHistory.js";
import { getHistory } from "../../ChatHistory.js";
import { Response } from "../../Response.js";
import { wrapEventCaller } from "../../internal/context/EventCaller.js";
import type { ChatResponseChunk, LLM } from "../../llm/index.js";
import { OpenAI } from "../../llm/index.js";
import {
extractText,
streamConverter,
streamReducer,
} from "../../llm/utils.js";
import type {
ChatEngine,
ChatEngineParamsNonStreaming,
ChatEngineParamsStreaming,
} from "./types.js";
/**
 * SimpleChatEngine is the simplest possible chat engine. Useful for using your own custom prompts.
 */
export class SimpleChatEngine implements ChatEngine {
  chatHistory: ChatHistory;
  llm: LLM;

  constructor(init?: Partial<SimpleChatEngine>) {
    // Wrap whatever history (or nothing) the caller provided in a ChatHistory.
    this.chatHistory = getHistory(init?.chatHistory);
    // Default to OpenAI when no LLM is supplied.
    this.llm = init?.llm ?? new OpenAI();
  }

  chat(params: ChatEngineParamsStreaming): Promise<AsyncIterable<Response>>;
  chat(params: ChatEngineParamsNonStreaming): Promise<Response>;
  @wrapEventCaller
  async chat(
    params: ChatEngineParamsStreaming | ChatEngineParamsNonStreaming,
  ): Promise<Response | AsyncIterable<Response>> {
    const { message, stream } = params;

    // A per-call history overrides the engine's own history for this turn.
    const history = params.chatHistory
      ? getHistory(params.chatHistory)
      : this.chatHistory;
    history.addMessage({ content: message, role: "user" });

    if (stream) {
      // Streaming path: accumulate deltas so the complete assistant reply
      // can be appended to the history once the stream finishes.
      const chunkStream = await this.llm.chat({
        messages: await history.requestMessages(),
        stream: true,
      });
      const accumulated = streamReducer({
        stream: chunkStream,
        initialValue: "",
        reducer: (acc, chunk) => acc + chunk.delta,
        finished: (acc) => {
          history.addMessage({ content: acc, role: "assistant" });
        },
      });
      // Convert each raw LLM chunk into a Response for the caller.
      return streamConverter(
        accumulated,
        (chunk: ChatResponseChunk) => new Response(chunk.delta),
      );
    }

    // Non-streaming path: single round trip, then record the assistant message.
    const result = await this.llm.chat({
      messages: await history.requestMessages(),
    });
    history.addMessage(result.message);
    return new Response(extractText(result.message.content));
  }

  reset() {
    this.chatHistory.reset();
  }
}