
Commit 2fd07ab

chore: remove some code
1 parent 7f7a682 commit 2fd07ab

7 files changed: +279 -214 lines changed

backend/src/chat/chat.resolver.ts

Lines changed: 20 additions & 1 deletion
@@ -1,4 +1,11 @@
-import { Resolver, Subscription, Args } from '@nestjs/graphql';
+import {
+  Resolver,
+  Subscription,
+  Args,
+  Field,
+  ObjectType,
+  Query,
+} from '@nestjs/graphql';
 import { ChatCompletionChunk, ChatInput } from './chat.model';
 import { ChatProxyService } from './chat.service';
 
@@ -24,4 +31,16 @@ export class ChatResolver {
       throw new Error('Chat stream failed');
     }
   }
+
+  @Query(() => ModelTags)
+  async modelTags() {
+    //TODO: model tags api
+    return { tags: ['gpt-3', 'gpt-4', 'davinci'] };
+  }
+}
+
+@ObjectType('ModelTags')
+export class ModelTags {
+  @Field(() => [String])
+  tags: string[];
 }
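Note: the new modelTags query currently returns a hard-coded list (the TODO marks the real tags API as pending). A minimal sketch of exercising it from a client is shown below; the HTTP endpoint URL is an assumption inferred from the WebSocket URL used in frontend/src/app/page.tsx, not something this commit confirms.

// Hypothetical helper; endpoint URL assumed, not taken from this commit.
async function fetchModelTags(): Promise<string[]> {
  const res = await fetch("http://localhost:8080/graphql", {
    method: "POST",
    headers: { "Content-Type": "application/json" },
    body: JSON.stringify({ query: "{ modelTags { tags } }" }),
  });
  const { data } = await res.json();
  return data.modelTags.tags; // ["gpt-3", "gpt-4", "davinci"] with the current stub
}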

frontend/src/app/page.tsx

Lines changed: 154 additions & 139 deletions
@@ -1,203 +1,218 @@
 "use client";
 
+import React, { useEffect, useRef, useState } from "react";
 import { ChatLayout } from "@/components/chat/chat-layout";
-import { Button } from "@/components/ui/button";
 import {
   Dialog,
-  DialogDescription,
+  DialogContent,
   DialogHeader,
   DialogTitle,
-  DialogContent,
+  DialogDescription,
 } from "@/components/ui/dialog";
-import { Input } from "@/components/ui/input";
 import UsernameForm from "@/components/username-form";
 import { getSelectedModel } from "@/lib/model-helper";
-import { ChatOllama } from "@langchain/community/chat_models/ollama";
-import { AIMessage, HumanMessage } from "@langchain/core/messages";
-import { BytesOutputParser } from "@langchain/core/output_parsers";
-import { Attachment, ChatRequestOptions } from "ai";
-import { Message, useChat } from "ai/react";
-import React, { useEffect, useRef, useState } from "react";
 import { toast } from "sonner";
 import { v4 as uuidv4 } from "uuid";
 import useChatStore from "./hooks/useChatStore";
+import { Message } from "@/components/types";
 
 export default function Home() {
-  const {
-    messages,
-    input,
-    handleInputChange,
-    handleSubmit,
-    isLoading,
-    error,
-    data,
-    stop,
-    setMessages,
-    setInput,
-  } = useChat({
-    onResponse: (response) => {
-      if (response) {
-        setLoadingSubmit(false);
-      }
-    },
-    onError: (error) => {
-      setLoadingSubmit(false);
-      toast.error("An error occurred. Please try again.");
-    },
-  });
-  const [chatId, setChatId] = React.useState<string>("");
-  const [selectedModel, setSelectedModel] = React.useState<string>(
+  const [messages, setMessages] = useState<Message[]>([]);
+  const [input, setInput] = useState("");
+  const [isLoading, setIsLoading] = useState(false);
+  const [error, setError] = useState<Error | null>(null);
+  const [chatId, setChatId] = useState<string>("");
+  const [selectedModel, setSelectedModel] = useState<string>(
     getSelectedModel()
   );
-  const [open, setOpen] = React.useState(false);
-  const [ollama, setOllama] = useState<ChatOllama>();
-  const env = process.env.NODE_ENV;
-  const [loadingSubmit, setLoadingSubmit] = React.useState(false);
+  const [open, setOpen] = useState(false);
+  const [loadingSubmit, setLoadingSubmit] = useState(false);
   const formRef = useRef<HTMLFormElement>(null);
+
   const base64Images = useChatStore((state) => state.base64Images);
   const setBase64Images = useChatStore((state) => state.setBase64Images);
+  const ws = useRef<WebSocket | null>(null);
 
   useEffect(() => {
     if (messages.length < 1) {
-      // Generate a random id for the chat
-      console.log("Generating chat id");
       const id = uuidv4();
       setChatId(id);
     }
   }, [messages]);
 
-  React.useEffect(() => {
+  useEffect(() => {
     if (!isLoading && !error && chatId && messages.length > 0) {
-      // Save messages to local storage
       localStorage.setItem(`chat_${chatId}`, JSON.stringify(messages));
-      // Trigger the storage event to update the sidebar component
      window.dispatchEvent(new Event("storage"));
     }
-  }, [chatId, isLoading, error]);
+  }, [chatId, isLoading, error, messages]);
 
   useEffect(() => {
-    if (env === "production") {
-      const newOllama = new ChatOllama({
-        baseUrl: process.env.NEXT_PUBLIC_OLLAMA_URL || "http://localhost:11434",
-        model: selectedModel,
-      });
-      setOllama(newOllama);
-    }
+    // Initialize the WebSocket connection
+    ws.current = new WebSocket("ws://localhost:8080/graphql");
+
+    ws.current.onopen = () => {
+      console.log("WebSocket connected");
+    };
+
+    ws.current.onerror = (error) => {
+      console.error("WebSocket error:", error);
+      toast.error("Connection error. Retrying...");
+    };
 
     if (!localStorage.getItem("ollama_user")) {
       setOpen(true);
     }
-  }, [selectedModel]);
-
-  const addMessage = (Message: Message) => {
-    messages.push(Message);
-    window.dispatchEvent(new Event("storage"));
-    setMessages([...messages]);
-  };
 
-  // Function to handle chatting with Ollama in production (client side)
-  const handleSubmitProduction = async (
-    e: React.FormEvent<HTMLFormElement>
-  ) => {
-    e.preventDefault();
+    return () => {
+      if (ws.current) {
+        ws.current.close();
+      }
+    };
+  }, []);
 
-    addMessage({ role: "user", content: input, id: chatId });
-    setInput("");
+  const handleInputChange = (e: React.ChangeEvent<HTMLTextAreaElement>) => {
+    setInput(e.target.value);
+  };
 
-    if (ollama) {
-      try {
-        const parser = new BytesOutputParser();
-
-        const stream = await ollama
-          .pipe(parser)
-          .stream(
-            (messages as Message[]).map((m) =>
-              m.role == "user"
-                ? new HumanMessage(m.content)
-                : new AIMessage(m.content)
-            )
-          );
-
-        const decoder = new TextDecoder();
-
-        let responseMessage = "";
-        for await (const chunk of stream) {
-          const decodedChunk = decoder.decode(chunk);
-          responseMessage += decodedChunk;
-          setLoadingSubmit(false);
-          setMessages([
-            ...messages,
-            { role: "assistant", content: responseMessage, id: chatId },
-          ]);
-        }
-        addMessage({ role: "assistant", content: responseMessage, id: chatId });
-        setMessages([...messages]);
-
-        localStorage.setItem(`chat_${chatId}`, JSON.stringify(messages));
-        // Trigger the storage event to update the sidebar component
-        window.dispatchEvent(new Event("storage"));
-      } catch (error) {
-        toast.error("An error occurred. Please try again.");
-        setLoadingSubmit(false);
-      }
+  const stop = () => {
+    // Logic to stop generation
+    if (ws.current) {
+      ws.current.send(
+        JSON.stringify({
+          type: "stop",
+          id: chatId,
+        })
+      );
     }
   };
 
-  const onSubmit = (e: React.FormEvent<HTMLFormElement>) => {
+  const onSubmit = async (e: React.FormEvent<HTMLFormElement>) => {
     e.preventDefault();
+    if (
+      !input.trim() ||
+      !ws.current ||
+      ws.current.readyState !== WebSocket.OPEN
+    ) {
+      return;
+    }
+
     setLoadingSubmit(true);
 
-    setMessages([...messages]);
+    const newMessage: Message = {
+      id: uuidv4(),
+      role: "user",
+      content: input,
+      createdAt: new Date().toISOString(),
+    };
 
-    const attachments: Attachment[] = base64Images
-      ? base64Images.map((image) => ({
-          contentType: 'image/base64', // Content type for base64 images
-          url: image, // The base64 image data
-        }))
-      : [];
+    setMessages((prev) => [...prev, newMessage]);
+    setInput("");
 
-    // Prepare the options object with additional body data, to pass the model.
-    const requestOptions: ChatRequestOptions = {
-      options: {
-        body: {
-          selectedModel: selectedModel,
+    const attachments = base64Images
+      ? base64Images.map((image) => ({
+          contentType: "image/base64",
+          url: image,
+        }))
+      : [];
+
+    // GraphQL subscription request
+    const subscriptionMsg = {
+      type: "start",
+      id: Date.now().toString(),
+      payload: {
+        query: `
+          subscription ChatStream($input: ChatInputType!) {
+            chatStream(input: $input) {
+              choices {
+                delta {
+                  content
+                }
+                finish_reason
+                index
+              }
+              created
+              id
+              model
+              object
+            }
+          }
+        `,
+        variables: {
+          input: {
+            message: input,
+            chatId,
+            model: selectedModel,
+            attachments,
+          },
         },
       },
-      ...(base64Images && {
-        data: {
-          images: base64Images,
-        },
-        experimental_attachments: attachments
-      }),
     };
 
-    messages.slice(0, -1)
-
+    try {
+      // Set up the message handler
+      ws.current.onmessage = (event) => {
+        const response = JSON.parse(event.data);
+
+        if (response.type === "data" && response.payload.data) {
+          const chunk = response.payload.data.chatStream;
+          const content = chunk.choices[0]?.delta?.content;
+
+          if (content) {
+            setMessages((prev) => {
+              const lastMsg = prev[prev.length - 1];
+              if (lastMsg?.role === "assistant") {
+                return [
+                  ...prev.slice(0, -1),
+                  { ...lastMsg, content: lastMsg.content + content },
+                ];
+              } else {
+                return [
+                  ...prev,
+                  {
+                    id: chunk.id,
+                    role: "assistant",
+                    content,
+                    createdAt: new Date(chunk.created * 1000).toISOString(),
+                  },
+                ];
+              }
+            });
+          }
+
+          if (chunk.choices[0]?.finish_reason === "stop") {
+            setLoadingSubmit(false);
+            // Save to local storage
+            localStorage.setItem(`chat_${chatId}`, JSON.stringify(messages));
+            window.dispatchEvent(new Event("storage"));
+          }
+        }
+      };
 
-    if (env === "production") {
-      handleSubmitProduction(e);
-      setBase64Images(null)
-    } else {
-      // Call the handleSubmit function with the options
-      handleSubmit(e, requestOptions);
-      setBase64Images(null)
+      // Send the subscription request
+      ws.current.send(JSON.stringify(subscriptionMsg));
+      setBase64Images(null);
+    } catch (error) {
+      console.error("Error:", error);
+      toast.error("An error occurred. Please try again.");
+      setLoadingSubmit(false);
     }
   };
 
-  const onOpenChange = (isOpen: boolean) => {
-    const username = localStorage.getItem("ollama_user")
-    if (username) return setOpen(isOpen)
+  const onOpenChange = (isOpen: boolean) => {
+    const username = localStorage.getItem("ollama_user");
+    if (username) return setOpen(isOpen);
+
+    localStorage.setItem("ollama_user", "Anonymous");
+    window.dispatchEvent(new Event("storage"));
+    setOpen(isOpen);
+  };
 
-    localStorage.setItem("ollama_user", "Anonymous")
-    window.dispatchEvent(new Event("storage"))
-    setOpen(isOpen)
-  }
-
   return (
-    <main className="flex h-[calc(100dvh)] flex-col items-center ">
+    <main className="flex h-[calc(100dvh)] flex-col items-center">
       <Dialog open={open} onOpenChange={onOpenChange}>
         <ChatLayout
-          chatId=""
+          chatId={chatId}
          setSelectedModel={setSelectedModel}
           messages={messages}
           input={input}
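Note: the rewritten page imports Message from "@/components/types", a module not included in this commit. The sketch below is an assumed shape, inferred only from how the fields are used in this file, not from the actual definition. The "start" / "data" / "stop" message types also suggest a legacy subscriptions-transport-ws style WebSocket protocol on the backend, which this commit does not show.

// Assumed shape of Message in "@/components/types", inferred from usage in
// page.tsx (uuid string ids, user/assistant roles, ISO-8601 createdAt).
export interface Message {
  id: string;
  role: "user" | "assistant";
  content: string;
  createdAt?: string; // e.g. new Date().toISOString()
}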
