Skip to content

Commit bbd670e

Browse files
committed
Merge branch 'main' of github.com:fancn21th/xyzchat into main
2 parents 4ccb1a4 + 9fc1d06 commit bbd670e

File tree

8 files changed

+166
-18
lines changed

8 files changed

+166
-18
lines changed

README.md

+35-2
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,32 @@
11
# xyzchat 项目
22

3+
## Prerequisites
4+
5+
You should have a solid understanding of the following technologies:
6+
7+
- [React](https://reactjs.org/)
8+
9+
You should also have a basic understanding of the following technologies:
10+
11+
- [Next.js](https://nextjs.org/)
12+
- [TypeScript](https://www.typescriptlang.org/)
13+
- [Tailwind CSS](https://tailwindcss.com/)
14+
- [Turbo](https://turbo.hotwired.dev/)
15+
- [Storybook](https://storybook.js.org/)
16+
- [Radix UI](https://radix-ui.com/)
17+
- [Shadcn UI](https://ui.shadcn.com/)
18+
- [AI SDK](https://sdk.vercel.ai/)
19+
20+
Nevertheless, you can still follow along if you are willing to learn.
21+
22+
- Firstly, you will find that the business logic in the mobile app is only about 100 lines of code.
23+
24+
This is because the details are hidden in the core components as well as in the blah project.
25+
26+
- Secondly, you will find that component development is separated from the mobile app.
27+
28+
Even though this may seem like over-engineering, I find that engineers tend to produce good interface designs unconsciously this way.
29+
330
## How to setup
431

532
> This is a monorepo project driven by turbo
@@ -16,13 +43,19 @@ pnpm build
1643

1744
## How to dev
1845

19-
- build components
46+
- components
2047

2148
```bash
2249
pnpm dev:component
2350
```
2451

25-
- build mobile app
52+
- mobile app
53+
54+
```bash
55+
pnpm build # build the core & blah projects
56+
pnpm mock # start mock server
57+
pnpm dev # start mobile app
58+
```
2659

2760
## 项目目标
2861

apps/mobile/package.json

+1
Original file line numberDiff line numberDiff line change
@@ -10,6 +10,7 @@
1010
},
1111
"dependencies": {
1212
"@ai-sdk/azure": "0.0.32",
13+
"@ai-sdk/blah": "workspace:*",
1314
"ai": "3.3.17",
1415
"clsx": "2.1.1",
1516
"next": "14.2.6",

apps/mobile/src/app/api/chat/route.ts

+8-5
Original file line numberDiff line numberDiff line change
@@ -1,10 +1,13 @@
11
import { streamText, convertToCoreMessages } from "ai";
2-
import { createAzure } from "@ai-sdk/azure";
2+
// import { createAzure } from "@ai-sdk/azure";
3+
import { createMistral } from "@ai-sdk/blah";
34

4-
const openai = createAzure({
5-
resourceName: process.env.AZURE_OPENAI_RESOURCE_NAME, // Azure resource name
6-
apiKey: process.env.AZURE_OPENAI_API_KEY,
7-
});
5+
// const openai = createAzure({
6+
// resourceName: process.env.AZURE_OPENAI_RESOURCE_NAME, // Azure resource name
7+
// apiKey: process.env.AZURE_OPENAI_API_KEY,
8+
// });
9+
10+
const openai = createMistral({});
811

912
export async function POST(request: Request) {
1013
const { messages } = await request.json();

apps/mock-api/src/handlers.ts

+60-7
Original file line numberDiff line numberDiff line change
@@ -19,16 +19,69 @@ export const handlers = [
1919
http.get("/user", () => {
2020
return HttpResponse.json({ firstName: "alex" });
2121
}),
22-
http.post("/mock/api/completion", async () => {
23-
const data = new StreamData();
24-
const stream = OpenAIStream(await fetch(DEFAULT_TEST_URL), {
25-
onFinal() {
26-
data.close();
22+
// the mock api below is for AI SDK UI component sending a request to the OpenAI API
23+
// http.post("/mock/chat/completions", async () => {
24+
// const data = new StreamData();
25+
// const stream = OpenAIStream(await fetch(DEFAULT_TEST_URL), {
26+
// onFinal() {
27+
// data.close();
28+
// },
29+
// });
30+
// const response = new StreamingTextResponse(stream, {}, data);
31+
// return response;
32+
// }),
33+
// the mock api below is for Custom Provider sending a request to the OpenAI API
34+
http.post("/mock/chat/completions", async () => {
35+
const encoder = new TextEncoder();
36+
const content = [
37+
"Hello",
38+
", ",
39+
"world!",
40+
" You",
41+
" are",
42+
" calling",
43+
" a",
44+
" custom",
45+
" ai",
46+
" api",
47+
];
48+
const chunks = [
49+
`data: {"id":"6e2cd91750904b7092f49bdca9083de1","object":"chat.completion.chunk",` +
50+
`"created":1711097175,"model":"mistral-small-latest","choices":[{"index":0,` +
51+
`"delta":{"role":"assistant","content":""},"finish_reason":null,"logprobs":null}]}\n\n`,
52+
...content.map((text) => {
53+
return (
54+
`data: {"id":"6e2cd91750904b7092f49bdca9083de1","object":"chat.completion.chunk",` +
55+
`"created":1711097175,"model":"mistral-small-latest","choices":[{"index":0,` +
56+
`"delta":{"role":"assistant","content":"${text}"},"finish_reason":null,"logprobs":null}]}\n\n`
57+
);
58+
}),
59+
`data: {"id":"6e2cd91750904b7092f49bdca9083de1","object":"chat.completion.chunk",` +
60+
`"created":1711097175,"model":"mistral-small-latest","choices":[{"index":0,` +
61+
`"delta":{"content":""},"finish_reason":"stop","logprobs":null}],` +
62+
`"usage":{"prompt_tokens":4,"total_tokens":36,"completion_tokens":32}}\n\n`,
63+
`data: [DONE]\n\n`,
64+
];
65+
const stream = new ReadableStream({
66+
async start(controller) {
67+
try {
68+
for (const chunk of chunks) {
69+
controller.enqueue(encoder.encode(chunk));
70+
}
71+
} finally {
72+
controller.close();
73+
}
2774
},
2875
});
29-
const response = new StreamingTextResponse(stream, {}, data);
3076

31-
return response;
77+
return new HttpResponse(stream, {
78+
status: 200,
79+
headers: {
80+
"Content-Type": "text/event-stream",
81+
"Cache-Control": "no-cache",
82+
Connection: "keep-alive",
83+
},
84+
});
3285
}),
3386
...servers,
3487
];

packages/blah/package.json

+2-1
Original file line numberDiff line numberDiff line change
@@ -30,7 +30,8 @@
3030
},
3131
"dependencies": {
3232
"@ai-sdk/provider": "0.0.21",
33-
"@ai-sdk/provider-utils": "1.0.16"
33+
"@ai-sdk/provider-utils": "1.0.16",
34+
"eventsource-parser": "1.1.2"
3435
},
3536
"devDependencies": {
3637
"@types/node": "^18",
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,48 @@
1+
import { ZodSchema } from "zod";
2+
import {
3+
ResponseHandler,
4+
ParseResult,
5+
extractResponseHeaders,
6+
safeParseJSON,
7+
} from "@ai-sdk/provider-utils";
8+
import { EmptyResponseBodyError } from "@ai-sdk/provider";
9+
import {
10+
EventSourceParserStream,
11+
ParsedEvent,
12+
} from "eventsource-parser/stream";
13+
14+
export const createEventSourceResponseHandler =
15+
<T>(
16+
chunkSchema: ZodSchema<T>
17+
): ResponseHandler<ReadableStream<ParseResult<T>>> =>
18+
async ({ response }: { response: Response }) => {
19+
const responseHeaders = extractResponseHeaders(response);
20+
21+
if (response.body == null) {
22+
throw new EmptyResponseBodyError({});
23+
}
24+
25+
return {
26+
responseHeaders,
27+
value: response.body
28+
.pipeThrough(new TextDecoderStream())
29+
.pipeThrough(new EventSourceParserStream())
30+
.pipeThrough(
31+
new TransformStream<ParsedEvent, ParseResult<T>>({
32+
transform({ data }, controller) {
33+
// ignore the 'DONE' event that e.g. OpenAI sends:
34+
if (data === "[DONE]") {
35+
return;
36+
}
37+
38+
controller.enqueue(
39+
safeParseJSON({
40+
text: data,
41+
schema: chunkSchema,
42+
})
43+
);
44+
},
45+
})
46+
),
47+
};
48+
};

packages/blah/src/mistral-chat-language-model.ts

+6-3
Original file line numberDiff line numberDiff line change
@@ -8,10 +8,11 @@ import {
88
FetchFunction,
99
ParseResult,
1010
combineHeaders,
11-
createEventSourceResponseHandler,
11+
// createEventSourceResponseHandler,
1212
createJsonResponseHandler,
1313
postJsonToApi,
1414
} from "@ai-sdk/provider-utils";
15+
import { createEventSourceResponseHandler } from "./createEventSourceResponseHandler";
1516
import { z } from "zod";
1617
import { convertToMistralChatMessages } from "./convert-to-mistral-chat-messages";
1718
import { mapMistralFinishReason } from "./map-mistral-finish-reason";
@@ -210,8 +211,10 @@ export class MistralChatLanguageModel implements LanguageModelV1 {
210211
const { args, warnings } = this.getArgs(options);
211212

212213
const { responseHeaders, value: response } = await postJsonToApi({
213-
url: `${this.config.baseURL}/chat/completions`,
214-
headers: combineHeaders(this.config.headers(), options.headers),
214+
// url: `${this.config.baseURL}/chat/completions`,
215+
// headers: combineHeaders(this.config.headers(), options.headers),
216+
url: `http://localhost:4321/mock/chat/completions`,
217+
headers: combineHeaders({}, options.headers),
215218
body: {
216219
...args,
217220
stream: true,

pnpm-lock.yaml

+6
Some generated files are not rendered by default. Learn more about customizing how changed files appear on GitHub.

0 commit comments

Comments
 (0)