@@ -19,16 +19,73 @@ export const handlers = [
   http.get("/user", () => {
     return HttpResponse.json({ firstName: "alex" });
   }),
-  http.post("/mock/api/completion", async () => {
-    const data = new StreamData();
-    const stream = OpenAIStream(await fetch(DEFAULT_TEST_URL), {
-      onFinal() {
-        data.close();
+  // The mock API below is for the AI SDK UI component sending a request to the OpenAI API.
+  // http.post("/mock/chat/completions", async () => {
+  //   const data = new StreamData();
+  //   const stream = OpenAIStream(await fetch(DEFAULT_TEST_URL), {
+  //     onFinal() {
+  //       data.close();
+  //     },
+  //   });
+  //   const response = new StreamingTextResponse(stream, {}, data);
+  //   return response;
+  // }),
+  // The mock API below is for a custom provider sending a request to the OpenAI API.
+  http.post("/mock/chat/completions", async () => {
+    const encoder = new TextEncoder();
+    const content = [
+      "Hello",
+      ", ",
+      "world!",
+      " You",
+      " are",
+      " calling",
+      " a",
+      " custom",
+      " ai",
+      " api",
+    ];
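+    // Each chunk mimics the OpenAI-compatible SSE wire format: a "data: " prefix,
+    // a JSON chat.completion.chunk payload, and a blank-line terminator.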
+    const chunks = [
+      `data: {"id":"6e2cd91750904b7092f49bdca9083de1","object":"chat.completion.chunk",` +
+        `"created":1711097175,"model":"mistral-small-latest","choices":[{"index":0,` +
+        `"delta":{"role":"assistant","content":""},"finish_reason":null,"logprobs":null}]}\n\n`,
+      ...content.map((text) => {
+        return (
+          `data: {"id":"6e2cd91750904b7092f49bdca9083de1","object":"chat.completion.chunk",` +
+          `"created":1711097175,"model":"mistral-small-latest","choices":[{"index":0,` +
+          `"delta":{"role":"assistant","content":"${text}"},"finish_reason":null,"logprobs":null}]}\n\n`
+        );
+      }),
+      `data: {"id":"6e2cd91750904b7092f49bdca9083de1","object":"chat.completion.chunk",` +
+        `"created":1711097175,"model":"mistral-small-latest","choices":[{"index":0,` +
+        `"delta":{"content":""},"finish_reason":"stop","logprobs":null}],` +
+        `"usage":{"prompt_tokens":4,"total_tokens":36,"completion_tokens":32}}\n\n`,
+      `data: [DONE]\n\n`,
+    ];
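+    // Push every chunk into a ReadableStream, emulating a server that streams tokens.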
+    const stream = new ReadableStream({
+      async start(controller) {
+        try {
+          for (const chunk of chunks) {
+            controller.enqueue(encoder.encode(chunk));
+          }
+        } finally {
+          controller.close();
+        }
       },
     });
-    const response = new StreamingTextResponse(stream, {}, data);
 
-    return response;
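+    // Respond with SSE headers so the client reads the body as an event stream.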
+    return new HttpResponse(stream, {
+      status: 200,
+      headers: {
+        "Content-Type": "text/event-stream",
+        "Cache-Control": "no-cache",
+        Connection: "keep-alive",
+      },
+    });
   }),
   ...servers,
 ];