Skip to content

Commit

Permalink
feat: dynamic additional data
Browse files Browse the repository at this point in the history
  • Loading branch information
yutakobayashidev committed Oct 11, 2023
1 parent 9f253d5 commit 1b1f576
Show file tree
Hide file tree
Showing 13 changed files with 328 additions and 124 deletions.
Binary file modified frontend/bun.lockb
Binary file not shown.
10 changes: 5 additions & 5 deletions frontend/package.json
Original file line number Diff line number Diff line change
Expand Up @@ -49,7 +49,7 @@
"dayjs": "^1.11.7",
"dotenv": "^16.0.3",
"eslint": "8.42.0",
"eslint-config-next": "13.4.4",
"eslint-config-next": "^13.5.4",
"face-api.js": "^0.22.2",
"framer-motion": "^10.12.16",
"graphql": "^16.6.0",
Expand All @@ -61,21 +61,22 @@
"kuromoji": "^0.1.2",
"kuromojin": "^3.0.0",
"langchain": "^0.0.95",
"next": "13.4.9",
"next": "^13.5.4",
"next-auth": "0.0.0-manual.e65faa1c",
"next-contentlayer": "^0.3.2",
"node-fetch": "2.6.7",
"open-graph-scraper": "^6.0.1",
"openai": "^4.5.0",
"playwright-aws-lambda": "^0.10.0",
"playwright-core": "^1.38.0",
"react": "18.2.0",
"react": "^18.2.0",
"react-d3-cloud": "^1.0.6",
"react-dom": "18.2.0",
"react-dom": "^18.2.0",
"react-hook-form": "^7.44.3",
"react-icons": "^4.8.0",
"react-markdown": "^8.0.7",
"react-textarea-autosize": "^8.4.1",
"recharts": "^2.8.0",
"reflect-metadata": "^0.1.13",
"rehype-raw": "^6.1.1",
"rehype-sanitize": "^5.0.1",
Expand All @@ -88,7 +89,6 @@
"typegraphql-prisma": "^0.27.0",
"typescript": "5.2.0-dev.20230606",
"video.js": "^8.5.2",
"vidstack": "^0.6.13",
"zact": "^0.0.2",
"zod": "^3.21.4"
},
Expand Down
67 changes: 27 additions & 40 deletions frontend/src/app/_components/Chatbot.tsx
Original file line number Diff line number Diff line change
@@ -1,24 +1,26 @@
"use client";

import { placeholderAtom } from "@src/store/placeholder";
import {
ArrowDownIcon,
ArrowUpIcon,
MeOutlinedIcon,
} from "@xpadev-net/designsystem-icons";
import { Country } from "@src/types/country";
import { ArrowDownIcon, ArrowUpIcon } from "@xpadev-net/designsystem-icons";
import { useChat } from "ai/react";
import cn from "classnames";
import { AnimatePresence, motion } from "framer-motion";
import { useAtom } from "jotai";
import { type Session } from "next-auth";
import { useState } from "react";
import { FaMagic } from "react-icons/fa";
import { IoMdSend } from "react-icons/io";
import { SiOpenai } from "react-icons/si";
import ReactMarkdown from "react-markdown";
import { toast } from "sonner";

export default function Chatbot({ user }: { user: Session["user"] }) {
import MessageItem from "../chat/Message";

export default function Chatbot({
countries,
user,
}: {
countries: Country[];
user: Session["user"];
}) {
const [isOpen, setIsOpen] = useState(false);

const variants = {
Expand All @@ -33,6 +35,7 @@ export default function Chatbot({ user }: { user: Session["user"] }) {
};

const {
data,
handleInputChange,
handleSubmit,
input,
Expand Down Expand Up @@ -87,38 +90,22 @@ export default function Chatbot({ user }: { user: Session["user"] }) {
>
<div className="grow overflow-y-auto">
{messages.length ? (
messages.map((m, i) => (
<div key={i} className="mb-4 flex items-start">
{user && m.role === "user" ? (
<img
src={user.image ?? "/noimage.png"}
alt={user.name ?? "不明"}
className={cn("h-8 w-8 rounded-md border shadow")}
messages.map((message, i) => {
const correspondingData = data
? data.find((d: any) => d.index === i)
: null;

return (
<div key={i} className="mb-5">
<MessageItem
countries={countries}
message={message}
user={user}
data={correspondingData}
/>
) : (
<div
className={cn(
"flex h-8 w-8 shrink-0 select-none items-center justify-center rounded-md border shadow",
m.role === "user" ? "bg-gray-100" : "bg-black"
)}
>
{m.role === "user" ? (
<MeOutlinedIcon
width="1em"
height="1em"
fill="currentColor"
className="h-4 w-4"
/>
) : (
<SiOpenai className="h-4 w-4 text-white" />
)}
</div>
)}
<ReactMarkdown className="prose prose-neutral prose-a:text-primary prose-a:no-underline hover:prose-a:underline prose-img:rounded-lg prose-img:shadow ml-4 max-w-none flex-1 space-y-2 overflow-hidden px-1">
{m.content}
</ReactMarkdown>
</div>
))
</div>
);
})
) : (
<div className="rounded-lg border bg-gray-50 px-4 py-3 text-sm text-gray-500">
<p className="mb-2">
Expand Down
23 changes: 15 additions & 8 deletions frontend/src/app/api/chat/functions.ts
Original file line number Diff line number Diff line change
@@ -1,6 +1,15 @@
import { conn } from "@src/lib/planetscale";
import { ChatCompletionCreateParams } from "openai/resources/chat";

// One data point produced by get_population after reshaping the raw API
// response: the nested `country` / `indicator` objects are flattened into
// `*_id` / `*_value` pairs (see the datum.country.id / datum.country.value
// mapping in get_population).
// NOTE(review): `date` is sorted with parseInt elsewhere in this file, so it
// is presumably a year as a string — confirm against the upstream API.
// NOTE(review): `value` is the population figure and may be null when the
// source has no observation for that year — confirm.
export type TransformedData = {
country_id: string;
country_value: string;
date: string;
indicator_id: string;
indicator_value: string;
value: number | null;
};

export const functions: ChatCompletionCreateParams.Function[] = [
{
name: "get_member_info",
Expand Down Expand Up @@ -89,10 +98,10 @@ async function get_population(countryCode: string) {
const data = result[1];

if (!data) {
return "Sorry, we could not retrieve population data because there was no data available for the country code.";
return "申し訳ありませんが、国番号に対応するデータがないため、人口データを取得できませんでした。";
}

const transformedData = data.map((datum: any) => {
const transformedData: TransformedData[] = data.map((datum: any) => {
return {
country_id: datum.country.id,
country_value: datum.country.value,
Expand All @@ -103,21 +112,21 @@ async function get_population(countryCode: string) {
};
});

transformedData.sort((a, b) => parseInt(a.date) - parseInt(b.date));

return transformedData;
} catch (e: any) {
return `Sorry, we could not retrieve population data due to an error: ${e.message}`;
return `申し訳ありませんが、エラーにより人口データを取得できませんでした`;
}
}

async function get_member_info(name: string) {
if (!conn) return null;

const query = "SELECT * FROM Member WHERE name = ? LIMIT 1";
const params = [name];
const data = await conn.execute(query, params);

if (data.rows.length === 0) {
return "Sorry, the member information could not be found.";
return "申し訳ありませんが、議員情報が見つかりませんでした。";
}

return data.rows[0];
Expand Down Expand Up @@ -152,8 +161,6 @@ async function meeting_list(args: any) {
};
});

console.log(newArray);

return newArray;
}

Expand Down
80 changes: 69 additions & 11 deletions frontend/src/app/api/chat/route.ts
Original file line number Diff line number Diff line change
@@ -1,6 +1,11 @@
import { Ratelimit } from "@upstash/ratelimit";
import { Redis } from "@upstash/redis";
import { OpenAIStream, StreamingTextResponse } from "ai";
import {
createStreamDataTransformer,
experimental_StreamData,
OpenAIStream,
StreamingTextResponse,
} from "ai";
import { OpenAI } from "openai";

import { functions, runFunction } from "./functions";
Expand Down Expand Up @@ -47,41 +52,94 @@ export async function POST(req: Request) {

const { messages } = await req.json();
const key = JSON.stringify(messages);
const cached = await await redis.get(key);
const cached = (await redis.get(key)) as any;

if (cached) {
return new Response(cached as any);
}
const data = new experimental_StreamData();

if (cached && cached.completion) {
console.log(cached);

const chunks: string[] = [];
for (let i = 0; i < cached.completion.length; i += 5) {
chunks.push(cached.completion.substring(i, i + 5));
}

const stream = new ReadableStream({
async start(controller) {
for (const chunk of chunks) {
const bytes = new TextEncoder().encode(chunk);
controller.enqueue(bytes);
await new Promise((r) =>
setTimeout(r, Math.floor(Math.random() * 40) + 10)
);
}
controller.close();
},
});

if (cached.data) {
cached.data.forEach((item: any) => {
data.append(item);
});
data.close();

const transformedStream = stream.pipeThrough(
createStreamDataTransformer(true)
);

return new StreamingTextResponse(transformedStream, {}, data);
} else {
return new StreamingTextResponse(stream);
}
}
const initialResponse = await openai.chat.completions.create({
function_call: "auto",
functions,
messages,
model: "gpt-3.5-turbo-0613",
model: "gpt-3.5-turbo",
stream: true,
temperature: 0,
});

const allDataAppends: any[] = [];

const stream = OpenAIStream(initialResponse, {
experimental_onFunctionCall: async (
{ name, arguments: args },
createFunctionCallMessages
) => {
const functionResponse = await runFunction(name, args);

const newMessages = createFunctionCallMessages(functionResponse);

const appendData = {
body: functionResponse,
index: messages.length - 1 + 1,
type: typeof functionResponse === "string" ? "error" : name,
};

data.append(appendData);
allDataAppends.push(appendData);

return openai.chat.completions.create({
functions,
messages: [...messages, ...newMessages],
model: "gpt-3.5-turbo-0613",
stream: true,
temperature: 0,
});
},
async onCompletion(completion) {
// Cache the response
await redis.set(key, completion);
experimental_streamData: true,
async onFinal(completion) {
data.close();

const cacheValue = {
completion: completion,
data: allDataAppends,
};
await redis.set(key, JSON.stringify(cacheValue));
await redis.expire(key, 60 * 60);
},
});

return new StreamingTextResponse(stream);
return new StreamingTextResponse(stream, {}, data);
}
Loading

0 comments on commit 1b1f576

Please sign in to comment.