diff --git a/src/app/api/chat/openaiFunctionCall/route.ts b/src/app/api/chat/openaiFunctionCall/route.ts
index 40d6a0f6c..f7bf5448e 100644
--- a/src/app/api/chat/openaiFunctionCall/route.ts
+++ b/src/app/api/chat/openaiFunctionCall/route.ts
@@ -74,7 +74,7 @@ export async function POST(req: Request) {
 
   // Auto-trace LLM calls w/ langsmith
   const openai = wrapOpenAI(new OpenAI({ apiKey: decryptedKey }), {
-    project_name: 'test-custom-logs',
+    project_name: 'uiuc-chat-production',
     metadata: {
       user_email: conversation.user_email,
       conversation_id: conversation.id,
diff --git a/src/pages/api/UIUC-api/logConversationToSupabase.ts b/src/pages/api/UIUC-api/logConversationToSupabase.ts
index 15740dfeb..aff88b7bf 100644
--- a/src/pages/api/UIUC-api/logConversationToSupabase.ts
+++ b/src/pages/api/UIUC-api/logConversationToSupabase.ts
@@ -79,7 +79,7 @@ const logConversationToSupabase = async (req: any, res: any) => {
       Assistant:
         conversation.messages[conversation.messages.length - 1]?.content,
     },
-    project_name: 'test-custom-logs',
+    project_name: 'uiuc-chat-production',
     metadata: {
       projectName: course_name,
       conversation_id: conversation.id,
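
For context, here is a minimal, self-contained sketch of the pattern both hunks configure: wrapping the OpenAI client with LangSmith's `wrapOpenAI` so calls are traced into the `uiuc-chat-production` project. This is not the app's actual code; the API key and metadata values are placeholders, whereas the real route uses the decrypted user key and fields from the incoming conversation.

```ts
import OpenAI from 'openai'
import { wrapOpenAI } from 'langsmith/wrappers'

// Wrap the client so every call is auto-traced to LangSmith under the
// configured project_name, with the metadata attached to each run.
const openai = wrapOpenAI(new OpenAI({ apiKey: process.env.OPENAI_API_KEY }), {
  project_name: 'uiuc-chat-production',
  metadata: {
    user_email: 'someone@example.com', // placeholder
    conversation_id: 'abc123', // placeholder
  },
})

async function main() {
  // This completion is logged to the 'uiuc-chat-production' project.
  const completion = await openai.chat.completions.create({
    model: 'gpt-4o-mini',
    messages: [{ role: 'user', content: 'Hello from the tracing sketch' }],
  })
  console.log(completion.choices[0]?.message?.content)
}

main()
```

Since `project_name` is set per wrapper options rather than via the `LANGCHAIN_PROJECT` environment variable, both call sites had to be updated together to move traces out of `test-custom-logs`.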