Skip to content

Commit

Permalink
Working convo logging to LangSmith, prompt engineering needs work
Browse files Browse the repository at this point in the history
  • Loading branch information
KastanDay committed May 31, 2024
1 parent 3f31e34 commit c7d4e61
Show file tree
Hide file tree
Showing 8 changed files with 268 additions and 97 deletions.
19 changes: 17 additions & 2 deletions src/components/Chat/Chat.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -92,6 +92,7 @@ import handleTools, {
useFetchAllWorkflows,
} from '~/utils/functionCalling/handleFunctionCalling'
import { useFetchEnabledDocGroups } from '~/hooks/docGroupsQueries'
import { buildPrompt } from '~/pages/api/chat'

const montserrat_med = Montserrat({
weight: '500',
Expand Down Expand Up @@ -384,8 +385,7 @@ export const Chat = memo(({ stopConversationRef, courseMetadata }: Props) => {
controller: AbortController,
) => {
const imageContent = (message.content as Content[]).filter(
(content) =>
content.type === 'image_url'
(content) => content.type === 'image_url',
)
let imgDesc = ''
if (imageContent.length > 0) {
Expand Down Expand Up @@ -587,6 +587,21 @@ export const Chat = memo(({ stopConversationRef, courseMetadata }: Props) => {
isImage: false,
}

// src/pages/api/buildPrompt.ts
const buildPromptResponse = await fetch('/api/buildPrompt', {
method: 'POST',
headers: {
'Content-Type': 'application/json',
},
body: JSON.stringify(chatBody),
})
chatBody.conversation = await buildPromptResponse.json()
updatedConversation = chatBody.conversation
// homeDispatch({
// field: 'selectedConversation',
// value: chatBody.conversation,
// })

// Call the OpenAI API
const response = await fetch(endpoint, {
method: 'POST',
Expand Down
2 changes: 1 addition & 1 deletion src/components/UIUC-Components/N8nWorkflowsTable.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -53,7 +53,7 @@ export const N8nWorkflowsTable = ({
isSuccess: isSuccess,
isError: isErrorTools,
refetch: refetchWorkflows,
} = useFetchAllWorkflows(course_name, n8nApiKey, 10, 'true', true)
} = useFetchAllWorkflows(course_name, n8nApiKey, 20, 'true', true)

const mutate_active_flows = useMutation({
mutationFn: async ({ id, checked }: { id: string; checked: boolean }) => {
Expand Down
84 changes: 67 additions & 17 deletions src/pages/api/UIUC-api/logConversationToSupabase.ts
Original file line number Diff line number Diff line change
@@ -1,10 +1,12 @@
import { supabase } from '@/utils/supabaseClient'
import { traceable } from 'langsmith/traceable'
import { Conversation } from '~/types/chat'
import { buildPrompt } from '../chat'
import { Content, Conversation } from '~/types/chat'
import { RunTree } from 'langsmith'

const logConversationToSupabase = async (req: any, res: any) => {
const { course_name, conversation } = req.body
const { course_name, conversation } = req.body as {
course_name: string
conversation: Conversation
}

const { data, error } = await supabase.from('llm-convo-monitor').upsert(
[
Expand All @@ -23,21 +25,69 @@ const logConversationToSupabase = async (req: any, res: any) => {
console.log('error form supabase:', error)
}

// TODO get userMessage from BuildPrompt
// const userMessage = buildPrompt(conversation, openaiKey, course_name, metadata)
console.log('👇👇👇👇👇👇👇👇👇👇👇👇👇')
console.log(
'2nd Latest message object (user)',
conversation.messages[conversation.messages.length - 2],
)
console.log(
'Latest message object (assistant)',
conversation.messages[conversation.messages.length - 1],
)
console.log('full convo id', conversation.id)
console.log(
'User message',
(
conversation.messages[conversation.messages.length - 2]
?.content[0] as Content
).text,
)
console.log(
'Assistant message',
conversation.messages[conversation.messages.length - 2]?.content,
)
console.log(
'Engineered prompt',
conversation.messages[conversation.messages.length - 2]!
.finalPromtEngineeredMessage,
)
console.log(
'System message',
conversation.messages[conversation.messages.length - 2]!
.latestSystemMessage,
)
console.log('👆👆👆👆👆👆👆👆👆👆👆👆👆')

// console.log('👇👇👇👇👇👇👇👇👇👇👇👇👇')
// console.log('full userMessage', userMessage)
// console.log('👆👆👆👆👆👆👆👆👆👆👆👆👆')
// Log to Langsmith
const rt = new RunTree({
run_type: 'llm',
name: 'Final Response Log',
// inputs: { "Messages": conversation.messages },
inputs: {
'User input': (
conversation.messages[conversation.messages.length - 2]
?.content[0] as Content
).text,
'System message':
conversation.messages[conversation.messages.length - 2]!
.latestSystemMessage,
'Engineered prompt':
conversation.messages[conversation.messages.length - 2]!
.finalPromtEngineeredMessage,
},
outputs: {
Assistant:
conversation.messages[conversation.messages.length - 1]?.content,
},
project_name: 'test-custom-logs',
metadata: { projectName: course_name, conversation_id: conversation.id }, // "conversation_id" is a SPECIAL KEYWORD. CANNOT BE ALTERED: https://docs.smith.langchain.com/old/monitoring/faq/threads
// id: conversation.id, // DON'T USE - breaks the threading support
})

// TODO: Log to langsmith
// const chatModel = traceable(
// async (lastUserMessageAsSubmitted) => {
// return
// },
// { run_type: "llm", name: "logConversationSupabase", metadata: { projectName: course_name, contexts: lastContexts }, inputs: { lastUserMessageAsSubmitted }, outputs: { lastAIMessage } }
// )
// await chatModel(lastUserMessageAsSubmitted)
// End and submit the run
rt.end()
await rt.postRun()
console.log('✅✅✅✅✅✅✅✅ AFTER ALL LANGSMITH CALLS')

return res.status(200).json({ success: true })
}
Expand Down
48 changes: 48 additions & 0 deletions src/pages/api/buildPrompt.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,48 @@
// src/pages/api/chat.ts
import { CourseMetadata } from '~/types/courseMetadata'
import { getCourseMetadata } from '~/pages/api/UIUC-api/getCourseMetadata'
// @ts-expect-error - no types
import wasm from '../../../node_modules/@dqbd/tiktoken/lite/tiktoken_bg.wasm?module'
import tiktokenModel from '@dqbd/tiktoken/encoders/cl100k_base.json'
import { Tiktoken, init } from '@dqbd/tiktoken/lite/init'
import { OpenAIError, OpenAIStream } from '@/utils/server'
import {
ChatBody,
Content,
ContextWithMetadata,
Conversation,
MessageType,
OpenAIChatMessage,
ToolOutput,
UIUCTool,
} from '@/types/chat'
import { NextResponse } from 'next/server'
import { decrypt, isEncrypted } from '~/utils/crypto'
import { buildPrompt } from './chat'

// Next.js route segment config: run this API route on the Edge runtime
// (V8 isolates — low latency, streaming-friendly, but no Node.js APIs).
export const config = {
  runtime: 'edge',
}

// A POST request endpoint that just calls buildPrompt and returns that as a json body.
/**
 * POST /api/buildPrompt
 *
 * Thin endpoint that runs `buildPrompt` over the submitted conversation and
 * returns the updated conversation as JSON. The caller (Chat.tsx) assigns the
 * response body directly to `chatBody.conversation`, so on failure we MUST
 * signal an error status — otherwise the `{ error }` payload would be
 * silently treated as a valid conversation.
 *
 * Request body: ChatBody — { conversation, key, course_name, courseMetadata, isImage }
 * Response: 200 with the updated Conversation, or 500 with { error: string }.
 */
export default async (req: Request): Promise<NextResponse> => {
  try {
    const { conversation, key, course_name, courseMetadata, isImage } =
      (await req.json()) as ChatBody

    const updatedConversation = await buildPrompt({
      conversation,
      rawOpenaiKey: key,
      projectName: course_name,
      courseMetadata,
      isImage,
    })

    // NextResponse.json sets the application/json Content-Type header,
    // unlike hand-rolling `new NextResponse(JSON.stringify(...))`.
    return NextResponse.json(updatedConversation)
  } catch (error) {
    console.error('Error in buildPromptAPI:', error)
    // Narrow the `unknown` catch variable instead of asserting `as Error`,
    // and return an explicit 500 so the client can detect the failure.
    const message = error instanceof Error ? error.message : String(error)
    return NextResponse.json({ error: message }, { status: 500 })
  }
}
Loading

0 comments on commit c7d4e61

Please sign in to comment.