
Commit b482410
Revert the dependency removal that broke switching to an older conversation, plus some more styling and error-handling changes.
rohan-uiuc committed Dec 6, 2023
1 parent e9f01fd commit b482410
Showing 2 changed files with 17 additions and 14 deletions.
23 changes: 13 additions & 10 deletions src/components/Chat/Chat.tsx
@@ -172,18 +172,19 @@ export const Chat = memo(({ stopConversationRef, courseMetadata }: Props) => {
}
}

- const handleImageContent = async (message: Message, endpoint: string, updatedConversation: Conversation, searchQuery: string) => {
+ const handleImageContent = async (message: Message, endpoint: string, updatedConversation: Conversation, searchQuery: string, controller: AbortController) => {
const imageContent = (message.content as Content[]).filter(content => content.type === 'image_url');
if (imageContent.length > 0) {
homeDispatch({ field: 'isImg2TextLoading', value: true })
// This is where prompt for first call is created
const chatBody: ChatBody = {
model: updatedConversation.model,
messages: [
{
...message,
content: [
...imageContent,
- { type: 'text', text: 'Describe the image(s), be concise' }
+ { type: 'text', text: 'Provide detailed description of the image(s) focusing on any text (OCR information), distinct objects, colors, and actions depicted. Include contextual information, subtle details, and specific terminologies relevant for semantic document retrieval.' }
]
}
],
@@ -200,6 +201,7 @@ export const Chat = memo(({ stopConversationRef, courseMetadata }: Props) => {
'Content-Type': 'application/json',
},
body: JSON.stringify(chatBody),
+ signal: controller.signal,
})
.then(async response => {
if (!response.ok) {
@@ -262,6 +264,7 @@ export const Chat = memo(({ stopConversationRef, courseMetadata }: Props) => {
})
.catch(error => {
console.error('Error in chat.tsx running onResponseCompletion():', error);
+ controller.abort();
})
.finally(() => {
homeDispatch({ field: 'isImg2TextLoading', value: false })
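
The controller threaded into handleImageContent in the hunks above means the image-description request now shares an abort signal with the rest of the send flow, so an error in the description call (or an abort from the caller) can cancel everything in flight. A minimal sketch of that pattern, with hypothetical endpoint names rather than this app's actual routes:

```ts
// Sketch: one AbortController shared by a preprocessing call and the main call,
// so aborting it (on an error or a user "stop") cancels both requests.
async function sendWithPreprocessing(message: string): Promise<Response | null> {
  const controller = new AbortController()

  try {
    // Step 1: hypothetical image-to-text call, cancellable via the shared signal.
    const describe = await fetch('/api/describe-image', {
      method: 'POST',
      headers: { 'Content-Type': 'application/json' },
      body: JSON.stringify({ message }),
      signal: controller.signal,
    })
    if (!describe.ok) {
      // Mirror the catch branch above: abort so the follow-up call never runs.
      controller.abort()
      throw new Error(`Image description failed: ${describe.status}`)
    }
    const description = await describe.text()

    // Step 2: the main chat call reuses the same signal.
    return await fetch('/api/chat', {
      method: 'POST',
      headers: { 'Content-Type': 'application/json' },
      body: JSON.stringify({ message, description }),
      signal: controller.signal,
    })
  } catch (err) {
    if ((err as Error).name === 'AbortError') return null // cancelled deliberately
    throw err
  }
}
```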
@@ -283,6 +286,8 @@ export const Chat = memo(({ stopConversationRef, courseMetadata }: Props) => {
// THIS IS WHERE MESSAGES ARE SENT.
const handleSend = useCallback(
async (message: Message, deleteCount = 0, plugin: Plugin | null = null) => {

+ setCurrentMessage(message)
// New way with React Context API
// TODO: MOVE THIS INTO ChatMessage
// console.log('IN handleSend: ', message)
@@ -319,9 +324,11 @@ export const Chat = memo(({ stopConversationRef, courseMetadata }: Props) => {

const endpoint = getEndpoint(plugin);

+ const controller = new AbortController()

// Run image to text conversion, attach to Message object.
if (Array.isArray(message.content)) {
- searchQuery = await handleImageContent(message, endpoint, updatedConversation, searchQuery);
+ searchQuery = await handleImageContent(message, endpoint, updatedConversation, searchQuery, controller);
}

// Run context search, attach to Message object.
@@ -356,9 +363,6 @@ export const Chat = memo(({ stopConversationRef, courseMetadata }: Props) => {
})
}


- const controller = new AbortController()

// This is where we call the OpenAI API
const response = await fetch(endpoint, {
method: 'POST',
@@ -761,8 +765,7 @@ export const Chat = memo(({ stopConversationRef, courseMetadata }: Props) => {

homeDispatch({ field: 'conversations', value: updatedConversations });
saveConversations(updatedConversations);
- }, []);
- // }, [selectedConversation, conversations]); // Uncomment if running into issues with useCallback
+ }, [selectedConversation, conversations]);


return (
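
Restoring the [selectedConversation, conversations] dependency array in the hunk above is what the commit message refers to: with an empty array, handleSend is memoized once and closes over the first render's selectedConversation, which is the likely reason switching to an older conversation broke. A minimal sketch of that stale-closure behaviour in a hypothetical component, not this codebase's actual code:

```tsx
import { useCallback, useState } from 'react'

type Conversation = { id: string; name: string }

// Hypothetical component illustrating why the dependency array matters.
export function ConversationSwitcher({ conversations }: { conversations: Conversation[] }) {
  const [selected, setSelected] = useState<Conversation>(conversations[0])

  // Broken: with [] the callback keeps the first render's `selected`,
  // so it always acts on the original conversation.
  const sendStale = useCallback(() => {
    console.log('sending to', selected.id)
  }, [])

  // Fixed: re-create the callback when `selected` changes, mirroring
  // the `}, [selectedConversation, conversations]);` restored above.
  const send = useCallback(() => {
    console.log('sending to', selected.id)
  }, [selected])

  return (
    <div>
      {conversations.map((c) => (
        <button key={c.id} onClick={() => setSelected(c)}>
          {c.name}
        </button>
      ))}
      <button onClick={send}>Send</button>
      <button onClick={sendStale}>Send (stale)</button>
    </div>
  )
}
```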
@@ -863,7 +866,7 @@ export const Chat = memo(({ stopConversationRef, courseMetadata }: Props) => {
contentRenderer={renderMessageContent}
messageIndex={index}
onEdit={(editedMessage) => {
- setCurrentMessage(editedMessage)
+ // setCurrentMessage(editedMessage)
handleSend(
editedMessage,
selectedConversation?.messages.length - index,
@@ -885,7 +888,7 @@ export const Chat = memo(({ stopConversationRef, courseMetadata }: Props) => {
stopConversationRef={stopConversationRef}
textareaRef={textareaRef}
onSend={(message, plugin) => {
- setCurrentMessage(message)
+ // setCurrentMessage(message)
handleSend(message, 0, plugin)
}}
onScrollDownClick={handleScrollDown}
8 changes: 4 additions & 4 deletions src/components/Chat/ChatMessage.tsx
@@ -429,12 +429,12 @@ export const ChatMessage: FC<Props> = memo(
if ((content.text as string).trim().startsWith('Image description:')) {
console.log("Image description found: ", content.text)
return (
- <Accordion key={index} className='shadow-lg rounded-md bg-[#2e026d]'>
- <Accordion.Item value="imageDescription">
- <Accordion.Control className={`text-gray-200 rounded-md hover:bg-purple-900 ${montserrat_paragraph.variable} font-montserratParagraph`}>
+ <Accordion variant='filled' key={index} className=' shadow-lg rounded-lg bg-[#2e026d]'>
+ <Accordion.Item value="imageDescription rounded-lg">
+ <Accordion.Control className={`text-gray-200 rounded-lg hover:bg-purple-900 ${montserrat_paragraph.variable} font-montserratParagraph`}>
Following image description will be used to search over your documents to provide intelligent responses
</Accordion.Control>
- <Accordion.Panel className={`bg-[#1d1f32] rounded-md text-gray-200 p-4 ${montserrat_paragraph.variable} font-montserratParagraph`}>
+ <Accordion.Panel className={`bg-[#1d1f32] rounded-lg text-gray-200 p-4 ${montserrat_paragraph.variable} font-montserratParagraph`}>
{content.text}
</Accordion.Panel>
</Accordion.Item>
