-import { KeyboardEventHandler, SyntheticEvent, useEffect, useRef, useState } from 'react'
-import styleClasses from "./conversation.module.scss"
-import { ActionIcon, Group, rem, Slider, Stack, Text, Textarea, Title, Tooltip } from '@mantine/core'
-import { IconArrowRight, IconFilePlus, IconMessagePlus } from '@tabler/icons-react'
-import { conversationSelector, doConversation, newConversation } from '../../redux/Conversation/ConversationSlice'
-import { ConversationMessage } from '../Message/conversationMessage'
-import { useAppDispatch, useAppSelector } from '../../redux/store'
-import { Message, MessageRole } from '../../redux/Conversation/Conversation'
-import { UiFeatures } from '../../common/Sandbox'
-import { getCurrentTimeStamp } from '../../common/util'
-import { useDisclosure } from '@mantine/hooks'
-import DataSource from './DataSource'
-import { ConversationSideBar } from './ConversationSideBar'
+// Copyright (C) 2024 Intel Corporation
+// SPDX-License-Identifier: Apache-2.0
 
-type ConversationProps = {
-  title: string
-  enabledUiFeatures: UiFeatures
-}
-
-const Conversation = ({ title, enabledUiFeatures }: ConversationProps) => {
-  const [prompt, setPrompt] = useState<string>("")
-  const [systemPrompt, setSystemPrompt] = useState<string>("You are a helpful assistant.")
-  const promptInputRef = useRef<HTMLTextAreaElement>(null)
-  const [fileUploadOpened, { open: openFileUpload, close: closeFileUpload }] = useDisclosure(false)
+import { KeyboardEventHandler, SyntheticEvent, useEffect, useRef, useState } from 'react';
+import styleClasses from "./conversation.module.scss";
+import { ActionIcon, Button, Collapse, Group, rem, Slider, Stack, Text, Textarea, Title, Tooltip } from '@mantine/core';
+import { IconArrowRight, IconChevronDown, IconChevronUp, IconFilePlus, IconMessagePlus } from '@tabler/icons-react';
 
-  const { conversations, onGoingResult, selectedConversationId } = useAppSelector(conversationSelector)
-  const dispatch = useAppDispatch()
-  const selectedConversation = conversations.find(x => x.conversationId === selectedConversationId)
-  const scrollViewport = useRef<HTMLDivElement>(null)
+import { conversationSelector, doConversation, newConversation, isAgentSelector, getCurrentAgentSteps } from '../../redux/Conversation/ConversationSlice';
+import { ConversationMessage } from '../Message/conversationMessage';
+import { useAppDispatch, useAppSelector } from '../../redux/store';
+import { Message, MessageRole } from '../../redux/Conversation/Conversation';
+import { UiFeatures } from '../../common/Sandbox';
+import { getCurrentTimeStamp } from '../../common/util';
+import { useDisclosure } from '@mantine/hooks';
+import DataSource from './DataSource';
+import { ConversationSideBar } from './ConversationSideBar';
 
-  const [tokenLimit, setTokenLimit] = useState<number>(50)
-  const [temperature, setTemperature] = useState<number>(0.30)
+type ConversationProps = {
+  title: string;
+  enabledUiFeatures: UiFeatures;
+};
 
-  // State for tracking tokens and message processing time
-  const [messageTokenData, setMessageTokenData] = useState<{ [key: string]: { tokens: number; rate: number, time: number } }>({})
+const Conversation = ({ title, enabledUiFeatures }: ConversationProps) => {
+  const [prompt, setPrompt] = useState<string>("");
+  const [systemPrompt, setSystemPrompt] = useState<string>("You are a helpful assistant.");
+  const promptInputRef = useRef<HTMLTextAreaElement>(null);
+  const [fileUploadOpened, { open: openFileUpload, close: closeFileUpload }] = useDisclosure(false);
 
-  const [currentMessageIndex, setCurrentMessageIndex] = useState<number>(-1)
-
-  // New state to track the start time for calculating tokens per second
-  const [startTime, setStartTime] = useState<number | null>(null)
+  const { conversations, onGoingResult, selectedConversationId } = useAppSelector(conversationSelector);
+  const isAgent = useAppSelector(isAgentSelector);
+  const dispatch = useAppDispatch();
+  const selectedConversation = conversations.find(x => x.conversationId === selectedConversationId);
+  const scrollViewport = useRef<HTMLDivElement>(null);
 
-  // New state to manage the assistant's message placeholder
-  const [isAssistantTyping, setIsAssistantTyping] = useState<boolean>(false)
+  const [tokenLimit, setTokenLimit] = useState<number>(50);
+  const [temperature, setTemperature] = useState<number>(0.30);
 
-  const toSend = "Enter"
+  const [messageTokenData, setMessageTokenData] = useState<{ [key: string]: { tokens: number; rate: number; time: number } }>({});
+  const [currentMessageIndex, setCurrentMessageIndex] = useState<number>(-1);
+  const [startTime, setStartTime] = useState<number | null>(null);
+  const [isAssistantTyping, setIsAssistantTyping] = useState<boolean>(false);
+  const [showInferenceParams, setShowInferenceParams] = useState<boolean>(true);
 
-  // const systemPrompt: Partial<Message> = {
-  //   role: MessageRole.System,
-  //   content: "You are a helpful assistant",
-  // }
+  const toSend = "Enter";
 
   const handleSubmit = () => {
     const userPrompt: Message = {
       role: MessageRole.User,
       content: prompt,
       time: getCurrentTimeStamp(),
-    }
-
-    let messages: Partial<Message>[] = []
+    };
+
+    let messages: Partial<Message>[] = [];
     if (selectedConversation) {
       messages = selectedConversation.Messages.map((message) => {
-        return { role: message.role, content: message.content }
-      })
+        return { role: message.role, content: message.content };
+      });
     }
-
-    messages = [{ role: MessageRole.System, content: systemPrompt }, ...messages]
-
-    // Initialize token data for the new message
+
+    messages = [{ role: MessageRole.System, content: systemPrompt }, ...messages];
+
     setMessageTokenData((prev) => ({
       ...prev,
       [`${selectedConversationId}-${selectedConversation?.Messages.length}`]: { tokens: 0, rate: 0, time: 0 },
-    }))
-
-    // Set the current message index for tracking
-    setCurrentMessageIndex(selectedConversation?.Messages.length || 0)
-
+    }));
+
+    setCurrentMessageIndex(selectedConversation?.Messages.length || 0);
+
     doConversation({
-      conversationId: selectedConversationId,
-      userPrompt,
-      messages,
-      maxTokens: tokenLimit,
-      temperature: temperature,
-      model: "Intel/neural-chat-7b-v3-3",
-    })
-    setPrompt("")
-    setStartTime(Date.now()) // Set start time when the user submits the message
-    setIsAssistantTyping(true) // Show the assistant's typing placeholder immediately
-  }
+      conversationId: selectedConversationId,
+      userPrompt,
+      messages,
+      maxTokens: tokenLimit,
+      temperature: temperature,
+      model: "Intel/neural-chat-7b-v3-3",
+    });
+    setPrompt("");
+    setStartTime(Date.now());
+    setIsAssistantTyping(true);
+  };
 
   const scrollToBottom = () => {
-    scrollViewport.current!.scrollTo({ top: scrollViewport.current!.scrollHeight })
-  }
+    scrollViewport.current!.scrollTo({ top: scrollViewport.current!.scrollHeight });
+  };
 
   useEffect(() => {
-    // Update token data for the current message
     if (onGoingResult && startTime && currentMessageIndex !== -1) {
-      const tokenLength = onGoingResult.split(" ").length // Estimate tokens based on words
-      const currentTimestamp = Date.now()
-
-      const elapsedTime = (currentTimestamp - startTime) / 1000 // seconds
-      const tokenRate = elapsedTime > 0 ? tokenLength / elapsedTime : 0
-
-      // Update token data for the current message
+      let tokenLength: number;
+      if (isAgent) {
+        const currentSteps = getCurrentAgentSteps();
+        const allContent = currentSteps.flatMap(step => step.content).join(" ");
+        tokenLength = allContent.split(" ").length;
+      } else {
+        tokenLength = onGoingResult.split(" ").length;
+      }
+
+      const currentTimestamp = Date.now();
+      const elapsedTime = (currentTimestamp - startTime) / 1000;
+      const tokenRate = elapsedTime > 0 ? tokenLength / elapsedTime : 0;
+
       setMessageTokenData((prev) => ({
         ...prev,
         [`${selectedConversationId}-${currentMessageIndex}`]: { tokens: tokenLength, rate: tokenRate, time: elapsedTime },
-      }))
-
-      setIsAssistantTyping(false)
+      }));
+
+      setIsAssistantTyping(false);
     }
-
-    scrollToBottom()
-  }, [onGoingResult, startTime, selectedConversation?.Messages, currentMessageIndex])
+
+    scrollToBottom();
+  }, [onGoingResult, startTime, selectedConversation?.Messages, currentMessageIndex, isAgent]);
 
   const handleKeyDown: KeyboardEventHandler = (event) => {
     if (!event.shiftKey && event.key === toSend) {
-      handleSubmit()
+      handleSubmit();
       setTimeout(() => {
-        setPrompt("")
-      }, 1)
+        setPrompt("");
+      }, 1);
     }
-  }
+  };
 
   const handleNewConversation = () => {
-    dispatch(newConversation())
-  }
+    dispatch(newConversation());
+  };
 
   const handleChange = (event: SyntheticEvent) => {
-    event.preventDefault()
-    setPrompt((event.target as HTMLTextAreaElement).value)
-  }
+    event.preventDefault();
+    setPrompt((event.target as HTMLTextAreaElement).value);
+  };
 
   return (
     <div className={styleClasses.conversationWrapper}>
@@ -140,11 +138,10 @@ const Conversation = ({ title, enabledUiFeatures }: ConversationProps) => {
             <span className={styleClasses.spacer}></span>
             <Group>
               {selectedConversation && selectedConversation?.Messages.length > 0 && (
-                <ActionIcon onClick={handleNewConversation} disabled={onGoingResult != ""} size={32} variant="default">
+                <ActionIcon onClick={handleNewConversation} disabled={onGoingResult !== ""} size={32} variant="default">
                   <IconMessagePlus />
                 </ActionIcon>
               )}
-
               <Tooltip
                 label={enabledUiFeatures.dataprep ? "Upload File" : "Data Prep node is not found in the flow."}
                 color={enabledUiFeatures.dataprep ? "blue" : "red"}
@@ -167,13 +164,11 @@ const Conversation = ({ title, enabledUiFeatures }: ConversationProps) => {
           )}
 
           {selectedConversation?.Messages.map((message, index) => {
-            const messageKey = `${selectedConversationId}-${index - 1}`
-            const tokenData = messageTokenData[messageKey]
-            const elapsedTime = tokenData?.time ?? 0
-            const tokens = tokenData?.tokens ?? 0
-            const rate = tokenData?.rate ?? 0
-
-            console.log("Message: ", message, "Message Key: ", messageKey, "Token Data: ", tokenData)
+            const messageKey = `${selectedConversationId}-${index - 1}`;
+            const tokenData = messageTokenData[messageKey];
+            const elapsedTime = tokenData?.time ?? 0;
+            const tokens = tokenData?.tokens ?? 0;
+            const rate = tokenData?.rate ?? 0;
 
             return (
               <ConversationMessage
@@ -184,19 +179,21 @@ const Conversation = ({ title, enabledUiFeatures }: ConversationProps) => {
                 elapsedTime={message.role === MessageRole.Assistant ? elapsedTime : undefined}
                 tokenCount={message.role === MessageRole.Assistant ? tokens : undefined}
                 tokenRate={message.role === MessageRole.Assistant ? rate : undefined}
+                agentSteps={message.agentSteps || []}
               />
-            )
+            );
           })}
 
           {selectedConversation && isAssistantTyping && (
             <ConversationMessage
               key={`_ai_placeholder`}
               date={Date.now()}
               human={false}
-              message={"..."} // Placeholder text while the response is being generated
-              elapsedTime={0} // Start with 0 seconds
-              tokenCount={0} // Start with 0 tokens
-              tokenRate={0} // Start with 0 tokens per second
+              message={"..."}
+              elapsedTime={0}
+              tokenCount={0}
+              tokenRate={0}
+              agentSteps={getCurrentAgentSteps()}
             />
           )}
 
@@ -209,27 +206,41 @@ const Conversation = ({ title, enabledUiFeatures }: ConversationProps) => {
               elapsedTime={messageTokenData[`${selectedConversationId}-${currentMessageIndex}`]?.time}
               tokenCount={messageTokenData[`${selectedConversationId}-${currentMessageIndex}`]?.tokens}
               tokenRate={messageTokenData[`${selectedConversationId}-${currentMessageIndex}`]?.rate}
+              agentSteps={getCurrentAgentSteps()}
             />
-          )}
+          )}
         </div>
 
         <div className={styleClasses.conversatioSliders}>
-          <Stack style={{ marginLeft: '10px' }}>
-            <Title size="sm">Inference Settings</Title>
-            <Text size="sm">Token Limit: {tokenLimit}</Text>
-            <Slider value={tokenLimit} onChange={setTokenLimit} min={10} max={500} step={1} />
-            <Text size="sm">Temperature: {temperature.toFixed(2)}</Text>
-            <Slider value={temperature} onChange={setTemperature} min={0.10} max={1.00} step={0.01} />
-            <Textarea
-              label="System Prompt"
-              placeholder="Set system prompt"
-              value={systemPrompt}
-              onChange={(e) => setSystemPrompt(e.target.value)}
-              size="sm"
-              mb="sm"
-            />
-          </Stack>
-        </div>
+          <Button
+            variant="light"
+            size="xs"
+            radius="xl"
+            onClick={() => setShowInferenceParams(!showInferenceParams)}
+            rightSection={showInferenceParams ? <IconChevronDown size={14} /> : <IconChevronUp size={14} />}
+            mb="xs"
+          >
+            {showInferenceParams ? "Hide Inference Settings" : "Show Inference Settings"}
+          </Button>
+          <Collapse in={showInferenceParams} mb="md">
+            <Stack style={{ marginLeft: '10px' }}>
+              <Title size="sm">Inference Settings</Title>
+              <Text size="sm">Token Limit: {tokenLimit}</Text>
+              <Slider value={tokenLimit} onChange={setTokenLimit} min={10} max={500} step={1} />
+              <Text size="sm">Temperature: {temperature.toFixed(2)}</Text>
+              <Slider value={temperature} onChange={setTemperature} min={0.10} max={1.00} step={0.01} />
+              <Textarea
+                label="System Prompt"
+                placeholder="Set system prompt"
+                value={systemPrompt}
+                onChange={(e) => setSystemPrompt(e.target.value)}
+                size="sm"
+                mb="sm"
+              />
+            </Stack>
+          </Collapse>
+        </div>
+
 
         <div className={styleClasses.conversationActions}>
           <Tooltip
@@ -258,7 +269,7 @@ const Conversation = ({ title, enabledUiFeatures }: ConversationProps) => {
       </div>
       <DataSource opened={fileUploadOpened} onClose={closeFileUpload} />
     </div>
-  )
-}
+  );
+};
 
-export default Conversation
+export default Conversation;
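
For readers tracing the throughput numbers this commit surfaces: the reworked `useEffect` approximates the token count by whitespace-splitting the streamed text (for agent flows, the step contents returned by `getCurrentAgentSteps()` are concatenated first) and divides by the elapsed seconds since submit. Below is a minimal standalone sketch of that calculation; the `AgentStep` shape and the `estimateTokenStats` helper name are illustrative assumptions, not part of this commit.

```ts
// Sketch only: mirrors the token/rate bookkeeping in the useEffect above.
// `AgentStep` and `estimateTokenStats` are assumed names for illustration.
type AgentStep = { content: string[] };

function estimateTokenStats(
  streamedText: string,
  agentSteps: AgentStep[] | null,
  startTimeMs: number,
  nowMs: number = Date.now(),
): { tokens: number; rate: number; time: number } {
  // For agent flows, concatenate all step content before counting.
  const text = agentSteps
    ? agentSteps.flatMap((step) => step.content).join(" ")
    : streamedText;
  const tokens = text.split(" ").length;      // rough word-based token estimate
  const time = (nowMs - startTimeMs) / 1000;  // elapsed seconds
  const rate = time > 0 ? tokens / time : 0;  // estimated tokens per second
  return { tokens, rate, time };
}

// Example: 12 whitespace-separated chunks over ~3 s gives a rate of ~4 tokens/s.
const stats = estimateTokenStats(
  "one two three four five six seven eight nine ten eleven twelve",
  null,
  Date.now() - 3000,
);
console.log(stats);
```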