Skip to content
Merged
3 changes: 2 additions & 1 deletion app/renderer/src/main/public/locales/en/aiAgent.json
Original file line number Diff line number Diff line change
Expand Up @@ -352,7 +352,8 @@
"totalInput": "Total Input",
"totalOutput": "Total Output",
"input": "Input",
"output": "Output"
"output": "Output",
"cache": "Cache"
},
"InstallLlamaServerModelPrompt": {
"installing": "Installing model environment",
Expand Down
3 changes: 2 additions & 1 deletion app/renderer/src/main/public/locales/zh/aiAgent.json
Original file line number Diff line number Diff line change
Expand Up @@ -352,7 +352,8 @@
"totalInput": "总输入",
"totalOutput": "总输出",
"input": "输入",
"output": "输出"
"output": "输出",
"cache": "缓存"
},
"InstallLlamaServerModelPrompt": {
"installing": "正在安装模型环境",
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -78,10 +78,11 @@ const AIContextToken: FC<{
// AI的Token消耗
const token = useCreation(() => {
const { consumption } = aiDataRef || {}
if (isEmpty(consumption)) return [0, 0]
if (isEmpty(consumption)) return [0, 0, 0]
const input = consumption?.input_consumption || 0
const output = consumption?.output_consumption || 0
return [formatNumberUnits(input), formatNumberUnits(output)]
const cacheHit = consumption?.cache_hit_token || 0
return [formatNumberUnits(input), formatNumberUnits(output), formatNumberUnits(cacheHit)]
}, [renderNumber, aiDataRef?.consumption])
Comment on lines -79 to 86

const isShowCost = useCreation(() => {
Expand Down Expand Up @@ -177,11 +178,12 @@ const AIContextToken: FC<{
<OutlineArrowdownIcon />
{token[1]}
</div>
<div className={classNames(styles['token-tag'], styles['download-token'])}>{token[2]}</div>
</div>
<YakitPopover
content={
<AIEchartsDetails
overallToken={[token[0], token[1]]}
overallToken={[token[0], token[1], token[2]]}
tierConsumption={aiDataRef?.consumption?.tier_consumption}
pressure={aiDataRef?.pressure}
firstCost={aiDataRef?.firstCost}
Expand Down Expand Up @@ -214,7 +216,7 @@ interface CurrentModel {
lightweightModels?: AIModelConfig
}
interface AIEchartsDetailsProps {
overallToken: [number | string, number | string]
overallToken: [number | string, number | string, number | string]
/** ref */
tierConsumption?: AIAgentGrpcApi.Consumption['tier_consumption']
/** ref */
Expand Down Expand Up @@ -254,17 +256,19 @@ const AIEchartsDetails: React.FC<AIEchartsDetailsProps> = memo((props) => {
}, [aiGlobalConfig.IntelligentModels, aiGlobalConfig.LightweightModels])

const intelligentToken = useCreation(() => {
if (!tierConsumption?.intelligent) return [0, 0]
if (!tierConsumption?.intelligent) return [0, 0, 0]
const input = tierConsumption.intelligent.input_consumption || 0
const output = tierConsumption.intelligent.output_consumption || 0
return [formatNumberUnits(input), formatNumberUnits(output)]
const cacheHit = tierConsumption.intelligent.cache_hit_token || 0
return [formatNumberUnits(input), formatNumberUnits(output), formatNumberUnits(cacheHit)]
}, [renderNumber, tierConsumption?.intelligent])

const lightweightToken = useCreation(() => {
if (!tierConsumption?.lightweight) return [0, 0]
if (!tierConsumption?.lightweight) return [0, 0, 0]
const input = tierConsumption.lightweight.input_consumption || 0
const output = tierConsumption.lightweight.output_consumption || 0
return [formatNumberUnits(input), formatNumberUnits(output)]
const cacheHit = tierConsumption.lightweight.cache_hit_token || 0
return [formatNumberUnits(input), formatNumberUnits(output), formatNumberUnits(cacheHit)]
}, [renderNumber, tierConsumption?.lightweight])
Comment on lines 258 to 272

// 上下文压力集合
Expand All @@ -283,6 +287,7 @@ const AIEchartsDetails: React.FC<AIEchartsDetailsProps> = memo((props) => {
const contextStatsData = useCreation(() => {
return getContextStatsData(contextStats?.data)
}, [renderNumber, contextStats])

// 上下文成分
const contextSectionsData = useCreation(() => {
if (!contextSections?.sections)
Expand Down Expand Up @@ -362,18 +367,22 @@ const AIEchartsDetails: React.FC<AIEchartsDetailsProps> = memo((props) => {
{overallToken[1]}
</div>
</div>
<div className={styles['token-overall']}>
<span>{t('AIContextToken.cache')}</span>
<div className={classNames(styles['token-tag'], styles['download-token'])}>{overallToken[2]}</div>
</div>
</div>
</div>
<div className={styles['token-content']}>
<AITokens
modelType={t('AiAgengt.intelligentModels')}
aiModel={currentModel?.intelligentModels}
token={[intelligentToken[0], intelligentToken[1]]}
token={[intelligentToken[0], intelligentToken[1], intelligentToken[2]]}
/>
<AITokens
modelType={t('AiAgengt.lightweightModels')}
aiModel={currentModel?.lightweightModels}
token={[lightweightToken[0], lightweightToken[1]]}
token={[lightweightToken[0], lightweightToken[1], lightweightToken[2]]}
/>
</div>
</div>
Expand Down Expand Up @@ -405,7 +414,7 @@ const AIEchartsDetails: React.FC<AIEchartsDetailsProps> = memo((props) => {
)}
{contextSectionsData?.sections.length > 0 && (
<div style={{ height: '320px' }}>
<ContextTable contextSectionsData={contextSectionsData} />
<ContextTable contextSectionsData={contextSectionsData} roleLabelMap={contextStats?.data?.role_labels} />
</div>
)}
</div>
Expand All @@ -416,7 +425,7 @@ const AIEchartsDetails: React.FC<AIEchartsDetailsProps> = memo((props) => {
interface AITokensProps {
modelType: string
aiModel?: AIModelConfig
token: [number | string, number | string]
token: [number | string, number | string, number | string]
}
const AITokens: React.FC<AITokensProps> = memo((props) => {
const { modelType, aiModel, token } = props
Expand Down Expand Up @@ -455,6 +464,11 @@ const AITokens: React.FC<AITokensProps> = memo((props) => {
</div>
<div className={classNames(styles['token-tag'], styles['download-token'])}>{token[1]}</div>
</div>
<div className={styles['diver']} />
<div className={styles['ai-tokens-item']}>
<div className={styles['token-item']}>{t('AIContextToken.cache')}</div>
<div className={classNames(styles['token-tag'], styles['download-token'])}>{token[2]}</div>
</div>
</div>
</div>
)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -138,7 +138,7 @@

.filter-dropdown {
padding-top: 4px;

background-color: var(--Colors-Use-Basic-Background);
.filter-options {
display: flex;
flex-direction: column;
Expand Down
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
import { Table, TableColumnsType } from 'antd'
import { FC, useMemo, useState } from 'react'
import { FC, useCallback, useMemo, useState } from 'react'
import styles from './ContextTable.module.scss'
import { OutlineChevrondownIcon, OutlineFilterIcon } from '@/assets/icon/outline'
import { YakitButton } from '@/components/yakitUI/YakitButton/YakitButton'
Expand All @@ -9,21 +9,30 @@ import { AIAgentGrpcApi } from '@/pages/ai-re-act/hooks/grpcApi'
import { YakitModal } from '@/components/yakitUI/YakitModal/YakitModal'
import { YakitEditor } from '@/components/yakitUI/YakitEditor/YakitEditor'

const roleTextMap: Record<string, string> = {
/** Display fallback used when legacy `sections` payloads carry only this small fixed set of roles (no server-provided role labels). */
const LEGACY_ROLE_LABELS: Record<string, string> = {
mixed: '混合',
runtime_context: '运行内容',
user_input: '用户输入',
system_prompt: '系统信息',
}

const roleFilters = Object.entries(roleTextMap).map(([value, text]) => ({ text, value }))
/**
 * Gathers every non-empty `role` value found anywhere in a sections tree.
 * Walks the tree iteratively with an explicit stack and accumulates into
 * `out`, which is also the return value (callers may pass a pre-seeded set).
 */
const collectSectionRoles = (nodes: AIAgentGrpcApi.AIContextSections[] | undefined, out = new Set<string>()) => {
    if (!nodes?.length) return out
    const stack: AIAgentGrpcApi.AIContextSections[] = [...nodes]
    while (stack.length) {
        const current = stack.pop()!
        if (current.role) out.add(current.role)
        if (current.children?.length) stack.push(...current.children)
    }
    return out
}

const RoleFilterDropdown: React.FC<{
roleFilters: { text: string; value: string }[]
selectedKeys: React.Key[]
setSelectedKeys: (keys: React.Key[]) => void
confirm: () => void
clearFilters?: () => void
}> = ({ selectedKeys, setSelectedKeys, confirm, clearFilters }) => {
}> = ({ roleFilters, selectedKeys, setSelectedKeys, confirm, clearFilters }) => {
const activeKeys = selectedKeys as string[]

return (
Expand All @@ -36,7 +45,13 @@ const RoleFilterDropdown: React.FC<{
<label className={styles['filter-option']} key={value}>
<YakitCheckbox
checked={activeKeys.includes(value)}
onChange={(event) => setSelectedKeys(event.target.checked ? [value] : [])}
onChange={(event) => {
if (event.target.checked) {
setSelectedKeys([...activeKeys, value])
} else {
setSelectedKeys(activeKeys.filter((k) => k !== value))
}
}}
>
{item.text}
</YakitCheckbox>
Expand Down Expand Up @@ -65,10 +80,42 @@ const RoleFilterDropdown: React.FC<{

const ContextTable: FC<{
contextSectionsData?: AIContextSectionsDetail
}> = ({ contextSectionsData }) => {
/** prompt_profile 首次锁定的 role_name -> role_name_zh,与上下文字节统计一致 */
roleLabelMap?: Record<string, string>
}> = ({ contextSectionsData, roleLabelMap }) => {
const [previewKey, setPreviewKey] = useState<string>('')
const [expandedRowKeys, setExpandedRowKeys] = useState<React.Key[]>([])

const roleFilters = useMemo(() => {
const merged: Record<string, string> = { ...LEGACY_ROLE_LABELS, ...(roleLabelMap || {}) }
const keys = new Set<string>()
if (roleLabelMap && Object.keys(roleLabelMap).length > 0) {
for (const k of Object.keys(roleLabelMap)) keys.add(k)
} else {
for (const k of Object.keys(LEGACY_ROLE_LABELS)) keys.add(k)
}
collectSectionRoles(contextSectionsData?.sections).forEach((r) => keys.add(r))
return [...keys].map((value) => ({
value,
text: merged[value] || value,
}))
}, [roleLabelMap, contextSectionsData?.sections])

const resolveRoleText = useCallback(
(role: string, row: AIAgentGrpcApi.AIContextSections) => {
if (roleLabelMap?.[role] && roleLabelMap?.[role] !== '') {
return roleLabelMap?.[role]
} else if (row.role_zh && row.role_zh !== '') {
return row.role_zh
} else if (LEGACY_ROLE_LABELS[role] && LEGACY_ROLE_LABELS[role] !== '') {
return LEGACY_ROLE_LABELS[role]
} else {
return role
}
},
[roleLabelMap],
)

const columns: TableColumnsType<AIAgentGrpcApi.AIContextSections> = useMemo(
() => [
{
Expand Down Expand Up @@ -100,10 +147,10 @@ const ContextTable: FC<{
filterIcon: (filtered: boolean) => (
<OutlineFilterIcon className={`${styles['filter-icon']} ${filtered ? styles['filter-icon-active'] : ''}`} />
),
filterDropdown: (props) => <RoleFilterDropdown {...props} />,
filterDropdown: (props) => <RoleFilterDropdown roleFilters={roleFilters} {...props} />,
onFilter: (value, record) => record.role === value,
render: (role: string) => {
const roleText = roleTextMap[role] || role
render: (role: string, row: AIAgentGrpcApi.AIContextSections) => {
const roleText = resolveRoleText(role, row)

return (
<span className={styles['context-sub-label']} title={roleText}>
Expand All @@ -130,7 +177,7 @@ const ContextTable: FC<{
},
},
],
[contextSectionsData?.summary],
[contextSectionsData?.summary, roleFilters, resolveRoleText],
)

const previewContent = useMemo(() => {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -40,15 +40,22 @@ export const isPerfDataChanged = (prev: PerfData, next: PerfData): boolean => {
// 上下文字节统计
const prevStats = prev.contextStats
const nextStats = next.contextStats
if (
prevStats?.prompt_bytes !== nextStats?.prompt_bytes ||
prevStats?.data?.prompt_bytes.length !== nextStats?.data?.prompt_bytes.length ||
prevStats?.data?.system_prompt_bytes.length !== nextStats?.data?.system_prompt_bytes.length ||
prevStats?.data?.runtime_context_bytes.length !== nextStats?.data?.runtime_context_bytes.length ||
prevStats?.data?.user_input_bytes.length !== nextStats?.data?.user_input_bytes.length ||
prevStats?.data?.times.length !== nextStats?.data?.times.length
)
return true
if (prevStats?.prompt_bytes !== nextStats?.prompt_bytes) return true
const prevD = prevStats?.data
const nextD = nextStats?.data
if (prevD?.times.length !== nextD?.times.length) return true
if (prevD?.prompt_bytes.length !== nextD?.prompt_bytes.length) return true

const prevOrderLen = prevD?.role_order?.length ?? 0
const nextOrderLen = nextD?.role_order?.length ?? 0
if (prevOrderLen > 0 || nextOrderLen > 0) {
if (prevOrderLen !== nextOrderLen) return true
if (prevOrderLen && prevD?.role_order?.join('|') !== nextD?.role_order?.join('|')) return true
const order = nextD?.role_order?.length ? nextD.role_order : prevD?.role_order || []
for (const role of order) {
if (prevD?.role_series?.[role]?.length !== nextD?.role_series?.[role]?.length) return true
}
}

// 上下文成分
if (prev.contextSections?.sections.length !== next.contextSections?.sections.length) return true
Expand Down Expand Up @@ -225,20 +232,20 @@ export const getThreshold = (pressure?: Record<AIModelTypeEnum, AIAgentGrpcApi.P
}

/**
 * Normalizes the raw context-stats payload for chart consumption.
 *
 * The visible span is corrupted by the diff rendering (old- and new-side
 * lines interleaved, duplicate `times`/`prompt_bytes` keys, an orphaned
 * destructuring line), so this reconstructs the intended new-side shape.
 * When `contextStats` is absent, every field is returned as an empty
 * array/object so callers never need per-field null checks; otherwise each
 * field falls back to an empty value when missing on the payload.
 */
export const getContextStatsData = (contextStats?: AIContextStatsDetail['data']) => {
    if (!contextStats) {
        return {
            times: [],
            prompt_bytes: [],
            role_order: [],
            role_labels: {},
            role_series: {},
        }
    }
    return {
        times: contextStats.times || [],
        prompt_bytes: contextStats.prompt_bytes || [],
        role_order: contextStats.role_order || [],
        role_labels: contextStats.role_labels || {},
        role_series: contextStats.role_series || {},
    }
}
Loading
Loading