diff --git a/auto-imports.d.ts b/auto-imports.d.ts
index 78813d8f..76e1ee65 100644
--- a/auto-imports.d.ts
+++ b/auto-imports.d.ts
@@ -3,6 +3,7 @@
 // @ts-nocheck
 // noinspection JSUnusedGlobalSymbols
 // Generated by unplugin-auto-import
+// biome-ignore lint: disable
 export {}
 declare global {
   const ElMessage: typeof import('element-plus/es')['ElMessage']
diff --git a/src/ai/prompt.ts b/src/ai/prompt.ts
index d176aa6d..e209e5c0 100644
--- a/src/ai/prompt.ts
+++ b/src/ai/prompt.ts
@@ -55,10 +55,20 @@ const shortDescPrompt = {
   key: "desc",
   content:
     "请为这篇文章生成简明扼要的摘要,只处理文本,尽量返回中文摘要。" +
-    "摘要长度不超过255个中文字符或512个英文字符。" +
+    "摘要长度不超过1024个中文字符或2048个英文字符。" +
     "输出为 JSON 格式,键名为 desc,结果需放在 {} 内。" +
     "完整结果必须是合法JSON,不得包含非法 JSON 字符。",
 }
+const shortDescPromptStream = {
+  title: "自动提取摘要",
+  description: "从文章内容生成文章摘要",
+  key: "desc",
+  content:
+    "请为这篇文章生成简明扼要的摘要,只处理文本,尽量返回中文摘要。" +
+    "摘要长度不超过1024个中文字符或2048个英文字符。" +
+    "输出为文本格式,内容为语义化的摘要。" +
+    "完整结果必须是文本摘要,不得包含非法字符。",
+}
 export type ShortDescAIResult = {
   desc: string
 }
@@ -106,6 +116,7 @@ export type CategoryAIResult = {
 const prompt = {
   titlePrompt,
   shortDescPrompt,
+  shortDescPromptStream,
   tagPrompt,
   categoryPrompt,
 }
diff --git a/src/components/publish/form/PublishDescription.vue b/src/components/publish/form/PublishDescription.vue
index 4f79ee7e..4a7fb358 100644
--- a/src/components/publish/form/PublishDescription.vue
+++ b/src/components/publish/form/PublishDescription.vue
@@ -94,27 +94,45 @@ const emit = defineEmits(["emitSyncDesc"])
 
 const handleMakeDesc = async () => {
   formData.isDescLoading = true
+  const isStream = true
   try {
-    // if (formData.useAi) {
-    const inputWord = prompt.shortDescPrompt.content
-    const { chat, getChatInput } = useChatGPT()
-    const chatText = await chat(inputWord, {
-      name: "desc",
-      systemMessage: getChatInput(formData?.md, formData.html),
-    })
-    if (StrUtil.isEmptyString(chatText)) {
-      ElMessage.error("请求错误,请在偏好设置配置请求地址和ChatGPT key!")
-      return
+    if (isStream) {
+      const inputWord = prompt.shortDescPromptStream.content
+      const { chat, getChatInput } = useChatGPT()
+      formData.desc = ""
+      const chatResp = await chat(inputWord, {
+        name: "desc",
+        systemMessage: getChatInput(formData?.md, formData.html),
+        stream: true,
+        onProgress: (partialResponse) => {
+          formData.desc = partialResponse.text
+          logger.debug("partialResponse=>", partialResponse.text)
+        },
+        timeoutMs: 2 * 60 * 1000,
+      })
+      logger.debug("chatResp=>", chatResp)
+      // formData.desc = chatResp.text
+    } else {
+      const inputWord = prompt.shortDescPrompt.content
+      const { chat, getChatInput } = useChatGPT()
+      const chatText = await chat(inputWord, {
+        name: "desc",
+        systemMessage: getChatInput(formData?.md, formData.html),
+      })
+      if (StrUtil.isEmptyString(chatText)) {
+        ElMessage.error("请求错误,请在偏好设置配置请求地址和ChatGPT key!")
+        return
+      }
+      const resJson = JsonUtil.safeParse(chatText, {} as ShortDescAIResult)
+      if (StrUtil.isEmptyString(resJson?.desc)) {
+        throw new Error("文档信息量太少,未能抽取有效信息")
+      }
+      formData.desc = resJson.desc
+      logger.info("使用AI智能生成的摘要结果 =>", {
+        inputWord: inputWord,
+        chatText: chatText,
+      })
     }
-    const resJson = JsonUtil.safeParse(chatText, {} as ShortDescAIResult)
-    if (StrUtil.isEmptyString(resJson?.desc)) {
-      throw new Error("文档信息量太少,未能抽取有效信息")
-    }
-    formData.desc = resJson.desc
-    logger.info("使用AI智能生成的摘要结果 =>", {
-      inputWord: inputWord,
-      chatText: chatText,
-    })
 
     // 自部署无监督摘要
     // if (StrUtil.isEmptyString(formData.html)) {
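For context, a minimal, self-contained sketch of the streaming pattern the reworked `handleMakeDesc` relies on. The `chatgpt` package calls (`ChatGPTAPI`, `sendMessage`, `onProgress`, `timeoutMs`) are the ones this diff already uses; the function name `streamSummary`, the inline prompt text, the env-var key handling, and the hard-coded completion parameters are illustrative stand-ins, since the project itself goes through `useChatGPT()` and its preference store.

```ts
// Sketch only (not project code): streaming a summary with the `chatgpt` package.
import { ChatGPTAPI } from "chatgpt"

// Stands in for prompt.shortDescPromptStream.content
const summaryPrompt = "请为这篇文章生成简明扼要的摘要,只处理文本,尽量返回中文摘要。"

export const streamSummary = async (article: string, onPartial: (text: string) => void): Promise<string> => {
  const api = new ChatGPTAPI({
    apiKey: process.env.OPENAI_API_KEY ?? "",
    completionParams: {
      // In the plugin these come from the AI preferences (model / max_tokens / temperature)
      model: "gpt-3.5-turbo",
      max_tokens: 1024,
      temperature: 0.7,
    },
  })

  const res = await api.sendMessage(summaryPrompt, {
    systemMessage: article, // the article text is passed as the system message, as in the diff
    stream: true,
    timeoutMs: 2 * 60 * 1000, // streaming a long summary can take a while
    // onProgress delivers the accumulated text so far, so repeatedly assigning it to a
    // bound field (formData.desc in the component) produces a typewriter effect.
    onProgress: (partial) => onPartial(partial.text),
  })
  return res.text // final, complete summary
}
```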
diff --git a/src/composables/useChatGPT.ts b/src/composables/useChatGPT.ts
index 06c879a1..5ab80b72 100644
--- a/src/composables/useChatGPT.ts
+++ b/src/composables/useChatGPT.ts
@@ -66,6 +66,11 @@ const useChatGPT = () => {
         debug: isDev,
         // workaround for https://github.com/transitive-bullshit/chatgpt-api/issues/592
         fetch: self.fetch.bind(self),
+        completionParams: {
+          model: pref.value.experimentalAIApiModel,
+          max_tokens: pref.value.experimentalAIApiMaxTokens,
+          temperature: pref.value.experimentalAIApiTemperature,
+        },
       })
     }
   } catch (e) {
@@ -89,16 +94,28 @@ const useChatGPT = () => {
    * const chatResponse = await chat('你好,ChatGPT!');
    * console.log(chatResponse); // ChatGPT 生成的响应
    */
-  const chat = async (q: string, opts?: SendMessageOptions): Promise<string> => {
+  const chat = async (q: string, opts?: SendMessageOptions): Promise<any> => {
     try {
       const api = await getAPI()
       // 使用 ChatGPTAPI 实例进行聊天操作
+      opts.completionParams = {
+        ...opts.completionParams,
+        model: pref.value.experimentalAIApiModel,
+        max_tokens: pref.value.experimentalAIApiMaxTokens,
+        temperature: pref.value.experimentalAIApiTemperature,
+      }
+      logger.debug("chat q =>", { q, opts })
       const res = await api.sendMessage(q, opts)
       logger.debug("chat res =>", res)
-      return res.text
+      if (opts.stream) {
+        return res
+      } else {
+        return res.text
+      }
     } catch (e) {
       logger.error("Chat encountered an error:", e)
+      ElMessage.error("Chat encountered an error:" + e)
     }
diff --git a/src/models/publishPreferenceCfg.ts b/src/models/publishPreferenceCfg.ts
index 053d7235..36d3ea50 100644
--- a/src/models/publishPreferenceCfg.ts
+++ b/src/models/publishPreferenceCfg.ts
@@ -58,6 +58,21 @@ class PublishPreferenceCfg extends PreferenceConfig {
    */
   public experimentalAIProxyUrl?: string
 
+  /**
+   * AI 模型
+   */
+  public experimentalAIApiModel?: string
+
+  /**
+   * AI token 数目
+   */
+  public experimentalAIApiMaxTokens?: number
+
+  /**
+   * AI 温度
+   */
+  public experimentalAIApiTemperature?: number
+
   // 文档菜单
   /**
    * 是否展示文档快捷菜单
diff --git a/src/stores/usePreferenceSettingStore.ts b/src/stores/usePreferenceSettingStore.ts
index 603df34c..437809cd 100644
--- a/src/stores/usePreferenceSettingStore.ts
+++ b/src/stores/usePreferenceSettingStore.ts
@@ -63,6 +63,9 @@ const usePreferenceSettingStore = () => {
         prefConfig.value.experimentalAIProxyUrl = snAiCfg.apiProxy
         prefConfig.value.experimentalAICode = snAiCfg.apiKey
         prefConfig.value.experimentalAIBaseUrl = snAiCfg.apiBaseURL
+        prefConfig.value.experimentalAIApiModel = snAiCfg.apiModel
+        prefConfig.value.experimentalAIApiMaxTokens = snAiCfg.apiMaxTokens
+        prefConfig.value.experimentalAIApiTemperature = snAiCfg.apiTemperature
         logger.info("use siyuan-note ai config")
       } else {
         prefConfig.value.experimentalUseSiyuanNoteAIConfig = false
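Because the three new preference fields are optional while the chat wrapper spreads them into `completionParams` unconditionally, unset preferences would be sent as explicit `undefined` values. Below is a hypothetical helper (not part of this PR; `buildCompletionParams`, `AiApiPrefs`, and `makeApi` are illustrative names) showing one way to fold only the configured values into the `chatgpt` package's completion parameters.

```ts
// Sketch only: build completionParams from the new optional preference fields,
// dropping unset values so the chatgpt package falls back to its own defaults.
import { ChatGPTAPI } from "chatgpt"

interface AiApiPrefs {
  experimentalAIApiModel?: string
  experimentalAIApiMaxTokens?: number
  experimentalAIApiTemperature?: number
}

type CompletionParams = {
  model?: string
  max_tokens?: number
  temperature?: number
}

const buildCompletionParams = (pref: AiApiPrefs): CompletionParams => {
  const params: CompletionParams = {}
  if (pref.experimentalAIApiModel) params.model = pref.experimentalAIApiModel
  if (pref.experimentalAIApiMaxTokens) params.max_tokens = pref.experimentalAIApiMaxTokens
  if (pref.experimentalAIApiTemperature !== undefined) params.temperature = pref.experimentalAIApiTemperature
  return params
}

// Usage sketch: pass the filtered params when constructing the API client.
const makeApi = (apiKey: string, pref: AiApiPrefs) =>
  new ChatGPTAPI({ apiKey, completionParams: buildCompletionParams(pref) })
```

Filtering like this would also avoid the case where the wrapper's spread overwrites caller-supplied completion parameters with undefined preference values.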