diff --git a/README.md b/README.md index 7f1db28..db9417b 100644 --- a/README.md +++ b/README.md @@ -67,6 +67,7 @@ In the VSCode settings, locate the "ai-commit" configuration options and configu | AZURE_API_VERSION | string | None | No | AZURE_API_VERSION | | AI_COMMIT_LANGUAGE | string | en | Yes | Supports 19 languages | | SYSTEM_PROMPT | string | None | No | Custom system prompt | +| OPENAI_TEMPERATURE | number | 0.7 | No | Controls randomness in the output. Range: 0-2. Lower values: more focused, Higher values: more creative | ## ⌨️ Local Development diff --git a/README.zh_CN.md b/README.zh_CN.md index 34bbd73..e702a93 100644 --- a/README.zh_CN.md +++ b/README.zh_CN.md @@ -67,6 +67,7 @@ | AZURE_API_VERSION | string | None | 否 | AZURE_API_VERSION | | AI_COMMIT_LANGUAGE | string | en | 是 | 支持 19 种语言 | | SYSTEM_PROMPT | string | None | 否 | 自定义系统提示词 | +| OPENAI_TEMPERATURE | number | 0.7 | 否 | 控制输出的随机性。范围:0-2。较低的值:更加集中,较高的值:更有创造性 | ## ⌨️ 本地开发 diff --git a/package.json b/package.json index 1f8e14b..0b65c1c 100644 @@ -117,6 +117,13 @@ "type": "string", "default": "", "description": "Custom system prompt for generating commit messages" + }, + "ai-commit.OPENAI_TEMPERATURE": { + "type": "number", + "default": 0.7, + "minimum": 0, + "maximum": 2, + "description": "OpenAI temperature setting (0-2). Higher values make output more random, lower values more deterministic." } }, "title": "AI Commit" diff --git a/src/config.ts b/src/config.ts index a39c37a..81bccda 100644 --- a/src/config.ts +++ b/src/config.ts @@ -10,6 +10,7 @@ import { createOpenAIApi } from './openai-utils'; * @property {string} AZURE_API_VERSION - The version of Azure API. * @property {string} AI_COMMIT_LANGUAGE - The language for AI commit messages. * @property {string} SYSTEM_PROMPT - The system prompt for generating commit messages. + * @property {number} OPENAI_TEMPERATURE - The temperature setting for OpenAI API. 
*/ export enum ConfigKeys { OPENAI_API_KEY = 'OPENAI_API_KEY', @@ -17,7 +18,8 @@ export enum ConfigKeys { OPENAI_MODEL = 'OPENAI_MODEL', AZURE_API_VERSION = 'AZURE_API_VERSION', AI_COMMIT_LANGUAGE = 'AI_COMMIT_LANGUAGE', - SYSTEM_PROMPT = 'AI_COMMIT_SYSTEM_PROMPT' + SYSTEM_PROMPT = 'AI_COMMIT_SYSTEM_PROMPT', + OPENAI_TEMPERATURE = 'OPENAI_TEMPERATURE' } /** @@ -34,9 +36,9 @@ export class ConfigurationManager { this.disposable = vscode.workspace.onDidChangeConfiguration((event) => { if (event.affectsConfiguration('ai-commit')) { this.configCache.clear(); - - if (event.affectsConfiguration('ai-commit.OPENAI_BASE_URL') || - event.affectsConfiguration('ai-commit.OPENAI_API_KEY')) { + + if (event.affectsConfiguration('ai-commit.OPENAI_BASE_URL') || + event.affectsConfiguration('ai-commit.OPENAI_API_KEY')) { this.updateModelList(); } } @@ -69,14 +71,14 @@ export class ConfigurationManager { try { const openai = createOpenAIApi(); const models = await openai.models.list(); - + // Save available models to extension state await this.context.globalState.update('availableModels', models.data.map(model => model.id)); - + // Get the current selected model const config = vscode.workspace.getConfiguration('ai-commit'); const currentModel = config.get('OPENAI_MODEL'); - + // If the current selected model is not in the available list, set it to the default value const availableModels = models.data.map(model => model.id); if (!availableModels.includes(currentModel)) { diff --git a/src/openai-utils.ts b/src/openai-utils.ts index 529f5bd..11be8e3 100644 --- a/src/openai-utils.ts +++ b/src/openai-utils.ts @@ -53,13 +53,14 @@ export function createOpenAIApi() { */ export async function ChatGPTAPI(messages: ChatCompletionMessageParam[]) { const openai = createOpenAIApi(); - const model = ConfigurationManager.getInstance().getConfig( - ConfigKeys.OPENAI_MODEL - ); + const configManager = ConfigurationManager.getInstance(); + const model = configManager.getConfig(ConfigKeys.OPENAI_MODEL); 
+ const temperature = configManager.getConfig(ConfigKeys.OPENAI_TEMPERATURE, 0.7); const completion = await openai.chat.completions.create({ model, - messages: messages as ChatCompletionMessageParam[] + messages: messages as ChatCompletionMessageParam[], + temperature }); return completion.choices[0]!.message?.content;