
Commit 9395bb8

refactor(config): unify configuration structure and add temperature setting

- Merge DEFAULT_API_CONFIG into DEFAULT_CONFIG for single source of truth
- Add temperature configuration option for LLM adapters (default: 1.0)
- Update all LLM adapters to use config-based temperature setting
- Add temperature control to settings UI with validation (0.0-2.0 range)
- Update config service conversion functions for temperature field
- Maintain backward compatibility with legacy DEFAULT_API_CONFIG export

Parent: ce142bd

File tree: 7 files changed (+65 −36 lines)
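
In outline, the commit rearranges the defaults like this (a condensed sketch of the shapes introduced in the diffs below, not the full source):

// Unified defaults: DEFAULT_CONFIG is the single source of truth.
export const DEFAULT_CONFIG = {
  llm: { maxRetries: 3, temperature: 1.0 /* ... */ },
  translation: { modChunkSize: 50 /* ... */ },
};

// The legacy name survives as a derived export for backward compatibility.
export const DEFAULT_API_CONFIG = {
  temperature: DEFAULT_CONFIG.llm.temperature,
  maxRetries: DEFAULT_CONFIG.llm.maxRetries,
  chunkSize: DEFAULT_CONFIG.translation.modChunkSize,
} as const;

// Adapters prefer the user's configured temperature, falling back to the default.
declare const llmConfig: { temperature?: number }; // stands in for an adapter's this.config
const temperature = llmConfig.temperature ?? DEFAULT_API_CONFIG.temperature;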

src/components/settings/llm-settings.tsx (22 additions, 3 deletions)

@@ -6,7 +6,7 @@ import { Textarea } from "@/components/ui/textarea";
 import { Select, SelectContent, SelectItem, SelectTrigger, SelectValue } from "@/components/ui/select";
 import { Card, CardHeader, CardTitle, CardContent } from "@/components/ui/card";
 import { Eye, EyeOff } from "lucide-react";
-import { AppConfig, DEFAULT_MODELS } from "@/lib/types/config";
+import { AppConfig, DEFAULT_MODELS, DEFAULT_API_CONFIG } from "@/lib/types/config";
 import { useAppTranslation } from "@/lib/i18n";
 import { DEFAULT_SYSTEM_PROMPT, DEFAULT_USER_PROMPT } from "@/lib/types/llm";

@@ -111,15 +111,34 @@ export function LLMSettings({ config, setConfig }: LLMSettingsProps) {
           <label className="text-sm font-medium">{t('settings.maxRetries')}</label>
           <Input
             type="number"
-            value={config.llm.maxRetries || 5}
+            value={config.llm.maxRetries ?? DEFAULT_API_CONFIG.maxRetries}
             onChange={(e) => {
               config.llm.maxRetries = parseInt(e.target.value);
               setConfig({ ...config });
             }}
-            placeholder="5"
+            placeholder={DEFAULT_API_CONFIG.maxRetries.toString()}
           />
         </div>

+        <div className="space-y-2">
+          <label className="text-sm font-medium">{t('settings.temperature') || 'Temperature'}</label>
+          <Input
+            type="number"
+            value={config.llm.temperature ?? DEFAULT_API_CONFIG.temperature}
+            onChange={(e) => {
+              config.llm.temperature = parseFloat(e.target.value);
+              setConfig({ ...config });
+            }}
+            placeholder={DEFAULT_API_CONFIG.temperature.toString()}
+            min="0"
+            max="2"
+            step="0.1"
+          />
+          <p className="text-xs text-muted-foreground">
+            {t('settings.temperatureHint') || 'Controls randomness (0.0-2.0). Higher values make output more creative.'}
+          </p>
+        </div>
+
         <div className="space-y-2 col-span-2">
           <label className="text-sm font-medium">{t('settings.systemPrompt') || 'System Prompt'}</label>
           <Textarea
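
The committed onChange stores parseFloat(e.target.value) directly and relies on the Input's min/max/step attributes for the 0.0-2.0 range. A stricter handler would clamp in code as well; a minimal sketch, where clampTemperature is a hypothetical helper and not part of this commit:

// Hypothetical helper: parse the raw input and clamp it to the valid range.
function clampTemperature(raw: string, fallback: number): number {
  const parsed = parseFloat(raw);
  if (Number.isNaN(parsed)) return fallback; // empty or non-numeric input
  return Math.min(2, Math.max(0, parsed));   // enforce 0.0-2.0
}

// Inside the handler it would replace the bare parseFloat:
// config.llm.temperature = clampTemperature(e.target.value, DEFAULT_API_CONFIG.temperature);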

src/lib/adapters/anthropic-adapter.ts (2 additions, 2 deletions)

@@ -96,8 +96,8 @@ export class AnthropicAdapter extends BaseLLMAdapter {
 
     const completion = await anthropic.messages.create({
       model,
-      max_tokens: DEFAULT_API_CONFIG.maxTokens,
-      temperature: DEFAULT_API_CONFIG.temperature,
+      max_tokens: 4096,
+      temperature: this.config.temperature ?? DEFAULT_API_CONFIG.temperature,
       system: [
         {
           type: "text",

src/lib/adapters/gemini-adapter.ts (2 additions, 2 deletions)

@@ -86,8 +86,8 @@ export class GeminiAdapter extends BaseLLMAdapter {
       model,
       systemInstruction: systemPrompt,
       generationConfig: {
-        temperature: DEFAULT_API_CONFIG.temperature,
-        maxOutputTokens: DEFAULT_API_CONFIG.maxTokens,
+        temperature: this.config.temperature ?? DEFAULT_API_CONFIG.temperature,
+        maxOutputTokens: 4096,
       },
       safetySettings: [
         {

src/lib/adapters/openai-adapter.ts (3 additions, 3 deletions)

@@ -105,8 +105,8 @@ export class OpenAIAdapter extends BaseLLMAdapter {
           content: userPrompt
         }
       ],
-      temperature: DEFAULT_API_CONFIG.temperature,
-      user: "minecraft-mod-localizer" // For better cache routing
+      temperature: this.config.temperature ?? DEFAULT_API_CONFIG.temperature,
+      user: "minecraft-mod-localizer"
     });

     await this.logApiRequest(`API request successful`);

@@ -202,4 +202,4 @@ export class OpenAIAdapter extends BaseLLMAdapter {
       return false;
     }
   }
-}
+}

src/lib/services/config-service.ts (16 additions, 14 deletions)

@@ -289,7 +289,8 @@ function convertToSnakeCase(config: AppConfig): Record<string, unknown> {
       max_retries: config.llm.maxRetries,
       prompt_template: config.llm.promptTemplate,
       system_prompt: config.llm.systemPrompt,
-      user_prompt: config.llm.userPrompt
+      user_prompt: config.llm.userPrompt,
+      temperature: config.llm.temperature
     },
     translation: {
       mod_chunk_size: config.translation.modChunkSize,

@@ -328,27 +329,28 @@ function convertFromSnakeCase(backendConfig: Record<string, unknown>): AppConfig
       apiKey: (llm?.api_key as string) || "",
       baseUrl: llm?.base_url as string | undefined,
       model: llm?.model as string | undefined,
-      maxRetries: (llm?.max_retries as number) || 5,
+      maxRetries: (llm?.max_retries as number) || DEFAULT_CONFIG.llm.maxRetries,
       promptTemplate: llm?.prompt_template as string | undefined,
       systemPrompt: llm?.system_prompt as string | undefined,
-      userPrompt: llm?.user_prompt as string | undefined
+      userPrompt: llm?.user_prompt as string | undefined,
+      temperature: (llm?.temperature as number) || DEFAULT_CONFIG.llm.temperature
     },
     translation: {
-      modChunkSize: (translation?.mod_chunk_size as number) || 50,
-      questChunkSize: (translation?.quest_chunk_size as number) || 1,
-      guidebookChunkSize: (translation?.guidebook_chunk_size as number) || 1,
-      additionalLanguages: (translation?.custom_languages as SupportedLanguage[]) || [],
-      resourcePackName: (translation?.resource_pack_name as string) || "MinecraftModsLocalizer"
+      modChunkSize: (translation?.mod_chunk_size as number) || DEFAULT_CONFIG.translation.modChunkSize,
+      questChunkSize: (translation?.quest_chunk_size as number) || DEFAULT_CONFIG.translation.questChunkSize,
+      guidebookChunkSize: (translation?.guidebook_chunk_size as number) || DEFAULT_CONFIG.translation.guidebookChunkSize,
+      additionalLanguages: (translation?.custom_languages as SupportedLanguage[]) || DEFAULT_CONFIG.translation.additionalLanguages,
+      resourcePackName: (translation?.resource_pack_name as string) || DEFAULT_CONFIG.translation.resourcePackName
     },
     ui: {
-      theme: (ui?.theme as "light" | "dark" | "system") || "system"
+      theme: (ui?.theme as "light" | "dark" | "system") || DEFAULT_CONFIG.ui.theme
     },
     paths: {
-      minecraftDir: (paths?.minecraft_dir as string) || "",
-      modsDir: (paths?.mods_dir as string) || "",
-      resourcePacksDir: (paths?.resource_packs_dir as string) || "",
-      configDir: (paths?.config_dir as string) || "",
-      logsDir: (paths?.logs_dir as string) || ""
+      minecraftDir: (paths?.minecraft_dir as string) || DEFAULT_CONFIG.paths.minecraftDir,
+      modsDir: (paths?.mods_dir as string) || DEFAULT_CONFIG.paths.modsDir,
+      resourcePacksDir: (paths?.resource_packs_dir as string) || DEFAULT_CONFIG.paths.resourcePacksDir,
+      configDir: (paths?.config_dir as string) || DEFAULT_CONFIG.paths.configDir,
+      logsDir: (paths?.logs_dir as string) || DEFAULT_CONFIG.paths.logsDir
     }
   };
 }
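
On the read side, these || fallbacks mean a backend config written before this commit, which has no temperature key, comes back with the unified default. A minimal sketch with a hand-rolled payload standing in for the parsed backend section:

// Legacy payload: no temperature field stored yet.
const llm: { temperature?: number } = {};
const temperature = (llm.temperature as number) || 1.0; // DEFAULT_CONFIG.llm.temperature
console.log(temperature); // 1.0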

src/lib/types/config.ts (18 additions, 12 deletions)

@@ -18,15 +18,7 @@ export const DEFAULT_API_URLS = {
   google: undefined // Google uses SDK default
 } as const;
 
-/**
- * Default API configuration
- */
-export const DEFAULT_API_CONFIG = {
-  temperature: 0.3,
-  maxTokens: 4096,
-  maxRetries: 5,
-  chunkSize: 50
-} as const;
+// Removed DEFAULT_API_CONFIG - values moved to DEFAULT_CONFIG for unified configuration
 
 /**
  * Storage keys

@@ -71,6 +63,8 @@ export interface LLMProviderConfig {
   systemPrompt?: string;
   /** User prompt template with variables for the specific task */
   userPrompt?: string;
+  /** Temperature setting for the LLM (0.0 to 2.0) */
+  temperature?: number;
 }
 
 /**

@@ -127,19 +121,21 @@ export interface UpdateConfig {
 
 /**
  * Default application configuration
+ * Unified configuration with all default values in one place
  */
 export const DEFAULT_CONFIG: AppConfig = {
   llm: {
     provider: "openai",
     apiKey: "",
     model: DEFAULT_MODELS.openai,
-    maxRetries: DEFAULT_API_CONFIG.maxRetries,
+    maxRetries: 3,
     promptTemplate: DEFAULT_PROMPT_TEMPLATE,
     systemPrompt: DEFAULT_SYSTEM_PROMPT,
-    userPrompt: DEFAULT_USER_PROMPT
+    userPrompt: DEFAULT_USER_PROMPT,
+    temperature: 1.0
   },
   translation: {
-    modChunkSize: DEFAULT_API_CONFIG.chunkSize,
+    modChunkSize: 50,
     questChunkSize: 1,
     guidebookChunkSize: 1,
     additionalLanguages: [],

@@ -159,3 +155,13 @@ export const DEFAULT_CONFIG: AppConfig = {
     checkOnStartup: true
   }
 };
+
+/**
+ * Backward compatibility: Export individual default values from unified config
+ * This maintains existing API while using the unified configuration as source
+ */
+export const DEFAULT_API_CONFIG = {
+  temperature: DEFAULT_CONFIG.llm.temperature!,
+  maxRetries: DEFAULT_CONFIG.llm.maxRetries,
+  chunkSize: DEFAULT_CONFIG.translation.modChunkSize
+} as const;
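
Because the legacy name is now derived from DEFAULT_CONFIG, existing imports keep compiling and resolve to the same values; note that maxTokens is no longer part of the re-export, since the adapters now hardcode 4096. A small usage sketch:

import { DEFAULT_CONFIG, DEFAULT_API_CONFIG } from "@/lib/types/config";

// Both names point at the same underlying defaults.
console.log(DEFAULT_API_CONFIG.temperature === DEFAULT_CONFIG.llm.temperature); // true
console.log(DEFAULT_API_CONFIG.maxRetries === DEFAULT_CONFIG.llm.maxRetries);   // true
console.log(DEFAULT_API_CONFIG.chunkSize === DEFAULT_CONFIG.translation.modChunkSize); // true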

src/lib/types/llm.ts (2 additions, 0 deletions)

@@ -83,6 +83,8 @@ export interface LLMConfig {
   systemPrompt?: string;
   /** User prompt template with variables for the specific task */
   userPrompt?: string;
+  /** Temperature setting for the LLM (0.0 to 2.0) */
+  temperature?: number;
 }
 
 /**