feat(backend): enhance config loader with embedding support and impro… #74
New file (237 lines added):

```ts
// config-loader.ts
import * as fs from 'fs';
import * as path from 'path';
import * as _ from 'lodash';
import { getConfigPath } from './common-path';

export interface ChatConfig {
  model: string;
  endpoint?: string;
  token?: string;
  default?: boolean;
  task?: string;
}

export interface EmbeddingConfig {
  model: string;
  endpoint?: string;
  token?: string;
}

export interface AppConfig {
  chats?: ChatConfig[];
  embeddings?: EmbeddingConfig;
}

export const exampleConfigContent = `{
  // Chat models configuration
  // You can configure multiple chat models
  "chats": [
    // Example of OpenAI GPT configuration
    {
      "model": "gpt-3.5-turbo",
      "endpoint": "https://api.openai.com/v1",
      "token": "your-openai-token", // Replace with your OpenAI token
      "default": true // Set as default chat model
    },

    // Example of local model configuration
    {
      "model": "llama2",
      "endpoint": "http://localhost:11434/v1",
      "task": "chat"
    }
  ],

  // Embedding model configuration (optional)
  "embeddings": {
    "model": "text-embedding-ada-002",
    "endpoint": "https://api.openai.com/v1",
    "token": "your-openai-token" // Replace with your OpenAI token
  }
}`;

export class ConfigLoader {
  private static instance: ConfigLoader;
  private config: AppConfig;
  private readonly configPath: string;

  private constructor(configPath?: string) {
    this.configPath = configPath || getConfigPath('config');
    this.loadConfig();
  }

  public static getInstance(configPath?: string): ConfigLoader {
    if (!ConfigLoader.instance) {
      ConfigLoader.instance = new ConfigLoader(configPath);
    }
    return ConfigLoader.instance;
  }

  public static initConfigFile(configPath: string): void {
    if (fs.existsSync(configPath)) {
      throw new Error('Config file already exists');
    }

    const configDir = path.dirname(configPath);
    if (!fs.existsSync(configDir)) {
      fs.mkdirSync(configDir, { recursive: true });
    }

    fs.writeFileSync(configPath, exampleConfigContent, 'utf-8');
  }

  public reload(): void {
    this.loadConfig();
  }

  private loadConfig() {
    try {
      const file = fs.readFileSync(this.configPath, 'utf-8');
      // Strip // and /* */ comments while leaving string literals intact,
      // so the commented example config can be parsed by JSON.parse.
      const jsonContent = file.replace(
        /\\"|"(?:\\"|[^"])*"|(\/\/.*|\/\*[\s\S]*?\*\/)/g,
        (m, g) => (g ? '' : m),
      );
      this.config = JSON.parse(jsonContent);

      this.validateConfig();
    } catch (error) {
      if (
        error.code === 'ENOENT' ||
        error.message.includes('Unexpected end of JSON input')
      ) {
        this.config = {};
        this.saveConfig();
      } else {
        throw error;
      }
    }
  }

  get<T>(path?: string): T {
    if (!path) {
      return this.config as unknown as T;
    }
    return _.get(this.config, path) as T;
  }

  set(path: string, value: any) {
    _.set(this.config, path, value);
    this.saveConfig();
  }

  private saveConfig() {
    const configDir = path.dirname(this.configPath);
    if (!fs.existsSync(configDir)) {
      fs.mkdirSync(configDir, { recursive: true });
    }
    fs.writeFileSync(
      this.configPath,
      JSON.stringify(this.config, null, 2),
      'utf-8',
    );
  }

  getAllChatConfigs(): ChatConfig[] {
    return this.config.chats || [];
  }

  getChatConfig(modelName?: string): ChatConfig | null {
    if (!this.config.chats || !Array.isArray(this.config.chats)) {
      return null;
    }

    const chats = this.config.chats;

    if (modelName) {
      const foundChat = chats.find((chat) => chat.model === modelName);
      if (foundChat) {
        return foundChat;
      }
    }

    return (
      chats.find((chat) => chat.default) || (chats.length > 0 ? chats[0] : null)
    );
  }

  addChatConfig(config: ChatConfig) {
    if (!this.config.chats) {
      this.config.chats = [];
    }

    const index = this.config.chats.findIndex(
      (chat) => chat.model === config.model,
    );
    if (index !== -1) {
      this.config.chats.splice(index, 1);
    }

    if (config.default) {
      this.config.chats.forEach((chat) => {
        chat.default = false;
      });
    }

    this.config.chats.push(config);
    this.saveConfig();
  }

  removeChatConfig(modelName: string): boolean {
    if (!this.config.chats) {
      return false;
    }

    const initialLength = this.config.chats.length;
    this.config.chats = this.config.chats.filter(
      (chat) => chat.model !== modelName,
    );

    if (this.config.chats.length !== initialLength) {
      this.saveConfig();
      return true;
    }

    return false;
  }

  getEmbeddingConfig(): EmbeddingConfig | null {
    return this.config.embeddings || null;
  }

  validateConfig() {
    if (!this.config) {
      this.config = {};
    }

    if (typeof this.config !== 'object') {
      throw new Error('Invalid configuration: Must be an object');
    }

    if (this.config.chats) {
      if (!Array.isArray(this.config.chats)) {
        throw new Error("Invalid configuration: 'chats' must be an array");
      }

      this.config.chats.forEach((chat, index) => {
        if (!chat.model) {
          throw new Error(
            `Invalid chat configuration at index ${index}: 'model' is required`,
          );
        }
      });

      const defaultChats = this.config.chats.filter((chat) => chat.default);
      if (defaultChats.length > 1) {
        throw new Error(
          'Invalid configuration: Multiple default chat configurations found',
        );
      }
    }

    if (this.config.embeddings) {
      if (!this.config.embeddings.model) {
        throw new Error("Invalid embedding configuration: 'model' is required");
      }
    }
  }
}
```
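For reference, typical usage of this loader might look like the following. This is a minimal sketch only: the config path is a placeholder, and the import path assumes the implementation file is named `config-loader.ts` as suggested in the review below.

```ts
import * as fs from 'fs';
import { ConfigLoader } from './config-loader';

const configPath = '/tmp/my-app/config.json'; // placeholder path, not fixed by this PR

// Write the commented example config once; initConfigFile throws if it already exists.
if (!fs.existsSync(configPath)) {
  ConfigLoader.initConfigFile(configPath);
}

// Obtain the singleton and query it.
const loader = ConfigLoader.getInstance(configPath);
const chat = loader.getChatConfig();            // default entry, else first entry, else null
const embeddings = loader.getEmbeddingConfig(); // null when 'embeddings' is absent
console.log(chat?.model, embeddings?.model);
```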
🛠️ Refactor suggestion
Rename file to reflect implementation code
The file `config-loader.spec.ts` is conventionally reserved for test code, but it contains the implementation of the `ConfigLoader` class. Consider renaming it to `config-loader.ts` to accurately reflect its contents, and add a corresponding `config-loader.spec.ts` with unit tests for the `ConfigLoader` class.
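If that test file is added, a minimal Jest-style spec could look roughly like this. The temp-file setup and assertions are only a sketch, and the import path assumes the rename suggested above.

```ts
// config-loader.spec.ts (sketch, assuming the implementation lives in config-loader.ts)
import * as fs from 'fs';
import * as os from 'os';
import * as path from 'path';
import { ConfigLoader } from './config-loader';

describe('ConfigLoader', () => {
  let configPath: string;

  beforeEach(() => {
    // Write a throwaway config file in a temp directory for each test.
    const dir = fs.mkdtempSync(path.join(os.tmpdir(), 'config-loader-'));
    configPath = path.join(dir, 'config.json');
    fs.writeFileSync(
      configPath,
      JSON.stringify({ chats: [{ model: 'gpt-3.5-turbo', default: true }] }),
      'utf-8',
    );
  });

  it('returns the default chat config when no model name is given', () => {
    // Note: getInstance caches a singleton, so the real suite would need a
    // reset hook (or direct construction) to isolate tests from each other.
    const loader = ConfigLoader.getInstance(configPath);
    expect(loader.getChatConfig()?.model).toBe('gpt-3.5-turbo');
  });
});
```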
Should we consider an attribute like isLocal in ChatConfig to distinguish between remote and local models?
@NarwhalChen, adding an `isLocal` (or similarly named) attribute to the chat configuration is a reasonable approach if you’d like to distinguish between loading a model from a local path and relying on a remote API endpoint. The attribute can clarify the configuration options, guide conditional logic in the code (e.g., deciding between file-system checks and network requests), and keep future expansions flexible (for instance, multiple remote endpoints or different local model directories).
If you proceed with this, you could define something like:

```ts
interface ChatConfig {
  model: string;
  default?: boolean;
  isLocal?: boolean; // Example attribute
  // ...other fields (endpoint, token, task)
}
```
That way, when you load or operate on the configuration, you can easily apply different behaviors based on the value of “isLocal.”
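To make that conditional logic concrete, a hypothetical helper could branch on the flag as sketched below. `resolveChatTarget` and the localhost fallback are illustrative only, not part of this PR, and the import path assumes the file is renamed to `config-loader.ts`.

```ts
import { ChatConfig } from './config-loader';

// Sketch: decide how to reach a chat model based on an optional isLocal flag.
function resolveChatTarget(chat: ChatConfig & { isLocal?: boolean }): string {
  if (chat.isLocal) {
    // Local models: fall back to a local runtime endpoint (e.g. Ollama-style)
    // when none is configured. The fallback value is an assumption.
    return chat.endpoint ?? 'http://localhost:11434/v1';
  }
  // Remote models: an endpoint (and usually a token) must be configured.
  if (!chat.endpoint) {
    throw new Error(`Remote chat model '${chat.model}' is missing 'endpoint'`);
  }
  return chat.endpoint;
}
```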