Skip to content

Commit f48f5c5

Browse files
committed
feat(perf): implement global singleton pattern and plugin warmup optimization
Refactor plugin initialization to use global singletons and move warmup logic to plugin startup instead of per-session. This eliminates redundant model loading and significantly improves first-message response time.

Key changes:
- Add a global singleton pattern for EmbeddingService using Symbol-based keys
- Move warmup logic from the message handler to plugin initialization
- Add global warmup-state tracking to prevent re-initialization
- Refactor the logger to use lazy initialization with a global state flag
- Extract embedding initialization into a separate private method

This resolves performance issues where the memory system would reinitialize on every session start, causing 30-60s delays for first-time users.
1 parent db9f0c9 commit f48f5c5

File tree

4 files changed

+53
-85
lines changed

4 files changed

+53
-85
lines changed

package.json

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,6 @@
11
{
22
"name": "opencode-mem",
3-
"version": "2.3.3",
3+
"version": "2.3.4",
44
"description": "OpenCode plugin that gives coding agents persistent memory using local vector database",
55
"type": "module",
66
"main": "dist/plugin.js",

src/index.ts

Lines changed: 12 additions & 59 deletions
Original file line numberDiff line numberDiff line change
@@ -50,6 +50,18 @@ export const OpenCodeMemPlugin: Plugin = async (ctx: PluginInput) => {
5050
log("Plugin disabled - memory system not configured");
5151
}
5252

53+
const GLOBAL_PLUGIN_WARMUP_KEY = Symbol.for("opencode-mem.plugin.warmedup");
54+
55+
if (!(globalThis as any)[GLOBAL_PLUGIN_WARMUP_KEY] && isConfigured()) {
56+
try {
57+
await memoryClient.warmup();
58+
(globalThis as any)[GLOBAL_PLUGIN_WARMUP_KEY] = true;
59+
log("Plugin warmup completed");
60+
} catch (error) {
61+
log("Plugin warmup failed", { error: String(error) });
62+
}
63+
}
64+
5365
if (CONFIG.webServerEnabled) {
5466
startWebServer({
5567
port: CONFIG.webServerPort,
@@ -158,65 +170,6 @@ export const OpenCodeMemPlugin: Plugin = async (ctx: PluginInput) => {
158170
if (isFirstMessage) {
159171
injectedSessions.add(input.sessionID);
160172

161-
const needsWarmup = !(await memoryClient.isReady());
162-
163-
if (needsWarmup) {
164-
if (ctx.client?.tui) {
165-
await ctx.client.tui
166-
.showToast({
167-
body: {
168-
title: "Memory System",
169-
message: "Initializing (first time: 30-60s)...",
170-
variant: "info",
171-
duration: 5000,
172-
},
173-
})
174-
.catch(() => {});
175-
}
176-
177-
try {
178-
await memoryClient.warmup();
179-
180-
if (ctx.client?.tui) {
181-
const autoCaptureStatus =
182-
CONFIG.autoCaptureEnabled &&
183-
CONFIG.memoryModel &&
184-
CONFIG.memoryApiUrl &&
185-
CONFIG.memoryApiKey
186-
? "Auto-capture: enabled"
187-
: "Auto-capture: disabled";
188-
189-
await ctx.client.tui
190-
.showToast({
191-
body: {
192-
title: "Memory System Ready!",
193-
message: autoCaptureStatus,
194-
variant: CONFIG.autoCaptureEnabled ? "success" : "warning",
195-
duration: 3000,
196-
},
197-
})
198-
.catch(() => {});
199-
}
200-
} catch (warmupError) {
201-
log("Warmup failed", { error: String(warmupError) });
202-
203-
if (ctx.client?.tui) {
204-
await ctx.client.tui
205-
.showToast({
206-
body: {
207-
title: "Memory System Error",
208-
message: `Failed to initialize: ${String(warmupError)}`,
209-
variant: "error",
210-
duration: 10000,
211-
},
212-
})
213-
.catch(() => {});
214-
}
215-
216-
return;
217-
}
218-
}
219-
220173
const projectMemoriesListResult = await memoryClient.listMemories(
221174
tags.project.tag,
222175
CONFIG.maxMemories

src/services/embedding.ts

Lines changed: 30 additions & 21 deletions
Original file line numberDiff line numberDiff line change
@@ -7,6 +7,7 @@ env.allowRemoteModels = true;
77
env.cacheDir = CONFIG.storagePath + "/.cache";
88

99
const TIMEOUT_MS = 30000;
10+
const GLOBAL_EMBEDDING_KEY = Symbol.for("opencode-mem.embedding.instance");
1011

1112
function withTimeout<T>(promise: Promise<T>, ms: number): Promise<T> {
1213
return Promise.race([
@@ -20,34 +21,42 @@ export class EmbeddingService {
2021
private initPromise: Promise<void> | null = null;
2122
public isWarmedUp: boolean = false;
2223

24+
static getInstance(): EmbeddingService {
25+
if (!(globalThis as any)[GLOBAL_EMBEDDING_KEY]) {
26+
(globalThis as any)[GLOBAL_EMBEDDING_KEY] = new EmbeddingService();
27+
}
28+
return (globalThis as any)[GLOBAL_EMBEDDING_KEY];
29+
}
30+
2331
async warmup(progressCallback?: (progress: any) => void): Promise<void> {
2432
if (this.isWarmedUp) return;
2533
if (this.initPromise) return this.initPromise;
2634

27-
this.initPromise = (async () => {
28-
try {
29-
if (CONFIG.embeddingApiUrl && CONFIG.embeddingApiKey) {
30-
log("Using OpenAI-compatible API for embeddings");
31-
this.isWarmedUp = true;
32-
return;
33-
}
34-
35-
log("Downloading embedding model", { model: CONFIG.embeddingModel });
36-
37-
this.pipe = await pipeline("feature-extraction", CONFIG.embeddingModel, {
38-
progress_callback: progressCallback,
39-
});
35+
this.initPromise = this.initializeModel(progressCallback);
36+
return this.initPromise;
37+
}
4038

39+
private async initializeModel(progressCallback?: (progress: any) => void): Promise<void> {
40+
try {
41+
if (CONFIG.embeddingApiUrl && CONFIG.embeddingApiKey) {
42+
log("Using OpenAI-compatible API for embeddings");
4143
this.isWarmedUp = true;
42-
log("Embedding model ready");
43-
} catch (error) {
44-
this.initPromise = null;
45-
log("Failed to initialize embedding model", { error: String(error) });
46-
throw error;
44+
return;
4745
}
48-
})();
4946

50-
return this.initPromise;
47+
log("Downloading embedding model", { model: CONFIG.embeddingModel });
48+
49+
this.pipe = await pipeline("feature-extraction", CONFIG.embeddingModel, {
50+
progress_callback: progressCallback,
51+
});
52+
53+
this.isWarmedUp = true;
54+
log("Embedding model ready");
55+
} catch (error) {
56+
this.initPromise = null;
57+
log("Failed to initialize embedding model", { error: String(error) });
58+
throw error;
59+
}
5160
}
5261

5362
async embed(text: string): Promise<Float32Array> {
@@ -89,4 +98,4 @@ export class EmbeddingService {
8998
}
9099
}
91100

92-
export const embeddingService = new EmbeddingService();
101+
export const embeddingService = EmbeddingService.getInstance();

src/services/logger.ts

Lines changed: 10 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -5,13 +5,19 @@ import { join } from "path";
55
const LOG_DIR = join(homedir(), ".opencode-mem");
66
const LOG_FILE = join(LOG_DIR, "opencode-mem.log");
77

8-
if (!existsSync(LOG_DIR)) {
9-
mkdirSync(LOG_DIR, { recursive: true });
10-
}
8+
const GLOBAL_LOGGER_KEY = Symbol.for("opencode-mem.logger.initialized");
119

12-
writeFileSync(LOG_FILE, `\n--- Session started: ${new Date().toISOString()} ---\n`, { flag: "a" });
10+
function ensureLoggerInitialized() {
11+
if ((globalThis as any)[GLOBAL_LOGGER_KEY]) return;
12+
if (!existsSync(LOG_DIR)) {
13+
mkdirSync(LOG_DIR, { recursive: true });
14+
}
15+
writeFileSync(LOG_FILE, `\n--- Session started: ${new Date().toISOString()} ---\n`, { flag: "a" });
16+
(globalThis as any)[GLOBAL_LOGGER_KEY] = true;
17+
}
1318

1419
export function log(message: string, data?: unknown) {
20+
ensureLoggerInitialized();
1521
const timestamp = new Date().toISOString();
1622
const line = data
1723
? `[${timestamp}] ${message}: ${JSON.stringify(data)}\n`

0 commit comments

Comments (0)