Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
11 changes: 10 additions & 1 deletion open-sse/translator/helpers/openaiHelper.js
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,15 @@
export const VALID_OPENAI_CONTENT_TYPES = ["text", "image_url", "image"];
export const VALID_OPENAI_MESSAGE_TYPES = ["text", "image_url", "image", "tool_calls", "tool_result"];

// Collapse an OpenAI content-block array into a plain string when every
// block is text (joined with newlines); otherwise return the array
// unchanged so multimodal blocks (image_url, image, ...) survive.
function normalizeOpenAIContent(content) {
  // Defensive: upstream may already hand us a plain string (or null);
  // pass such values through instead of crashing on .every().
  if (!Array.isArray(content)) return content;

  const textOnly = content.every((block) => block.type === "text");
  if (textOnly) {
    // `|| ""` guards missing text fields so we never join "undefined".
    // An empty array also lands here and yields "".
    return content.map((block) => block.text || "").join("\n");
  }

  return content;
}

// Filter messages to OpenAI standard format
// Remove: thinking, redacted_thinking, signature, and other non-OpenAI blocks
export function filterToOpenAIFormat(body) {
Expand Down Expand Up @@ -47,7 +56,7 @@ export function filterToOpenAIFormat(body) {
filteredContent.push({ type: "text", text: "" });
}

return { ...msg, content: filteredContent };
return { ...msg, content: normalizeOpenAIContent(filteredContent) };
}

return msg;
Expand Down
21 changes: 14 additions & 7 deletions open-sse/translator/request/claude-to-openai.js
Original file line number Diff line number Diff line change
Expand Up @@ -73,6 +73,17 @@ export function claudeToOpenAIRequest(model, body, stream) {
return result;
}

// Normalize a Claude-derived parts array into OpenAI message content:
//   []               -> ""             (never emit an empty array)
//   all-text parts   -> single string  (joined with newlines)
//   mixed/multimodal -> array unchanged
function normalizeOpenAIContent(parts) {
  if (parts.length === 0) return "";

  const textOnly = parts.every((part) => part.type === "text");
  if (textOnly) {
    // `|| ""` guards missing text fields so we never join "undefined".
    return parts.map((part) => part.text || "").join("\n");
  }

  // A lone text part is already handled by the textOnly branch above,
  // so anything reaching here contains at least one non-text block.
  return parts;
}

// Fix missing tool responses - add empty responses for tool_calls without responses
function fixMissingToolResponses(messages) {
for (let i = 0; i < messages.length; i++) {
Expand Down Expand Up @@ -177,9 +188,7 @@ function convertClaudeMessage(msg) {
// If has tool results, return array of tool messages
if (toolResults.length > 0) {
if (parts.length > 0) {
const textContent = parts.length === 1 && parts[0].type === "text"
? parts[0].text
: parts;
const textContent = normalizeOpenAIContent(parts);
return [...toolResults, { role: "user", content: textContent }];
}
return toolResults;
Expand All @@ -189,9 +198,7 @@ function convertClaudeMessage(msg) {
if (toolCalls.length > 0) {
const result = { role: "assistant" };
if (parts.length > 0) {
result.content = parts.length === 1 && parts[0].type === "text"
? parts[0].text
: parts;
result.content = normalizeOpenAIContent(parts);
}
result.tool_calls = toolCalls;
return result;
Expand All @@ -201,7 +208,7 @@ function convertClaudeMessage(msg) {
if (parts.length > 0) {
return {
role,
content: parts.length === 1 && parts[0].type === "text" ? parts[0].text : parts
content: normalizeOpenAIContent(parts)
};
}

Expand Down
11 changes: 10 additions & 1 deletion open-sse/utils/streamHelpers.js
Original file line number Diff line number Diff line change
Expand Up @@ -4,9 +4,18 @@ import { FORMATS } from "../translator/formats.js";
export function parseSSELine(line, format = null) {
if (!line) return null;

const trimmed = line.trim();

if (trimmed.startsWith("{")) {
try {
return JSON.parse(trimmed);
} catch (error) {
return null;
}
}

// NDJSON format (Ollama): raw JSON lines without "data:" prefix
if (format === FORMATS.OLLAMA) {
const trimmed = line.trim();
if (trimmed.startsWith("{")) {
try {
return JSON.parse(trimmed);
Expand Down
57 changes: 47 additions & 10 deletions src/app/api/oauth/cursor/auto-import/route.js
Original file line number Diff line number Diff line change
Expand Up @@ -39,6 +39,14 @@ function getCandidatePaths(platform) {
];
}

// Build the user-facing "database not found" message for the given
// platform; macOS gets the list of checked locations, others a generic hint.
function getPlatformError(platform, candidates) {
  if (platform !== "darwin") {
    return "Cursor database not found. Make sure Cursor IDE is installed and you are logged in.";
  }

  const checkedLocations = candidates.join("\n");
  return `Cursor database not found in known macOS locations:\n${checkedLocations}\n\nMake sure Cursor IDE is installed and opened at least once.`;
}

/** Extract tokens using better-sqlite3 (stream-based, no RAM limit) */
function extractTokens(db) {
const desiredKeys = [...ACCESS_TOKEN_KEYS, ...MACHINE_ID_KEYS];
Expand Down Expand Up @@ -134,23 +142,31 @@ async function extractTokensViaCLI(dbPath) {
export async function GET() {
try {
const platform = process.platform;
if (!["darwin", "linux", "win32"].includes(platform)) {
return NextResponse.json({ found: false, error: "Unsupported platform" }, { status: 400 });
}

const candidates = getCandidatePaths(platform);

let dbPath = null;
for (const candidate of candidates) {
try {
await access(candidate, constants.R_OK);
dbPath = candidate;
break;
} catch {
// Try next candidate
if (platform === "darwin" || platform === "win32") {
for (const candidate of candidates) {
try {
await access(candidate, constants.R_OK);
dbPath = candidate;
break;
} catch {
// Try next candidate
}
}
} else {
[dbPath] = candidates;
}

if (!dbPath) {
return NextResponse.json({
found: false,
error: `Cursor database not found. Checked locations:\n${candidates.join("\n")}\n\nMake sure Cursor IDE is installed and opened at least once.`,
error: getPlatformError(platform, candidates),
});
}

Expand Down Expand Up @@ -178,8 +194,15 @@ export async function GET() {
if (tokens.accessToken && tokens.machineId) {
return NextResponse.json({ found: true, accessToken: tokens.accessToken, machineId: tokens.machineId });
}
} catch {
} catch (error) {
db?.close();

if (platform === "darwin") {
return NextResponse.json({
found: false,
error: `Cursor database found at ${dbPath}, but could not open it: ${error.message}`,
});
}
}
}

Expand All @@ -192,7 +215,21 @@ export async function GET() {
} catch { /* sqlite3 CLI not available */ }

// Strategy 3: ask user to paste manually
return NextResponse.json({ found: false, windowsManual: true, dbPath });
if (platform === "win32") {
return NextResponse.json({ found: false, windowsManual: true, dbPath });
}

if (platform === "linux") {
return NextResponse.json({
found: false,
error: getPlatformError(platform, candidates),
});
}

return NextResponse.json({
found: false,
error: "Please login to Cursor IDE first and then reopen Cursor before retrying auto-import.",
});
} catch (error) {
console.log("Cursor auto-import error:", error);
return NextResponse.json({ found: false, error: error.message }, { status: 500 });
Expand Down
23 changes: 12 additions & 11 deletions src/sse/handlers/chat.js
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,6 @@ import {
extractApiKey,
isValidApiKey,
} from "../services/auth.js";
import { getSettings } from "@/lib/localDb";
import { getModelInfo, getComboModels } from "../services/model.js";
import { handleChatCore } from "open-sse/handlers/chatCore.js";
import { errorResponse, unavailableResponse } from "open-sse/utils/error.js";
Expand All @@ -17,13 +16,15 @@ import { detectFormatByEndpoint } from "open-sse/translator/formats.js";
import * as log from "../utils/logger.js";
import { updateProviderCredentials, checkAndRefreshToken } from "../services/tokenRefresh.js";
import { getProjectIdForConnection } from "open-sse/services/projectId.js";
import { createRequestContext, getRequestSettings } from "../services/requestContext.js";

/**
* Handle chat completion request
* Supports: OpenAI, Claude, Gemini, OpenAI Responses API formats
* Format detection and translation handled by translator
*/
export async function handleChat(request, clientRawRequest = null) {
const requestContext = createRequestContext();
let body;
try {
body = await request.json();
Expand Down Expand Up @@ -63,7 +64,7 @@ export async function handleChat(request, clientRawRequest = null) {
}

// Enforce API key if enabled in settings
const settings = await getSettings();
const settings = await getRequestSettings(requestContext);
if (settings.requireApiKey) {
if (!apiKey) {
log.warn("AUTH", "Missing API key (requireApiKey=true)");
Expand All @@ -82,36 +83,36 @@ export async function handleChat(request, clientRawRequest = null) {
}

// Check if model is a combo (has multiple models with fallback)
const comboModels = await getComboModels(modelStr);
const comboModels = await getComboModels(modelStr, requestContext);
if (comboModels) {
log.info("CHAT", `Combo "${modelStr}" with ${comboModels.length} models`);
return handleComboChat({
body,
models: comboModels,
handleSingleModel: (b, m) => handleSingleModelChat(b, m, clientRawRequest, request, apiKey),
handleSingleModel: (b, m) => handleSingleModelChat(b, m, clientRawRequest, request, apiKey, requestContext),
log
});
}

// Single model request
return handleSingleModelChat(body, modelStr, clientRawRequest, request, apiKey);
return handleSingleModelChat(body, modelStr, clientRawRequest, request, apiKey, requestContext);
}

/**
* Handle single model chat request
*/
async function handleSingleModelChat(body, modelStr, clientRawRequest = null, request = null, apiKey = null) {
const modelInfo = await getModelInfo(modelStr);
async function handleSingleModelChat(body, modelStr, clientRawRequest = null, request = null, apiKey = null, requestContext = null) {
const modelInfo = await getModelInfo(modelStr, requestContext);

// If provider is null, this might be a combo name - check and handle
if (!modelInfo.provider) {
const comboModels = await getComboModels(modelStr);
const comboModels = await getComboModels(modelStr, requestContext);
if (comboModels) {
log.info("CHAT", `Combo "${modelStr}" with ${comboModels.length} models`);
return handleComboChat({
body,
models: comboModels,
handleSingleModel: (b, m) => handleSingleModelChat(b, m, clientRawRequest, request, apiKey),
handleSingleModel: (b, m) => handleSingleModelChat(b, m, clientRawRequest, request, apiKey, requestContext),
log
});
}
Expand All @@ -137,7 +138,7 @@ async function handleSingleModelChat(body, modelStr, clientRawRequest = null, re
let lastStatus = null;

while (true) {
const credentials = await getProviderCredentials(provider, excludeConnectionIds, model);
const credentials = await getProviderCredentials(provider, excludeConnectionId, model, requestContext);

// All accounts unavailable
if (!credentials || credentials.allRateLimited) {
Expand Down Expand Up @@ -171,7 +172,7 @@ async function handleSingleModelChat(body, modelStr, clientRawRequest = null, re
}

// Use shared chatCore
const chatSettings = await getSettings();
const chatSettings = await getRequestSettings(requestContext);
const result = await handleChatCore({
body: { ...body, model: `${provider}/${model}` },
modelInfo: { provider, model },
Expand Down
11 changes: 5 additions & 6 deletions src/sse/services/auth.js
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,7 @@ import { resolveConnectionProxyConfig } from "@/lib/network/connectionProxy";
import { formatRetryAfter, checkFallbackError, isModelLockActive, buildModelLockUpdate, getEarliestModelLockUntil } from "open-sse/services/accountFallback.js";
import { resolveProviderId } from "@/shared/constants/providers.js";
import * as log from "../utils/logger.js";
import { getRequestSettings } from "./requestContext.js";

// Mutex to prevent race conditions during account selection
let selectionMutex = Promise.resolve();
Expand All @@ -14,11 +15,7 @@ let selectionMutex = Promise.resolve();
* @param {Set<string>|string|null} excludeConnectionIds - Connection ID(s) to exclude (for retry with next account)
* @param {string|null} model - Model name for per-model rate limit filtering
*/
export async function getProviderCredentials(provider, excludeConnectionIds = null, model = null) {
// Normalize to Set for consistent handling
const excludeSet = excludeConnectionIds instanceof Set
? excludeConnectionIds
: (excludeConnectionIds ? new Set([excludeConnectionIds]) : new Set());
export async function getProviderCredentials(provider, excludeConnectionId = null, model = null, requestContext = null) {
// Acquire mutex to prevent race conditions
const currentMutex = selectionMutex;
let resolveMutex;
Expand Down Expand Up @@ -75,7 +72,9 @@ export async function getProviderCredentials(provider, excludeConnectionIds = nu
return null;
}

const settings = await getSettings();
const settings = requestContext
? await getRequestSettings(requestContext)
: await getSettings();
// Per-provider strategy overrides global setting
const providerOverride = (settings.providerStrategies || {})[providerId] || {};
const strategy = providerOverride.fallbackStrategy || settings.fallbackStrategy || "fill-first";
Expand Down
32 changes: 23 additions & 9 deletions src/sse/services/model.js
Original file line number Diff line number Diff line change
@@ -1,34 +1,41 @@
// Re-export from open-sse with localDb integration
import { getModelAliases, getComboByName, getProviderNodes } from "@/lib/localDb";
import { parseModel, resolveModelAliasFromMap, getModelInfoCore } from "open-sse/services/model.js";
import { getRequestComboByName, getRequestModelAliases, getRequestProviderNodes } from "./requestContext.js";

export { parseModel };

/**
* Resolve model alias from localDb
*/
export async function resolveModelAlias(alias, requestContext = null) {
  let aliases;
  if (requestContext) {
    // Per-request snapshot of aliases when a context is supplied.
    aliases = await getRequestModelAliases(requestContext);
  } else {
    // Fall back to the localDb-backed alias map.
    aliases = await getModelAliases();
  }
  return resolveModelAliasFromMap(alias, aliases);
}

/**
* Get full model info (parse or resolve)
*/
export async function getModelInfo(modelStr) {
export async function getModelInfo(modelStr, requestContext = null) {
const parsed = parseModel(modelStr);

if (!parsed.isAlias) {
if (parsed.provider === parsed.providerAlias) {
// Check OpenAI Compatible nodes
const openaiNodes = await getProviderNodes({ type: "openai-compatible" });
const openaiNodes = requestContext
? await getRequestProviderNodes("openai-compatible", requestContext)
: await getProviderNodes({ type: "openai-compatible" });
const matchedOpenAI = openaiNodes.find((node) => node.prefix === parsed.providerAlias);
if (matchedOpenAI) {
return { provider: matchedOpenAI.id, model: parsed.model };
}

// Check Anthropic Compatible nodes
const anthropicNodes = await getProviderNodes({ type: "anthropic-compatible" });
const anthropicNodes = requestContext
? await getRequestProviderNodes("anthropic-compatible", requestContext)
: await getProviderNodes({ type: "anthropic-compatible" });
const matchedAnthropic = anthropicNodes.find((node) => node.prefix === parsed.providerAlias);
if (matchedAnthropic) {
return { provider: matchedAnthropic.id, model: parsed.model };
Expand All @@ -42,25 +49,32 @@ export async function getModelInfo(modelStr) {

// Check if this is a combo name before resolving as alias
// This prevents combo names from being incorrectly routed to providers
const combo = await getComboByName(parsed.model);
const combo = requestContext
? await getRequestComboByName(parsed.model, requestContext)
: await getComboByName(parsed.model);
if (combo) {
// Return null provider to signal this should be handled as combo
// The caller (handleChat) will detect this and handle it as combo
return { provider: null, model: parsed.model };
}

return getModelInfoCore(modelStr, getModelAliases);
return getModelInfoCore(
modelStr,
requestContext ? () => getRequestModelAliases(requestContext) : getModelAliases,
);
}

/**
* Check if model is a combo and get models list
* @returns {Promise<string[]|null>} Array of models or null if not a combo
*/
export async function getComboModels(modelStr) {
export async function getComboModels(modelStr, requestContext = null) {
// Only check if it's not in provider/model format
if (modelStr.includes("/")) return null;

const combo = await getComboByName(modelStr);
const combo = requestContext
? await getRequestComboByName(modelStr, requestContext)
: await getComboByName(modelStr);
if (combo && combo.models && combo.models.length > 0) {
return combo.models;
}
Expand Down
Loading