Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
14 changes: 13 additions & 1 deletion console/src/locales/en.json
Original file line number Diff line number Diff line change
Expand Up @@ -106,6 +106,7 @@
"title": "MCP Clients",
"description": "Manage Model Context Protocol (MCP) clients for extending agent capabilities.",
"create": "Create Client",
"refreshStatus": "Refresh Status",
"formatSupport": "Supported formats",
"emptyState": "No MCP clients configured yet",
"loadError": "Failed to load MCP clients",
Expand All @@ -115,6 +116,12 @@
"updateError": "Failed to update MCP client",
"enableSuccess": "MCP client enabled successfully",
"disableSuccess": "MCP client disabled successfully",
"runtimeActive": "Connected",
"runtimeUnavailable": "Unavailable",
"runtimeUnknown": "Unknown",
"runtimeChecking": "Checking",
"runtimeQueued": "Queued",
"runtimeDisabled": "Disabled",
Comment on lines 109 to +124
"toggleError": "Failed to toggle MCP client status",
"deleteConfirm": "Are you sure you want to delete this MCP client?",
"deleteSuccess": "MCP client deleted successfully",
Expand Down Expand Up @@ -378,7 +385,12 @@
"openAICompatibleEndpoint": "OpenAI-compatible endpoint, e.g. https://api.example.com (append /v1 only if your provider requires it)",
"azureEndpointHint": "Azure OpenAI endpoint, e.g. https://<resource>.openai.azure.com/openai/v1",
"anthropicEndpointHint": "Anthropic endpoint, e.g. https://api.anthropic.com",
"ollamaEndpointHint": "Ollama endpoint, e.g. http://localhost:11434",
"ollamaEndpointHint": "Ollama endpoint, e.g. http://127.0.0.1:11434",
"ollamaQuickFillLabel": "Quick fill:",
"ollamaQuickFillApplied": "Filled endpoint: {{host}}",
"ollamaEnvHintLocal": "Detected local startup. Recommended: {{host}}",
"ollamaEnvHintDocker": "Detected containerized startup. Recommended: {{host}}",
"ollamaEnvHintRemote": "Detected remote startup. Usually use an address reachable by the backend. Recommended default: {{host}}",
"lmstudioEndpointHint": "LM Studio endpoint, e.g. http://localhost:1234/v1",
"apiEndpointHint": "API endpoint, e.g. https://api.example.com",
"pleaseEnterBaseURL": "Please enter the API base URL",
Expand Down
14 changes: 13 additions & 1 deletion console/src/locales/ja.json
Original file line number Diff line number Diff line change
Expand Up @@ -100,6 +100,7 @@
"title": "MCPクライアント",
"description": "エージェントの機能を拡張するためのMCP(Model Context Protocol)クライアントを管理します。",
"create": "クライアントを作成",
"refreshStatus": "状態を更新",
"formatSupport": "サポートされているフォーマット",
"emptyState": "MCPクライアントがまだ設定されていません",
"loadError": "MCPクライアントの読み込みに失敗しました",
Expand All @@ -109,6 +110,12 @@
"updateError": "MCPクライアントの更新に失敗しました",
"enableSuccess": "MCPクライアントを有効化しました",
"disableSuccess": "MCPクライアントを無効化しました",
"runtimeActive": "接続中",
"runtimeUnavailable": "未接続",
"runtimeUnknown": "状態不明",
"runtimeChecking": "確認中",
"runtimeQueued": "待機中",
"runtimeDisabled": "無効",
"toggleError": "MCPクライアントのステータス変更に失敗しました",
"deleteConfirm": "このMCPクライアントを削除してもよろしいですか?",
"deleteSuccess": "MCPクライアントを削除しました",
Expand Down Expand Up @@ -372,7 +379,12 @@
"openAICompatibleEndpoint": "OpenAI互換エンドポイント、例: https://api.example.com(プロバイダーが必要とする場合のみ /v1 を追加)",
"azureEndpointHint": "Azure OpenAIエンドポイント、例: https://<resource>.openai.azure.com/openai/v1",
"anthropicEndpointHint": "Anthropicエンドポイント、例: https://api.anthropic.com",
"ollamaEndpointHint": "Ollamaエンドポイント、例: http://localhost:11434",
"ollamaEndpointHint": "Ollamaエンドポイント、例: http://127.0.0.1:11434",
"ollamaQuickFillLabel": "クイック入力:",
"ollamaQuickFillApplied": "エンドポイントを入力しました: {{host}}",
"ollamaEnvHintLocal": "ローカル起動環境を検出しました。推奨: {{host}}",
"ollamaEnvHintDocker": "コンテナ起動環境を検出しました。推奨: {{host}}",
"ollamaEnvHintRemote": "リモート起動環境を検出しました。通常はバックエンドから到達可能なアドレスを使用してください。既定の推奨: {{host}}",
"lmstudioEndpointHint": "LM Studioエンドポイント、例: http://localhost:1234/v1",
"apiEndpointHint": "APIエンドポイント、例: https://api.example.com",
"pleaseEnterBaseURL": "APIベースURLを入力してください",
Expand Down
14 changes: 13 additions & 1 deletion console/src/locales/ru.json
Original file line number Diff line number Diff line change
Expand Up @@ -105,6 +105,7 @@
"title": "MCP-клиенты",
"description": "Управляйте клиентами Model Context Protocol (MCP) для расширения возможностей агента.",
"create": "Создать клиент",
"refreshStatus": "Обновить статус",
"formatSupport": "Поддерживаемые форматы",
"emptyState": "MCP-клиенты пока не настроены",
"loadError": "Не удалось загрузить MCP-клиентов",
Expand All @@ -114,6 +115,12 @@
"updateError": "Не удалось обновить MCP-клиент",
"enableSuccess": "MCP-клиент успешно включён",
"disableSuccess": "MCP-клиент успешно отключён",
"runtimeActive": "Подключён",
"runtimeUnavailable": "Недоступен",
"runtimeUnknown": "Статус неизвестен",
"runtimeChecking": "Проверяется",
"runtimeQueued": "В очереди",
"runtimeDisabled": "Отключён",
"toggleError": "Не удалось переключить статус MCP-клиента",
"deleteConfirm": "Вы уверены, что хотите удалить этот MCP-клиент?",
"deleteSuccess": "MCP-клиент успешно удалён",
Expand Down Expand Up @@ -377,7 +384,12 @@
"openAICompatibleEndpoint": "OpenAI-совместимый endpoint, например https://api.example.com (добавляйте /v1 только если ваш провайдер этого требует)",
"azureEndpointHint": "Azure OpenAI endpoint, например https://<resource>.openai.azure.com/openai/v1",
"anthropicEndpointHint": "Anthropic endpoint, например https://api.anthropic.com",
"ollamaEndpointHint": "Ollama endpoint, например http://localhost:11434",
"ollamaEndpointHint": "Ollama endpoint, например http://127.0.0.1:11434",
"ollamaQuickFillLabel": "Быстрая вставка:",
"ollamaQuickFillApplied": "Endpoint заполнен: {{host}}",
"ollamaEnvHintLocal": "Обнаружен локальный запуск. Рекомендуется: {{host}}",
"ollamaEnvHintDocker": "Обнаружен запуск в контейнере. Рекомендуется: {{host}}",
"ollamaEnvHintRemote": "Обнаружен удаленный запуск. Обычно используйте адрес, доступный для backend. Рекомендуемый вариант по умолчанию: {{host}}",
"lmstudioEndpointHint": "LM Studio endpoint, например http://localhost:1234/v1",
"apiEndpointHint": "API endpoint, например https://api.example.com",
"pleaseEnterBaseURL": "Пожалуйста, введите базовый URL API",
Expand Down
14 changes: 13 additions & 1 deletion console/src/locales/zh.json
Original file line number Diff line number Diff line change
Expand Up @@ -106,6 +106,7 @@
"title": "MCP 客户端",
"description": "管理模型上下文协议(MCP)客户端以扩展智能体能力。",
"create": "创建客户端",
"refreshStatus": "更新状态",
"formatSupport": "支持的格式",
"emptyState": "暂无配置的 MCP 客户端",
"loadError": "加载 MCP 客户端失败",
Expand All @@ -115,6 +116,12 @@
"updateError": "MCP 客户端更新失败",
"enableSuccess": "MCP 客户端启用成功",
"disableSuccess": "MCP 客户端禁用成功",
"runtimeActive": "可连通",
"runtimeUnavailable": "未连接",
"runtimeUnknown": "状态未知",
"runtimeChecking": "检测中",
"runtimeQueued": "等待检测",
"runtimeDisabled": "未启用",
"toggleError": "切换 MCP 客户端状态失败",
"deleteConfirm": "确定要删除此 MCP 客户端吗?",
"deleteSuccess": "MCP 客户端删除成功",
Expand Down Expand Up @@ -378,7 +385,12 @@
"openAICompatibleEndpoint": "OpenAI 兼容端点,例如 https://api.example.com(仅在你的服务要求时再追加 /v1)",
"azureEndpointHint": "Azure OpenAI 端点,例如 https://<resource>.openai.azure.com/openai/v1",
"anthropicEndpointHint": "Anthropic 端点,例如 https://api.anthropic.com",
"ollamaEndpointHint": "Ollama 端点,例如 http://localhost:11434",
"ollamaEndpointHint": "Ollama 端点,例如 http://127.0.0.1:11434",
"ollamaQuickFillLabel": "一键填入:",
"ollamaQuickFillApplied": "已填入地址:{{host}}",
"ollamaEnvHintLocal": "已检测到本地启动环境,推荐:{{host}}",
"ollamaEnvHintDocker": "已检测到容器化启动环境,推荐:{{host}}",
"ollamaEnvHintRemote": "已检测到远程启动环境,请优先使用后端可达地址。默认推荐:{{host}}",
"lmstudioEndpointHint": "LM Studio 端点,例如 http://localhost:1234/v1",
"apiEndpointHint": "API 端点,例如 https://api.example.com",
"pleaseEnterBaseURL": "请输入 API 基础 URL",
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -9,7 +9,10 @@ import {
Select,
} from "@agentscope-ai/design";
import { ApiOutlined, DownOutlined, RightOutlined } from "@ant-design/icons";
import type { ProviderConfigRequest } from "../../../../../api/types";
import type {
ActiveModelsInfo,
ProviderConfigRequest,
} from "../../../../../api/types";
import api from "../../../../../api";
import { useTranslation } from "react-i18next";
import styles from "../../index.module.less";
Expand All @@ -29,7 +32,7 @@ interface JsonCodeEditorProps {
function highlightJson(text: string): ReactNode[] {
const tokens: ReactNode[] = [];
const pattern =
/("(?:\\.|[^"\\])*")(\s*:)?|\btrue\b|\bfalse\b|\bnull\b|-?\d+(?:\.\d+)?(?:[eE][+-]?\d+)?|[{}\[\],:]/g;
/("(?:\\.|[^"\\])*")(\s*:)?|\btrue\b|\bfalse\b|\bnull\b|-?\d+(?:\.\d+)?(?:[eE][+-]?\d+)?|[[\]{}:,]/g;

let lastIndex = 0;
let match: RegExpExecArray | null;
Expand Down Expand Up @@ -253,7 +256,7 @@ interface ProviderConfigModalProps {
chat_model: string;
generate_kwargs: Record<string, unknown>;
};
activeModels: any;
activeModels: ActiveModelsInfo;
open: boolean;
onClose: () => void;
onSaved: () => void;
Expand All @@ -271,10 +274,58 @@ export function ProviderConfigModal({
const [testing, setTesting] = useState(false);
const [formDirty, setFormDirty] = useState(false);
const [advancedOpen, setAdvancedOpen] = useState(false);
const [baseUrlEditedByUser, setBaseUrlEditedByUser] = useState(false);
const [form] = Form.useForm<ProviderConfigFormValues>();
const selectedChatModel = Form.useWatch("chat_model", form);
const currentBaseUrl = Form.useWatch("base_url", form);
const canEditBaseUrl = !provider.freeze_url;

  // Candidate Ollama endpoints offered as one-click "quick fill" buttons.
  // Memoized with an empty dependency list so the array keeps a stable
  // identity across renders (it is consumed by downstream useMemo hooks).
  const ollamaHostOptions = useMemo(
    () => [
      {
        // Loopback by IP — matches the recommended default endpoint.
        label: "127.0.0.1",
        url: "http://127.0.0.1:11434",
      },
      {
        // Loopback by name; equivalent on most setups.
        label: "localhost",
        url: "http://localhost:11434",
      },
      {
        // Docker's host alias — for a containerized console reaching an
        // Ollama instance running on the container host.
        label: "host.docker.internal",
        url: "http://host.docker.internal:11434",
      },
    ],
    [],
  );

  // Picks which Ollama endpoint to recommend (and which i18n hint to show)
  // based on the hostname the console is being served from.
  // NOTE(review): this inspects the BROWSER'S location, but the process that
  // actually dials Ollama is the backend — the two can differ (e.g. console
  // behind a proxy). Confirm the heuristic matches backend reachability.
  const ollamaRecommendation = useMemo(() => {
    // Default: assume a local, same-machine setup.
    const defaultOption = {
      hintKey: "models.ollamaEnvHintLocal",
      recommendedUrl: "http://127.0.0.1:11434",
    };

    // Non-browser environment (e.g. SSR): no location to inspect.
    if (typeof window === "undefined") {
      return defaultOption;
    }

    const hostname = window.location.hostname.toLowerCase();
    // Hostname suggests a containerized deployment — recommend the Docker
    // host alias. NOTE(review): includes("docker") is a loose match and may
    // misfire on unrelated hostnames containing "docker"; verify intent.
    if (hostname === "host.docker.internal" || hostname.includes("docker")) {
      return {
        hintKey: "models.ollamaEnvHintDocker",
        recommendedUrl: "http://host.docker.internal:11434",
      };
    }

    // Console opened via loopback: treat as a local setup.
    if (hostname === "localhost" || hostname === "127.0.0.1") {
      return defaultOption;
    }

    // Anything else is treated as remote; still default to the backend's
    // loopback address per the "ollamaEnvHintRemote" guidance.
    return {
      hintKey: "models.ollamaEnvHintRemote",
      recommendedUrl: "http://127.0.0.1:11434",
    };
  }, []);
Comment on lines +283 to +327
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

medium

To improve maintainability and reduce magic strings, you could extract the hardcoded Ollama URLs into constants. This would make them easier to update in the future and prevent potential inconsistencies between ollamaHostOptions and ollamaRecommendation.


const parseGenerateConfig = (value?: string) => {
const trimmed = value?.trim();
if (!trimmed) {
Expand Down Expand Up @@ -312,6 +363,19 @@ export function ProviderConfigModal({
return t("models.enterApiKeyOptional");
}, [provider.api_key, provider.api_key_prefix, t]);

  // Whether to render the recommended quick-fill button as "primary".
  // Only steer the user for the Ollama provider, and only while the base URL
  // is effectively untouched: no manual edits, no persisted value on the
  // provider, and an empty form field.
  const shouldHighlightOllamaRecommendation = useMemo(() => {
    if (provider.id !== "ollama") {
      return false;
    }
    // The user has typed in the field — stop highlighting.
    if (baseUrlEditedByUser) {
      return false;
    }
    // A base URL is already saved for this provider.
    if (provider.base_url?.trim()) {
      return false;
    }
    // Highlight only while the watched form value is still blank.
    return !(currentBaseUrl || "").trim();
  }, [provider.id, provider.base_url, currentBaseUrl, baseUrlEditedByUser]);

const baseUrlExtra = useMemo(() => {
if (!canEditBaseUrl) {
return undefined;
Expand All @@ -326,7 +390,42 @@ export function ProviderConfigModal({
return t("models.openAIEndpoint");
}
if (provider.id === "ollama") {
return t("models.ollamaEndpointHint");
return (
<div className={styles.endpointHintBlock}>
<div>{t("models.ollamaEndpointHint")}</div>
<div className={styles.endpointQuickFillRow}>
<span className={styles.endpointQuickFillLabel}>
{t("models.ollamaQuickFillLabel")}
</span>
{ollamaHostOptions.map((option) => (
<Button
key={option.url}
size="small"
type={
shouldHighlightOllamaRecommendation &&
option.url === ollamaRecommendation.recommendedUrl
? "primary"
: "default"
}
onClick={() => {
form.setFieldsValue({ base_url: option.url });
setFormDirty(true);
message.success(
t("models.ollamaQuickFillApplied", { host: option.url }),
);
}}
>
{option.label}
</Button>
))}
</div>
<div className={styles.endpointRecommendationText}>
{t(ollamaRecommendation.hintKey, {
host: ollamaRecommendation.recommendedUrl,
})}
</div>
</div>
);
}
if (provider.id === "lmstudio") {
return t("models.lmstudioEndpointHint");
Expand All @@ -337,7 +436,17 @@ export function ProviderConfigModal({
: t("models.openAICompatibleEndpoint");
}
return t("models.apiEndpointHint");
}, [canEditBaseUrl, provider.id, provider.is_custom, effectiveChatModel, t]);
}, [
canEditBaseUrl,
provider.id,
provider.is_custom,
effectiveChatModel,
t,
ollamaHostOptions,
ollamaRecommendation,
shouldHighlightOllamaRecommendation,
form,
]);

const baseUrlPlaceholder = useMemo(() => {
if (!canEditBaseUrl) {
Expand All @@ -353,7 +462,7 @@ export function ProviderConfigModal({
return "https://api.openai.com/v1";
}
if (provider.id === "ollama") {
return "http://localhost:11434";
return "http://127.0.0.1:11434";
}
if (provider.id === "lmstudio") {
return "http://localhost:1234/v1";
Expand All @@ -379,6 +488,7 @@ export function ProviderConfigModal({
});
setAdvancedOpen(false);
setFormDirty(false);
setBaseUrlEditedByUser(false);
}
}, [provider, form, open]);

Expand Down Expand Up @@ -614,7 +724,11 @@ export function ProviderConfigModal({
}
extra={baseUrlExtra}
>
<Input placeholder={baseUrlPlaceholder} disabled={!canEditBaseUrl} />
<Input
placeholder={baseUrlPlaceholder}
disabled={!canEditBaseUrl}
onChange={() => setBaseUrlEditedByUser(true)}
/>
</Form.Item>

{/* API Key */}
Expand Down
23 changes: 23 additions & 0 deletions console/src/pages/Settings/Models/index.module.less
Original file line number Diff line number Diff line change
Expand Up @@ -236,6 +236,29 @@
gap: 8px;
}

// Container for the Ollama endpoint hint: stacks the hint text, the
// quick-fill button row, and the recommendation note vertically.
.endpointHintBlock {
  display: flex;
  flex-direction: column;
  gap: 8px;
}

// Horizontal row of quick-fill host buttons; wraps on narrow modals.
.endpointQuickFillRow {
  display: flex;
  align-items: center;
  gap: 8px;
  flex-wrap: wrap;
}

// "Quick fill:" label preceding the buttons.
.endpointQuickFillLabel {
  color: #666;
  font-size: 12px;
}

// Muted, environment-based recommendation line under the buttons.
.endpointRecommendationText {
  color: #8c8c8c;
  font-size: 12px;
}

.advancedConfigSection {
margin-top: 4px;
border-top: 1px solid #f0f0f0;
Expand Down