Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
48 changes: 46 additions & 2 deletions install/hiclaw-install.ps1
Original file line number Diff line number Diff line change
Expand Up @@ -261,7 +261,8 @@ $script:Messages = @{
"llm.providers_title" = @{ zh = "可用 LLM 提供商:"; en = "Available LLM Providers:" }
"llm.provider.alibaba" = @{ zh = " 1) 阿里云百炼 - 推荐中国用户使用"; en = " 1) Alibaba Cloud Bailian - Recommended for Chinese users" }
"llm.provider.openai_compat" = @{ zh = " 2) OpenAI 兼容 API - 自定义 Base URL(OpenAI、DeepSeek 等)"; en = " 2) OpenAI-compatible API - Custom Base URL (OpenAI, DeepSeek, etc.)" }
"llm.provider.select" = @{ zh = "选择提供商 [1/2]"; en = "Select provider [1/2]" }
"llm.provider.minimax" = @{ zh = " 3) MiniMax - MiniMax M2.5 系列模型"; en = " 3) MiniMax - MiniMax M2.5 series models" }
"llm.provider.select" = @{ zh = "选择提供商 [1/2/3]"; en = "Select provider [1/2/3]" }
"llm.alibaba.models_title" = @{ zh = "选择百炼模型系列:"; en = "Select Bailian model series:" }
"llm.alibaba.model.codingplan" = @{ zh = " 1) CodingPlan - 专为编程任务优化(推荐)"; en = " 1) CodingPlan - Optimized for coding tasks (recommended)" }
"llm.alibaba.model.qwen" = @{ zh = " 2) 百炼通用接口"; en = " 2) qwen general - General purpose LLM" }
Expand All @@ -272,10 +273,17 @@ $script:Messages = @{
"llm.codingplan.model.kimi" = @{ zh = " 3) kimi-k2.5 - Moonshot Kimi K2.5"; en = " 3) kimi-k2.5 - Moonshot Kimi K2.5" }
"llm.codingplan.model.minimax" = @{ zh = " 4) MiniMax-M2.5 - MiniMax M2.5"; en = " 4) MiniMax-M2.5 - MiniMax M2.5" }
"llm.codingplan.model.select" = @{ zh = "选择模型 [1/2/3/4]"; en = "Select model [1/2/3/4]" }
"llm.provider.selected_minimax" = @{ zh = " 提供商: MiniMax"; en = " Provider: MiniMax" }
"llm.minimax.models_title" = @{ zh = "选择 MiniMax 默认模型:"; en = "Select MiniMax default model:" }
"llm.minimax.model.m25" = @{ zh = " 1) MiniMax-M2.5 - Peak Performance(推荐)"; en = " 1) MiniMax-M2.5 - Peak Performance (recommended)" }
"llm.minimax.model.m25hs" = @{ zh = " 2) MiniMax-M2.5-highspeed - Same performance, faster"; en = " 2) MiniMax-M2.5-highspeed - Same performance, faster" }
"llm.minimax.model.select" = @{ zh = "选择模型 [1/2]"; en = "Select model [1/2]" }
"llm.minimax.apikey_hint" = @{ zh = " 💡 获取 MiniMax API Key:"; en = " 💡 Get your MiniMax API Key from:" }
"llm.minimax.apikey_url" = @{ zh = " https://platform.minimax.io"; en = " https://platform.minimax.io" }
"llm.provider.selected_codingplan" = @{ zh = " 提供商: 阿里云百炼 CodingPlan"; en = " Provider: Alibaba Cloud Bailian CodingPlan" }
"llm.provider.selected_qwen" = @{ zh = " 提供商: 阿里云百炼"; en = " Provider: Alibaba Cloud Bailian" }
"llm.provider.selected_openai" = @{ zh = " 提供商: {0}(OpenAI 兼容)"; en = " Provider: {0} (OpenAI-compatible)" }
"llm.provider.invalid" = @{ zh = "无效选择: {0}(请输入 1 或 2)"; en = "Invalid choice: {0} (please enter 1 or 2)" }
"llm.provider.invalid" = @{ zh = "无效选择: {0}(请输入 1、23)"; en = "Invalid choice: {0} (please enter 1, 2, or 3)" }
"llm.qwen.model_prompt" = @{ zh = "默认模型 ID [qwen3.5-plus]"; en = "Default Model ID [qwen3.5-plus]" }
"llm.openai.base_url_prompt" = @{ zh = "Base URL(例如 https://api.openai.com/v1)"; en = "Base URL (e.g., https://api.openai.com/v1)" }
"llm.openai.model_prompt" = @{ zh = "默认模型 ID [gpt-5.4]"; en = "Default Model ID [gpt-5.4]" }
Expand Down Expand Up @@ -1403,6 +1411,7 @@ function Install-Manager {
Write-Host (Get-Msg "llm.providers_title")
Write-Host (Get-Msg "llm.provider.alibaba")
Write-Host (Get-Msg "llm.provider.openai_compat")
Write-Host (Get-Msg "llm.provider.minimax")
Write-Host ""

if ($script:HICLAW_QUICKSTART) {
Expand Down Expand Up @@ -1504,6 +1513,41 @@ function Install-Manager {
$config.LLM_API_KEY = Read-Prompt -VarName "HICLAW_LLM_API_KEY" -PromptText (Get-Msg "llm.apikey_prompt") -Secret
Test-LlmConnectivity -BaseUrl $config.OPENAI_BASE_URL -ApiKey $config.LLM_API_KEY -Model $config.DEFAULT_MODEL
}
"^(3|minimax)$" {
$config.LLM_PROVIDER = "minimax"
$config.OPENAI_BASE_URL = "https://api.minimax.io/v1"
Write-Log (Get-Msg "llm.provider.selected_minimax")

# Sub-menu: Select MiniMax model
Write-Host ""
Write-Host (Get-Msg "llm.minimax.models_title")
Write-Host (Get-Msg "llm.minimax.model.m25")
Write-Host (Get-Msg "llm.minimax.model.m25hs")
Write-Host ""

$minimaxModelChoice = Read-Host "$(Get-Msg 'llm.minimax.model.select') [1]"
$minimaxModelChoice = if ($minimaxModelChoice) { $minimaxModelChoice } else { "1" }

switch -Regex ($minimaxModelChoice) {
"^(1|MiniMax-M2\.5)$" {
$config.DEFAULT_MODEL = "MiniMax-M2.5"
}
"^(2|MiniMax-M2\.5-highspeed)$" {
$config.DEFAULT_MODEL = "MiniMax-M2.5-highspeed"
}
default {
$config.DEFAULT_MODEL = "MiniMax-M2.5"
}
}

Write-Log (Get-Msg "llm.model.label" -f $config.DEFAULT_MODEL)
Write-Log ""
Write-Log (Get-Msg "llm.minimax.apikey_hint")
Write-Log (Get-Msg "llm.minimax.apikey_url")
Write-Log ""
$config.LLM_API_KEY = Read-Prompt -VarName "HICLAW_LLM_API_KEY" -PromptText (Get-Msg "llm.apikey_prompt") -Secret
Test-LlmConnectivity -BaseUrl $config.OPENAI_BASE_URL -ApiKey $config.LLM_API_KEY -Model $config.DEFAULT_MODEL
}
default {
Write-Error (Get-Msg "llm.provider.invalid" -f $providerChoice)
}
Expand Down
59 changes: 55 additions & 4 deletions install/hiclaw-install.sh
100644 → 100755
Original file line number Diff line number Diff line change
Expand Up @@ -296,8 +296,10 @@ msg() {
"llm.provider.alibaba.en") text=" 1) Alibaba Cloud Bailian - Recommended for Chinese users" ;;
"llm.provider.openai_compat.zh") text=" 2) OpenAI 兼容 API - 自定义 Base URL(OpenAI、DeepSeek 等)" ;;
"llm.provider.openai_compat.en") text=" 2) OpenAI-compatible API - Custom Base URL (OpenAI, DeepSeek, etc.)" ;;
"llm.provider.select.zh") text="选择提供商 [1/2]" ;;
"llm.provider.select.en") text="Select provider [1/2]" ;;
"llm.provider.minimax.zh") text=" 3) MiniMax - MiniMax M2.5 系列模型" ;;
"llm.provider.minimax.en") text=" 3) MiniMax - MiniMax M2.5 series models" ;;
"llm.provider.select.zh") text="选择提供商 [1/2/3]" ;;
"llm.provider.select.en") text="Select provider [1/2/3]" ;;
"llm.alibaba.models_title.zh") text="选择百炼模型系列:" ;;
"llm.alibaba.models_title.en") text="Select Bailian model series:" ;;
"llm.alibaba.model.codingplan.zh") text=" 1) CodingPlan - 专为编程任务优化(推荐)" ;;
Expand All @@ -318,14 +320,28 @@ msg() {
"llm.codingplan.model.minimax.en") text=" 4) MiniMax-M2.5 - MiniMax M2.5" ;;
"llm.codingplan.model.select.zh") text="选择模型 [1/2/3/4]" ;;
"llm.codingplan.model.select.en") text="Select model [1/2/3/4]" ;;
"llm.provider.selected_minimax.zh") text=" 提供商: MiniMax" ;;
"llm.provider.selected_minimax.en") text=" Provider: MiniMax" ;;
"llm.minimax.models_title.zh") text="选择 MiniMax 默认模型:" ;;
"llm.minimax.models_title.en") text="Select MiniMax default model:" ;;
"llm.minimax.model.m25.zh") text=" 1) MiniMax-M2.5 - Peak Performance(推荐)" ;;
"llm.minimax.model.m25.en") text=" 1) MiniMax-M2.5 - Peak Performance (recommended)" ;;
"llm.minimax.model.m25hs.zh") text=" 2) MiniMax-M2.5-highspeed - Same performance, faster" ;;
"llm.minimax.model.m25hs.en") text=" 2) MiniMax-M2.5-highspeed - Same performance, faster" ;;
"llm.minimax.model.select.zh") text="选择模型 [1/2]" ;;
"llm.minimax.model.select.en") text="Select model [1/2]" ;;
"llm.minimax.apikey_hint.zh") text=" 💡 获取 MiniMax API Key:" ;;
"llm.minimax.apikey_hint.en") text=" 💡 Get your MiniMax API Key from:" ;;
"llm.minimax.apikey_url.zh") text=" https://platform.minimax.io" ;;
"llm.minimax.apikey_url.en") text=" https://platform.minimax.io" ;;
"llm.provider.selected_codingplan.zh") text=" 提供商: 阿里云百炼 CodingPlan" ;;
"llm.provider.selected_codingplan.en") text=" Provider: Alibaba Cloud Bailian CodingPlan" ;;
"llm.provider.selected_qwen.zh") text=" 提供商: 阿里云百炼" ;;
"llm.provider.selected_qwen.en") text=" Provider: Alibaba Cloud Bailian" ;;
"llm.provider.selected_openai.zh") text=" 提供商: %s(OpenAI 兼容)" ;;
"llm.provider.selected_openai.en") text=" Provider: %s (OpenAI-compatible)" ;;
"llm.provider.invalid.zh") text="无效选择: %s(请输入 1 或 2)" ;;
"llm.provider.invalid.en") text="Invalid choice: %s (please enter 1 or 2)" ;;
"llm.provider.invalid.zh") text="无效选择: %s(请输入 1、23)" ;;
"llm.provider.invalid.en") text="Invalid choice: %s (please enter 1, 2, or 3)" ;;
"llm.qwen.model_prompt.zh") text="默认模型 ID [qwen3.5-plus]" ;;
"llm.qwen.model_prompt.en") text="Default Model ID [qwen3.5-plus]" ;;
"llm.openai.base_url_prompt.zh") text="Base URL(例如 https://api.openai.com/v1)" ;;
Expand Down Expand Up @@ -1328,6 +1344,7 @@ install_manager() {
echo "$(msg llm.providers_title)"
echo "$(msg llm.provider.alibaba)"
echo "$(msg llm.provider.openai_compat)"
echo "$(msg llm.provider.minimax)"
echo ""
if [ "${HICLAW_QUICKSTART}" = "1" ]; then
read -e -p "$(msg llm.provider.select) [1]: " PROVIDER_CHOICE
Expand Down Expand Up @@ -1430,6 +1447,40 @@ install_manager() {
prompt HICLAW_LLM_API_KEY "$(msg llm.apikey_prompt)" "" "true"
test_llm_connectivity "${HICLAW_OPENAI_BASE_URL}" "${HICLAW_LLM_API_KEY}" "${HICLAW_DEFAULT_MODEL}"
;;
3|minimax)
# Provider option 3: MiniMax, reached through its OpenAI-compatible endpoint.
HICLAW_LLM_PROVIDER="minimax"
HICLAW_OPENAI_BASE_URL="https://api.minimax.io/v1"
log "$(msg llm.provider.selected_minimax)"

# Sub-menu: Select MiniMax model (plain Enter keeps the default, option 1).
echo ""
echo "$(msg llm.minimax.models_title)"
echo "$(msg llm.minimax.model.m25)"
echo "$(msg llm.minimax.model.m25hs)"
echo ""
read -e -p "$(msg llm.minimax.model.select) [1]: " MINIMAX_MODEL_CHOICE
MINIMAX_MODEL_CHOICE="${MINIMAX_MODEL_CHOICE:-1}"

# Only an explicit "2" or the full high-speed model id selects the
# high-speed variant; every other answer resolves to the standard model.
if [ "${MINIMAX_MODEL_CHOICE}" = "2" ] || [ "${MINIMAX_MODEL_CHOICE}" = "MiniMax-M2.5-highspeed" ]; then
HICLAW_DEFAULT_MODEL="MiniMax-M2.5-highspeed"
else
HICLAW_DEFAULT_MODEL="MiniMax-M2.5"
fi

log "$(msg llm.model.label "${HICLAW_DEFAULT_MODEL}")"
log ""
# Point the user at the key console before prompting.
log "$(msg llm.minimax.apikey_hint)"
log "$(msg llm.minimax.apikey_url)"
log ""
# Secret prompt for the key, then verify endpoint + key + model.
prompt HICLAW_LLM_API_KEY "$(msg llm.apikey_prompt)" "" "true"
test_llm_connectivity "${HICLAW_OPENAI_BASE_URL}" "${HICLAW_LLM_API_KEY}" "${HICLAW_DEFAULT_MODEL}"
;;
*)
error "$(msg llm.provider.invalid "${PROVIDER_CHOICE}")"
;;
Expand Down
2 changes: 1 addition & 1 deletion manager/agent/skills/model-switch/SKILL.md
Original file line number Diff line number Diff line change
Expand Up @@ -54,7 +54,7 @@ No changes are made to `openclaw.json` in this case.
| claude-haiku-4-5 | 200,000 | 64,000 |
| qwen3.5-plus | 200,000 | 64,000 |
| deepseek-chat / deepseek-reasoner / kimi-k2.5 | 256,000 | 128,000 |
| glm-5 / MiniMax-M2.5 | 200,000 | 128,000 |
| glm-5 / MiniMax-M2.5 / MiniMax-M2.5-highspeed | 200,000 | 128,000 |
| *(other)* | 150,000 | 128,000 |

## Switching to an unknown model
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -65,7 +65,7 @@ case "${MODEL_NAME}" in
CTX=200000; MAX=64000 ;;
deepseek-chat|deepseek-reasoner|kimi-k2.5)
CTX=256000; MAX=128000 ;;
glm-5|MiniMax-M2.5)
glm-5|MiniMax-M2.5|MiniMax-M2.5-highspeed)
CTX=200000; MAX=128000 ;;
*)
CTX=150000; MAX=128000 ;;
Expand Down
2 changes: 1 addition & 1 deletion manager/agent/skills/worker-management/scripts/generate-worker-config.sh
100644 → 100755
Original file line number Diff line number Diff line change
Expand Up @@ -44,7 +44,7 @@ case "${MODEL_NAME}" in
CTX=200000; MAX=64000 ;;
deepseek-chat|deepseek-reasoner|kimi-k2.5)
CTX=256000; MAX=128000 ;;
glm-5|MiniMax-M2.5)
glm-5|MiniMax-M2.5|MiniMax-M2.5-highspeed)
CTX=200000; MAX=128000 ;;
*)
CTX=150000; MAX=128000 ;;
Expand Down
2 changes: 1 addition & 1 deletion manager/agent/skills/worker-model-switch/SKILL.md
Original file line number Diff line number Diff line change
Expand Up @@ -60,7 +60,7 @@ No changes are made to `openclaw.json` in this case.
| claude-haiku-4-5 | 200,000 | 64,000 |
| qwen3.5-plus | 200,000 | 64,000 |
| deepseek-chat / deepseek-reasoner / kimi-k2.5 | 256,000 | 128,000 |
| glm-5 / MiniMax-M2.5 | 200,000 | 128,000 |
| glm-5 / MiniMax-M2.5 / MiniMax-M2.5-highspeed | 200,000 | 128,000 |
| *(other)* | 150,000 | 128,000 |

## Switching to an unknown model
Expand Down
2 changes: 1 addition & 1 deletion manager/agent/skills/worker-model-switch/scripts/update-worker-model.sh
100644 → 100755
Original file line number Diff line number Diff line change
Expand Up @@ -41,7 +41,7 @@ _resolve_model_params() {
CTX=200000; MAX=64000 ;;
deepseek-chat|deepseek-reasoner|kimi-k2.5)
CTX=256000; MAX=128000 ;;
glm-5|MiniMax-M2.5)
glm-5|MiniMax-M2.5|MiniMax-M2.5-highspeed)
CTX=200000; MAX=128000 ;;
*)
CTX=150000; MAX=128000 ;;
Expand Down
28 changes: 28 additions & 0 deletions manager/scripts/init/setup-higress.sh
Original file line number Diff line number Diff line change
Expand Up @@ -178,6 +178,34 @@ if [ -n "${HICLAW_LLM_API_KEY}" ]; then
fi
fi
;;
minimax)
# Resolve the MiniMax endpoint (operator override wins over the default)
# and split it into protocol / host / port for the Higress DNS service
# source registered below.
MINIMAX_BASE_URL="${HICLAW_OPENAI_BASE_URL:-https://api.minimax.io/v1}"
# Defaults assume HTTPS on 443; downgraded for explicit http:// URLs.
# (case pattern matches replace the previous `echo | grep -q` pipelines:
# same result, no subshell forks.)
MM_PROTO="https"
MM_PORT="443"
case "${MINIMAX_BASE_URL}" in
http://*) MM_PROTO="http"; MM_PORT="80" ;;
esac
# Strip the scheme, then drop everything after the first "/" to isolate
# the authority (host[:port]).
MM_URL_STRIP="${MINIMAX_BASE_URL#https://}"
MM_URL_STRIP="${MM_URL_STRIP#http://}"
MM_DOMAIN="${MM_URL_STRIP%%/*}"
# An explicit ":port" in the authority overrides the scheme default.
# (IPv6 literal hosts are not supported here — same as before.)
case "${MM_DOMAIN}" in
*:*) MM_PORT="${MM_DOMAIN##*:}"; MM_DOMAIN="${MM_DOMAIN%:*}" ;;
esac

# Register the upstream host with Higress as a DNS-type service source.
# Idempotent: probe with GET, then PUT (update) when the resource already
# exists, POST (create) when it does not.
# Service source: GET → PUT if exists, POST if not
existing_svc=$(higress_get /v1/service-sources/minimax)
# NOTE(review): JSON is assembled by shell interpolation. MM_DOMAIN/MM_PORT
# derive from the base URL, so a quote character there would corrupt the
# payload — assumed benign for URLs, and consistent with the other arms.
SVC_BODY='{"type":"dns","name":"minimax","port":'"${MM_PORT}"',"protocol":"'"${MM_PROTO}"'","proxyName":"","domain":"'"${MM_DOMAIN}"'"}'
if [ -n "${existing_svc}" ]; then
higress_api PUT /v1/service-sources/minimax "Updating MiniMax DNS service source" "${SVC_BODY}"
else
higress_api POST /v1/service-sources "Registering MiniMax DNS service source" "${SVC_BODY}"
fi

# Create or update the Higress AI provider fronting MiniMax over the
# OpenAI-compatible protocol; openaiCustomServiceName "minimax.dns" refers
# to the DNS service source registered above.
# NOTE(review): HICLAW_LLM_API_KEY is interpolated into the JSON body —
# a key containing '"' or '\' would break the payload; confirm upstream
# input validation, same caveat as the generic provider arm.
PROVIDER_BODY='{"type":"openai","name":"minimax","tokens":["'"${HICLAW_LLM_API_KEY}"'"],"version":0,"protocol":"openai/v1","tokenFailoverConfig":{"enabled":false},"rawConfigs":{"openaiCustomUrl":"'"${MINIMAX_BASE_URL}"'","openaiCustomServiceName":"minimax.dns","openaiCustomServicePort":'"${MM_PORT}"'}}'
existing_provider=$(higress_get /v1/ai/providers/minimax)
if [ -n "${existing_provider}" ]; then
higress_api PUT /v1/ai/providers/minimax "Updating LLM provider (minimax)" "${PROVIDER_BODY}"
else
higress_api POST /v1/ai/providers "Creating LLM provider (minimax)" "${PROVIDER_BODY}"
fi
;;
*)
PROVIDER_BODY='{"name":"'"${LLM_PROVIDER}"'","type":"openai","tokens":["'"${HICLAW_LLM_API_KEY}"'"],"modelMapping":{},"protocol":"openai/v1"'
[ -n "${LLM_API_URL}" ] && PROVIDER_BODY="${PROVIDER_BODY}"',"rawConfigs":{"apiUrl":"'"${LLM_API_URL}"'"}'
Expand Down
2 changes: 1 addition & 1 deletion manager/scripts/init/start-manager-agent.sh
Original file line number Diff line number Diff line change
Expand Up @@ -271,7 +271,7 @@ case "${MODEL_NAME}" in
export MODEL_CONTEXT_WINDOW=200000 MODEL_MAX_TOKENS=64000 ;;
deepseek-chat|deepseek-reasoner|kimi-k2.5)
export MODEL_CONTEXT_WINDOW=256000 MODEL_MAX_TOKENS=128000 ;;
glm-5|MiniMax-M2.5)
glm-5|MiniMax-M2.5|MiniMax-M2.5-highspeed)
export MODEL_CONTEXT_WINDOW=200000 MODEL_MAX_TOKENS=128000 ;;
*)
export MODEL_CONTEXT_WINDOW=150000 MODEL_MAX_TOKENS=128000 ;;
Expand Down
Loading