Skip to content

Commit 3e13a9c

Browse files
committed
fix: tighten onboarding credential validation and sandbox envs
1 parent bfe1f2c commit 3e13a9c

File tree

5 files changed

+108
-110
lines changed

5 files changed

+108
-110
lines changed

bin/lib/onboard.js

Lines changed: 26 additions & 35 deletions
Original file line numberDiff line numberDiff line change
@@ -306,6 +306,25 @@ function getCurlTimingArgs() {
306306
return ["--connect-timeout 5", "--max-time 20"];
307307
}
308308

309+
/**
 * Produce a copy of the host environment that is safe to hand to the
 * sandbox-create command: every remote-provider API-key variable declared
 * in REMOTE_PROVIDER_CONFIG, plus the chat-integration bot tokens, is
 * removed so credentials stay on the host.
 *
 * @param {NodeJS.ProcessEnv} [baseEnv=process.env] - Environment to sanitize.
 * @returns {NodeJS.ProcessEnv} A shallow copy with sensitive keys deleted.
 */
function getSandboxCreateSanitizedEnv(baseEnv = process.env) {
  // Credential variable names sourced from the provider table; entries
  // without a credentialEnv are dropped by the Boolean filter.
  const providerCredentialNames = Object.values(REMOTE_PROVIDER_CONFIG)
    .map((config) => config.credentialEnv)
    .filter(Boolean);
  const strippedNames = new Set([
    ...providerCredentialNames,
    "DISCORD_BOT_TOKEN",
    "SLACK_BOT_TOKEN",
    "TELEGRAM_BOT_TOKEN",
  ]);
  // Copy first, then strip — never mutate the caller's (or the global) env.
  const env = { ...baseEnv };
  for (const name of strippedNames) {
    delete env[name];
  }
  return env;
}
322+
323+
/**
 * Resolve an API credential by its environment-variable name.
 * The live process environment wins; otherwise the stored credential is
 * looked up via getCredential(). Returns "" when no name is given or
 * nothing is found.
 *
 * @param {string|null|undefined} credentialEnv - Env var name, e.g. "NVIDIA_API_KEY".
 * @returns {string} The resolved credential, or "" if unavailable.
 */
function resolveCredentialValue(credentialEnv) {
  if (!credentialEnv) {
    return "";
  }
  const fromEnv = process.env[credentialEnv];
  if (fromEnv) {
    return fromEnv;
  }
  // Fall back to the host credential store; normalize falsy results to "".
  return getCredential(credentialEnv) || "";
}
327+
309328
function buildProviderArgs(action, name, type, credentialEnv, baseUrl) {
310329
const args =
311330
action === "create"
@@ -619,7 +638,7 @@ async function validateOpenAiLikeSelection(
619638
credentialEnv = null,
620639
retryMessage = "Please choose a provider/model again."
621640
) {
622-
const apiKey = credentialEnv ? getCredential(credentialEnv) : "";
641+
const apiKey = resolveCredentialValue(credentialEnv);
623642
const probe = probeOpenAiLikeEndpoint(endpointUrl, model, apiKey);
624643
if (!probe.ok) {
625644
console.error(` ${label} endpoint validation failed.`);
@@ -635,31 +654,14 @@ async function validateOpenAiLikeSelection(
635654
return probe.api;
636655
}
637656

638-
async function validateAnthropicSelection(label, endpointUrl, model, credentialEnv) {
639-
const apiKey = getCredential(credentialEnv);
640-
const probe = probeAnthropicEndpoint(endpointUrl, model, apiKey);
641-
if (!probe.ok) {
642-
console.error(` ${label} endpoint validation failed.`);
643-
console.error(` ${probe.message}`);
644-
if (isNonInteractive()) {
645-
process.exit(1);
646-
}
647-
console.log(" Please choose a provider/model again.");
648-
console.log("");
649-
return null;
650-
}
651-
console.log(` ${probe.label} available — OpenClaw will use ${probe.api}.`);
652-
return probe.api;
653-
}
654-
655-
async function validateAnthropicSelectionWithRetryMessage(
657+
async function validateAnthropicSelection(
656658
label,
657659
endpointUrl,
658660
model,
659661
credentialEnv,
660662
retryMessage = "Please choose a provider/model again."
661663
) {
662-
const apiKey = getCredential(credentialEnv);
664+
const apiKey = resolveCredentialValue(credentialEnv);
663665
const probe = probeAnthropicEndpoint(endpointUrl, model, apiKey);
664666
if (!probe.ok) {
665667
console.error(` ${label} endpoint validation failed.`);
@@ -676,7 +678,7 @@ async function validateAnthropicSelectionWithRetryMessage(
676678
}
677679

678680
async function validateCustomOpenAiLikeSelection(label, endpointUrl, model, credentialEnv) {
679-
const apiKey = getCredential(credentialEnv);
681+
const apiKey = resolveCredentialValue(credentialEnv);
680682
const probe = probeOpenAiLikeEndpoint(endpointUrl, model, apiKey);
681683
if (probe.ok) {
682684
console.log(` ${probe.label} available — OpenClaw will use ${probe.api}.`);
@@ -698,7 +700,7 @@ async function validateCustomOpenAiLikeSelection(label, endpointUrl, model, cred
698700
}
699701

700702
async function validateCustomAnthropicSelection(label, endpointUrl, model, credentialEnv) {
701-
const apiKey = getCredential(credentialEnv);
703+
const apiKey = resolveCredentialValue(credentialEnv);
702704
const probe = probeAnthropicEndpoint(endpointUrl, model, apiKey);
703705
if (probe.ok) {
704706
console.log(` ${probe.label} available — OpenClaw will use ${probe.api}.`);
@@ -1387,18 +1389,7 @@ async function createSandbox(gpu, model, provider, preferredInferenceApi = null)
13871389
const chatUiUrl = process.env.CHAT_UI_URL || "http://127.0.0.1:18789";
13881390
patchStagedDockerfile(stagedDockerfile, model, chatUiUrl, String(Date.now()), provider, preferredInferenceApi);
13891391
const envArgs = [formatEnvAssignment("CHAT_UI_URL", chatUiUrl)];
1390-
const sandboxEnv = { ...process.env };
1391-
if (process.env.NVIDIA_API_KEY) {
1392-
sandboxEnv.NVIDIA_API_KEY = process.env.NVIDIA_API_KEY;
1393-
}
1394-
const discordToken = getCredential("DISCORD_BOT_TOKEN") || process.env.DISCORD_BOT_TOKEN;
1395-
if (discordToken) {
1396-
sandboxEnv.DISCORD_BOT_TOKEN = discordToken;
1397-
}
1398-
const slackToken = getCredential("SLACK_BOT_TOKEN") || process.env.SLACK_BOT_TOKEN;
1399-
if (slackToken) {
1400-
sandboxEnv.SLACK_BOT_TOKEN = slackToken;
1401-
}
1392+
const sandboxEnv = getSandboxCreateSanitizedEnv();
14021393

14031394
// Run without piping through awk — the pipe masked non-zero exit codes
14041395
// from openshell because bash returns the status of the last pipeline
@@ -1660,7 +1651,7 @@ async function setupNim(gpu) {
16601651
} else {
16611652
const retryMessage = "Please choose a provider/model again.";
16621653
if (selected.key === "anthropic") {
1663-
preferredInferenceApi = await validateAnthropicSelectionWithRetryMessage(
1654+
preferredInferenceApi = await validateAnthropicSelection(
16641655
remoteConfig.label,
16651656
endpointUrl || ANTHROPIC_ENDPOINT_URL,
16661657
model,

docs/inference/switch-inference-providers.md

Lines changed: 5 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -87,11 +87,14 @@ $ nemoclaw <name> status --json
8787

8888
The output includes the active provider, model, and endpoint.
8989

90-
## Notes
90+
:::{note}
9191

9292
- The host keeps provider credentials.
9393
- The sandbox continues to use `inference.local`.
94-
- Runtime switching changes the OpenShell route. It does not rewrite your stored credentials.
94+
- Runtime switching changes the OpenShell route.
95+
- It does not rewrite your stored credentials.
96+
97+
:::
9598

9699
## Related Topics
97100

docs/reference/inference-profiles.md

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -21,7 +21,8 @@ status: published
2121
# Inference Profiles
2222

2323
NemoClaw configures inference through the OpenShell gateway.
24-
The agent inside the sandbox talks to `inference.local`, and OpenShell routes that traffic to the provider you selected during onboarding.
24+
The agent inside the sandbox talks to `inference.local`.
25+
OpenShell routes that traffic to the provider you selected during onboarding.
2526

2627
## Routed Provider Model
2728

test/credential-exposure.test.js

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -74,11 +74,11 @@ describe("credential exposure in process arguments", () => {
7474
it("onboard.js does not embed sandbox secrets in the sandbox create command line", () => {
7575
const src = fs.readFileSync(ONBOARD_JS, "utf-8");
7676

77-
expect(src).toMatch(/const sandboxEnv = \{ \.\.\.process\.env \};/);
77+
expect(src).toMatch(/function getSandboxCreateSanitizedEnv\(baseEnv = process\.env\)/);
78+
expect(src).toMatch(/const sandboxEnv = getSandboxCreateSanitizedEnv\(\);/);
7879
expect(src).toMatch(/streamSandboxCreate\(createCommand, sandboxEnv\)/);
79-
expect(src).not.toMatch(/envArgs\.push\(formatEnvAssignment\("NVIDIA_API_KEY"/);
80-
expect(src).not.toMatch(/envArgs\.push\(formatEnvAssignment\("DISCORD_BOT_TOKEN"/);
81-
expect(src).not.toMatch(/envArgs\.push\(formatEnvAssignment\("SLACK_BOT_TOKEN"/);
80+
expect(src).toMatch(/delete env\[name\];/);
81+
expect(src).toMatch(/"TELEGRAM_BOT_TOKEN"/);
8282
});
8383

8484
it("onboard.js curl probes use explicit timeouts", () => {

0 commit comments

Comments
 (0)