Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
16 changes: 12 additions & 4 deletions bin/nemoclaw.js
Original file line number Diff line number Diff line change
Expand Up @@ -519,12 +519,15 @@ function showStatus() {
// Show sandbox registry
const { sandboxes, defaultSandbox } = registry.listSandboxes();
if (sandboxes.length > 0) {
const live = parseGatewayInference(
captureOpenshell(["inference", "get"], { ignoreError: true }).output
);
console.log("");
console.log(" Sandboxes:");
for (const sb of sandboxes) {
const def = sb.name === defaultSandbox ? " *" : "";
const model = sb.model ? ` (${sb.model})` : "";
console.log(` ${sb.name}${def}${model}`);
const model = (live && live.model) || sb.model;
console.log(` ${sb.name}${def}${model ? ` (${model})` : ""}`);
}
console.log("");
}
Expand All @@ -542,12 +545,17 @@ function listSandboxes() {
return;
}

// Query live gateway inference once; prefer it over stale registry values.
const live = parseGatewayInference(
captureOpenshell(["inference", "get"], { ignoreError: true }).output
);

console.log("");
console.log(" Sandboxes:");
for (const sb of sandboxes) {
const def = sb.name === defaultSandbox ? " *" : "";
const model = sb.model || "unknown";
const provider = sb.provider || "unknown";
const model = (live && live.model) || sb.model || "unknown";
const provider = (live && live.provider) || sb.provider || "unknown";
const gpu = sb.gpuEnabled ? "GPU" : "CPU";
const presets = sb.policies && sb.policies.length > 0 ? sb.policies.join(", ") : "none";
console.log(` ${sb.name}${def}`);
Expand Down
98 changes: 98 additions & 0 deletions test/cli.test.js
Original file line number Diff line number Diff line change
Expand Up @@ -746,3 +746,101 @@ describe("CLI dispatch", () => {
expect(statusResult.out.includes("Start the gateway again")).toBeTruthy();
}, 25000);
});

describe("list shows live gateway inference", () => {
  // Build an isolated HOME containing a `bin/` dir (prepended to PATH for the
  // openshell stub) and a ~/.nemoclaw/sandboxes.json registry, then return the
  // paths so the test can invoke the CLI against this sandboxed environment.
  function prepareHome(tmpPrefix, registryJson, stubScript) {
    const home = fs.mkdtempSync(path.join(os.tmpdir(), tmpPrefix));
    const localBin = path.join(home, "bin");
    const registryDir = path.join(home, ".nemoclaw");
    fs.mkdirSync(localBin, { recursive: true });
    fs.mkdirSync(registryDir, { recursive: true });
    // Registry is owner-only; stub must be executable.
    fs.writeFileSync(path.join(registryDir, "sandboxes.json"), registryJson, { mode: 0o600 });
    fs.writeFileSync(path.join(localBin, "openshell"), stubScript, { mode: 0o755 });
    return { home, localBin };
  }

  // Run the CLI "list" command with HOME/PATH pointed at the isolated fixture.
  function runList(home, localBin) {
    return runWithEnv("list", {
      HOME: home,
      PATH: `${localBin}:${process.env.PATH || ""}`,
    });
  }

  it("prefers live inference model/provider over stale registry values", () => {
    // Registry has no model/provider (mimics post-onboard before inference setup)
    const registryJson = JSON.stringify({
      sandboxes: {
        test: {
          name: "test",
          model: null,
          provider: null,
          gpuEnabled: true,
          policies: ["pypi", "npm"],
        },
      },
      defaultSandbox: "test",
    });
    // Stub openshell: inference get returns live provider/model
    const stubScript = [
      "#!/usr/bin/env bash",
      "if [ \"$1\" = \"inference\" ] && [ \"$2\" = \"get\" ]; then",
      " echo 'Gateway inference:'",
      " echo ' Provider: nvidia-prod'",
      " echo ' Model: nvidia/nemotron-3-super-120b-a12b'",
      " echo ' Version: 1'",
      " exit 0",
      "fi",
      "exit 0",
    ].join("\n");

    const { home, localBin } = prepareHome("nemoclaw-cli-list-live-", registryJson, stubScript);
    const r = runList(home, localBin);

    expect(r.code).toBe(0);
    expect(r.out).toContain("nvidia/nemotron-3-super-120b-a12b");
    expect(r.out).toContain("nvidia-prod");
    expect(r.out).not.toContain("unknown");
  });

  it("falls back to registry values when openshell inference get fails", () => {
    // Registry DOES carry model/provider here; they should surface in output.
    const registryJson = JSON.stringify({
      sandboxes: {
        test: {
          name: "test",
          model: "llama3.2:1b",
          provider: "ollama-local",
          gpuEnabled: false,
          policies: [],
        },
      },
      defaultSandbox: "test",
    });
    // Stub openshell: inference get fails
    const stubScript = [
      "#!/usr/bin/env bash",
      "if [ \"$1\" = \"inference\" ] && [ \"$2\" = \"get\" ]; then",
      " exit 1",
      "fi",
      "exit 0",
    ].join("\n");

    const { home, localBin } = prepareHome("nemoclaw-cli-list-fallback-", registryJson, stubScript);
    const r = runList(home, localBin);

    expect(r.code).toBe(0);
    expect(r.out).toContain("llama3.2:1b");
    expect(r.out).toContain("ollama-local");
  });
});
Loading