Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
28 changes: 28 additions & 0 deletions nemoclaw-blueprint/policies/presets/local-inference.yaml
Original file line number Diff line number Diff line change
@@ -0,0 +1,28 @@
# SPDX-FileCopyrightText: Copyright (c) 2026 NVIDIA CORPORATION & AFFILIATES. All rights reserved.
# SPDX-License-Identifier: Apache-2.0

preset:
  name: local-inference
  description: "Local inference access (Ollama, vLLM) via host gateway"

network_policies:
  local_inference:
    name: local_inference
    endpoints:
      # Ollama default REST port, reached through the host-gateway alias.
      - host: host.openshell.internal
        port: 11434
        protocol: rest
        enforcement: enforce
        rules:
          - allow: { method: GET, path: "/**" }
          - allow: { method: POST, path: "/**" }
      # vLLM OpenAI-compatible server default port on the same host alias.
      - host: host.openshell.internal
        port: 8000
        protocol: rest
        enforcement: enforce
        rules:
          - allow: { method: GET, path: "/**" }
          - allow: { method: POST, path: "/**" }
    # Only the agent binaries get inference access — deliberately excludes node.
    binaries:
      - { path: /usr/local/bin/openclaw }
      - { path: /usr/local/bin/claude }
19 changes: 18 additions & 1 deletion src/lib/onboard.ts
Original file line number Diff line number Diff line change
Expand Up @@ -46,6 +46,11 @@ const {
getProviderSelectionConfig,
parseGatewayInference,
} = require("./inference-config");

// Providers that run on the host and need the local-inference policy preset.
// Shared constant so getSuggestedPolicyPresets() and setupPoliciesWithSelection()
// stay in sync. Frozen because it is exported from this module (see
// module.exports) — importers must not be able to mutate the shared list.
const LOCAL_INFERENCE_PROVIDERS = Object.freeze(["ollama-local", "vllm-local"]);
const { inferContainerRuntime, isWsl, shouldPatchCoredns } = require("./platform");
const { resolveOpenshell } = require("./resolve-openshell");
const {
Expand Down Expand Up @@ -4502,8 +4507,13 @@ async function setupMessagingChannels() {
return selected;
}

function getSuggestedPolicyPresets({ enabledChannels = null, webSearchConfig = null } = {}) {
function getSuggestedPolicyPresets({ enabledChannels = null, webSearchConfig = null, provider = null } = {}) {
const suggestions = ["pypi", "npm"];

// Auto-suggest local-inference preset when a local provider is selected
if (provider && LOCAL_INFERENCE_PROVIDERS.includes(provider)) {
suggestions.push("local-inference");
}
const usesExplicitMessagingSelection = Array.isArray(enabledChannels);

const maybeSuggestMessagingPreset = (channel, envKey) => {
Expand Down Expand Up @@ -5118,6 +5128,7 @@ async function setupPoliciesWithSelection(sandboxName, options = {}) {
const onSelection = typeof options.onSelection === "function" ? options.onSelection : null;
const webSearchConfig = options.webSearchConfig || null;
const enabledChannels = Array.isArray(options.enabledChannels) ? options.enabledChannels : null;
const provider = options.provider || null;

step(8, 8, "Policy presets");

Expand Down Expand Up @@ -5147,6 +5158,10 @@ async function setupPoliciesWithSelection(sandboxName, options = {}) {
const suggestions = tiers.resolveTierPresets(tierName).map((p) => p.name);
// Allow credential-based overrides on top of the tier (additive only).
if (webSearchConfig && !suggestions.includes("brave")) suggestions.push("brave");
// Auto-suggest local-inference preset when a local provider is selected
if (provider && LOCAL_INFERENCE_PROVIDERS.includes(provider) && !suggestions.includes("local-inference")) {
suggestions.push("local-inference");
}

if (isNonInteractive()) {
const policyMode = (process.env.NEMOCLAW_POLICY_MODE || "suggested").trim().toLowerCase();
Expand Down Expand Up @@ -5966,6 +5981,7 @@ async function onboard(opts = {}) {
: null,
enabledChannels: selectedMessagingChannels,
webSearchConfig,
provider,
onSelection: (policyPresets) => {
onboardSession.updateSession((current) => {
current.policyPresets = policyPresets;
Expand Down Expand Up @@ -6049,6 +6065,7 @@ module.exports = {
isOpenclawReady,
arePolicyPresetsApplied,
getSuggestedPolicyPresets,
LOCAL_INFERENCE_PROVIDERS,
presetsCheckboxSelector,
selectPolicyTier,
selectTierPresetsAndAccess,
Expand Down
19 changes: 19 additions & 0 deletions test/onboard.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -142,6 +142,25 @@ describe("onboard helpers", () => {
}
});

it("suggests local-inference preset when provider is ollama-local", () => {
const presets = getSuggestedPolicyPresets({ provider: "ollama-local" });
expect(presets).toContain("local-inference");
expect(presets).toContain("pypi");
expect(presets).toContain("npm");
});

it("suggests local-inference preset when provider is vllm-local", () => {
const presets = getSuggestedPolicyPresets({ provider: "vllm-local" });
expect(presets).toContain("local-inference");
});

it("does not suggest local-inference for cloud providers", () => {
expect(getSuggestedPolicyPresets({ provider: "nvidia-prod" })).not.toContain("local-inference");
expect(getSuggestedPolicyPresets({ provider: "openai-api" })).not.toContain("local-inference");
expect(getSuggestedPolicyPresets({ provider: null })).not.toContain("local-inference");
expect(getSuggestedPolicyPresets({})).not.toContain("local-inference");
});

it("patches the staged Dockerfile with the selected model and chat UI URL", () => {
const tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), "nemoclaw-onboard-dockerfile-"));
const dockerfilePath = path.join(tmpDir, "Dockerfile");
Expand Down
20 changes: 18 additions & 2 deletions test/policies.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -96,9 +96,9 @@ selectFromList(items, options)

describe("policies", () => {
describe("listPresets", () => {
it("returns all 11 presets", () => {
it("returns all 12 presets", () => {
const presets = policies.listPresets();
expect(presets.length).toBe(11);
expect(presets.length).toBe(12);
});

it("each preset has name and description", () => {
Expand All @@ -120,6 +120,7 @@ describe("policies", () => {
"github",
"huggingface",
"jira",
"local-inference",
"npm",
"outlook",
"pypi",
Expand Down Expand Up @@ -153,6 +154,21 @@ describe("policies", () => {
expect(content).toContain("/usr/bin/node");
}
});

it("local-inference preset targets host.openshell.internal on Ollama and vLLM ports", () => {
const content = policies.loadPreset("local-inference");
expect(content).toContain("host.openshell.internal");
expect(content).toContain("port: 11434");
expect(content).toContain("port: 8000");
});

it("local-inference preset restricts binaries to openclaw and claude", () => {
const content = policies.loadPreset("local-inference");
expect(content).toContain("/usr/local/bin/openclaw");
expect(content).toContain("/usr/local/bin/claude");
// Should NOT include node — only agent binaries need inference access
expect(content).not.toContain("/usr/local/bin/node");
});
});

describe("getPresetEndpoints", () => {
Expand Down
Loading