diff --git a/.gitignore b/.gitignore
index ae5f25e..05e616b 100644
--- a/.gitignore
+++ b/.gitignore
@@ -12,6 +12,9 @@ dist
dist-ssr
*.local
+# Local MCP config (copy src-tauri/mcp.example.json → src-tauri/mcp.json)
+src-tauri/mcp.json
+
# Editor directories and files
.vscode/*
!.vscode/extensions.json
diff --git a/bun.lock b/bun.lock
index 53bf064..c0f50af 100644
--- a/bun.lock
+++ b/bun.lock
@@ -5,9 +5,11 @@
"": {
"name": "pengine",
"dependencies": {
+ "@radix-ui/react-accordion": "^1.2.12",
"@radix-ui/react-menubar": "^1.1.16",
"@tailwindcss/vite": "^4.2.2",
"@tauri-apps/api": "^2",
+ "@tauri-apps/plugin-dialog": "^2",
"@tauri-apps/plugin-opener": "^2",
"qrcode.react": "^4.2.0",
"react": "^19.1.0",
@@ -172,8 +174,12 @@
"@radix-ui/primitive": ["@radix-ui/primitive@1.1.3", "", {}, "sha512-JTF99U/6XIjCBo0wqkU5sK10glYe27MRRsfwoiq5zzOEZLHU3A3KCMa5X/azekYRCJ0HlwI0crAXS/5dEHTzDg=="],
+ "@radix-ui/react-accordion": ["@radix-ui/react-accordion@1.2.12", "", { "dependencies": { "@radix-ui/primitive": "1.1.3", "@radix-ui/react-collapsible": "1.1.12", "@radix-ui/react-collection": "1.1.7", "@radix-ui/react-compose-refs": "1.1.2", "@radix-ui/react-context": "1.1.2", "@radix-ui/react-direction": "1.1.1", "@radix-ui/react-id": "1.1.1", "@radix-ui/react-primitive": "2.1.3", "@radix-ui/react-use-controllable-state": "1.2.2" }, "peerDependencies": { "@types/react": "*", "@types/react-dom": "*", "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" }, "optionalPeers": ["@types/react", "@types/react-dom"] }, "sha512-T4nygeh9YE9dLRPhAHSeOZi7HBXo+0kYIPJXayZfvWOWA0+n3dESrZbjfDPUABkUNym6Hd+f2IR113To8D2GPA=="],
+
"@radix-ui/react-arrow": ["@radix-ui/react-arrow@1.1.7", "", { "dependencies": { "@radix-ui/react-primitive": "2.1.3" }, "peerDependencies": { "@types/react": "*", "@types/react-dom": "*", "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" }, "optionalPeers": ["@types/react", "@types/react-dom"] }, "sha512-F+M1tLhO+mlQaOWspE8Wstg+z6PwxwRd8oQ8IXceWz92kfAmalTRf0EjrouQeo7QssEPfCn05B4Ihs1K9WQ/7w=="],
+ "@radix-ui/react-collapsible": ["@radix-ui/react-collapsible@1.1.12", "", { "dependencies": { "@radix-ui/primitive": "1.1.3", "@radix-ui/react-compose-refs": "1.1.2", "@radix-ui/react-context": "1.1.2", "@radix-ui/react-id": "1.1.1", "@radix-ui/react-presence": "1.1.5", "@radix-ui/react-primitive": "2.1.3", "@radix-ui/react-use-controllable-state": "1.2.2", "@radix-ui/react-use-layout-effect": "1.1.1" }, "peerDependencies": { "@types/react": "*", "@types/react-dom": "*", "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" }, "optionalPeers": ["@types/react", "@types/react-dom"] }, "sha512-Uu+mSh4agx2ib1uIGPP4/CKNULyajb3p92LsVXmH2EHVMTfZWpll88XJ0j4W0z3f8NK1eYl1+Mf/szHPmcHzyA=="],
+
"@radix-ui/react-collection": ["@radix-ui/react-collection@1.1.7", "", { "dependencies": { "@radix-ui/react-compose-refs": "1.1.2", "@radix-ui/react-context": "1.1.2", "@radix-ui/react-primitive": "2.1.3", "@radix-ui/react-slot": "1.2.3" }, "peerDependencies": { "@types/react": "*", "@types/react-dom": "*", "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" }, "optionalPeers": ["@types/react", "@types/react-dom"] }, "sha512-Fh9rGN0MoI4ZFUNyfFVNU4y9LUz93u9/0K+yLgA2bwRojxM8JU1DyvvMBabnZPBgMWREAJvU2jjVzq+LrFUglw=="],
"@radix-ui/react-compose-refs": ["@radix-ui/react-compose-refs@1.1.2", "", { "peerDependencies": { "@types/react": "*", "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" }, "optionalPeers": ["@types/react"] }, "sha512-z4eqJvfiNnFMHIIvXP3CY57y2WJs5g2v3X0zm9mEJkrkNv4rDxu+sg9Jh8EkXyeqBkB7SOcboo9dMVqhyrACIg=="],
@@ -330,6 +336,8 @@
"@tauri-apps/cli-win32-x64-msvc": ["@tauri-apps/cli-win32-x64-msvc@2.10.1", "", { "os": "win32", "cpu": "x64" }, "sha512-6Cn7YpPFwzChy0ERz6djKEmUehWrYlM+xTaNzGPgZocw3BD7OfwfWHKVWxXzdjEW2KfKkHddfdxK1XXTYqBRLg=="],
+ "@tauri-apps/plugin-dialog": ["@tauri-apps/plugin-dialog@2.7.0", "", { "dependencies": { "@tauri-apps/api": "^2.10.1" } }, "sha512-4nS/hfGMGCXiAS3LtVjH9AgsSAPJeG/7R+q8agTFqytjnMa4Zq95Bq8WzVDkckpanX+yyRHXnRtrKXkANKDHvw=="],
+
"@tauri-apps/plugin-opener": ["@tauri-apps/plugin-opener@2.5.3", "", { "dependencies": { "@tauri-apps/api": "^2.8.0" } }, "sha512-CCcUltXMOfUEArbf3db3kCE7Ggy1ExBEBl51Ko2ODJ6GDYHRp1nSNlQm5uNCFY5k7/ufaK5Ib3Du/Zir19IYQQ=="],
"@types/babel__core": ["@types/babel__core@7.20.5", "", { "dependencies": { "@babel/parser": "^7.20.7", "@babel/types": "^7.20.7", "@types/babel__generator": "*", "@types/babel__template": "*", "@types/babel__traverse": "*" } }, "sha512-qoQprZvz5wQFJwMDqeseRXWv3rqMvhgpbXFfVyWhbx9X47POIA6i/+dXefEmZKoAgOaTdaIgNSMqMIU61yRyzA=="],
diff --git a/doc/design/README.md b/doc/design/README.md
index f647753..8be8082 100644
--- a/doc/design/README.md
+++ b/doc/design/README.md
@@ -183,6 +183,12 @@ Telegram ──► teloxide dispatcher ──► text_handler
---
+## Modules
+
+- [MCP — Model Context Protocol](./mcp.md) — agent tool-use via external MCP servers (POC).
+
+---
+
## Adding a New Module
### Frontend
diff --git a/doc/design/mcp.md b/doc/design/mcp.md
new file mode 100644
index 0000000..fd14bd4
--- /dev/null
+++ b/doc/design/mcp.md
@@ -0,0 +1,112 @@
+# MCP — Model Context Protocol Module (POC)
+
+> Status: **proof of concept**. One server, one transport, one happy path.
+
+## What & Why
+
+[MCP](https://modelcontextprotocol.io/) is an open JSON-RPC 2.0 protocol that lets an LLM "host" discover and call tools exposed by external "servers". Pengine adopts MCP so we can grow the agent's capabilities by dropping in new servers instead of writing bespoke Rust glue for each tool. Every tool call flows through one well-defined choke point, which is what makes it auditable.
+
+## Roles in Pengine
+
+| Role | Where | Responsibility |
+|---|---|---|
+| **Host** | Pengine (Tauri binary) | Owns the LLM (Ollama) connection, the Telegram bot, and the agent loop. |
+| **Client** | `src-tauri/src/modules/mcp/` | One `McpClient` per connected server. Speaks JSON-RPC over stdio. |
+| **Server** | External child process | Anything that speaks MCP — `npx @modelcontextprotocol/server-filesystem`, a Docker container, a custom binary. |
+
+```text
+Telegram message
+ │
+ ▼
+bot::service::text_handler
+ │
+ ▼
+bot::agent::run_turn ────► ollama::chat_with_tools (Ollama /api/chat)
+ ▲ │
+ │ │ tool_calls?
+ │ ▼
+ └─────────── mcp::registry::call_tool ──► McpClient ──► child process (stdio)
+```
+
+## Module Layout
+
+```text
+src-tauri/src/modules/mcp/
+├── mod.rs
+├── protocol.rs JSON-RPC 2.0 request/response types
+├── types.rs McpConfig, ServerConfig, Tool
+├── transport.rs StdioTransport — child process + line-delimited JSON
+├── client.rs McpClient — initialize / tools/list / tools/call
+├── registry.rs McpRegistry — fan-out across all connected servers
+└── service.rs load_or_init_config(), connect_all()
+```
+
+The registry lives on `AppState.mcp` (`Arc<RwLock<McpRegistry>>`) so the bot agent and any future HTTP route can reach it.
+
+## Config
+
+File: `src-tauri/mcp.json` if present (copy `mcp.example.json`; gitignored), otherwise `$APP_DATA/mcp.json` (next to `connection.json`). Created on first launch with a sane default if missing.
+
+```json
+{
+ "servers": {
+ "filesystem": {
+ "command": "npx",
+ "args": ["-y", "@modelcontextprotocol/server-filesystem", "/tmp"],
+ "env": {}
+ }
+ }
+}
+```
+
+To add a server: add another entry under `servers`. Restart Pengine.
+
+## Protocol Subset Implemented
+
+Four messages, that's it:
+
+1. `initialize` — handshake. We send `{protocolVersion, capabilities, clientInfo}` and ignore most of the response.
+2. `notifications/initialized` — required notification after init.
+3. `tools/list` — discovery, cached on the client.
+4. `tools/call` — `{name, arguments}` → `{content: [{type: "text", text}]}`.
+
+Out of scope for the POC: resources, prompts, sampling, server-initiated requests, batch JSON-RPC, HTTP transport.
+
+## Ollama Bridge
+
+MCP `inputSchema` is JSON Schema, and so are Ollama's tool `parameters` — translation is just a rename. See `to_ollama_tools` in `bot/agent.rs`. Tool names are emitted as `server.tool` so the registry can route a call back to the right client.
+
+The agent loop in `bot::agent::run_turn`:
+
+1. Snapshot the available tools from the registry.
+2. Send `system + user` plus the tool list to Ollama.
+3. If the response carries `tool_calls`, run each via `registry.call_tool`, append the results as `role: "tool"` messages, loop. Capped at **3 steps** (`MAX_STEPS` in `bot/agent.rs`).
+4. Otherwise return the assistant content as the final reply.
+
+Use a tool-capable model (e.g. `qwen3:8b`). Check with `ollama show <model>` for the `tools` capability.
+
+## Audit Logs
+
+Every MCP-relevant event is emitted as a `LogEntry` with `kind = "mcp"` via `state.emit_log`. They flow through the existing SSE log stream (`GET /v1/logs`) and are visible on the dashboard:
+
+- `loading MCP config…`
+- `filesystem ready (2 tools)`
+- `MCP ready (2 tools)`
+- `tools available: filesystem.read_file, filesystem.list_directory`
+- `tool call (0): filesystem.list_directory({"path":"/tmp"})`
+- `tool result (842 bytes)`
+- `tool error: …`
+
+That single audit trail is the "auditable protocol" promise of this feature.
+
+## Try It
+
+1. `npx -y @modelcontextprotocol/server-filesystem /tmp` should run (Node + npm available).
+2. `ollama pull qwen3:8b` (or any tool-capable model).
+3. `bun run tauri dev`. On first launch, watch the dashboard for `mcp` lines confirming the filesystem server connected.
+4. Connect a Telegram bot, then send: *"List the files in /tmp."*
+5. Expect a `tool call` and `tool result` line in the log, followed by a coherent reply on Telegram.
+
+## Future Work
+
+Permission prompts, multiple servers in the default config, a frontend tools panel, hot reload of `mcp.json`, HTTP/SSE transport, resources & prompts. Not in this PR.
diff --git a/e2e/setup-dashboard.spec.ts b/e2e/setup-dashboard.spec.ts
index f50f0c4..0427562 100644
--- a/e2e/setup-dashboard.spec.ts
+++ b/e2e/setup-dashboard.spec.ts
@@ -82,16 +82,83 @@ async function mockApis(page: import("@playwright/test").Page) {
await route.continue();
}
});
+
+ await page.route(`${PENGINE_API_BASE}/v1/ollama/models`, async (route) => {
+ await route.fulfill({
+ status: 200,
+ contentType: "application/json",
+ body: JSON.stringify({
+ reachable: true,
+ active_model: "qwen3-coder:30b",
+ selected_model: null,
+ models: ["qwen3-coder:30b"],
+ }),
+ });
+ });
+
+ await page.route(`${PENGINE_API_BASE}/v1/ollama/model`, async (route) => {
+ if (route.request().method() === "PUT") {
+ let selected_model: string | null = null;
+ const raw = route.request().postData();
+ if (raw) {
+ try {
+ const body = JSON.parse(raw) as { model?: string | null };
+ let m: string | null = null;
+ if (typeof body.model === "string") m = body.model.trim();
+ selected_model = m && m.length > 0 ? m : null;
+ } catch {
+ /* ignore malformed body */
+ }
+ }
+ await route.fulfill({
+ status: 200,
+ contentType: "application/json",
+ body: JSON.stringify({ selected_model }),
+ });
+ } else {
+ await route.continue();
+ }
+ });
+
+ await page.route(
+ (url) => url.href.startsWith(`${PENGINE_API_BASE}/v1/mcp/servers`),
+ async (route) => {
+ await route.fulfill({
+ status: 200,
+ contentType: "application/json",
+ body: JSON.stringify({ servers: {} }),
+ });
+ },
+ );
+
+ await page.route(
+ (url) => url.href.startsWith(`${PENGINE_API_BASE}/v1/mcp/tools`),
+ async (route) => {
+ await route.fulfill({
+ status: 200,
+ contentType: "application/json",
+ body: JSON.stringify([]),
+ });
+ },
+ );
}
test.describe("setup to dashboard flow", () => {
test("shows 'no device' on dashboard when disconnected", async ({ page }) => {
+ // Force offline so the assertion does not depend on a local Pengine/Ollama install.
+ await page.route(`${PENGINE_API_BASE}/v1/health`, async (route) => {
+ await route.abort("failed");
+ });
+ await page.route(`${PENGINE_API_BASE}/v1/ollama/models`, async (route) => {
+ await route.abort("failed");
+ });
+
await page.goto("/dashboard");
await expect(page.getByTestId("app-ready")).toBeVisible();
await expect(page).toHaveURL(/\/dashboard$/);
- await expect(page.getByText("No device connected")).toBeVisible();
- await expect(page.getByRole("link", { name: "Go to setup" })).toBeVisible();
+ await expect(page.getByText("Some services offline")).toBeVisible({ timeout: 15_000 });
+ await expect(page.getByRole("link", { name: "Setup", exact: true })).toBeVisible();
});
test("walks all setup wizard steps and opens dashboard", async ({ page }) => {
@@ -128,10 +195,8 @@ test.describe("setup to dashboard flow", () => {
await page.getByRole("button", { name: "Open dashboard" }).click();
await expect(page).toHaveURL(/\/dashboard$/);
- await expect(
- page.getByRole("heading", { name: "Connected device and running services" }),
- ).toBeVisible();
- await expect(page.getByText("Telegram gateway")).toBeVisible();
+ await expect(page.getByText("All systems running")).toBeVisible({ timeout: 15_000 });
+ await expect(page.getByText("@TestPengineBot")).toBeVisible();
});
test("loads dashboard when device is already connected", async ({ page }) => {
@@ -144,6 +209,7 @@ test.describe("setup to dashboard flow", () => {
await expect(page.getByTestId("app-ready")).toBeVisible();
await expect(page).toHaveURL(/\/dashboard$/);
- await expect(page.getByText("1 connected device")).toBeVisible();
+ await expect(page.getByText("All systems running")).toBeVisible({ timeout: 15_000 });
+ await expect(page.getByRole("button", { name: "Disconnect" })).toBeVisible();
});
});
diff --git a/package.json b/package.json
index 04b32ef..57f24d2 100644
--- a/package.json
+++ b/package.json
@@ -30,9 +30,11 @@
]
},
"dependencies": {
+ "@radix-ui/react-accordion": "^1.2.12",
"@radix-ui/react-menubar": "^1.1.16",
"@tailwindcss/vite": "^4.2.2",
"@tauri-apps/api": "^2",
+ "@tauri-apps/plugin-dialog": "^2",
"@tauri-apps/plugin-opener": "^2",
"qrcode.react": "^4.2.0",
"react": "^19.1.0",
diff --git a/src-tauri/Cargo.lock b/src-tauri/Cargo.lock
index 91b3149..ee2b6d7 100644
--- a/src-tauri/Cargo.lock
+++ b/src-tauri/Cargo.lock
@@ -2744,6 +2744,7 @@ checksum = "e3e0adef53c21f888deb4fa59fc59f7eb17404926ee8a6f59f5df0fd7f9f3272"
dependencies = [
"bitflags 2.11.0",
"block2",
+ "libc",
"objc2",
"objc2-core-foundation",
]
@@ -2957,12 +2958,15 @@ dependencies = [
"axum",
"chrono",
"env_logger",
+ "fastrand",
+ "log",
"reqwest 0.13.2",
"serde",
"serde_json",
"socket2 0.5.10",
"tauri",
"tauri-build",
+ "tauri-plugin-dialog",
"tauri-plugin-opener",
"teloxide",
"tokio",
@@ -3763,6 +3767,30 @@ dependencies = [
"web-sys",
]
+[[package]]
+name = "rfd"
+version = "0.16.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a15ad77d9e70a92437d8f74c35d99b4e4691128df018833e99f90bcd36152672"
+dependencies = [
+ "block2",
+ "dispatch2",
+ "glib-sys",
+ "gobject-sys",
+ "gtk-sys",
+ "js-sys",
+ "log",
+ "objc2",
+ "objc2-app-kit",
+ "objc2-core-foundation",
+ "objc2-foundation",
+ "raw-window-handle",
+ "wasm-bindgen",
+ "wasm-bindgen-futures",
+ "web-sys",
+ "windows-sys 0.60.2",
+]
+
[[package]]
name = "rgb"
version = "0.8.53"
@@ -4726,6 +4754,48 @@ dependencies = [
"walkdir",
]
+[[package]]
+name = "tauri-plugin-dialog"
+version = "2.7.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a1fa4150c95ae391946cc8b8f905ab14797427caba3a8a2f79628e956da91809"
+dependencies = [
+ "log",
+ "raw-window-handle",
+ "rfd",
+ "serde",
+ "serde_json",
+ "tauri",
+ "tauri-plugin",
+ "tauri-plugin-fs",
+ "thiserror 2.0.18",
+ "url",
+]
+
+[[package]]
+name = "tauri-plugin-fs"
+version = "2.5.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "36e1ec28b79f3d0683f4507e1615c36292c0ea6716668770d4396b9b39871ed8"
+dependencies = [
+ "anyhow",
+ "dunce",
+ "glob",
+ "log",
+ "objc2-foundation",
+ "percent-encoding",
+ "schemars 0.8.22",
+ "serde",
+ "serde_json",
+ "serde_repr",
+ "tauri",
+ "tauri-plugin",
+ "tauri-utils",
+ "thiserror 2.0.18",
+ "toml 0.9.12+spec-1.1.0",
+ "url",
+]
+
[[package]]
name = "tauri-plugin-opener"
version = "2.5.3"
diff --git a/src-tauri/Cargo.toml b/src-tauri/Cargo.toml
index 6269015..22f4094 100644
--- a/src-tauri/Cargo.toml
+++ b/src-tauri/Cargo.toml
@@ -20,6 +20,7 @@ tauri-build = { version = "2", features = [] }
[dependencies]
tauri = { version = "2", features = [] }
tauri-plugin-opener = "2"
+log = "0.4"
serde = { version = "1", features = ["derive"] }
serde_json = "1"
tokio = { version = "1", features = ["full"] }
@@ -31,4 +32,6 @@ env_logger = "0.11"
chrono = { version = "0.4", features = ["serde"] }
tokio-stream = { version = "0.1", features = ["sync"] }
socket2 = "0.5"
+fastrand = "2"
+tauri-plugin-dialog = "2"
diff --git a/src-tauri/capabilities/default.json b/src-tauri/capabilities/default.json
index ecdd4ce..fdfa076 100644
--- a/src-tauri/capabilities/default.json
+++ b/src-tauri/capabilities/default.json
@@ -6,6 +6,7 @@
"permissions": [
"core:default",
"opener:default",
+ "dialog:default",
"core:event:default",
"core:event:allow-listen",
"core:event:allow-emit"
diff --git a/src-tauri/mcp.example.json b/src-tauri/mcp.example.json
new file mode 100644
index 0000000..52dcec6
--- /dev/null
+++ b/src-tauri/mcp.example.json
@@ -0,0 +1,18 @@
+{
+ "servers": {
+ "dice": {
+ "type": "native",
+ "id": "dice"
+ },
+ "filesystem": {
+ "type": "stdio",
+ "command": "npx",
+ "args": [
+ "-y",
+ "@modelcontextprotocol/server-filesystem",
+ "/absolute/path/to/allowed/folder"
+ ],
+ "env": {}
+ }
+ }
+}
diff --git a/src-tauri/src/app.rs b/src-tauri/src/app.rs
index 6f4082c..f089a55 100644
--- a/src-tauri/src/app.rs
+++ b/src-tauri/src/app.rs
@@ -1,5 +1,6 @@
use crate::infrastructure::http_server;
use crate::modules::bot::{commands, repository, service as bot_service};
+use crate::modules::mcp::service as mcp_service;
use crate::shared::state::AppState;
use std::path::PathBuf;
use tauri::Manager;
@@ -18,22 +19,42 @@ pub fn run() {
tauri::Builder::default()
.plugin(tauri_plugin_opener::init())
+ .plugin(tauri_plugin_dialog::init())
.setup(|app| {
let path = store_path(app);
- let shared_state = AppState::new(path);
+ let (mcp_path, mcp_src) = mcp_service::resolve_mcp_config_path(&path);
+ let shared_state = AppState::new(path, mcp_path, mcp_src.to_string());
{
let handle = app.handle().clone();
let state = shared_state.clone();
- tauri::async_runtime::spawn(async move {
- let mut lock = state.app_handle.lock().await;
- *lock = Some(handle);
+ tauri::async_runtime::block_on(async move {
+ *state.app_handle.lock().await = Some(handle);
});
}
app.manage(shared_state.clone());
- // Resume persisted connection if present
+ // Load MCP before any bot work so the first Telegram message never sees an empty registry.
+ let mcp_path = shared_state.mcp_config_path.clone();
+ let mcp_state = shared_state.clone();
+ tauri::async_runtime::block_on(async move {
+ mcp_state
+ .emit_log("mcp", &format!("loading {}", mcp_path.display()))
+ .await;
+ match mcp_service::load_or_init_config(&mcp_path) {
+ Ok(cfg) => {
+ mcp_service::rebuild_registry_into_state(&mcp_state, &cfg).await;
+ }
+ Err(e) => {
+ mcp_state
+ .emit_log("mcp", &format!("mcp.json error: {e}"))
+ .await;
+ }
+ }
+ });
+
+ // Resume persisted Telegram connection if present.
let resume_state = shared_state.clone();
tauri::async_runtime::spawn(async move {
let Some(conn) = repository::load(&resume_state.store_path) else {
@@ -43,15 +64,12 @@ pub fn run() {
.emit_log("ok", &format!("Resuming bot @{}…", conn.bot_username))
.await;
let token = conn.bot_token.clone();
- {
- let mut lock = resume_state.connection.lock().await;
- *lock = Some(conn);
- }
+ *resume_state.connection.lock().await = Some(conn);
let shutdown = resume_state.shutdown_notify.clone();
bot_service::start_bot(resume_state, token, shutdown).await;
});
- // Start localhost HTTP API
+ // Start localhost HTTP API.
let server_state = shared_state.clone();
tauri::async_runtime::spawn(async move {
http_server::start_server(server_state).await;
@@ -62,6 +80,7 @@ pub fn run() {
.invoke_handler(tauri::generate_handler![
commands::get_connection_status,
commands::disconnect_bot,
+ commands::pick_mcp_filesystem_folder,
])
.run(tauri::generate_context!())
.expect("error while running tauri application");
diff --git a/src-tauri/src/infrastructure/http_server.rs b/src-tauri/src/infrastructure/http_server.rs
index 37debab..268924b 100644
--- a/src-tauri/src/infrastructure/http_server.rs
+++ b/src-tauri/src/infrastructure/http_server.rs
@@ -1,10 +1,12 @@
use crate::infrastructure::bot_lifecycle;
use crate::modules::bot::{repository, service as bot_service};
+use crate::modules::mcp::service as mcp_service;
+use crate::modules::ollama::service as ollama_service;
use crate::shared::state::{AppState, ConnectionData};
-use axum::extract::State;
+use axum::extract::{Path, State};
use axum::http::StatusCode;
use axum::response::{Json, Sse};
-use axum::routing::{delete, get, post};
+use axum::routing::{delete, get, post, put};
use axum::Router;
use chrono::Utc;
use serde::{Deserialize, Serialize};
@@ -41,6 +43,39 @@ pub struct ErrorResponse {
pub error: String,
}
+#[derive(Serialize)]
+pub struct McpToolDto {
+ pub server: String,
+ pub name: String,
+ pub description: Option,
+}
+
+#[derive(Serialize)]
+pub struct McpConfigInfoResponse {
+ pub config_path: String,
+ /// `"project"` or `"app_data"`
+ pub source: String,
+ pub filesystem_allowed_paths: Vec,
+}
+
+#[derive(Deserialize)]
+pub struct PutMcpFilesystemBody {
+ pub paths: Vec,
+}
+
+#[derive(Serialize)]
+pub struct OllamaModelsResponse {
+ pub reachable: bool,
+ pub active_model: Option,
+ pub selected_model: Option,
+ pub models: Vec,
+}
+
+#[derive(Deserialize)]
+pub struct PutOllamaModelBody {
+ pub model: Option,
+}
+
pub async fn start_server(state: AppState) {
let cors = CorsLayer::new()
.allow_origin(Any)
@@ -52,6 +87,14 @@ pub async fn start_server(state: AppState) {
.route("/v1/connect", delete(handle_disconnect))
.route("/v1/health", get(handle_health))
.route("/v1/logs", get(handle_logs_sse))
+ .route("/v1/ollama/models", get(handle_ollama_models))
+ .route("/v1/ollama/model", put(handle_ollama_model_put))
+ .route("/v1/mcp/tools", get(handle_mcp_tools))
+ .route("/v1/mcp/config", get(handle_mcp_config_get))
+ .route("/v1/mcp/filesystem", put(handle_mcp_filesystem_put))
+ .route("/v1/mcp/servers", get(handle_mcp_servers_list))
+ .route("/v1/mcp/servers/{name}", put(handle_mcp_server_upsert))
+ .route("/v1/mcp/servers/{name}", delete(handle_mcp_server_delete))
.layer(cors)
.with_state(state.clone());
@@ -65,8 +108,6 @@ pub async fn start_server(state: AppState) {
axum::serve(listener, app).await.expect("axum serve failed");
}
-/// Bind with `SO_REUSEADDR` so a quick restart can reclaim the port after the old socket
-/// enters `TIME_WAIT`. Falls back to the same error as plain bind if another process still listens.
fn bind_loopback_reuse(addr: std::net::SocketAddr) -> std::io::Result {
let socket = Socket::new(Domain::for_address(addr), Type::STREAM, None)?;
socket.set_nonblocking(true)?;
@@ -210,6 +251,282 @@ async fn handle_health(State(state): State) -> Json {
})
}
+async fn handle_ollama_models(State(state): State) -> Json {
+ let selected_model = state.preferred_ollama_model.read().await.clone();
+ match ollama_service::model_catalog(3000).await {
+ Ok(catalog) => Json(OllamaModelsResponse {
+ reachable: true,
+ active_model: catalog.active,
+ selected_model,
+ models: catalog.models,
+ }),
+ Err(_) => Json(OllamaModelsResponse {
+ reachable: false,
+ active_model: None,
+ selected_model,
+ models: Vec::new(),
+ }),
+ }
+}
+
+async fn handle_ollama_model_put(
+ State(state): State,
+ Json(body): Json,
+) -> Result<(StatusCode, Json), (StatusCode, Json)> {
+ let normalized = body
+ .model
+ .as_ref()
+ .map(|m| m.trim().to_string())
+ .filter(|m| !m.is_empty());
+
+ if let Some(ref model) = normalized {
+ let catalog = ollama_service::model_catalog(3000)
+ .await
+ .map_err(|e| (StatusCode::BAD_GATEWAY, Json(ErrorResponse { error: e })))?;
+ if !catalog.models.iter().any(|m| m == model) {
+ return Err((
+ StatusCode::BAD_REQUEST,
+ Json(ErrorResponse {
+ error: format!("model '{model}' is not available in Ollama"),
+ }),
+ ));
+ }
+ }
+
+ {
+ let mut lock = state.preferred_ollama_model.write().await;
+ *lock = normalized.clone();
+ }
+
+ state
+ .emit_log(
+ "run",
+ &format!(
+ "ollama model {}",
+ normalized
+ .as_ref()
+ .map(|m| format!("set to '{m}'"))
+ .unwrap_or_else(|| "reset to active".to_string())
+ ),
+ )
+ .await;
+
+ Ok((
+ StatusCode::OK,
+ Json(serde_json::json!({ "selected_model": normalized })),
+ ))
+}
+
+async fn handle_mcp_config_get(State(state): State) -> Json {
+ let filesystem_allowed_paths = state
+ .mcp_config_path
+ .exists()
+ .then(|| mcp_service::read_config(&state.mcp_config_path).ok())
+ .flatten()
+ .map(|c| mcp_service::filesystem_allowed_paths(&c))
+ .unwrap_or_default();
+
+ Json(McpConfigInfoResponse {
+ config_path: state.mcp_config_path.to_string_lossy().into_owned(),
+ source: state.mcp_config_source.clone(),
+ filesystem_allowed_paths,
+ })
+}
+
+async fn handle_mcp_filesystem_put(
+ State(state): State,
+ Json(body): Json,
+) -> Result<(StatusCode, Json), (StatusCode, Json)> {
+ let paths: Vec = body
+ .paths
+ .iter()
+ .map(|p| p.trim().to_string())
+ .filter(|p| !p.is_empty())
+ .collect();
+ if paths.is_empty() {
+ return Err((
+ StatusCode::BAD_REQUEST,
+ Json(ErrorResponse {
+ error: "at least one path is required".into(),
+ }),
+ ));
+ }
+
+ let _guard = state.mcp_config_mutex.lock().await;
+
+ let mut cfg = if state.mcp_config_path.exists() {
+ mcp_service::read_config(&state.mcp_config_path)
+ .map_err(|e| (StatusCode::BAD_REQUEST, Json(ErrorResponse { error: e })))?
+ } else {
+ mcp_service::load_or_init_config(&state.mcp_config_path).map_err(|e| {
+ (
+ StatusCode::INTERNAL_SERVER_ERROR,
+ Json(ErrorResponse { error: e }),
+ )
+ })?
+ };
+
+ mcp_service::set_filesystem_allowed_paths(&mut cfg, &paths);
+ mcp_service::save_config(&state.mcp_config_path, &cfg).map_err(|e| {
+ (
+ StatusCode::INTERNAL_SERVER_ERROR,
+ Json(ErrorResponse { error: e }),
+ )
+ })?;
+
+ state
+ .emit_log(
+ "mcp",
+ &format!(
+ "filesystem allowed paths ({}) updated → {}",
+ paths.len(),
+ state.mcp_config_path.display()
+ ),
+ )
+ .await;
+
+ mcp_service::rebuild_registry_into_state(&state, &cfg).await;
+
+ Ok((StatusCode::OK, Json(serde_json::json!({ "ok": true }))))
+}
+
+async fn handle_mcp_tools(State(state): State) -> Json> {
+ Json(
+ state
+ .mcp
+ .read()
+ .await
+ .all_tools()
+ .into_iter()
+ .map(|t| McpToolDto {
+ server: t.server_name,
+ name: t.name,
+ description: t.description,
+ })
+ .collect(),
+ )
+}
+
+// ── MCP server CRUD ──────────────────────────────────────────────────
+
+#[derive(Serialize)]
+struct McpServersResponse {
+ servers: std::collections::BTreeMap,
+}
+
+async fn handle_mcp_servers_list(
+ State(state): State,
+) -> Result, (StatusCode, Json)> {
+ let cfg = {
+ let _guard = state.mcp_config_mutex.lock().await;
+ mcp_service::load_or_init_config(&state.mcp_config_path).map_err(|e| {
+ (
+ StatusCode::INTERNAL_SERVER_ERROR,
+ Json(ErrorResponse { error: e }),
+ )
+ })?
+ };
+ Ok(Json(McpServersResponse {
+ servers: cfg.servers,
+ }))
+}
+
+fn is_valid_server_name(name: &str) -> bool {
+ !name.is_empty()
+ && name.len() <= 64
+ && name
+ .chars()
+ .all(|c| c.is_ascii_alphanumeric() || c == '-' || c == '_')
+}
+
+async fn handle_mcp_server_upsert(
+ State(state): State,
+ Path(name): Path,
+ Json(entry): Json,
+) -> Result<(StatusCode, Json), (StatusCode, Json)> {
+ if !is_valid_server_name(&name) {
+ return Err((
+ StatusCode::BAD_REQUEST,
+ Json(ErrorResponse {
+ error: "server name must be alphanumeric, hyphens, or underscores (max 64 chars)"
+ .into(),
+ }),
+ ));
+ }
+
+ if let crate::modules::mcp::types::ServerEntry::Stdio { ref command, .. } = entry {
+ if command.trim().is_empty() {
+ return Err((
+ StatusCode::BAD_REQUEST,
+ Json(ErrorResponse {
+ error: "command must not be empty".into(),
+ }),
+ ));
+ }
+ }
+
+ let _guard = state.mcp_config_mutex.lock().await;
+ let mut cfg = mcp_service::load_or_init_config(&state.mcp_config_path).map_err(|e| {
+ (
+ StatusCode::INTERNAL_SERVER_ERROR,
+ Json(ErrorResponse { error: e }),
+ )
+ })?;
+
+ cfg.servers.insert(name.clone(), entry);
+
+ mcp_service::save_config(&state.mcp_config_path, &cfg).map_err(|e| {
+ (
+ StatusCode::INTERNAL_SERVER_ERROR,
+ Json(ErrorResponse { error: e }),
+ )
+ })?;
+
+ state
+ .emit_log("mcp", &format!("server '{name}' saved"))
+ .await;
+ mcp_service::rebuild_registry_into_state(&state, &cfg).await;
+
+ Ok((StatusCode::OK, Json(serde_json::json!({ "ok": true }))))
+}
+
+async fn handle_mcp_server_delete(
+ State(state): State,
+ Path(name): Path,
+) -> Result<(StatusCode, Json), (StatusCode, Json)> {
+ let _guard = state.mcp_config_mutex.lock().await;
+
+ let mut cfg = mcp_service::load_or_init_config(&state.mcp_config_path).map_err(|e| {
+ (
+ StatusCode::INTERNAL_SERVER_ERROR,
+ Json(ErrorResponse { error: e }),
+ )
+ })?;
+
+ if cfg.servers.remove(&name).is_none() {
+ return Err((
+ StatusCode::NOT_FOUND,
+ Json(ErrorResponse {
+ error: format!("server '{name}' not found"),
+ }),
+ ));
+ }
+
+ mcp_service::save_config(&state.mcp_config_path, &cfg).map_err(|e| {
+ (
+ StatusCode::INTERNAL_SERVER_ERROR,
+ Json(ErrorResponse { error: e }),
+ )
+ })?;
+
+ state
+ .emit_log("mcp", &format!("server '{name}' removed"))
+ .await;
+ mcp_service::rebuild_registry_into_state(&state, &cfg).await;
+
+ Ok((StatusCode::OK, Json(serde_json::json!({ "ok": true }))))
+}
+
async fn handle_logs_sse(
State(state): State,
) -> Sse>> {
diff --git a/src-tauri/src/lib.rs b/src-tauri/src/lib.rs
index 49488d5..a56f05f 100644
--- a/src-tauri/src/lib.rs
+++ b/src-tauri/src/lib.rs
@@ -1,6 +1,6 @@
mod app;
mod infrastructure;
-mod modules;
+pub mod modules;
mod shared;
pub fn run() {
diff --git a/src-tauri/src/modules/bot/agent.rs b/src-tauri/src/modules/bot/agent.rs
new file mode 100644
index 0000000..c855e7c
--- /dev/null
+++ b/src-tauri/src/modules/bot/agent.rs
@@ -0,0 +1,293 @@
+use crate::modules::ollama::service as ollama;
+use crate::shared::state::AppState;
+use serde_json::json;
+use std::time::{Duration, Instant};
+
+const MAX_STEPS: usize = 3;
+
+/// Ollama sometimes returns `function.arguments` as a JSON string; normalize to an object.
+fn tool_call_arguments(call: &serde_json::Value) -> serde_json::Value {
+ let raw = call.get("function").and_then(|f| f.get("arguments"));
+ match raw {
+ None => json!({}),
+ Some(serde_json::Value::String(s)) => {
+ serde_json::from_str::(s).unwrap_or_else(|_| json!({}))
+ }
+ Some(v) => v.clone(),
+ }
+}
+
/// Render a `Duration` for log lines: whole milliseconds below one second
/// (e.g. "250ms"), otherwise seconds with one decimal place (e.g. "1.5s").
fn fmt_duration(d: Duration) -> String {
    let total_ms = d.as_millis();
    if total_ms >= 1000 {
        format!("{:.1}s", d.as_secs_f64())
    } else {
        format!("{total_ms}ms")
    }
}
+
/// Provenance of the user-visible reply: produced by the model, or raw tool
/// output passed through (direct-return tools, or the empty-summary fallback).
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum ReplySource {
    Model,
    Tool,
}

/// Outcome of a single agent turn: the reply text plus where it came from.
pub struct TurnResult {
    pub text: String,
    pub source: ReplySource,
}
+
+pub async fn run_turn(state: &AppState, user_message: &str) -> Result {
+ let model = if let Some(selected) = state.preferred_ollama_model.read().await.clone() {
+ selected
+ } else {
+ ollama::active_model().await?
+ };
+
+ let (ollama_tools, has_tools) = {
+ let reg = state.mcp.read().await;
+ (reg.ollama_tools(), !reg.is_empty())
+ };
+
+ let fs_context = {
+ let paths = state.cached_filesystem_paths.read().await.clone();
+ if paths.is_empty() {
+ String::new()
+ } else {
+ let listing = paths.join(", ");
+ format!(
+ "\nFile tools operate on these directories: {listing}\n\
+ Always use absolute paths rooted in one of those directories."
+ )
+ }
+ };
+
+ let system = if has_tools {
+ format!(
+ "You are a helpful assistant with tool access.\n\
+ Rules:\n\
+ - Call a tool ONLY when you need external data you don't already have.\n\
+ - After receiving tool results, answer the user's question immediately in the same response.\n\
+ - Be concise and direct.{fs_context}"
+ )
+ } else {
+ "Answer concisely.".to_string()
+ };
+
+ let mut messages = json!([
+ { "role": "system", "content": system },
+ { "role": "user", "content": user_message }
+ ]);
+
+ let mut tool_results: Vec<(String, String)> = Vec::new();
+ let mut tools_supported = true;
+ let empty_tools = json!([]);
+
+ // Phase 1: let the model call tools (up to MAX_STEPS rounds).
+ for step in 0..MAX_STEPS {
+ let t_model = Instant::now();
+ let effective_tools = if tools_supported {
+ &ollama_tools
+ } else {
+ &empty_tools
+ };
+ let result = ollama::chat_with_tools(&model, &messages, effective_tools).await?;
+ let msg = result.message;
+ if !result.tools_sent && tools_supported {
+ tools_supported = false;
+ state
+ .emit_log(
+ "tool",
+ &format!("{model} does not support tools — answering without them"),
+ )
+ .await;
+ }
+ state
+ .emit_log(
+ "time",
+ &format!("model step {step} {}", fmt_duration(t_model.elapsed())),
+ )
+ .await;
+
+ if let Some(arr) = messages.as_array_mut() {
+ arr.push(msg.clone());
+ }
+
+ let tool_calls = msg
+ .get("tool_calls")
+ .and_then(|v| v.as_array())
+ .cloned()
+ .unwrap_or_default();
+
+ if tool_calls.is_empty() {
+ let text = msg
+ .get("content")
+ .and_then(|v| v.as_str())
+ .unwrap_or("")
+ .trim()
+ .to_string();
+
+ if !text.is_empty() {
+ // Model already produced a usable answer (with or without prior tool data).
+ state
+ .emit_log(
+ "tool",
+ if tool_results.is_empty() {
+ "model replied in text"
+ } else {
+ "answered from tool data"
+ },
+ )
+ .await;
+ return Ok(TurnResult {
+ text,
+ source: ReplySource::Model,
+ });
+ }
+
+ // Model returned no text after tools ran — fall through to summarize.
+ if tool_results.is_empty() {
+ return Ok(TurnResult {
+ text: String::new(),
+ source: ReplySource::Model,
+ });
+ }
+ break;
+ }
+
+ state
+ .emit_log(
+ "tool",
+ &format!("model requested {} tool call(s)", tool_calls.len()),
+ )
+ .await;
+
+ let mut direct_replies: Vec = Vec::new();
+
+ for call in &tool_calls {
+ let name = call
+ .get("function")
+ .and_then(|f| f.get("name"))
+ .and_then(|v| v.as_str())
+ .unwrap_or("")
+ .to_string();
+ let args = tool_call_arguments(call);
+
+ state.emit_log("tool", &format!("[{step}] {name}")).await;
+
+ let t_tool = Instant::now();
+ let resolved = {
+ let reg = state.mcp.read().await;
+ reg.resolve_tool(&name)
+ };
+ let (result_text, is_direct) = match resolved {
+ Ok((provider, tool_name, direct)) => {
+ match provider.call_tool(&tool_name, args).await {
+ Ok(text) => {
+ state
+ .emit_log("tool", &format!("result ({} bytes)", text.len()))
+ .await;
+ (text, direct)
+ }
+ Err(e) => {
+ state.emit_log("tool", &format!("error: {e}")).await;
+ (format!("ERROR: {e}"), false)
+ }
+ }
+ }
+ Err(e) => {
+ state.emit_log("tool", &format!("error: {e}")).await;
+ (format!("ERROR: {e}"), false)
+ }
+ };
+ state
+ .emit_log(
+ "time",
+ &format!("tool {name} {}", fmt_duration(t_tool.elapsed())),
+ )
+ .await;
+
+ tool_results.push((name.clone(), result_text.clone()));
+
+ if is_direct {
+ direct_replies.push(result_text.clone());
+ }
+
+ if let Some(arr) = messages.as_array_mut() {
+ arr.push(json!({
+ "role": "tool",
+ "name": name,
+ "content": result_text,
+ }));
+ }
+ }
+
+ if !direct_replies.is_empty() {
+ return Ok(TurnResult {
+ text: direct_replies.join("\n\n"),
+ source: ReplySource::Tool,
+ });
+ }
+ }
+
+ // Phase 2: tools ran but model didn't produce a good answer yet.
+ // Make a clean summarization call — no tools, plain Q&A with inlined data.
+ if !tool_results.is_empty() {
+ let mut data_block = String::new();
+ for (name, content) in &tool_results {
+ data_block.push_str(&format!("--- {name} result ---\n{content}\n"));
+ }
+
+ let summary_messages = json!([
+ {
+ "role": "system",
+ "content": "Answer the user's question using ONLY the data provided below. Be concise and direct."
+ },
+ {
+ "role": "user",
+ "content": format!("{user_message}\n\nData:\n{data_block}")
+ }
+ ]);
+
+ let empty = json!([]);
+ let t_summary = Instant::now();
+ let summary_result = ollama::chat_with_tools(&model, &summary_messages, &empty).await?;
+ let summary_msg = summary_result.message;
+ state
+ .emit_log(
+ "time",
+ &format!("summarize {}", fmt_duration(t_summary.elapsed())),
+ )
+ .await;
+
+ let text = summary_msg
+ .get("content")
+ .and_then(|v| v.as_str())
+ .unwrap_or("")
+ .to_string();
+
+ if !text.trim().is_empty() {
+ state.emit_log("tool", "answered from tool data").await;
+ return Ok(TurnResult {
+ text,
+ source: ReplySource::Model,
+ });
+ }
+
+ let fallback = tool_results
+ .last()
+ .map(|(_, c)| c.clone())
+ .expect("tool_results must be non-empty here after guard");
+ state
+ .emit_log("tool", "empty summary, returning raw tool output")
+ .await;
+ return Ok(TurnResult {
+ text: fallback,
+ source: ReplySource::Tool,
+ });
+ }
+
+ Err(format!(
+ "agent exceeded {MAX_STEPS} steps without finishing"
+ ))
+}
diff --git a/src-tauri/src/modules/bot/commands.rs b/src-tauri/src/modules/bot/commands.rs
index ae4017f..d620866 100644
--- a/src-tauri/src/modules/bot/commands.rs
+++ b/src-tauri/src/modules/bot/commands.rs
@@ -1,6 +1,8 @@
use crate::infrastructure::bot_lifecycle;
use crate::modules::bot::repository;
use crate::shared::state::AppState;
+#[cfg(desktop)]
+use tauri_plugin_dialog::DialogExt;
#[tauri::command]
pub async fn get_connection_status(
@@ -26,3 +28,21 @@ pub async fn disconnect_bot(state: tauri::State<'_, AppState>) -> Result Result
{`curl -fsSL https://ollama.com/install.sh | sh
-ollama pull llama3.2`}
+ollama pull qwen3:8b`}
+
+ Recommended: qwen3:8b — good balance of speed
+ and tool-calling support.
+
{ollamaChecking && (
Detecting Ollama…
@@ -262,7 +266,7 @@ ollama pull llama3.2`}
{ollamaReachable === true && !ollamaModel && (
Ollama is running but no model is pulled yet. Run{" "}
- ollama pull llama3.2 first.
+ ollama pull qwen3:8b first.
)}
diff --git a/src/modules/bot/components/TerminalPreview.tsx b/src/modules/bot/components/TerminalPreview.tsx
index f9c3220..9511a34 100644
--- a/src/modules/bot/components/TerminalPreview.tsx
+++ b/src/modules/bot/components/TerminalPreview.tsx
@@ -13,6 +13,7 @@ function kindClass(kind: string) {
if (kind === "ok") return "bg-emerald-400/10 text-emerald-300";
if (kind === "run") return "bg-sky-400/10 text-sky-300";
if (kind === "tool") return "bg-yellow-400/10 text-yellow-200";
+ if (kind === "time") return "bg-fuchsia-400/10 text-fuchsia-200";
if (kind === "reply") return "bg-violet-400/10 text-violet-300";
if (kind === "msg") return "bg-cyan-400/10 text-cyan-300";
return "bg-slate-400/10 text-slate-300";
@@ -121,7 +122,7 @@ export function TerminalPreview() {
{lines.map((line, i) => (
diff --git a/src/modules/mcp/components/AddServerForm.tsx b/src/modules/mcp/components/AddServerForm.tsx
new file mode 100644
index 0000000..2a61dbc
--- /dev/null
+++ b/src/modules/mcp/components/AddServerForm.tsx
@@ -0,0 +1,308 @@
+import { useState } from "react";
+import type { ServerEntry } from "..";
+
+type Props = {
+ busy: boolean;
+ onAdd: (name: string, entry: ServerEntry) => Promise
;
+};
+
+type Mode = "paste" | "form";
+
+export function AddServerForm({ busy, onAdd }: Props) {
+ const [open, setOpen] = useState(false);
+ const [mode, setMode] = useState("paste");
+ const [error, setError] = useState(null);
+
+ // Paste mode state
+ const [jsonText, setJsonText] = useState("");
+ const [pasteName, setPasteName] = useState("");
+
+ // Form mode state
+ const [formName, setFormName] = useState("");
+ const [command, setCommand] = useState("");
+ const [argsText, setArgsText] = useState("");
+ const [envText, setEnvText] = useState("");
+ const [directReturn, setDirectReturn] = useState(false);
+
+ const reset = () => {
+ setJsonText("");
+ setPasteName("");
+ setFormName("");
+ setCommand("");
+ setArgsText("");
+ setEnvText("");
+ setDirectReturn(false);
+ setError(null);
+ };
+
+ const handlePasteSubmit = async () => {
+ setError(null);
+ let parsed: unknown;
+ try {
+ parsed = JSON.parse(jsonText);
+ } catch {
+ setError("Invalid JSON");
+ return;
+ }
+
+ if (typeof parsed !== "object" || parsed === null || Array.isArray(parsed)) {
+ setError("Expected a JSON object");
+ return;
+ }
+
+ const obj = parsed as Record;
+
+ // Detect format: { "type": "stdio", ... } vs { "my-server": { "type": "stdio", ... } }
+ let name: string;
+ let entry: ServerEntry;
+
+ if ("type" in obj && (obj.type === "stdio" || obj.type === "native")) {
+ // Direct entry — need a name from the input
+ if (!pasteName.trim()) {
+ setError("Enter a server name (the JSON has no key wrapper)");
+ return;
+ }
+ name = pasteName.trim();
+ entry = normalizeEntry(obj);
+ } else {
+ // Wrapped: { "server-name": { ... } }
+ const keys = Object.keys(obj);
+ if (keys.length !== 1) {
+ setError('Expected either a server entry or { "name": { ...entry } }');
+ return;
+ }
+ name = keys[0];
+ const inner = obj[name];
+ if (typeof inner !== "object" || inner === null || Array.isArray(inner)) {
+ setError(`Value for "${name}" is not a valid server entry`);
+ return;
+ }
+ entry = normalizeEntry(inner as Record);
+ }
+
+ try {
+ await onAdd(name, entry);
+ reset();
+ setOpen(false);
+ } catch (e) {
+ setError(e instanceof Error ? e.message : "Could not add server");
+ }
+ };
+
+ const handleFormSubmit = async () => {
+ setError(null);
+ const name = formName.trim();
+ if (!name) {
+ setError("Server name is required");
+ return;
+ }
+ if (!command.trim()) {
+ setError("Command is required");
+ return;
+ }
+
+ const args = argsText
+ .split("\n")
+ .map((l) => l.trim())
+ .filter(Boolean);
+ const env: Record = {};
+ for (const line of envText.split("\n")) {
+ const eq = line.indexOf("=");
+ if (eq > 0) env[line.slice(0, eq).trim()] = line.slice(eq + 1).trim();
+ }
+
+ try {
+ await onAdd(name, {
+ type: "stdio",
+ command: command.trim(),
+ args,
+ env,
+ direct_return: directReturn,
+ });
+ reset();
+ setOpen(false);
+ } catch (e) {
+ setError(e instanceof Error ? e.message : "Could not add server");
+ }
+ };
+
+ const inputClass =
+ "w-full rounded-lg border border-white/15 bg-white/5 px-2.5 py-2 font-mono text-xs text-white outline-none placeholder:text-white/25 focus:border-white/30";
+
+ if (!open) {
+ return (
+
+ );
+ }
+
+ return (
+
+
+
Add server
+
+
+
+ {/* Mode tabs */}
+
+ {(["paste", "form"] as const).map((m) => (
+
+ ))}
+
+
+ {mode === "paste" && (
+
+
+
+ setPasteName(e.target.value)}
+ placeholder="my-server"
+ className={inputClass}
+ />
+
+
+
+
+
+
+ )}
+
+ {mode === "form" && (
+
+
+
+ setFormName(e.target.value)}
+ placeholder="my-server"
+ className={inputClass}
+ />
+
+
+
+ setCommand(e.target.value)}
+ placeholder="npx"
+ className={inputClass}
+ />
+
+
+
+
+
+
+
+
+
+
+ )}
+
+ {error &&
{error}
}
+
+ );
+}
+
+/** Normalize a raw JSON object into a ServerEntry, filling in defaults. */
+function normalizeEntry(obj: Record): ServerEntry {
+ if (obj.type === "native") {
+ return { type: "native", id: String(obj.id ?? "") };
+ }
+ return {
+ type: "stdio",
+ command: String(obj.command ?? ""),
+ args: Array.isArray(obj.args) ? obj.args.map(String) : [],
+ env:
+ typeof obj.env === "object" && obj.env !== null && !Array.isArray(obj.env)
+ ? Object.fromEntries(
+ Object.entries(obj.env as Record).map(([k, v]) => [k, String(v)]),
+ )
+ : {},
+ direct_return: Boolean(obj.direct_return ?? false),
+ };
+}
diff --git a/src/modules/mcp/components/McpServerCard.tsx b/src/modules/mcp/components/McpServerCard.tsx
new file mode 100644
index 0000000..c3f1813
--- /dev/null
+++ b/src/modules/mcp/components/McpServerCard.tsx
@@ -0,0 +1,476 @@
+import { useState } from "react";
+import type { McpTool, ServerEntry, ServerEntryStdio } from "..";
+
+type Props = {
+ name: string;
+ entry: ServerEntry;
+ tools: McpTool[];
+ busy: boolean;
+ editingName: string | null;
+ onSave: (name: string, entry: ServerEntry) => Promise;
+ onDelete: (name: string) => Promise;
+ onEditStart: (name: string | null) => void;
+};
+
+/** Detect filesystem MCP package in live args textarea (one token per line). */
+function argsTextLooksLikeFilesystem(argsText: string): boolean {
+ return argsText
+ .split("\n")
+ .map((l) => l.trim())
+ .filter(Boolean)
+ .some((a) => a.includes("server-filesystem"));
+}
+
+export function McpServerCard({
+ name,
+ entry,
+ tools,
+ busy,
+ editingName,
+ onSave,
+ onDelete,
+ onEditStart,
+}: Props) {
+ const isNative = entry.type === "native";
+ const isEditing = editingName === name;
+ const [confirmDelete, setConfirmDelete] = useState(false);
+ const [deleteError, setDeleteError] = useState(null);
+
+ const toolCount = tools.filter((t) => t.server === name).length;
+ const commandPreview =
+ entry.type === "stdio" ? [entry.command, ...entry.args].join(" ") : `native:${entry.id}`;
+
+ const handleToggleDirect = async () => {
+ if (entry.type !== "stdio") return;
+ await onSave(name, { ...entry, direct_return: !entry.direct_return });
+ };
+
+ const handleDelete = async () => {
+ setDeleteError(null);
+ try {
+ await onDelete(name);
+ setConfirmDelete(false);
+ } catch (e) {
+ setDeleteError(e instanceof Error ? e.message : "Could not remove server");
+ }
+ };
+
+ // ── Editing: form replaces the card content ────────────────────────
+ if (isEditing && entry.type === "stdio") {
+ return (
+
+ onSave(name, updated)}
+ onCancel={() => onEditStart(null)}
+ />
+
+ );
+ }
+
+ // ── Read-only card ─────────────────────────────────────────────────
+ return (
+
+
+
+
+
{name}
+
+ {entry.type}
+
+ {entry.type === "stdio" && entry.direct_return && (
+
+ direct
+
+ )}
+
+
+ {commandPreview}
+
+
+ {toolCount} tool{toolCount === 1 ? "" : "s"}
+
+
+
+ {!isNative && (
+
+ {entry.type === "stdio" && (
+
+ )}
+
+
+
+ )}
+
+
+ {confirmDelete && (
+
+
+ Remove {name}? Its tools will be disconnected.
+
+ {deleteError && (
+
+ {deleteError}
+
+ )}
+
+
+
+
+
+ )}
+
+ );
+}
+
+// ── Unified edit form (all servers, with filesystem folder helper) ───
+
+const INPUT_CLASS =
+ "w-full rounded-lg border border-white/15 bg-white/5 px-2.5 py-2 font-mono text-xs text-white outline-none placeholder:text-white/25 focus:border-white/30";
+
+function InlineEditForm({
+ name,
+ entry,
+ busy,
+ onSave,
+ onCancel,
+}: {
+ name: string;
+ entry: ServerEntryStdio;
+ busy: boolean;
+ onSave: (entry: ServerEntry) => Promise;
+ onCancel: () => void;
+}) {
+ const [command, setCommand] = useState(entry.command);
+ const [argsText, setArgsText] = useState(entry.args.join("\n"));
+ const [envText, setEnvText] = useState(
+ Object.entries(entry.env)
+ .map(([k, v]) => `${k}=${v}`)
+ .join("\n"),
+ );
+ const [directReturn, setDirectReturn] = useState(entry.direct_return);
+ const [pickFolderError, setPickFolderError] = useState(null);
+
+ const isFs = argsTextLooksLikeFilesystem(argsText);
+
+ // ── Filesystem folder helpers (read/write the args textarea) ──────
+
+ const parsePaths = (): string[] => {
+ const lines = argsText
+ .split("\n")
+ .map((l) => l.trim())
+ .filter(Boolean);
+ const pkgIdx = lines.findIndex((a) => a.includes("server-filesystem"));
+ if (pkgIdx < 0) return [];
+ return lines.slice(pkgIdx + 1);
+ };
+
+ const updatePaths = (paths: string[]) => {
+ const lines = argsText
+ .split("\n")
+ .map((l) => l.trim())
+ .filter(Boolean);
+ const pkgIdx = lines.findIndex((a) => a.includes("server-filesystem"));
+ if (pkgIdx < 0) return;
+ const prefix = lines.slice(0, pkgIdx + 1);
+ setArgsText([...prefix, ...paths].join("\n"));
+ };
+
+ const addPath = (p: string) => {
+ const trimmed = p.trim();
+ if (!trimmed) return;
+ const current = parsePaths();
+ if (!current.includes(trimmed)) {
+ updatePaths([...current, trimmed]);
+ }
+ };
+
+ const removePath = (path: string) => {
+ updatePaths(parsePaths().filter((p) => p !== path));
+ };
+
+ const pickFolder = async () => {
+ setPickFolderError(null);
+ try {
+ const { invoke } = await import("@tauri-apps/api/core");
+ try {
+ const picked = await invoke("pick_mcp_filesystem_folder");
+ if (picked) addPath(picked);
+ } catch (invokeErr) {
+ setPickFolderError(
+ invokeErr instanceof Error ? invokeErr.message : "Could not open folder picker",
+ );
+ }
+ } catch {
+ // Web / non-Tauri: dynamic import of `@tauri-apps/api/core` fails — expected, stay silent
+ }
+ };
+
+ // ── Submit ────────────────────────────────────────────────────────
+
+ const handleSubmit = async () => {
+ const args = argsText
+ .split("\n")
+ .map((l) => l.trim())
+ .filter(Boolean);
+ const env: Record = {};
+ for (const line of envText.split("\n")) {
+ const eq = line.indexOf("=");
+ if (eq > 0) {
+ const key = line.slice(0, eq).trim();
+ if (key !== "") env[key] = line.slice(eq + 1).trim();
+ }
+ }
+ await onSave({
+ type: "stdio",
+ command: command.trim(),
+ args,
+ env,
+ direct_return: directReturn,
+ });
+ };
+
+ const fsPaths = isFs ? parsePaths() : [];
+
+ return (
+
+ {/* Header with name + cancel */}
+
+
+
+ {/* Filesystem folder helper */}
+ {isFs && (
+
+ )}
+
+
+
+ setCommand(e.target.value)}
+ placeholder="npx"
+ className={INPUT_CLASS}
+ />
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ );
+}
+
+// ── Folder path helper (visual add/remove for filesystem paths) ─────
+
+function FolderHelper({
+ paths,
+ pickError,
+ onAdd,
+ onRemove,
+ onPickFolder,
+}: {
+ paths: string[];
+ pickError: string | null;
+ onAdd: (p: string) => void;
+ onRemove: (path: string) => void;
+ onPickFolder: () => void;
+}) {
+ const [newPath, setNewPath] = useState("");
+
+ const handleAdd = () => {
+ if (newPath.trim()) {
+ onAdd(newPath);
+ setNewPath("");
+ }
+ };
+
+ return (
+
+
+ Allowed folders
+
+
+ {paths.length === 0 &&
No folders yet
}
+
+ {pickError && (
+
+ {pickError}
+
+ )}
+
+ {paths.length > 0 && (
+
+ {paths.map((p, i) => (
+
+
+ {p}
+
+
+
+ ))}
+
+ )}
+
+
+ setNewPath(e.target.value)}
+ onKeyDown={(e) => {
+ if (e.key === "Enter" && newPath.trim()) {
+ e.preventDefault();
+ handleAdd();
+ }
+ }}
+ placeholder="/path/to/folder"
+ className="min-w-0 flex-1 rounded-md border border-white/15 bg-white/5 px-2 py-1.5 font-mono text-[11px] text-white outline-none placeholder:text-white/20 focus:border-white/30"
+ />
+
+
+
+
+ );
+}
diff --git a/src/modules/mcp/components/McpToolsPanel.tsx b/src/modules/mcp/components/McpToolsPanel.tsx
new file mode 100644
index 0000000..79e30fc
--- /dev/null
+++ b/src/modules/mcp/components/McpToolsPanel.tsx
@@ -0,0 +1,313 @@
+import * as Accordion from "@radix-ui/react-accordion";
+import { useCallback, useEffect, useMemo, useRef, useState } from "react";
+import {
+ deleteMcpServer,
+ fetchMcpServers,
+ fetchMcpTools,
+ upsertMcpServer,
+ type McpTool,
+ type ServerEntry,
+} from "..";
+import { AddServerForm } from "./AddServerForm";
+import { McpServerCard } from "./McpServerCard";
+
+/**
+ * Dashboard panel: filesystem shortcut, server list with CRUD, and tool groups.
+ */
+export function McpToolsPanel() {
+ const [tools, setTools] = useState(null);
+ const [servers, setServers] = useState | null>(null);
+ const [toolsError, setToolsError] = useState(null);
+ const [serversError, setServersError] = useState(null);
+ const [toolsLoading, setToolsLoading] = useState(true);
+ const [serversLoading, setServersLoading] = useState(true);
+ const [busy, setBusy] = useState(false);
+ const [notice, setNotice] = useState(null);
+ const [editingName, setEditingName] = useState(null);
+
+ const toolsSeqRef = useRef(0);
+ const serversSeqRef = useRef(0);
+ const cancelledRef = useRef(false);
+ const pollTimerRef = useRef | null>(null);
+ const scheduleToolsPollRef = useRef<(delay: number) => void>(() => {});
+ const toolsRef = useRef(null);
+ toolsRef.current = tools;
+
+ const reload = useCallback(async () => {
+ setServersLoading(true);
+ setToolsLoading(true);
+ const sId = ++serversSeqRef.current;
+ const tId = ++toolsSeqRef.current;
+ const [t, s] = await Promise.all([fetchMcpTools(), fetchMcpServers()]);
+ if (cancelledRef.current) return;
+
+ if (sId === serversSeqRef.current) {
+ setServersLoading(false);
+ if (s !== null) {
+ setServers(s);
+ setServersError(null);
+ } else {
+ setServersError("Could not load MCP servers");
+ }
+ }
+
+ if (tId === toolsSeqRef.current) {
+ setToolsLoading(false);
+ if (t !== null) {
+ setTools(t);
+ setToolsError(null);
+ } else {
+ setToolsError("Could not load MCP tools");
+ }
+ const next = t !== null && t.length > 0 ? 10_000 : 30_000;
+ scheduleToolsPollRef.current(next);
+ }
+ }, []);
+
+ useEffect(() => {
+ cancelledRef.current = false;
+
+ const schedulePoll = (delay: number) => {
+ if (pollTimerRef.current) clearTimeout(pollTimerRef.current);
+ pollTimerRef.current = setTimeout(runPoll, delay);
+ };
+ scheduleToolsPollRef.current = schedulePoll;
+
+ const runPoll = async () => {
+ pollTimerRef.current = null;
+ const tId = ++toolsSeqRef.current;
+ if (toolsRef.current === null) setToolsLoading(true);
+ const data = await fetchMcpTools();
+ if (cancelledRef.current) return;
+ if (tId !== toolsSeqRef.current) return;
+
+ setToolsLoading(false);
+ if (data !== null) {
+ setTools(data);
+ setToolsError(null);
+ } else {
+ setToolsError("Could not load MCP tools");
+ }
+ const next = data !== null && data.length > 0 ? 10_000 : 30_000;
+ schedulePoll(next);
+ };
+
+ const loadServersOnce = async () => {
+ const sId = ++serversSeqRef.current;
+ setServersLoading(true);
+ const s = await fetchMcpServers();
+ if (cancelledRef.current) return;
+ if (sId !== serversSeqRef.current) return;
+ setServersLoading(false);
+ if (s !== null) {
+ setServers(s);
+ setServersError(null);
+ } else {
+ setServersError("Could not load MCP servers");
+ }
+ };
+
+ void loadServersOnce();
+ schedulePoll(0);
+
+ return () => {
+ cancelledRef.current = true;
+ if (pollTimerRef.current) clearTimeout(pollTimerRef.current);
+ pollTimerRef.current = null;
+ scheduleToolsPollRef.current = () => {};
+ };
+ }, []);
+
+ // ── Server CRUD handlers ───────────────────────────────────────────
+
+ const handleSaveServer = async (name: string, entry: ServerEntry): Promise => {
+ setBusy(true);
+ setNotice(null);
+ const ok = await upsertMcpServer(name, entry);
+ if (!ok) {
+ setNotice(`Could not save "${name}"`);
+ setBusy(false);
+ return false;
+ }
+ setEditingName(null);
+ await reload();
+ setBusy(false);
+ setNotice(`Server "${name}" saved — tools reloaded`);
+ return true;
+ };
+
+ const handleAddServer = async (name: string, entry: ServerEntry) => {
+ const ok = await handleSaveServer(name, entry);
+ if (!ok) {
+ throw new Error(`Could not save "${name}"`);
+ }
+ };
+
+ const handleDeleteServer = async (name: string) => {
+ setBusy(true);
+ setNotice(null);
+ const ok = await deleteMcpServer(name);
+ if (!ok) {
+ setBusy(false);
+ throw new Error(`Could not remove "${name}"`);
+ }
+ await reload();
+ setBusy(false);
+ setNotice(`Server "${name}" removed`);
+ };
+
+ // ── Derived data ───────────────────────────────────────────────────
+
+ const serverEntries = useMemo(() => {
+ if (!servers) return null;
+ return Object.entries(servers).sort(([a], [b]) => a.localeCompare(b));
+ }, [servers]);
+
+ const groups = useMemo(() => {
+ if (!tools) return null;
+ const map = new Map();
+ for (const tool of tools) {
+ const list = map.get(tool.server) ?? [];
+ list.push(tool);
+ map.set(tool.server, list);
+ }
+ return [...map.entries()]
+ .sort(([a], [b]) => a.localeCompare(b))
+ .map(([server, items]) => ({ server, items }));
+ }, [tools]);
+
+ return (
+
+ {notice && (
+
+ {notice}
+
+ )}
+
+
+ {/* ── Servers ─────────────────────────────────────────────── */}
+
+
Servers
+
+ {serversError && servers !== null && (
+
+ {serversError}
+
+ )}
+
+ {serversLoading && servers === null && !serversError && (
+
+ Loading…
+
+ )}
+
+ {serversError && servers === null && !serversLoading && (
+
+ {serversError}
+
+ )}
+
+ {serverEntries !== null && (
+
+ {serverEntries.map(([name, entry]) => (
+ {
+ await handleSaveServer(serverName, serverEntry);
+ }}
+ onDelete={handleDeleteServer}
+ onEditStart={setEditingName}
+ />
+ ))}
+
+ )}
+
+
+
+
+ {/* ── Available tools ─────────────────────────────────────── */}
+
+
Available tools
+
+ {toolsError && tools !== null && (
+
+ {toolsError}
+
+ )}
+
+ {toolsLoading && tools === null && !toolsError && (
+
+ Loading…
+
+ )}
+
+ {toolsError && tools === null && !toolsLoading && (
+
+ {toolsError}
+
+ )}
+
+ {groups !== null && groups.length === 0 && (
+
No MCP tools connected.
+ )}
+
+ {groups !== null && groups.length > 0 && (
+
+ {groups.map((group) => (
+
+
+
+
+
+ {group.server}
+
+
+ {group.items.length} command
+ {group.items.length === 1 ? "" : "s"}
+
+
+
+ +
+
+
+
+
+
+ {group.items.map((tool) => (
+ -
+
{tool.name}
+ {tool.description && (
+
+ {tool.description}
+
+ )}
+
+ ))}
+
+
+
+ ))}
+
+ )}
+
+
+
+ );
+}
diff --git a/src/modules/mcp/index.ts b/src/modules/mcp/index.ts
new file mode 100644
index 0000000..035c4b6
--- /dev/null
+++ b/src/modules/mcp/index.ts
@@ -0,0 +1,147 @@
+import { PENGINE_API_BASE } from "../../shared/api/config";
+
+export type McpTool = {
+ server: string;
+ name: string;
+ description: string | null;
+};
+
+export type McpConfigInfo = {
+ config_path: string;
+ source: string;
+ filesystem_allowed_paths: string[];
+};
+
+export type ServerEntryStdio = {
+ type: "stdio";
+ command: string;
+ args: string[];
+ env: Record;
+ direct_return: boolean;
+};
+
+export type ServerEntryNative = {
+ type: "native";
+ id: string;
+};
+
+export type ServerEntry = ServerEntryStdio | ServerEntryNative;
+
+function makeTimeoutSignal(timeoutMs: number): { signal: AbortSignal; cleanup: () => void } {
+ if (typeof AbortSignal !== "undefined" && typeof AbortSignal.timeout === "function") {
+ return { signal: AbortSignal.timeout(timeoutMs), cleanup: () => {} };
+ }
+ const controller = new AbortController();
+ const timer = setTimeout(() => controller.abort(), timeoutMs);
+ return {
+ signal: controller.signal,
+ cleanup: () => clearTimeout(timer),
+ };
+}
+
+/** GET `/v1/mcp/config` — active `mcp.json` path and filesystem allow-list. */
+export async function fetchMcpConfig(timeoutMs = 3000): Promise {
+ const { signal, cleanup } = makeTimeoutSignal(timeoutMs);
+ try {
+ const resp = await fetch(`${PENGINE_API_BASE}/v1/mcp/config`, {
+ signal,
+ });
+ if (!resp.ok) return null;
+ return (await resp.json()) as McpConfigInfo;
+ } catch {
+ return null;
+ } finally {
+ cleanup();
+ }
+}
+
+/** PUT `/v1/mcp/filesystem` — set allowed folders for the `filesystem` stdio server and reload tools. */
+export async function putMcpFilesystemPaths(paths: string[], timeoutMs = 15000): Promise {
+ const { signal, cleanup } = makeTimeoutSignal(timeoutMs);
+ try {
+ const resp = await fetch(`${PENGINE_API_BASE}/v1/mcp/filesystem`, {
+ method: "PUT",
+ headers: { "Content-Type": "application/json" },
+ body: JSON.stringify({ paths }),
+ signal,
+ });
+ return resp.ok;
+ } catch {
+ return false;
+ } finally {
+ cleanup();
+ }
+}
+
+/** GET `/v1/mcp/tools` — flat list of tools across all connected MCP servers. `null` = request failed. */
+export async function fetchMcpTools(timeoutMs = 3000): Promise {
+ const { signal, cleanup } = makeTimeoutSignal(timeoutMs);
+ try {
+ const resp = await fetch(`${PENGINE_API_BASE}/v1/mcp/tools`, {
+ signal,
+ });
+ if (!resp.ok) return null;
+ return (await resp.json()) as McpTool[];
+ } catch {
+ return null;
+ } finally {
+ cleanup();
+ }
+}
+
+/** GET `/v1/mcp/servers` — full server config map from mcp.json. */
+export async function fetchMcpServers(
+ timeoutMs = 5000,
+): Promise | null> {
+ const { signal, cleanup } = makeTimeoutSignal(timeoutMs);
+ try {
+ const resp = await fetch(`${PENGINE_API_BASE}/v1/mcp/servers`, {
+ signal,
+ });
+ if (!resp.ok) return null;
+ const body = (await resp.json()) as { servers: Record };
+ return body.servers;
+ } catch {
+ return null;
+ } finally {
+ cleanup();
+ }
+}
+
+/** PUT `/v1/mcp/servers/{name}` — create or update a server entry, then rebuild tools. */
+export async function upsertMcpServer(
+ name: string,
+ entry: ServerEntry,
+ timeoutMs = 20000,
+): Promise {
+ const { signal, cleanup } = makeTimeoutSignal(timeoutMs);
+ try {
+ const resp = await fetch(`${PENGINE_API_BASE}/v1/mcp/servers/${encodeURIComponent(name)}`, {
+ method: "PUT",
+ headers: { "Content-Type": "application/json" },
+ body: JSON.stringify(entry),
+ signal,
+ });
+ return resp.ok;
+ } catch {
+ return false;
+ } finally {
+ cleanup();
+ }
+}
+
+/** DELETE `/v1/mcp/servers/{name}` — remove a server and rebuild tools. */
+export async function deleteMcpServer(name: string, timeoutMs = 20000): Promise {
+ const { signal, cleanup } = makeTimeoutSignal(timeoutMs);
+ try {
+ const resp = await fetch(`${PENGINE_API_BASE}/v1/mcp/servers/${encodeURIComponent(name)}`, {
+ method: "DELETE",
+ signal,
+ });
+ return resp.ok;
+ } catch {
+ return false;
+ } finally {
+ cleanup();
+ }
+}
diff --git a/src/modules/ollama/api/index.ts b/src/modules/ollama/api/index.ts
index af156e0..f90c141 100644
--- a/src/modules/ollama/api/index.ts
+++ b/src/modules/ollama/api/index.ts
@@ -1,5 +1,5 @@
-import { OLLAMA_API_BASE } from "../../../shared/api/config";
-import type { OllamaProbe } from "../types";
+import { OLLAMA_API_BASE, PENGINE_API_BASE } from "../../../shared/api/config";
+import type { OllamaModelsResponse, OllamaProbe } from "../types";
/** Prefer loaded model from `/api/ps`, else first pulled model from `/api/tags`. */
export async function fetchOllamaModel(timeoutMs = 3000): Promise {
@@ -25,3 +25,44 @@ export async function fetchOllamaModel(timeoutMs = 3000): Promise {
return { reachable: false, model: null };
}
}
+
+export async function fetchOllamaModels(timeoutMs = 3000): Promise {
+ try {
+ const resp = await fetch(`${PENGINE_API_BASE}/v1/ollama/models`, {
+ signal: AbortSignal.timeout(timeoutMs),
+ });
+ if (!resp.ok) {
+ return { reachable: false, active_model: null, selected_model: null, models: [] };
+ }
+ return (await resp.json()) as OllamaModelsResponse;
+ } catch {
+ return { reachable: false, active_model: null, selected_model: null, models: [] };
+ }
+}
+
+export async function setPreferredOllamaModel(
+ model: string | null,
+): Promise<{ ok: boolean; error?: string }> {
+ try {
+ const resp = await fetch(`${PENGINE_API_BASE}/v1/ollama/model`, {
+ method: "PUT",
+ headers: { "Content-Type": "application/json" },
+ body: JSON.stringify({ model }),
+ signal: AbortSignal.timeout(5000),
+ });
+ if (resp.ok) return { ok: true };
+
+ const raw = await resp.text();
+ const fallback = `Request failed (HTTP ${resp.status})`;
+ let message = fallback;
+ try {
+ const body = JSON.parse(raw) as { error?: string; message?: string };
+ message = body.error || body.message || raw.trim() || fallback;
+ } catch {
+ message = raw.trim() || fallback;
+ }
+ return { ok: false, error: message };
+ } catch (e) {
+ return { ok: false, error: e instanceof Error ? e.message : "Request failed" };
+ }
+}
diff --git a/src/modules/ollama/index.ts b/src/modules/ollama/index.ts
index d55fb79..1af34a0 100644
--- a/src/modules/ollama/index.ts
+++ b/src/modules/ollama/index.ts
@@ -1,2 +1,2 @@
-export { fetchOllamaModel } from "./api";
-export type { OllamaProbe } from "./types";
+export { fetchOllamaModel, fetchOllamaModels, setPreferredOllamaModel } from "./api";
+export type { OllamaModelsResponse, OllamaProbe } from "./types";
diff --git a/src/modules/ollama/types.ts b/src/modules/ollama/types.ts
index a2622e5..ccf2986 100644
--- a/src/modules/ollama/types.ts
+++ b/src/modules/ollama/types.ts
@@ -1 +1,8 @@
export type OllamaProbe = { reachable: boolean; model: string | null };
+
+export type OllamaModelsResponse = {
+ reachable: boolean;
+ active_model: string | null;
+ selected_model: string | null;
+ models: string[];
+};
diff --git a/src/pages/DashboardPage.tsx b/src/pages/DashboardPage.tsx
index 81d24de..d090bb2 100644
--- a/src/pages/DashboardPage.tsx
+++ b/src/pages/DashboardPage.tsx
@@ -3,8 +3,8 @@ import { Link, useNavigate } from "react-router-dom";
import { getPengineHealth } from "../modules/bot/api";
import { TerminalPreview } from "../modules/bot/components/TerminalPreview";
import { useAppSessionStore } from "../modules/bot/store/appSessionStore";
-import { fetchOllamaModel } from "../modules/ollama/api";
-import { PENGINE_API_BASE } from "../shared/api/config";
+import { McpToolsPanel } from "../modules/mcp/components/McpToolsPanel";
+import { fetchOllamaModels, setPreferredOllamaModel } from "../modules/ollama/api";
import { TopMenu } from "../shared/ui/TopMenu";
type ServiceInfo = {
@@ -18,9 +18,14 @@ export function DashboardPage() {
const isDeviceConnected = useAppSessionStore((state) => state.isDeviceConnected);
const disconnectDevice = useAppSessionStore((state) => state.disconnectDevice);
const botUsername = useAppSessionStore((state) => state.botUsername);
+ const [availableModels, setAvailableModels] = useState([]);
+ const [selectedModel, setSelectedModel] = useState(null);
+ const [activeModel, setActiveModel] = useState(null);
+ const [savingModel, setSavingModel] = useState(false);
+ const [modelError, setModelError] = useState(null);
const [services, setServices] = useState([
- { name: "Telegram gateway", status: "checking", detail: "Checking…" },
- { name: "Pengine runtime", status: "checking", detail: "Checking…" },
+ { name: "Pengine", status: "checking", detail: "Checking…" },
+ { name: "Telegram", status: "checking", detail: "Checking…" },
{ name: "Ollama", status: "checking", detail: "Checking…" },
]);
const [disconnectError, setDisconnectError] = useState(null);
@@ -32,29 +37,28 @@ export function DashboardPage() {
const botConnected = health?.bot_connected ?? false;
if (health?.bot_username) botUser = health.bot_username;
- const { reachable: ollamaUp, model: ollamaModel } = await fetchOllamaModel(2000);
+ const ollama = await fetchOllamaModels(2500);
+ const ollamaUp = ollama.reachable;
+ const effectiveModel = ollama.selected_model ?? ollama.active_model;
+ setAvailableModels(ollama.models);
+ setSelectedModel(ollama.selected_model);
+ setActiveModel(ollama.active_model);
setServices([
{
- name: "Telegram gateway",
- status: botConnected ? "running" : "stopped",
- detail: botConnected ? `@${botUser} long poll active` : "Not connected",
+ name: "Pengine",
+ status: pengineUp ? "running" : "stopped",
+ detail: pengineUp ? "API reachable" : "Not running",
},
{
- name: "Pengine runtime",
- status: pengineUp ? "running" : "stopped",
- detail: pengineUp
- ? `${PENGINE_API_BASE.replace(/^https?:\/\//, "")} reachable`
- : "App not running",
+ name: "Telegram",
+ status: botConnected ? "running" : "stopped",
+ detail: botConnected ? `@${botUser}` : "Not connected",
},
{
name: "Ollama",
status: ollamaUp ? "running" : "stopped",
- detail: ollamaUp
- ? ollamaModel
- ? `model: ${ollamaModel}`
- : "Running, no model loaded"
- : "Not reachable",
+ detail: ollamaUp ? (effectiveModel ? effectiveModel : "No model loaded") : "Not reachable",
},
]);
}, [botUsername]);
@@ -75,102 +79,137 @@ export function DashboardPage() {
}
};
+ const handleModelChange = async (value: string) => {
+ const next = value === "__active__" ? null : value;
+ setModelError(null);
+ setSavingModel(true);
+ const result = await setPreferredOllamaModel(next);
+ setSavingModel(false);
+ if (result.ok) {
+ await refreshStatus();
+ return;
+ }
+ setModelError(result.error ?? "Could not update model");
+ };
+
+ const anyChecking = services.some((s) => s.status === "checking");
+ const allRunning = services.every((s) => s.status === "running");
+
return (
-
-
- Dashboard
-
- Connected device and running services
-
-
- The Pengine desktop app is running the bot service. Messages from Telegram are handled
- locally even when this page is closed.
-
-
-
-
-
-
-
Services
-
- {services.map((service) => (
-
-
-
{service.name}
-
- {service.detail}
-
-
-
- {service.status}
-
-
- ))}
-
+
+ {/* ── Status bar: services + connection ──────────────────── */}
+
+
+ {/* Overall status */}
+
+
+
+ {anyChecking
+ ? "Checking services..."
+ : allRunning
+ ? "All systems running"
+ : "Some services offline"}
+
-
+
+
+ {/* Service pills — hide detail text on small screens */}
+ {services.map((service) => (
+
+
+
+ {service.name}
+
+
+ {service.detail}
+
+
+ ))}
-
-
-
Device session
- {isDeviceConnected ? (
- <>
-
1 connected device
-
- Telegram messaging is active and local runtime services are available.
-
- >
- ) : (
- <>
-
No device connected
-
- Run through the setup wizard to connect your Telegram bot.
-
-
- Go to setup
-
- >
- )}
+ {/* Connection controls */}
+
+
+
+
-
+ {!isDeviceConnected && (
+
+ Setup
+
+ )}
{isDeviceConnected && (
-
-
Controls
-
- Disconnect the current device session and return to setup.
-
- {disconnectError && (
-
{disconnectError}
- )}
-
-
+
)}
+
+
+ {disconnectError && (
+
{disconnectError}
+ )}
+ {modelError &&
{modelError}
}
+
+ {/* ── Terminal (full width) ────────────────────────────── */}
+
+
+ {/* ── Servers & tools ─────────────────────────────────────── */}
+
diff --git a/src/shared/ui/TopMenu.tsx b/src/shared/ui/TopMenu.tsx
index 92c6fb7..ba97a05 100644
--- a/src/shared/ui/TopMenu.tsx
+++ b/src/shared/ui/TopMenu.tsx
@@ -1,4 +1,5 @@
import * as Menubar from "@radix-ui/react-menubar";
+import { useState } from "react";
import { Link, useLocation } from "react-router-dom";
import { useAppSessionStore } from "../../modules/bot/store/appSessionStore";
@@ -11,50 +12,119 @@ export function TopMenu() {
const location = useLocation();
const isDeviceConnected = useAppSessionStore((s) => s.isDeviceConnected);
const showOpenSetup = !isDeviceConnected && location.pathname !== "/setup";
+ const [mobileOpen, setMobileOpen] = useState(false);
+ const isOnSetup = location.pathname === "/setup";
+
+ const closeMobileMenu = () => setMobileOpen(false);
+ const isActiveLink = (to: string) =>
+ to === "/" ? location.pathname === "/" : location.pathname.startsWith(to);
return (
-
-
-

-
-
- Pengine
-
-
Local AI Agent Engine
+
+
+
+

+
+
+ Pengine
+
+
Local AI Agent Engine
+
+
+
+
+
+
+ Open setup
+
-
-
-
- {navLinks.map((item) => (
-
-
-
- {item.label}
-
-
-
- ))}
-
-
-
+
+
- Open setup
-
+
+
+
+
+
+ {navLinks.map((item) => {
+ const active = isActiveLink(item.to);
+ return (
+
+
+
+ {item.label}
+
+
+
+ );
+ })}
+
+
+
+ {isOnSetup ? "Setup in progress" : "Runtime dashboard"}
+
+
);