From 863d309d19f3b03c2b3a2e2fa40f83410a2ebc53 Mon Sep 17 00:00:00 2001 From: MaximEdogawa Date: Thu, 9 Apr 2026 17:49:11 +0200 Subject: [PATCH 01/11] feat: implement Model Context Protocol (MCP) for tool integration - Added MCP module to facilitate interaction with external tools via JSON-RPC. - Introduced native tools, including a dice rolling feature, that run in-process without subprocess overhead. - Enhanced the agent loop to utilize MCP tools, allowing for tool calls during conversation. - Created a dashboard panel to display available MCP tools and their commands. - Updated API to serve tool information and integrated MCP functionality into the existing application structure. - Added design documentation for MCP and its implementation details. --- bun.lock | 5 + doc/design/README.md | 6 + doc/design/mcp.md | 112 ++++++++++++++++ package.json | 1 + src-tauri/Cargo.lock | 1 + src-tauri/Cargo.toml | 2 +- src-tauri/src/app.rs | 27 ++-- src-tauri/src/infrastructure/http_server.rs | 25 ++++ src-tauri/src/modules/bot/agent.rs | 123 +++++++++++++++++ src-tauri/src/modules/bot/mod.rs | 1 + src-tauri/src/modules/bot/service.rs | 53 +++++--- src-tauri/src/modules/mcp/mod.rs | 4 + src-tauri/src/modules/mcp/native.rs | 103 +++++++++++++++ src-tauri/src/modules/mcp/registry.rs | 131 +++++++++++++++++++ src-tauri/src/modules/mcp/service.rs | 65 +++++++++ src-tauri/src/modules/mcp/types.rs | 17 +++ src-tauri/src/modules/mod.rs | 1 + src-tauri/src/modules/ollama/service.rs | 21 ++- src-tauri/src/shared/state.rs | 5 +- src/modules/mcp/components/McpToolsPanel.tsx | 99 ++++++++++++++ src/modules/mcp/index.ts | 20 +++ src/pages/DashboardPage.tsx | 3 + 22 files changed, 789 insertions(+), 36 deletions(-) create mode 100644 doc/design/mcp.md create mode 100644 src-tauri/src/modules/bot/agent.rs create mode 100644 src-tauri/src/modules/mcp/mod.rs create mode 100644 src-tauri/src/modules/mcp/native.rs create mode 100644 src-tauri/src/modules/mcp/registry.rs create mode 100644 
src-tauri/src/modules/mcp/service.rs create mode 100644 src-tauri/src/modules/mcp/types.rs create mode 100644 src/modules/mcp/components/McpToolsPanel.tsx create mode 100644 src/modules/mcp/index.ts diff --git a/bun.lock b/bun.lock index 53bf064..9b0bfbe 100644 --- a/bun.lock +++ b/bun.lock @@ -5,6 +5,7 @@ "": { "name": "pengine", "dependencies": { + "@radix-ui/react-accordion": "^1.2.12", "@radix-ui/react-menubar": "^1.1.16", "@tailwindcss/vite": "^4.2.2", "@tauri-apps/api": "^2", @@ -172,8 +173,12 @@ "@radix-ui/primitive": ["@radix-ui/primitive@1.1.3", "", {}, "sha512-JTF99U/6XIjCBo0wqkU5sK10glYe27MRRsfwoiq5zzOEZLHU3A3KCMa5X/azekYRCJ0HlwI0crAXS/5dEHTzDg=="], + "@radix-ui/react-accordion": ["@radix-ui/react-accordion@1.2.12", "", { "dependencies": { "@radix-ui/primitive": "1.1.3", "@radix-ui/react-collapsible": "1.1.12", "@radix-ui/react-collection": "1.1.7", "@radix-ui/react-compose-refs": "1.1.2", "@radix-ui/react-context": "1.1.2", "@radix-ui/react-direction": "1.1.1", "@radix-ui/react-id": "1.1.1", "@radix-ui/react-primitive": "2.1.3", "@radix-ui/react-use-controllable-state": "1.2.2" }, "peerDependencies": { "@types/react": "*", "@types/react-dom": "*", "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" }, "optionalPeers": ["@types/react", "@types/react-dom"] }, "sha512-T4nygeh9YE9dLRPhAHSeOZi7HBXo+0kYIPJXayZfvWOWA0+n3dESrZbjfDPUABkUNym6Hd+f2IR113To8D2GPA=="], + "@radix-ui/react-arrow": ["@radix-ui/react-arrow@1.1.7", "", { "dependencies": { "@radix-ui/react-primitive": "2.1.3" }, "peerDependencies": { "@types/react": "*", "@types/react-dom": "*", "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" }, "optionalPeers": ["@types/react", "@types/react-dom"] }, "sha512-F+M1tLhO+mlQaOWspE8Wstg+z6PwxwRd8oQ8IXceWz92kfAmalTRf0EjrouQeo7QssEPfCn05B4Ihs1K9WQ/7w=="], + "@radix-ui/react-collapsible": 
["@radix-ui/react-collapsible@1.1.12", "", { "dependencies": { "@radix-ui/primitive": "1.1.3", "@radix-ui/react-compose-refs": "1.1.2", "@radix-ui/react-context": "1.1.2", "@radix-ui/react-id": "1.1.1", "@radix-ui/react-presence": "1.1.5", "@radix-ui/react-primitive": "2.1.3", "@radix-ui/react-use-controllable-state": "1.2.2", "@radix-ui/react-use-layout-effect": "1.1.1" }, "peerDependencies": { "@types/react": "*", "@types/react-dom": "*", "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" }, "optionalPeers": ["@types/react", "@types/react-dom"] }, "sha512-Uu+mSh4agx2ib1uIGPP4/CKNULyajb3p92LsVXmH2EHVMTfZWpll88XJ0j4W0z3f8NK1eYl1+Mf/szHPmcHzyA=="], + "@radix-ui/react-collection": ["@radix-ui/react-collection@1.1.7", "", { "dependencies": { "@radix-ui/react-compose-refs": "1.1.2", "@radix-ui/react-context": "1.1.2", "@radix-ui/react-primitive": "2.1.3", "@radix-ui/react-slot": "1.2.3" }, "peerDependencies": { "@types/react": "*", "@types/react-dom": "*", "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" }, "optionalPeers": ["@types/react", "@types/react-dom"] }, "sha512-Fh9rGN0MoI4ZFUNyfFVNU4y9LUz93u9/0K+yLgA2bwRojxM8JU1DyvvMBabnZPBgMWREAJvU2jjVzq+LrFUglw=="], "@radix-ui/react-compose-refs": ["@radix-ui/react-compose-refs@1.1.2", "", { "peerDependencies": { "@types/react": "*", "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" }, "optionalPeers": ["@types/react"] }, "sha512-z4eqJvfiNnFMHIIvXP3CY57y2WJs5g2v3X0zm9mEJkrkNv4rDxu+sg9Jh8EkXyeqBkB7SOcboo9dMVqhyrACIg=="], diff --git a/doc/design/README.md b/doc/design/README.md index f647753..8be8082 100644 --- a/doc/design/README.md +++ b/doc/design/README.md @@ -183,6 +183,12 @@ Telegram ──► teloxide dispatcher ──► text_handler --- +## Modules + +- [MCP — Model Context Protocol](./mcp.md) — agent tool-use via external MCP servers (POC). 
+ +--- + ## Adding a New Module ### Frontend diff --git a/doc/design/mcp.md b/doc/design/mcp.md new file mode 100644 index 0000000..953754e --- /dev/null +++ b/doc/design/mcp.md @@ -0,0 +1,112 @@ +# MCP — Model Context Protocol Module (POC) + +> Status: **proof of concept**. One server, one transport, one happy path. + +## What & Why + +[MCP](https://modelcontextprotocol.org/) is an open JSON-RPC 2.0 protocol that lets an LLM "host" discover and call tools exposed by external "servers". Pengine adopts MCP so we can grow the agent's capabilities by dropping in new servers instead of writing bespoke Rust glue for each tool. Every tool call flows through one well-defined choke point, which is what makes it auditable. + +## Roles in Pengine + +| Role | Where | Responsibility | +|---|---|---| +| **Host** | Pengine (Tauri binary) | Owns the LLM (Ollama) connection, the Telegram bot, and the agent loop. | +| **Client** | `src-tauri/src/modules/mcp/` | One `McpClient` per connected server. Speaks JSON-RPC over stdio. | +| **Server** | External child process | Anything that speaks MCP — `npx @modelcontextprotocol/server-filesystem`, a Docker container, a custom binary. | + +``` +Telegram message + │ + ▼ +bot::service::text_handler + │ + ▼ +bot::agent::run_turn ────► ollama::chat_with_tools (Ollama /api/chat) + ▲ │ + │ │ tool_calls? + │ ▼ + └─────────── mcp::registry::call_tool ──► McpClient ──► child process (stdio) +``` + +## Module Layout + +``` +src-tauri/src/modules/mcp/ +├── mod.rs +├── protocol.rs JSON-RPC 2.0 request/response types +├── types.rs McpConfig, ServerConfig, Tool +├── transport.rs StdioTransport — child process + line-delimited JSON +├── client.rs McpClient — initialize / tools/list / tools/call +├── registry.rs McpRegistry — fan-out across all connected servers +└── service.rs load_or_init_config(), connect_all() +``` + +The registry lives on `AppState.mcp` (`Arc>`) so the bot agent and any future HTTP route can reach it. 
+ +## Config + +File: `$APP_DATA/mcp.json` (next to `connection.json`). Created on first launch with a sane default if missing. + +```json +{ + "servers": { + "filesystem": { + "command": "npx", + "args": ["-y", "@modelcontextprotocol/server-filesystem", "/tmp"], + "env": {} + } + } +} +``` + +To add a server: add another entry under `servers`. Restart Pengine. + +## Protocol Subset Implemented + +Four messages, that's it: + +1. `initialize` — handshake. We send `{protocolVersion, capabilities, clientInfo}` and ignore most of the response. +2. `notifications/initialized` — required notification after init. +3. `tools/list` — discovery, cached on the client. +4. `tools/call` — `{name, arguments}` → `{content: [{type: "text", text}]}`. + +Out of scope for the POC: resources, prompts, sampling, server-initiated requests, batch JSON-RPC, HTTP transport. + +## Ollama Bridge + +MCP `inputSchema` is JSON Schema, and so are Ollama's tool `parameters` — translation is just a rename. See `to_ollama_tools` in `bot/agent.rs`. Tool names are emitted as `server.tool` so the registry can route a call back to the right client. + +The agent loop in `bot::agent::run_turn`: + +1. Snapshot the available tools from the registry. +2. Send `system + user` plus the tool list to Ollama. +3. If the response carries `tool_calls`, run each via `registry.call_tool`, append the results as `role: "tool"` messages, loop. Capped at **5 steps**. +4. Otherwise return the assistant content as the final reply. + +Use a tool-capable model (e.g. `qwen3:8b`). Check with `ollama show <model>` for the `tools` capability. + +## Audit Logs + +Every MCP-relevant event is emitted as a `LogEntry` with `kind = "mcp"` via `state.emit_log`. 
They flow through the existing SSE log stream (`GET /v1/logs`) and are visible on the dashboard: + +- `loading MCP config…` +- `filesystem ready (2 tools)` +- `MCP ready (2 tools)` +- `tools available: filesystem.read_file, filesystem.list_directory` +- `tool call (0): filesystem.list_directory({"path":"/tmp"})` +- `tool result (842 bytes)` +- `tool error: …` + +That single audit trail is the "auditable protocol" promise of this feature. + +## Try It + +1. `npx -y @modelcontextprotocol/server-filesystem /tmp` should run (Node + npm available). +2. `ollama pull qwen3:8b` (or any tool-capable model). +3. `bun run tauri dev`. On first launch, watch the dashboard for `mcp` lines confirming the filesystem server connected. +4. Connect a Telegram bot, then send: *"List the files in /tmp."* +5. Expect a `tool call` and `tool result` line in the log, followed by a coherent reply on Telegram. + +## Future Work + +Permission prompts, multiple servers in the default config, a frontend tools panel, hot reload of `mcp.json`, HTTP/SSE transport, resources & prompts. Not in this PR. 
diff --git a/package.json b/package.json index 04b32ef..973b7f7 100644 --- a/package.json +++ b/package.json @@ -30,6 +30,7 @@ ] }, "dependencies": { + "@radix-ui/react-accordion": "^1.2.12", "@radix-ui/react-menubar": "^1.1.16", "@tailwindcss/vite": "^4.2.2", "@tauri-apps/api": "^2", diff --git a/src-tauri/Cargo.lock b/src-tauri/Cargo.lock index 91b3149..9396014 100644 --- a/src-tauri/Cargo.lock +++ b/src-tauri/Cargo.lock @@ -2957,6 +2957,7 @@ dependencies = [ "axum", "chrono", "env_logger", + "fastrand", "reqwest 0.13.2", "serde", "serde_json", diff --git a/src-tauri/Cargo.toml b/src-tauri/Cargo.toml index 6269015..34d0b65 100644 --- a/src-tauri/Cargo.toml +++ b/src-tauri/Cargo.toml @@ -31,4 +31,4 @@ env_logger = "0.11" chrono = { version = "0.4", features = ["serde"] } tokio-stream = { version = "0.1", features = ["sync"] } socket2 = "0.5" - +fastrand = "2" diff --git a/src-tauri/src/app.rs b/src-tauri/src/app.rs index 6f4082c..9eddc87 100644 --- a/src-tauri/src/app.rs +++ b/src-tauri/src/app.rs @@ -1,5 +1,6 @@ use crate::infrastructure::http_server; use crate::modules::bot::{commands, repository, service as bot_service}; +use crate::modules::mcp::service as mcp_service; use crate::shared::state::AppState; use std::path::PathBuf; use tauri::Manager; @@ -26,14 +27,13 @@ pub fn run() { let handle = app.handle().clone(); let state = shared_state.clone(); tauri::async_runtime::spawn(async move { - let mut lock = state.app_handle.lock().await; - *lock = Some(handle); + *state.app_handle.lock().await = Some(handle); }); } app.manage(shared_state.clone()); - // Resume persisted connection if present + // Resume persisted Telegram connection if present. 
let resume_state = shared_state.clone(); tauri::async_runtime::spawn(async move { let Some(conn) = repository::load(&resume_state.store_path) else { @@ -43,15 +43,26 @@ pub fn run() { .emit_log("ok", &format!("Resuming bot @{}…", conn.bot_username)) .await; let token = conn.bot_token.clone(); - { - let mut lock = resume_state.connection.lock().await; - *lock = Some(conn); - } + *resume_state.connection.lock().await = Some(conn); let shutdown = resume_state.shutdown_notify.clone(); bot_service::start_bot(resume_state, token, shutdown).await; }); - // Start localhost HTTP API + // Native tools are instant — no subprocess, no async I/O. + let mcp_state = shared_state.clone(); + tauri::async_runtime::spawn(async move { + let registry = mcp_service::build_default_registry(); + let n = registry.tool_names().len(); + *mcp_state.mcp.write().await = registry; + mcp_state + .emit_log( + "mcp", + &format!("{n} native tool{}", if n == 1 { "" } else { "s" }), + ) + .await; + }); + + // Start localhost HTTP API. 
let server_state = shared_state.clone(); tauri::async_runtime::spawn(async move { http_server::start_server(server_state).await; diff --git a/src-tauri/src/infrastructure/http_server.rs b/src-tauri/src/infrastructure/http_server.rs index 37debab..6757284 100644 --- a/src-tauri/src/infrastructure/http_server.rs +++ b/src-tauri/src/infrastructure/http_server.rs @@ -41,6 +41,13 @@ pub struct ErrorResponse { pub error: String, } +#[derive(Serialize)] +pub struct McpToolDto { + pub server: String, + pub name: String, + pub description: Option, +} + pub async fn start_server(state: AppState) { let cors = CorsLayer::new() .allow_origin(Any) @@ -52,6 +59,7 @@ pub async fn start_server(state: AppState) { .route("/v1/connect", delete(handle_disconnect)) .route("/v1/health", get(handle_health)) .route("/v1/logs", get(handle_logs_sse)) + .route("/v1/mcp/tools", get(handle_mcp_tools)) .layer(cors) .with_state(state.clone()); @@ -210,6 +218,23 @@ async fn handle_health(State(state): State) -> Json { }) } +async fn handle_mcp_tools(State(state): State) -> Json> { + Json( + state + .mcp + .read() + .await + .all_tools() + .into_iter() + .map(|t| McpToolDto { + server: t.server_name, + name: t.name, + description: t.description, + }) + .collect(), + ) +} + async fn handle_logs_sse( State(state): State, ) -> Sse>> { diff --git a/src-tauri/src/modules/bot/agent.rs b/src-tauri/src/modules/bot/agent.rs new file mode 100644 index 0000000..80f78da --- /dev/null +++ b/src-tauri/src/modules/bot/agent.rs @@ -0,0 +1,123 @@ +//! Minimal agent loop: ask Ollama, run any tools it requests, feed results +//! back, repeat until it stops asking for tools. + +use crate::modules::ollama::service as ollama; +use crate::shared::state::AppState; +use serde_json::json; + +const MAX_STEPS: usize = 5; + +/// Where the reply text came from — provable, not guessable. +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +pub enum ReplySource { + /// Text generated by the LLM. 
+ Model, + /// Verbatim output from a tool execution. + Tool, +} + +pub struct TurnResult { + pub text: String, + pub source: ReplySource, +} + +pub async fn run_turn(state: &AppState, user_message: &str) -> Result { + let model = ollama::active_model().await?; + + let (ollama_tools, has_tools) = { + let reg = state.mcp.read().await; + (reg.ollama_tools(), !reg.is_empty()) + }; + + let system = if has_tools { + "Use the provided tools. Do not guess tool output.".to_string() + } else { + "Answer concisely.".to_string() + }; + + let mut messages = json!([ + { "role": "system", "content": system }, + { "role": "user", "content": user_message } + ]); + + for step in 0..MAX_STEPS { + let msg = ollama::chat_with_tools(&model, &messages, &ollama_tools).await?; + + if let Some(arr) = messages.as_array_mut() { + arr.push(msg.clone()); + } + + let tool_calls = msg + .get("tool_calls") + .and_then(|v| v.as_array()) + .cloned() + .unwrap_or_default(); + + if tool_calls.is_empty() { + let text = msg + .get("content") + .and_then(|v| v.as_str()) + .unwrap_or("") + .to_string(); + return Ok(TurnResult { + text, + source: ReplySource::Model, + }); + } + + let mut direct_reply: Option = None; + + for call in tool_calls { + let name = call + .get("function") + .and_then(|f| f.get("name")) + .and_then(|v| v.as_str()) + .unwrap_or("") + .to_string(); + let args = call + .get("function") + .and_then(|f| f.get("arguments")) + .cloned() + .unwrap_or(json!({})); + + state.emit_log("tool", &format!("[{step}] {name}")).await; + + let (result_text, is_direct) = match state.mcp.read().await.call_tool(&name, args).await + { + Ok((text, direct)) => { + state + .emit_log("tool", &format!("result ({} bytes)", text.len())) + .await; + (text, direct) + } + Err(e) => { + state.emit_log("tool", &format!("error: {e}")).await; + (format!("ERROR: {e}"), false) + } + }; + + if is_direct { + direct_reply = Some(result_text.clone()); + } + + if let Some(arr) = messages.as_array_mut() { + arr.push(json!({ + 
"role": "tool", + "name": name, + "content": result_text, + })); + } + } + + if let Some(text) = direct_reply { + return Ok(TurnResult { + text, + source: ReplySource::Tool, + }); + } + } + + Err(format!( + "agent exceeded {MAX_STEPS} steps without finishing" + )) +} diff --git a/src-tauri/src/modules/bot/mod.rs b/src-tauri/src/modules/bot/mod.rs index 6a6f35e..5001b1c 100644 --- a/src-tauri/src/modules/bot/mod.rs +++ b/src-tauri/src/modules/bot/mod.rs @@ -1,3 +1,4 @@ +pub mod agent; pub mod commands; pub mod repository; pub mod service; diff --git a/src-tauri/src/modules/bot/service.rs b/src-tauri/src/modules/bot/service.rs index 3c1ed64..517fbd2 100644 --- a/src-tauri/src/modules/bot/service.rs +++ b/src-tauri/src/modules/bot/service.rs @@ -1,8 +1,8 @@ -use crate::modules::ollama::service as ollama; +use crate::modules::bot::agent; use crate::shared::state::AppState; use std::sync::Arc; use teloxide::prelude::*; -use teloxide::types::Me; +use teloxide::types::{ChatAction, Me}; use teloxide::utils::command::BotCommands; use tokio::sync::Notify; @@ -98,31 +98,44 @@ async fn command_handler( } async fn text_handler(bot: Bot, msg: Message, state: AppState) -> ResponseResult<()> { - let incoming = msg.text().unwrap_or(""); + let incoming = msg.text().unwrap_or("").to_string(); state .emit_log("msg", &format!("from {}: {}", user_label(&msg), incoming)) .await; - match ollama::active_model().await { - Ok(model) => { - state - .emit_log("tool", &format!("routing to ollama → {model}")) - .await; - match ollama::chat(&model, incoming).await { - Ok(reply) => { - state.emit_log("reply", &format!("→ {reply}")).await; - bot.send_message(msg.chat.id, &reply).await?; - } - Err(e) => { - state.emit_log("run", &format!("ollama error: {e}")).await; - send_inference_unavailable(&bot, &msg, &state).await; - } + // Telegram's `typing` action lasts ~5 seconds. 
Refresh it every 4s in a + // background task while the agent runs so the user sees a continuous + // "writing…" indicator no matter how long the tool calls take. + let typing_task = { + let bot = bot.clone(); + let chat_id = msg.chat.id; + tokio::spawn(async move { + loop { + let _ = bot.send_chat_action(chat_id, ChatAction::Typing).await; + tokio::time::sleep(std::time::Duration::from_secs(4)).await; } + }) + }; + + let result = agent::run_turn(&state, &incoming).await; + typing_task.abort(); + + match result { + Ok(turn) => { + let reply = if turn.text.trim().is_empty() { + "(no reply)".to_string() + } else { + turn.text + }; + let tag = match turn.source { + agent::ReplySource::Tool => "tool", + agent::ReplySource::Model => "model", + }; + state.emit_log("reply", &format!("[{tag}] {reply}")).await; + bot.send_message(msg.chat.id, &reply).await?; } Err(e) => { - state - .emit_log("run", &format!("no ollama model available: {e}")) - .await; + state.emit_log("run", &format!("agent error: {e}")).await; send_inference_unavailable(&bot, &msg, &state).await; } } diff --git a/src-tauri/src/modules/mcp/mod.rs b/src-tauri/src/modules/mcp/mod.rs new file mode 100644 index 0000000..67277ff --- /dev/null +++ b/src-tauri/src/modules/mcp/mod.rs @@ -0,0 +1,4 @@ +pub mod native; +pub mod registry; +pub mod service; +pub mod types; diff --git a/src-tauri/src/modules/mcp/native.rs b/src-tauri/src/modules/mcp/native.rs new file mode 100644 index 0000000..88a52d5 --- /dev/null +++ b/src-tauri/src/modules/mcp/native.rs @@ -0,0 +1,103 @@ +//! Built-in tools that run in-process. No subprocess, no JSON-RPC overhead. 
+ +use super::types::ToolDef; +use serde_json::{json, Value}; + +const MAX_SIDES: u64 = 1_000_000; + +pub struct NativeProvider { + pub server_name: String, + pub tools: Vec, + handler: fn(&str, &Value) -> Result, +} + +impl NativeProvider { + pub fn call(&self, tool_name: &str, args: &Value) -> Result { + if !self.tools.iter().any(|t| t.name == tool_name) { + return Err(format!("unknown native tool: {tool_name}")); + } + (self.handler)(tool_name, args) + } +} + +/// The bundled dice tool — runs in pure Rust, instant response. +pub fn dice() -> NativeProvider { + NativeProvider { + server_name: "dice".to_string(), + tools: vec![ToolDef { + server_name: "dice".to_string(), + name: "roll_dice".to_string(), + description: Some( + "Roll a die with the given number of sides and return the result.".to_string(), + ), + input_schema: json!({ + "type": "object", + "properties": { + "sides": { + "type": "integer", + "description": "Number of sides (default 6, max 1 000 000)" + } + } + }), + direct_return: true, + }], + handler: handle_dice, + } +} + +fn handle_dice(_tool_name: &str, args: &Value) -> Result { + let sides = args + .get("sides") + .and_then(|v| v.as_u64()) + .unwrap_or(6) + .clamp(2, MAX_SIDES); + + let result = fastrand::u64(1..=sides); + Ok(format!("Rolled a d{sides}: {result}")) +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn dice_returns_valid_result() { + let provider = dice(); + assert_eq!(provider.tools.len(), 1); + assert_eq!(provider.tools[0].name, "roll_dice"); + assert!(provider.tools[0].direct_return); + + let out = provider + .call("roll_dice", &json!({"sides": 20})) + .expect("dice call"); + assert!(out.starts_with("Rolled a d20: "), "got: {out}"); + + let num: u64 = out.trim_start_matches("Rolled a d20: ").parse().unwrap(); + assert!((1..=20).contains(&num)); + } + + #[test] + fn dice_clamps_invalid_sides() { + let provider = dice(); + + let out = provider + .call("roll_dice", &json!({"sides": 0})) + .expect("sides=0"); + 
assert!(out.starts_with("Rolled a d2: "), "clamped to 2, got: {out}"); + + let out = provider + .call("roll_dice", &json!({"sides": 9999999})) + .expect("sides=9999999"); + assert!( + out.starts_with("Rolled a d1000000: "), + "clamped to MAX, got: {out}" + ); + } + + #[test] + fn dice_rejects_unknown_tool() { + let provider = dice(); + let err = provider.call("unknown", &json!({})).unwrap_err(); + assert!(err.contains("unknown native tool")); + } +} diff --git a/src-tauri/src/modules/mcp/registry.rs b/src-tauri/src/modules/mcp/registry.rs new file mode 100644 index 0000000..b56e8d1 --- /dev/null +++ b/src-tauri/src/modules/mcp/registry.rs @@ -0,0 +1,131 @@ +//! Tool registry: aggregates providers behind a single dispatch interface. +//! +//! Adding a new provider kind (e.g. Docker-backed MCP) means adding a variant +//! to [`Provider`] and a match arm in each method — nothing else changes. + +use super::native::NativeProvider; +use super::types::ToolDef; +use serde_json::{json, Value}; + +// ── Provider ─────────────────────────────────────────────────────── + +/// Where a tool lives. Extend this enum for Docker / external MCP +/// servers — the registry, agent loop, and HTTP API stay untouched. +pub enum Provider { + Native(NativeProvider), +} + +impl Provider { + pub fn server_name(&self) -> &str { + match self { + Provider::Native(n) => &n.server_name, + } + } + + pub fn tools(&self) -> &[ToolDef] { + match self { + Provider::Native(n) => &n.tools, + } + } + + pub async fn call_tool(&self, name: &str, args: Value) -> Result { + match self { + Provider::Native(n) => n.call(name, &args), + } + } +} + +// ── Registry ─────────────────────────────────────────────────────── + +/// Central tool registry. Pre-caches the Ollama tool JSON and the +/// human-readable name list at construction time so the hot path +/// (each chat turn) is just a cheap `clone()`. 
+pub struct ToolRegistry { + providers: Vec, + cached_ollama_tools: Value, + cached_tool_names: Vec, +} + +impl Default for ToolRegistry { + fn default() -> Self { + Self { + providers: Vec::new(), + cached_ollama_tools: Value::Array(Vec::new()), + cached_tool_names: Vec::new(), + } + } +} + +impl ToolRegistry { + pub fn new(providers: Vec) -> Self { + let cached_ollama_tools = build_ollama_tools(&providers); + let cached_tool_names = providers + .iter() + .flat_map(|p| p.tools().iter().map(|t| t.name.clone())) + .collect(); + Self { + providers, + cached_ollama_tools, + cached_tool_names, + } + } + + pub fn all_tools(&self) -> Vec { + self.providers + .iter() + .flat_map(|p| p.tools().iter().cloned()) + .collect() + } + + pub fn ollama_tools(&self) -> Value { + self.cached_ollama_tools.clone() + } + + pub fn tool_names(&self) -> &[String] { + &self.cached_tool_names + } + + pub fn is_empty(&self) -> bool { + self.cached_tool_names.is_empty() + } + + /// Dispatch a tool call. Returns `(output_text, direct_return)`. 
+ pub async fn call_tool(&self, name: &str, args: Value) -> Result<(String, bool), String> { + let (server, tool) = match name.split_once('.') { + Some((s, t)) => (Some(s), t), + None => (None, name), + }; + + for provider in &self.providers { + if let Some(s) = server { + if provider.server_name() != s { + continue; + } + } + if let Some(def) = provider.tools().iter().find(|t| t.name == tool) { + let direct = def.direct_return; + let text = provider.call_tool(tool, args).await?; + return Ok((text, direct)); + } + } + Err(format!("tool not found: {name}")) + } +} + +fn build_ollama_tools(providers: &[Provider]) -> Value { + let arr: Vec = providers + .iter() + .flat_map(|p| p.tools().iter()) + .map(|t| { + json!({ + "type": "function", + "function": { + "name": t.name, + "description": t.description.clone().unwrap_or_default(), + "parameters": t.input_schema, + } + }) + }) + .collect(); + Value::Array(arr) +} diff --git a/src-tauri/src/modules/mcp/service.rs b/src-tauri/src/modules/mcp/service.rs new file mode 100644 index 0000000..3f61d51 --- /dev/null +++ b/src-tauri/src/modules/mcp/service.rs @@ -0,0 +1,65 @@ +//! Public facade: build the tool registry from native providers. + +use super::native; +use super::registry::{Provider, ToolRegistry}; + +/// Build a registry pre-loaded with all bundled native tools. +/// No I/O, no subprocess — instant. 
+pub fn build_default_registry() -> ToolRegistry { + ToolRegistry::new(vec![Provider::Native(native::dice())]) +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn default_registry_has_dice() { + let reg = build_default_registry(); + assert_eq!(reg.tool_names(), &["roll_dice"]); + } + + #[tokio::test] + async fn native_dice_callable_through_registry() { + let reg = build_default_registry(); + let (text, direct) = reg + .call_tool("roll_dice", serde_json::json!({"sides": 6})) + .await + .expect("call roll_dice"); + assert!(text.starts_with("Rolled a d6: "), "got: {text}"); + assert!(direct, "dice should be direct_return"); + } + + /// Proves the reply comes from the native tool, not the model: + /// + /// 1. `direct_return` is true → agent returns tool output verbatim, + /// the model never sees it and cannot rephrase. + /// 2. The output matches `Rolled a dN: M` with M in [1, N] — a format + /// the native Rust handler produces. If the model fabricated a roll, + /// `direct_return` would be false and source would be `Model`. 
+ #[tokio::test] + async fn dice_result_is_provably_from_tool_not_model() { + let reg = build_default_registry(); + + for sides in [6, 20, 100] { + let (text, direct) = reg + .call_tool("roll_dice", serde_json::json!({ "sides": sides })) + .await + .expect("call roll_dice"); + + assert!(direct, "direct_return must be true for dice"); + + let prefix = format!("Rolled a d{sides}: "); + assert!( + text.starts_with(&prefix), + "expected prefix '{prefix}', got: {text}" + ); + + let num: u64 = text[prefix.len()..].trim().parse().expect("parse roll"); + assert!( + (1..=sides).contains(&num), + "roll {num} out of range [1, {sides}]" + ); + } + } +} diff --git a/src-tauri/src/modules/mcp/types.rs b/src-tauri/src/modules/mcp/types.rs new file mode 100644 index 0000000..ddf2d14 --- /dev/null +++ b/src-tauri/src/modules/mcp/types.rs @@ -0,0 +1,17 @@ +use serde::{Deserialize, Serialize}; + +/// Definition of a single tool, regardless of where it runs. +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct ToolDef { + #[serde(skip)] + pub server_name: String, + pub name: String, + #[serde(default)] + pub description: Option, + #[serde(default, rename = "inputSchema")] + pub input_schema: serde_json::Value, + /// When true the agent returns tool output verbatim, skipping the LLM + /// summarization round-trip. Good for deterministic tools like dice. 
+ #[serde(skip)] + pub direct_return: bool, +} diff --git a/src-tauri/src/modules/mod.rs b/src-tauri/src/modules/mod.rs index aa58b0c..4a4ea12 100644 --- a/src-tauri/src/modules/mod.rs +++ b/src-tauri/src/modules/mod.rs @@ -1,2 +1,3 @@ pub mod bot; +pub mod mcp; pub mod ollama; diff --git a/src-tauri/src/modules/ollama/service.rs b/src-tauri/src/modules/ollama/service.rs index 6b07848..0dd17d9 100644 --- a/src-tauri/src/modules/ollama/service.rs +++ b/src-tauri/src/modules/ollama/service.rs @@ -41,10 +41,19 @@ pub async fn active_model() -> Result { .ok_or_else(|| "no models pulled in ollama".to_string()) } -pub async fn chat(model: &str, prompt: &str) -> Result { +/// Tool-aware chat for the agent loop. Sends a full message history plus a +/// list of tool definitions and returns the raw assistant message (which may +/// contain `tool_calls`). Caller is responsible for executing tools and +/// looping. Returns the `message` object verbatim. +pub async fn chat_with_tools( + model: &str, + messages: &serde_json::Value, + tools: &serde_json::Value, +) -> Result { let payload = serde_json::json!({ "model": model, - "messages": [{"role": "user", "content": format!("Think fast and answer extremely short. If you don't know the answer, say you don't know. 
Question: {prompt}")}], + "messages": messages, + "tools": tools, "stream": false, }); @@ -57,8 +66,8 @@ pub async fn chat(model: &str, prompt: &str) -> Result { .map_err(|e| e.to_string())?; let body: serde_json::Value = resp.json().await.map_err(|e| e.to_string())?; - body["message"]["content"] - .as_str() - .map(|s| s.to_string()) - .ok_or_else(|| "unexpected ollama response shape".to_string()) + Ok(body + .get("message") + .cloned() + .unwrap_or(serde_json::Value::Null)) } diff --git a/src-tauri/src/shared/state.rs b/src-tauri/src/shared/state.rs index d4d5f4e..283ee1f 100644 --- a/src-tauri/src/shared/state.rs +++ b/src-tauri/src/shared/state.rs @@ -1,9 +1,10 @@ +use crate::modules::mcp::registry::ToolRegistry; use chrono::{DateTime, Utc}; use serde::{Deserialize, Serialize}; use std::path::PathBuf; use std::sync::Arc; use tauri::Emitter; -use tokio::sync::{Mutex, Notify}; +use tokio::sync::{Mutex, Notify, RwLock}; #[derive(Debug, Clone, Serialize, Deserialize)] pub struct ConnectionData { @@ -28,6 +29,7 @@ pub struct AppState { pub log_tx: Arc>>>, pub store_path: PathBuf, pub app_handle: Arc>>, + pub mcp: Arc>, } impl AppState { @@ -40,6 +42,7 @@ impl AppState { log_tx: Arc::new(Mutex::new(Some(log_tx))), store_path, app_handle: Arc::new(Mutex::new(None)), + mcp: Arc::new(RwLock::new(ToolRegistry::default())), } } diff --git a/src/modules/mcp/components/McpToolsPanel.tsx b/src/modules/mcp/components/McpToolsPanel.tsx new file mode 100644 index 0000000..ecaad7f --- /dev/null +++ b/src/modules/mcp/components/McpToolsPanel.tsx @@ -0,0 +1,99 @@ +import * as Accordion from "@radix-ui/react-accordion"; +import { useEffect, useMemo, useState } from "react"; +import { fetchMcpTools, type McpTool } from ".."; + +/** + * Dashboard panel showing MCP tool *groups*. Each accordion item is one tool + * group (= one MCP server, e.g. "dice"); expanding it reveals the individual + * commands that group exposes (e.g. `roll_dice`). 
+ */ +export function McpToolsPanel() { + const [tools, setTools] = useState(null); + + useEffect(() => { + let cancelled = false; + const load = async () => { + const data = await fetchMcpTools(); + if (!cancelled) setTools(data); + }; + load(); + const timer = setInterval(load, 10000); + return () => { + cancelled = true; + clearInterval(timer); + }; + }, []); + + // Bucket tools by their server (group) name. Stable, alphabetical order. + const groups = useMemo(() => { + if (!tools) return null; + const map = new Map(); + for (const tool of tools) { + const list = map.get(tool.server) ?? []; + list.push(tool); + map.set(tool.server, list); + } + return [...map.entries()] + .sort(([a], [b]) => a.localeCompare(b)) + .map(([server, items]) => ({ server, items })); + }, [tools]); + + return ( +
+

Available tools

+ + {groups === null && ( +

+ Loading… +

+ )} + + {groups !== null && groups.length === 0 && ( +

No MCP tools connected.

+ )} + + {groups !== null && groups.length > 0 && ( + g.server)} + className="mt-4 grid gap-2" + > + {groups.map((group) => ( + + + +
+

{group.server}

+

+ {group.items.length} command + {group.items.length === 1 ? "" : "s"} +

+
+ + + + +
+
+ +
    + {group.items.map((tool) => ( +
  • +

    {tool.name}

    + {tool.description && ( +

    {tool.description}

    + )} +
  • + ))} +
+
+
+ ))} +
+ )} +
+ ); +} diff --git a/src/modules/mcp/index.ts b/src/modules/mcp/index.ts new file mode 100644 index 0000000..0f0589f --- /dev/null +++ b/src/modules/mcp/index.ts @@ -0,0 +1,20 @@ +import { PENGINE_API_BASE } from "../../shared/api/config"; + +export type McpTool = { + server: string; + name: string; + description: string | null; +}; + +/** GET `/v1/mcp/tools` — flat list of tools across all connected MCP servers. */ +export async function fetchMcpTools(timeoutMs = 3000): Promise { + try { + const resp = await fetch(`${PENGINE_API_BASE}/v1/mcp/tools`, { + signal: AbortSignal.timeout(timeoutMs), + }); + if (!resp.ok) return []; + return (await resp.json()) as McpTool[]; + } catch { + return []; + } +} diff --git a/src/pages/DashboardPage.tsx b/src/pages/DashboardPage.tsx index 81d24de..3bd1ac5 100644 --- a/src/pages/DashboardPage.tsx +++ b/src/pages/DashboardPage.tsx @@ -3,6 +3,7 @@ import { Link, useNavigate } from "react-router-dom"; import { getPengineHealth } from "../modules/bot/api"; import { TerminalPreview } from "../modules/bot/components/TerminalPreview"; import { useAppSessionStore } from "../modules/bot/store/appSessionStore"; +import { McpToolsPanel } from "../modules/mcp/components/McpToolsPanel"; import { fetchOllamaModel } from "../modules/ollama/api"; import { PENGINE_API_BASE } from "../shared/api/config"; import { TopMenu } from "../shared/ui/TopMenu"; @@ -170,6 +171,8 @@ export function DashboardPage() { )} + + From 49601520a980d77407b950d959fbd55461292a32 Mon Sep 17 00:00:00 2001 From: MaximEdogawa Date: Thu, 9 Apr 2026 21:15:27 +0200 Subject: [PATCH 02/11] feat: enhance MCP integration with filesystem support and dialog plugin - Added support for managing filesystem paths in MCP configuration, allowing users to set allowed folders for tools. - Implemented new API endpoints for fetching MCP configuration and updating filesystem paths. 
- Integrated Tauri dialog plugin for a native folder picker, enhancing user experience in selecting filesystem paths. - Updated MCP tools panel to display configuration details and allow users to apply filesystem paths directly. - Added example MCP configuration file for reference and improved documentation for new features. --- .gitignore | 3 + bun.lock | 3 + package.json | 1 + src-tauri/Cargo.lock | 68 ++++++ src-tauri/Cargo.toml | 5 + src-tauri/capabilities/default.json | 1 + src-tauri/mcp.example.json | 18 ++ src-tauri/src/app.rs | 38 ++-- src-tauri/src/infrastructure/http_server.rs | 84 +++++++- src-tauri/src/lib.rs | 2 +- src-tauri/src/modules/bot/agent.rs | 157 +++++++++++--- src-tauri/src/modules/bot/commands.rs | 20 ++ src-tauri/src/modules/mcp/client.rs | 88 ++++++++ src-tauri/src/modules/mcp/mod.rs | 3 + src-tauri/src/modules/mcp/native.rs | 62 ++---- src-tauri/src/modules/mcp/protocol.rs | 43 ++++ src-tauri/src/modules/mcp/registry.rs | 42 ++-- src-tauri/src/modules/mcp/service.rs | 195 +++++++++++++----- src-tauri/src/modules/mcp/transport.rs | 128 ++++++++++++ src-tauri/src/modules/mcp/types.rs | 26 ++- src-tauri/src/modules/ollama/service.rs | 5 + src-tauri/src/shared/state.rs | 8 +- src-tauri/tests/mcp_tools.rs | 102 +++++++++ .../bot/components/TerminalPreview.tsx | 1 + src/modules/mcp/components/McpToolsPanel.tsx | 125 +++++++++-- src/modules/mcp/index.ts | 34 +++ 26 files changed, 1089 insertions(+), 173 deletions(-) create mode 100644 src-tauri/mcp.example.json create mode 100644 src-tauri/src/modules/mcp/client.rs create mode 100644 src-tauri/src/modules/mcp/protocol.rs create mode 100644 src-tauri/src/modules/mcp/transport.rs create mode 100644 src-tauri/tests/mcp_tools.rs diff --git a/.gitignore b/.gitignore index ae5f25e..05e616b 100644 --- a/.gitignore +++ b/.gitignore @@ -12,6 +12,9 @@ dist dist-ssr *.local +# Local MCP config (copy src-tauri/mcp.example.json → src-tauri/mcp.json) +src-tauri/mcp.json + # Editor directories and files 
.vscode/* !.vscode/extensions.json diff --git a/bun.lock b/bun.lock index 9b0bfbe..c0f50af 100644 --- a/bun.lock +++ b/bun.lock @@ -9,6 +9,7 @@ "@radix-ui/react-menubar": "^1.1.16", "@tailwindcss/vite": "^4.2.2", "@tauri-apps/api": "^2", + "@tauri-apps/plugin-dialog": "^2", "@tauri-apps/plugin-opener": "^2", "qrcode.react": "^4.2.0", "react": "^19.1.0", @@ -335,6 +336,8 @@ "@tauri-apps/cli-win32-x64-msvc": ["@tauri-apps/cli-win32-x64-msvc@2.10.1", "", { "os": "win32", "cpu": "x64" }, "sha512-6Cn7YpPFwzChy0ERz6djKEmUehWrYlM+xTaNzGPgZocw3BD7OfwfWHKVWxXzdjEW2KfKkHddfdxK1XXTYqBRLg=="], + "@tauri-apps/plugin-dialog": ["@tauri-apps/plugin-dialog@2.7.0", "", { "dependencies": { "@tauri-apps/api": "^2.10.1" } }, "sha512-4nS/hfGMGCXiAS3LtVjH9AgsSAPJeG/7R+q8agTFqytjnMa4Zq95Bq8WzVDkckpanX+yyRHXnRtrKXkANKDHvw=="], + "@tauri-apps/plugin-opener": ["@tauri-apps/plugin-opener@2.5.3", "", { "dependencies": { "@tauri-apps/api": "^2.8.0" } }, "sha512-CCcUltXMOfUEArbf3db3kCE7Ggy1ExBEBl51Ko2ODJ6GDYHRp1nSNlQm5uNCFY5k7/ufaK5Ib3Du/Zir19IYQQ=="], "@types/babel__core": ["@types/babel__core@7.20.5", "", { "dependencies": { "@babel/parser": "^7.20.7", "@babel/types": "^7.20.7", "@types/babel__generator": "*", "@types/babel__template": "*", "@types/babel__traverse": "*" } }, "sha512-qoQprZvz5wQFJwMDqeseRXWv3rqMvhgpbXFfVyWhbx9X47POIA6i/+dXefEmZKoAgOaTdaIgNSMqMIU61yRyzA=="], diff --git a/package.json b/package.json index 973b7f7..57f24d2 100644 --- a/package.json +++ b/package.json @@ -34,6 +34,7 @@ "@radix-ui/react-menubar": "^1.1.16", "@tailwindcss/vite": "^4.2.2", "@tauri-apps/api": "^2", + "@tauri-apps/plugin-dialog": "^2", "@tauri-apps/plugin-opener": "^2", "qrcode.react": "^4.2.0", "react": "^19.1.0", diff --git a/src-tauri/Cargo.lock b/src-tauri/Cargo.lock index 9396014..3a682a6 100644 --- a/src-tauri/Cargo.lock +++ b/src-tauri/Cargo.lock @@ -2744,6 +2744,7 @@ checksum = "e3e0adef53c21f888deb4fa59fc59f7eb17404926ee8a6f59f5df0fd7f9f3272" dependencies = [ "bitflags 2.11.0", "block2", + 
"libc", "objc2", "objc2-core-foundation", ] @@ -2964,6 +2965,7 @@ dependencies = [ "socket2 0.5.10", "tauri", "tauri-build", + "tauri-plugin-dialog", "tauri-plugin-opener", "teloxide", "tokio", @@ -3764,6 +3766,30 @@ dependencies = [ "web-sys", ] +[[package]] +name = "rfd" +version = "0.16.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a15ad77d9e70a92437d8f74c35d99b4e4691128df018833e99f90bcd36152672" +dependencies = [ + "block2", + "dispatch2", + "glib-sys", + "gobject-sys", + "gtk-sys", + "js-sys", + "log", + "objc2", + "objc2-app-kit", + "objc2-core-foundation", + "objc2-foundation", + "raw-window-handle", + "wasm-bindgen", + "wasm-bindgen-futures", + "web-sys", + "windows-sys 0.60.2", +] + [[package]] name = "rgb" version = "0.8.53" @@ -4727,6 +4753,48 @@ dependencies = [ "walkdir", ] +[[package]] +name = "tauri-plugin-dialog" +version = "2.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a1fa4150c95ae391946cc8b8f905ab14797427caba3a8a2f79628e956da91809" +dependencies = [ + "log", + "raw-window-handle", + "rfd", + "serde", + "serde_json", + "tauri", + "tauri-plugin", + "tauri-plugin-fs", + "thiserror 2.0.18", + "url", +] + +[[package]] +name = "tauri-plugin-fs" +version = "2.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "36e1ec28b79f3d0683f4507e1615c36292c0ea6716668770d4396b9b39871ed8" +dependencies = [ + "anyhow", + "dunce", + "glob", + "log", + "objc2-foundation", + "percent-encoding", + "schemars 0.8.22", + "serde", + "serde_json", + "serde_repr", + "tauri", + "tauri-plugin", + "tauri-utils", + "thiserror 2.0.18", + "toml 0.9.12+spec-1.1.0", + "url", +] + [[package]] name = "tauri-plugin-opener" version = "2.5.3" diff --git a/src-tauri/Cargo.toml b/src-tauri/Cargo.toml index 34d0b65..d40786d 100644 --- a/src-tauri/Cargo.toml +++ b/src-tauri/Cargo.toml @@ -32,3 +32,8 @@ chrono = { version = "0.4", features = ["serde"] } tokio-stream = { version = "0.1", 
features = ["sync"] } socket2 = "0.5" fastrand = "2" +tauri-plugin-dialog = "2" + +[dev-dependencies] +serde_json = "1" +tokio = { version = "1", features = ["macros", "rt-multi-thread"] } diff --git a/src-tauri/capabilities/default.json b/src-tauri/capabilities/default.json index ecdd4ce..fdfa076 100644 --- a/src-tauri/capabilities/default.json +++ b/src-tauri/capabilities/default.json @@ -6,6 +6,7 @@ "permissions": [ "core:default", "opener:default", + "dialog:default", "core:event:default", "core:event:allow-listen", "core:event:allow-emit" diff --git a/src-tauri/mcp.example.json b/src-tauri/mcp.example.json new file mode 100644 index 0000000..52dcec6 --- /dev/null +++ b/src-tauri/mcp.example.json @@ -0,0 +1,18 @@ +{ + "servers": { + "dice": { + "type": "native", + "id": "dice" + }, + "filesystem": { + "type": "stdio", + "command": "npx", + "args": [ + "-y", + "@modelcontextprotocol/server-filesystem", + "/absolute/path/to/allowed/folder" + ], + "env": {} + } + } +} diff --git a/src-tauri/src/app.rs b/src-tauri/src/app.rs index 9eddc87..baefc34 100644 --- a/src-tauri/src/app.rs +++ b/src-tauri/src/app.rs @@ -19,9 +19,11 @@ pub fn run() { tauri::Builder::default() .plugin(tauri_plugin_opener::init()) + .plugin(tauri_plugin_dialog::init()) .setup(|app| { let path = store_path(app); - let shared_state = AppState::new(path); + let (mcp_path, mcp_src) = mcp_service::resolve_mcp_config_path(&path); + let shared_state = AppState::new(path, mcp_path, mcp_src.to_string()); { let handle = app.handle().clone(); @@ -33,6 +35,25 @@ pub fn run() { app.manage(shared_state.clone()); + // Load MCP before any bot work so the first Telegram message never sees an empty registry. 
+ let mcp_path = shared_state.mcp_config_path.clone(); + let mcp_state = shared_state.clone(); + tauri::async_runtime::block_on(async move { + mcp_state + .emit_log("mcp", &format!("loading {}", mcp_path.display())) + .await; + match mcp_service::load_or_init_config(&mcp_path) { + Ok(cfg) => { + mcp_service::rebuild_registry_into_state(&mcp_state, &cfg).await; + } + Err(e) => { + mcp_state + .emit_log("mcp", &format!("mcp.json error: {e}")) + .await; + } + } + }); + // Resume persisted Telegram connection if present. let resume_state = shared_state.clone(); tauri::async_runtime::spawn(async move { @@ -48,20 +69,6 @@ pub fn run() { bot_service::start_bot(resume_state, token, shutdown).await; }); - // Native tools are instant — no subprocess, no async I/O. - let mcp_state = shared_state.clone(); - tauri::async_runtime::spawn(async move { - let registry = mcp_service::build_default_registry(); - let n = registry.tool_names().len(); - *mcp_state.mcp.write().await = registry; - mcp_state - .emit_log( - "mcp", - &format!("{n} native tool{}", if n == 1 { "" } else { "s" }), - ) - .await; - }); - // Start localhost HTTP API. 
let server_state = shared_state.clone(); tauri::async_runtime::spawn(async move { @@ -73,6 +80,7 @@ pub fn run() { .invoke_handler(tauri::generate_handler![ commands::get_connection_status, commands::disconnect_bot, + commands::pick_mcp_filesystem_folder, ]) .run(tauri::generate_context!()) .expect("error while running tauri application"); diff --git a/src-tauri/src/infrastructure/http_server.rs b/src-tauri/src/infrastructure/http_server.rs index 6757284..0859cd4 100644 --- a/src-tauri/src/infrastructure/http_server.rs +++ b/src-tauri/src/infrastructure/http_server.rs @@ -1,10 +1,11 @@ use crate::infrastructure::bot_lifecycle; use crate::modules::bot::{repository, service as bot_service}; +use crate::modules::mcp::service as mcp_service; use crate::shared::state::{AppState, ConnectionData}; use axum::extract::State; use axum::http::StatusCode; use axum::response::{Json, Sse}; -use axum::routing::{delete, get, post}; +use axum::routing::{delete, get, post, put}; use axum::Router; use chrono::Utc; use serde::{Deserialize, Serialize}; @@ -48,6 +49,19 @@ pub struct McpToolDto { pub description: Option, } +#[derive(Serialize)] +pub struct McpConfigInfoResponse { + pub config_path: String, + /// `"project"` or `"app_data"` + pub source: String, + pub filesystem_allowed_path: Option, +} + +#[derive(Deserialize)] +pub struct PutMcpFilesystemBody { + pub path: String, +} + pub async fn start_server(state: AppState) { let cors = CorsLayer::new() .allow_origin(Any) @@ -60,6 +74,8 @@ pub async fn start_server(state: AppState) { .route("/v1/health", get(handle_health)) .route("/v1/logs", get(handle_logs_sse)) .route("/v1/mcp/tools", get(handle_mcp_tools)) + .route("/v1/mcp/config", get(handle_mcp_config_get)) + .route("/v1/mcp/filesystem", put(handle_mcp_filesystem_put)) .layer(cors) .with_state(state.clone()); @@ -73,8 +89,6 @@ pub async fn start_server(state: AppState) { axum::serve(listener, app).await.expect("axum serve failed"); } -/// Bind with `SO_REUSEADDR` so a quick 
restart can reclaim the port after the old socket -/// enters `TIME_WAIT`. Falls back to the same error as plain bind if another process still listens. fn bind_loopback_reuse(addr: std::net::SocketAddr) -> std::io::Result { let socket = Socket::new(Domain::for_address(addr), Type::STREAM, None)?; socket.set_nonblocking(true)?; @@ -218,6 +232,70 @@ async fn handle_health(State(state): State) -> Json { }) } +async fn handle_mcp_config_get(State(state): State) -> Json { + let filesystem_allowed_path = state + .mcp_config_path + .exists() + .then(|| mcp_service::read_config(&state.mcp_config_path).ok()) + .flatten() + .and_then(|c| mcp_service::filesystem_allowed_path(&c)); + + Json(McpConfigInfoResponse { + config_path: state.mcp_config_path.to_string_lossy().into_owned(), + source: state.mcp_config_source.clone(), + filesystem_allowed_path, + }) +} + +async fn handle_mcp_filesystem_put( + State(state): State, + Json(body): Json, +) -> Result<(StatusCode, Json), (StatusCode, Json)> { + let path = body.path.trim(); + if path.is_empty() { + return Err(( + StatusCode::BAD_REQUEST, + Json(ErrorResponse { + error: "path is required".into(), + }), + )); + } + + let mut cfg = if state.mcp_config_path.exists() { + mcp_service::read_config(&state.mcp_config_path) + .map_err(|e| (StatusCode::BAD_REQUEST, Json(ErrorResponse { error: e })))? + } else { + mcp_service::load_or_init_config(&state.mcp_config_path).map_err(|e| { + ( + StatusCode::INTERNAL_SERVER_ERROR, + Json(ErrorResponse { error: e }), + ) + })? 
+ }; + + mcp_service::set_filesystem_allowed_path(&mut cfg, path); + mcp_service::save_config(&state.mcp_config_path, &cfg).map_err(|e| { + ( + StatusCode::INTERNAL_SERVER_ERROR, + Json(ErrorResponse { error: e }), + ) + })?; + + state + .emit_log( + "mcp", + &format!( + "filesystem allow path updated → {}", + state.mcp_config_path.display() + ), + ) + .await; + + mcp_service::rebuild_registry_into_state(&state, &cfg).await; + + Ok((StatusCode::OK, Json(serde_json::json!({ "ok": true })))) +} + async fn handle_mcp_tools(State(state): State) -> Json> { Json( state diff --git a/src-tauri/src/lib.rs b/src-tauri/src/lib.rs index 49488d5..a56f05f 100644 --- a/src-tauri/src/lib.rs +++ b/src-tauri/src/lib.rs @@ -1,6 +1,6 @@ mod app; mod infrastructure; -mod modules; +pub mod modules; mod shared; pub fn run() { diff --git a/src-tauri/src/modules/bot/agent.rs b/src-tauri/src/modules/bot/agent.rs index 80f78da..12e3db5 100644 --- a/src-tauri/src/modules/bot/agent.rs +++ b/src-tauri/src/modules/bot/agent.rs @@ -1,18 +1,34 @@ -//! Minimal agent loop: ask Ollama, run any tools it requests, feed results -//! back, repeat until it stops asking for tools. - use crate::modules::ollama::service as ollama; use crate::shared::state::AppState; use serde_json::json; +use std::time::{Duration, Instant}; + +const MAX_STEPS: usize = 3; + +/// Ollama sometimes returns `function.arguments` as a JSON string; normalize to an object. +fn tool_call_arguments(call: &serde_json::Value) -> serde_json::Value { + let raw = call.get("function").and_then(|f| f.get("arguments")); + match raw { + None => json!({}), + Some(serde_json::Value::String(s)) => { + serde_json::from_str::(s).unwrap_or_else(|_| json!({})) + } + Some(v) => v.clone(), + } +} -const MAX_STEPS: usize = 5; +fn fmt_duration(d: Duration) -> String { + let ms = d.as_millis(); + if ms < 1000 { + format!("{ms}ms") + } else { + format!("{:.1}s", d.as_secs_f64()) + } +} -/// Where the reply text came from — provable, not guessable. 
#[derive(Debug, Clone, Copy, PartialEq, Eq)] pub enum ReplySource { - /// Text generated by the LLM. Model, - /// Verbatim output from a tool execution. Tool, } @@ -29,8 +45,24 @@ pub async fn run_turn(state: &AppState, user_message: &str) -> Result Result = Vec::new(); + + // Phase 1: let the model call tools (up to MAX_STEPS rounds). for step in 0..MAX_STEPS { + let t_model = Instant::now(); let msg = ollama::chat_with_tools(&model, &messages, &ollama_tools).await?; + state + .emit_log( + "time", + &format!("model step {step} {}", fmt_duration(t_model.elapsed())), + ) + .await; if let Some(arr) = messages.as_array_mut() { arr.push(msg.clone()); @@ -54,34 +96,42 @@ pub async fn run_turn(state: &AppState, user_message: &str) -> Result = None; - for call in tool_calls { + for call in &tool_calls { let name = call .get("function") .and_then(|f| f.get("name")) .and_then(|v| v.as_str()) .unwrap_or("") .to_string(); - let args = call - .get("function") - .and_then(|f| f.get("arguments")) - .cloned() - .unwrap_or(json!({})); + let args = tool_call_arguments(call); state.emit_log("tool", &format!("[{step}] {name}")).await; + let t_tool = Instant::now(); let (result_text, is_direct) = match state.mcp.read().await.call_tool(&name, args).await { Ok((text, direct)) => { @@ -95,6 +145,14 @@ pub async fn run_turn(state: &AppState, user_message: &str) -> Result Result) -> Result Result, String> { + let folder = app + .dialog() + .file() + .set_title("Folder for MCP filesystem tools") + .blocking_pick_folder(); + Ok(folder.map(|p| p.to_string())) +} + +#[cfg(not(desktop))] +#[tauri::command] +pub async fn pick_mcp_filesystem_folder() -> Result, String> { + Err("folder picker is only available on desktop".into()) +} diff --git a/src-tauri/src/modules/mcp/client.rs b/src-tauri/src/modules/mcp/client.rs new file mode 100644 index 0000000..0d22a40 --- /dev/null +++ b/src-tauri/src/modules/mcp/client.rs @@ -0,0 +1,88 @@ +use super::transport::StdioTransport; +use 
super::types::ToolDef; +use serde_json::{json, Value}; +use std::collections::HashMap; + +pub struct McpClient { + pub server_name: String, + transport: StdioTransport, + pub tools: Vec, +} + +impl McpClient { + pub async fn connect( + server_name: String, + command: String, + args: Vec, + env: HashMap, + ) -> Result { + let transport = StdioTransport::spawn(&command, &args, &env).await?; + + let init_params = json!({ + "protocolVersion": "2024-11-05", + "capabilities": {}, + "clientInfo": { "name": "pengine", "version": "0.1.0" }, + }); + transport.call("initialize", Some(init_params)).await?; + let _ = transport.notify("notifications/initialized", None).await; + + let result = transport.call("tools/list", None).await?; + let tools = parse_tools(&server_name, &result); + + Ok(Self { + server_name, + transport, + tools, + }) + } + + pub async fn call_tool(&self, name: &str, args: Value) -> Result { + let result = self + .transport + .call( + "tools/call", + Some(json!({ "name": name, "arguments": args })), + ) + .await?; + + let mut out = String::new(); + if let Some(items) = result.get("content").and_then(|v| v.as_array()) { + for item in items { + if let Some(t) = item.get("text").and_then(|v| v.as_str()) { + if !out.is_empty() { + out.push('\n'); + } + out.push_str(t); + } + } + } + if out.is_empty() { + out = result.to_string(); + } + Ok(out) + } +} + +fn parse_tools(server_name: &str, result: &Value) -> Vec { + let Some(arr) = result.get("tools").and_then(|v| v.as_array()) else { + return vec![]; + }; + arr.iter() + .filter_map(|t| { + let name = t.get("name")?.as_str()?.to_string(); + Some(ToolDef { + server_name: server_name.to_string(), + name, + description: t + .get("description") + .and_then(|v| v.as_str()) + .map(|s| s.to_string()), + input_schema: t + .get("inputSchema") + .cloned() + .unwrap_or_else(|| json!({"type": "object"})), + direct_return: false, + }) + }) + .collect() +} diff --git a/src-tauri/src/modules/mcp/mod.rs 
b/src-tauri/src/modules/mcp/mod.rs index 67277ff..67116d9 100644 --- a/src-tauri/src/modules/mcp/mod.rs +++ b/src-tauri/src/modules/mcp/mod.rs @@ -1,4 +1,7 @@ +pub mod client; pub mod native; +pub mod protocol; pub mod registry; pub mod service; +pub mod transport; pub mod types; diff --git a/src-tauri/src/modules/mcp/native.rs b/src-tauri/src/modules/mcp/native.rs index 88a52d5..89221ad 100644 --- a/src-tauri/src/modules/mcp/native.rs +++ b/src-tauri/src/modules/mcp/native.rs @@ -1,5 +1,3 @@ -//! Built-in tools that run in-process. No subprocess, no JSON-RPC overhead. - use super::types::ToolDef; use serde_json::{json, Value}; @@ -20,12 +18,12 @@ impl NativeProvider { } } -/// The bundled dice tool — runs in pure Rust, instant response. -pub fn dice() -> NativeProvider { +/// Built-in dice tools under the given server key (must match `mcp.json` server name). +pub fn dice_named(server_key: &str) -> NativeProvider { NativeProvider { - server_name: "dice".to_string(), + server_name: server_key.to_string(), tools: vec![ToolDef { - server_name: "dice".to_string(), + server_name: server_key.to_string(), name: "roll_dice".to_string(), description: Some( "Roll a die with the given number of sides and return the result.".to_string(), @@ -45,6 +43,10 @@ pub fn dice() -> NativeProvider { } } +pub fn dice() -> NativeProvider { + dice_named("dice") +} + fn handle_dice(_tool_name: &str, args: &Value) -> Result { let sides = args .get("sides") @@ -56,48 +58,10 @@ fn handle_dice(_tool_name: &str, args: &Value) -> Result { Ok(format!("Rolled a d{sides}: {result}")) } -#[cfg(test)] -mod tests { - use super::*; - - #[test] - fn dice_returns_valid_result() { - let provider = dice(); - assert_eq!(provider.tools.len(), 1); - assert_eq!(provider.tools[0].name, "roll_dice"); - assert!(provider.tools[0].direct_return); - - let out = provider - .call("roll_dice", &json!({"sides": 20})) - .expect("dice call"); - assert!(out.starts_with("Rolled a d20: "), "got: {out}"); - - let num: u64 = 
out.trim_start_matches("Rolled a d20: ").parse().unwrap(); - assert!((1..=20).contains(&num)); - } - - #[test] - fn dice_clamps_invalid_sides() { - let provider = dice(); - - let out = provider - .call("roll_dice", &json!({"sides": 0})) - .expect("sides=0"); - assert!(out.starts_with("Rolled a d2: "), "clamped to 2, got: {out}"); - - let out = provider - .call("roll_dice", &json!({"sides": 9999999})) - .expect("sides=9999999"); - assert!( - out.starts_with("Rolled a d1000000: "), - "clamped to MAX, got: {out}" - ); - } - - #[test] - fn dice_rejects_unknown_tool() { - let provider = dice(); - let err = provider.call("unknown", &json!({})).unwrap_err(); - assert!(err.contains("unknown native tool")); +/// Resolve `id` from `mcp.json` (`type: native`) into a provider under `server_key`. +pub fn native_for(server_key: &str, id: &str) -> Result { + match id { + "dice" => Ok(dice_named(server_key)), + _ => Err(format!("unknown native id: {id}")), } } diff --git a/src-tauri/src/modules/mcp/protocol.rs b/src-tauri/src/modules/mcp/protocol.rs new file mode 100644 index 0000000..4f78322 --- /dev/null +++ b/src-tauri/src/modules/mcp/protocol.rs @@ -0,0 +1,43 @@ +//! Minimal JSON-RPC 2.0 for MCP over stdio. 
+ +use serde::{Deserialize, Serialize}; +use serde_json::Value; + +#[derive(Debug, Serialize)] +pub struct JsonRpcRequest<'a> { + pub jsonrpc: &'static str, + pub id: u64, + pub method: &'a str, + #[serde(skip_serializing_if = "Option::is_none")] + pub params: Option, +} + +impl<'a> JsonRpcRequest<'a> { + pub fn new(id: u64, method: &'a str, params: Option) -> Self { + Self { + jsonrpc: "2.0", + id, + method, + params, + } + } +} + +#[derive(Debug, Deserialize)] +pub struct JsonRpcResponse { + #[allow(dead_code)] + pub jsonrpc: Option, + #[allow(dead_code)] + pub id: Option, + #[serde(default)] + pub result: Option, + #[serde(default)] + pub error: Option, +} + +#[derive(Debug, Deserialize)] +pub struct JsonRpcError { + #[allow(dead_code)] + pub code: i64, + pub message: String, +} diff --git a/src-tauri/src/modules/mcp/registry.rs b/src-tauri/src/modules/mcp/registry.rs index b56e8d1..b182394 100644 --- a/src-tauri/src/modules/mcp/registry.rs +++ b/src-tauri/src/modules/mcp/registry.rs @@ -1,45 +1,36 @@ -//! Tool registry: aggregates providers behind a single dispatch interface. -//! -//! Adding a new provider kind (e.g. Docker-backed MCP) means adding a variant -//! to [`Provider`] and a match arm in each method — nothing else changes. - +use super::client::McpClient; use super::native::NativeProvider; use super::types::ToolDef; use serde_json::{json, Value}; -// ── Provider ─────────────────────────────────────────────────────── - -/// Where a tool lives. Extend this enum for Docker / external MCP -/// servers — the registry, agent loop, and HTTP API stay untouched. 
pub enum Provider { Native(NativeProvider), + Mcp(Box), } impl Provider { pub fn server_name(&self) -> &str { match self { Provider::Native(n) => &n.server_name, + Provider::Mcp(c) => &c.server_name, } } pub fn tools(&self) -> &[ToolDef] { match self { Provider::Native(n) => &n.tools, + Provider::Mcp(c) => &c.tools, } } pub async fn call_tool(&self, name: &str, args: Value) -> Result { match self { Provider::Native(n) => n.call(name, &args), + Provider::Mcp(c) => c.call_tool(name, args).await, } } } -// ── Registry ─────────────────────────────────────────────────────── - -/// Central tool registry. Pre-caches the Ollama tool JSON and the -/// human-readable name list at construction time so the hot path -/// (each chat turn) is just a cheap `clone()`. pub struct ToolRegistry { providers: Vec, cached_ollama_tools: Value, @@ -61,7 +52,9 @@ impl ToolRegistry { let cached_ollama_tools = build_ollama_tools(&providers); let cached_tool_names = providers .iter() - .flat_map(|p| p.tools().iter().map(|t| t.name.clone())) + .flat_map(|p| p.tools().iter()) + .filter(|t| should_expose_to_model(t)) + .map(|t| t.name.clone()) .collect(); Self { providers, @@ -89,7 +82,6 @@ impl ToolRegistry { self.cached_tool_names.is_empty() } - /// Dispatch a tool call. Returns `(output_text, direct_return)`. pub async fn call_tool(&self, name: &str, args: Value) -> Result<(String, bool), String> { let (server, tool) = match name.split_once('.') { Some((s, t)) => (Some(s), t), @@ -112,10 +104,28 @@ impl ToolRegistry { } } +fn should_expose_to_model(tool: &ToolDef) -> bool { + let desc = tool.description.as_deref().unwrap_or(""); + if desc.to_ascii_uppercase().contains("DEPRECATED") { + return false; + } + !REDUNDANT_TOOLS.contains(&tool.name.as_str()) +} + +/// Tools that add noise without value for a small local model. 
+const REDUNDANT_TOOLS: &[&str] = &[ + "read_media_file", + "read_multiple_files", + "list_directory_with_sizes", + "directory_tree", + "list_allowed_directories", +]; + fn build_ollama_tools(providers: &[Provider]) -> Value { let arr: Vec = providers .iter() .flat_map(|p| p.tools().iter()) + .filter(|t| should_expose_to_model(t)) .map(|t| { json!({ "type": "function", diff --git a/src-tauri/src/modules/mcp/service.rs b/src-tauri/src/modules/mcp/service.rs index 3f61d51..14440a9 100644 --- a/src-tauri/src/modules/mcp/service.rs +++ b/src-tauri/src/modules/mcp/service.rs @@ -1,65 +1,158 @@ -//! Public facade: build the tool registry from native providers. +//! Load `mcp.json` and build [`ToolRegistry`] — same code path for `native` and `stdio` +//! (Docker is just `command` + `args` on a `stdio` entry). +use super::client::McpClient; use super::native; use super::registry::{Provider, ToolRegistry}; +use super::types::{McpConfig, ServerEntry}; +use std::path::{Path, PathBuf}; -/// Build a registry pre-loaded with all bundled native tools. -/// No I/O, no subprocess — instant. -pub fn build_default_registry() -> ToolRegistry { - ToolRegistry::new(vec![Provider::Native(native::dice())]) +const FILESYSTEM_SERVER_KEY: &str = "filesystem"; +const FILESYSTEM_PKG: &str = "@modelcontextprotocol/server-filesystem"; + +/// Prefer project `mcp.json` under `src-tauri/` (or `./mcp.json` when cwd is `src-tauri`), +/// otherwise `mcp.json` next to `connection.json` in app data. 
+pub fn resolve_mcp_config_path(store_path: &Path) -> (PathBuf, &'static str) { + let cwd = std::env::current_dir().unwrap_or_else(|_| PathBuf::from(".")); + + let from_repo_root = cwd.join("src-tauri").join("mcp.json"); + if from_repo_root.exists() { + return (from_repo_root, "project"); + } + + let in_crate_root = cwd.join("mcp.json"); + if cwd.join("Cargo.toml").exists() && in_crate_root.exists() { + return (in_crate_root, "project"); + } + + let app_path = store_path + .parent() + .map(|p| p.join("mcp.json")) + .unwrap_or_else(|| PathBuf::from("mcp.json")); + (app_path, "app_data") } -#[cfg(test)] -mod tests { - use super::*; +pub fn read_config(path: &Path) -> Result { + let raw = std::fs::read_to_string(path).map_err(|e| format!("read mcp.json: {e}"))?; + serde_json::from_str(&raw).map_err(|e| { + format!( + "parse mcp.json: {e} — every server entry needs a \"type\" field (\"native\" or \"stdio\")" + ) + }) +} - #[test] - fn default_registry_has_dice() { - let reg = build_default_registry(); - assert_eq!(reg.tool_names(), &["roll_dice"]); +pub fn save_config(path: &Path, cfg: &McpConfig) -> Result<(), String> { + if let Some(parent) = path.parent() { + let _ = std::fs::create_dir_all(parent); } + let pretty = serde_json::to_string_pretty(cfg).map_err(|e| format!("encode mcp.json: {e}"))?; + std::fs::write(path, pretty).map_err(|e| format!("write mcp.json: {e}")) +} - #[tokio::test] - async fn native_dice_callable_through_registry() { - let reg = build_default_registry(); - let (text, direct) = reg - .call_tool("roll_dice", serde_json::json!({"sides": 6})) - .await - .expect("call roll_dice"); - assert!(text.starts_with("Rolled a d6: "), "got: {text}"); - assert!(direct, "dice should be direct_return"); +/// Allowed folder for the official MCP filesystem stdio server (last path segment in `args`). +pub fn filesystem_allowed_path(cfg: &McpConfig) -> Option { + let ServerEntry::Stdio { args, .. } = cfg.servers.get(FILESYSTEM_SERVER_KEY)? 
else { + return None; + }; + if !args.iter().any(|a| a.contains("server-filesystem")) { + return None; + } + args.last().cloned() +} + +pub fn set_filesystem_allowed_path(cfg: &mut McpConfig, abs_path: &str) { + let entry = ServerEntry::Stdio { + command: "npx".into(), + args: vec![ + "-y".into(), + FILESYSTEM_PKG.into(), + abs_path.trim().to_string(), + ], + env: Default::default(), + }; + cfg.servers.insert(FILESYSTEM_SERVER_KEY.into(), entry); +} + +fn default_config_value() -> serde_json::Value { + serde_json::json!({ + "servers": { + "dice": { + "type": "native", + "id": "dice" + } + } + }) +} + +pub fn load_or_init_config(path: &Path) -> Result { + if path.exists() { + return read_config(path); } - /// Proves the reply comes from the native tool, not the model: - /// - /// 1. `direct_return` is true → agent returns tool output verbatim, - /// the model never sees it and cannot rephrase. - /// 2. The output matches `Rolled a dN: M` with M in [1, N] — a format - /// the native Rust handler produces. If the model fabricated a roll, - /// `direct_return` would be false and source would be `Model`. 
- #[tokio::test] - async fn dice_result_is_provably_from_tool_not_model() { - let reg = build_default_registry(); - - for sides in [6, 20, 100] { - let (text, direct) = reg - .call_tool("roll_dice", serde_json::json!({ "sides": sides })) - .await - .expect("call roll_dice"); - - assert!(direct, "direct_return must be true for dice"); - - let prefix = format!("Rolled a d{sides}: "); - assert!( - text.starts_with(&prefix), - "expected prefix '{prefix}', got: {text}" - ); - - let num: u64 = text[prefix.len()..].trim().parse().expect("parse roll"); - assert!( - (1..=sides).contains(&num), - "roll {num} out of range [1, {sides}]" - ); + if let Some(parent) = path.parent() { + let _ = std::fs::create_dir_all(parent); + } + let default = default_config_value(); + let pretty = serde_json::to_string_pretty(&default) + .map_err(|e| format!("encode default mcp.json: {e}"))?; + std::fs::write(path, pretty).map_err(|e| format!("write mcp.json: {e}"))?; + serde_json::from_value(default).map_err(|e| e.to_string()) +} + +/// Connect every server in order (stable `BTreeMap` keys). Returns registry + status lines. 
+pub async fn build_registry(cfg: &McpConfig) -> (ToolRegistry, Vec) { + let mut providers = Vec::new(); + let mut status = Vec::new(); + + for (server_key, entry) in &cfg.servers { + match entry { + ServerEntry::Native { id } => match native::native_for(server_key, id) { + Ok(p) => { + let n = p.tools.len(); + providers.push(Provider::Native(p)); + status.push(format!( + "{server_key} native ({n} tool{})", + if n == 1 { "" } else { "s" } + )); + } + Err(e) => status.push(format!("{server_key} native failed: {e}")), + }, + ServerEntry::Stdio { command, args, env } => match McpClient::connect( + server_key.clone(), + command.clone(), + args.clone(), + env.clone(), + ) + .await + { + Ok(client) => { + let n = client.tools.len(); + providers.push(Provider::Mcp(Box::new(client))); + status.push(format!( + "{server_key} stdio ({n} tool{})", + if n == 1 { "" } else { "s" } + )); + } + Err(e) => status.push(format!("{server_key} stdio failed: {e}")), + }, } } + + (ToolRegistry::new(providers), status) +} + +/// Replace in-memory tools after a config change (writes should use [`save_config`] first). +pub async fn rebuild_registry_into_state(state: &crate::shared::state::AppState, cfg: &McpConfig) { + let (registry, status) = build_registry(cfg).await; + for line in status { + state.emit_log("mcp", &line).await; + } + let n = registry.tool_names().len(); + *state.mcp.write().await = registry; + state + .emit_log( + "mcp", + &format!("ready ({n} tool{})", if n == 1 { "" } else { "s" }), + ) + .await; } diff --git a/src-tauri/src/modules/mcp/transport.rs b/src-tauri/src/modules/mcp/transport.rs new file mode 100644 index 0000000..cec57d1 --- /dev/null +++ b/src-tauri/src/modules/mcp/transport.rs @@ -0,0 +1,128 @@ +//! Line-delimited JSON over stdin/stdout of a child process. 
+ +use super::protocol::{JsonRpcRequest, JsonRpcResponse}; +use serde_json::Value; +use std::collections::HashMap; +use std::process::Stdio; +use std::sync::atomic::{AtomicU64, Ordering}; +use std::sync::Arc; +use tokio::io::{AsyncBufReadExt, AsyncWriteExt, BufReader}; +use tokio::process::{Child, ChildStdin, Command}; +use tokio::sync::{oneshot, Mutex}; + +type Pending = Arc>>>; + +pub struct StdioTransport { + #[allow(dead_code)] + child: Child, + stdin: Mutex, + next_id: AtomicU64, + pending: Pending, +} + +impl StdioTransport { + pub async fn spawn( + command: &str, + args: &[String], + env: &HashMap, + ) -> Result { + let mut cmd = Command::new(command); + cmd.args(args) + .envs(env) + .stdin(Stdio::piped()) + .stdout(Stdio::piped()) + .stderr(Stdio::piped()) + .kill_on_drop(true); + + let mut child = cmd + .spawn() + .map_err(|e| format!("spawn `{command}` failed: {e}"))?; + + let stdin = child.stdin.take().ok_or("no stdin")?; + let stdout = child.stdout.take().ok_or("no stdout")?; + let stderr = child.stderr.take().ok_or("no stderr")?; + + let pending: Pending = Arc::new(Mutex::new(HashMap::new())); + + { + let pending = pending.clone(); + tokio::spawn(async move { + let mut lines = BufReader::new(stdout).lines(); + while let Ok(Some(line)) = lines.next_line().await { + let line = line.trim(); + if line.is_empty() { + continue; + } + let Ok(resp) = serde_json::from_str::(line) else { + continue; + }; + if let Some(id) = resp.id { + if let Some(tx) = pending.lock().await.remove(&id) { + let _ = tx.send(resp); + } + } + } + }); + } + + tokio::spawn(async move { + let mut lines = BufReader::new(stderr).lines(); + while let Ok(Some(_)) = lines.next_line().await {} + }); + + Ok(Self { + child, + stdin: Mutex::new(stdin), + next_id: AtomicU64::new(1), + pending, + }) + } + + pub async fn call(&self, method: &str, params: Option) -> Result { + let id = self.next_id.fetch_add(1, Ordering::Relaxed); + let req = JsonRpcRequest::new(id, method, params); + let mut 
payload = serde_json::to_vec(&req).map_err(|e| format!("encode request: {e}"))?; + payload.push(b'\n'); + + let (tx, rx) = oneshot::channel(); + self.pending.lock().await.insert(id, tx); + + { + let mut stdin = self.stdin.lock().await; + stdin + .write_all(&payload) + .await + .map_err(|e| format!("write stdin: {e}"))?; + stdin.flush().await.map_err(|e| format!("flush: {e}"))?; + } + + let resp = tokio::time::timeout(std::time::Duration::from_secs(30), rx) + .await + .map_err(|_| "mcp call timed out".to_string())? + .map_err(|_| "mcp response channel dropped".to_string())?; + + if let Some(err) = resp.error { + return Err(format!("mcp error: {}", err.message)); + } + Ok(resp.result.unwrap_or(Value::Null)) + } + + pub async fn notify(&self, method: &str, params: Option) -> Result<(), String> { + let mut msg = serde_json::json!({ + "jsonrpc": "2.0", + "method": method, + }); + if let Some(p) = params { + msg["params"] = p; + } + let mut payload = serde_json::to_vec(&msg).map_err(|e| e.to_string())?; + payload.push(b'\n'); + let mut stdin = self.stdin.lock().await; + stdin + .write_all(&payload) + .await + .map_err(|e| format!("write stdin: {e}"))?; + stdin.flush().await.map_err(|e| format!("flush: {e}"))?; + Ok(()) + } +} diff --git a/src-tauri/src/modules/mcp/types.rs b/src-tauri/src/modules/mcp/types.rs index ddf2d14..521ed7e 100644 --- a/src-tauri/src/modules/mcp/types.rs +++ b/src-tauri/src/modules/mcp/types.rs @@ -1,4 +1,28 @@ use serde::{Deserialize, Serialize}; +use std::collections::{BTreeMap, HashMap}; + +/// Root config: `src-tauri/mcp.json` in dev or `mcp.json` next to app data (`connection.json`). +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct McpConfig { + #[serde(default)] + pub servers: BTreeMap, +} + +/// One logical MCP server. Same top-level shape for every backend: `type` picks the loader. 
+#[derive(Debug, Clone, Serialize, Deserialize)] +#[serde(tag = "type", rename_all = "snake_case")] +pub enum ServerEntry { + /// In-process tool pack; `id` selects a built-in (e.g. `dice`). + Native { id: String }, + /// Child process speaking MCP over stdio (`docker run … -i` is just command + args). + Stdio { + command: String, + #[serde(default)] + args: Vec, + #[serde(default)] + env: HashMap, + }, +} /// Definition of a single tool, regardless of where it runs. #[derive(Debug, Clone, Serialize, Deserialize)] @@ -10,8 +34,6 @@ pub struct ToolDef { pub description: Option, #[serde(default, rename = "inputSchema")] pub input_schema: serde_json::Value, - /// When true the agent returns tool output verbatim, skipping the LLM - /// summarization round-trip. Good for deterministic tools like dice. #[serde(skip)] pub direct_return: bool, } diff --git a/src-tauri/src/modules/ollama/service.rs b/src-tauri/src/modules/ollama/service.rs index 0dd17d9..bae63fe 100644 --- a/src-tauri/src/modules/ollama/service.rs +++ b/src-tauri/src/modules/ollama/service.rs @@ -65,7 +65,12 @@ pub async fn chat_with_tools( .await .map_err(|e| e.to_string())?; + let status = resp.status(); let body: serde_json::Value = resp.json().await.map_err(|e| e.to_string())?; + if !status.is_success() { + return Err(format!("ollama chat HTTP {status}: {body}")); + } + Ok(body .get("message") .cloned() diff --git a/src-tauri/src/shared/state.rs b/src-tauri/src/shared/state.rs index 283ee1f..0540568 100644 --- a/src-tauri/src/shared/state.rs +++ b/src-tauri/src/shared/state.rs @@ -28,12 +28,16 @@ pub struct AppState { pub bot_running: Arc>, pub log_tx: Arc>>>, pub store_path: PathBuf, + /// Resolved `mcp.json` path (project `src-tauri/mcp.json` when present, else app data dir). + pub mcp_config_path: PathBuf, + /// `"project"` or `"app_data"` — for dashboard copy only. 
+ pub mcp_config_source: String, pub app_handle: Arc>>, pub mcp: Arc>, } impl AppState { - pub fn new(store_path: PathBuf) -> Self { + pub fn new(store_path: PathBuf, mcp_config_path: PathBuf, mcp_config_source: String) -> Self { let (log_tx, _) = tokio::sync::broadcast::channel(256); Self { connection: Arc::new(Mutex::new(None)), @@ -41,6 +45,8 @@ impl AppState { bot_running: Arc::new(Mutex::new(false)), log_tx: Arc::new(Mutex::new(Some(log_tx))), store_path, + mcp_config_path, + mcp_config_source, app_handle: Arc::new(Mutex::new(None)), mcp: Arc::new(RwLock::new(ToolRegistry::default())), } diff --git a/src-tauri/tests/mcp_tools.rs b/src-tauri/tests/mcp_tools.rs new file mode 100644 index 0000000..9955b90 --- /dev/null +++ b/src-tauri/tests/mcp_tools.rs @@ -0,0 +1,102 @@ +//! Integration tests for MCP tooling. + +use pengine_lib::modules::mcp::{native, service}; +use serde_json::json; +use std::path::PathBuf; + +fn temp_mcp_path(name: &str) -> PathBuf { + let mut p = std::env::temp_dir(); + let nanos = std::time::SystemTime::now() + .duration_since(std::time::UNIX_EPOCH) + .map(|d| d.as_nanos()) + .unwrap_or(0); + p.push(format!("pengine-mcp-{name}-{nanos}.json")); + p +} + +#[test] +fn dice_returns_valid_result() { + let provider = native::dice(); + assert_eq!(provider.tools.len(), 1); + assert_eq!(provider.tools[0].name, "roll_dice"); + assert!(provider.tools[0].direct_return); + + let out = provider + .call("roll_dice", &json!({"sides": 20})) + .expect("dice call"); + assert!(out.starts_with("Rolled a d20: "), "got: {out}"); + + let num: u64 = out.trim_start_matches("Rolled a d20: ").parse().unwrap(); + assert!((1..=20).contains(&num)); +} + +#[test] +fn dice_clamps_invalid_sides() { + let provider = native::dice(); + + let out = provider + .call("roll_dice", &json!({"sides": 0})) + .expect("sides=0"); + assert!(out.starts_with("Rolled a d2: "), "clamped to 2, got: {out}"); + + let out = provider + .call("roll_dice", &json!({"sides": 9999999})) + 
.expect("sides=9999999"); + assert!( + out.starts_with("Rolled a d1000000: "), + "clamped to MAX, got: {out}" + ); +} + +#[test] +fn dice_rejects_unknown_tool() { + let provider = native::dice(); + let err = provider.call("unknown", &json!({})).unwrap_err(); + assert!(err.contains("unknown native tool")); +} + +#[tokio::test] +async fn mcp_json_loads_native_dice() { + let path = temp_mcp_path("load"); + let cfg = service::load_or_init_config(&path).expect("load_or_init"); + assert!(cfg.servers.contains_key("dice")); + + let (reg, status) = service::build_registry(&cfg).await; + assert!(status + .iter() + .any(|s| s.contains("dice") && s.contains("native"))); + assert_eq!(reg.tool_names(), &["roll_dice"]); + let _ = std::fs::remove_file(path); +} + +#[tokio::test] +async fn native_dice_callable_through_registry_from_config() { + let path = temp_mcp_path("registry"); + let cfg = service::load_or_init_config(&path).expect("load_or_init"); + let (reg, _) = service::build_registry(&cfg).await; + let (text, direct) = reg + .call_tool("roll_dice", json!({"sides": 6})) + .await + .expect("call roll_dice"); + assert!(text.starts_with("Rolled a d6: "), "got: {text}"); + assert!(direct, "dice should be direct_return"); + let _ = std::fs::remove_file(path); +} + +#[test] +fn native_server_key_rename_in_config() { + let path = temp_mcp_path("rename"); + std::fs::write( + &path, + r#"{"servers":{"mydice":{"type":"native","id":"dice"}}}"#, + ) + .unwrap(); + let cfg = service::load_or_init_config(&path).expect("load"); + let rt = tokio::runtime::Builder::new_current_thread() + .enable_all() + .build() + .unwrap(); + let (reg, _) = rt.block_on(service::build_registry(&cfg)); + assert_eq!(reg.all_tools()[0].server_name, "mydice"); + let _ = std::fs::remove_file(path); +} diff --git a/src/modules/bot/components/TerminalPreview.tsx b/src/modules/bot/components/TerminalPreview.tsx index f9c3220..b1cb3e9 100644 --- a/src/modules/bot/components/TerminalPreview.tsx +++ 
b/src/modules/bot/components/TerminalPreview.tsx @@ -13,6 +13,7 @@ function kindClass(kind: string) { if (kind === "ok") return "bg-emerald-400/10 text-emerald-300"; if (kind === "run") return "bg-sky-400/10 text-sky-300"; if (kind === "tool") return "bg-yellow-400/10 text-yellow-200"; + if (kind === "time") return "bg-fuchsia-400/10 text-fuchsia-200"; if (kind === "reply") return "bg-violet-400/10 text-violet-300"; if (kind === "msg") return "bg-cyan-400/10 text-cyan-300"; return "bg-slate-400/10 text-slate-300"; diff --git a/src/modules/mcp/components/McpToolsPanel.tsx b/src/modules/mcp/components/McpToolsPanel.tsx index ecaad7f..a3b7a7b 100644 --- a/src/modules/mcp/components/McpToolsPanel.tsx +++ b/src/modules/mcp/components/McpToolsPanel.tsx @@ -1,6 +1,12 @@ import * as Accordion from "@radix-ui/react-accordion"; import { useEffect, useMemo, useState } from "react"; -import { fetchMcpTools, type McpTool } from ".."; +import { + fetchMcpConfig, + fetchMcpTools, + putMcpFilesystemPath, + type McpConfigInfo, + type McpTool, +} from ".."; /** * Dashboard panel showing MCP tool *groups*. 
Each accordion item is one tool @@ -9,21 +15,73 @@ import { fetchMcpTools, type McpTool } from ".."; */ export function McpToolsPanel() { const [tools, setTools] = useState(null); + const [config, setConfig] = useState(null); + const [pathDraft, setPathDraft] = useState(""); + const [busy, setBusy] = useState(false); + const [notice, setNotice] = useState(null); + + const syncAfterSave = async () => { + const [t, c] = await Promise.all([fetchMcpTools(), fetchMcpConfig()]); + setTools(t); + setConfig(c); + if (c?.filesystem_allowed_path) setPathDraft(c.filesystem_allowed_path); + }; useEffect(() => { let cancelled = false; - const load = async () => { + let timer: ReturnType; + + void (async () => { + const c = await fetchMcpConfig(); + if (cancelled) return; + setConfig(c); + if (c?.filesystem_allowed_path) setPathDraft(c.filesystem_allowed_path); + })(); + + const pollTools = async () => { const data = await fetchMcpTools(); - if (!cancelled) setTools(data); + if (cancelled) return; + setTools(data); + const next = data.length > 0 ? 
10_000 : 1_000; + timer = setTimeout(() => pollTools(), next); }; - load(); - const timer = setInterval(load, 10000); + + pollTools(); return () => { cancelled = true; - clearInterval(timer); + clearTimeout(timer); }; }, []); + const applyPath = async (path: string) => { + const trimmed = path.trim(); + if (!trimmed) { + setNotice("Enter a folder path"); + return; + } + setBusy(true); + setNotice(null); + const ok = await putMcpFilesystemPath(trimmed); + setBusy(false); + if (!ok) { + setNotice("Could not save (is the API running?)"); + return; + } + await syncAfterSave(); + setNotice("Saved — tools reloaded"); + }; + + const pickFolder = async () => { + setNotice(null); + try { + const { invoke } = await import("@tauri-apps/api/core"); + const picked = await invoke("pick_mcp_filesystem_folder"); + if (picked) await applyPath(picked); + } catch { + setNotice("Folder picker needs the desktop app"); + } + }; + // Bucket tools by their server (group) name. Stable, alphabetical order. const groups = useMemo(() => { if (!tools) return null; @@ -38,9 +96,56 @@ export function McpToolsPanel() { .map(([server, items]) => ({ server, items })); }, [tools]); + const sourceLabel = + config == null + ? "…" + : config.source === "project" + ? "Project (src-tauri/mcp.json)" + : "App data mcp.json"; + return (
-

Available tools

+

MCP config

+
+

{sourceLabel}

+

+ {config?.config_path ?? "…"} +

+

+ Filesystem allow folder +

+ setPathDraft(e.target.value)} + placeholder="/absolute/path/to/project" + className="mt-1.5 w-full rounded-lg border border-white/15 bg-white/5 px-2.5 py-2 font-mono text-xs text-white outline-none placeholder:text-white/25 focus:border-white/30" + /> +
+ + +
+ {notice &&

{notice}

} +
+ +

Available tools

{groups === null && (

@@ -53,11 +158,7 @@ export function McpToolsPanel() { )} {groups !== null && groups.length > 0 && ( - g.server)} - className="mt-4 grid gap-2" - > + {groups.map((group) => ( { + try { + const resp = await fetch(`${PENGINE_API_BASE}/v1/mcp/config`, { + signal: AbortSignal.timeout(timeoutMs), + }); + if (!resp.ok) return null; + return (await resp.json()) as McpConfigInfo; + } catch { + return null; + } +} + +/** PUT `/v1/mcp/filesystem` — set allowed folder for the `filesystem` stdio server and reload tools. */ +export async function putMcpFilesystemPath(path: string, timeoutMs = 15000): Promise { + try { + const resp = await fetch(`${PENGINE_API_BASE}/v1/mcp/filesystem`, { + method: "PUT", + headers: { "Content-Type": "application/json" }, + body: JSON.stringify({ path }), + signal: AbortSignal.timeout(timeoutMs), + }); + return resp.ok; + } catch { + return false; + } +} + /** GET `/v1/mcp/tools` — flat list of tools across all connected MCP servers. */ export async function fetchMcpTools(timeoutMs = 3000): Promise { try { From 9b91a3fd9d97c85b496a3904fe03a9670658b89d Mon Sep 17 00:00:00 2001 From: MaximEdogawa Date: Fri, 10 Apr 2026 11:06:10 +0200 Subject: [PATCH 03/11] feat: enhance agent and MCP client for improved tool handling - Updated the agent's system message to clarify tool usage rules for better user guidance. - Refined text handling in the agent to ensure non-empty responses are logged appropriately. - Added a `direct_return` option in the MCP client to allow tools to return results directly to users, bypassing the model for summarization. - Adjusted the MCP service and types to support the new `direct_return` functionality, enhancing tool integration. 
--- src-tauri/src/modules/bot/agent.rs | 41 ++++++++++++++++++++++------ src-tauri/src/modules/mcp/client.rs | 9 +++++- src-tauri/src/modules/mcp/service.rs | 12 ++++++-- src-tauri/src/modules/mcp/types.rs | 4 +++ 4 files changed, 54 insertions(+), 12 deletions(-) diff --git a/src-tauri/src/modules/bot/agent.rs b/src-tauri/src/modules/bot/agent.rs index 12e3db5..d13befd 100644 --- a/src-tauri/src/modules/bot/agent.rs +++ b/src-tauri/src/modules/bot/agent.rs @@ -60,8 +60,11 @@ pub async fn run_turn(state: &AppState, user_message: &str) -> Result Result, env: HashMap, + direct_return: bool, ) -> Result { let transport = StdioTransport::spawn(&command, &args, &env).await?; @@ -27,7 +28,13 @@ impl McpClient { let _ = transport.notify("notifications/initialized", None).await; let result = transport.call("tools/list", None).await?; - let tools = parse_tools(&server_name, &result); + let mut tools = parse_tools(&server_name, &result); + + if direct_return { + for tool in &mut tools { + tool.direct_return = true; + } + } Ok(Self { server_name, diff --git a/src-tauri/src/modules/mcp/service.rs b/src-tauri/src/modules/mcp/service.rs index 14440a9..d379163 100644 --- a/src-tauri/src/modules/mcp/service.rs +++ b/src-tauri/src/modules/mcp/service.rs @@ -69,6 +69,7 @@ pub fn set_filesystem_allowed_path(cfg: &mut McpConfig, abs_path: &str) { abs_path.trim().to_string(), ], env: Default::default(), + direct_return: true, }; cfg.servers.insert(FILESYSTEM_SERVER_KEY.into(), entry); } @@ -117,19 +118,26 @@ pub async fn build_registry(cfg: &McpConfig) -> (ToolRegistry, Vec) { } Err(e) => status.push(format!("{server_key} native failed: {e}")), }, - ServerEntry::Stdio { command, args, env } => match McpClient::connect( + ServerEntry::Stdio { + command, + args, + env, + direct_return, + } => match McpClient::connect( server_key.clone(), command.clone(), args.clone(), env.clone(), + *direct_return, ) .await { Ok(client) => { let n = client.tools.len(); + let dr = if *direct_return { " 
direct_return" } else { "" }; providers.push(Provider::Mcp(Box::new(client))); status.push(format!( - "{server_key} stdio ({n} tool{})", + "{server_key} stdio ({n} tool{}{dr})", if n == 1 { "" } else { "s" } )); } diff --git a/src-tauri/src/modules/mcp/types.rs b/src-tauri/src/modules/mcp/types.rs index 521ed7e..66a96f8 100644 --- a/src-tauri/src/modules/mcp/types.rs +++ b/src-tauri/src/modules/mcp/types.rs @@ -21,6 +21,10 @@ pub enum ServerEntry { args: Vec, #[serde(default)] env: HashMap, + /// When true, tool results are returned directly to the user without + /// sending them back to the model for summarisation. + #[serde(default)] + direct_return: bool, }, } From 178d721da7b1712ef9270889fdfa6f1b5fee8467 Mon Sep 17 00:00:00 2001 From: MaximEdogawa Date: Fri, 10 Apr 2026 11:49:52 +0200 Subject: [PATCH 04/11] feat: implement server management in MCP with CRUD operations - Added new API endpoints for managing MCP servers, including fetching, creating, updating, and deleting server entries. - Introduced `ServerEntry` types to support both standard and native server configurations. - Created `AddServerForm` component for user-friendly server addition and configuration. - Developed `McpServerCard` component for displaying and managing individual server entries. - Updated `McpToolsPanel` to integrate server management features, allowing users to view and interact with configured servers. - Enhanced the `TerminalPreview` component for improved display of terminal output. 
--- src-tauri/src/infrastructure/http_server.rs | 119 ++++- .../bot/components/TerminalPreview.tsx | 2 +- src/modules/mcp/components/AddServerForm.tsx | 300 ++++++++++++ src/modules/mcp/components/McpServerCard.tsx | 430 ++++++++++++++++++ src/modules/mcp/components/McpToolsPanel.tsx | 265 +++++------ src/modules/mcp/index.ts | 63 +++ src/pages/DashboardPage.tsx | 178 ++++---- src/shared/ui/TopMenu.tsx | 148 ++++-- 8 files changed, 1233 insertions(+), 272 deletions(-) create mode 100644 src/modules/mcp/components/AddServerForm.tsx create mode 100644 src/modules/mcp/components/McpServerCard.tsx diff --git a/src-tauri/src/infrastructure/http_server.rs b/src-tauri/src/infrastructure/http_server.rs index 0859cd4..e4f98c9 100644 --- a/src-tauri/src/infrastructure/http_server.rs +++ b/src-tauri/src/infrastructure/http_server.rs @@ -2,7 +2,7 @@ use crate::infrastructure::bot_lifecycle; use crate::modules::bot::{repository, service as bot_service}; use crate::modules::mcp::service as mcp_service; use crate::shared::state::{AppState, ConnectionData}; -use axum::extract::State; +use axum::extract::{Path, State}; use axum::http::StatusCode; use axum::response::{Json, Sse}; use axum::routing::{delete, get, post, put}; @@ -76,6 +76,9 @@ pub async fn start_server(state: AppState) { .route("/v1/mcp/tools", get(handle_mcp_tools)) .route("/v1/mcp/config", get(handle_mcp_config_get)) .route("/v1/mcp/filesystem", put(handle_mcp_filesystem_put)) + .route("/v1/mcp/servers", get(handle_mcp_servers_list)) + .route("/v1/mcp/servers/{name}", put(handle_mcp_server_upsert)) + .route("/v1/mcp/servers/{name}", delete(handle_mcp_server_delete)) .layer(cors) .with_state(state.clone()); @@ -313,6 +316,120 @@ async fn handle_mcp_tools(State(state): State) -> Json ) } +// ── MCP server CRUD ────────────────────────────────────────────────── + +#[derive(Serialize)] +struct McpServersResponse { + servers: std::collections::BTreeMap, +} + +async fn handle_mcp_servers_list( + State(state): State, +) -> 
Result, (StatusCode, Json)> { + let cfg = mcp_service::load_or_init_config(&state.mcp_config_path).map_err(|e| { + ( + StatusCode::INTERNAL_SERVER_ERROR, + Json(ErrorResponse { error: e }), + ) + })?; + Ok(Json(McpServersResponse { + servers: cfg.servers, + })) +} + +fn is_valid_server_name(name: &str) -> bool { + !name.is_empty() + && name.len() <= 64 + && name + .chars() + .all(|c| c.is_ascii_alphanumeric() || c == '-' || c == '_') +} + +async fn handle_mcp_server_upsert( + State(state): State, + Path(name): Path, + Json(entry): Json, +) -> Result<(StatusCode, Json), (StatusCode, Json)> { + if !is_valid_server_name(&name) { + return Err(( + StatusCode::BAD_REQUEST, + Json(ErrorResponse { + error: "server name must be alphanumeric, hyphens, or underscores (max 64 chars)" + .into(), + }), + )); + } + + if let crate::modules::mcp::types::ServerEntry::Stdio { ref command, .. } = entry { + if command.trim().is_empty() { + return Err(( + StatusCode::BAD_REQUEST, + Json(ErrorResponse { + error: "command must not be empty".into(), + }), + )); + } + } + + let mut cfg = mcp_service::load_or_init_config(&state.mcp_config_path).map_err(|e| { + ( + StatusCode::INTERNAL_SERVER_ERROR, + Json(ErrorResponse { error: e }), + ) + })?; + + cfg.servers.insert(name.clone(), entry); + + mcp_service::save_config(&state.mcp_config_path, &cfg).map_err(|e| { + ( + StatusCode::INTERNAL_SERVER_ERROR, + Json(ErrorResponse { error: e }), + ) + })?; + + state + .emit_log("mcp", &format!("server '{name}' saved")) + .await; + mcp_service::rebuild_registry_into_state(&state, &cfg).await; + + Ok((StatusCode::OK, Json(serde_json::json!({ "ok": true })))) +} + +async fn handle_mcp_server_delete( + State(state): State, + Path(name): Path, +) -> Result<(StatusCode, Json), (StatusCode, Json)> { + let mut cfg = mcp_service::load_or_init_config(&state.mcp_config_path).map_err(|e| { + ( + StatusCode::INTERNAL_SERVER_ERROR, + Json(ErrorResponse { error: e }), + ) + })?; + + if 
cfg.servers.remove(&name).is_none() { + return Err(( + StatusCode::NOT_FOUND, + Json(ErrorResponse { + error: format!("server '{name}' not found"), + }), + )); + } + + mcp_service::save_config(&state.mcp_config_path, &cfg).map_err(|e| { + ( + StatusCode::INTERNAL_SERVER_ERROR, + Json(ErrorResponse { error: e }), + ) + })?; + + state + .emit_log("mcp", &format!("server '{name}' removed")) + .await; + mcp_service::rebuild_registry_into_state(&state, &cfg).await; + + Ok((StatusCode::OK, Json(serde_json::json!({ "ok": true })))) +} + async fn handle_logs_sse( State(state): State, ) -> Sse>> { diff --git a/src/modules/bot/components/TerminalPreview.tsx b/src/modules/bot/components/TerminalPreview.tsx index b1cb3e9..9511a34 100644 --- a/src/modules/bot/components/TerminalPreview.tsx +++ b/src/modules/bot/components/TerminalPreview.tsx @@ -122,7 +122,7 @@ export function TerminalPreview() {

{lines.map((line, i) => (
diff --git a/src/modules/mcp/components/AddServerForm.tsx b/src/modules/mcp/components/AddServerForm.tsx new file mode 100644 index 0000000..9beaa7a --- /dev/null +++ b/src/modules/mcp/components/AddServerForm.tsx @@ -0,0 +1,300 @@ +import { useState } from "react"; +import type { ServerEntry } from ".."; + +type Props = { + busy: boolean; + onAdd: (name: string, entry: ServerEntry) => Promise; +}; + +type Mode = "paste" | "form"; + +export function AddServerForm({ busy, onAdd }: Props) { + const [open, setOpen] = useState(false); + const [mode, setMode] = useState("paste"); + const [error, setError] = useState(null); + + // Paste mode state + const [jsonText, setJsonText] = useState(""); + const [pasteName, setPasteName] = useState(""); + + // Form mode state + const [formName, setFormName] = useState(""); + const [command, setCommand] = useState(""); + const [argsText, setArgsText] = useState(""); + const [envText, setEnvText] = useState(""); + const [directReturn, setDirectReturn] = useState(false); + + const reset = () => { + setJsonText(""); + setPasteName(""); + setFormName(""); + setCommand(""); + setArgsText(""); + setEnvText(""); + setDirectReturn(false); + setError(null); + }; + + const handlePasteSubmit = async () => { + setError(null); + let parsed: unknown; + try { + parsed = JSON.parse(jsonText); + } catch { + setError("Invalid JSON"); + return; + } + + if (typeof parsed !== "object" || parsed === null || Array.isArray(parsed)) { + setError("Expected a JSON object"); + return; + } + + const obj = parsed as Record; + + // Detect format: { "type": "stdio", ... } vs { "my-server": { "type": "stdio", ... 
} } + let name: string; + let entry: ServerEntry; + + if ("type" in obj && (obj.type === "stdio" || obj.type === "native")) { + // Direct entry — need a name from the input + if (!pasteName.trim()) { + setError("Enter a server name (the JSON has no key wrapper)"); + return; + } + name = pasteName.trim(); + entry = normalizeEntry(obj); + } else { + // Wrapped: { "server-name": { ... } } + const keys = Object.keys(obj); + if (keys.length !== 1) { + setError('Expected either a server entry or { "name": { ...entry } }'); + return; + } + name = keys[0]; + const inner = obj[name]; + if (typeof inner !== "object" || inner === null || Array.isArray(inner)) { + setError(`Value for "${name}" is not a valid server entry`); + return; + } + entry = normalizeEntry(inner as Record); + } + + await onAdd(name, entry); + reset(); + setOpen(false); + }; + + const handleFormSubmit = async () => { + setError(null); + const name = formName.trim(); + if (!name) { + setError("Server name is required"); + return; + } + if (!command.trim()) { + setError("Command is required"); + return; + } + + const args = argsText + .split("\n") + .map((l) => l.trim()) + .filter(Boolean); + const env: Record = {}; + for (const line of envText.split("\n")) { + const eq = line.indexOf("="); + if (eq > 0) env[line.slice(0, eq).trim()] = line.slice(eq + 1).trim(); + } + + await onAdd(name, { + type: "stdio", + command: command.trim(), + args, + env, + direct_return: directReturn, + }); + reset(); + setOpen(false); + }; + + const inputClass = + "w-full rounded-lg border border-white/15 bg-white/5 px-2.5 py-2 font-mono text-xs text-white outline-none placeholder:text-white/25 focus:border-white/30"; + + if (!open) { + return ( + + ); + } + + return ( +
+
+

Add server

+ +
+ + {/* Mode tabs */} +
+ {(["paste", "form"] as const).map((m) => ( + + ))} +
+ + {mode === "paste" && ( +
+
+ + setPasteName(e.target.value)} + placeholder="my-server" + className={inputClass} + /> +
+
+ +