diff --git a/bun.lock b/bun.lock
index 9a0f1a0..cafd303 100644
--- a/bun.lock
+++ b/bun.lock
@@ -8,6 +8,7 @@
"@radix-ui/react-accordion": "^1.2.12",
"@radix-ui/react-menubar": "^1.1.16",
"@radix-ui/react-scroll-area": "^1.2.10",
+ "@radix-ui/react-slider": "^1.3.6",
"@tailwindcss/vite": "^4.2.2",
"@tauri-apps/api": "^2",
"@tauri-apps/plugin-dialog": "^2",
@@ -215,6 +216,8 @@
"@radix-ui/react-scroll-area": ["@radix-ui/react-scroll-area@1.2.10", "", { "dependencies": { "@radix-ui/number": "1.1.1", "@radix-ui/primitive": "1.1.3", "@radix-ui/react-compose-refs": "1.1.2", "@radix-ui/react-context": "1.1.2", "@radix-ui/react-direction": "1.1.1", "@radix-ui/react-presence": "1.1.5", "@radix-ui/react-primitive": "2.1.3", "@radix-ui/react-use-callback-ref": "1.1.1", "@radix-ui/react-use-layout-effect": "1.1.1" }, "peerDependencies": { "@types/react": "*", "@types/react-dom": "*", "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" }, "optionalPeers": ["@types/react", "@types/react-dom"] }, "sha512-tAXIa1g3sM5CGpVT0uIbUx/U3Gs5N8T52IICuCtObaos1S8fzsrPXG5WObkQN3S6NVl6wKgPhAIiBGbWnvc97A=="],
+ "@radix-ui/react-slider": ["@radix-ui/react-slider@1.3.6", "", { "dependencies": { "@radix-ui/number": "1.1.1", "@radix-ui/primitive": "1.1.3", "@radix-ui/react-collection": "1.1.7", "@radix-ui/react-compose-refs": "1.1.2", "@radix-ui/react-context": "1.1.2", "@radix-ui/react-direction": "1.1.1", "@radix-ui/react-primitive": "2.1.3", "@radix-ui/react-use-controllable-state": "1.2.2", "@radix-ui/react-use-layout-effect": "1.1.1", "@radix-ui/react-use-previous": "1.1.1", "@radix-ui/react-use-size": "1.1.1" }, "peerDependencies": { "@types/react": "*", "@types/react-dom": "*", "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" }, "optionalPeers": ["@types/react", "@types/react-dom"] }, "sha512-JPYb1GuM1bxfjMRlNLE+BcmBC8onfCi60Blk7OBqi2MLTFdS+8401U4uFjnwkOr49BLmXxLC6JHkvAsx5OJvHw=="],
+
"@radix-ui/react-slot": ["@radix-ui/react-slot@1.2.3", "", { "dependencies": { "@radix-ui/react-compose-refs": "1.1.2" }, "peerDependencies": { "@types/react": "*", "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" }, "optionalPeers": ["@types/react"] }, "sha512-aeNmHnBxbi2St0au6VBVC7JXFlhLlOnvIIlePNniyUNAClzmtAUEY8/pBiK3iHjufOlwA+c20/8jngo7xcrg8A=="],
"@radix-ui/react-use-callback-ref": ["@radix-ui/react-use-callback-ref@1.1.1", "", { "peerDependencies": { "@types/react": "*", "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" }, "optionalPeers": ["@types/react"] }, "sha512-FkBMwD+qbGQeMu1cOHnuGB6x4yzPjho8ap5WtbEJ26umhgqVXbhekKUQO+hZEL1vU92a3wHwdp0HAcqAUF5iDg=="],
@@ -227,6 +230,8 @@
"@radix-ui/react-use-layout-effect": ["@radix-ui/react-use-layout-effect@1.1.1", "", { "peerDependencies": { "@types/react": "*", "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" }, "optionalPeers": ["@types/react"] }, "sha512-RbJRS4UWQFkzHTTwVymMTUv8EqYhOp8dOOviLj2ugtTiXRaRQS7GLGxZTLL1jWhMeoSCf5zmcZkqTl9IiYfXcQ=="],
+ "@radix-ui/react-use-previous": ["@radix-ui/react-use-previous@1.1.1", "", { "peerDependencies": { "@types/react": "*", "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" }, "optionalPeers": ["@types/react"] }, "sha512-2dHfToCj/pzca2Ck724OZ5L0EVrr3eHRNsG/b3xQJLA2hZpVCS99bLAX+hm1IHXDEnzU6by5z/5MIY794/a8NQ=="],
+
"@radix-ui/react-use-rect": ["@radix-ui/react-use-rect@1.1.1", "", { "dependencies": { "@radix-ui/rect": "1.1.1" }, "peerDependencies": { "@types/react": "*", "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" }, "optionalPeers": ["@types/react"] }, "sha512-QTYuDesS0VtuHNNvMh+CjlKJ4LJickCMUAqjlE3+j8w+RlRpwyX3apEQKGFzbZGdo7XNG1tXa+bQqIE7HIXT2w=="],
"@radix-ui/react-use-size": ["@radix-ui/react-use-size@1.1.1", "", { "dependencies": { "@radix-ui/react-use-layout-effect": "1.1.1" }, "peerDependencies": { "@types/react": "*", "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" }, "optionalPeers": ["@types/react"] }, "sha512-ewrXRDTAqAXlkl6t/fkXWNAhFX9I+CkKlw6zjEwk86RSPKwZr3xpBRso655aqYafwtnbpHLj6toFzmd6xdVptQ=="],
diff --git a/package.json b/package.json
index 26bcbc8..dfa1d09 100644
--- a/package.json
+++ b/package.json
@@ -33,6 +33,7 @@
"@radix-ui/react-accordion": "^1.2.12",
"@radix-ui/react-menubar": "^1.1.16",
"@radix-ui/react-scroll-area": "^1.2.10",
+ "@radix-ui/react-slider": "^1.3.6",
"@tailwindcss/vite": "^4.2.2",
"@tauri-apps/api": "^2",
"@tauri-apps/plugin-dialog": "^2",
diff --git a/src-tauri/Cargo.lock b/src-tauri/Cargo.lock
index 5274bc2..6a7878f 100644
--- a/src-tauri/Cargo.lock
+++ b/src-tauri/Cargo.lock
@@ -8,6 +8,17 @@ version = "2.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "320119579fcad9c21884f5c4861d16174d0e06250625266f50fe6898340abefa"
+[[package]]
+name = "aes"
+version = "0.8.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b169f7a6d4742236a0a00c541b845991d0ac43e546831af1249753ab4c3aa3a0"
+dependencies = [
+ "cfg-if",
+ "cipher",
+ "cpufeatures",
+]
+
[[package]]
name = "aho-corasick"
version = "1.1.4"
@@ -420,6 +431,15 @@ dependencies = [
"generic-array",
]
+[[package]]
+name = "block-padding"
+version = "0.3.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a8894febbff9f758034a5b8e12d87918f56dfc64a8e1fe757d65e29041538d93"
+dependencies = [
+ "generic-array",
+]
+
[[package]]
name = "block2"
version = "0.6.2"
@@ -557,6 +577,15 @@ dependencies = [
"toml 0.9.12+spec-1.1.0",
]
+[[package]]
+name = "cbc"
+version = "0.1.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "26b52a9543ae338f279b96b0b9fed9c8093744685043739079ce85cd58f289a6"
+dependencies = [
+ "cipher",
+]
+
[[package]]
name = "cc"
version = "1.2.59"
@@ -622,6 +651,16 @@ dependencies = [
"windows-link 0.2.1",
]
+[[package]]
+name = "cipher"
+version = "0.4.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "773f3b9af64447d2ce9850330c473515014aa235e6a783b02db81ff39e4a3dad"
+dependencies = [
+ "crypto-common",
+ "inout",
+]
+
[[package]]
name = "cmake"
version = "0.1.58"
@@ -858,6 +897,35 @@ dependencies = [
"syn 2.0.117",
]
+[[package]]
+name = "dbus"
+version = "0.9.10"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "21b3aa68d7e7abee336255bd7248ea965cc393f3e70411135a6f6a4b651345d4"
+dependencies = [
+ "libc",
+ "libdbus-sys",
+ "windows-sys 0.59.0",
+]
+
+[[package]]
+name = "dbus-secret-service"
+version = "4.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "708b509edf7889e53d7efb0ffadd994cc6c2345ccb62f55cfd6b0682165e4fa6"
+dependencies = [
+ "aes",
+ "block-padding",
+ "cbc",
+ "dbus",
+ "fastrand",
+ "hkdf",
+ "num",
+ "once_cell",
+ "sha2",
+ "zeroize",
+]
+
[[package]]
name = "deranged"
version = "0.5.8"
@@ -942,6 +1010,7 @@ checksum = "9ed9a281f7bc9b7576e61468ba615a66a5c8cfdff42420a70aa82701a3b1e292"
dependencies = [
"block-buffer",
"crypto-common",
+ "subtle",
]
[[package]]
@@ -1823,6 +1892,24 @@ version = "0.4.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7f24254aa9a54b5c858eaee2f5bccdb46aaf0e486a595ed5fd8f86ba55232a70"
+[[package]]
+name = "hkdf"
+version = "0.12.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7b5f8eb2ad728638ea2c7d47a21db23b7b58a72ed6a38256b8a1849f15fbbdf7"
+dependencies = [
+ "hmac",
+]
+
+[[package]]
+name = "hmac"
+version = "0.12.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6c49c37c09c17a53d937dfbb742eb3a961d65a994e6bcdcf37e7399d0cc8ab5e"
+dependencies = [
+ "digest",
+]
+
[[package]]
name = "html5ever"
version = "0.29.1"
@@ -2169,6 +2256,16 @@ dependencies = [
"cfb",
]
+[[package]]
+name = "inout"
+version = "0.1.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "879f10e63c20629ecabbb64a8010319738c66a5cd0c29b02d63d272b03751d01"
+dependencies = [
+ "block-padding",
+ "generic-array",
+]
+
[[package]]
name = "ipnet"
version = "2.12.0"
@@ -2371,6 +2468,20 @@ dependencies = [
"unicode-segmentation",
]
+[[package]]
+name = "keyring"
+version = "3.6.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "eebcc3aff044e5944a8fbaf69eb277d11986064cba30c468730e8b9909fb551c"
+dependencies = [
+ "byteorder",
+ "dbus-secret-service",
+ "log",
+ "secret-service",
+ "windows-sys 0.60.2",
+ "zeroize",
+]
+
[[package]]
name = "kuchikiki"
version = "0.8.8-speedreader"
@@ -2419,6 +2530,15 @@ version = "0.2.184"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "48f5d2a454e16a5ea0f4ced81bd44e4cfc7bd3a507b61887c99fd3538b28e4af"
+[[package]]
+name = "libdbus-sys"
+version = "0.2.7"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "328c4789d42200f1eeec05bd86c9c13c7f091d2ba9a6ea35acdf51f31bc0f043"
+dependencies = [
+ "pkg-config",
+]
+
[[package]]
name = "libloading"
version = "0.7.4"
@@ -2651,18 +2771,95 @@ version = "1.0.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "650eef8c711430f1a879fdd01d4745a7deea475becfb90269c06775983bbf086"
+[[package]]
+name = "nix"
+version = "0.29.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "71e2746dc3a24dd78b3cfcb7be93368c6de9963d30f43a6a73998a9cf4b17b46"
+dependencies = [
+ "bitflags 2.11.0",
+ "cfg-if",
+ "cfg_aliases",
+ "libc",
+ "memoffset",
+]
+
[[package]]
name = "nodrop"
version = "0.1.14"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "72ef4a56884ca558e5ddb05a1d1e7e1bfd9a68d9ed024c21704cc98872dae1bb"
+[[package]]
+name = "num"
+version = "0.4.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "35bd024e8b2ff75562e5f34e7f4905839deb4b22955ef5e73d2fea1b9813cb23"
+dependencies = [
+ "num-bigint",
+ "num-complex",
+ "num-integer",
+ "num-iter",
+ "num-rational",
+ "num-traits",
+]
+
+[[package]]
+name = "num-bigint"
+version = "0.4.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a5e44f723f1133c9deac646763579fdb3ac745e418f2a7af9cd0c431da1f20b9"
+dependencies = [
+ "num-integer",
+ "num-traits",
+]
+
+[[package]]
+name = "num-complex"
+version = "0.4.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "73f88a1307638156682bada9d7604135552957b7818057dcef22705b4d509495"
+dependencies = [
+ "num-traits",
+]
+
[[package]]
name = "num-conv"
version = "0.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c6673768db2d862beb9b39a78fdcb1a69439615d5794a1be50caa9bc92c81967"
+[[package]]
+name = "num-integer"
+version = "0.1.46"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7969661fd2958a5cb096e56c8e1ad0444ac2bbcd0061bd28660485a44879858f"
+dependencies = [
+ "num-traits",
+]
+
+[[package]]
+name = "num-iter"
+version = "0.1.45"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1429034a0490724d0075ebb2bc9e875d6503c3cf69e235a8941aa757d83ef5bf"
+dependencies = [
+ "autocfg",
+ "num-integer",
+ "num-traits",
+]
+
+[[package]]
+name = "num-rational"
+version = "0.4.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f83d14da390562dca69fc84082e73e548e1ad308d24accdedd2720017cb37824"
+dependencies = [
+ "num-bigint",
+ "num-integer",
+ "num-traits",
+]
+
[[package]]
name = "num-traits"
version = "0.2.19"
@@ -2977,12 +3174,16 @@ version = "0.1.0"
dependencies = [
"axum",
"chrono",
+ "core-foundation 0.10.1",
+ "ctor",
"env_logger",
"fastrand",
"futures",
+ "keyring",
"log",
"regex",
"reqwest 0.13.2",
+ "security-framework",
"serde",
"serde_json",
"socket2 0.5.10",
@@ -4028,6 +4229,25 @@ version = "1.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "94143f37725109f92c262ed2cf5e59bce7498c01bcc1502d7b9afe439a4e9f49"
+[[package]]
+name = "secret-service"
+version = "4.0.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e4d35ad99a181be0a60ffcbe85d680d98f87bdc4d7644ade319b87076b9dbfd4"
+dependencies = [
+ "aes",
+ "cbc",
+ "futures-util",
+ "generic-array",
+ "hkdf",
+ "num",
+ "once_cell",
+ "rand 0.8.5",
+ "serde",
+ "sha2",
+ "zbus 4.4.0",
+]
+
[[package]]
name = "security-framework"
version = "3.7.0"
@@ -4288,6 +4508,17 @@ dependencies = [
"stable_deref_trait",
]
+[[package]]
+name = "sha1"
+version = "0.10.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e3bf829a2d51ab4a5ddf1352d8470c140cadc8301b2ae1789db023f01cedd6ba"
+dependencies = [
+ "cfg-if",
+ "cpufeatures",
+ "digest",
+]
+
[[package]]
name = "sha2"
version = "0.10.9"
@@ -4432,6 +4663,12 @@ dependencies = [
"windows-sys 0.59.0",
]
+[[package]]
+name = "static_assertions"
+version = "1.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a2eb9349b6444b326872e140eb1cf5e7c522154d69e7a0ffb0fb81c06b37543f"
+
[[package]]
name = "string_cache"
version = "0.8.9"
@@ -4839,7 +5076,7 @@ dependencies = [
"thiserror 2.0.18",
"url",
"windows",
- "zbus",
+ "zbus 5.14.0",
]
[[package]]
@@ -6539,6 +6776,16 @@ dependencies = [
"pkg-config",
]
+[[package]]
+name = "xdg-home"
+version = "1.3.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ec1cdab258fb55c0da61328dc52c8764709b249011b2cad0454c72f0bf10a1f6"
+dependencies = [
+ "libc",
+ "windows-sys 0.59.0",
+]
+
[[package]]
name = "yoke"
version = "0.8.2"
@@ -6562,6 +6809,38 @@ dependencies = [
"synstructure",
]
+[[package]]
+name = "zbus"
+version = "4.4.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "bb97012beadd29e654708a0fdb4c84bc046f537aecfde2c3ee0a9e4b4d48c725"
+dependencies = [
+ "async-broadcast",
+ "async-process",
+ "async-recursion",
+ "async-trait",
+ "enumflags2",
+ "event-listener",
+ "futures-core",
+ "futures-sink",
+ "futures-util",
+ "hex",
+ "nix",
+ "ordered-stream",
+ "rand 0.8.5",
+ "serde",
+ "serde_repr",
+ "sha1",
+ "static_assertions",
+ "tracing",
+ "uds_windows",
+ "windows-sys 0.52.0",
+ "xdg-home",
+ "zbus_macros 4.4.0",
+ "zbus_names 3.0.0",
+ "zvariant 4.2.0",
+]
+
[[package]]
name = "zbus"
version = "5.14.0"
@@ -6592,9 +6871,22 @@ dependencies = [
"uuid",
"windows-sys 0.61.2",
"winnow 0.7.15",
- "zbus_macros",
- "zbus_names",
- "zvariant",
+ "zbus_macros 5.14.0",
+ "zbus_names 4.3.1",
+ "zvariant 5.10.0",
+]
+
+[[package]]
+name = "zbus_macros"
+version = "4.4.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "267db9407081e90bbfa46d841d3cbc60f59c0351838c4bc65199ecd79ab1983e"
+dependencies = [
+ "proc-macro-crate 3.5.0",
+ "proc-macro2",
+ "quote",
+ "syn 2.0.117",
+ "zvariant_utils 2.1.0",
]
[[package]]
@@ -6607,9 +6899,20 @@ dependencies = [
"proc-macro2",
"quote",
"syn 2.0.117",
- "zbus_names",
- "zvariant",
- "zvariant_utils",
+ "zbus_names 4.3.1",
+ "zvariant 5.10.0",
+ "zvariant_utils 3.3.0",
+]
+
+[[package]]
+name = "zbus_names"
+version = "3.0.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "4b9b1fef7d021261cc16cba64c351d291b715febe0fa10dc3a443ac5a5022e6c"
+dependencies = [
+ "serde",
+ "static_assertions",
+ "zvariant 4.2.0",
]
[[package]]
@@ -6620,7 +6923,7 @@ checksum = "ffd8af6d5b78619bab301ff3c560a5bd22426150253db278f164d6cf3b72c50f"
dependencies = [
"serde",
"winnow 0.7.15",
- "zvariant",
+ "zvariant 5.10.0",
]
[[package]]
@@ -6669,6 +6972,20 @@ name = "zeroize"
version = "1.8.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b97154e67e32c85465826e8bcc1c59429aaaf107c1e4a9e53c8d8ccd5eff88d0"
+dependencies = [
+ "zeroize_derive",
+]
+
+[[package]]
+name = "zeroize_derive"
+version = "1.4.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "85a5b4158499876c763cb03bc4e49185d3cccbabb15b33c627f7884f43db852e"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn 2.0.117",
+]
[[package]]
name = "zerotrie"
@@ -6738,6 +7055,19 @@ dependencies = [
"simd-adler32",
]
+[[package]]
+name = "zvariant"
+version = "4.2.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2084290ab9a1c471c38fc524945837734fbf124487e105daec2bb57fd48c81fe"
+dependencies = [
+ "endi",
+ "enumflags2",
+ "serde",
+ "static_assertions",
+ "zvariant_derive 4.2.0",
+]
+
[[package]]
name = "zvariant"
version = "5.10.0"
@@ -6748,8 +7078,21 @@ dependencies = [
"enumflags2",
"serde",
"winnow 0.7.15",
- "zvariant_derive",
- "zvariant_utils",
+ "zvariant_derive 5.10.0",
+ "zvariant_utils 3.3.0",
+]
+
+[[package]]
+name = "zvariant_derive"
+version = "4.2.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "73e2ba546bda683a90652bac4a279bc146adad1386f25379cf73200d2002c449"
+dependencies = [
+ "proc-macro-crate 3.5.0",
+ "proc-macro2",
+ "quote",
+ "syn 2.0.117",
+ "zvariant_utils 2.1.0",
]
[[package]]
@@ -6762,7 +7105,18 @@ dependencies = [
"proc-macro2",
"quote",
"syn 2.0.117",
- "zvariant_utils",
+ "zvariant_utils 3.3.0",
+]
+
+[[package]]
+name = "zvariant_utils"
+version = "2.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c51bcff7cc3dbb5055396bcf774748c3dab426b4b8659046963523cee4808340"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn 2.0.117",
]
[[package]]
diff --git a/src-tauri/Cargo.toml b/src-tauri/Cargo.toml
index 5562436..0926ead 100644
--- a/src-tauri/Cargo.toml
+++ b/src-tauri/Cargo.toml
@@ -38,6 +38,17 @@ zip = { version = "2", default-features = false, features = ["deflate"] }
futures = "0.3.31"
regex = "1"
+[target.'cfg(target_os = "macos")'.dependencies]
+security-framework = "3"
+core-foundation = "0.10"
+
+[target.'cfg(target_os = "linux")'.dependencies]
+keyring = { version = "3", default-features = false, features = ["sync-secret-service", "crypto-rust"] }
+
+[target.'cfg(target_os = "windows")'.dependencies]
+keyring = { version = "3", default-features = false, features = ["windows-native"] }
+
[dev-dependencies]
tempfile = "3"
+ctor = "0.2"
diff --git a/src-tauri/src/app.rs b/src-tauri/src/app.rs
index cbf8ce3..c573926 100644
--- a/src-tauri/src/app.rs
+++ b/src-tauri/src/app.rs
@@ -1,7 +1,8 @@
use crate::infrastructure::http_server;
use crate::modules::bot::{commands, repository, service as bot_service};
use crate::modules::mcp::service as mcp_service;
-use crate::shared::state::AppState;
+use crate::modules::secure_store;
+use crate::shared::state::{AppState, ConnectionData};
use std::path::PathBuf;
use tauri::Manager;
@@ -23,6 +24,49 @@ pub fn run() {
.setup(|app| {
let path = store_path(app);
let (mcp_path, mcp_src) = mcp_service::resolve_mcp_config_path(&path);
+
+ // Warm unified secrets once before MCP rebuild and bot resume (avoids parallel prompts).
+ // If legacy per-bot / per-MCP items still exist, migration may touch the keychain again
+ // (a second prompt on first launch after upgrade is normal).
+ //
+ // Run on a plain `std::thread` (not `tokio::task::spawn_blocking` + `block_on`): during
+ // Tao's `did_finish_launching` the Tokio runtime may not accept blocking tasks yet; that
+ // combination panicked with `panic_cannot_unwind` on macOS and aborted the process.
+ {
+ let path_w = path.clone();
+ let mcp_path_w = mcp_path.clone();
+ match std::thread::Builder::new()
+ .name("pengine-warm-secrets".into())
+ .spawn(move || {
+ let mut warm_mig: Vec = Vec::new();
+ let meta_for_warm = repository::load(&path_w, &mut warm_mig);
+ for line in warm_mig {
+ log::info!("{line}");
+ }
+ let bot_ids: Vec = meta_for_warm
+ .as_ref()
+ .map(|m| vec![m.bot_id.clone()])
+ .unwrap_or_default();
+ let mcp_pairs = match mcp_service::load_or_init_config(&mcp_path_w) {
+ Ok(cfg) => mcp_service::catalog_passthrough_key_pairs(&cfg),
+ Err(e) => {
+ log::warn!("warm_app_secrets: skipped mcp pairs ({e})");
+ Vec::new()
+ }
+ };
+ if let Err(e) = secure_store::warm_app_secrets(&bot_ids, &mcp_pairs) {
+ log::warn!("warm_app_secrets failed: {e}");
+ }
+ }) {
+ Ok(handle) => {
+ if let Err(e) = handle.join() {
+ log::error!("warm_app_secrets thread panicked: {e:?}");
+ }
+ }
+ Err(e) => log::warn!("warm_app_secrets: could not spawn thread ({e})"),
+ }
+ }
+
let shared_state = AppState::new(path, mcp_path, mcp_src.to_string());
{
@@ -57,16 +101,45 @@ pub fn run() {
}
});
- // Resume persisted Telegram connection if present.
+ // Resume persisted Telegram connection if present (token is cached after warm_app_secrets).
let resume_state = shared_state.clone();
tauri::async_runtime::spawn(async move {
- let Some(conn) = repository::load(&resume_state.store_path) else {
+ let mut migration_log: Vec = Vec::new();
+ let meta = repository::load(&resume_state.store_path, &mut migration_log);
+ for line in migration_log {
+ resume_state.emit_log("auth", &line).await;
+ }
+ let Some(meta) = meta else {
return;
};
resume_state
- .emit_log("ok", &format!("Resuming bot @{}…", conn.bot_username))
+ .emit_log("auth", "Loading saved bot token…")
.await;
- let token = conn.bot_token.clone();
+ let token = match secure_store::load_token(&meta.bot_id) {
+ Ok(t) => t,
+ Err(e) => {
+ resume_state
+ .emit_log(
+ "auth",
+ &format!(
+ "Could not unlock stored bot token for @{}: {e}. \
+ Reconnect in the UI to save a new one.",
+ meta.bot_username
+ ),
+ )
+ .await;
+ return;
+ }
+ };
+ resume_state
+ .emit_log("ok", &format!("Resuming bot @{}…", meta.bot_username))
+ .await;
+ let conn = ConnectionData {
+ bot_token: token.clone(),
+ bot_id: meta.bot_id,
+ bot_username: meta.bot_username,
+ connected_at: meta.connected_at,
+ };
*resume_state.connection.lock().await = Some(conn);
let shutdown = resume_state.shutdown_notify.clone();
bot_service::start_bot(resume_state, token, shutdown).await;
diff --git a/src-tauri/src/infrastructure/http_server.rs b/src-tauri/src/infrastructure/http_server.rs
index 95732da..375b063 100644
--- a/src-tauri/src/infrastructure/http_server.rs
+++ b/src-tauri/src/infrastructure/http_server.rs
@@ -2,10 +2,12 @@ use crate::infrastructure::bot_lifecycle;
use crate::modules::bot::{repository, service as bot_service};
use crate::modules::mcp::service as mcp_service;
use crate::modules::ollama::service as ollama_service;
+use crate::modules::secure_store::{self, SecureStoreError};
use crate::modules::skills::service as skills_service;
use crate::modules::skills::types::{ClawHubPluginSummary, ClawHubSkill, Skill};
use crate::modules::tool_engine::{runtime as te_runtime, service as te_service};
-use crate::shared::state::{AppState, ConnectionData};
+use crate::shared::state::{AppState, ConnectionData, ConnectionMetadata};
+use crate::shared::user_settings;
use axum::extract::Query;
use axum::extract::{Path, State};
use axum::http::StatusCode;
@@ -82,6 +84,19 @@ pub struct PutOllamaModelBody {
pub model: Option,
}
+#[derive(Serialize)]
+pub struct UserSettingsResponse {
+ pub skills_hint_max_bytes: u32,
+ pub skills_hint_max_bytes_min: u32,
+ pub skills_hint_max_bytes_max: u32,
+ pub skills_hint_max_bytes_default: u32,
+}
+
+#[derive(Deserialize)]
+pub struct PutUserSettingsBody {
+ pub skills_hint_max_bytes: u32,
+}
+
pub async fn start_server(state: AppState) {
let cors = CorsLayer::new()
.allow_origin(Any)
@@ -95,6 +110,8 @@ pub async fn start_server(state: AppState) {
.route("/v1/logs", get(handle_logs_sse))
.route("/v1/ollama/models", get(handle_ollama_models))
.route("/v1/ollama/model", put(handle_ollama_model_put))
+ .route("/v1/settings", get(handle_user_settings_get))
+ .route("/v1/settings", put(handle_user_settings_put))
.route("/v1/mcp/tools", get(handle_mcp_tools))
.route("/v1/mcp/config", get(handle_mcp_config_get))
.route("/v1/mcp/filesystem", put(handle_mcp_filesystem_put))
@@ -225,7 +242,20 @@ async fn handle_connect(
bot_lifecycle::stop_and_wait_for_bot(&state).await;
- repository::persist(&state.store_path, &conn).map_err(|e| {
+ secure_store::save_token(&conn.bot_id, &conn.bot_token).map_err(|e| {
+ (
+ StatusCode::INTERNAL_SERVER_ERROR,
+ Json(ErrorResponse {
+ error: format!("could not save bot token to OS keychain: {e}"),
+ }),
+ )
+ })?;
+
+ let metadata = ConnectionMetadata::from(&conn);
+ repository::persist(&state.store_path, &metadata).map_err(|e| {
+ // Best-effort rollback so we don't leave a token in the keychain that
+ // no metadata file points to.
+ let _ = secure_store::delete_token(&conn.bot_id);
(
StatusCode::INTERNAL_SERVER_ERROR,
Json(ErrorResponse { error: e }),
@@ -262,10 +292,12 @@ async fn handle_disconnect(
) -> Result<(StatusCode, Json), (StatusCode, Json)> {
bot_lifecycle::stop_and_wait_for_bot(&state).await;
- {
+ let bot_id = {
let mut lock = state.connection.lock().await;
+ let id = lock.as_ref().map(|c| c.bot_id.clone());
*lock = None;
- }
+ id
+ };
repository::clear(&state.store_path).map_err(|e| {
(
@@ -274,6 +306,17 @@ async fn handle_disconnect(
)
})?;
+ if let Some(id) = bot_id {
+ if let Err(e) = secure_store::delete_token(&id) {
+ state
+ .emit_log(
+ "auth",
+ &format!("WARN: could not remove bot token from keychain: {e}"),
+ )
+ .await;
+ }
+ }
+
state.emit_log("ok", "Disconnected and cleared store").await;
Ok((
@@ -310,6 +353,49 @@ async fn handle_ollama_models(State(state): State) -> Json) -> Json {
+ let v = *state.skills_hint_max_bytes.read().await;
+ Json(UserSettingsResponse {
+ skills_hint_max_bytes: v,
+ skills_hint_max_bytes_min: user_settings::MIN_SKILLS_HINT_MAX_BYTES,
+ skills_hint_max_bytes_max: user_settings::MAX_SKILLS_HINT_MAX_BYTES,
+ skills_hint_max_bytes_default: user_settings::DEFAULT_SKILLS_HINT_MAX_BYTES,
+ })
+}
+
+async fn handle_user_settings_put(
+ State(state): State,
+ Json(body): Json,
+) -> Result<(StatusCode, Json), (StatusCode, Json)> {
+ let saved =
+ user_settings::save_skills_hint_max_bytes(&state.store_path, body.skills_hint_max_bytes)
+ .map_err(|e| {
+ (
+ StatusCode::INTERNAL_SERVER_ERROR,
+ Json(ErrorResponse { error: e }),
+ )
+ })?;
+ {
+ let mut w = state.skills_hint_max_bytes.write().await;
+ *w = saved;
+ }
+ state
+ .emit_log(
+ "run",
+ &format!("user settings: skills_hint_max_bytes={saved}"),
+ )
+ .await;
+ Ok((
+ StatusCode::OK,
+ Json(UserSettingsResponse {
+ skills_hint_max_bytes: saved,
+ skills_hint_max_bytes_min: user_settings::MIN_SKILLS_HINT_MAX_BYTES,
+ skills_hint_max_bytes_max: user_settings::MAX_SKILLS_HINT_MAX_BYTES,
+ skills_hint_max_bytes_default: user_settings::DEFAULT_SKILLS_HINT_MAX_BYTES,
+ }),
+ ))
+}
+
async fn handle_ollama_model_put(
State(state): State,
Json(body): Json,
@@ -506,17 +592,10 @@ async fn handle_mcp_servers_list(
)
})?
};
+ // Secrets never live in `mcp.json` — they're stored in the OS keychain and injected into
+ // argv at MCP spawn time — so nothing to redact before returning the config to the dashboard.
Ok(Json(McpServersResponse {
- servers: cfg
- .servers
- .iter()
- .map(|(k, v)| {
- (
- k.clone(),
- mcp_service::redact_mcp_server_entry_for_list_response(v),
- )
- })
- .collect(),
+ servers: cfg.servers.clone(),
}))
}
@@ -542,7 +621,7 @@ fn mcp_stdio_identity_ignores_direct_return(
args: a0,
env: e0,
private_host_path: p0,
- catalog_passthrough: t0,
+ catalog_passthrough_keys: t0,
..
},
ServerEntry::Stdio {
@@ -550,7 +629,7 @@ fn mcp_stdio_identity_ignores_direct_return(
args: a1,
env: e1,
private_host_path: p1,
- catalog_passthrough: t1,
+ catalog_passthrough_keys: t1,
..
},
) => c0 == c1 && a0 == a1 && e0 == e1 && p0 == p1 && t0 == t1,
@@ -594,7 +673,6 @@ async fn handle_mcp_server_upsert(
})?;
let old = cfg.servers.get(&name).cloned();
- let entry = mcp_service::merge_stdio_entry_preserving_redacted_secrets(old.as_ref(), entry);
if old.as_ref() == Some(&entry) {
return Ok((StatusCode::OK, Json(serde_json::json!({ "ok": true }))));
}
@@ -786,21 +864,18 @@ async fn handle_toolengine_catalog(
let k = te_service::server_key(&t.id);
match c.servers.get(&k)? {
crate::modules::mcp::types::ServerEntry::Stdio {
- catalog_passthrough,
+ catalog_passthrough_keys,
..
} => {
- let mut names: Vec = t
- .passthrough_env
+ let declared: std::collections::HashSet<&str> =
+ t.passthrough_env.iter().map(String::as_str).collect();
+ let mut names: Vec = catalog_passthrough_keys
.iter()
- .filter(|name| {
- catalog_passthrough
- .get(*name)
- .map(|v| !v.trim().is_empty())
- .unwrap_or(false)
- })
+ .filter(|name| declared.contains(name.as_str()))
.cloned()
.collect();
names.sort();
+ names.dedup();
Some(names)
}
_ => None,
@@ -1069,6 +1144,59 @@ async fn handle_toolengine_private_folder_put(
Ok((StatusCode::OK, Json(serde_json::json!({ "ok": true }))))
}
+fn rollback_passthrough_keychain_ops(tool_id: &str, applied: &[(String, Option)]) {
+ for (k, prev) in applied.iter().rev() {
+ match prev {
+ Some(s) => {
+ if let Err(e) = secure_store::save_mcp_secret(tool_id, k, s) {
+ log::warn!(
+ "passthrough keychain rollback: could not restore {tool_id}/{k}: {e}"
+ );
+ }
+ }
+ None => {
+ if let Err(e) = secure_store::delete_mcp_secret(tool_id, k) {
+ log::warn!(
+ "passthrough keychain rollback: could not delete {tool_id}/{k}: {e}"
+ );
+ }
+ }
+ }
+ }
+}
+
+async fn rollback_passthrough_config_keys(
+ state: &AppState,
+ tool_id: &str,
+ keys: &[String],
+ catalog: &crate::modules::tool_engine::types::ToolCatalog,
+ bot_id: Option,
+) -> Result<(), String> {
+ let _guard = state.mcp_config_mutex.lock().await;
+ let mut cfg = mcp_service::load_or_init_config(&state.mcp_config_path)?;
+ let srv_key = te_service::server_key(tool_id);
+ let Some(crate::modules::mcp::types::ServerEntry::Stdio {
+ catalog_passthrough_keys,
+ ..
+ }) = cfg.servers.get_mut(&srv_key)
+ else {
+ return Err("rollback: tool server missing or not stdio".into());
+ };
+ catalog_passthrough_keys.clear();
+ catalog_passthrough_keys.extend(keys.iter().cloned());
+ catalog_passthrough_keys.sort();
+ catalog_passthrough_keys.dedup();
+ let host_paths = mcp_service::filesystem_allowed_paths(&cfg);
+ te_service::sync_workspace_mounted_tools_for_catalog(
+ &mut cfg,
+ &host_paths,
+ catalog,
+ &state.mcp_config_path,
+ bot_id,
+ )?;
+ mcp_service::save_config(&state.mcp_config_path, &cfg)
+}
+
async fn handle_toolengine_passthrough_env_put(
State(state): State,
Json(body): Json,
@@ -1135,7 +1263,34 @@ async fn handle_toolengine_passthrough_env_put(
.as_ref()
.map(|c| c.bot_id.clone());
- {
+ let mut env_pairs: Vec<(String, String)> = body
+ .env
+ .iter()
+ .map(|(k, v)| (k.clone(), v.clone()))
+ .collect();
+ env_pairs.sort_by(|a, b| a.0.cmp(&b.0));
+
+ let mut prior: HashMap> = HashMap::new();
+ for (k, _) in &env_pairs {
+ match secure_store::load_mcp_secret(&tool_id, k) {
+ Ok(s) => {
+ prior.insert(k.clone(), Some(s));
+ }
+ Err(SecureStoreError::NotFound) => {
+ prior.insert(k.clone(), None);
+ }
+ Err(e) => {
+ return Err((
+ StatusCode::INTERNAL_SERVER_ERROR,
+ Json(ErrorResponse {
+ error: format!("read existing passthrough secret {k}: {e}"),
+ }),
+ ));
+ }
+ }
+ }
+
+ let keys_before: Vec = {
let _guard = state.mcp_config_mutex.lock().await;
let mut cfg = mcp_service::load_or_init_config(&state.mcp_config_path).map_err(|e| {
(
@@ -1154,18 +1309,24 @@ async fn handle_toolengine_passthrough_env_put(
));
};
- match server_ent {
+ let keys_before = match server_ent {
crate::modules::mcp::types::ServerEntry::Stdio {
- catalog_passthrough,
+ catalog_passthrough_keys,
..
} => {
- for (k, v) in &body.env {
+ let keys_before = catalog_passthrough_keys.clone();
+ let mut new_keys = keys_before.clone();
+ for (k, v) in &env_pairs {
if v.trim().is_empty() {
- catalog_passthrough.remove(k);
- } else {
- catalog_passthrough.insert(k.clone(), v.clone());
+ new_keys.retain(|stored| stored != k);
+ } else if !new_keys.iter().any(|stored| stored == k) {
+ new_keys.push(k.clone());
}
}
+ new_keys.sort();
+ new_keys.dedup();
+ *catalog_passthrough_keys = new_keys;
+ keys_before
}
_ => {
return Err((
@@ -1175,7 +1336,7 @@ async fn handle_toolengine_passthrough_env_put(
}),
));
}
- }
+ };
let host_paths = mcp_service::filesystem_allowed_paths(&cfg);
te_service::sync_workspace_mounted_tools_for_catalog(
@@ -1183,7 +1344,7 @@ async fn handle_toolengine_passthrough_env_put(
&host_paths,
&catalog,
&state.mcp_config_path,
- bot_id,
+ bot_id.clone(),
)
.map_err(|e| {
(
@@ -1198,6 +1359,61 @@ async fn handle_toolengine_passthrough_env_put(
Json(ErrorResponse { error: e }),
)
})?;
+
+ keys_before
+ };
+
+ let mut applied: Vec<(String, Option)> = Vec::new();
+ for (k, v) in &env_pairs {
+ let prev = prior.get(k).cloned().unwrap_or(None);
+ if v.trim().is_empty() {
+ if let Err(e) = secure_store::delete_mcp_secret(&tool_id, k) {
+ rollback_passthrough_keychain_ops(&tool_id, &applied);
+ if let Err(rb) = rollback_passthrough_config_keys(
+ &state,
+ &tool_id,
+ &keys_before,
+ &catalog,
+ bot_id.clone(),
+ )
+ .await
+ {
+ log::error!(
+ "passthrough env: keychain delete failed ({e}); config rollback also failed: {rb}"
+ );
+ }
+ return Err((
+ StatusCode::INTERNAL_SERVER_ERROR,
+ Json(ErrorResponse {
+ error: format!("delete passthrough secret {k} from OS keychain: {e}"),
+ }),
+ ));
+ }
+ applied.push((k.clone(), prev));
+ } else if let Err(e) = secure_store::save_mcp_secret(&tool_id, k, v) {
+ rollback_passthrough_keychain_ops(&tool_id, &applied);
+ if let Err(rb) = rollback_passthrough_config_keys(
+ &state,
+ &tool_id,
+ &keys_before,
+ &catalog,
+ bot_id.clone(),
+ )
+ .await
+ {
+ log::error!(
+ "passthrough env: keychain save failed ({e}); config rollback also failed: {rb}"
+ );
+ }
+ return Err((
+ StatusCode::INTERNAL_SERVER_ERROR,
+ Json(ErrorResponse {
+ error: format!("save passthrough secret {k} to OS keychain: {e}"),
+ }),
+ ));
+ } else {
+ applied.push((k.clone(), prev));
+ }
}
state
diff --git a/src-tauri/src/modules/bot/agent.rs b/src-tauri/src/modules/bot/agent.rs
index 5a4af28..6278912 100644
--- a/src-tauri/src/modules/bot/agent.rs
+++ b/src-tauri/src/modules/bot/agent.rs
@@ -11,6 +11,7 @@ use crate::shared::text::{
};
use chrono::Utc;
use serde_json::json;
+use std::collections::HashSet;
use std::time::{Duration, Instant};
/// Tool rounds + at least one completion-only step. Research flows (sitemap + several
@@ -25,6 +26,9 @@ const MAX_BRAVE_WEB_SEARCH_PER_USER_MESSAGE: u32 = 1;
const BRAVE_WEB_SEARCH_LIMIT_MSG: &str = "Pengine policy: at most one `brave_web_search` call per user message (cost control). \
Use the previous search result, answer without another search, or ask the user to narrow the query.";
+const FETCH_DUPLICATE_URL_MSG: &str = "Pengine policy: this URL was already fetched successfully in this user message. \
+Do not call `fetch` again for the same URL. Answer from the prior tool output (or use a different URL if the excerpt was insufficient).";
+
/// After tool results (agent step ≥1), cap completion tokens. The model should
/// put the user-visible answer in `` (see system prompt); this
/// cap bounds wall time if it drafts a long ``. ~1024 fits a
@@ -90,6 +94,21 @@ fn tool_name_is_fetch(name: &str) -> bool {
.is_some_and(|(_, tail)| tail.eq_ignore_ascii_case("fetch"))
}
+fn fetch_url_from_tool_args(args: &serde_json::Value) -> Option {
+ args.get("url")
+ .and_then(|v| v.as_str())
+ .map(str::trim)
+ .filter(|s| !s.is_empty())
+ .map(std::string::ToString::to_string)
+}
+
+/// Stable key so `https://Host/path/` and `https://host/path#x` count as one URL.
+fn fetch_url_dedup_key(url: &str) -> String {
+ let t = url.trim();
+ let no_frag = t.split('#').next().unwrap_or(t).trim_end_matches('/');
+ no_frag.to_lowercase()
+}
+
/// After `brave_web_search`, prefetch this many distinct result URLs (one search per message; extra bandwidth here is `fetch` only).
const AUTO_FETCH_TOP_URLS: usize = search_followup::DEFAULT_AUTO_FETCH_CAP;
@@ -98,9 +117,20 @@ async fn append_host_prefetch_after_brave_search(
messages: &mut serde_json::Value,
tool_results: &mut Vec<(String, String)>,
search_blob: &str,
+ fetch_urls_success: &mut HashSet,
) {
let urls = search_followup::extract_fetchable_urls(search_blob, AUTO_FETCH_TOP_URLS);
for url in urls {
+ let key = fetch_url_dedup_key(&url);
+ if fetch_urls_success.contains(&key) {
+ state
+ .emit_log(
+ "tool",
+ &format!("[host] auto-fetch skip (already fetched): {url}"),
+ )
+ .await;
+ continue;
+ }
state
.emit_log("tool", &format!("[host] auto-fetch {url}"))
.await;
@@ -115,6 +145,9 @@ async fn append_host_prefetch_after_brave_search(
Ok(t) => t,
Err(e) => format!("ERROR: {e}"),
};
+ if !text.trim_start().starts_with("ERROR:") {
+ fetch_urls_success.insert(key);
+ }
let compacted = compact_tool_output(&text);
let for_model = truncate_for_model(&compacted, TOOL_OUTPUT_CHAR_CAP);
let block_name = format!("fetch (auto: {url})");
@@ -538,8 +571,8 @@ async fn build_system_prompt(state: &AppState, has_tools: bool, has_memory: bool
};
let skills_raw = skills::skills_prompt_hint(&state.store_path);
- let (skills_hint, skills_truncated) =
- skills::limit_skills_hint_bytes(skills_raw, skills::MAX_TOTAL_SKILL_HINT_BYTES);
+ let skills_cap = *state.skills_hint_max_bytes.read().await as usize;
+ let (skills_hint, skills_truncated) = skills::limit_skills_hint_bytes(skills_raw, skills_cap);
if skills_truncated {
state
.emit_log(
@@ -547,7 +580,7 @@ async fn build_system_prompt(state: &AppState, has_tools: bool, has_memory: bool
&format!(
"skills hint truncated to {} bytes (cap {})",
skills_hint.len(),
- skills::MAX_TOTAL_SKILL_HINT_BYTES
+ skills_cap
),
)
.await;
@@ -641,6 +674,8 @@ async fn run_model_turn(
// re-enters step 0 with a fresh catalog, so it must not be treated as a
// post-tool continuation (no reminder, keep user's think/num_predict).
let mut tool_rounds: usize = 0;
+ // URLs already fetched successfully this user message (model + host auto-fetch).
+ let mut fetch_urls_success: HashSet = HashSet::new();
for step in 0..MAX_STEPS {
let t0 = Instant::now();
@@ -762,6 +797,25 @@ async fn run_model_turn(
}
}
+ {
+ let mut batch_fetch_keys = HashSet::::new();
+ for (name, res) in prepared.iter_mut() {
+ if !tool_name_is_fetch(name) {
+ continue;
+ }
+ let Ok((_, _, _, ref args)) = res else {
+ continue;
+ };
+ let Some(raw) = fetch_url_from_tool_args(args) else {
+ continue;
+ };
+ let key = fetch_url_dedup_key(&raw);
+ if fetch_urls_success.contains(&key) || !batch_fetch_keys.insert(key) {
+ *res = Err(FETCH_DUPLICATE_URL_MSG.to_string());
+ }
+ }
+ }
+
let invoked_names: Vec = prepared.iter().map(|(n, _)| n.clone()).collect();
state.note_tools_used(&invoked_names).await;
@@ -812,6 +866,15 @@ async fn run_model_turn(
}
let compacted = compact_tool_output(&text);
let for_model = truncate_for_model(&compacted, TOOL_OUTPUT_CHAR_CAP);
+ if tool_name_is_fetch(name) {
+ if let Ok((_, _, _, args)) = resolved {
+ if let Some(raw) = fetch_url_from_tool_args(args) {
+ if !text.trim_start().starts_with("ERROR:") {
+ fetch_urls_success.insert(fetch_url_dedup_key(&raw));
+ }
+ }
+ }
+ }
if name.eq_ignore_ascii_case("brave_web_search")
&& !for_model.trim_start().starts_with("ERROR:")
{
@@ -831,8 +894,14 @@ async fn run_model_turn(
tool_rounds += 1;
if let Some(blob) = last_brave_search_blob {
- append_host_prefetch_after_brave_search(state, &mut messages, &mut tool_results, &blob)
- .await;
+ append_host_prefetch_after_brave_search(
+ state,
+ &mut messages,
+ &mut tool_results,
+ &blob,
+ &mut fetch_urls_success,
+ )
+ .await;
}
if !direct_replies.is_empty() {
@@ -951,4 +1020,16 @@ mod tests {
assert!(tool_name_is_fetch("te_pengine-fetch.fetch"));
assert!(!tool_name_is_fetch("roll_dice"));
}
+
+ #[test]
+ fn fetch_url_dedup_key_ignores_fragment_and_trailing_slash() {
+ assert_eq!(
+ fetch_url_dedup_key("https://WWW.Example.COM/path/#frag"),
+ fetch_url_dedup_key("https://www.example.com/path")
+ );
+ assert_eq!(
+ fetch_url_dedup_key("https://a.example/page/"),
+ fetch_url_dedup_key("HTTPS://A.EXAMPLE/page")
+ );
+ }
}
diff --git a/src-tauri/src/modules/bot/commands.rs b/src-tauri/src/modules/bot/commands.rs
index 76e854a..6abe3b2 100644
--- a/src-tauri/src/modules/bot/commands.rs
+++ b/src-tauri/src/modules/bot/commands.rs
@@ -1,6 +1,7 @@
use crate::infrastructure::bot_lifecycle;
use crate::modules::bot::repository;
use crate::modules::keywords::all_keyword_groups;
+use crate::modules::secure_store;
use crate::shared::keywords::KeywordGroup;
use crate::shared::state::AppState;
#[cfg(desktop)]
@@ -22,11 +23,23 @@ pub async fn get_connection_status(
pub async fn disconnect_bot(state: tauri::State<'_, AppState>) -> Result {
bot_lifecycle::stop_and_wait_for_bot(&state).await;
- {
+ let bot_id = {
let mut lock = state.connection.lock().await;
+ let id = lock.as_ref().map(|c| c.bot_id.clone());
*lock = None;
- }
+ id
+ };
repository::clear(&state.store_path)?;
+ if let Some(id) = bot_id {
+ if let Err(e) = secure_store::delete_token(&id) {
+ state
+ .emit_log(
+ "auth",
+ &format!("WARN: could not remove bot token from keychain: {e}"),
+ )
+ .await;
+ }
+ }
state.emit_log("ok", "Disconnected via Tauri command").await;
Ok("disconnected".into())
}
diff --git a/src-tauri/src/modules/bot/repository.rs b/src-tauri/src/modules/bot/repository.rs
index 80c49a8..9647499 100644
--- a/src-tauri/src/modules/bot/repository.rs
+++ b/src-tauri/src/modules/bot/repository.rs
@@ -1,7 +1,15 @@
-use crate::shared::state::ConnectionData;
+//! Persistence for the Telegram connection record.
+//!
+//! Only metadata (`bot_id`, `bot_username`, `connected_at`) is written to
+//! `connection.json`. The bot token lives in the OS keychain — see
+//! `modules::secure_store`. `load` is migration-aware: if it finds a legacy file
+//! with a `bot_token` field, it moves the token to the keychain and rewrites the
+//! file in the new shape before returning the metadata.
+use crate::modules::secure_store;
+use crate::shared::state::ConnectionMetadata;
use std::path::Path;
-pub fn persist(path: &Path, data: &ConnectionData) -> Result<(), String> {
+pub fn persist(path: &Path, data: &ConnectionMetadata) -> Result<(), String> {
let json = serde_json::to_string_pretty(data).map_err(|e| e.to_string())?;
if let Some(parent) = path.parent() {
std::fs::create_dir_all(parent).map_err(|e| e.to_string())?;
@@ -10,9 +18,121 @@ pub fn persist(path: &Path, data: &ConnectionData) -> Result<(), String> {
Ok(())
}
-pub fn load(path: &Path) -> Option {
+/// Read `connection.json` and return metadata. Returns `None` if the file is
+/// missing or unparseable. Migration log lines are appended to `migration_log` so
+/// callers can emit them through `AppState::emit_log` (the repository itself is
+/// sync/filesystem-only and has no access to the bus).
+pub fn load(path: &Path, migration_log: &mut Vec) -> Option {
let json = std::fs::read_to_string(path).ok()?;
- serde_json::from_str(&json).ok()
+ let value: serde_json::Value = match serde_json::from_str(&json) {
+ Ok(v) => v,
+ Err(e) => {
+ migration_log.push(format!(
+ "Malformed connection.json: invalid JSON ({e}); expected connection metadata"
+ ));
+ return None;
+ }
+ };
+ let Some(obj) = value.as_object() else {
+ migration_log.push(
+ "Malformed connection.json: root is not a JSON object; file parsed as JSON but does \
+ not contain expected connection metadata"
+ .to_string(),
+ );
+ return None;
+ };
+
+ let mut field_issues: Vec = Vec::new();
+ match obj.get("bot_id") {
+ None => field_issues.push("`bot_id` missing".into()),
+ Some(v) if v.as_str().is_none() => field_issues.push("`bot_id` is not a string".into()),
+ _ => {}
+ }
+ match obj.get("bot_username") {
+ None => field_issues.push("`bot_username` missing".into()),
+ Some(v) if v.as_str().is_none() => {
+ field_issues.push("`bot_username` is not a string".into())
+ }
+ _ => {}
+ }
+ match obj.get("connected_at") {
+ None => field_issues.push("`connected_at` missing".into()),
+ Some(v) => match v.as_str() {
+ None => field_issues.push("`connected_at` is not a string".into()),
+ Some(s) if chrono::DateTime::parse_from_rfc3339(s).is_err() => {
+ field_issues.push("`connected_at` is not a valid RFC3339 timestamp string".into())
+ }
+ Some(_) => {}
+ },
+ }
+ if !field_issues.is_empty() {
+ migration_log.push(format!(
+ "Malformed connection.json: missing or invalid field(s) — {}; file parsed as JSON but \
+ does not contain expected connection metadata",
+ field_issues.join("; ")
+ ));
+ return None;
+ }
+
+ let bot_id = obj
+ .get("bot_id")
+ .and_then(|v| v.as_str())
+ .unwrap()
+ .to_string();
+ let bot_username = obj
+ .get("bot_username")
+ .and_then(|v| v.as_str())
+ .unwrap()
+ .to_string();
+ let connected_at = obj
+ .get("connected_at")
+ .and_then(|v| v.as_str())
+ .and_then(|s| chrono::DateTime::parse_from_rfc3339(s).ok())
+ .unwrap()
+ .with_timezone(&chrono::Utc);
+
+ if let Some(token) = obj
+ .get("bot_token")
+ .and_then(|v| v.as_str())
+ .map(str::trim)
+ .filter(|t| !t.is_empty())
+ {
+ let meta = ConnectionMetadata {
+ bot_id: bot_id.clone(),
+ bot_username: bot_username.clone(),
+ connected_at,
+ };
+ match secure_store::save_token(&bot_id, token) {
+ Ok(()) => {
+ if let Err(e) = persist(path, &meta) {
+ migration_log.push(format!(
+ "WARN: moved bot token to keychain but failed to strip plaintext \
+ from {}: {e}. Delete the file manually or the next startup will \
+ re-migrate (harmless).",
+ path.display()
+ ));
+ } else {
+ migration_log.push(
+ "Migrated plaintext bot token from connection.json to OS keychain."
+ .to_string(),
+ );
+ }
+ }
+ Err(e) => {
+ migration_log.push(format!(
+ "ERROR: could not migrate plaintext bot token to keychain: {e}. \
+ Leaving connection.json untouched; the bot will fall back to re-prompting."
+ ));
+ return None;
+ }
+ }
+ }
+
+ Some(ConnectionMetadata {
+ bot_id,
+ bot_username,
+ connected_at,
+ })
}
pub fn clear(path: &Path) -> Result<(), String> {
diff --git a/src-tauri/src/modules/keywords.rs b/src-tauri/src/modules/keywords.rs
index 1cc7fc0..82c88d7 100644
--- a/src-tauri/src/modules/keywords.rs
+++ b/src-tauri/src/modules/keywords.rs
@@ -5,6 +5,7 @@
use crate::modules::memory;
use crate::modules::ollama;
+use crate::modules::skills;
use crate::shared::keywords::KeywordGroup;
pub fn all_keyword_groups() -> Vec<&'static KeywordGroup> {
@@ -14,6 +15,7 @@ pub fn all_keyword_groups() -> Vec<&'static KeywordGroup> {
&memory::DIARY_START,
&memory::DIARY_END,
&ollama::keywords::THINK_ON,
+ &skills::keywords::EXPLICIT_WEB_SEARCH,
]
}
diff --git a/src-tauri/src/modules/mcp/registry.rs b/src-tauri/src/modules/mcp/registry.rs
index eb0224f..7457da7 100644
--- a/src-tauri/src/modules/mcp/registry.rs
+++ b/src-tauri/src/modules/mcp/registry.rs
@@ -421,11 +421,47 @@ const ROUTING_STOPWORDS: &[&str] = &[
"about",
];
-/// Tools exposed on every turn regardless of routing. Keeps the model from
-/// getting stuck when keyword matching misses (short queries, non-English
-/// intent words, typos) — a tiny token cost for a big correctness win.
+/// Tools exposed on most routed turns when keyword matching is weak or the user
+/// may still need network/time without saying so explicitly. Omitted for short,
+/// high-confidence ranked picks (e.g. “roll a dice” → `roll_dice` only) to keep
+/// the Ollama `tools` payload small — see [`should_skip_always_on_tools`].
const ALWAYS_ON_TOOL_NAMES: &[&str] = &["fetch", "time"];
+/// After ranked tools are chosen, skip appending `fetch`/`time` when the user
+/// message is short, at least one tool already matched, and nothing suggests
+/// URL fetch, weather, or memory — so trivial native calls stay a single tool.
+fn should_skip_always_on_tools(
+ user_message: &str,
+ recent_tool_names: &[String],
+ tools: &[ToolDef],
+ memory_server: Option<&str>,
+ chat_session_recording: bool,
+ ranked_nonempty: bool,
+) -> bool {
+ if !ranked_nonempty {
+ return false;
+ }
+ if chat_session_recording {
+ return false;
+ }
+ if memory_tools_relevant(
+ user_message,
+ recent_tool_names,
+ tools,
+ memory_server,
+ chat_session_recording,
+ ) {
+ return false;
+ }
+ if message_suggests_url_fetch(user_message) {
+ return false;
+ }
+ if user_message.split_whitespace().count() > 12 {
+ return false;
+ }
+ true
+}
+
fn filter_brave_web_search(mut tools: Vec, allow: bool) -> Vec {
if allow {
return tools;
@@ -589,7 +625,17 @@ fn route_tools(
}
}
- push_always_on_tools(tools, &mut selected, &mut seen);
+ let ranked_nonempty = !selected.is_empty();
+ if !should_skip_always_on_tools(
+ user_message,
+ recent_tool_names,
+ tools,
+ memory_server,
+ chat_session_recording,
+ ranked_nonempty,
+ ) {
+ push_always_on_tools(tools, &mut selected, &mut seen);
+ }
if memory_tools_relevant(
user_message,
recent_tool_names,
@@ -607,7 +653,6 @@ fn route_tools(
}
fn message_suggests_url_fetch(msg: &str) -> bool {
- let lower = msg.to_lowercase();
const HINTS: &[&str] = &[
"wetter",
"weather",
@@ -642,8 +687,24 @@ fn message_suggests_url_fetch(msg: &str) -> bool {
"readme",
"oesterreich.gv",
"bundesrecht",
+ // Short “news / headlines” queries often need `fetch`; without these,
+ // `should_skip_always_on_tools` can leave only an unrelated tool whose
+ // description happens to contain “news”.
+ "news",
+ "headlines",
+ "headline",
+ "breaking",
+ "rss",
+ "nachrichten",
+ "schlagzeilen",
+ "zeitung",
+ "presse",
+ "gameinformer",
+ "game informer",
];
- HINTS.iter().any(|h| lower.contains(h))
+ HINTS
+ .iter()
+ .any(|h| crate::modules::skills::service::user_message_needle_match(msg, h))
}
fn score_tool_combined(
@@ -901,6 +962,102 @@ mod tests {
assert!(s > 0);
}
+ #[test]
+ fn routing_gameinformer_news_includes_fetch_even_if_other_tools_match_news_token() {
+ let mut tools: Vec = (0..12)
+ .map(|i| ToolDef {
+ server_name: "srv".into(),
+ name: format!("misc_{i}"),
+ description: None,
+ input_schema: json!({}),
+ direct_return: false,
+ category: None,
+ risk: ToolRisk::Low,
+ })
+ .collect();
+ for name in ["fetch", "time"] {
+ tools.push(ToolDef {
+ server_name: "srv".into(),
+ name: name.into(),
+ description: None,
+ input_schema: json!({}),
+ direct_return: false,
+ category: None,
+ risk: ToolRisk::Low,
+ });
+ }
+ tools.push(ToolDef {
+ server_name: "alerts".into(),
+ name: "push_news_digest".into(),
+ description: Some("Configure breaking news alerts and digest frequency.".into()),
+ input_schema: json!({"type": "object"}),
+ direct_return: false,
+ category: None,
+ risk: ToolRisk::Low,
+ });
+ let plan = super::route_tools(&tools, "gameinformer news", &[], None, false);
+ match plan {
+ super::ToolRoutePlan::Subset {
+ tools: sel,
+ routing,
+ } => {
+ assert_eq!(routing, "ranked");
+ assert!(
+ sel.iter().any(|t| t.name == "fetch"),
+ "fetch must be present so the model can load the site; got {sel:?}"
+ );
+ }
+ super::ToolRoutePlan::FullCatalog => panic!("expected ranked subset"),
+ }
+ }
+
+ #[test]
+ fn routing_roll_dice_short_query_skips_always_on_tools() {
+ let mut tools: Vec = (0..12)
+ .map(|i| ToolDef {
+ server_name: "srv".into(),
+ name: format!("misc_{i}"),
+ description: None,
+ input_schema: json!({}),
+ direct_return: false,
+ category: None,
+ risk: ToolRisk::Low,
+ })
+ .collect();
+ for name in ["fetch", "time"] {
+ tools.push(ToolDef {
+ server_name: "srv".into(),
+ name: name.into(),
+ description: None,
+ input_schema: json!({}),
+ direct_return: false,
+ category: None,
+ risk: ToolRisk::Low,
+ });
+ }
+ tools.push(ToolDef {
+ server_name: "dice".into(),
+ name: "roll_dice".into(),
+ description: Some("Roll a die with the given number of sides.".into()),
+ input_schema: json!({"type": "object"}),
+ direct_return: true,
+ category: None,
+ risk: ToolRisk::Low,
+ });
+ let plan = super::route_tools(&tools, "roll a dice", &[], None, false);
+ match plan {
+ super::ToolRoutePlan::Subset {
+ tools: sel,
+ routing,
+ } => {
+ assert_eq!(routing, "ranked");
+ assert_eq!(sel.len(), 1, "{sel:?}");
+ assert_eq!(sel[0].name, "roll_dice");
+ }
+ super::ToolRoutePlan::FullCatalog => panic!("expected ranked subset"),
+ }
+ }
+
#[test]
fn routing_all_zero_scores_uses_core_tools_not_full_catalog() {
let mut tools: Vec = (0..12)
diff --git a/src-tauri/src/modules/mcp/service.rs b/src-tauri/src/modules/mcp/service.rs
index 4e9ef33..a23e645 100644
--- a/src-tauri/src/modules/mcp/service.rs
+++ b/src-tauri/src/modules/mcp/service.rs
@@ -5,7 +5,7 @@ use super::client::McpClient;
use super::native;
use super::registry::{Provider, ToolRegistry};
use super::types::{McpConfig, ServerEntry};
-use std::collections::HashMap;
+use crate::modules::secure_store;
use std::path::{Path, PathBuf};
use std::sync::Arc;
use tauri::Emitter;
@@ -48,17 +48,170 @@ pub fn resolve_mcp_config_path(store_path: &Path) -> (PathBuf, &'static str) {
pub fn read_config(path: &Path) -> Result {
let raw = std::fs::read_to_string(path).map_err(|e| format!("read mcp.json: {e}"))?;
- let mut cfg: McpConfig = serde_json::from_str(&raw).map_err(|e| {
+ let mut value: serde_json::Value = serde_json::from_str(&raw).map_err(|e| {
format!(
"parse mcp.json: {e} — every server entry needs a \"type\" field (\"native\" or \"stdio\")"
)
})?;
- if migrate_legacy_npx_filesystem(&mut cfg) {
+
+ // Must run before serde deserialises into `ServerEntry::Stdio` — the old field name
+ // (`catalog_passthrough`) no longer exists on the struct, so a plain `from_value` would
+ // silently drop any pre-migration secrets that are still sitting in `mcp.json`.
+ let migrated_passthrough = migrate_legacy_catalog_passthrough(&mut value)?;
+
+ let mut cfg: McpConfig = serde_json::from_value(value).map_err(|e| {
+ format!(
+ "parse mcp.json: {e} — every server entry needs a \"type\" field (\"native\" or \"stdio\")"
+ )
+ })?;
+ let migrated_npx = migrate_legacy_npx_filesystem(&mut cfg);
+ if migrated_passthrough || migrated_npx {
save_config(path, &cfg)?;
}
Ok(cfg)
}
+/// Derive a catalog tool id from its `mcp.json` server key (inverse of
+/// `tool_engine::service::server_key`). Returns `None` for non-catalog keys (`te_custom_*`,
+/// bare native entries, etc.) where there are no passthrough secrets to migrate or inject.
+fn tool_id_from_catalog_server_key(server_key: &str) -> Option {
+ let rest = server_key.strip_prefix("te_")?;
+ if rest.starts_with("custom_") {
+ return None;
+ }
+ Some(rest.replacen('-', "/", 1))
+}
+
+/// Every `(catalog tool id, passthrough env key)` configured in `mcp.json` — used to warm the
+/// OS keychain blob once before connecting stdio servers (one unlock instead of N).
+pub fn catalog_passthrough_key_pairs(cfg: &McpConfig) -> Vec<(String, String)> {
+ let mut out = Vec::new();
+ for (server_key, entry) in &cfg.servers {
+ let ServerEntry::Stdio {
+ catalog_passthrough_keys,
+ ..
+ } = entry
+ else {
+ continue;
+ };
+ let Some(tool_id) = tool_id_from_catalog_server_key(server_key) else {
+ continue;
+ };
+ if catalog_passthrough_keys.is_empty() {
+ continue;
+ }
+ for k in catalog_passthrough_keys {
+ let t = k.trim();
+ if !t.is_empty() {
+ out.push((tool_id.clone(), t.to_string()));
+ }
+ }
+ }
+ out
+}
+
+/// Move pre-migration `catalog_passthrough: {KEY: VAL}` secrets from `mcp.json` into the OS
+/// keychain, strip any `--env=KEY=VAL` in the stored argv for those keys, and replace the field
+/// with `catalog_passthrough_keys: [KEY, …]`. Operates on raw JSON so the serde model can drop
+/// the legacy field cleanly — serde would otherwise silently discard the secrets.
+fn migrate_legacy_catalog_passthrough(raw: &mut serde_json::Value) -> Result {
+ let Some(servers) = raw.get_mut("servers").and_then(|v| v.as_object_mut()) else {
+ return Ok(false);
+ };
+
+ let mut any_migrated = false;
+ for (server_key, server) in servers.iter_mut() {
+ let Some(tool_id) = tool_id_from_catalog_server_key(server_key) else {
+ continue;
+ };
+ let Some(obj) = server.as_object_mut() else {
+ continue;
+ };
+ if obj.get("type").and_then(|v| v.as_str()) != Some("stdio") {
+ continue;
+ }
+ let Some(legacy_val) = obj.remove("catalog_passthrough") else {
+ continue;
+ };
+ let Some(legacy_map) = legacy_val.as_object() else {
+ continue;
+ };
+ if legacy_map.is_empty() {
+ continue;
+ }
+
+ let entries: Vec<(String, serde_json::Value)> = legacy_map
+ .iter()
+ .map(|(k, v)| (k.clone(), v.clone()))
+ .collect();
+ let mut remaining = serde_json::Map::new();
+ let mut migrated_keys: Vec = Vec::new();
+ for (env_key, env_val) in entries {
+ let Some(val_str) = env_val.as_str() else {
+ remaining.insert(env_key, env_val);
+ continue;
+ };
+ if val_str.trim().is_empty() {
+ remaining.insert(env_key, serde_json::Value::String(val_str.to_string()));
+ continue;
+ }
+ match secure_store::save_mcp_secret(&tool_id, &env_key, val_str) {
+ Ok(()) => migrated_keys.push(env_key),
+ Err(e) => {
+ log::warn!(
+ "migrate legacy catalog_passthrough: could not save {tool_id}/{env_key} \
+ into OS keychain: {e}"
+ );
+ remaining.insert(env_key, serde_json::Value::String(val_str.to_string()));
+ }
+ }
+ }
+
+ let legacy_reinserted = !remaining.is_empty();
+ if legacy_reinserted {
+ obj.insert(
+ "catalog_passthrough".to_string(),
+ serde_json::Value::Object(remaining),
+ );
+ }
+
+ if let Some(args) = obj.get_mut("args").and_then(|v| v.as_array_mut()) {
+ args.retain(|arg| {
+ let Some(s) = arg.as_str() else {
+ return true;
+ };
+ let Some(rest) = s.strip_prefix("--env=") else {
+ return true;
+ };
+ let Some((name, _)) = rest.split_once('=') else {
+ return true;
+ };
+ !migrated_keys.iter().any(|k| k == name)
+ });
+ }
+
+ if !migrated_keys.is_empty() {
+ migrated_keys.sort();
+ migrated_keys.dedup();
+ obj.insert(
+ "catalog_passthrough_keys".to_string(),
+ serde_json::Value::Array(
+ migrated_keys
+ .into_iter()
+ .map(serde_json::Value::String)
+ .collect(),
+ ),
+ );
+ any_migrated = true;
+ } else if legacy_reinserted && migrated_keys.is_empty() {
+ // Legacy map was rewritten (e.g. non-string values coalesced) even though nothing
+ // reached the keychain.
+ any_migrated = true;
+ }
+ }
+ Ok(any_migrated)
+}
+
pub fn save_config(path: &Path, cfg: &McpConfig) -> Result<(), String> {
if let Some(parent) = path.parent() {
std::fs::create_dir_all(parent)
@@ -121,150 +274,57 @@ fn default_config_value() -> serde_json::Value {
})
}
-fn redact_podman_docker_env_argv(args: &[String]) -> Vec {
- args.iter()
- .map(|a| {
- let Some(rest) = a.strip_prefix("--env=") else {
- return a.clone();
- };
- let Some((name, val)) = rest.split_once('=') else {
- return a.clone();
- };
- if val.is_empty() {
- return a.clone();
- }
- format!("--env={name}=********")
- })
- .collect()
-}
-
-fn command_is_podman_or_docker(command: &str) -> bool {
- let cmd_trim = command.trim();
- std::path::Path::new(cmd_trim)
- .file_name()
- .and_then(|s| s.to_str())
- .map(|b| b == "podman" || b == "docker")
- .unwrap_or(cmd_trim == "podman" || cmd_trim == "docker")
-}
-
-/// Removes stored catalog secrets and masks `--env=…` values in `podman|docker run` argv before
-/// returning `mcp.json` over HTTP (GET `/v1/mcp/servers`).
-pub fn redact_mcp_server_entry_for_list_response(entry: &ServerEntry) -> ServerEntry {
- match entry {
- ServerEntry::Native { .. } => entry.clone(),
- ServerEntry::Stdio {
- command,
- args,
- env,
- direct_return,
- private_host_path,
- ..
- } => {
- let args = if command_is_podman_or_docker(command) {
- redact_podman_docker_env_argv(args)
- } else {
- args.clone()
- };
- ServerEntry::Stdio {
- command: command.clone(),
- args,
- env: env.clone(),
- direct_return: *direct_return,
- private_host_path: private_host_path.clone(),
- catalog_passthrough: HashMap::new(),
- }
+/// Resolve a catalog passthrough value: host `std::env` first (if set), then OS keychain.
+///
+/// Env-first avoids touching the keychain when running tests or when developers export a key
+/// for one-off runs; in normal GUI use the variable is usually unset and the keychain path runs
+/// after [`crate::modules::secure_store::warm_app_secrets`] (in-memory cache, no per-request unlock).
+fn resolve_passthrough_value(tool_id: &str, env_key: &str) -> Option {
+ if let Ok(v) = std::env::var(env_key) {
+ let v = v.trim().to_string();
+ if !v.is_empty() {
+ return Some(v);
}
}
-}
-
-const REDACTED_ENV_VALUE_PLACEHOLDER: &str = "********";
-
-fn is_redacted_podman_env_arg(token: &str) -> bool {
- let Some(rest) = token.strip_prefix("--env=") else {
- return false;
- };
- let Some((_name, val)) = rest.split_once('=') else {
- return false;
- };
- val == REDACTED_ENV_VALUE_PLACEHOLDER
-}
-
-/// Restores real `podman|docker run --env=…` argv and `catalog_passthrough` when the client PUTs
-/// a stdio entry that came from [`redact_mcp_server_entry_for_list_response`] (dashboard round-trip,
-/// e.g. toggling `direct_return`), so secrets are not replaced with `********` on disk.
-pub fn merge_stdio_entry_preserving_redacted_secrets(
- old_entry: Option<&ServerEntry>,
- entry: ServerEntry,
-) -> ServerEntry {
- let ServerEntry::Stdio {
- command,
- mut args,
- env,
- direct_return,
- private_host_path,
- mut catalog_passthrough,
- } = entry
- else {
- return entry;
- };
-
- let Some(ServerEntry::Stdio {
- args: old_args,
- catalog_passthrough: old_cp,
- ..
- }) = old_entry
- else {
- return ServerEntry::Stdio {
- command,
- args,
- env,
- direct_return,
- private_host_path,
- catalog_passthrough,
- };
- };
-
- if !command_is_podman_or_docker(&command) {
- return ServerEntry::Stdio {
- command,
- args,
- env,
- direct_return,
- private_host_path,
- catalog_passthrough,
- };
- }
-
- let mut restored_any_env = false;
- for new_a in &mut args {
- if !is_redacted_podman_env_arg(new_a.as_str()) {
- continue;
- }
- let Some(rest) = new_a.strip_prefix("--env=") else {
- continue;
- };
- let Some((name, _)) = rest.split_once('=') else {
- continue;
- };
- let prefix = format!("--env={name}=");
- if let Some(old_a) = old_args.iter().find(|a| a.starts_with(&prefix)) {
- *new_a = old_a.clone();
- restored_any_env = true;
+ match secure_store::load_mcp_secret(tool_id, env_key) {
+ Ok(v) if !v.trim().is_empty() => Some(v),
+ Ok(_) => None,
+ Err(secure_store::SecureStoreError::NotFound) => None,
+ Err(e) => {
+ log::warn!("mcp passthrough: keychain load failed for {tool_id}/{env_key}: {e}");
+ None
}
}
+}
- if restored_any_env && catalog_passthrough.is_empty() && !old_cp.is_empty() {
- catalog_passthrough = old_cp.clone();
+/// Splice `--env=KEY=VAL` flags for each passthrough key into `podman|docker run` argv at the
+/// slot just before the image reference (first non-flag arg after `run`). Keys that resolve to
+/// no value are skipped silently so the spawn still gets a chance to succeed with other env.
+fn splice_passthrough_env_into_argv(
+ argv: &[String],
+ tool_id: &str,
+ keys: &[String],
+) -> Vec {
+ if keys.is_empty() {
+ return argv.to_vec();
}
-
- ServerEntry::Stdio {
- command,
- args,
- env,
- direct_return,
- private_host_path,
- catalog_passthrough,
+ let insert_at = argv
+ .iter()
+ .enumerate()
+ .skip_while(|(_, a)| a.as_str() == "run")
+ .find(|(_, a)| !a.starts_with('-'))
+ .map(|(i, _)| i)
+ .unwrap_or(argv.len());
+
+ let mut out: Vec = Vec::with_capacity(argv.len() + keys.len());
+ out.extend_from_slice(&argv[..insert_at]);
+ for key in keys {
+ if let Some(val) = resolve_passthrough_value(tool_id, key) {
+ out.push(format!("--env={key}={val}"));
+ }
}
+ out.extend_from_slice(&argv[insert_at..]);
+ out
}
pub fn load_or_init_config(path: &Path) -> Result {
@@ -305,25 +365,34 @@ pub async fn connect_one_server(
args,
env,
direct_return,
+ catalog_passthrough_keys,
..
- } => match McpClient::connect(
- server_key.to_string(),
- command.clone(),
- args.clone(),
- env.clone(),
- *direct_return,
- )
- .await
- {
- Ok(client) => {
- let n = client.tools().len();
- let cmd_word = if n == 1 { "command" } else { "commands" };
- let dr = if *direct_return { " direct_return" } else { "" };
- let msg = format!("{server_key} stdio ({n} {cmd_word}{dr})");
- (Some(Provider::Mcp(Arc::new(client))), msg)
+ } => {
+ let spawn_args = match tool_id_from_catalog_server_key(server_key) {
+ Some(tool_id) if !catalog_passthrough_keys.is_empty() => {
+ splice_passthrough_env_into_argv(args, &tool_id, catalog_passthrough_keys)
+ }
+ _ => args.clone(),
+ };
+ match McpClient::connect(
+ server_key.to_string(),
+ command.clone(),
+ spawn_args,
+ env.clone(),
+ *direct_return,
+ )
+ .await
+ {
+ Ok(client) => {
+ let n = client.tools().len();
+ let cmd_word = if n == 1 { "command" } else { "commands" };
+ let dr = if *direct_return { " direct_return" } else { "" };
+ let msg = format!("{server_key} stdio ({n} {cmd_word}{dr})");
+ (Some(Provider::Mcp(Arc::new(client))), msg)
+ }
+ Err(e) => (None, format!("{server_key} stdio failed: {e}")),
}
- Err(e) => (None, format!("{server_key} stdio failed: {e}")),
- },
+ }
}
}
@@ -331,6 +400,11 @@ pub async fn connect_one_server(
/// Used by tests and as a one-shot rebuild path; the live runtime uses
/// [`rebuild_registry_into_state`] which publishes incrementally.
pub async fn build_mcp_providers(cfg: &McpConfig) -> (Vec, Vec) {
+ let pairs = catalog_passthrough_key_pairs(cfg);
+ if let Err(e) = secure_store::preload_mcp_passthrough_secrets(&pairs) {
+ log::warn!("mcp passthrough: keychain preload failed: {e}");
+ }
+
let mut providers = Vec::new();
let mut status = Vec::new();
@@ -473,6 +547,11 @@ pub async fn rebuild_registry_into_state(
cfg
};
+ let passthrough_pairs = catalog_passthrough_key_pairs(&cfg);
+ if let Err(e) = secure_store::preload_mcp_passthrough_secrets(&passthrough_pairs) {
+ log::warn!("mcp passthrough: keychain preload failed: {e}");
+ }
+
*state.cached_filesystem_paths.write().await = filesystem_allowed_paths(&cfg);
// Publish the registry after each *successful* connect so native tools (e.g. dice) are usable
@@ -561,76 +640,58 @@ mod tests {
}
#[test]
- fn list_response_redacts_podman_env_args_and_catalog_passthrough() {
- let entry = ServerEntry::Stdio {
- command: "podman".into(),
- args: vec![
- "run".into(),
+ fn splice_passthrough_env_inserts_before_image_ref() {
+ // Stored argv (from disk) has no passthrough --env=; the image ref is the first
+ // non-flag arg after `run`.
+ let argv = vec![
+ "run".into(),
+ "--rm".into(),
+ "-i".into(),
+ "ghcr.io/example/tool:latest".into(),
+ "--ignore-robots-txt".into(),
+ ];
+
+ std::env::set_var("TEST_PASSTHROUGH_SPLICE_KEY", "host-value");
+ let spliced = splice_passthrough_env_into_argv(
+ &argv,
+ "pengine/nonexistent-tool",
+ &["TEST_PASSTHROUGH_SPLICE_KEY".into()],
+ );
+ std::env::remove_var("TEST_PASSTHROUGH_SPLICE_KEY");
+
+ assert_eq!(
+ spliced,
+ vec![
+ "run".to_string(),
"--rm".into(),
- "--env=BRAVE_API_KEY=super-secret".into(),
+ "-i".into(),
+ "--env=TEST_PASSTHROUGH_SPLICE_KEY=host-value".into(),
+ "ghcr.io/example/tool:latest".into(),
+ "--ignore-robots-txt".into(),
],
- env: HashMap::new(),
- direct_return: false,
- private_host_path: None,
- catalog_passthrough: HashMap::from([("BRAVE_API_KEY".into(), "super-secret".into())]),
- };
- let r = redact_mcp_server_entry_for_list_response(&entry);
- let ServerEntry::Stdio {
- args,
- catalog_passthrough,
- ..
- } = r
- else {
- panic!("expected stdio");
- };
- assert!(
- args.iter().any(|a| a == "--env=BRAVE_API_KEY=********"),
- "args={args:?}"
+ "passthrough --env must land directly before the image reference"
);
- assert!(catalog_passthrough.is_empty());
}
#[test]
- fn merge_stdio_restores_redacted_env_argv_and_catalog_passthrough() {
- let old = ServerEntry::Stdio {
- command: "podman".into(),
- args: vec![
- "run".into(),
- "--rm".into(),
- "--env=BRAVE_API_KEY=real-secret".into(),
- ],
- env: HashMap::new(),
- direct_return: false,
- private_host_path: None,
- catalog_passthrough: HashMap::from([("BRAVE_API_KEY".into(), "real-secret".into())]),
- };
- let new = ServerEntry::Stdio {
- command: "podman".into(),
- args: vec![
- "run".into(),
- "--rm".into(),
- "--env=BRAVE_API_KEY=********".into(),
- ],
- env: HashMap::new(),
- direct_return: true,
- private_host_path: None,
- catalog_passthrough: HashMap::new(),
- };
- let merged = merge_stdio_entry_preserving_redacted_secrets(Some(&old), new);
- let ServerEntry::Stdio {
- args,
- catalog_passthrough,
- direct_return,
- ..
- } = merged
- else {
- panic!("expected stdio");
- };
- assert_eq!(args[2], "--env=BRAVE_API_KEY=real-secret");
+ fn splice_passthrough_env_is_noop_when_no_value_resolvable() {
+ let argv = vec!["run".into(), "--rm".into(), "img:tag".into()];
+ // Guaranteed-missing env var; keychain will also miss under the test-only tool id.
+ let spliced = splice_passthrough_env_into_argv(
+ &argv,
+ "pengine/nonexistent-tool",
+ &["DEFINITELY_NOT_SET_IN_ENV_ZZZ".into()],
+ );
+ assert_eq!(spliced, argv);
+ }
+
+ #[test]
+ fn tool_id_from_catalog_server_key_skips_custom_and_native() {
assert_eq!(
- catalog_passthrough.get("BRAVE_API_KEY").map(String::as_str),
- Some("real-secret")
+ tool_id_from_catalog_server_key("te_pengine-brave-search").as_deref(),
+ Some("pengine/brave-search")
);
- assert!(direct_return);
+ assert_eq!(tool_id_from_catalog_server_key("te_custom_my-tool"), None);
+ assert_eq!(tool_id_from_catalog_server_key("dice"), None);
}
}
diff --git a/src-tauri/src/modules/mcp/types.rs b/src-tauri/src/modules/mcp/types.rs
index fed1e6a..0a636bc 100644
--- a/src-tauri/src/modules/mcp/types.rs
+++ b/src-tauri/src/modules/mcp/types.rs
@@ -39,10 +39,12 @@ pub enum ServerEntry {
/// into the container. Defaults to `$APP_DATA/tool-data//`; user overrides land here.
#[serde(default, skip_serializing_if = "Option::is_none")]
private_host_path: Option,
- /// Persisted values for catalog `passthrough_env` keys (injected into `args` as
- /// `podman|docker run --env=…`). Not applied to the host `podman` process via `env`.
- #[serde(default, skip_serializing_if = "HashMap::is_empty")]
- catalog_passthrough: HashMap,
+ /// Catalog `passthrough_env` keys whose secret values are stored in the OS keychain
+ /// (service `com.maximedogawa.pengine.mcp_passthrough`, account `::`).
+ /// Injected into `args` as `--env=KEY=VAL` at spawn time in `connect_one_server`;
+ /// **never** written back to `mcp.json` alongside their values.
+ #[serde(default, skip_serializing_if = "Vec::is_empty")]
+ catalog_passthrough_keys: Vec,
},
}
diff --git a/src-tauri/src/modules/mod.rs b/src-tauri/src/modules/mod.rs
index dbb50e7..b994517 100644
--- a/src-tauri/src/modules/mod.rs
+++ b/src-tauri/src/modules/mod.rs
@@ -3,5 +3,6 @@ pub mod keywords;
pub mod mcp;
pub mod memory;
pub mod ollama;
+pub mod secure_store;
pub mod skills;
pub mod tool_engine;
diff --git a/src-tauri/src/modules/secure_store/keyring_impl.rs b/src-tauri/src/modules/secure_store/keyring_impl.rs
new file mode 100644
index 0000000..d1ce29f
--- /dev/null
+++ b/src-tauri/src/modules/secure_store/keyring_impl.rs
@@ -0,0 +1,51 @@
+//! Linux + Windows backend for secret storage via the `keyring` crate.
+//!
+//! - Linux: Secret Service (gnome-keyring / KWallet). The user's desktop keyring
+//! unlocks with their login password; most distros auto-unlock on session login,
+//! otherwise the Secret Service prompts the first time per session.
+//! - Windows: Credential Manager, scoped to the logged-in Windows user.
+
+use super::SecureStoreError;
+use keyring::{Entry, Error as KeyringError};
+
+fn open(service: &str, account: &str) -> Result {
+ Entry::new(service, account).map_err(map_error)
+}
+
+fn map_error(e: KeyringError) -> SecureStoreError {
+ match e {
+ KeyringError::NoEntry => SecureStoreError::NotFound,
+ KeyringError::NoStorageAccess(inner) => SecureStoreError::Backend(format!(
+ "no access to OS credential store: {inner} (is the desktop keyring \
+ running and unlocked?)"
+ )),
+ other => SecureStoreError::Backend(other.to_string()),
+ }
+}
+
+pub(super) fn save(service: &str, account: &str, value: &[u8]) -> Result<(), SecureStoreError> {
+ let entry = open(service, account)?;
+ // The keyring crate's cross-platform API takes UTF-8 strings; callers are
+ // expected to store UTF-8 (tokens, API keys). If we ever need raw bytes we
+ // can switch to `set_secret`, but today all callers pass text.
+ let text = std::str::from_utf8(value)
+ .map_err(|e| SecureStoreError::Backend(format!("secret value was not valid UTF-8: {e}")))?;
+ entry.set_password(text).map_err(map_error)
+}
+
+pub(super) fn load(service: &str, account: &str) -> Result, SecureStoreError> {
+ let entry = open(service, account)?;
+ entry
+ .get_password()
+ .map(|s| s.into_bytes())
+ .map_err(map_error)
+}
+
+pub(super) fn delete(service: &str, account: &str) -> Result<(), SecureStoreError> {
+ let entry = open(service, account)?;
+ match entry.delete_credential() {
+ Ok(()) => Ok(()),
+ Err(KeyringError::NoEntry) => Ok(()),
+ Err(e) => Err(map_error(e)),
+ }
+}
diff --git a/src-tauri/src/modules/secure_store/macos.rs b/src-tauri/src/modules/secure_store/macos.rs
new file mode 100644
index 0000000..c49f997
--- /dev/null
+++ b/src-tauri/src/modules/secure_store/macos.rs
@@ -0,0 +1,58 @@
+use security_framework::passwords::{
+ delete_generic_password, generic_password, set_generic_password, PasswordOptions,
+};
+
+use super::SecureStoreError;
+
+/// Numeric OSStatus values that security-framework-sys doesn't re-export.
+const ERR_SEC_USER_CANCELED: i32 = -128;
+const ERR_SEC_AUTH_FAILED: i32 = -25293;
+const ERR_SEC_ITEM_NOT_FOUND: i32 = -25300;
+/// `errSecDuplicateItem` — set failed because a matching item already exists.
+const ERR_SEC_DUPLICATE_ITEM: i32 = -25299;
+
+fn map_error(err: security_framework::base::Error) -> SecureStoreError {
+ match err.code() {
+ ERR_SEC_ITEM_NOT_FOUND => SecureStoreError::NotFound,
+ ERR_SEC_USER_CANCELED => SecureStoreError::UserCancelled,
+ ERR_SEC_AUTH_FAILED => SecureStoreError::AuthFailed,
+ other => SecureStoreError::Backend(format!("OSStatus {other}: {err}")),
+ }
+}
+
+/// Store generic passwords **without** `SecAccessControl` user-presence (Touch ID /
+/// passcode on **every** read/write/delete). That model caused many prompts per session.
+///
+/// Secrets still live in the user login keychain (encrypted at rest, not synced to iCloud
+/// unless the system does so for generic passwords — we use app-specific service strings).
+/// Access is gated by the macOS user session, like most desktop apps.
+///
+/// Tries `set_generic_password` first so an interrupted write does not leave the item deleted;
+/// on duplicate-item, deletes once and retries (replace semantics).
+pub(super) fn save(service: &str, account: &str, value: &[u8]) -> Result<(), SecureStoreError> {
+ match set_generic_password(service, account, value) {
+ Ok(()) => Ok(()),
+ Err(e) if e.code() == ERR_SEC_DUPLICATE_ITEM => {
+ match delete_generic_password(service, account) {
+ Ok(()) => {}
+ Err(e2) if e2.code() == ERR_SEC_ITEM_NOT_FOUND => {}
+ Err(e2) => return Err(map_error(e2)),
+ }
+ set_generic_password(service, account, value).map_err(map_error)
+ }
+ Err(e) => Err(map_error(e)),
+ }
+}
+
+pub(super) fn load(service: &str, account: &str) -> Result, SecureStoreError> {
+ let opts = PasswordOptions::new_generic_password(service, account);
+ generic_password(opts).map_err(map_error)
+}
+
+pub(super) fn delete(service: &str, account: &str) -> Result<(), SecureStoreError> {
+ match delete_generic_password(service, account) {
+ Ok(()) => Ok(()),
+ Err(e) if e.code() == ERR_SEC_ITEM_NOT_FOUND => Ok(()),
+ Err(e) => Err(map_error(e)),
+ }
+}
diff --git a/src-tauri/src/modules/secure_store/mock_store.rs b/src-tauri/src/modules/secure_store/mock_store.rs
new file mode 100644
index 0000000..00ce070
--- /dev/null
+++ b/src-tauri/src/modules/secure_store/mock_store.rs
@@ -0,0 +1,38 @@
+//! In-memory stand-in for the OS credential store. Used when `cfg!(test)` is true (unit tests in
+//! this crate) or when `PENGINE_MOCK_KEYCHAIN=1` / `true` is set (integration tests and CI).
+
+use super::SecureStoreError;
+use std::collections::HashMap;
+use std::sync::{LazyLock, Mutex};
+
+type Key = (String, String);
+
+static MOCK: LazyLock>>> = LazyLock::new(|| Mutex::new(HashMap::new()));
+
+fn key(service: &str, account: &str) -> Key {
+ (service.to_string(), account.to_string())
+}
+
+fn lock() -> Result>>, SecureStoreError> {
+ MOCK.lock()
+ .map_err(|_| SecureStoreError::Backend("mock keychain mutex poisoned".into()))
+}
+
+pub(super) fn save(service: &str, account: &str, value: &[u8]) -> Result<(), SecureStoreError> {
+ let mut g = lock()?;
+ g.insert(key(service, account), value.to_vec());
+ Ok(())
+}
+
+pub(super) fn load(service: &str, account: &str) -> Result, SecureStoreError> {
+ let g = lock()?;
+ g.get(&key(service, account))
+ .cloned()
+ .ok_or(SecureStoreError::NotFound)
+}
+
+pub(super) fn delete(service: &str, account: &str) -> Result<(), SecureStoreError> {
+ let mut g = lock()?;
+ g.remove(&key(service, account));
+ Ok(())
+}
diff --git a/src-tauri/src/modules/secure_store/mod.rs b/src-tauri/src/modules/secure_store/mod.rs
new file mode 100644
index 0000000..43340fc
--- /dev/null
+++ b/src-tauri/src/modules/secure_store/mod.rs
@@ -0,0 +1,455 @@
+//! OS-backed storage for per-bot secrets and MCP passthrough env vars.
+//!
+//! - macOS: Login keychain generic passwords (**no** per-operation Touch ID / passcode).
+//! Encrypted at rest; readable while the user is logged in. Combined with the in-memory
+//! cache after [`warm_app_secrets`], normal use does not hit the keychain on every request.
+//! - Linux / Windows: `keyring` crate (Secret Service / Credential Manager).
+//!
+//! **Single keychain item:** Bot tokens and MCP passthrough values live in **one**
+//! JSON blob (`AppSecretsV1`). Call [`warm_app_secrets`] once at startup so the rest
+//! of the session reads secrets from RAM unless you explicitly save.
+//!
+//! Legacy per-bot / per-MCP items are merged into the unified blob on first cold load when
+//! needed. We **do not** auto-delete legacy entries: deleting old ACL-protected items can
+//! trigger the same password prompt as reading them; stale legacy rows are harmless once the
+//! unified item is populated.
+//!
+//! **Tests:** Unit tests use an in-memory mock (`cfg!(test)`). Integration tests use
+//! `tests/common.rs` to set `PENGINE_MOCK_KEYCHAIN=1` so the library never touches the OS store.
+//!
+//! **First launch after upgrading** may still trigger more than one keychain prompt while the
+//! unified blob is filled from legacy items; that is expected.
+
+mod mock_store;
+
+#[cfg(target_os = "macos")]
+mod macos;
+
+#[cfg(any(target_os = "linux", target_os = "windows"))]
+mod keyring_impl;
+
+fn use_mock_store() -> bool {
+ cfg!(test) || mock_env_enabled()
+}
+
+fn mock_env_enabled() -> bool {
+ matches!(
+ std::env::var("PENGINE_MOCK_KEYCHAIN"),
+ Ok(s) if s == "1" || s.eq_ignore_ascii_case("true")
+ )
+}
+
+#[cfg(target_os = "macos")]
+fn os_save(service: &str, account: &str, value: &[u8]) -> Result<(), SecureStoreError> {
+ macos::save(service, account, value)
+}
+#[cfg(target_os = "macos")]
+fn os_load(service: &str, account: &str) -> Result, SecureStoreError> {
+ macos::load(service, account)
+}
+#[cfg(target_os = "macos")]
+fn os_delete(service: &str, account: &str) -> Result<(), SecureStoreError> {
+ macos::delete(service, account)
+}
+
+#[cfg(any(target_os = "linux", target_os = "windows"))]
+fn os_save(service: &str, account: &str, value: &[u8]) -> Result<(), SecureStoreError> {
+ keyring_impl::save(service, account, value)
+}
+#[cfg(any(target_os = "linux", target_os = "windows"))]
+fn os_load(service: &str, account: &str) -> Result, SecureStoreError> {
+ keyring_impl::load(service, account)
+}
+#[cfg(any(target_os = "linux", target_os = "windows"))]
+fn os_delete(service: &str, account: &str) -> Result<(), SecureStoreError> {
+ keyring_impl::delete(service, account)
+}
+
+#[cfg(not(any(target_os = "macos", target_os = "linux", target_os = "windows")))]
+fn os_save(_s: &str, _a: &str, _v: &[u8]) -> Result<(), SecureStoreError> {
+ Err(SecureStoreError::Unsupported)
+}
+#[cfg(not(any(target_os = "macos", target_os = "linux", target_os = "windows")))]
+fn os_load(_s: &str, _a: &str) -> Result, SecureStoreError> {
+ Err(SecureStoreError::NotFound)
+}
+#[cfg(not(any(target_os = "macos", target_os = "linux", target_os = "windows")))]
+fn os_delete(_s: &str, _a: &str) -> Result<(), SecureStoreError> {
+ Err(SecureStoreError::Unsupported)
+}
+
+fn kv_save(service: &str, account: &str, value: &[u8]) -> Result<(), SecureStoreError> {
+ if use_mock_store() {
+ mock_store::save(service, account, value)
+ } else {
+ os_save(service, account, value)
+ }
+}
+
+fn kv_load(service: &str, account: &str) -> Result, SecureStoreError> {
+ if use_mock_store() {
+ mock_store::load(service, account)
+ } else {
+ os_load(service, account)
+ }
+}
+
+fn kv_delete(service: &str, account: &str) -> Result<(), SecureStoreError> {
+ if use_mock_store() {
+ mock_store::delete(service, account)
+ } else {
+ os_delete(service, account)
+ }
+}
+
+use serde::{Deserialize, Serialize};
+use std::collections::HashMap;
+use std::sync::Mutex;
+
+/// Legacy: one keychain item per bot id.
+const BOT_TOKEN_SERVICE: &str = "com.maximedogawa.pengine.bot_token";
+/// Legacy: MCP passthrough service (per-key + old JSON blob).
+const MCP_PASSTHROUGH_SERVICE: &str = "com.maximedogawa.pengine.mcp_passthrough";
+const MCP_PASSTHROUGH_BLOB_ACCOUNT: &str = "__pengine_mcp_passthrough_blob_v1__";
+
+/// Unified store: one keychain item for the whole app secret set.
+const UNIFIED_SERVICE: &str = "com.maximedogawa.pengine.app_secrets";
+const UNIFIED_ACCOUNT: &str = "__pengine_app_secrets_v1__";
+
+#[derive(Debug, Clone, Default, Serialize, Deserialize)]
+struct AppSecretsV1 {
+ #[serde(default)]
+ bots: HashMap,
+ #[serde(default)]
+ mcp: HashMap,
+}
+
+static APP_SECRETS: Mutex> = Mutex::new(None);
+
+fn composite_key(tool_id: &str, env_key: &str) -> String {
+ format!("{tool_id}::{env_key}")
+}
+
+fn parse_unified(bytes: &[u8]) -> Result {
+ if bytes.is_empty() {
+ return Ok(AppSecretsV1::default());
+ }
+ let s = String::from_utf8(bytes.to_vec())
+ .map_err(|e| SecureStoreError::Backend(format!("secrets blob was not valid UTF-8: {e}")))?;
+ let t = s.trim();
+ if t.is_empty() {
+ return Ok(AppSecretsV1::default());
+ }
+ serde_json::from_str(t)
+ .map_err(|e| SecureStoreError::Backend(format!("secrets blob invalid JSON: {e}")))
+}
+
+fn load_unified_from_keychain() -> Result {
+ match kv_load(UNIFIED_SERVICE, UNIFIED_ACCOUNT) {
+ Ok(bytes) => parse_unified(&bytes),
+ Err(SecureStoreError::NotFound) => Ok(AppSecretsV1::default()),
+ Err(e) => Err(e),
+ }
+}
+
+fn save_unified_to_keychain(s: &AppSecretsV1) -> Result<(), SecureStoreError> {
+ if s.bots.is_empty() && s.mcp.is_empty() {
+ return kv_delete(UNIFIED_SERVICE, UNIFIED_ACCOUNT);
+ }
+ let json = serde_json::to_string(s)
+ .map_err(|e| SecureStoreError::Backend(format!("encode secrets blob: {e}")))?;
+ kv_save(UNIFIED_SERVICE, UNIFIED_ACCOUNT, json.as_bytes())
+}
+
+/// Read the pre-unification MCP JSON blob (map of `tool::KEY` → value), if present.
+fn read_legacy_mcp_blob_map() -> Result, SecureStoreError> {
+ match kv_load(MCP_PASSTHROUGH_SERVICE, MCP_PASSTHROUGH_BLOB_ACCOUNT) {
+ Ok(bytes) => {
+ if bytes.is_empty() {
+ return Ok(HashMap::new());
+ }
+ let s = String::from_utf8(bytes).map_err(|e| {
+ SecureStoreError::Backend(format!("legacy mcp blob was not valid UTF-8: {e}"))
+ })?;
+ let t = s.trim();
+ if t.is_empty() {
+ return Ok(HashMap::new());
+ }
+ serde_json::from_str(t).map_err(|e| {
+ SecureStoreError::Backend(format!("legacy mcp blob invalid JSON: {e}"))
+ })
+ }
+ Err(SecureStoreError::NotFound) => Ok(HashMap::new()),
+ Err(e) => Err(e),
+ }
+}
+
+#[derive(Clone, Copy)]
+enum LegacyScan {
+ /// First session load: may read the old MCP JSON blob once, then gap fills.
+ Full,
+ /// In-memory cache already exists: **never** re-scan the legacy MCP blob (avoids a keychain
+ /// round-trip on every MCP registry rebuild).
+ GapsOnly,
+}
+
+fn needs_legacy_gap_fetch(
+ s: &AppSecretsV1,
+ bot_ids: &[String],
+ mcp_pairs: &[(String, String)],
+) -> bool {
+ bot_ids.iter().any(|b| !s.bots.contains_key(b))
+ || mcp_pairs
+ .iter()
+ .any(|(t, k)| !s.mcp.contains_key(&composite_key(t, k)))
+}
+
+fn lock_secrets() -> Result>, SecureStoreError>
+{
+ APP_SECRETS
+ .lock()
+ .map_err(|_| SecureStoreError::Backend("app secrets mutex poisoned".into()))
+}
+
+/// Load secrets from the keychain **once**, migrate legacy items, cache in memory.
+/// Call from app startup with every known `bot_id` and MCP passthrough `(tool_id, env_key)`
+/// **before** spawning work that calls [`load_token`] or MCP connect.
+pub fn warm_app_secrets(
+ bot_ids: &[String],
+ mcp_pairs: &[(String, String)],
+) -> Result<(), SecureStoreError> {
+ let mut guard = lock_secrets()?;
+ match guard.as_mut() {
+ Some(s) => extend_from_legacy_if_missing(s, bot_ids, mcp_pairs),
+ None => {
+ let mut s = load_unified_from_keychain()?;
+ let dirty = if needs_legacy_gap_fetch(&s, bot_ids, mcp_pairs) {
+ migrate_all_legacy_into(&mut s, bot_ids, mcp_pairs, LegacyScan::Full)?
+ } else {
+ false
+ };
+ if dirty {
+ save_unified_to_keychain(&s)?;
+ }
+ *guard = Some(s);
+ Ok(())
+ }
+ }
+}
+
+/// Extend an already-warmed cache: pick up new MCP keys or bots from legacy keychain only.
+fn extend_from_legacy_if_missing(
+ s: &mut AppSecretsV1,
+ bot_ids: &[String],
+ mcp_pairs: &[(String, String)],
+) -> Result<(), SecureStoreError> {
+ if !needs_legacy_gap_fetch(s, bot_ids, mcp_pairs) {
+ return Ok(());
+ }
+ let dirty = migrate_all_legacy_into(s, bot_ids, mcp_pairs, LegacyScan::GapsOnly)?;
+ if dirty {
+ save_unified_to_keychain(s)?;
+ }
+ Ok(())
+}
+
+/// Merge legacy keychain data into `s`. Returns `true` if `s` changed.
+fn migrate_all_legacy_into(
+ s: &mut AppSecretsV1,
+ bot_ids: &[String],
+ mcp_pairs: &[(String, String)],
+ scan: LegacyScan,
+) -> Result {
+ let mut dirty = false;
+
+ if matches!(scan, LegacyScan::Full) {
+ // Whole legacy MCP JSON blob (from the first iteration of blob storage).
+ if let Ok(old_mcp) = read_legacy_mcp_blob_map() {
+ for (k, v) in old_mcp {
+ if !v.is_empty() && s.mcp.insert(k, v).is_none() {
+ dirty = true;
+ }
+ }
+ }
+ }
+
+ for bid in bot_ids {
+ if s.bots.contains_key(bid) {
+ continue;
+ }
+ match kv_load(BOT_TOKEN_SERVICE, bid) {
+ Ok(bytes) => {
+ let tok = String::from_utf8(bytes).map_err(|e| {
+ SecureStoreError::Backend(format!("legacy bot token was not valid UTF-8: {e}"))
+ })?;
+ if !tok.is_empty() {
+ s.bots.insert(bid.clone(), tok);
+ dirty = true;
+ }
+ }
+ Err(SecureStoreError::NotFound) => {}
+ Err(e) => return Err(e),
+ }
+ }
+
+ for (tool_id, env_key) in mcp_pairs {
+ let ck = composite_key(tool_id, env_key);
+ if s.mcp.contains_key(&ck) {
+ continue;
+ }
+ match kv_load(MCP_PASSTHROUGH_SERVICE, &ck) {
+ Ok(bytes) => {
+ let val = String::from_utf8(bytes).map_err(|e| {
+ SecureStoreError::Backend(format!("legacy mcp secret was not valid UTF-8: {e}"))
+ })?;
+ if !val.is_empty() {
+ s.mcp.insert(ck, val);
+ dirty = true;
+ }
+ }
+ Err(SecureStoreError::NotFound) => {}
+ Err(e) => return Err(e),
+ }
+ }
+
+ Ok(dirty)
+}
+
+/// Same as [`warm_app_secrets`] with no bot ids — used from MCP registry rebuild.
+pub fn preload_mcp_passthrough_secrets(
+ candidates: &[(String, String)],
+) -> Result<(), SecureStoreError> {
+ warm_app_secrets(&[], candidates)
+}
+
+pub fn save_token(bot_id: &str, token: &str) -> Result<(), SecureStoreError> {
+ let snapshot = {
+ let guard = lock_secrets()?;
+ let base = match guard.as_ref() {
+ Some(x) => x.clone(),
+ None => load_unified_from_keychain()?,
+ };
+ let mut next = base;
+ next.bots.insert(bot_id.to_string(), token.to_string());
+ next
+ };
+ save_unified_to_keychain(&snapshot)?;
+ let mut guard = lock_secrets()?;
+ *guard = Some(snapshot);
+ Ok(())
+}
+
+pub fn load_token(bot_id: &str) -> Result {
+ {
+ let guard = lock_secrets()?;
+ if let Some(s) = guard.as_ref() {
+ if let Some(t) = s.bots.get(bot_id) {
+ return Ok(t.clone());
+ }
+ }
+ }
+ warm_app_secrets(&[bot_id.to_string()], &[])?;
+ let guard = lock_secrets()?;
+ let s = guard
+ .as_ref()
+ .ok_or_else(|| SecureStoreError::Backend("secrets cache not initialized".into()))?;
+ s.bots
+ .get(bot_id)
+ .cloned()
+ .ok_or(SecureStoreError::NotFound)
+}
+
+pub fn delete_token(bot_id: &str) -> Result<(), SecureStoreError> {
+ let snapshot = {
+ let guard = lock_secrets()?;
+ let base = match guard.as_ref() {
+ Some(x) => x.clone(),
+ None => load_unified_from_keychain()?,
+ };
+ let mut next = base;
+ next.bots.remove(bot_id);
+ next
+ };
+ save_unified_to_keychain(&snapshot)?;
+ let mut guard = lock_secrets()?;
+ *guard = Some(snapshot);
+ Ok(())
+}
+
+pub fn save_mcp_secret(tool_id: &str, env_key: &str, value: &str) -> Result<(), SecureStoreError> {
+ let ck = composite_key(tool_id, env_key);
+ let snapshot = {
+ let guard = lock_secrets()?;
+ let base = match guard.as_ref() {
+ Some(x) => x.clone(),
+ None => load_unified_from_keychain()?,
+ };
+ let mut next = base;
+ next.mcp.insert(ck, value.to_string());
+ next
+ };
+ save_unified_to_keychain(&snapshot)?;
+ let mut guard = lock_secrets()?;
+ *guard = Some(snapshot);
+ Ok(())
+}
+
+pub fn load_mcp_secret(tool_id: &str, env_key: &str) -> Result {
+ let ck = composite_key(tool_id, env_key);
+ {
+ let guard = lock_secrets()?;
+ if let Some(s) = guard.as_ref() {
+ if let Some(v) = s.mcp.get(&ck) {
+ return Ok(v.clone());
+ }
+ }
+ }
+ warm_app_secrets(&[], &[(tool_id.to_string(), env_key.to_string())])?;
+ let guard = lock_secrets()?;
+ let s = guard
+ .as_ref()
+ .ok_or_else(|| SecureStoreError::Backend("secrets cache not initialized".into()))?;
+ s.mcp.get(&ck).cloned().ok_or(SecureStoreError::NotFound)
+}
+
+pub fn delete_mcp_secret(tool_id: &str, env_key: &str) -> Result<(), SecureStoreError> {
+ let ck = composite_key(tool_id, env_key);
+ let snapshot = {
+ let guard = lock_secrets()?;
+ let base = match guard.as_ref() {
+ Some(x) => x.clone(),
+ None => load_unified_from_keychain()?,
+ };
+ let mut next = base;
+ next.mcp.remove(&ck);
+ next
+ };
+ save_unified_to_keychain(&snapshot)?;
+ let mut guard = lock_secrets()?;
+ *guard = Some(snapshot);
+ Ok(())
+}
+
+#[derive(Debug)]
+pub enum SecureStoreError {
+ NotFound,
+ UserCancelled,
+ /// Keychain rejected access (e.g. locked, auth failed) without an explicit user cancel.
+ AuthFailed,
+ Unsupported,
+ Backend(String),
+}
+
+impl std::fmt::Display for SecureStoreError {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ match self {
+ Self::NotFound => write!(f, "secret not found"),
+ Self::UserCancelled => write!(f, "user cancelled authentication"),
+ Self::AuthFailed => write!(f, "keychain authentication failed (locked or denied)"),
+ Self::Unsupported => write!(f, "secure store not supported on this platform yet"),
+ Self::Backend(msg) => write!(f, "secure store error: {msg}"),
+ }
+ }
+}
+
+impl std::error::Error for SecureStoreError {}
diff --git a/src-tauri/src/modules/skills/keywords.rs b/src-tauri/src/modules/skills/keywords.rs
new file mode 100644
index 0000000..4289db4
--- /dev/null
+++ b/src-tauri/src/modules/skills/keywords.rs
@@ -0,0 +1,110 @@
+//! User-message phrases that mean “search the open web” for Brave gating.
+//!
+//! All **keyword-only** Brave rules live here and are registered in
+//! [`crate::modules::keywords::all_keyword_groups`]. [`super::service::allow_brave_web_search_for_message`]
+//! calls [`brave_search_allowed_by_keywords`] first; skills add their own gates via frontmatter.
+
+use crate::shared::keywords::{normalize, KeywordGroup, MatchMode};
+
+const EXPLICIT_WEB_SEARCH_EN: &[&str] = &[
+ "search the internet",
+ "search the web",
+ "web search",
+ "duckduckgo",
+];
+
+const EXPLICIT_WEB_SEARCH_DE: &[&str] = &[
+ "suche im internet",
+ "suche im internt",
+ "suche mir im internet",
+ "such mir im internet",
+ "such mir im web",
+ "such mal im internet",
+ "im internet suchen",
+ "im web suchen",
+ "finde mir im internet",
+ "seachr",
+ "internetrecherche",
+ "recherche im internet",
+ "online recherchieren",
+ "google mal",
+];
+
+/// Substrings that count as an explicit request to search the public web
+/// (exposes billed `brave_web_search` when no skill match is needed).
+pub const EXPLICIT_WEB_SEARCH: KeywordGroup = KeywordGroup {
+ id: "skills.explicit_web_search",
+ description: "User asked to search the open web; allow billed brave_web_search for this turn.",
+ mode: MatchMode::Substring,
+ phrases_by_lang: &[
+ ("en", EXPLICIT_WEB_SEARCH_EN),
+ ("de", EXPLICIT_WEB_SEARCH_DE),
+ ],
+};
+
+/// After `suche nach`, require an explicit web intent nearby (same idea as phrase list; kept
+/// separate because `suche nach` alone matches too many German sentences).
+const SUCHE_NACH_WEB_CONTEXT: &[&str] =
+ &["internet", "online", "im web", "bei google", "duckduckgo"];
+
+/// True when the message matches catalogued **search keywords** (phrase group + `suche nach` rule).
+/// Does not evaluate skills; see [`super::service::allow_brave_web_search_for_message`].
+pub fn brave_search_allowed_by_keywords(user_message: &str) -> bool {
+ if EXPLICIT_WEB_SEARCH.matches(user_message) {
+ return true;
+ }
+ let u = normalize(user_message);
+ u.contains("suche nach") && SUCHE_NACH_WEB_CONTEXT.iter().any(|t| u.contains(t))
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+
+ #[test]
+ fn german_internet_phrases_match() {
+ assert!(brave_search_allowed_by_keywords(
+ "bitte suche im Internet nach X"
+ ));
+ assert!(brave_search_allowed_by_keywords(
+ "such mir im internet rezepte"
+ ));
+ }
+
+ #[test]
+ fn english_phrases_match() {
+ assert!(brave_search_allowed_by_keywords(
+ "search the internet for penguins"
+ ));
+ }
+
+ #[test]
+ fn gameinformer_news_does_not_enable_brave_via_keywords() {
+ assert!(!brave_search_allowed_by_keywords("gameinformer news"));
+ }
+
+ #[test]
+ fn non_web_lookup_does_not_match() {
+ assert!(!brave_search_allowed_by_keywords(
+ "Suche Informationen im Österreich GV über X."
+ ));
+ }
+
+ #[test]
+ fn suche_nach_requires_web_context() {
+ assert!(!brave_search_allowed_by_keywords(
+ "suche nach something vague"
+ ));
+ assert!(brave_search_allowed_by_keywords(
+ "suche nach topic im internet"
+ ));
+ }
+
+ #[test]
+ fn explicit_group_matches_same_as_brave_keywords_helper() {
+ assert_eq!(
+ EXPLICIT_WEB_SEARCH.matches("search the web for x"),
+ brave_search_allowed_by_keywords("search the web for x")
+ );
+ }
+}
diff --git a/src-tauri/src/modules/skills/mod.rs b/src-tauri/src/modules/skills/mod.rs
index 8cf2a98..03f7dc9 100644
--- a/src-tauri/src/modules/skills/mod.rs
+++ b/src-tauri/src/modules/skills/mod.rs
@@ -1,2 +1,3 @@
+pub mod keywords;
pub mod service;
pub mod types;
diff --git a/src-tauri/src/modules/skills/service.rs b/src-tauri/src/modules/skills/service.rs
index d55592d..15da92c 100644
--- a/src-tauri/src/modules/skills/service.rs
+++ b/src-tauri/src/modules/skills/service.rs
@@ -101,17 +101,23 @@ pub fn set_skill_enabled(store_path: &Path, slug: &str, enabled: bool) -> Result
write_disabled_set(store_path, &set)
}
-/// Per-skill body cap in the system-prompt hint. Keeps the prompt short so local
+/// Per-skill body cap in the system-prompt hint. Keeps the prompt bounded so local
/// models re-read it cheaply on each turn. Skills needing more detail should
-/// front-load the critical URL/recipe in the first ~1000 chars; use `mandatory.md` beside `SKILL.md` for rules that must not truncate away.
-pub const SKILL_HINT_BODY_CAP: usize = 1000;
+/// front-load the critical scope gate / URL / recipe at the top of `SKILL.md`.
+pub const SKILL_HINT_BODY_CAP: usize = 2200;
-/// Hard cap for the full skills fragment (intro + every enabled skill body + mandatory snippets).
-pub const MAX_TOTAL_SKILL_HINT_BYTES: usize = SKILL_HINT_BODY_CAP * 8;
+/// Default cap for the full skills fragment (intro + bodies + mandatory snippets), aligned with
+/// [`crate::shared::user_settings::DEFAULT_SKILLS_HINT_MAX_BYTES`]. The **runtime** limit is
+/// [`crate::shared::state::AppState::skills_hint_max_bytes`] (see `bot::agent` turns).
+pub const DEFAULT_SKILL_HINT_BYTES: usize =
+ crate::shared::user_settings::DEFAULT_SKILLS_HINT_MAX_BYTES as usize;
+
+/// `mandatory.md` is still high-signal but must not balloon the system prompt unchecked.
+const SKILL_MANDATORY_HINT_CAP: usize = 1200;
const SKILL_HINT_INTRO: &str = "\n\nSkills: follow each recipe exactly — \
it lists WHICH URL and HOW MANY calls. Stop when you can answer; \
-don't probe alternate hosts. Prefer **`fetch`** whenever you have a concrete URL; use **`brave_web_search`** only when the recipe lists it in `requires` (and this turn matches that skill) or the user explicitly asked to search the open web.\n\
+don't probe alternate hosts. Unless a skill’s **`mandatory.md`** says otherwise, prefer **`fetch`** whenever you have a concrete URL; use **`brave_web_search`** when the recipe lists it in `requires` (and this turn matches that skill), when **`mandatory.md`** orders it, or when the user explicitly asked to search the open web.\n\
Portal- or government-specific skills you install yourself apply **only** when the user is clearly asking about that jurisdiction’s government, law, official forms, or public administration — \
not for recipes, hobbies, general knowledge, software, or unrelated chit-chat. If the topic does not match the skill’s scope, ignore that recipe entirely.";
@@ -139,8 +145,9 @@ pub fn skills_prompt_hint(store_path: &Path) -> String {
if let Some(m) = &s.mandatory_hint {
let m = m.trim();
if !m.is_empty() {
+ let m = truncate_for_prompt(m, SKILL_MANDATORY_HINT_CAP);
out.push_str("\n\n");
- out.push_str(m);
+ out.push_str(&m);
}
}
}
@@ -278,48 +285,64 @@ pub fn parse_skill(slug: &str, raw: &str, origin: SkillOrigin) -> Result<Skill, String> {
-fn user_explicitly_requests_web_search(user_message: &str) -> bool {
- let u = user_message.to_lowercase();
- const PHRASES: &[&str] = &[
- "search the internet",
- "search the web",
- "suche im internet",
- "suche im internt",
- "suche mir im internet",
- "such mir im internet",
- "such mir im web",
- "such mal im internet",
- "im internet suchen",
- "im web suchen",
- "finde mir im internet",
- "suche im internet",
- "seachr",
- "web search",
- "internetrecherche",
- "recherche im internet",
- "online recherchieren",
- "google mal",
- "duckduckgo",
- ];
- if PHRASES.iter().any(|p| u.contains(p)) {
- return true;
+/// Lowercase text with ä/ö/ü/ß folded to ASCII digraphs so `oesterreich` matches `Österreich`.
+fn german_ascii_fold(lower: &str) -> String {
+ let mut o = String::with_capacity(lower.len() + 4);
+ for c in lower.chars() {
+ match c {
+ 'ä' => o.push_str("ae"),
+ 'ö' => o.push_str("oe"),
+ 'ü' => o.push_str("ue"),
+ 'ß' => o.push_str("ss"),
+ _ => o.push(c),
+ }
}
- // "suche nach" alone matches too many German sentences; require an explicit web intent nearby.
- if u.contains("suche nach")
- && (u.contains("internet")
- || u.contains("online")
- || u.contains("im web")
- || u.contains("bei google")
- || u.contains("duckduckgo"))
- {
+ o
+}
+
+fn alphanumeric_token_match(haystack_lower: &str, needle_lower: &str) -> bool {
+ haystack_lower
+ .split(|c: char| !c.is_alphanumeric())
+ .filter(|t| !t.is_empty())
+ .any(|t| t == needle_lower)
+}
+
+/// `needle` is already lowercased. Short needles use alphanumeric token equality (avoids `rss` ⊆
+/// `progress`); longer needles match as substring, with a German-fold fallback path.
+pub(crate) fn user_text_covers_token(
+ user_lower: &str,
+ user_folded: &str,
+ needle_lower: &str,
+) -> bool {
+ if needle_lower.is_empty() {
+ return false;
+ }
+ if needle_lower.len() <= 4 {
+ let needle_folded = german_ascii_fold(needle_lower);
+ return alphanumeric_token_match(user_lower, needle_lower)
+ || (!needle_folded.is_empty()
+ && alphanumeric_token_match(user_folded, &needle_folded));
+ }
+ if user_lower.contains(needle_lower) {
return true;
}
- false
+ let needle_folded = german_ascii_fold(needle_lower);
+ user_folded.contains(&needle_folded)
+}
+
+/// Lowercase + fold helper for callers outside this module (e.g. MCP tool ranking).
+pub(crate) fn user_message_needle_match(user_message: &str, needle: &str) -> bool {
+ let needle_lower = needle.trim().to_lowercase();
+ if needle_lower.is_empty() {
+ return false;
+ }
+ let u = user_message.to_lowercase();
+ let u_fold = german_ascii_fold(&u);
+ user_text_covers_token(&u, &u_fold, &needle_lower)
}
/// Tags this generic must not alone enable billed web search (e.g. "news" ⊆ "gameinformer news").
-const BRAVE_TAG_DENYLIST: &[&str] = &[
+pub(crate) const BRAVE_TAG_DENYLIST: &[&str] = &[
"news", "info", "help", "guide", "tips", "blog", "home", "page", "data", "list", "links",
"link", "tool", "tools", "apps", "app", "media", "site", "sites", "world", "daily", "live",
];
@@ -333,8 +356,10 @@ fn skill_triggers_brave_web_search(skill: &Skill, user_message: &str) -> bool {
return false;
}
let u = user_message.to_lowercase();
+ let u_fold = german_ascii_fold(&u);
for sub in &skill.brave_allow_substrings {
- if sub.len() >= 3 && u.contains(&sub.to_lowercase()) {
+ let sl = sub.to_lowercase();
+ if sl.len() >= 3 && user_text_covers_token(&u, &u_fold, &sl) {
return true;
}
}
@@ -345,18 +370,19 @@ fn skill_triggers_brave_web_search(skill: &Skill, user_message: &str) -> bool {
if BRAVE_TAG_DENYLIST.iter().any(|g| g.eq_ignore_ascii_case(t)) {
continue;
}
- if u.contains(&t.to_lowercase()) {
+ let tl = t.to_lowercase();
+ if user_text_covers_token(&u, &u_fold, &tl) {
return true;
}
}
false
}
-/// Expose the billed `brave_web_search` tool only when a skill lists it in `requires` and the
-/// user message matches that skill’s `brave_allow_substrings` / tags, or when the user uses an
-/// explicit “search the internet”-style phrase.
+/// Expose the billed `brave_web_search` tool when catalogued **search keywords** match
+/// ([`super::keywords::brave_search_allowed_by_keywords`]) or when an enabled skill’s
+/// `requires` / `brave_allow_substrings` / tags gate this turn.
pub fn allow_brave_web_search_for_message(store_path: &Path, user_message: &str) -> bool {
- if user_explicitly_requests_web_search(user_message) {
+ if super::keywords::brave_search_allowed_by_keywords(user_message) {
return true;
}
list_skills(store_path)
@@ -955,64 +981,4 @@ mod tests {
"expected answer-style reminder in:\n{hint}"
);
}
-
- #[test]
- fn allow_brave_from_explicit_web_search_phrase() {
- let tmp = tempdir().unwrap();
- let fake_store = tmp.path().join("connection.json");
- assert!(allow_brave_web_search_for_message(
- &fake_store,
- "bitte suche im Internet nach X"
- ));
- assert!(allow_brave_web_search_for_message(
- &fake_store,
- "search the internet for penguins"
- ));
- assert!(allow_brave_web_search_for_message(
- &fake_store,
- "suche mir im internet rezepte für einen Apfelstrudel"
- ));
- }
-
- #[test]
- fn allow_brave_when_skill_requires_and_substring_matches() {
- let tmp = tempdir().unwrap();
- let fake_store = tmp.path().join("connection.json");
- let md = "---\nname: t\ndescription: d\ntags: [gov]\nrequires: [brave_web_search]\nbrave_allow_substrings: [widgets]\n---\n\nbody\n";
- write_custom_skill(&fake_store, "t", md).unwrap();
- assert!(!allow_brave_web_search_for_message(
- &fake_store,
- "hello world"
- ));
- assert!(allow_brave_web_search_for_message(
- &fake_store,
- "tell me about widgets"
- ));
- }
-
- #[test]
- fn allow_brave_not_enabled_by_generic_news_tag() {
- let tmp = tempdir().unwrap();
- let fake_store = tmp.path().join("connection.json");
- let md = "---\nname: t\ndescription: d\ntags: [news, gaming]\nrequires: [brave_web_search]\n---\n\nbody\n";
- write_custom_skill(&fake_store, "t", md).unwrap();
- assert!(!allow_brave_web_search_for_message(
- &fake_store,
- "gameinformer news"
- ));
- }
-
- #[test]
- fn suche_nach_requires_web_context() {
- let tmp = tempdir().unwrap();
- let fake_store = tmp.path().join("connection.json");
- assert!(!allow_brave_web_search_for_message(
- &fake_store,
- "suche nach gameinformer"
- ));
- assert!(allow_brave_web_search_for_message(
- &fake_store,
- "suche nach gameinformer im internet"
- ));
- }
}
diff --git a/src-tauri/src/modules/skills/types.rs b/src-tauri/src/modules/skills/types.rs
index 7b10aa5..6fa1413 100644
--- a/src-tauri/src/modules/skills/types.rs
+++ b/src-tauri/src/modules/skills/types.rs
@@ -30,8 +30,11 @@ pub struct Skill {
pub license: Option<String>,
#[serde(default)]
pub requires: Vec<String>,
- /// If `requires` lists `brave_web_search`, optional substrings (case-insensitive) that must
- /// appear in the user message before that tool is exposed — in addition to `tags` (length ≥4).
+ /// If `requires` lists `brave_web_search`, optional substrings (case-insensitive; ä/ö/ü/ß
+ /// folded for matching) that must appear in the user message before that tool is exposed —
+ /// in addition to `tags` (length ≥6, not in [`super::service::BRAVE_TAG_DENYLIST`] in
+ /// `skills/service.rs`, used by `skill_triggers_brave_web_search` and covered in
+ /// `tests/skills_brave_gate.rs`).
#[serde(default)]
pub brave_allow_substrings: Vec<String>,
#[serde(default)]
diff --git a/src-tauri/src/modules/tool_engine/runtime.rs b/src-tauri/src/modules/tool_engine/runtime.rs
index 2351b61..37c7b2b 100644
--- a/src-tauri/src/modules/tool_engine/runtime.rs
+++ b/src-tauri/src/modules/tool_engine/runtime.rs
@@ -32,8 +32,12 @@ async fn try_runtime(binary_name: &str, kind: RuntimeKind) -> Option Option {
+ // Use `--version` instead of `version --format=…`: `podman version` talks to the machine
+ // socket and fails with "Cannot connect to Podman" when the VM is stopped, even though the
+ // CLI is installed. `docker version` can also exit non-zero when the daemon is down while
+ // still printing the client version. `--version` is client-only and succeeds if the binary exists.
let output = tokio::process::Command::new(path)
- .args(["version", "--format", "{{.Client.Version}}"])
+ .arg("--version")
.stdout(std::process::Stdio::piped())
.stderr(std::process::Stdio::null())
.output()
@@ -44,7 +48,8 @@ async fn try_runtime_at(path: &Path, kind: RuntimeKind) -> Option {
return None;
}
- let version = String::from_utf8_lossy(&output.stdout).trim().to_string();
+ let raw = String::from_utf8_lossy(&output.stdout);
+ let version = parse_dash_version(kind, &raw)?.to_string();
if version.is_empty() {
return None;
}
@@ -64,6 +69,33 @@ async fn try_runtime_at(path: &Path, kind: RuntimeKind) -> Option {
})
}
+/// Parse `podman --version` / `docker --version` stdout into a semver-ish version token.
+fn parse_dash_version(kind: RuntimeKind, stdout: &str) -> Option<&str> {
+ let line = stdout.lines().next()?.trim();
+ let mut parts = line.split_whitespace();
+ match kind {
+ RuntimeKind::Podman => {
+ if parts.next()? != "podman" {
+ return None;
+ }
+ if parts.next()? != "version" {
+ return None;
+ }
+ parts.next()
+ }
+ RuntimeKind::Docker => {
+ if !parts.next()?.eq_ignore_ascii_case("docker") {
+ return None;
+ }
+ if !parts.next()?.eq_ignore_ascii_case("version") {
+ return None;
+ }
+ let ver = parts.next()?;
+ Some(ver.trim_end_matches(','))
+ }
+ }
+}
+
async fn check_docker_rootless(binary: &Path) -> bool {
let output = tokio::process::Command::new(binary)
.args(["info", "--format", "{{.SecurityOptions}}"])
@@ -77,3 +109,38 @@ async fn check_docker_rootless(binary: &Path) -> bool {
.map(|o| String::from_utf8_lossy(&o.stdout).contains("rootless"))
.unwrap_or(false)
}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+
+ #[test]
+ fn parse_dash_version_podman() {
+ assert_eq!(
+ parse_dash_version(RuntimeKind::Podman, "podman version 5.8.1\n"),
+ Some("5.8.1")
+ );
+ assert_eq!(
+ parse_dash_version(RuntimeKind::Podman, "Docker version 29.0.0\n"),
+ None
+ );
+ }
+
+ #[test]
+ fn parse_dash_version_docker() {
+ assert_eq!(
+ parse_dash_version(
+ RuntimeKind::Docker,
+ "Docker version 29.3.1, build c2be9ccfc3\n"
+ ),
+ Some("29.3.1")
+ );
+ assert_eq!(
+ parse_dash_version(
+ RuntimeKind::Docker,
+ "docker version 24.0.6, build ed223bc\n"
+ ),
+ Some("24.0.6")
+ );
+ }
+}
diff --git a/src-tauri/src/modules/tool_engine/service.rs b/src-tauri/src/modules/tool_engine/service.rs
index 38fc642..aa53977 100644
--- a/src-tauri/src/modules/tool_engine/service.rs
+++ b/src-tauri/src/modules/tool_engine/service.rs
@@ -30,38 +30,19 @@ const TE_CUSTOM_PREFIX: &str = "te_custom_";
/// In-image workspace stub when no host folders are mounted yet.
pub const EMPTY_WORKSPACE_CONTAINER_ROOT: &str = "/tmp";
-fn filter_stored_catalog_passthrough(
- entry: &ToolEntry,
- stored: &HashMap<String, String>,
-) -> HashMap<String, String> {
- let mut out = HashMap::new();
- for name in &entry.passthrough_env {
- if let Some(v) = stored.get(name) {
- if !v.trim().is_empty() {
- out.insert(name.clone(), v.clone());
- }
- }
- }
- out
-}
-
-/// Values for `podman|docker run --env=…`: `mcp.json` `catalog_passthrough` first, else host `std::env`.
-fn merged_passthrough_for_container(
- entry: &ToolEntry,
- stored: &HashMap<String, String>,
-) -> HashMap<String, String> {
- let filtered = filter_stored_catalog_passthrough(entry, stored);
- let mut out = HashMap::new();
- for name in &entry.passthrough_env {
- if let Some(v) = filtered.get(name) {
- out.insert(name.clone(), v.clone());
- } else if let Ok(v) = std::env::var(name) {
- let t = v.trim().to_string();
- if !t.is_empty() {
- out.insert(name.clone(), t);
- }
+/// Keep only the passthrough keys that the catalog still declares (sorted, deduped).
+/// Called after loading stored `catalog_passthrough_keys` from `mcp.json` so we drop keys the
+/// catalog has removed; the associated keychain entries are purged on uninstall.
+fn filter_catalog_passthrough_keys(entry: &ToolEntry, stored: &[String]) -> Vec<String> {
+ let declared: HashSet<&str> = entry.passthrough_env.iter().map(String::as_str).collect();
+ let mut seen: HashSet<&str> = HashSet::new();
+ let mut out: Vec<String> = Vec::new();
+ for name in stored {
+ if declared.contains(name.as_str()) && seen.insert(name.as_str()) {
+ out.push(name.clone());
}
}
+ out.sort();
out
}
@@ -282,7 +263,7 @@ fn catalog_tool_stdio_eq(a: &ServerEntry, b: &ServerEntry) -> bool {
env: e1,
direct_return: d1,
private_host_path: p1,
- catalog_passthrough: t1,
+ catalog_passthrough_keys: t1,
},
ServerEntry::Stdio {
command: c2,
@@ -290,7 +271,7 @@ fn catalog_tool_stdio_eq(a: &ServerEntry, b: &ServerEntry) -> bool {
env: e2,
direct_return: d2,
private_host_path: p2,
- catalog_passthrough: t2,
+ catalog_passthrough_keys: t2,
},
) => c1 == c2 && a1 == a2 && e1 == e2 && d1 == d2 && p1 == p2 && t1 == t2,
_ => false,
@@ -298,8 +279,9 @@ fn catalog_tool_stdio_eq(a: &ServerEntry, b: &ServerEntry) -> bool {
}
/// Rebuild argv for one installed catalog tool from `mcp.json` + catalog entry.
-/// Container env for catalog tools is baked into argv via `podman run --env=…`. User-provided
-/// secrets live in `catalog_passthrough`; `env` is for legacy stdio (e.g. `npx`) only.
+/// Container env for catalog tools (workspace binds, private-folder bind + path env) is baked into
+/// argv here. Passthrough secrets are **not** baked in — they live in the OS keychain and are
+/// injected at spawn time by `connect_one_server`.
fn rebuild_installed_catalog_tool_stdio(
entry: &ToolEntry,
host_paths: &[String],
@@ -311,7 +293,7 @@ fn rebuild_installed_catalog_tool_stdio(
command,
direct_return,
private_host_path,
- catalog_passthrough,
+ catalog_passthrough_keys,
..
} = prev
else {
@@ -336,9 +318,7 @@ fn rebuild_installed_catalog_tool_stdio(
_ => None,
};
- let stored = filter_stored_catalog_passthrough(entry, catalog_passthrough);
- let merged = merged_passthrough_for_container(entry, &stored);
- let args = podman_run_argv_for_tool(entry, host_paths, private_bind.as_ref(), &merged)?;
+ let args = podman_run_argv_for_tool(entry, host_paths, private_bind.as_ref())?;
Ok(Some(ServerEntry::Stdio {
command: command.clone(),
@@ -346,7 +326,7 @@ fn rebuild_installed_catalog_tool_stdio(
env: HashMap::new(),
direct_return: *direct_return,
private_host_path: private_host_path.clone(),
- catalog_passthrough: stored,
+ catalog_passthrough_keys: filter_catalog_passthrough_keys(entry, catalog_passthrough_keys),
}))
}
@@ -393,11 +373,14 @@ pub fn workspace_app_bind_pairs(host_paths: &[String]) -> Vec<(String, String)>
/// Full `podman|docker run …` argv (excluding the runtime binary) for a catalog tool entry.
/// The image reference is `image@digest` (digest-pinned).
+///
+/// Passthrough-env secrets are **not** emitted here. They live in the OS keychain and are
+/// spliced in as `--env=KEY=VAL` at spawn time by `mcp::service::connect_one_server` — see
+/// [`splice_passthrough_env_into_argv`] for the exact insertion point.
pub fn podman_run_argv_for_tool(
entry: &ToolEntry,
host_paths: &[String],
private_bind: Option<&PrivateBind<'_>>,
- passthrough: &HashMap<String, String>,
) -> Result<Vec<String>, String> {
if entry.append_workspace_roots && !entry.mount_workspace {
return Err("catalog: append_workspace_roots requires mount_workspace".into());
@@ -451,10 +434,6 @@ pub fn podman_run_argv_for_tool(
args.push(format!("--env={k}={v}"));
}
- for (name, value) in passthrough {
- args.push(format!("--env={name}={value}"));
- }
-
args.push(image_ref);
args.extend(entry.mcp_server_cmd.iter().cloned());
if entry.ignore_robots_txt {
@@ -759,26 +738,25 @@ pub async fn install_tool(
};
let key = server_key(tool_id);
- let stored = cfg
+ let stored_keys: Vec<String> = cfg
.servers
.get(&key)
.and_then(|e| {
if let ServerEntry::Stdio {
- catalog_passthrough,
+ catalog_passthrough_keys,
..
} = e
{
- Some(filter_stored_catalog_passthrough(
+ Some(filter_catalog_passthrough_keys(
entry,
- catalog_passthrough,
+ catalog_passthrough_keys,
))
} else {
None
}
})
.unwrap_or_default();
- let merged = merged_passthrough_for_container(entry, &stored);
- let args = podman_run_argv_for_tool(entry, &host_paths, private_bind.as_ref(), &merged)?;
+ let args = podman_run_argv_for_tool(entry, &host_paths, private_bind.as_ref())?;
let server_entry = ServerEntry::Stdio {
command: runtime.binary.clone(),
@@ -786,7 +764,7 @@ pub async fn install_tool(
env: HashMap::new(),
direct_return: entry.direct_return,
private_host_path: None,
- catalog_passthrough: stored,
+ catalog_passthrough_keys: stored_keys,
};
cfg.servers.insert(key, server_entry);
@@ -862,20 +840,36 @@ pub async fn uninstall_tool(
) -> Result<(), String> {
let key = server_key(tool_id);
let mut installed_image_ref: Option<String> = None;
+ let mut passthrough_keys_to_purge: Vec<String> = Vec::new();
if mcp_config_path.exists() {
let _cfg_guard = mcp_cfg_lock.lock().await;
let mut cfg = mcp_service::read_config(mcp_config_path)?;
- if let Some(ServerEntry::Stdio { args, .. }) = cfg.servers.get(&key) {
+ if let Some(ServerEntry::Stdio {
+ args,
+ catalog_passthrough_keys,
+ ..
+ }) = cfg.servers.get(&key)
+ {
installed_image_ref = args
.iter()
.skip_while(|a| *a == "run")
.find(|a| !a.starts_with('-'))
.cloned();
+ passthrough_keys_to_purge = catalog_passthrough_keys.clone();
}
cfg.servers.remove(&key);
mcp_service::save_config(mcp_config_path, &cfg)?;
}
+ // Remove any keychain secrets associated with this tool's passthrough_env.
+ // Non-fatal: a stale keychain entry after uninstall is a minor cleanup issue,
+ // not a reason to fail the user-visible uninstall.
+ for env_key in &passthrough_keys_to_purge {
+ if let Err(e) = crate::modules::secure_store::delete_mcp_secret(tool_id, env_key) {
+ log::warn!("uninstall: could not delete keychain secret for {tool_id}/{env_key}: {e}");
+ }
+ }
+
let image_ref = match installed_image_ref {
Some(r) => Some(r),
None => load_catalog()
@@ -1028,7 +1022,7 @@ pub async fn add_custom_tool(
env: HashMap::new(),
direct_return: entry.direct_return,
private_host_path: None,
- catalog_passthrough: HashMap::new(),
+ catalog_passthrough_keys: Vec::new(),
};
cfg.servers
@@ -1076,7 +1070,7 @@ pub fn sync_custom_tools_if_installed(cfg: &mut McpConfig, host_paths: &[String]
env,
direct_return,
private_host_path,
- catalog_passthrough,
+ catalog_passthrough_keys,
}) = cfg.servers.get(&key)
else {
continue;
@@ -1093,7 +1087,7 @@ pub fn sync_custom_tools_if_installed(cfg: &mut McpConfig, host_paths: &[String]
env: env.clone(),
direct_return: *direct_return,
private_host_path: private_host_path.clone(),
- catalog_passthrough: catalog_passthrough.clone(),
+ catalog_passthrough_keys: catalog_passthrough_keys.clone(),
};
cfg.servers.insert(key, new_entry);
changed = true;
@@ -1104,7 +1098,6 @@ pub fn sync_custom_tools_if_installed(cfg: &mut McpConfig, host_paths: &[String]
#[cfg(test)]
mod tests {
use super::*;
- use std::collections::HashMap;
use tempfile::tempdir;
#[test]
@@ -1172,7 +1165,7 @@ mod tests {
.find(|v| v.version == fm.current)
.unwrap();
assert_eq!(ver.digest, "sha256:placeholder");
- let argv = podman_run_argv_for_tool(fm, &[], None, &HashMap::new()).expect("argv");
+ let argv = podman_run_argv_for_tool(fm, &[], None).expect("argv");
let tagged = format!("{}:{}", fm.image, fm.current);
let image_ref = argv
.iter()
@@ -1205,7 +1198,7 @@ mod tests {
config: pf,
bot_id: "12345",
};
- let argv = podman_run_argv_for_tool(mem, &[], Some(&pb), &HashMap::new()).expect("argv");
+ let argv = podman_run_argv_for_tool(mem, &[], Some(&pb)).expect("argv");
let want_mount = format!(
"-v={}:/mcp/data:rw",
diff --git a/src-tauri/src/shared/keywords.rs b/src-tauri/src/shared/keywords.rs
index 3d76197..4dd5151 100644
--- a/src-tauri/src/shared/keywords.rs
+++ b/src-tauri/src/shared/keywords.rs
@@ -2,8 +2,9 @@
//!
//! Feature modules declare their own [`KeywordGroup`]s next to the domain that
//! owns them (e.g. memory-session commands live in `modules/memory`, AI-model
-//! control cues in `modules/ollama/keywords.rs`). This module only provides
-//! the shape and the matcher — no domain knowledge.
+//! control cues in `modules/ollama/keywords.rs`, Brave gating phrases in
+//! `modules/skills/keywords.rs`). This module only provides the shape and the
+//! matcher — no domain knowledge.
//!
//! Translation guide: each group carries one `(lang, &[phrases])` row per
//! language. To add Spanish phrases, add them to the `("es", &[...])` entry
diff --git a/src-tauri/src/shared/mod.rs b/src-tauri/src/shared/mod.rs
index a8779ba..ca2a9a4 100644
--- a/src-tauri/src/shared/mod.rs
+++ b/src-tauri/src/shared/mod.rs
@@ -1,3 +1,4 @@
pub mod keywords;
pub mod state;
pub mod text;
+pub mod user_settings;
diff --git a/src-tauri/src/shared/state.rs b/src-tauri/src/shared/state.rs
index 9c81ee0..f9cba00 100644
--- a/src-tauri/src/shared/state.rs
+++ b/src-tauri/src/shared/state.rs
@@ -1,6 +1,7 @@
use crate::modules::mcp::registry::ToolRegistry;
use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize};
+use std::fmt;
use std::path::PathBuf;
use std::sync::Arc;
use tauri::Emitter;
@@ -9,7 +10,10 @@ use tokio::sync::{Mutex, Notify, RwLock};
const RECENT_TOOLS_CAP: usize = 32;
const TOOL_CTX_LATENCY_CAP: usize = 128;
-#[derive(Debug, Clone, Serialize, Deserialize)]
+/// In-memory connection record. Holds the plaintext bot token while the bot runs.
+/// Not serializable on purpose — the token must never reach disk. Use
+/// `ConnectionMetadata` for anything persisted to `connection.json`.
+#[derive(Clone)]
pub struct ConnectionData {
pub bot_token: String,
pub bot_id: String,
@@ -17,6 +21,36 @@ pub struct ConnectionData {
pub connected_at: DateTime<Utc>,
}
+impl fmt::Debug for ConnectionData {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.debug_struct("ConnectionData")
+ .field("bot_token", &"")
+ .field("bot_id", &self.bot_id)
+ .field("bot_username", &self.bot_username)
+ .field("connected_at", &self.connected_at)
+ .finish()
+ }
+}
+
+/// Persisted shape of `connection.json`. The bot token lives in the OS keychain and
+/// is loaded on demand via `modules::secure_store`.
+#[derive(Debug, Clone, Serialize, Deserialize)]
+pub struct ConnectionMetadata {
+ pub bot_id: String,
+ pub bot_username: String,
+ pub connected_at: DateTime<Utc>,
+}
+
+impl From<&ConnectionData> for ConnectionMetadata {
+ fn from(c: &ConnectionData) -> Self {
+ Self {
+ bot_id: c.bot_id.clone(),
+ bot_username: c.bot_username.clone(),
+ connected_at: c.connected_at,
+ }
+ }
+}
+
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct LogEntry {
pub timestamp: String,
@@ -54,10 +88,13 @@ pub struct AppState {
pub recent_tool_names: Arc<Mutex<Vec<String>>>,
/// Milliseconds spent in tool subset selection (rolling, for p95 logs).
pub tool_ctx_latency_ms: Arc>>,
+ /// Max UTF-8 bytes for the combined skills system-prompt fragment (dashboard / `user_settings.json`).
+ pub skills_hint_max_bytes: Arc<RwLock<u32>>,
}
impl AppState {
pub fn new(store_path: PathBuf, mcp_config_path: PathBuf, mcp_config_source: String) -> Self {
+ let skills_cap = crate::shared::user_settings::load_skills_hint_max_bytes(&store_path);
let (log_tx, _) = tokio::sync::broadcast::channel(256);
Self {
connection: Arc::new(Mutex::new(None)),
@@ -77,6 +114,7 @@ impl AppState {
memory_session: Arc::new(RwLock::new(None)),
recent_tool_names: Arc::new(Mutex::new(Vec::new())),
tool_ctx_latency_ms: Arc::new(Mutex::new(Vec::new())),
+ skills_hint_max_bytes: Arc::new(RwLock::new(skills_cap)),
}
}
diff --git a/src-tauri/src/shared/text.rs b/src-tauri/src/shared/text.rs
index f8f1179..90f2232 100644
--- a/src-tauri/src/shared/text.rs
+++ b/src-tauri/src/shared/text.rs
@@ -63,7 +63,8 @@ pub const PENGINE_POST_TOOL_REMINDER: &str = "\
You have tool output. Respond in the user's language. REQUIRED: put ONLY the user-visible answer inside \
... . Put any English or meta reasoning ONLY inside ... . \
Do not narrate tool usage, skills, or planning in plain text; no sentences outside those tags. \
-If several `fetch` results are present, some may show robots.txt or User-Agent blocks — still use any successful excerpts; do not tell the user that nothing could be retrieved when other blocks contain usable text.";
+If several `fetch` results are present, some may show robots.txt or User-Agent blocks — still use any successful excerpts; do not tell the user that nothing could be retrieved when other blocks contain usable text. \
+Do not call `fetch` again for the same URL in this turn; if you already have an excerpt for a URL, answer from it or pick a different URL.";
fn looks_like_english_scratchpad(s: &str) -> bool {
s.contains("Okay, let's")
diff --git a/src-tauri/src/shared/user_settings.rs b/src-tauri/src/shared/user_settings.rs
new file mode 100644
index 0000000..30452a3
--- /dev/null
+++ b/src-tauri/src/shared/user_settings.rs
@@ -0,0 +1,71 @@
+//! Optional preferences stored next to `connection.json` as `user_settings.json`.
+
+use serde::{Deserialize, Serialize};
+use std::path::{Path, PathBuf};
+
+/// Default cap for the combined skills fragment in the system prompt (bytes, UTF-8).
+pub const DEFAULT_SKILLS_HINT_MAX_BYTES: u32 = 10 * 1024;
+pub const MIN_SKILLS_HINT_MAX_BYTES: u32 = 4 * 1024;
+pub const MAX_SKILLS_HINT_MAX_BYTES: u32 = 256 * 1024;
+
+#[derive(Debug, Clone, Serialize, Deserialize, Default)]
+struct UserSettingsFile {
+ #[serde(default)]
+ skills_hint_max_bytes: Option<u32>,
+}
+
+/// Path to `user_settings.json` (same directory as `connection.json`).
+pub fn user_settings_path(connection_json: &Path) -> PathBuf {
+ connection_json
+ .parent()
+ .map(|p| p.join("user_settings.json"))
+ .unwrap_or_else(|| PathBuf::from("user_settings.json"))
+}
+
+pub fn clamp_skills_hint_max_bytes(v: u32) -> u32 {
+ v.clamp(MIN_SKILLS_HINT_MAX_BYTES, MAX_SKILLS_HINT_MAX_BYTES)
+}
+
+pub fn load_skills_hint_max_bytes(connection_json: &Path) -> u32 {
+ let p = user_settings_path(connection_json);
+ let Ok(raw) = std::fs::read_to_string(p) else {
+ return DEFAULT_SKILLS_HINT_MAX_BYTES;
+ };
+ let parsed: UserSettingsFile = serde_json::from_str(&raw).unwrap_or_default();
+ clamp_skills_hint_max_bytes(
+ parsed
+ .skills_hint_max_bytes
+ .unwrap_or(DEFAULT_SKILLS_HINT_MAX_BYTES),
+ )
+}
+
+pub fn save_skills_hint_max_bytes(connection_json: &Path, value: u32) -> Result<u32, String> {
+ let v = clamp_skills_hint_max_bytes(value);
+ let p = user_settings_path(connection_json);
+ let mut parsed: UserSettingsFile = std::fs::read_to_string(&p)
+ .ok()
+ .and_then(|s| serde_json::from_str(&s).ok())
+ .unwrap_or_default();
+ parsed.skills_hint_max_bytes = Some(v);
+ let json = serde_json::to_string_pretty(&parsed).map_err(|e| e.to_string())?;
+ if let Some(dir) = p.parent() {
+ std::fs::create_dir_all(dir).map_err(|e| e.to_string())?;
+ }
+ std::fs::write(&p, json).map_err(|e| e.to_string())?;
+ Ok(v)
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+
+ #[test]
+ fn clamp_skills_hint_respects_bounds() {
+ assert_eq!(clamp_skills_hint_max_bytes(100), MIN_SKILLS_HINT_MAX_BYTES);
+ assert_eq!(
+ clamp_skills_hint_max_bytes(999_999),
+ MAX_SKILLS_HINT_MAX_BYTES
+ );
+ assert_eq!(clamp_skills_hint_max_bytes(12_000), 12_000);
+ }
+}
diff --git a/src-tauri/tests/common/mod.rs b/src-tauri/tests/common/mod.rs
new file mode 100644
index 0000000..f837aff
--- /dev/null
+++ b/src-tauri/tests/common/mod.rs
@@ -0,0 +1,10 @@
+//! Shared integration-test setup: avoid real OS credential stores (see `secure_store::mock_store`).
+
+// `#[ctor]` runs before `main` in an unspecified order relative to other ctors; `set_var` is not
+// documented as thread-safe, so this assumes single-threaded ctor execution.
+#[ctor::ctor]
+fn enable_mock_keychain() {
+ if std::env::var_os("PENGINE_MOCK_KEYCHAIN").is_none() {
+ std::env::set_var("PENGINE_MOCK_KEYCHAIN", "1");
+ }
+}
diff --git a/src-tauri/tests/mcp_tools.rs b/src-tauri/tests/mcp_tools.rs
index cdfca3d..8835c70 100644
--- a/src-tauri/tests/mcp_tools.rs
+++ b/src-tauri/tests/mcp_tools.rs
@@ -1,5 +1,7 @@
//! Integration tests for MCP tooling.
+mod common;
+
use pengine_lib::modules::mcp::registry::ToolRegistry;
use pengine_lib::modules::mcp::{native, service};
use serde_json::json;
diff --git a/src-tauri/tests/skills_brave_gate.rs b/src-tauri/tests/skills_brave_gate.rs
new file mode 100644
index 0000000..cf0e976
--- /dev/null
+++ b/src-tauri/tests/skills_brave_gate.rs
@@ -0,0 +1,46 @@
+//! Brave gating from **skills** (frontmatter). Keyword-only rules are tested in
+//! `modules/skills/keywords.rs`.
+
+mod common;
+
+use pengine_lib::modules::skills::service::{
+ allow_brave_web_search_for_message, write_custom_skill,
+};
+use tempfile::tempdir;
+
+#[test]
+fn brave_allowed_when_skill_substring_matches_umlaut_user_text() {
+ let tmp = tempdir().unwrap();
+ let store = tmp.path().join("connection.json");
+ let md = "---\nname: t\ndescription: d\ntags: [toolong]\nrequires: [brave_web_search]\nbrave_allow_substrings: [oesterreich]\n---\n\nbody\n";
+ write_custom_skill(&store, "bravegate-a", md).unwrap();
+ assert!(allow_brave_web_search_for_message(
+ &store,
+ "Kurz zu Österreich und Pension"
+ ));
+}
+
+#[test]
+fn brave_allowed_when_skill_requires_and_substring_matches() {
+ let tmp = tempdir().unwrap();
+ let store = tmp.path().join("connection.json");
+ let md = "---\nname: t\ndescription: d\ntags: [gov]\nrequires: [brave_web_search]\nbrave_allow_substrings: [widgets]\n---\n\nbody\n";
+ write_custom_skill(&store, "bravegate-b", md).unwrap();
+ assert!(!allow_brave_web_search_for_message(&store, "hello world"));
+ assert!(allow_brave_web_search_for_message(
+ &store,
+ "tell me about widgets"
+ ));
+}
+
+#[test]
+fn brave_not_enabled_by_generic_news_tag() {
+ let tmp = tempdir().unwrap();
+ let store = tmp.path().join("connection.json");
+ let md = "---\nname: t\ndescription: d\ntags: [news, gaming]\nrequires: [brave_web_search]\n---\n\nbody\n";
+ write_custom_skill(&store, "bravegate-c", md).unwrap();
+ assert!(!allow_brave_web_search_for_message(
+ &store,
+ "gameinformer news"
+ ));
+}
diff --git a/src/modules/bot/components/SetupWizardSteps.tsx b/src/modules/bot/components/SetupWizardSteps.tsx
index 1de94ff..96778c1 100644
--- a/src/modules/bot/components/SetupWizardSteps.tsx
+++ b/src/modules/bot/components/SetupWizardSteps.tsx
@@ -105,6 +105,40 @@ ollama pull qwen3:8b`}
tool-calling support.
+
+ Start Ollama when you log in
+
+
+ After a reboot, Pengine needs the Ollama API on{" "}
+ {OLLAMA_API_BASE} again. Pick
+ the option that matches how you installed Ollama.
+
+
+
+ macOS (Ollama.app): System Settings → General →
+ Login Items & Extensions → Open at Login → add{" "}
+ Ollama .
+
+
+ macOS (Homebrew): run{" "}
+ brew services start ollama {" "}
+ so the service restarts at login.
+
+
+ Linux: if{" "}
+ systemctl status ollama {" "}
+ works, run{" "}
+
+ sudo systemctl enable --now ollama
+
+ .
+
+
+ Windows: Settings → Apps → Startup → enable{" "}
+ Ollama .
+
+
+
{ollamaChecking && (
Detecting Ollama…
)}
@@ -208,6 +242,55 @@ podman machine start
# Linux (Debian/Ubuntu)
sudo apt install podman`}
+
+ Optional — start Podman Machine on login (macOS)
+
+
+ The Podman Linux VM does not start by itself after you reboot. You can use a
+ LaunchAgent so podman machine start runs when
+ you sign in. Edit the podman path if you use
+ Intel Homebrew (/usr/local/bin/podman ).
+
+
+ {`
+
+
+
+ Label
+ io.podman.machine.start
+ ProgramArguments
+
+ /opt/homebrew/bin/podman
+ machine
+ start
+
+ RunAtLoad
+
+
+ `}
+
+
+ {`mkdir -p ~/Library/LaunchAgents
+# Save the XML above as ~/Library/LaunchAgents/io.podman.machine.start.plist
+launchctl bootstrap gui/$(id -u) ~/Library/LaunchAgents/io.podman.machine.start.plist`}
+
+
+ Signing out and back in also loads new LaunchAgents. To stop autostart:{" "}
+
+ launchctl bootout gui/$(id -u)/io.podman.machine.start
+
+ .
+
+
+ Linux: most installs talk to the host kernel
+ directly (no Podman Machine). If you use a rootless API socket, your distro may
+ document{" "}
+
+ systemctl --user enable --now podman.socket
+
+ .
+
@@ -220,6 +303,13 @@ sudo apt install podman`}
# or use the convenience script:
curl -fsSL https://get.docker.com | sh`}
+
+ Docker Desktop can start on login from{" "}
+
+ Settings → General → Start Docker Desktop when you log in
+
+ .
+
@@ -322,6 +412,36 @@ export function WizardStepPengineLocal(props: {
The Pengine desktop app must be running on this machine. It hosts the bot service on
localhost so messages keep flowing even after you close this browser tab.
+
+
+ Open Pengine when you log in
+
+
+ The local HTTP server runs inside the desktop app. If you reboot and only open this
+ browser tab, Telegram will stay disconnected until the app is running again.
+
+
+
+ macOS: System Settings → General → Login Items
+ & Extensions → Open at Login → add pengine {" "}
+ from Applications (the bundle is{" "}
+ pengine.app
+ ).
+
+
+ Linux: use your desktop environment's{" "}
+ Startup Applications (or{" "}
+ ~/.config/autostart/ ) to
+ run the pengine binary or the{" "}
+ .desktop file from your
+ install.
+
+
+ Windows: Settings → Apps → Startup → enable{" "}
+ pengine .
+
+
+
Checking {PENGINE.health}…
diff --git a/src/modules/mcp/components/McpServerCard.tsx b/src/modules/mcp/components/McpServerCard.tsx
index a963e4f..98bf031 100644
--- a/src/modules/mcp/components/McpServerCard.tsx
+++ b/src/modules/mcp/components/McpServerCard.tsx
@@ -13,6 +13,8 @@ import {
envToOtherLinesText,
extractPrimarySecretEnvKey,
} from "../mcpEnvHelpers";
+import { FolderHelper } from "./McpServerCardFolderHelper";
+import { TeFileManagerMountPanel, TePrivateDataFolderPanel } from "./McpServerCardTePanels";
/** `pengine/memory` → `te_pengine-memory` (matches Rust `server_key`). */
function teServerKeyForToolId(toolId: string): string {
@@ -465,97 +467,29 @@ function InlineEditForm({
{isTeFileManager && (
-
-
- Shared folders (File Manager container mounts)
-
-
- After File Manager is installed, add paths here (or install it first from Tool Engine
- with an empty list). Each folder mounts as{" "}
- /app/<name>. Apply updates{" "}
- workspace_roots in{" "}
- mcp.json and closes the editor.
-
- {tePaths.length > 0 && (
-
- {tePaths.map((p, i) => (
-
- {teAppMounts[i] ?? ""}
- ←
- {p}
-
- ))}
-
- )}
-
void pickTeFolder()}
- />
- {teApplyError && (
-
- {teApplyError}
-
- )}
- void applyTeFolders()}
- className="mt-3 rounded-lg border border-emerald-300/30 bg-emerald-300/15 px-3 py-1.5 font-mono text-[11px] text-emerald-100 hover:bg-emerald-300/25 disabled:opacity-40"
- >
- {teApplyBusy ? "Applying…" : "Apply folders"}
-
-
+
void pickTeFolder()}
+ onApply={() => void applyTeFolders()}
+ />
)}
{tePrivateToolId && (
-
-
- Private data folder (host)
-
-
- This tool keeps state on disk in a single host directory (bind-mounted into the
- container). Use Choose folder or paste a path, then Apply — same idea as File
- Manager's shared folders, but only for this tool's data file(s).
-
- {tePrivatePickError && (
-
- {tePrivatePickError}
-
- )}
-
- setTePrivatePathInput(e.target.value)}
- placeholder="/path/to/memory-data"
- className="min-w-0 flex-1 rounded-md border border-white/15 bg-white/5 px-2 py-1.5 font-mono text-[11px] text-white outline-none placeholder:text-white/20 focus:border-white/30"
- />
- void pickTePrivateFolder()}
- className="shrink-0 rounded-md border border-fuchsia-300/25 bg-fuchsia-300/10 px-2 py-1.5 font-mono text-[10px] text-fuchsia-100/90 hover:bg-fuchsia-300/20"
- title="Choose folder (desktop only)"
- >
- Choose folder
-
-
- {tePrivateApplyError && (
-
- {tePrivateApplyError}
-
- )}
-
void applyTePrivateFolder()}
- className="mt-3 rounded-lg border border-fuchsia-300/30 bg-fuchsia-300/15 px-3 py-1.5 font-mono text-[11px] text-fuchsia-100 hover:bg-fuchsia-300/25 disabled:opacity-40"
- >
- {tePrivateApplyBusy ? "Applying…" : "Apply data folder"}
-
-
+ void pickTePrivateFolder()}
+ onApply={() => void applyTePrivateFolder()}
+ />
)}
{/* Filesystem folder helper (npx server-filesystem) */}
@@ -721,100 +655,3 @@ function InlineEditForm({
);
}
-
-// ── Folder path helper (visual add/remove for filesystem paths) ─────
-
-export function FolderHelper({
- paths,
- pickError,
- onAdd,
- onRemove,
- onPickFolder,
-}: {
- paths: string[];
- pickError: string | null;
- onAdd: (p: string) => void;
- onRemove: (path: string) => void;
- onPickFolder: () => void;
-}) {
- const [newPath, setNewPath] = useState("");
-
- const handleAdd = () => {
- if (newPath.trim()) {
- onAdd(newPath);
- setNewPath("");
- }
- };
-
- return (
-
-
- Allowed folders
-
-
- {paths.length === 0 &&
No folders yet
}
-
- {pickError && (
-
- {pickError}
-
- )}
-
- {paths.length > 0 && (
-
- {paths.map((p, i) => (
-
-
- {p}
-
-
onRemove(p)}
- aria-label={`Remove allowed folder ${p}`}
- title="Remove folder"
- className="shrink-0 font-mono text-[10px] text-rose-300/50 hover:text-rose-200"
- >
- x
-
-
- ))}
-
- )}
-
-
- setNewPath(e.target.value)}
- onKeyDown={(e) => {
- if (e.key === "Enter" && newPath.trim()) {
- e.preventDefault();
- handleAdd();
- }
- }}
- placeholder="/path/to/folder"
- className="min-w-0 flex-1 rounded-md border border-white/15 bg-white/5 px-2 py-1.5 font-mono text-[11px] text-white outline-none placeholder:text-white/20 focus:border-white/30"
- />
-
- add
-
-
- browse
-
-
-
- );
-}
diff --git a/src/modules/mcp/components/McpServerCardFolderHelper.tsx b/src/modules/mcp/components/McpServerCardFolderHelper.tsx
new file mode 100644
index 0000000..d39c11c
--- /dev/null
+++ b/src/modules/mcp/components/McpServerCardFolderHelper.tsx
@@ -0,0 +1,96 @@
+import { useState } from "react";
+
+export function FolderHelper({
+ paths,
+ pickError,
+ onAdd,
+ onRemove,
+ onPickFolder,
+}: {
+ paths: string[];
+ pickError: string | null;
+ onAdd: (p: string) => void;
+ onRemove: (path: string) => void;
+ onPickFolder: () => void;
+}) {
+ const [newPath, setNewPath] = useState("");
+
+ const handleAdd = () => {
+ if (newPath.trim()) {
+ onAdd(newPath);
+ setNewPath("");
+ }
+ };
+
+ return (
+
+
+ Allowed folders
+
+
+ {paths.length === 0 &&
No folders yet
}
+
+ {pickError && (
+
+ {pickError}
+
+ )}
+
+ {paths.length > 0 && (
+
+ {paths.map((p, i) => (
+
+
+ {p}
+
+
onRemove(p)}
+ aria-label={`Remove allowed folder ${p}`}
+ title="Remove folder"
+ className="shrink-0 font-mono text-[10px] text-rose-300/50 hover:text-rose-200"
+ >
+ x
+
+
+ ))}
+
+ )}
+
+
+ setNewPath(e.target.value)}
+ onKeyDown={(e) => {
+ if (e.key === "Enter" && newPath.trim()) {
+ e.preventDefault();
+ handleAdd();
+ }
+ }}
+ placeholder="/path/to/folder"
+ className="min-w-0 flex-1 rounded-md border border-white/15 bg-white/5 px-2 py-1.5 font-mono text-[11px] text-white outline-none placeholder:text-white/20 focus:border-white/30"
+ />
+
+ add
+
+
+ browse
+
+
+
+ );
+}
diff --git a/src/modules/mcp/components/McpServerCardTePanels.tsx b/src/modules/mcp/components/McpServerCardTePanels.tsx
new file mode 100644
index 0000000..142593b
--- /dev/null
+++ b/src/modules/mcp/components/McpServerCardTePanels.tsx
@@ -0,0 +1,139 @@
+import { FolderHelper } from "./McpServerCardFolderHelper";
+
+type TeFileManagerMountPanelProps = {
+ tePaths: string[];
+ teAppMounts: string[];
+ tePickError: string | null;
+ teApplyError: string | null;
+ teApplyBusy: boolean;
+ onAddPath: (p: string) => void;
+ onRemovePath: (p: string) => void;
+ onPickFolder: () => void;
+ onApply: () => void;
+};
+
+export function TeFileManagerMountPanel({
+ tePaths,
+ teAppMounts,
+ tePickError,
+ teApplyError,
+ teApplyBusy,
+ onAddPath,
+ onRemovePath,
+ onPickFolder,
+ onApply,
+}: TeFileManagerMountPanelProps) {
+ return (
+
+
+ Shared folders (File Manager container mounts)
+
+
+ After File Manager is installed, add paths here (or install it first from Tool Engine with
+ an empty list). Each folder mounts as{" "}
+ /app/<name>. Apply updates{" "}
+ workspace_roots in{" "}
+ mcp.json and closes the editor.
+
+ {tePaths.length > 0 && (
+
+ {tePaths.map((p, i) => (
+
+ {teAppMounts[i] ?? ""}
+ ←
+ {p}
+
+ ))}
+
+ )}
+
+ {teApplyError && (
+
+ {teApplyError}
+
+ )}
+
+ {teApplyBusy ? "Applying…" : "Apply folders"}
+
+
+ );
+}
+
+type TePrivateDataFolderPanelProps = {
+ pathInput: string;
+ onPathChange: (v: string) => void;
+ pickError: string | null;
+ applyError: string | null;
+ applyBusy: boolean;
+ onPickFolder: () => void;
+ onApply: () => void;
+};
+
+export function TePrivateDataFolderPanel({
+ pathInput,
+ onPathChange,
+ pickError,
+ applyError,
+ applyBusy,
+ onPickFolder,
+ onApply,
+}: TePrivateDataFolderPanelProps) {
+ return (
+
+
+ Private data folder (host)
+
+
+ This tool keeps state on disk in a single host directory (bind-mounted into the container).
+ Use Choose folder or paste a path, then Apply — same idea as File Manager's shared
+ folders, but only for this tool's data file(s).
+
+ {pickError && (
+
+ {pickError}
+
+ )}
+
+ onPathChange(e.target.value)}
+ placeholder="/path/to/memory-data"
+ className="min-w-0 flex-1 rounded-md border border-white/15 bg-white/5 px-2 py-1.5 font-mono text-[11px] text-white outline-none placeholder:text-white/20 focus:border-white/30"
+ />
+
+ Choose folder
+
+
+ {applyError && (
+
+ {applyError}
+
+ )}
+
+ {applyBusy ? "Applying…" : "Apply data folder"}
+
+
+ );
+}
diff --git a/src/modules/settings/api/index.ts b/src/modules/settings/api/index.ts
new file mode 100644
index 0000000..4dc3230
--- /dev/null
+++ b/src/modules/settings/api/index.ts
@@ -0,0 +1,41 @@
+import { PENGINE_API_BASE } from "../../../shared/api/config";
+
+const SETTINGS_URL = `${PENGINE_API_BASE}/v1/settings`;
+
+export type UserSettings = {
+ skills_hint_max_bytes: number;
+ skills_hint_max_bytes_min: number;
+ skills_hint_max_bytes_max: number;
+ skills_hint_max_bytes_default: number;
+};
+
+export async function fetchUserSettings(timeoutMs: number): Promise
{
+ try {
+ const resp = await fetch(SETTINGS_URL, { signal: AbortSignal.timeout(timeoutMs) });
+ if (!resp.ok) return null;
+ return (await resp.json()) as UserSettings;
+ } catch {
+ return null;
+ }
+}
+
+export async function putUserSettings(
+ skills_hint_max_bytes: number,
+): Promise<{ ok: true; settings: UserSettings } | { ok: false; error: string }> {
+ try {
+ const resp = await fetch(SETTINGS_URL, {
+ method: "PUT",
+ headers: { "Content-Type": "application/json" },
+ body: JSON.stringify({ skills_hint_max_bytes }),
+ signal: AbortSignal.timeout(8000),
+ });
+ const data = (await resp.json()) as UserSettings & { error?: string };
+ if (!resp.ok) {
+ return { ok: false, error: data.error ?? `HTTP ${resp.status}` };
+ }
+ return { ok: true, settings: data as UserSettings };
+ } catch (err) {
+ const message = err instanceof Error ? err.message : String(err);
+ return { ok: false, error: message };
+ }
+}
diff --git a/src/modules/settings/index.ts b/src/modules/settings/index.ts
new file mode 100644
index 0000000..d158c57
--- /dev/null
+++ b/src/modules/settings/index.ts
@@ -0,0 +1 @@
+export * from "./api";
diff --git a/src/modules/skills/components/ClawHubBrowse.tsx b/src/modules/skills/components/ClawHubBrowse.tsx
index 191898f..9990b52 100644
--- a/src/modules/skills/components/ClawHubBrowse.tsx
+++ b/src/modules/skills/components/ClawHubBrowse.tsx
@@ -1,42 +1,7 @@
-import * as ScrollArea from "@radix-ui/react-scroll-area";
import { useCallback, useEffect, useMemo, useRef, useState } from "react";
import { installClawHubSkill, searchClawHub, searchClawHubPlugins } from "../api";
import type { ClawHubPlugin, ClawHubSkill } from "../types";
-
-function formatClawHubUpdated(ms: number): string {
- const d = new Date(ms);
- if (Number.isNaN(d.getTime())) return "—";
- return d.toLocaleString(undefined, { dateStyle: "medium", timeStyle: "short" });
-}
-
-function clawHubSkillDetailUrl(slug: string): string {
- return `https://clawhub.ai/openclaw/${encodeURIComponent(slug)}`;
-}
-
-/** Match ClawHub list-style compact numbers (e.g. 39.1k). */
-function fmtCompact(n: number): string {
- if (!Number.isFinite(n)) return "—";
- if (n >= 1_000_000) return `${(n / 1_000_000).toFixed(1).replace(/\.0$/, "")}M`;
- if (n >= 100_000) return `${Math.round(n / 1000)}k`;
- if (n >= 1000) return `${(n / 1000).toFixed(1).replace(/\.0$/, "")}k`;
- return String(n);
-}
-
-function formatClawHubStatsPrimary(entry: ClawHubSkill): string {
- const parts: string[] = [];
- if (entry.downloads != null) parts.push(fmtCompact(entry.downloads));
- if (entry.stars != null) parts.push(`★ ${entry.stars}`);
- if (entry.versionCount != null) parts.push(`${entry.versionCount} v`);
- return parts.length > 0 ? parts.join(" · ") : "—";
-}
-
-function formatClawHubStatsInstalls(entry: ClawHubSkill): string | null {
- if (entry.installsCurrent == null && entry.installsAllTime == null) return null;
- const bits: string[] = [];
- if (entry.installsCurrent != null) bits.push(`${fmtCompact(entry.installsCurrent)} cur`);
- if (entry.installsAllTime != null) bits.push(`${fmtCompact(entry.installsAllTime)} all`);
- return bits.join(" · ");
-}
+import { ClawHubPluginsResultsSection, ClawHubSkillsResultsSection } from "./ClawHubBrowseParts";
const CLAW_SORT_OPTIONS = [
{ value: "downloads", label: "Downloads" },
@@ -48,8 +13,6 @@ const CLAW_SORT_OPTIONS = [
] as const;
const SEARCH_PAGE_SIZE = 30;
-/** Approximate ClawHub plugin registry size (for UI copy); list still loads until `nextCursor` ends. */
-const CLAWHUB_PLUGINS_CATALOG_ESTIMATE = 55_561;
function FilterChip({
active,
@@ -194,7 +157,7 @@ export function ClawHubBrowse({ onClose, onAfterSkillInstall }: ClawHubBrowsePro
const t = pluginTagFilter.trim().toLowerCase();
if (!t) return pluginResults;
return pluginResults.filter((p) =>
- p.capabilityTags.some((tag) => tag.toLowerCase().includes(t)),
+ p.capabilityTags.some((tag: string) => tag.toLowerCase().includes(t)),
);
}, [pluginResults, pluginTagFilter]);
@@ -394,223 +357,29 @@ export function ClawHubBrowse({ onClose, onAfterSkillInstall }: ClawHubBrowsePro
)}
- {clawRegistry === "skills" && results && results.length === 0 && !browseLoading && (
- No matches.
- )}
- {clawRegistry === "skills" && results && results.length > 0 && (
-
- {results.length} skill{results.length === 1 ? "" : "s"} shown
- · author & stats from ClawHub when available
-
- )}
- {clawRegistry === "skills" && results && results.length > 0 && (
-
-
-
-
Skill
-
Summary
-
Author
-
Stats
-
-
-
- {results.map((entry) => {
- const installsSub = formatClawHubStatsInstalls(entry);
- return (
-
-
-
- {entry.displayName}
-
-
- {entry.isHighlighted === true && (
-
- Highlighted
-
- )}
- {entry.isOfficial === true && (
-
- Official
-
- )}
- {entry.version != null && entry.version !== "" && (
-
- v{entry.version}
-
- )}
-
-
- {entry.slug}
-
- {(entry.updatedAt != null ||
- (entry.score != null && Number.isFinite(entry.score))) && (
-
- {entry.updatedAt != null && (
- <>Upd {formatClawHubUpdated(entry.updatedAt)}>
- )}
- {entry.updatedAt != null &&
- entry.score != null &&
- Number.isFinite(entry.score) &&
- " · "}
- {entry.score != null && Number.isFinite(entry.score) && (
- <>score {entry.score.toFixed(2)}>
- )}
-
- )}
-
-
-
- Author
- {entry.ownerHandle ? (
- @{entry.ownerHandle}
- ) : (
- —
- )}
-
-
- Stats
- {formatClawHubStatsPrimary(entry)}
- {installsSub && (
-
- {installsSub}
- {entry.commentsCount != null ? ` · ${entry.commentsCount} comments` : ""}
-
- )}
- {!installsSub && entry.commentsCount != null && (
-
- {entry.commentsCount} comments
-
- )}
-
-
-
void handleInstall(entry)}
- disabled={installingSlug !== null}
- className="w-full rounded-lg border border-emerald-300/20 bg-emerald-300/10 px-2.5 py-1 font-mono text-[10px] text-emerald-300 transition hover:bg-emerald-300/20 disabled:opacity-40 md:w-auto"
- >
- {installingSlug === entry.slug ? "Installing…" : "Install"}
-
-
- Details
-
-
-
- );
- })}
- {(hasMoreSkills || loadingMore) && (
-
- {loadingMore ? "Loading more…" : "Scroll for more"}
-
- )}
-
-
-
-
-
-
-
- )}
+
- {clawRegistry === "plugins" &&
- visiblePlugins &&
- visiblePlugins.length === 0 &&
- !browseLoading && (
-
- {pluginResults?.length ? "No plugins match this tag filter." : "No matches."}
-
- )}
- {clawRegistry === "plugins" && pluginResults && (
-
- {pluginResults.length.toLocaleString()} loaded
- {hasMorePlugins ? " · scroll for more" : " · end of list"}
- {!pluginTagFilter.trim() ? (
-
- {" "}
- · ~{CLAWHUB_PLUGINS_CATALOG_ESTIMATE.toLocaleString()} in registry
-
- ) : null}
- {pluginTagFilter.trim() && visiblePlugins && pluginResults.length > 0 ? (
-
- {" "}
- · {visiblePlugins.length.toLocaleString()} match tag filter
-
- ) : null}
-
- )}
- {clawRegistry === "plugins" && visiblePlugins && visiblePlugins.length > 0 && (
-
-
-
- {visiblePlugins.map((p) => (
-
-
-
{p.displayName}
-
- {p.name}
- {p.ownerHandle ? ` · @${p.ownerHandle}` : ""}
-
-
- {p.summary}
-
- {p.capabilityTags.length > 0 && (
-
- {p.capabilityTags.join(" · ")}
-
- )}
-
-
- Open
-
-
- ))}
- {(hasMorePlugins || loadingMore) && (
-
- {loadingMore ? "Loading more…" : "Scroll for more"}
-
- )}
-
-
-
-
-
-
-
- )}
+
);
}
diff --git a/src/modules/skills/components/ClawHubBrowseParts.tsx b/src/modules/skills/components/ClawHubBrowseParts.tsx
new file mode 100644
index 0000000..ff2a188
--- /dev/null
+++ b/src/modules/skills/components/ClawHubBrowseParts.tsx
@@ -0,0 +1,285 @@
+import * as ScrollArea from "@radix-ui/react-scroll-area";
+import type { RefObject } from "react";
+import type { ClawHubPlugin, ClawHubSkill } from "../types";
+import {
+ CLAWHUB_PLUGINS_CATALOG_ESTIMATE,
+ clawHubSkillDetailUrl,
+ formatClawHubStatsInstalls,
+ formatClawHubStatsPrimary,
+ formatClawHubUpdated,
+} from "./clawHubBrowseFormat";
+
+function ClawHubSkillRow({
+ entry,
+ installingSlug,
+ onInstall,
+}: {
+ entry: ClawHubSkill;
+ installingSlug: string | null;
+ onInstall: (entry: ClawHubSkill) => void;
+}) {
+ const installsSub = formatClawHubStatsInstalls(entry);
+ return (
+
+
+
{entry.displayName}
+
+ {entry.isHighlighted === true && (
+
+ Highlighted
+
+ )}
+ {entry.isOfficial === true && (
+
+ Official
+
+ )}
+ {entry.version != null && entry.version !== "" && (
+ v{entry.version}
+ )}
+
+
{entry.slug}
+ {(entry.updatedAt != null || (entry.score != null && Number.isFinite(entry.score))) && (
+
+ {entry.updatedAt != null && <>Upd {formatClawHubUpdated(entry.updatedAt)}>}
+ {entry.updatedAt != null &&
+ entry.score != null &&
+ Number.isFinite(entry.score) &&
+ " · "}
+ {entry.score != null && Number.isFinite(entry.score) && (
+ <>score {entry.score.toFixed(2)}>
+ )}
+
+ )}
+
+
+
+ Author
+ {entry.ownerHandle ? (
+ @{entry.ownerHandle}
+ ) : (
+ —
+ )}
+
+
+ Stats
+ {formatClawHubStatsPrimary(entry)}
+ {installsSub && (
+
+ {installsSub}
+ {entry.commentsCount != null ? ` · ${entry.commentsCount} comments` : ""}
+
+ )}
+ {!installsSub && entry.commentsCount != null && (
+
+ {entry.commentsCount} comments
+
+ )}
+
+
+
void onInstall(entry)}
+ disabled={installingSlug !== null}
+ className="w-full rounded-lg border border-emerald-300/20 bg-emerald-300/10 px-2.5 py-1 font-mono text-[10px] text-emerald-300 transition hover:bg-emerald-300/20 disabled:opacity-40 md:w-auto"
+ >
+ {installingSlug === entry.slug ? "Installing…" : "Install"}
+
+
+ Details
+
+
+
+ );
+}
+
+export function ClawHubSkillsResultsSection({
+ active,
+ browseLoading,
+ results,
+ listViewportRef,
+ loadMoreRef,
+ hasMoreSkills,
+ loadingMore,
+ installingSlug,
+ onInstallSkill,
+}: {
+ active: boolean;
+ browseLoading: boolean;
+ results: ClawHubSkill[] | null;
+ listViewportRef: RefObject;
+ loadMoreRef: RefObject;
+ hasMoreSkills: boolean;
+ loadingMore: boolean;
+ installingSlug: string | null;
+ onInstallSkill: (entry: ClawHubSkill) => void;
+}) {
+ if (!active || !results) return null;
+ if (results.length === 0 && !browseLoading) {
+ return No matches.
;
+ }
+ if (results.length === 0) return null;
+ return (
+ <>
+
+ {results.length} skill{results.length === 1 ? "" : "s"} shown
+ · author & stats from ClawHub when available
+
+
+
+
+
Skill
+
Summary
+
Author
+
Stats
+
+
+
+ {results.map((entry) => (
+
+ ))}
+ {(hasMoreSkills || loadingMore) && (
+
+ {loadingMore ? "Loading more…" : "Scroll for more"}
+
+ )}
+
+
+
+
+
+
+
+ >
+ );
+}
+
+export function ClawHubPluginsResultsSection({
+ active,
+ browseLoading,
+ pluginResults,
+ visiblePlugins,
+ pluginTagFilter,
+ listViewportRef,
+ loadMoreRef,
+ hasMorePlugins,
+ loadingMore,
+}: {
+ active: boolean;
+ browseLoading: boolean;
+ pluginResults: ClawHubPlugin[] | null;
+ visiblePlugins: ClawHubPlugin[] | null;
+ pluginTagFilter: string;
+ listViewportRef: RefObject;
+ loadMoreRef: RefObject;
+ hasMorePlugins: boolean;
+ loadingMore: boolean;
+}) {
+ if (!active) return null;
+
+ const emptyFiltered =
+ visiblePlugins && visiblePlugins.length === 0 && !browseLoading && pluginResults !== null;
+
+ return (
+ <>
+ {emptyFiltered && (
+
+ {pluginResults!.length ? "No plugins match this tag filter." : "No matches."}
+
+ )}
+ {pluginResults && (
+
+ {pluginResults.length.toLocaleString()} loaded
+ {hasMorePlugins ? " · scroll for more" : " · end of list"}
+ {!pluginTagFilter.trim() ? (
+
+ {" "}
+ · ~{CLAWHUB_PLUGINS_CATALOG_ESTIMATE.toLocaleString()} in registry
+
+ ) : null}
+ {pluginTagFilter.trim() && visiblePlugins && pluginResults.length > 0 ? (
+
+ {" "}
+ · {visiblePlugins.length.toLocaleString()} match tag filter
+
+ ) : null}
+
+ )}
+ {visiblePlugins && visiblePlugins.length > 0 && (
+
+
+
+ {visiblePlugins.map((p) => (
+
+
+
{p.displayName}
+
+ {p.name}
+ {p.ownerHandle ? ` · @${p.ownerHandle}` : ""}
+
+
+ {p.summary}
+
+ {p.capabilityTags.length > 0 && (
+
+ {p.capabilityTags.join(" · ")}
+
+ )}
+
+
+ Open
+
+
+ ))}
+ {(hasMorePlugins || loadingMore) && (
+
+ {loadingMore ? "Loading more…" : "Scroll for more"}
+
+ )}
+
+
+
+
+
+
+
+ )}
+ >
+ );
+}
diff --git a/src/modules/skills/components/SkillsContextBytesSlider.tsx b/src/modules/skills/components/SkillsContextBytesSlider.tsx
new file mode 100644
index 0000000..98a0115
--- /dev/null
+++ b/src/modules/skills/components/SkillsContextBytesSlider.tsx
@@ -0,0 +1,44 @@
+import * as Slider from "@radix-ui/react-slider";
+
+type Props = {
+ min: number;
+ max: number;
+ step: number;
+ value: number;
+ disabled: boolean;
+ onValueChange: (bytes: number) => void;
+ "aria-label": string;
+};
+
+export function SkillsContextBytesSlider({
+ min,
+ max,
+ step,
+ value,
+ disabled,
+ onValueChange,
+ "aria-label": ariaLabel,
+}: Props) {
+ const clamped = Math.min(max, Math.max(min, value));
+
+ return (
+ {
+ const v = next[0];
+ if (v !== undefined) onValueChange(v);
+ }}
+ aria-label={ariaLabel}
+ >
+
+
+
+
+
+ );
+}
diff --git a/src/modules/skills/components/SkillsPanel.tsx b/src/modules/skills/components/SkillsPanel.tsx
index ffcbe49..efae56f 100644
--- a/src/modules/skills/components/SkillsPanel.tsx
+++ b/src/modules/skills/components/SkillsPanel.tsx
@@ -1,8 +1,18 @@
import * as Accordion from "@radix-ui/react-accordion";
import { useCallback, useEffect, useRef, useState } from "react";
+import { fetchUserSettings, putUserSettings } from "../../settings";
import { addSkill, deleteSkill, fetchSkills, setSkillEnabled } from "../api";
import type { Skill } from "../types";
import { ClawHubBrowse } from "./ClawHubBrowse";
+import { SkillsContextBytesSlider } from "./SkillsContextBytesSlider";
+
+function formatContextKiB(bytes: number): string {
+ const kb = bytes / 1024;
+ if (kb >= 1024 && kb % 1024 === 0) return `${kb / 1024} MiB`;
+ if (kb >= 100) return `${Math.round(kb)} KiB`;
+ if (kb >= 10) return `${kb.toFixed(1)} KiB`;
+ return `${kb.toFixed(2)} KiB`;
+}
const TEMPLATE = `---
name: my-skill
@@ -72,6 +82,16 @@ export function SkillsPanel() {
const [showBrowse, setShowBrowse] = useState(false);
const [browseKey, setBrowseKey] = useState(0);
+ const [ctxBytes, setCtxBytes] = useState(10 * 1024);
+ const [ctxSaved, setCtxSaved] = useState(10 * 1024);
+ const [ctxMin, setCtxMin] = useState(4 * 1024);
+ const [ctxMax, setCtxMax] = useState(256 * 1024);
+ const [ctxDefault, setCtxDefault] = useState(10 * 1024);
+ const [ctxLoaded, setCtxLoaded] = useState(false);
+ const [ctxSaving, setCtxSaving] = useState(false);
+ const [ctxErr, setCtxErr] = useState(null);
+ const [ctxSettingsErr, setCtxSettingsErr] = useState(null);
+
const cancelledRef = useRef(false);
const load = useCallback(async () => {
@@ -95,6 +115,47 @@ export function SkillsPanel() {
};
}, [load]);
+ useEffect(() => {
+ let gone = false;
+ (async () => {
+ const us = await fetchUserSettings(4000);
+ if (gone) return;
+ if (!us) {
+ setCtxSettingsErr(
+ "Could not load settings from Pengine (offline or server error). Using built-in defaults for the slider limits.",
+ );
+ setCtxLoaded(true);
+ return;
+ }
+ setCtxSettingsErr(null);
+ setCtxBytes(us.skills_hint_max_bytes);
+ setCtxSaved(us.skills_hint_max_bytes);
+ setCtxMin(us.skills_hint_max_bytes_min);
+ setCtxMax(us.skills_hint_max_bytes_max);
+ setCtxDefault(us.skills_hint_max_bytes_default);
+ setCtxLoaded(true);
+ })();
+ return () => {
+ gone = true;
+ };
+ }, []);
+
+ const ctxDirty = ctxLoaded && ctxBytes !== ctxSaved;
+
+ const saveContextBytes = async (bytes: number) => {
+ setCtxErr(null);
+ setCtxSaving(true);
+ const result = await putUserSettings(bytes);
+ setCtxSaving(false);
+ if (result.ok) {
+ const v = result.settings.skills_hint_max_bytes;
+ setCtxBytes(v);
+ setCtxSaved(v);
+ return;
+ }
+ setCtxErr(result.error);
+ };
+
const handleAdd = async () => {
const trimmedSlug = newSlug.trim();
const isEditing = editingSlug !== null;
@@ -189,6 +250,66 @@ export function SkillsPanel() {
+
+
+
+
+ Context
+
+
+ {ctxLoaded ? formatContextKiB(ctxBytes) : "…"}
+
+
+
+
+
+ {ctxLoaded
+ ? `${formatContextKiB(ctxMin)}–${formatContextKiB(ctxMax)} · default ${formatContextKiB(ctxDefault)}`
+ : "Loading limits…"}
+
+
+
+ void saveContextBytes(ctxBytes)}
+ className="rounded-md border border-cyan-300/25 bg-cyan-300/10 px-2 py-1 font-mono text-[10px] text-cyan-100 transition hover:bg-cyan-300/15 disabled:pointer-events-none disabled:opacity-35"
+ >
+ {ctxSaving ? "…" : "Save"}
+
+ void saveContextBytes(ctxDefault)}
+ className="rounded-md border border-white/12 bg-transparent px-2 py-1 font-mono text-[10px] text-white/45 transition hover:border-white/20 hover:text-white/65 disabled:pointer-events-none disabled:opacity-35"
+ >
+ Default
+
+
+
+ {ctxSettingsErr && (
+
+ {ctxSettingsErr}
+
+ )}
+ {ctxErr && (
+
+ {ctxErr}
+
+ )}
+
+
Custom dir: {customDir || "—"}
diff --git a/src/modules/skills/components/clawHubBrowseFormat.ts b/src/modules/skills/components/clawHubBrowseFormat.ts
new file mode 100644
index 0000000..10f700a
--- /dev/null
+++ b/src/modules/skills/components/clawHubBrowseFormat.ts
@@ -0,0 +1,38 @@
+import type { ClawHubSkill } from "../types";
+
+export const CLAWHUB_PLUGINS_CATALOG_ESTIMATE = 55_561;
+
+export function formatClawHubUpdated(ms: number): string {
+ const d = new Date(ms);
+ if (Number.isNaN(d.getTime())) return "—";
+ return d.toLocaleString(undefined, { dateStyle: "medium", timeStyle: "short" });
+}
+
+export function clawHubSkillDetailUrl(slug: string): string {
+ return `https://clawhub.ai/openclaw/${encodeURIComponent(slug)}`;
+}
+
+/** Match ClawHub list-style compact numbers (e.g. 39.1k). */
+function fmtCompact(n: number): string {
+ if (!Number.isFinite(n)) return "—";
+ if (n >= 1_000_000) return `${(n / 1_000_000).toFixed(1).replace(/\.0$/, "")}M`;
+ if (n >= 100_000) return `${Math.round(n / 1000)}k`;
+ if (n >= 1000) return `${(n / 1000).toFixed(1).replace(/\.0$/, "")}k`;
+ return String(n);
+}
+
+export function formatClawHubStatsPrimary(entry: ClawHubSkill): string {
+ const parts: string[] = [];
+ if (entry.downloads != null) parts.push(fmtCompact(entry.downloads));
+ if (entry.stars != null) parts.push(`★ ${entry.stars}`);
+ if (entry.versionCount != null) parts.push(`${entry.versionCount} v`);
+ return parts.length > 0 ? parts.join(" · ") : "—";
+}
+
+export function formatClawHubStatsInstalls(entry: ClawHubSkill): string | null {
+ if (entry.installsCurrent == null && entry.installsAllTime == null) return null;
+ const bits: string[] = [];
+ if (entry.installsCurrent != null) bits.push(`${fmtCompact(entry.installsCurrent)} cur`);
+ if (entry.installsAllTime != null) bits.push(`${fmtCompact(entry.installsAllTime)} all`);
+ return bits.join(" · ");
+}
diff --git a/src/modules/toolengine/components/ToolEngineCatalogToolCard.tsx b/src/modules/toolengine/components/ToolEngineCatalogToolCard.tsx
new file mode 100644
index 0000000..b85da7d
--- /dev/null
+++ b/src/modules/toolengine/components/ToolEngineCatalogToolCard.tsx
@@ -0,0 +1,236 @@
+import * as Accordion from "@radix-ui/react-accordion";
+import type { Dispatch, SetStateAction } from "react";
+import type { CatalogTool } from "..";
+
+type Props = {
+ tool: CatalogTool;
+ runtimeAvailable: boolean;
+ busyTool: string | null;
+ passthroughValues: Record>;
+ passthroughReplacing: Record>;
+ passthroughSavingId: string | null;
+ setPassthroughValues: Dispatch>>>;
+ setPassthroughReplacing: Dispatch>>>;
+ onSavePassthrough: (tool: CatalogTool) => void;
+ onInstall: (toolId: string) => void;
+ onUninstall: (toolId: string) => void;
+};
+
+export function ToolEngineCatalogToolCard({
+ tool,
+ runtimeAvailable,
+ busyTool,
+ passthroughValues,
+ passthroughReplacing,
+ passthroughSavingId,
+ setPassthroughValues,
+ setPassthroughReplacing,
+ onSavePassthrough,
+ onInstall,
+ onUninstall,
+}: Props) {
+ return (
+
+
+
+
+ {tool.name}
+
+
+ v{tool.version} — {tool.commands.length} command
+ {tool.commands.length === 1 ? "" : "s"}
+
+
+ {tool.description}
+
+ {tool.id === "pengine/fetch" && (
+
+ robots.txt:{" "}
+ {tool.ignore_robots_txt
+ ? "ignored for this install (opt-in)"
+ : "enforced — set catalog ignore_robots_txt or mcp_server_cmd only if you accept bypassing robots for all fetched URLs"}
+ {tool.robots_ignore_allowlist && tool.robots_ignore_allowlist.length > 0
+ ? ` · allowlist (informational): ${tool.robots_ignore_allowlist.join(", ")}`
+ : ""}
+
+ )}
+ {(tool.passthrough_env?.length ?? 0) > 0 && !tool.installed && (
+
+ Install this tool, then set {tool.passthrough_env?.join(", ")} below so the MCP server
+ can start and register all commands.
+
+ )}
+ {(tool.passthrough_env?.length ?? 0) > 0 && tool.installed && (
+
+
+ Container secrets
+
+ {tool.passthrough_configured_keys && tool.passthrough_configured_keys.length > 0 ? (
+
+ Saved: {tool.passthrough_configured_keys.join(", ")}
+
+ ) : (
+
+ Required for this tool to start. Stored locally in mcp.json (not sent to the
+ model).
+
+ )}
+
+ {(tool.passthrough_env ?? []).map((key) => {
+ const configuredKeys = tool.passthrough_configured_keys ?? [];
+ const isSaved = configuredKeys.includes(key);
+ const isReplacing = passthroughReplacing[tool.id]?.[key] === true;
+ if (isSaved && !isReplacing) {
+ return (
+
+
{key}
+
+
+ ••••••••
+
+ {
+ setPassthroughReplacing((prev) => ({
+ ...prev,
+ [tool.id]: { ...(prev[tool.id] ?? {}), [key]: true },
+ }));
+ setPassthroughValues((prev) => ({
+ ...prev,
+ [tool.id]: { ...(prev[tool.id] ?? {}), [key]: "" },
+ }));
+ }}
+ className="shrink-0 rounded border border-white/15 bg-white/5 px-2 py-0.5 font-mono text-[10px] text-white/70 transition hover:bg-white/10 disabled:opacity-40"
+ >
+ Replace…
+
+
+
+ );
+ }
+ return (
+
+
+ {key}
+ {isSaved && isReplacing ? (
+ {
+ setPassthroughReplacing((prev) => {
+ const row = { ...(prev[tool.id] ?? {}) };
+ delete row[key];
+ const next = { ...prev };
+ if (Object.keys(row).length) next[tool.id] = row;
+ else delete next[tool.id];
+ return next;
+ });
+ setPassthroughValues((prev) => ({
+ ...prev,
+ [tool.id]: { ...(prev[tool.id] ?? {}), [key]: "" },
+ }));
+ }}
+ className="font-mono text-[9px] text-white/40 underline decoration-white/20 underline-offset-2 hover:text-white/60"
+ >
+ Cancel
+
+ ) : null}
+
+
+ setPassthroughValues((prev) => ({
+ ...prev,
+ [tool.id]: { ...(prev[tool.id] ?? {}), [key]: e.target.value },
+ }))
+ }
+ placeholder={isReplacing ? "New value (empty removes key)" : undefined}
+ className="mt-0.5 w-full rounded-md border border-white/10 bg-black/25 px-2 py-1 font-mono text-[11px] text-white outline-none focus:border-emerald-300/35"
+ autoComplete="off"
+ />
+
+ );
+ })}
+
+
+ void onSavePassthrough(tool)}
+ className="rounded-lg border border-emerald-300/20 bg-emerald-300/10 px-3 py-1 font-mono text-[11px] text-emerald-300 transition hover:bg-emerald-300/20 disabled:opacity-40"
+ >
+ {passthroughSavingId === tool.id ? "Saving…" : "Save keys"}
+
+
+
+ )}
+
+
+
void (tool.installed ? onUninstall(tool.id) : onInstall(tool.id))}
+ className={`shrink-0 rounded-lg border px-3 py-1 font-mono text-[11px] transition disabled:opacity-40 ${
+ tool.installed
+ ? "border-rose-300/20 bg-transparent text-rose-300/70 hover:bg-rose-300/10 hover:text-rose-200"
+ : "border-emerald-300/20 bg-emerald-300/10 text-emerald-300 hover:bg-emerald-300/20"
+ }`}
+ >
+ {busyTool === tool.id
+ ? tool.installed
+ ? "Removing…"
+ : "Installing…"
+ : tool.installed
+ ? "Uninstall"
+ : "Install"}
+
+
+
+ {tool.commands.length > 0 && (
+
+
+
+
+
+
Container commands
+
+ {tool.commands.length} MCP tool
+ {tool.commands.length === 1 ? "" : "s"}
+
+
+
+ +
+
+
+
+
+
+ {tool.commands.map((cmd) => (
+
+ {cmd.name}
+ {cmd.description ? (
+
+ {cmd.description}
+
+ ) : null}
+
+ ))}
+
+
+
+
+ )}
+
+ );
+}
diff --git a/src/modules/toolengine/components/ToolEnginePanel.tsx b/src/modules/toolengine/components/ToolEnginePanel.tsx
index 14f003d..284db65 100644
--- a/src/modules/toolengine/components/ToolEnginePanel.tsx
+++ b/src/modules/toolengine/components/ToolEnginePanel.tsx
@@ -1,4 +1,3 @@
-import * as Accordion from "@radix-ui/react-accordion";
import { useCallback, useEffect, useRef, useState } from "react";
import { notifyMcpRegistryChanged } from "../../../shared/mcpEvents";
import { PENGINE_API_BASE } from "../../../shared/api/config";
@@ -12,6 +11,7 @@ import {
type CatalogTool,
type RuntimeStatus,
} from "..";
+import { ToolEngineCatalogToolCard } from "./ToolEngineCatalogToolCard";
export function ToolEnginePanel() {
const [runtime, setRuntime] = useState(null);
@@ -328,223 +328,20 @@ export function ToolEnginePanel() {
{catalog !== null && catalog.length > 0 && (
{catalog.map((tool) => (
-
-
-
-
- {tool.name}
-
-
- v{tool.version} — {tool.commands.length} command
- {tool.commands.length === 1 ? "" : "s"}
-
-
- {tool.description}
-
- {tool.id === "pengine/fetch" && (
-
- robots.txt:{" "}
- {tool.ignore_robots_txt
- ? "ignored for this install (opt-in)"
- : "enforced — set catalog ignore_robots_txt or mcp_server_cmd only if you accept bypassing robots for all fetched URLs"}
- {tool.robots_ignore_allowlist && tool.robots_ignore_allowlist.length > 0
- ? ` · allowlist (informational): ${tool.robots_ignore_allowlist.join(", ")}`
- : ""}
-
- )}
- {(tool.passthrough_env?.length ?? 0) > 0 && !tool.installed && (
-
- Install this tool, then set {tool.passthrough_env?.join(", ")} below so the
- MCP server can start and register all commands.
-
- )}
- {(tool.passthrough_env?.length ?? 0) > 0 && tool.installed && (
-
-
- Container secrets
-
- {tool.passthrough_configured_keys &&
- tool.passthrough_configured_keys.length > 0 ? (
-
- Saved: {tool.passthrough_configured_keys.join(", ")}
-
- ) : (
-
- Required for this tool to start. Stored locally in mcp.json (not sent to
- the model).
-
- )}
-
- {(tool.passthrough_env ?? []).map((key) => {
- const configuredKeys = tool.passthrough_configured_keys ?? [];
- const isSaved = configuredKeys.includes(key);
- const isReplacing = passthroughReplacing[tool.id]?.[key] === true;
- if (isSaved && !isReplacing) {
- return (
-
-
{key}
-
-
- ••••••••
-
- {
- setPassthroughReplacing((prev) => ({
- ...prev,
- [tool.id]: { ...(prev[tool.id] ?? {}), [key]: true },
- }));
- setPassthroughValues((prev) => ({
- ...prev,
- [tool.id]: { ...(prev[tool.id] ?? {}), [key]: "" },
- }));
- }}
- className="shrink-0 rounded border border-white/15 bg-white/5 px-2 py-0.5 font-mono text-[10px] text-white/70 transition hover:bg-white/10 disabled:opacity-40"
- >
- Replace…
-
-
-
- );
- }
- return (
-
-
- {key}
- {isSaved && isReplacing ? (
- {
- setPassthroughReplacing((prev) => {
- const row = { ...(prev[tool.id] ?? {}) };
- delete row[key];
- const next = { ...prev };
- if (Object.keys(row).length) next[tool.id] = row;
- else delete next[tool.id];
- return next;
- });
- setPassthroughValues((prev) => ({
- ...prev,
- [tool.id]: { ...(prev[tool.id] ?? {}), [key]: "" },
- }));
- }}
- className="font-mono text-[9px] text-white/40 underline decoration-white/20 underline-offset-2 hover:text-white/60"
- >
- Cancel
-
- ) : null}
-
-
- setPassthroughValues((prev) => ({
- ...prev,
- [tool.id]: { ...(prev[tool.id] ?? {}), [key]: e.target.value },
- }))
- }
- placeholder={
- isReplacing ? "New value (empty removes key)" : undefined
- }
- className="mt-0.5 w-full rounded-md border border-white/10 bg-black/25 px-2 py-1 font-mono text-[11px] text-white outline-none focus:border-emerald-300/35"
- autoComplete="off"
- />
-
- );
- })}
-
-
- void savePassthrough(tool)}
- className="rounded-lg border border-emerald-300/20 bg-emerald-300/10 px-3 py-1 font-mono text-[11px] text-emerald-300 transition hover:bg-emerald-300/20 disabled:opacity-40"
- >
- {passthroughSavingId === tool.id ? "Saving…" : "Save keys"}
-
-
-
- )}
-
-
-
- void (tool.installed ? handleUninstall(tool.id) : handleInstall(tool.id))
- }
- className={`shrink-0 rounded-lg border px-3 py-1 font-mono text-[11px] transition disabled:opacity-40 ${
- tool.installed
- ? "border-rose-300/20 bg-transparent text-rose-300/70 hover:bg-rose-300/10 hover:text-rose-200"
- : "border-emerald-300/20 bg-emerald-300/10 text-emerald-300 hover:bg-emerald-300/20"
- }`}
- >
- {busyTool === tool.id
- ? tool.installed
- ? "Removing…"
- : "Installing…"
- : tool.installed
- ? "Uninstall"
- : "Install"}
-
-
-
- {/* MCP tools exposed by the container image (collapsible, same pattern as MCP Tools) */}
- {tool.commands.length > 0 && (
-
-
-
-
-
-
- Container commands
-
-
- {tool.commands.length} MCP tool
- {tool.commands.length === 1 ? "" : "s"}
-
-
-
- +
-
-
-
-
-
- {tool.commands.map((cmd) => (
-
- {cmd.name}
- {cmd.description ? (
-
- {cmd.description}
-
- ) : null}
-
- ))}
-
-
-
-
- )}
-
+ tool={tool}
+ runtimeAvailable={runtime?.available === true}
+ busyTool={busyTool}
+ passthroughValues={passthroughValues}
+ passthroughReplacing={passthroughReplacing}
+ passthroughSavingId={passthroughSavingId}
+ setPassthroughValues={setPassthroughValues}
+ setPassthroughReplacing={setPassthroughReplacing}
+ onSavePassthrough={savePassthrough}
+ onInstall={handleInstall}
+ onUninstall={handleUninstall}
+ />
))}
)}
diff --git a/src/pages/DashboardPage.tsx b/src/pages/DashboardPage.tsx
index 540aa83..e3f18b2 100644
--- a/src/pages/DashboardPage.tsx
+++ b/src/pages/DashboardPage.tsx
@@ -31,7 +31,6 @@ export function DashboardPage() {
{ name: "Ollama", status: "checking", detail: "Checking…" },
]);
const [disconnectError, setDisconnectError] = useState(null);
-
const refreshStatus = useCallback(async () => {
let botUser = botUsername ?? "unknown";
const health = await getPengineHealth(3000);