diff --git a/AGENTS.md b/AGENTS.md
index bc69d36..619a252 100644
--- a/AGENTS.md
+++ b/AGENTS.md
@@ -145,7 +145,7 @@ Do not assume older docs mentioning only a subset are current.
- Hermes gateway log (inside container): `/root/.hermes/logs/gateway.log` — shows all received Discord events. Zero entries after startup means the bot is connected but not receiving messages (stale gateway session or missing `MESSAGE_CONTENT` intent).
- cllama context mount (`agentctx`) currently holds only `AgentsMD`, `ClawdapusMD`, and `Metadata` (for bearer token auth). No outbound service credentials, no feed manifests, no decoration config.
- cllama session history: `claw up` bind-mounts `.claw-session-history/` → `/claw/session-history` in the cllama container when cllama is enabled. cllama writes `<service>/<ordinal>/history.jsonl` — one entry per successful 2xx completion. This is infrastructure-owned (proxy-written). Agents have no read API against it in Phase 1. Distinct from `/claw/memory`, which is runner-owned. Both surfaces are persistent across container restarts AND driver migrations (`CLAW_TYPE` changes).
-- Provider API keys for cllama-managed services belong in `x-claw.cllama-env`, not regular agent `environment:` blocks.
+- Provider API keys for cllama-managed services belong in `x-claw.cllama-env`, not regular agent `environment:` blocks. Native Gemini uses `GEMINI_API_KEY` as the primary env name and also accepts `GOOGLE_API_KEY` as a lower-priority alias.
- For cllama-enabled `count > 1` services, bearer tokens and context are per ordinal, not per base service.
- `compose.generated.yml` and `Dockerfile.generated` are generated artifacts. Inspect them, but do not hand-edit them as source.
- OpenClaw config and cron paths are mounted as directories, not single files, because the runtime performs atomic rewrites.
diff --git a/cllama b/cllama
index 732597c..b56deba 160000
--- a/cllama
+++ b/cllama
@@ -1 +1 @@
-Subproject commit 732597c513155c96dbe444f91e8f07e5a4d14ba5
+Subproject commit b56deba663133d746222ca023aa595b633972f7e
diff --git a/cmd/claw/compose_up.go b/cmd/claw/compose_up.go
index 1f924cb..b90109c 100644
--- a/cmd/claw/compose_up.go
+++ b/cmd/claw/compose_up.go
@@ -2705,6 +2705,9 @@ var seedKeyDefs = []seedKeyDef{
{"ANTHROPIC_API_KEY_1", "anthropic", "seed:ANTHROPIC_API_KEY_1", "backup-1"},
{"OPENROUTER_API_KEY", "openrouter", "seed:OPENROUTER_API_KEY", "primary"},
{"OPENROUTER_API_KEY_1", "openrouter", "seed:OPENROUTER_API_KEY_1", "backup-1"},
+ {"GEMINI_API_KEY", "google", "seed:GEMINI_API_KEY", "primary"},
+ {"GEMINI_API_KEY_1", "google", "seed:GEMINI_API_KEY_1", "backup-1"},
+ {"GOOGLE_API_KEY", "google", "seed:GOOGLE_API_KEY", "backup-2"},
}
// v2ProviderFile is the providers.json v2 on-disk shape (write path only).
@@ -2740,6 +2743,7 @@ var defaultBaseURLs = map[string]string{
"xai": "https://api.x.ai/v1",
"anthropic": "https://api.anthropic.com/v1",
"openrouter": "https://openrouter.ai/api/v1",
+ "google": "https://generativelanguage.googleapis.com/v1beta/openai",
}
var defaultAuths = map[string]string{
@@ -2790,6 +2794,7 @@ func mergeProviderSeeds(authDir string, p *pod.Pod) error {
"XAI_BASE_URL": "xai",
"ANTHROPIC_BASE_URL": "anthropic",
"OPENROUTER_BASE_URL": "openrouter",
+ "GOOGLE_BASE_URL": "google",
}
customBaseURLs := make(map[string]string)
for envKey, prov := range baseURLEnvMap {
@@ -3004,8 +3009,10 @@ func loadOrGenerateUIToken(authDir string) (string, error) {
func isProviderKey(key string) bool {
switch key {
case "OPENAI_API_KEY", "OPENAI_API_KEY_1", "OPENAI_API_KEY_2",
+ "XAI_API_KEY", "XAI_API_KEY_1",
"ANTHROPIC_API_KEY", "ANTHROPIC_API_KEY_1",
- "OPENROUTER_API_KEY", "OPENROUTER_API_KEY_1":
+ "OPENROUTER_API_KEY", "OPENROUTER_API_KEY_1",
+ "GEMINI_API_KEY", "GEMINI_API_KEY_1", "GOOGLE_API_KEY":
return true
}
return strings.HasPrefix(key, "PROVIDER_API_KEY")
diff --git a/cmd/claw/compose_up_test.go b/cmd/claw/compose_up_test.go
index 25506c9..303182c 100644
--- a/cmd/claw/compose_up_test.go
+++ b/cmd/claw/compose_up_test.go
@@ -2143,7 +2143,10 @@ func TestCollectProxyTypes(t *testing.T) {
func TestStripLLMKeys(t *testing.T) {
env := map[string]string{
"OPENAI_API_KEY": "sk-real",
+ "XAI_API_KEY": "sk-xai",
"ANTHROPIC_API_KEY": "sk-ant",
+ "GEMINI_API_KEY": "sk-gemini",
+ "GOOGLE_API_KEY": "sk-google",
"DISCORD_BOT_TOKEN": "keep",
"LOG_LEVEL": "info",
}
@@ -2151,9 +2154,18 @@ func TestStripLLMKeys(t *testing.T) {
if _, ok := env["OPENAI_API_KEY"]; ok {
t.Error("should strip OPENAI_API_KEY")
}
+ if _, ok := env["XAI_API_KEY"]; ok {
+ t.Error("should strip XAI_API_KEY")
+ }
if _, ok := env["ANTHROPIC_API_KEY"]; ok {
t.Error("should strip ANTHROPIC_API_KEY")
}
+ if _, ok := env["GEMINI_API_KEY"]; ok {
+ t.Error("should strip GEMINI_API_KEY")
+ }
+ if _, ok := env["GOOGLE_API_KEY"]; ok {
+ t.Error("should strip GOOGLE_API_KEY")
+ }
if env["DISCORD_BOT_TOKEN"] != "keep" {
t.Error("should keep non-LLM keys")
}
@@ -2167,10 +2179,15 @@ func TestIsProviderKey(t *testing.T) {
{"OPENAI_API_KEY", true},
{"OPENAI_API_KEY_1", true},
{"OPENAI_API_KEY_2", true},
+ {"XAI_API_KEY", true},
+ {"XAI_API_KEY_1", true},
{"ANTHROPIC_API_KEY", true},
{"ANTHROPIC_API_KEY_1", true},
{"OPENROUTER_API_KEY", true},
{"OPENROUTER_API_KEY_1", true},
+ {"GEMINI_API_KEY", true},
+ {"GEMINI_API_KEY_1", true},
+ {"GOOGLE_API_KEY", true},
{"PROVIDER_API_KEY_CUSTOM", true},
{"DISCORD_BOT_TOKEN", false},
{"LOG_LEVEL", false},
@@ -2295,6 +2312,116 @@ func TestMergeProviderSeedsWritesXAIProvider(t *testing.T) {
}
}
+func TestMergeProviderSeedsWritesGoogleProvider(t *testing.T) {
+ dir := t.TempDir()
+ p := &pod.Pod{
+ Services: map[string]*pod.Service{
+ "analyst": {
+ Claw: &pod.ClawBlock{
+ CllamaEnv: map[string]string{
+ "GEMINI_API_KEY": "gemini-primary",
+ "GOOGLE_API_KEY": "google-alias",
+ "GOOGLE_BASE_URL": "https://proxy.example.test/google",
+ },
+ },
+ },
+ },
+ }
+ if err := mergeProviderSeeds(dir, p); err != nil {
+ t.Fatalf("mergeProviderSeeds: %v", err)
+ }
+
+ data, err := os.ReadFile(filepath.Join(dir, "providers.json"))
+ if err != nil {
+ t.Fatalf("read providers.json: %v", err)
+ }
+
+ var probe struct {
+ Providers map[string]struct {
+ BaseURL string `json:"base_url"`
+ ActiveKeyID string `json:"active_key_id"`
+ Keys []struct {
+ ID string `json:"id"`
+ Secret string `json:"secret"`
+ } `json:"keys"`
+ } `json:"providers"`
+ }
+ if err := json.Unmarshal(data, &probe); err != nil {
+ t.Fatalf("parse providers.json: %v", err)
+ }
+
+ google, ok := probe.Providers["google"]
+ if !ok {
+ t.Fatal("google missing from output")
+ }
+ if google.BaseURL != "https://proxy.example.test/google" {
+ t.Fatalf("expected google base URL override, got %q", google.BaseURL)
+ }
+ if google.ActiveKeyID != "seed:GEMINI_API_KEY" {
+ t.Fatalf("expected google active key to prefer GEMINI_API_KEY, got %q", google.ActiveKeyID)
+ }
+ if len(google.Keys) != 2 {
+ t.Fatalf("expected 2 google keys, got %d", len(google.Keys))
+ }
+ if google.Keys[0].ID != "seed:GEMINI_API_KEY" || google.Keys[0].Secret != "gemini-primary" {
+ t.Fatalf("unexpected primary google key: %+v", google.Keys[0])
+ }
+ if google.Keys[1].ID != "seed:GOOGLE_API_KEY" || google.Keys[1].Secret != "google-alias" {
+ t.Fatalf("unexpected alias google key: %+v", google.Keys[1])
+ }
+}
+
+func TestMergeProviderSeedsUsesGoogleAliasWhenGeminiMissing(t *testing.T) {
+ dir := t.TempDir()
+ p := &pod.Pod{
+ Services: map[string]*pod.Service{
+ "analyst": {
+ Claw: &pod.ClawBlock{
+ CllamaEnv: map[string]string{
+ "GOOGLE_API_KEY": "google-alias",
+ },
+ },
+ },
+ },
+ }
+ if err := mergeProviderSeeds(dir, p); err != nil {
+ t.Fatalf("mergeProviderSeeds: %v", err)
+ }
+
+ data, err := os.ReadFile(filepath.Join(dir, "providers.json"))
+ if err != nil {
+ t.Fatalf("read providers.json: %v", err)
+ }
+
+ var probe struct {
+ Providers map[string]struct {
+ BaseURL string `json:"base_url"`
+ ActiveKeyID string `json:"active_key_id"`
+ Keys []struct {
+ ID string `json:"id"`
+ Secret string `json:"secret"`
+ } `json:"keys"`
+ } `json:"providers"`
+ }
+ if err := json.Unmarshal(data, &probe); err != nil {
+ t.Fatalf("parse providers.json: %v", err)
+ }
+
+ google, ok := probe.Providers["google"]
+ if !ok {
+ t.Fatal("google missing from output")
+ }
+ if google.BaseURL != "https://generativelanguage.googleapis.com/v1beta/openai" {
+ t.Fatalf("expected default google base URL, got %q", google.BaseURL)
+ }
+ if google.ActiveKeyID != "seed:GOOGLE_API_KEY" {
+ t.Fatalf("expected GOOGLE_API_KEY alias to become active key, got %q", google.ActiveKeyID)
+ }
+ if len(google.Keys) != 1 || google.Keys[0].ID != "seed:GOOGLE_API_KEY" || google.Keys[0].Secret != "google-alias" {
+ t.Fatalf("unexpected google alias seed output: %+v", google.Keys)
+ }
+}
+
func TestMergeProviderSeedsPreservesExistingRuntimeKeys(t *testing.T) {
dir := t.TempDir()
diff --git a/docs/plans/2026-04-06-gemini-provider-support.md b/docs/plans/2026-04-06-gemini-provider-support.md
new file mode 100644
index 0000000..920f3b5
--- /dev/null
+++ b/docs/plans/2026-04-06-gemini-provider-support.md
@@ -0,0 +1,160 @@
+# First-Class Google Gemini Provider Support
+
+**Date:** 2026-04-06
+**Status:** Draft
+**Issue:** #119
+**Execution plan:** `docs/plans/2026-04-08-119-gemini-provider-support.md`
+**Scope:** cllama provider registry, compose_up env seeding, cost tracking
+
+## Problem
+
+Clawdapus has no native `google` provider. Gemini models work today only through OpenRouter (`openrouter/google/gemini-2.5-flash`). Operators who want direct Gemini API access — for cost separation, latency, or independence from OpenRouter — have no supported path.
+
+Three code locations need the provider added:
+
+1. **cllama provider registry** (`cllama/internal/provider/provider.go`) — `knownProviders`, `envKeyMap`, `envBaseURLMap`, `defaultAuth`, `defaultAPIFormat`, `LoadFromEnv`
+2. **compose_up env seeding** (`cmd/claw/compose_up.go`) — `seedKeyDefs`, `isProviderKey`, base URL map
+3. **cllama cost tracking** (`cllama/internal/cost/pricing.go`) — already has OpenRouter Google pricing, needs direct `google` provider entries
+
+## Google Gemini API Compatibility
+
+Google provides an OpenAI-compatible endpoint at `https://generativelanguage.googleapis.com/v1beta/openai/`. This means:
+- Auth: `bearer` (standard `Authorization: Bearer <token>` header)
+- API format: `openai` (standard chat completions)
+- Model refs: `google/gemini-2.5-flash`, `google/gemini-2.5-pro`, etc.
+
+The cllama proxy's `splitModel` function already handles the `provider/model` split correctly. A request for `google/gemini-2.5-flash` would split to provider=`google`, upstream model=`gemini-2.5-flash`, and route to the Google base URL.
+
+## Implementation Steps
+
+### Step 1: cllama Provider Registry
+
+**File:** `cllama/internal/provider/provider.go`
+
+Add `google` to `knownProviders`:
+```go
+var knownProviders = map[string]string{
+ "openai": "https://api.openai.com/v1",
+ "xai": "https://api.x.ai/v1",
+ "anthropic": "https://api.anthropic.com/v1",
+ "openrouter": "https://openrouter.ai/api/v1",
+ "ollama": "http://ollama:11434/v1",
+ "google": "https://generativelanguage.googleapis.com/v1beta/openai",
+}
+```
+
+Add env key mappings to `envKeyMap`:
+```go
+"GEMINI_API_KEY": "google",
+"GEMINI_API_KEY_1": "google",
+"GOOGLE_API_KEY": "google",
+```
+
+Add base URL mapping to `envBaseURLMap`:
+```go
+"GOOGLE_BASE_URL": "google",
+```
+
+Add Google to `LoadFromEnv` key definitions:
+```go
+"google": {
+ {"GEMINI_API_KEY", "seed:GEMINI_API_KEY", "primary"},
+ {"GEMINI_API_KEY_1", "seed:GEMINI_API_KEY_1", "backup-1"},
+ {"GOOGLE_API_KEY", "seed:GOOGLE_API_KEY", "backup-2"},
+},
+```
+
+`GEMINI_API_KEY` takes priority over `GOOGLE_API_KEY` because it's more specific. `GOOGLE_API_KEY` is accepted as an alias since some tooling uses that name.
+
+Auth and API format defaults: `google` uses `bearer` auth and `openai` format — both are the defaults in `defaultAuth` and `defaultAPIFormat`, so no changes needed there.
+
+### Step 2: compose_up Env Seeding
+
+**File:** `cmd/claw/compose_up.go`
+
+Add to `seedKeyDefs`:
+```go
+{"GEMINI_API_KEY", "google", "seed:GEMINI_API_KEY", "primary"},
+{"GEMINI_API_KEY_1", "google", "seed:GEMINI_API_KEY_1", "backup-1"},
+{"GOOGLE_API_KEY", "google", "seed:GOOGLE_API_KEY", "backup-2"},
+```
+
+Add to `isProviderKey`:
+```go
+case "OPENAI_API_KEY", "OPENAI_API_KEY_1", "OPENAI_API_KEY_2",
+ "ANTHROPIC_API_KEY", "ANTHROPIC_API_KEY_1",
+ "OPENROUTER_API_KEY", "OPENROUTER_API_KEY_1",
+ "GEMINI_API_KEY", "GEMINI_API_KEY_1", "GOOGLE_API_KEY":
+ return true
+```
+
+Add to the base URL env map (around line 2748):
+```go
+"GOOGLE_BASE_URL": "google",
+```
+
+### Step 3: Cost Tracking
+
+**File:** `cllama/internal/cost/pricing.go`
+
+Add direct `google` provider pricing alongside the existing OpenRouter Google entries:
+
+```go
+"google": {
+ "gemini-2.5-pro": {InputPerMTok: 1.25, OutputPerMTok: 10.0},
+ "gemini-2.5-flash": {InputPerMTok: 0.15, OutputPerMTok: 0.60},
+},
+```
+
+These match the OpenRouter pass-through rates already in-tree (lines 72-73).
+
+### Step 4: Tests
+
+**File:** `cllama/internal/provider/provider_test.go`
+
+- Test `LoadFromEnv` picks up `GEMINI_API_KEY` and registers a `google` provider with correct base URL
+- Test `GOOGLE_API_KEY` fallback when `GEMINI_API_KEY` is absent
+- Test that `google` provider has `bearer` auth and `openai` api_format
+- Test `Get("google")` returns correct provider after env loading
+
+**File:** `cmd/claw/compose_up_test.go`
+
+- Test `isProviderKey` returns true for `GEMINI_API_KEY`, `GEMINI_API_KEY_1`, `GOOGLE_API_KEY`
+- Test that `seedKeyDefs` includes google entries (if tested directly)
+
+**File:** `cllama/internal/cost/pricing_test.go` (if exists)
+
+- Test direct `google/gemini-2.5-flash` pricing lookup returns expected rates
+
+### Step 5: Documentation
+
+- Update `site/guide/cllama.md` to list `google` as a supported provider
+- Add example showing direct Gemini configuration:
+
+```yaml
+x-claw:
+ cllama-defaults:
+ env:
+ GEMINI_API_KEY: "${GEMINI_API_KEY}"
+
+services:
+ analyst:
+ x-claw:
+ models:
+ primary: google/gemini-2.5-flash
+```
+
+- Update `AGENTS.md` gotchas to note that both `GEMINI_API_KEY` and `GOOGLE_API_KEY` are recognized
+
+## What This Does NOT Change
+
+- Proxy handler (`cllama/internal/proxy/handler.go`) — `splitModel` already handles `google/model` correctly
+- Model policy (`cllama/internal/proxy/modelpolicy.go`) — provider-agnostic, works with any provider prefix
+- Driver configs — all drivers use `shared.CollectProviders(rc.Models)` which extracts the provider from model refs; `google` will be collected automatically
+- OpenRouter routing — `openrouter/google/gemini-*` continues to work as before (routed to OpenRouter, not to Google directly)
+
+## Risks
+
+- **Gemini API compatibility**: Google's OpenAI-compatible endpoint is in `v1beta`. If the endpoint path changes, only `knownProviders["google"]` needs updating. The `GOOGLE_BASE_URL` env override provides an escape hatch.
+- **Key naming collision**: `GOOGLE_API_KEY` is a common env var name used by other Google services (Maps, Cloud, etc.). Making `GEMINI_API_KEY` the primary and `GOOGLE_API_KEY` a lower-priority alias mitigates accidental key leakage into the wrong service.
+- **cllama submodule boundary**: Steps 1, 3, and 4 (provider tests) touch the cllama submodule. This requires a commit inside `cllama/`, then updating the submodule pointer in the main repo.
diff --git a/docs/plans/2026-04-08-119-gemini-provider-support.md b/docs/plans/2026-04-08-119-gemini-provider-support.md
new file mode 100644
index 0000000..a37ec83
--- /dev/null
+++ b/docs/plans/2026-04-08-119-gemini-provider-support.md
@@ -0,0 +1,245 @@
+# Issue #119: First-Class Google Gemini Provider Support — Implementation Plan
+
+**Date:** 2026-04-08
+**Status:** Draft
+**Issue:** #119
+**Related design note:** `docs/plans/2026-04-06-gemini-provider-support.md`
+**Scope:** cllama provider registry, `claw up` provider seeding, pricing coverage, docs, release pinning
+
+## Goal
+
+Let operators declare direct Gemini model refs such as `google/gemini-2.5-flash`
+and seed native Google credentials through `x-claw.cllama-env`, without routing
+through OpenRouter.
+
+This is an implementation plan for the design in
+`2026-04-06-gemini-provider-support.md`. The earlier doc remains the design/why
+document; this one is the execution/how document.
+
+## Contract Decisions
+
+- `GEMINI_API_KEY` is the primary env var for native Gemini routing.
+- `GOOGLE_API_KEY` is accepted as a lower-priority alias.
+- `GOOGLE_BASE_URL` is accepted as an override for the default Google endpoint.
+- Direct Gemini uses Google’s OpenAI-compatible endpoint:
+ `https://generativelanguage.googleapis.com/v1beta/openai`
+- The provider name is `google`, so model refs are `google/<model>`.
+- Scope is compile-time/provider-seed support only. No runtime policy or
+ `model-restrict` work belongs in this issue.
+
+## Why The Two Drafts Needed Reconciliation
+
+The 2026-04-06 draft had the right feature shape and release concerns, but it
+was too high-level to execute directly.
+
+Claude’s 2026-04-08 draft was stronger on test-first sequencing and caught one
+real adjacent bug in `cmd/claw/compose_up.go`: `isProviderKey()` still omits
+`XAI_API_KEY` even though xAI is already seeded through `seedKeyDefs`. That
+means `stripLLMKeys()` can leak xAI keys into agent env. That fix belongs in
+the same patch because this issue touches the same provider-key hygiene surface.
+
+Claude’s draft was still missing two things the original design had right:
+
+- the canonical doc target is `AGENTS.md`, not `CLAUDE.md`
+- the work is not actually operator-shippable until a Gemini-capable cllama
+ image is published and the main repo pin is updated
+
+## Execution Plan
+
+### 1. Preflight
+
+- [ ] Confirm current code still matches the plan surfaces:
+ - `cllama/internal/provider/provider.go`
+ - `cllama/internal/cost/pricing.go`
+ - `cmd/claw/compose_up.go`
+ - `internal/driver/shared/model.go`
+- [ ] Verify `#119` is in `In Progress` on the project board before code work.
+- [ ] Check whether the `cllama/` submodule worktree is already dirty before
+ editing so we do not trample unrelated changes.
+
+### 2. cllama: add `google` provider support
+
+**Files**
+
+- `cllama/internal/provider/provider.go`
+- `cllama/internal/provider/provider_test.go`
+
+**Implementation**
+
+- [ ] Add `google` to `knownProviders` with the default base URL
+ `https://generativelanguage.googleapis.com/v1beta/openai`.
+- [ ] Extend `envKeyMap` with:
+ - `GEMINI_API_KEY`
+ - `GEMINI_API_KEY_1`
+ - `GOOGLE_API_KEY`
+- [ ] Extend `envBaseURLMap` with `GOOGLE_BASE_URL`.
+- [ ] Extend `LoadFromEnv()` seed definitions so `google` keys are loaded in
+ this order:
+ 1. `GEMINI_API_KEY`
+ 2. `GEMINI_API_KEY_1`
+ 3. `GOOGLE_API_KEY`
+- [ ] Preserve existing default auth/API behavior unless live code inspection
+ shows Google needs a provider-specific override. The expected outcome is still
+ `auth=bearer` and `api_format=openai`.
+
+**Tests**
+
+- [ ] `GEMINI_API_KEY` seeds `google`
+- [ ] `GOOGLE_API_KEY` works as fallback
+- [ ] `GEMINI_API_KEY` wins when both are set
+- [ ] `GOOGLE_BASE_URL` overrides the default base URL
+
+**Verification**
+
+- [ ] `cd cllama && go test ./internal/provider`
+
+### 3. cllama: add direct Google pricing coverage
+
+**Files**
+
+- `cllama/internal/cost/pricing.go`
+- `cllama/internal/cost/pricing_test.go`
+
+**Implementation**
+
+- [ ] Add direct `google` pricing entries for the Gemini models we already
+ support via OpenRouter pricing rows.
+- [ ] Verify the rates against the current source of truth at implementation
+ time instead of blindly copying stale numbers.
+
+**Tests**
+
+- [ ] Add direct `google/gemini-*` lookup coverage in `pricing_test.go`.
+
+**Verification**
+
+- [ ] `cd cllama && go test ./internal/cost`
+- [ ] `cd cllama && go test ./...`
+- [ ] `cd cllama && go vet ./...`
+
+**Commit boundary**
+
+- [ ] Commit the full cllama feature as one coherent submodule change, not as
+ multiple unrelated submodule commits.
+
+### 4. Main repo: compile Google seeds into `providers.json`
+
+**Files**
+
+- `cmd/claw/compose_up.go`
+- `cmd/claw/compose_up_test.go`
+
+**Implementation**
+
+- [ ] Extend `seedKeyDefs` with `google` entries:
+ - `GEMINI_API_KEY`
+ - `GEMINI_API_KEY_1`
+ - `GOOGLE_API_KEY`
+- [ ] Extend `defaultBaseURLs` with `google`.
+- [ ] Extend the local `baseURLEnvMap` in `mergeProviderSeeds()` with
+ `GOOGLE_BASE_URL`.
+- [ ] Extend `isProviderKey()` for the three Google key vars.
+- [ ] Also add the missing `XAI_API_KEY` and `XAI_API_KEY_1` cases to
+ `isProviderKey()` so `stripLLMKeys()` stops leaking xAI keys into agent env.
+
+**Tests**
+
+- [ ] Add/extend tests that prove `mergeProviderSeeds()` writes a `google`
+ provider into `providers.json`.
+- [ ] Extend `TestIsProviderKey` coverage for:
+ - `GEMINI_API_KEY`
+ - `GEMINI_API_KEY_1`
+ - `GOOGLE_API_KEY`
+ - `XAI_API_KEY`
+ - `XAI_API_KEY_1`
+- [ ] Make sure the provider-key stripping tests still pass after the xAI fix.
+
+**Verification**
+
+- [ ] `go test ./cmd/claw/... -run 'TestMergeProviderSeeds|TestIsProviderKey|TestStripLLMKeys'`
+- [ ] `go test ./cmd/claw/...`
+
+### 5. Docs and operator guidance
+
+**Files**
+
+- `site/guide/cllama.md`
+- `AGENTS.md`
+- `site/changelog.md`
+- optionally `cllama/README.md` if standalone proxy docs need parity
+
+**Implementation**
+
+- [ ] Update the cllama guide to list native Google support and show a direct
+ `google/gemini-*` example using `GEMINI_API_KEY`.
+- [ ] Document that `GOOGLE_API_KEY` is a lower-priority alias.
+- [ ] Update `AGENTS.md` with the current provider-seeding behavior so future
+ agents do not rediscover this by reading code.
+- [ ] Add a changelog entry before merge, since this is a user-visible feature
+ on `master`.
+
+### 6. Main repo integration and release pinning
+
+This is the missing step from Claude’s draft. Without it, the issue can merge as
+source code but still not be operator-usable through the four-verb release path.
+
+**Files / surfaces**
+
+- the `cllama/` submodule pointer in the main repo
+- `cmd/claw/image_lifecycle.go`
+- cllama image publication workflow / tag process
+
+**Implementation**
+
+- [ ] Update the root repo to the committed Gemini-capable `cllama/` SHA.
+- [ ] Publish a new versioned `ghcr.io/mostlydev/cllama:<tag>` image from that
+  submodule commit.
+- [ ] Bump the pinned cllama tag in `cmd/claw/image_lifecycle.go`.
+- [ ] If the issue is considered release-worthy immediately, make sure the next
+ release path uses the new pinned ref rather than the previous cllama image.
+
+**Why this is required**
+
+ADR-022 moved operators to pinned infra refs. If the published cllama image tag
+does not contain the Gemini support, `claw pull` and released `claw` binaries
+will not actually deliver the feature.
+
+### 7. End-to-end verification
+
+- [ ] `go test ./...`
+- [ ] `go vet ./...`
+- [ ] Verify generated `providers.json` includes a `google` provider when a pod
+ supplies `GEMINI_API_KEY` in `x-claw.cllama-env`.
+- [ ] If a real Gemini key is available, run one smoke path with a pod that
+ declares `models.primary: google/gemini-...` and confirm the request routes
+ without OpenRouter.
+- [ ] Move `#119` to `In review` only after code, docs, verification, submodule
+ pointer update, and published-image pinning are all complete.
+
+## Out Of Scope
+
+- runtime `model-restrict` or any live model retargeting
+- changes to `claw inspect`
+- OpenRouter behavior changes
+- driver-specific model handling refactors
+- prompt/policy changes in `cllama/internal/proxy/handler.go`
+
+## Acceptance Criteria
+
+- `google/<model>` refs are valid end-to-end through cllama and `claw up`
+- `GEMINI_API_KEY` works as the primary native Gemini credential
+- `GOOGLE_API_KEY` works as a lower-priority alias
+- `GOOGLE_BASE_URL` can override the default endpoint
+- direct `google` pricing lookups work in cllama cost tracking
+- `providers.json` generation in `claw up` emits `google` correctly
+- provider-key stripping no longer leaks xAI keys
+- docs reflect native Gemini support
+- a published cllama image and updated main-repo pin make the feature usable by
+ released Clawdapus binaries
+
+## Notes
+
+- `internal/driver/shared/model.go` is already generic enough for `google`
+ provider refs; no driver refactor should be necessary.
+- The `cllama` proxy routing path is already provider-prefix based. This issue
+ is primarily about registry seeding, base URL/defaults, and release wiring.
diff --git a/internal/infraimages/release_manifest.go b/internal/infraimages/release_manifest.go
index 04f5ac8..94df547 100644
--- a/internal/infraimages/release_manifest.go
+++ b/internal/infraimages/release_manifest.go
@@ -7,7 +7,7 @@ const (
DefaultClawAPITag = DefaultClawInfraTag
DefaultClawdashTag = DefaultClawInfraTag
DefaultClawWallTag = DefaultClawInfraTag
- DefaultCllamaTag = "v0.3.2"
+ DefaultCllamaTag = "v0.3.3"
DefaultHermesBaseTag = "v2026.3.17"
)
diff --git a/site/changelog.md b/site/changelog.md
index a30edba..2572751 100644
--- a/site/changelog.md
+++ b/site/changelog.md
@@ -29,7 +29,7 @@ outline: deep
## Unreleased
-
+- **Native Gemini provider support** ([#119](https://github.com/mostlydev/clawdapus/issues/119)) — cllama now supports direct `google/` routing through Google's OpenAI-compatible endpoint. `GEMINI_API_KEY` is the primary seed env var, `GOOGLE_API_KEY` is accepted as a lower-priority alias, `claw up` compiles Google keys into `providers.json`, and the provider-key strip path now also removes xAI/Gemini secrets from agent envs.
## v0.8.0 {#v0-8-0}
diff --git a/site/guide/cllama.md b/site/guide/cllama.md
index 9055872..c8f4c6a 100644
--- a/site/guide/cllama.md
+++ b/site/guide/cllama.md
@@ -20,7 +20,7 @@ A single proxy instance serves the entire pod. Bearer tokens resolve which agent
Isolation is achieved by strictly separating secrets:
-- **The proxy holds the real API keys.** Provider credentials (OpenRouter, Anthropic, OpenAI) are configured in the pod-level `cllama-defaults.env` block and never enter agent containers.
+- **The proxy holds the real API keys.** Provider credentials (OpenRouter, Anthropic, OpenAI, Gemini/Google) are configured in the pod-level `cllama-defaults.env` block and never enter agent containers.
- **Agents get unique bearer tokens.** Each agent (and each ordinal of a scaled agent) receives a unique token generated during `claw up`.
- **No credentials, no bypass.** Because agents lack the credentials to call providers directly, all successful inference *must* pass through the proxy -- even if a malicious prompt tricks the agent into ignoring its configured base URL.
@@ -173,7 +173,7 @@ The cllama container receives its configuration through environment variables in
|---|---|
| `CLAW_POD` | The name of the pod (e.g., `crypto-ops`). |
| `CLAW_CONTEXT_ROOT` | Path to the shared context mount root (defaults to `/claw/context`). |
-| `PROVIDER_API_KEY_*` | Real provider API keys -- `OPENAI_API_KEY`, `ANTHROPIC_API_KEY`, `OPENROUTER_API_KEY`, etc. |
+| `PROVIDER_API_KEY_*` | Real provider API keys -- `OPENAI_API_KEY`, `ANTHROPIC_API_KEY`, `OPENROUTER_API_KEY`, `GEMINI_API_KEY` / `GOOGLE_API_KEY`, etc. |
### Where Provider Keys Go
@@ -187,8 +187,14 @@ x-claw:
env:
OPENROUTER_API_KEY: "${OPENROUTER_API_KEY}"
ANTHROPIC_API_KEY: "${ANTHROPIC_API_KEY}"
+ GEMINI_API_KEY: "${GEMINI_API_KEY}"
```
+For native Gemini routing, declare models as `google/<model>`. `GEMINI_API_KEY`
+is the primary env name; `GOOGLE_API_KEY` is accepted as a lower-priority
+alias. `GOOGLE_BASE_URL` can override the default OpenAI-compatible Google
+endpoint when needed.
+
::: warning cllama-env, Not environment
Provider API keys belong in `x-claw.cllama-defaults.env` (or service-level `x-claw.cllama-env`), never in the service's compose `environment:` block. Putting real keys in `environment:` defeats credential starvation -- the agent container would have direct provider access.
:::
@@ -234,6 +240,28 @@ services:
cllama-env: *cllama-keys
```
+### Native Gemini Routing
+
+Direct Gemini works through Google's OpenAI-compatible endpoint. Use the
+`google/` provider prefix and seed the key through `x-claw.cllama-env`.
+
+```yaml
+services:
+ analyst:
+ x-claw:
+ agent: analyst
+ cllama: passthrough
+ models:
+ primary: google/gemini-2.5-flash
+ cllama-env:
+ GEMINI_API_KEY: ${GEMINI_API_KEY}
+ # optional override for proxies or alternate endpoints
+ GOOGLE_BASE_URL: ${GOOGLE_BASE_URL}
+```
+
+If both `GEMINI_API_KEY` and `GOOGLE_API_KEY` are present, cllama prefers
+`GEMINI_API_KEY` as the active seed key.
+
### Count Expansion with cllama
When a service declares both `cllama` and `count > 1`, each ordinal gets its own bearer token and context directory. The proxy authenticates each ordinal independently: