Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
16 changes: 15 additions & 1 deletion README.md
Original file line number Diff line number Diff line change
Expand Up @@ -147,6 +147,20 @@ process_types = ["channel", "branch"]
"anthropic/claude-sonnet-4" = ["anthropic/claude-haiku-4.5"]
```

**Z.ai (GLM) example** — use GLM models directly with a [GLM Coding Plan](https://z.ai) subscription:

```toml
[llm]
zhipu_key = "env:ZHIPU_API_KEY"

[defaults.routing]
channel = "zhipu/glm-4.7"
worker = "zhipu/glm-4.7"

[defaults.routing.task_overrides]
coding = "zhipu/glm-4.7"
```

### Skills

Extensible skill system for domain-specific behavior:
Expand Down Expand Up @@ -297,7 +311,7 @@ Read the full vision in [docs/spacedrive.md](docs/spacedrive.md).
### Prerequisites

- **Rust** 1.85+ ([rustup](https://rustup.rs/))
- An LLM API key (OpenRouter, Anthropic, OpenAI, etc.)
- An LLM API key (OpenRouter, Anthropic, OpenAI, Z.ai, etc.)

### Build and Run

Expand Down
23 changes: 17 additions & 6 deletions src/config.rs
Original file line number Diff line number Diff line change
Expand Up @@ -55,12 +55,13 @@ pub struct LlmConfig {
pub anthropic_key: Option<String>,
pub openai_key: Option<String>,
pub openrouter_key: Option<String>,
pub zhipu_key: Option<String>,
}

impl LlmConfig {
/// Check if any provider key is configured.
pub fn has_any_key(&self) -> bool {
self.anthropic_key.is_some() || self.openai_key.is_some() || self.openrouter_key.is_some()
self.anthropic_key.is_some() || self.openai_key.is_some() || self.openrouter_key.is_some() || self.zhipu_key.is_some()
}
}

Expand Down Expand Up @@ -851,6 +852,7 @@ struct TomlLlmConfig {
anthropic_key: Option<String>,
openai_key: Option<String>,
openrouter_key: Option<String>,
zhipu_key: Option<String>,
}

#[derive(Deserialize, Default)]
Expand Down Expand Up @@ -882,8 +884,7 @@ struct TomlRoutingConfig {
rate_limit_cooldown_secs: Option<u64>,
#[serde(default)]
task_overrides: HashMap<String, String>,
#[serde(default)]
fallbacks: HashMap<String, Vec<String>>,
fallbacks: Option<HashMap<String, Vec<String>>>,
}

#[derive(Deserialize)]
Expand Down Expand Up @@ -1079,8 +1080,10 @@ fn resolve_routing(toml: Option<TomlRoutingConfig>, base: &RoutingConfig) -> Rou
let mut task_overrides = base.task_overrides.clone();
task_overrides.extend(t.task_overrides);

let mut fallbacks = base.fallbacks.clone();
fallbacks.extend(t.fallbacks);
let fallbacks = match t.fallbacks {
Some(f) => f,
None => base.fallbacks.clone(),
};

RoutingConfig {
channel: t.channel.unwrap_or_else(|| base.channel.clone()),
Expand Down Expand Up @@ -1155,6 +1158,7 @@ impl Config {
anthropic_key: std::env::var("ANTHROPIC_API_KEY").ok(),
openai_key: std::env::var("OPENAI_API_KEY").ok(),
openrouter_key: std::env::var("OPENROUTER_API_KEY").ok(),
zhipu_key: std::env::var("ZHIPU_API_KEY").ok(),
};

// Note: We allow boot without provider keys now. System starts in setup mode.
Expand Down Expand Up @@ -1220,6 +1224,12 @@ impl Config {
.as_deref()
.and_then(resolve_env_value)
.or_else(|| std::env::var("OPENROUTER_API_KEY").ok()),
zhipu_key: toml
.llm
.zhipu_key
.as_deref()
.and_then(resolve_env_value)
.or_else(|| std::env::var("ZHIPU_API_KEY").ok()),
};

// Note: We allow boot without provider keys now. System starts in setup mode.
Expand Down Expand Up @@ -2010,7 +2020,7 @@ pub fn run_onboarding() -> anyhow::Result<PathBuf> {
println!();

// 1. Pick a provider
let providers = &["Anthropic", "OpenRouter", "OpenAI"];
let providers = &["Anthropic", "OpenRouter", "OpenAI", "Z.ai (GLM)"];
let provider_idx = Select::new()
.with_prompt("Which LLM provider do you want to use?")
.items(providers)
Expand All @@ -2021,6 +2031,7 @@ pub fn run_onboarding() -> anyhow::Result<PathBuf> {
0 => ("Anthropic API key", "anthropic_key"),
1 => ("OpenRouter API key", "openrouter_key"),
2 => ("OpenAI API key", "openai_key"),
3 => ("Z.ai (GLM) API key", "zhipu_key"),
_ => unreachable!(),
};

Expand Down
2 changes: 2 additions & 0 deletions src/llm/manager.rs
Original file line number Diff line number Diff line change
Expand Up @@ -44,6 +44,8 @@ impl LlmManager {
.ok_or_else(|| LlmError::MissingProviderKey("openai".into()).into()),
"openrouter" => self.config.openrouter_key.clone()
.ok_or_else(|| LlmError::MissingProviderKey("openrouter".into()).into()),
"zhipu" => self.config.zhipu_key.clone()
.ok_or_else(|| LlmError::MissingProviderKey("zhipu".into()).into()),
_ => Err(LlmError::UnknownProvider(provider.into()).into()),
}
}
Expand Down
86 changes: 86 additions & 0 deletions src/llm/model.rs
Original file line number Diff line number Diff line change
Expand Up @@ -69,6 +69,7 @@ impl SpacebotModel {
"anthropic" => self.call_anthropic(request).await,
"openai" => self.call_openai(request).await,
"openrouter" => self.call_openrouter(request).await,
"zhipu" => self.call_zhipu(request).await,
other => Err(CompletionError::ProviderError(format!(
"unknown provider: {other}"
))),
Expand Down Expand Up @@ -510,6 +511,91 @@ impl SpacebotModel {
// OpenRouter returns OpenAI-format responses
parse_openai_response(response_body, "OpenRouter")
}

/// Send a chat-completion request to the Z.ai (Zhipu) GLM endpoint.
///
/// Builds an OpenAI-compatible request body (Z.ai speaks the OpenAI wire
/// format), POSTs it with bearer authentication, and delegates response
/// parsing to the shared OpenAI-format parser.
///
/// # Errors
/// Returns `CompletionError::ProviderError` when the API key is missing,
/// the HTTP call fails, the body is not valid JSON, or the API reports a
/// non-success status.
async fn call_zhipu(
    &self,
    request: CompletionRequest,
) -> Result<completion::CompletionResponse<RawResponse>, CompletionError> {
    // Resolve the provider key up front so a missing key fails fast.
    let api_key = self
        .llm_manager
        .get_api_key("zhipu")
        .map_err(|err| CompletionError::ProviderError(err.to_string()))?;

    // Message list: optional system preamble first, then the chat history
    // converted into OpenAI-style message objects.
    let mut messages = match &request.preamble {
        Some(preamble) => vec![serde_json::json!({
            "role": "system",
            "content": preamble,
        })],
        None => Vec::new(),
    };
    messages.extend(convert_messages_to_openai(&request.chat_history));

    let mut body = serde_json::json!({
        "model": self.model_name,
        "messages": messages,
    });

    // Sampling parameters are only attached when the caller set them, so
    // the provider's own defaults apply otherwise.
    if let Some(max_tokens) = request.max_tokens {
        body["max_tokens"] = serde_json::json!(max_tokens);
    }
    if let Some(temperature) = request.temperature {
        body["temperature"] = serde_json::json!(temperature);
    }

    // Advertise tools using the OpenAI "function" wrapper when any exist.
    if !request.tools.is_empty() {
        let tool_specs = request
            .tools
            .iter()
            .map(|tool| {
                serde_json::json!({
                    "type": "function",
                    "function": {
                        "name": tool.name,
                        "description": tool.description,
                        "parameters": tool.parameters,
                    }
                })
            })
            .collect::<Vec<serde_json::Value>>();
        body["tools"] = serde_json::json!(tool_specs);
    }

    let http_response = self
        .llm_manager
        .http_client()
        .post("https://api.z.ai/api/paas/v4/chat/completions")
        .header("authorization", format!("Bearer {api_key}"))
        .header("content-type", "application/json")
        .json(&body)
        .send()
        .await
        .map_err(|err| CompletionError::ProviderError(err.to_string()))?;

    // Capture the status before consuming the response body.
    let status = http_response.status();
    let raw_text = http_response
        .text()
        .await
        .map_err(|err| CompletionError::ProviderError(format!("failed to read response body: {err}")))?;

    // Parse the body regardless of status: even error responses are
    // expected to be JSON, and a non-JSON body is its own failure mode.
    let parsed: serde_json::Value = serde_json::from_str(&raw_text).map_err(|err| {
        CompletionError::ProviderError(format!(
            "Z.ai response ({status}) is not valid JSON: {err}\nBody: {}", truncate_body(&raw_text)
        ))
    })?;

    if !status.is_success() {
        // Surface the provider's own message when present.
        let detail = parsed["error"]["message"].as_str().unwrap_or("unknown error");
        return Err(CompletionError::ProviderError(format!(
            "Z.ai API error ({status}): {detail}"
        )));
    }

    // Z.ai responses follow the OpenAI schema, so the shared parser applies.
    parse_openai_response(parsed, "Z.ai")
}
}

// --- Helpers ---
Expand Down