Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion agent_core/core/impl/llm/interface.py
Original file line number Diff line number Diff line change
Expand Up @@ -331,7 +331,7 @@ def _generate_response_sync(
if log_response:
logger.info(f"[LLM SEND] system={system_prompt} | user={user_prompt}")

if self.provider == "openai":
if self.provider in ("openai", "minimax", "deepseek", "moonshot"):
response = self._generate_openai(system_prompt, user_prompt)
elif self.provider == "remote":
response = self._generate_ollama(system_prompt, user_prompt)
Expand Down
5 changes: 4 additions & 1 deletion agent_core/core/impl/settings/manager.py
Original file line number Diff line number Diff line change
Expand Up @@ -38,7 +38,10 @@
"openai": "",
"anthropic": "",
"google": "",
"byteplus": ""
"byteplus": "",
"minimax": "",
"deepseek": "",
"moonshot": ""
},
"endpoints": {
"remote_model_url": "",
Expand Down
2 changes: 1 addition & 1 deletion agent_core/core/impl/vlm/interface.py
Original file line number Diff line number Diff line change
Expand Up @@ -227,7 +227,7 @@ def describe_image_bytes(
if log_response:
logger.info(f"[LLM SEND] system={system_prompt} | user={user_prompt}")

if self.provider == "openai":
if self.provider in ("openai", "minimax", "deepseek", "moonshot"):
response = self._openai_describe_bytes(image_bytes, system_prompt, user_prompt)
elif self.provider == "remote":
response = self._ollama_describe_bytes(image_bytes, system_prompt, user_prompt)
Expand Down
37 changes: 37 additions & 0 deletions agent_core/core/models/connection_tester.py
Original file line number Diff line number Diff line change
Expand Up @@ -51,6 +51,9 @@ def test_provider_connection(
elif provider == "remote":
url = base_url or cfg.default_base_url
return _test_remote(url, timeout)
elif provider in ("minimax", "deepseek", "moonshot"):
url = cfg.default_base_url
return _test_openai_compat(provider, api_key, url, timeout)
else:
return {
"success": False,
Expand Down Expand Up @@ -348,3 +351,37 @@ def _test_remote(base_url: Optional[str], timeout: float) -> Dict[str, Any]:
"provider": "remote",
"error": f"Could not connect to {url}: {str(e)}",
}


def _test_openai_compat(
provider: str, api_key: Optional[str], base_url: str, timeout: float
) -> Dict[str, Any]:
"""Test an OpenAI-compatible API (MiniMax, DeepSeek, Moonshot)."""
names = {"minimax": "MiniMax", "deepseek": "DeepSeek", "moonshot": "Moonshot"}
display = names.get(provider, provider)

if not api_key:
return {
"success": False,
"message": f"API key is required for {display}",
"provider": provider,
"error": "Missing API key",
}

try:
with httpx.Client(timeout=timeout) as client:
response = client.get(
f"{base_url.rstrip('/')}/models",
headers={"Authorization": f"Bearer {api_key}"},
)

if response.status_code == 200:
return {"success": True, "message": f"Successfully connected to {display} API", "provider": provider}
elif response.status_code == 401:
return {"success": False, "message": "Invalid API key", "provider": provider, "error": "Authentication failed - check your API key"}
else:
return {"success": False, "message": f"API returned status {response.status_code}", "provider": provider, "error": response.text[:200] if response.text else "Unknown error"}
except httpx.TimeoutException:
return {"success": False, "message": "Connection timed out", "provider": provider, "error": "Request timed out - check your network connection"}
except httpx.RequestError as e:
return {"success": False, "message": "Network error", "provider": provider, "error": str(e)}
20 changes: 20 additions & 0 deletions agent_core/core/models/factory.py
Original file line number Diff line number Diff line change
Expand Up @@ -38,6 +38,9 @@ def create(
Returns:
Dictionary with provider context including client instances
"""
# OpenAI-compatible providers that use OpenAI client with a custom base_url
_OPENAI_COMPAT = {"minimax", "deepseek", "moonshot"}

if provider not in PROVIDER_CONFIG:
raise ValueError(f"Unsupported provider: {provider}")

Expand Down Expand Up @@ -144,4 +147,21 @@ def create(
"initialized": True,
}

if provider in _OPENAI_COMPAT:
if not api_key:
if deferred:
return empty_context
raise ValueError(f"API key required for {provider}")

return {
"provider": provider,
"model": model,
"client": OpenAI(api_key=api_key, base_url=resolved_base_url),
"gemini_client": None,
"remote_url": None,
"byteplus": None,
"anthropic_client": None,
"initialized": True,
}

raise RuntimeError("Unreachable")
15 changes: 15 additions & 0 deletions agent_core/core/models/model_registry.py
Original file line number Diff line number Diff line change
Expand Up @@ -29,4 +29,19 @@
InterfaceType.VLM: "llava-v1.6",
InterfaceType.EMBEDDING: "nomic-embed-text",
},
"minimax": {
InterfaceType.LLM: "MiniMax-Text-01",
InterfaceType.VLM: None,
InterfaceType.EMBEDDING: None,
},
"deepseek": {
InterfaceType.LLM: "deepseek-chat",
InterfaceType.VLM: "deepseek-chat",
InterfaceType.EMBEDDING: None,
},
"moonshot": {
InterfaceType.LLM: "moonshot-v1-8k",
InterfaceType.VLM: None,
InterfaceType.EMBEDDING: None,
},
}
12 changes: 12 additions & 0 deletions agent_core/core/models/provider_config.py
Original file line number Diff line number Diff line change
Expand Up @@ -25,4 +25,16 @@ class ProviderConfig:
base_url_env="REMOTE_MODEL_URL",
default_base_url="http://localhost:11434",
),
"minimax": ProviderConfig(
api_key_env="MINIMAX_API_KEY",
default_base_url="https://api.minimax.chat/v1",
),
"deepseek": ProviderConfig(
api_key_env="DEEPSEEK_API_KEY",
default_base_url="https://api.deepseek.com",
),
"moonshot": ProviderConfig(
api_key_env="MOONSHOT_API_KEY",
default_base_url="https://api.moonshot.cn/v1",
),
}
7 changes: 4 additions & 3 deletions app/config/settings.json
Original file line number Diff line number Diff line change
Expand Up @@ -9,8 +9,8 @@
"enabled": true
},
"model": {
"llm_provider": "gemini",
"vlm_provider": "gemini",
"llm_provider": "remote",
"vlm_provider": "remote",
"llm_model": null,
"vlm_model": null
},
Expand All @@ -24,7 +24,8 @@
"remote_model_url": "",
"byteplus_base_url": "https://ark.ap-southeast.bytepluses.com/api/v3",
"google_api_base": "",
"google_api_version": ""
"google_api_version": "",
"remote": "http://localhost:11434"
},
"gui": {
"enabled": true,
Expand Down
30 changes: 25 additions & 5 deletions app/onboarding/interfaces/steps.py
Original file line number Diff line number Diff line change
Expand Up @@ -127,11 +127,9 @@ def get_default(self) -> str:


class ApiKeyStep:
"""API key input step."""
"""API key input step — or Ollama connection setup for the remote provider."""

name = "api_key"
title = "Enter API Key"
description = "Enter your API key for the selected provider."
required = True

# Maps provider to environment variable name
Expand All @@ -140,19 +138,39 @@ class ApiKeyStep:
"gemini": "GOOGLE_API_KEY",
"byteplus": "BYTEPLUS_API_KEY",
"anthropic": "ANTHROPIC_API_KEY",
"remote": None, # Ollama doesn't need API key
"remote": None, # Ollama uses a base URL, not an API key
}

def __init__(self, provider: str = "openai"):
    """Remember which provider this step collects credentials for."""
    self.provider = provider

@property
def title(self) -> str:
    """Heading for this onboarding step, tailored per provider."""
    return "Connect Ollama" if self.provider == "remote" else "Enter API Key"

@property
def description(self) -> str:
    """Help text for this onboarding step, tailored per provider."""
    if self.provider != "remote":
        return "Enter your API key for the selected provider."
    return (
        "Connect to your local Ollama instance.\n"
        "If Ollama isn't installed yet, we'll help you set it up."
    )

def get_options(self) -> List[StepOption]:
    """Return no selectable options: this step takes free-form text input."""
    # Free-form input, no options
    return []

def validate(self, value: Any) -> tuple[bool, Optional[str]]:
# Remote (Ollama) doesn't need API key
if self.provider == "remote":
# Value is the Ollama base URL
if not value or not isinstance(value, str):
return True, None # Empty = use default URL
v = value.strip()
if not (v.startswith("http://") or v.startswith("https://")):
return False, "Please enter a valid URL (e.g. http://localhost:11434)"
return True, None

if not value or not isinstance(value, str):
Expand All @@ -164,6 +182,8 @@ def validate(self, value: Any) -> tuple[bool, Optional[str]]:
return True, None

def get_default(self) -> str:
    """Return the prefill value for this step.

    For the remote (Ollama) provider this is the default local server URL;
    for every other provider it is whatever API key is already stored in
    settings.json (may be an empty string).
    """
    if self.provider == "remote":
        return "http://localhost:11434"
    # Check settings.json for existing key
    from app.config import get_api_key
    return get_api_key(self.provider)
Expand Down
35 changes: 35 additions & 0 deletions app/tui/settings.py
Original file line number Diff line number Diff line change
Expand Up @@ -100,6 +100,41 @@ def save_settings_to_json(provider: str, api_key: str) -> bool:
save_settings_to_env = save_settings_to_json


def save_remote_endpoint(url: str) -> bool:
    """Persist the Ollama (remote) base URL to settings.json.

    Besides storing the endpoint, this also switches both the LLM and VLM
    providers to "remote" and reloads the in-memory settings so the change
    takes effect immediately.

    Args:
        url: The base URL for the Ollama server (e.g. http://localhost:11434)

    Returns:
        True if saved successfully, False otherwise
    """
    try:
        settings = _load_settings()

        # Selecting an Ollama endpoint implies using the remote provider
        # for both interfaces.
        model_cfg = settings.setdefault("model", {})
        model_cfg["llm_provider"] = "remote"
        model_cfg["vlm_provider"] = "remote"

        settings.setdefault("endpoints", {})["remote"] = url

        if not _save_settings(settings):
            return False

        # Refresh the cached settings so callers see the new endpoint at once.
        from app.config import reload_settings
        reload_settings()

        logger.info(f"[SETTINGS] Saved remote endpoint={url} to settings.json")
        return True

    except Exception as e:
        logger.error(f"[SETTINGS] Failed to save remote endpoint: {e}")
        return False


def get_api_key_env_name(provider: str) -> Optional[str]:
"""Get the environment variable name for a provider's API key."""
if provider not in PROVIDER_CONFIG:
Expand Down
Loading