cortex/cli.py (115 additions, 0 deletions)
@@ -180,6 +180,114 @@ def demo(self):
        """
        return run_demo()

    def config(self) -> int:
        """Interactive setup: detect hardware, pick a provider, save config."""
        from cortex.hardware_detection import detect_hardware

        print("\n🧠 CORTEX INTERACTIVE SETUP")
        print("=" * 32)

        # 1️⃣ Detect hardware first
        print("\n🔍 Detecting hardware...")
        hw = detect_hardware()

        gpu_info = None
        if getattr(hw, "gpu", None):
            print(f"✔ GPU detected: {hw.gpu}")
            gpu_info = str(hw.gpu)
            has_gpu = True
        else:
            print("⚠️ No GPU detected (CPU mode)")
            has_gpu = False

        cpu_model = getattr(hw.cpu, "model", None) if hw.cpu else None
        if cpu_model:
            print(f"✔ CPU: {cpu_model}")
        else:
            print("✔ CPU detected")

        # RAM (safe detection, same logic as demo)
        ram_gb = None
        if hasattr(hw, "memory") and hw.memory:
            ram_gb = getattr(hw.memory, "total_gb", None)
            if ram_gb:
                print(f"✔ RAM: {ram_gb} GB")
            else:
                print("✔ RAM detected")
        else:
            print("✔ RAM: Unknown")

        # 2️⃣ Provider selection
        print("\n🤖 Select default LLM provider:\n")

        print("[1] Anthropic Claude (cloud)")
        print("[2] OpenAI GPT (cloud)")
        if has_gpu:
            print("[3] Ollama (local) - recommended for your hardware")
        else:
            print("[3] Ollama (local)")

        choice = input("\nChoice (1/2/3): ").strip()

        provider_map = {
            "1": "anthropic",
            "2": "openai",
            "3": "ollama",
        }

        provider = provider_map.get(choice)

        if not provider:
            print("❌ Invalid choice. Please re-run `cortex config`.")
            return 1

        print(f"\n✔ Selected provider: {provider}\n")

        # 3️⃣ API key configuration (if required)
        api_key = None

        if provider in ("anthropic", "openai"):
            env_var = "ANTHROPIC_API_KEY" if provider == "anthropic" else "OPENAI_API_KEY"
            print(f"🔑 Enter your {env_var}:")

            api_key = input("> ").strip()

            # Very light validation
            if len(api_key) < 10:
                print("❌ API key looks invalid. Please re-run `cortex config`.")
                return 1

            print("✔ API key accepted\n")
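            # NOTE: the key is only validated here; this command does not
            # persist it or write it to config.yaml below.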
        else:
            print("ℹ️ Ollama selected — no API key required\n")

        print("Setup step complete.\n")
        # 4️⃣ Save configuration to ~/.cortex/config.yaml
        from pathlib import Path

        import yaml

        config_dir = Path.home() / ".cortex"
        config_dir.mkdir(exist_ok=True)

        config_path = config_dir / "config.yaml"

        config_data = {
            "provider": provider,
            "hardware": {
                "gpu": gpu_info,
                "cpu": cpu_model or str(getattr(hw, "cpu", None)),
                # Reuse the RAM value detected above (hw.memory.total_gb)
                # instead of a separate attribute lookup.
                "memory_gb": ram_gb,
            },
            "preferences": {
                "verbose": False,
                "dry_run_default": False,
            },
        }

        with open(config_path, "w") as f:
            yaml.safe_dump(config_data, f)

        print(f"💾 Configuration saved to {config_path}")
        return 0

    def stack(self, args: argparse.Namespace) -> int:
        """Handle `cortex stack` commands (list/describe/install/dry-run)."""
        try:
@@ -824,6 +932,11 @@ def main():
    # Wizard command
    wizard_parser = subparsers.add_parser("wizard", help="Configure API key interactively")

    # Config command
    config_parser = subparsers.add_parser(
        "config", help="Interactive setup wizard for Cortex configuration"
    )

    # Status command
    status_parser = subparsers.add_parser("status", help="Show system status")

@@ -910,6 +1023,8 @@ def main():
        return cli.demo()
    elif args.command == "wizard":
        return cli.wizard()
    elif args.command == "config":
        return cli.config()
    elif args.command == "status":
        return cli.status()
    elif args.command == "install":
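
For context, a minimal sketch of how the file written by `cortex config` could be read back elsewhere in the codebase; the helper name and the Ollama fallback are illustrative and not part of this diff:

from pathlib import Path

import yaml


def load_cortex_config() -> dict:
    """Illustrative helper: read the config written by `cortex config`."""
    config_path = Path.home() / ".cortex" / "config.yaml"
    if not config_path.exists():
        return {}
    with open(config_path) as f:
        # Expected shape: {"provider": ..., "hardware": {...}, "preferences": {...}}
        return yaml.safe_load(f) or {}


# Example: fall back to Ollama if no config has been written yet.
provider = load_cortex_config().get("provider", "ollama")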