diff --git a/README.md b/README.md
index 4a5ad7e8b87..7b3395da41d 100644
--- a/README.md
+++ b/README.md
@@ -27,7 +27,7 @@ cog.out(text)
+src="https://img.shields.io/badge/📦%20Installs-2.4M-2ecc71?style=flat-square&labelColor=555555"/>
0:
- final_reminders.append(self.gpt_prompts.tool_prompt)
+ if self.mcp_profile_manager:
+ tool_prompt_from_mcp = self.mcp_profile_manager.get_tool_prompt_if_active()
+ if tool_prompt_from_mcp:
+ final_reminders.append(tool_prompt_from_mcp)
final_reminders = "\n\n".join(final_reminders)
@@ -1649,163 +1654,179 @@ def process_tool_calls(self, tool_call_response):
if tool_call_response is None:
return False
- tool_calls = tool_call_response.choices[0].message.tool_calls
+ llm_tool_calls = tool_call_response.choices[0].message.tool_calls
+ if not llm_tool_calls:
+ return False
+
# Collect all tool calls grouped by server
- server_tool_calls = self._gather_server_tool_calls(tool_calls)
+ # The keys of server_tool_calls are McpServer instances
+ server_tool_calls_map = self._gather_server_tool_calls(llm_tool_calls)
+
+ if server_tool_calls_map and self.num_tool_calls < self.max_tool_calls:
+ self._print_tool_call_info(server_tool_calls_map)
+
+ approved_server_tool_calls_map: Dict[McpServer, List[Any]] = {}
+ any_tool_cancelled = False
+
+ for mcp_server_instance, tool_calls_for_server in server_tool_calls_map.items():
+ server_name = mcp_server_instance.name
+ server_no_confirm = False # Default to needing confirmation
+
+ if self.mcp_profile_manager:
+ server_no_confirm = self.mcp_profile_manager.is_server_no_confirm(server_name)
- if server_tool_calls and self.num_tool_calls < self.max_tool_calls:
- self._print_tool_call_info(server_tool_calls)
+ proceed_for_this_server = False
+ if server_no_confirm:
+ proceed_for_this_server = True
+ else:
+ if self.io.confirm_ask(f"Run tools for server '{server_name}'?"):
+ proceed_for_this_server = True
+ else:
+ self.io.tool_output(f"Tool execution for server '{server_name}' cancelled by user.")
+ any_tool_cancelled = True
+
+ if proceed_for_this_server:
+ approved_server_tool_calls_map[mcp_server_instance] = tool_calls_for_server
- if self.io.confirm_ask("Run tools?"):
- tool_responses = self._execute_tool_calls(server_tool_calls)
+ if approved_server_tool_calls_map:
+ tool_responses = self._execute_tool_calls(approved_server_tool_calls_map)
- # Add the assistant message with tool calls
- # Converting to a dict so it can be safely dumped to json
+ # Add the assistant message with ALL original tool calls
+ # (even if some were cancelled, the LLM requested them)
self.cur_messages.append(tool_call_response.choices[0].message.to_dict())
- # Add all tool responses
+ # Add responses only for the tools that were actually executed
for tool_response in tool_responses:
self.cur_messages.append(tool_response)
+
+ return True # Indicates that some tools were processed or attempted
+ elif any_tool_cancelled:
+ # All tools were cancelled by the user, but we should still record the LLM's attempt
+ self.cur_messages.append(tool_call_response.choices[0].message.to_dict())
+ # Add a generic response indicating cancellation if needed, or just return False
+ # For now, returning False implies no *successful* tool execution path was taken.
+ return False
+ else:
+ # No tools to execute (e.g., map was empty initially, or all were filtered out for other reasons)
+ return False
+
- return True
elif self.num_tool_calls >= self.max_tool_calls:
self.io.tool_warning(f"Only {self.max_tool_calls} tool calls allowed, stopping.")
return False
- def _print_tool_call_info(self, server_tool_calls):
- """Print information about an MCP tool call."""
- self.io.tool_output("Preparing to run MCP tools", bold=True)
-
- for server, tool_calls in server_tool_calls.items():
+ def _print_tool_call_info(self, server_tool_calls_map):
+ """Print information about an MCP tool call on a single line."""
+ for server, tool_calls in server_tool_calls_map.items():
for tool_call in tool_calls:
- self.io.tool_output(f"Tool Call: {tool_call.function.name}")
- self.io.tool_output(f"Arguments: {tool_call.function.arguments}")
- self.io.tool_output(f"MCP Server: {server.name}")
+ output_str = (
+ f"[reverse][bold]MCP[/bold][/reverse] [yellow]{server.name}:"
+ f" {tool_call.function.name}[/yellow] ({tool_call.function.arguments})"
+ )
+ self.io.tool_output(output_str)
if self.verbose:
- self.io.tool_output(f"Tool ID: {tool_call.id}")
- self.io.tool_output(f"Tool type: {tool_call.type}")
-
- self.io.tool_output("\n")
+ self.io.tool_output(
+ f" (Verbose: ID={tool_call.id}, Type={tool_call.type})"
+ )
+ self.io.tool_output("") # Add a newline after all tool call infos
- def _gather_server_tool_calls(self, tool_calls):
- """Collect all tool calls grouped by server.
+ def _gather_server_tool_calls(self, llm_tool_calls: List[Any]) -> Dict[McpServer, List[Any]]:
+ """Collect all tool calls grouped by McpServer instance.
Args:
- tool_calls: List of tool calls from the LLM response
+ llm_tool_calls: List of tool_call objects from the LLM response.
Returns:
- dict: Dictionary mapping servers to their respective tool calls
+ dict: Dictionary mapping McpServer instances to a list of their respective tool_call objects.
+ """
+ gathered_calls: Dict[McpServer, List[Any]] = {}
+
+ if not self.mcp_profile_manager or not self.mcp_profile_manager.active_mcp_client_pool:
+ return gathered_calls
+
+ active_pool = self.mcp_profile_manager.active_mcp_client_pool
+ # tools_by_server is Dict[server_name, List[tool_definition_dict]]
+ tools_by_server = active_pool.get_cached_tools_by_server()
+ # mcp_instances is Dict[server_name, McpServer_instance]
+ mcp_instances = active_pool.clients
+
+ if not tools_by_server or not mcp_instances:
+ return gathered_calls
+
+ for llm_tc in llm_tool_calls:
+ found_tool_for_llm_tc = False
+ for server_name, mcp_tool_definitions in tools_by_server.items():
+ for mcp_tool_def in mcp_tool_definitions:
+ if mcp_tool_def.get("function", {}).get("name") == llm_tc.function.name:
+ target_server_instance = mcp_instances.get(server_name)
+ if target_server_instance:
+ if target_server_instance not in gathered_calls:
+ gathered_calls[target_server_instance] = []
+ gathered_calls[target_server_instance].append(llm_tc)
+ found_tool_for_llm_tc = True
+ break # Found the definition for this llm_tc, move to next llm_tc
+ if found_tool_for_llm_tc:
+ break # Moved to next llm_tc
+
+ return gathered_calls
+
+ def _execute_tool_calls(self, server_tool_calls_map: Dict[McpServer, List[Any]]):
+ """Process tool calls from the response and execute them.
+ Args:
+ server_tool_calls_map: Dict mapping McpServer instances to their list of tool_calls.
+ Returns:
+ A list of tool response messages.
"""
- if not self.mcp_tools or len(self.mcp_tools) == 0:
- return None
-
- server_tool_calls = {}
- for tool_call in tool_calls:
- # Check if this tool_call matches any MCP tool
- for server_name, server_tools in self.mcp_tools:
- for tool in server_tools:
- if tool.get("function", {}).get("name") == tool_call.function.name:
- # Find the McpServer instance that will be used for communication
- for server in self.mcp_servers:
- if server.name == server_name:
- if server not in server_tool_calls:
- server_tool_calls[server] = []
- server_tool_calls[server].append(tool_call)
- break
-
- return server_tool_calls
-
- def _execute_tool_calls(self, tool_calls):
- """Process tool calls from the response and execute them if they match MCP tools.
- Returns a list of tool response messages."""
tool_responses = []
# Define the coroutine to execute all tool calls for a single server
- async def _exec_server_tools(server, tool_calls_list):
- tool_responses = []
+ async def _exec_server_tools(mcp_server_instance: McpServer, tool_calls_for_server: List[Any]):
+ # Renamed 'server' to 'mcp_server_instance' for clarity
+ responses_for_server = []
try:
# Connect to the server once
- session = await server.connect()
+ session = await mcp_server_instance.connect()
# Execute all tool calls for this server
- for tool_call in tool_calls_list:
+ for tool_call_obj in tool_calls_for_server:
call_result = await experimental_mcp_client.call_openai_tool(
session=session,
- openai_tool=tool_call,
+ openai_tool=tool_call_obj, # This is the tool_call object from LLM
)
result_text = str(call_result.content[0].text)
- tool_responses.append(
- {"role": "tool", "tool_call_id": tool_call.id, "content": result_text}
+ responses_for_server.append(
+ {"role": "tool", "tool_call_id": tool_call_obj.id, "content": result_text}
)
finally:
- await server.disconnect()
- return tool_responses
+ await mcp_server_instance.disconnect()
+ return responses_for_server
# Execute all tool calls concurrently
- async def _execute_all_tool_calls():
+ async def _execute_all_tool_calls_async():
tasks = []
- for server, tool_calls_list in tool_calls.items():
- tasks.append(_exec_server_tools(server, tool_calls_list))
+ for mcp_server_instance, tool_calls_for_server in server_tool_calls_map.items():
+ tasks.append(_exec_server_tools(mcp_server_instance, tool_calls_for_server))
# Wait for all tasks to complete
- results = await asyncio.gather(*tasks)
- return results
+ results_list_of_lists = await asyncio.gather(*tasks, return_exceptions=True)
+ return results_list_of_lists
# Run the async execution and collect results
- if tool_calls:
- all_results = asyncio.run(_execute_all_tool_calls())
+ if server_tool_calls_map:
+ all_results_nested = asyncio.run(_execute_all_tool_calls_async())
# Flatten the results from all servers
- for server_results in all_results:
- tool_responses.extend(server_results)
+ for server_batch_result in all_results_nested:
+ if isinstance(server_batch_result, Exception):
+ self.io.tool_error(f"Error executing batch of tool calls: {server_batch_result}")
+ # Potentially add a generic error tool_response for all tools in this batch
+ elif isinstance(server_batch_result, list):
+ tool_responses.extend(server_batch_result)
return tool_responses
- def initialize_mcp_tools(self):
- """
- Initialize tools from all configured MCP servers. MCP Servers that fail to be
- initialized will not be available to the Coder instance.
- """
- tools = []
-
- async def get_server_tools(server):
- try:
- session = await server.connect()
- server_tools = await experimental_mcp_client.load_mcp_tools(
- session=session, format="openai"
- )
- return (server.name, server_tools)
- except Exception as e:
- self.io.tool_warning(f"Error initializing MCP server {server.name}:\n{e}")
- return None
- finally:
- await server.disconnect()
-
- async def get_all_server_tools():
- tasks = [get_server_tools(server) for server in self.mcp_servers]
- results = await asyncio.gather(*tasks)
- return [result for result in results if result is not None]
-
- if self.mcp_servers:
- tools = asyncio.run(get_all_server_tools())
-
- if len(tools) > 0:
- self.io.tool_output("MCP servers configured:")
- for server_name, server_tools in tools:
- self.io.tool_output(f" - {server_name}")
-
- if self.verbose:
- for tool in server_tools:
- tool_name = tool.get("function", {}).get("name", "unknown")
- tool_desc = tool.get("function", {}).get("description", "").split("\n")[0]
- self.io.tool_output(f" - {tool_name}: {tool_desc}")
-
- self.mcp_tools = tools
-
- def get_tool_list(self):
- """Get a flattened list of all MCP tools."""
- tool_list = []
- if self.mcp_tools:
- for _, server_tools in self.mcp_tools:
- tool_list.extend(server_tools)
- return tool_list
+ def get_tool_list(self) -> List[Dict[str, Any]]:
+ """Get a flattened list of all MCP tools from the active profile."""
+ if self.mcp_profile_manager and self.mcp_profile_manager.active_mcp_client_pool:
+ return self.mcp_profile_manager.active_mcp_client_pool.get_cached_tools_flat_list(self.mcp_profile_manager)
+ return []
def reply_completed(self):
pass
diff --git a/aider/coders/base_prompts.py b/aider/coders/base_prompts.py
index cd93b9ef807..a3211669352 100644
--- a/aider/coders/base_prompts.py
+++ b/aider/coders/base_prompts.py
@@ -57,17 +57,16 @@ class CoderPrompts:
no_shell_cmd_reminder = ""
tool_prompt = """
-
-When solving problems, you have special tools available. Please follow these rules:
+You have MCP (Model Context Protocol) tools available.
+Follow these rules:
1. Always use the exact format required for each tool and include all needed information.
2. Only use tools that are currently available in this conversation.
-3. Don't mention tool names when talking to people. Say "I'll check your code" instead
- of "I'll use the code_analyzer tool."
-4. Only use tools when necessary. If you know the answer, just respond directly.
+3. Don't mention tool names unless explicitly requested. For example, if a code_analyzer tool is available, say "I'll check your code" before making the tool call instead of "I'll use the code_analyzer tool."
+4. Only use tools when necessary. If you know the answer, just respond directly unless explicitly requested.
5. Before using any tool, briefly explain why you need to use it.
-
"""
rename_with_shell = ""
go_ahead_tip = ""
+
diff --git a/aider/commands.py b/aider/commands.py
index aaf6d7ddd9a..c1c65fc895f 100644
--- a/aider/commands.py
+++ b/aider/commands.py
@@ -12,6 +12,7 @@
from PIL import Image, ImageGrab
from prompt_toolkit.completion import Completion, PathCompleter
from prompt_toolkit.document import Document
+from prompt_toolkit.shortcuts import checkboxlist_dialog
from aider import models, prompts, voice
from aider.editor import pipe_editor
@@ -36,6 +37,7 @@ def __init__(self, placeholder=None, **kwargs):
class Commands:
voice = None
scraper = None
+ mcp_profile_manager = None
def clone(self):
return Commands(
@@ -48,6 +50,7 @@ def clone(self):
verbose=self.verbose,
editor=self.editor,
original_read_only_fnames=self.original_read_only_fnames,
+ mcp_profile_manager=self.mcp_profile_manager,
)
def __init__(
@@ -63,10 +66,12 @@ def __init__(
verbose=False,
editor=None,
original_read_only_fnames=None,
+ mcp_profile_manager=None,
):
self.io = io
self.coder = coder
self.parser = parser
+ self.mcp_profile_manager = mcp_profile_manager
self.args = args
self.verbose = verbose
@@ -1588,6 +1593,133 @@ def cmd_reasoning_effort(self, args):
announcements = "\n".join(self.coder.get_announcements())
self.io.tool_output(announcements)
+ def cmd_mcp(self, args_str: str):
+ "Manage MCP (Model Context Protocol) profiles"
+ args = args_str.strip().split()
+
+ if not self.mcp_profile_manager:
+ self.io.tool_error("MCP profile manager is not initialized.")
+ return
+
+ if not args:
+ profiles_details = self.mcp_profile_manager.list_profiles_details()
+ self.io.tool_output("Available MCP profiles:")
+ if profiles_details:
+ for name, server_configs in profiles_details:
+ server_names_list = [s_conf.get("name", "UnknownServer") for s_conf in server_configs]
+ self.io.tool_output(
+ f" - {name}: {', '.join(server_names_list) if server_names_list else ' (no servers)'}"
+ )
+ else:
+ self.io.tool_output(" (No profiles defined)")
+
+ active_name = self.mcp_profile_manager.active_profile_name
+ if active_name:
+ self.io.tool_output(f"\nCurrently active MCP profile: {active_name}")
+ else:
+ self.io.tool_output("\nNo MCP profile is currently active.")
+
+ self.io.tool_output("\nCommands:")
+ self.io.tool_output(" /mcp new <profile_name> - Create a new MCP profile")
+ self.io.tool_output(" /mcp tools <server_name> - Configure enabled tools for a server in the active profile")
+ self.io.tool_output(" /mcp enable <profile_name> - Enable an MCP profile")
+ self.io.tool_output(" /mcp disable - Disable the active MCP profile")
+ self.io.tool_output(" /mcp rm <profile_name> - Delete an MCP profile")
+ self.io.tool_output(" /mcp persist - Save the active MCP profile as the default for next launch")
+ self.io.tool_output(" /mcp persist clear - Clear the persisted default MCP profile")
+ return
+
+ sub_command = args[0]
+ if sub_command == "new":
+ if len(args) > 1:
+ profile_name = args[1]
+ if self.mcp_profile_manager.get_profile(profile_name):
+ self.io.tool_error(f"Profile '{profile_name}' already exists.")
+ return
+
+ known_server_names = self.mcp_profile_manager.get_known_server_names()
+ if not known_server_names:
+ self.io.tool_error("No MCP servers found. Configure servers first (e.g., in .aider.conf.yml).")
+ return
+
+ # First dialog: Select servers
+ server_selection_choices = [(name, name) for name in known_server_names]
+ selected_server_names = checkboxlist_dialog(
+ title=f"Create MCP Profile: {profile_name} (Step 1/2)",
+ text="Select servers to include in this profile:",
+ values=server_selection_choices,
+ ).run()
+
+ if not selected_server_names:
+ self.io.tool_output("Profile creation cancelled or no servers selected.")
+ return
+
+ # Second dialog: Configure no_confirm for selected servers
+ no_confirm_choices = [(name, name) for name in selected_server_names]
+
+ # checkboxlist_dialog returns None if Cancel is chosen
+ servers_for_no_confirm = checkboxlist_dialog(
+ title=f"Create MCP Profile: {profile_name} (Step 2/2)",
+ text="Select servers that should NOT require confirmation for tool use:",
+ values=no_confirm_choices,
+ ).run()
+
+ # If the second dialog was cancelled, treat it as cancelling the whole operation
+ if servers_for_no_confirm is None:
+ self.io.tool_output("Profile creation cancelled during no_confirm configuration.")
+ return
+
+ final_selected_servers_config = []
+ for server_name in selected_server_names:
+ final_selected_servers_config.append(
+ {
+ "name": server_name,
+ "no_confirm": server_name in servers_for_no_confirm,
+ }
+ )
+
+ self.mcp_profile_manager.create_new_profile(profile_name, final_selected_servers_config)
+ # self.io.tool_output(f"MCP profile '{profile_name}' created with {len(final_selected_servers_config)} server(s).") # Redundant with manager's output
+
+ else:
+ self.io.tool_error("Usage: /mcp new <profile_name>")
+
+ elif sub_command == "tools":
+ if len(args) > 1:
+ server_name = args[1]
+ self.mcp_profile_manager.configure_server_tools(server_name)
+ else:
+ self.io.tool_error("Usage: /mcp tools <server_name>")
+
+ elif sub_command == "enable":
+ if len(args) > 1:
+ profile_name = args[1]
+ self.mcp_profile_manager.enable_profile(profile_name, self.coder.main_model, self.coder.main_model.edit_format)
+ else:
+ self.io.tool_error("Usage: /mcp enable <profile_name>")
+ elif sub_command == "disable":
+ if len(args) == 1:
+ self.mcp_profile_manager.disable_profile()
+ else:
+ self.io.tool_error("Usage: /mcp disable")
+ elif sub_command == "rm":
+ if len(args) > 1:
+ profile_name = args[1]
+ self.mcp_profile_manager.delete_profile(profile_name)
+ else:
+ self.io.tool_error("Usage: /mcp rm <profile_name>")
+ elif sub_command == "persist":
+ if len(args) == 1: # /mcp persist
+ self.mcp_profile_manager.persist_active_profile()
+ elif len(args) == 2 and args[1] == "clear": # /mcp persist clear
+ self.mcp_profile_manager.clear_persisted_default_profile()
+ else:
+ self.io.tool_error("Usage: /mcp persist [clear]")
+ else:
+ self.io.tool_error(f"Unknown /mcp subcommand: {sub_command}")
+ self.io.tool_output("Valid subcommands are: new, tools, enable, disable, rm, persist")
+
+
def cmd_copy_context(self, args=None):
"""Copy the current chat context as markdown, suitable to paste into a web UI"""
diff --git a/aider/io.py b/aider/io.py
index f28a1c86dfb..8b3e4400990 100644
--- a/aider/io.py
+++ b/aider/io.py
@@ -146,18 +146,66 @@ def tokenize(self):
)
def get_command_completions(self, document, complete_event, text, words):
- if len(words) == 1 and not text[-1].isspace():
- partial = words[0].lower()
- candidates = [cmd for cmd in self.command_names if cmd.startswith(partial)]
+ if len(words) == 1 and not text[-1].isspace(): # Completing the command itself
+ partial_cmd = words[0].lower()
+ # Ensure self.command_names is populated
+ if not hasattr(self, 'command_names') or not self.command_names:
+ if self.commands:
+ self.command_names = self.commands.get_commands()
+ else:
+ self.command_names = []
+
+ candidates = [cmd for cmd in self.command_names if cmd.startswith(partial_cmd)]
for candidate in sorted(candidates):
- yield Completion(candidate, start_position=-len(words[-1]))
- return
-
- if len(words) <= 1 or text[-1].isspace():
+ yield Completion(candidate, start_position=-len(words[0]))
return
+ # If we are here, words[0] is a complete command.
+ # We are completing arguments for words[0].
cmd = words[0]
- partial = words[-1].lower()
+
+ # If text ends with a space, or no partial word to complete
+ if text[-1].isspace() or len(words) == 1 :
+ partial_arg = ""
+ else: # We are completing the current (last) word as an argument
+ partial_arg = words[-1].lower()
+
+ # Special handling for /mcp
+ if cmd == "/mcp":
+ if len(words) == 2 and not text[-1].isspace(): # Completing the subcommand (enable/disable)
+ subcommands = ["enable", "disable"]
+ for sub_cmd_candidate in subcommands:
+ if sub_cmd_candidate.startswith(partial_arg):
+ yield Completion(sub_cmd_candidate, start_position=-len(words[-1]))
+ return
+ elif (len(words) == 2 and text[-1].isspace()) or \
+ (len(words) == 1 and text[-1].isspace()): # Suggesting subcommand after "/mcp "
+ subcommands = ["enable", "disable"]
+ for sub_cmd_candidate in subcommands:
+ yield Completion(sub_cmd_candidate, start_position=0)
+ return
+ elif len(words) >= 2 and words[1] == "enable":
+ # Completing profile name for "/mcp enable "
+ # Or suggesting profile names after "/mcp enable "
+ if self.commands and self.commands.mcp_profile_manager:
+ profile_names = self.commands.mcp_profile_manager.list_profile_names()
+ for profile_name in profile_names:
+ if profile_name.startswith(partial_arg):
+ start_pos = -len(words[-1]) if (len(words) > 2 and not text[-1].isspace()) else 0
+ yield Completion(profile_name, start_position=start_pos)
+ return
+ return # No mcp_profile_manager or no profiles
+
+ # Fallback to existing general command argument completion
+ if (len(words) <= 1 or text[-1].isspace()) and cmd != "/mcp": # Avoid this path if we handled /mcp above
+ return
+
+ # If we fell through /mcp logic, and we are not at a space, use original partial.
+ # Otherwise, partial_arg would have been set above.
+ if cmd != "/mcp" and not text[-1].isspace(): # Original logic for other commands
+ partial = words[-1].lower()
+ else: # Use the partial_arg determined for argument completion
+ partial = partial_arg
matches, _, _ = self.commands.matching_commands(cmd)
if len(matches) == 1:
@@ -983,22 +1031,41 @@ def tool_warning(self, message="", strip=True):
def tool_output(self, *messages, log_only=False, bold=False):
if messages:
- hist = " ".join(messages)
+ hist_parts = []
+ for m in messages:
+ if isinstance(m, Text):
+ hist_parts.append(m.plain)
+ else:
+ hist_parts.append(str(m))
+ hist = " ".join(hist_parts)
hist = f"{hist.strip()}"
self.append_chat_history(hist, linebreak=True, blockquote=True)
if log_only:
return
- messages = list(map(Text, messages))
- style = dict()
+ processed_messages = []
+ for m in messages:
+ if not isinstance(m, Text):
+ # If m is a string, parse it for Rich markup
+ processed_messages.append(Text.from_markup(str(m)))
+ else:
+ # If m is already a Text object, use it as is
+ processed_messages.append(m)
+
+ style_args = {}
if self.pretty:
if self.tool_output_color:
- style["color"] = ensure_hash_prefix(self.tool_output_color)
- style["reverse"] = bold
-
- style = RichStyle(**style)
- self.console.print(*messages, style=style)
+ style_args["color"] = ensure_hash_prefix(self.tool_output_color)
+ if bold: # bold is a boolean, reverse is the RichStyle attribute
+ style_args["reverse"] = True
+
+ style = RichStyle(**style_args) if style_args else None
+
+ if style:
+ self.console.print(*processed_messages, style=style)
+ else:
+ self.console.print(*processed_messages)
def get_assistant_mdstream(self):
mdargs = dict(style=self.assistant_output_color, code_theme=self.code_theme)
diff --git a/aider/main.py b/aider/main.py
index 9358f6b6bd8..3cc0a3d68a5 100644
--- a/aider/main.py
+++ b/aider/main.py
@@ -30,7 +30,7 @@
from aider.history import ChatSummary
from aider.io import InputOutput
from aider.llm import litellm # noqa: F401; properly init litellm on launch
-from aider.mcp import load_mcp_servers
+from aider.mcp.mcp_profile_manager import MCPProfileManager
from aider.models import ModelSettings
from aider.onboarding import offer_openrouter_oauth, select_default_model
from aider.repo import ANY_GIT_ERROR, GitRepo
@@ -738,6 +738,10 @@ def get_io(pretty):
if args.gitignore:
check_gitignore(git_root, io)
+ # Instantiate MCPProfileManager
+ mcp_profile_manager = MCPProfileManager(io, args)
+ mcp_profile_manager.load_or_initialize_profiles()
+
if args.verbose:
show = format_settings(parser, args)
io.tool_output(show)
@@ -939,6 +943,7 @@ def get_io(pretty):
verbose=args.verbose,
editor=args.editor,
original_read_only_fnames=read_only_fnames,
+ mcp_profile_manager=mcp_profile_manager,
)
summarizer = ChatSummary(
@@ -964,13 +969,20 @@ def get_io(pretty):
# Track auto-commits configuration
analytics.event("auto_commits", enabled=bool(args.auto_commits))
- try:
- # Load MCP servers from config string or file
- mcp_servers = load_mcp_servers(args.mcp_servers, args.mcp_servers_file, io, args.verbose)
-
- if not mcp_servers:
- mcp_servers = []
+ # Enable persisted default MCP profile if one exists
+ if mcp_profile_manager.persisted_active_profile_name:
+ if args.verbose:
+ io.tool_output(
+ f"Attempting to enable persisted default MCP profile:"
+ f" {mcp_profile_manager.persisted_active_profile_name}"
+ )
+ mcp_profile_manager.enable_profile(
+ mcp_profile_manager.persisted_active_profile_name,
+ main_model,
+ main_model.edit_format # Use the main model's default edit format
+ )
+ try:
coder = Coder.create(
main_model=main_model,
edit_format=args.edit_format,
@@ -1003,7 +1015,7 @@ def get_io(pretty):
detect_urls=args.detect_urls,
auto_copy_context=args.copy_paste,
auto_accept_architect=args.auto_accept_architect,
- mcp_servers=mcp_servers,
+ mcp_profile_manager=mcp_profile_manager,
)
except UnknownEditFormat as err:
io.tool_error(str(err))
diff --git a/aider/mcp/mcp_client_pool.py b/aider/mcp/mcp_client_pool.py
new file mode 100644
index 00000000000..d11d8f31684
--- /dev/null
+++ b/aider/mcp/mcp_client_pool.py
@@ -0,0 +1,281 @@
+import asyncio
+import logging
+import asyncio
+import logging
+from typing import List, Dict, Any, Optional, Tuple
+
+from aider.mcp.server import McpServer # Assuming McpServer is in this path
+from aider.mcp.tool_filter import filter_tools_for_server
+
+
+class MCPClientPool:
+ """
+ Manages a pool of MCPClient instances, one for each configured MCP server.
+ Provides a way to get available tools from all connected servers and
+ to shut down all connections gracefully.
+ """
+
+ def __init__(self, io: Any, model: Any, edit_format: str, server_configs: List[Dict[str, Any]]):
+ """
+ Initializes the MCPClientPool.
+
+ Args:
+ io: The input/output object for logging and user interaction.
+ model: The main language model being used.
+ edit_format: The edit format being used.
+ server_configs: A list of configurations for each MCP server.
+ """
+ self.io = io
+ self.model = model
+ self.edit_format = edit_format
+ self.server_configs = server_configs
+ self.clients: Dict[str, McpServer] = {}
+ self.cached_tools_by_server: Optional[Dict[str, List[Dict[str, Any]]]] = None
+ self._init_clients()
+
+ def _init_clients(self):
+ """
+ Initializes McpServer instances based on the server configurations.
+ """
+ for config in self.server_configs:
+ server_name = config.get("name")
+ if not server_name:
+ logging.warning("MCP server config missing 'name', skipping.")
+ if self.io and hasattr(self.io, 'tool_warning'):
+ self.io.tool_warning("An MCP server configuration was missing a 'name' and has been skipped.")
+ continue
+
+ if server_name in self.clients:
+ logging.warning(f"Duplicate MCP server name '{server_name}' found in configuration, skipping.")
+ if self.io and hasattr(self.io, 'tool_warning'):
+ self.io.tool_warning(f"Duplicate MCP server name '{server_name}' found. Only the first configuration will be used.")
+ continue
+
+ try:
+ self.clients[server_name] = McpServer(server_config=config)
+ # TODO: Potentially connect here or connect on-demand when tools are requested.
+ # For now, just initializing. Connection will happen when tools are fetched.
+ logging.info(f"Initialized McpServer for '{server_name}'.")
+ except Exception as e:
+ logging.error(f"Failed to initialize McpServer for '{server_name}': {e}")
+ if self.io and hasattr(self.io, 'tool_error'):
+ self.io.tool_error(f"Failed to initialize MCP server '{server_name}': {e}")
+
+ async def _fetch_tools_for_server(self, server_name: str, client: McpServer, context: Optional[Dict[str, Any]] = None) -> Optional[List[Dict[str, Any]]]:
+ """Helper to fetch tools for a single server."""
+ from litellm import experimental_mcp_client # Local import to avoid circular dependency if mcp.py imports aider stuff
+ try:
+ session = await client.connect()
+ # The context parameter for load_mcp_tools is not standard, removing for now.
+ # If needed, it would be passed to experimental_get_tools if that was the target.
+ server_tools = await experimental_mcp_client.load_mcp_tools(session=session, format="openai")
+ logging.info(f"Retrieved {len(server_tools)} tools from {server_name}")
+ return server_tools
+ except Exception as e:
+ logging.error(f"Error getting tools from MCP server {server_name}: {e}")
+ if self.io and hasattr(self.io, 'tool_error'):
+ self.io.tool_error(f"Could not retrieve tools from MCP server '{server_name}': {e}")
+ return None
+ finally:
+ # Ensure client is disconnected even if tool fetching fails
+ await client.disconnect()
+
+
+ async def _fetch_tools_for_all_servers(self, context: Optional[Dict[str, Any]] = None) -> Dict[str, List[Dict[str, Any]]]:
+ """
+ Connects to all MCP servers and retrieves their available tools.
+
+ Args:
+ context: Optional context to pass to the MCP servers (currently unused by load_mcp_tools).
+
+ Returns:
+ A dictionary mapping server names to their list of tool definitions.
+ """
+ tools_by_server: Dict[str, List[Dict[str, Any]]] = {}
+
+ tasks = []
+ for server_name, client in self.clients.items():
+ tasks.append(self._fetch_tools_for_server(server_name, client, context))
+
+ results = await asyncio.gather(*tasks, return_exceptions=True)
+
+ for i, server_name in enumerate(self.clients.keys()):
+ result = results[i]
+ if isinstance(result, Exception):
+ logging.error(f"Exception fetching tools from {server_name}: {result}")
+ # Optionally store empty list or skip this server
+ elif result is not None:
+ tools_by_server[server_name] = result
+
+ return tools_by_server
+
+ async def fetch_and_cache_tools(self, context: Optional[Dict[str, Any]] = None):
+ """
+ Fetches tools from all configured servers and caches them.
+ """
+ if self.io and hasattr(self.io, 'tool_output'):
+ self.io.tool_output("Fetching tools from enabled MCP profile servers...")
+ self.cached_tools_by_server = await self._fetch_tools_for_all_servers(context)
+ if self.io and hasattr(self.io, 'tool_output'):
+ num_tools = sum(len(tools) for tools in self.cached_tools_by_server.values())
+ num_servers = len(self.cached_tools_by_server)
+ self.io.tool_output(f"Fetched {num_tools} tool(s) from {num_servers} server(s).")
+
+
def get_cached_tools_flat_list(self, mcp_profile_manager) -> List[Dict[str, Any]]:
    """
    Return a flat list of tool definitions across all servers in the pool,
    keeping only the tools enabled by the active MCP profile for each server,
    deduplicated by function name (the first definition encountered wins).
    """
    collected: List[Dict[str, Any]] = []
    has_active_profile = bool(mcp_profile_manager and mcp_profile_manager.active_profile_name)
    if self.cached_tools_by_server and has_active_profile:
        for srv_name, srv_tool_defs in self.cached_tools_by_server.items():
            if not srv_tool_defs:
                continue
            # Enabled tool names for this server per the active profile
            # (None means "all tools enabled").
            allowed_names = mcp_profile_manager.get_enabled_tools_for_server(srv_name)
            collected.extend(filter_tools_for_server(srv_tool_defs, allowed_names))

    # Deduplicate by function name; malformed entries are skipped.
    unique: List[Dict[str, Any]] = []
    names_seen = set()
    for candidate in collected:
        if not (isinstance(candidate, dict) and isinstance(candidate.get("function"), dict)):
            continue
        fn_name = candidate["function"].get("name")
        if fn_name and fn_name not in names_seen:
            names_seen.add(fn_name)
            unique.append(candidate)
    return unique
+
def get_effective_tools_and_tool_choice(
    self,
    mcp_profile_manager,  # MCPProfileManager instance
    current_tool_choice: Optional[Any],  # tool_choice that would be used without MCP filtering
    io: Optional[Any]  # For logging warnings
) -> Tuple[List[Dict[str, Any]], Optional[Any]]:
    """
    Return the profile-filtered MCP tool definitions, and warn (via io) when
    current_tool_choice names an MCP tool that the active profile disables.

    Returns:
        (effective_mcp_tools, current_tool_choice) — the tool_choice value is
        passed through unchanged; the check is for logging only.
    """

    def _defines_tool(tool_defs, wanted_name):
        # True when any well-formed definition in tool_defs names wanted_name.
        for candidate in tool_defs or []:
            try:
                if isinstance(candidate, dict) and \
                   isinstance(candidate.get("function"), dict) and \
                   candidate["function"].get("name") == wanted_name:
                    return True
            except (KeyError, TypeError):
                continue
        return False

    effective_tools = self.get_cached_tools_flat_list(mcp_profile_manager)

    # Extract the explicitly chosen tool name, if tool_choice is specific.
    chosen_name = None
    if isinstance(current_tool_choice, dict) and current_tool_choice.get("type") == "function":
        fn_spec = current_tool_choice.get("function")
        if isinstance(fn_spec, dict):
            chosen_name = fn_spec.get("name")

    if chosen_name:
        # Was the chosen tool defined by any MCP server at all, regardless of
        # its current enabled status?
        known_to_mcp = any(
            _defines_tool(defs, chosen_name)
            for defs in (self.cached_tools_by_server or {}).values()
        )
        # If so, but it is absent from the effective (enabled) list, warn.
        if known_to_mcp and not _defines_tool(effective_tools, chosen_name):
            if io and hasattr(io, 'tool_warning'):
                io.tool_warning(
                    f"Warning: Tool '{chosen_name}' is specified by `tool_choice` but is not "
                    f"enabled for any server in the active MCP profile. "
                    f"The `tool_choice` may effectively be 'auto' or 'none' by the LLM."
                )

    return effective_tools, current_tool_choice
+
def get_cached_tools_by_server(self) -> Optional[Dict[str, List[Dict[str, Any]]]]:
    """Expose the raw per-server tool cache (server name -> list of tool definitions)."""
    cache = self.cached_tools_by_server
    return cache
+
def shutdown(self):
    """
    Shuts down all MCP client connections.

    Synchronous entry point that drives the asynchronous `_shutdown_async`
    helper.  When no event loop is running it blocks until every client has
    disconnected; when called from inside a running loop (where asyncio.run()
    cannot be used) it schedules the shutdown as a task on that loop instead.
    """
    logging.info("Shutting down MCPClientPool...")

    # Probe for a running loop up front with asyncio.get_running_loop()
    # instead of parsing the RuntimeError message raised by asyncio.run().
    # This also avoids creating a coroutine that would never be awaited and
    # replaces the deprecated asyncio.get_event_loop() pattern.
    try:
        running_loop = asyncio.get_running_loop()
    except RuntimeError:
        running_loop = None  # normal case: no loop running in this thread

    if running_loop is not None:
        # Called from an async context: we cannot block here, so schedule
        # the shutdown on the already-running loop.
        running_loop.create_task(self._shutdown_async())
        logging.info("Scheduled MCPClientPool shutdown in existing event loop.")
        return

    try:
        asyncio.run(self._shutdown_async())
    except RuntimeError as e:
        logging.error(f"MCPClientPool shutdown error: {e}")
    except Exception as e:  # Defensive: shutdown must never raise.
        logging.error(f"Unexpected error during MCPClientPool shutdown: {e}")
+
+
+ async def _shutdown_async(self):
+ """
+ Asynchronously disconnects all McpServer instances.
+ """
+ logging.info("Starting asynchronous shutdown of MCP clients.")
+ shutdown_tasks = []
+ for server_name, client in self.clients.items():
+ logging.info(f"Initiating shutdown for MCP server: {server_name}")
+ shutdown_tasks.append(client.disconnect())
+
+ results = await asyncio.gather(*shutdown_tasks, return_exceptions=True)
+
+ for server_name, result in zip(self.clients.keys(), results):
+ if isinstance(result, Exception):
+ logging.error(f"Error disconnecting from MCP server {server_name}: {result}")
+ if self.io and hasattr(self.io, 'tool_warning'):
+ self.io.tool_warning(f"Error during disconnect from MCP server '{server_name}': {result}")
+ else:
+ logging.info(f"Successfully disconnected from MCP server: {server_name}")
+
+ self.clients.clear()
+ logging.info("All MCP clients have been processed for shutdown.")
+
+ # TODO: Add methods for coder to interact with tools, e.g.:
+ # async def execute_tool(self, tool_name: str, arguments: Dict[str, Any]) -> Any:
+ # pass
+ # This would require routing the call to the correct McpServer instance.
diff --git a/aider/mcp/mcp_profile_manager.py b/aider/mcp/mcp_profile_manager.py
new file mode 100644
index 00000000000..a5a3cd5de6e
--- /dev/null
+++ b/aider/mcp/mcp_profile_manager.py
@@ -0,0 +1,641 @@
+import asyncio
+from pathlib import Path
+from typing import List, Dict, Optional, Tuple, Any
+from dataclasses import dataclass, asdict
+import yaml
+import json # Added
+from prompt_toolkit.shortcuts import checkboxlist_dialog # Added
+from aider.mcp import load_mcp_servers # Added
+from aider.mcp.mcp_client_pool import MCPClientPool
+from aider.coders.base_prompts import CoderPrompts
+
+
# User-level file where MCP profiles (and the persisted default) are stored.
MCP_PROFILES_YAML_PATH = Path.home() / ".aider.mcp.profiles.yml"

# Constants for YAML keys
PROFILE_NAME_KEY = "name"
PROFILE_SERVERS_KEY = "servers"
SERVER_NAME_KEY = "name"
SERVER_NO_CONFIRM_KEY = "no_confirm"
PROFILE_SERVER_ENABLED_TOOLS_KEY = "enabled_tools"  # Per-server key inside a profile entry
# Old key, removed during load if present (legacy file format)
OLD_PROFILE_SERVER_NAMES_KEY = "server_names"
# Top-level keys for the current YAML file structure
YAML_PROFILES_KEY = "profiles"
YAML_DEFAULT_PROFILE_KEY = "default_profile_name"
+
+
@dataclass
class MCPProfile:
    """A named MCP profile: the servers it includes and their per-server settings."""
    # Unique profile name ('all' is the auto-maintained default profile).
    name: str
    # Each entry: {'name': str, 'no_confirm': bool, 'enabled_tools': Optional[List[str]]}
    # where enabled_tools of None means "all tools enabled" for that server.
    servers: List[Dict[str, Any]]
+
def load_mcp_profiles() -> Tuple[Dict[str, MCPProfile], Optional[str]]:
    """
    Load MCP profiles from ~/.aider.mcp.profiles.yml.

    Supports both the current file layout (a dict with 'profiles' and
    'default_profile_name' keys) and the legacy layout (a bare list of
    profiles).  Malformed entries are skipped with a printed warning rather
    than aborting the load.

    Returns:
        (profiles, default_profile_name): a dict mapping profile name to
        MCPProfile, and the persisted default profile name (or None).
    """
    profiles: Dict[str, MCPProfile] = {}
    persisted_active_profile_name: Optional[str] = None
    try:
        with open(MCP_PROFILES_YAML_PATH, 'r') as f:
            raw_data = yaml.safe_load(f)

        profiles_list_from_yaml = []

        if isinstance(raw_data, dict):
            persisted_active_profile_name = raw_data.get(YAML_DEFAULT_PROFILE_KEY)
            profiles_list_from_yaml = raw_data.get(YAML_PROFILES_KEY, [])
            if not isinstance(profiles_list_from_yaml, list):
                print(f"Warning: '{YAML_PROFILES_KEY}' key in MCP profiles YAML at {MCP_PROFILES_YAML_PATH} is not a list. Profiles section will be ignored.")
                profiles_list_from_yaml = []
        elif isinstance(raw_data, list): # Old format compatibility
            profiles_list_from_yaml = raw_data
            # persisted_active_profile_name remains None
        elif raw_data is None: # File is empty
            # profiles_list_from_yaml remains [], persisted_active_profile_name remains None
            pass
        else: # File content is not a list or dict as expected
            print(f"Warning: MCP profiles YAML content at {MCP_PROFILES_YAML_PATH} is not a list or dictionary. File will be treated as empty.")
            # profiles_list_from_yaml remains [], persisted_active_profile_name remains None

        # Process the extracted list of profiles
        for profile_dict in profiles_list_from_yaml:
            if not isinstance(profile_dict, dict):
                print(f"Warning: Skipping non-dictionary profile entry: {profile_dict}")
                continue
            try:
                # Ensure servers is a list of dicts with name and no_confirm.
                loaded_servers_data = profile_dict.get(PROFILE_SERVERS_KEY)
                parsed_servers_list = []
                profile_name_for_error = profile_dict.get(PROFILE_NAME_KEY, 'UnknownProfile')

                if isinstance(loaded_servers_data, list):
                    for server_entry in loaded_servers_data:
                        current_server_dict = {}
                        if isinstance(server_entry, str): # server_entry is just a name
                            current_server_dict = {
                                SERVER_NAME_KEY: server_entry,
                                SERVER_NO_CONFIRM_KEY: False,
                                PROFILE_SERVER_ENABLED_TOOLS_KEY: None # Default for string-only entry
                            }
                        elif isinstance(server_entry, dict):
                            s_name = server_entry.get(SERVER_NAME_KEY)
                            if not isinstance(s_name, str) or not s_name:
                                print(f"Warning: Server entry in profile '{profile_name_for_error}' is missing a valid '{SERVER_NAME_KEY}': {server_entry}. Skipping this server.")
                                continue
                            s_no_confirm = server_entry.get(SERVER_NO_CONFIRM_KEY, False)
                            if not isinstance(s_no_confirm, bool):
                                print(f"Warning: '{SERVER_NO_CONFIRM_KEY}' for server '{s_name}' in profile '{profile_name_for_error}' is not a boolean (type: {type(s_no_confirm)}). Defaulting to False.")
                                s_no_confirm = False

                            current_server_dict = {
                                SERVER_NAME_KEY: s_name,
                                SERVER_NO_CONFIRM_KEY: s_no_confirm
                            }

                            enabled_tools_val = server_entry.get(PROFILE_SERVER_ENABLED_TOOLS_KEY)
                            if enabled_tools_val is not None: # Key is present
                                if isinstance(enabled_tools_val, list) and all(isinstance(tool_name, str) for tool_name in enabled_tools_val):
                                    current_server_dict[PROFILE_SERVER_ENABLED_TOOLS_KEY] = enabled_tools_val
                                else:
                                    print(f"Warning: '{PROFILE_SERVER_ENABLED_TOOLS_KEY}' for server '{s_name}' in profile '{profile_name_for_error}' is invalid (must be a list of strings). Defaulting to all tools enabled for this server in this profile.")
                                    current_server_dict[PROFILE_SERVER_ENABLED_TOOLS_KEY] = None
                            else: # Key is not present
                                current_server_dict[PROFILE_SERVER_ENABLED_TOOLS_KEY] = None
                        else:
                            print(f"Warning: Invalid server entry type in profile '{profile_name_for_error}': {server_entry} (type: {type(server_entry)}). Skipping this server.")
                            continue
                        parsed_servers_list.append(current_server_dict)
                elif loaded_servers_data is not None: # It was present but not a list
                    print(
                        f"Warning: '{PROFILE_SERVERS_KEY}' for profile '{profile_name_for_error}' is not a list"
                        f" (type: {type(loaded_servers_data)}). Initializing as empty list."
                    )
                    # parsed_servers_list remains empty

                profile_dict[PROFILE_SERVERS_KEY] = parsed_servers_list # Store the parsed list
                if OLD_PROFILE_SERVER_NAMES_KEY in profile_dict: # Remove old key if present for clean object creation
                    del profile_dict[OLD_PROFILE_SERVER_NAMES_KEY]

                # Ensure 'name' key is present for MCPProfile dataclass
                if PROFILE_NAME_KEY not in profile_dict:
                    print(f"Warning: Profile entry is missing '{PROFILE_NAME_KEY}': {profile_dict}. Skipping this profile.")
                    continue


                profile = MCPProfile(**profile_dict)
                profiles[profile.name] = profile
            except TypeError as e: # Catches issues like missing 'name' or other fields for MCPProfile
                profile_name_display = profile_dict.get(PROFILE_NAME_KEY, str(profile_dict))
                print(f"Warning: Skipping malformed profile entry for '{profile_name_display}': {e}")
        return profiles, persisted_active_profile_name
    except FileNotFoundError:
        return {}, None
    except yaml.YAMLError as e:
        # Potentially log this, e.g., using io if available
        print(f"Warning: Error parsing MCP profiles YAML: {e}")
        return {}, None
+
def save_mcp_profiles(profiles: Dict[str, MCPProfile], active_profile_name: Optional[str]):
    """
    Write all profiles (and, when given, the default profile name) to
    ~/.aider.mcp.profiles.yml.  Server entries whose 'enabled_tools' is None
    are saved without that key, keeping the YAML compact.
    """
    try:
        serialized_profiles = []
        for profile in profiles.values():
            as_plain_dict = asdict(profile)  # MCPProfile -> plain dict

            server_entries = as_plain_dict.get(PROFILE_SERVERS_KEY)
            if isinstance(server_entries, list):
                cleaned_entries = []
                for entry in server_entries:
                    # asdict() produced fresh dicts, so mutating them is safe.
                    if PROFILE_SERVER_ENABLED_TOOLS_KEY in entry and \
                       entry[PROFILE_SERVER_ENABLED_TOOLS_KEY] is None:
                        del entry[PROFILE_SERVER_ENABLED_TOOLS_KEY]
                    cleaned_entries.append(entry)
                as_plain_dict[PROFILE_SERVERS_KEY] = cleaned_entries

            serialized_profiles.append(as_plain_dict)

        payload = {YAML_PROFILES_KEY: serialized_profiles}
        if active_profile_name is not None:
            payload[YAML_DEFAULT_PROFILE_KEY] = active_profile_name

        with open(MCP_PROFILES_YAML_PATH, 'w') as f:
            yaml.dump(payload, f, sort_keys=False)
    except (IOError, yaml.YAMLError) as e:
        # Potentially log this, e.g., using io if available
        print(f"Warning: Error saving MCP profiles YAML: {e}")
+
def get_known_mcp_server_names(
    mcpservers_arg: Optional[Any],  # value of args.mcpservers
    mcpservers_file_arg: Optional[str],  # value of args.mcpservers_file
    io: Optional[Any]
) -> List[str]:
    """
    Resolve the names of all MCP servers configured via the --mcp-servers /
    --mcp-servers-file settings.  Duplicate names are collapsed and the
    result is sorted.
    """
    discovered = []

    # 1. Servers given directly (dict, JSON string, or a path string).
    if mcpservers_arg:
        from_arg = []
        if isinstance(mcpservers_arg, dict):
            try:
                # load_mcp_servers() expects a JSON string, not a dict.
                from_arg = load_mcp_servers(
                    mcp_servers=json.dumps(mcpservers_arg),
                    mcp_servers_file=None, io=io, verbose=False)
            except TypeError as e:  # dict contained something non-serializable
                if io and hasattr(io, 'tool_warning'):
                    io.tool_warning(f"Could not serialize MCP servers dict to JSON: {e}")
        elif isinstance(mcpservers_arg, str):
            # First treat the string as inline JSON; if that yields nothing,
            # fall back to interpreting it as a path to a config file.
            from_arg = load_mcp_servers(mcp_servers=mcpservers_arg, mcp_servers_file=None, io=io, verbose=False)
            if not from_arg:
                from_arg = load_mcp_servers(mcp_servers=None, mcp_servers_file=mcpservers_arg, io=io, verbose=False)
        elif isinstance(mcpservers_arg, list):
            if io and hasattr(io, 'tool_warning'):
                io.tool_warning(
                    f"MCP servers config from direct config was a list, skipping. Expected a dictionary or JSON string."
                )
        # Other argument types are ignored.
        if from_arg:
            discovered.extend(from_arg)

    # 2. Servers from an explicit config file.  This may re-read a file that
    # was already handled above; the name set below deduplicates.
    if mcpservers_file_arg:
        from_file = load_mcp_servers(mcp_servers=None, mcp_servers_file=mcpservers_file_arg, io=io, verbose=False)
        if from_file:
            discovered.extend(from_file)

    # Collect the unique, valid server names.
    names = {
        srv.name
        for srv in discovered
        if hasattr(srv, 'name') and isinstance(srv.name, str) and srv.name
    }
    return sorted(names)
+
class MCPProfileManager:
    """
    Manages MCP profiles: loading/saving them, tracking which profile is
    active, and owning the MCPClientPool for the active profile.
    """

    def __init__(self, io, settings):
        """
        Args:
            io: InputOutput-like object used for user-facing messages.
            settings: parsed args/config; read for mcp_servers,
                mcp_servers_file and verbose.
        """
        # NOTE: the original had every assignment below duplicated verbatim
        # (copy-paste error); the duplicates are removed here.
        self.io = io
        self.settings = settings
        # Profile name -> MCPProfile, populated by load_or_initialize_profiles().
        self.profiles: "Dict[str, MCPProfile]" = {}
        # Name of the profile currently enabled in this session (None = none).
        self.active_profile_name: "Optional[str]" = None
        # Default profile name as stored in the YAML file; may differ from the
        # currently active profile.
        self.persisted_active_profile_name: "Optional[str]" = None
        # Client pool for the active profile's servers (None when inactive).
        self.active_mcp_client_pool: "Optional[MCPClientPool]" = None
+
def load_or_initialize_profiles(self):
    """
    Load profiles from disk and reconcile the auto-maintained 'all' profile
    with the currently configured MCP servers.

    The 'all' profile always lists every known server; per-server
    'no_confirm' and 'enabled_tools' settings from a previously saved 'all'
    profile are preserved for servers that still exist.  The file is
    re-saved only when the 'all' profile was created or actually changed.
    """
    self.profiles, self.persisted_active_profile_name = load_mcp_profiles()

    mcpservers_arg_val = getattr(self.settings, 'mcp_servers', None)
    mcpservers_file_arg_val = getattr(self.settings, 'mcp_servers_file', None)

    current_known_server_names = get_known_mcp_server_names(
        mcpservers_arg_val,
        mcpservers_file_arg_val,
        self.io
    )

    needs_save = False
    old_all_profile_data = self.profiles.get("all") # This is an MCPProfile object or None

    # Construct the new 'all' profile's server list, preserving no_confirm where possible
    new_all_profile_servers_list = []
    for s_name in current_known_server_names:
        no_confirm_val = False # Default
        if old_all_profile_data:
            # Check if this server was in the old 'all' profile
            existing_server_config = next(
                (s_conf for s_conf in old_all_profile_data.servers if s_conf.get(SERVER_NAME_KEY) == s_name),
                None
            )
            if existing_server_config:
                no_confirm_val = existing_server_config.get(SERVER_NO_CONFIRM_KEY, False)
                # Preserve enabled_tools if it was set for this server in the old 'all' profile
                # Defaults to None (all tools enabled) if not previously set or server is new
                enabled_tools_val = existing_server_config.get(PROFILE_SERVER_ENABLED_TOOLS_KEY, None)
            else:
                # Server is new to the 'all' profile, or 'all' profile itself is new
                enabled_tools_val = None
        else:
            # old_all_profile_data does not exist (first run or empty profiles file)
            enabled_tools_val = None

        new_all_profile_servers_list.append({
            SERVER_NAME_KEY: s_name,
            SERVER_NO_CONFIRM_KEY: no_confirm_val,
            PROFILE_SERVER_ENABLED_TOOLS_KEY: enabled_tools_val
        })

    # Sort for consistent comparison later
    new_all_profile_servers_list_sorted = sorted(new_all_profile_servers_list, key=lambda x: x[SERVER_NAME_KEY])

    # Create/Update the 'all' profile in the in-memory dictionary
    self.profiles["all"] = MCPProfile(name="all", servers=new_all_profile_servers_list_sorted)

    if old_all_profile_data is None:
        # The 'all' profile was not in the loaded file, so we created it.
        needs_save = True
        if self.io and hasattr(self.io, 'tool_output'):
            self.io.tool_output("Initialized default MCP profile 'all'.")
    else:
        # The 'all' profile was in the loaded file. Check if it changed.
        # Sort old server data for consistent comparison
        old_all_profile_servers_list_sorted = sorted(old_all_profile_data.servers, key=lambda x: x[SERVER_NAME_KEY])
        if old_all_profile_servers_list_sorted != new_all_profile_servers_list_sorted:
            needs_save = True
            if self.io and hasattr(self.io, 'tool_output'):
                self.io.tool_output("Updated MCP profile 'all' with current server configurations (names or no_confirm values changed).")

    if needs_save:
        # self.active_profile_name is None at this point.
        # We pass self.persisted_active_profile_name to preserve the loaded default.
        save_mcp_profiles(self.profiles, self.persisted_active_profile_name)
+
def get_profile(self, name: str) -> Optional[MCPProfile]:
    """Return the profile called *name*, or None when it does not exist."""
    try:
        return self.profiles[name]
    except KeyError:
        return None
+
def list_profile_names(self) -> List[str]:
    """Return the names of all known profiles."""
    return [profile_name for profile_name in self.profiles]
+
def list_profiles_details(self) -> List[Tuple[str, List[Dict[str, Any]]]]:
    """Return (name, servers) pairs for every known profile."""
    details = []
    for profile in self.profiles.values():
        details.append((profile.name, profile.servers))
    return details
+
def get_known_server_names(self) -> List[str]:
    """Resolve the current set of configured MCP server names from settings."""
    return get_known_mcp_server_names(
        getattr(self.settings, 'mcp_servers', None),
        getattr(self.settings, 'mcp_servers_file', None),
        self.io,
    )
+
def create_new_profile(self, profile_name: str, selected_servers_config: List[Dict[str, Any]]):
    """
    Create and persist a new profile.

    selected_servers_config entries already have the stored server shape
    ({'name': str, 'no_confirm': bool, ...}), so they are used as-is.
    """
    if profile_name in self.profiles:
        self.io.tool_error(f"Profile '{profile_name}' already exists. Cannot create.")
        return

    self.profiles[profile_name] = MCPProfile(name=profile_name, servers=selected_servers_config)
    save_mcp_profiles(self.profiles, self.persisted_active_profile_name)
    self.io.tool_output(f"MCP Profile '{profile_name}' created and saved.")
+
def configure_server_tools(self, target_server_name: str):
    """
    Interactively choose which of *target_server_name*'s tools are enabled
    in the active profile via a checkbox dialog, and save the result.

    Requires an active profile whose client pool has already fetched and
    cached the server's tool list.  Selecting every tool is stored as None
    ("all enabled"); a partial selection is stored as an explicit list.
    """
    if not self.active_profile_name:
        self.io.tool_error("No MCP profile is currently active. Enable a profile first with `/mcp enable `.")
        return

    active_profile = self.get_profile(self.active_profile_name)
    if not active_profile:
        # This case should ideally not happen if active_profile_name is set
        self.io.tool_error(f"Internal error: Active profile '{self.active_profile_name}' data not found.")
        return

    server_config_in_profile = None
    server_config_index = -1
    for i, s_conf in enumerate(active_profile.servers):
        if s_conf.get(SERVER_NAME_KEY) == target_server_name:
            server_config_in_profile = s_conf
            server_config_index = i
            break

    if not server_config_in_profile:
        self.io.tool_error(f"Server '{target_server_name}' not found in active profile '{self.active_profile_name}'.")
        return

    if not self.active_mcp_client_pool:
        self.io.tool_error(f"MCP client pool not initialized for active profile '{self.active_profile_name}'. Cannot fetch tool list.")
        return

    pool = self.active_mcp_client_pool
    # The existence of active_mcp_client_pool is already checked earlier in this function.

    if not hasattr(pool, 'cached_tools_by_server') or pool.cached_tools_by_server is None:
        self.io.tool_error(
            f"Tool information cache is not available for active profile '{self.active_profile_name}'. "
            "Tools might not have been fetched successfully when the profile was enabled."
        )
        return

    # Check if the target server was even part of the successfully initialized clients in the pool.
    # pool.clients is Dict[str, McpServer], mapping server name to McpServer instance.
    if not hasattr(pool, 'clients') or target_server_name not in pool.clients:
        self.io.tool_error(
            f"Server '{target_server_name}' was not successfully initialized or found in the active MCP client pool. "
            "Check server configuration or connection issues during profile enabling."
        )
        return

    # Get tool definitions from the pool's cache for the specific server.
    # This will be None if the server isn't in the cache (e.g., failed tool fetch, or reported no tools).
    # It will be a list (possibly empty) if tools were fetched.
    server_tool_definitions = pool.cached_tools_by_server.get(target_server_name)

    all_tool_names = []
    # Process server_tool_definitions if it's a non-empty list.
    # The existing parsing logic for tool_def to populate all_tool_names follows.
    if isinstance(server_tool_definitions, list) and server_tool_definitions:
        for tool_def in server_tool_definitions:
            if isinstance(tool_def, dict) and \
               tool_def.get('type') == 'function' and \
               isinstance(tool_def.get('function'), dict) and \
               isinstance(tool_def['function'].get('name'), str):
                all_tool_names.append(tool_def['function']['name'])

    if not all_tool_names:
        self.io.tool_output(f"No tools reported by server '{target_server_name}'. Nothing to configure.")
        # Ensure enabled_tools is empty or None if no tools are available
        if server_config_in_profile.get(PROFILE_SERVER_ENABLED_TOOLS_KEY) is not None:
            server_config_in_profile[PROFILE_SERVER_ENABLED_TOOLS_KEY] = None # Or [] if explicit empty is preferred
            save_mcp_profiles(self.profiles, self.persisted_active_profile_name)
            self.io.tool_output(f"Cleared enabled tools for server '{target_server_name}' as it reports no available tools.")
        return

    all_tool_names.sort()

    current_enabled_tools_in_profile = server_config_in_profile.get(PROFILE_SERVER_ENABLED_TOOLS_KEY)

    # Determine which tools should be pre-selected in the dialog:
    # - If current_enabled_tools_in_profile is None, all available tools are pre-selected.
    # - Otherwise, only tools present in both all_tool_names (from server)
    #   and current_enabled_tools_in_profile (from saved profile) are pre-selected.
    pre_selected_tool_names = []
    if current_enabled_tools_in_profile is None: # None means all tools are implicitly enabled
        pre_selected_tool_names = list(all_tool_names)
    else: # It's a list of explicitly enabled tools
        pre_selected_tool_names = [
            tool_name for tool_name in all_tool_names if tool_name in current_enabled_tools_in_profile
        ]

    choices = [(name, name) for name in all_tool_names]

    # checkboxlist_dialog expects default_values to be a list of the *values* of the selected choices
    selected_tools_from_dialog = checkboxlist_dialog(
        title=f"Configure Tools for Server: {target_server_name}",
        text=f"Select tools to enable for server '{target_server_name}' in profile '{self.active_profile_name}':\n(Space to toggle, Enter to confirm)",
        values=choices,
        default_values=pre_selected_tool_names # Pass the names that should be pre-checked
    ).run()

    if selected_tools_from_dialog is None: # User cancelled the dialog
        self.io.tool_output("Tool configuration cancelled.")
        return

    # Update the profile
    # If all tools are selected, store None to signify "all enabled by default"
    # Otherwise, store the explicit list.
    if set(selected_tools_from_dialog) == set(all_tool_names):
        new_enabled_tools_for_profile = None
    else:
        new_enabled_tools_for_profile = selected_tools_from_dialog

    active_profile.servers[server_config_index][PROFILE_SERVER_ENABLED_TOOLS_KEY] = new_enabled_tools_for_profile

    save_mcp_profiles(self.profiles, self.persisted_active_profile_name)
    self.io.tool_output(f"Tool configuration updated for server '{target_server_name}' in profile '{self.active_profile_name}'.")
+
+
def delete_profile(self, profile_name: str):
    """
    Delete a profile (never 'all').  If it is the active profile it is
    disabled first; if it was the persisted default, the default is cleared.
    The updated profile set is saved to disk.
    """
    if profile_name == "all":
        self.io.tool_error("The 'all' profile cannot be deleted.")
        return
    if profile_name not in self.profiles:
        self.io.tool_error(f"Profile '{profile_name}' not found.")
        return

    if self.active_profile_name == profile_name:
        # disable_profile() clears active_profile_name in memory; its save
        # uses the (still unchanged) persisted default.
        self.disable_profile()

    was_persisted_default = self.persisted_active_profile_name == profile_name

    del self.profiles[profile_name]

    if was_persisted_default:
        self.persisted_active_profile_name = None
        self.io.tool_output(f"MCP profile '{profile_name}' was the persisted default and has been cleared as default.")

    # Save the remaining profiles and the possibly-updated persisted default.
    save_mcp_profiles(self.profiles, self.persisted_active_profile_name)
    self.io.tool_output(f"MCP profile '{profile_name}' deleted.")
+
def is_server_no_confirm(self, server_name_to_check: str) -> bool:
    """
    True when the active profile marks *server_name_to_check* as 'no_confirm'
    (its tool calls run without asking the user).  False when no profile is
    active, the server is not in the profile, or the flag is unset/False.
    """
    active = self.get_profile(self.active_profile_name) if self.active_profile_name else None
    if active is None:
        return False

    match = next(
        (conf for conf in active.servers if conf.get(SERVER_NAME_KEY) == server_name_to_check),
        None,
    )
    if match is None:
        return False  # server not listed in the active profile
    return match.get(SERVER_NO_CONFIRM_KEY, False)
+
def get_enabled_tools_for_server(self, server_name_to_check: str) -> Optional[List[str]]:
    """
    Enabled tool names for *server_name_to_check* in the active profile.

    Returns None when all tools are implicitly enabled ('enabled_tools' not
    recorded), when no profile is active, or when the server is not in the
    active profile; an explicit empty list means "no tools enabled".
    """
    if not self.active_profile_name:
        return None
    active = self.get_profile(self.active_profile_name)
    if active is None:
        return None

    match = next(
        (conf for conf in active.servers if conf.get(SERVER_NAME_KEY) == server_name_to_check),
        None,
    )
    if match is None:
        return None  # server not listed in the active profile
    # .get returns None when the key is absent ("all tools enabled").
    return match.get(PROFILE_SERVER_ENABLED_TOOLS_KEY)
+
def get_tool_prompt_if_active(self) -> Optional[str]:
    """Return the standard tool prompt when a profile is active, otherwise None."""
    if not self.active_profile_name:
        return None
    return CoderPrompts.tool_prompt
+
def persist_active_profile(self):
    """Record the currently active profile as the default in the profiles YAML file."""
    if not self.active_profile_name:
        self.io.tool_output("No active MCP profile to persist.")
        return
    save_mcp_profiles(self.profiles, self.active_profile_name)
    self.persisted_active_profile_name = self.active_profile_name
    self.io.tool_output(f"MCP profile '{self.active_profile_name}' persisted as default.")
+
def clear_persisted_default_profile(self):
    """Remove the persisted default profile name from the configuration file."""
    if self.persisted_active_profile_name is None:
        self.io.tool_output("No default MCP profile is currently persisted.")
        return

    previous_default = self.persisted_active_profile_name
    self.persisted_active_profile_name = None
    # Passing None drops the default_profile_name key from the saved YAML.
    save_mcp_profiles(self.profiles, None)
    self.io.tool_output(f"Cleared persisted default MCP profile '{previous_default}'.")
+
+
def enable_profile(self, profile_name: str, main_model, main_edit_format):
    """
    Activate a profile: resolve its server configs, build an MCPClientPool,
    fetch and cache the servers' tools, and mark the profile active.

    Args:
        profile_name: name of the profile to activate.
        main_model: model object forwarded to MCPClientPool.
        main_edit_format: edit format forwarded to MCPClientPool.
    """
    if self.active_profile_name == profile_name and self.active_mcp_client_pool:
        self.io.tool_output(f"MCP profile '{profile_name}' is already active.")
        return

    if self.active_profile_name is not None:
        self.disable_profile()

    profile_to_enable = self.get_profile(profile_name)
    if not profile_to_enable:
        self.io.tool_error(f"MCP profile '{profile_name}' not found.")
        return

    server_configs_in_profile = profile_to_enable.servers
    if not server_configs_in_profile:
        self.io.tool_warning(f"MCP profile '{profile_name}' has no servers configured. Profile enabled but no connections made.")
        self.active_profile_name = profile_name
        self.active_mcp_client_pool = None  # Ensure it's cleared
        return

    # Retrieve server configurations using the centralized load_mcp_servers function.
    all_mcp_server_objects = load_mcp_servers(
        mcp_servers=getattr(self.settings, 'mcp_servers', None),
        mcp_servers_file=getattr(self.settings, 'mcp_servers_file', None),
        io=self.io,
        verbose=getattr(self.settings, 'verbose', False),
    )

    # Keep only the servers named by this profile; use their config dicts.
    profile_server_names = [s_conf[SERVER_NAME_KEY] for s_conf in server_configs_in_profile]
    matched_server_configs = [
        s_obj.config for s_obj in all_mcp_server_objects
        if s_obj.name in profile_server_names
    ]
    if not matched_server_configs:
        self.io.tool_error(f"No configured MCP servers found for profile '{profile_name}'. Check server names and .aider.conf.yml.")
        return

    pool = MCPClientPool(self.io, main_model, main_edit_format, matched_server_configs)

    # fetch_and_cache_tools() is async and is normally driven to completion
    # with asyncio.run().  asyncio.run() cannot be nested inside a running
    # event loop, so probe for one first with asyncio.get_running_loop()
    # (the non-deprecated replacement for the old get_event_loop() check)
    # and, in that rare case, enable the profile without tool definitions.
    try:
        asyncio.get_running_loop()
        in_running_loop = True
    except RuntimeError:
        in_running_loop = False

    if in_running_loop:
        # Ideally enable_profile would be awaited from async contexts
        # instead of being called synchronously.
        self.io.tool_warning("Warning: Could not fetch MCP tools due to running asyncio event loop during profile enabling.")
    else:
        try:
            asyncio.run(pool.fetch_and_cache_tools())
        except RuntimeError as e:
            self.io.tool_error(f"Error fetching MCP tools: {e}")
            return  # Do not enable the profile if tool fetching failed.

    self.active_mcp_client_pool = pool
    self.active_profile_name = profile_name
    # Enabling a profile does not change the persisted default (only
    # /mcp persist does); still save in case the in-memory profiles changed
    # (e.g. the 'all' profile was refreshed).
    save_mcp_profiles(self.profiles, self.persisted_active_profile_name)
    self.io.tool_output(f"MCP profile '{profile_name}' enabled with {len(matched_server_configs)} server(s).")
+
def disable_profile(self):
    """Deactivate the current profile, shutting down its client pool if one exists."""
    pool = self.active_mcp_client_pool
    if pool:
        # Shut down connections when the pool supports it.
        shutdown_fn = getattr(pool, 'shutdown', None)
        if callable(shutdown_fn):
            shutdown_fn()
        self.active_mcp_client_pool = None

    if self.active_profile_name is None:
        self.io.tool_output("No MCP profile was active.")
        return

    disabled_profile_name = self.active_profile_name
    self.active_profile_name = None
    # Disabling does not touch the persisted default (only /mcp persist clear
    # does); still save in case other in-memory profile changes are pending.
    save_mcp_profiles(self.profiles, self.persisted_active_profile_name)
    self.io.tool_output(f"MCP profile '{disabled_profile_name}' disabled.")
diff --git a/aider/mcp/tool_filter.py b/aider/mcp/tool_filter.py
new file mode 100644
index 00000000000..eaa1ad53208
--- /dev/null
+++ b/aider/mcp/tool_filter.py
@@ -0,0 +1,38 @@
+from typing import List, Dict, Optional, Any
+
+def filter_tools_for_server(
+ original_tool_definitions: List[Dict[str, Any]],
+ enabled_tool_names: Optional[List[str]]
+) -> List[Dict[str, Any]]:
+ """
+ Filters a list of tool definitions based on a list of enabled tool names.
+
+ Args:
+ original_tool_definitions: The original list of tool definition dictionaries.
+ enabled_tool_names: An optional list of tool names that are enabled.
+ If None, all original_tool_definitions are considered enabled.
+
+ Returns:
+ A new list containing only the tool definitions that are enabled.
+ """
+ if enabled_tool_names is None:
+ # If enabled_tool_names is None, it means all tools are implicitly enabled for this server in the profile.
+ return list(original_tool_definitions) # Return a copy
+
+ filtered_tools = []
+ if original_tool_definitions: # Ensure there are tools to iterate over
+ for tool_def in original_tool_definitions:
+ try:
+ # Ensure tool_def is a dict and has the expected structure
+ if isinstance(tool_def, dict) and \
+ tool_def.get("type") == "function" and \
+ isinstance(tool_def.get("function"), dict) and \
+ isinstance(tool_def["function"].get("name"), str) and \
+ tool_def["function"]["name"] in enabled_tool_names:
+ filtered_tools.append(tool_def)
+ except (KeyError, TypeError):
+ # Handle cases where tool_def might not have the expected structure
+ # This might happen if tool definitions are malformed.
+ # Optionally, log a warning here if io is available.
+ continue
+ return filtered_tools
diff --git a/aider/models.py b/aider/models.py
index d0d65079a29..cf95adad1a3 100644
--- a/aider/models.py
+++ b/aider/models.py
@@ -879,22 +879,26 @@ def is_ollama(self):
def github_copilot_token_to_open_ai_key(self):
# check to see if there's an openai api key
# If so, check to see if it's expire
- openai_api_key = 'OPENAI_API_KEY'
+ openai_api_key = "OPENAI_API_KEY"
if openai_api_key not in os.environ or (
- int(dict(x.split("=") for x in os.environ[openai_api_key].split(";"))['exp']) < int(datetime.now().timestamp())
+ int(dict(x.split("=") for x in os.environ[openai_api_key].split(";"))["exp"])
+ < int(datetime.now().timestamp())
):
import requests
+
headers = {
- 'Authorization': f"Bearer {os.environ['GITHUB_COPILOT_TOKEN']}",
- 'Editor-Version': self.extra_params['extra_headers']['Editor-Version'],
- 'Copilot-Integration-Id': self.extra_params['extra_headers']['Copilot-Integration-Id'],
- 'Content-Type': 'application/json',
+ "Authorization": f"Bearer {os.environ['GITHUB_COPILOT_TOKEN']}",
+ "Editor-Version": self.extra_params["extra_headers"]["Editor-Version"],
+ "Copilot-Integration-Id": self.extra_params["extra_headers"][
+ "Copilot-Integration-Id"
+ ],
+ "Content-Type": "application/json",
}
res = requests.get("https://api.github.com/copilot_internal/v2/token", headers=headers)
- os.environ[openai_api_key] = res.json()['token']
+ os.environ[openai_api_key] = res.json()["token"]
- def send_completion(self, messages, functions, stream, temperature=None):
+ def send_completion(self, messages, functions, stream, temperature=None, tools=None):
if os.environ.get("AIDER_SANITY_CHECK_TURNS"):
sanity_check_messages(messages)
@@ -936,7 +940,7 @@ def send_completion(self, messages, functions, stream, temperature=None):
kwargs["messages"] = messages
# Are we using github copilot?
- if 'GITHUB_COPILOT_TOKEN' in os.environ:
+ if "GITHUB_COPILOT_TOKEN" in os.environ:
self.github_copilot_token_to_open_ai_key()
res = litellm.completion(**kwargs)
diff --git a/aider/resources/model-settings.yml b/aider/resources/model-settings.yml
index 9f3416df5e7..23cd2887517 100644
--- a/aider/resources/model-settings.yml
+++ b/aider/resources/model-settings.yml
@@ -1747,3 +1747,7 @@
editor_edit_format: editor-diff
accepts_settings: ["thinking_tokens"]
+- name: vertex_ai/gemini-2.5-flash-preview-05-20
+ edit_format: diff
+ use_repo_map: true
+ accepts_settings: ["reasoning_effort", "thinking_tokens"]
\ No newline at end of file
diff --git a/aider/website/_data/polyglot_leaderboard.yml b/aider/website/_data/polyglot_leaderboard.yml
index 13ef32e368a..51676fc729b 100644
--- a/aider/website/_data/polyglot_leaderboard.yml
+++ b/aider/website/_data/polyglot_leaderboard.yml
@@ -1419,4 +1419,62 @@
date: 2025-05-25
versions: 0.83.3.dev
seconds_per_case: 44.1
- total_cost: 65.7484
\ No newline at end of file
+ total_cost: 65.7484
+
+- dirname: 2025-05-26-15-56-31--flash25-05-20-24k-think # dirname is misleading
+ test_cases: 225
+ model: gemini-2.5-flash-preview-05-20 (no think)
+ edit_format: diff
+ commit_hash: 214b811-dirty
+ thinking_tokens: 0 # <-- no thinking
+ pass_rate_1: 20.9
+ pass_rate_2: 44.0
+ pass_num_1: 47
+ pass_num_2: 99
+ percent_cases_well_formed: 93.8
+ error_outputs: 16
+ num_malformed_responses: 16
+ num_with_malformed_responses: 14
+ user_asks: 79
+ lazy_comments: 0
+ syntax_errors: 0
+ indentation_errors: 0
+ exhausted_context_windows: 0
+ prompt_tokens: 5512458
+ completion_tokens: 514145
+ test_timeouts: 4
+ total_tests: 225
+ command: aider --model gemini/gemini-2.5-flash-preview-05-20
+ date: 2025-05-26
+ versions: 0.83.3.dev
+ seconds_per_case: 12.2
+ total_cost: 1.1354
+
+- dirname: 2025-05-25-22-58-44--flash25-05-20-24k-think
+ test_cases: 225
+ model: gemini-2.5-flash-preview-05-20 (24k think)
+ edit_format: diff
+ commit_hash: a8568c3-dirty
+ thinking_tokens: 24576
+ pass_rate_1: 26.2
+ pass_rate_2: 55.1
+ pass_num_1: 59
+ pass_num_2: 124
+ percent_cases_well_formed: 95.6
+ error_outputs: 15
+ num_malformed_responses: 15
+ num_with_malformed_responses: 10
+ user_asks: 101
+ lazy_comments: 0
+ syntax_errors: 0
+ indentation_errors: 0
+ exhausted_context_windows: 0
+ prompt_tokens: 3666792
+ completion_tokens: 2703162
+ test_timeouts: 4
+ total_tests: 225
+ command: aider --model gemini/gemini-2.5-flash-preview-05-20
+ date: 2025-05-25
+ versions: 0.83.3.dev
+ seconds_per_case: 53.9
+ total_cost: 8.5625
\ No newline at end of file
diff --git a/aider/website/assets/sample-analytics.jsonl b/aider/website/assets/sample-analytics.jsonl
index 376627272d9..c010e2f5368 100644
--- a/aider/website/assets/sample-analytics.jsonl
+++ b/aider/website/assets/sample-analytics.jsonl
@@ -1,271 +1,3 @@
-{"event": "auto_commits", "properties": {"enabled": true}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746806583}
-{"event": "exit", "properties": {"reason": "Unknown edit format"}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746806583}
-{"event": "launched", "properties": {}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746806583}
-{"event": "model warning", "properties": {"main_model": "None", "weak_model": "None", "editor_model": "None"}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746806583}
-{"event": "no-repo", "properties": {}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746806583}
-{"event": "auto_commits", "properties": {"enabled": true}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746806583}
-{"event": "exit", "properties": {"reason": "Unknown edit format"}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746806583}
-{"event": "launched", "properties": {}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746806583}
-{"event": "model warning", "properties": {"main_model": "None", "weak_model": "None", "editor_model": "None"}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746806584}
-{"event": "no-repo", "properties": {}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746806584}
-{"event": "auto_commits", "properties": {"enabled": true}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746806584}
-{"event": "exit", "properties": {"reason": "Unknown edit format"}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746806584}
-{"event": "launched", "properties": {}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746806584}
-{"event": "model warning", "properties": {"main_model": "None", "weak_model": "None", "editor_model": "None"}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746806584}
-{"event": "no-repo", "properties": {}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746806584}
-{"event": "auto_commits", "properties": {"enabled": true}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746806584}
-{"event": "exit", "properties": {"reason": "Unknown edit format"}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746806584}
-{"event": "launched", "properties": {}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746806584}
-{"event": "model warning", "properties": {"main_model": "None", "weak_model": "None", "editor_model": "None"}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746806585}
-{"event": "no-repo", "properties": {}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746806585}
-{"event": "auto_commits", "properties": {"enabled": true}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746806585}
-{"event": "exit", "properties": {"reason": "Unknown edit format"}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746806585}
-{"event": "launched", "properties": {}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746806585}
-{"event": "model warning", "properties": {"main_model": "None", "weak_model": "None", "editor_model": "None"}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746806585}
-{"event": "no-repo", "properties": {}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746806585}
-{"event": "auto_commits", "properties": {"enabled": true}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746806585}
-{"event": "exit", "properties": {"reason": "Unknown edit format"}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746806585}
-{"event": "launched", "properties": {}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746806585}
-{"event": "model warning", "properties": {"main_model": "None", "weak_model": "None", "editor_model": "None"}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746806586}
-{"event": "no-repo", "properties": {}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746806586}
-{"event": "auto_commits", "properties": {"enabled": true}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746806586}
-{"event": "exit", "properties": {"reason": "Unknown edit format"}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746806586}
-{"event": "launched", "properties": {}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746806586}
-{"event": "model warning", "properties": {"main_model": "None", "weak_model": "None", "editor_model": "None"}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746806586}
-{"event": "no-repo", "properties": {}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746806586}
-{"event": "auto_commits", "properties": {"enabled": true}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746806586}
-{"event": "exit", "properties": {"reason": "Unknown edit format"}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746806586}
-{"event": "launched", "properties": {}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746806586}
-{"event": "model warning", "properties": {"main_model": "None", "weak_model": "None", "editor_model": "None"}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746806587}
-{"event": "no-repo", "properties": {}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746806587}
-{"event": "auto_commits", "properties": {"enabled": true}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746806587}
-{"event": "exit", "properties": {"reason": "Unknown edit format"}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746806587}
-{"event": "launched", "properties": {}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746806587}
-{"event": "model warning", "properties": {"main_model": "None", "weak_model": "None", "editor_model": "None"}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746806587}
-{"event": "no-repo", "properties": {}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746806587}
-{"event": "auto_commits", "properties": {"enabled": true}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746806587}
-{"event": "exit", "properties": {"reason": "Unknown edit format"}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746806587}
-{"event": "launched", "properties": {}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746806588}
-{"event": "model warning", "properties": {"main_model": "None", "weak_model": "None", "editor_model": "None"}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746806588}
-{"event": "no-repo", "properties": {}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746806588}
-{"event": "auto_commits", "properties": {"enabled": true}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746806588}
-{"event": "exit", "properties": {"reason": "Unknown edit format"}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746806588}
-{"event": "launched", "properties": {}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746806588}
-{"event": "model warning", "properties": {"main_model": "None", "weak_model": "None", "editor_model": "None"}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746806588}
-{"event": "no-repo", "properties": {}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746806588}
-{"event": "auto_commits", "properties": {"enabled": true}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746806588}
-{"event": "exit", "properties": {"reason": "Unknown edit format"}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746806588}
-{"event": "launched", "properties": {}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746806589}
-{"event": "model warning", "properties": {"main_model": "None", "weak_model": "None", "editor_model": "None"}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746806589}
-{"event": "no-repo", "properties": {}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746806589}
-{"event": "auto_commits", "properties": {"enabled": true}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746806589}
-{"event": "exit", "properties": {"reason": "Unknown edit format"}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746806589}
-{"event": "launched", "properties": {}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746806680}
-{"event": "repo", "properties": {"num_files": 624}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746806681}
-{"event": "auto_commits", "properties": {"enabled": true}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746806681}
-{"event": "exit", "properties": {"reason": "Exit flag set"}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746806681}
-{"event": "launched", "properties": {}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746806681}
-{"event": "model warning", "properties": {"main_model": "None", "weak_model": "None", "editor_model": "None"}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746806681}
-{"event": "repo", "properties": {"num_files": 624}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746806681}
-{"event": "auto_commits", "properties": {"enabled": true}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746806681}
-{"event": "exit", "properties": {"reason": "Unknown edit format"}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746806681}
-{"event": "launched", "properties": {}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746806797}
-{"event": "gui session", "properties": {}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746806805}
-{"event": "exit", "properties": {"reason": "GUI session ended"}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746806805}
-{"event": "launched", "properties": {}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746807045}
-{"event": "repo", "properties": {"num_files": 624}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746807045}
-{"event": "auto_commits", "properties": {"enabled": true}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746807045}
-{"event": "cli session", "properties": {"main_model": "gemini/gemini-2.5-pro-exp-03-25", "weak_model": "gemini/gemini-2.5-flash-preview-04-17", "editor_model": "gemini/gemini-2.5-pro-exp-03-25", "edit_format": "udiff-simple"}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746807045}
-{"event": "exit", "properties": {"reason": "Control-C"}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746807048}
-{"event": "launched", "properties": {}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746807269}
-{"event": "repo", "properties": {"num_files": 624}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746807269}
-{"event": "auto_commits", "properties": {"enabled": true}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746807269}
-{"event": "cli session", "properties": {"main_model": "gemini/gemini-2.5-pro-exp-03-25", "weak_model": "gemini/gemini-2.5-flash-preview-04-17", "editor_model": "gemini/gemini-2.5-pro-exp-03-25", "edit_format": "udiff-simple"}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746807269}
-{"event": "command_run", "properties": {}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746807285}
-{"event": "command_run", "properties": {}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746807297}
-{"event": "command_add", "properties": {}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746807308}
-{"event": "command_add", "properties": {}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746807318}
-{"event": "command_read-only", "properties": {}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746807321}
-{"event": "command_run", "properties": {}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746807329}
-{"event": "command_ask", "properties": {}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746807363}
-{"event": "message_send_starting", "properties": {}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746807363}
-{"event": "command_ask", "properties": {}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746807398}
-{"event": "message_send_starting", "properties": {}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746807398}
-{"event": "message_send", "properties": {"main_model": "gemini/gemini-2.5-pro-exp-03-25", "weak_model": "gemini/gemini-2.5-flash-preview-04-17", "editor_model": "gemini/gemini-2.5-pro-exp-03-25", "edit_format": "ask", "prompt_tokens": 23026, "completion_tokens": 502, "total_tokens": 23528, "cost": 0, "total_cost": 0.0}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746807420}
-{"event": "message_send_starting", "properties": {}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746807470}
-{"event": "message_send", "properties": {"main_model": "gemini/gemini-2.5-pro-exp-03-25", "weak_model": "gemini/gemini-2.5-flash-preview-04-17", "editor_model": "gemini/gemini-2.5-pro-exp-03-25", "edit_format": "udiff-simple", "prompt_tokens": 23799, "completion_tokens": 166, "total_tokens": 23965, "cost": 0, "total_cost": 0.0}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746807477}
-{"event": "launched", "properties": {}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746809445}
-{"event": "repo", "properties": {"num_files": 624}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746809445}
-{"event": "auto_commits", "properties": {"enabled": true}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746809445}
-{"event": "cli session", "properties": {"main_model": "gemini/gemini-2.5-pro-exp-03-25", "weak_model": "gemini/gemini-2.5-flash-preview-04-17", "editor_model": "gemini/gemini-2.5-pro-exp-03-25", "edit_format": "udiff-simple"}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746809445}
-{"event": "exit", "properties": {"reason": "Completed main CLI coder.run"}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746809475}
-{"event": "launched", "properties": {}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746809482}
-{"event": "repo", "properties": {"num_files": 624}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746809482}
-{"event": "auto_commits", "properties": {"enabled": true}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746809482}
-{"event": "cli session", "properties": {"main_model": "gemini/gemini-2.5-pro-exp-03-25", "weak_model": "gemini/gemini-2.5-flash-preview-04-17", "editor_model": "gemini/gemini-2.5-pro-exp-03-25", "edit_format": "udiff-simple"}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746809482}
-{"event": "message_send_starting", "properties": {}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746809497}
-{"event": "message_send", "properties": {"main_model": "gemini/gemini-2.5-pro-exp-03-25", "weak_model": "gemini/gemini-2.5-flash-preview-04-17", "editor_model": "gemini/gemini-2.5-pro-exp-03-25", "edit_format": "udiff-simple", "prompt_tokens": 8097, "completion_tokens": 165, "total_tokens": 8262, "cost": 0, "total_cost": 0.0}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746809502}
-{"event": "exit", "properties": {"reason": "Completed main CLI coder.run"}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746809516}
-{"event": "exit", "properties": {"reason": "Completed main CLI coder.run"}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746809724}
-{"event": "launched", "properties": {}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746809985}
-{"event": "repo", "properties": {"num_files": 624}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746809986}
-{"event": "auto_commits", "properties": {"enabled": true}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746809986}
-{"event": "message_send_starting", "properties": {}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746809986}
-{"event": "launched", "properties": {}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746809996}
-{"event": "repo", "properties": {"num_files": 624}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746809997}
-{"event": "auto_commits", "properties": {"enabled": true}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746809997}
-{"event": "message_send_starting", "properties": {}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746809997}
-{"event": "exit", "properties": {"reason": "Completed --message"}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746810015}
-{"event": "launched", "properties": {}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746810207}
-{"event": "repo", "properties": {"num_files": 624}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746810207}
-{"event": "auto_commits", "properties": {"enabled": true}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746810207}
-{"event": "message_send_starting", "properties": {}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746810207}
-{"event": "launched", "properties": {}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746810226}
-{"event": "repo", "properties": {"num_files": 624}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746810226}
-{"event": "auto_commits", "properties": {"enabled": true}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746810226}
-{"event": "cli session", "properties": {"main_model": "gemini/gemini-2.5-pro-exp-03-25", "weak_model": "gemini/gemini-2.5-flash-preview-04-17", "editor_model": "gemini/gemini-2.5-pro-exp-03-25", "edit_format": "udiff-simple"}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746810227}
-{"event": "message_send_starting", "properties": {}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746810240}
-{"event": "message_send", "properties": {"main_model": "gemini/gemini-2.5-pro-exp-03-25", "weak_model": "gemini/gemini-2.5-flash-preview-04-17", "editor_model": "gemini/gemini-2.5-pro-exp-03-25", "edit_format": "udiff-simple", "prompt_tokens": 21143, "completion_tokens": 203, "total_tokens": 21346, "cost": 0, "total_cost": 0.0}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746810247}
-{"event": "command_exit", "properties": {}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746810275}
-{"event": "exit", "properties": {"reason": "/exit"}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746810275}
-{"event": "launched", "properties": {}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746810280}
-{"event": "repo", "properties": {"num_files": 624}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746810281}
-{"event": "auto_commits", "properties": {"enabled": true}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746810281}
-{"event": "message_send_starting", "properties": {}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746810281}
-{"event": "launched", "properties": {}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746810324}
-{"event": "repo", "properties": {"num_files": 624}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746810324}
-{"event": "auto_commits", "properties": {"enabled": true}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746810324}
-{"event": "cli session", "properties": {"main_model": "gemini/gemini-2.5-pro-exp-03-25", "weak_model": "gemini/gemini-2.5-flash-preview-04-17", "editor_model": "gemini/gemini-2.5-pro-exp-03-25", "edit_format": "udiff-simple"}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746810325}
-{"event": "message_send_starting", "properties": {}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746810335}
-{"event": "message_send", "properties": {"main_model": "gemini/gemini-2.5-pro-exp-03-25", "weak_model": "gemini/gemini-2.5-flash-preview-04-17", "editor_model": "gemini/gemini-2.5-pro-exp-03-25", "edit_format": "udiff-simple", "prompt_tokens": 22156, "completion_tokens": 681, "total_tokens": 22837, "cost": 0, "total_cost": 0.0}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746810354}
-{"event": "command_exit", "properties": {}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746810368}
-{"event": "exit", "properties": {"reason": "/exit"}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746810368}
-{"event": "launched", "properties": {}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746810372}
-{"event": "repo", "properties": {"num_files": 624}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746810372}
-{"event": "auto_commits", "properties": {"enabled": true}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746810372}
-{"event": "message_send_starting", "properties": {}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746810373}
-{"event": "exit", "properties": {"reason": "Completed --message"}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746810382}
-{"event": "launched", "properties": {}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746810383}
-{"event": "repo", "properties": {"num_files": 624}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746810384}
-{"event": "auto_commits", "properties": {"enabled": true}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746810384}
-{"event": "message_send_starting", "properties": {}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746810384}
-{"event": "exit", "properties": {"reason": "Control-C"}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746810386}
-{"event": "launched", "properties": {}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746810390}
-{"event": "repo", "properties": {"num_files": 624}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746810391}
-{"event": "auto_commits", "properties": {"enabled": true}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746810391}
-{"event": "message_send_starting", "properties": {}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746810391}
-{"event": "exit", "properties": {"reason": "Completed --message"}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746810396}
-{"event": "launched", "properties": {}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746810436}
-{"event": "repo", "properties": {"num_files": 624}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746810437}
-{"event": "auto_commits", "properties": {"enabled": true}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746810437}
-{"event": "exit", "properties": {"reason": "Completed lint/test/commit"}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746810441}
-{"event": "launched", "properties": {}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746810453}
-{"event": "repo", "properties": {"num_files": 624}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746810454}
-{"event": "auto_commits", "properties": {"enabled": true}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746810454}
-{"event": "message_send_starting", "properties": {}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746810454}
-{"event": "exit", "properties": {"reason": "Completed --message"}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746810503}
-{"event": "launched", "properties": {}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746810513}
-{"event": "repo", "properties": {"num_files": 624}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746810513}
-{"event": "auto_commits", "properties": {"enabled": true}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746810513}
-{"event": "message_send_starting", "properties": {}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746810513}
-{"event": "message_send", "properties": {"main_model": "gemini/gemini-2.5-pro-exp-03-25", "weak_model": "gemini/gemini-2.5-flash-preview-04-17", "editor_model": "gemini/gemini-2.5-pro-exp-03-25", "edit_format": "udiff-simple", "prompt_tokens": 8627, "completion_tokens": 44, "total_tokens": 8671, "cost": 0, "total_cost": 0.0}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746810519}
-{"event": "exit", "properties": {"reason": "Completed --message"}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746810519}
-{"event": "launched", "properties": {}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746810525}
-{"event": "repo", "properties": {"num_files": 624}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746810525}
-{"event": "auto_commits", "properties": {"enabled": true}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746810525}
-{"event": "message_send_starting", "properties": {}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746810525}
-{"event": "message_send", "properties": {"main_model": "gemini/gemini-2.5-pro-exp-03-25", "weak_model": "gemini/gemini-2.5-flash-preview-04-17", "editor_model": "gemini/gemini-2.5-pro-exp-03-25", "edit_format": "udiff-simple", "prompt_tokens": 8428, "completion_tokens": 48, "total_tokens": 8476, "cost": 0, "total_cost": 0.0}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746810532}
-{"event": "exit", "properties": {"reason": "Completed --message"}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746810532}
-{"event": "launched", "properties": {}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746810538}
-{"event": "repo", "properties": {"num_files": 624}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746810539}
-{"event": "auto_commits", "properties": {"enabled": true}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746810539}
-{"event": "message_send_starting", "properties": {}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746810539}
-{"event": "exit", "properties": {"reason": "Completed --message"}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746810545}
-{"event": "launched", "properties": {}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746811483}
-{"event": "repo", "properties": {"num_files": 624}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746811483}
-{"event": "auto_commits", "properties": {"enabled": true}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746811483}
-{"event": "cli session", "properties": {"main_model": "gemini/gemini-2.5-pro-exp-03-25", "weak_model": "gemini/gemini-2.5-flash-preview-04-17", "editor_model": "gemini/gemini-2.5-pro-exp-03-25", "edit_format": "udiff-simple"}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746811483}
-{"event": "exit", "properties": {"reason": "Control-C"}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746811485}
-{"event": "launched", "properties": {}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746811519}
-{"event": "repo", "properties": {"num_files": 614}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746811520}
-{"event": "auto_commits", "properties": {"enabled": true}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746811520}
-{"event": "message_send_starting", "properties": {}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746811520}
-{"event": "launched", "properties": {}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746811526}
-{"event": "no-repo", "properties": {}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746811526}
-{"event": "auto_commits", "properties": {"enabled": true}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746811526}
-{"event": "message_send_starting", "properties": {}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746811526}
-{"event": "exit", "properties": {"reason": "Completed --message"}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746811531}
-{"event": "launched", "properties": {}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746811540}
-{"event": "repo", "properties": {"num_files": 614}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746811540}
-{"event": "auto_commits", "properties": {"enabled": true}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746811540}
-{"event": "cli session", "properties": {"main_model": "gemini/gemini-2.5-pro-exp-03-25", "weak_model": "gemini/gemini-2.5-flash-preview-04-17", "editor_model": "gemini/gemini-2.5-pro-exp-03-25", "edit_format": "udiff-simple"}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746811540}
-{"event": "message_send_starting", "properties": {}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746811542}
-{"event": "message_send", "properties": {"main_model": "gemini/gemini-2.5-pro-exp-03-25", "weak_model": "gemini/gemini-2.5-flash-preview-04-17", "editor_model": "gemini/gemini-2.5-pro-exp-03-25", "edit_format": "udiff-simple", "prompt_tokens": 8329, "completion_tokens": 48, "total_tokens": 8377, "cost": 0, "total_cost": 0.0}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746811547}
-{"event": "exit", "properties": {"reason": "Control-C"}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746811552}
-{"event": "launched", "properties": {}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746811566}
-{"event": "no-repo", "properties": {}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746811567}
-{"event": "auto_commits", "properties": {"enabled": true}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746811567}
-{"event": "cli session", "properties": {"main_model": "gemini/gemini-2.5-flash-preview-04-17", "weak_model": "gemini/gemini-2.5-flash-preview-04-17", "editor_model": "gemini/gemini-2.5-flash-preview-04-17", "edit_format": "diff"}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746811567}
-{"event": "exit", "properties": {"reason": "Completed main CLI coder.run"}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746811569}
-{"event": "launched", "properties": {}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746811575}
-{"event": "no-repo", "properties": {}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746811575}
-{"event": "auto_commits", "properties": {"enabled": true}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746811575}
-{"event": "cli session", "properties": {"main_model": "gemini/gemini-2.5-flash-preview-04-17", "weak_model": "gemini/gemini-2.5-flash-preview-04-17", "editor_model": "gemini/gemini-2.5-flash-preview-04-17", "edit_format": "diff"}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746811575}
-{"event": "message_send_starting", "properties": {}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746811595}
-{"event": "exit", "properties": {"reason": "Completed main CLI coder.run"}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746811608}
-{"event": "launched", "properties": {}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746811613}
-{"event": "repo", "properties": {"num_files": 624}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746811614}
-{"event": "auto_commits", "properties": {"enabled": true}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746811614}
-{"event": "cli session", "properties": {"main_model": "gemini/gemini-2.5-flash-preview-04-17", "weak_model": "gemini/gemini-2.5-flash-preview-04-17", "editor_model": "gemini/gemini-2.5-flash-preview-04-17", "edit_format": "diff"}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746811614}
-{"event": "exit", "properties": {"reason": "Control-C"}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746811615}
-{"event": "launched", "properties": {}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746811618}
-{"event": "repo", "properties": {"num_files": 624}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746811618}
-{"event": "auto_commits", "properties": {"enabled": true}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746811618}
-{"event": "cli session", "properties": {"main_model": "gemini/gemini-2.5-pro-exp-03-25", "weak_model": "gemini/gemini-2.5-flash-preview-04-17", "editor_model": "gemini/gemini-2.5-pro-exp-03-25", "edit_format": "udiff-simple"}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746811618}
-{"event": "message_send_starting", "properties": {}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746811620}
-{"event": "command_exit", "properties": {}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746811628}
-{"event": "exit", "properties": {"reason": "/exit"}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746811628}
-{"event": "launched", "properties": {}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746811652}
-{"event": "repo", "properties": {"num_files": 624}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746811653}
-{"event": "auto_commits", "properties": {"enabled": true}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746811653}
-{"event": "cli session", "properties": {"main_model": "gemini/gemini-2.5-pro-exp-03-25", "weak_model": "gemini/gemini-2.5-flash-preview-04-17", "editor_model": "gemini/gemini-2.5-pro-exp-03-25", "edit_format": "udiff-simple"}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746811653}
-{"event": "message_send_starting", "properties": {}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746811654}
-{"event": "message_send_starting", "properties": {}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746811658}
-{"event": "command_exit", "properties": {}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746811662}
-{"event": "exit", "properties": {"reason": "/exit"}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746811662}
-{"event": "launched", "properties": {}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746811703}
-{"event": "repo", "properties": {"num_files": 624}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746811703}
-{"event": "auto_commits", "properties": {"enabled": true}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746811703}
-{"event": "cli session", "properties": {"main_model": "gemini/gemini-2.5-pro-exp-03-25", "weak_model": "gemini/gemini-2.5-flash-preview-04-17", "editor_model": "gemini/gemini-2.5-pro-exp-03-25", "edit_format": "udiff-simple"}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746811703}
-{"event": "exit", "properties": {"reason": "Completed main CLI coder.run"}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746811705}
-{"event": "launched", "properties": {}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746830323}
-{"event": "model warning", "properties": {"main_model": "None", "weak_model": "None", "editor_model": "None"}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746830323}
-{"event": "no-repo", "properties": {}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746830323}
-{"event": "auto_commits", "properties": {"enabled": true}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746830323}
-{"event": "exit", "properties": {"reason": "Unknown edit format"}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746830323}
-{"event": "launched", "properties": {}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746830323}
-{"event": "model warning", "properties": {"main_model": "None", "weak_model": "None", "editor_model": "None"}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746830324}
-{"event": "no-repo", "properties": {}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746830324}
-{"event": "auto_commits", "properties": {"enabled": true}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746830324}
-{"event": "exit", "properties": {"reason": "Unknown edit format"}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746830324}
-{"event": "launched", "properties": {}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746830324}
-{"event": "model warning", "properties": {"main_model": "None", "weak_model": "None", "editor_model": "None"}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746830324}
-{"event": "no-repo", "properties": {}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746830324}
-{"event": "auto_commits", "properties": {"enabled": true}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746830324}
-{"event": "exit", "properties": {"reason": "Unknown edit format"}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746830324}
-{"event": "launched", "properties": {}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746830324}
-{"event": "model warning", "properties": {"main_model": "None", "weak_model": "None", "editor_model": "None"}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746830324}
-{"event": "no-repo", "properties": {}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746830324}
-{"event": "auto_commits", "properties": {"enabled": true}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746830324}
-{"event": "exit", "properties": {"reason": "Unknown edit format"}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746830324}
-{"event": "launched", "properties": {}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746830324}
-{"event": "model warning", "properties": {"main_model": "None", "weak_model": "None", "editor_model": "None"}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746830325}
-{"event": "no-repo", "properties": {}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746830325}
-{"event": "auto_commits", "properties": {"enabled": true}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746830325}
-{"event": "exit", "properties": {"reason": "Unknown edit format"}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746830325}
-{"event": "launched", "properties": {}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746830325}
-{"event": "model warning", "properties": {"main_model": "None", "weak_model": "None", "editor_model": "None"}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746830325}
-{"event": "no-repo", "properties": {}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746830325}
{"event": "auto_commits", "properties": {"enabled": true}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746830325}
{"event": "exit", "properties": {"reason": "Unknown edit format"}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746830325}
{"event": "launched", "properties": {}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1746830325}
@@ -998,3 +730,271 @@
{"event": "message_send_starting", "properties": {}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748035605}
{"event": "message_send", "properties": {"main_model": "gemini/gemini-2.5-pro-exp-03-25", "weak_model": "gemini/gemini-2.5-flash-preview-04-17", "editor_model": "gemini/gemini-2.5-pro-exp-03-25", "edit_format": "udiff-simple", "prompt_tokens": 5842, "completion_tokens": 408, "total_tokens": 6250, "cost": 0, "total_cost": 0.0}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748035674}
{"event": "exit", "properties": {"reason": "Completed --message"}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748035674}
+{"event": "launched", "properties": {}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748040556}
+{"event": "model warning", "properties": {"main_model": "None", "weak_model": "None", "editor_model": "None"}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748040562}
+{"event": "no-repo", "properties": {}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748040562}
+{"event": "auto_commits", "properties": {"enabled": true}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748040562}
+{"event": "exit", "properties": {"reason": "Unknown edit format"}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748040562}
+{"event": "launched", "properties": {}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748040562}
+{"event": "model warning", "properties": {"main_model": "None", "weak_model": "None", "editor_model": "None"}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748040563}
+{"event": "no-repo", "properties": {}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748040563}
+{"event": "auto_commits", "properties": {"enabled": true}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748040563}
+{"event": "exit", "properties": {"reason": "Unknown edit format"}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748040563}
+{"event": "launched", "properties": {}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748040563}
+{"event": "model warning", "properties": {"main_model": "None", "weak_model": "None", "editor_model": "None"}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748040564}
+{"event": "no-repo", "properties": {}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748040564}
+{"event": "auto_commits", "properties": {"enabled": true}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748040564}
+{"event": "exit", "properties": {"reason": "Unknown edit format"}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748040564}
+{"event": "launched", "properties": {}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748040564}
+{"event": "model warning", "properties": {"main_model": "None", "weak_model": "None", "editor_model": "None"}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748040564}
+{"event": "no-repo", "properties": {}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748040564}
+{"event": "auto_commits", "properties": {"enabled": true}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748040564}
+{"event": "exit", "properties": {"reason": "Unknown edit format"}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748040564}
+{"event": "launched", "properties": {}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748040564}
+{"event": "model warning", "properties": {"main_model": "None", "weak_model": "None", "editor_model": "None"}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748040565}
+{"event": "no-repo", "properties": {}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748040565}
+{"event": "auto_commits", "properties": {"enabled": true}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748040565}
+{"event": "exit", "properties": {"reason": "Unknown edit format"}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748040565}
+{"event": "launched", "properties": {}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748040565}
+{"event": "model warning", "properties": {"main_model": "None", "weak_model": "None", "editor_model": "None"}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748040565}
+{"event": "no-repo", "properties": {}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748040566}
+{"event": "auto_commits", "properties": {"enabled": true}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748040566}
+{"event": "exit", "properties": {"reason": "Unknown edit format"}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748040566}
+{"event": "launched", "properties": {}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748040566}
+{"event": "model warning", "properties": {"main_model": "None", "weak_model": "None", "editor_model": "None"}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748040566}
+{"event": "no-repo", "properties": {}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748040566}
+{"event": "auto_commits", "properties": {"enabled": true}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748040566}
+{"event": "exit", "properties": {"reason": "Unknown edit format"}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748040566}
+{"event": "launched", "properties": {}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748040566}
+{"event": "model warning", "properties": {"main_model": "None", "weak_model": "None", "editor_model": "None"}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748040567}
+{"event": "no-repo", "properties": {}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748040567}
+{"event": "auto_commits", "properties": {"enabled": true}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748040567}
+{"event": "exit", "properties": {"reason": "Unknown edit format"}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748040567}
+{"event": "launched", "properties": {}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748040567}
+{"event": "model warning", "properties": {"main_model": "None", "weak_model": "None", "editor_model": "None"}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748040567}
+{"event": "no-repo", "properties": {}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748040567}
+{"event": "auto_commits", "properties": {"enabled": true}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748040567}
+{"event": "exit", "properties": {"reason": "Unknown edit format"}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748040567}
+{"event": "launched", "properties": {}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748040567}
+{"event": "model warning", "properties": {"main_model": "None", "weak_model": "None", "editor_model": "None"}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748040568}
+{"event": "no-repo", "properties": {}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748040568}
+{"event": "auto_commits", "properties": {"enabled": true}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748040568}
+{"event": "exit", "properties": {"reason": "Unknown edit format"}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748040568}
+{"event": "launched", "properties": {}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748040568}
+{"event": "model warning", "properties": {"main_model": "None", "weak_model": "None", "editor_model": "None"}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748040569}
+{"event": "no-repo", "properties": {}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748040569}
+{"event": "auto_commits", "properties": {"enabled": true}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748040569}
+{"event": "exit", "properties": {"reason": "Unknown edit format"}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748040569}
+{"event": "launched", "properties": {}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748040569}
+{"event": "model warning", "properties": {"main_model": "None", "weak_model": "None", "editor_model": "None"}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748040569}
+{"event": "no-repo", "properties": {}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748040569}
+{"event": "auto_commits", "properties": {"enabled": true}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748040569}
+{"event": "exit", "properties": {"reason": "Unknown edit format"}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748040569}
+{"event": "launched", "properties": {}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748040569}
+{"event": "model warning", "properties": {"main_model": "None", "weak_model": "None", "editor_model": "None"}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748040570}
+{"event": "no-repo", "properties": {}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748040570}
+{"event": "auto_commits", "properties": {"enabled": true}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748040570}
+{"event": "exit", "properties": {"reason": "Unknown edit format"}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748040570}
+{"event": "launched", "properties": {}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748040570}
+{"event": "model warning", "properties": {"main_model": "None", "weak_model": "None", "editor_model": "None"}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748040571}
+{"event": "no-repo", "properties": {}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748040571}
+{"event": "auto_commits", "properties": {"enabled": true}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748040571}
+{"event": "exit", "properties": {"reason": "Unknown edit format"}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748040571}
+{"event": "launched", "properties": {}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748040571}
+{"event": "model warning", "properties": {"main_model": "None", "weak_model": "None", "editor_model": "None"}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748040571}
+{"event": "no-repo", "properties": {}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748040571}
+{"event": "auto_commits", "properties": {"enabled": true}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748040571}
+{"event": "exit", "properties": {"reason": "Unknown edit format"}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748040571}
+{"event": "launched", "properties": {}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748040571}
+{"event": "model warning", "properties": {"main_model": "None", "weak_model": "None", "editor_model": "None"}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748040572}
+{"event": "no-repo", "properties": {}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748040572}
+{"event": "auto_commits", "properties": {"enabled": true}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748040572}
+{"event": "exit", "properties": {"reason": "Unknown edit format"}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748040572}
+{"event": "launched", "properties": {}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748040642}
+{"event": "repo", "properties": {"num_files": 627}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748040643}
+{"event": "auto_commits", "properties": {"enabled": true}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748040643}
+{"event": "exit", "properties": {"reason": "Exit flag set"}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748040643}
+{"event": "launched", "properties": {}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748040643}
+{"event": "model warning", "properties": {"main_model": "None", "weak_model": "None", "editor_model": "None"}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748040644}
+{"event": "repo", "properties": {"num_files": 627}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748040644}
+{"event": "auto_commits", "properties": {"enabled": true}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748040644}
+{"event": "exit", "properties": {"reason": "Unknown edit format"}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748040644}
+{"event": "launched", "properties": {}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748040661}
+{"event": "gui session", "properties": {}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748040667}
+{"event": "exit", "properties": {"reason": "GUI session ended"}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748040667}
+{"event": "launched", "properties": {}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748040791}
+{"event": "model warning", "properties": {"main_model": "None", "weak_model": "None", "editor_model": "None"}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748040796}
+{"event": "no-repo", "properties": {}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748040796}
+{"event": "auto_commits", "properties": {"enabled": true}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748040796}
+{"event": "exit", "properties": {"reason": "Unknown edit format"}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748040796}
+{"event": "launched", "properties": {}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748040796}
+{"event": "model warning", "properties": {"main_model": "None", "weak_model": "None", "editor_model": "None"}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748040797}
+{"event": "no-repo", "properties": {}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748040797}
+{"event": "auto_commits", "properties": {"enabled": true}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748040797}
+{"event": "exit", "properties": {"reason": "Unknown edit format"}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748040797}
+{"event": "launched", "properties": {}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748040797}
+{"event": "model warning", "properties": {"main_model": "None", "weak_model": "None", "editor_model": "None"}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748040798}
+{"event": "no-repo", "properties": {}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748040798}
+{"event": "auto_commits", "properties": {"enabled": true}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748040798}
+{"event": "exit", "properties": {"reason": "Unknown edit format"}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748040798}
+{"event": "launched", "properties": {}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748040798}
+{"event": "model warning", "properties": {"main_model": "None", "weak_model": "None", "editor_model": "None"}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748040798}
+{"event": "no-repo", "properties": {}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748040798}
+{"event": "auto_commits", "properties": {"enabled": true}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748040799}
+{"event": "exit", "properties": {"reason": "Unknown edit format"}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748040799}
+{"event": "launched", "properties": {}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748040799}
+{"event": "model warning", "properties": {"main_model": "None", "weak_model": "None", "editor_model": "None"}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748040799}
+{"event": "no-repo", "properties": {}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748040799}
+{"event": "auto_commits", "properties": {"enabled": true}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748040799}
+{"event": "exit", "properties": {"reason": "Unknown edit format"}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748040799}
+{"event": "launched", "properties": {}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748040799}
+{"event": "model warning", "properties": {"main_model": "None", "weak_model": "None", "editor_model": "None"}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748040800}
+{"event": "no-repo", "properties": {}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748040800}
+{"event": "auto_commits", "properties": {"enabled": true}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748040800}
+{"event": "exit", "properties": {"reason": "Unknown edit format"}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748040800}
+{"event": "launched", "properties": {}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748040800}
+{"event": "model warning", "properties": {"main_model": "None", "weak_model": "None", "editor_model": "None"}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748040801}
+{"event": "no-repo", "properties": {}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748040801}
+{"event": "auto_commits", "properties": {"enabled": true}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748040801}
+{"event": "exit", "properties": {"reason": "Unknown edit format"}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748040801}
+{"event": "launched", "properties": {}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748040801}
+{"event": "model warning", "properties": {"main_model": "None", "weak_model": "None", "editor_model": "None"}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748040802}
+{"event": "no-repo", "properties": {}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748040802}
+{"event": "auto_commits", "properties": {"enabled": true}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748040802}
+{"event": "exit", "properties": {"reason": "Unknown edit format"}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748040802}
+{"event": "launched", "properties": {}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748040802}
+{"event": "model warning", "properties": {"main_model": "None", "weak_model": "None", "editor_model": "None"}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748040802}
+{"event": "no-repo", "properties": {}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748040802}
+{"event": "auto_commits", "properties": {"enabled": true}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748040802}
+{"event": "exit", "properties": {"reason": "Unknown edit format"}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748040802}
+{"event": "launched", "properties": {}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748040802}
+{"event": "model warning", "properties": {"main_model": "None", "weak_model": "None", "editor_model": "None"}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748040803}
+{"event": "no-repo", "properties": {}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748040803}
+{"event": "auto_commits", "properties": {"enabled": true}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748040803}
+{"event": "exit", "properties": {"reason": "Unknown edit format"}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748040803}
+{"event": "launched", "properties": {}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748040803}
+{"event": "model warning", "properties": {"main_model": "None", "weak_model": "None", "editor_model": "None"}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748040804}
+{"event": "no-repo", "properties": {}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748040804}
+{"event": "auto_commits", "properties": {"enabled": true}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748040804}
+{"event": "exit", "properties": {"reason": "Unknown edit format"}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748040804}
+{"event": "launched", "properties": {}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748040804}
+{"event": "model warning", "properties": {"main_model": "None", "weak_model": "None", "editor_model": "None"}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748040805}
+{"event": "no-repo", "properties": {}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748040805}
+{"event": "auto_commits", "properties": {"enabled": true}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748040805}
+{"event": "exit", "properties": {"reason": "Unknown edit format"}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748040805}
+{"event": "launched", "properties": {}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748040805}
+{"event": "model warning", "properties": {"main_model": "None", "weak_model": "None", "editor_model": "None"}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748040805}
+{"event": "no-repo", "properties": {}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748040805}
+{"event": "auto_commits", "properties": {"enabled": true}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748040805}
+{"event": "exit", "properties": {"reason": "Unknown edit format"}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748040805}
+{"event": "launched", "properties": {}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748040805}
+{"event": "model warning", "properties": {"main_model": "None", "weak_model": "None", "editor_model": "None"}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748040806}
+{"event": "no-repo", "properties": {}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748040806}
+{"event": "auto_commits", "properties": {"enabled": true}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748040806}
+{"event": "exit", "properties": {"reason": "Unknown edit format"}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748040806}
+{"event": "launched", "properties": {}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748040806}
+{"event": "model warning", "properties": {"main_model": "None", "weak_model": "None", "editor_model": "None"}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748040807}
+{"event": "no-repo", "properties": {}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748040807}
+{"event": "auto_commits", "properties": {"enabled": true}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748040807}
+{"event": "exit", "properties": {"reason": "Unknown edit format"}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748040807}
+{"event": "launched", "properties": {}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748040807}
+{"event": "model warning", "properties": {"main_model": "None", "weak_model": "None", "editor_model": "None"}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748040808}
+{"event": "no-repo", "properties": {}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748040808}
+{"event": "auto_commits", "properties": {"enabled": true}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748040808}
+{"event": "exit", "properties": {"reason": "Unknown edit format"}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748040808}
+{"event": "launched", "properties": {}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748040884}
+{"event": "repo", "properties": {"num_files": 627}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748040885}
+{"event": "auto_commits", "properties": {"enabled": true}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748040885}
+{"event": "exit", "properties": {"reason": "Exit flag set"}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748040885}
+{"event": "launched", "properties": {}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748040885}
+{"event": "model warning", "properties": {"main_model": "None", "weak_model": "None", "editor_model": "None"}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748040886}
+{"event": "repo", "properties": {"num_files": 627}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748040886}
+{"event": "auto_commits", "properties": {"enabled": true}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748040886}
+{"event": "exit", "properties": {"reason": "Unknown edit format"}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748040886}
+{"event": "launched", "properties": {}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748040976}
+{"event": "gui session", "properties": {}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748040981}
+{"event": "exit", "properties": {"reason": "GUI session ended"}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748040981}
+{"event": "launched", "properties": {}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748204071}
+{"event": "repo", "properties": {"num_files": 627}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748204071}
+{"event": "auto_commits", "properties": {"enabled": true}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748204071}
+{"event": "cli session", "properties": {"main_model": "gemini/gemini-2.5-pro-exp-03-25", "weak_model": "gemini/gemini-2.5-flash-preview-04-17", "editor_model": "gemini/gemini-2.5-pro-exp-03-25", "edit_format": "udiff-simple"}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748204071}
+{"event": "command_add", "properties": {}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748204076}
+{"event": "message_send_starting", "properties": {}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748204081}
+{"event": "message_send", "properties": {"main_model": "gemini/gemini-2.5-pro-exp-03-25", "weak_model": "gemini/gemini-2.5-flash-preview-04-17", "editor_model": "gemini/gemini-2.5-pro-exp-03-25", "edit_format": "udiff-simple", "prompt_tokens": 9284, "completion_tokens": 168, "total_tokens": 9452, "cost": 0, "total_cost": 0.0}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748204086}
+{"event": "message_send_starting", "properties": {}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748204110}
+{"event": "message_send", "properties": {"main_model": "gemini/gemini-2.5-pro-exp-03-25", "weak_model": "gemini/gemini-2.5-flash-preview-04-17", "editor_model": "gemini/gemini-2.5-pro-exp-03-25", "edit_format": "udiff-simple", "prompt_tokens": 9576, "completion_tokens": 151, "total_tokens": 9727, "cost": 0, "total_cost": 0.0}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748204114}
+{"event": "command_exit", "properties": {}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748204122}
+{"event": "exit", "properties": {"reason": "/exit"}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748204122}
+{"event": "launched", "properties": {}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748210276}
+{"event": "repo", "properties": {"num_files": 627}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748210277}
+{"event": "auto_commits", "properties": {"enabled": true}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748210277}
+{"event": "cli session", "properties": {"main_model": "gemini/gemini-2.5-pro-exp-03-25", "weak_model": "gemini/gemini-2.5-flash-preview-04-17", "editor_model": "gemini/gemini-2.5-pro-exp-03-25", "edit_format": "udiff-simple"}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748210277}
+{"event": "command_model", "properties": {}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748210283}
+{"event": "command_think-tokens", "properties": {}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748210287}
+{"event": "launched", "properties": {}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748210340}
+{"event": "exit", "properties": {"reason": "Listed models"}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748210341}
+{"event": "command_run", "properties": {}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748210356}
+{"event": "launched", "properties": {}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748210357}
+{"event": "exit", "properties": {"reason": "Listed models"}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748210357}
+{"event": "command_ask", "properties": {}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748210397}
+{"event": "message_send_starting", "properties": {}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748210397}
+{"event": "message_send", "properties": {"main_model": "anthropic/claude-sonnet-4-20250514", "weak_model": "gemini/gemini-2.5-flash-preview-04-17", "editor_model": "gemini/gemini-2.5-pro-exp-03-25", "edit_format": "ask", "prompt_tokens": 20175, "completion_tokens": 1279, "total_tokens": 21454, "cost": 0.07971, "total_cost": 0.07971}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748210417}
+{"event": "message_send_starting", "properties": {}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748210470}
+{"event": "message_send", "properties": {"main_model": "anthropic/claude-sonnet-4-20250514", "weak_model": "gemini/gemini-2.5-flash-preview-04-17", "editor_model": "gemini/gemini-2.5-pro-exp-03-25", "edit_format": "diff", "prompt_tokens": 23225, "completion_tokens": 1770, "total_tokens": 24995, "cost": 0.096225, "total_cost": 0.175935}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748210501}
+{"event": "command_clear", "properties": {}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748210508}
+{"event": "command_run", "properties": {}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748210514}
+{"event": "launched", "properties": {}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748210514}
+{"event": "exit", "properties": {"reason": "Listed models"}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748210515}
+{"event": "command_ask", "properties": {}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748210531}
+{"event": "message_send_starting", "properties": {}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748210531}
+{"event": "message_send", "properties": {"main_model": "anthropic/claude-sonnet-4-20250514", "weak_model": "gemini/gemini-2.5-flash-preview-04-17", "editor_model": "gemini/gemini-2.5-pro-exp-03-25", "edit_format": "ask", "prompt_tokens": 19937, "completion_tokens": 1031, "total_tokens": 20968, "cost": 0.07527600000000001, "total_cost": 0.251211}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748210550}
+{"event": "message_send_starting", "properties": {}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748210585}
+{"event": "message_send", "properties": {"main_model": "anthropic/claude-sonnet-4-20250514", "weak_model": "gemini/gemini-2.5-flash-preview-04-17", "editor_model": "gemini/gemini-2.5-pro-exp-03-25", "edit_format": "diff", "prompt_tokens": 22700, "completion_tokens": 2391, "total_tokens": 25091, "cost": 0.103965, "total_cost": 0.35517600000000005}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748210619}
+{"event": "launched", "properties": {}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748212228}
+{"event": "repo", "properties": {"num_files": 627}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748212228}
+{"event": "auto_commits", "properties": {"enabled": true}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748212228}
+{"event": "exit", "properties": {"reason": "Completed lint/test/commit"}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748212230}
+{"event": "launched", "properties": {}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748212874}
+{"event": "repo", "properties": {"num_files": 627}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748212874}
+{"event": "auto_commits", "properties": {"enabled": true}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748212874}
+{"event": "cli session", "properties": {"main_model": "gemini/gemini-2.5-flash-preview-05-20", "weak_model": "gemini/gemini-2.5-flash-preview-04-17", "editor_model": "gemini/gemini-2.5-flash-preview-05-20", "edit_format": "whole"}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748212874}
+{"event": "message_send_starting", "properties": {}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748212876}
+{"event": "message_send", "properties": {"main_model": "gemini/gemini-2.5-flash-preview-05-20", "weak_model": "gemini/gemini-2.5-flash-preview-04-17", "editor_model": "gemini/gemini-2.5-flash-preview-05-20", "edit_format": "whole", "prompt_tokens": 7576, "completion_tokens": 62, "total_tokens": 7638, "cost": 0.0011736, "total_cost": 0.0011736}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748212878}
+{"event": "command_exit", "properties": {}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748212893}
+{"event": "exit", "properties": {"reason": "/exit"}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748212893}
+{"event": "launched", "properties": {}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748213354}
+{"event": "exit", "properties": {"reason": "Listed models"}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748213355}
+{"event": "launched", "properties": {}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748213415}
+{"event": "model warning", "properties": {"main_model": "gemini/REDACTED", "weak_model": "gemini/gemini-2.5-flash-preview-04-17", "editor_model": "gemini/REDACTED"}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748213415}
+{"event": "exit", "properties": {"reason": "Keyboard interrupt during model warnings"}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748213417}
+{"event": "launched", "properties": {}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748213421}
+{"event": "model warning", "properties": {"main_model": "gemini/REDACTED", "weak_model": "gemini/gemini-2.5-flash-preview-04-17", "editor_model": "gemini/REDACTED"}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748213422}
+{"event": "exit", "properties": {"reason": "Keyboard interrupt during model warnings"}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748213429}
+{"event": "launched", "properties": {}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748213433}
+{"event": "repo", "properties": {"num_files": 627}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748213434}
+{"event": "auto_commits", "properties": {"enabled": true}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748213434}
+{"event": "cli session", "properties": {"main_model": "gemini/REDACTED", "weak_model": "gemini/gemini-2.5-flash-preview-04-17", "editor_model": "gemini/REDACTED", "edit_format": "whole"}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748213434}
+{"event": "message_send_starting", "properties": {}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748213435}
+{"event": "exit", "properties": {"reason": "Completed main CLI coder.run"}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748213445}
+{"event": "launched", "properties": {}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748213451}
+{"event": "no-repo", "properties": {}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748213451}
+{"event": "auto_commits", "properties": {"enabled": true}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748213451}
+{"event": "message_send_starting", "properties": {}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748213451}
+{"event": "message_send", "properties": {"main_model": "gemini/REDACTED", "weak_model": "gemini/gemini-2.5-flash-preview-04-17", "editor_model": "gemini/REDACTED", "edit_format": "whole", "prompt_tokens": 600, "completion_tokens": 43, "total_tokens": 643, "cost": 0, "total_cost": 0.0}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748213453}
+{"event": "exit", "properties": {"reason": "Completed --message"}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748213453}
+{"event": "launched", "properties": {}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748214005}
+{"event": "repo", "properties": {"num_files": 627}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748214005}
+{"event": "auto_commits", "properties": {"enabled": true}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748214005}
+{"event": "exit", "properties": {"reason": "Completed lint/test/commit"}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748214007}
+{"event": "launched", "properties": {}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748274959}
+{"event": "repo", "properties": {"num_files": 627}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748274959}
+{"event": "auto_commits", "properties": {"enabled": true}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748274959}
+{"event": "exit", "properties": {"reason": "Completed lint/test/commit"}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748274961}
+{"event": "launched", "properties": {}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748274992}
+{"event": "repo", "properties": {"num_files": 627}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748274992}
+{"event": "auto_commits", "properties": {"enabled": true}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748274992}
+{"event": "exit", "properties": {"reason": "Completed lint/test/commit"}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748274995}
+{"event": "launched", "properties": {}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748287099}
+{"event": "repo", "properties": {"num_files": 627}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748287099}
+{"event": "auto_commits", "properties": {"enabled": true}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748287099}
+{"event": "exit", "properties": {"reason": "Completed lint/test/commit"}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1748287102}
diff --git a/aider/website/docs/config/adv-model-settings.md b/aider/website/docs/config/adv-model-settings.md
index 844c7b6578e..8059b182bdd 100644
--- a/aider/website/docs/config/adv-model-settings.md
+++ b/aider/website/docs/config/adv-model-settings.md
@@ -158,6 +158,34 @@ cog.out("```\n")
system_prompt_prefix: null
accepts_settings: null
+- name: anthropic.claude-opus-4-20250514-v1:0
+ edit_format: diff
+ weak_model_name: anthropic.claude-3-5-haiku-20241022-v1:0
+ use_repo_map: true
+ extra_params:
+ extra_headers:
+ anthropic-beta: prompt-caching-2024-07-31,pdfs-2024-09-25,output-128k-2025-02-19
+ max_tokens: 64000
+ cache_control: true
+ editor_model_name: anthropic.claude-sonnet-4-20250514-v1:0
+ editor_edit_format: editor-diff
+ accepts_settings:
+ - thinking_tokens
+
+- name: anthropic.claude-sonnet-4-20250514-v1:0
+ edit_format: diff
+ weak_model_name: anthropic.claude-3-5-haiku-20241022-v1:0
+ use_repo_map: true
+ extra_params:
+ extra_headers:
+ anthropic-beta: prompt-caching-2024-07-31,pdfs-2024-09-25,output-128k-2025-02-19
+ max_tokens: 64000
+ cache_control: true
+ editor_model_name: anthropic.claude-sonnet-4-20250514-v1:0
+ editor_edit_format: editor-diff
+ accepts_settings:
+ - thinking_tokens
+
- name: anthropic/claude-3-5-haiku-20241022
edit_format: diff
weak_model_name: anthropic/claude-3-5-haiku-20241022
@@ -246,6 +274,34 @@ cog.out("```\n")
anthropic-beta: prompt-caching-2024-07-31,pdfs-2024-09-25
cache_control: true
+- name: anthropic/claude-opus-4-20250514
+ edit_format: diff
+ weak_model_name: anthropic/claude-3-5-haiku-20241022
+ use_repo_map: true
+ extra_params:
+ extra_headers:
+ anthropic-beta: prompt-caching-2024-07-31,pdfs-2024-09-25,output-128k-2025-02-19
+ max_tokens: 64000
+ cache_control: true
+ editor_model_name: anthropic/claude-sonnet-4-20250514
+ editor_edit_format: editor-diff
+ accepts_settings:
+ - thinking_tokens
+
+- name: anthropic/claude-sonnet-4-20250514
+ edit_format: diff
+ weak_model_name: anthropic/claude-3-5-haiku-20241022
+ use_repo_map: true
+ extra_params:
+ extra_headers:
+ anthropic-beta: prompt-caching-2024-07-31,pdfs-2024-09-25,output-128k-2025-02-19
+ max_tokens: 64000
+ cache_control: true
+ editor_model_name: anthropic/claude-sonnet-4-20250514
+ editor_edit_format: editor-diff
+ accepts_settings:
+ - thinking_tokens
+
- name: azure/gpt-4.1
edit_format: diff
weak_model_name: azure/gpt-4.1-mini
@@ -407,6 +463,20 @@ cog.out("```\n")
accepts_settings:
- thinking_tokens
+- name: bedrock/anthropic.claude-sonnet-4-20250514-v1:0
+ edit_format: diff
+ weak_model_name: bedrock/anthropic.claude-3-5-haiku-20241022-v1:0
+ use_repo_map: true
+ extra_params:
+ extra_headers:
+ anthropic-beta: prompt-caching-2024-07-31,pdfs-2024-09-25,output-128k-2025-02-19
+ max_tokens: 64000
+ cache_control: true
+ editor_model_name: bedrock/anthropic.claude-sonnet-4-20250514-v1:0
+ editor_edit_format: editor-diff
+ accepts_settings:
+ - thinking_tokens
+
- name: bedrock/us.anthropic.claude-3-7-sonnet-20250219-v1:0
edit_format: diff
weak_model_name: bedrock/us.anthropic.claude-3-5-haiku-20241022-v1:0
@@ -423,6 +493,20 @@ cog.out("```\n")
accepts_settings:
- thinking_tokens
+- name: bedrock/us.anthropic.claude-sonnet-4-20250514-v1:0
+ edit_format: diff
+ weak_model_name: bedrock/us.anthropic.claude-3-5-haiku-20241022-v1:0
+ use_repo_map: true
+ extra_params:
+ extra_headers:
+ anthropic-beta: prompt-caching-2024-07-31,pdfs-2024-09-25,output-128k-2025-02-19
+ max_tokens: 64000
+ cache_control: true
+ editor_model_name: bedrock/us.anthropic.claude-sonnet-4-20250514-v1:0
+ editor_edit_format: editor-diff
+ accepts_settings:
+ - thinking_tokens
+
- name: bedrock_converse/anthropic.claude-3-7-sonnet-20250219-v1:0
edit_format: diff
weak_model_name: bedrock_converse/anthropic.claude-3-5-haiku-20241022-v1:0
@@ -439,6 +523,62 @@ cog.out("```\n")
accepts_settings:
- thinking_tokens
+- name: bedrock_converse/anthropic.claude-opus-4-20250514-v1:0
+ edit_format: diff
+ weak_model_name: bedrock_converse/anthropic.claude-3-5-haiku-20241022-v1:0
+ use_repo_map: true
+ extra_params:
+ extra_headers:
+ anthropic-beta: prompt-caching-2024-07-31,pdfs-2024-09-25,output-128k-2025-02-19
+ max_tokens: 64000
+ cache_control: true
+ editor_model_name: bedrock_converse/anthropic.claude-sonnet-4-20250514-v1:0
+ editor_edit_format: editor-diff
+ accepts_settings:
+ - thinking_tokens
+
+- name: bedrock_converse/anthropic.claude-sonnet-4-20250514-v1:0
+ edit_format: diff
+ weak_model_name: bedrock_converse/anthropic.claude-3-5-haiku-20241022-v1:0
+ use_repo_map: true
+ extra_params:
+ extra_headers:
+ anthropic-beta: prompt-caching-2024-07-31,pdfs-2024-09-25,output-128k-2025-02-19
+ max_tokens: 64000
+ cache_control: true
+ editor_model_name: bedrock_converse/anthropic.claude-sonnet-4-20250514-v1:0
+ editor_edit_format: editor-diff
+ accepts_settings:
+ - thinking_tokens
+
+- name: bedrock_converse/eu.anthropic.claude-opus-4-20250514-v1:0
+ edit_format: diff
+ weak_model_name: bedrock_converse/eu.anthropic.claude-3-5-haiku-20241022-v1:0
+ use_repo_map: true
+ extra_params:
+ extra_headers:
+ anthropic-beta: prompt-caching-2024-07-31,pdfs-2024-09-25,output-128k-2025-02-19
+ max_tokens: 64000
+ cache_control: true
+ editor_model_name: bedrock_converse/eu.anthropic.claude-sonnet-4-20250514-v1:0
+ editor_edit_format: editor-diff
+ accepts_settings:
+ - thinking_tokens
+
+- name: bedrock_converse/eu.anthropic.claude-sonnet-4-20250514-v1:0
+ edit_format: diff
+ weak_model_name: bedrock_converse/eu.anthropic.claude-3-5-haiku-20241022-v1:0
+ use_repo_map: true
+ extra_params:
+ extra_headers:
+ anthropic-beta: prompt-caching-2024-07-31,pdfs-2024-09-25,output-128k-2025-02-19
+ max_tokens: 64000
+ cache_control: true
+ editor_model_name: bedrock_converse/eu.anthropic.claude-sonnet-4-20250514-v1:0
+ editor_edit_format: editor-diff
+ accepts_settings:
+ - thinking_tokens
+
- name: bedrock_converse/us.anthropic.claude-3-7-sonnet-20250219-v1:0
edit_format: diff
weak_model_name: bedrock_converse/us.anthropic.claude-3-5-haiku-20241022-v1:0
@@ -455,6 +595,34 @@ cog.out("```\n")
accepts_settings:
- thinking_tokens
+- name: bedrock_converse/us.anthropic.claude-opus-4-20250514-v1:0
+ edit_format: diff
+ weak_model_name: bedrock_converse/us.anthropic.claude-3-5-haiku-20241022-v1:0
+ use_repo_map: true
+ extra_params:
+ extra_headers:
+ anthropic-beta: prompt-caching-2024-07-31,pdfs-2024-09-25,output-128k-2025-02-19
+ max_tokens: 64000
+ cache_control: true
+ editor_model_name: bedrock_converse/us.anthropic.claude-sonnet-4-20250514-v1:0
+ editor_edit_format: editor-diff
+ accepts_settings:
+ - thinking_tokens
+
+- name: bedrock_converse/us.anthropic.claude-sonnet-4-20250514-v1:0
+ edit_format: diff
+ weak_model_name: bedrock_converse/us.anthropic.claude-3-5-haiku-20241022-v1:0
+ use_repo_map: true
+ extra_params:
+ extra_headers:
+ anthropic-beta: prompt-caching-2024-07-31,pdfs-2024-09-25,output-128k-2025-02-19
+ max_tokens: 64000
+ cache_control: true
+ editor_model_name: bedrock_converse/us.anthropic.claude-sonnet-4-20250514-v1:0
+ editor_edit_format: editor-diff
+ accepts_settings:
+ - thinking_tokens
+
- name: claude-3-5-haiku-20241022
edit_format: diff
weak_model_name: claude-3-5-haiku-20241022
@@ -538,6 +706,34 @@ cog.out("```\n")
- name: claude-3-sonnet-20240229
weak_model_name: claude-3-5-haiku-20241022
+- name: claude-opus-4-20250514
+ edit_format: diff
+ weak_model_name: claude-3-5-haiku-20241022
+ use_repo_map: true
+ extra_params:
+ extra_headers:
+ anthropic-beta: prompt-caching-2024-07-31,pdfs-2024-09-25,output-128k-2025-02-19
+ max_tokens: 64000
+ cache_control: true
+ editor_model_name: claude-sonnet-4-20250514
+ editor_edit_format: editor-diff
+ accepts_settings:
+ - thinking_tokens
+
+- name: claude-sonnet-4-20250514
+ edit_format: diff
+ weak_model_name: claude-3-5-haiku-20241022
+ use_repo_map: true
+ extra_params:
+ extra_headers:
+ anthropic-beta: prompt-caching-2024-07-31,pdfs-2024-09-25,output-128k-2025-02-19
+ max_tokens: 64000
+ cache_control: true
+ editor_model_name: claude-sonnet-4-20250514
+ editor_edit_format: editor-diff
+ accepts_settings:
+ - thinking_tokens
+
- name: cohere_chat/command-a-03-2025
examples_as_sys_msg: true
@@ -600,6 +796,34 @@ cog.out("```\n")
editor_model_name: deepseek/deepseek-chat
editor_edit_format: editor-diff
+- name: eu.anthropic.claude-opus-4-20250514-v1:0
+ edit_format: diff
+ weak_model_name: eu.anthropic.claude-3-5-haiku-20241022-v1:0
+ use_repo_map: true
+ extra_params:
+ extra_headers:
+ anthropic-beta: prompt-caching-2024-07-31,pdfs-2024-09-25,output-128k-2025-02-19
+ max_tokens: 64000
+ cache_control: true
+ editor_model_name: eu.anthropic.claude-sonnet-4-20250514-v1:0
+ editor_edit_format: editor-diff
+ accepts_settings:
+ - thinking_tokens
+
+- name: eu.anthropic.claude-sonnet-4-20250514-v1:0
+ edit_format: diff
+ weak_model_name: eu.anthropic.claude-3-5-haiku-20241022-v1:0
+ use_repo_map: true
+ extra_params:
+ extra_headers:
+ anthropic-beta: prompt-caching-2024-07-31,pdfs-2024-09-25,output-128k-2025-02-19
+ max_tokens: 64000
+ cache_control: true
+ editor_model_name: eu.anthropic.claude-sonnet-4-20250514-v1:0
+ editor_edit_format: editor-diff
+ accepts_settings:
+ - thinking_tokens
+
- name: fireworks_ai/accounts/fireworks/models/deepseek-r1
edit_format: diff
weak_model_name: fireworks_ai/accounts/fireworks/models/deepseek-v3
@@ -1145,6 +1369,20 @@ cog.out("```\n")
accepts_settings:
- thinking_tokens
+- name: openrouter/anthropic/claude-sonnet-4
+ edit_format: diff
+ weak_model_name: openrouter/anthropic/claude-3-5-haiku
+ use_repo_map: true
+ extra_params:
+ extra_headers:
+ anthropic-beta: prompt-caching-2024-07-31,pdfs-2024-09-25,output-128k-2025-02-19
+ max_tokens: 64000
+ cache_control: true
+ editor_model_name: openrouter/anthropic/claude-sonnet-4
+ editor_edit_format: editor-diff
+ accepts_settings:
+ - thinking_tokens
+
- name: openrouter/cohere/command-a-03-2025
examples_as_sys_msg: true
@@ -1434,6 +1672,34 @@ cog.out("```\n")
accepts_settings:
- reasoning_effort
+- name: us.anthropic.claude-opus-4-20250514-v1:0
+ edit_format: diff
+ weak_model_name: us.anthropic.claude-3-5-haiku-20241022-v1:0
+ use_repo_map: true
+ extra_params:
+ extra_headers:
+ anthropic-beta: prompt-caching-2024-07-31,pdfs-2024-09-25,output-128k-2025-02-19
+ max_tokens: 64000
+ cache_control: true
+ editor_model_name: us.anthropic.claude-sonnet-4-20250514-v1:0
+ editor_edit_format: editor-diff
+ accepts_settings:
+ - thinking_tokens
+
+- name: us.anthropic.claude-sonnet-4-20250514-v1:0
+ edit_format: diff
+ weak_model_name: us.anthropic.claude-3-5-haiku-20241022-v1:0
+ use_repo_map: true
+ extra_params:
+ extra_headers:
+ anthropic-beta: prompt-caching-2024-07-31,pdfs-2024-09-25,output-128k-2025-02-19
+ max_tokens: 64000
+ cache_control: true
+ editor_model_name: us.anthropic.claude-sonnet-4-20250514-v1:0
+ editor_edit_format: editor-diff
+ accepts_settings:
+ - thinking_tokens
+
- name: vertex_ai-anthropic_models/vertex_ai/claude-3-7-sonnet@20250219
edit_format: diff
weak_model_name: vertex_ai/claude-3-5-haiku@20241022
@@ -1447,6 +1713,28 @@ cog.out("```\n")
accepts_settings:
- thinking_tokens
+- name: vertex_ai-anthropic_models/vertex_ai/claude-opus-4@20250514
+ edit_format: diff
+ weak_model_name: vertex_ai/claude-3-5-haiku@20241022
+ use_repo_map: true
+ extra_params:
+ max_tokens: 64000
+ editor_model_name: vertex_ai-anthropic_models/vertex_ai/claude-sonnet-4@20250514
+ editor_edit_format: editor-diff
+ accepts_settings:
+ - thinking_tokens
+
+- name: vertex_ai-anthropic_models/vertex_ai/claude-sonnet-4@20250514
+ edit_format: diff
+ weak_model_name: vertex_ai/claude-3-5-haiku@20241022
+ use_repo_map: true
+ extra_params:
+ max_tokens: 64000
+ editor_model_name: vertex_ai-anthropic_models/vertex_ai/claude-sonnet-4@20250514
+ editor_edit_format: editor-diff
+ accepts_settings:
+ - thinking_tokens
+
- name: vertex_ai-language-models/gemini-2.5-flash-preview-04-17
edit_format: diff
use_repo_map: true
@@ -1502,6 +1790,35 @@ cog.out("```\n")
- name: vertex_ai/claude-3-sonnet@20240229
weak_model_name: vertex_ai/claude-3-5-haiku@20241022
+- name: vertex_ai/claude-opus-4@20250514
+ edit_format: diff
+ weak_model_name: vertex_ai/claude-3-5-haiku@20241022
+ use_repo_map: true
+ extra_params:
+ max_tokens: 64000
+ editor_model_name: vertex_ai/claude-sonnet-4@20250514
+ editor_edit_format: editor-diff
+ accepts_settings:
+ - thinking_tokens
+
+- name: vertex_ai/claude-sonnet-4@20250514
+ edit_format: diff
+ weak_model_name: vertex_ai/claude-3-5-haiku@20241022
+ use_repo_map: true
+ extra_params:
+ max_tokens: 64000
+ editor_model_name: vertex_ai/claude-sonnet-4@20250514
+ editor_edit_format: editor-diff
+ accepts_settings:
+ - thinking_tokens
+
+- name: vertex_ai/gemini-2.5-flash-preview-05-20
+ edit_format: diff
+ use_repo_map: true
+ accepts_settings:
+ - reasoning_effort
+ - thinking_tokens
+
- name: vertex_ai/gemini-2.5-pro-exp-03-25
edit_format: diff-fenced
weak_model_name: vertex_ai-language-models/gemini-2.5-flash-preview-04-17
diff --git a/aider/website/docs/config/model-aliases.md b/aider/website/docs/config/model-aliases.md
index c3871a0944d..0a9a32d55f2 100644
--- a/aider/website/docs/config/model-aliases.md
+++ b/aider/website/docs/config/model-aliases.md
@@ -86,10 +86,10 @@ for alias, model in sorted(MODEL_ALIASES.items()):
- `grok3`: xai/grok-3-beta
- `haiku`: claude-3-5-haiku-20241022
- `optimus`: openrouter/openrouter/optimus-alpha
-- `opus`: claude-3-opus-20240229
+- `opus`: claude-opus-4-20250514
- `quasar`: openrouter/openrouter/quasar-alpha
- `r1`: deepseek/deepseek-reasoner
-- `sonnet`: anthropic/claude-3-7-sonnet-20250219
+- `sonnet`: anthropic/claude-sonnet-4-20250514
## Priority
diff --git a/aider/website/docs/faq.md b/aider/website/docs/faq.md
index 07132ce6ff7..6e8baecbb14 100644
--- a/aider/website/docs/faq.md
+++ b/aider/website/docs/faq.md
@@ -264,10 +264,17 @@ tr:hover { background-color: #f5f5f5; }
| Model Name | Total Tokens | Percent |
-| gemini/gemini-2.5-pro-exp-03-25 | 1,216,051 | 67.6% |
-| o3 | 542,669 | 30.2% |
+| gemini/gemini-2.5-pro-exp-03-25 | 1,109,768 | 61.9% |
+| o3 | 542,669 | 30.3% |
+| anthropic/claude-sonnet-4-20250514 | 92,508 | 5.2% |
| gemini/gemini-2.5-pro-preview-05-06 | 40,256 | 2.2% |
+| gemini/gemini-2.5-flash-preview-05-20 | 7,638 | 0.4% |
+| gemini/REDACTED | 643 | 0.0% |
+
+{: .note :}
+Some models show as REDACTED, because they are new or unpopular models.
+Aider's analytics only records the names of "well known" LLMs.
## How are the "aider wrote xx% of code" stats computed?
diff --git a/aider/website/docs/leaderboards/index.md b/aider/website/docs/leaderboards/index.md
index 1485ce5327b..18aac4a2499 100644
--- a/aider/website/docs/leaderboards/index.md
+++ b/aider/website/docs/leaderboards/index.md
@@ -285,6 +285,6 @@ mod_dates = [get_last_modified_date(file) for file in files]
latest_mod_date = max(mod_dates)
cog.out(f"{latest_mod_date.strftime('%B %d, %Y.')}")
]]]-->
-May 09, 2025.
+May 26, 2025.
diff --git a/aider/website/index.html b/aider/website/index.html
index 87e19b86272..9bcaa13a767 100644
--- a/aider/website/index.html
+++ b/aider/website/index.html
@@ -69,11 +69,11 @@ AI pair programming in your terminal
]]]-->
⭐ GitHub Stars
- 33K
+ 34K
📦 Installs
- 2.3M
+ 2.4M
📈 Tokens/week