diff --git a/.claude/commands/create-spec.md b/.claude/commands/create-spec.md
index f8cae28e..a992c493 100644
--- a/.claude/commands/create-spec.md
+++ b/.claude/commands/create-spec.md
@@ -518,6 +518,25 @@ Write this JSON file:
 - The UI polls this file to detect completion and show the Continue button
 - If the user asks for additional changes after you've written this, you may update it again when the new changes are complete
 
+## 4. Register the Project (REQUIRED)
+
+**After writing the status file**, register the project so it appears in the AutoCoder UI.
+
+Run this command using Bash:
+
+```bash
+python /home/john/autocoder/register_project.py "<project-name>" "$ARGUMENTS"
+```
+
+Where `<project-name>` is derived from the project path (the last directory component, e.g., `my-app` from `~/projects/my-app`).
+
+**Example:**
+```bash
+python /home/john/autocoder/register_project.py "my-app" "/home/john/projects/my-app"
+```
+
+**Note:** If the project is already registered at the same path, the command exits successfully, so it is safe to re-run. This ensures projects created via `/create-spec` appear in the UI dropdown.
+
 ---
 
 # AFTER FILE GENERATION: NEXT STEPS
diff --git a/agent.py b/agent.py
index 50edc46d..0dffe387 100644
--- a/agent.py
+++ b/agent.py
@@ -9,6 +9,7 @@
 import io
 import re
 import sys
+import time
 from datetime import datetime, timedelta
 from pathlib import Path
 from typing import Optional
@@ -22,8 +23,34 @@
 sys.stdout = io.TextIOWrapper(sys.stdout.buffer, encoding="utf-8", errors="replace")
 sys.stderr = io.TextIOWrapper(sys.stderr.buffer, encoding="utf-8", errors="replace")
 
+
+def safe_print(*args, **kwargs) -> None:
+    """
+    Print with retry logic to handle EAGAIN errors.
+
+    When stdout is a pipe (subprocess), the buffer can fill up causing
+    BlockingIOError (errno 11). This function retries with backoff.
+    """
+    max_retries = 5
+    for attempt in range(max_retries):
+        try:
+            print(*args, **kwargs)
+            return
+        except BlockingIOError:
+            if attempt < max_retries - 1:
+                time.sleep(0.1 * (attempt + 1))  # Backoff: 0.1s, 0.2s, 0.3s, 0.4s
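+            # On the final attempt, fall through to the unflushed fallback
+            # below rather than letting BlockingIOError escape a print call.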
+            else:
+                # Last resort: try without flush
+                kwargs.pop('flush', None)
+                try:
+                    print(*args, **kwargs)
+                except Exception:
+                    pass  # Give up silently
+
 from client import create_client
-from progress import has_features, print_progress_summary, print_session_header
+from progress import all_features_complete, has_features, print_progress_summary, print_session_header
 from prompts import (
     copy_spec_to_project,
     get_coding_prompt,
@@ -53,7 +78,7 @@ async def run_agent_session(
     - "continue" if agent should continue working
     - "error" if an error occurred
     """
-    print("Sending prompt to Claude Agent SDK...\n")
+    safe_print("Sending prompt to Claude Agent SDK...\n")
 
     try:
         # Send the query
@@ -71,15 +96,15 @@
 
                     if block_type == "TextBlock" and hasattr(block, "text"):
                         response_text += block.text
-                        print(block.text, end="", flush=True)
+                        safe_print(block.text, end="", flush=True)
                     elif block_type == "ToolUseBlock" and hasattr(block, "name"):
-                        print(f"\n[Tool: {block.name}]", flush=True)
+                        safe_print(f"\n[Tool: {block.name}]", flush=True)
                         if hasattr(block, "input"):
                             input_str = str(block.input)
                             if len(input_str) > 200:
-                                print(f"  Input: {input_str[:200]}...", flush=True)
+                                safe_print(f"  Input: {input_str[:200]}...", flush=True)
                             else:
-                                print(f"  Input: {input_str}", flush=True)
+                                safe_print(f"  Input: {input_str}", flush=True)
 
             # Handle UserMessage (tool results)
             elif msg_type == "UserMessage" and hasattr(msg, "content"):
@@ -92,20 +117,20 @@
 
                         # Check if command was blocked by security hook
                         if "blocked" in str(result_content).lower():
-                            print(f"  [BLOCKED] {result_content}", flush=True)
+                            safe_print(f"  [BLOCKED] {result_content}", flush=True)
                         elif is_error:
                             # Show errors (truncated)
                             error_str = str(result_content)[:500]
-                            print(f"  [Error] {error_str}", flush=True)
+                            safe_print(f"  [Error] {error_str}", flush=True)
                         else:
                             # Tool succeeded - just show brief confirmation
-                            print("  [Done]", flush=True)
+                            safe_print("  [Done]", flush=True)
 
-        print("\n" + "-" * 70 + "\n")
+        safe_print("\n" + "-" * 70 + "\n")
         return "continue", response_text
 
     except Exception as e:
-        print(f"Error during agent session: {e}")
+        safe_print(f"Error during agent session: {e}")
         return "error", str(e)
 
@@ -174,6 +199,17 @@ async def run_autonomous_agent(
             print("To continue, run the script again without --max-iterations")
             break
 
+        # Check if all features are complete (skip on first run - initializer hasn't created features yet)
+        if not is_first_run and all_features_complete(project_dir):
+            print("\n" + "=" * 70)
+            print("  ALL FEATURES COMPLETE!")
+            print("=" * 70)
+            print("\nAll features have been implemented and are passing.")
+            print("The agent will now stop to save API credits.")
+            print("\nTo add more features, use the UI or add them to the database,")
+            print("then restart the agent.")
+            break
+
         # Print session header
         print_session_header(iteration, is_first_run)
 
diff --git a/client.py b/client.py
index 7074fef8..c01e274f 100644
--- a/client.py
+++ b/client.py
@@ -207,10 +207,17 @@ def create_client(project_dir: Path, model: str, yolo_mode: bool = False):
     }
     if not yolo_mode:
         # Include Playwright MCP server for browser automation (standard mode only)
-        # Headless mode is configurable via PLAYWRIGHT_HEADLESS environment variable
-        playwright_args = ["@playwright/mcp@latest", "--viewport-size", "1280x720"]
-        if get_playwright_headless():
-            playwright_args.append("--headless")
+        # Uses Playwright's bundled Chromium in headless mode for remote/server environments
+        chromium_path = os.path.expanduser("~/.cache/ms-playwright/chromium-1200/chrome-linux64/chrome")
+        playwright_args = [
+            "@playwright/mcp@latest",
+            "--viewport-size", "1280x720",
+            "--headless",  # Always headless for server environments
+            "--no-sandbox",  # Required for some Linux environments
+        ]
+        # Use Playwright's Chromium if available (works on headless servers)
+        if os.path.exists(chromium_path):
+            playwright_args.extend(["--executable-path", chromium_path])
         mcp_servers["playwright"] = {
             "command": "npx",
             "args": playwright_args,
diff --git a/mcp_server/feature_mcp.py b/mcp_server/feature_mcp.py
index 1534bc1b..4f9765ec 100755
--- a/mcp_server/feature_mcp.py
+++ b/mcp_server/feature_mcp.py
@@ -373,6 +373,9 @@ def feature_create_bulk(
     Features are assigned sequential priorities based on their order.
     All features start with passes=false.
 
+    Duplicate detection: Features with the same name as existing features
+    are skipped to prevent duplicates.
+
     This is typically used by the initializer agent to set up the initial
     feature list from the app specification.
 
@@ -384,17 +387,25 @@
         - steps (list[str]): Implementation/test steps
 
     Returns:
-        JSON with: created (int) - number of features created
+        JSON with: created (int), skipped (int), skipped_names (list)
     """
     session = get_session()
     try:
         # Use lock to prevent race condition in priority assignment
         with _priority_lock:
+            # Get existing feature names for duplicate detection
+            existing_names = set(
+                name[0] for name in session.query(Feature.name).all()
+            )
+
             # Get the starting priority
             max_priority_result = session.query(Feature.priority).order_by(Feature.priority.desc()).first()
             start_priority = (max_priority_result[0] + 1) if max_priority_result else 1
 
             created_count = 0
+            skipped_count = 0
+            skipped_names = []
+
             for i, feature_data in enumerate(features):
                 # Validate required fields
                 if not all(key in feature_data for key in ["category", "name", "description", "steps"]):
                     return json.dumps({
                         "error": f"Feature at index {i} missing required fields (category, name, description, steps)"
                     })
 
+                # Skip duplicates
+                if feature_data["name"] in existing_names:
+                    skipped_count += 1
+                    skipped_names.append(feature_data["name"])
+                    continue
+
+                # Add to existing names to catch duplicates within this batch
+                existing_names.add(feature_data["name"])
+
                 db_feature = Feature(
-                    priority=start_priority + i,
+                    priority=start_priority + created_count,
                     category=feature_data["category"],
                     name=feature_data["name"],
                     description=feature_data["description"],
@@ -415,7 +435,95 @@
 
         session.commit()
 
-        return json.dumps({"created": created_count}, indent=2)
+        result = {"created": created_count, "skipped": skipped_count}
+        if skipped_names:
+            result["skipped_names"] = skipped_names[:10]  # Limit to first 10
+            if len(skipped_names) > 10:
+                result["skipped_names"].append(f"... and {len(skipped_names) - 10} more")
+
+        return json.dumps(result, indent=2)
     except Exception as e:
         session.rollback()
         return json.dumps({"error": str(e)})
     finally:
         session.close()
 
 
+@mcp.tool()
+def feature_db_repair() -> str:
+    """Repair the feature database by removing duplicates and compacting IDs.
+
+    This tool performs the following repairs:
+    1. Removes duplicate features (keeping the one with lowest ID)
+    2. Compacts IDs to be sequential (1, 2, 3, ...) with no gaps
+    3. Resets priorities to match the new sequential IDs
+
+    Use this if the database has inconsistencies like duplicate IDs or gaps.
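+
+    Example return value (illustrative):
+        {"duplicates_removed": 2, "ids_compacted": true,
+         "old_max_id": 14, "new_max_id": 12, "total_features": 12}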
+ + Returns: + JSON with: duplicates_removed (int), ids_compacted (bool), + old_max_id (int), new_max_id (int), total_features (int) + """ + session = get_session() + try: + from sqlalchemy import text + + # Step 1: Find and remove duplicates (keep lowest ID for each name) + duplicates_query = """ + SELECT id FROM features + WHERE id NOT IN ( + SELECT MIN(id) FROM features GROUP BY name + ) + """ + result = session.execute(text(duplicates_query)) + duplicate_ids = [row[0] for row in result.fetchall()] + duplicates_removed = len(duplicate_ids) + + if duplicate_ids: + session.execute( + text(f"DELETE FROM features WHERE id IN ({','.join(map(str, duplicate_ids))})") + ) + session.commit() + + # Step 2: Get current state + all_features = session.query(Feature).order_by(Feature.priority.asc(), Feature.id.asc()).all() + old_max_id = max(f.id for f in all_features) if all_features else 0 + total_features = len(all_features) + + # Step 3: Check if compaction is needed + expected_ids = set(range(1, total_features + 1)) + actual_ids = set(f.id for f in all_features) + needs_compaction = expected_ids != actual_ids + + new_max_id = old_max_id + if needs_compaction and all_features: + # Create a mapping from old ID to new ID + # We need to use raw SQL to avoid SQLAlchemy's identity map issues + + # First, shift all IDs to negative to avoid conflicts + session.execute(text("UPDATE features SET id = -id")) + session.commit() + + # Then assign new sequential IDs + for new_id, feature in enumerate(all_features, start=1): + session.execute( + text(f"UPDATE features SET id = {new_id}, priority = {new_id} WHERE id = {-feature.id}") + ) + session.commit() + + new_max_id = total_features + + return json.dumps({ + "duplicates_removed": duplicates_removed, + "ids_compacted": needs_compaction, + "old_max_id": old_max_id, + "new_max_id": new_max_id, + "total_features": total_features + }, indent=2) except Exception as e: session.rollback() return json.dumps({"error": str(e)}) diff --git a/progress.py b/progress.py index dfb700b4..fbea8768 100644 --- a/progress.py +++ b/progress.py @@ -17,6 +17,21 @@ PROGRESS_CACHE_FILE = ".progress_cache" +def all_features_complete(project_dir: Path) -> bool: + """ + Check if all features in the project are complete (passing). + + Returns True if: + - There are features AND all of them are passing + + Returns False if: + - No features exist, OR + - There are pending/failing features + """ + passing, in_progress, total = count_passing_tests(project_dir) + return total > 0 and passing == total + + def has_features(project_dir: Path) -> bool: """ Check if the project has features in the database. diff --git a/register_project.py b/register_project.py new file mode 100644 index 00000000..c023e09e --- /dev/null +++ b/register_project.py @@ -0,0 +1,65 @@ +#!/usr/bin/env python3 +""" +Register Project Script +======================== + +Simple CLI script to register a project in the autocoder registry. +Called by the /create-spec command after generating spec files. 
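+Exits with status 0 on success (including when the project is already
+registered at the same path) and with status 1 on any error.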
+
+Usage:
+    python register_project.py <name> <path>
+
+Example:
+    python register_project.py my-app ~/projects/my-app
+"""
+
+import sys
+from pathlib import Path
+
+# Add parent directory to path so we can import registry
+sys.path.insert(0, str(Path(__file__).parent))
+
+from registry import register_project, get_project_path, RegistryError
+
+
+def main():
+    if len(sys.argv) != 3:
+        print("Usage: python register_project.py <name> <path>", file=sys.stderr)
+        sys.exit(1)
+
+    name = sys.argv[1]
+    path = Path(sys.argv[2]).expanduser().resolve()
+
+    # Check if already registered
+    existing_path = get_project_path(name)
+    if existing_path:
+        if existing_path.resolve() == path:
+            print(f"Project '{name}' is already registered at {path}")
+            sys.exit(0)
+        else:
+            print(f"Project '{name}' is already registered at a different path: {existing_path}", file=sys.stderr)
+            sys.exit(1)
+
+    # Validate path exists
+    if not path.exists():
+        print(f"Error: Path does not exist: {path}", file=sys.stderr)
+        sys.exit(1)
+
+    if not path.is_dir():
+        print(f"Error: Path is not a directory: {path}", file=sys.stderr)
+        sys.exit(1)
+
+    # Register the project
+    try:
+        register_project(name, path)
+        print(f"Registered project '{name}' at {path}")
+    except RegistryError as e:
+        print(f"Error: {e}", file=sys.stderr)
+        sys.exit(1)
+    except ValueError as e:
+        print(f"Invalid project name: {e}", file=sys.stderr)
+        sys.exit(1)
+
+
+if __name__ == "__main__":
+    main()
diff --git a/security.py b/security.py
index 4e03117e..35065394 100644
--- a/security.py
+++ b/security.py
@@ -42,8 +42,14 @@
     "sleep",
     "kill",  # Kill by PID
     "pkill",  # For killing dev servers; validated separately
+    "pgrep",  # For checking running processes
+    # Directory navigation
+    "cd",  # Change directory (used in compound commands)
     # Network/API testing
     "curl",
+    "jq",  # JSON parsing
+    # Database
+    "sqlite3",  # SQLite database queries
     # File operations
     "mv",
     "rm",  # Use with caution
@@ -189,6 +195,9 @@ def validate_pkill_command(command_string: str) -> tuple[bool, str]:
         "npx",
         "vite",
         "next",
+        "playwright",
+        "chrome",
+        "chromium",
     }
 
     try:
diff --git a/server/main.py b/server/main.py
index 9340315f..495914cd 100644
--- a/server/main.py
+++ b/server/main.py
@@ -72,15 +72,10 @@ async def lifespan(app: FastAPI):
     lifespan=lifespan,
 )
 
-# CORS - allow only localhost origins for security
+# CORS - allow all origins for LAN access
 app.add_middleware(
     CORSMiddleware,
-    allow_origins=[
-        "http://localhost:5173",  # Vite dev server
-        "http://127.0.0.1:5173",
-        "http://localhost:8888",  # Production
-        "http://127.0.0.1:8888",
-    ],
+    allow_origins=["*"],
     allow_credentials=True,
     allow_methods=["*"],
     allow_headers=["*"],
@@ -88,19 +83,20 @@
 
 # ============================================================================
-# Security Middleware
+# Security Middleware (disabled for LAN access)
 # ============================================================================
 
-@app.middleware("http")
-async def require_localhost(request: Request, call_next):
-    """Only allow requests from localhost."""
-    client_host = request.client.host if request.client else None
-
-    # Allow localhost connections
-    if client_host not in ("127.0.0.1", "::1", "localhost", None):
-        raise HTTPException(status_code=403, detail="Localhost access only")
-
-    return await call_next(request)
+# NOTE: Localhost restriction removed to allow LAN access
+# @app.middleware("http")
+# async def require_localhost(request: Request, call_next):
+#     """Only allow requests from localhost."""
+#     client_host = request.client.host if request.client else None
+#
+#     # Allow localhost connections
+#     if client_host not in ("127.0.0.1", "::1", "localhost", None):
+#         raise HTTPException(status_code=403, detail="Localhost access only")
+#
+#     return await call_next(request)
 
 
 # ============================================================================
@@ -207,7 +203,7 @@ async def serve_spa(path: str):
     import uvicorn
     uvicorn.run(
         "server.main:app",
-        host="127.0.0.1",  # Localhost only for security
+        host="0.0.0.0",  # LAN accessible
        port=8888,
         reload=True,
     )
diff --git a/server/routers/features.py b/server/routers/features.py
index ce0f388d..adcb1d44 100644
--- a/server/routers/features.py
+++ b/server/routers/features.py
@@ -249,6 +249,92 @@ async def delete_feature(project_name: str, feature_id: int):
         raise HTTPException(status_code=500, detail="Failed to delete feature")
 
 
+@router.post("/repair")
+async def repair_database(project_name: str):
+    """
+    Repair the feature database by removing duplicates and compacting IDs.
+
+    Performs the following repairs:
+    1. Removes duplicate features (keeping the one with lowest ID)
+    2. Compacts IDs to be sequential (1, 2, 3, ...) with no gaps
+    3. Resets priorities to match the new sequential IDs
+    """
+    from sqlalchemy import text
+
+    project_name = validate_project_name(project_name)
+    project_dir = _get_project_path(project_name)
+
+    if not project_dir:
+        raise HTTPException(status_code=404, detail=f"Project '{project_name}' not found in registry")
+
+    if not project_dir.exists():
+        raise HTTPException(status_code=404, detail="Project directory not found")
+
+    db_file = project_dir / "features.db"
+    if not db_file.exists():
+        return {"success": True, "duplicates_removed": 0, "ids_compacted": False, "total_features": 0}
+
+    _, Feature = _get_db_classes()
+
+    try:
+        with get_db_session(project_dir) as session:
+            # Step 1: Find and remove duplicates (keep lowest ID for each name)
+            duplicates_query = """
+                SELECT id FROM features
+                WHERE id NOT IN (
+                    SELECT MIN(id) FROM features GROUP BY name
+                )
+            """
+            result = session.execute(text(duplicates_query))
+            duplicate_ids = [row[0] for row in result.fetchall()]
+            duplicates_removed = len(duplicate_ids)
+
+            if duplicate_ids:
+                session.execute(
+                    text(f"DELETE FROM features WHERE id IN ({','.join(map(str, duplicate_ids))})")
+                )
+                session.commit()
+
+            # Step 2: Get current state
+            all_features = session.query(Feature).order_by(Feature.priority.asc(), Feature.id.asc()).all()
+            old_max_id = max(f.id for f in all_features) if all_features else 0
+            total_features = len(all_features)
+
+            # Step 3: Check if compaction is needed
+            expected_ids = set(range(1, total_features + 1))
+            actual_ids = set(f.id for f in all_features)
+            needs_compaction = expected_ids != actual_ids
+
+            new_max_id = old_max_id
+            if needs_compaction and all_features:
+                # First, shift all IDs to negative to avoid conflicts
+                session.execute(text("UPDATE features SET id = -id"))
+                session.commit()
+
+                # Then assign new sequential IDs
+                for new_id, feature in enumerate(all_features, start=1):
+                    session.execute(
+                        text(f"UPDATE features SET id = {new_id}, priority = {new_id} WHERE id = {-feature.id}")
+                    )
+                session.commit()
+
+                new_max_id = total_features
+
+            return {
+                "success": True,
+                "duplicates_removed": duplicates_removed,
+                "ids_compacted": needs_compaction,
+                "old_max_id": old_max_id,
+                "new_max_id": new_max_id,
+                "total_features": total_features
+            }
+    except HTTPException:
+        raise
+    except Exception:
+        logger.exception("Failed to repair database")
+        raise 
HTTPException(status_code=500, detail="Failed to repair database") + + @router.patch("/{feature_id}/skip") async def skip_feature(project_name: str, feature_id: int): """ diff --git a/server/services/expand_chat_session.py b/server/services/expand_chat_session.py index 71e56bb1..12a182e7 100644 --- a/server/services/expand_chat_session.py +++ b/server/services/expand_chat_session.py @@ -152,6 +152,7 @@ async def start(self) -> AsyncGenerator[dict, None]: "allow": [ "Read(./**)", "Glob(./**)", + "Bash(*)", # Allow bash for sqlite3 queries ], }, } @@ -177,6 +178,7 @@ async def start(self) -> AsyncGenerator[dict, None]: allowed_tools=[ "Read", "Glob", + "Bash", # For sqlite3 queries ], permission_mode="acceptEdits", max_turns=100, diff --git a/start_ui.py b/start_ui.py index 267ae12d..8adddf9e 100644 --- a/start_ui.py +++ b/start_ui.py @@ -44,7 +44,7 @@ def find_available_port(start: int = 8888, max_attempts: int = 10) -> int: for port in range(start, start + max_attempts): try: with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s: - s.bind(("127.0.0.1", port)) + s.bind(("0.0.0.0", port)) return port except OSError: continue @@ -158,14 +158,14 @@ def start_dev_server(port: int) -> tuple: venv_python = get_venv_python() print("\n Starting development servers...") - print(f" - FastAPI backend: http://127.0.0.1:{port}") + print(f" - FastAPI backend: http://0.0.0.0:{port}") print(" - Vite frontend: http://127.0.0.1:5173") # Start FastAPI backend = subprocess.Popen([ str(venv_python), "-m", "uvicorn", "server.main:app", - "--host", "127.0.0.1", + "--host", "0.0.0.0", "--port", str(port), "--reload" ], cwd=str(ROOT)) @@ -185,12 +185,12 @@ def start_production_server(port: int): """Start FastAPI server in production mode.""" venv_python = get_venv_python() - print(f"\n Starting server at http://127.0.0.1:{port}") + print(f"\n Starting server at http://0.0.0.0:{port}") return subprocess.Popen([ str(venv_python), "-m", "uvicorn", "server.main:app", - "--host", "127.0.0.1", + "--host", "0.0.0.0", "--port", str(port) ], cwd=str(ROOT)) @@ -280,7 +280,7 @@ def main() -> None: webbrowser.open(f"http://127.0.0.1:{port}") print("\n" + "=" * 50) - print(f" Server running at http://127.0.0.1:{port}") + print(f" Server running at http://0.0.0.0:{port}") print(" Press Ctrl+C to stop") print("=" * 50) diff --git a/ui/package-lock.json b/ui/package-lock.json index 6135f476..43fbd75a 100644 --- a/ui/package-lock.json +++ b/ui/package-lock.json @@ -69,6 +69,7 @@ "integrity": "sha512-e7jT4DxYvIDLk1ZHmU/m/mB19rex9sv0c2ftBtjSBv+kVM/902eh0fINUzD7UwLLNR+jU585GxUJ8/EBfAM5fw==", "dev": true, "license": "MIT", + "peer": true, "dependencies": { "@babel/code-frame": "^7.27.1", "@babel/generator": "^7.28.5", @@ -2326,6 +2327,7 @@ "integrity": "sha512-cisd7gxkzjBKU2GgdYrTdtQx1SORymWyaAFhaxQPK9bYO9ot3Y5OikQRvY0VYQtvwjeQnizCINJAenh/V7MK2w==", "devOptional": true, "license": "MIT", + "peer": true, "dependencies": { "@types/prop-types": "*", "csstype": "^3.2.2" @@ -2337,6 +2339,7 @@ "integrity": "sha512-MEe3UeoENYVFXzoXEWsvcpg6ZvlrFNlOQ7EOsvhI3CfAXwzPfO8Qwuxd40nepsYKqyyVQnTdEfv68q91yLcKrQ==", "devOptional": true, "license": "MIT", + "peer": true, "peerDependencies": { "@types/react": "^18.0.0" } @@ -2386,6 +2389,7 @@ "integrity": "sha512-3xP4XzzDNQOIqBMWogftkwxhg5oMKApqY0BAflmLZiFYHqyhSOxv/cd/zPQLTcCXr4AkaKb25joocY0BD1WC6A==", "dev": true, "license": "MIT", + "peer": true, "dependencies": { "@typescript-eslint/scope-manager": "8.51.0", "@typescript-eslint/types": "8.51.0", @@ -2658,6 +2662,7 @@ "integrity": 
"sha512-NZyJarBfL7nWwIq+FDL6Zp/yHEhePMNnnJ0y3qfieCrmNvYct8uvtiV41UvlSe6apAfk0fY1FbWx+NwfmpvtTg==", "dev": true, "license": "MIT", + "peer": true, "bin": { "acorn": "bin/acorn" }, @@ -2775,6 +2780,7 @@ } ], "license": "MIT", + "peer": true, "dependencies": { "baseline-browser-mapping": "^2.9.0", "caniuse-lite": "^1.0.30001759", @@ -3042,6 +3048,7 @@ "integrity": "sha512-LEyamqS7W5HB3ujJyvi0HQK/dtVINZvd5mAAp9eT5S/ujByGjiZLCzPcHVzuXbpJDJF/cxwHlfceVUDZ2lnSTw==", "dev": true, "license": "MIT", + "peer": true, "dependencies": { "@eslint-community/eslint-utils": "^4.8.0", "@eslint-community/regexpp": "^4.12.1", @@ -4021,6 +4028,7 @@ "integrity": "sha512-5gTmgEY/sqK6gFXLIsQNH19lWb4ebPDLA4SdLP7dsWkIXHWlG66oPuVvXSGFPppYZz8ZDZq0dYYrbHfBCVUb1Q==", "dev": true, "license": "MIT", + "peer": true, "engines": { "node": ">=12" }, @@ -4082,6 +4090,7 @@ "resolved": "https://registry.npmjs.org/react/-/react-18.3.1.tgz", "integrity": "sha512-wS+hAgJShR0KhEvPJArfuPVN1+Hz1t0Y6n5jLrGQbkb4urgPE/0Rve+1kMB1v/oWgHgm4WIcV+i7F2pTVj+2iQ==", "license": "MIT", + "peer": true, "dependencies": { "loose-envify": "^1.1.0" }, @@ -4094,6 +4103,7 @@ "resolved": "https://registry.npmjs.org/react-dom/-/react-dom-18.3.1.tgz", "integrity": "sha512-5m4nQKp+rZRb09LNH59GM4BxTh9251/ylbKIbpe7TpGxfJ+9kv6BLkLBXIjjspbgbnIBNqlI23tRnTWT0snUIw==", "license": "MIT", + "peer": true, "dependencies": { "loose-envify": "^1.1.0", "scheduler": "^0.23.2" @@ -4387,6 +4397,7 @@ "integrity": "sha512-hjcS1mhfuyi4WW8IWtjP7brDrG2cuDZukyrYrSauoXGNgx0S7zceP07adYkJycEr56BOUTNPzbInooiN3fn1qw==", "dev": true, "license": "Apache-2.0", + "peer": true, "bin": { "tsc": "bin/tsc", "tsserver": "bin/tsserver" @@ -4509,6 +4520,7 @@ "integrity": "sha512-o5a9xKjbtuhY6Bi5S3+HvbRERmouabWbyUcpXXUA1u+GNUKoROi9byOJ8M0nHbHYHkYICiMlqxkg1KkYmm25Sw==", "dev": true, "license": "MIT", + "peer": true, "dependencies": { "esbuild": "^0.21.3", "postcss": "^8.4.43", diff --git a/ui/src/App.tsx b/ui/src/App.tsx index 50b02973..582ffd5a 100644 --- a/ui/src/App.tsx +++ b/ui/src/App.tsx @@ -23,6 +23,17 @@ import { DevServerControl } from './components/DevServerControl' import { Loader2, Settings } from 'lucide-react' import type { Feature } from './lib/types' +// Apply dark mode on initial load (before React renders) +function initDarkMode() { + const saved = localStorage.getItem('darkMode') + const prefersDark = window.matchMedia('(prefers-color-scheme: dark)').matches + const isDark = saved !== null ? saved === 'true' : prefersDark + if (isDark) { + document.documentElement.classList.add('dark') + } +} +initDarkMode() + function App() { // Initialize selected project from localStorage const [selectedProject, setSelectedProject] = useState(() => { @@ -58,6 +69,12 @@ function App() { // Persist selected project to localStorage const handleSelectProject = useCallback((project: string | null) => { + // Invalidate old project's cached data to prevent stale data showing + if (selectedProject && selectedProject !== project) { + queryClient.removeQueries({ queryKey: ['features', selectedProject] }) + queryClient.removeQueries({ queryKey: ['agent-status', selectedProject] }) + } + setSelectedProject(project) try { if (project) { @@ -68,7 +85,7 @@ function App() { } catch { // localStorage not available } - }, []) + }, [selectedProject, queryClient]) // Validate stored project exists (clear if project was deleted) useEffect(() => { @@ -172,7 +189,7 @@ function App() { return (
{/* Header */} -
+
{/* Logo and Title */} diff --git a/ui/src/components/AgentThought.tsx b/ui/src/components/AgentThought.tsx index 65a50a11..fa2f8ef5 100644 --- a/ui/src/components/AgentThought.tsx +++ b/ui/src/components/AgentThought.tsx @@ -60,18 +60,22 @@ export function AgentThought({ logs, agentStatus }: AgentThoughtProps) { : 0 // Determine if component should be visible + // Use displayedThought for visibility check to prevent flickering when + // new logs come in without a valid thought const shouldShow = useMemo(() => { - if (!thought) return false + const hasContent = thought || displayedThought + if (!hasContent) return false if (agentStatus === 'running') return true if (agentStatus === 'paused') { return Date.now() - lastLogTimestamp < IDLE_TIMEOUT } return false - }, [thought, agentStatus, lastLogTimestamp]) + }, [thought, displayedThought, agentStatus, lastLogTimestamp]) // Animate text changes using CSS transitions + // Only update displayedThought when we have a new valid thought useEffect(() => { - if (thought !== displayedThought && thought) { + if (thought && thought !== displayedThought) { // Fade out setTextVisible(false) // After fade out, update text and fade in @@ -89,11 +93,16 @@ export function AgentThought({ logs, agentStatus }: AgentThoughtProps) { setIsVisible(true) } else { // Delay hiding to allow exit animation - const timeout = setTimeout(() => setIsVisible(false), 300) + const timeout = setTimeout(() => { + setIsVisible(false) + // Clear displayed thought only after fully hidden + setDisplayedThought(null) + }, 300) return () => clearTimeout(timeout) } }, [shouldShow]) + // Don't render if not visible or no content to display if (!isVisible || !displayedThought) return null const isRunning = agentStatus === 'running' diff --git a/ui/src/components/AssistantChat.tsx b/ui/src/components/AssistantChat.tsx index ef8aeb32..422a40d3 100644 --- a/ui/src/components/AssistantChat.tsx +++ b/ui/src/components/AssistantChat.tsx @@ -131,7 +131,7 @@ export function AssistantChat({ projectName }: AssistantChatProps) { )} {/* Input area */} -
+
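
Both repair paths in this change (the `feature_db_repair` MCP tool and the `/repair` endpoint) compact IDs with the same two-phase trick: every row is first moved to a temporary negative ID, then rewritten with a sequential positive ID, so the new keys can never collide with the old ones. A minimal standalone sketch of that trick, assuming only an in-memory `sqlite3` table whose columns are illustrative rather than the real `features` schema:

```python
# Sketch of the two-phase ID compaction on a toy table (not the real schema).
import sqlite3

conn = sqlite3.connect(":memory:")
conn.execute("CREATE TABLE features (id INTEGER PRIMARY KEY, name TEXT, priority INTEGER)")
# Simulate the gaps left behind after duplicate rows were deleted.
for old_id in (2, 5, 9):
    conn.execute("INSERT INTO features VALUES (?, ?, ?)", (old_id, f"feature-{old_id}", old_id))

# Capture the desired ordering before touching any IDs.
ordered = [row[0] for row in conn.execute("SELECT id FROM features ORDER BY priority, id")]

# Phase 1: negate every ID so the sequential IDs assigned next cannot collide.
conn.execute("UPDATE features SET id = -id")

# Phase 2: hand out 1..N in the original order, resetting priority to match.
for new_id, old_id in enumerate(ordered, start=1):
    conn.execute("UPDATE features SET id = ?, priority = ? WHERE id = ?", (new_id, new_id, -old_id))
conn.commit()

print(conn.execute("SELECT id, name, priority FROM features ORDER BY id").fetchall())
# -> [(1, 'feature-2', 1), (2, 'feature-5', 2), (3, 'feature-9', 3)]
```

Negation works because the existing IDs are all positive, so the temporary keys collide with neither the old IDs nor the new sequential ones, and no constraint has to be dropped or table copied along the way.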