diff --git a/.env.example b/.env.example
new file mode 100644
index 0000000..912af37
--- /dev/null
+++ b/.env.example
@@ -0,0 +1,91 @@
+# =============================================================================
+# NeroSpatial Backend - Environment Configuration Template
+# =============================================================================
+# Copy this file to .env and fill in your values:
+# cp .env.example .env
+#
+# IMPORTANT: Never commit .env to Git!
+# =============================================================================
+
+# -----------------------------------------------------------------------------
+# Application
+# -----------------------------------------------------------------------------
+APP_NAME=NeroSpatial Backend
+APP_VERSION=0.1.0
+DEBUG=true
+ENVIRONMENT=development
+LOG_LEVEL=INFO
+
+# -----------------------------------------------------------------------------
+# Server
+# -----------------------------------------------------------------------------
+HOST=0.0.0.0
+PORT=8000
+
+# -----------------------------------------------------------------------------
+# Azure Key Vault (REQUIRED for production/staging)
+# All secrets are loaded from Key Vault; only these bootstrap credentials go in .env.
+# -----------------------------------------------------------------------------
+AZURE_KEY_VAULT_URL=https://your-vault-name.vault.azure.net/
+AZURE_TENANT_ID=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx
+AZURE_CLIENT_ID=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx
+AZURE_CLIENT_SECRET=your-client-secret-here
+
+# -----------------------------------------------------------------------------
+# Azure App Configuration (REQUIRED for production/staging)
+# Single source of truth for all non-secret configuration
+# -----------------------------------------------------------------------------
+AZURE_APP_CONFIG_URL=https://your-config-name.azconfig.io
+
+# -----------------------------------------------------------------------------
+# PostgreSQL (URLs only - password from Key Vault)
+# These are defaults/overrides. Production uses App Configuration.
+# -----------------------------------------------------------------------------
+POSTGRES_HOST=localhost
+POSTGRES_PORT=5432
+POSTGRES_DB=nerospatial
+POSTGRES_USER=nerospatial
+# POSTGRES_PASSWORD - Loaded from Key Vault secret "postgres-password"
+POSTGRES_POOL_MIN=5
+POSTGRES_POOL_MAX=20
+
+# -----------------------------------------------------------------------------
+# Redis (URLs only - password from Key Vault)
+# These are defaults/overrides. Production uses App Configuration.
+# -----------------------------------------------------------------------------
+REDIS_HOST=localhost
+REDIS_PORT=6379
+REDIS_DB=0
+# REDIS_PASSWORD - Loaded from Key Vault secret "redis-password"
+
+# -----------------------------------------------------------------------------
+# JWT Authentication
+# Keys loaded from Key Vault. These are defaults/overrides.
+# -----------------------------------------------------------------------------
+JWT_ALGORITHM=RS256
+JWT_ACCESS_TOKEN_TTL=900
+JWT_REFRESH_TOKEN_TTL=604800
+JWT_CACHE_TTL=300
+# JWT_PRIVATE_KEY - Loaded from Key Vault secret "jwt-private-key"
+# JWT_PUBLIC_KEY - Loaded from Key Vault secret "jwt-public-key"
+
+# -----------------------------------------------------------------------------
+# OpenTelemetry
+# -----------------------------------------------------------------------------
+OTEL_ENDPOINT=http://localhost:4317
+OTEL_ENABLE_TRACING=true
+OTEL_ENABLE_METRICS=true
+
+# -----------------------------------------------------------------------------
+# Startup Configuration
+# -----------------------------------------------------------------------------
+STARTUP_TIMEOUT_SECONDS=30
+STARTUP_RETRY_ATTEMPTS=3
+STARTUP_RETRY_DELAY_SECONDS=2
+
+# -----------------------------------------------------------------------------
+# Google OAuth (deferred to Phase 2)
+# -----------------------------------------------------------------------------
+# GOOGLE_CLIENT_ID - Loaded from Key Vault (when implemented)
+# GOOGLE_CLIENT_SECRET - Loaded from Key Vault (when implemented)
+# GOOGLE_REDIRECT_URI=http://localhost:8000/auth/callback
diff --git a/.gitignore b/.gitignore
index 6b7176e..e9be4c6 100644
--- a/.gitignore
+++ b/.gitignore
@@ -47,3 +47,5 @@ Thumbs.db
# UV
.uv/
+.cursor/
+*.mdc*
diff --git a/README.md b/README.md
index cc0c8ce..d32fc2d 100644
--- a/README.md
+++ b/README.md
@@ -37,15 +37,86 @@ uv run python main.py
## Endpoints
-- `GET /health` - Health check endpoint
-- `GET /helloword` - Hello world endpoint
+### Health Endpoints
+
+- `GET /health` - Detailed health check with dependency status
+- `GET /ready` - Readiness probe (Kubernetes/load balancer)
+- `GET /live` - Liveness probe (Kubernetes)
+
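+A healthy `/health` response looks like this (illustrative values):
+
+```json
+{
+  "status": "healthy",
+  "checks": {"database": true, "redis": true, "key_vault": true},
+  "metadata": {
+    "service": "NeroSpatial Backend",
+    "version": "0.1.0",
+    "environment": "development",
+    "uptime_seconds": 123.4
+  }
+}
+```
+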
+### Application Endpoints
+
+- `GET /helloworld` - Hello world endpoint
+
+## Infrastructure Setup
+
+### Local Development
+
+Start infrastructure services (PostgreSQL, Redis, Jaeger) using Docker Compose:
+
+```bash
+docker compose -f docker-compose.infra.yml up -d
+```
+
+This will start:
+
+- PostgreSQL on port 5432
+- Redis on port 6379
+- Jaeger (tracing) on ports 4317 (OTLP) and 16686 (UI)
+
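+To check service status or stop the stack:
+
+```bash
+docker compose -f docker-compose.infra.yml ps
+docker compose -f docker-compose.infra.yml down
+```
+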
+### Database Initialization
+
+The database schema is automatically initialized when the PostgreSQL container starts for the first time via `scripts/init-db.sql`.
+
+### JWT Key Generation
+
+Generate JWT RS256 keys for authentication:
+
+```bash
+./scripts/generate-keys.sh
+```
+
+This creates `keys/private.pem` and `keys/public.pem`. Store these in Azure Key Vault for production.
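+
+To generate the keys manually, the standard OpenSSL commands are
+(the script may differ in key size or output paths):
+
+```bash
+openssl genrsa -out keys/private.pem 2048
+openssl rsa -in keys/private.pem -pubout -out keys/public.pem
+```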
+
+### Azure Key Vault Setup
+
+Set up Azure Key Vault and upload secrets:
+
+```bash
+./scripts/setup-keyvault.sh
+```
+
+This script will:
+
+1. Create the Key Vault (if it doesn't already exist)
+2. Create a Service Principal with the required permissions
+3. Upload the JWT keys and other secrets
+4. Output the credentials for your `.env` file
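+
+The manual Azure CLI equivalents look roughly like this (placeholder names;
+the script's exact commands may differ):
+
+```bash
+az keyvault create --name your-vault-name --resource-group your-rg
+az keyvault secret set --vault-name your-vault-name \
+  --name jwt-private-key --file keys/private.pem
+```
+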
## Configuration
-Configuration is managed through:
+Configuration is managed through a hierarchy:
+
+1. **Azure App Configuration** (single source of truth for production/staging)
+
+ - Non-secret settings (URLs, ports, feature flags)
+ - Environment-specific configuration using labels
+
+2. **Azure Key Vault** (secrets)
+
+ - Passwords, JWT keys, OAuth credentials
+ - Referenced from App Configuration
+
+3. **`.env` file** (bootstrap and development fallback)
+ - Azure credentials to access App Config and Key Vault
+ - Local overrides for development
+ - Minimal - only what's needed to bootstrap
+
+### Environment Validation
+
+- **Production/Staging**: Requires Azure App Config and Key Vault URLs. Server will not start without them.
+- **Development**: Optional Azure services. Falls back to `.env` file if not configured.
-- `config.py` - Configuration module using Pydantic Settings
-- `.env` - Environment variables for secrets (will be replaced by Azure Key Vault in the future)
+See `.env.example` for all available configuration options.
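+
+For local development without Azure, a minimal `.env` can be as small as:
+
+```bash
+ENVIRONMENT=development
+DEBUG=true
+POSTGRES_PASSWORD=dev-password
+```
+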
## Pre-commit Hooks
diff --git a/api/__init__.py b/api/__init__.py
new file mode 100644
index 0000000..d1e594b
--- /dev/null
+++ b/api/__init__.py
@@ -0,0 +1 @@
+"""API routes package."""
diff --git a/api/health.py b/api/health.py
new file mode 100644
index 0000000..2a71e75
--- /dev/null
+++ b/api/health.py
@@ -0,0 +1,93 @@
+"""
+Health check endpoints for production monitoring.
+
+Provides /health, /ready, and /live endpoints for Kubernetes and load balancers.
+"""
+
+from datetime import UTC, datetime
+
+from fastapi import APIRouter, Request
+from fastapi.responses import JSONResponse
+
+from core.app_state import AppState
+from core.database import verify_database_connection
+from core.redis import verify_redis_connection
+
+router = APIRouter(tags=["Health"])
+
+
+@router.get("/health")
+async def health_check(request: Request) -> JSONResponse:
+ """
+ Detailed health check with dependency status.
+
+ Returns:
+ - status: overall health status
+ - checks: individual service checks
+ - metadata: app info and uptime
+ """
+ state: AppState = request.app.state.app_state
+
+ checks = {
+ "database": await verify_database_connection(state.db_pool),
+ "redis": await verify_redis_connection(state.redis_client),
+ "key_vault": state.key_vault.is_available() if state.key_vault else False,
+ }
+
+ all_healthy = all(checks.values())
+ uptime = (datetime.now(UTC) - state.started_at).total_seconds()
+
+ return JSONResponse(
+ status_code=200 if all_healthy else 503,
+ content={
+ "status": "healthy" if all_healthy else "unhealthy",
+ "checks": checks,
+ "metadata": {
+ "service": state.settings.app_name,
+ "version": state.settings.app_version,
+ "environment": state.settings.environment,
+ "uptime_seconds": uptime,
+ },
+ },
+ )
+
+
+@router.get("/ready")
+async def readiness_check(request: Request) -> JSONResponse:
+ """
+ Readiness probe for Kubernetes/load balancers.
+
+ Returns 200 only when app is fully initialized and ready.
+ Used by load balancers to know when to send traffic.
+ """
+ state: AppState = request.app.state.app_state
+
+ if not state.is_ready:
+ return JSONResponse(
+ status_code=503,
+ content={"status": "not_ready", "errors": state.startup_errors},
+ )
+
+ # Verify critical dependencies
+ db_ok = await verify_database_connection(state.db_pool)
+ redis_ok = await verify_redis_connection(state.redis_client)
+
+ if not (db_ok and redis_ok):
+ return JSONResponse(
+ status_code=503,
+ content={"status": "not_ready", "database": db_ok, "redis": redis_ok},
+ )
+
+ return JSONResponse(content={"status": "ready"})
+
+
+@router.get("/live")
+async def liveness_check() -> JSONResponse:
+ """
+ Liveness probe for Kubernetes.
+
+ Returns 200 if the process is alive.
+ Does NOT check dependencies - only process health.
+    Used by Kubernetes to decide when to restart the container.
+ """
+ return JSONResponse(content={"status": "alive"})
diff --git a/config.py b/config.py
index d9b45f4..1121869 100644
--- a/config.py
+++ b/config.py
@@ -1,10 +1,13 @@
"""
Configuration module for NeroSpatial Backend.
-This module handles application configuration. In the future, configurations
-will be fetched from Azure App Configuration Store.
+This module handles application configuration. Configurations are loaded from:
+1. Azure App Configuration (single source of truth for production/staging)
+2. Azure Key Vault (secrets)
+3. .env file (fallback for development, bootstrap credentials)
"""
+from pydantic import field_validator
from pydantic_settings import BaseSettings, SettingsConfigDict
@@ -15,21 +18,112 @@ class Settings(BaseSettings):
env_file=".env", env_file_encoding="utf-8", case_sensitive=False, extra="ignore"
)
- # Application settings
+ # =========================================================================
+ # Bootstrap Settings (from .env only)
+ # =========================================================================
+ azure_key_vault_url: str | None = None
+ azure_app_config_url: str | None = None
+ azure_tenant_id: str | None = None
+ azure_client_id: str | None = None
+ azure_client_secret: str | None = None
+ environment: str = "development"
+
+ # =========================================================================
+ # Application Settings
+ # =========================================================================
app_name: str = "NeroSpatial Backend"
app_version: str = "0.1.0"
debug: bool = False
-
- # Server settings
host: str = "0.0.0.0"
port: int = 8000
+ log_level: str = "INFO"
- # Azure settings (for future use)
- azure_key_vault_url: str | None = None
- azure_config_store_url: str | None = None
- azure_tenant_id: str | None = None
- azure_client_id: str | None = None
- azure_client_secret: str | None = None
+ # =========================================================================
+ # PostgreSQL
+ # =========================================================================
+ postgres_host: str = "localhost"
+ postgres_port: int = 5432
+ postgres_db: str = "nerospatial"
+ postgres_user: str = "nerospatial"
+ postgres_password: str | None = None
+ postgres_pool_min: int = 5
+ postgres_pool_max: int = 20
+
+ # =========================================================================
+ # Redis
+ # =========================================================================
+ redis_host: str = "localhost"
+ redis_port: int = 6379
+ redis_db: int = 0
+ redis_password: str | None = None
+
+ # =========================================================================
+ # JWT Authentication
+ # =========================================================================
+ jwt_algorithm: str = "RS256"
+ jwt_access_token_ttl: int = 900 # 15 minutes
+ jwt_refresh_token_ttl: int = 604800 # 7 days
+ jwt_cache_ttl: int = 300 # 5 minutes
+ jwt_private_key: str | None = None
+ jwt_public_key: str | None = None
+
+ # =========================================================================
+ # OpenTelemetry
+ # =========================================================================
+ otel_endpoint: str = "http://localhost:4317"
+ otel_enable_tracing: bool = True
+ otel_enable_metrics: bool = True
+
+ # =========================================================================
+ # Startup Configuration
+ # =========================================================================
+ startup_timeout_seconds: int = 30
+ startup_retry_attempts: int = 3
+ startup_retry_delay_seconds: int = 2
+
+ @field_validator("environment")
+ @classmethod
+ def validate_environment(cls, v: str) -> str:
+ """Validate environment is one of allowed values."""
+ allowed = {"development", "staging", "production"}
+ if v.lower() not in allowed:
+ raise ValueError(f"environment must be one of {allowed}, got '{v}'")
+ return v.lower()
+
+ def is_production(self) -> bool:
+ """Check if running in production environment."""
+ return self.environment.lower() == "production"
+
+ def is_staging(self) -> bool:
+ """Check if running in staging environment."""
+ return self.environment.lower() == "staging"
+
+ def is_development(self) -> bool:
+ """Check if running in development environment."""
+ return self.environment.lower() == "development"
+
+ @property
+ def postgres_url(self) -> str:
+ """Build PostgreSQL connection URL."""
+ if not self.postgres_password:
+ return (
+ f"postgresql://{self.postgres_user}@{self.postgres_host}:"
+ f"{self.postgres_port}/{self.postgres_db}"
+ )
+ return (
+ f"postgresql://{self.postgres_user}:{self.postgres_password}"
+ f"@{self.postgres_host}:{self.postgres_port}/{self.postgres_db}"
+ )
+
+ @property
+ def redis_url(self) -> str:
+ """Build Redis connection URL."""
+ if self.redis_password:
+ return (
+ f"redis://:{self.redis_password}@{self.redis_host}:"
+ f"{self.redis_port}/{self.redis_db}"
+ )
+ return f"redis://{self.redis_host}:{self.redis_port}/{self.redis_db}"
# Global settings instance
diff --git a/core/__init__.py b/core/__init__.py
index 849788d..db099b8 100644
--- a/core/__init__.py
+++ b/core/__init__.py
@@ -1,5 +1,22 @@
"""Core module for NeroSpatial Backend - shared utilities."""
+from core.app_state import AppState, DatabasePool, RedisClient
+from core.auth import JWTAuth
+from core.config_loader import ConfigLoader
+from core.database import create_database_pool, verify_database_connection
+from core.exceptions import (
+ AuthenticationError,
+ AuthorizationError,
+ CircuitBreakerOpenError,
+ DatabaseError,
+ LLMProviderError,
+ NeroSpatialException,
+ RateLimitExceeded,
+ SessionExpiredError,
+ SessionNotFoundError,
+ ValidationError,
+ VLMTimeoutError,
+)
from core.keyvault import KeyVaultClient
from core.logger import (
TraceContext,
@@ -9,47 +26,94 @@
setup_logging,
)
from core.models import (
+ # Enums - User & Auth
+ AuditAction,
+ # Models - User & Auth
+ AuditLog,
+ # Models - Control & Binary
BinaryFrame,
ControlMessage,
+ # Enums - Session & Control
ControlMessageType,
+ # Models - Interaction
ConversationHistory,
FrameFlags,
InteractionTurn,
- OAuthTokens,
- # Enums
+ OAuthProvider,
+ RefreshToken,
SessionMode,
- # Models
+ # Models - Session
SessionState,
StreamType,
TokenBlacklistEntry,
+ TokenRevocationReason,
User,
UserContext,
UserStatus,
)
+from core.redis import create_redis_client, verify_redis_connection
+from core.telemetry import Metrics, TelemetryManager
__all__ = [
+ # App State
+ "AppState",
+ "DatabasePool",
+ "RedisClient",
+ # Config
+ "ConfigLoader",
+ # Database
+ "create_database_pool",
+ "verify_database_connection",
# KeyVault
"KeyVaultClient",
+ # Redis
+ "create_redis_client",
+ "verify_redis_connection",
# Logger
"get_logger",
"setup_logging",
"set_trace_id",
"get_trace_id",
"TraceContext",
- # Model Enums
+ # Auth
+ "JWTAuth",
+ # Exceptions
+ "NeroSpatialException",
+ "AuthenticationError",
+ "AuthorizationError",
+ "SessionExpiredError",
+ "SessionNotFoundError",
+ "VLMTimeoutError",
+ "LLMProviderError",
+ "CircuitBreakerOpenError",
+ "DatabaseError",
+ "RateLimitExceeded",
+ "ValidationError",
+ # Telemetry
+ "TelemetryManager",
+ "Metrics",
+ # Enums - User & Auth
+ "UserStatus",
+ "OAuthProvider",
+ "TokenRevocationReason",
+ "AuditAction",
+ # Enums - Session & Control
"SessionMode",
"ControlMessageType",
"StreamType",
"FrameFlags",
- "UserStatus",
- # Models
- "SessionState",
+ # Models - User & Auth
+ "User",
"UserContext",
- "OAuthTokens",
+ "RefreshToken",
"TokenBlacklistEntry",
- "User",
+ "AuditLog",
+ # Models - Session
+ "SessionState",
+ # Models - Interaction
"InteractionTurn",
"ConversationHistory",
+ # Models - Control & Binary
"ControlMessage",
"BinaryFrame",
]
diff --git a/core/app_state.py b/core/app_state.py
new file mode 100644
index 0000000..aac669f
--- /dev/null
+++ b/core/app_state.py
@@ -0,0 +1,92 @@
+"""
+Application state management for NeroSpatial Backend.
+
+Provides centralized state container for all application services and resources.
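+
+Typical wiring during startup (illustrative sketch):
+
+    state = AppState(settings=settings)
+    state.db_pool = await create_database_pool(settings)
+    app.state.app_state = state
+    state.mark_ready()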
+"""
+
+from dataclasses import dataclass, field
+from datetime import UTC, datetime
+from typing import Any, Protocol
+
+from config import Settings
+from core.auth import JWTAuth
+from core.keyvault import KeyVaultClient
+from core.telemetry import TelemetryManager
+
+
+class DatabasePool(Protocol):
+ """Protocol for database connection pool."""
+
+ async def acquire(self) -> Any:
+ """Acquire connection from pool."""
+ ...
+
+ async def release(self, conn: Any) -> None:
+ """Release connection back to pool."""
+ ...
+
+ async def close(self) -> None:
+ """Close pool and all connections."""
+ ...
+
+ async def execute(self, query: str, *args: Any) -> Any:
+ """Execute query directly (convenience method)."""
+ ...
+
+
+class RedisClient(Protocol):
+ """Protocol for Redis client."""
+
+ async def get(self, key: str) -> str | None:
+ """Get value from Redis."""
+ ...
+
+ async def setex(self, key: str, ttl: int, value: str) -> None:
+ """Set value with TTL."""
+ ...
+
+ async def delete(self, key: str) -> None:
+ """Delete key."""
+ ...
+
+ async def ping(self) -> bool:
+ """Ping Redis server."""
+ ...
+
+ async def close(self) -> None:
+ """Close Redis connection."""
+ ...
+
+
+@dataclass
+class AppState:
+ """Application state container."""
+
+ settings: Settings
+ db_pool: DatabasePool | None = None
+ redis_client: RedisClient | None = None
+ jwt_auth: JWTAuth | None = None
+ telemetry: TelemetryManager | None = None
+ key_vault: KeyVaultClient | None = None
+
+ # Startup metadata
+ started_at: datetime = field(default_factory=lambda: datetime.now(UTC))
+ is_ready: bool = False
+ startup_errors: list[str] = field(default_factory=list)
+
+ def mark_ready(self) -> None:
+ """Mark application as ready to accept traffic."""
+ self.is_ready = True
+
+ def add_startup_error(self, error: str) -> None:
+ """Record startup error."""
+ self.startup_errors.append(error)
+
+ async def cleanup(self) -> None:
+ """Cleanup all resources."""
+ if self.redis_client:
+ await self.redis_client.close()
+ if self.db_pool:
+ await self.db_pool.close()
+ if self.telemetry:
+ self.telemetry.shutdown()
diff --git a/core/auth.py b/core/auth.py
new file mode 100644
index 0000000..edafbc9
--- /dev/null
+++ b/core/auth.py
@@ -0,0 +1,622 @@
+"""
+JWT authentication and user context management.
+
+Provides JWT token validation, user context extraction with caching,
+token generation, refresh, and blacklist management.
+"""
+
+import hashlib
+import json
+import secrets
+from datetime import UTC, datetime, timedelta
+from typing import Any, Protocol
+from uuid import UUID, uuid4
+
+import jwt
+from jwt import PyJWKClient
+
+from core.exceptions import AuthenticationError, AuthorizationError
+from core.logger import get_logger, get_trace_id
+from core.models import (
+ OAuthProvider,
+ RefreshToken,
+ TokenBlacklistEntry,
+ TokenRevocationReason,
+ User,
+ UserContext,
+ UserStatus,
+)
+
+logger = get_logger(__name__)
+
+
+class RedisClientProtocol(Protocol):
+ """Protocol for Redis client interface."""
+
+ async def get(self, key: str) -> str | None:
+ """Get value from Redis."""
+ ...
+
+ async def setex(self, key: str, ttl: int, value: str) -> None:
+ """Set value with TTL."""
+ ...
+
+ async def delete(self, key: str) -> None:
+ """Delete key."""
+ ...
+
+ async def exists(self, key: str) -> bool:
+ """Check if key exists."""
+ ...
+
+
+class PostgresClientProtocol(Protocol):
+ """Protocol for Postgres client interface."""
+
+ async def get_user(self, user_id: UUID) -> User | None:
+ """Get user by ID."""
+ ...
+
+ async def get_user_by_email(self, email: str) -> User | None:
+ """Get user by email."""
+ ...
+
+ async def create_refresh_token(self, token: RefreshToken) -> None:
+ """Create refresh token."""
+ ...
+
+ async def get_refresh_token(self, token_hash: str) -> RefreshToken | None:
+ """Get refresh token by hash."""
+ ...
+
+ async def rotate_refresh_token(
+ self, old_token_id: UUID, new_token: RefreshToken
+ ) -> None:
+ """Rotate refresh token."""
+ ...
+
+ async def delete_user_refresh_tokens(self, user_id: UUID) -> None:
+ """Delete all refresh tokens for user."""
+ ...
+
+ async def create_token_blacklist_entry(self, entry: TokenBlacklistEntry) -> None:
+ """Create blacklist entry."""
+ ...
+
+
+class JWTAuth:
+ """
+ JWT authentication and user context management.
+
+ Handles JWT validation, user context extraction with Redis caching,
+ token generation, refresh with rotation, and blacklist management.
+
+ Note: Requires Redis and Postgres clients for full functionality.
+ Can work with mocks for testing.
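+
+    Example (illustrative, assuming PEM-encoded RS256 keys):
+
+        auth = JWTAuth(private_key=private_pem, public_key=public_pem)
+        access, refresh = await auth.generate_tokens(user)
+        context = await auth.extract_user_context(access)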
+ """
+
+ def __init__(
+ self,
+ private_key: str | None = None,
+ public_key: str | None = None,
+ public_key_url: str | None = None,
+ algorithm: str = "RS256",
+ access_token_ttl: int = 900, # 15 minutes
+ refresh_token_ttl: int = 604800, # 7 days
+ cache_ttl_seconds: int = 300, # 5 minutes
+ redis_client: RedisClientProtocol | None = None,
+ postgres_client: PostgresClientProtocol | None = None,
+ ):
+ """
+ Initialize JWT auth.
+
+ Args:
+ private_key: RS256 private key for signing (PEM format)
+ public_key: RS256 public key for verification (PEM format)
+ public_key_url: URL to fetch JWKS (alternative to public_key)
+ algorithm: JWT algorithm (RS256 or HS256)
+ access_token_ttl: Access token TTL in seconds (default 15 min)
+ refresh_token_ttl: Refresh token TTL in seconds (default 7 days)
+ cache_ttl_seconds: User context cache TTL in seconds (default 5 min)
+ redis_client: Redis client for caching and blacklist (optional)
+ postgres_client: Postgres client for user lookup (optional)
+ """
+ self.algorithm = algorithm
+ self.access_token_ttl = access_token_ttl
+ self.refresh_token_ttl = refresh_token_ttl
+ self.cache_ttl = cache_ttl_seconds
+ self.redis_client = redis_client
+ self.postgres_client = postgres_client
+
+ # Setup JWT verification
+ if public_key_url:
+ self.jwks_client: PyJWKClient | None = PyJWKClient(public_key_url)
+ self.public_key: str | None = None
+ elif public_key:
+ self.jwks_client = None
+ self.public_key = public_key
+ else:
+ raise ValueError("Either public_key_url or public_key required")
+
+ # Private key for signing (if provided)
+ self.private_key = private_key
+
+ logger.info(
+ f"JWTAuth initialized with algorithm={algorithm}, "
+ f"access_ttl={access_token_ttl}s, refresh_ttl={refresh_token_ttl}s"
+ )
+
+ async def validate_token(self, token: str) -> dict[str, Any]:
+ """
+ Validate JWT token and return claims.
+
+ Args:
+ token: JWT token string
+
+ Returns:
+ Decoded JWT claims
+
+ Raises:
+ AuthenticationError: If token is invalid, expired, or blacklisted
+ """
+ trace_id = get_trace_id()
+
+ try:
+ # Get signing key
+ if self.jwks_client:
+ # RS256: Fetch signing key from JWKS
+ signing_key = self.jwks_client.get_signing_key_from_jwt(token)
+ key = signing_key.key
+ else:
+ # HS256 or direct public key
+ key = self.public_key
+
+ # Decode and verify token
+ payload = jwt.decode(
+ token,
+ key,
+ algorithms=[self.algorithm],
+ options={"verify_exp": True, "verify_signature": True},
+ )
+
+ # Check blacklist
+ jti = payload.get("jti")
+            if jti and await self.is_blacklisted(jti):
+                sub = payload.get("sub") or payload.get("user_id")
+                raise AuthenticationError(
+                    "Token is blacklisted",
+                    trace_id=trace_id,
+                    user_id=UUID(sub) if sub else None,
+                )
+
+ return payload
+
+ except jwt.ExpiredSignatureError as e:
+ raise AuthenticationError(
+ "Token expired",
+ trace_id=trace_id,
+ ) from e
+ except jwt.InvalidTokenError as e:
+ raise AuthenticationError(
+ f"Invalid token: {str(e)}",
+ trace_id=trace_id,
+ ) from e
+
+ async def extract_user_context(self, token: str) -> UserContext:
+ """
+ Extract user context from JWT with Redis caching.
+
+ Flow:
+ 1. Validate token
+ 2. Check blacklist
+ 3. Check Redis cache
+ 4. If cache miss, build from claims (or query DB if needed)
+ 5. Cache result
+
+ Args:
+ token: JWT token string
+
+ Returns:
+ UserContext instance
+
+ Raises:
+ AuthenticationError: If token is invalid
+ AuthorizationError: If user status is not ACTIVE
+ """
+ trace_id = get_trace_id()
+
+ # Validate token
+ claims = await self.validate_token(token)
+
+ user_id = UUID(claims.get("sub") or claims.get("user_id", ""))
+ if not user_id:
+ raise AuthenticationError(
+ "Token missing user_id claim",
+ trace_id=trace_id,
+ )
+
+ # Check Redis cache
+ if self.redis_client:
+ cache_key = f"user:context:{user_id}"
+ cached = await self.redis_client.get(cache_key)
+ if cached:
+ try:
+ import json
+
+ cached_data = json.loads(cached)
+ context = UserContext(**cached_data)
+ # Check if still valid (not expired)
+ if not context.is_expired():
+ logger.debug(f"User context cache hit for user {user_id}")
+ return context
+ except Exception as e:
+ logger.warning(f"Failed to parse cached user context: {e}")
+
+ # Build user context from claims
+ # If postgres_client is available, we could fetch full user data
+ # For now, build from JWT claims
+ context = UserContext(
+ user_id=user_id,
+ email=claims.get("email", ""),
+ name=claims.get("name"),
+ oauth_provider=OAuthProvider(claims.get("oauth_provider", "google")),
+ status=UserStatus(claims.get("status", "active")),
+ token_id=claims.get("jti", ""),
+ issued_at=datetime.fromtimestamp(claims.get("iat", 0), tz=UTC),
+ expires_at=datetime.fromtimestamp(claims.get("exp", 0), tz=UTC),
+ session_id=UUID(claims["session_id"]) if claims.get("session_id") else None,
+ )
+
+ # Check user status
+ if not context.is_active():
+ raise AuthorizationError(
+ f"User status is {context.status.value}, not ACTIVE",
+ trace_id=trace_id,
+ user_id=user_id,
+ )
+
+ # Cache with TTL
+ if self.redis_client:
+ cache_key = f"user:context:{user_id}"
+ try:
+ await self.redis_client.setex(
+ cache_key,
+ self.cache_ttl,
+ json.dumps(context.model_dump(), default=str),
+ )
+ except Exception as e:
+ logger.warning(f"Failed to cache user context: {e}")
+
+ return context
+
+ async def generate_tokens(
+ self,
+ user: User,
+ ip_address: str | None = None,
+ user_agent: str | None = None,
+ ) -> tuple[str, str]:
+ """
+ Generate access token and refresh token.
+
+ Args:
+ user: User model instance
+ ip_address: Client IP address (optional)
+ user_agent: Client user agent (optional)
+
+ Returns:
+ Tuple of (access_token, refresh_token)
+
+ Raises:
+ ValueError: If private_key not provided
+ """
+ if not self.private_key:
+ raise ValueError("private_key required for token generation")
+
+ trace_id = get_trace_id()
+ now = datetime.now(UTC)
+
+ # Generate JWT ID for access token
+ access_jti = str(uuid4())
+
+ # Access token claims
+ access_token_claims = {
+ "sub": str(user.user_id),
+ "user_id": str(user.user_id),
+ "email": user.email,
+ "name": user.name,
+ "oauth_provider": user.oauth_provider.value,
+ "status": user.status.value,
+ "jti": access_jti,
+ "iat": int(now.timestamp()),
+ "exp": int((now + timedelta(seconds=self.access_token_ttl)).timestamp()),
+ "type": "access",
+ }
+
+ # Sign access token
+ access_token = jwt.encode(
+ access_token_claims,
+ self.private_key,
+ algorithm=self.algorithm,
+ )
+
+ # Generate refresh token
+ refresh_token_value = secrets.token_urlsafe(32)
+ refresh_token_id = uuid4()
+ refresh_token_hash = hashlib.sha256(refresh_token_value.encode()).hexdigest()
+
+ expires_at = now + timedelta(seconds=self.refresh_token_ttl)
+
+ # Create refresh token model
+ refresh_token = RefreshToken(
+ token_id=refresh_token_id,
+ user_id=user.user_id,
+ token_hash=refresh_token_hash,
+ expires_at=expires_at,
+ created_at=now,
+ ip_address=ip_address,
+ user_agent=user_agent,
+ )
+
+ # Store refresh token in database
+ if self.postgres_client:
+ try:
+ await self.postgres_client.create_refresh_token(refresh_token)
+ except Exception as e:
+ logger.error(f"Failed to store refresh token: {e}")
+ raise
+
+ logger.info(
+ f"Generated tokens for user {user.user_id}",
+ extra={"trace_id": trace_id, "user_id": str(user.user_id)},
+ )
+
+ return access_token, refresh_token_value
+
+ async def refresh_tokens(
+ self,
+ refresh_token: str,
+ ip_address: str | None = None,
+ ) -> tuple[str, str]:
+ """
+ Refresh tokens with rotation.
+
+ Flow:
+ 1. Hash refresh token and find in database
+ 2. Check if expired or rotated
+ 3. Get user and check status
+ 4. Generate new tokens
+ 5. Mark old token as rotated
+ 6. Blacklist old access token jti (if available)
+
+ Args:
+ refresh_token: Current refresh token string
+ ip_address: Client IP address (optional)
+
+ Returns:
+ Tuple of (new_access_token, new_refresh_token)
+
+ Raises:
+ AuthenticationError: If refresh token is invalid or expired
+ AuthorizationError: If user is not active
+ ValueError: If postgres_client not available
+ """
+ if not self.postgres_client:
+ raise ValueError("postgres_client required for token refresh")
+
+ trace_id = get_trace_id()
+
+ # Hash refresh token
+ token_hash = hashlib.sha256(refresh_token.encode()).hexdigest()
+
+ # Find refresh token in database
+ stored_token = await self.postgres_client.get_refresh_token(token_hash)
+ if not stored_token:
+ raise AuthenticationError(
+ "Refresh token not found",
+ trace_id=trace_id,
+ )
+
+ # Check if expired or rotated
+ if not stored_token.is_valid():
+ raise AuthenticationError(
+ "Refresh token expired or rotated",
+ trace_id=trace_id,
+ user_id=stored_token.user_id,
+ )
+
+ # Get user
+ user = await self.postgres_client.get_user(stored_token.user_id)
+ if not user:
+ raise AuthenticationError(
+ "User not found",
+ trace_id=trace_id,
+ user_id=stored_token.user_id,
+ )
+
+ # Check user status
+ if not user.is_active():
+ raise AuthorizationError(
+ f"User status is {user.status.value}, not ACTIVE",
+ trace_id=trace_id,
+ user_id=user.user_id,
+ )
+
+ # Generate new tokens
+ new_access_token, new_refresh_token = await self.generate_tokens(
+ user, ip_address=ip_address
+ )
+
+ # Mark old token as rotated
+ new_refresh_token_hash = hashlib.sha256(new_refresh_token.encode()).hexdigest()
+ new_refresh_token_model = RefreshToken(
+ token_id=uuid4(),
+ user_id=user.user_id,
+ token_hash=new_refresh_token_hash,
+ expires_at=datetime.now(UTC) + timedelta(seconds=self.refresh_token_ttl),
+ created_at=datetime.now(UTC),
+ previous_token_id=stored_token.token_id,
+ rotated_at=datetime.now(UTC),
+ )
+
+ await self.postgres_client.rotate_refresh_token(
+ stored_token.token_id, new_refresh_token_model
+ )
+
+ logger.info(
+ f"Refreshed tokens for user {user.user_id}",
+ extra={"trace_id": trace_id, "user_id": str(user.user_id)},
+ )
+
+ return new_access_token, new_refresh_token
+
+ async def blacklist_token(
+ self,
+ jti: str,
+ user_id: UUID,
+ reason: TokenRevocationReason,
+ expires_at: datetime,
+ ip_address: str | None = None,
+ ) -> None:
+ """
+ Add token to blacklist (Redis + PostgreSQL).
+
+ Args:
+ jti: JWT ID (jti claim)
+ user_id: User ID who owns the token
+ reason: Revocation reason
+ expires_at: Original token expiration time
+ ip_address: IP address where revocation occurred (optional)
+ """
+ trace_id = get_trace_id()
+
+ # Create blacklist entry
+ entry = TokenBlacklistEntry(
+ token_id=jti,
+ user_id=user_id,
+ revoked_at=datetime.now(UTC),
+ expires_at=expires_at,
+ reason=reason,
+ ip_address=ip_address,
+ )
+
+ # Store in Redis (fast lookup)
+ if self.redis_client:
+ redis_key = f"blacklist:{jti}"
+ ttl = int((expires_at - datetime.now(UTC)).total_seconds())
+ if ttl > 0:
+ try:
+ await self.redis_client.setex(redis_key, ttl, "1")
+ except Exception as e:
+ logger.warning(f"Failed to blacklist token in Redis: {e}")
+
+ # Store in PostgreSQL (persistence)
+ if self.postgres_client:
+ try:
+ await self.postgres_client.create_token_blacklist_entry(entry)
+ except Exception as e:
+ logger.warning(f"Failed to blacklist token in Postgres: {e}")
+
+ logger.info(
+ f"Token blacklisted: {jti}",
+ extra={
+ "trace_id": trace_id,
+ "user_id": str(user_id),
+ "reason": reason.value,
+ },
+ )
+
+ async def is_blacklisted(self, jti: str) -> bool:
+ """
+ Check if token is blacklisted (Redis fast lookup).
+
+ Args:
+ jti: JWT ID (jti claim)
+
+ Returns:
+ True if token is blacklisted
+ """
+        if not self.redis_client:
+            # Without Redis there is no fast blacklist lookup.
+            # A full implementation would fall back to querying Postgres;
+            # for now, fail open and treat the token as not blacklisted.
+            return False
+
+ redis_key = f"blacklist:{jti}"
+ exists = await self.redis_client.exists(redis_key)
+ return exists
+
+ async def logout(
+ self,
+ token: str,
+ ip_address: str | None = None,
+ ) -> None:
+ """
+ Logout user.
+
+ Flow:
+ 1. Extract jti from token
+ 2. Blacklist token
+ 3. Delete all refresh tokens for user
+ 4. Invalidate user context cache
+
+ Args:
+ token: JWT access token
+ ip_address: IP address where logout occurred (optional)
+ """
+ trace_id = get_trace_id()
+
+ try:
+ # Validate token to extract claims
+ claims = await self.validate_token(token)
+ jti = claims.get("jti")
+ user_id = UUID(claims.get("sub") or claims.get("user_id", ""))
+ expires_at = datetime.fromtimestamp(claims.get("exp", 0), tz=UTC)
+
+ if jti:
+ # Blacklist token
+ await self.blacklist_token(
+ jti,
+ user_id,
+ TokenRevocationReason.LOGOUT,
+ expires_at,
+ ip_address,
+ )
+
+ # Delete all refresh tokens for user
+ if self.postgres_client:
+ try:
+ await self.postgres_client.delete_user_refresh_tokens(user_id)
+ except Exception as e:
+ logger.warning(f"Failed to delete refresh tokens: {e}")
+
+ # Invalidate user context cache
+ if self.redis_client:
+ cache_key = f"user:context:{user_id}"
+ try:
+ await self.redis_client.delete(cache_key)
+ except Exception as e:
+ logger.warning(f"Failed to invalidate cache: {e}")
+
+ logger.info(
+ f"User logged out: {user_id}",
+ extra={"trace_id": trace_id, "user_id": str(user_id)},
+ )
+
+ except AuthenticationError:
+ # If token is invalid, still try to clean up if we have user_id
+ # This handles edge cases where token is expired but logout is called
+ logger.warning(
+ "Logout called with invalid token, cleanup may be incomplete"
+ )
+
+    @staticmethod
+    def generate_trace_id() -> str:
+        """
+        Generate unique trace ID for request.
+
+        Returns:
+            UUID string as trace ID
+        """
+        return str(uuid4())
diff --git a/core/config_loader.py b/core/config_loader.py
new file mode 100644
index 0000000..0cb2281
--- /dev/null
+++ b/core/config_loader.py
@@ -0,0 +1,230 @@
+"""
+Configuration loader from Azure App Configuration + Key Vault.
+
+Provides single source of truth for configuration with:
+- Azure App Configuration for non-secret settings
+- Azure Key Vault for secrets
+- Environment-based validation (strict for prod/staging)
+- Retry logic with exponential backoff
+- Fallback to .env for development
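+
+Typical startup usage (illustrative):
+
+    loader = ConfigLoader(bootstrap_settings=settings)
+    config_values = await loader.load()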
+"""
+
+import asyncio
+import json
+from typing import Any
+
+from azure.appconfiguration import AzureAppConfigurationClient
+from azure.identity import ClientSecretCredential, DefaultAzureCredential
+
+from config import Settings
+from core.exceptions import ValidationError
+from core.keyvault import KeyVaultClient
+from core.logger import get_logger
+
+logger = get_logger(__name__)
+
+
+class ConfigLoader:
+ """Load configuration from Azure App Configuration + Key Vault."""
+
+ def __init__(self, bootstrap_settings: Settings):
+ """
+ Initialize configuration loader.
+
+ Args:
+ bootstrap_settings: Settings loaded from .env file
+ """
+ self.bootstrap = bootstrap_settings
+ self.key_vault: KeyVaultClient | None = None
+ self.app_config: AzureAppConfigurationClient | None = None
+
+ def _validate_requirements(self) -> None:
+ """
+ Validate Azure configuration requirements based on environment.
+
+ Rules:
+ - production/staging: MUST have App Config and Key Vault URLs
+ - development: Optional, falls back to .env
+
+ Raises:
+ ValidationError: If production/staging missing required Azure config
+ """
+ env = self.bootstrap.environment.lower()
+
+ if env in ("production", "staging"):
+ # Strict requirements for production/staging
+ missing = []
+
+ if not self.bootstrap.azure_app_config_url:
+ missing.append("AZURE_APP_CONFIG_URL")
+
+ if not self.bootstrap.azure_key_vault_url:
+ missing.append("AZURE_KEY_VAULT_URL")
+
+ if missing:
+ raise ValidationError(
+ f"Environment '{env}' requires Azure services. "
+ f"Missing: {', '.join(missing)}. "
+ f"Set these in .env or environment variables.",
+ field="azure_config",
+ )
+
+ # Also require credentials for production/staging
+ if not all(
+ [
+ self.bootstrap.azure_tenant_id,
+ self.bootstrap.azure_client_id,
+ self.bootstrap.azure_client_secret,
+ ]
+ ):
+ raise ValidationError(
+ f"Environment '{env}' requires Azure credentials. "
+ f"Set AZURE_TENANT_ID, AZURE_CLIENT_ID, and AZURE_CLIENT_SECRET.",
+ field="azure_credentials",
+ )
+
+ logger.info(
+ f"Environment '{env}' validated: "
+ "Azure App Config and Key Vault required"
+ )
+ else:
+ # Development: Optional, will fallback to .env
+ if not self.bootstrap.azure_app_config_url:
+ logger.warning(
+ "AZURE_APP_CONFIG_URL not set. "
+ "Development mode: falling back to .env file only."
+ )
+ if not self.bootstrap.azure_key_vault_url:
+ logger.warning(
+ "AZURE_KEY_VAULT_URL not set. "
+ "Development mode: falling back to .env file only."
+ )
+
+ async def load(self) -> dict[str, Any]:
+ """
+ Load all configuration with retry logic.
+
+ Returns:
+ Dictionary of configuration key-value pairs
+
+ Raises:
+ ValidationError: If production/staging missing required Azure config
+ """
+ self._validate_requirements()
+
+ # If no Azure services configured, return empty dict (use .env fallback)
+ if not self.bootstrap.azure_app_config_url:
+ if self.bootstrap.is_development():
+ logger.info("Using .env file configuration only (development mode)")
+ return {}
+ # This should have been caught by validation, but double-check
+ raise ValidationError(
+ f"Environment '{self.bootstrap.environment}' "
+ "requires Azure App Configuration",
+ field="azure_app_config_url",
+ )
+
+ return await self._load_with_retry()
+
+ async def _load_with_retry(self) -> dict[str, Any]:
+ """
+ Load from Azure with exponential backoff retry.
+
+ Returns:
+ Dictionary of configuration key-value pairs
+
+ Raises:
+ Exception: If all retry attempts fail
+ """
+ for attempt in range(self.bootstrap.startup_retry_attempts):
+ try:
+ return await self._load_from_azure()
+ except Exception as e:
+ if attempt == self.bootstrap.startup_retry_attempts - 1:
+ logger.error(
+ f"Failed to load configuration after "
+ f"{self.bootstrap.startup_retry_attempts} attempts: {e}"
+ )
+ raise
+ delay = self.bootstrap.startup_retry_delay_seconds * (2**attempt)
+ logger.warning(
+ f"Configuration load attempt {attempt + 1} failed: {e}. "
+ f"Retrying in {delay} seconds..."
+ )
+ await asyncio.sleep(delay)
+
+ # Should never reach here, but satisfy type checker
+        raise RuntimeError("Failed to load configuration")
+
+ async def _load_from_azure(self) -> dict[str, Any]:
+ """
+ Load configuration from Azure App Configuration + Key Vault.
+
+ Returns:
+ Dictionary of configuration key-value pairs
+ """
+ # Initialize Azure clients
+ if (
+ self.bootstrap.azure_tenant_id
+ and self.bootstrap.azure_client_id
+ and self.bootstrap.azure_client_secret
+ ):
+ credential = ClientSecretCredential(
+ tenant_id=self.bootstrap.azure_tenant_id,
+ client_id=self.bootstrap.azure_client_id,
+ client_secret=self.bootstrap.azure_client_secret,
+ )
+ else:
+ # Use DefaultAzureCredential (Managed Identity, Azure CLI, etc.)
+ credential = DefaultAzureCredential()
+
+ self.app_config = AzureAppConfigurationClient(
+ base_url=self.bootstrap.azure_app_config_url,
+ credential=credential,
+ )
+
+ self.key_vault = KeyVaultClient(
+ vault_url=self.bootstrap.azure_key_vault_url,
+ tenant_id=self.bootstrap.azure_tenant_id,
+ client_id=self.bootstrap.azure_client_id,
+ client_secret=self.bootstrap.azure_client_secret,
+ )
+
+ # Load all configuration from App Config
+ config_dict: dict[str, Any] = {}
+
+ # List all configuration settings (with environment label filter)
+ label_filter = (
+ f"{self.bootstrap.environment}*" if self.bootstrap.environment else None
+ )
+
+ for setting in self.app_config.list_configuration_settings(
+ label_filter=label_filter
+ ):
+ # Convert key path to Python attribute name
+ # e.g., "postgres/host" -> "postgres_host"
+ key = setting.key.replace("/", "_").replace("-", "_").lower()
+
+ # Check if this is a Key Vault reference
+ if (
+ setting.content_type
+ == "application/vnd.microsoft.appconfig.keyvaultref+json"
+ ):
+                # Extract the secret name from the Key Vault reference URI
+                # (dropping any trailing secret-version segment)
+                kv_ref = json.loads(setting.value)
+                uri = kv_ref.get("uri", "")
+                secret_name = uri.split("/secrets/")[-1].split("/")[0]
+ # Load from Key Vault
+ secret_value = await self.key_vault.get_secret(secret_name)
+ if secret_value:
+ config_dict[key] = secret_value
+ else:
+ # Regular configuration value
+ config_dict[key] = setting.value
+
+ logger.info(
+ f"Loaded {len(config_dict)} configuration values "
+ "from Azure App Configuration"
+ )
+
+ return config_dict
diff --git a/core/database.py b/core/database.py
new file mode 100644
index 0000000..fc8d228
--- /dev/null
+++ b/core/database.py
@@ -0,0 +1,69 @@
+"""
+Database connection pool factory and utilities.
+
+Provides database pool creation and verification functions.
+Stub implementation; the real asyncpg pool will live in the memory module.
+"""
+
+from typing import Any
+
+from config import Settings
+from core.app_state import DatabasePool
+from core.logger import get_logger
+
+logger = get_logger(__name__)
+
+
+async def create_database_pool(settings: Settings) -> DatabasePool:
+ """
+ Create database connection pool.
+
+ Args:
+ settings: Application settings
+
+ Returns:
+ Database connection pool
+
+ Note:
+ This is a stub implementation. Real asyncpg pool implementation
+ will be in the memory module.
+ """
+ logger.warning(
+ "create_database_pool: Using stub implementation. "
+ "Real implementation will be in memory module."
+ )
+
+ # Stub implementation - returns a mock pool
+ # Real implementation will use asyncpg.create_pool()
+ class StubPool:
+ async def acquire(self) -> Any:
+ return None
+
+ async def release(self, conn: Any) -> None:
+ pass
+
+ async def close(self) -> None:
+ pass
+
+ async def execute(self, query: str, *args: Any) -> Any:
+ return None
+
+ return StubPool()
+
+
+async def verify_database_connection(pool: DatabasePool | None) -> bool:
+    """
+    Verify database is accessible.
+
+    Args:
+        pool: Database connection pool (may be None before startup completes)
+
+    Returns:
+        True if database is accessible, False otherwise
+    """
+    if pool is None:
+        return False
+    try:
+        await pool.execute("SELECT 1")
+ return True
+ except Exception as e:
+ logger.error(f"Database connection verification failed: {e}")
+ return False
diff --git a/core/exceptions.py b/core/exceptions.py
new file mode 100644
index 0000000..0c6d926
--- /dev/null
+++ b/core/exceptions.py
@@ -0,0 +1,401 @@
+"""
+Custom exception hierarchy for NeroSpatial Backend.
+
+All exceptions inherit from NeroSpatialException and include
+trace_id and user_id context for distributed tracing.
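+
+Example (illustrative):
+
+    raise DatabaseError(
+        "connection refused",
+        db_type="postgres",
+        operation="get_user",
+        trace_id=trace_id,
+    )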
+"""
+
+from typing import Any
+from uuid import UUID
+
+
+class NeroSpatialException(Exception):
+ """
+ Base exception for all NeroSpatial errors.
+
+ Includes trace_id and user_id context for distributed tracing.
+
+ Attributes:
+ message: Human-readable error message
+ trace_id: Distributed trace ID for request tracking
+ user_id: User ID associated with the error (if applicable)
+ context: Additional context as key-value pairs
+ """
+
+ def __init__(
+ self,
+ message: str,
+ trace_id: str | None = None,
+ user_id: UUID | None = None,
+ **kwargs: Any,
+ ):
+ """
+ Initialize exception.
+
+ Args:
+ message: Error message
+ trace_id: Distributed trace ID
+ user_id: User ID (if applicable)
+ **kwargs: Additional context fields
+ """
+ self.message = message
+ self.trace_id = trace_id
+ self.user_id = user_id
+ self.context = kwargs
+ super().__init__(self.message)
+
+ def __str__(self) -> str:
+ """Format exception as string with context."""
+ parts = [self.message]
+ if self.trace_id:
+ parts.append(f"trace_id={self.trace_id}")
+ if self.user_id:
+ parts.append(f"user_id={self.user_id}")
+ if self.context:
+ context_str = ", ".join(f"{k}={v}" for k, v in self.context.items())
+ parts.append(f"context=({context_str})")
+ return " | ".join(parts)
+
+ def __repr__(self) -> str:
+ """Return detailed representation."""
+ return (
+ f"{self.__class__.__name__}("
+ f"message={self.message!r}, "
+ f"trace_id={self.trace_id!r}, "
+ f"user_id={self.user_id!r}, "
+ f"context={self.context!r})"
+ )
+
+
+class AuthenticationError(NeroSpatialException):
+ """
+ JWT validation failed or token expired.
+
+ Raised when:
+ - Token signature is invalid
+ - Token is expired
+ - Token is malformed
+ - Token is blacklisted
+ """
+
+ pass
+
+
+class AuthorizationError(NeroSpatialException):
+ """
+ User lacks required permissions or status.
+
+ Raised when:
+ - User status is not ACTIVE
+ - User lacks required permission (future RBAC)
+ - Account is suspended/blacklisted
+ """
+
+ pass
+
+
+class SessionExpiredError(NeroSpatialException):
+ """
+ Session TTL expired in Redis.
+
+ Raised when:
+ - Session last_activity exceeds TTL
+ - Session not found in Redis (may also raise SessionNotFoundError)
+ """
+
+ def __init__(
+ self,
+ session_id: UUID,
+ trace_id: str | None = None,
+ user_id: UUID | None = None,
+ **kwargs: Any,
+ ):
+ """
+ Initialize session expired error.
+
+ Args:
+ session_id: Expired session ID
+ trace_id: Distributed trace ID
+ user_id: User ID
+ **kwargs: Additional context
+ """
+ self.session_id = session_id
+ super().__init__(
+ f"Session {session_id} has expired",
+ trace_id=trace_id,
+ user_id=user_id,
+ session_id=str(session_id),
+ **kwargs,
+ )
+
+
+class SessionNotFoundError(NeroSpatialException):
+ """
+ Session not found in Redis.
+
+ Raised when:
+ - Session ID doesn't exist in Redis
+ - Session was deleted
+ """
+
+ def __init__(
+ self,
+ session_id: UUID,
+ trace_id: str | None = None,
+ user_id: UUID | None = None,
+ **kwargs: Any,
+ ):
+ """
+ Initialize session not found error.
+
+ Args:
+ session_id: Missing session ID
+ trace_id: Distributed trace ID
+ user_id: User ID
+ **kwargs: Additional context
+ """
+ self.session_id = session_id
+ super().__init__(
+ f"Session {session_id} not found",
+ trace_id=trace_id,
+ user_id=user_id,
+ session_id=str(session_id),
+ **kwargs,
+ )
+
+
+class VLMTimeoutError(NeroSpatialException):
+ """
+ VLM inference exceeded timeout threshold.
+
+ Raised when:
+ - VLM processing takes longer than configured timeout
+ - VLM service is unresponsive
+ """
+
+ def __init__(
+ self,
+ timeout_ms: int,
+ trace_id: str | None = None,
+ user_id: UUID | None = None,
+ **kwargs: Any,
+ ):
+ """
+ Initialize VLM timeout error.
+
+ Args:
+ timeout_ms: Timeout threshold in milliseconds
+ trace_id: Distributed trace ID
+ user_id: User ID
+ **kwargs: Additional context
+ """
+ self.timeout_ms = timeout_ms
+ super().__init__(
+ f"VLM inference timeout after {timeout_ms}ms",
+ trace_id=trace_id,
+ user_id=user_id,
+ timeout_ms=timeout_ms,
+ **kwargs,
+ )
+
+
+class LLMProviderError(NeroSpatialException):
+ """
+ LLM API call failed (network, rate limit, etc.).
+
+ Raised when:
+ - LLM API returns error status
+ - Network timeout
+ - Rate limit exceeded
+ - Provider service unavailable
+ """
+
+ def __init__(
+ self,
+ message: str,
+ provider: str,
+ status_code: int | None = None,
+ trace_id: str | None = None,
+ user_id: UUID | None = None,
+ **kwargs: Any,
+ ):
+ """
+ Initialize LLM provider error.
+
+ Args:
+ message: Error message
+ provider: LLM provider name (e.g., "groq", "gemini")
+ status_code: HTTP status code (if applicable)
+ trace_id: Distributed trace ID
+ user_id: User ID
+ **kwargs: Additional context
+ """
+ self.provider = provider
+ self.status_code = status_code
+ super().__init__(
+ f"{provider}: {message}",
+ trace_id=trace_id,
+ user_id=user_id,
+ provider=provider,
+ status_code=status_code,
+ **kwargs,
+ )
+
+
+class CircuitBreakerOpenError(NeroSpatialException):
+ """
+ Circuit breaker is open, provider unavailable.
+
+ Raised when:
+ - Circuit breaker state is OPEN
+ - Too many failures detected
+ - Provider marked as unhealthy
+ """
+
+ def __init__(
+ self,
+ provider: str,
+ trace_id: str | None = None,
+ user_id: UUID | None = None,
+ **kwargs: Any,
+ ):
+ """
+ Initialize circuit breaker error.
+
+ Args:
+ provider: Provider name (e.g., "groq", "gemini")
+ trace_id: Distributed trace ID
+ user_id: User ID
+ **kwargs: Additional context
+ """
+ self.provider = provider
+ super().__init__(
+ f"Circuit breaker open for {provider}",
+ trace_id=trace_id,
+ user_id=user_id,
+ provider=provider,
+ **kwargs,
+ )
+
+
+class DatabaseError(NeroSpatialException):
+ """
+ Database operation failed.
+
+ Raised when:
+ - Connection failure
+ - Query execution error
+ - Transaction rollback
+ - Constraint violation
+ """
+
+ def __init__(
+ self,
+ message: str,
+ db_type: str,
+ operation: str,
+ trace_id: str | None = None,
+ user_id: UUID | None = None,
+ **kwargs: Any,
+ ):
+ """
+ Initialize database error.
+
+ Args:
+ message: Error message
+ db_type: Database type (e.g., "postgres", "redis", "cassandra")
+ operation: Operation that failed (e.g., "get_user", "create_session")
+ trace_id: Distributed trace ID
+ user_id: User ID
+ **kwargs: Additional context
+ """
+ self.db_type = db_type
+ self.operation = operation
+ super().__init__(
+ f"{db_type} {operation} failed: {message}",
+ trace_id=trace_id,
+ user_id=user_id,
+ db_type=db_type,
+ operation=operation,
+ **kwargs,
+ )
+
+
+class RateLimitExceeded(NeroSpatialException):
+ """
+ User exceeded rate limit.
+
+ Raised when:
+ - Request count exceeds limit in time window
+ - User account locked due to rate limit violations
+ """
+
+ def __init__(
+ self,
+ message: str,
+ limit: int,
+ window_seconds: int,
+ trace_id: str | None = None,
+ user_id: UUID | None = None,
+ **kwargs: Any,
+ ):
+ """
+ Initialize rate limit error.
+
+ Args:
+ message: Error message
+ limit: Rate limit value
+ window_seconds: Time window in seconds
+ trace_id: Distributed trace ID
+ user_id: User ID
+ **kwargs: Additional context
+ """
+ self.limit = limit
+ self.window_seconds = window_seconds
+ super().__init__(
+ f"Rate limit exceeded: {limit} per {window_seconds}s - {message}",
+ trace_id=trace_id,
+ user_id=user_id,
+ limit=limit,
+ window_seconds=window_seconds,
+ **kwargs,
+ )
+
+
+class ValidationError(NeroSpatialException):
+ """
+ Input validation failed.
+
+ Raised when:
+ - Invalid input format
+ - Missing required fields
+ - Value out of range
+ - Type mismatch
+ """
+
+ def __init__(
+ self,
+ message: str,
+ field: str | None = None,
+ trace_id: str | None = None,
+ user_id: UUID | None = None,
+ **kwargs: Any,
+ ):
+ """
+ Initialize validation error.
+
+ Args:
+ message: Error message
+ field: Field name that failed validation (if applicable)
+ trace_id: Distributed trace ID
+ user_id: User ID
+ **kwargs: Additional context
+ """
+ self.field = field
+ super().__init__(
+ message,
+ trace_id=trace_id,
+ user_id=user_id,
+ field=field,
+ **kwargs,
+ )
diff --git a/core/models.py b/core/models.py
deleted file mode 100644
index 2c90805..0000000
--- a/core/models.py
+++ /dev/null
@@ -1,235 +0,0 @@
-"""
-Shared Pydantic models for NeroSpatial Backend.
-
-All models are immutable (frozen) to prevent accidental mutation and ensure
-thread-safe operations across the platform.
-"""
-
-from datetime import UTC, datetime
-from enum import Enum
-from typing import Any
-from uuid import UUID
-
-from pydantic import BaseModel, ConfigDict, Field
-
-# ============================================================================
-# Enums
-# ============================================================================
-
-
-class SessionMode(str, Enum):
- """Session operation mode"""
-
- ACTIVE = "active" # Real-time conversational AI
- PASSIVE = "passive" # Silent observer mode
-
-
-class ControlMessageType(str, Enum):
- """WebSocket control message types"""
-
- SESSION_CONTROL = "session_control"
- ERROR = "error"
- ACK = "ack"
- HEARTBEAT = "heartbeat"
-
-
-class StreamType(int, Enum):
- """Binary frame stream types"""
-
- AUDIO = 0x01
- VIDEO = 0x02
- CONTROL = 0x03
-
-
-class FrameFlags(int, Enum):
- """Binary frame flags"""
-
- END_OF_STREAM = 0x01
- PRIORITY = 0x02
- ERROR = 0x04
-
-
-class UserStatus(str, Enum):
- """User account status"""
-
- ACTIVE = "active"
- BLACKLISTED = "blacklisted"
- SUSPENDED = "suspended"
-
-
-# ============================================================================
-# Session Models
-# ============================================================================
-
-
-class SessionState(BaseModel):
- """Session state stored in Redis"""
-
- session_id: UUID
- user_id: UUID
- mode: SessionMode
- created_at: datetime
- last_activity: datetime
- voice_id: str | None = None
- enable_vision: bool = False
- preferences: dict[str, Any] = Field(default_factory=dict)
-
- model_config = ConfigDict(frozen=True)
-
-
-# ============================================================================
-# User Models
-# ============================================================================
-
-
-class UserContext(BaseModel):
- """Lightweight user context extracted from JWT token"""
-
- user_id: UUID
- email: str
- created_at: datetime
- name: str | None = None
- oauth_provider: str = "google"
-
- model_config = ConfigDict(frozen=True)
-
-
-class OAuthTokens(BaseModel):
- """OAuth token storage"""
-
- access_token: str
- refresh_token: str
- id_token: str | None = None
- expires_at: datetime
- token_type: str = "Bearer"
- scope: str | None = None
-
- model_config = ConfigDict(frozen=True)
-
-
-class TokenBlacklistEntry(BaseModel):
- """Revoked token tracking"""
-
- token_id: str # JWT jti (JWT ID) or token hash
- user_id: UUID
- revoked_at: datetime
- expires_at: datetime # Original token expiration (for cleanup)
-
- model_config = ConfigDict(frozen=True)
-
-
-class User(BaseModel):
- """Full user profile with OAuth integration"""
-
- user_id: UUID
- email: str
- name: str | None = None
- oauth_provider: str = "google"
- status: UserStatus = UserStatus.ACTIVE
- created_at: datetime
- updated_at: datetime
- last_login: datetime | None = None
- oauth_tokens: OAuthTokens | None = None
- picture_url: str | None = None
- locale: str | None = None
-
- model_config = ConfigDict(frozen=True)
-
- # Note: Sessions tracked separately in Redis (not in User model)
-
-
-# ============================================================================
-# Interaction Models
-# ============================================================================
-
-
-class InteractionTurn(BaseModel):
- """Single interaction turn (user query + AI response)"""
-
- turn_id: UUID
- session_id: UUID
- timestamp: datetime
- transcript: str
- scene_description: str | None = None
- llm_response: str
- model_used: str # "groq", "gemini", "ollama"
- latency_ms: int
- tokens_used: int | None = None
-
- model_config = ConfigDict(frozen=True)
-
-
-class ConversationHistory(BaseModel):
- """Last N turns for context retrieval"""
-
- user_id: UUID
- turns: list[InteractionTurn] = Field(default_factory=list)
- max_turns: int = 10
-
- model_config = ConfigDict(frozen=True)
-
- def add_turn(self, turn: InteractionTurn) -> "ConversationHistory":
- """Add turn and maintain max_turns limit"""
- new_turns = [turn] + self.turns
- return ConversationHistory(
- user_id=self.user_id,
- turns=new_turns[: self.max_turns],
- max_turns=self.max_turns,
- )
-
-
-# ============================================================================
-# Control Message Models
-# ============================================================================
-
-
-class ControlMessage(BaseModel):
- """Control message sent via WebSocket (stream_type=0x03)"""
-
- type: ControlMessageType
- action: str | None = (
- None # "start_active_mode", "start_passive_mode", "end_session"
- )
- payload: dict[str, Any] = Field(default_factory=dict)
- timestamp: datetime = Field(default_factory=lambda: datetime.now(UTC))
-
- model_config = ConfigDict(frozen=True)
-
-
-# ============================================================================
-# Binary Frame Models
-# ============================================================================
-
-
-class BinaryFrame(BaseModel):
- """Parsed binary frame from WebSocket"""
-
- stream_type: StreamType
- flags: int
- payload: bytes
- length: int
-
- model_config = ConfigDict(frozen=True)
-
- @classmethod
- def parse(cls, data: bytes) -> "BinaryFrame":
- """Parse 4-byte header + payload"""
- if len(data) < 4:
- raise ValueError("Frame too short")
-
- stream_type = StreamType(data[0])
- flags = data[1]
- length = int.from_bytes(data[2:4], "big")
- payload = data[4 : 4 + length]
-
- if len(payload) != length:
- raise ValueError("Payload length mismatch")
-
- return cls(stream_type=stream_type, flags=flags, payload=payload, length=length)
-
- def to_bytes(self) -> bytes:
- """Serialize to binary frame format"""
- header = bytes(
- [self.stream_type.value, self.flags, *self.length.to_bytes(2, "big")]
- )
- return header + self.payload
diff --git a/core/models/__init__.py b/core/models/__init__.py
new file mode 100644
index 0000000..a189fa5
--- /dev/null
+++ b/core/models/__init__.py
@@ -0,0 +1,76 @@
+"""
+Core models package - re-exports all models for backward compatibility.
+
+All models can be imported from this package:
+ from core.models import User, SessionState, InteractionTurn
+
+Or from domain-specific modules:
+ from core.models.user import User
+ from core.models.session import SessionState
+ from core.models.interaction import InteractionTurn
+ from core.models.protocol import BinaryFrame
+
+Schema Version: 1.0
+"""
+
+# Import all from domain modules
+from core.models.interaction import (
+ # Models
+ ConversationHistory,
+ InteractionTurn,
+)
+from core.models.protocol import (
+ # Models
+ BinaryFrame,
+ ControlMessage,
+ # Enums
+ ControlMessageType,
+ FrameFlags,
+ StreamType,
+)
+from core.models.session import (
+ # Enum
+ SessionMode,
+ # Model
+ SessionState,
+)
+from core.models.user import (
+    AuditAction,
+    AuditLog,
+    OAuthProvider,
+    RefreshToken,
+    TokenBlacklistEntry,
+    TokenRevocationReason,
+    User,
+    UserContext,
+    UserStatus,
+)
+
+__all__ = [
+ # Enums - User & Auth
+ "UserStatus",
+ "OAuthProvider",
+ "TokenRevocationReason",
+ "AuditAction",
+ # Enums - Session & Control
+ "SessionMode",
+ "ControlMessageType",
+ "StreamType",
+ "FrameFlags",
+ # Models - User & Auth
+ "User",
+ "UserContext",
+ "RefreshToken",
+ "TokenBlacklistEntry",
+ "AuditLog",
+ # Models - Session
+ "SessionState",
+ # Models - Interaction
+ "InteractionTurn",
+ "ConversationHistory",
+ # Models - Control & Binary
+ "ControlMessage",
+ "BinaryFrame",
+]
diff --git a/core/models/interaction.py b/core/models/interaction.py
new file mode 100644
index 0000000..46752de
--- /dev/null
+++ b/core/models/interaction.py
@@ -0,0 +1,283 @@
+"""
+Interaction and conversation models.
+
+This module contains models for tracking user interactions, conversation turns,
+and conversation history.
+
+Schema Version: 1.0
+"""
+
+from datetime import UTC, datetime
+from typing import Any
+from uuid import UUID
+
+from pydantic import BaseModel, ConfigDict, Field, field_validator, model_validator
+
+# ============================================================================
+# Models
+# ============================================================================
+
+
+class InteractionTurn(BaseModel):
+ """
+ Single interaction turn (user query + AI response).
+
+ Represents one complete interaction cycle in a conversation.
+ Stored in Cassandra for time-series analysis.
+
+ Attributes:
+ turn_id: Unique identifier for this turn
+ user_id: User who owns this interaction
+ session_id: Session where this interaction occurred
+ timestamp: When the interaction started (UTC, timezone-aware)
+ transcript: User's transcribed speech (non-empty)
+ scene_description: VLM description of visual context (if any)
+ llm_response: AI's response text (non-empty)
+ model_used: LLM provider/model used (e.g., "groq", "gemini", "ollama")
+ latency_ms: Total response latency in milliseconds (>= 0)
+ tokens_used: Number of tokens consumed (if tracked, >= 0)
+ metadata: Extensible JSON field for future additions
+ schema_version: Model schema version for migrations
+ """
+
+ # Primary fields
+ turn_id: UUID
+ user_id: UUID # CRITICAL: Required for Cassandra partition key
+ session_id: UUID
+ timestamp: datetime
+
+ # Content fields
+ transcript: str
+ scene_description: str | None = None
+ llm_response: str
+ model_used: str # "groq", "gemini", "ollama"
+
+ # Metrics
+ latency_ms: int
+ tokens_used: int | None = None
+
+ # Extensibility
+ metadata: dict[str, Any] = Field(default_factory=dict)
+ schema_version: str = "1.0"
+
+ model_config = ConfigDict(frozen=True)
+
+    @field_validator("timestamp", mode="before")
+    @classmethod
+    def ensure_utc(cls, v: Any) -> Any:
+        """Treat naive datetimes as UTC; let Pydantic parse everything else."""
+        if isinstance(v, datetime) and v.tzinfo is None:
+            return v.replace(tzinfo=UTC)
+        return v
+
+ @field_validator("transcript", "llm_response", mode="before")
+ @classmethod
+ def validate_non_empty_string(cls, v: str) -> str:
+ """Validate that string fields are non-empty."""
+ if not v or not v.strip():
+ raise ValueError("String field cannot be empty")
+ return v.strip()
+
+ @field_validator("latency_ms", mode="before")
+ @classmethod
+ def validate_latency(cls, v: int) -> int:
+ """Validate latency is non-negative."""
+ if v < 0:
+ raise ValueError("latency_ms must be >= 0")
+ return v
+
+ @field_validator("tokens_used", mode="before")
+ @classmethod
+ def validate_tokens(cls, v: int | None) -> int | None:
+ """Validate tokens_used is non-negative if provided."""
+ if v is not None and v < 0:
+ raise ValueError("tokens_used must be >= 0 if provided")
+ return v
+
+ def is_recent(self, threshold_seconds: int = 300) -> bool:
+ """
+ Check if turn is recent (within threshold).
+
+ Args:
+ threshold_seconds: Threshold in seconds (default 300 = 5 min)
+
+ Returns:
+ True if turn occurred within threshold
+ """
+ age = self.calculate_age_seconds()
+ return age < threshold_seconds
+
+ def calculate_age_seconds(self) -> int:
+ """
+ Calculate age of turn in seconds.
+
+ Returns:
+ Age in seconds (0 if timestamp is in the future)
+ """
+ elapsed = (datetime.now(UTC) - self.timestamp).total_seconds()
+ return max(0, int(elapsed))
+
+ def get_total_tokens(self) -> int:
+ """
+ Return total tokens consumed.
+
+ Returns:
+ Number of tokens (0 if not tracked)
+ """
+ return self.tokens_used if self.tokens_used is not None else 0
+
+
+class ConversationHistory(BaseModel):
+ """
+ Last N turns for context retrieval.
+
+ Maintained in Redis for fast access during active sessions.
+ Immutable - add_turn returns a new instance.
+
+ Redis Key Pattern: `history:{user_id}`
+
+ Attributes:
+ user_id: User who owns this history
+ turns: List of recent interaction turns (newest first)
+ max_turns: Maximum number of turns to retain (must be > 0, <= 100)
+ metadata: Extensible JSON field for future additions
+ schema_version: Model schema version for migrations
+ last_updated: Timestamp of last update (UTC, timezone-aware)
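+
+    Example (illustrative; `turn` is an InteractionTurn carrying this
+    history's user_id):
+        history = ConversationHistory(user_id=some_user_id)
+        history = history.add_turn(turn)   # returns a NEW instance
+        history.get_newest_turn() is turn  # True - turns are kept newest first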
+ """
+
+ # Primary fields
+ user_id: UUID
+ turns: list[InteractionTurn] = Field(default_factory=list)
+ max_turns: int = 10
+
+ # Extensibility
+ metadata: dict[str, Any] = Field(default_factory=dict)
+ schema_version: str = "1.0"
+ last_updated: datetime = Field(default_factory=lambda: datetime.now(UTC))
+
+ model_config = ConfigDict(frozen=True)
+
+ @field_validator("max_turns", mode="before")
+ @classmethod
+ def validate_max_turns(cls, v: int) -> int:
+ """Validate max_turns is within reasonable bounds."""
+ if v <= 0:
+ raise ValueError("max_turns must be > 0")
+ if v > 100:
+ raise ValueError("max_turns must be <= 100")
+ return v
+
+    @field_validator("last_updated", mode="before")
+    @classmethod
+    def ensure_utc(cls, v: Any) -> Any:
+        """Treat naive datetimes as UTC; let Pydantic parse everything else."""
+        if isinstance(v, datetime) and v.tzinfo is None:
+            return v.replace(tzinfo=UTC)
+        return v
+
+ @model_validator(mode="after")
+ def validate_turns_user_id(self) -> "ConversationHistory":
+ """Validate that all turns belong to the same user_id."""
+ for turn in self.turns:
+ if turn.user_id != self.user_id:
+ raise ValueError(
+ f"Turn {turn.turn_id} belongs to user {turn.user_id}, "
+ f"but history belongs to user {self.user_id}"
+ )
+ return self
+
+ def add_turn(self, turn: InteractionTurn) -> "ConversationHistory":
+ """
+ Add turn and maintain max_turns limit.
+
+ Validates that turn's user_id matches history's user_id.
+
+ Args:
+ turn: InteractionTurn to add
+
+ Returns:
+ New ConversationHistory instance (immutable pattern)
+
+ Raises:
+ ValueError: If turn's user_id doesn't match history's user_id
+ """
+ if turn.user_id != self.user_id:
+ raise ValueError(
+ f"Cannot add turn for user {turn.user_id} to history {self.user_id}"
+ )
+
+ new_turns = [turn, *self.turns]
+ return ConversationHistory(
+ user_id=self.user_id,
+ turns=new_turns[: self.max_turns],
+ max_turns=self.max_turns,
+ metadata=self.metadata,
+ schema_version=self.schema_version,
+ last_updated=datetime.now(UTC),
+ )
+
+ def is_empty(self) -> bool:
+ """
+ Check if history has no turns.
+
+ Returns:
+ True if history is empty
+ """
+ return len(self.turns) == 0
+
+ def get_oldest_turn(self) -> InteractionTurn | None:
+ """
+ Get oldest turn (last in list, since turns are newest first).
+
+ Returns:
+ Oldest InteractionTurn or None if empty
+ """
+ return self.turns[-1] if self.turns else None
+
+ def get_newest_turn(self) -> InteractionTurn | None:
+ """
+ Get newest turn (first in list, since turns are newest first).
+
+ Returns:
+ Newest InteractionTurn or None if empty
+ """
+ return self.turns[0] if self.turns else None
+
+ def get_turns_count(self) -> int:
+ """
+ Return number of turns.
+
+ Returns:
+ Number of turns (alias for len(turns))
+ """
+ return len(self.turns)
+
+ def should_trim(self) -> bool:
+ """
+ Check if history exceeds max_turns (defensive check).
+
+ Returns:
+ True if history has more turns than max_turns
+ """
+ return len(self.turns) > self.max_turns
+
+ def get_total_tokens(self) -> int:
+ """
+ Sum of all tokens across turns.
+
+ Returns:
+ Total tokens consumed across all turns
+ """
+ return sum(turn.get_total_tokens() for turn in self.turns)
+
+ def get_average_latency(self) -> float | None:
+ """
+ Average latency across turns.
+
+ Returns:
+ Average latency in milliseconds, or None if empty
+ """
+ if not self.turns:
+ return None
+ total_latency = sum(turn.latency_ms for turn in self.turns)
+ return total_latency / len(self.turns)
diff --git a/core/models/protocol.py b/core/models/protocol.py
new file mode 100644
index 0000000..275201a
--- /dev/null
+++ b/core/models/protocol.py
@@ -0,0 +1,363 @@
+"""
+WebSocket protocol models.
+
+This module contains models for WebSocket binary frame protocol and control messages.
+
+Schema Version: 1.0
+"""
+
+from datetime import UTC, datetime
+from enum import Enum
+from typing import Any, ClassVar
+
+from pydantic import BaseModel, ConfigDict, Field, field_validator, model_validator
+
+# ============================================================================
+# Enums
+# ============================================================================
+
+
+class ControlMessageType(str, Enum):
+ """WebSocket control message types."""
+
+ SESSION_CONTROL = "session_control"
+ ERROR = "error"
+ ACK = "ack"
+ HEARTBEAT = "heartbeat"
+
+
+class StreamType(int, Enum):
+ """Binary frame stream types."""
+
+ AUDIO = 0x01
+ VIDEO = 0x02
+ CONTROL = 0x03
+
+
+class FrameFlags(int, Enum):
+ """Binary frame flags."""
+
+ END_OF_STREAM = 0x01
+ PRIORITY = 0x02
+ ERROR = 0x04
+
+
+# ============================================================================
+# Models
+# ============================================================================
+
+
+class ControlMessage(BaseModel):
+ """
+ Control message sent via WebSocket (stream_type=0x03).
+
+ Used for session management, heartbeats, and error reporting.
+
+ Allowed Actions by Type:
+ - SESSION_CONTROL: "start_active_mode", "start_passive_mode", "end_session"
+ - ERROR: None or error-specific action
+ - ACK: None or ack-specific action
+ - HEARTBEAT: None
+
+ Attributes:
+ type: Type of control message
+ action: Specific action (required for SESSION_CONTROL, optional for others)
+ payload: Additional data as JSON
+ timestamp: When the message was created (UTC, timezone-aware)
+ metadata: Extensible JSON field for future additions
+ schema_version: Model schema version for migrations
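+
+    Example (illustrative values):
+        >>> msg = ControlMessage(
+        ...     type=ControlMessageType.SESSION_CONTROL, action="end_session"
+        ... )
+        >>> msg.is_session_control()
+        True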
+ """
+
+ # Primary fields
+ type: ControlMessageType
+ action: str | None = None
+ payload: dict[str, Any] = Field(default_factory=dict)
+ timestamp: datetime = Field(default_factory=lambda: datetime.now(UTC))
+
+ # Extensibility
+ metadata: dict[str, Any] = Field(default_factory=dict)
+ schema_version: str = "1.0"
+
+ model_config = ConfigDict(frozen=True)
+
+ # Valid actions for SESSION_CONTROL messages
+    _SESSION_CONTROL_ACTIONS: ClassVar[set[str]] = {
+ "start_active_mode",
+ "start_passive_mode",
+ "end_session",
+ }
+
+    @field_validator("timestamp", mode="before")
+    @classmethod
+    def ensure_utc(cls, v: Any) -> Any:
+        """Treat naive datetimes as UTC; let Pydantic parse everything else."""
+        if isinstance(v, datetime) and v.tzinfo is None:
+            return v.replace(tzinfo=UTC)
+        return v
+
+ @model_validator(mode="after")
+ def validate_action(self) -> "ControlMessage":
+ """Validate action field based on message type."""
+ if self.type == ControlMessageType.SESSION_CONTROL:
+ if self.action is None:
+ raise ValueError(
+ "action is required for SESSION_CONTROL messages. "
+ "Allowed values: start_active_mode, start_passive_mode, end_session"
+ )
+ if self.action not in self._SESSION_CONTROL_ACTIONS:
+ raise ValueError(
+ f"Invalid action '{self.action}' for SESSION_CONTROL. "
+ f"Allowed values: {', '.join(self._SESSION_CONTROL_ACTIONS)}"
+ )
+ elif self.type == ControlMessageType.HEARTBEAT:
+ if self.action is not None:
+ raise ValueError("action must be None for HEARTBEAT messages")
+ # ERROR and ACK can have optional actions, no validation needed
+
+ return self
+
+ def is_session_control(self) -> bool:
+ """Check if message type is SESSION_CONTROL."""
+ return self.type == ControlMessageType.SESSION_CONTROL
+
+ def is_error(self) -> bool:
+ """Check if message type is ERROR."""
+ return self.type == ControlMessageType.ERROR
+
+ def is_heartbeat(self) -> bool:
+ """Check if message type is HEARTBEAT."""
+ return self.type == ControlMessageType.HEARTBEAT
+
+ def is_ack(self) -> bool:
+ """Check if message type is ACK."""
+ return self.type == ControlMessageType.ACK
+
+ def get_action_type(self) -> str | None:
+ """
+ Return action type (for SESSION_CONTROL messages).
+
+ Returns:
+ Action string or None
+ """
+ return self.action
+
+ def has_payload(self) -> bool:
+ """
+ Check if payload is non-empty.
+
+ Returns:
+ True if payload has content
+ """
+ return len(self.payload) > 0
+
+
+class BinaryFrame(BaseModel):
+ """
+ Parsed binary frame from WebSocket.
+
+ Binary frame protocol for efficient audio/video streaming.
+
+ Frame format:
+ [Header: 4 bytes] [Payload: N bytes]
+ - Byte 0: Stream Type (0x01=Audio, 0x02=Video, 0x03=Control)
+ - Byte 1: Flags (bitwise OR of FrameFlags values)
+ - Bytes 2-3: Payload Length (uint16, big-endian, max 65535)
+
+ Attributes:
+ stream_type: Type of stream (AUDIO, VIDEO, CONTROL)
+ flags: Frame flags (bitwise OR of FrameFlags enum values, 0-255)
+ payload: Raw payload bytes
+ length: Payload length in bytes (must match payload size, max 65535)
+ metadata: Extensible JSON field for future additions
+ schema_version: Model schema version for migrations
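+
+    Example (illustrative round-trip: an AUDIO frame with END_OF_STREAM set):
+        >>> raw = bytes([0x01, 0x01, 0x00, 0x02]) + b"hi"
+        >>> frame = BinaryFrame.parse(raw)
+        >>> frame.is_audio(), frame.is_end_of_stream(), frame.payload
+        (True, True, b'hi')
+        >>> frame.to_bytes() == raw
+        True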
+ """
+
+ # Primary fields
+ stream_type: StreamType
+ flags: int
+ payload: bytes
+ length: int
+
+ # Extensibility
+ metadata: dict[str, Any] = Field(default_factory=dict)
+ schema_version: str = "1.0"
+
+ model_config = ConfigDict(frozen=True)
+
+ # Maximum payload size (uint16 max)
+ MAX_PAYLOAD_SIZE: ClassVar[int] = 65535
+
+ @field_validator("flags", mode="before")
+ @classmethod
+ def validate_flags(cls, v: int) -> int:
+ """Validate flags are within valid range (0-255)."""
+ if not 0 <= v <= 255:
+ raise ValueError(f"flags must be between 0 and 255, got {v}")
+ return v
+
+ @field_validator("length", mode="before")
+ @classmethod
+ def validate_length(cls, v: int) -> int:
+ """Validate length is within valid range (0-65535)."""
+ if not 0 <= v <= cls.MAX_PAYLOAD_SIZE:
+ raise ValueError(
+ f"length must be between 0 and {cls.MAX_PAYLOAD_SIZE}, got {v}"
+ )
+ return v
+
+ @model_validator(mode="after")
+ def validate_payload_integrity(self) -> "BinaryFrame":
+ """Validate that length matches actual payload size."""
+ if len(self.payload) != self.length:
+ raise ValueError(
+ f"Payload length mismatch: length={self.length}, "
+ f"actual payload size={len(self.payload)}"
+ )
+ if len(self.payload) > self.MAX_PAYLOAD_SIZE:
+ raise ValueError(
+ f"Payload size {len(self.payload)} exceeds maximum "
+ f"{self.MAX_PAYLOAD_SIZE} bytes"
+ )
+ return self
+
+ def has_flag(self, flag: FrameFlags) -> bool:
+ """
+ Check if specific flag is set.
+
+ Args:
+ flag: FrameFlags enum value to check
+
+ Returns:
+ True if flag is set
+ """
+ return bool(self.flags & flag.value)
+
+ def is_control(self) -> bool:
+ """Check if stream_type is CONTROL."""
+ return self.stream_type == StreamType.CONTROL
+
+ def is_audio(self) -> bool:
+ """Check if stream_type is AUDIO."""
+ return self.stream_type == StreamType.AUDIO
+
+ def is_video(self) -> bool:
+ """Check if stream_type is VIDEO."""
+ return self.stream_type == StreamType.VIDEO
+
+ def is_end_of_stream(self) -> bool:
+ """Check if END_OF_STREAM flag is set."""
+ return self.has_flag(FrameFlags.END_OF_STREAM)
+
+ def is_priority(self) -> bool:
+ """Check if PRIORITY flag is set."""
+ return self.has_flag(FrameFlags.PRIORITY)
+
+ def has_error(self) -> bool:
+ """Check if ERROR flag is set."""
+ return self.has_flag(FrameFlags.ERROR)
+
+ def get_total_size(self) -> int:
+ """
+ Return total frame size (header + payload).
+
+ Returns:
+ Total size in bytes (4-byte header + payload length)
+ """
+ return 4 + self.length
+
+ def validate_integrity(self) -> bool:
+ """
+ Validate that length matches payload (defensive check).
+
+ Returns:
+ True if integrity is valid
+
+ Raises:
+ ValueError: If integrity check fails
+ """
+ if len(self.payload) != self.length:
+ raise ValueError(
+ f"Integrity check failed: length={self.length}, "
+ f"actual payload size={len(self.payload)}"
+ )
+ return True
+
+ @classmethod
+ def parse(cls, data: bytes) -> "BinaryFrame":
+ """
+ Parse 4-byte header + payload from raw bytes.
+
+ Args:
+ data: Raw bytes containing header and payload
+
+ Returns:
+ Parsed BinaryFrame instance
+
+ Raises:
+ ValueError: If frame is too short, length mismatch, or exceeds max size
+ """
+ if len(data) < 4:
+ raise ValueError(
+ f"Frame too short: expected at least 4 bytes (header), got {len(data)}"
+ )
+
+ try:
+ stream_type = StreamType(data[0])
+ except ValueError as e:
+            raise ValueError(f"Invalid stream type: {data[0]:#04x}") from e
+
+ flags = data[1]
+ length = int.from_bytes(data[2:4], "big")
+
+ # Validate length before accessing payload
+ if length > cls.MAX_PAYLOAD_SIZE:
+ raise ValueError(
+ f"Payload length {length} exceeds maximum {cls.MAX_PAYLOAD_SIZE} bytes"
+ )
+
+ if len(data) < 4 + length:
+ raise ValueError(
+ f"Incomplete frame: expected {4 + length} bytes, got {len(data)}"
+ )
+
+ payload = data[4 : 4 + length]
+
+ if len(payload) != length:
+ raise ValueError(
+ f"Payload length mismatch: header says {length}, "
+ f"actual payload size is {len(payload)}"
+ )
+
+ return cls(
+ stream_type=stream_type,
+ flags=flags,
+ payload=payload,
+ length=length,
+ )
+
+ def to_bytes(self) -> bytes:
+ """
+ Serialize to binary frame format.
+
+ Validates integrity before serialization.
+
+ Returns:
+ Raw bytes ready for WebSocket transmission
+
+ Raises:
+ ValueError: If integrity validation fails
+ """
+        # Validate integrity before serialization (raises ValueError on
+        # mismatch, so a second length check here would be unreachable)
+        self.validate_integrity()
+
+ header = bytes(
+ [self.stream_type.value, self.flags, *self.length.to_bytes(2, "big")]
+ )
+ return header + self.payload
diff --git a/core/models/session.py b/core/models/session.py
new file mode 100644
index 0000000..8d007ed
--- /dev/null
+++ b/core/models/session.py
@@ -0,0 +1,130 @@
+"""
+Session management models.
+
+This module contains models related to WebSocket session state and management.
+
+Schema Version: 1.0
+"""
+
+from datetime import UTC, datetime
+from enum import Enum
+from typing import Any
+from uuid import UUID
+
+from pydantic import BaseModel, ConfigDict, Field, IPvAnyAddress, field_validator
+
+# ============================================================================
+# Enums
+# ============================================================================
+
+
+class SessionMode(str, Enum):
+ """Session operation mode."""
+
+ ACTIVE = "active" # Real-time conversational AI
+ PASSIVE = "passive" # Silent observer mode
+
+
+# ============================================================================
+# Models
+# ============================================================================
+
+
+class SessionState(BaseModel):
+ """
+ Session state stored in Redis.
+
+ Represents an active WebSocket session for a user.
+
+ Attributes:
+ session_id: Unique session identifier
+ user_id: User who owns this session
+ mode: Current session mode (ACTIVE or PASSIVE)
+ created_at: Session creation timestamp (UTC)
+ last_activity: Last activity timestamp for TTL extension
+ voice_id: Selected voice for TTS (if any)
+ enable_vision: Whether vision processing is enabled
+ preferences: User preferences for this session
+ metadata: Extensible JSON field for future additions
+ schema_version: Model schema version for migrations
+ device_info: Optional device/client information
+ ip_address: Optional IP address for security tracking
+ user_agent: Optional user agent string for tracking
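+
+    Example (illustrative immutable-update pattern):
+        refreshed = state.update_activity()      # new instance; `state` unchanged
+        refreshed.calculate_ttl_remaining(3600)  # ~3600 right after activity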
+ """
+
+ # Primary fields
+ session_id: UUID
+ user_id: UUID
+ mode: SessionMode
+
+ # Timestamps (UTC)
+ created_at: datetime
+ last_activity: datetime
+
+ # Configuration
+ voice_id: str | None = None
+ enable_vision: bool = False
+
+ # Extensibility
+ preferences: dict[str, Any] = Field(default_factory=dict)
+ metadata: dict[str, Any] = Field(default_factory=dict)
+ schema_version: str = "1.0"
+
+ # Optional tracking
+ device_info: dict[str, Any] | None = None
+ ip_address: IPvAnyAddress | None = None
+ user_agent: str | None = None
+
+ model_config = ConfigDict(frozen=True)
+
+    @field_validator("created_at", "last_activity", mode="before")
+    @classmethod
+    def ensure_utc(cls, v: Any) -> Any:
+        """Treat naive datetimes as UTC; let Pydantic parse everything else."""
+        if isinstance(v, datetime) and v.tzinfo is None:
+            return v.replace(tzinfo=UTC)
+        return v
+
+ def is_active(self, ttl_seconds: int = 3600) -> bool:
+ """Check if session is still active (not expired)."""
+ return not self.is_expired(ttl_seconds)
+
+ def is_expired(self, ttl_seconds: int = 3600) -> bool:
+ """Check if session has expired based on TTL."""
+ elapsed = (datetime.now(UTC) - self.last_activity).total_seconds()
+ return elapsed > ttl_seconds
+
+ def update_activity(self) -> "SessionState":
+ """
+ Return new SessionState with updated last_activity.
+
+ Returns a new SessionState instance (immutable pattern).
+ """
+ return SessionState(**{**self.model_dump(), "last_activity": datetime.now(UTC)})
+
+ def calculate_ttl_remaining(self, ttl_seconds: int = 3600) -> int:
+ """
+ Calculate remaining TTL in seconds.
+
+ Args:
+ ttl_seconds: Total TTL in seconds (default 3600 = 1 hour)
+
+ Returns:
+ Remaining TTL in seconds (0 if expired)
+ """
+ elapsed = (datetime.now(UTC) - self.last_activity).total_seconds()
+ remaining = ttl_seconds - elapsed
+ return max(0, int(remaining))
+
+ def should_extend_ttl(self, activity_threshold_seconds: int = 300) -> bool:
+ """
+ Check if TTL should be extended (activity within threshold).
+
+ Args:
+ activity_threshold_seconds: Threshold in seconds (default 300 = 5 min)
+
+ Returns:
+ True if activity is recent enough to warrant TTL extension
+ """
+ elapsed = (datetime.now(UTC) - self.last_activity).total_seconds()
+ return elapsed < activity_threshold_seconds
diff --git a/core/models/user.py b/core/models/user.py
new file mode 100644
index 0000000..6975580
--- /dev/null
+++ b/core/models/user.py
@@ -0,0 +1,314 @@
+"""
+User and Authentication models.
+
+This module contains all models related to user management, authentication,
+and authorization including OAuth, tokens, and audit logging.
+
+Schema Version: 1.0
+"""
+
+from datetime import UTC, datetime
+from enum import Enum
+from typing import Any
+from uuid import UUID
+
+from pydantic import (
+ BaseModel,
+ ConfigDict,
+ EmailStr,
+ Field,
+ HttpUrl,
+ IPvAnyAddress,
+ field_validator,
+)
+
+# ============================================================================
+# Enums
+# ============================================================================
+
+
+class UserStatus(str, Enum):
+ """
+ User account status with defined transition rules.
+
+ State Transitions:
+ - PENDING_VERIFICATION -> ACTIVE (after email verification)
+ - ACTIVE -> LOCKED (rate limit exceeded, auto-expires)
+ - ACTIVE -> SUSPENDED (admin action)
+ - ACTIVE -> BLACKLISTED (security violation, permanent)
+ - LOCKED -> ACTIVE (after lockout period)
+ - SUSPENDED -> ACTIVE (admin action)
+ """
+
+ ACTIVE = "active"
+ PENDING_VERIFICATION = "pending_verification"
+ SUSPENDED = "suspended"
+ BLACKLISTED = "blacklisted"
+ LOCKED = "locked" # Temporary lock (rate limit violations)
+
+
+class OAuthProvider(str, Enum):
+ """Supported OAuth providers for authentication."""
+
+ GOOGLE = "google"
+ GITHUB = "github"
+ MICROSOFT = "microsoft"
+
+
+class TokenRevocationReason(str, Enum):
+ """Reasons for token revocation/blacklisting."""
+
+ LOGOUT = "logout" # User initiated logout
+ REFRESH = "refresh" # Token rotated during refresh
+ SECURITY = "security" # Security concern (password change, suspicious activity)
+ ADMIN = "admin" # Admin initiated revocation
+ EXPIRED = "expired" # Token naturally expired
+
+
+class AuditAction(str, Enum):
+ """Audit log action types for security event tracking."""
+
+ LOGIN = "login"
+ LOGOUT = "logout"
+ TOKEN_REFRESH = "token_refresh"
+ PASSWORD_CHANGE = "password_change"
+ PROFILE_UPDATE = "profile_update"
+ ACCOUNT_DELETE = "account_delete"
+ STATUS_CHANGE = "status_change"
+ RATE_LIMIT_EXCEEDED = "rate_limit_exceeded"
+
+
+# ============================================================================
+# Models
+# ============================================================================
+
+
+class User(BaseModel):
+ """
+ Full user profile with OAuth integration.
+
+ Stored in PostgreSQL. This is the authoritative user record.
+
+ Attributes:
+ user_id: Unique identifier (UUID)
+ email: User's email address (validated format)
+ name: Display name (optional)
+ oauth_provider: OAuth provider used for authentication
+ oauth_sub: OAuth provider's subject ID for the user
+ status: Current account status
+ created_at: Account creation timestamp (UTC)
+ updated_at: Last update timestamp (UTC)
+ last_login: Last successful login timestamp (UTC)
+ deleted_at: Soft delete timestamp (UTC), None if active
+ picture_url: Profile picture URL (validated)
+ locale: User's locale preference (e.g., "en", "en-US")
+ metadata: Extensible JSON field for future additions
+ schema_version: Model schema version for migrations
+ """
+
+ # Primary fields
+ user_id: UUID
+ email: EmailStr
+ name: str | None = None
+
+ # OAuth fields
+ oauth_provider: OAuthProvider
+ oauth_sub: str | None = None
+
+ # Status & timestamps
+ status: UserStatus = UserStatus.ACTIVE
+ created_at: datetime
+ updated_at: datetime
+ last_login: datetime | None = None
+ deleted_at: datetime | None = None # Soft delete support
+
+ # Profile fields
+ picture_url: HttpUrl | None = None
+ locale: str = "en"
+
+ # Extensibility
+ metadata: dict[str, Any] = Field(default_factory=dict)
+ schema_version: str = "1.0"
+
+ model_config = ConfigDict(frozen=True)
+
+ @field_validator("locale")
+ @classmethod
+ def validate_locale(cls, v: str) -> str:
+ """Validate locale format and normalize to lowercase."""
+ if len(v) > 10:
+ raise ValueError("Locale must be max 10 characters")
+ return v.lower()
+
+    @field_validator(
+        "created_at", "updated_at", "last_login", "deleted_at", mode="before"
+    )
+    @classmethod
+    def ensure_utc(cls, v: Any) -> Any:
+        """Treat naive datetimes as UTC; let Pydantic handle None and strings."""
+        if isinstance(v, datetime) and v.tzinfo is None:
+            return v.replace(tzinfo=UTC)
+        return v
+
+ def is_active(self) -> bool:
+ """Check if user account is active and not deleted."""
+ return self.status == UserStatus.ACTIVE and self.deleted_at is None
+
+ def is_deleted(self) -> bool:
+ """Check if user account is soft-deleted."""
+ return self.deleted_at is not None
+
+
+class UserContext(BaseModel):
+ """
+ Lightweight user context extracted from JWT token.
+
+ Cached in Redis for fast access during request processing.
+ This model represents the claims structure of the JWT.
+
+ Attributes:
+ user_id: User's unique identifier
+ email: User's email address
+ name: Display name (optional)
+ oauth_provider: OAuth provider used
+ status: Current account status (for authorization checks)
+ token_id: JWT jti (JWT ID) claim for blacklist checking
+ issued_at: Token issue time (iat claim)
+ expires_at: Token expiration time (exp claim)
+ session_id: Associated WebSocket session ID (if connected)
+ """
+
+ # User identity
+ user_id: UUID
+ email: EmailStr
+ name: str | None = None
+
+ # Auth metadata
+ oauth_provider: OAuthProvider
+ status: UserStatus = UserStatus.ACTIVE
+
+ # Token metadata (from JWT claims)
+ token_id: str # JWT jti claim
+ issued_at: datetime # JWT iat claim
+ expires_at: datetime # JWT exp claim
+
+ # Session info (optional, set when WebSocket connected)
+ session_id: UUID | None = None
+
+ model_config = ConfigDict(frozen=True)
+
+ def is_active(self) -> bool:
+ """Check if user can perform actions."""
+ return self.status == UserStatus.ACTIVE
+
+ def is_expired(self) -> bool:
+ """Check if token is expired."""
+ return datetime.now(UTC) > self.expires_at
+
+ def is_valid(self) -> bool:
+ """Check if context is valid (active user, not expired)."""
+ return self.is_active() and not self.is_expired()
+
+
+class RefreshToken(BaseModel):
+ """
+ Refresh token for token rotation.
+
+ Stored in PostgreSQL as SHA-256 hash. Supports token rotation
+ with tracking of previous tokens for security analysis.
+
+ Attributes:
+ token_id: Unique identifier for this refresh token
+ user_id: Owner of this token
+ token_hash: SHA-256 hash of the actual token value
+ expires_at: Token expiration timestamp (UTC)
+ created_at: Token creation timestamp (UTC)
+ rotated_at: When this token was rotated (replaced), None if still active
+ previous_token_id: Previous token in rotation chain (for audit)
+ ip_address: IP address where token was issued
+ user_agent: User agent string where token was issued
+ """
+
+ token_id: UUID
+ user_id: UUID
+ token_hash: str # SHA-256 hash, never store raw token
+ expires_at: datetime
+ created_at: datetime
+ rotated_at: datetime | None = None
+ previous_token_id: UUID | None = None
+ ip_address: IPvAnyAddress | None = None
+ user_agent: str | None = None
+
+ model_config = ConfigDict(frozen=True)
+
+ def is_expired(self) -> bool:
+ """Check if refresh token is expired."""
+ return datetime.now(UTC) > self.expires_at
+
+ def is_rotated(self) -> bool:
+ """Check if token has been rotated (replaced)."""
+ return self.rotated_at is not None
+
+ def is_valid(self) -> bool:
+ """Check if token is valid (not expired, not rotated)."""
+ return not self.is_expired() and not self.is_rotated()
+
+
+class TokenBlacklistEntry(BaseModel):
+ """
+ Revoked token tracking for security.
+
+ Stored in both Redis (fast lookup) and PostgreSQL (persistence).
+ Redis entry has TTL matching original token expiration.
+
+ Attributes:
+ token_id: JWT jti (JWT ID) claim
+ user_id: User who owned this token
+ revoked_at: When the token was revoked (UTC)
+ expires_at: Original token expiration (for cleanup scheduling)
+ reason: Why the token was revoked
+ ip_address: IP address where revocation occurred (audit)
+ """
+
+ token_id: str # JWT jti claim
+ user_id: UUID
+ revoked_at: datetime
+ expires_at: datetime
+ reason: TokenRevocationReason
+ ip_address: IPvAnyAddress | None = None
+
+ model_config = ConfigDict(frozen=True)
+
+ def is_cleanup_ready(self) -> bool:
+ """Check if entry can be cleaned up (original token expired)."""
+ return datetime.now(UTC) > self.expires_at
+
+
+class AuditLog(BaseModel):
+ """
+ Audit trail for security events.
+
+ Stored in PostgreSQL for compliance and security analysis.
+ User ID is nullable to handle cases where user is deleted.
+
+ Attributes:
+ log_id: Unique identifier for this log entry
+ user_id: User who performed the action (None if user deleted)
+ action: Type of action performed
+ details: Additional context as JSON
+ ip_address: Client IP address
+ user_agent: Client user agent string
+ created_at: When the action occurred (UTC)
+ """
+
+ log_id: UUID
+ user_id: UUID | None
+ action: AuditAction
+ details: dict[str, Any] = Field(default_factory=dict)
+ ip_address: IPvAnyAddress | None = None
+ user_agent: str | None = None
+ created_at: datetime
+
+ model_config = ConfigDict(frozen=True)
diff --git a/core/redis.py b/core/redis.py
new file mode 100644
index 0000000..a4afc6f
--- /dev/null
+++ b/core/redis.py
@@ -0,0 +1,69 @@
+"""
+Redis client factory and utilities.
+
+Provides Redis client creation and verification functions.
+Stub implementation - the real async Redis (redis.asyncio) implementation lives in the memory module.
+"""
+
+from config import Settings
+from core.app_state import RedisClient
+from core.logger import get_logger
+
+logger = get_logger(__name__)
+
+
+async def create_redis_client(settings: Settings) -> RedisClient:
+ """
+ Create Redis client.
+
+ Args:
+ settings: Application settings
+
+ Returns:
+ Redis client
+
+    Note:
+        This is a stub implementation. The real async Redis implementation
+        (redis.asyncio) will live in the memory module.
+ """
+ logger.warning(
+ "create_redis_client: Using stub implementation. "
+ "Real implementation will be in memory module."
+ )
+
+ # Stub implementation - returns a mock client
+    # Real implementation will use redis.asyncio.from_url() (aioredis was merged into redis-py)
+ class StubClient:
+ async def get(self, key: str) -> str | None:
+ return None
+
+ async def setex(self, key: str, ttl: int, value: str) -> None:
+ pass
+
+ async def delete(self, key: str) -> None:
+ pass
+
+ async def ping(self) -> bool:
+ return True
+
+ async def close(self) -> None:
+ pass
+
+ return StubClient()
+
+
+async def verify_redis_connection(client: RedisClient) -> bool:
+ """
+ Verify Redis is accessible.
+
+ Args:
+ client: Redis client
+
+ Returns:
+ True if Redis is accessible, False otherwise
+ """
+ try:
+ return await client.ping()
+ except Exception as e:
+ logger.error(f"Redis connection verification failed: {e}")
+ return False
diff --git a/core/telemetry.py b/core/telemetry.py
new file mode 100644
index 0000000..808acea
--- /dev/null
+++ b/core/telemetry.py
@@ -0,0 +1,254 @@
+"""
+OpenTelemetry instrumentation for distributed tracing and metrics.
+
+Provides telemetry setup, span creation, and metric recording helpers.
+"""
+
+from typing import Any
+
+from opentelemetry import metrics, trace
+from opentelemetry.exporter.otlp.proto.grpc.metric_exporter import OTLPMetricExporter
+from opentelemetry.exporter.otlp.proto.grpc.trace_exporter import OTLPSpanExporter
+from opentelemetry.sdk.metrics import MeterProvider
+from opentelemetry.sdk.metrics.export import PeriodicExportingMetricReader
+from opentelemetry.sdk.resources import Resource
+from opentelemetry.sdk.trace import TracerProvider
+from opentelemetry.sdk.trace.export import BatchSpanProcessor
+
+from core.logger import get_logger
+
+logger = get_logger(__name__)
+
+
+class TelemetryManager:
+ """
+ OpenTelemetry instrumentation manager.
+
+ Handles distributed tracing, metrics, and logging integration.
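+
+    Example (illustrative wiring; endpoint and attribute values are arbitrary):
+        telemetry = TelemetryManager("nerospatial-gateway", "http://localhost:4317")
+        with telemetry.get_tracer().start_as_current_span("llm.query"):
+            telemetry.record_metric(Metrics.LLM_TTFT, 0.42, {"model": "groq"})
+        telemetry.shutdown()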
+ """
+
+ def __init__(
+ self,
+ service_name: str,
+ otlp_endpoint: str,
+ environment: str = "production",
+ enable_tracing: bool = True,
+ enable_metrics: bool = True,
+ ):
+ """
+ Initialize telemetry.
+
+ Args:
+ service_name: Service name (e.g., "nerospatial-gateway")
+ otlp_endpoint: OTLP gRPC endpoint (e.g., "http://jaeger:4317")
+ environment: Deployment environment (e.g., "production", "development")
+ enable_tracing: Enable distributed tracing
+ enable_metrics: Enable metrics collection
+ """
+ self.service_name = service_name
+ self.otlp_endpoint = otlp_endpoint
+ self.environment = environment
+ self.enable_tracing = enable_tracing
+        self.enable_metrics = enable_metrics
+
+        # Cache created instruments: re-creating an instrument with the same
+        # name on every record_metric() call can trigger duplicate-instrument
+        # warnings in the SDK
+        self._instruments: dict[str, Any] = {}
+
+ # Create resource with service metadata
+ self.resource = Resource.create(
+ {
+ "service.name": service_name,
+ "service.environment": environment,
+ }
+ )
+
+ # Initialize tracing
+ if enable_tracing:
+ self._setup_tracing()
+
+ # Initialize metrics
+ if enable_metrics:
+ self._setup_metrics()
+
+ logger.info(
+ f"TelemetryManager initialized: service={service_name}, "
+ f"endpoint={otlp_endpoint}, env={environment}"
+ )
+
+ def _setup_tracing(self) -> None:
+ """Setup OpenTelemetry tracing."""
+ try:
+ # Create tracer provider
+ self.tracer_provider = TracerProvider(resource=self.resource)
+
+ # Create OTLP exporter
+ otlp_exporter = OTLPSpanExporter(endpoint=self.otlp_endpoint, insecure=True)
+
+ # Create batch span processor
+ span_processor = BatchSpanProcessor(otlp_exporter)
+ self.tracer_provider.add_span_processor(span_processor)
+
+ # Set global tracer provider
+ trace.set_tracer_provider(self.tracer_provider)
+
+ logger.info("Tracing initialized")
+ except Exception as e:
+ logger.warning(f"Failed to initialize tracing: {e}")
+ self.enable_tracing = False
+
+ def _setup_metrics(self) -> None:
+ """Setup OpenTelemetry metrics."""
+ try:
+ # Create metric reader with periodic export
+ metric_reader = PeriodicExportingMetricReader(
+ OTLPMetricExporter(endpoint=self.otlp_endpoint, insecure=True),
+ export_interval_millis=5000, # Export every 5 seconds
+ )
+
+ # Create meter provider
+ self.meter_provider = MeterProvider(
+ resource=self.resource,
+ metric_readers=[metric_reader],
+ )
+
+ # Set global meter provider
+ metrics.set_meter_provider(self.meter_provider)
+
+ logger.info("Metrics initialized")
+ except Exception as e:
+ logger.warning(f"Failed to initialize metrics: {e}")
+ self.enable_metrics = False
+
+ def get_tracer(self, name: str | None = None) -> trace.Tracer:
+ """
+ Get tracer for specific module.
+
+ Args:
+ name: Tracer name (defaults to service_name)
+
+ Returns:
+ Tracer instance
+ """
+ if not self.enable_tracing:
+ # Return no-op tracer if tracing disabled
+ return trace.NoOpTracer()
+
+ tracer_name = name or self.service_name
+ return trace.get_tracer(tracer_name)
+
+ def get_meter(self, name: str | None = None) -> metrics.Meter:
+ """
+ Get meter for metrics.
+
+ Args:
+ name: Meter name (defaults to service_name)
+
+ Returns:
+ Meter instance
+ """
+        meter_name = name or self.service_name
+        # Always return a meter: if metrics are disabled the global provider
+        # remains the default no-op provider, so the meter is safe to use
+        return metrics.get_meter(meter_name)
+
+ def create_span(
+ self,
+ name: str,
+ tracer_name: str | None = None,
+ attributes: dict[str, Any] | None = None,
+ ) -> trace.Span:
+ """
+ Create span with optional attributes.
+
+ Args:
+ name: Span name
+ tracer_name: Tracer name (defaults to service_name)
+ attributes: Span attributes (key-value pairs)
+
+ Returns:
+ Span instance
+ """
+ tracer = self.get_tracer(tracer_name)
+ span = tracer.start_span(name)
+
+        if attributes:
+            for key, value in attributes.items():
+                # OTel accepts str/bool/int/float natively; stringify the rest
+                if isinstance(value, (str, bool, int, float)):
+                    span.set_attribute(key, value)
+                else:
+                    span.set_attribute(key, str(value))
+
+ return span
+
+ def record_metric(
+ self,
+ name: str,
+ value: float,
+ tags: dict[str, str] | None = None,
+ metric_type: str = "histogram",
+ ) -> None:
+ """
+ Record custom metric.
+
+ Args:
+ name: Metric name
+ value: Metric value
+ tags: Metric tags/labels (key-value pairs)
+ metric_type: Metric type ("histogram", "counter", "gauge")
+ """
+ if not self.enable_metrics:
+ return
+
+ meter = self.get_meter()
+ tags = tags or {}
+
+        try:
+            if metric_type == "histogram":
+                if name not in self._instruments:
+                    self._instruments[name] = meter.create_histogram(name)
+                self._instruments[name].record(value, tags)
+            elif metric_type == "counter":
+                if name not in self._instruments:
+                    self._instruments[name] = meter.create_counter(name)
+                self._instruments[name].add(int(value), tags)
+            elif metric_type == "gauge":
+                if name not in self._instruments:
+                    self._instruments[name] = meter.create_up_down_counter(name)
+                self._instruments[name].add(int(value), tags)
+            else:
+                logger.warning(f"Unknown metric type: {metric_type}")
+        except Exception as e:
+            logger.warning(f"Failed to record metric {name}: {e}")
+
+ def shutdown(self) -> None:
+ """Shutdown telemetry (flush and close exporters)."""
+ try:
+ if self.enable_tracing and hasattr(self, "tracer_provider"):
+ self.tracer_provider.shutdown()
+
+ if self.enable_metrics and hasattr(self, "meter_provider"):
+ self.meter_provider.shutdown()
+
+ logger.info("TelemetryManager shut down")
+ except Exception as e:
+ logger.warning(f"Error during telemetry shutdown: {e}")
+
+
+# Predefined metric names
+class Metrics:
+ """Predefined metric names for consistency."""
+
+ # Request metrics
+ REQUEST_DURATION = "nerospatial_request_duration_seconds"
+ REQUESTS_TOTAL = "nerospatial_requests_total"
+
+ # WebSocket metrics
+ WEBSOCKET_CONNECTIONS = "nerospatial_websocket_connections"
+
+ # LLM metrics
+ LLM_TTFT = "nerospatial_llm_ttft_seconds" # Time to first token
+ LLM_ERRORS = "nerospatial_llm_errors_total"
+ LLM_TOKENS = "nerospatial_llm_tokens_total"
+
+ # VLM metrics
+ VLM_INFERENCE = "nerospatial_vlm_inference_seconds"
+ VLM_QUEUE_DEPTH = "nerospatial_vlm_queue_depth"
+
+ # Database metrics
+ DB_QUERY_DURATION = "nerospatial_db_query_duration_seconds"
+ DB_CONNECTIONS = "nerospatial_db_connections"
+
+ # Auth metrics
+ AUTH_LOGIN_TOTAL = "nerospatial_auth_login_total"
+ AUTH_TOKEN_VALIDATION = "nerospatial_auth_token_validation_total"
diff --git a/docker-compose.infra.yml b/docker-compose.infra.yml
new file mode 100644
index 0000000..491c5fd
--- /dev/null
+++ b/docker-compose.infra.yml
@@ -0,0 +1,88 @@
+# Docker Compose Infrastructure Services
+# Local development infrastructure for NeroSpatial Backend
+
+version: "3.8"
+
+services:
+ # ==========================================================================
+ # PostgreSQL - Primary Database
+ # ==========================================================================
+ postgres:
+ image: postgres:16-alpine
+ container_name: nerospatial-postgres
+ environment:
+ POSTGRES_DB: ${POSTGRES_DB:-nerospatial}
+ POSTGRES_USER: ${POSTGRES_USER:-nerospatial}
+ POSTGRES_PASSWORD: ${POSTGRES_PASSWORD:-dev-password-change-me}
+ ports:
+ - "5432:5432"
+ volumes:
+ - postgres_data:/var/lib/postgresql/data
+ - ./scripts/init-db.sql:/docker-entrypoint-initdb.d/init-db.sql:ro
+ healthcheck:
+ test:
+ [
+ "CMD-SHELL",
+ "pg_isready -U ${POSTGRES_USER:-nerospatial} -d ${POSTGRES_DB:-nerospatial}",
+ ]
+ interval: 10s
+ timeout: 5s
+ retries: 5
+ start_period: 5s
+ restart: unless-stopped
+ networks:
+ - nerospatial-network
+
+ # ==========================================================================
+ # Redis - Cache, Sessions, Rate Limiting
+ # ==========================================================================
+ redis:
+ image: redis:7-alpine
+ container_name: nerospatial-redis
+ command: redis-server --requirepass ${REDIS_PASSWORD:-dev-redis-password-change-me}
+ ports:
+ - "6379:6379"
+ volumes:
+ - redis_data:/data
+ healthcheck:
+ test:
+ [
+ "CMD",
+ "redis-cli",
+ "-a",
+ "${REDIS_PASSWORD:-dev-redis-password-change-me}",
+ "ping",
+ ]
+ interval: 10s
+ timeout: 5s
+ retries: 5
+ start_period: 5s
+ restart: unless-stopped
+ networks:
+ - nerospatial-network
+
+ # ==========================================================================
+ # Jaeger - Distributed Tracing (Optional for Dev)
+ # ==========================================================================
+ jaeger:
+ image: jaegertracing/all-in-one:1.54
+ container_name: nerospatial-jaeger
+ environment:
+      COLLECTOR_OTLP_ENABLED: "true" # Compose requires string values in environment mappings
+ ports:
+ - "16686:16686" # Jaeger UI
+ - "4317:4317" # OTLP gRPC
+ - "4318:4318" # OTLP HTTP
+ restart: unless-stopped
+ networks:
+ - nerospatial-network
+
+volumes:
+ postgres_data:
+ driver: local
+ redis_data:
+ driver: local
+
+networks:
+ nerospatial-network:
+ driver: bridge
diff --git a/docker-compose.yml b/docker-compose.yml
index 2e1e8af..65ee5be 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -24,7 +24,13 @@ services:
required: false
restart: unless-stopped
healthcheck:
- test: ["CMD", "python", "-c", "import urllib.request; urllib.request.urlopen('http://localhost:8000/health')"]
+ test:
+ [
+ "CMD",
+ "python",
+ "-c",
+ "import urllib.request; urllib.request.urlopen('http://localhost:8000/ready')",
+ ]
interval: 30s
timeout: 10s
retries: 3
diff --git a/docs/AUTH_IMPLEMENTATION.md b/docs/AUTH_IMPLEMENTATION.md
new file mode 100644
index 0000000..0699b5a
--- /dev/null
+++ b/docs/AUTH_IMPLEMENTATION.md
@@ -0,0 +1,937 @@
+# Authentication & Authorization Implementation Plan
+
+**Version:** 1.0
+**Status:** Reference Document (Implementation Pending)
+**Purpose:** Complete authentication and authorization implementation guide for NeroSpatial Backend
+
+---
+
+## Overview
+
+This document outlines the complete authentication and authorization strategy for NeroSpatial Backend, including OAuth2 flows, JWT token management, WebSocket authentication, and authorization policies.
+
+---
+
+## Database Schema Design
+
+### Entity Relationship Diagram
+
+```mermaid
+erDiagram
+ users ||--o{ refresh_tokens : "has"
+ users ||--o{ token_blacklist : "has"
+ users ||--o{ user_sessions : "has"
+ users ||--o{ audit_logs : "generates"
+
+ users {
+ uuid user_id PK
+ varchar email UK
+ varchar name
+ varchar oauth_provider
+ varchar oauth_sub
+ varchar status
+ timestamp created_at
+ timestamp updated_at
+ timestamp last_login
+ timestamp deleted_at
+ varchar picture_url
+ varchar locale
+ jsonb metadata
+ varchar schema_version
+ }
+
+ refresh_tokens {
+ uuid token_id PK
+ uuid user_id FK
+ varchar token_hash
+ timestamp expires_at
+ timestamp created_at
+ timestamp rotated_at
+ uuid previous_token_id FK
+ varchar ip_address
+ varchar user_agent
+ }
+
+ token_blacklist {
+ varchar token_id PK
+ uuid user_id FK
+ timestamp revoked_at
+ timestamp expires_at
+ varchar reason
+ varchar ip_address
+ }
+
+ user_sessions {
+ uuid session_id PK
+ uuid user_id FK
+ varchar mode
+ timestamp created_at
+ timestamp last_activity
+ jsonb metadata
+ }
+
+    audit_logs {
+        uuid log_id PK
+        uuid user_id FK
+        varchar action
+        jsonb details
+        varchar ip_address
+        varchar user_agent
+        timestamp created_at
+    }
+```
+
+### PostgreSQL Schema
+
+```sql
+-- Enum types
+CREATE TYPE user_status AS ENUM (
+ 'active',
+ 'pending_verification',
+ 'suspended',
+ 'blacklisted',
+ 'locked'
+);
+
+CREATE TYPE oauth_provider AS ENUM (
+ 'google',
+ 'github',
+ 'microsoft'
+);
+
+CREATE TYPE audit_action AS ENUM (
+    'login',
+    'logout',
+    'token_refresh',
+    'password_change',
+    'profile_update',
+    'account_delete',
+    'status_change',
+    'rate_limit_exceeded'
+);
+
+-- Users table
+CREATE TABLE users (
+ user_id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
+ email VARCHAR(255) UNIQUE NOT NULL,
+ name VARCHAR(255),
+ oauth_provider oauth_provider NOT NULL DEFAULT 'google',
+ oauth_sub VARCHAR(255), -- OAuth provider subject ID
+ status user_status NOT NULL DEFAULT 'active',
+ created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
+ updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
+ last_login TIMESTAMPTZ,
+ deleted_at TIMESTAMPTZ,
+ picture_url VARCHAR(500),
+ locale VARCHAR(10) DEFAULT 'en',
+ metadata JSONB DEFAULT '{}',
+ schema_version VARCHAR(10) NOT NULL DEFAULT '1.0',
+
+ CONSTRAINT email_format CHECK (email ~* '^[A-Za-z0-9._%+-]+@[A-Za-z0-9.-]+\.[A-Za-z]{2,}$')
+);
+
+-- Refresh tokens table
+CREATE TABLE refresh_tokens (
+ token_id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
+ user_id UUID NOT NULL REFERENCES users(user_id) ON DELETE CASCADE,
+ token_hash VARCHAR(64) NOT NULL, -- SHA-256 hash
+ expires_at TIMESTAMPTZ NOT NULL,
+ created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
+ rotated_at TIMESTAMPTZ,
+ previous_token_id UUID REFERENCES refresh_tokens(token_id),
+ ip_address INET,
+ user_agent VARCHAR(500)
+);
+
+-- Token blacklist table
+CREATE TABLE token_blacklist (
+ token_id VARCHAR(255) PRIMARY KEY, -- JWT jti
+ user_id UUID NOT NULL REFERENCES users(user_id) ON DELETE CASCADE,
+ revoked_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
+ expires_at TIMESTAMPTZ NOT NULL,
+    reason VARCHAR(50), -- 'logout', 'refresh', 'security', 'admin', 'expired'
+ ip_address INET
+);
+
+-- Audit logs table
+CREATE TABLE audit_logs (
+    log_id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
+    user_id UUID REFERENCES users(user_id) ON DELETE SET NULL,
+    action audit_action NOT NULL,
+    details JSONB DEFAULT '{}',
+    ip_address INET,
+    user_agent VARCHAR(500),
+    created_at TIMESTAMPTZ NOT NULL DEFAULT NOW()
+);
+
+-- Indexes
+CREATE INDEX idx_users_email ON users(email);
+CREATE INDEX idx_users_status ON users(status) WHERE deleted_at IS NULL;
+CREATE INDEX idx_users_oauth ON users(oauth_provider, oauth_sub);
+CREATE INDEX idx_refresh_tokens_user ON refresh_tokens(user_id);
+CREATE INDEX idx_refresh_tokens_hash ON refresh_tokens(token_hash);
+CREATE INDEX idx_refresh_tokens_expires ON refresh_tokens(expires_at);
+CREATE INDEX idx_token_blacklist_user ON token_blacklist(user_id);
+CREATE INDEX idx_token_blacklist_expires ON token_blacklist(expires_at);
+CREATE INDEX idx_audit_logs_user ON audit_logs(user_id);
+CREATE INDEX idx_audit_logs_created ON audit_logs(created_at DESC);
+
+-- Auto-update trigger
+CREATE OR REPLACE FUNCTION update_updated_at()
+RETURNS TRIGGER AS $$
+BEGIN
+ NEW.updated_at = NOW();
+ RETURN NEW;
+END;
+$$ LANGUAGE plpgsql;
+
+CREATE TRIGGER users_updated_at
+ BEFORE UPDATE ON users
+ FOR EACH ROW EXECUTE FUNCTION update_updated_at();
+
+-- Cleanup function (run via pg_cron)
+CREATE OR REPLACE FUNCTION cleanup_expired_tokens()
+RETURNS void AS $$
+BEGIN
+ DELETE FROM refresh_tokens WHERE expires_at < NOW();
+ DELETE FROM token_blacklist WHERE expires_at < NOW();
+END;
+$$ LANGUAGE plpgsql;
+```
+
+### Redis Schema
+
+```
+# User context cache (5 min TTL)
+user:context:{user_id} -> JSON(UserContext)
+
+# Token blacklist for fast lookup (TTL = token expiration)
+blacklist:{jti} -> "1"
+
+# Active sessions per user
+user:{user_id}:sessions -> SET[session_id, ...]
+
+# Rate limiting (sliding window)
+ratelimit:{user_id}:{endpoint}:{window} -> count
+
+# Login attempts (for lockout)
+login:attempts:{email} -> count (TTL: 15 min)
+```
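+
+These key patterns can be centralized in small helpers so every module builds them identically. A minimal sketch (helper names are illustrative, not part of the schema; the client import assumes redis-py's `redis.asyncio`):
+
+```python
+import redis.asyncio as redis
+
+
+def blacklist_key(jti: str) -> str:
+    """Key for a revoked JWT; entry TTL equals the remaining token lifetime."""
+    return f"blacklist:{jti}"
+
+
+def user_context_key(user_id: str) -> str:
+    """Key for the cached UserContext (5 min TTL)."""
+    return f"user:context:{user_id}"
+
+
+async def is_blacklisted(client: redis.Redis, jti: str) -> bool:
+    """Fast-path blacklist lookup used during token validation."""
+    return await client.exists(blacklist_key(jti)) > 0
+```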
+
+---
+
+## Authentication Flows
+
+### 1. OAuth2 Login Flow
+
+```mermaid
+sequenceDiagram
+ participant C as Client
+ participant API as NeroSpatial API
+ participant OAuth as Google OAuth
+ participant DB as PostgreSQL
+ participant Redis
+
+ C->>API: GET /auth/login?provider=google
+ API->>API: Generate state + PKCE verifier
+ API->>Redis: Store state (5 min TTL)
+ API->>C: Redirect to Google consent
+
+ C->>OAuth: User authorizes
+ OAuth->>API: Callback with code + state
+
+ API->>Redis: Verify state
+ API->>OAuth: Exchange code for tokens
+ OAuth->>API: Return tokens + id_token
+
+ API->>API: Validate id_token
+ API->>DB: Find or create user
+
+ alt New User
+ API->>DB: INSERT user
+ API->>API: Set status = pending_verification
+ else Existing User
+ API->>DB: UPDATE last_login
+ end
+
+ API->>API: Generate JWT (15 min)
+ API->>API: Generate refresh token (7 days)
+ API->>DB: Store refresh token hash
+ API->>Redis: Cache UserContext
+ API->>DB: INSERT audit_log (login)
+
+ API->>C: Return tokens + user info
+```
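+
+The "Generate state + PKCE verifier" step uses only standard-library primitives. A minimal sketch (helper names are illustrative):
+
+```python
+import base64
+import hashlib
+import secrets
+
+
+def generate_pkce_pair() -> tuple[str, str]:
+    """Return (code_verifier, code_challenge) per RFC 7636, S256 method."""
+    verifier = secrets.token_urlsafe(64)  # RFC requires 43-128 characters
+    digest = hashlib.sha256(verifier.encode("ascii")).digest()
+    challenge = base64.urlsafe_b64encode(digest).rstrip(b"=").decode("ascii")
+    return verifier, challenge
+
+
+def generate_state() -> str:
+    """Opaque CSRF-protection value, stored in Redis with a 5 minute TTL."""
+    return secrets.token_urlsafe(32)
+```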
+
+### 2. WebSocket Authentication Flow
+
+```mermaid
+sequenceDiagram
+ participant C as Client
+ participant WS as WebSocket Gateway
+ participant Auth as Auth Module
+ participant Redis
+ participant DB as PostgreSQL
+
+ C->>WS: Connect wss://.../ws?token={jwt}
+ WS->>Auth: validate_token(jwt)
+
+ Auth->>Auth: Decode JWT
+ Auth->>Auth: Verify signature (RS256)
+ Auth->>Auth: Check expiration
+
+ alt Token Invalid
+ Auth->>WS: AuthenticationError
+ WS->>C: Close(4001)
+ else Token Valid
+ Auth->>Redis: GET blacklist:{jti}
+ alt Blacklisted
+ Auth->>WS: AuthenticationError
+ WS->>C: Close(4001)
+ else Not Blacklisted
+ Auth->>Redis: GET user:context:{user_id}
+ alt Cache Miss
+ Auth->>DB: Get user
+ Auth->>Auth: Build UserContext
+ Auth->>Redis: SET user:context:{user_id}
+ end
+ Auth->>Auth: Check user status
+ alt Status != ACTIVE
+ Auth->>WS: AuthorizationError
+ WS->>C: Close(4003)
+ else Active
+ Auth->>WS: Return UserContext
+ WS->>WS: Create session
+ WS->>C: ACK(session_id)
+ end
+ end
+ end
+```
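+
+On the gateway side, the two failure branches in the diagram map to distinct WebSocket close codes. A sketch of that mapping (the handler name and exception import path are assumptions):
+
+```python
+from fastapi import WebSocket
+
+from core.exceptions import AuthenticationError  # assumed module path
+
+WS_CLOSE_UNAUTHENTICATED = 4001  # invalid, expired, or blacklisted token
+WS_CLOSE_UNAUTHORIZED = 4003  # valid token, but user status != ACTIVE
+
+
+async def authenticate_ws(ws: WebSocket, auth: "JWTAuth", token: str):
+    """Return a UserContext, or close the socket and return None."""
+    try:
+        context = await auth.extract_user_context(token)
+    except AuthenticationError:
+        await ws.close(code=WS_CLOSE_UNAUTHENTICATED)
+        return None
+    if not context.is_active():
+        await ws.close(code=WS_CLOSE_UNAUTHORIZED)
+        return None
+    return context
+```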
+
+### 3. Token Refresh Flow
+
+```mermaid
+sequenceDiagram
+ participant C as Client
+ participant API as NeroSpatial API
+ participant DB as PostgreSQL
+ participant Redis
+
+ C->>API: POST /auth/refresh
+ Note over C,API: Body: refresh_token
+
+ API->>API: Hash refresh token
+ API->>DB: Find token by hash
+
+ alt Not Found or Expired
+ API->>C: 401 Unauthorized
+ else Found
+ API->>DB: Get user
+ alt User Suspended
+ API->>C: 403 Forbidden
+ else User Active
+ API->>API: Generate new JWT
+ API->>API: Generate new refresh token
+
+ API->>DB: Mark old token rotated
+ API->>DB: Insert new refresh token
+ API->>Redis: Blacklist old JWT jti
+ API->>Redis: Invalidate user context cache
+ API->>DB: INSERT audit_log
+
+ API->>C: Return new tokens
+ end
+ end
+```
+
+### 4. Logout Flow
+
+```mermaid
+sequenceDiagram
+ participant C as Client
+ participant API as NeroSpatial API
+ participant DB as PostgreSQL
+ participant Redis
+
+ C->>API: POST /auth/logout
+ Note over C,API: Headers: Authorization: Bearer {jwt}
+
+ API->>API: Extract jti from JWT
+ API->>Redis: SET blacklist:{jti} (TTL = token expiry)
+ API->>DB: DELETE refresh_tokens WHERE user_id = ?
+ API->>Redis: DEL user:context:{user_id}
+ API->>DB: INSERT audit_log (logout)
+
+ API->>C: 200 OK
+```
+
+---
+
+## Authorization Strategy
+
+### User Status State Machine
+
+```mermaid
+stateDiagram-v2
+ [*] --> PendingVerification: New OAuth user
+ PendingVerification --> Active: Email verified
+ Active --> Locked: Rate limit exceeded
+ Locked --> Active: Lockout expires (15 min)
+ Active --> Suspended: Admin action
+ Suspended --> Active: Admin action
+ Active --> Blacklisted: Security violation
+ Blacklisted --> [*]: Permanent ban
+
+ note right of Active: Full access to all features
+ note right of PendingVerification: Limited access, no WebSocket
+ note right of Locked: Temporary block, auto-expires
+ note right of Suspended: Manual admin intervention required
+ note right of Blacklisted: Permanent, no recovery
+```
+
+### Access Control Matrix
+
+| Status | WebSocket | HTTP API | Profile Read | Profile Write | Admin |
+|--------|-----------|----------|--------------|---------------|-------|
+| ACTIVE | ✅ | ✅ | ✅ | ✅ | ❌ |
+| PENDING_VERIFICATION | ❌ | ✅ (limited) | ✅ | ✅ | ❌ |
+| LOCKED | ❌ | ❌ | ✅ | ❌ | ❌ |
+| SUSPENDED | ❌ | ❌ | ❌ | ❌ | ❌ |
+| BLACKLISTED | ❌ | ❌ | ❌ | ❌ | ❌ |
+
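+The matrix translates directly into small policy sets keyed on `UserStatus` (from `core.models`). A sketch (set and function names are illustrative):
+
+```python
+from core.models import UserStatus
+
+WEBSOCKET_ALLOWED = {UserStatus.ACTIVE}
+HTTP_API_ALLOWED = {UserStatus.ACTIVE, UserStatus.PENDING_VERIFICATION}
+PROFILE_READ_ALLOWED = {
+    UserStatus.ACTIVE,
+    UserStatus.PENDING_VERIFICATION,
+    UserStatus.LOCKED,
+}
+
+
+def can_connect_websocket(status: UserStatus) -> bool:
+    return status in WEBSOCKET_ALLOWED
+```
+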
+### Rate Limiting
+
+**Per-User Limits:**
+| Resource | Limit | Window | Action on Exceed |
+|----------|-------|--------|------------------|
+| HTTP API | 100 requests | 1 minute | 429 Too Many Requests |
+| WebSocket connections | 3 concurrent | - | Reject new connections |
+| LLM queries | 50 queries | 1 hour | Queue or reject |
+| Login attempts | 5 attempts | 15 minutes | Account LOCKED |
+
+**Implementation** (sketched below):
+- Redis-based distributed rate limiting
+- Sliding window algorithm
+- Per-endpoint and per-user limits
+- Exponential backoff on repeated violations
+
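+A minimal sketch of that sliding-window check using a Redis sorted set of request timestamps (the redis.asyncio pipeline calls are real; the key naming and return shape follow `RateLimiter.check_limit`, defined later in this document):
+
+```python
+import time
+
+async def check_limit(redis, key: str, limit: int, window_seconds: int) -> tuple[bool, int]:
+    """Allow the request if fewer than `limit` requests landed in the last window."""
+    now = time.time()
+    pipe = redis.pipeline()
+    pipe.zremrangebyscore(key, 0, now - window_seconds)  # evict entries outside the window
+    pipe.zcard(key)                                      # count what remains
+    _, current = await pipe.execute()
+    if current >= limit:
+        return False, 0
+    await redis.zadd(key, {f"{now}": now})               # record this request (timestamp as member)
+    await redis.expire(key, window_seconds)              # let idle keys expire on their own
+    return True, limit - current - 1
+```
+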
+### Future: Role-Based Access Control (RBAC)
+
+```python
+from enum import Enum
+
+class Role(str, Enum):
+ USER = "user" # Standard user
+ PREMIUM = "premium" # Premium subscription
+ ADMIN = "admin" # System administrator
+
+class Permission(str, Enum):
+ WEBSOCKET_CONNECT = "websocket:connect"
+ LLM_QUERY = "llm:query"
+ VISION_ENABLE = "vision:enable"
+ PASSIVE_MODE = "passive:enable"
+ ADMIN_USERS = "admin:users"
+ ADMIN_SYSTEM = "admin:system"
+
+ROLE_PERMISSIONS = {
+ Role.USER: [
+ Permission.WEBSOCKET_CONNECT,
+ Permission.LLM_QUERY,
+ ],
+ Role.PREMIUM: [
+ Permission.WEBSOCKET_CONNECT,
+ Permission.LLM_QUERY,
+ Permission.VISION_ENABLE,
+ Permission.PASSIVE_MODE,
+ ],
+ Role.ADMIN: list(Permission), # All permissions
+}
+```
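+
+A permission check then reduces to a lookup (a sketch; storing a `role` on the user model is an assumption of the future RBAC work):
+
+```python
+def has_permission(role: Role, permission: Permission) -> bool:
+    """True if the role grants the permission under ROLE_PERMISSIONS."""
+    return permission in ROLE_PERMISSIONS.get(role, [])
+```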
+
+---
+
+## Implementation Components
+
+### 1. core/auth.py - JWT Authentication
+
+```python
+class JWTAuth:
+ """JWT authentication service"""
+
+    def __init__(
+        self,
+        private_key: str,                 # RS256 private key for signing
+        public_key: str,                  # RS256 public key for verification
+        redis_client: RedisClient,
+        db_client: PostgresClient,
+        access_token_ttl: int = 900,      # 15 minutes
+        refresh_token_ttl: int = 604800,  # 7 days
+    ):
+ pass
+
+ async def validate_token(self, token: str) -> dict[str, Any]:
+ """
+ Validate JWT and return claims.
+
+ Raises:
+ AuthenticationError: If token is invalid or expired
+ """
+ pass
+
+ async def extract_user_context(self, token: str) -> UserContext:
+ """
+ Extract UserContext from JWT with Redis caching.
+
+ Flow:
+ 1. Validate token
+ 2. Check blacklist
+ 3. Check Redis cache
+ 4. If cache miss, query DB and cache
+ """
+ pass
+
+ async def generate_tokens(
+ self,
+ user: User,
+ ip_address: str | None = None,
+ user_agent: str | None = None,
+ ) -> tuple[str, str]:
+ """
+ Generate access token and refresh token.
+
+ Returns:
+ Tuple of (access_token, refresh_token)
+ """
+ pass
+
+ async def refresh_tokens(
+ self,
+ refresh_token: str,
+ ip_address: str | None = None,
+ ) -> tuple[str, str]:
+ """
+ Refresh tokens with rotation.
+
+ Flow:
+ 1. Validate refresh token
+ 2. Check user status
+ 3. Generate new tokens
+ 4. Mark old token as rotated
+ 5. Blacklist old access token jti
+ """
+ pass
+
+ async def blacklist_token(
+ self,
+ jti: str,
+ user_id: UUID,
+ reason: TokenRevocationReason,
+ expires_at: datetime,
+ ip_address: str | None = None,
+ ) -> None:
+ """Add token to blacklist (Redis + PostgreSQL)"""
+ pass
+
+ async def is_blacklisted(self, jti: str) -> bool:
+ """Check if token is blacklisted (Redis fast lookup)"""
+ pass
+
+ async def logout(self, token: str, ip_address: str | None = None) -> None:
+ """
+ Logout user.
+
+ Flow:
+ 1. Extract jti from token
+ 2. Blacklist token
+ 3. Delete all refresh tokens for user
+ 4. Invalidate user context cache
+ 5. Log audit event
+ """
+ pass
+```
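+
+For illustration, a hedged sketch of the token primitives behind `generate_tokens`, using PyJWT with RS256; the claim names and SHA-256 refresh-token hashing follow the design above, while the helper names are illustrative:
+
+```python
+import hashlib
+import secrets
+import uuid
+from datetime import datetime, timedelta, timezone
+
+import jwt  # PyJWT
+
+def sign_access_token(private_key: str, user_id: str, ttl: int = 900) -> str:
+    now = datetime.now(timezone.utc)
+    claims = {
+        "sub": user_id,
+        "jti": str(uuid.uuid4()),  # unique id, used later for blacklisting
+        "iat": now,
+        "exp": now + timedelta(seconds=ttl),
+    }
+    return jwt.encode(claims, private_key, algorithm="RS256")
+
+def new_refresh_token() -> tuple[str, str]:
+    """Return (raw_token, sha256_hex); only the hash is stored in PostgreSQL."""
+    raw = secrets.token_urlsafe(48)
+    return raw, hashlib.sha256(raw.encode()).hexdigest()
+```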
+
+### 2. api/auth_routes.py - OAuth Endpoints
+
+```python
+from fastapi import APIRouter, Request, Response, HTTPException, Depends, Query
+from fastapi.responses import RedirectResponse
+
+router = APIRouter(prefix="/auth", tags=["Authentication"])
+
+@router.get("/login")
+async def login(
+ provider: OAuthProvider = Query(...),
+ redirect_uri: str | None = Query(None),
+):
+ """
+ Initiate OAuth login flow.
+
+ Query Parameters:
+ provider: OAuth provider (google, github, microsoft)
+ redirect_uri: Optional redirect URI after login
+
+ Returns:
+ Redirect to OAuth provider consent page
+ """
+ pass
+
+@router.get("/callback")
+async def callback(
+ code: str = Query(...),
+ state: str = Query(...),
+):
+ """
+ OAuth callback handler.
+
+ Query Parameters:
+ code: Authorization code from OAuth provider
+ state: State parameter for CSRF protection
+
+ Returns:
+ JSON with access_token, refresh_token, user info
+ """
+ pass
+
+@router.post("/refresh")
+async def refresh(
+ request: Request,
+ body: RefreshTokenRequest,
+):
+ """
+ Refresh access token.
+
+ Body:
+ refresh_token: Current refresh token
+
+ Returns:
+ JSON with new access_token, refresh_token
+ """
+ pass
+
+@router.post("/logout")
+async def logout(
+ request: Request,
+ user: UserContext = Depends(get_current_user),
+):
+ """
+ Logout user.
+
+ Requires:
+ Authorization: Bearer {access_token}
+
+ Returns:
+ 200 OK
+ """
+ pass
+
+@router.get("/me")
+async def get_current_user_info(
+ user: UserContext = Depends(get_current_user),
+):
+ """
+ Get current user info.
+
+ Requires:
+ Authorization: Bearer {access_token}
+
+ Returns:
+ JSON with user profile
+ """
+ pass
+```
+
+### 3. core/middleware.py - Auth Middleware
+
+```python
+from fastapi import Request, HTTPException, Depends
+from fastapi.security import HTTPBearer, HTTPAuthorizationCredentials
+
+security = HTTPBearer()
+
+async def get_current_user(
+ credentials: HTTPAuthorizationCredentials = Depends(security),
+ auth: JWTAuth = Depends(get_auth),
+) -> UserContext:
+ """
+ Dependency to get current authenticated user.
+
+ Raises:
+ HTTPException 401: If token is invalid
+ HTTPException 403: If user is not active
+ """
+ pass
+
+def require_status(*statuses: UserStatus):
+ """
+ Dependency factory to require specific user status.
+
+ Usage:
+ @router.get("/protected")
+ async def protected(user: UserContext = Depends(require_status(UserStatus.ACTIVE))):
+ pass
+ """
+ pass
+
+def require_permission(permission: Permission):
+ """
+ Dependency factory to require specific permission (future RBAC).
+ """
+ pass
+```
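+
+One possible body for `require_status`, sketched against FastAPI's dependency system (assumes `UserContext` carries a `status` field):
+
+```python
+def require_status(*statuses: UserStatus):
+    async def dependency(
+        user: UserContext = Depends(get_current_user),
+    ) -> UserContext:
+        if user.status not in statuses:
+            raise HTTPException(status_code=403, detail="User status not permitted")
+        return user
+    return dependency
+```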
+
+### 4. core/rate_limiter.py - Rate Limiting
+
+```python
+from fastapi import Request
+
+class RateLimiter:
+ """Redis-based distributed rate limiter using sliding window"""
+
+ def __init__(self, redis_client: RedisClient):
+ pass
+
+ async def check_limit(
+ self,
+ key: str,
+ limit: int,
+ window_seconds: int,
+ ) -> tuple[bool, int]:
+ """
+ Check if request is within limits.
+
+ Returns:
+ Tuple of (allowed: bool, remaining: int)
+ """
+ pass
+
+ async def record_request(self, key: str, window_seconds: int) -> None:
+ """Record a request for rate limiting"""
+ pass
+
+ async def get_remaining(self, key: str, limit: int, window_seconds: int) -> int:
+ """Get remaining requests in current window"""
+ pass
+
+
+# Middleware
+async def rate_limit_middleware(
+ request: Request,
+ call_next,
+ limit: int = 100,
+ window: int = 60,
+):
+ """Rate limiting middleware for HTTP requests"""
+ pass
+```
+
+---
+
+## Security Considerations
+
+### 1. Token Security
+
+| Aspect | Implementation |
+|--------|----------------|
+| Algorithm | RS256 (asymmetric) |
+| Access Token TTL | 15 minutes |
+| Refresh Token TTL | 7 days |
+| Refresh Token Storage | SHA-256 hash in PostgreSQL |
+| Token Rotation | On every refresh |
+| Blacklist | Redis (fast) + PostgreSQL (persistent) |
+
+### 2. OAuth Security
+
+- **State Parameter**: Random UUID stored in Redis (5 min TTL)
+- **PKCE**: Required for mobile/SPA clients (generation sketched after this list)
+- **Token Validation**: Verify id_token signature and claims
+- **Nonce**: Included in authorization request
+
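+A sketch of PKCE `code_verifier`/`code_challenge` generation per RFC 7636 (S256 method):
+
+```python
+import base64
+import hashlib
+import secrets
+
+def make_pkce_pair() -> tuple[str, str]:
+    """Return (code_verifier, code_challenge) using the S256 transform."""
+    verifier = base64.urlsafe_b64encode(secrets.token_bytes(32)).rstrip(b"=").decode()
+    digest = hashlib.sha256(verifier.encode("ascii")).digest()
+    challenge = base64.urlsafe_b64encode(digest).rstrip(b"=").decode()
+    return verifier, challenge
+```
+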
+### 3. Password Security (Future)
+
+If adding email/password authentication:
+- Bcrypt hashing (cost factor 12)
+- Minimum 8 characters, require complexity
+- Account lockout after 5 failed attempts
+- Password reset via email with time-limited token
+
+### 4. Database Security
+
+- Encrypted connections (SSL/TLS required)
+- Parameterized queries (prevent SQL injection)
+- Least privilege database user
+- Sensitive fields encrypted at rest
+
+### 5. Transport Security
+
+- HTTPS only (HSTS header)
+- Secure cookies (HttpOnly, Secure, SameSite=Strict)
+- CORS with explicit origins
+- Content Security Policy headers
+
+---
+
+## GDPR Compliance
+
+### 1. Data Retention
+
+| Data Type | Retention Period | Cleanup Method |
+|-----------|------------------|----------------|
+| User Profile | Until deletion request | Soft delete → Hard delete after 30 days |
+| Access Tokens | 15 minutes | Auto-expire |
+| Refresh Tokens | 7 days | Auto-expire + cleanup function |
+| Token Blacklist | Until original token expiry | Cleanup function |
+| Audit Logs | 90 days | Scheduled cleanup |
+
+### 2. Right to Access
+
+```
+GET /api/user/export
+
+Returns:
+- User profile data
+- OAuth provider info
+- Active sessions
+- Recent audit logs
+- Interaction history (from other modules)
+```
+
+### 3. Right to Deletion
+
+```
+DELETE /api/user/account
+
+Flow:
+1. Set deleted_at = NOW()
+2. Revoke all tokens
+3. Delete all sessions
+4. Anonymize audit logs
+5. After 30 days: Hard delete user and cascade
+```
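+
+A sketch of the soft-delete step, assuming an asyncpg-style pool; the hard delete after 30 days would run as a scheduled cleanup job:
+
+```python
+async def soft_delete_user(db, user_id) -> None:
+    await db.execute(
+        "UPDATE users SET deleted_at = NOW() "
+        "WHERE id = $1 AND deleted_at IS NULL",
+        user_id,
+    )
+```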
+
+### 4. Right to Rectification
+
+```
+PATCH /api/user/profile
+
+Allowed updates:
+- name
+- locale
+- picture_url
+- metadata
+
+Audit log created for each update.
+```
+
+---
+
+## Monitoring & Observability
+
+### Metrics (Prometheus)
+
+```
+# Authentication metrics
+auth_login_total{provider, status}
+auth_logout_total{status}
+auth_token_validation_total{status}
+auth_token_refresh_total{status}
+auth_blacklist_total{reason}
+
+# Rate limiting metrics
+auth_rate_limit_exceeded_total{endpoint, user_id}
+auth_account_locked_total{reason}
+
+# Performance metrics
+auth_token_validation_duration_seconds
+auth_user_context_cache_hit_total
+auth_user_context_cache_miss_total
+```
+
+### Logs (Structured JSON)
+
+```json
+{
+ "timestamp": "2024-01-15T10:30:00Z",
+ "level": "INFO",
+ "service": "auth",
+ "trace_id": "abc-123",
+ "event": "login_success",
+ "user_id": "uuid",
+ "provider": "google",
+ "ip_address": "1.2.3.4"
+}
+```
+
+### Alerts
+
+| Alert | Condition | Severity |
+|-------|-----------|----------|
+| High Login Failures | > 100 failures in 5 min | Warning |
+| Token Validation Errors | > 5% error rate | Critical |
+| Rate Limit Exceeded | > 50 users locked in 1 hour | Warning |
+| Suspicious Activity | Same user from multiple IPs | Warning |
+
+---
+
+## Testing Strategy
+
+### Unit Tests
+
+- JWT validation (valid, invalid, expired; sketched below)
+- Token generation and parsing
+- Blacklist operations
+- Rate limiter logic
+- User status transitions
+
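+An illustrative pytest sketch for the expired-token case (pytest-asyncio and the `jwt_auth`/`rsa_keys` fixtures are assumed; only PyJWT behaviour is relied on):
+
+```python
+import jwt
+import pytest
+
+from core import AuthenticationError
+
+@pytest.mark.asyncio
+async def test_expired_token_rejected(jwt_auth, rsa_keys):
+    private_key, _public_key = rsa_keys
+    expired = jwt.encode({"sub": "u1", "exp": 0}, private_key, algorithm="RS256")
+    with pytest.raises(AuthenticationError):
+        await jwt_auth.validate_token(expired)
+```
+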
+### Integration Tests
+
+- OAuth flow end-to-end (mock OAuth provider)
+- Token refresh with rotation
+- WebSocket authentication
+- Rate limiting behavior
+- Cache invalidation
+
+### Security Tests
+
+- SQL injection attempts
+- Token manipulation
+- Replay attacks
+- Brute force protection
+
+---
+
+## Implementation Phases
+
+### Phase 1: Core Auth (Current Focus)
+- Finalize user models
+- Implement JWTAuth class
+- Implement token validation
+- Add blacklist support
+
+### Phase 2: OAuth Integration
+- Google OAuth provider
+- Login/callback endpoints
+- Token generation
+
+### Phase 3: Token Management
+- Refresh token rotation
+- Logout functionality
+- Rate limiting
+
+### Phase 4: Security Hardening
+- Audit logging
+- Monitoring metrics
+- Security headers
+- GDPR endpoints
+
+---
+
+## Dependencies
+
+```toml
+# pyproject.toml additions
+[project]
+dependencies = [
+    "pyjwt>=2.8.0",
+    "cryptography>=41.0.0",  # for RS256
+    "httpx>=0.27.0",         # for OAuth HTTP calls
+]
+```
+
+---
+
+## References
+
+- [RFC 6749 - OAuth 2.0](https://tools.ietf.org/html/rfc6749)
+- [RFC 7519 - JSON Web Token](https://tools.ietf.org/html/rfc7519)
+- [RFC 7636 - PKCE](https://tools.ietf.org/html/rfc7636)
+- [OWASP Authentication Cheat Sheet](https://cheatsheetseries.owasp.org/cheatsheets/Authentication_Cheat_Sheet.html)
diff --git a/docs/COMPONENT_PLANS_MASTER.md b/docs/COMPONENT_PLANS_MASTER.md
index 9c8e2d8..4e3e033 100644
--- a/docs/COMPONENT_PLANS_MASTER.md
+++ b/docs/COMPONENT_PLANS_MASTER.md
@@ -21,20 +21,35 @@ This document serves as the index for all detailed component implementation plan
## Component Plans
-### 1. [Core Module](./COMPONENT_PLAN_CORE.md)
+### 1. [Core Module](./CORE_MODULE.md) - IMPLEMENTED
+
**Foundation layer** - Shared utilities, auth, telemetry, exceptions, models, logging
-- `core/auth.py` - JWT validation, user context extraction
-- `core/telemetry.py` - OpenTelemetry setup (traces, metrics, logs)
-- `core/exceptions.py` - Custom exceptions (SessionExpired, VLMTimeout, etc.)
-- `core/models.py` - Pydantic schemas (Session, User, Interaction, etc.)
-- `core/logger.py` - Structured logging configuration
+
+**Status:** Production Ready
+
+**Implemented Components:**
+
+- `core/auth.py` - JWT authentication with RS256, token refresh, blacklist
+- `core/telemetry.py` - OpenTelemetry tracing and metrics
+- `core/exceptions.py` - Custom exception hierarchy with trace context
+- `core/models/` - Pydantic models (User, Session, Interaction, Protocol)
+- `core/logger.py` - Structured JSON logging with trace_id propagation
+- `core/app_state.py` - Application state container
+- `core/config_loader.py` - Azure App Config + Key Vault integration
+- `core/keyvault.py` - Azure Key Vault client with caching
+- `core/database.py` - DatabasePool protocol (stub for memory module)
+- `core/redis.py` - RedisClient protocol (stub for memory module)
**Dependencies:** None (foundation for all modules)
+**Reference:** See [CORE_MODULE.md](./CORE_MODULE.md) for full API documentation
+
---
### 2. [Gateway Module](./COMPONENT_PLAN_GATEWAY.md)
+
**WebSocket gateway** - Connection management, session handling, stream demultiplexing
+
- `gateway/ws_handler.py` - WebSocket connection lifecycle management
- `gateway/session_manager.py` - Redis session CRUD operations
- `gateway/demux.py` - Input demuxer (splits audio/video streams)
@@ -45,7 +60,9 @@ This document serves as the index for all detailed component implementation plan
---
### 3. [Perception Module](./COMPONENT_PLAN_PERCEPTION.md)
+
**Sensory processing layer** - STT, VLM, OCR, frame sampling
+
- `perception/audio.py` - Deepgram Nova-2 streaming STT client
- `perception/vision.py` - Qwen-VL / Phi-3 Vision inference wrapper
- `perception/ocr.py` - EasyOCR integration (triggered when has_text=true)
@@ -56,7 +73,9 @@ This document serves as the index for all detailed component implementation plan
---
### 4. [Cognition Module](./COMPONENT_PLAN_COGNITION.md)
+
**Intelligence layer** - Context fusion, LLM routing, prompt building, circuit breaking
+
- `cognition/sync_node.py` - Context fusion state machine (the "Brainstem")
- `cognition/llm_router.py` - Groq → Gemini → Ollama fallback chain
- `cognition/prompt_builder.py` - Jinja2 template engine for prompt construction
@@ -67,7 +86,9 @@ This document serves as the index for all detailed component implementation plan
---
### 5. [Memory Module](./COMPONENT_PLAN_MEMORY.md)
+
**Persistence layer** - All database operations
+
- `memory/redis_client.py` - Hot memory (sessions) + job queue (Redis Streams)
- `memory/cassandra_client.py` - Interaction text logs (deletable, not immutable)
- `memory/arango_client.py` - Knowledge graph + vector search
@@ -79,7 +100,9 @@ This document serves as the index for all detailed component implementation plan
---
### 6. [Passive Module](./COMPONENT_PLAN_PASSIVE.md)
+
**Background workers** - Async processing for Passive Mode
+
- `passive/audio_worker.py` - Whisper v3 Large batch transcription
- `passive/embedding_worker.py` - sentence-transformers for text → vector
- `passive/graph_updater.py` - ArangoDB async bulk writes
@@ -90,7 +113,9 @@ This document serves as the index for all detailed component implementation plan
---
### 7. [API Module](./COMPONENT_PLAN_API.md)
+
**HTTP REST endpoints** - OAuth2, admin operations
+
- `api/auth_routes.py` - OAuth2 callbacks, token refresh endpoints
- `api/admin_routes.py` - User data export/deletion (GDPR compliance)
@@ -101,6 +126,7 @@ This document serves as the index for all detailed component implementation plan
## Implementation Order
### Phase 1: The Spine (Weeks 1-3)
+
1. **Core Module** (Foundation - must be first)
2. **Memory Module** (Redis client only)
3. **Gateway Module** (WebSocket handler)
@@ -108,21 +134,26 @@ This document serves as the index for all detailed component implementation plan
5. **Cognition Module** (LLM Router only)
### Phase 2: The Eyes (Weeks 4-6)
+
6. **Perception Module** (Vision, OCR, Frame Sampler)
7. **Cognition Module** (Sync Node, Prompt Builder)
### Phase 3: The Memory (Weeks 7-9)
+
8. **Memory Module** (Full - Cassandra, ArangoDB, Postgres)
9. **API Module** (Auth routes)
### Phase 4: The Subconscious (Weeks 10-12)
+
10. **Passive Module** (All workers)
11. **Memory Module** (Graph Builder)
### Phase 5: Intelligence Refinement (Weeks 13-15)
+
12. **Cognition Module** (Circuit Breaker)
### Phase 6: Production Hardening (Weeks 16-18)
+
13. **API Module** (Admin routes - GDPR)
---
@@ -130,17 +161,20 @@ This document serves as the index for all detailed component implementation plan
## Cross-Module Communication Patterns
### Direct Function Calls (In-Process)
+
- Gateway → Demux → Audio/Vision: Direct Python function calls
- Audio → Sync Node: Direct function call with event emission
- Sync Node → LLM Router: Direct function invocation
- **Latency:** <1ms (no serialization overhead)
### Event-Driven (Async)
+
- Perception → Cognition: Async events via asyncio.Queue
- Gateway → Passive: Redis Streams messages
- **Latency:** <5ms (in-memory queue) or async (Redis Streams)
### Database Operations (Async)
+
- All modules → Memory: Async database clients with connection pooling
- **Latency:** 5-10ms (local) or 50-100ms (remote)
@@ -149,27 +183,32 @@ This document serves as the index for all detailed component implementation plan
## Concurrency Strategy Summary
### Gateway Module
+
- One asyncio task per WebSocket connection
- Concurrent message handling per connection
- Connection pool: Max 10K concurrent connections
### Perception Module
+
- Audio: One Deepgram WebSocket per session (async)
- Vision: Queue-based processing (max 10 concurrent VLM inferences)
- Frame Sampler: Single async task per session
### Cognition Module
+
- Sync Node: One state machine per session (async task)
- LLM Router: Connection pooling (max 50 concurrent LLM requests)
- Circuit Breaker: Thread-safe state with asyncio.Lock
### Memory Module
+
- Redis: Connection pool (50 connections)
- Cassandra: Connection pool (20 connections)
- ArangoDB: Connection pool (30 connections)
- Postgres: Connection pool (10 connections)
### Passive Module
+
- Worker pool: 3-5 async workers per worker type
- Redis Stream consumer groups for coordination
- Batch processing for efficiency
@@ -179,16 +218,19 @@ This document serves as the index for all detailed component implementation plan
## Event Loop Optimization Principles
1. **Never Block the Event Loop**
+
- All I/O operations are async (aiohttp, aioredis, asyncpg, etc.)
- CPU-bound operations use `asyncio.to_thread()`
- VLM inference runs in separate process/thread pool
2. **Connection Pooling**
+
- All database clients use connection pools
- HTTP clients use connection pools (aiohttp)
- Reuse connections across requests
3. **Batch Operations**
+
- Cassandra: Batch writes (fire-and-forget)
- ArangoDB: Bulk graph updates
- Redis: Pipeline operations when possible
@@ -200,9 +242,23 @@ This document serves as the index for all detailed component implementation plan
---
-## Next Steps
+## Documentation Approach
+
+**Important:** When implementing new modules, always:
+
+1. **Reference actual code** - Check implemented modules (e.g., `core/`) for real APIs
+2. **Use real imports** - Import from implemented modules, not assumptions from plans
+3. **Update docs after implementation** - Replace `COMPONENT_PLAN_*.md` with `*_MODULE.md` reference docs
+4. **Test against reality** - Tests should use actual implementations, not mocked assumptions
-1. Review each component plan in detail
-2. Start with Core Module implementation
-3. Follow implementation order for dependencies
-4. Test each module in isolation before integration
+**Completed Reference Docs:**
+
+- [CORE_MODULE.md](./CORE_MODULE.md) - Full API reference for core module
+
+**Pending Implementation Plans:**
+
+- `COMPONENT_PLAN_GATEWAY.md` - Will become `GATEWAY_MODULE.md` after implementation
+- `COMPONENT_PLAN_PERCEPTION.md` - Will become `PERCEPTION_MODULE.md` after implementation
+- Remaining `COMPONENT_PLAN_*.md` files follow the same pattern
+
+---
diff --git a/docs/COMPONENT_PLAN_CORE.md b/docs/COMPONENT_PLAN_CORE.md
deleted file mode 100644
index 8c65742..0000000
--- a/docs/COMPONENT_PLAN_CORE.md
+++ /dev/null
@@ -1,786 +0,0 @@
-# Core Module Implementation Plan
-
-**Module:** `core/`
-**Purpose:** Foundation layer providing shared utilities for all modules
-**Dependencies:** None (imported by all other modules)
-
----
-
-## Overview
-
-The core module provides stateless, thread-safe utilities used across the entire platform. It includes authentication, telemetry, exception handling, shared data models, and structured logging.
-
-**Key Principles:**
-- Stateless design (no shared mutable state)
-- Thread-safe operations
-- Low-latency (<1ms overhead for most operations)
-- Context-aware (trace_id propagation via contextvars)
-
----
-
-## File Structure
-
-```
-core/
-├── __init__.py
-├── auth.py # JWT validation, user context extraction
-├── telemetry.py # OpenTelemetry setup (traces, metrics, logs)
-├── exceptions.py # Custom exceptions (SessionExpired, VLMTimeout, etc.)
-├── models.py # Pydantic schemas (Session, User, Interaction, etc.)
-└── logger.py # Structured logging configuration
-```
-
----
-
-## 1. `core/models.py` - Shared Pydantic Schemas
-
-### Purpose
-Centralized data models used across all modules. All models are immutable (frozen) to prevent accidental mutation.
-
-### Schemas
-
-#### Session Models
-
-```python
-from pydantic import BaseModel, Field
-from enum import Enum
-from uuid import UUID
-from datetime import datetime
-from typing import Optional, Dict, Any, List
-
-class SessionMode(str, Enum):
- """Session operation mode"""
- ACTIVE = "active" # Real-time conversational AI
- PASSIVE = "passive" # Silent observer mode
-
-class SessionState(BaseModel):
- """Session state stored in Redis"""
- session_id: UUID
- user_id: UUID
- mode: SessionMode
- created_at: datetime
- last_activity: datetime
- voice_id: Optional[str] = None
- enable_vision: bool = False
- preferences: Dict[str, Any] = Field(default_factory=dict)
-
- class Config:
- frozen = True # Immutable
- json_encoders = {
- UUID: str,
- datetime: lambda v: v.isoformat()
- }
-```
-
-#### User Models
-
-```python
-class UserContext(BaseModel):
- """User context extracted from JWT token"""
- user_id: UUID
- email: str
- name: Optional[str] = None
- created_at: datetime
- oauth_provider: Optional[str] = None # "google", "github", etc.
-
- class Config:
- frozen = True
-```
-
-#### Interaction Models
-
-```python
-class InteractionTurn(BaseModel):
- """Single interaction turn (user query + AI response)"""
- turn_id: UUID
- session_id: UUID
- timestamp: datetime
- transcript: str
- scene_description: Optional[str] = None
- llm_response: str
- model_used: str # "groq", "gemini", "ollama"
- latency_ms: int
- tokens_used: Optional[int] = None
-
- class Config:
- frozen = True
-
-class ConversationHistory(BaseModel):
- """Last N turns for context retrieval"""
- user_id: UUID
- turns: List[InteractionTurn] = Field(default_factory=list)
- max_turns: int = 10
-
- def add_turn(self, turn: InteractionTurn) -> "ConversationHistory":
- """Add turn and maintain max_turns limit"""
- new_turns = [turn] + self.turns
- return ConversationHistory(
- user_id=self.user_id,
- turns=new_turns[:self.max_turns],
- max_turns=self.max_turns
- )
-```
-
-#### Control Messages
-
-```python
-class ControlMessageType(str, Enum):
- """WebSocket control message types"""
- SESSION_CONTROL = "session_control"
- ERROR = "error"
- ACK = "ack"
- HEARTBEAT = "heartbeat"
-
-class ControlMessage(BaseModel):
- """Control message sent via WebSocket (stream_type=0x03)"""
- type: ControlMessageType
- action: Optional[str] = None # "start_active_mode", "start_passive_mode", "end_session"
- payload: Dict[str, Any] = Field(default_factory=dict)
- timestamp: datetime = Field(default_factory=datetime.utcnow)
-
- class Config:
- frozen = True
-```
-
-#### Binary Frame Models
-
-```python
-class StreamType(int, Enum):
- """Binary frame stream types"""
- AUDIO = 0x01
- VIDEO = 0x02
- CONTROL = 0x03
-
-class FrameFlags(int, Enum):
- """Binary frame flags"""
- END_OF_STREAM = 0x01
- PRIORITY = 0x02
- ERROR = 0x04
-
-class BinaryFrame(BaseModel):
- """Parsed binary frame from WebSocket"""
- stream_type: StreamType
- flags: int
- payload: bytes
- length: int
-
- @classmethod
- def parse(cls, data: bytes) -> "BinaryFrame":
- """Parse 4-byte header + payload"""
- if len(data) < 4:
- raise ValueError("Frame too short")
-
- stream_type = StreamType(data[0])
- flags = data[1]
- length = int.from_bytes(data[2:4], 'big')
- payload = data[4:4+length]
-
- if len(payload) != length:
- raise ValueError("Payload length mismatch")
-
- return cls(
- stream_type=stream_type,
- flags=flags,
- payload=payload,
- length=length
- )
-
- def to_bytes(self) -> bytes:
- """Serialize to binary frame format"""
- header = bytes([
- self.stream_type.value,
- self.flags,
- *self.length.to_bytes(2, 'big')
- ])
- return header + self.payload
-```
-
-### Concurrency Considerations
-- All models use `frozen=True` to prevent mutation
-- `Field(default_factory=dict)` avoids shared mutable defaults
-- UUID generation uses `uuid.uuid4()` (thread-safe)
-- Datetime uses `datetime.utcnow()` (thread-safe)
-
----
-
-## 2. `core/exceptions.py` - Custom Exception Hierarchy
-
-### Purpose
-Structured error handling with context (trace_id, user_id) for distributed tracing.
-
-### Exception Classes
-
-```python
-from typing import Optional
-from uuid import UUID
-
-class NeroSpatialException(Exception):
- """Base exception for all NeroSpatial errors"""
- def __init__(
- self,
- message: str,
- trace_id: Optional[str] = None,
- user_id: Optional[UUID] = None,
- **kwargs
- ):
- self.message = message
- self.trace_id = trace_id
- self.user_id = user_id
- self.context = kwargs
- super().__init__(self.message)
-
- def __str__(self) -> str:
- parts = [self.message]
- if self.trace_id:
- parts.append(f"trace_id={self.trace_id}")
- if self.user_id:
- parts.append(f"user_id={self.user_id}")
- return " | ".join(parts)
-
-class AuthenticationError(NeroSpatialException):
- """JWT validation failed or token expired"""
- pass
-
-class SessionExpiredError(NeroSpatialException):
- """Session TTL expired in Redis"""
- pass
-
-class SessionNotFoundError(NeroSpatialException):
- """Session not found in Redis"""
- pass
-
-class VLMTimeoutError(NeroSpatialException):
- """VLM inference exceeded timeout threshold"""
- def __init__(self, timeout_ms: int, **kwargs):
- self.timeout_ms = timeout_ms
- super().__init__(f"VLM inference timeout after {timeout_ms}ms", **kwargs)
-
-class LLMProviderError(NeroSpatialException):
- """LLM API call failed (network, rate limit, etc.)"""
- def __init__(
- self,
- message: str,
- provider: str,
- status_code: Optional[int] = None,
- **kwargs
- ):
- self.provider = provider
- self.status_code = status_code
- super().__init__(f"{provider}: {message}", **kwargs)
-
-class CircuitBreakerOpenError(NeroSpatialException):
- """Circuit breaker is open, provider unavailable"""
- def __init__(self, provider: str, **kwargs):
- self.provider = provider
- super().__init__(f"Circuit breaker open for {provider}", **kwargs)
-
-class DatabaseError(NeroSpatialException):
- """Database operation failed"""
- def __init__(
- self,
- message: str,
- db_type: str,
- operation: str,
- **kwargs
- ):
- self.db_type = db_type
- self.operation = operation
- super().__init__(
- f"{db_type} {operation} failed: {message}",
- **kwargs
- )
-
-class RateLimitExceeded(NeroSpatialException):
- """User exceeded rate limit"""
- def __init__(
- self,
- message: str,
- limit: int,
- window_seconds: int,
- **kwargs
- ):
- self.limit = limit
- self.window_seconds = window_seconds
- super().__init__(
- f"Rate limit exceeded: {limit} per {window_seconds}s",
- **kwargs
- )
-
-class ValidationError(NeroSpatialException):
- """Input validation failed"""
- def __init__(self, message: str, field: Optional[str] = None, **kwargs):
- self.field = field
- super().__init__(message, **kwargs)
-```
-
-### Error Handling Pattern
-```python
-# Example usage in gateway
-try:
- user_context = await auth.validate_token(token)
-except AuthenticationError as e:
- logger.error("JWT validation failed", extra={"error": str(e), "trace_id": trace_id})
- raise
-```
-
----
-
-## 3. `core/auth.py` - JWT Validation & User Context
-
-### Purpose
-JWT token validation with RS256, user context extraction, and caching.
-
-### Implementation
-
-```python
-import jwt
-from jwt import PyJWKClient
-from typing import Dict, Any, Optional
-from uuid import UUID
-from datetime import datetime, timedelta
-import asyncio
-from functools import lru_cache
-
-from core.models import UserContext
-from core.exceptions import AuthenticationError
-from core.logger import get_logger
-
-logger = get_logger(__name__)
-
-class JWTAuth:
- """JWT authentication and user context management"""
-
- def __init__(
- self,
- public_key_url: Optional[str] = None,
- public_key: Optional[str] = None,
- algorithm: str = "RS256",
- cache_ttl_seconds: int = 300 # 5 minutes
- ):
- """
- Initialize JWT auth.
-
- Args:
- public_key_url: URL to fetch JWKS (for RS256)
- public_key: Direct public key string (alternative to URL)
- algorithm: JWT algorithm (RS256 or HS256)
- cache_ttl_seconds: User context cache TTL
- """
- self.algorithm = algorithm
- self.cache_ttl = cache_ttl_seconds
-
- if public_key_url:
- self.jwks_client = PyJWKClient(public_key_url)
- elif public_key:
- self.public_key = public_key
- else:
- raise ValueError("Either public_key_url or public_key required")
-
- # User context cache: {user_id: (context, expiry)}
- self._user_cache: Dict[UUID, tuple[UserContext, datetime]] = {}
- self._cache_lock = asyncio.Lock()
-
- async def validate_token(self, token: str) -> Dict[str, Any]:
- """
- Validate JWT token and return claims.
-
- Raises AuthenticationError if invalid.
-
- Concurrency: Thread-safe, no shared state during validation
- """
- try:
- if hasattr(self, 'jwks_client'):
- # RS256: Fetch signing key from JWKS
- signing_key = self.jwks_client.get_signing_key_from_jwt(token)
- payload = jwt.decode(
- token,
- signing_key.key,
- algorithms=[self.algorithm],
- options={"verify_exp": True, "verify_signature": True}
- )
- else:
- # HS256 or direct public key
- payload = jwt.decode(
- token,
- self.public_key,
- algorithms=[self.algorithm],
- options={"verify_exp": True, "verify_signature": True}
- )
-
- return payload
-
- except jwt.ExpiredSignatureError:
- raise AuthenticationError("Token expired")
- except jwt.InvalidTokenError as e:
- raise AuthenticationError(f"Invalid token: {str(e)}")
-
- async def extract_user_context(self, token: str) -> UserContext:
- """
- Extract user_id and user info from token.
- Caches user context in memory (TTL: 5 minutes).
-
- Concurrency: Uses asyncio.Lock for cache updates
- """
- claims = await self.validate_token(token)
-
- user_id = UUID(claims.get("sub") or claims.get("user_id"))
-
- # Check cache
- async with self._cache_lock:
- if user_id in self._user_cache:
- context, expiry = self._user_cache[user_id]
- if datetime.utcnow() < expiry:
- return context
- # Expired, remove from cache
- del self._user_cache[user_id]
-
- # Build user context from claims
- context = UserContext(
- user_id=user_id,
- email=claims.get("email", ""),
- name=claims.get("name"),
- created_at=datetime.fromtimestamp(claims.get("iat", 0)),
- oauth_provider=claims.get("oauth_provider")
- )
-
- # Cache with TTL
- async with self._cache_lock:
- expiry = datetime.utcnow() + timedelta(seconds=self.cache_ttl)
- self._user_cache[user_id] = (context, expiry)
-
- return context
-
- def generate_trace_id(self) -> str:
- """Generate unique trace ID for request"""
- import uuid
- return str(uuid.uuid4())
-
- async def clear_cache(self, user_id: Optional[UUID] = None):
- """Clear user context cache"""
- async with self._cache_lock:
- if user_id:
- self._user_cache.pop(user_id, None)
- else:
- self._user_cache.clear()
-```
-
-### Concurrency Optimizations
-- JWT decoding is CPU-bound but fast (cache decoded structure)
-- User context caching with `asyncio.Lock` for thread-safety
-- Public key loaded once at startup (immutable)
-- Cache cleanup on expiry (lazy eviction)
-
-### Event Loop Considerations
-- JWT validation is synchronous (use `asyncio.to_thread()` if needed for heavy workloads)
-- Cache lookups are in-memory (no I/O blocking)
-- JWKS fetching is async (if using URL)
-
----
-
-## 4. `core/telemetry.py` - OpenTelemetry Instrumentation
-
-### Purpose
-Distributed tracing, metrics, and logging integration with OpenTelemetry.
-
-### Implementation
-
-```python
-from opentelemetry import trace, metrics
-from opentelemetry.sdk.trace import TracerProvider
-from opentelemetry.sdk.trace.export import BatchSpanProcessor
-from opentelemetry.sdk.metrics import MeterProvider
-from opentelemetry.sdk.metrics.export import PeriodicExportingMetricReader
-from opentelemetry.exporter.otlp.proto.grpc.trace_exporter import OTLPSpanExporter
-from opentelemetry.exporter.otlp.proto.grpc.metric_exporter import OTLPMetricExporter
-from opentelemetry.sdk.resources import Resource
-from typing import Optional, Dict, Any
-import contextvars
-
-# Context variable for trace_id propagation
-trace_id_var: contextvars.ContextVar[Optional[str]] = contextvars.ContextVar('trace_id', default=None)
-
-class TelemetryManager:
- """OpenTelemetry instrumentation manager"""
-
- def __init__(
- self,
- service_name: str,
- otlp_endpoint: str,
- environment: str = "production"
- ):
- """
- Initialize telemetry.
-
- Args:
- service_name: Service name (e.g., "nerospatial-gateway")
- otlp_endpoint: OTLP gRPC endpoint (e.g., "http://jaeger:4317")
- environment: Deployment environment
- """
- resource = Resource.create({
- "service.name": service_name,
- "service.environment": environment
- })
-
- # Initialize TracerProvider
- self.tracer_provider = TracerProvider(resource=resource)
- otlp_exporter = OTLPSpanExporter(endpoint=otlp_endpoint, insecure=True)
- span_processor = BatchSpanProcessor(otlp_exporter)
- self.tracer_provider.add_span_processor(span_processor)
- trace.set_tracer_provider(self.tracer_provider)
-
- # Initialize MeterProvider
- self.meter_provider = MeterProvider(
- resource=resource,
- metric_readers=[
- PeriodicExportingMetricReader(
- OTLPMetricExporter(endpoint=otlp_endpoint, insecure=True),
- export_interval_millis=5000
- )
- ]
- )
- metrics.set_meter_provider(self.meter_provider)
-
- self.service_name = service_name
-
- def get_tracer(self, name: str) -> trace.Tracer:
- """Get tracer for specific module"""
- return trace.get_tracer(name)
-
- def get_meter(self, name: str) -> metrics.Meter:
- """Get meter for metrics"""
- return metrics.get_meter(name)
-
- def create_span(
- self,
- name: str,
- trace_id: Optional[str] = None,
- parent_span_id: Optional[str] = None
- ) -> trace.Span:
- """Create span with optional parent trace_id"""
- tracer = self.get_tracer(self.service_name)
-
- # Set trace_id in context
- if trace_id:
- trace_id_var.set(trace_id)
-
- # Create span (OpenTelemetry handles parent context automatically)
- span = tracer.start_span(name)
- return span
-
- def record_metric(
- self,
- name: str,
- value: float,
- tags: Dict[str, str],
- metric_type: str = "histogram"
- ):
- """Record custom metric"""
- meter = self.get_meter(self.service_name)
-
- if metric_type == "histogram":
- histogram = meter.create_histogram(name)
- histogram.record(value, tags)
- elif metric_type == "counter":
- counter = meter.create_counter(name)
- counter.add(value, tags)
- elif metric_type == "gauge":
- gauge = meter.create_up_down_counter(name)
- gauge.add(value, tags)
-
-# Predefined metrics
-METRICS = {
- "request_duration": "nerospatial_request_duration_seconds",
- "requests_total": "nerospatial_requests_total",
- "websocket_connections": "nerospatial_websocket_connections",
- "llm_ttft": "nerospatial_llm_ttft_seconds",
- "llm_errors": "nerospatial_llm_errors_total",
- "vlm_inference": "nerospatial_vlm_inference_seconds",
- "vlm_queue_depth": "nerospatial_vlm_queue_depth",
- "db_query_duration": "nerospatial_db_query_duration_seconds",
-}
-```
-
-### Metrics to Track
-- `nerospatial_request_duration_seconds` (histogram) - Request latency
-- `nerospatial_requests_total` (counter) - Request count by status
-- `nerospatial_websocket_connections` (gauge) - Active connections
-- `nerospatial_llm_ttft_seconds` (histogram) - Time to first token
-- `nerospatial_llm_errors_total` (counter) - LLM errors by provider
-- `nerospatial_vlm_inference_seconds` (histogram) - VLM latency
-- `nerospatial_vlm_queue_depth` (gauge) - VLM queue size
-- `nerospatial_db_query_duration_seconds` (histogram) - DB query latency
-
-### Concurrency Considerations
-- OpenTelemetry SDK is thread-safe
-- Metrics are recorded asynchronously (batched exports)
-- Spans are context-local (asyncio context vars)
-- Batch processors prevent blocking event loop
-
-### Event Loop Optimizations
-- OTLP exports are batched (non-blocking)
-- Use async exporters to avoid blocking event loop
-- Batch span processor flushes every 5 seconds
-
----
-
-## 5. `core/logger.py` - Structured Logging
-
-### Purpose
-JSON-structured logging with trace_id propagation via contextvars.
-
-### Implementation
-
-```python
-import logging
-import json
-from datetime import datetime
-from typing import Optional, Dict, Any
-import contextvars
-import sys
-
-# Context variable for trace_id
-trace_id_var: contextvars.ContextVar[Optional[str]] = contextvars.ContextVar('trace_id', default=None)
-
-class StructuredFormatter(logging.Formatter):
- """JSON formatter for structured logging"""
-
- def format(self, record: logging.LogRecord) -> str:
- """Format log record as JSON"""
- log_data = {
- "timestamp": datetime.utcnow().isoformat() + "Z",
- "level": record.levelname,
- "service": record.name,
- "message": record.getMessage(),
- "trace_id": trace_id_var.get(),
- }
-
- # Add exception info if present
- if record.exc_info:
- log_data["exception"] = self.formatException(record.exc_info)
-
- # Add extra fields from record
- if hasattr(record, "user_id"):
- log_data["user_id"] = str(record.user_id)
- if hasattr(record, "session_id"):
- log_data["session_id"] = str(record.session_id)
- if hasattr(record, "error"):
- log_data["error"] = record.error
- if hasattr(record, "latency_ms"):
- log_data["latency_ms"] = record.latency_ms
-
- # Add any extra context
- if hasattr(record, "extra_context"):
- log_data.update(record.extra_context)
-
- return json.dumps(log_data, default=str)
-
-def setup_logging(level: str = "INFO", service_name: str = "nerospatial"):
- """
- Configure structured JSON logging.
-
- Args:
- level: Log level (DEBUG, INFO, WARNING, ERROR, CRITICAL)
- service_name: Service name for log records
- """
- handler = logging.StreamHandler(sys.stdout)
- handler.setFormatter(StructuredFormatter())
-
- root_logger = logging.getLogger()
- root_logger.addHandler(handler)
- root_logger.setLevel(getattr(logging, level))
-
- # Set service name
- logging.getLogger(service_name).setLevel(getattr(logging, level))
-
-def get_logger(name: str) -> logging.Logger:
- """Get logger for module"""
- return logging.getLogger(name)
-
-def set_trace_id(trace_id: str):
- """Set trace_id in context for current async task"""
- trace_id_var.set(trace_id)
-
-def get_trace_id() -> Optional[str]:
- """Get trace_id from context"""
- return trace_id_var.get()
-
-# Context manager for trace_id
-class TraceContext:
- """Context manager for trace_id"""
- def __init__(self, trace_id: str):
- self.trace_id = trace_id
- self.token = None
-
- def __enter__(self):
- self.token = trace_id_var.set(self.trace_id)
- return self
-
- def __exit__(self, exc_type, exc_val, exc_tb):
- trace_id_var.reset(self.token)
-```
-
-### Concurrency Considerations
-- `contextvars` ensures trace_id is isolated per async task
-- JSON serialization is thread-safe
-- Logger instances are thread-safe
-- No shared mutable state
-
-### Event Loop Optimizations
-- Logging is synchronous but fast (in-memory buffer)
-- Consider async logging handler for high-throughput scenarios (>10K logs/sec)
-- JSON serialization is CPU-bound but lightweight
-
----
-
-## Integration Points
-
-### Core → Gateway
-- `core.auth.JWTAuth.validate_token()` called on WebSocket connection
-- `core.models.SessionState` stored in Redis by gateway
-- `core.exceptions.SessionExpiredError` raised when session TTL expires
-
-### Core → All Modules
-- All modules import `core.exceptions` for error handling
-- All modules use `core.logger.get_logger()` for logging
-- All modules use `core.telemetry` for tracing
-- All modules use `core.models` for data structures
-
----
-
-## Testing Strategy
-
-### Unit Tests
-- JWT validation with valid/invalid/expired tokens
-- Exception hierarchy and context propagation
-- Logger JSON output format validation
-- Telemetry span creation and export
-- User context caching behavior
-
-### Integration Tests
-- Trace ID propagation across async boundaries
-- Error handling in concurrent scenarios
-- Cache eviction on TTL expiry
-
----
-
-## Performance Targets
-
-- JWT validation: <1ms (cached public key)
-- User context extraction: <5ms (cache hit) or <50ms (cache miss + DB query)
-- Logging overhead: <0.1ms per log entry
-- Telemetry span creation: <0.05ms
-- Exception creation: <0.01ms
-
----
-
-## Dependencies
-
-```python
-# requirements.txt additions for core module
-pydantic>=2.0.0
-pyjwt>=2.8.0
-cryptography>=41.0.0 # For RS256
-opentelemetry-api>=1.20.0
-opentelemetry-sdk>=1.20.0
-opentelemetry-exporter-otlp-proto-grpc>=1.20.0
-```
diff --git a/docs/CORE_MODULE.md b/docs/CORE_MODULE.md
new file mode 100644
index 0000000..98a55b8
--- /dev/null
+++ b/docs/CORE_MODULE.md
@@ -0,0 +1,654 @@
+# Core Module Reference
+
+**Module:** `core/`
+**Version:** 1.0
+**Status:** Production Ready
+**Dependencies:** None (foundation for all other modules)
+
+---
+
+## Overview
+
+The core module provides the foundational utilities and shared components for the NeroSpatial Backend. It is designed as a stateless, thread-safe foundation that all other modules import and depend upon.
+
+### Design Principles
+
+- **Stateless Operations**: No shared mutable state between requests
+- **Thread-Safe**: Safe for concurrent async operations
+- **Low Latency**: < 1ms overhead for most operations
+- **Context-Aware**: Automatic trace_id propagation via `contextvars`
+- **Protocol-Based**: Uses Python protocols for dependency injection
+
+---
+
+## Architecture
+
+```mermaid
+graph TB
+ subgraph "Core Module"
+        AUTH[auth.py<br/>JWT Authentication]
+        TELEM[telemetry.py<br/>OpenTelemetry]
+        LOG[logger.py<br/>Structured Logging]
+        EXC[exceptions.py<br/>Exception Hierarchy]
+        CFG[config_loader.py<br/>Azure Config]
+        KV[keyvault.py<br/>Azure Key Vault]
+        STATE[app_state.py<br/>App State Container]
+
+ subgraph "Models"
+ USER[user.py]
+ SESS[session.py]
+ INTER[interaction.py]
+ PROTO[protocol.py]
+ end
+
+ subgraph "Protocols/Stubs"
+            DB[database.py<br/>DatabasePool Protocol]
+            REDIS[redis.py<br/>RedisClient Protocol]
+ end
+ end
+
+ AUTH --> LOG
+ AUTH --> EXC
+ CFG --> KV
+ CFG --> EXC
+ STATE --> AUTH
+ STATE --> TELEM
+ STATE --> KV
+
+ style AUTH fill:#4CAF50
+ style TELEM fill:#2196F3
+ style LOG fill:#FF9800
+ style EXC fill:#f44336
+ style CFG fill:#9C27B0
+ style KV fill:#9C27B0
+ style STATE fill:#607D8B
+```
+
+---
+
+## Module Structure
+
+```
+core/
+├── __init__.py # Public API exports
+├── app_state.py # Application state container
+├── auth.py # JWT authentication
+├── config_loader.py # Azure App Config + Key Vault loader
+├── database.py # Database pool protocol (stub)
+├── exceptions.py # Custom exception hierarchy
+├── keyvault.py # Azure Key Vault client
+├── logger.py # Structured JSON logging
+├── redis.py # Redis client protocol (stub)
+├── telemetry.py # OpenTelemetry instrumentation
+└── models/
+ ├── __init__.py # Model exports
+ ├── user.py # User, UserContext, RefreshToken, etc.
+ ├── session.py # SessionState, SessionMode
+ ├── interaction.py # InteractionTurn, ConversationHistory
+ └── protocol.py # BinaryFrame, ControlMessage
+```
+
+---
+
+## Components
+
+### 1. Authentication (`auth.py`)
+
+JWT-based authentication with RS256 signing, token refresh with rotation, and blacklist management.
+
+```mermaid
+sequenceDiagram
+ participant Client
+ participant JWTAuth
+ participant Redis
+ participant Postgres
+
+ Client->>JWTAuth: validate_token(token)
+ JWTAuth->>JWTAuth: Verify signature
+ JWTAuth->>Redis: Check blacklist
+ Redis-->>JWTAuth: Not blacklisted
+ JWTAuth-->>Client: Claims
+
+ Client->>JWTAuth: extract_user_context(token)
+ JWTAuth->>Redis: Check cache
+ alt Cache hit
+ Redis-->>JWTAuth: Cached context
+ else Cache miss
+ JWTAuth->>JWTAuth: Build from claims
+ JWTAuth->>Redis: Cache context
+ end
+ JWTAuth-->>Client: UserContext
+```
+
+**Key Class: `JWTAuth`**
+
+| Method | Description |
+| --------------------------------------- | --------------------------------- |
+| `validate_token(token)` | Validate JWT and return claims |
+| `extract_user_context(token)` | Extract UserContext with caching |
+| `generate_tokens(user)` | Generate access + refresh tokens |
+| `refresh_tokens(refresh_token)` | Refresh with rotation |
+| `blacklist_token(jti, user_id, reason)` | Add token to blacklist |
+| `logout(token)` | Full logout (blacklist + cleanup) |
+
+**Configuration:**
+
+| Parameter | Default | Description |
+| ------------------- | --------------- | ---------------------- |
+| `algorithm` | RS256 | JWT signing algorithm |
+| `access_token_ttl` | 900 (15 min) | Access token lifetime |
+| `refresh_token_ttl` | 604800 (7 days) | Refresh token lifetime |
+| `cache_ttl_seconds` | 300 (5 min) | User context cache TTL |
+
+---
+
+### 2. Telemetry (`telemetry.py`)
+
+OpenTelemetry integration for distributed tracing and metrics.
+
+**Key Class: `TelemetryManager`**
+
+| Method | Description |
+| ---------------------------------------- | --------------------------- |
+| `get_tracer(name)` | Get tracer for module |
+| `get_meter(name)` | Get meter for metrics |
+| `create_span(name, attributes)` | Create span with attributes |
+| `record_metric(name, value, tags, type)` | Record metric |
+| `shutdown()` | Flush and close exporters |
+
+**Predefined Metrics (`Metrics` class):**
+
+| Metric | Type | Description |
+| ----------------------- | --------- | ---------------------------- |
+| `REQUEST_DURATION` | Histogram | Request latency in seconds |
+| `REQUESTS_TOTAL` | Counter | Total request count |
+| `WEBSOCKET_CONNECTIONS` | Gauge | Active WebSocket connections |
+| `LLM_TTFT` | Histogram | LLM time to first token |
+| `VLM_INFERENCE` | Histogram | VLM inference latency |
+| `DB_QUERY_DURATION` | Histogram | Database query latency |
+| `AUTH_LOGIN_TOTAL` | Counter | Total login attempts |
+
+---
+
+### 3. Logger (`logger.py`)
+
+Structured JSON logging with automatic trace_id propagation.
+
+**Log Output Format:**
+
+```json
+{
+ "timestamp": "2024-01-15T10:30:00.000Z",
+ "level": "INFO",
+ "service": "core.auth",
+ "message": "User logged in",
+ "trace_id": "abc-123-def",
+ "user_id": "user-uuid"
+}
+```
+
+**Key Functions:**
+
+| Function | Description |
+| ------------------------------------ | ----------------------------- |
+| `setup_logging(level, service_name)` | Initialize structured logging |
+| `get_logger(name)` | Get logger for module |
+| `set_trace_id(trace_id)` | Set trace_id in context |
+| `get_trace_id()` | Get current trace_id |
+| `TraceContext(trace_id)` | Context manager for trace_id |
+
+---
+
+### 4. Exceptions (`exceptions.py`)
+
+Custom exception hierarchy with trace context.
+
+```mermaid
+classDiagram
+ NeroSpatialException <|-- AuthenticationError
+ NeroSpatialException <|-- AuthorizationError
+ NeroSpatialException <|-- SessionExpiredError
+ NeroSpatialException <|-- SessionNotFoundError
+ NeroSpatialException <|-- VLMTimeoutError
+ NeroSpatialException <|-- LLMProviderError
+ NeroSpatialException <|-- CircuitBreakerOpenError
+ NeroSpatialException <|-- DatabaseError
+ NeroSpatialException <|-- RateLimitExceeded
+ NeroSpatialException <|-- ValidationError
+
+ class NeroSpatialException {
+ +str message
+ +str trace_id
+ +UUID user_id
+ +dict context
+ }
+```
+
+| Exception | When Raised |
+| ------------------------- | --------------------------------- |
+| `AuthenticationError` | Invalid/expired/blacklisted token |
+| `AuthorizationError` | User status not ACTIVE |
+| `SessionExpiredError` | Session TTL exceeded |
+| `SessionNotFoundError` | Session not in Redis |
+| `VLMTimeoutError` | VLM inference timeout |
+| `LLMProviderError` | LLM API failure |
+| `CircuitBreakerOpenError` | Provider circuit breaker open |
+| `DatabaseError` | Database operation failure |
+| `RateLimitExceeded` | Rate limit exceeded |
+| `ValidationError` | Input validation failure |
+
+---
+
+### 5. Configuration (`config_loader.py`)
+
+Azure App Configuration + Key Vault integration with environment-based validation.
+
+```mermaid
+flowchart TD
+ START[Start] --> ENV{Environment?}
+ ENV -->|production/staging| VALIDATE[Validate Azure URLs]
+ ENV -->|development| FALLBACK[Use .env fallback]
+
+ VALIDATE -->|Missing| ERROR[Raise ValidationError]
+ VALIDATE -->|OK| AZURE[Load from Azure]
+
+ AZURE --> RETRY{Retry Logic}
+ RETRY -->|Success| MERGE[Merge with Settings]
+ RETRY -->|Fail 3x| ERROR
+
+ FALLBACK --> DONE[Return empty dict]
+ MERGE --> DONE
+```
+
+**Environment Validation Rules:**
+
+| Environment | Azure App Config | Azure Key Vault | Credentials |
+| ----------- | ---------------- | --------------- | ----------- |
+| production | Required | Required | Required |
+| staging | Required | Required | Required |
+| development | Optional | Optional | Optional |
+
+---
+
+### 6. Key Vault (`keyvault.py`)
+
+Azure Key Vault client with caching and environment fallback.
+
+**Key Class: `KeyVaultClient`**
+
+| Method | Description |
+| -------------------------------------- | ---------------------------- |
+| `get_secret(name, default, use_cache)` | Get secret with caching |
+| `set_secret(name, value)` | Set secret (admin) |
+| `delete_secret(name)` | Delete secret (admin) |
+| `clear_cache(name)` | Clear cache |
+| `is_available()` | Check if Key Vault connected |
+
+**Secret Fallback Order** (sketched below):
+
+1. Check in-memory cache
+2. Fetch from Azure Key Vault
+3. Fallback to environment variable (e.g., `jwt-public-key` -> `JWT_PUBLIC_KEY`)
+4. Return default value
+
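+A hedged sketch of that chain; `SecretClient.get_secret(...).value` is the real azure-keyvault-secrets call, while the cache dict, client attribute, and env-variable mapping are assumptions mirroring `KeyVaultClient.get_secret`:
+
+```python
+import os
+
+class KeyVaultClientSketch:
+    """Illustrative only: mirrors KeyVaultClient.get_secret's fallback order."""
+
+    def __init__(self, client=None):
+        self._client = client              # azure.keyvault.secrets.SecretClient or None
+        self._cache: dict[str, str] = {}
+
+    def get_secret(self, name: str, default: str | None = None, use_cache: bool = True):
+        if use_cache and name in self._cache:      # 1. in-memory cache
+            return self._cache[name]
+        if self._client is not None:               # 2. Azure Key Vault
+            try:
+                value = self._client.get_secret(name).value
+                self._cache[name] = value
+                return value
+            except Exception:
+                pass                               # fall through to env var
+        env_name = name.upper().replace("-", "_")  # 3. jwt-public-key -> JWT_PUBLIC_KEY
+        return os.environ.get(env_name, default)   # 4. default value
+```
+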
+---
+
+### 7. Application State (`app_state.py`)
+
+Centralized state container for application services.
+
+**Key Class: `AppState`**
+
+| Field | Type | Description |
+| ---------------- | ---------------- | ------------------------- |
+| `settings` | Settings | Application configuration |
+| `db_pool` | DatabasePool | Database connection pool |
+| `redis_client` | RedisClient | Redis client |
+| `jwt_auth` | JWTAuth | JWT authentication |
+| `telemetry` | TelemetryManager | Telemetry manager |
+| `key_vault` | KeyVaultClient | Key Vault client |
+| `started_at` | datetime | Startup timestamp |
+| `is_ready` | bool | Ready for traffic |
+| `startup_errors` | list[str] | Startup error messages |
+
+**Protocols Defined:**
+
+- `DatabasePool`: Interface for database connection pool
+- `RedisClient`: Interface for Redis client
+
+These protocols allow the memory module to provide real implementations while the core module works against lightweight stubs for testing.
+
+---
+
+### 8. Models (`models/`)
+
+Pydantic models organized by domain.
+
+#### User Models (`user.py`)
+
+| Model | Description |
+| --------------------- | ------------------------------------ |
+| `User` | Full user profile (PostgreSQL) |
+| `UserContext` | JWT-extracted context (Redis cached) |
+| `RefreshToken` | Refresh token record |
+| `TokenBlacklistEntry` | Blacklisted token |
+| `AuditLog` | Audit trail entry |
+
+#### Session Models (`session.py`)
+
+| Model | Description |
+| -------------- | ---------------------- |
+| `SessionState` | Session state (Redis) |
+| `SessionMode` | ACTIVE or PASSIVE mode |
+
+#### Interaction Models (`interaction.py`)
+
+| Model | Description |
+| --------------------- | --------------------------- |
+| `InteractionTurn` | Single conversation turn |
+| `ConversationHistory` | Conversation context window |
+
+#### Protocol Models (`protocol.py`)
+
+| Model | Description |
+| ---------------- | -------------------------- |
+| `BinaryFrame` | WebSocket binary frame |
+| `ControlMessage` | WebSocket control message |
+| `StreamType` | Audio/Video/Control stream |
+| `FrameFlags` | Frame metadata flags |
+
+---
+
+## Public API
+
+All public exports from `core/__init__.py`:
+
+```python
+from core import (
+ # App State
+ AppState, DatabasePool, RedisClient,
+
+ # Config
+ ConfigLoader,
+
+ # Auth
+ JWTAuth,
+
+ # Key Vault
+ KeyVaultClient,
+
+ # Logger
+ get_logger, setup_logging, set_trace_id, get_trace_id, TraceContext,
+
+ # Telemetry
+ TelemetryManager, Metrics,
+
+ # Exceptions
+ NeroSpatialException, AuthenticationError, AuthorizationError,
+ SessionExpiredError, SessionNotFoundError, VLMTimeoutError,
+ LLMProviderError, CircuitBreakerOpenError, DatabaseError,
+ RateLimitExceeded, ValidationError,
+
+ # Enums
+ UserStatus, OAuthProvider, TokenRevocationReason, AuditAction,
+ SessionMode, ControlMessageType, StreamType, FrameFlags,
+
+ # Models
+ User, UserContext, RefreshToken, TokenBlacklistEntry, AuditLog,
+ SessionState, InteractionTurn, ConversationHistory,
+ ControlMessage, BinaryFrame,
+)
+```
+
+---
+
+## Test Coverage
+
+| Test File | Component | Coverage |
+| ----------------------- | ---------------- | ------------------------------------------------ |
+| `test_auth.py` | JWTAuth | Token validation, generation, refresh, blacklist |
+| `test_telemetry.py` | TelemetryManager | Init, tracing, metrics, shutdown |
+| `test_exceptions.py` | All exceptions | Creation, string formatting, context |
+| `test_models.py` | All models | Validation, serialization, methods |
+| `test_keyvault.py` | KeyVaultClient | Get/set secrets, caching, fallback |
+| `test_config_loader.py` | ConfigLoader | Environment validation, loading |
+| `test_app_state.py` | AppState | State management, cleanup |
+
+**Run Tests:**
+
+```bash
+uv run pytest tests/core/ -v
+```
+
+---
+
+## Important: SRE/DevOps Requirements
+
+### Required Infrastructure
+
+| Service | Port | Purpose | Required In |
+| ---------------- | ----- | -------------------------- | ------------------ |
+| PostgreSQL | 5432 | User data, tokens | All environments |
+| Redis | 6379 | Cache, sessions, blacklist | All environments |
+| Jaeger/OTLP | 4317 | Telemetry collection | Production/Staging |
+| Azure Key Vault | HTTPS | Secret management | Production/Staging |
+| Azure App Config | HTTPS | Configuration | Production/Staging |
+
+### Required Environment Variables
+
+#### Bootstrap (Always Required)
+
+```bash
+# Environment
+ENVIRONMENT=production|staging|development
+
+# Azure (Required for production/staging)
+AZURE_KEY_VAULT_URL=https://your-vault-name.vault.azure.net/
+AZURE_APP_CONFIG_URL=https://your-config-name.azconfig.io
+AZURE_TENANT_ID=
+AZURE_CLIENT_ID=
+AZURE_CLIENT_SECRET=
+```
+
+#### Application Settings
+
+```bash
+# Application
+APP_NAME=NeroSpatial Backend
+APP_VERSION=0.1.0
+LOG_LEVEL=INFO
+
+# PostgreSQL
+POSTGRES_HOST=localhost
+POSTGRES_PORT=5432
+POSTGRES_DB=nerospatial
+POSTGRES_USER=nerospatial
+POSTGRES_PASSWORD=
+POSTGRES_POOL_MIN=5
+POSTGRES_POOL_MAX=20
+
+# Redis
+REDIS_HOST=localhost
+REDIS_PORT=6379
+REDIS_DB=0
+REDIS_PASSWORD=
+
+# JWT (Keys from Key Vault)
+JWT_ALGORITHM=RS256
+JWT_ACCESS_TOKEN_TTL=900
+JWT_REFRESH_TOKEN_TTL=604800
+JWT_CACHE_TTL=300
+JWT_PRIVATE_KEY=
+JWT_PUBLIC_KEY=
+
+# OpenTelemetry
+OTEL_ENDPOINT=http://jaeger:4317
+OTEL_ENABLE_TRACING=true
+OTEL_ENABLE_METRICS=true
+```
+
+### Required Secrets in Azure Key Vault
+
+| Secret Name | Description |
+| ------------------- | ----------------------- |
+| `postgres-password` | PostgreSQL password |
+| `redis-password` | Redis password |
+| `jwt-private-key` | RS256 private key (PEM) |
+| `jwt-public-key` | RS256 public key (PEM) |
+
+### Health Check Endpoints
+
+| Endpoint | Purpose | Returns |
+| ------------- | ----------------- | -------------------------- |
+| `GET /health` | Full health check | Status + dependency checks |
+| `GET /ready` | Readiness probe | 200 when ready |
+| `GET /live` | Liveness probe | 200 if process alive |
+
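+A sketch of the readiness probe (storing `AppState` on `request.app.state` is an assumption, not a documented attribute):
+
+```python
+from fastapi import APIRouter, Request, Response
+
+router = APIRouter()
+
+@router.get("/ready")
+async def ready(request: Request) -> Response:
+    state = request.app.state.app_state  # hypothetical: AppState stored at startup
+    return Response(status_code=200 if state.is_ready else 503)
+```
+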
+### Startup Sequence
+
+```mermaid
+sequenceDiagram
+ participant App
+ participant ConfigLoader
+ participant KeyVault
+ participant Database
+ participant Redis
+
+ App->>ConfigLoader: Load configuration
+ ConfigLoader->>ConfigLoader: Validate environment
+ ConfigLoader->>KeyVault: Load secrets
+ KeyVault-->>ConfigLoader: Secrets
+ ConfigLoader-->>App: Settings
+
+ App->>Database: Create pool
+ App->>Redis: Create client
+ App->>App: Initialize JWTAuth
+ App->>App: Initialize Telemetry
+
+ App->>Database: Verify connection
+ App->>Redis: Verify connection
+
+ alt All OK
+ App->>App: Mark ready
+ else Failure
+ App->>App: Record error, exit
+ end
+```
+
+### Graceful Shutdown
+
+On SIGTERM/SIGINT:
+
+1. Stop accepting new connections
+2. Close Redis connections
+3. Close database pool
+4. Shutdown telemetry (flush exporters)
+5. Exit
+
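+The server stops accepting new connections before the lifespan shutdown code
+runs; steps 2-4 are what `AppState.cleanup()` plausibly does. A sketch
+(illustrative; attribute names follow `tests/conftest.py`, and the close
+methods are assumptions):
+
+```python
+class AppState:
+    async def cleanup(self) -> None:
+        """Release resources in reverse order of acquisition; tolerate None."""
+        if self.redis_client is not None:
+            await self.redis_client.aclose()  # assumed redis-py close method
+        if self.db_pool is not None:
+            await self.db_pool.close()  # asyncpg-style pool close
+        if self.telemetry is not None:
+            self.telemetry.shutdown()  # flush OTLP exporters before exit
+```
+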
+---
+
+## Usage Examples
+
+### Initialize Logging
+
+```python
+from core import setup_logging, get_logger, TraceContext
+
+setup_logging(level="INFO", service_name="my-service")
+logger = get_logger(__name__)
+
+with TraceContext("request-123"):
+ logger.info("Processing request") # trace_id auto-included
+```
+
+### JWT Authentication
+
+```python
+from core import JWTAuth, AuthenticationError
+
+jwt_auth = JWTAuth(
+ public_key=public_key_pem,
+ private_key=private_key_pem,
+ redis_client=redis,
+ postgres_client=postgres,
+)
+
+# Validate token
+try:
+ claims = await jwt_auth.validate_token(token)
+ user_ctx = await jwt_auth.extract_user_context(token)
+except AuthenticationError as e:
+ # Handle invalid token
+ pass
+```
+
+### Configuration Loading
+
+```python
+from config import Settings
+from core import ConfigLoader, ValidationError
+
+bootstrap = Settings() # Load from .env
+loader = ConfigLoader(bootstrap)
+
+try:
+ config = await loader.load()
+ settings = Settings(**{**bootstrap.model_dump(), **config})
+except ValidationError as e:
+ # Handle missing Azure config in production
+ pass
+```
+
+### Recording Metrics
+
+```python
+from core import TelemetryManager, Metrics
+
+telemetry = TelemetryManager(
+ service_name="gateway",
+ otlp_endpoint="http://jaeger:4317",
+)
+
+# Record latency
+telemetry.record_metric(
+ Metrics.REQUEST_DURATION,
+ 0.150, # 150ms
+ tags={"endpoint": "/health"},
+ metric_type="histogram",
+)
+```
+
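+Spans go through the same manager. A hedged example -- `create_span` exists on
+`TelemetryManager` (it is mocked in `tests/conftest.py`), but the keyword
+argument shown here is an assumption:
+
+```python
+# Continuing the example above; "attributes" is an assumed parameter name.
+with telemetry.create_span("db.get_user", attributes={"endpoint": "/health"}):
+    ...  # traced work goes here
+```
+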
+---
+
+## Migration Notes
+
+### From Plan to Implementation
+
+The following changes were made from the original component plan:
+
+1. **Models split into submodules**: `models.py` -> `models/` directory
+2. **Database/Redis as protocols**: Real implementations in `memory/` module
+3. **Added `app_state.py`**: Centralized state container
+4. **Added `config_loader.py`**: Azure integration
+5. **Added `keyvault.py`**: Secret management
+
+### Future Considerations
+
+- Real database pool implementation in `memory/` module
+- Real Redis client implementation in `memory/` module
+- RBAC support in `JWTAuth` (currently status-based only)
+- Metric aggregation and alerting rules
+
+---
+
+## Changelog
+
+### v1.0 (Current)
+
+- Initial production release
+- JWT authentication with RS256
+- OpenTelemetry integration
+- Azure Key Vault + App Config
+- Structured JSON logging
+- Full exception hierarchy
+- Pydantic models for all domains
diff --git a/main.py b/main.py
index 449160f..8f95a3d 100644
--- a/main.py
+++ b/main.py
@@ -2,35 +2,154 @@
NeroSpatial Backend - FastAPI Application
Main entry point for the NeroSpatial backend API.
+Production-ready startup with Azure configuration, connection management,
+and graceful shutdown.
"""
-from fastapi import FastAPI
+from contextlib import asynccontextmanager
+
+from fastapi import FastAPI, Request
from fastapi.responses import JSONResponse
-from config import settings
+from api.health import router as health_router
+from config import Settings
+from core import (
+ JWTAuth,
+ KeyVaultClient,
+ TelemetryManager,
+ ValidationError,
+ create_database_pool,
+ create_redis_client,
+ get_logger,
+ setup_logging,
+ verify_database_connection,
+ verify_redis_connection,
+)
+from core.app_state import AppState
+from core.config_loader import ConfigLoader
+
+logger = get_logger(__name__)
+
+
+@asynccontextmanager
+async def lifespan(app: FastAPI):
+ """Production-ready application lifespan."""
+ state = None
+ try:
+ # === PHASE 1: Load Configuration ===
+ logger.info("Phase 1: Loading configuration...")
+ bootstrap = Settings()
+ loader = ConfigLoader(bootstrap)
+ config_dict = await loader.load()
+ settings = Settings(**{**bootstrap.model_dump(), **config_dict})
+
+ # === PHASE 2: Setup Logging & Telemetry ===
+ logger.info("Phase 2: Initializing logging and telemetry...")
+ setup_logging(level=settings.log_level, service_name=settings.app_name)
+ telemetry = TelemetryManager(
+ service_name=settings.app_name,
+ otlp_endpoint=settings.otel_endpoint,
+ environment=settings.environment,
+ enable_tracing=settings.otel_enable_tracing,
+ enable_metrics=settings.otel_enable_metrics,
+ )
+
+ # === PHASE 3: Initialize Key Vault ===
+ logger.info("Phase 3: Connecting to Key Vault...")
+ key_vault = KeyVaultClient(
+ vault_url=settings.azure_key_vault_url,
+ tenant_id=settings.azure_tenant_id,
+ client_id=settings.azure_client_id,
+ client_secret=settings.azure_client_secret,
+ )
+
+ # Load secrets if not already loaded from App Config
+ if not settings.postgres_password:
+ logger.info("Loading secrets from Key Vault...")
+ postgres_password = await key_vault.get_secret("postgres-password")
+ redis_password = await key_vault.get_secret("redis-password")
+ jwt_private_key = await key_vault.get_secret("jwt-private-key")
+ jwt_public_key = await key_vault.get_secret("jwt-public-key")
+
+ settings = settings.model_copy(
+ update={
+ "postgres_password": postgres_password,
+ "redis_password": redis_password,
+ "jwt_private_key": jwt_private_key,
+ "jwt_public_key": jwt_public_key,
+ }
+ )
+
+ # === PHASE 4: Initialize Connections ===
+ logger.info("Phase 4: Creating database and Redis connections...")
+ db_pool = await create_database_pool(settings)
+ redis_client = await create_redis_client(settings)
+
+ # === PHASE 5: Initialize Auth ===
+ logger.info("Phase 5: Initializing authentication...")
+ jwt_auth = JWTAuth(
+ private_key=settings.jwt_private_key,
+ public_key=settings.jwt_public_key,
+ algorithm=settings.jwt_algorithm,
+ access_token_ttl=settings.jwt_access_token_ttl,
+ refresh_token_ttl=settings.jwt_refresh_token_ttl,
+ cache_ttl_seconds=settings.jwt_cache_ttl,
+ redis_client=redis_client,
+ postgres_client=db_pool,
+ )
+
+ # === PHASE 6: Verify Connections ===
+ logger.info("Phase 6: Verifying connections...")
+ if not await verify_database_connection(db_pool):
+ raise ValidationError("Database connection verification failed")
+ if not await verify_redis_connection(redis_client):
+ raise ValidationError("Redis connection verification failed")
+
+ # === PHASE 7: Create App State ===
+ logger.info("Phase 7: Creating application state...")
+ state = AppState(
+ settings=settings,
+ db_pool=db_pool,
+ redis_client=redis_client,
+ jwt_auth=jwt_auth,
+ telemetry=telemetry,
+ key_vault=key_vault,
+ )
+ state.mark_ready()
+ app.state.app_state = state
+
+ logger.info(
+ f"Startup complete: {settings.app_name} v{settings.app_version} "
+ f"(environment: {settings.environment})"
+ )
+
+    except Exception as e:
+        logger.critical(f"Startup failed: {e}")
+        if state:
+            await state.cleanup()
+        raise
+
+    yield
+
+    # === SHUTDOWN ===
+    logger.info("Shutting down...")
+    await state.cleanup()
+    logger.info("Shutdown complete")
+
app = FastAPI(
- title=settings.app_name,
- version=settings.app_version,
- debug=settings.debug,
+ title="NeroSpatial Backend",
+ version="0.1.0",
+ lifespan=lifespan,
)
+# Register health router
+app.include_router(health_router)
-@app.get("/health")
-async def health_check():
- """
- Health check endpoint.
- Returns:
- JSONResponse: Status of the service
- """
- return JSONResponse(
- content={
- "status": "healthy",
- "service": settings.app_name,
- "version": settings.app_version,
- }
- )
+def get_app_state(request: Request) -> AppState:
+ """Dependency to get application state."""
+ return request.app.state.app_state
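+
+
+# Example (hypothetical endpoint) of using the dependency:
+#
+#   from fastapi import Depends
+#
+#   @app.get("/example")
+#   async def example(state: AppState = Depends(get_app_state)):
+#       return {"ready": state.is_ready}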
@app.get("/helloworld")
@@ -44,7 +163,7 @@ async def hello_world():
return JSONResponse(
content={
"message": "Hello, World!",
- "service": settings.app_name,
+ "service": "NeroSpatial Backend",
}
)
@@ -54,7 +173,7 @@ async def hello_world():
uvicorn.run(
"main:app",
- host=settings.host,
- port=settings.port,
- reload=settings.debug,
+ host="0.0.0.0",
+ port=8000,
+ reload=False,
)
diff --git a/pyproject.toml b/pyproject.toml
index 2987753..dc383f7 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -6,12 +6,23 @@ requires-python = ">=3.11"
dependencies = [
"fastapi>=0.104.0",
"uvicorn[standard]>=0.24.0",
- "pydantic>=2.5.0",
+ "pydantic[email]>=2.5.0",
"pydantic-settings>=2.1.0",
"python-dotenv>=1.0.0",
"azure-core>=1.36.0",
"azure-identity>=1.25.0",
"azure-keyvault-secrets>=4.10.0",
+ "azure-appconfiguration>=1.5.0",
+ # JWT authentication
+ "pyjwt>=2.8.0",
+ "cryptography>=41.0.0",
+ # OpenTelemetry
+ "opentelemetry-api>=1.20.0",
+ "opentelemetry-sdk>=1.20.0",
+ "opentelemetry-exporter-otlp-proto-grpc>=1.20.0",
+ # Database clients (for auth and future memory module)
+    # redis-py asyncio client (aioredis is archived and fails to import on Python 3.11+)
+    "redis>=4.2.0",
+ "asyncpg>=0.29.0",
]
[project.optional-dependencies]
diff --git a/scripts/generate-keys.sh b/scripts/generate-keys.sh
new file mode 100755
index 0000000..a0c7560
--- /dev/null
+++ b/scripts/generate-keys.sh
@@ -0,0 +1,42 @@
+#!/bin/bash
+# =============================================================================
+# Generate JWT RS256 Key Pair
+# =============================================================================
+# Generates RSA 2048-bit private and public keys for JWT signing/verification
+# =============================================================================
+
+set -e
+
+SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
+PROJECT_ROOT="$(cd "$SCRIPT_DIR/.." && pwd)"
+KEYS_DIR="$PROJECT_ROOT/keys"
+
+echo "=== Generating JWT RS256 Key Pair ==="
+
+# Create keys directory if it doesn't exist
+mkdir -p "$KEYS_DIR"
+
+# Generate private key
+echo "Generating private key..."
+openssl genrsa -out "$KEYS_DIR/private.pem" 2048
+
+# Generate public key from private key
+echo "Generating public key..."
+openssl rsa -in "$KEYS_DIR/private.pem" -pubout -out "$KEYS_DIR/public.pem"
+
+# Set appropriate permissions
+chmod 600 "$KEYS_DIR/private.pem"
+chmod 644 "$KEYS_DIR/public.pem"
+
+echo ""
+echo "=== Keys Generated Successfully ==="
+echo "Private key: $KEYS_DIR/private.pem"
+echo "Public key: $KEYS_DIR/public.pem"
+echo ""
+echo "Next steps:"
+echo "1. Store these keys in Azure Key Vault:"
+echo "   az keyvault secret set --vault-name <vault-name> --name jwt-private-key --file $KEYS_DIR/private.pem"
+echo "   az keyvault secret set --vault-name <vault-name> --name jwt-public-key --file $KEYS_DIR/public.pem"
+echo ""
+echo "2. For local development, you can use these keys directly in .env"
+echo " (but prefer Key Vault for production)"
diff --git a/scripts/init-db.sql b/scripts/init-db.sql
new file mode 100644
index 0000000..7e03707
--- /dev/null
+++ b/scripts/init-db.sql
@@ -0,0 +1,154 @@
+-- =============================================================================
+-- NeroSpatial Backend - Database Initialization Script
+-- =============================================================================
+-- This script runs automatically when PostgreSQL container starts for the first time
+-- Located at: /docker-entrypoint-initdb.d/init-db.sql
+-- =============================================================================
+
+-- =============================================================================
+-- Enum Types
+-- =============================================================================
+
+CREATE TYPE user_status AS ENUM (
+ 'active',
+ 'pending_verification',
+ 'suspended',
+ 'blacklisted',
+ 'locked'
+);
+
+CREATE TYPE oauth_provider AS ENUM (
+ 'google',
+ 'github',
+ 'microsoft'
+);
+
+CREATE TYPE token_revocation_reason AS ENUM (
+ 'logout',
+ 'refresh',
+ 'security',
+ 'admin',
+ 'expired'
+);
+
+CREATE TYPE audit_action AS ENUM (
+ 'login',
+ 'logout',
+ 'token_refresh',
+ 'password_change',
+ 'profile_update',
+ 'account_delete',
+ 'status_change',
+ 'rate_limit_exceeded'
+);
+
+-- =============================================================================
+-- Tables
+-- =============================================================================
+
+-- Users table
+CREATE TABLE IF NOT EXISTS users (
+ user_id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
+ email VARCHAR(255) UNIQUE NOT NULL,
+ name VARCHAR(255),
+ oauth_provider oauth_provider NOT NULL,
+ oauth_sub VARCHAR(255),
+ status user_status NOT NULL DEFAULT 'active',
+ created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
+ updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
+ last_login TIMESTAMPTZ,
+ deleted_at TIMESTAMPTZ,
+ picture_url VARCHAR(500),
+ locale VARCHAR(10) DEFAULT 'en',
+ metadata JSONB DEFAULT '{}',
+ schema_version VARCHAR(10) NOT NULL DEFAULT '1.0',
+
+ CONSTRAINT email_format CHECK (email ~* '^[A-Za-z0-9._%+-]+@[A-Za-z0-9.-]+\.[A-Za-z]{2,}$')
+);
+
+-- Refresh tokens table
+CREATE TABLE IF NOT EXISTS refresh_tokens (
+ token_id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
+ user_id UUID NOT NULL REFERENCES users(user_id) ON DELETE CASCADE,
+ token_hash VARCHAR(64) NOT NULL,
+ expires_at TIMESTAMPTZ NOT NULL,
+ created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
+ rotated_at TIMESTAMPTZ,
+ previous_token_id UUID REFERENCES refresh_tokens(token_id),
+ ip_address INET,
+ user_agent VARCHAR(500)
+);
+
+-- Token blacklist table
+CREATE TABLE IF NOT EXISTS token_blacklist (
+ token_id VARCHAR(255) PRIMARY KEY,
+ user_id UUID NOT NULL REFERENCES users(user_id) ON DELETE CASCADE,
+ revoked_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
+ expires_at TIMESTAMPTZ NOT NULL,
+ reason token_revocation_reason,
+ ip_address INET
+);
+
+-- Audit logs table
+CREATE TABLE IF NOT EXISTS audit_logs (
+ log_id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
+ user_id UUID REFERENCES users(user_id) ON DELETE SET NULL,
+ action audit_action NOT NULL,
+ details JSONB DEFAULT '{}',
+ ip_address INET,
+ user_agent VARCHAR(500),
+ created_at TIMESTAMPTZ NOT NULL DEFAULT NOW()
+);
+
+-- =============================================================================
+-- Indexes
+-- =============================================================================
+
+CREATE INDEX IF NOT EXISTS idx_users_email ON users(email);
+CREATE INDEX IF NOT EXISTS idx_users_status ON users(status) WHERE deleted_at IS NULL;
+CREATE INDEX IF NOT EXISTS idx_users_oauth ON users(oauth_provider, oauth_sub);
+CREATE INDEX IF NOT EXISTS idx_refresh_tokens_user ON refresh_tokens(user_id);
+CREATE INDEX IF NOT EXISTS idx_refresh_tokens_hash ON refresh_tokens(token_hash);
+CREATE INDEX IF NOT EXISTS idx_refresh_tokens_expires ON refresh_tokens(expires_at);
+CREATE INDEX IF NOT EXISTS idx_token_blacklist_user ON token_blacklist(user_id);
+CREATE INDEX IF NOT EXISTS idx_token_blacklist_expires ON token_blacklist(expires_at);
+CREATE INDEX IF NOT EXISTS idx_audit_logs_user ON audit_logs(user_id);
+CREATE INDEX IF NOT EXISTS idx_audit_logs_created ON audit_logs(created_at DESC);
+
+-- =============================================================================
+-- Triggers
+-- =============================================================================
+
+-- Auto-update trigger for users.updated_at
+CREATE OR REPLACE FUNCTION update_updated_at()
+RETURNS TRIGGER AS $$
+BEGIN
+ NEW.updated_at = NOW();
+ RETURN NEW;
+END;
+$$ LANGUAGE plpgsql;
+
+DROP TRIGGER IF EXISTS users_updated_at ON users;
+CREATE TRIGGER users_updated_at
+ BEFORE UPDATE ON users
+ FOR EACH ROW EXECUTE FUNCTION update_updated_at();
+
+-- =============================================================================
+-- Cleanup Functions
+-- =============================================================================
+
+-- Cleanup function for expired tokens (run via pg_cron or scheduled job)
+CREATE OR REPLACE FUNCTION cleanup_expired_tokens()
+RETURNS void AS $$
+BEGIN
+ DELETE FROM refresh_tokens WHERE expires_at < NOW();
+ DELETE FROM token_blacklist WHERE expires_at < NOW();
+END;
+$$ LANGUAGE plpgsql;
+
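+-- Example schedule (assumes the pg_cron extension is installed and enabled):
+--   SELECT cron.schedule('cleanup-expired-tokens', '0 3 * * *',
+--                        'SELECT cleanup_expired_tokens()');
+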
+-- =============================================================================
+-- Permissions
+-- =============================================================================
+
+GRANT ALL PRIVILEGES ON ALL TABLES IN SCHEMA public TO nerospatial;
+GRANT ALL PRIVILEGES ON ALL SEQUENCES IN SCHEMA public TO nerospatial;
diff --git a/scripts/setup-keyvault.sh b/scripts/setup-keyvault.sh
new file mode 100755
index 0000000..b29fca8
--- /dev/null
+++ b/scripts/setup-keyvault.sh
@@ -0,0 +1,141 @@
+#!/bin/bash
+# =============================================================================
+# Azure Key Vault Setup Script
+# =============================================================================
+# Automates Azure Key Vault creation and secret upload
+# =============================================================================
+
+set -e
+
+# Colors for output
+RED='\033[0;31m'
+GREEN='\033[0;32m'
+YELLOW='\033[1;33m'
+NC='\033[0m' # No Color
+
+echo "=== Azure Key Vault Setup ==="
+echo ""
+
+# Check if Azure CLI is installed
+if ! command -v az &> /dev/null; then
+ echo -e "${RED}Error: Azure CLI not found. Please install it first.${NC}"
+ echo "Visit: https://docs.microsoft.com/cli/azure/install-azure-cli"
+ exit 1
+fi
+
+# Check if logged in
+if ! az account show &> /dev/null; then
+ echo -e "${YELLOW}Not logged in to Azure. Please run: az login${NC}"
+ exit 1
+fi
+
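+# Check if jq is installed (used below to parse the Service Principal JSON)
+if ! command -v jq &> /dev/null; then
+    echo -e "${RED}Error: jq not found. Please install it first.${NC}"
+    exit 1
+fi
+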
+# Get input from user
+read -p "Key Vault name (e.g., nerospatial-dev): " VAULT_NAME
+read -p "Resource group name: " RESOURCE_GROUP
+read -p "Location (e.g., eastus): " LOCATION
+
+# Create resource group if it doesn't exist
+echo ""
+echo "Checking resource group..."
+if ! az group show --name "$RESOURCE_GROUP" &> /dev/null; then
+ echo "Creating resource group: $RESOURCE_GROUP"
+ az group create --name "$RESOURCE_GROUP" --location "$LOCATION"
+else
+ echo "Resource group exists: $RESOURCE_GROUP"
+fi
+
+# Create Key Vault if it doesn't exist
+echo ""
+echo "Checking Key Vault..."
+if ! az keyvault show --name "$VAULT_NAME" --resource-group "$RESOURCE_GROUP" &> /dev/null; then
+ echo "Creating Key Vault: $VAULT_NAME"
+    # Soft delete is always enabled on new vaults (the old --enable-soft-delete
+    # flag was removed from recent Azure CLI releases). RBAC mode is required
+    # for the "Key Vault Secrets User" role assignment created below.
+    az keyvault create \
+        --name "$VAULT_NAME" \
+        --resource-group "$RESOURCE_GROUP" \
+        --location "$LOCATION" \
+        --sku standard \
+        --enable-rbac-authorization true \
+        --enable-purge-protection false
+ echo -e "${GREEN}Key Vault created successfully${NC}"
+else
+ echo "Key Vault exists: $VAULT_NAME"
+fi
+
+# Create Service Principal
+echo ""
+echo "Creating Service Principal for Key Vault access..."
+SP_NAME="nerospatial-backend-${VAULT_NAME}"
+SP_OUTPUT=$(az ad sp create-for-rbac \
+ --name "$SP_NAME" \
+ --role "Key Vault Secrets User" \
+ --scopes "/subscriptions/$(az account show --query id -o tsv)/resourceGroups/$RESOURCE_GROUP/providers/Microsoft.KeyVault/vaults/$VAULT_NAME" \
+ --output json)
+
+TENANT_ID=$(echo "$SP_OUTPUT" | jq -r '.tenant')
+CLIENT_ID=$(echo "$SP_OUTPUT" | jq -r '.appId')
+CLIENT_SECRET=$(echo "$SP_OUTPUT" | jq -r '.password')
+
+echo -e "${GREEN}Service Principal created${NC}"
+
+# Upload secrets if keys directory exists
+SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
+PROJECT_ROOT="$(cd "$SCRIPT_DIR/.." && pwd)"
+KEYS_DIR="$PROJECT_ROOT/keys"
+
+if [ -d "$KEYS_DIR" ]; then
+ echo ""
+ echo "Uploading JWT keys from $KEYS_DIR..."
+
+ if [ -f "$KEYS_DIR/private.pem" ]; then
+ az keyvault secret set \
+ --vault-name "$VAULT_NAME" \
+ --name "jwt-private-key" \
+ --file "$KEYS_DIR/private.pem"
+ echo -e "${GREEN}Uploaded jwt-private-key${NC}"
+ fi
+
+ if [ -f "$KEYS_DIR/public.pem" ]; then
+ az keyvault secret set \
+ --vault-name "$VAULT_NAME" \
+ --name "jwt-public-key" \
+ --file "$KEYS_DIR/public.pem"
+ echo -e "${GREEN}Uploaded jwt-public-key${NC}"
+ fi
+fi
+
+# Prompt for other secrets
+echo ""
+echo "Would you like to set database and Redis passwords? (y/n)"
+read -p "> " SET_PASSWORDS
+
+if [ "$SET_PASSWORDS" = "y" ]; then
+ read -sp "PostgreSQL password: " POSTGRES_PASSWORD
+ echo ""
+ az keyvault secret set \
+ --vault-name "$VAULT_NAME" \
+ --name "postgres-password" \
+ --value "$POSTGRES_PASSWORD"
+ echo -e "${GREEN}Uploaded postgres-password${NC}"
+
+ read -sp "Redis password: " REDIS_PASSWORD
+ echo ""
+ az keyvault secret set \
+ --vault-name "$VAULT_NAME" \
+ --name "redis-password" \
+ --value "$REDIS_PASSWORD"
+ echo -e "${GREEN}Uploaded redis-password${NC}"
+fi
+
+# Output credentials for .env file
+echo ""
+echo -e "${GREEN}=== Setup Complete ===${NC}"
+echo ""
+echo "Add these to your .env file:"
+echo "=========================================="
+echo "AZURE_KEY_VAULT_URL=https://${VAULT_NAME}.vault.azure.net/"
+echo "AZURE_TENANT_ID=${TENANT_ID}"
+echo "AZURE_CLIENT_ID=${CLIENT_ID}"
+echo "AZURE_CLIENT_SECRET=${CLIENT_SECRET}"
+echo "=========================================="
+echo ""
+echo -e "${YELLOW}IMPORTANT: Save the CLIENT_SECRET securely - it won't be shown again!${NC}"
diff --git a/tests/__init__.py b/tests/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/tests/conftest.py b/tests/conftest.py
index baf2191..8f89517 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -1,14 +1,69 @@
"""Pytest configuration and fixtures."""
+from unittest.mock import AsyncMock, MagicMock
+
import pytest
from httpx import ASGITransport, AsyncClient
+from config import Settings
+from core.app_state import AppState
+from core.keyvault import KeyVaultClient
+from core.telemetry import TelemetryManager
from main import app
@pytest.fixture
-async def client():
- """Create an async test client."""
+def mock_app_state():
+ """Create a mock AppState for testing."""
+ # Create mock database pool
+ mock_db_pool = AsyncMock()
+ mock_db_pool.execute = AsyncMock(return_value=None)
+
+ # Create mock Redis client
+ mock_redis = AsyncMock()
+ mock_redis.ping = AsyncMock(return_value=True)
+
+ # Create mock Key Vault
+ mock_key_vault = MagicMock(spec=KeyVaultClient)
+ mock_key_vault.is_available = MagicMock(return_value=True)
+
+ # Create minimal settings
+ settings = Settings(
+ app_name="NeroSpatial Backend",
+ app_version="0.1.0",
+ environment="development",
+ )
+
+ # Create mock TelemetryManager to avoid OpenTelemetry global state
+ # This prevents any connection attempts to OTLP endpoint
+ mock_telemetry = MagicMock(spec=TelemetryManager)
+ mock_telemetry.service_name = settings.app_name
+ mock_telemetry.environment = settings.environment
+ mock_telemetry.get_tracer = MagicMock()
+ mock_telemetry.get_meter = MagicMock()
+ mock_telemetry.create_span = MagicMock()
+ mock_telemetry.record_metric = MagicMock()
+ mock_telemetry.shutdown = MagicMock()
+
+ # Create AppState
+ state = AppState(
+ settings=settings,
+ db_pool=mock_db_pool,
+ redis_client=mock_redis,
+ key_vault=mock_key_vault,
+ telemetry=mock_telemetry, # Use mock instead of real TelemetryManager
+ )
+ state.mark_ready()
+
+ return state
+
+
+@pytest.fixture
+async def client(mock_app_state):
+ """Create an async test client with mocked app_state."""
+ # Set app_state before creating client
+ app.state.app_state = mock_app_state
+
async with AsyncClient(
transport=ASGITransport(app=app), base_url="http://test"
) as ac:
diff --git a/tests/core/__init__.py b/tests/core/__init__.py
new file mode 100644
index 0000000..37c601d
--- /dev/null
+++ b/tests/core/__init__.py
@@ -0,0 +1 @@
+"""Tests for core module."""
diff --git a/tests/core/test_app_state.py b/tests/core/test_app_state.py
new file mode 100644
index 0000000..a66ed94
--- /dev/null
+++ b/tests/core/test_app_state.py
@@ -0,0 +1,43 @@
+"""Tests for application state management."""
+
+from datetime import UTC, datetime
+
+import pytest
+
+from config import Settings
+from core.app_state import AppState
+
+
+class TestAppState:
+ """Test AppState lifecycle."""
+
+ def test_initial_state_not_ready(self):
+ """App should not be ready initially."""
+ state = AppState(settings=Settings())
+ assert not state.is_ready
+
+ def test_mark_ready(self):
+ """Can mark app as ready."""
+ state = AppState(settings=Settings())
+ state.mark_ready()
+ assert state.is_ready
+
+ def test_startup_errors_tracked(self):
+ """Startup errors should be tracked."""
+ state = AppState(settings=Settings())
+ state.add_startup_error("Database connection failed")
+ assert "Database connection failed" in state.startup_errors
+ assert len(state.startup_errors) == 1
+
+ def test_started_at_initialized(self):
+ """Started at should be initialized."""
+ state = AppState(settings=Settings())
+ assert isinstance(state.started_at, datetime)
+ assert state.started_at.tzinfo == UTC
+
+ @pytest.mark.asyncio
+ async def test_cleanup_with_none_resources(self):
+ """Cleanup should handle None resources gracefully."""
+ state = AppState(settings=Settings())
+ # All resources are None by default
+ await state.cleanup() # Should not raise
diff --git a/tests/core/test_auth.py b/tests/core/test_auth.py
new file mode 100644
index 0000000..388f5db
--- /dev/null
+++ b/tests/core/test_auth.py
@@ -0,0 +1,481 @@
+"""Unit tests for core auth module."""
+
+import hashlib
+from datetime import UTC, datetime, timedelta
+from uuid import UUID, uuid4
+
+import jwt
+import pytest
+from cryptography.hazmat.primitives import serialization
+from cryptography.hazmat.primitives.asymmetric import rsa
+
+from core.auth import JWTAuth
+from core.exceptions import AuthenticationError, AuthorizationError
+from core.models import (
+ OAuthProvider,
+ RefreshToken,
+ TokenRevocationReason,
+ User,
+ UserStatus,
+)
+
+
+# Generate test RSA keys (module-level for reuse)
+def _generate_test_keys():
+ """Generate RSA key pair for testing."""
+ private_key = rsa.generate_private_key(public_exponent=65537, key_size=2048)
+ public_key = private_key.public_key()
+
+ private_pem = private_key.private_bytes(
+ encoding=serialization.Encoding.PEM,
+ format=serialization.PrivateFormat.PKCS8,
+ encryption_algorithm=serialization.NoEncryption(),
+ ).decode()
+
+ public_pem = public_key.public_bytes(
+ encoding=serialization.Encoding.PEM,
+ format=serialization.PublicFormat.SubjectPublicKeyInfo,
+ ).decode()
+
+ return private_pem, public_pem
+
+
+PRIVATE_KEY, PUBLIC_KEY = _generate_test_keys()
+
+
+class MockRedisClient:
+ """Mock Redis client for testing."""
+
+ def __init__(self):
+ self.data: dict[str, str] = {}
+ self.ttls: dict[str, int] = {}
+
+ async def get(self, key: str) -> str | None:
+ """Get value from mock Redis."""
+ return self.data.get(key)
+
+ async def setex(self, key: str, ttl: int, value: str) -> None:
+ """Set value with TTL."""
+ self.data[key] = value
+ self.ttls[key] = ttl
+
+ async def delete(self, key: str) -> None:
+ """Delete key."""
+ self.data.pop(key, None)
+ self.ttls.pop(key, None)
+
+ async def exists(self, key: str) -> bool:
+ """Check if key exists."""
+ return key in self.data
+
+
+class MockPostgresClient:
+ """Mock Postgres client for testing."""
+
+ def __init__(self):
+ self.users: dict[str, User] = {}
+ self.refresh_tokens: dict[str, RefreshToken] = {}
+ self.blacklist_entries: list = []
+
+    async def get_user(self, user_id: UUID) -> User | None:
+ """Get user by ID."""
+ return self.users.get(str(user_id))
+
+ async def get_user_by_email(self, email: str) -> User | None:
+ """Get user by email."""
+ for user in self.users.values():
+ if user.email == email:
+ return user
+ return None
+
+ async def create_refresh_token(self, token: RefreshToken) -> None:
+ """Create refresh token."""
+ self.refresh_tokens[token.token_hash] = token
+
+ async def get_refresh_token(self, token_hash: str) -> RefreshToken | None:
+ """Get refresh token by hash."""
+ return self.refresh_tokens.get(token_hash)
+
+ async def rotate_refresh_token(
+        self, old_token_id: UUID, new_token: RefreshToken
+ ) -> None:
+ """Rotate refresh token."""
+ # Mark old token as rotated
+ for hash_key, token in list(self.refresh_tokens.items()):
+ if token.token_id == old_token_id:
+ # Create new token with rotated_at set
+ rotated_token = RefreshToken(
+ **{**token.model_dump(), "rotated_at": datetime.now(UTC)}
+ )
+ # Update in dict
+ self.refresh_tokens[hash_key] = rotated_token
+ # Add new token
+ self.refresh_tokens[new_token.token_hash] = new_token
+
+    async def delete_user_refresh_tokens(self, user_id: UUID) -> None:
+        """Delete all refresh tokens for user."""
+        to_delete = [
+            token_hash
+            for token_hash, token in self.refresh_tokens.items()
+            if token.user_id == user_id
+        ]
+        for token_hash in to_delete:
+            del self.refresh_tokens[token_hash]
+
+ async def create_token_blacklist_entry(self, entry) -> None:
+ """Create blacklist entry."""
+ self.blacklist_entries.append(entry)
+
+
+@pytest.fixture
+def mock_redis():
+ """Create mock Redis client."""
+ return MockRedisClient()
+
+
+@pytest.fixture
+def mock_postgres():
+ """Create mock Postgres client."""
+ return MockPostgresClient()
+
+
+@pytest.fixture
+def auth_with_clients(mock_redis, mock_postgres):
+ """Create JWTAuth with mock clients."""
+ return JWTAuth(
+ private_key=PRIVATE_KEY,
+ public_key=PUBLIC_KEY,
+ redis_client=mock_redis,
+ postgres_client=mock_postgres,
+ )
+
+
+@pytest.fixture
+def auth_no_clients():
+ """Create JWTAuth without clients."""
+ return JWTAuth(public_key=PUBLIC_KEY)
+
+
+@pytest.fixture
+def test_user():
+ """Create test user."""
+ now = datetime.now(UTC)
+ return User(
+ user_id=uuid4(),
+ email="test@example.com",
+ name="Test User",
+ oauth_provider=OAuthProvider.GOOGLE,
+ status=UserStatus.ACTIVE,
+ created_at=now,
+ updated_at=now,
+ )
+
+
+@pytest.mark.asyncio
+async def test_validate_token_valid(auth_no_clients, test_user):
+ """Test validating a valid token."""
+ # Generate token
+ now = datetime.now(UTC)
+ claims = {
+ "sub": str(test_user.user_id),
+ "user_id": str(test_user.user_id),
+ "email": test_user.email,
+ "iat": int(now.timestamp()),
+ "exp": int((now + timedelta(seconds=900)).timestamp()),
+ "jti": str(uuid4()),
+ }
+ token = jwt.encode(claims, PRIVATE_KEY, algorithm="RS256")
+
+ # Validate
+ decoded = await auth_no_clients.validate_token(token)
+ assert decoded["sub"] == str(test_user.user_id)
+ assert decoded["email"] == test_user.email
+
+
+@pytest.mark.asyncio
+async def test_validate_token_expired(auth_no_clients, test_user):
+ """Test validating an expired token."""
+ # Generate expired token
+ now = datetime.now(UTC)
+ claims = {
+ "sub": str(test_user.user_id),
+ "iat": int((now - timedelta(hours=1)).timestamp()),
+ "exp": int((now - timedelta(minutes=1)).timestamp()),
+ "jti": str(uuid4()),
+ }
+ token = jwt.encode(claims, PRIVATE_KEY, algorithm="RS256")
+
+ # Should raise AuthenticationError
+ with pytest.raises(AuthenticationError, match="expired"):
+ await auth_no_clients.validate_token(token)
+
+
+@pytest.mark.asyncio
+async def test_validate_token_invalid_signature(auth_no_clients):
+ """Test validating token with invalid signature."""
+ # Generate token with different key
+ other_private, _ = _generate_test_keys()
+ claims = {"sub": str(uuid4()), "iat": int(datetime.now(UTC).timestamp())}
+ token = jwt.encode(claims, other_private, algorithm="RS256")
+
+ # Should raise AuthenticationError
+ with pytest.raises(AuthenticationError, match="Invalid token"):
+ await auth_no_clients.validate_token(token)
+
+
+@pytest.mark.asyncio
+async def test_validate_token_blacklisted(auth_with_clients, test_user):
+ """Test validating a blacklisted token."""
+ # Generate token
+ now = datetime.now(UTC)
+ jti = str(uuid4())
+ claims = {
+ "sub": str(test_user.user_id),
+ "iat": int(now.timestamp()),
+ "exp": int((now + timedelta(seconds=900)).timestamp()),
+ "jti": jti,
+ }
+ token = jwt.encode(claims, PRIVATE_KEY, algorithm="RS256")
+
+ # Blacklist token
+ expires_at = now + timedelta(seconds=900)
+ await auth_with_clients.blacklist_token(
+ jti, test_user.user_id, TokenRevocationReason.LOGOUT, expires_at
+ )
+
+ # Should raise AuthenticationError
+ with pytest.raises(AuthenticationError, match="blacklisted"):
+ await auth_with_clients.validate_token(token)
+
+
+@pytest.mark.asyncio
+async def test_extract_user_context(auth_with_clients, test_user):
+ """Test extracting user context from token."""
+ # Generate token
+ now = datetime.now(UTC)
+ claims = {
+ "sub": str(test_user.user_id),
+ "user_id": str(test_user.user_id),
+ "email": test_user.email,
+ "name": test_user.name,
+ "oauth_provider": test_user.oauth_provider.value,
+ "status": test_user.status.value,
+ "jti": str(uuid4()),
+ "iat": int(now.timestamp()),
+ "exp": int((now + timedelta(seconds=900)).timestamp()),
+ }
+ token = jwt.encode(claims, PRIVATE_KEY, algorithm="RS256")
+
+ # Extract context
+ context = await auth_with_clients.extract_user_context(token)
+
+ assert context.user_id == test_user.user_id
+ assert context.email == test_user.email
+ assert context.name == test_user.name
+ assert context.oauth_provider == test_user.oauth_provider
+ assert context.status == test_user.status
+
+
+@pytest.mark.asyncio
+async def test_extract_user_context_cached(auth_with_clients, test_user):
+ """Test that user context is cached."""
+ # Generate token
+ now = datetime.now(UTC)
+ claims = {
+ "sub": str(test_user.user_id),
+ "user_id": str(test_user.user_id),
+ "email": test_user.email,
+ "oauth_provider": test_user.oauth_provider.value,
+ "status": test_user.status.value,
+ "jti": str(uuid4()),
+ "iat": int(now.timestamp()),
+ "exp": int((now + timedelta(seconds=900)).timestamp()),
+ }
+ token = jwt.encode(claims, PRIVATE_KEY, algorithm="RS256")
+
+ # First call - should cache
+ context1 = await auth_with_clients.extract_user_context(token)
+ assert context1.user_id == test_user.user_id
+
+ # Second call - should use cache
+ context2 = await auth_with_clients.extract_user_context(token)
+ assert context2.user_id == test_user.user_id
+
+ # Verify cache was used
+ cache_key = f"user:context:{test_user.user_id}"
+ cached = await auth_with_clients.redis_client.get(cache_key)
+ assert cached is not None
+
+
+@pytest.mark.asyncio
+async def test_extract_user_context_inactive_user(auth_with_clients, test_user):
+ """Test extracting context for inactive user."""
+ # Generate token with suspended status
+ now = datetime.now(UTC)
+ claims = {
+ "sub": str(test_user.user_id),
+ "user_id": str(test_user.user_id),
+ "email": test_user.email,
+ "oauth_provider": test_user.oauth_provider.value,
+ "status": UserStatus.SUSPENDED.value,
+ "jti": str(uuid4()),
+ "iat": int(now.timestamp()),
+ "exp": int((now + timedelta(seconds=900)).timestamp()),
+ }
+ token = jwt.encode(claims, PRIVATE_KEY, algorithm="RS256")
+
+ # Should raise AuthorizationError
+ with pytest.raises(AuthorizationError, match="not ACTIVE"):
+ await auth_with_clients.extract_user_context(token)
+
+
+@pytest.mark.asyncio
+async def test_generate_tokens(auth_with_clients, test_user):
+ """Test token generation."""
+ access_token, refresh_token = await auth_with_clients.generate_tokens(test_user)
+
+ # Verify access token
+ decoded = jwt.decode(access_token, PUBLIC_KEY, algorithms=["RS256"])
+ assert decoded["sub"] == str(test_user.user_id)
+ assert decoded["email"] == test_user.email
+ assert decoded["type"] == "access"
+
+ # Verify refresh token was stored
+ token_hash = hashlib.sha256(refresh_token.encode()).hexdigest()
+ stored = await auth_with_clients.postgres_client.get_refresh_token(token_hash)
+ assert stored is not None
+ assert stored.user_id == test_user.user_id
+
+
+@pytest.mark.asyncio
+async def test_generate_tokens_no_private_key(auth_no_clients, test_user):
+ """Test token generation without private key."""
+ with pytest.raises(ValueError, match="private_key required"):
+ await auth_no_clients.generate_tokens(test_user)
+
+
+@pytest.mark.asyncio
+async def test_refresh_tokens(auth_with_clients, test_user, mock_postgres):
+ """Test token refresh with rotation."""
+ # Store user in mock postgres (needed for refresh)
+ mock_postgres.users[str(test_user.user_id)] = test_user
+
+ # Generate initial tokens
+ access_token, refresh_token = await auth_with_clients.generate_tokens(test_user)
+
+ # Refresh tokens
+ new_access_token, new_refresh_token = await auth_with_clients.refresh_tokens(
+ refresh_token
+ )
+
+ # Verify new tokens are different
+ assert new_access_token != access_token
+ assert new_refresh_token != refresh_token
+
+ # Verify old refresh token is marked as rotated
+ old_hash = hashlib.sha256(refresh_token.encode()).hexdigest()
+ old_token = await auth_with_clients.postgres_client.get_refresh_token(old_hash)
+ assert old_token is not None
+ assert old_token.is_rotated()
+
+
+@pytest.mark.asyncio
+async def test_refresh_tokens_invalid(auth_with_clients):
+ """Test refreshing with invalid token."""
+ with pytest.raises(AuthenticationError, match="not found"):
+ await auth_with_clients.refresh_tokens("invalid_token")
+
+
+@pytest.mark.asyncio
+async def test_blacklist_token(auth_with_clients, test_user):
+ """Test token blacklisting."""
+ jti = str(uuid4())
+ expires_at = datetime.now(UTC) + timedelta(seconds=900)
+
+ await auth_with_clients.blacklist_token(
+ jti, test_user.user_id, TokenRevocationReason.LOGOUT, expires_at
+ )
+
+ # Verify in Redis
+ is_blacklisted = await auth_with_clients.is_blacklisted(jti)
+ assert is_blacklisted is True
+
+
+@pytest.mark.asyncio
+async def test_is_blacklisted_false(auth_with_clients):
+ """Test checking non-blacklisted token."""
+ jti = str(uuid4())
+ is_blacklisted = await auth_with_clients.is_blacklisted(jti)
+ assert is_blacklisted is False
+
+
+@pytest.mark.asyncio
+async def test_logout(auth_with_clients, test_user):
+ """Test logout flow."""
+ # Generate tokens
+ access_token, refresh_token = await auth_with_clients.generate_tokens(test_user)
+
+ # Logout
+ await auth_with_clients.logout(access_token)
+
+ # Verify token is blacklisted
+ decoded = jwt.decode(
+ access_token, PUBLIC_KEY, algorithms=["RS256"], options={"verify_exp": False}
+ )
+ jti = decoded.get("jti")
+ if jti:
+ is_blacklisted = await auth_with_clients.is_blacklisted(jti)
+ assert is_blacklisted is True
+
+ # Verify refresh tokens deleted
+ token_hash = hashlib.sha256(refresh_token.encode()).hexdigest()
+ stored = await auth_with_clients.postgres_client.get_refresh_token(token_hash)
+ assert stored is None
+
+ # Verify cache invalidated
+ cache_key = f"user:context:{test_user.user_id}"
+ cached = await auth_with_clients.redis_client.get(cache_key)
+ assert cached is None
+
+
+@pytest.mark.asyncio
+async def test_generate_trace_id(auth_no_clients):
+    """Test trace ID generation."""
+    trace_id = auth_no_clients.generate_trace_id()
+    assert isinstance(trace_id, str)
+    assert len(trace_id) > 0
+    # Should parse as a valid UUID; UUID() raises ValueError otherwise
+    UUID(trace_id)
+
+
+def test_auth_init_with_public_key():
+ """Test JWTAuth initialization with public key."""
+ auth = JWTAuth(public_key=PUBLIC_KEY)
+ assert auth.public_key == PUBLIC_KEY
+ assert auth.jwks_client is None
+
+
+def test_auth_init_with_public_key_url():
+    """Test JWTAuth initialization with public key URL."""
+    # Construction may fail in offline environments if the JWKS client
+    # connects eagerly; skip rather than silently pass in that case.
+    try:
+        auth = JWTAuth(public_key_url="https://example.com/.well-known/jwks.json")
+    except Exception:
+        pytest.skip("JWKS client could not be constructed in this environment")
+    assert auth.public_key is None
+    assert auth.jwks_client is not None
+
+
+def test_auth_init_no_keys():
+ """Test JWTAuth initialization without keys raises error."""
+ with pytest.raises(
+ ValueError, match="Either public_key_url or public_key required"
+ ):
+ JWTAuth()
diff --git a/tests/core/test_config_loader.py b/tests/core/test_config_loader.py
new file mode 100644
index 0000000..1503eac
--- /dev/null
+++ b/tests/core/test_config_loader.py
@@ -0,0 +1,51 @@
+"""Tests for configuration loading."""
+
+import pytest
+
+from config import Settings
+from core.config_loader import ConfigLoader
+from core.exceptions import ValidationError
+
+
+class TestEnvironmentValidation:
+ """Test environment-based validation."""
+
+ def test_production_requires_azure_config(self):
+ """Production environment must have Azure App Config."""
+ settings = Settings(environment="production")
+ loader = ConfigLoader(settings)
+
+ with pytest.raises(ValidationError) as exc:
+ loader._validate_requirements()
+ assert "AZURE_APP_CONFIG_URL" in str(exc.value)
+
+ def test_development_allows_fallback(self):
+ """Development can work without Azure."""
+ settings = Settings(environment="development")
+ loader = ConfigLoader(settings)
+ loader._validate_requirements() # Should not raise
+
+ def test_staging_requires_azure_config(self):
+ """Staging environment must have Azure App Config."""
+ settings = Settings(environment="staging")
+ loader = ConfigLoader(settings)
+
+ with pytest.raises(ValidationError):
+ loader._validate_requirements()
+
+ def test_production_requires_credentials(self):
+ """Production requires Azure credentials."""
+ # Use model_construct to bypass Pydantic validation and env loading
+ settings = Settings.model_construct(
+ environment="production",
+ azure_app_config_url="https://test.azconfig.io",
+ azure_key_vault_url="https://test.vault.azure.net/",
+ azure_tenant_id=None,
+ azure_client_id=None,
+ azure_client_secret=None,
+ )
+ loader = ConfigLoader(settings)
+
+ with pytest.raises(ValidationError) as exc:
+ loader._validate_requirements()
+ assert "AZURE_TENANT_ID" in str(exc.value) or "credentials" in str(exc.value)
diff --git a/tests/core/test_exceptions.py b/tests/core/test_exceptions.py
new file mode 100644
index 0000000..3b136b3
--- /dev/null
+++ b/tests/core/test_exceptions.py
@@ -0,0 +1,209 @@
+"""Unit tests for core exceptions."""
+
+from uuid import uuid4
+
+from core.exceptions import (
+ AuthenticationError,
+ AuthorizationError,
+ CircuitBreakerOpenError,
+ DatabaseError,
+ LLMProviderError,
+ NeroSpatialException,
+ RateLimitExceeded,
+ SessionExpiredError,
+ SessionNotFoundError,
+ ValidationError,
+ VLMTimeoutError,
+)
+
+
+def test_base_exception_creation():
+ """Test NeroSpatialException base class."""
+ exc = NeroSpatialException("Test error")
+ assert exc.message == "Test error"
+ assert exc.trace_id is None
+ assert exc.user_id is None
+ assert exc.context == {}
+
+
+def test_base_exception_with_context():
+ """Test NeroSpatialException with trace_id and user_id."""
+ trace_id = "trace-123"
+ user_id = uuid4()
+ exc = NeroSpatialException(
+ "Test error", trace_id=trace_id, user_id=user_id, extra="value"
+ )
+ assert exc.message == "Test error"
+ assert exc.trace_id == trace_id
+ assert exc.user_id == user_id
+ assert exc.context == {"extra": "value"}
+
+
+def test_base_exception_str():
+ """Test exception string representation."""
+ trace_id = "trace-123"
+ user_id = uuid4()
+ exc = NeroSpatialException(
+ "Test error", trace_id=trace_id, user_id=user_id, key="value"
+ )
+ str_repr = str(exc)
+ assert "Test error" in str_repr
+ assert trace_id in str_repr
+ assert str(user_id) in str_repr
+ assert "key=value" in str_repr
+
+
+def test_authentication_error():
+ """Test AuthenticationError."""
+ exc = AuthenticationError("Invalid token", trace_id="trace-123")
+ assert exc.message == "Invalid token"
+ assert exc.trace_id == "trace-123"
+ assert isinstance(exc, NeroSpatialException)
+
+
+def test_authorization_error():
+ """Test AuthorizationError."""
+ user_id = uuid4()
+ exc = AuthorizationError("Access denied", user_id=user_id)
+ assert exc.message == "Access denied"
+ assert exc.user_id == user_id
+ assert isinstance(exc, NeroSpatialException)
+
+
+def test_session_expired_error():
+ """Test SessionExpiredError."""
+ session_id = uuid4()
+ exc = SessionExpiredError(session_id, trace_id="trace-123")
+ assert exc.session_id == session_id
+ assert f"Session {session_id}" in exc.message
+ assert exc.trace_id == "trace-123"
+ assert isinstance(exc, NeroSpatialException)
+
+
+def test_session_not_found_error():
+ """Test SessionNotFoundError."""
+ session_id = uuid4()
+ exc = SessionNotFoundError(session_id, trace_id="trace-123")
+ assert exc.session_id == session_id
+ assert f"Session {session_id}" in exc.message
+ assert exc.trace_id == "trace-123"
+ assert isinstance(exc, NeroSpatialException)
+
+
+def test_vlm_timeout_error():
+ """Test VLMTimeoutError."""
+ timeout_ms = 5000
+ exc = VLMTimeoutError(timeout_ms, trace_id="trace-123")
+ assert exc.timeout_ms == timeout_ms
+ assert f"{timeout_ms}ms" in exc.message
+ assert exc.trace_id == "trace-123"
+ assert isinstance(exc, NeroSpatialException)
+
+
+def test_llm_provider_error():
+ """Test LLMProviderError."""
+ provider = "groq"
+ status_code = 500
+ exc = LLMProviderError(
+ "API error", provider=provider, status_code=status_code, trace_id="trace-123"
+ )
+ assert exc.provider == provider
+ assert exc.status_code == status_code
+ assert provider in exc.message
+ assert isinstance(exc, NeroSpatialException)
+
+
+def test_circuit_breaker_open_error():
+ """Test CircuitBreakerOpenError."""
+ provider = "gemini"
+ exc = CircuitBreakerOpenError(provider, trace_id="trace-123")
+ assert exc.provider == provider
+ assert provider in exc.message
+ assert exc.trace_id == "trace-123"
+ assert isinstance(exc, NeroSpatialException)
+
+
+def test_database_error():
+ """Test DatabaseError."""
+ db_type = "postgres"
+ operation = "get_user"
+ exc = DatabaseError(
+ "Connection failed",
+ db_type=db_type,
+ operation=operation,
+ trace_id="trace-123",
+ )
+ assert exc.db_type == db_type
+ assert exc.operation == operation
+ assert db_type in exc.message
+ assert operation in exc.message
+ assert isinstance(exc, NeroSpatialException)
+
+
+def test_rate_limit_exceeded():
+ """Test RateLimitExceeded."""
+ limit = 100
+ window_seconds = 60
+ exc = RateLimitExceeded(
+ "Too many requests",
+ limit=limit,
+ window_seconds=window_seconds,
+ trace_id="trace-123",
+ )
+ assert exc.limit == limit
+ assert exc.window_seconds == window_seconds
+ assert str(limit) in exc.message
+ assert str(window_seconds) in exc.message
+ assert isinstance(exc, NeroSpatialException)
+
+
+def test_validation_error():
+ """Test ValidationError."""
+ field = "email"
+ exc = ValidationError("Invalid format", field=field, trace_id="trace-123")
+ assert exc.field == field
+ assert exc.message == "Invalid format"
+ assert exc.trace_id == "trace-123"
+ assert isinstance(exc, NeroSpatialException)
+
+
+def test_validation_error_without_field():
+ """Test ValidationError without field."""
+ exc = ValidationError("Invalid input", trace_id="trace-123")
+ assert exc.field is None
+ assert exc.message == "Invalid input"
+ assert isinstance(exc, NeroSpatialException)
+
+
+def test_exception_repr():
+ """Test exception __repr__ method."""
+ trace_id = "trace-123"
+ user_id = uuid4()
+ exc = NeroSpatialException(
+ "Test error", trace_id=trace_id, user_id=user_id, key="value"
+ )
+ repr_str = repr(exc)
+ assert "NeroSpatialException" in repr_str
+ assert "Test error" in repr_str
+ assert trace_id in repr_str
+ assert str(user_id) in repr_str
+
+
+def test_exception_inheritance():
+ """Test that all exceptions inherit from NeroSpatialException."""
+ exceptions = [
+ AuthenticationError("test"),
+ AuthorizationError("test"),
+ SessionExpiredError(uuid4()),
+ SessionNotFoundError(uuid4()),
+ VLMTimeoutError(1000),
+ LLMProviderError("test", "groq"),
+ CircuitBreakerOpenError("groq"),
+ DatabaseError("test", "postgres", "get_user"),
+ RateLimitExceeded("test", 100, 60),
+ ValidationError("test"),
+ ]
+
+ for exc in exceptions:
+ assert isinstance(exc, NeroSpatialException)
+ assert isinstance(exc, Exception)
diff --git a/tests/test_keyvault.py b/tests/core/test_keyvault.py
similarity index 100%
rename from tests/test_keyvault.py
rename to tests/core/test_keyvault.py
diff --git a/tests/core/test_models.py b/tests/core/test_models.py
new file mode 100644
index 0000000..400d334
--- /dev/null
+++ b/tests/core/test_models.py
@@ -0,0 +1,2586 @@
+"""Unit tests for core models."""
+
+from datetime import UTC, datetime, timedelta
+from uuid import uuid4
+
+import pytest
+
+from core.models import (
+ # Enums - User & Auth
+ AuditAction,
+ # Models - User & Auth
+ AuditLog,
+ # Models - Control & Binary
+ BinaryFrame,
+ ControlMessage,
+ # Enums - Session & Control
+ ControlMessageType,
+ # Models - Interaction
+ ConversationHistory,
+ FrameFlags,
+ InteractionTurn,
+ OAuthProvider,
+ RefreshToken,
+ SessionMode,
+ # Models - Session
+ SessionState,
+ StreamType,
+ TokenBlacklistEntry,
+ TokenRevocationReason,
+ User,
+ UserContext,
+ UserStatus,
+)
+
+# ============================================================================
+# Enum Tests
+# ============================================================================
+
+
+def test_session_mode_enum():
+ """Test SessionMode enum values"""
+ assert SessionMode.ACTIVE == "active"
+ assert SessionMode.PASSIVE == "passive"
+
+
+def test_control_message_type_enum():
+ """Test ControlMessageType enum values"""
+ assert ControlMessageType.SESSION_CONTROL == "session_control"
+ assert ControlMessageType.ERROR == "error"
+ assert ControlMessageType.ACK == "ack"
+ assert ControlMessageType.HEARTBEAT == "heartbeat"
+
+
+def test_stream_type_enum():
+ """Test StreamType enum values"""
+ assert StreamType.AUDIO == 0x01
+ assert StreamType.VIDEO == 0x02
+ assert StreamType.CONTROL == 0x03
+
+
+def test_frame_flags_enum():
+ """Test FrameFlags enum values"""
+ assert FrameFlags.END_OF_STREAM == 0x01
+ assert FrameFlags.PRIORITY == 0x02
+ assert FrameFlags.ERROR == 0x04
+
+
+def test_user_status_enum():
+ """Test UserStatus enum values"""
+ assert UserStatus.ACTIVE == "active"
+ assert UserStatus.BLACKLISTED == "blacklisted"
+ assert UserStatus.SUSPENDED == "suspended"
+
+
+# ============================================================================
+# Session Model Tests
+# ============================================================================
+
+
+def test_session_state_creation():
+ """Test SessionState model creation"""
+ session_id = uuid4()
+ user_id = uuid4()
+ now = datetime.now(UTC)
+
+ session = SessionState(
+ session_id=session_id,
+ user_id=user_id,
+ mode=SessionMode.ACTIVE,
+ created_at=now,
+ last_activity=now,
+ enable_vision=True,
+ )
+
+ assert session.session_id == session_id
+ assert session.user_id == user_id
+ assert session.mode == SessionMode.ACTIVE
+ assert session.enable_vision is True
+ assert session.voice_id is None
+ assert session.preferences == {}
+
+
+def test_session_state_immutability():
+ """Test that SessionState is immutable"""
+ session_id = uuid4()
+ user_id = uuid4()
+ now = datetime.now(UTC)
+
+ session = SessionState(
+ session_id=session_id,
+ user_id=user_id,
+ mode=SessionMode.ACTIVE,
+ created_at=now,
+ last_activity=now,
+ )
+
+ with pytest.raises(Exception): # Pydantic validation error
+ session.mode = SessionMode.PASSIVE
+
+
+def test_session_state_json_serialization():
+ """Test SessionState JSON serialization."""
+ session_id = uuid4()
+ user_id = uuid4()
+ now = datetime.now(UTC)
+
+ session = SessionState(
+ session_id=session_id,
+ user_id=user_id,
+ mode=SessionMode.ACTIVE,
+ created_at=now,
+ last_activity=now,
+ )
+
+ # Test that UUIDs are serialized as strings
+ json_data = session.model_dump_json()
+ assert str(session_id) in json_data
+ assert str(user_id) in json_data
+
+
+def test_session_state_with_all_fields():
+ """Test SessionState creation with all optional fields."""
+ session_id = uuid4()
+ user_id = uuid4()
+ now = datetime.now(UTC)
+
+ session = SessionState(
+ session_id=session_id,
+ user_id=user_id,
+ mode=SessionMode.ACTIVE,
+ created_at=now,
+ last_activity=now,
+ voice_id="voice-123",
+ enable_vision=True,
+ preferences={"theme": "dark", "language": "en"},
+ metadata={"source": "web", "version": "1.0"},
+ device_info={"type": "desktop", "os": "linux"},
+ ip_address="192.168.1.1",
+ user_agent="Mozilla/5.0",
+ )
+
+ assert session.voice_id == "voice-123"
+ assert session.enable_vision is True
+ assert session.preferences["theme"] == "dark"
+ assert session.metadata["source"] == "web"
+ assert session.device_info["type"] == "desktop"
+ assert str(session.ip_address) == "192.168.1.1"
+ assert session.user_agent == "Mozilla/5.0"
+ assert session.schema_version == "1.0"
+
+
+def test_session_state_timezone_validation():
+ """Test SessionState timezone validation (naive → UTC)."""
+ session_id = uuid4()
+ user_id = uuid4()
+ naive_now = datetime.now() # Naive datetime
+
+ session = SessionState(
+ session_id=session_id,
+ user_id=user_id,
+ mode=SessionMode.ACTIVE,
+ created_at=naive_now,
+ last_activity=naive_now,
+ )
+
+ # Should be converted to UTC
+ assert session.created_at.tzinfo is not None
+ assert session.last_activity.tzinfo is not None
+ assert session.created_at.tzinfo == UTC
+ assert session.last_activity.tzinfo == UTC
+
+
+def test_session_state_is_active():
+ """Test SessionState.is_active() method."""
+ session_id = uuid4()
+ user_id = uuid4()
+ now = datetime.now(UTC)
+
+ # Active session (recent activity)
+ active_session = SessionState(
+ session_id=session_id,
+ user_id=user_id,
+ mode=SessionMode.ACTIVE,
+ created_at=now,
+ last_activity=now,
+ )
+ assert active_session.is_active(ttl_seconds=3600) is True
+
+ # Expired session (old activity)
+ expired_session = SessionState(
+ session_id=session_id,
+ user_id=user_id,
+ mode=SessionMode.ACTIVE,
+ created_at=now - timedelta(hours=2),
+ last_activity=now - timedelta(hours=2),
+ )
+ assert expired_session.is_active(ttl_seconds=3600) is False
+
+
+def test_session_state_is_expired():
+ """Test SessionState.is_expired() method."""
+ session_id = uuid4()
+ user_id = uuid4()
+ now = datetime.now(UTC)
+
+ # Not expired
+ valid_session = SessionState(
+ session_id=session_id,
+ user_id=user_id,
+ mode=SessionMode.ACTIVE,
+ created_at=now,
+ last_activity=now,
+ )
+ assert valid_session.is_expired(ttl_seconds=3600) is False
+
+ # Expired
+ expired_session = SessionState(
+ session_id=session_id,
+ user_id=user_id,
+ mode=SessionMode.ACTIVE,
+ created_at=now - timedelta(hours=2),
+ last_activity=now - timedelta(hours=2),
+ )
+ assert expired_session.is_expired(ttl_seconds=3600) is True
+
+ # Custom TTL
+ custom_ttl_session = SessionState(
+ session_id=session_id,
+ user_id=user_id,
+ mode=SessionMode.ACTIVE,
+ created_at=now - timedelta(minutes=10),
+ last_activity=now - timedelta(minutes=10),
+ )
+ assert custom_ttl_session.is_expired(ttl_seconds=300) is True # 5 min TTL
+ assert custom_ttl_session.is_expired(ttl_seconds=3600) is False # 1 hour TTL
+
+
+def test_session_state_update_activity():
+ """Test SessionState.update_activity() method."""
+ session_id = uuid4()
+ user_id = uuid4()
+ now = datetime.now(UTC)
+
+ session1 = SessionState(
+ session_id=session_id,
+ user_id=user_id,
+ mode=SessionMode.ACTIVE,
+ created_at=now,
+ last_activity=now,
+ )
+
+ # Wait a bit and update activity
+ import time
+
+ time.sleep(0.1)
+ session2 = session1.update_activity()
+
+ # Should be new instance
+ assert session1 is not session2
+ # Original should be unchanged
+ assert session1.last_activity == now
+ # New instance should have updated timestamp
+ assert session2.last_activity > session1.last_activity
+ # Other fields should be the same
+ assert session2.session_id == session1.session_id
+ assert session2.user_id == session1.user_id
+ assert session2.mode == session1.mode
+
+
+def test_session_state_calculate_ttl_remaining():
+ """Test SessionState.calculate_ttl_remaining() method."""
+ session_id = uuid4()
+ user_id = uuid4()
+ now = datetime.now(UTC)
+
+ # Recent activity
+ recent_session = SessionState(
+ session_id=session_id,
+ user_id=user_id,
+ mode=SessionMode.ACTIVE,
+ created_at=now - timedelta(minutes=10),
+ last_activity=now - timedelta(minutes=5),
+ )
+ remaining = recent_session.calculate_ttl_remaining(ttl_seconds=3600)
+ # Should be around 55 minutes (3300 seconds), allow small variance
+ assert 3290 <= remaining <= 3600
+
+ # Expired session
+ expired_session = SessionState(
+ session_id=session_id,
+ user_id=user_id,
+ mode=SessionMode.ACTIVE,
+ created_at=now - timedelta(hours=2),
+ last_activity=now - timedelta(hours=2),
+ )
+ remaining = expired_session.calculate_ttl_remaining(ttl_seconds=3600)
+ assert remaining == 0
+
+ # Custom TTL
+ custom_session = SessionState(
+ session_id=session_id,
+ user_id=user_id,
+ mode=SessionMode.ACTIVE,
+ created_at=now - timedelta(minutes=10),
+ last_activity=now - timedelta(minutes=2),
+ )
+ remaining = custom_session.calculate_ttl_remaining(ttl_seconds=300) # 5 min TTL
+ assert 0 < remaining < 300 # Should be around 3 minutes
+
+
+def test_session_state_should_extend_ttl():
+ """Test SessionState.should_extend_ttl() method."""
+ session_id = uuid4()
+ user_id = uuid4()
+ now = datetime.now(UTC)
+
+ # Recent activity (should extend)
+ recent_session = SessionState(
+ session_id=session_id,
+ user_id=user_id,
+ mode=SessionMode.ACTIVE,
+ created_at=now - timedelta(minutes=10),
+ last_activity=now - timedelta(minutes=2),
+ )
+ assert recent_session.should_extend_ttl(activity_threshold_seconds=300) is True
+
+ # Old activity (should not extend)
+ old_session = SessionState(
+ session_id=session_id,
+ user_id=user_id,
+ mode=SessionMode.ACTIVE,
+ created_at=now - timedelta(minutes=10),
+ last_activity=now - timedelta(minutes=10),
+ )
+ assert old_session.should_extend_ttl(activity_threshold_seconds=300) is False
+
+ # Custom threshold
+ custom_session = SessionState(
+ session_id=session_id,
+ user_id=user_id,
+ mode=SessionMode.ACTIVE,
+ created_at=now - timedelta(minutes=10),
+ last_activity=now - timedelta(minutes=1),
+ )
+ assert custom_session.should_extend_ttl(activity_threshold_seconds=60) is False
+ assert custom_session.should_extend_ttl(activity_threshold_seconds=120) is True
+
+
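+# The sliding-window rule the assertions above imply, sketched here as a
+# hypothetical helper (not the model's actual code): extend only while the
+# last activity is strictly fresher than the threshold, per the 60-second
+# boundary case above.
+def _should_extend_sketch(last_activity: datetime, threshold_seconds: int) -> bool:
+    return (datetime.now(UTC) - last_activity).total_seconds() < threshold_seconds
+
+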
+def test_session_state_default_values():
+ """Test SessionState default values."""
+ session_id = uuid4()
+ user_id = uuid4()
+ now = datetime.now(UTC)
+
+ session = SessionState(
+ session_id=session_id,
+ user_id=user_id,
+ mode=SessionMode.ACTIVE,
+ created_at=now,
+ last_activity=now,
+ )
+
+ assert session.voice_id is None
+ assert session.enable_vision is False
+ assert session.preferences == {}
+ assert session.metadata == {}
+ assert session.schema_version == "1.0"
+ assert session.device_info is None
+ assert session.ip_address is None
+ assert session.user_agent is None
+
+
+def test_session_mode_enum_values():
+ """Test SessionMode enum values."""
+ assert SessionMode.ACTIVE == "active"
+ assert SessionMode.PASSIVE == "passive"
+ assert SessionMode.ACTIVE.value == "active"
+ assert SessionMode.PASSIVE.value == "passive"
+
+
+# ============================================================================
+# User Model Tests
+# ============================================================================
+
+
+def test_user_context_creation():
+ """Test UserContext model creation with all fields."""
+ user_id = uuid4()
+ now = datetime.now(UTC)
+ expires = now + timedelta(minutes=15)
+
+ context = UserContext(
+ user_id=user_id,
+ email="test@example.com",
+ name="Test User",
+ oauth_provider=OAuthProvider.GOOGLE,
+ status=UserStatus.ACTIVE,
+ token_id="jti-12345",
+ issued_at=now,
+ expires_at=expires,
+ )
+
+ assert context.user_id == user_id
+ assert context.email == "test@example.com"
+ assert context.oauth_provider == OAuthProvider.GOOGLE
+ assert context.token_id == "jti-12345"
+ assert context.session_id is None
+
+
+def test_user_context_immutability():
+ """Test that UserContext is immutable."""
+ user_id = uuid4()
+ now = datetime.now(UTC)
+
+ context = UserContext(
+ user_id=user_id,
+ email="test@example.com",
+ oauth_provider=OAuthProvider.GOOGLE,
+ token_id="jti-12345",
+ issued_at=now,
+ expires_at=now + timedelta(minutes=15),
+ )
+
+ with pytest.raises(Exception):
+ context.email = "new@example.com"
+
+
+def test_token_blacklist_entry_creation():
+ """Test TokenBlacklistEntry model creation with reason."""
+ user_id = uuid4()
+ now = datetime.now(UTC)
+ expires = now + timedelta(minutes=15)
+
+ entry = TokenBlacklistEntry(
+ token_id="jti-12345",
+ user_id=user_id,
+ revoked_at=now,
+ expires_at=expires,
+ reason=TokenRevocationReason.LOGOUT,
+ ip_address="192.168.1.1",
+ )
+
+ assert entry.token_id == "jti-12345"
+ assert entry.reason == TokenRevocationReason.LOGOUT
+ assert str(entry.ip_address) == "192.168.1.1"
+
+
+def test_user_creation():
+ """Test User model creation with all fields."""
+ user_id = uuid4()
+ now = datetime.now(UTC)
+
+ user = User(
+ user_id=user_id,
+ email="test@example.com",
+ name="Test User",
+ oauth_provider=OAuthProvider.GOOGLE,
+ oauth_sub="google-12345",
+ created_at=now,
+ updated_at=now,
+ picture_url="https://example.com/photo.jpg",
+ locale="en-us",
+ )
+
+ assert user.user_id == user_id
+ assert user.email == "test@example.com"
+ assert user.name == "Test User"
+ assert user.oauth_provider == OAuthProvider.GOOGLE
+ assert user.oauth_sub == "google-12345"
+ assert user.status == UserStatus.ACTIVE
+ assert user.deleted_at is None
+ assert user.metadata == {}
+ assert user.schema_version == "1.0"
+ assert user.locale == "en-us" # Validated and lowercased
+
+
+def test_user_blacklisted_status():
+ """Test User with blacklisted status."""
+ user_id = uuid4()
+ now = datetime.now(UTC)
+
+ user = User(
+ user_id=user_id,
+ email="test@example.com",
+ oauth_provider=OAuthProvider.GOOGLE,
+ created_at=now,
+ updated_at=now,
+ status=UserStatus.BLACKLISTED,
+ )
+
+ assert user.status == UserStatus.BLACKLISTED
+
+
+def test_user_is_active():
+ """Test User.is_active() helper method."""
+ user_id = uuid4()
+ now = datetime.now(UTC)
+
+ # Active user, not deleted
+ active_user = User(
+ user_id=user_id,
+ email="test@example.com",
+ oauth_provider=OAuthProvider.GOOGLE,
+ created_at=now,
+ updated_at=now,
+ status=UserStatus.ACTIVE,
+ )
+ assert active_user.is_active() is True
+
+ # Active status but soft-deleted
+ deleted_user = User(
+ user_id=user_id,
+ email="test@example.com",
+ oauth_provider=OAuthProvider.GOOGLE,
+ created_at=now,
+ updated_at=now,
+ status=UserStatus.ACTIVE,
+ deleted_at=now,
+ )
+ assert deleted_user.is_active() is False
+
+ # Suspended user
+ suspended_user = User(
+ user_id=user_id,
+ email="test@example.com",
+ oauth_provider=OAuthProvider.GOOGLE,
+ created_at=now,
+ updated_at=now,
+ status=UserStatus.SUSPENDED,
+ )
+ assert suspended_user.is_active() is False
+
+
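+# The semantics is_active() is expected to implement, per the three cases
+# above, sketched as a standalone helper (not the User model's own method):
+def _user_is_active_sketch(user: User) -> bool:
+    return user.status == UserStatus.ACTIVE and user.deleted_at is None
+
+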
+def test_user_is_deleted():
+ """Test User.is_deleted() helper method."""
+ user_id = uuid4()
+ now = datetime.now(UTC)
+
+ # Not deleted
+ active_user = User(
+ user_id=user_id,
+ email="test@example.com",
+ oauth_provider=OAuthProvider.GOOGLE,
+ created_at=now,
+ updated_at=now,
+ )
+ assert active_user.is_deleted() is False
+
+ # Soft-deleted
+ deleted_user = User(
+ user_id=user_id,
+ email="test@example.com",
+ oauth_provider=OAuthProvider.GOOGLE,
+ created_at=now,
+ updated_at=now,
+ deleted_at=now,
+ )
+ assert deleted_user.is_deleted() is True
+
+
+def test_user_oauth_provider_required():
+ """Test that oauth_provider is required for User."""
+ user_id = uuid4()
+ now = datetime.now(UTC)
+
+ # Should raise error without oauth_provider
+ with pytest.raises(Exception):
+ User(
+ user_id=user_id,
+ email="test@example.com",
+ created_at=now,
+ updated_at=now,
+ )
+
+
+def test_user_context_oauth_provider_required():
+ """Test that oauth_provider is required for UserContext."""
+ user_id = uuid4()
+ now = datetime.now(UTC)
+
+ # Should raise error without oauth_provider
+ with pytest.raises(Exception):
+ UserContext(
+ user_id=user_id,
+ email="test@example.com",
+ token_id="jti-12345",
+ issued_at=now,
+ expires_at=now + timedelta(minutes=15),
+ )
+
+
+def test_user_context_is_active():
+ """Test UserContext.is_active() helper method."""
+ user_id = uuid4()
+ now = datetime.now(UTC)
+
+ # Active user
+ active_context = UserContext(
+ user_id=user_id,
+ email="test@example.com",
+ oauth_provider=OAuthProvider.GOOGLE,
+ status=UserStatus.ACTIVE,
+ token_id="jti-12345",
+ issued_at=now,
+ expires_at=now + timedelta(minutes=15),
+ )
+ assert active_context.is_active() is True
+
+ # Suspended user
+ suspended_context = UserContext(
+ user_id=user_id,
+ email="test@example.com",
+ oauth_provider=OAuthProvider.GOOGLE,
+ status=UserStatus.SUSPENDED,
+ token_id="jti-12345",
+ issued_at=now,
+ expires_at=now + timedelta(minutes=15),
+ )
+ assert suspended_context.is_active() is False
+
+
+def test_user_context_is_expired():
+ """Test UserContext.is_expired() helper method."""
+ user_id = uuid4()
+ now = datetime.now(UTC)
+
+ # Not expired
+ valid_context = UserContext(
+ user_id=user_id,
+ email="test@example.com",
+ oauth_provider=OAuthProvider.GOOGLE,
+ token_id="jti-12345",
+ issued_at=now,
+ expires_at=now + timedelta(minutes=15),
+ )
+ assert valid_context.is_expired() is False
+
+ # Expired
+ expired_context = UserContext(
+ user_id=user_id,
+ email="test@example.com",
+ oauth_provider=OAuthProvider.GOOGLE,
+ token_id="jti-12345",
+ issued_at=now - timedelta(hours=1),
+ expires_at=now - timedelta(minutes=15),
+ )
+ assert expired_context.is_expired() is True
+
+
+def test_user_context_is_valid():
+ """Test UserContext.is_valid() helper method."""
+ user_id = uuid4()
+ now = datetime.now(UTC)
+
+ # Valid: active and not expired
+ valid_context = UserContext(
+ user_id=user_id,
+ email="test@example.com",
+ oauth_provider=OAuthProvider.GOOGLE,
+ status=UserStatus.ACTIVE,
+ token_id="jti-12345",
+ issued_at=now,
+ expires_at=now + timedelta(minutes=15),
+ )
+ assert valid_context.is_valid() is True
+
+ # Invalid: expired
+ expired_context = UserContext(
+ user_id=user_id,
+ email="test@example.com",
+ oauth_provider=OAuthProvider.GOOGLE,
+ status=UserStatus.ACTIVE,
+ token_id="jti-12345",
+ issued_at=now - timedelta(hours=1),
+ expires_at=now - timedelta(minutes=15),
+ )
+ assert expired_context.is_valid() is False
+
+ # Invalid: suspended
+ suspended_context = UserContext(
+ user_id=user_id,
+ email="test@example.com",
+ oauth_provider=OAuthProvider.GOOGLE,
+ status=UserStatus.SUSPENDED,
+ token_id="jti-12345",
+ issued_at=now,
+ expires_at=now + timedelta(minutes=15),
+ )
+ assert suspended_context.is_valid() is False
+
+
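+# is_valid() as the three cases above pin it down: active status AND an
+# unexpired token. A sketch built from the helpers tested above:
+def _context_is_valid_sketch(ctx: UserContext) -> bool:
+    return ctx.is_active() and not ctx.is_expired()
+
+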
+# ============================================================================
+# RefreshToken Model Tests
+# ============================================================================
+
+
+def test_refresh_token_creation():
+ """Test RefreshToken model creation."""
+ token_id = uuid4()
+ user_id = uuid4()
+ now = datetime.now(UTC)
+ expires = now + timedelta(days=7)
+
+ token = RefreshToken(
+ token_id=token_id,
+ user_id=user_id,
+ token_hash="sha256_hash_value",
+ expires_at=expires,
+ created_at=now,
+ ip_address="192.168.1.1",
+ user_agent="Mozilla/5.0",
+ )
+
+ assert token.token_id == token_id
+ assert token.user_id == user_id
+ assert token.token_hash == "sha256_hash_value"
+ assert token.expires_at == expires
+ assert token.rotated_at is None
+ assert token.previous_token_id is None
+ assert str(token.ip_address) == "192.168.1.1"
+
+
+def test_refresh_token_is_expired():
+ """Test RefreshToken.is_expired() helper method."""
+ token_id = uuid4()
+ user_id = uuid4()
+ now = datetime.now(UTC)
+
+ # Not expired
+ valid_token = RefreshToken(
+ token_id=token_id,
+ user_id=user_id,
+ token_hash="hash",
+ expires_at=now + timedelta(days=7),
+ created_at=now,
+ )
+ assert valid_token.is_expired() is False
+
+ # Expired
+ expired_token = RefreshToken(
+ token_id=token_id,
+ user_id=user_id,
+ token_hash="hash",
+ expires_at=now - timedelta(hours=1),
+ created_at=now - timedelta(days=8),
+ )
+ assert expired_token.is_expired() is True
+
+
+def test_refresh_token_is_rotated():
+ """Test RefreshToken.is_rotated() helper method."""
+ token_id = uuid4()
+ user_id = uuid4()
+ now = datetime.now(UTC)
+
+ # Not rotated
+ active_token = RefreshToken(
+ token_id=token_id,
+ user_id=user_id,
+ token_hash="hash",
+ expires_at=now + timedelta(days=7),
+ created_at=now,
+ )
+ assert active_token.is_rotated() is False
+
+ # Rotated
+ rotated_token = RefreshToken(
+ token_id=token_id,
+ user_id=user_id,
+ token_hash="hash",
+ expires_at=now + timedelta(days=7),
+ created_at=now - timedelta(days=1),
+ rotated_at=now,
+ previous_token_id=uuid4(),
+ )
+ assert rotated_token.is_rotated() is True
+
+
+def test_refresh_token_is_valid():
+ """Test RefreshToken.is_valid() helper method."""
+ token_id = uuid4()
+ user_id = uuid4()
+ now = datetime.now(UTC)
+
+ # Valid: not expired and not rotated
+ valid_token = RefreshToken(
+ token_id=token_id,
+ user_id=user_id,
+ token_hash="hash",
+ expires_at=now + timedelta(days=7),
+ created_at=now,
+ )
+ assert valid_token.is_valid() is True
+
+ # Invalid: expired
+ expired_token = RefreshToken(
+ token_id=token_id,
+ user_id=user_id,
+ token_hash="hash",
+ expires_at=now - timedelta(hours=1),
+ created_at=now - timedelta(days=8),
+ )
+ assert expired_token.is_valid() is False
+
+ # Invalid: rotated
+ rotated_token = RefreshToken(
+ token_id=token_id,
+ user_id=user_id,
+ token_hash="hash",
+ expires_at=now + timedelta(days=7),
+ created_at=now - timedelta(days=1),
+ rotated_at=now,
+ )
+ assert rotated_token.is_valid() is False
+
+
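+# The validity rule the three cases above establish: a refresh token is
+# valid only while it is neither expired nor rotated. Sketch, not the real
+# method:
+def _refresh_token_is_valid_sketch(token: RefreshToken) -> bool:
+    return not token.is_expired() and not token.is_rotated()
+
+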
+def test_refresh_token_immutability():
+ """Test that RefreshToken is immutable."""
+ token_id = uuid4()
+ user_id = uuid4()
+ now = datetime.now(UTC)
+
+ token = RefreshToken(
+ token_id=token_id,
+ user_id=user_id,
+ token_hash="hash",
+ expires_at=now + timedelta(days=7),
+ created_at=now,
+ )
+
+ with pytest.raises(Exception):
+ token.token_hash = "new_hash"
+
+
+# ============================================================================
+# TokenBlacklistEntry Enhanced Tests
+# ============================================================================
+
+
+def test_token_blacklist_entry_is_cleanup_ready():
+ """Test TokenBlacklistEntry.is_cleanup_ready() helper method."""
+ user_id = uuid4()
+ now = datetime.now(UTC)
+
+ # Not cleanup ready (token not yet expired)
+ active_entry = TokenBlacklistEntry(
+ token_id="jti-12345",
+ user_id=user_id,
+ revoked_at=now,
+ expires_at=now + timedelta(minutes=15),
+ reason=TokenRevocationReason.LOGOUT,
+ )
+ assert active_entry.is_cleanup_ready() is False
+
+ # Cleanup ready (original token expired)
+ expired_entry = TokenBlacklistEntry(
+ token_id="jti-12345",
+ user_id=user_id,
+ revoked_at=now - timedelta(hours=1),
+ expires_at=now - timedelta(minutes=15),
+ reason=TokenRevocationReason.SECURITY,
+ )
+ assert expired_entry.is_cleanup_ready() is True
+
+
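+# Cleanup readiness as the two cases above imply: a blacklist entry can be
+# purged once the original token's expiry has passed. Sketch only:
+def _cleanup_ready_sketch(entry: TokenBlacklistEntry) -> bool:
+    return datetime.now(UTC) >= entry.expires_at
+
+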
+def test_token_blacklist_entry_all_reasons():
+ """Test TokenBlacklistEntry with all revocation reasons."""
+ user_id = uuid4()
+ now = datetime.now(UTC)
+ expires = now + timedelta(minutes=15)
+
+ for reason in TokenRevocationReason:
+ entry = TokenBlacklistEntry(
+ token_id=f"jti-{reason.value}",
+ user_id=user_id,
+ revoked_at=now,
+ expires_at=expires,
+ reason=reason,
+ )
+ assert entry.reason == reason
+
+
+# ============================================================================
+# AuditLog Model Tests
+# ============================================================================
+
+
+def test_audit_log_creation():
+ """Test AuditLog model creation."""
+ log_id = uuid4()
+ user_id = uuid4()
+ now = datetime.now(UTC)
+
+ log = AuditLog(
+ log_id=log_id,
+ user_id=user_id,
+ action=AuditAction.LOGIN,
+ details={"method": "oauth", "provider": "google"},
+ ip_address="10.0.0.1",
+ user_agent="Mozilla/5.0",
+ created_at=now,
+ )
+
+ assert log.log_id == log_id
+ assert log.user_id == user_id
+ assert log.action == AuditAction.LOGIN
+ assert log.details["method"] == "oauth"
+ assert str(log.ip_address) == "10.0.0.1"
+ assert log.user_agent == "Mozilla/5.0"
+
+
+def test_audit_log_with_null_user():
+ """Test AuditLog with null user_id (user deleted)."""
+ log_id = uuid4()
+ now = datetime.now(UTC)
+
+ log = AuditLog(
+ log_id=log_id,
+ user_id=None,
+ action=AuditAction.ACCOUNT_DELETE,
+ created_at=now,
+ )
+
+ assert log.user_id is None
+ assert log.action == AuditAction.ACCOUNT_DELETE
+
+
+def test_audit_log_all_actions():
+ """Test AuditLog with all action types."""
+ log_id = uuid4()
+ user_id = uuid4()
+ now = datetime.now(UTC)
+
+ for action in AuditAction:
+ log = AuditLog(
+ log_id=log_id,
+ user_id=user_id,
+ action=action,
+ created_at=now,
+ )
+ assert log.action == action
+
+
+def test_audit_log_immutability():
+ """Test that AuditLog is immutable."""
+ log_id = uuid4()
+ user_id = uuid4()
+ now = datetime.now(UTC)
+
+ log = AuditLog(
+ log_id=log_id,
+ user_id=user_id,
+ action=AuditAction.LOGIN,
+ created_at=now,
+ )
+
+ with pytest.raises(Exception):
+ log.action = AuditAction.LOGOUT
+
+
+# ============================================================================
+# IP Address Validation Tests
+# ============================================================================
+
+
+def test_ip_address_validation_ipv4():
+ """Test IP address validation with valid IPv4."""
+ user_id = uuid4()
+ now = datetime.now(UTC)
+
+ entry = TokenBlacklistEntry(
+ token_id="jti-12345",
+ user_id=user_id,
+ revoked_at=now,
+ expires_at=now + timedelta(minutes=15),
+ reason=TokenRevocationReason.LOGOUT,
+ ip_address="192.168.1.1",
+ )
+
+ assert str(entry.ip_address) == "192.168.1.1"
+
+
+def test_ip_address_validation_ipv6():
+ """Test IP address validation with valid IPv6."""
+ user_id = uuid4()
+ now = datetime.now(UTC)
+
+ entry = TokenBlacklistEntry(
+ token_id="jti-12345",
+ user_id=user_id,
+ revoked_at=now,
+ expires_at=now + timedelta(minutes=15),
+ reason=TokenRevocationReason.LOGOUT,
+ ip_address="::1",
+ )
+
+ assert str(entry.ip_address) == "::1"
+
+
+def test_ip_address_validation_invalid():
+ """Test IP address validation with invalid address."""
+ user_id = uuid4()
+ now = datetime.now(UTC)
+
+ with pytest.raises(Exception):
+ TokenBlacklistEntry(
+ token_id="jti-12345",
+ user_id=user_id,
+ revoked_at=now,
+ expires_at=now + timedelta(minutes=15),
+ reason=TokenRevocationReason.LOGOUT,
+ ip_address="not-an-ip-address",
+ )
+
+
+def test_ip_address_validation_session_state():
+ """Test IP address validation in SessionState."""
+ session_id = uuid4()
+ user_id = uuid4()
+ now = datetime.now(UTC)
+
+ # Valid IPv4
+ session = SessionState(
+ session_id=session_id,
+ user_id=user_id,
+ mode=SessionMode.ACTIVE,
+ created_at=now,
+ last_activity=now,
+ ip_address="10.0.0.1",
+ )
+ assert str(session.ip_address) == "10.0.0.1"
+
+ # Invalid IP should raise error
+ with pytest.raises(Exception):
+ SessionState(
+ session_id=session_id,
+ user_id=user_id,
+ mode=SessionMode.ACTIVE,
+ created_at=now,
+ last_activity=now,
+ ip_address="invalid-ip",
+ )
+
+
+# ============================================================================
+# Interaction Model Tests
+# ============================================================================
+
+
+def test_interaction_turn_creation():
+ """Test InteractionTurn model creation"""
+ turn_id = uuid4()
+ user_id = uuid4()
+ session_id = uuid4()
+ now = datetime.now(UTC)
+
+ turn = InteractionTurn(
+ turn_id=turn_id,
+ user_id=user_id,
+ session_id=session_id,
+ timestamp=now,
+ transcript="Hello",
+ llm_response="Hi there!",
+ model_used="groq",
+ latency_ms=150,
+ )
+
+ assert turn.turn_id == turn_id
+ assert turn.user_id == user_id
+ assert turn.session_id == session_id
+ assert turn.transcript == "Hello"
+ assert turn.llm_response == "Hi there!"
+ assert turn.model_used == "groq"
+ assert turn.latency_ms == 150
+ assert turn.scene_description is None
+
+
+def test_conversation_history_creation():
+ """Test ConversationHistory model creation"""
+ user_id = uuid4()
+
+ history = ConversationHistory(user_id=user_id)
+
+ assert history.user_id == user_id
+ assert history.turns == []
+ assert history.max_turns == 10
+
+
+def test_conversation_history_add_turn():
+ """Test ConversationHistory.add_turn() method"""
+ user_id = uuid4()
+ session_id = uuid4()
+ now = datetime.now(UTC)
+
+ history = ConversationHistory(user_id=user_id, max_turns=3)
+
+ # Add first turn
+ turn1 = InteractionTurn(
+ turn_id=uuid4(),
+ user_id=user_id,
+ session_id=session_id,
+ timestamp=now,
+ transcript="Hello",
+ llm_response="Hi",
+ model_used="groq",
+ latency_ms=100,
+ )
+ history = history.add_turn(turn1)
+ assert len(history.turns) == 1
+
+ # Add second turn
+ turn2 = InteractionTurn(
+ turn_id=uuid4(),
+ user_id=user_id,
+ session_id=session_id,
+ timestamp=now,
+ transcript="How are you?",
+ llm_response="I'm good",
+ model_used="groq",
+ latency_ms=120,
+ )
+ history = history.add_turn(turn2)
+ assert len(history.turns) == 2
+ assert history.turns[0] == turn2 # Newest first
+
+ # Add more turns to test max_turns limit
+ for i in range(5):
+ turn = InteractionTurn(
+ turn_id=uuid4(),
+ user_id=user_id,
+ session_id=session_id,
+ timestamp=now,
+ transcript=f"Message {i}",
+ llm_response=f"Response {i}",
+ model_used="groq",
+ latency_ms=100,
+ )
+ history = history.add_turn(turn)
+
+ # Should only have max_turns (3) turns
+ assert len(history.turns) == 3
+
+
+def test_conversation_history_immutability():
+ """Test that ConversationHistory.add_turn() returns new instance"""
+ user_id = uuid4()
+ session_id = uuid4()
+ now = datetime.now(UTC)
+
+ history1 = ConversationHistory(user_id=user_id)
+ turn = InteractionTurn(
+ turn_id=uuid4(),
+ user_id=user_id,
+ session_id=session_id,
+ timestamp=now,
+ transcript="Hello",
+ llm_response="Hi",
+ model_used="groq",
+ latency_ms=100,
+ )
+
+ history2 = history1.add_turn(turn)
+
+ # Original should be unchanged
+ assert len(history1.turns) == 0
+ # New instance should have the turn
+ assert len(history2.turns) == 1
+ assert history1 is not history2
+
+
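+# The immutable add_turn contract the tests above rely on, sketched as the
+# presumed list handling (newest first, capped at max_turns); not the
+# model's actual code:
+def _add_turn_sketch(
+    turns: list[InteractionTurn], new_turn: InteractionTurn, max_turns: int
+) -> list[InteractionTurn]:
+    # Prepend (newest first) and trim; the input list is left untouched.
+    return [new_turn, *turns][:max_turns]
+
+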
+# ============================================================================
+# Enhanced InteractionTurn Tests
+# ============================================================================
+
+
+def test_interaction_turn_user_id_required():
+ """Test that user_id is required for InteractionTurn"""
+ turn_id = uuid4()
+ user_id = uuid4()
+ session_id = uuid4()
+ now = datetime.now(UTC)
+
+ turn = InteractionTurn(
+ turn_id=turn_id,
+ user_id=user_id,
+ session_id=session_id,
+ timestamp=now,
+ transcript="Hello",
+ llm_response="Hi",
+ model_used="groq",
+ latency_ms=100,
+ )
+
+ assert turn.user_id == user_id
+
+
+def test_interaction_turn_utc_validation():
+ """Test UTC validation for timestamp field"""
+ turn_id = uuid4()
+ user_id = uuid4()
+ session_id = uuid4()
+ now_naive = datetime.now()
+
+ # Naive datetime should be converted to UTC
+ turn = InteractionTurn(
+ turn_id=turn_id,
+ user_id=user_id,
+ session_id=session_id,
+ timestamp=now_naive,
+ transcript="Hello",
+ llm_response="Hi",
+ model_used="groq",
+ latency_ms=100,
+ )
+
+ assert turn.timestamp.tzinfo is not None
+ assert turn.timestamp.tzinfo == UTC
+
+
+def test_interaction_turn_string_validation():
+ """Test string validation for transcript and llm_response"""
+ turn_id = uuid4()
+ user_id = uuid4()
+ session_id = uuid4()
+ now = datetime.now(UTC)
+
+ # Empty string should raise error
+ with pytest.raises(ValueError, match="cannot be empty"):
+ InteractionTurn(
+ turn_id=turn_id,
+ user_id=user_id,
+ session_id=session_id,
+ timestamp=now,
+ transcript="",
+ llm_response="Hi",
+ model_used="groq",
+ latency_ms=100,
+ )
+
+ with pytest.raises(ValueError, match="cannot be empty"):
+ InteractionTurn(
+ turn_id=turn_id,
+ user_id=user_id,
+ session_id=session_id,
+ timestamp=now,
+ transcript="Hello",
+ llm_response="",
+ model_used="groq",
+ latency_ms=100,
+ )
+
+ # Whitespace-only should raise error
+ with pytest.raises(ValueError, match="cannot be empty"):
+ InteractionTurn(
+ turn_id=turn_id,
+ user_id=user_id,
+ session_id=session_id,
+ timestamp=now,
+ transcript=" ",
+ llm_response="Hi",
+ model_used="groq",
+ latency_ms=100,
+ )
+
+
+def test_interaction_turn_numeric_validation():
+ """Test numeric validation for latency_ms and tokens_used"""
+ turn_id = uuid4()
+ user_id = uuid4()
+ session_id = uuid4()
+ now = datetime.now(UTC)
+
+ # Negative latency should raise error
+ with pytest.raises(ValueError, match="latency_ms must be >= 0"):
+ InteractionTurn(
+ turn_id=turn_id,
+ user_id=user_id,
+ session_id=session_id,
+ timestamp=now,
+ transcript="Hello",
+ llm_response="Hi",
+ model_used="groq",
+ latency_ms=-1,
+ )
+
+ # Negative tokens_used should raise error
+ with pytest.raises(ValueError, match="tokens_used must be >= 0"):
+ InteractionTurn(
+ turn_id=turn_id,
+ user_id=user_id,
+ session_id=session_id,
+ timestamp=now,
+ transcript="Hello",
+ llm_response="Hi",
+ model_used="groq",
+ latency_ms=100,
+ tokens_used=-1,
+ )
+
+ # Zero values should be valid
+ turn = InteractionTurn(
+ turn_id=turn_id,
+ user_id=user_id,
+ session_id=session_id,
+ timestamp=now,
+ transcript="Hello",
+ llm_response="Hi",
+ model_used="groq",
+ latency_ms=0,
+ tokens_used=0,
+ )
+
+ assert turn.latency_ms == 0
+ assert turn.tokens_used == 0
+
+
+def test_interaction_turn_metadata_and_schema_version():
+ """Test metadata and schema_version fields"""
+ turn_id = uuid4()
+ user_id = uuid4()
+ session_id = uuid4()
+ now = datetime.now(UTC)
+
+ turn = InteractionTurn(
+ turn_id=turn_id,
+ user_id=user_id,
+ session_id=session_id,
+ timestamp=now,
+ transcript="Hello",
+ llm_response="Hi",
+ model_used="groq",
+ latency_ms=100,
+ metadata={"key": "value"},
+ schema_version="1.1",
+ )
+
+ assert turn.metadata == {"key": "value"}
+ assert turn.schema_version == "1.1"
+
+ # Default values
+ turn2 = InteractionTurn(
+ turn_id=turn_id,
+ user_id=user_id,
+ session_id=session_id,
+ timestamp=now,
+ transcript="Hello",
+ llm_response="Hi",
+ model_used="groq",
+ latency_ms=100,
+ )
+
+ assert turn2.metadata == {}
+ assert turn2.schema_version == "1.0"
+
+
+def test_interaction_turn_is_recent():
+ """Test is_recent() helper method"""
+ turn_id = uuid4()
+ user_id = uuid4()
+ session_id = uuid4()
+ now = datetime.now(UTC)
+
+ turn = InteractionTurn(
+ turn_id=turn_id,
+ user_id=user_id,
+ session_id=session_id,
+ timestamp=now,
+ transcript="Hello",
+ llm_response="Hi",
+ model_used="groq",
+ latency_ms=100,
+ )
+
+ # Should be recent (just created)
+ assert turn.is_recent(threshold_seconds=300) is True
+
+ # Old turn should not be recent
+ old_time = datetime.now(UTC) - timedelta(seconds=400)
+ old_turn = InteractionTurn(
+ turn_id=uuid4(),
+ user_id=user_id,
+ session_id=session_id,
+ timestamp=old_time,
+ transcript="Hello",
+ llm_response="Hi",
+ model_used="groq",
+ latency_ms=100,
+ )
+
+ assert old_turn.is_recent(threshold_seconds=300) is False
+
+
+def test_interaction_turn_calculate_age_seconds():
+ """Test calculate_age_seconds() helper method"""
+ turn_id = uuid4()
+ user_id = uuid4()
+ session_id = uuid4()
+ now = datetime.now(UTC)
+
+ turn = InteractionTurn(
+ turn_id=turn_id,
+ user_id=user_id,
+ session_id=session_id,
+ timestamp=now,
+ transcript="Hello",
+ llm_response="Hi",
+ model_used="groq",
+ latency_ms=100,
+ )
+
+ # Age should be very small (just created)
+ age = turn.calculate_age_seconds()
+ assert age >= 0
+ assert age < 5 # Should be less than 5 seconds
+
+ # Old turn
+ old_time = datetime.now(UTC) - timedelta(seconds=100)
+ old_turn = InteractionTurn(
+ turn_id=uuid4(),
+ user_id=user_id,
+ session_id=session_id,
+ timestamp=old_time,
+ transcript="Hello",
+ llm_response="Hi",
+ model_used="groq",
+ latency_ms=100,
+ )
+
+ age = old_turn.calculate_age_seconds()
+ assert 95 <= age <= 105 # Allow some margin for execution time
+
+
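+# The age arithmetic behind calculate_age_seconds(), and by extension
+# is_recent(threshold), which reduces to age < threshold. A sketch, not the
+# model's code:
+def _age_seconds_sketch(turn: InteractionTurn) -> float:
+    return (datetime.now(UTC) - turn.timestamp).total_seconds()
+
+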
+def test_interaction_turn_get_total_tokens():
+ """Test get_total_tokens() helper method"""
+ turn_id = uuid4()
+ user_id = uuid4()
+ session_id = uuid4()
+ now = datetime.now(UTC)
+
+ # With tokens_used
+ turn1 = InteractionTurn(
+ turn_id=turn_id,
+ user_id=user_id,
+ session_id=session_id,
+ timestamp=now,
+ transcript="Hello",
+ llm_response="Hi",
+ model_used="groq",
+ latency_ms=100,
+ tokens_used=150,
+ )
+
+ assert turn1.get_total_tokens() == 150
+
+ # Without tokens_used
+ turn2 = InteractionTurn(
+ turn_id=uuid4(),
+ user_id=user_id,
+ session_id=session_id,
+ timestamp=now,
+ transcript="Hello",
+ llm_response="Hi",
+ model_used="groq",
+ latency_ms=100,
+ )
+
+ assert turn2.get_total_tokens() == 0
+
+
+def test_interaction_turn_immutability():
+ """Test that InteractionTurn is immutable"""
+ turn_id = uuid4()
+ user_id = uuid4()
+ session_id = uuid4()
+ now = datetime.now(UTC)
+
+ turn = InteractionTurn(
+ turn_id=turn_id,
+ user_id=user_id,
+ session_id=session_id,
+ timestamp=now,
+ transcript="Hello",
+ llm_response="Hi",
+ model_used="groq",
+ latency_ms=100,
+ )
+
+ # Should not be able to modify fields
+ with pytest.raises(Exception): # Pydantic validation error
+ turn.transcript = "Modified"
+
+
+# ============================================================================
+# Enhanced ConversationHistory Tests
+# ============================================================================
+
+
+def test_conversation_history_metadata_and_schema_version():
+ """Test metadata, schema_version, and last_updated fields"""
+ user_id = uuid4()
+
+ history = ConversationHistory(
+ user_id=user_id,
+ metadata={"key": "value"},
+ schema_version="1.1",
+ )
+
+ assert history.metadata == {"key": "value"}
+ assert history.schema_version == "1.1"
+ assert history.last_updated is not None
+ assert history.last_updated.tzinfo == UTC
+
+ # Default values
+ history2 = ConversationHistory(user_id=user_id)
+
+ assert history2.metadata == {}
+ assert history2.schema_version == "1.0"
+ assert history2.last_updated is not None
+
+
+def test_conversation_history_max_turns_validation():
+ """Test max_turns validation"""
+ user_id = uuid4()
+
+ # Zero should raise error
+ with pytest.raises(ValueError, match="max_turns must be > 0"):
+ ConversationHistory(user_id=user_id, max_turns=0)
+
+ # Negative should raise error
+ with pytest.raises(ValueError, match="max_turns must be > 0"):
+ ConversationHistory(user_id=user_id, max_turns=-1)
+
+ # Over 100 should raise error
+ with pytest.raises(ValueError, match="max_turns must be <= 100"):
+ ConversationHistory(user_id=user_id, max_turns=101)
+
+ # Valid values
+ history1 = ConversationHistory(user_id=user_id, max_turns=1)
+ assert history1.max_turns == 1
+
+ history2 = ConversationHistory(user_id=user_id, max_turns=100)
+ assert history2.max_turns == 100
+
+
+def test_conversation_history_user_id_validation():
+ """Test that add_turn validates user_id match"""
+ user_id1 = uuid4()
+ user_id2 = uuid4()
+ session_id = uuid4()
+ now = datetime.now(UTC)
+
+ history = ConversationHistory(user_id=user_id1)
+
+ # Turn with matching user_id should work
+ turn1 = InteractionTurn(
+ turn_id=uuid4(),
+ user_id=user_id1,
+ session_id=session_id,
+ timestamp=now,
+ transcript="Hello",
+ llm_response="Hi",
+ model_used="groq",
+ latency_ms=100,
+ )
+
+ history = history.add_turn(turn1)
+ assert len(history.turns) == 1
+
+ # Turn with different user_id should raise error
+ turn2 = InteractionTurn(
+ turn_id=uuid4(),
+ user_id=user_id2,
+ session_id=session_id,
+ timestamp=now,
+ transcript="Hello",
+ llm_response="Hi",
+ model_used="groq",
+ latency_ms=100,
+ )
+
+ with pytest.raises(ValueError, match="Cannot add turn for user"):
+ history.add_turn(turn2)
+
+
+def test_conversation_history_is_empty():
+ """Test is_empty() helper method"""
+ user_id = uuid4()
+
+ history1 = ConversationHistory(user_id=user_id)
+ assert history1.is_empty() is True
+
+ session_id = uuid4()
+ now = datetime.now(UTC)
+ turn = InteractionTurn(
+ turn_id=uuid4(),
+ user_id=user_id,
+ session_id=session_id,
+ timestamp=now,
+ transcript="Hello",
+ llm_response="Hi",
+ model_used="groq",
+ latency_ms=100,
+ )
+
+ history2 = history1.add_turn(turn)
+ assert history2.is_empty() is False
+
+
+def test_conversation_history_get_oldest_and_newest_turn():
+ """Test get_oldest_turn() and get_newest_turn() helper methods"""
+ user_id = uuid4()
+ session_id = uuid4()
+ now = datetime.now(UTC)
+
+ history = ConversationHistory(user_id=user_id)
+
+ # Empty history
+ assert history.get_oldest_turn() is None
+ assert history.get_newest_turn() is None
+
+ # Add first turn
+ turn1 = InteractionTurn(
+ turn_id=uuid4(),
+ user_id=user_id,
+ session_id=session_id,
+ timestamp=now,
+ transcript="First",
+ llm_response="Response 1",
+ model_used="groq",
+ latency_ms=100,
+ )
+ history = history.add_turn(turn1)
+
+ assert history.get_newest_turn() == turn1
+ assert history.get_oldest_turn() == turn1
+
+ # Add second turn
+ turn2 = InteractionTurn(
+ turn_id=uuid4(),
+ user_id=user_id,
+ session_id=session_id,
+ timestamp=now,
+ transcript="Second",
+ llm_response="Response 2",
+ model_used="groq",
+ latency_ms=120,
+ )
+ history = history.add_turn(turn2)
+
+ assert history.get_newest_turn() == turn2 # Newest is first
+ assert history.get_oldest_turn() == turn1 # Oldest is last
+
+
+def test_conversation_history_get_turns_count():
+ """Test get_turns_count() helper method"""
+ user_id = uuid4()
+ session_id = uuid4()
+ now = datetime.now(UTC)
+
+ history = ConversationHistory(user_id=user_id)
+ assert history.get_turns_count() == 0
+
+ for i in range(3):
+ turn = InteractionTurn(
+ turn_id=uuid4(),
+ user_id=user_id,
+ session_id=session_id,
+ timestamp=now,
+ transcript=f"Message {i}",
+ llm_response=f"Response {i}",
+ model_used="groq",
+ latency_ms=100,
+ )
+ history = history.add_turn(turn)
+
+ assert history.get_turns_count() == 3
+ assert history.get_turns_count() == len(history.turns)
+
+
+def test_conversation_history_should_trim():
+ """Test should_trim() helper method (defensive check)"""
+ user_id = uuid4()
+ session_id = uuid4()
+ now = datetime.now(UTC)
+
+ history = ConversationHistory(user_id=user_id, max_turns=3)
+
+ # Should not need trimming initially
+ assert history.should_trim() is False
+
+ # Add turns up to max_turns
+ for i in range(3):
+ turn = InteractionTurn(
+ turn_id=uuid4(),
+ user_id=user_id,
+ session_id=session_id,
+ timestamp=now,
+ transcript=f"Message {i}",
+ llm_response=f"Response {i}",
+ model_used="groq",
+ latency_ms=100,
+ )
+ history = history.add_turn(turn)
+
+ # Should not need trimming (add_turn maintains limit)
+ assert history.should_trim() is False
+ assert len(history.turns) == 3
+
+
+def test_conversation_history_get_total_tokens():
+ """Test get_total_tokens() helper method"""
+ user_id = uuid4()
+ session_id = uuid4()
+ now = datetime.now(UTC)
+
+ history = ConversationHistory(user_id=user_id)
+
+ # Empty history
+ assert history.get_total_tokens() == 0
+
+ # Add turns with tokens
+ turn1 = InteractionTurn(
+ turn_id=uuid4(),
+ user_id=user_id,
+ session_id=session_id,
+ timestamp=now,
+ transcript="Hello",
+ llm_response="Hi",
+ model_used="groq",
+ latency_ms=100,
+ tokens_used=50,
+ )
+ history = history.add_turn(turn1)
+
+ turn2 = InteractionTurn(
+ turn_id=uuid4(),
+ user_id=user_id,
+ session_id=session_id,
+ timestamp=now,
+ transcript="How are you?",
+ llm_response="I'm good",
+ model_used="groq",
+ latency_ms=120,
+ tokens_used=75,
+ )
+ history = history.add_turn(turn2)
+
+ assert history.get_total_tokens() == 125
+
+ # Add turn without tokens
+ turn3 = InteractionTurn(
+ turn_id=uuid4(),
+ user_id=user_id,
+ session_id=session_id,
+ timestamp=now,
+ transcript="Bye",
+ llm_response="Goodbye",
+ model_used="groq",
+ latency_ms=100,
+ )
+ history = history.add_turn(turn3)
+
+ assert history.get_total_tokens() == 125 # Should not change
+
+
+def test_conversation_history_get_average_latency():
+ """Test get_average_latency() helper method"""
+ user_id = uuid4()
+ session_id = uuid4()
+ now = datetime.now(UTC)
+
+ history = ConversationHistory(user_id=user_id)
+
+ # Empty history
+ assert history.get_average_latency() is None
+
+ # Single turn
+ turn1 = InteractionTurn(
+ turn_id=uuid4(),
+ user_id=user_id,
+ session_id=session_id,
+ timestamp=now,
+ transcript="Hello",
+ llm_response="Hi",
+ model_used="groq",
+ latency_ms=100,
+ )
+ history = history.add_turn(turn1)
+
+ assert history.get_average_latency() == 100.0
+
+ # Multiple turns
+ turn2 = InteractionTurn(
+ turn_id=uuid4(),
+ user_id=user_id,
+ session_id=session_id,
+ timestamp=now,
+ transcript="How are you?",
+ llm_response="I'm good",
+ model_used="groq",
+ latency_ms=120,
+ )
+ history = history.add_turn(turn2)
+
+ assert history.get_average_latency() == 110.0 # (100 + 120) / 2
+
+ turn3 = InteractionTurn(
+ turn_id=uuid4(),
+ user_id=user_id,
+ session_id=session_id,
+ timestamp=now,
+ transcript="Bye",
+ llm_response="Goodbye",
+ model_used="groq",
+ latency_ms=80,
+ )
+ history = history.add_turn(turn3)
+
+ assert history.get_average_latency() == 100.0 # (100 + 120 + 80) / 3
+
+
+def test_conversation_history_add_turn_updates_last_updated():
+ """Test that add_turn() updates last_updated timestamp"""
+ user_id = uuid4()
+ session_id = uuid4()
+ now = datetime.now(UTC)
+
+ history = ConversationHistory(user_id=user_id)
+ initial_time = history.last_updated
+
+ # Wait a tiny bit to ensure timestamp difference
+ import time
+
+ time.sleep(0.01)
+
+ turn = InteractionTurn(
+ turn_id=uuid4(),
+ user_id=user_id,
+ session_id=session_id,
+ timestamp=now,
+ transcript="Hello",
+ llm_response="Hi",
+ model_used="groq",
+ latency_ms=100,
+ )
+
+ history = history.add_turn(turn)
+
+ # last_updated should be newer
+ assert history.last_updated > initial_time
+
+
+def test_conversation_history_turns_user_id_validation():
+ """Test that ConversationHistory validates all turns belong to same user_id"""
+ user_id1 = uuid4()
+ user_id2 = uuid4()
+ session_id = uuid4()
+ now = datetime.now(UTC)
+
+ # Create turn with different user_id
+ turn = InteractionTurn(
+ turn_id=uuid4(),
+ user_id=user_id2,
+ session_id=session_id,
+ timestamp=now,
+ transcript="Hello",
+ llm_response="Hi",
+ model_used="groq",
+ latency_ms=100,
+ )
+
+ # Should raise error when creating history with mismatched turn
+ with pytest.raises(ValueError, match="belongs to user"):
+ ConversationHistory(
+ user_id=user_id1,
+ turns=[turn],
+ )
+
+
+# ============================================================================
+# Control Message Tests
+# ============================================================================
+
+
+def test_control_message_creation():
+ """Test ControlMessage model creation"""
+ message = ControlMessage(
+ type=ControlMessageType.SESSION_CONTROL, action="start_active_mode"
+ )
+
+ assert message.type == ControlMessageType.SESSION_CONTROL
+ assert message.action == "start_active_mode"
+ assert message.payload == {}
+ assert message.timestamp is not None
+
+
+def test_control_message_with_payload():
+ """Test ControlMessage with payload"""
+ message = ControlMessage(
+ type=ControlMessageType.ERROR,
+ payload={"error_code": 500, "message": "Internal error"},
+ )
+
+ assert message.type == ControlMessageType.ERROR
+ assert message.payload["error_code"] == 500
+ assert message.payload["message"] == "Internal error"
+
+
+def test_control_message_default_timestamp():
+ """Test that ControlMessage has default timestamp."""
+ before = datetime.now(UTC)
+ message = ControlMessage(type=ControlMessageType.HEARTBEAT)
+ after = datetime.now(UTC)
+
+ assert before <= message.timestamp <= after
+
+
+# ============================================================================
+# Binary Frame Tests
+# ============================================================================
+
+
+def test_binary_frame_parse_valid():
+ """Test BinaryFrame.parse() with valid data"""
+ payload = b"test payload"
+ length = len(payload)
+
+ # Create frame: [stream_type, flags, length_high, length_low] + payload
+ frame_data = (
+ bytes(
+ [
+ StreamType.AUDIO.value,
+ 0x00, # flags
+ (length >> 8) & 0xFF, # length high byte
+ length & 0xFF, # length low byte
+ ]
+ )
+ + payload
+ )
+
+ frame = BinaryFrame.parse(frame_data)
+
+ assert frame.stream_type == StreamType.AUDIO
+ assert frame.flags == 0x00
+ assert frame.length == length
+ assert frame.payload == payload
+
+
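+# The hand-assembled header above matches struct's big-endian "BBH" layout:
+# 1-byte stream type, 1-byte flags, 2-byte payload length. A hypothetical
+# builder equivalent to the byte math used throughout these tests:
+def _build_frame_bytes(stream_type: StreamType, flags: int, payload: bytes) -> bytes:
+    import struct
+
+    return struct.pack(">BBH", stream_type.value, flags, len(payload)) + payload
+
+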
+def test_binary_frame_parse_too_short():
+ """Test BinaryFrame.parse() with too short data"""
+ with pytest.raises(ValueError, match="Frame too short"):
+ BinaryFrame.parse(b"abc") # Only 3 bytes
+
+
+def test_binary_frame_parse_length_mismatch():
+ """Test BinaryFrame.parse() with payload length mismatch"""
+ # Header says length is 10, but payload is only 5 bytes
+ frame_data = (
+ bytes(
+ [
+ StreamType.VIDEO.value,
+ 0x00,
+ 0x00, # length high = 0
+ 10, # length low = 10
+ ]
+ )
+ + b"short"
+ ) # Only 5 bytes
+
+ with pytest.raises(ValueError, match="Incomplete frame"):
+ BinaryFrame.parse(frame_data)
+
+
+def test_binary_frame_to_bytes():
+ """Test BinaryFrame.to_bytes() serialization"""
+ payload = b"test data"
+ frame = BinaryFrame(
+ stream_type=StreamType.CONTROL,
+ flags=FrameFlags.PRIORITY.value,
+ payload=payload,
+ length=len(payload),
+ )
+
+ serialized = frame.to_bytes()
+
+ # Should have 4-byte header + payload
+ assert len(serialized) == 4 + len(payload)
+ assert serialized[0] == StreamType.CONTROL.value
+ assert serialized[1] == FrameFlags.PRIORITY.value
+ length_from_header = int.from_bytes(serialized[2:4], "big")
+ assert length_from_header == len(payload)
+ assert serialized[4:] == payload
+
+
+def test_binary_frame_round_trip():
+ """Test BinaryFrame parse -> to_bytes round trip"""
+ original_payload = b"round trip test data"
+ original_frame = BinaryFrame(
+ stream_type=StreamType.AUDIO,
+ flags=FrameFlags.END_OF_STREAM.value,
+ payload=original_payload,
+ length=len(original_payload),
+ )
+
+ # Serialize
+ serialized = original_frame.to_bytes()
+
+ # Parse back
+ parsed_frame = BinaryFrame.parse(serialized)
+
+ assert parsed_frame.stream_type == original_frame.stream_type
+ assert parsed_frame.flags == original_frame.flags
+ assert parsed_frame.length == original_frame.length
+ assert parsed_frame.payload == original_frame.payload
+
+
+def test_binary_frame_with_flags():
+ """Test BinaryFrame with multiple flags"""
+ frame = BinaryFrame(
+ stream_type=StreamType.VIDEO,
+ flags=FrameFlags.END_OF_STREAM.value | FrameFlags.PRIORITY.value,
+ payload=b"data",
+ length=4,
+ )
+
+ assert frame.flags & FrameFlags.END_OF_STREAM.value
+ assert frame.flags & FrameFlags.PRIORITY.value
+
+
+# ============================================================================
+# Enhanced ControlMessage Tests
+# ============================================================================
+
+
+def test_control_message_metadata_and_schema_version():
+ """Test metadata and schema_version fields"""
+ message = ControlMessage(
+ type=ControlMessageType.SESSION_CONTROL,
+ action="start_active_mode",
+ metadata={"key": "value"},
+ schema_version="1.1",
+ )
+
+ assert message.metadata == {"key": "value"}
+ assert message.schema_version == "1.1"
+
+ # Default values
+ message2 = ControlMessage(
+ type=ControlMessageType.HEARTBEAT,
+ )
+
+ assert message2.metadata == {}
+ assert message2.schema_version == "1.0"
+
+
+def test_control_message_action_validation_session_control():
+ """Test action validation for SESSION_CONTROL messages"""
+ # Valid actions
+ valid_actions = ["start_active_mode", "start_passive_mode", "end_session"]
+ for action in valid_actions:
+ message = ControlMessage(type=ControlMessageType.SESSION_CONTROL, action=action)
+ assert message.action == action
+
+ # Missing action should raise error
+ with pytest.raises(ValueError, match="action is required for SESSION_CONTROL"):
+ ControlMessage(type=ControlMessageType.SESSION_CONTROL)
+
+ # Invalid action should raise error
+ with pytest.raises(ValueError, match="Invalid action"):
+ ControlMessage(type=ControlMessageType.SESSION_CONTROL, action="invalid_action")
+
+
+def test_control_message_action_validation_heartbeat():
+ """Test action validation for HEARTBEAT messages"""
+ # HEARTBEAT should not have action
+ message = ControlMessage(type=ControlMessageType.HEARTBEAT)
+ assert message.action is None
+
+ # HEARTBEAT with action should raise error
+ with pytest.raises(ValueError, match="action must be None for HEARTBEAT"):
+ ControlMessage(type=ControlMessageType.HEARTBEAT, action="some_action")
+
+
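+# The action rules pinned down by the SESSION_CONTROL and HEARTBEAT tests
+# above, as a plain-Python sketch; the real model presumably enforces these
+# in a Pydantic validator, and the names here are illustrative:
+def _validate_action_sketch(
+    msg_type: ControlMessageType, action: str | None
+) -> None:
+    session_actions = {"start_active_mode", "start_passive_mode", "end_session"}
+    if msg_type == ControlMessageType.SESSION_CONTROL:
+        if action is None:
+            raise ValueError("action is required for SESSION_CONTROL")
+        if action not in session_actions:
+            raise ValueError(f"Invalid action: {action}")
+    elif msg_type == ControlMessageType.HEARTBEAT and action is not None:
+        raise ValueError("action must be None for HEARTBEAT")
+    # ERROR and ACK accept an optional action (see the next test)
+
+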
+def test_control_message_action_validation_error_ack():
+ """Test that ERROR and ACK can have optional actions"""
+ # ERROR can have optional action
+ message1 = ControlMessage(type=ControlMessageType.ERROR, action="retry")
+ assert message1.action == "retry"
+
+ message2 = ControlMessage(type=ControlMessageType.ERROR)
+ assert message2.action is None
+
+ # ACK can have optional action
+ message3 = ControlMessage(type=ControlMessageType.ACK, action="received")
+ assert message3.action == "received"
+
+ message4 = ControlMessage(type=ControlMessageType.ACK)
+ assert message4.action is None
+
+
+def test_control_message_utc_validation():
+ """Test UTC validation for timestamp field"""
+ now_naive = datetime.now()
+
+ # Naive datetime should be converted to UTC
+ message = ControlMessage(type=ControlMessageType.HEARTBEAT, timestamp=now_naive)
+
+ assert message.timestamp.tzinfo is not None
+ assert message.timestamp.tzinfo == UTC
+
+
+def test_control_message_is_session_control():
+ """Test is_session_control() helper method"""
+ message1 = ControlMessage(
+ type=ControlMessageType.SESSION_CONTROL, action="start_active_mode"
+ )
+ assert message1.is_session_control() is True
+
+ message2 = ControlMessage(type=ControlMessageType.HEARTBEAT)
+ assert message2.is_session_control() is False
+
+
+def test_control_message_is_error():
+ """Test is_error() helper method"""
+ message1 = ControlMessage(type=ControlMessageType.ERROR)
+ assert message1.is_error() is True
+
+ message2 = ControlMessage(type=ControlMessageType.HEARTBEAT)
+ assert message2.is_error() is False
+
+
+def test_control_message_is_heartbeat():
+ """Test is_heartbeat() helper method"""
+ message1 = ControlMessage(type=ControlMessageType.HEARTBEAT)
+ assert message1.is_heartbeat() is True
+
+ message2 = ControlMessage(
+ type=ControlMessageType.SESSION_CONTROL, action="start_active_mode"
+ )
+ assert message2.is_heartbeat() is False
+
+
+def test_control_message_is_ack():
+ """Test is_ack() helper method"""
+ message1 = ControlMessage(type=ControlMessageType.ACK)
+ assert message1.is_ack() is True
+
+ message2 = ControlMessage(type=ControlMessageType.HEARTBEAT)
+ assert message2.is_ack() is False
+
+
+def test_control_message_get_action_type():
+ """Test get_action_type() helper method"""
+ message1 = ControlMessage(
+ type=ControlMessageType.SESSION_CONTROL, action="start_active_mode"
+ )
+ assert message1.get_action_type() == "start_active_mode"
+
+ message2 = ControlMessage(type=ControlMessageType.HEARTBEAT)
+ assert message2.get_action_type() is None
+
+
+def test_control_message_has_payload():
+ """Test has_payload() helper method"""
+ message1 = ControlMessage(type=ControlMessageType.ERROR, payload={"error": "test"})
+ assert message1.has_payload() is True
+
+ message2 = ControlMessage(type=ControlMessageType.HEARTBEAT)
+ assert message2.has_payload() is False
+
+
+# ============================================================================
+# Enhanced BinaryFrame Tests
+# ============================================================================
+
+
+def test_binary_frame_metadata_and_schema_version():
+ """Test metadata and schema_version fields"""
+ frame = BinaryFrame(
+ stream_type=StreamType.AUDIO,
+ flags=0,
+ payload=b"test",
+ length=4,
+ metadata={"key": "value"},
+ schema_version="1.1",
+ )
+
+ assert frame.metadata == {"key": "value"}
+ assert frame.schema_version == "1.1"
+
+ # Default values
+ frame2 = BinaryFrame(
+ stream_type=StreamType.VIDEO, flags=0, payload=b"data", length=4
+ )
+
+ assert frame2.metadata == {}
+ assert frame2.schema_version == "1.0"
+
+
+def test_binary_frame_flags_validation():
+ """Test flags validation"""
+ # Valid flags (0-255)
+ frame1 = BinaryFrame(
+ stream_type=StreamType.AUDIO, flags=0, payload=b"test", length=4
+ )
+ assert frame1.flags == 0
+
+ frame2 = BinaryFrame(
+ stream_type=StreamType.AUDIO, flags=255, payload=b"test", length=4
+ )
+ assert frame2.flags == 255
+
+ # Invalid flags (negative)
+ with pytest.raises(ValueError, match="flags must be between 0 and 255"):
+ BinaryFrame(stream_type=StreamType.AUDIO, flags=-1, payload=b"test", length=4)
+
+ # Invalid flags (too large)
+ with pytest.raises(ValueError, match="flags must be between 0 and 255"):
+ BinaryFrame(stream_type=StreamType.AUDIO, flags=256, payload=b"test", length=4)
+
+
+def test_binary_frame_length_validation():
+ """Test length validation"""
+ # Valid length (0-65535)
+ frame1 = BinaryFrame(stream_type=StreamType.AUDIO, flags=0, payload=b"", length=0)
+ assert frame1.length == 0
+
+ frame2 = BinaryFrame(
+ stream_type=StreamType.AUDIO,
+ flags=0,
+ payload=b"x" * 65535,
+ length=65535,
+ )
+ assert frame2.length == 65535
+
+ # Invalid length (negative)
+ with pytest.raises(ValueError, match="length must be between 0 and 65535"):
+ BinaryFrame(stream_type=StreamType.AUDIO, flags=0, payload=b"test", length=-1)
+
+ # Invalid length (too large)
+ with pytest.raises(ValueError, match="length must be between 0 and 65535"):
+ BinaryFrame(
+ stream_type=StreamType.AUDIO, flags=0, payload=b"test", length=65536
+ )
+
+
+def test_binary_frame_payload_integrity_validation():
+ """Test payload integrity validation"""
+ # Valid: length matches payload
+ frame1 = BinaryFrame(
+ stream_type=StreamType.AUDIO, flags=0, payload=b"test", length=4
+ )
+ assert frame1.length == len(frame1.payload)
+
+ # Invalid: length mismatch
+ with pytest.raises(ValueError, match="Payload length mismatch"):
+ BinaryFrame(stream_type=StreamType.AUDIO, flags=0, payload=b"test", length=5)
+
+ # Invalid: length too large (validated before payload integrity check)
+ with pytest.raises(ValueError, match="length must be between 0 and 65535"):
+ BinaryFrame(
+ stream_type=StreamType.AUDIO,
+ flags=0,
+ payload=b"x" * 65535,
+ length=65536,
+ )
+
+
+def test_binary_frame_has_flag():
+ """Test has_flag() helper method"""
+ frame = BinaryFrame(
+ stream_type=StreamType.AUDIO,
+ flags=FrameFlags.END_OF_STREAM.value | FrameFlags.PRIORITY.value,
+ payload=b"test",
+ length=4,
+ )
+
+ assert frame.has_flag(FrameFlags.END_OF_STREAM) is True
+ assert frame.has_flag(FrameFlags.PRIORITY) is True
+ assert frame.has_flag(FrameFlags.ERROR) is False
+
+
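+# has_flag() is presumably a plain bitmask test, equivalent to:
+#
+#     def has_flag(self, flag: FrameFlags) -> bool:
+#         return bool(self.flags & flag.value)
+
+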
+def test_binary_frame_is_control():
+ """Test is_control() helper method"""
+ frame1 = BinaryFrame(
+ stream_type=StreamType.CONTROL, flags=0, payload=b"test", length=4
+ )
+ assert frame1.is_control() is True
+
+ frame2 = BinaryFrame(
+ stream_type=StreamType.AUDIO, flags=0, payload=b"test", length=4
+ )
+ assert frame2.is_control() is False
+
+
+def test_binary_frame_is_audio():
+ """Test is_audio() helper method"""
+ frame1 = BinaryFrame(
+ stream_type=StreamType.AUDIO, flags=0, payload=b"test", length=4
+ )
+ assert frame1.is_audio() is True
+
+ frame2 = BinaryFrame(
+ stream_type=StreamType.VIDEO, flags=0, payload=b"test", length=4
+ )
+ assert frame2.is_audio() is False
+
+
+def test_binary_frame_is_video():
+ """Test is_video() helper method"""
+ frame1 = BinaryFrame(
+ stream_type=StreamType.VIDEO, flags=0, payload=b"test", length=4
+ )
+ assert frame1.is_video() is True
+
+ frame2 = BinaryFrame(
+ stream_type=StreamType.AUDIO, flags=0, payload=b"test", length=4
+ )
+ assert frame2.is_video() is False
+
+
+def test_binary_frame_is_end_of_stream():
+ """Test is_end_of_stream() helper method"""
+ frame1 = BinaryFrame(
+ stream_type=StreamType.AUDIO,
+ flags=FrameFlags.END_OF_STREAM.value,
+ payload=b"test",
+ length=4,
+ )
+ assert frame1.is_end_of_stream() is True
+
+ frame2 = BinaryFrame(
+ stream_type=StreamType.AUDIO, flags=0, payload=b"test", length=4
+ )
+ assert frame2.is_end_of_stream() is False
+
+
+def test_binary_frame_is_priority():
+ """Test is_priority() helper method"""
+ frame1 = BinaryFrame(
+ stream_type=StreamType.AUDIO,
+ flags=FrameFlags.PRIORITY.value,
+ payload=b"test",
+ length=4,
+ )
+ assert frame1.is_priority() is True
+
+ frame2 = BinaryFrame(
+ stream_type=StreamType.AUDIO, flags=0, payload=b"test", length=4
+ )
+ assert frame2.is_priority() is False
+
+
+def test_binary_frame_has_error():
+ """Test has_error() helper method"""
+ frame1 = BinaryFrame(
+ stream_type=StreamType.AUDIO,
+ flags=FrameFlags.ERROR.value,
+ payload=b"test",
+ length=4,
+ )
+ assert frame1.has_error() is True
+
+ frame2 = BinaryFrame(
+ stream_type=StreamType.AUDIO, flags=0, payload=b"test", length=4
+ )
+ assert frame2.has_error() is False
+
+
+def test_binary_frame_get_total_size():
+ """Test get_total_size() helper method"""
+ payload = b"test data"
+ frame = BinaryFrame(
+ stream_type=StreamType.AUDIO, flags=0, payload=payload, length=len(payload)
+ )
+
+ assert frame.get_total_size() == 4 + len(payload)
+ assert frame.get_total_size() == 4 + 9 # 4-byte header + 9-byte payload
+
+
+def test_binary_frame_validate_integrity():
+ """Test validate_integrity() helper method"""
+ frame = BinaryFrame(
+ stream_type=StreamType.AUDIO, flags=0, payload=b"test", length=4
+ )
+
+ # Should pass validation
+ assert frame.validate_integrity() is True
+
+    # A mismatched frame cannot be built directly; the Pydantic model
+    # validator rejects it at construction, so only the valid path is
+    # exercised here
+
+
+def test_binary_frame_parse_max_payload_size():
+ """Test BinaryFrame.parse() with maximum payload size"""
+ max_payload = b"x" * 65535
+ length = len(max_payload)
+
+ frame_data = (
+ bytes(
+ [
+ StreamType.AUDIO.value,
+ 0x00,
+ (length >> 8) & 0xFF,
+ length & 0xFF,
+ ]
+ )
+ + max_payload
+ )
+
+ frame = BinaryFrame.parse(frame_data)
+ assert frame.length == 65535
+ assert len(frame.payload) == 65535
+
+
+def test_binary_frame_parse_invalid_stream_type():
+ """Test BinaryFrame.parse() with invalid stream type"""
+ frame_data = bytes([0xFF, 0x00, 0x00, 0x04]) + b"test"
+
+ with pytest.raises(ValueError, match="Invalid stream type"):
+ BinaryFrame.parse(frame_data)
+
+
+def test_binary_frame_parse_payload_too_large():
+ """Test BinaryFrame.parse() with payload exceeding max size"""
+ # Header says length is 65535 (max), verify it parses correctly
+ max_payload = b"x" * 65535
+ frame_data = bytes([StreamType.AUDIO.value, 0x00, 0xFF, 0xFF]) + max_payload
+
+ # Should parse successfully (max size is valid)
+ frame = BinaryFrame.parse(frame_data)
+ assert frame.length == 65535
+ assert len(frame.payload) == 65535
+
+    # Constructing a frame whose payload exceeds the declared length fails:
+    # the model's payload-integrity validator catches the mismatch
+    with pytest.raises(ValueError, match="Payload length mismatch"):
+ BinaryFrame(
+ stream_type=StreamType.AUDIO,
+ flags=0,
+ payload=b"x" * 65536,
+ length=65535, # Length says 65535, but payload is 65536
+ )
+
+
+def test_binary_frame_to_bytes_validation():
+ """Test that to_bytes() validates before serialization"""
+ # Valid frame should serialize
+ frame1 = BinaryFrame(
+ stream_type=StreamType.AUDIO, flags=0, payload=b"test", length=4
+ )
+ serialized = frame1.to_bytes()
+ assert len(serialized) == 8 # 4-byte header + 4-byte payload
+
+    # A mismatched frame never reaches to_bytes(): Pydantic rejects it at
+    # construction (covered by test_binary_frame_payload_integrity_validation)
+
+
+def test_binary_frame_edge_cases():
+ """Test edge cases: empty payload, multiple flags"""
+ # Empty payload
+ frame1 = BinaryFrame(stream_type=StreamType.AUDIO, flags=0, payload=b"", length=0)
+ assert frame1.length == 0
+ assert len(frame1.payload) == 0
+ assert frame1.get_total_size() == 4
+
+ # Multiple flags
+ flags = (
+ FrameFlags.END_OF_STREAM.value
+ | FrameFlags.PRIORITY.value
+ | FrameFlags.ERROR.value
+ )
+ frame2 = BinaryFrame(
+ stream_type=StreamType.VIDEO, flags=flags, payload=b"data", length=4
+ )
+
+ assert frame2.has_flag(FrameFlags.END_OF_STREAM) is True
+ assert frame2.has_flag(FrameFlags.PRIORITY) is True
+ assert frame2.has_flag(FrameFlags.ERROR) is True
+
+
+# ============================================================================
+# Default Factory Tests
+# ============================================================================
+
+
+def test_default_factories_dont_share_state():
+ """Test that default factories don't share mutable state"""
+ # Create two SessionState instances
+ session_id1 = uuid4()
+ session_id2 = uuid4()
+ user_id = uuid4()
+ now = datetime.now(UTC)
+
+ session1 = SessionState(
+ session_id=session_id1,
+ user_id=user_id,
+ mode=SessionMode.ACTIVE,
+ created_at=now,
+ last_activity=now,
+ )
+
+ session2 = SessionState(
+ session_id=session_id2,
+ user_id=user_id,
+ mode=SessionMode.ACTIVE,
+ created_at=now,
+ last_activity=now,
+ )
+
+ # Modify preferences in one
+ # Since models are frozen, we need to create new instances
+ # But the test is that default_factory creates new dicts
+ assert session1.preferences is not session2.preferences
+
+ # Same for ConversationHistory
+ user_id = uuid4()
+ history1 = ConversationHistory(user_id=user_id)
+ history2 = ConversationHistory(user_id=user_id)
+
+ assert history1.turns is not history2.turns
+
+
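+# The isolation above comes from Pydantic's default_factory: each instance
+# gets a freshly constructed dict/list rather than one shared default. A
+# minimal illustration (hypothetical model, not from the codebase):
+from pydantic import BaseModel, Field
+
+
+class _DefaultFactoryExample(BaseModel):
+    # A bare `preferences: dict = {}` default would alias one dict everywhere.
+    preferences: dict = Field(default_factory=dict)
+
+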
+# ============================================================================
+# JSON Serialization Tests
+# ============================================================================
+
+
+def test_uuid_json_serialization():
+ """Test that UUIDs are properly serialized in JSON"""
+ user_id = uuid4()
+ now = datetime.now(UTC)
+
+ user = User(
+ user_id=user_id,
+ email="test@example.com",
+ oauth_provider=OAuthProvider.GOOGLE,
+ created_at=now,
+ updated_at=now,
+ )
+
+ json_str = user.model_dump_json()
+ assert str(user_id) in json_str
+
+
+def test_datetime_json_serialization():
+ """Test that datetimes are properly serialized in JSON"""
+ now = datetime.now(UTC)
+ user_id = uuid4()
+
+ user = User(
+ user_id=user_id,
+ email="test@example.com",
+ oauth_provider=OAuthProvider.GOOGLE,
+ created_at=now,
+ updated_at=now,
+ )
+
+ json_data = user.model_dump()
+    # In Python mode, model_dump() may keep datetime objects; JSON mode
+    # would render them as ISO-8601 strings, so accept either here
+    assert isinstance(json_data["created_at"], (str, datetime))
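+
+
+def test_datetime_json_mode_serialization():
+    """Hedged companion sketch: in Pydantic v2, model_dump(mode="json")
+    should render datetimes as ISO-8601 strings (standard library behavior,
+    not specific to these models)."""
+    now = datetime.now(UTC)
+    user_id = uuid4()
+
+    user = User(
+        user_id=user_id,
+        email="test@example.com",
+        oauth_provider=OAuthProvider.GOOGLE,
+        created_at=now,
+        updated_at=now,
+    )
+
+    # JSON mode forces JSON-compatible types throughout the dump
+    json_data = user.model_dump(mode="json")
+    assert isinstance(json_data["created_at"], str)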
diff --git a/tests/core/test_telemetry.py b/tests/core/test_telemetry.py
new file mode 100644
index 0000000..75cacc0
--- /dev/null
+++ b/tests/core/test_telemetry.py
@@ -0,0 +1,281 @@
+"""Unit tests for core telemetry module."""
+
+import pytest
+
+from core.telemetry import Metrics, TelemetryManager
+
+
+@pytest.fixture(autouse=True)
+def cleanup_telemetry():
+ """Ensure telemetry is cleaned up after each test."""
+ yield
+    # Cleanup: shut down any global telemetry state so metrics don't
+    # keep trying to export after the tests finish
+ try:
+ from opentelemetry import metrics, trace
+
+        # Shut down any existing providers
+ if hasattr(metrics, "_METER_PROVIDER"):
+ provider = metrics.get_meter_provider()
+ if hasattr(provider, "shutdown"):
+ provider.shutdown()
+
+ if hasattr(trace, "_TRACER_PROVIDER"):
+ provider = trace.get_tracer_provider()
+ if hasattr(provider, "shutdown"):
+ provider.shutdown()
+ except Exception:
+ pass # Ignore errors during cleanup
+
+
+def test_telemetry_manager_init():
+ """Test TelemetryManager initialization."""
+ manager = TelemetryManager(
+ service_name="test-service",
+ otlp_endpoint="http://localhost:4317",
+ environment="test",
+ enable_tracing=False, # Disable to prevent connection attempts
+ enable_metrics=False, # Disable to prevent connection attempts
+ )
+
+ assert manager.service_name == "test-service"
+ assert manager.otlp_endpoint == "http://localhost:4317"
+ assert manager.environment == "test"
+ assert manager.enable_tracing is False
+ assert manager.enable_metrics is False
+
+ manager.shutdown() # Cleanup
+
+
+def test_telemetry_manager_init_disabled():
+ """Test TelemetryManager with tracing/metrics disabled."""
+ manager = TelemetryManager(
+ service_name="test-service",
+ otlp_endpoint="http://localhost:4317",
+ enable_tracing=False,
+ enable_metrics=False,
+ )
+
+ assert manager.enable_tracing is False
+ assert manager.enable_metrics is False
+
+ manager.shutdown() # Cleanup
+
+
+def test_get_tracer():
+ """Test getting tracer."""
+ manager = TelemetryManager(
+ service_name="test-service",
+ otlp_endpoint="http://localhost:4317",
+ enable_tracing=True, # Enable for this test
+ enable_metrics=False, # Disable metrics
+ )
+
+ tracer = manager.get_tracer()
+ assert tracer is not None
+
+ # With custom name
+ custom_tracer = manager.get_tracer("custom-name")
+ assert custom_tracer is not None
+
+ manager.shutdown() # Cleanup
+
+
+def test_get_tracer_disabled():
+ """Test getting tracer when tracing is disabled."""
+ manager = TelemetryManager(
+ service_name="test-service",
+ otlp_endpoint="http://localhost:4317",
+ enable_tracing=False,
+ enable_metrics=False,
+ )
+
+ tracer = manager.get_tracer()
+ # Should return no-op tracer
+ assert tracer is not None
+
+ manager.shutdown() # Cleanup
+
+
+def test_get_meter():
+ """Test getting meter."""
+ manager = TelemetryManager(
+ service_name="test-service",
+ otlp_endpoint="http://localhost:4317",
+ enable_tracing=False, # Disable tracing
+ enable_metrics=True, # Enable for this test
+ )
+
+ meter = manager.get_meter()
+ assert meter is not None
+
+ # With custom name
+ custom_meter = manager.get_meter("custom-name")
+ assert custom_meter is not None
+
+ manager.shutdown() # Cleanup
+
+
+def test_get_meter_disabled():
+ """Test getting meter when metrics is disabled."""
+ manager = TelemetryManager(
+ service_name="test-service",
+ otlp_endpoint="http://localhost:4317",
+ enable_tracing=False,
+ enable_metrics=False,
+ )
+
+ meter = manager.get_meter()
+ # Should return no-op meter
+ assert meter is not None
+
+ manager.shutdown() # Cleanup
+
+
+def test_create_span():
+ """Test creating span."""
+ manager = TelemetryManager(
+ service_name="test-service",
+ otlp_endpoint="http://localhost:4317",
+ enable_tracing=True, # Enable for this test
+ enable_metrics=False, # Disable metrics
+ )
+
+ span = manager.create_span("test-span")
+ assert span is not None
+
+ # With attributes
+ span_with_attrs = manager.create_span(
+ "test-span", attributes={"key": "value", "number": 123}
+ )
+ assert span_with_attrs is not None
+
+ manager.shutdown() # Cleanup
+
+
+def test_create_span_with_tracer_name():
+ """Test creating span with custom tracer name."""
+ manager = TelemetryManager(
+ service_name="test-service",
+ otlp_endpoint="http://localhost:4317",
+ enable_tracing=True, # Enable for this test
+ enable_metrics=False, # Disable metrics
+ )
+
+ span = manager.create_span("test-span", tracer_name="custom-tracer")
+ assert span is not None
+
+ manager.shutdown() # Cleanup
+
+
+def test_record_metric_histogram():
+ """Test recording histogram metric."""
+ manager = TelemetryManager(
+ service_name="test-service",
+ otlp_endpoint="http://localhost:4317",
+ enable_tracing=False, # Disable tracing
+ enable_metrics=True, # Enable for this test
+ )
+
+ # Should not raise
+ manager.record_metric("test_metric", 1.5, metric_type="histogram")
+ manager.record_metric(
+ "test_metric", 2.0, tags={"label": "value"}, metric_type="histogram"
+ )
+
+ manager.shutdown() # Cleanup - this stops metric export
+
+
+def test_record_metric_counter():
+ """Test recording counter metric."""
+ manager = TelemetryManager(
+ service_name="test-service",
+ otlp_endpoint="http://localhost:4317",
+ enable_tracing=False, # Disable tracing
+ enable_metrics=True, # Enable for this test
+ )
+
+ # Should not raise
+ manager.record_metric("test_counter", 1, metric_type="counter")
+ manager.record_metric(
+ "test_counter", 2, tags={"label": "value"}, metric_type="counter"
+ )
+
+ manager.shutdown() # Cleanup - this stops metric export
+
+
+def test_record_metric_gauge():
+ """Test recording gauge metric."""
+ manager = TelemetryManager(
+ service_name="test-service",
+ otlp_endpoint="http://localhost:4317",
+ enable_tracing=False, # Disable tracing
+ enable_metrics=True, # Enable for this test
+ )
+
+ # Should not raise
+ manager.record_metric("test_gauge", 10, metric_type="gauge")
+ manager.record_metric(
+ "test_gauge", 20, tags={"label": "value"}, metric_type="gauge"
+ )
+
+ manager.shutdown() # Cleanup - this stops metric export
+
+
+def test_record_metric_disabled():
+ """Test recording metric when metrics is disabled."""
+ manager = TelemetryManager(
+ service_name="test-service",
+ otlp_endpoint="http://localhost:4317",
+ enable_tracing=False,
+ enable_metrics=False,
+ )
+
+ # Should not raise (no-op)
+ manager.record_metric("test_metric", 1.0)
+
+ manager.shutdown() # Cleanup
+
+
+def test_record_metric_invalid_type():
+ """Test recording metric with invalid type."""
+ manager = TelemetryManager(
+ service_name="test-service",
+ otlp_endpoint="http://localhost:4317",
+ enable_tracing=False, # Disable tracing
+ enable_metrics=True, # Enable for this test
+ )
+
+    # Should not raise (logs a warning instead)
+ manager.record_metric("test_metric", 1.0, metric_type="invalid_type")
+
+ manager.shutdown() # Cleanup
+
+
+def test_shutdown():
+ """Test telemetry shutdown."""
+ manager = TelemetryManager(
+ service_name="test-service",
+ otlp_endpoint="http://localhost:4317",
+ enable_tracing=False, # Disable to prevent connection attempts
+ enable_metrics=False, # Disable to prevent connection attempts
+ )
+
+ # Should not raise
+ manager.shutdown()
+
+
+def test_metrics_constants():
+ """Test Metrics constants."""
+ assert Metrics.REQUEST_DURATION == "nerospatial_request_duration_seconds"
+ assert Metrics.REQUESTS_TOTAL == "nerospatial_requests_total"
+ assert Metrics.WEBSOCKET_CONNECTIONS == "nerospatial_websocket_connections"
+ assert Metrics.LLM_TTFT == "nerospatial_llm_ttft_seconds"
+ assert Metrics.LLM_ERRORS == "nerospatial_llm_errors_total"
+ assert Metrics.LLM_TOKENS == "nerospatial_llm_tokens_total"
+ assert Metrics.VLM_INFERENCE == "nerospatial_vlm_inference_seconds"
+ assert Metrics.VLM_QUEUE_DEPTH == "nerospatial_vlm_queue_depth"
+ assert Metrics.DB_QUERY_DURATION == "nerospatial_db_query_duration_seconds"
+ assert Metrics.DB_CONNECTIONS == "nerospatial_db_connections"
+ assert Metrics.AUTH_LOGIN_TOTAL == "nerospatial_auth_login_total"
+ assert Metrics.AUTH_TOKEN_VALIDATION == "nerospatial_auth_token_validation_total"
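+
+
+def test_metrics_constants_with_record_metric():
+    """Hedged sketch: the Metrics constants compose with record_metric.
+    Assumes record_metric accepts any metric-name string, as the tests
+    above suggest; runs in no-op mode so nothing is exported."""
+    manager = TelemetryManager(
+        service_name="test-service",
+        otlp_endpoint="http://localhost:4317",
+        enable_tracing=False,
+        enable_metrics=False,
+    )
+
+    # Should not raise (no-op when metrics are disabled)
+    manager.record_metric(Metrics.REQUESTS_TOTAL, 1, metric_type="counter")
+    manager.record_metric(Metrics.REQUEST_DURATION, 0.42, metric_type="histogram")
+
+    manager.shutdown()  # Cleanup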
diff --git a/tests/test_health_endpoints.py b/tests/test_health_endpoints.py
new file mode 100644
index 0000000..3567f0a
--- /dev/null
+++ b/tests/test_health_endpoints.py
@@ -0,0 +1,32 @@
+"""Tests for health endpoints."""
+
+import pytest
+
+
+class TestHealthEndpoints:
+ """Test health check endpoints."""
+
+ @pytest.mark.asyncio
+ async def test_liveness_always_returns_200(self, client):
+ """Liveness should always return 200 if process is alive."""
+ response = await client.get("/live")
+ assert response.status_code == 200
+ assert response.json()["status"] == "alive"
+
+ @pytest.mark.asyncio
+ async def test_health_endpoint_exists(self, client):
+ """Health endpoint should exist and return proper structure."""
+ response = await client.get("/health")
+ assert response.status_code in (200, 503)
+ data = response.json()
+ assert "status" in data
+ assert "checks" in data
+ assert "metadata" in data
+
+ @pytest.mark.asyncio
+ async def test_ready_endpoint_exists(self, client):
+ """Ready endpoint should exist and return proper structure."""
+ response = await client.get("/ready")
+ assert response.status_code in (200, 503)
+ data = response.json()
+ assert "status" in data
diff --git a/tests/test_main.py b/tests/test_main.py
index 26566d7..9742405 100644
--- a/tests/test_main.py
+++ b/tests/test_main.py
@@ -11,8 +11,10 @@ async def test_health_check(client):
assert response.status_code == 200
data = response.json()
assert data["status"] == "healthy"
- assert "service" in data
- assert "version" in data
+ assert "metadata" in data
+ assert "service" in data["metadata"]
+ assert "version" in data["metadata"]
+ assert "checks" in data
@pytest.mark.asyncio
diff --git a/tests/test_models.py b/tests/test_models.py
deleted file mode 100644
index a4739e8..0000000
--- a/tests/test_models.py
+++ /dev/null
@@ -1,600 +0,0 @@
-"""Unit tests for core models."""
-
-from datetime import UTC, datetime, timedelta
-from uuid import uuid4
-
-import pytest
-
-from core.models import (
- BinaryFrame,
- ControlMessage,
- ControlMessageType,
- ConversationHistory,
- FrameFlags,
- InteractionTurn,
- OAuthTokens,
- SessionMode,
- SessionState,
- StreamType,
- TokenBlacklistEntry,
- User,
- UserContext,
- UserStatus,
-)
-
-# ============================================================================
-# Enum Tests
-# ============================================================================
-
-
-def test_session_mode_enum():
- """Test SessionMode enum values"""
- assert SessionMode.ACTIVE == "active"
- assert SessionMode.PASSIVE == "passive"
-
-
-def test_control_message_type_enum():
- """Test ControlMessageType enum values"""
- assert ControlMessageType.SESSION_CONTROL == "session_control"
- assert ControlMessageType.ERROR == "error"
- assert ControlMessageType.ACK == "ack"
- assert ControlMessageType.HEARTBEAT == "heartbeat"
-
-
-def test_stream_type_enum():
- """Test StreamType enum values"""
- assert StreamType.AUDIO == 0x01
- assert StreamType.VIDEO == 0x02
- assert StreamType.CONTROL == 0x03
-
-
-def test_frame_flags_enum():
- """Test FrameFlags enum values"""
- assert FrameFlags.END_OF_STREAM == 0x01
- assert FrameFlags.PRIORITY == 0x02
- assert FrameFlags.ERROR == 0x04
-
-
-def test_user_status_enum():
- """Test UserStatus enum values"""
- assert UserStatus.ACTIVE == "active"
- assert UserStatus.BLACKLISTED == "blacklisted"
- assert UserStatus.SUSPENDED == "suspended"
-
-
-# ============================================================================
-# Session Model Tests
-# ============================================================================
-
-
-def test_session_state_creation():
- """Test SessionState model creation"""
- session_id = uuid4()
- user_id = uuid4()
- now = datetime.utcnow()
-
- session = SessionState(
- session_id=session_id,
- user_id=user_id,
- mode=SessionMode.ACTIVE,
- created_at=now,
- last_activity=now,
- enable_vision=True,
- )
-
- assert session.session_id == session_id
- assert session.user_id == user_id
- assert session.mode == SessionMode.ACTIVE
- assert session.enable_vision is True
- assert session.voice_id is None
- assert session.preferences == {}
-
-
-def test_session_state_immutability():
- """Test that SessionState is immutable"""
- session_id = uuid4()
- user_id = uuid4()
- now = datetime.utcnow()
-
- session = SessionState(
- session_id=session_id,
- user_id=user_id,
- mode=SessionMode.ACTIVE,
- created_at=now,
- last_activity=now,
- )
-
- with pytest.raises(Exception): # Pydantic validation error
- session.mode = SessionMode.PASSIVE
-
-
-def test_session_state_json_serialization():
- """Test SessionState JSON serialization"""
- session_id = uuid4()
- user_id = uuid4()
- now = datetime.utcnow()
-
- session = SessionState(
- session_id=session_id,
- user_id=user_id,
- mode=SessionMode.ACTIVE,
- created_at=now,
- last_activity=now,
- )
-
- # Test that UUIDs are serialized as strings
- json_data = session.model_dump_json()
- assert str(session_id) in json_data
- assert str(user_id) in json_data
-
-
-# ============================================================================
-# User Model Tests
-# ============================================================================
-
-
-def test_user_context_creation():
- """Test UserContext model creation"""
- user_id = uuid4()
- now = datetime.utcnow()
-
- context = UserContext(
- user_id=user_id, email="test@example.com", created_at=now, name="Test User"
- )
-
- assert context.user_id == user_id
- assert context.email == "test@example.com"
- assert context.name == "Test User"
- assert context.oauth_provider == "google" # Default value
-
-
-def test_user_context_immutability():
- """Test that UserContext is immutable"""
- user_id = uuid4()
- now = datetime.utcnow()
-
- context = UserContext(user_id=user_id, email="test@example.com", created_at=now)
-
- with pytest.raises(Exception):
- context.email = "new@example.com"
-
-
-def test_oauth_tokens_creation():
- """Test OAuthTokens model creation"""
- now = datetime.utcnow()
- expires_at = now + timedelta(hours=1)
-
- tokens = OAuthTokens(
- access_token="access_token_123",
- refresh_token="refresh_token_456",
- id_token="id_token_789",
- expires_at=expires_at,
- )
-
- assert tokens.access_token == "access_token_123"
- assert tokens.refresh_token == "refresh_token_456"
- assert tokens.id_token == "id_token_789"
- assert tokens.token_type == "Bearer" # Default value
- assert tokens.expires_at == expires_at
-
-
-def test_token_blacklist_entry_creation():
- """Test TokenBlacklistEntry model creation"""
- user_id = uuid4()
- now = datetime.utcnow()
- expires_at = now + timedelta(hours=1)
-
- entry = TokenBlacklistEntry(
- token_id="jti_123", user_id=user_id, revoked_at=now, expires_at=expires_at
- )
-
- assert entry.token_id == "jti_123"
- assert entry.user_id == user_id
- assert entry.revoked_at == now
- assert entry.expires_at == expires_at
-
-
-def test_user_creation():
- """Test User model creation"""
- user_id = uuid4()
- now = datetime.utcnow()
-
- user = User(
- user_id=user_id,
- email="test@example.com",
- name="Test User",
- created_at=now,
- updated_at=now,
- )
-
- assert user.user_id == user_id
- assert user.email == "test@example.com"
- assert user.name == "Test User"
- assert user.oauth_provider == "google" # Default value
- assert user.status == UserStatus.ACTIVE # Default value
- assert user.oauth_tokens is None
-
-
-def test_user_with_oauth_tokens():
- """Test User model with OAuth tokens"""
- user_id = uuid4()
- now = datetime.utcnow()
- expires_at = now + timedelta(hours=1)
-
- tokens = OAuthTokens(
- access_token="access_token",
- refresh_token="refresh_token",
- expires_at=expires_at,
- )
-
- user = User(
- user_id=user_id,
- email="test@example.com",
- created_at=now,
- updated_at=now,
- oauth_tokens=tokens,
- )
-
- assert user.oauth_tokens is not None
- assert user.oauth_tokens.access_token == "access_token"
-
-
-def test_user_blacklisted_status():
- """Test User with blacklisted status"""
- user_id = uuid4()
- now = datetime.utcnow()
-
- user = User(
- user_id=user_id,
- email="test@example.com",
- created_at=now,
- updated_at=now,
- status=UserStatus.BLACKLISTED,
- )
-
- assert user.status == UserStatus.BLACKLISTED
-
-
-# ============================================================================
-# Interaction Model Tests
-# ============================================================================
-
-
-def test_interaction_turn_creation():
- """Test InteractionTurn model creation"""
- turn_id = uuid4()
- session_id = uuid4()
- now = datetime.utcnow()
-
- turn = InteractionTurn(
- turn_id=turn_id,
- session_id=session_id,
- timestamp=now,
- transcript="Hello",
- llm_response="Hi there!",
- model_used="groq",
- latency_ms=150,
- )
-
- assert turn.turn_id == turn_id
- assert turn.session_id == session_id
- assert turn.transcript == "Hello"
- assert turn.llm_response == "Hi there!"
- assert turn.model_used == "groq"
- assert turn.latency_ms == 150
- assert turn.scene_description is None
-
-
-def test_conversation_history_creation():
- """Test ConversationHistory model creation"""
- user_id = uuid4()
-
- history = ConversationHistory(user_id=user_id)
-
- assert history.user_id == user_id
- assert history.turns == []
- assert history.max_turns == 10
-
-
-def test_conversation_history_add_turn():
- """Test ConversationHistory.add_turn() method"""
- user_id = uuid4()
- session_id = uuid4()
- now = datetime.utcnow()
-
- history = ConversationHistory(user_id=user_id, max_turns=3)
-
- # Add first turn
- turn1 = InteractionTurn(
- turn_id=uuid4(),
- session_id=session_id,
- timestamp=now,
- transcript="Hello",
- llm_response="Hi",
- model_used="groq",
- latency_ms=100,
- )
- history = history.add_turn(turn1)
- assert len(history.turns) == 1
-
- # Add second turn
- turn2 = InteractionTurn(
- turn_id=uuid4(),
- session_id=session_id,
- timestamp=now,
- transcript="How are you?",
- llm_response="I'm good",
- model_used="groq",
- latency_ms=120,
- )
- history = history.add_turn(turn2)
- assert len(history.turns) == 2
- assert history.turns[0] == turn2 # Newest first
-
- # Add more turns to test max_turns limit
- for i in range(5):
- turn = InteractionTurn(
- turn_id=uuid4(),
- session_id=session_id,
- timestamp=now,
- transcript=f"Message {i}",
- llm_response=f"Response {i}",
- model_used="groq",
- latency_ms=100,
- )
- history = history.add_turn(turn)
-
- # Should only have max_turns (3) turns
- assert len(history.turns) == 3
-
-
-def test_conversation_history_immutability():
- """Test that ConversationHistory.add_turn() returns new instance"""
- user_id = uuid4()
- session_id = uuid4()
- now = datetime.utcnow()
-
- history1 = ConversationHistory(user_id=user_id)
- turn = InteractionTurn(
- turn_id=uuid4(),
- session_id=session_id,
- timestamp=now,
- transcript="Hello",
- llm_response="Hi",
- model_used="groq",
- latency_ms=100,
- )
-
- history2 = history1.add_turn(turn)
-
- # Original should be unchanged
- assert len(history1.turns) == 0
- # New instance should have the turn
- assert len(history2.turns) == 1
- assert history1 is not history2
-
-
-# ============================================================================
-# Control Message Tests
-# ============================================================================
-
-
-def test_control_message_creation():
- """Test ControlMessage model creation"""
- message = ControlMessage(
- type=ControlMessageType.SESSION_CONTROL, action="start_active_mode"
- )
-
- assert message.type == ControlMessageType.SESSION_CONTROL
- assert message.action == "start_active_mode"
- assert message.payload == {}
- assert message.timestamp is not None
-
-
-def test_control_message_with_payload():
- """Test ControlMessage with payload"""
- message = ControlMessage(
- type=ControlMessageType.ERROR,
- payload={"error_code": 500, "message": "Internal error"},
- )
-
- assert message.type == ControlMessageType.ERROR
- assert message.payload["error_code"] == 500
- assert message.payload["message"] == "Internal error"
-
-
-def test_control_message_default_timestamp():
- """Test that ControlMessage has default timestamp"""
- before = datetime.now(UTC)
- message = ControlMessage(type=ControlMessageType.HEARTBEAT)
- after = datetime.now(UTC)
-
- assert before <= message.timestamp <= after
-
-
-# ============================================================================
-# Binary Frame Tests
-# ============================================================================
-
-
-def test_binary_frame_parse_valid():
- """Test BinaryFrame.parse() with valid data"""
- payload = b"test payload"
- length = len(payload)
-
- # Create frame: [stream_type, flags, length_high, length_low] + payload
- frame_data = (
- bytes(
- [
- StreamType.AUDIO.value,
- 0x00, # flags
- (length >> 8) & 0xFF, # length high byte
- length & 0xFF, # length low byte
- ]
- )
- + payload
- )
-
- frame = BinaryFrame.parse(frame_data)
-
- assert frame.stream_type == StreamType.AUDIO
- assert frame.flags == 0x00
- assert frame.length == length
- assert frame.payload == payload
-
-
-def test_binary_frame_parse_too_short():
- """Test BinaryFrame.parse() with too short data"""
- with pytest.raises(ValueError, match="Frame too short"):
- BinaryFrame.parse(b"abc") # Only 3 bytes
-
-
-def test_binary_frame_parse_length_mismatch():
- """Test BinaryFrame.parse() with payload length mismatch"""
- # Header says length is 10, but payload is only 5 bytes
- frame_data = (
- bytes(
- [
- StreamType.VIDEO.value,
- 0x00,
- 0x00, # length high = 0
- 10, # length low = 10
- ]
- )
- + b"short"
- ) # Only 5 bytes
-
- with pytest.raises(ValueError, match="Payload length mismatch"):
- BinaryFrame.parse(frame_data)
-
-
-def test_binary_frame_to_bytes():
- """Test BinaryFrame.to_bytes() serialization"""
- payload = b"test data"
- frame = BinaryFrame(
- stream_type=StreamType.CONTROL,
- flags=FrameFlags.PRIORITY.value,
- payload=payload,
- length=len(payload),
- )
-
- serialized = frame.to_bytes()
-
- # Should have 4-byte header + payload
- assert len(serialized) == 4 + len(payload)
- assert serialized[0] == StreamType.CONTROL.value
- assert serialized[1] == FrameFlags.PRIORITY.value
- length_from_header = int.from_bytes(serialized[2:4], "big")
- assert length_from_header == len(payload)
- assert serialized[4:] == payload
-
-
-def test_binary_frame_round_trip():
- """Test BinaryFrame parse -> to_bytes round trip"""
- original_payload = b"round trip test data"
- original_frame = BinaryFrame(
- stream_type=StreamType.AUDIO,
- flags=FrameFlags.END_OF_STREAM.value,
- payload=original_payload,
- length=len(original_payload),
- )
-
- # Serialize
- serialized = original_frame.to_bytes()
-
- # Parse back
- parsed_frame = BinaryFrame.parse(serialized)
-
- assert parsed_frame.stream_type == original_frame.stream_type
- assert parsed_frame.flags == original_frame.flags
- assert parsed_frame.length == original_frame.length
- assert parsed_frame.payload == original_frame.payload
-
-
-def test_binary_frame_with_flags():
- """Test BinaryFrame with multiple flags"""
- frame = BinaryFrame(
- stream_type=StreamType.VIDEO,
- flags=FrameFlags.END_OF_STREAM.value | FrameFlags.PRIORITY.value,
- payload=b"data",
- length=4,
- )
-
- assert frame.flags & FrameFlags.END_OF_STREAM.value
- assert frame.flags & FrameFlags.PRIORITY.value
-
-
-# ============================================================================
-# Default Factory Tests
-# ============================================================================
-
-
-def test_default_factories_dont_share_state():
- """Test that default factories don't share mutable state"""
- # Create two SessionState instances
- session_id1 = uuid4()
- session_id2 = uuid4()
- user_id = uuid4()
- now = datetime.utcnow()
-
- session1 = SessionState(
- session_id=session_id1,
- user_id=user_id,
- mode=SessionMode.ACTIVE,
- created_at=now,
- last_activity=now,
- )
-
- session2 = SessionState(
- session_id=session_id2,
- user_id=user_id,
- mode=SessionMode.ACTIVE,
- created_at=now,
- last_activity=now,
- )
-
- # Modify preferences in one
- # Since models are frozen, we need to create new instances
- # But the test is that default_factory creates new dicts
- assert session1.preferences is not session2.preferences
-
- # Same for ConversationHistory
- user_id = uuid4()
- history1 = ConversationHistory(user_id=user_id)
- history2 = ConversationHistory(user_id=user_id)
-
- assert history1.turns is not history2.turns
-
-
-# ============================================================================
-# JSON Serialization Tests
-# ============================================================================
-
-
-def test_uuid_json_serialization():
- """Test that UUIDs are properly serialized in JSON"""
- user_id = uuid4()
- now = datetime.utcnow()
-
- user = User(
- user_id=user_id, email="test@example.com", created_at=now, updated_at=now
- )
-
- json_str = user.model_dump_json()
- assert str(user_id) in json_str
-
-
-def test_datetime_json_serialization():
- """Test that datetimes are properly serialized in JSON"""
- now = datetime.utcnow()
- user_id = uuid4()
-
- user = User(
- user_id=user_id, email="test@example.com", created_at=now, updated_at=now
- )
-
- json_data = user.model_dump()
- # Datetime should be serialized as ISO format string
- assert isinstance(json_data["created_at"], str) or isinstance(
- json_data["created_at"], datetime
- )
diff --git a/uv.lock b/uv.lock
index 6a4a9c6..eaa201b 100644
--- a/uv.lock
+++ b/uv.lock
@@ -1,6 +1,23 @@
version = 1
revision = 3
requires-python = ">=3.11"
+resolution-markers = [
+ "python_full_version >= '3.13'",
+ "python_full_version < '3.13'",
+]
+
+[[package]]
+name = "aioredis"
+version = "2.0.1"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "async-timeout" },
+ { name = "typing-extensions" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/2e/cf/9eb144a0b05809ffc5d29045c4b51039000ea275bc1268d0351c9e7dfc06/aioredis-2.0.1.tar.gz", hash = "sha256:eaa51aaf993f2d71f54b70527c440437ba65340588afeb786cd87c55c89cd98e", size = 111047, upload-time = "2021-12-27T20:28:17.557Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/9b/a9/0da089c3ae7a31cbcd2dcf0214f6f571e1295d292b6139e2bac68ec081d0/aioredis-2.0.1-py3-none-any.whl", hash = "sha256:9ac0d0b3b485d293b8ca1987e6de8658d7dafcca1cddfcd1d506cae8cdebfdd6", size = 71243, upload-time = "2021-12-27T20:28:16.36Z" },
+]
[[package]]
name = "annotated-doc"
@@ -33,6 +50,77 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/7f/9c/36c5c37947ebfb8c7f22e0eb6e4d188ee2d53aa3880f3f2744fb894f0cb1/anyio-4.12.0-py3-none-any.whl", hash = "sha256:dad2376a628f98eeca4881fc56cd06affd18f659b17a747d3ff0307ced94b1bb", size = 113362, upload-time = "2025-11-28T23:36:57.897Z" },
]
+[[package]]
+name = "async-timeout"
+version = "5.0.1"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/a5/ae/136395dfbfe00dfc94da3f3e136d0b13f394cba8f4841120e34226265780/async_timeout-5.0.1.tar.gz", hash = "sha256:d9321a7a3d5a6a5e187e824d2fa0793ce379a202935782d555d6e9d2735677d3", size = 9274, upload-time = "2024-11-06T16:41:39.6Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/fe/ba/e2081de779ca30d473f21f5b30e0e737c438205440784c7dfc81efc2b029/async_timeout-5.0.1-py3-none-any.whl", hash = "sha256:39e3809566ff85354557ec2398b55e096c8364bacac9405a7a1fa429e77fe76c", size = 6233, upload-time = "2024-11-06T16:41:37.9Z" },
+]
+
+[[package]]
+name = "asyncpg"
+version = "0.31.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/fe/cc/d18065ce2380d80b1bcce927c24a2642efd38918e33fd724bc4bca904877/asyncpg-0.31.0.tar.gz", hash = "sha256:c989386c83940bfbd787180f2b1519415e2d3d6277a70d9d0f0145ac73500735", size = 993667, upload-time = "2025-11-24T23:27:00.812Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/08/17/cc02bc49bc350623d050fa139e34ea512cd6e020562f2a7312a7bcae4bc9/asyncpg-0.31.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:eee690960e8ab85063ba93af2ce128c0f52fd655fdff9fdb1a28df01329f031d", size = 643159, upload-time = "2025-11-24T23:25:36.443Z" },
+ { url = "https://files.pythonhosted.org/packages/a4/62/4ded7d400a7b651adf06f49ea8f73100cca07c6df012119594d1e3447aa6/asyncpg-0.31.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2657204552b75f8288de08ca60faf4a99a65deef3a71d1467454123205a88fab", size = 638157, upload-time = "2025-11-24T23:25:37.89Z" },
+ { url = "https://files.pythonhosted.org/packages/d6/5b/4179538a9a72166a0bf60ad783b1ef16efb7960e4d7b9afe9f77a5551680/asyncpg-0.31.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a429e842a3a4b4ea240ea52d7fe3f82d5149853249306f7ff166cb9948faa46c", size = 2918051, upload-time = "2025-11-24T23:25:39.461Z" },
+ { url = "https://files.pythonhosted.org/packages/e6/35/c27719ae0536c5b6e61e4701391ffe435ef59539e9360959240d6e47c8c8/asyncpg-0.31.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c0807be46c32c963ae40d329b3a686356e417f674c976c07fa49f1b30303f109", size = 2972640, upload-time = "2025-11-24T23:25:41.512Z" },
+ { url = "https://files.pythonhosted.org/packages/43/f4/01ebb9207f29e645a64699b9ce0eefeff8e7a33494e1d29bb53736f7766b/asyncpg-0.31.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:e5d5098f63beeae93512ee513d4c0c53dc12e9aa2b7a1af5a81cddf93fe4e4da", size = 2851050, upload-time = "2025-11-24T23:25:43.153Z" },
+ { url = "https://files.pythonhosted.org/packages/3e/f4/03ff1426acc87be0f4e8d40fa2bff5c3952bef0080062af9efc2212e3be8/asyncpg-0.31.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:37fc6c00a814e18eef51833545d1891cac9aa69140598bb076b4cd29b3e010b9", size = 2962574, upload-time = "2025-11-24T23:25:44.942Z" },
+ { url = "https://files.pythonhosted.org/packages/c7/39/cc788dfca3d4060f9d93e67be396ceec458dfc429e26139059e58c2c244d/asyncpg-0.31.0-cp311-cp311-win32.whl", hash = "sha256:5a4af56edf82a701aece93190cc4e094d2df7d33f6e915c222fb09efbb5afc24", size = 521076, upload-time = "2025-11-24T23:25:46.486Z" },
+ { url = "https://files.pythonhosted.org/packages/28/fc/735af5384c029eb7f1ca60ccb8fa95521dbdaeef788edf4cecfc604c3cab/asyncpg-0.31.0-cp311-cp311-win_amd64.whl", hash = "sha256:480c4befbdf079c14c9ca43c8c5e1fe8b6296c96f1f927158d4f1e750aacc047", size = 584980, upload-time = "2025-11-24T23:25:47.938Z" },
+ { url = "https://files.pythonhosted.org/packages/2a/a6/59d0a146e61d20e18db7396583242e32e0f120693b67a8de43f1557033e2/asyncpg-0.31.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:b44c31e1efc1c15188ef183f287c728e2046abb1d26af4d20858215d50d91fad", size = 662042, upload-time = "2025-11-24T23:25:49.578Z" },
+ { url = "https://files.pythonhosted.org/packages/36/01/ffaa189dcb63a2471720615e60185c3f6327716fdc0fc04334436fbb7c65/asyncpg-0.31.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0c89ccf741c067614c9b5fc7f1fc6f3b61ab05ae4aaa966e6fd6b93097c7d20d", size = 638504, upload-time = "2025-11-24T23:25:51.501Z" },
+ { url = "https://files.pythonhosted.org/packages/9f/62/3f699ba45d8bd24c5d65392190d19656d74ff0185f42e19d0bbd973bb371/asyncpg-0.31.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:12b3b2e39dc5470abd5e98c8d3373e4b1d1234d9fbdedf538798b2c13c64460a", size = 3426241, upload-time = "2025-11-24T23:25:53.278Z" },
+ { url = "https://files.pythonhosted.org/packages/8c/d1/a867c2150f9c6e7af6462637f613ba67f78a314b00db220cd26ff559d532/asyncpg-0.31.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:aad7a33913fb8bcb5454313377cc330fbb19a0cd5faa7272407d8a0c4257b671", size = 3520321, upload-time = "2025-11-24T23:25:54.982Z" },
+ { url = "https://files.pythonhosted.org/packages/7a/1a/cce4c3f246805ecd285a3591222a2611141f1669d002163abef999b60f98/asyncpg-0.31.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:3df118d94f46d85b2e434fd62c84cb66d5834d5a890725fe625f498e72e4d5ec", size = 3316685, upload-time = "2025-11-24T23:25:57.43Z" },
+ { url = "https://files.pythonhosted.org/packages/40/ae/0fc961179e78cc579e138fad6eb580448ecae64908f95b8cb8ee2f241f67/asyncpg-0.31.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:bd5b6efff3c17c3202d4b37189969acf8927438a238c6257f66be3c426beba20", size = 3471858, upload-time = "2025-11-24T23:25:59.636Z" },
+ { url = "https://files.pythonhosted.org/packages/52/b2/b20e09670be031afa4cbfabd645caece7f85ec62d69c312239de568e058e/asyncpg-0.31.0-cp312-cp312-win32.whl", hash = "sha256:027eaa61361ec735926566f995d959ade4796f6a49d3bde17e5134b9964f9ba8", size = 527852, upload-time = "2025-11-24T23:26:01.084Z" },
+ { url = "https://files.pythonhosted.org/packages/b5/f0/f2ed1de154e15b107dc692262395b3c17fc34eafe2a78fc2115931561730/asyncpg-0.31.0-cp312-cp312-win_amd64.whl", hash = "sha256:72d6bdcbc93d608a1158f17932de2321f68b1a967a13e014998db87a72ed3186", size = 597175, upload-time = "2025-11-24T23:26:02.564Z" },
+ { url = "https://files.pythonhosted.org/packages/95/11/97b5c2af72a5d0b9bc3fa30cd4b9ce22284a9a943a150fdc768763caf035/asyncpg-0.31.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:c204fab1b91e08b0f47e90a75d1b3c62174dab21f670ad6c5d0f243a228f015b", size = 661111, upload-time = "2025-11-24T23:26:04.467Z" },
+ { url = "https://files.pythonhosted.org/packages/1b/71/157d611c791a5e2d0423f09f027bd499935f0906e0c2a416ce712ba51ef3/asyncpg-0.31.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:54a64f91839ba59008eccf7aad2e93d6e3de688d796f35803235ea1c4898ae1e", size = 636928, upload-time = "2025-11-24T23:26:05.944Z" },
+ { url = "https://files.pythonhosted.org/packages/2e/fc/9e3486fb2bbe69d4a867c0b76d68542650a7ff1574ca40e84c3111bb0c6e/asyncpg-0.31.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c0e0822b1038dc7253b337b0f3f676cadc4ac31b126c5d42691c39691962e403", size = 3424067, upload-time = "2025-11-24T23:26:07.957Z" },
+ { url = "https://files.pythonhosted.org/packages/12/c6/8c9d076f73f07f995013c791e018a1cd5f31823c2a3187fc8581706aa00f/asyncpg-0.31.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:bef056aa502ee34204c161c72ca1f3c274917596877f825968368b2c33f585f4", size = 3518156, upload-time = "2025-11-24T23:26:09.591Z" },
+ { url = "https://files.pythonhosted.org/packages/ae/3b/60683a0baf50fbc546499cfb53132cb6835b92b529a05f6a81471ab60d0c/asyncpg-0.31.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:0bfbcc5b7ffcd9b75ab1558f00db2ae07db9c80637ad1b2469c43df79d7a5ae2", size = 3319636, upload-time = "2025-11-24T23:26:11.168Z" },
+ { url = "https://files.pythonhosted.org/packages/50/dc/8487df0f69bd398a61e1792b3cba0e47477f214eff085ba0efa7eac9ce87/asyncpg-0.31.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:22bc525ebbdc24d1261ecbf6f504998244d4e3be1721784b5f64664d61fbe602", size = 3472079, upload-time = "2025-11-24T23:26:13.164Z" },
+ { url = "https://files.pythonhosted.org/packages/13/a1/c5bbeeb8531c05c89135cb8b28575ac2fac618bcb60119ee9696c3faf71c/asyncpg-0.31.0-cp313-cp313-win32.whl", hash = "sha256:f890de5e1e4f7e14023619399a471ce4b71f5418cd67a51853b9910fdfa73696", size = 527606, upload-time = "2025-11-24T23:26:14.78Z" },
+ { url = "https://files.pythonhosted.org/packages/91/66/b25ccb84a246b470eb943b0107c07edcae51804912b824054b3413995a10/asyncpg-0.31.0-cp313-cp313-win_amd64.whl", hash = "sha256:dc5f2fa9916f292e5c5c8b2ac2813763bcd7f58e130055b4ad8a0531314201ab", size = 596569, upload-time = "2025-11-24T23:26:16.189Z" },
+ { url = "https://files.pythonhosted.org/packages/3c/36/e9450d62e84a13aea6580c83a47a437f26c7ca6fa0f0fd40b6670793ea30/asyncpg-0.31.0-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:f6b56b91bb0ffc328c4e3ed113136cddd9deefdf5f79ab448598b9772831df44", size = 660867, upload-time = "2025-11-24T23:26:17.631Z" },
+ { url = "https://files.pythonhosted.org/packages/82/4b/1d0a2b33b3102d210439338e1beea616a6122267c0df459ff0265cd5807a/asyncpg-0.31.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:334dec28cf20d7f5bb9e45b39546ddf247f8042a690bff9b9573d00086e69cb5", size = 638349, upload-time = "2025-11-24T23:26:19.689Z" },
+ { url = "https://files.pythonhosted.org/packages/41/aa/e7f7ac9a7974f08eff9183e392b2d62516f90412686532d27e196c0f0eeb/asyncpg-0.31.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:98cc158c53f46de7bb677fd20c417e264fc02b36d901cc2a43bd6cb0dc6dbfd2", size = 3410428, upload-time = "2025-11-24T23:26:21.275Z" },
+ { url = "https://files.pythonhosted.org/packages/6f/de/bf1b60de3dede5c2731e6788617a512bc0ebd9693eac297ee74086f101d7/asyncpg-0.31.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9322b563e2661a52e3cdbc93eed3be7748b289f792e0011cb2720d278b366ce2", size = 3471678, upload-time = "2025-11-24T23:26:23.627Z" },
+ { url = "https://files.pythonhosted.org/packages/46/78/fc3ade003e22d8bd53aaf8f75f4be48f0b460fa73738f0391b9c856a9147/asyncpg-0.31.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:19857a358fc811d82227449b7ca40afb46e75b33eb8897240c3839dd8b744218", size = 3313505, upload-time = "2025-11-24T23:26:25.235Z" },
+ { url = "https://files.pythonhosted.org/packages/bf/e9/73eb8a6789e927816f4705291be21f2225687bfa97321e40cd23055e903a/asyncpg-0.31.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:ba5f8886e850882ff2c2ace5732300e99193823e8107e2c53ef01c1ebfa1e85d", size = 3434744, upload-time = "2025-11-24T23:26:26.944Z" },
+ { url = "https://files.pythonhosted.org/packages/08/4b/f10b880534413c65c5b5862f79b8e81553a8f364e5238832ad4c0af71b7f/asyncpg-0.31.0-cp314-cp314-win32.whl", hash = "sha256:cea3a0b2a14f95834cee29432e4ddc399b95700eb1d51bbc5bfee8f31fa07b2b", size = 532251, upload-time = "2025-11-24T23:26:28.404Z" },
+ { url = "https://files.pythonhosted.org/packages/d3/2d/7aa40750b7a19efa5d66e67fc06008ca0f27ba1bd082e457ad82f59aba49/asyncpg-0.31.0-cp314-cp314-win_amd64.whl", hash = "sha256:04d19392716af6b029411a0264d92093b6e5e8285ae97a39957b9a9c14ea72be", size = 604901, upload-time = "2025-11-24T23:26:30.34Z" },
+ { url = "https://files.pythonhosted.org/packages/ce/fe/b9dfe349b83b9dee28cc42360d2c86b2cdce4cb551a2c2d27e156bcac84d/asyncpg-0.31.0-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:bdb957706da132e982cc6856bb2f7b740603472b54c3ebc77fe60ea3e57e1bd2", size = 702280, upload-time = "2025-11-24T23:26:32Z" },
+ { url = "https://files.pythonhosted.org/packages/6a/81/e6be6e37e560bd91e6c23ea8a6138a04fd057b08cf63d3c5055c98e81c1d/asyncpg-0.31.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:6d11b198111a72f47154fa03b85799f9be63701e068b43f84ac25da0bda9cb31", size = 682931, upload-time = "2025-11-24T23:26:33.572Z" },
+ { url = "https://files.pythonhosted.org/packages/a6/45/6009040da85a1648dd5bc75b3b0a062081c483e75a1a29041ae63a0bf0dc/asyncpg-0.31.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:18c83b03bc0d1b23e6230f5bf8d4f217dc9bc08644ce0502a9d91dc9e634a9c7", size = 3581608, upload-time = "2025-11-24T23:26:35.638Z" },
+ { url = "https://files.pythonhosted.org/packages/7e/06/2e3d4d7608b0b2b3adbee0d0bd6a2d29ca0fc4d8a78f8277df04e2d1fd7b/asyncpg-0.31.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e009abc333464ff18b8f6fd146addffd9aaf63e79aa3bb40ab7a4c332d0c5e9e", size = 3498738, upload-time = "2025-11-24T23:26:37.275Z" },
+ { url = "https://files.pythonhosted.org/packages/7d/aa/7d75ede780033141c51d83577ea23236ba7d3a23593929b32b49db8ed36e/asyncpg-0.31.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:3b1fbcb0e396a5ca435a8826a87e5c2c2cc0c8c68eb6fadf82168056b0e53a8c", size = 3401026, upload-time = "2025-11-24T23:26:39.423Z" },
+ { url = "https://files.pythonhosted.org/packages/ba/7a/15e37d45e7f7c94facc1e9148c0e455e8f33c08f0b8a0b1deb2c5171771b/asyncpg-0.31.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:8df714dba348efcc162d2adf02d213e5fab1bd9f557e1305633e851a61814a7a", size = 3429426, upload-time = "2025-11-24T23:26:41.032Z" },
+ { url = "https://files.pythonhosted.org/packages/13/d5/71437c5f6ae5f307828710efbe62163974e71237d5d46ebd2869ea052d10/asyncpg-0.31.0-cp314-cp314t-win32.whl", hash = "sha256:1b41f1afb1033f2b44f3234993b15096ddc9cd71b21a42dbd87fc6a57b43d65d", size = 614495, upload-time = "2025-11-24T23:26:42.659Z" },
+ { url = "https://files.pythonhosted.org/packages/3c/d7/8fb3044eaef08a310acfe23dae9a8e2e07d305edc29a53497e52bc76eca7/asyncpg-0.31.0-cp314-cp314t-win_amd64.whl", hash = "sha256:bd4107bb7cdd0e9e65fae66a62afd3a249663b844fa34d479f6d5b3bef9c04c3", size = 706062, upload-time = "2025-11-24T23:26:44.086Z" },
+]
+
+[[package]]
+name = "azure-appconfiguration"
+version = "1.7.2"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "azure-core" },
+ { name = "isodate" },
+ { name = "typing-extensions" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/d3/9f/f2a9ab639df9f9db2112ded1c6286d1a685f6dadc8b56fc1f1d5faed8c57/azure_appconfiguration-1.7.2.tar.gz", hash = "sha256:cefd75b298b898a8ed9f73048f3f39f4e81059a58cd832d0523787fc1d912a06", size = 120992, upload-time = "2025-10-20T20:26:30.072Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/c4/59/c21dfb3ee35fe723c7662b3e468b20532947e73e11248971c45b7554590b/azure_appconfiguration-1.7.2-py3-none-any.whl", hash = "sha256:8cb62acd32efa84ae1e1ce30118ab4b412b3652f3ab6e86f811ec2e48388d083", size = 100202, upload-time = "2025-10-20T20:26:31.261Z" },
+]
+
[[package]]
name = "azure-core"
version = "1.36.0"
@@ -329,6 +417,28 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/33/6b/e0547afaf41bf2c42e52430072fa5658766e3d65bd4b03a563d1b6336f57/distlib-0.4.0-py2.py3-none-any.whl", hash = "sha256:9659f7d87e46584a30b5780e43ac7a2143098441670ff0a49d5f9034c54a6c16", size = 469047, upload-time = "2025-07-17T16:51:58.613Z" },
]
+[[package]]
+name = "dnspython"
+version = "2.8.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/8c/8b/57666417c0f90f08bcafa776861060426765fdb422eb10212086fb811d26/dnspython-2.8.0.tar.gz", hash = "sha256:181d3c6996452cb1189c4046c61599b84a5a86e099562ffde77d26984ff26d0f", size = 368251, upload-time = "2025-09-07T18:58:00.022Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/ba/5a/18ad964b0086c6e62e2e7500f7edc89e3faa45033c71c1893d34eed2b2de/dnspython-2.8.0-py3-none-any.whl", hash = "sha256:01d9bbc4a2d76bf0db7c1f729812ded6d912bd318d3b1cf81d30c0f845dbf3af", size = 331094, upload-time = "2025-09-07T18:57:58.071Z" },
+]
+
+[[package]]
+name = "email-validator"
+version = "2.3.0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "dnspython" },
+ { name = "idna" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/f5/22/900cb125c76b7aaa450ce02fd727f452243f2e91a61af068b40adba60ea9/email_validator-2.3.0.tar.gz", hash = "sha256:9fc05c37f2f6cf439ff414f8fc46d917929974a82244c20eb10231ba60c54426", size = 51238, upload-time = "2025-08-26T13:09:06.831Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/de/15/545e2b6cf2e3be84bc1ed85613edd75b8aea69807a71c26f4ca6a9258e82/email_validator-2.3.0-py3-none-any.whl", hash = "sha256:80f13f623413e6b197ae73bb10bf4eb0908faf509ad8362c5edeb0be7fd450b4", size = 35604, upload-time = "2025-08-26T13:09:05.858Z" },
+]
+
[[package]]
name = "fastapi"
version = "0.124.2"
@@ -353,6 +463,69 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/76/91/7216b27286936c16f5b4d0c530087e4a54eead683e6b0b73dd0c64844af6/filelock-3.20.0-py3-none-any.whl", hash = "sha256:339b4732ffda5cd79b13f4e2711a31b0365ce445d95d243bb996273d072546a2", size = 16054, upload-time = "2025-10-08T18:03:48.35Z" },
]
+[[package]]
+name = "googleapis-common-protos"
+version = "1.72.0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "protobuf" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/e5/7b/adfd75544c415c487b33061fe7ae526165241c1ea133f9a9125a56b39fd8/googleapis_common_protos-1.72.0.tar.gz", hash = "sha256:e55a601c1b32b52d7a3e65f43563e2aa61bcd737998ee672ac9b951cd49319f5", size = 147433, upload-time = "2025-11-06T18:29:24.087Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/c4/ab/09169d5a4612a5f92490806649ac8d41e3ec9129c636754575b3553f4ea4/googleapis_common_protos-1.72.0-py3-none-any.whl", hash = "sha256:4299c5a82d5ae1a9702ada957347726b167f9f8d1fc352477702a1e851ff4038", size = 297515, upload-time = "2025-11-06T18:29:13.14Z" },
+]
+
+[[package]]
+name = "grpcio"
+version = "1.76.0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "typing-extensions" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/b6/e0/318c1ce3ae5a17894d5791e87aea147587c9e702f24122cc7a5c8bbaeeb1/grpcio-1.76.0.tar.gz", hash = "sha256:7be78388d6da1a25c0d5ec506523db58b18be22d9c37d8d3a32c08be4987bd73", size = 12785182, upload-time = "2025-10-21T16:23:12.106Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/a0/00/8163a1beeb6971f66b4bbe6ac9457b97948beba8dd2fc8e1281dce7f79ec/grpcio-1.76.0-cp311-cp311-linux_armv7l.whl", hash = "sha256:2e1743fbd7f5fa713a1b0a8ac8ebabf0ec980b5d8809ec358d488e273b9cf02a", size = 5843567, upload-time = "2025-10-21T16:20:52.829Z" },
+ { url = "https://files.pythonhosted.org/packages/10/c1/934202f5cf335e6d852530ce14ddb0fef21be612ba9ecbbcbd4d748ca32d/grpcio-1.76.0-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:a8c2cf1209497cf659a667d7dea88985e834c24b7c3b605e6254cbb5076d985c", size = 11848017, upload-time = "2025-10-21T16:20:56.705Z" },
+ { url = "https://files.pythonhosted.org/packages/11/0b/8dec16b1863d74af6eb3543928600ec2195af49ca58b16334972f6775663/grpcio-1.76.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:08caea849a9d3c71a542827d6df9d5a69067b0a1efbea8a855633ff5d9571465", size = 6412027, upload-time = "2025-10-21T16:20:59.3Z" },
+ { url = "https://files.pythonhosted.org/packages/d7/64/7b9e6e7ab910bea9d46f2c090380bab274a0b91fb0a2fe9b0cd399fffa12/grpcio-1.76.0-cp311-cp311-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:f0e34c2079d47ae9f6188211db9e777c619a21d4faba6977774e8fa43b085e48", size = 7075913, upload-time = "2025-10-21T16:21:01.645Z" },
+ { url = "https://files.pythonhosted.org/packages/68/86/093c46e9546073cefa789bd76d44c5cb2abc824ca62af0c18be590ff13ba/grpcio-1.76.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:8843114c0cfce61b40ad48df65abcfc00d4dba82eae8718fab5352390848c5da", size = 6615417, upload-time = "2025-10-21T16:21:03.844Z" },
+ { url = "https://files.pythonhosted.org/packages/f7/b6/5709a3a68500a9c03da6fb71740dcdd5ef245e39266461a03f31a57036d8/grpcio-1.76.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:8eddfb4d203a237da6f3cc8a540dad0517d274b5a1e9e636fd8d2c79b5c1d397", size = 7199683, upload-time = "2025-10-21T16:21:06.195Z" },
+ { url = "https://files.pythonhosted.org/packages/91/d3/4b1f2bf16ed52ce0b508161df3a2d186e4935379a159a834cb4a7d687429/grpcio-1.76.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:32483fe2aab2c3794101c2a159070584e5db11d0aa091b2c0ea9c4fc43d0d749", size = 8163109, upload-time = "2025-10-21T16:21:08.498Z" },
+ { url = "https://files.pythonhosted.org/packages/5c/61/d9043f95f5f4cf085ac5dd6137b469d41befb04bd80280952ffa2a4c3f12/grpcio-1.76.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:dcfe41187da8992c5f40aa8c5ec086fa3672834d2be57a32384c08d5a05b4c00", size = 7626676, upload-time = "2025-10-21T16:21:10.693Z" },
+ { url = "https://files.pythonhosted.org/packages/36/95/fd9a5152ca02d8881e4dd419cdd790e11805979f499a2e5b96488b85cf27/grpcio-1.76.0-cp311-cp311-win32.whl", hash = "sha256:2107b0c024d1b35f4083f11245c0e23846ae64d02f40b2b226684840260ed054", size = 3997688, upload-time = "2025-10-21T16:21:12.746Z" },
+ { url = "https://files.pythonhosted.org/packages/60/9c/5c359c8d4c9176cfa3c61ecd4efe5affe1f38d9bae81e81ac7186b4c9cc8/grpcio-1.76.0-cp311-cp311-win_amd64.whl", hash = "sha256:522175aba7af9113c48ec10cc471b9b9bd4f6ceb36aeb4544a8e2c80ed9d252d", size = 4709315, upload-time = "2025-10-21T16:21:15.26Z" },
+ { url = "https://files.pythonhosted.org/packages/bf/05/8e29121994b8d959ffa0afd28996d452f291b48cfc0875619de0bde2c50c/grpcio-1.76.0-cp312-cp312-linux_armv7l.whl", hash = "sha256:81fd9652b37b36f16138611c7e884eb82e0cec137c40d3ef7c3f9b3ed00f6ed8", size = 5799718, upload-time = "2025-10-21T16:21:17.939Z" },
+ { url = "https://files.pythonhosted.org/packages/d9/75/11d0e66b3cdf998c996489581bdad8900db79ebd83513e45c19548f1cba4/grpcio-1.76.0-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:04bbe1bfe3a68bbfd4e52402ab7d4eb59d72d02647ae2042204326cf4bbad280", size = 11825627, upload-time = "2025-10-21T16:21:20.466Z" },
+ { url = "https://files.pythonhosted.org/packages/28/50/2f0aa0498bc188048f5d9504dcc5c2c24f2eb1a9337cd0fa09a61a2e75f0/grpcio-1.76.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:d388087771c837cdb6515539f43b9d4bf0b0f23593a24054ac16f7a960be16f4", size = 6359167, upload-time = "2025-10-21T16:21:23.122Z" },
+ { url = "https://files.pythonhosted.org/packages/66/e5/bbf0bb97d29ede1d59d6588af40018cfc345b17ce979b7b45424628dc8bb/grpcio-1.76.0-cp312-cp312-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:9f8f757bebaaea112c00dba718fc0d3260052ce714e25804a03f93f5d1c6cc11", size = 7044267, upload-time = "2025-10-21T16:21:25.995Z" },
+ { url = "https://files.pythonhosted.org/packages/f5/86/f6ec2164f743d9609691115ae8ece098c76b894ebe4f7c94a655c6b03e98/grpcio-1.76.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:980a846182ce88c4f2f7e2c22c56aefd515daeb36149d1c897f83cf57999e0b6", size = 6573963, upload-time = "2025-10-21T16:21:28.631Z" },
+ { url = "https://files.pythonhosted.org/packages/60/bc/8d9d0d8505feccfdf38a766d262c71e73639c165b311c9457208b56d92ae/grpcio-1.76.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:f92f88e6c033db65a5ae3d97905c8fea9c725b63e28d5a75cb73b49bda5024d8", size = 7164484, upload-time = "2025-10-21T16:21:30.837Z" },
+ { url = "https://files.pythonhosted.org/packages/67/e6/5d6c2fc10b95edf6df9b8f19cf10a34263b7fd48493936fffd5085521292/grpcio-1.76.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:4baf3cbe2f0be3289eb68ac8ae771156971848bb8aaff60bad42005539431980", size = 8127777, upload-time = "2025-10-21T16:21:33.577Z" },
+ { url = "https://files.pythonhosted.org/packages/3f/c8/dce8ff21c86abe025efe304d9e31fdb0deaaa3b502b6a78141080f206da0/grpcio-1.76.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:615ba64c208aaceb5ec83bfdce7728b80bfeb8be97562944836a7a0a9647d882", size = 7594014, upload-time = "2025-10-21T16:21:41.882Z" },
+ { url = "https://files.pythonhosted.org/packages/e0/42/ad28191ebf983a5d0ecef90bab66baa5a6b18f2bfdef9d0a63b1973d9f75/grpcio-1.76.0-cp312-cp312-win32.whl", hash = "sha256:45d59a649a82df5718fd9527ce775fd66d1af35e6d31abdcdc906a49c6822958", size = 3984750, upload-time = "2025-10-21T16:21:44.006Z" },
+ { url = "https://files.pythonhosted.org/packages/9e/00/7bd478cbb851c04a48baccaa49b75abaa8e4122f7d86da797500cccdd771/grpcio-1.76.0-cp312-cp312-win_amd64.whl", hash = "sha256:c088e7a90b6017307f423efbb9d1ba97a22aa2170876223f9709e9d1de0b5347", size = 4704003, upload-time = "2025-10-21T16:21:46.244Z" },
+ { url = "https://files.pythonhosted.org/packages/fc/ed/71467ab770effc9e8cef5f2e7388beb2be26ed642d567697bb103a790c72/grpcio-1.76.0-cp313-cp313-linux_armv7l.whl", hash = "sha256:26ef06c73eb53267c2b319f43e6634c7556ea37672029241a056629af27c10e2", size = 5807716, upload-time = "2025-10-21T16:21:48.475Z" },
+ { url = "https://files.pythonhosted.org/packages/2c/85/c6ed56f9817fab03fa8a111ca91469941fb514e3e3ce6d793cb8f1e1347b/grpcio-1.76.0-cp313-cp313-macosx_11_0_universal2.whl", hash = "sha256:45e0111e73f43f735d70786557dc38141185072d7ff8dc1829d6a77ac1471468", size = 11821522, upload-time = "2025-10-21T16:21:51.142Z" },
+ { url = "https://files.pythonhosted.org/packages/ac/31/2b8a235ab40c39cbc141ef647f8a6eb7b0028f023015a4842933bc0d6831/grpcio-1.76.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:83d57312a58dcfe2a3a0f9d1389b299438909a02db60e2f2ea2ae2d8034909d3", size = 6362558, upload-time = "2025-10-21T16:21:54.213Z" },
+ { url = "https://files.pythonhosted.org/packages/bd/64/9784eab483358e08847498ee56faf8ff6ea8e0a4592568d9f68edc97e9e9/grpcio-1.76.0-cp313-cp313-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:3e2a27c89eb9ac3d81ec8835e12414d73536c6e620355d65102503064a4ed6eb", size = 7049990, upload-time = "2025-10-21T16:21:56.476Z" },
+ { url = "https://files.pythonhosted.org/packages/2b/94/8c12319a6369434e7a184b987e8e9f3b49a114c489b8315f029e24de4837/grpcio-1.76.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:61f69297cba3950a524f61c7c8ee12e55c486cb5f7db47ff9dcee33da6f0d3ae", size = 6575387, upload-time = "2025-10-21T16:21:59.051Z" },
+ { url = "https://files.pythonhosted.org/packages/15/0f/f12c32b03f731f4a6242f771f63039df182c8b8e2cf8075b245b409259d4/grpcio-1.76.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:6a15c17af8839b6801d554263c546c69c4d7718ad4321e3166175b37eaacca77", size = 7166668, upload-time = "2025-10-21T16:22:02.049Z" },
+ { url = "https://files.pythonhosted.org/packages/ff/2d/3ec9ce0c2b1d92dd59d1c3264aaec9f0f7c817d6e8ac683b97198a36ed5a/grpcio-1.76.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:25a18e9810fbc7e7f03ec2516addc116a957f8cbb8cbc95ccc80faa072743d03", size = 8124928, upload-time = "2025-10-21T16:22:04.984Z" },
+ { url = "https://files.pythonhosted.org/packages/1a/74/fd3317be5672f4856bcdd1a9e7b5e17554692d3db9a3b273879dc02d657d/grpcio-1.76.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:931091142fd8cc14edccc0845a79248bc155425eee9a98b2db2ea4f00a235a42", size = 7589983, upload-time = "2025-10-21T16:22:07.881Z" },
+ { url = "https://files.pythonhosted.org/packages/45/bb/ca038cf420f405971f19821c8c15bcbc875505f6ffadafe9ffd77871dc4c/grpcio-1.76.0-cp313-cp313-win32.whl", hash = "sha256:5e8571632780e08526f118f74170ad8d50fb0a48c23a746bef2a6ebade3abd6f", size = 3984727, upload-time = "2025-10-21T16:22:10.032Z" },
+ { url = "https://files.pythonhosted.org/packages/41/80/84087dc56437ced7cdd4b13d7875e7439a52a261e3ab4e06488ba6173b0a/grpcio-1.76.0-cp313-cp313-win_amd64.whl", hash = "sha256:f9f7bd5faab55f47231ad8dba7787866b69f5e93bc306e3915606779bbfb4ba8", size = 4702799, upload-time = "2025-10-21T16:22:12.709Z" },
+ { url = "https://files.pythonhosted.org/packages/b4/46/39adac80de49d678e6e073b70204091e76631e03e94928b9ea4ecf0f6e0e/grpcio-1.76.0-cp314-cp314-linux_armv7l.whl", hash = "sha256:ff8a59ea85a1f2191a0ffcc61298c571bc566332f82e5f5be1b83c9d8e668a62", size = 5808417, upload-time = "2025-10-21T16:22:15.02Z" },
+ { url = "https://files.pythonhosted.org/packages/9c/f5/a4531f7fb8b4e2a60b94e39d5d924469b7a6988176b3422487be61fe2998/grpcio-1.76.0-cp314-cp314-macosx_11_0_universal2.whl", hash = "sha256:06c3d6b076e7b593905d04fdba6a0525711b3466f43b3400266f04ff735de0cd", size = 11828219, upload-time = "2025-10-21T16:22:17.954Z" },
+ { url = "https://files.pythonhosted.org/packages/4b/1c/de55d868ed7a8bd6acc6b1d6ddc4aa36d07a9f31d33c912c804adb1b971b/grpcio-1.76.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:fd5ef5932f6475c436c4a55e4336ebbe47bd3272be04964a03d316bbf4afbcbc", size = 6367826, upload-time = "2025-10-21T16:22:20.721Z" },
+ { url = "https://files.pythonhosted.org/packages/59/64/99e44c02b5adb0ad13ab3adc89cb33cb54bfa90c74770f2607eea629b86f/grpcio-1.76.0-cp314-cp314-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:b331680e46239e090f5b3cead313cc772f6caa7d0fc8de349337563125361a4a", size = 7049550, upload-time = "2025-10-21T16:22:23.637Z" },
+ { url = "https://files.pythonhosted.org/packages/43/28/40a5be3f9a86949b83e7d6a2ad6011d993cbe9b6bd27bea881f61c7788b6/grpcio-1.76.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:2229ae655ec4e8999599469559e97630185fdd53ae1e8997d147b7c9b2b72cba", size = 6575564, upload-time = "2025-10-21T16:22:26.016Z" },
+ { url = "https://files.pythonhosted.org/packages/4b/a9/1be18e6055b64467440208a8559afac243c66a8b904213af6f392dc2212f/grpcio-1.76.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:490fa6d203992c47c7b9e4a9d39003a0c2bcc1c9aa3c058730884bbbb0ee9f09", size = 7176236, upload-time = "2025-10-21T16:22:28.362Z" },
+ { url = "https://files.pythonhosted.org/packages/0f/55/dba05d3fcc151ce6e81327541d2cc8394f442f6b350fead67401661bf041/grpcio-1.76.0-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:479496325ce554792dba6548fae3df31a72cef7bad71ca2e12b0e58f9b336bfc", size = 8125795, upload-time = "2025-10-21T16:22:31.075Z" },
+ { url = "https://files.pythonhosted.org/packages/4a/45/122df922d05655f63930cf42c9e3f72ba20aadb26c100ee105cad4ce4257/grpcio-1.76.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:1c9b93f79f48b03ada57ea24725d83a30284a012ec27eab2cf7e50a550cbbbcc", size = 7592214, upload-time = "2025-10-21T16:22:33.831Z" },
+ { url = "https://files.pythonhosted.org/packages/4a/6e/0b899b7f6b66e5af39e377055fb4a6675c9ee28431df5708139df2e93233/grpcio-1.76.0-cp314-cp314-win32.whl", hash = "sha256:747fa73efa9b8b1488a95d0ba1039c8e2dca0f741612d80415b1e1c560febf4e", size = 4062961, upload-time = "2025-10-21T16:22:36.468Z" },
+ { url = "https://files.pythonhosted.org/packages/19/41/0b430b01a2eb38ee887f88c1f07644a1df8e289353b78e82b37ef988fb64/grpcio-1.76.0-cp314-cp314-win_amd64.whl", hash = "sha256:922fa70ba549fce362d2e2871ab542082d66e2aaf0c19480ea453905b01f384e", size = 4834462, upload-time = "2025-10-21T16:22:39.772Z" },
+]
+
[[package]]
name = "h11"
version = "0.16.0"
@@ -444,6 +617,18 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/0e/61/66938bbb5fc52dbdf84594873d5b51fb1f7c7794e9c0f5bd885f30bc507b/idna-3.11-py3-none-any.whl", hash = "sha256:771a87f49d9defaf64091e6e6fe9c18d4833f140bd19464795bc32d966ca37ea", size = 71008, upload-time = "2025-10-12T14:55:18.883Z" },
]

+[[package]]
+name = "importlib-metadata"
+version = "8.7.0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "zipp" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/76/66/650a33bd90f786193e4de4b3ad86ea60b53c89b669a5c7be931fac31cdb0/importlib_metadata-8.7.0.tar.gz", hash = "sha256:d13b81ad223b890aa16c5471f2ac3056cf76c5f10f82d6f9292f0b415f389000", size = 56641, upload-time = "2025-04-27T15:29:01.736Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/20/b0/36bd937216ec521246249be3bf9855081de4c5e06a0c9b4219dbeda50373/importlib_metadata-8.7.0-py3-none-any.whl", hash = "sha256:e5dd1551894c77868a30651cef00984d50e1002d06942a7101d34870c5f02afd", size = 27656, upload-time = "2025-04-27T15:29:00.214Z" },
+]
+
[[package]]
name = "iniconfig"
version = "2.3.0"
@@ -493,12 +678,20 @@ name = "nerospatial-backend"
version = "0.1.0"
source = { editable = "." }
dependencies = [
+ { name = "aioredis" },
+ { name = "asyncpg" },
+ { name = "azure-appconfiguration" },
{ name = "azure-core" },
{ name = "azure-identity" },
{ name = "azure-keyvault-secrets" },
+ { name = "cryptography" },
{ name = "fastapi" },
- { name = "pydantic" },
+ { name = "opentelemetry-api" },
+ { name = "opentelemetry-exporter-otlp-proto-grpc" },
+ { name = "opentelemetry-sdk" },
+ { name = "pydantic", extra = ["email"] },
{ name = "pydantic-settings" },
+ { name = "pyjwt" },
{ name = "python-dotenv" },
{ name = "uvicorn", extra = ["standard"] },
]
@@ -514,14 +707,22 @@ dev = [

 [package.metadata]
requires-dist = [
+ { name = "aioredis", specifier = ">=2.0.0" },
+ { name = "asyncpg", specifier = ">=0.29.0" },
+ { name = "azure-appconfiguration", specifier = ">=1.5.0" },
{ name = "azure-core", specifier = ">=1.36.0" },
{ name = "azure-identity", specifier = ">=1.25.0" },
{ name = "azure-keyvault-secrets", specifier = ">=4.10.0" },
+ { name = "cryptography", specifier = ">=41.0.0" },
{ name = "fastapi", specifier = ">=0.104.0" },
{ name = "httpx", marker = "extra == 'dev'", specifier = ">=0.27.0" },
+ { name = "opentelemetry-api", specifier = ">=1.20.0" },
+ { name = "opentelemetry-exporter-otlp-proto-grpc", specifier = ">=1.20.0" },
+ { name = "opentelemetry-sdk", specifier = ">=1.20.0" },
{ name = "pre-commit", marker = "extra == 'dev'", specifier = ">=3.5.0" },
- { name = "pydantic", specifier = ">=2.5.0" },
+ { name = "pydantic", extras = ["email"], specifier = ">=2.5.0" },
{ name = "pydantic-settings", specifier = ">=2.1.0" },
+ { name = "pyjwt", specifier = ">=2.8.0" },
{ name = "pytest", marker = "extra == 'dev'", specifier = ">=8.0.0" },
{ name = "pytest-asyncio", marker = "extra == 'dev'", specifier = ">=0.23.0" },
{ name = "python-dotenv", specifier = ">=1.0.0" },
@@ -539,6 +740,88 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/d2/1d/1b658dbd2b9fa9c4c9f32accbfc0205d532c8c6194dc0f2a4c0428e7128a/nodeenv-1.9.1-py2.py3-none-any.whl", hash = "sha256:ba11c9782d29c27c70ffbdda2d7415098754709be8a7056d79a737cd901155c9", size = 22314, upload-time = "2024-06-04T18:44:08.352Z" },
]

+[[package]]
+name = "opentelemetry-api"
+version = "1.39.1"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "importlib-metadata" },
+ { name = "typing-extensions" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/97/b9/3161be15bb8e3ad01be8be5a968a9237c3027c5be504362ff800fca3e442/opentelemetry_api-1.39.1.tar.gz", hash = "sha256:fbde8c80e1b937a2c61f20347e91c0c18a1940cecf012d62e65a7caf08967c9c", size = 65767, upload-time = "2025-12-11T13:32:39.182Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/cf/df/d3f1ddf4bb4cb50ed9b1139cc7b1c54c34a1e7ce8fd1b9a37c0d1551a6bd/opentelemetry_api-1.39.1-py3-none-any.whl", hash = "sha256:2edd8463432a7f8443edce90972169b195e7d6a05500cd29e6d13898187c9950", size = 66356, upload-time = "2025-12-11T13:32:17.304Z" },
+]
+
+[[package]]
+name = "opentelemetry-exporter-otlp-proto-common"
+version = "1.39.1"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "opentelemetry-proto" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/e9/9d/22d241b66f7bbde88a3bfa6847a351d2c46b84de23e71222c6aae25c7050/opentelemetry_exporter_otlp_proto_common-1.39.1.tar.gz", hash = "sha256:763370d4737a59741c89a67b50f9e39271639ee4afc999dadfe768541c027464", size = 20409, upload-time = "2025-12-11T13:32:40.885Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/8c/02/ffc3e143d89a27ac21fd557365b98bd0653b98de8a101151d5805b5d4c33/opentelemetry_exporter_otlp_proto_common-1.39.1-py3-none-any.whl", hash = "sha256:08f8a5862d64cc3435105686d0216c1365dc5701f86844a8cd56597d0c764fde", size = 18366, upload-time = "2025-12-11T13:32:20.2Z" },
+]
+
+[[package]]
+name = "opentelemetry-exporter-otlp-proto-grpc"
+version = "1.39.1"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "googleapis-common-protos" },
+ { name = "grpcio" },
+ { name = "opentelemetry-api" },
+ { name = "opentelemetry-exporter-otlp-proto-common" },
+ { name = "opentelemetry-proto" },
+ { name = "opentelemetry-sdk" },
+ { name = "typing-extensions" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/53/48/b329fed2c610c2c32c9366d9dc597202c9d1e58e631c137ba15248d8850f/opentelemetry_exporter_otlp_proto_grpc-1.39.1.tar.gz", hash = "sha256:772eb1c9287485d625e4dbe9c879898e5253fea111d9181140f51291b5fec3ad", size = 24650, upload-time = "2025-12-11T13:32:41.429Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/81/a3/cc9b66575bd6597b98b886a2067eea2693408d2d5f39dad9ab7fc264f5f3/opentelemetry_exporter_otlp_proto_grpc-1.39.1-py3-none-any.whl", hash = "sha256:fa1c136a05c7e9b4c09f739469cbdb927ea20b34088ab1d959a849b5cc589c18", size = 19766, upload-time = "2025-12-11T13:32:21.027Z" },
+]
+
+[[package]]
+name = "opentelemetry-proto"
+version = "1.39.1"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "protobuf" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/49/1d/f25d76d8260c156c40c97c9ed4511ec0f9ce353f8108ca6e7561f82a06b2/opentelemetry_proto-1.39.1.tar.gz", hash = "sha256:6c8e05144fc0d3ed4d22c2289c6b126e03bcd0e6a7da0f16cedd2e1c2772e2c8", size = 46152, upload-time = "2025-12-11T13:32:48.681Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/51/95/b40c96a7b5203005a0b03d8ce8cd212ff23f1793d5ba289c87a097571b18/opentelemetry_proto-1.39.1-py3-none-any.whl", hash = "sha256:22cdc78efd3b3765d09e68bfbd010d4fc254c9818afd0b6b423387d9dee46007", size = 72535, upload-time = "2025-12-11T13:32:33.866Z" },
+]
+
+[[package]]
+name = "opentelemetry-sdk"
+version = "1.39.1"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "opentelemetry-api" },
+ { name = "opentelemetry-semantic-conventions" },
+ { name = "typing-extensions" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/eb/fb/c76080c9ba07e1e8235d24cdcc4d125ef7aa3edf23eb4e497c2e50889adc/opentelemetry_sdk-1.39.1.tar.gz", hash = "sha256:cf4d4563caf7bff906c9f7967e2be22d0d6b349b908be0d90fb21c8e9c995cc6", size = 171460, upload-time = "2025-12-11T13:32:49.369Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/7c/98/e91cf858f203d86f4eccdf763dcf01cf03f1dae80c3750f7e635bfa206b6/opentelemetry_sdk-1.39.1-py3-none-any.whl", hash = "sha256:4d5482c478513ecb0a5d938dcc61394e647066e0cc2676bee9f3af3f3f45f01c", size = 132565, upload-time = "2025-12-11T13:32:35.069Z" },
+]
+
+[[package]]
+name = "opentelemetry-semantic-conventions"
+version = "0.60b1"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "opentelemetry-api" },
+ { name = "typing-extensions" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/91/df/553f93ed38bf22f4b999d9be9c185adb558982214f33eae539d3b5cd0858/opentelemetry_semantic_conventions-0.60b1.tar.gz", hash = "sha256:87c228b5a0669b748c76d76df6c364c369c28f1c465e50f661e39737e84bc953", size = 137935, upload-time = "2025-12-11T13:32:50.487Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/7a/5e/5958555e09635d09b75de3c4f8b9cae7335ca545d77392ffe7331534c402/opentelemetry_semantic_conventions-0.60b1-py3-none-any.whl", hash = "sha256:9fa8c8b0c110da289809292b0591220d3a7b53c1526a23021e977d68597893fb", size = 219982, upload-time = "2025-12-11T13:32:36.955Z" },
+]
+
[[package]]
name = "packaging"
version = "25.0"
@@ -582,6 +865,21 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/5d/c4/b2d28e9d2edf4f1713eb3c29307f1a63f3d67cf09bdda29715a36a68921a/pre_commit-4.5.0-py2.py3-none-any.whl", hash = "sha256:25e2ce09595174d9c97860a95609f9f852c0614ba602de3561e267547f2335e1", size = 226429, upload-time = "2025-11-22T21:02:40.836Z" },
]

+[[package]]
+name = "protobuf"
+version = "6.33.2"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/34/44/e49ecff446afeec9d1a66d6bbf9adc21e3c7cea7803a920ca3773379d4f6/protobuf-6.33.2.tar.gz", hash = "sha256:56dc370c91fbb8ac85bc13582c9e373569668a290aa2e66a590c2a0d35ddb9e4", size = 444296, upload-time = "2025-12-06T00:17:53.311Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/bc/91/1e3a34881a88697a7354ffd177e8746e97a722e5e8db101544b47e84afb1/protobuf-6.33.2-cp310-abi3-win32.whl", hash = "sha256:87eb388bd2d0f78febd8f4c8779c79247b26a5befad525008e49a6955787ff3d", size = 425603, upload-time = "2025-12-06T00:17:41.114Z" },
+ { url = "https://files.pythonhosted.org/packages/64/20/4d50191997e917ae13ad0a235c8b42d8c1ab9c3e6fd455ca16d416944355/protobuf-6.33.2-cp310-abi3-win_amd64.whl", hash = "sha256:fc2a0e8b05b180e5fc0dd1559fe8ebdae21a27e81ac77728fb6c42b12c7419b4", size = 436930, upload-time = "2025-12-06T00:17:43.278Z" },
+ { url = "https://files.pythonhosted.org/packages/b2/ca/7e485da88ba45c920fb3f50ae78de29ab925d9e54ef0de678306abfbb497/protobuf-6.33.2-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:d9b19771ca75935b3a4422957bc518b0cecb978b31d1dd12037b088f6bcc0e43", size = 427621, upload-time = "2025-12-06T00:17:44.445Z" },
+ { url = "https://files.pythonhosted.org/packages/7d/4f/f743761e41d3b2b2566748eb76bbff2b43e14d5fcab694f494a16458b05f/protobuf-6.33.2-cp39-abi3-manylinux2014_aarch64.whl", hash = "sha256:b5d3b5625192214066d99b2b605f5783483575656784de223f00a8d00754fc0e", size = 324460, upload-time = "2025-12-06T00:17:45.678Z" },
+ { url = "https://files.pythonhosted.org/packages/b1/fa/26468d00a92824020f6f2090d827078c09c9c587e34cbfd2d0c7911221f8/protobuf-6.33.2-cp39-abi3-manylinux2014_s390x.whl", hash = "sha256:8cd7640aee0b7828b6d03ae518b5b4806fdfc1afe8de82f79c3454f8aef29872", size = 339168, upload-time = "2025-12-06T00:17:46.813Z" },
+ { url = "https://files.pythonhosted.org/packages/56/13/333b8f421738f149d4fe5e49553bc2a2ab75235486259f689b4b91f96cec/protobuf-6.33.2-cp39-abi3-manylinux2014_x86_64.whl", hash = "sha256:1f8017c48c07ec5859106533b682260ba3d7c5567b1ca1f24297ce03384d1b4f", size = 323270, upload-time = "2025-12-06T00:17:48.253Z" },
+ { url = "https://files.pythonhosted.org/packages/0e/15/4f02896cc3df04fc465010a4c6a0cd89810f54617a32a70ef531ed75d61c/protobuf-6.33.2-py3-none-any.whl", hash = "sha256:7636aad9bb01768870266de5dc009de2d1b936771b38a793f73cbbf279c91c5c", size = 170501, upload-time = "2025-12-06T00:17:52.211Z" },
+]
+
[[package]]
name = "pycparser"
version = "2.23"
@@ -606,6 +904,11 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/5a/87/b70ad306ebb6f9b585f114d0ac2137d792b48be34d732d60e597c2f8465a/pydantic-2.12.5-py3-none-any.whl", hash = "sha256:e561593fccf61e8a20fc46dfc2dfe075b8be7d0188df33f221ad1f0139180f9d", size = 463580, upload-time = "2025-11-26T15:11:44.605Z" },
]

+[package.optional-dependencies]
+email = [
+ { name = "email-validator" },
+]
+
[[package]]
name = "pydantic-core"
version = "2.41.5"
@@ -1121,3 +1424,12 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/1b/6c/c65773d6cab416a64d191d6ee8a8b1c68a09970ea6909d16965d26bfed1e/websockets-15.0.1-cp313-cp313-win_amd64.whl", hash = "sha256:e09473f095a819042ecb2ab9465aee615bd9c2028e4ef7d933600a8401c79561", size = 176837, upload-time = "2025-03-05T20:02:55.237Z" },
{ url = "https://files.pythonhosted.org/packages/fa/a8/5b41e0da817d64113292ab1f8247140aac61cbf6cfd085d6a0fa77f4984f/websockets-15.0.1-py3-none-any.whl", hash = "sha256:f7a866fbc1e97b5c617ee4116daaa09b722101d4a3c170c787450ba409f9736f", size = 169743, upload-time = "2025-03-05T20:03:39.41Z" },
]
+
+[[package]]
+name = "zipp"
+version = "3.23.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/e3/02/0f2892c661036d50ede074e376733dca2ae7c6eb617489437771209d4180/zipp-3.23.0.tar.gz", hash = "sha256:a07157588a12518c9d4034df3fbbee09c814741a33ff63c05fa29d26a2404166", size = 25547, upload-time = "2025-06-08T17:06:39.4Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/2e/54/647ade08bf0db230bfea292f893923872fd20be6ac6f53b2b936ba839d75/zipp-3.23.0-py3-none-any.whl", hash = "sha256:071652d6115ed432f5ce1d34c336c0adfd6a884660d1e9712a256d3d3bd4b14e", size = 10276, upload-time = "2025-06-08T17:06:38.034Z" },
+]