diff --git a/components/backend/handlers/content.go b/components/backend/handlers/content.go index 4d2c861b5..0732cc67d 100644 --- a/components/backend/handlers/content.go +++ b/components/backend/handlers/content.go @@ -13,6 +13,7 @@ import ( "time" "ambient-code-backend/git" + "ambient-code-backend/pathutil" "github.com/gin-gonic/gin" ) @@ -61,7 +62,7 @@ func ContentGitPush(c *gin.Context) { } // Basic safety: repoDir must be under StateBaseDir - if !strings.HasPrefix(repoDir+string(os.PathSeparator), StateBaseDir+string(os.PathSeparator)) && repoDir != StateBaseDir { + if !pathutil.IsPathWithinBase(repoDir, StateBaseDir) && repoDir != StateBaseDir { log.Printf("contentGitPush: invalid repoPath resolved=%q stateBaseDir=%q", repoDir, StateBaseDir) c.JSON(http.StatusBadRequest, gin.H{"error": "invalid repoPath"}) return @@ -101,7 +102,7 @@ func ContentGitAbandon(c *gin.Context) { repoDir = StateBaseDir } - if !strings.HasPrefix(repoDir+string(os.PathSeparator), StateBaseDir+string(os.PathSeparator)) && repoDir != StateBaseDir { + if !pathutil.IsPathWithinBase(repoDir, StateBaseDir) && repoDir != StateBaseDir { log.Printf("contentGitAbandon: invalid repoPath resolved=%q base=%q", repoDir, StateBaseDir) c.JSON(http.StatusBadRequest, gin.H{"error": "invalid repoPath"}) return @@ -126,7 +127,7 @@ func ContentGitDiff(c *gin.Context) { } repoDir := filepath.Clean(filepath.Join(StateBaseDir, repoPath)) - if !strings.HasPrefix(repoDir+string(os.PathSeparator), StateBaseDir+string(os.PathSeparator)) && repoDir != StateBaseDir { + if !pathutil.IsPathWithinBase(repoDir, StateBaseDir) && repoDir != StateBaseDir { c.JSON(http.StatusBadRequest, gin.H{"error": "invalid repoPath"}) return } @@ -159,13 +160,13 @@ func ContentGitDiff(c *gin.Context) { // ContentGitStatus handles GET /content/git-status?path= func ContentGitStatus(c *gin.Context) { path := filepath.Clean("/" + strings.TrimSpace(c.Query("path"))) - if path == "/" || strings.Contains(path, "..") { + abs := filepath.Join(StateBaseDir, path) + // Verify abs is within StateBaseDir to prevent path traversal + if !pathutil.IsPathWithinBase(abs, StateBaseDir) { c.JSON(http.StatusBadRequest, gin.H{"error": "invalid path"}) return } - abs := filepath.Join(StateBaseDir, path) - // Check if directory exists if info, err := os.Stat(abs); err != nil || !info.IsDir() { c.JSON(http.StatusOK, gin.H{ @@ -224,13 +225,13 @@ func ContentGitConfigureRemote(c *gin.Context) { } path := filepath.Clean("/" + body.Path) - if path == "/" || strings.Contains(path, "..") { + abs := filepath.Join(StateBaseDir, path) + // Verify abs is within StateBaseDir to prevent path traversal + if !pathutil.IsPathWithinBase(abs, StateBaseDir) { c.JSON(http.StatusBadRequest, gin.H{"error": "invalid path"}) return } - abs := filepath.Join(StateBaseDir, path) - // Check if directory exists if info, err := os.Stat(abs); err != nil || !info.IsDir() { c.JSON(http.StatusBadRequest, gin.H{"error": "directory not found"}) @@ -301,13 +302,13 @@ func ContentGitSync(c *gin.Context) { } path := filepath.Clean("/" + body.Path) - if path == "/" || strings.Contains(path, "..") { + abs := filepath.Join(StateBaseDir, path) + // Verify abs is within StateBaseDir to prevent path traversal + if !pathutil.IsPathWithinBase(abs, StateBaseDir) { c.JSON(http.StatusBadRequest, gin.H{"error": "invalid path"}) return } - abs := filepath.Join(StateBaseDir, path) - // Check if git repo exists gitDir := filepath.Join(abs, ".git") if _, err := os.Stat(gitDir); err != nil { @@ -345,12 +346,13 @@ func ContentWrite(c 
*gin.Context) { log.Printf("ContentWrite: path=%q contentLen=%d encoding=%q StateBaseDir=%q", req.Path, len(req.Content), req.Encoding, StateBaseDir) path := filepath.Clean("/" + strings.TrimSpace(req.Path)) - if path == "/" || strings.Contains(path, "..") { - log.Printf("ContentWrite: invalid path rejected: path=%q", path) + abs := filepath.Join(StateBaseDir, path) + // Verify abs is within StateBaseDir to prevent path traversal + if !pathutil.IsPathWithinBase(abs, StateBaseDir) { + log.Printf("ContentWrite: path traversal attempt rejected: path=%q abs=%q", path, abs) c.JSON(http.StatusBadRequest, gin.H{"error": "invalid path"}) return } - abs := filepath.Join(StateBaseDir, path) log.Printf("ContentWrite: absolute path=%q", abs) if err := os.MkdirAll(filepath.Dir(abs), 0755); err != nil { @@ -385,12 +387,13 @@ func ContentRead(c *gin.Context) { log.Printf("ContentRead: requested path=%q StateBaseDir=%q", c.Query("path"), StateBaseDir) log.Printf("ContentRead: cleaned path=%q", path) - if path == "/" || strings.Contains(path, "..") { - log.Printf("ContentRead: invalid path rejected: path=%q", path) + abs := filepath.Join(StateBaseDir, path) + // Verify abs is within StateBaseDir to prevent path traversal + if !pathutil.IsPathWithinBase(abs, StateBaseDir) { + log.Printf("ContentRead: path traversal attempt rejected: path=%q abs=%q", path, abs) c.JSON(http.StatusBadRequest, gin.H{"error": "invalid path"}) return } - abs := filepath.Join(StateBaseDir, path) log.Printf("ContentRead: absolute path=%q", abs) b, err := os.ReadFile(abs) @@ -414,12 +417,13 @@ func ContentList(c *gin.Context) { log.Printf("ContentList: cleaned path=%q", path) log.Printf("ContentList: StateBaseDir=%q", StateBaseDir) - if path == "/" || strings.Contains(path, "..") { - log.Printf("ContentList: invalid path rejected: path=%q", path) + abs := filepath.Join(StateBaseDir, path) + // Verify abs is within StateBaseDir to prevent path traversal + if !pathutil.IsPathWithinBase(abs, StateBaseDir) { + log.Printf("ContentList: path traversal attempt rejected: path=%q abs=%q", path, abs) c.JSON(http.StatusBadRequest, gin.H{"error": "invalid path"}) return } - abs := filepath.Join(StateBaseDir, path) log.Printf("ContentList: absolute path=%q", abs) info, err := os.Stat(abs) @@ -672,7 +676,9 @@ func ContentGitMergeStatus(c *gin.Context) { path := filepath.Clean("/" + strings.TrimSpace(c.Query("path"))) branch := strings.TrimSpace(c.Query("branch")) - if path == "/" || strings.Contains(path, "..") { + abs := filepath.Join(StateBaseDir, path) + // Verify abs is within StateBaseDir to prevent path traversal + if !pathutil.IsPathWithinBase(abs, StateBaseDir) { c.JSON(http.StatusBadRequest, gin.H{"error": "invalid path"}) return } @@ -681,8 +687,6 @@ func ContentGitMergeStatus(c *gin.Context) { branch = "main" } - abs := filepath.Join(StateBaseDir, path) - // Check if git repo exists gitDir := filepath.Join(abs, ".git") if _, err := os.Stat(gitDir); err != nil { @@ -722,7 +726,9 @@ func ContentGitPull(c *gin.Context) { } path := filepath.Clean("/" + body.Path) - if path == "/" || strings.Contains(path, "..") { + abs := filepath.Join(StateBaseDir, path) + // Verify abs is within StateBaseDir to prevent path traversal + if !pathutil.IsPathWithinBase(abs, StateBaseDir) { c.JSON(http.StatusBadRequest, gin.H{"error": "invalid path"}) return } @@ -731,8 +737,6 @@ func ContentGitPull(c *gin.Context) { body.Branch = "main" } - abs := filepath.Join(StateBaseDir, path) - if err := GitPullRepo(c.Request.Context(), abs, body.Branch); err != nil { 
c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) return @@ -757,7 +761,9 @@ func ContentGitPushToBranch(c *gin.Context) { } path := filepath.Clean("/" + body.Path) - if path == "/" || strings.Contains(path, "..") { + abs := filepath.Join(StateBaseDir, path) + // Verify abs is within StateBaseDir to prevent path traversal + if !pathutil.IsPathWithinBase(abs, StateBaseDir) { c.JSON(http.StatusBadRequest, gin.H{"error": "invalid path"}) return } @@ -770,8 +776,6 @@ func ContentGitPushToBranch(c *gin.Context) { body.Message = "Session artifacts update" } - abs := filepath.Join(StateBaseDir, path) - if err := GitPushToRepo(c.Request.Context(), abs, body.Branch, body.Message); err != nil { c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) return @@ -795,7 +799,9 @@ func ContentGitCreateBranch(c *gin.Context) { } path := filepath.Clean("/" + body.Path) - if path == "/" || strings.Contains(path, "..") { + abs := filepath.Join(StateBaseDir, path) + // Verify abs is within StateBaseDir to prevent path traversal + if !pathutil.IsPathWithinBase(abs, StateBaseDir) { c.JSON(http.StatusBadRequest, gin.H{"error": "invalid path"}) return } @@ -805,8 +811,6 @@ func ContentGitCreateBranch(c *gin.Context) { return } - abs := filepath.Join(StateBaseDir, path) - if err := GitCreateBranch(c.Request.Context(), abs, body.BranchName); err != nil { c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) return @@ -820,13 +824,13 @@ func ContentGitCreateBranch(c *gin.Context) { func ContentGitListBranches(c *gin.Context) { path := filepath.Clean("/" + strings.TrimSpace(c.Query("path"))) - if path == "/" || strings.Contains(path, "..") { + abs := filepath.Join(StateBaseDir, path) + // Verify abs is within StateBaseDir to prevent path traversal + if !pathutil.IsPathWithinBase(abs, StateBaseDir) { c.JSON(http.StatusBadRequest, gin.H{"error": "invalid path"}) return } - abs := filepath.Join(StateBaseDir, path) - branches, err := GitListRemoteBranches(c.Request.Context(), abs) if err != nil { // Log actual error for debugging, but return generic message to avoid leaking internal details @@ -837,3 +841,43 @@ func ContentGitListBranches(c *gin.Context) { c.JSON(http.StatusOK, gin.H{"branches": branches}) } + +// ContentDelete handles DELETE /content/delete when running in CONTENT_SERVICE_MODE +func ContentDelete(c *gin.Context) { + var req struct { + Path string `json:"path"` + } + if err := c.ShouldBindJSON(&req); err != nil { + log.Printf("ContentDelete: bind JSON failed: %v", err) + c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) + return + } + log.Printf("ContentDelete: path=%q StateBaseDir=%q", req.Path, StateBaseDir) + + path := filepath.Clean("/" + strings.TrimSpace(req.Path)) + abs := filepath.Join(StateBaseDir, path) + // Verify abs is within StateBaseDir to prevent path traversal + if !pathutil.IsPathWithinBase(abs, StateBaseDir) { + log.Printf("ContentDelete: path traversal attempt rejected: path=%q abs=%q", path, abs) + c.JSON(http.StatusBadRequest, gin.H{"error": "invalid path"}) + return + } + log.Printf("ContentDelete: absolute path=%q", abs) + + // Check if file exists + if _, err := os.Stat(abs); os.IsNotExist(err) { + log.Printf("ContentDelete: file not found: %q", abs) + c.JSON(http.StatusNotFound, gin.H{"error": "file not found"}) + return + } + + // Delete the file + if err := os.Remove(abs); err != nil { + log.Printf("ContentDelete: delete failed for %q: %v", abs, err) + c.JSON(http.StatusInternalServerError, gin.H{"error": "failed to delete file"}) + return + } + + 
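[Editor's note] For reference, a caller of this new endpoint sends a DELETE request with a JSON body containing the file path, matching the ContentDelete handler shown here. A minimal client-side sketch follows; the base URL, token, package name, and DeleteWorkspaceFile helper are illustrative assumptions, not code from this PR.

```go
package contentclient

import (
	"bytes"
	"context"
	"encoding/json"
	"fmt"
	"net/http"
	"time"
)

// DeleteWorkspaceFile calls the content service's DELETE /content/delete
// endpoint with a {"path": ...} JSON body, mirroring the handler above.
// baseURL and token are assumptions for illustration.
func DeleteWorkspaceFile(ctx context.Context, baseURL, token, path string) error {
	body, err := json.Marshal(map[string]string{"path": path})
	if err != nil {
		return fmt.Errorf("marshal body: %w", err)
	}
	req, err := http.NewRequestWithContext(ctx, http.MethodDelete, baseURL+"/content/delete", bytes.NewReader(body))
	if err != nil {
		return fmt.Errorf("build request: %w", err)
	}
	req.Header.Set("Content-Type", "application/json")
	if token != "" {
		req.Header.Set("Authorization", token)
	}
	resp, err := (&http.Client{Timeout: 4 * time.Second}).Do(req)
	if err != nil {
		return fmt.Errorf("call content service: %w", err)
	}
	defer resp.Body.Close()
	if resp.StatusCode != http.StatusOK {
		return fmt.Errorf("delete failed: status %d", resp.StatusCode)
	}
	return nil
}
```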
log.Printf("ContentDelete: successfully deleted %q", abs) + c.JSON(http.StatusOK, gin.H{"message": "file deleted successfully"}) +} diff --git a/components/backend/handlers/sessions.go b/components/backend/handlers/sessions.go index 12c0cc73b..111d97bb3 100644 --- a/components/backend/handlers/sessions.go +++ b/components/backend/handlers/sessions.go @@ -3,6 +3,7 @@ package handlers import ( "context" + "encoding/base64" "encoding/json" "fmt" "io" @@ -10,11 +11,14 @@ import ( "net/http" "net/url" "os" + "path/filepath" "sort" "strings" "time" + "unicode/utf8" "ambient-code-backend/git" + "ambient-code-backend/pathutil" "ambient-code-backend/types" "github.com/gin-gonic/gin" @@ -42,6 +46,46 @@ var ( const runnerTokenRefreshedAtAnnotation = "ambient-code.io/token-refreshed-at" +// isBinaryContentType checks if a MIME type represents binary content that should be base64 encoded. +// This includes images, archives, documents, executables, and other non-text formats. +func isBinaryContentType(contentType string) bool { + // Comprehensive list of binary MIME type prefixes and exact matches + binaryPrefixes := []string{ + "image/", // All image formats (jpeg, png, gif, webp, etc.) + "audio/", // All audio formats (mp3, wav, ogg, etc.) + "video/", // All video formats (mp4, webm, avi, etc.) + "font/", // Font files (woff, woff2, ttf, etc.) + "application/octet-stream", // Generic binary + "application/pdf", // PDF documents + "application/zip", // ZIP archives + "application/x-", // Many binary formats (x-7z-compressed, x-tar, x-gzip, etc.) + "application/vnd.", // Vendor-specific formats (MS Office, etc.) + } + + // Check exact matches for common binary types not covered by prefixes + binaryExact := []string{ + "application/gzip", + "application/x-bzip2", + "application/java-archive", // JAR files + "application/msword", // Legacy .doc + "application/rtf", + } + + for _, prefix := range binaryPrefixes { + if strings.HasPrefix(contentType, prefix) { + return true + } + } + + for _, exact := range binaryExact { + if contentType == exact { + return true + } + } + + return false +} + // parseSpec parses AgenticSessionSpec with v1alpha1 fields func parseSpec(spec map[string]interface{}) types.AgenticSessionSpec { result := types.AgenticSessionSpec{} @@ -829,7 +873,10 @@ func provisionRunnerTokenForSession(c *gin.Context, reqK8s kubernetes.Interface, }, }, } - b, _ := json.Marshal(patch) + b, err := json.Marshal(patch) + if err != nil { + return fmt.Errorf("marshal patch: %w", err) + } if _, err := reqDyn.Resource(gvr).Namespace(project).Patch(c.Request.Context(), obj.GetName(), ktypes.MergePatchType, b, v1.PatchOptions{}); err != nil { return fmt.Errorf("annotate AgenticSession: %w", err) } @@ -1569,7 +1616,18 @@ func GetWorkflowMetadata(c *gin.Context) { } defer resp.Body.Close() - b, _ := io.ReadAll(resp.Body) + b, err := io.ReadAll(resp.Body) + if err != nil { + log.Printf("GetWorkflowMetadata: failed to read response body: %v", err) + c.JSON(http.StatusOK, gin.H{"commands": []interface{}{}, "agents": []interface{}{}}) + return + } + + // Log if content service returned an error + if resp.StatusCode >= 400 { + log.Printf("GetWorkflowMetadata: content service returned error status %d: %s", resp.StatusCode, string(b)) + } + c.Data(resp.StatusCode, "application/json", b) } @@ -2452,7 +2510,12 @@ func ListSessionWorkspace(c *gin.Context) { endpoint := fmt.Sprintf("http://%s.%s.svc:8080", serviceName, project) u := fmt.Sprintf("%s/content/list?path=%s", endpoint, url.QueryEscape(absPath)) 
log.Printf("ListSessionWorkspace: project=%s session=%s endpoint=%s", project, session, endpoint) - req, _ := http.NewRequestWithContext(c.Request.Context(), http.MethodGet, u, nil) + req, err := http.NewRequestWithContext(c.Request.Context(), http.MethodGet, u, nil) + if err != nil { + log.Printf("ListSessionWorkspace: failed to create HTTP request: %v", err) + c.JSON(http.StatusInternalServerError, gin.H{"error": "Failed to create request"}) + return + } if strings.TrimSpace(token) != "" { req.Header.Set("Authorization", token) } @@ -2465,7 +2528,17 @@ func ListSessionWorkspace(c *gin.Context) { return } defer resp.Body.Close() - b, _ := io.ReadAll(resp.Body) + b, err := io.ReadAll(resp.Body) + if err != nil { + log.Printf("ListSessionWorkspace: failed to read response body: %v", err) + c.JSON(http.StatusOK, gin.H{"items": []any{}}) + return + } + + // Log if content service returned an error (other than 404 which is handled below) + if resp.StatusCode >= 400 && resp.StatusCode != http.StatusNotFound { + log.Printf("ListSessionWorkspace: content service returned error status %d: %s", resp.StatusCode, string(b)) + } // If content service returns 404, check if it's because workspace doesn't exist yet if resp.StatusCode == http.StatusNotFound { @@ -2514,7 +2587,12 @@ func GetSessionWorkspaceFile(c *gin.Context) { endpoint := fmt.Sprintf("http://%s.%s.svc:8080", serviceName, project) u := fmt.Sprintf("%s/content/file?path=%s", endpoint, url.QueryEscape(absPath)) - req, _ := http.NewRequestWithContext(c.Request.Context(), http.MethodGet, u, nil) + req, err := http.NewRequestWithContext(c.Request.Context(), http.MethodGet, u, nil) + if err != nil { + log.Printf("GetSessionWorkspaceFile: failed to create HTTP request: %v", err) + c.JSON(http.StatusInternalServerError, gin.H{"error": "Failed to create request"}) + return + } if strings.TrimSpace(token) != "" { req.Header.Set("Authorization", token) } @@ -2525,7 +2603,18 @@ func GetSessionWorkspaceFile(c *gin.Context) { return } defer resp.Body.Close() - b, _ := io.ReadAll(resp.Body) + b, err := io.ReadAll(resp.Body) + if err != nil { + log.Printf("GetSessionWorkspaceFile: failed to read response body: %v", err) + c.JSON(http.StatusInternalServerError, gin.H{"error": "Failed to read file from content service"}) + return + } + + // Log if content service returned an error + if resp.StatusCode >= 400 { + log.Printf("GetSessionWorkspaceFile: content service returned error status %d for path %s", resp.StatusCode, sub) + } + c.Data(resp.StatusCode, resp.Header.Get("Content-Type"), b) } @@ -2543,36 +2632,184 @@ func PutSessionWorkspaceFile(c *gin.Context) { c.JSON(http.StatusBadRequest, gin.H{"error": "Project namespace required"}) return } + + // Get user-scoped K8s clients and validate authentication IMMEDIATELY + reqK8s, reqDyn := GetK8sClientsForRequest(c) + if reqK8s == nil { + c.JSON(http.StatusUnauthorized, gin.H{"error": "Invalid or missing authentication token"}) + c.Abort() + return + } + + // Validate and sanitize path to prevent directory traversal + // Use robust path validation that works across platforms sub := strings.TrimPrefix(c.Param("path"), "/") - absPath := "/sessions/" + session + "/workspace/" + sub + workspaceBase := "/sessions/" + session + "/workspace" + + // Construct absolute path using filepath.Join for proper path handling + absPath := filepath.Join(workspaceBase, sub) + + // Use robust path validation from pathutil package + // This is more secure than manual string checks and works across platforms + if 
!pathutil.IsPathWithinBase(absPath, workspaceBase) { + log.Printf("PutSessionWorkspaceFile: path traversal attempt detected - path=%q escapes workspace=%q", absPath, workspaceBase) + c.JSON(http.StatusBadRequest, gin.H{"error": "Invalid path: must be within workspace directory"}) + return + } + + // Convert to forward slashes for content service (expects POSIX paths) + // filepath.Join may use backslashes on Windows, but content service always uses forward slashes + absPath = filepath.ToSlash(absPath) + token := c.GetHeader("Authorization") if strings.TrimSpace(token) == "" { token = c.GetHeader("X-Forwarded-Access-Token") } - // Try temp service first (for completed sessions), then regular service - serviceName := fmt.Sprintf("temp-content-%s", session) - k8sClt, _ := GetK8sClientsForRequest(c) - if k8sClt == nil { - c.JSON(http.StatusUnauthorized, gin.H{"error": "Invalid or missing token"}) - c.Abort() + // RBAC check: verify user has update permission on agenticsessions (file operations modify session state) + // IMPORTANT: RBAC check MUST happen BEFORE checking session existence to prevent enumeration attacks + ssar := &authzv1.SelfSubjectAccessReview{ + Spec: authzv1.SelfSubjectAccessReviewSpec{ + ResourceAttributes: &authzv1.ResourceAttributes{ + Group: "vteam.ambient-code", + Resource: "agenticsessions", + Verb: "update", + Namespace: project, + }, + }, + } + res, err := reqK8s.AuthorizationV1().SelfSubjectAccessReviews().Create(c.Request.Context(), ssar, v1.CreateOptions{}) + if err != nil { + log.Printf("RBAC check failed for file upload in project %s: %v", project, err) + c.JSON(http.StatusInternalServerError, gin.H{"error": "Failed to verify permissions"}) return } - if _, err := k8sClt.CoreV1().Services(project).Get(c.Request.Context(), serviceName, v1.GetOptions{}); err != nil { - // Temp service doesn't exist, use regular service + if !res.Status.Allowed { + c.JSON(http.StatusForbidden, gin.H{"error": "Unauthorized to modify session workspace"}) + return + } + + // Verify session exists using reqDyn AFTER RBAC check + // This prevents enumeration attacks - unauthorized users get same "Forbidden" response + gvr := GetAgenticSessionV1Alpha1Resource() + item, err := reqDyn.Resource(gvr).Namespace(project).Get(c.Request.Context(), session, v1.GetOptions{}) + if err != nil { + if errors.IsNotFound(err) { + c.JSON(http.StatusNotFound, gin.H{"error": "Session not found"}) + return + } + c.JSON(http.StatusInternalServerError, gin.H{"error": "Failed to get session"}) + return + } + + // Try temp service first (for completed sessions), then regular service + serviceName := fmt.Sprintf("temp-content-%s", session) + serviceFound := false + + if _, err := reqK8s.CoreV1().Services(project).Get(c.Request.Context(), serviceName, v1.GetOptions{}); err != nil { + // Temp service doesn't exist, try regular service serviceName = fmt.Sprintf("ambient-content-%s", session) + if _, err := reqK8s.CoreV1().Services(project).Get(c.Request.Context(), serviceName, v1.GetOptions{}); err != nil { + // Neither service exists - need to spawn temp content pod + log.Printf("PutSessionWorkspaceFile: No content service found for session %s, requesting temp pod", session) + serviceFound = false + } else { + serviceFound = true + } + } else { + serviceFound = true + } + + // If no service exists, request temp content pod and return accepted status + // We already have the session item from the existence check above + if !serviceFound { + + // Check if temp content was already requested (avoid duplicate pod 
creation) + annotations := item.GetAnnotations() + if annotations != nil && annotations["ambient-code.io/temp-content-requested"] == "true" { + log.Printf("PutSessionWorkspaceFile: Temp content already requested for session %s", session) + c.JSON(http.StatusAccepted, gin.H{"message": "Content service starting, please retry upload in a few seconds"}) + return + } + + // Request temp content pod via annotation + if annotations == nil { + annotations = make(map[string]string) + } + now := time.Now().UTC().Format(time.RFC3339) + annotations["ambient-code.io/temp-content-requested"] = "true" + annotations["ambient-code.io/temp-content-last-accessed"] = now + item.SetAnnotations(annotations) + + // Use optimistic locking - if resource was modified between Get and Update, K8s returns conflict + if _, err := reqDyn.Resource(gvr).Namespace(project).Update(c.Request.Context(), item, v1.UpdateOptions{}); err != nil { + if errors.IsConflict(err) { + // Another request updated the resource - likely also requested temp pod + log.Printf("PutSessionWorkspaceFile: Conflict updating session %s (concurrent request), treating as already requested", session) + c.JSON(http.StatusAccepted, gin.H{"message": "Content service starting, please retry upload in a few seconds"}) + return + } + log.Printf("PutSessionWorkspaceFile: Failed to request temp pod: %v", err) + c.JSON(http.StatusServiceUnavailable, gin.H{"error": "Content service not available, please try again in a few seconds"}) + return + } + + log.Printf("PutSessionWorkspaceFile: Requested temp content pod for session %s", session) + c.JSON(http.StatusAccepted, gin.H{"message": "Content service starting, please retry upload in a few seconds"}) + return } endpoint := fmt.Sprintf("http://%s.%s.svc:8080", serviceName, project) log.Printf("PutSessionWorkspaceFile: using service %s for session %s", serviceName, session) - payload, _ := io.ReadAll(c.Request.Body) + payload, err := io.ReadAll(c.Request.Body) + if err != nil { + log.Printf("PutSessionWorkspaceFile: failed to read request body: %v", err) + c.JSON(http.StatusBadRequest, gin.H{"error": "Failed to read file data"}) + return + } + + // Detect if content is binary and encode accordingly + encoding := "utf8" + var content string + contentType := c.GetHeader("Content-Type") + + // If no Content-Type header, detect from payload + if contentType == "" { + contentType = http.DetectContentType(payload) + } + + // Use base64 for binary content types or if content isn't valid UTF-8 + // Check comprehensive list of binary MIME types and UTF-8 validity + // IMPORTANT: Validate UTF-8 BEFORE converting to string + isBinary := isBinaryContentType(contentType) || !utf8.Valid(payload) + + if isBinary { + encoding = "base64" + content = base64.StdEncoding.EncodeToString(payload) + // Don't log user-controlled strings (contentType header) to prevent log injection + log.Printf("PutSessionWorkspaceFile: detected binary content, using base64 encoding (size=%d, contentTypeLen=%d)", len(payload), len(contentType)) + } else { + // Only convert to string after validating UTF-8 + content = string(payload) + } + wreq := struct { Path string `json:"path"` Content string `json:"content"` Encoding string `json:"encoding"` - }{Path: absPath, Content: string(payload), Encoding: "utf8"} - b, _ := json.Marshal(wreq) - req, _ := http.NewRequestWithContext(c.Request.Context(), http.MethodPost, endpoint+"/content/write", strings.NewReader(string(b))) + }{Path: absPath, Content: content, Encoding: encoding} + b, err := json.Marshal(wreq) + 
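[Editor's note] The temp-pod request above relies on Kubernetes optimistic concurrency: re-read the session object, set the annotations, and treat an update conflict as "another request already asked". Factored out, the pattern looks roughly like the sketch below; requestTempContentPod and its signature are hypothetical, only the annotation keys come from this PR.

```go
package handlers

import (
	"context"
	"time"

	"k8s.io/apimachinery/pkg/api/errors"
	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
	"k8s.io/apimachinery/pkg/runtime/schema"
	"k8s.io/client-go/dynamic"
)

// requestTempContentPod is a hypothetical refactoring of the annotate-and-update
// logic above: it marks the session so a temp content pod gets spawned, and
// treats an optimistic-locking conflict as "already requested".
func requestTempContentPod(ctx context.Context, dyn dynamic.Interface, gvr schema.GroupVersionResource, namespace, name string) (alreadyRequested bool, err error) {
	obj, err := dyn.Resource(gvr).Namespace(namespace).Get(ctx, name, metav1.GetOptions{})
	if err != nil {
		return false, err
	}
	ann := obj.GetAnnotations()
	if ann["ambient-code.io/temp-content-requested"] == "true" {
		return true, nil
	}
	if ann == nil {
		ann = map[string]string{}
	}
	ann["ambient-code.io/temp-content-requested"] = "true"
	ann["ambient-code.io/temp-content-last-accessed"] = time.Now().UTC().Format(time.RFC3339)
	obj.SetAnnotations(ann)
	if _, err := dyn.Resource(gvr).Namespace(namespace).Update(ctx, obj, metav1.UpdateOptions{}); err != nil {
		if errors.IsConflict(err) {
			// A concurrent request updated the session first; assume it set the annotation.
			return true, nil
		}
		return false, err
	}
	return false, nil
}
```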
if err != nil { + log.Printf("PutSessionWorkspaceFile: failed to marshal request: %v", err) + c.JSON(http.StatusInternalServerError, gin.H{"error": "Failed to prepare request"}) + return + } + req, err := http.NewRequestWithContext(c.Request.Context(), http.MethodPost, endpoint+"/content/write", strings.NewReader(string(b))) + if err != nil { + log.Printf("PutSessionWorkspaceFile: failed to create HTTP request: %v", err) + c.JSON(http.StatusInternalServerError, gin.H{"error": "Failed to create request"}) + return + } if strings.TrimSpace(token) != "" { req.Header.Set("Authorization", token) } @@ -2584,10 +2821,179 @@ func PutSessionWorkspaceFile(c *gin.Context) { return } defer resp.Body.Close() - rb, _ := io.ReadAll(resp.Body) + rb, err := io.ReadAll(resp.Body) + if err != nil { + log.Printf("PutSessionWorkspaceFile: failed to read response body: %v", err) + c.JSON(http.StatusInternalServerError, gin.H{"error": "Failed to read response from content service"}) + return + } + + // Log if content service returned an error + if resp.StatusCode >= 400 { + log.Printf("PutSessionWorkspaceFile: content service returned error status %d for path %s: %s", resp.StatusCode, sub, string(rb)) + } + c.Data(resp.StatusCode, resp.Header.Get("Content-Type"), rb) } +// DeleteSessionWorkspaceFile deletes a file via content service. +func DeleteSessionWorkspaceFile(c *gin.Context) { + // Get project from context (set by middleware) or param + project := c.GetString("project") + if project == "" { + project = c.Param("projectName") + } + session := c.Param("sessionName") + + if project == "" { + log.Printf("DeleteSessionWorkspaceFile: project is empty, session=%s", session) + c.JSON(http.StatusBadRequest, gin.H{"error": "Project namespace required"}) + return + } + + // Get user-scoped K8s clients and validate authentication IMMEDIATELY + reqK8s, reqDyn := GetK8sClientsForRequest(c) + if reqK8s == nil { + c.JSON(http.StatusUnauthorized, gin.H{"error": "Invalid or missing authentication token"}) + c.Abort() + return + } + + // Validate and sanitize path to prevent directory traversal + // Use robust path validation that works across platforms + sub := strings.TrimPrefix(c.Param("path"), "/") + workspaceBase := "/sessions/" + session + "/workspace" + + // Construct absolute path using filepath.Join for proper path handling + absPath := filepath.Join(workspaceBase, sub) + + // Use robust path validation from pathutil package + // This is more secure than manual string checks and works across platforms + if !pathutil.IsPathWithinBase(absPath, workspaceBase) { + log.Printf("DeleteSessionWorkspaceFile: path traversal attempt detected - path=%q escapes workspace=%q", absPath, workspaceBase) + c.JSON(http.StatusBadRequest, gin.H{"error": "Invalid path: must be within workspace directory"}) + return + } + + // Convert to forward slashes for content service (expects POSIX paths) + // filepath.Join may use backslashes on Windows, but content service always uses forward slashes + absPath = filepath.ToSlash(absPath) + + token := c.GetHeader("Authorization") + if strings.TrimSpace(token) == "" { + token = c.GetHeader("X-Forwarded-Access-Token") + } + + // RBAC check: verify user has update permission on agenticsessions (file operations modify session state) + // IMPORTANT: RBAC check MUST happen BEFORE checking session existence to prevent enumeration attacks + ssar := &authzv1.SelfSubjectAccessReview{ + Spec: authzv1.SelfSubjectAccessReviewSpec{ + ResourceAttributes: &authzv1.ResourceAttributes{ + Group: 
"vteam.ambient-code", + Resource: "agenticsessions", + Verb: "update", + Namespace: project, + }, + }, + } + res, err := reqK8s.AuthorizationV1().SelfSubjectAccessReviews().Create(c.Request.Context(), ssar, v1.CreateOptions{}) + if err != nil { + log.Printf("RBAC check failed for file deletion in project %s: %v", project, err) + c.JSON(http.StatusInternalServerError, gin.H{"error": "Failed to verify permissions"}) + return + } + if !res.Status.Allowed { + c.JSON(http.StatusForbidden, gin.H{"error": "Unauthorized to modify session workspace"}) + return + } + + // Verify session exists using reqDyn AFTER RBAC check + // This prevents enumeration attacks - unauthorized users get same "Forbidden" response + gvr := GetAgenticSessionV1Alpha1Resource() + if _, err := reqDyn.Resource(gvr).Namespace(project).Get(c.Request.Context(), session, v1.GetOptions{}); err != nil { + if errors.IsNotFound(err) { + c.JSON(http.StatusNotFound, gin.H{"error": "Session not found"}) + return + } + log.Printf("DeleteSessionWorkspaceFile: Failed to verify session existence: %v", err) + c.JSON(http.StatusInternalServerError, gin.H{"error": "Failed to verify session"}) + return + } + + // Try temp service first, then regular service + serviceName := fmt.Sprintf("temp-content-%s", session) + serviceFound := false + + if _, err := reqK8s.CoreV1().Services(project).Get(c.Request.Context(), serviceName, v1.GetOptions{}); err != nil { + // Temp service doesn't exist, try regular service + serviceName = fmt.Sprintf("ambient-content-%s", session) + if _, err := reqK8s.CoreV1().Services(project).Get(c.Request.Context(), serviceName, v1.GetOptions{}); err != nil { + log.Printf("DeleteSessionWorkspaceFile: No content service found for session %s", session) + c.JSON(http.StatusServiceUnavailable, gin.H{"error": "Content service not available"}) + return + } else { + serviceFound = true + } + } else { + serviceFound = true + } + + if !serviceFound { + c.JSON(http.StatusServiceUnavailable, gin.H{"error": "Content service not available"}) + return + } + + endpoint := fmt.Sprintf("http://%s.%s.svc:8080", serviceName, project) + log.Printf("DeleteSessionWorkspaceFile: using service %s for session %s, path=%s", serviceName, session, absPath) + + // Use DELETE request with path in body + wreq := struct { + Path string `json:"path"` + }{Path: absPath} + b, err := json.Marshal(wreq) + if err != nil { + log.Printf("DeleteSessionWorkspaceFile: failed to marshal request: %v", err) + c.JSON(http.StatusInternalServerError, gin.H{"error": "Failed to prepare request"}) + return + } + req, err := http.NewRequestWithContext(c.Request.Context(), http.MethodDelete, endpoint+"/content/delete", strings.NewReader(string(b))) + if err != nil { + log.Printf("DeleteSessionWorkspaceFile: failed to create HTTP request: %v", err) + c.JSON(http.StatusInternalServerError, gin.H{"error": "Failed to create request"}) + return + } + if strings.TrimSpace(token) != "" { + req.Header.Set("Authorization", token) + } + req.Header.Set("Content-Type", "application/json") + client := &http.Client{Timeout: 4 * time.Second} + resp, err := client.Do(req) + if err != nil { + c.JSON(http.StatusServiceUnavailable, gin.H{"error": err.Error()}) + return + } + defer resp.Body.Close() + + // Always return JSON for consistency with frontend expectations + if resp.StatusCode >= 200 && resp.StatusCode < 300 { + c.JSON(http.StatusOK, gin.H{"message": "File deleted successfully"}) + } else { + rb, err := io.ReadAll(resp.Body) + if err != nil { + log.Printf("DeleteSessionWorkspaceFile: 
failed to read error response: %v", err) + c.JSON(http.StatusInternalServerError, gin.H{"error": "Failed to delete file"}) + return + } + // Try to parse error from content service, otherwise use generic message + var errResp map[string]interface{} + if err := json.Unmarshal(rb, &errResp); err == nil { + c.JSON(resp.StatusCode, errResp) + } else { + c.JSON(resp.StatusCode, gin.H{"error": "Failed to delete file"}) + } + } +} + // PushSessionRepo proxies a push request for a given session repo to the per-job content service. // POST /api/projects/:projectName/agentic-sessions/:sessionName/github/push // Body: { repoIndex: number, commitMessage?: string, branch?: string } @@ -2676,8 +3082,18 @@ func PushSessionRepo(c *gin.Context) { "branch": resolvedBranch, "outputRepoUrl": resolvedOutputURL, } - b, _ := json.Marshal(payload) - req, _ := http.NewRequestWithContext(c.Request.Context(), http.MethodPost, endpoint+"/content/github/push", strings.NewReader(string(b))) + b, err := json.Marshal(payload) + if err != nil { + log.Printf("pushSessionRepo: failed to marshal request: %v", err) + c.JSON(http.StatusInternalServerError, gin.H{"error": "Failed to prepare request"}) + return + } + req, err := http.NewRequestWithContext(c.Request.Context(), http.MethodPost, endpoint+"/content/github/push", strings.NewReader(string(b))) + if err != nil { + log.Printf("pushSessionRepo: failed to create HTTP request: %v", err) + c.JSON(http.StatusInternalServerError, gin.H{"error": "Failed to create request"}) + return + } if v := c.GetHeader("Authorization"); v != "" { req.Header.Set("Authorization", v) } @@ -2729,7 +3145,12 @@ func PushSessionRepo(c *gin.Context) { return } defer resp.Body.Close() - bodyBytes, _ := io.ReadAll(resp.Body) + bodyBytes, err := io.ReadAll(resp.Body) + if err != nil { + log.Printf("pushSessionRepo: failed to read response body: %v", err) + c.JSON(http.StatusInternalServerError, gin.H{"error": "Failed to read response from content service"}) + return + } if resp.StatusCode < 200 || resp.StatusCode >= 300 { log.Printf("pushSessionRepo: content returned status=%d body.snip=%q", resp.StatusCode, func() string { s := string(bodyBytes) @@ -2783,8 +3204,18 @@ func AbandonSessionRepo(c *gin.Context) { payload := map[string]interface{}{ "repoPath": repoPath, } - b, _ := json.Marshal(payload) - req, _ := http.NewRequestWithContext(c.Request.Context(), http.MethodPost, endpoint+"/content/github/abandon", strings.NewReader(string(b))) + b, err := json.Marshal(payload) + if err != nil { + log.Printf("abandonSessionRepo: failed to marshal request: %v", err) + c.JSON(http.StatusInternalServerError, gin.H{"error": "Failed to prepare request"}) + return + } + req, err := http.NewRequestWithContext(c.Request.Context(), http.MethodPost, endpoint+"/content/github/abandon", strings.NewReader(string(b))) + if err != nil { + log.Printf("abandonSessionRepo: failed to create HTTP request: %v", err) + c.JSON(http.StatusInternalServerError, gin.H{"error": "Failed to create request"}) + return + } if v := c.GetHeader("Authorization"); v != "" { req.Header.Set("Authorization", v) } @@ -2801,7 +3232,12 @@ func AbandonSessionRepo(c *gin.Context) { return } defer resp.Body.Close() - bodyBytes, _ := io.ReadAll(resp.Body) + bodyBytes, err := io.ReadAll(resp.Body) + if err != nil { + log.Printf("abandonSessionRepo: failed to read response body: %v", err) + c.JSON(http.StatusInternalServerError, gin.H{"error": "Failed to read response from content service"}) + return + } if resp.StatusCode < 200 || resp.StatusCode >= 
300 { log.Printf("abandonSessionRepo: content returned status=%d body=%s", resp.StatusCode, string(bodyBytes)) c.Data(resp.StatusCode, "application/json", bodyBytes) @@ -2860,7 +3296,19 @@ func DiffSessionRepo(c *gin.Context) { return } defer resp.Body.Close() - bodyBytes, _ := io.ReadAll(resp.Body) + bodyBytes, err := io.ReadAll(resp.Body) + if err != nil { + log.Printf("DiffSessionRepo: failed to read response body: %v", err) + c.JSON(http.StatusOK, gin.H{ + "files": gin.H{ + "added": 0, + "removed": 0, + }, + "total_added": 0, + "total_removed": 0, + }) + return + } c.Data(resp.StatusCode, resp.Header.Get("Content-Type"), bodyBytes) } @@ -2893,7 +3341,12 @@ func GetGitStatus(c *gin.Context) { endpoint := fmt.Sprintf("http://%s.%s.svc:8080/content/git-status?path=%s", serviceName, project, url.QueryEscape(absPath)) - req, _ := http.NewRequestWithContext(c.Request.Context(), http.MethodGet, endpoint, nil) + req, err := http.NewRequestWithContext(c.Request.Context(), http.MethodGet, endpoint, nil) + if err != nil { + log.Printf("GetGitStatus: failed to create HTTP request: %v", err) + c.JSON(http.StatusInternalServerError, gin.H{"error": "Failed to create request"}) + return + } if v := c.GetHeader("Authorization"); v != "" { req.Header.Set("Authorization", v) } @@ -2905,7 +3358,12 @@ func GetGitStatus(c *gin.Context) { } defer resp.Body.Close() - bodyBytes, _ := io.ReadAll(resp.Body) + bodyBytes, err := io.ReadAll(resp.Body) + if err != nil { + log.Printf("GetGitStatus: failed to read response body: %v", err) + c.JSON(http.StatusInternalServerError, gin.H{"error": "Failed to read response from content service"}) + return + } c.Data(resp.StatusCode, resp.Header.Get("Content-Type"), bodyBytes) } @@ -2954,13 +3412,23 @@ func ConfigureGitRemote(c *gin.Context) { endpoint := fmt.Sprintf("http://%s.%s.svc:8080/content/git-configure-remote", serviceName, project) - reqBody, _ := json.Marshal(map[string]interface{}{ + reqBody, err := json.Marshal(map[string]interface{}{ "path": absPath, "remoteUrl": body.RemoteURL, "branch": body.Branch, }) + if err != nil { + log.Printf("ConfigureGitRemote: failed to marshal request: %v", err) + c.JSON(http.StatusInternalServerError, gin.H{"error": "Failed to prepare request"}) + return + } - req, _ := http.NewRequestWithContext(c.Request.Context(), http.MethodPost, endpoint, strings.NewReader(string(reqBody))) + req, err := http.NewRequestWithContext(c.Request.Context(), http.MethodPost, endpoint, strings.NewReader(string(reqBody))) + if err != nil { + log.Printf("ConfigureGitRemote: failed to create HTTP request: %v", err) + c.JSON(http.StatusInternalServerError, gin.H{"error": "Failed to create request"}) + return + } req.Header.Set("Content-Type", "application/json") if v := c.GetHeader("Authorization"); v != "" { req.Header.Set("Authorization", v) @@ -3012,7 +3480,12 @@ func ConfigureGitRemote(c *gin.Context) { } } - bodyBytes, _ := io.ReadAll(resp.Body) + bodyBytes, err := io.ReadAll(resp.Body) + if err != nil { + log.Printf("ConfigureGitRemote: failed to read response body: %v", err) + c.JSON(http.StatusInternalServerError, gin.H{"error": "Failed to read response from content service"}) + return + } c.Data(resp.StatusCode, resp.Header.Get("Content-Type"), bodyBytes) } @@ -3056,13 +3529,23 @@ func SynchronizeGit(c *gin.Context) { endpoint := fmt.Sprintf("http://%s.%s.svc:8080/content/git-sync", serviceName, project) - reqBody, _ := json.Marshal(map[string]interface{}{ + reqBody, err := json.Marshal(map[string]interface{}{ "path": absPath, "message": 
body.Message, "branch": body.Branch, }) + if err != nil { + log.Printf("SynchronizeGit: failed to marshal request: %v", err) + c.JSON(http.StatusInternalServerError, gin.H{"error": "Failed to prepare request"}) + return + } - req, _ := http.NewRequestWithContext(c.Request.Context(), http.MethodPost, endpoint, strings.NewReader(string(reqBody))) + req, err := http.NewRequestWithContext(c.Request.Context(), http.MethodPost, endpoint, strings.NewReader(string(reqBody))) + if err != nil { + log.Printf("SynchronizeGit: failed to create HTTP request: %v", err) + c.JSON(http.StatusInternalServerError, gin.H{"error": "Failed to create request"}) + return + } req.Header.Set("Content-Type", "application/json") if v := c.GetHeader("Authorization"); v != "" { req.Header.Set("Authorization", v) @@ -3075,7 +3558,12 @@ func SynchronizeGit(c *gin.Context) { } defer resp.Body.Close() - bodyBytes, _ := io.ReadAll(resp.Body) + bodyBytes, err := io.ReadAll(resp.Body) + if err != nil { + log.Printf("SynchronizeGit: failed to read response body: %v", err) + c.JSON(http.StatusInternalServerError, gin.H{"error": "Failed to read response from content service"}) + return + } c.Data(resp.StatusCode, resp.Header.Get("Content-Type"), bodyBytes) } @@ -3122,7 +3610,12 @@ func GetGitMergeStatus(c *gin.Context) { } defer resp.Body.Close() - bodyBytes, _ := io.ReadAll(resp.Body) + bodyBytes, err := io.ReadAll(resp.Body) + if err != nil { + log.Printf("GetGitMergeStatus: failed to read response body: %v", err) + c.JSON(http.StatusInternalServerError, gin.H{"error": "Failed to read response from content service"}) + return + } c.Data(resp.StatusCode, resp.Header.Get("Content-Type"), bodyBytes) } @@ -3164,12 +3657,22 @@ func GitPullSession(c *gin.Context) { endpoint := fmt.Sprintf("http://%s.%s.svc:8080/content/git-pull", serviceName, project) - reqBody, _ := json.Marshal(map[string]interface{}{ + reqBody, err := json.Marshal(map[string]interface{}{ "path": absPath, "branch": body.Branch, }) + if err != nil { + log.Printf("GitPullSession: failed to marshal request: %v", err) + c.JSON(http.StatusInternalServerError, gin.H{"error": "Failed to prepare request"}) + return + } - req, _ := http.NewRequestWithContext(c.Request.Context(), http.MethodPost, endpoint, strings.NewReader(string(reqBody))) + req, err := http.NewRequestWithContext(c.Request.Context(), http.MethodPost, endpoint, strings.NewReader(string(reqBody))) + if err != nil { + log.Printf("GitPullSession: failed to create HTTP request: %v", err) + c.JSON(http.StatusInternalServerError, gin.H{"error": "Failed to create request"}) + return + } req.Header.Set("Content-Type", "application/json") if v := c.GetHeader("Authorization"); v != "" { req.Header.Set("Authorization", v) @@ -3182,7 +3685,12 @@ func GitPullSession(c *gin.Context) { } defer resp.Body.Close() - bodyBytes, _ := io.ReadAll(resp.Body) + bodyBytes, err := io.ReadAll(resp.Body) + if err != nil { + log.Printf("GitPullSession: failed to read response body: %v", err) + c.JSON(http.StatusInternalServerError, gin.H{"error": "Failed to read response from content service"}) + return + } c.Data(resp.StatusCode, resp.Header.Get("Content-Type"), bodyBytes) } @@ -3228,13 +3736,23 @@ func GitPushSession(c *gin.Context) { endpoint := fmt.Sprintf("http://%s.%s.svc:8080/content/git-push", serviceName, project) - reqBody, _ := json.Marshal(map[string]interface{}{ + reqBody, err := json.Marshal(map[string]interface{}{ "path": absPath, "branch": body.Branch, "message": body.Message, }) + if err != nil { + 
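[Editor's note] The same marshal → NewRequestWithContext → Do → ReadAll → relay sequence recurs across these git proxy handlers. A consolidated helper could look roughly like the sketch below; proxyJSON is illustrative only and is not introduced by this PR, which keeps the pattern inline.

```go
package handlers

import (
	"context"
	"encoding/json"
	"fmt"
	"io"
	"net/http"
	"strings"
	"time"
)

// proxyJSON marshals a payload, forwards it to the content service with the
// caller's Authorization header, and returns the status code and body so the
// handler can relay them with c.Data.
func proxyJSON(ctx context.Context, method, url, authHeader string, payload any) (int, []byte, error) {
	body, err := json.Marshal(payload)
	if err != nil {
		return 0, nil, fmt.Errorf("marshal payload: %w", err)
	}
	req, err := http.NewRequestWithContext(ctx, method, url, strings.NewReader(string(body)))
	if err != nil {
		return 0, nil, fmt.Errorf("build request: %w", err)
	}
	req.Header.Set("Content-Type", "application/json")
	if strings.TrimSpace(authHeader) != "" {
		req.Header.Set("Authorization", authHeader)
	}
	resp, err := (&http.Client{Timeout: 30 * time.Second}).Do(req)
	if err != nil {
		return 0, nil, fmt.Errorf("call content service: %w", err)
	}
	defer resp.Body.Close()
	b, err := io.ReadAll(resp.Body)
	if err != nil {
		return resp.StatusCode, nil, fmt.Errorf("read response: %w", err)
	}
	return resp.StatusCode, b, nil
}
```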
log.Printf("GitPushSession: failed to marshal request: %v", err) + c.JSON(http.StatusInternalServerError, gin.H{"error": "Failed to prepare request"}) + return + } - req, _ := http.NewRequestWithContext(c.Request.Context(), http.MethodPost, endpoint, strings.NewReader(string(reqBody))) + req, err := http.NewRequestWithContext(c.Request.Context(), http.MethodPost, endpoint, strings.NewReader(string(reqBody))) + if err != nil { + log.Printf("GitPushSession: failed to create HTTP request: %v", err) + c.JSON(http.StatusInternalServerError, gin.H{"error": "Failed to create request"}) + return + } req.Header.Set("Content-Type", "application/json") if v := c.GetHeader("Authorization"); v != "" { req.Header.Set("Authorization", v) @@ -3247,7 +3765,12 @@ func GitPushSession(c *gin.Context) { } defer resp.Body.Close() - bodyBytes, _ := io.ReadAll(resp.Body) + bodyBytes, err := io.ReadAll(resp.Body) + if err != nil { + log.Printf("GitPushSession: failed to read response body: %v", err) + c.JSON(http.StatusInternalServerError, gin.H{"error": "Failed to read response from content service"}) + return + } c.Data(resp.StatusCode, resp.Header.Get("Content-Type"), bodyBytes) } @@ -3286,12 +3809,22 @@ func GitCreateBranchSession(c *gin.Context) { endpoint := fmt.Sprintf("http://%s.%s.svc:8080/content/git-create-branch", serviceName, project) - reqBody, _ := json.Marshal(map[string]interface{}{ + reqBody, err := json.Marshal(map[string]interface{}{ "path": absPath, "branchName": body.BranchName, }) + if err != nil { + log.Printf("GitCreateBranchSession: failed to marshal request: %v", err) + c.JSON(http.StatusInternalServerError, gin.H{"error": "Failed to prepare request"}) + return + } - req, _ := http.NewRequestWithContext(c.Request.Context(), http.MethodPost, endpoint, strings.NewReader(string(reqBody))) + req, err := http.NewRequestWithContext(c.Request.Context(), http.MethodPost, endpoint, strings.NewReader(string(reqBody))) + if err != nil { + log.Printf("GitCreateBranchSession: failed to create HTTP request: %v", err) + c.JSON(http.StatusInternalServerError, gin.H{"error": "Failed to create request"}) + return + } req.Header.Set("Content-Type", "application/json") if v := c.GetHeader("Authorization"); v != "" { req.Header.Set("Authorization", v) @@ -3304,7 +3837,12 @@ func GitCreateBranchSession(c *gin.Context) { } defer resp.Body.Close() - bodyBytes, _ := io.ReadAll(resp.Body) + bodyBytes, err := io.ReadAll(resp.Body) + if err != nil { + log.Printf("GitCreateBranchSession: failed to read response body: %v", err) + c.JSON(http.StatusInternalServerError, gin.H{"error": "Failed to read response from content service"}) + return + } c.Data(resp.StatusCode, resp.Header.Get("Content-Type"), bodyBytes) } @@ -3335,7 +3873,12 @@ func GitListBranchesSession(c *gin.Context) { endpoint := fmt.Sprintf("http://%s.%s.svc:8080/content/git-list-branches?path=%s", serviceName, project, url.QueryEscape(absPath)) - req, _ := http.NewRequestWithContext(c.Request.Context(), http.MethodGet, endpoint, nil) + req, err := http.NewRequestWithContext(c.Request.Context(), http.MethodGet, endpoint, nil) + if err != nil { + log.Printf("GitListBranchesSession: failed to create HTTP request: %v", err) + c.JSON(http.StatusInternalServerError, gin.H{"error": "Failed to create request"}) + return + } if v := c.GetHeader("Authorization"); v != "" { req.Header.Set("Authorization", v) } @@ -3347,6 +3890,11 @@ func GitListBranchesSession(c *gin.Context) { } defer resp.Body.Close() - bodyBytes, _ := io.ReadAll(resp.Body) + bodyBytes, err := 
io.ReadAll(resp.Body) + if err != nil { + log.Printf("GitListBranchesSession: failed to read response body: %v", err) + c.JSON(http.StatusInternalServerError, gin.H{"error": "Failed to read response from content service"}) + return + } c.Data(resp.StatusCode, resp.Header.Get("Content-Type"), bodyBytes) } diff --git a/components/backend/pathutil/pathutil.go b/components/backend/pathutil/pathutil.go new file mode 100644 index 000000000..1df0c1fa2 --- /dev/null +++ b/components/backend/pathutil/pathutil.go @@ -0,0 +1,41 @@ +// Package pathutil provides utilities for secure path validation and manipulation. +package pathutil + +import ( + "path/filepath" + "strings" +) + +// IsPathWithinBase uses filepath.Rel to robustly verify that abs is within baseDir. +// This is more secure than strings.HasPrefix across different OS platforms. +// +// Security considerations: +// - Uses filepath.Clean on both paths to normalize separators and remove .. sequences +// - Uses filepath.Rel for platform-independent path validation +// - Checks for ".." prefix in relative path to detect traversal attempts +// +// Returns true if abs is within baseDir, false otherwise. +func IsPathWithinBase(abs, baseDir string) bool { + // Clean both paths before comparison to prevent path traversal attacks + // filepath.Clean normalizes paths and removes . and .. components + cleanBase := filepath.Clean(baseDir) + cleanAbs := filepath.Clean(abs) + + // Compute relative path from base to abs + relPath, err := filepath.Rel(cleanBase, cleanAbs) + if err != nil { + // filepath.Rel returns error if paths are on different volumes (Windows) + // or if one path cannot be made relative to the other + return false + } + + // If relPath starts with "..", it means abs is outside baseDir + // For example: + // base=/app/workspace, abs=/app/workspace/file -> relPath=file (OK) + // base=/app/workspace, abs=/app/secrets -> relPath=../secrets (BLOCKED) + if strings.HasPrefix(relPath, "..") { + return false + } + + return true +} diff --git a/components/backend/pathutil/pathutil_test.go b/components/backend/pathutil/pathutil_test.go new file mode 100644 index 000000000..3268c304e --- /dev/null +++ b/components/backend/pathutil/pathutil_test.go @@ -0,0 +1,102 @@ +package pathutil + +import ( + "path/filepath" + "testing" +) + +func TestIsPathWithinBase(t *testing.T) { + tests := []struct { + name string + abs string + baseDir string + expected bool + }{ + { + name: "valid path within base", + abs: "/app/workspace/file.txt", + baseDir: "/app/workspace", + expected: true, + }, + { + name: "valid nested path", + abs: "/app/workspace/subdir/file.txt", + baseDir: "/app/workspace", + expected: true, + }, + { + name: "same path", + abs: "/app/workspace", + baseDir: "/app/workspace", + expected: true, + }, + { + name: "path traversal with ..", + abs: "/app/workspace/../secrets/file.txt", + baseDir: "/app/workspace", + expected: false, + }, + { + name: "path outside base", + abs: "/app/secrets/file.txt", + baseDir: "/app/workspace", + expected: false, + }, + { + name: "path with trailing slash", + abs: "/app/workspace/file.txt/", + baseDir: "/app/workspace", + expected: true, + }, + { + name: "path with multiple ..", + abs: "/app/workspace/subdir/../../secrets/file.txt", + baseDir: "/app/workspace", + expected: false, + }, + { + name: "path with . 
components", + abs: "/app/workspace/./subdir/./file.txt", + baseDir: "/app/workspace", + expected: true, + }, + { + name: "relative base and abs", + abs: "workspace/file.txt", + baseDir: "workspace", + expected: true, + }, + { + name: "relative path traversal", + abs: "workspace/../secrets/file.txt", + baseDir: "workspace", + expected: false, + }, + { + name: "Windows-style path (forward slash)", + abs: "C:/app/workspace/file.txt", + baseDir: "C:/app/workspace", + expected: true, + }, + { + name: "Windows-style path traversal", + abs: "C:/app/workspace/../secrets/file.txt", + baseDir: "C:/app/workspace", + expected: false, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + result := IsPathWithinBase(tt.abs, tt.baseDir) + if result != tt.expected { + // Show cleaned paths for debugging + cleanAbs := filepath.Clean(tt.abs) + cleanBase := filepath.Clean(tt.baseDir) + relPath, _ := filepath.Rel(cleanBase, cleanAbs) + t.Errorf("IsPathWithinBase(%q, %q) = %v, want %v\n cleanAbs=%q\n cleanBase=%q\n relPath=%q", + tt.abs, tt.baseDir, result, tt.expected, cleanAbs, cleanBase, relPath) + } + }) + } +} diff --git a/components/backend/routes.go b/components/backend/routes.go index 74d4412f9..a76273fd1 100644 --- a/components/backend/routes.go +++ b/components/backend/routes.go @@ -11,6 +11,7 @@ func registerContentRoutes(r *gin.Engine) { r.POST("/content/write", handlers.ContentWrite) r.GET("/content/file", handlers.ContentRead) r.GET("/content/list", handlers.ContentList) + r.DELETE("/content/delete", handlers.ContentDelete) r.POST("/content/github/push", handlers.ContentGitPush) r.POST("/content/github/abandon", handlers.ContentGitAbandon) r.GET("/content/github/diff", handlers.ContentGitDiff) @@ -60,6 +61,7 @@ func registerRoutes(r *gin.Engine) { projectGroup.GET("/agentic-sessions/:sessionName/workspace", handlers.ListSessionWorkspace) projectGroup.GET("/agentic-sessions/:sessionName/workspace/*path", handlers.GetSessionWorkspaceFile) projectGroup.PUT("/agentic-sessions/:sessionName/workspace/*path", handlers.PutSessionWorkspaceFile) + projectGroup.DELETE("/agentic-sessions/:sessionName/workspace/*path", handlers.DeleteSessionWorkspaceFile) projectGroup.POST("/agentic-sessions/:sessionName/github/push", handlers.PushSessionRepo) projectGroup.POST("/agentic-sessions/:sessionName/github/abandon", handlers.AbandonSessionRepo) projectGroup.GET("/agentic-sessions/:sessionName/github/diff", handlers.DiffSessionRepo) diff --git a/components/frontend/.env.example b/components/frontend/.env.example index 01a918caf..2af227336 100644 --- a/components/frontend/.env.example +++ b/components/frontend/.env.example @@ -22,3 +22,12 @@ OC_EMAIL= # Set to '1' or 'true' to enable ENABLE_OC_WHOAMI=1 +# File upload size limits (in bytes) +# These control the maximum file sizes allowed for different file types +# MAX_UPLOAD_SIZE_DOCUMENTS: Maximum size for document files (default: 716800 = 700KB) +# MAX_UPLOAD_SIZE_IMAGES: Maximum size for image uploads before compression (default: 3145728 = 3MB) +# IMAGE_COMPRESSION_TARGET: Target size for compressed images (default: 358400 = 350KB) +MAX_UPLOAD_SIZE_DOCUMENTS=716800 +MAX_UPLOAD_SIZE_IMAGES=3145728 +IMAGE_COMPRESSION_TARGET=358400 + diff --git a/components/frontend/package-lock.json b/components/frontend/package-lock.json index 0a1085e88..3856e2ed5 100644 --- a/components/frontend/package-lock.json +++ b/components/frontend/package-lock.json @@ -25,6 +25,7 @@ "class-variance-authority": "^0.7.1", "clsx": "^2.1.1", "date-fns": "^4.1.0", + 
"file-type": "^21.1.1", "highlight.js": "^11.11.1", "lucide-react": "^0.542.0", "next": "15.5.7", @@ -36,6 +37,7 @@ "react-resizable-panels": "^3.0.6", "rehype-highlight": "^7.0.2", "remark-gfm": "^4.0.1", + "sharp": "^0.33.0", "tailwind-merge": "^3.3.1", "zod": "^4.1.5" }, @@ -65,6 +67,16 @@ "url": "https://github.com/sponsors/sindresorhus" } }, + "node_modules/@borewit/text-codec": { + "version": "0.1.1", + "resolved": "https://registry.npmjs.org/@borewit/text-codec/-/text-codec-0.1.1.tgz", + "integrity": "sha512-5L/uBxmjaCIX5h8Z+uu+kA9BQLkc/Wl06UGR5ajNRxu+/XjonB5i8JpgFMrPj3LXTCPA0pv8yxUvbUi+QthGGA==", + "license": "MIT", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/Borewit" + } + }, "node_modules/@emnapi/core": { "version": "1.7.1", "resolved": "https://registry.npmjs.org/@emnapi/core/-/core-1.7.1.tgz", @@ -355,9 +367,9 @@ } }, "node_modules/@img/sharp-darwin-arm64": { - "version": "0.34.5", - "resolved": "https://registry.npmjs.org/@img/sharp-darwin-arm64/-/sharp-darwin-arm64-0.34.5.tgz", - "integrity": "sha512-imtQ3WMJXbMY4fxb/Ndp6HBTNVtWCUI0WdobyheGf5+ad6xX8VIDO8u2xE4qc/fr08CKG/7dDseFtn6M6g/r3w==", + "version": "0.33.5", + "resolved": "https://registry.npmjs.org/@img/sharp-darwin-arm64/-/sharp-darwin-arm64-0.33.5.tgz", + "integrity": "sha512-UT4p+iz/2H4twwAoLCqfA9UH5pI6DggwKEGuaPy7nCVQ8ZsiY5PIcrRvD1DzuY3qYL07NtIQcWnBSY/heikIFQ==", "cpu": [ "arm64" ], @@ -373,13 +385,13 @@ "url": "https://opencollective.com/libvips" }, "optionalDependencies": { - "@img/sharp-libvips-darwin-arm64": "1.2.4" + "@img/sharp-libvips-darwin-arm64": "1.0.4" } }, "node_modules/@img/sharp-darwin-x64": { - "version": "0.34.5", - "resolved": "https://registry.npmjs.org/@img/sharp-darwin-x64/-/sharp-darwin-x64-0.34.5.tgz", - "integrity": "sha512-YNEFAF/4KQ/PeW0N+r+aVVsoIY0/qxxikF2SWdp+NRkmMB7y9LBZAVqQ4yhGCm/H3H270OSykqmQMKLBhBJDEw==", + "version": "0.33.5", + "resolved": "https://registry.npmjs.org/@img/sharp-darwin-x64/-/sharp-darwin-x64-0.33.5.tgz", + "integrity": "sha512-fyHac4jIc1ANYGRDxtiqelIbdWkIuQaI84Mv45KvGRRxSAa7o7d1ZKAOBaYbnepLC1WqxfpimdeWfvqqSGwR2Q==", "cpu": [ "x64" ], @@ -395,13 +407,13 @@ "url": "https://opencollective.com/libvips" }, "optionalDependencies": { - "@img/sharp-libvips-darwin-x64": "1.2.4" + "@img/sharp-libvips-darwin-x64": "1.0.4" } }, "node_modules/@img/sharp-libvips-darwin-arm64": { - "version": "1.2.4", - "resolved": "https://registry.npmjs.org/@img/sharp-libvips-darwin-arm64/-/sharp-libvips-darwin-arm64-1.2.4.tgz", - "integrity": "sha512-zqjjo7RatFfFoP0MkQ51jfuFZBnVE2pRiaydKJ1G/rHZvnsrHAOcQALIi9sA5co5xenQdTugCvtb1cuf78Vf4g==", + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/@img/sharp-libvips-darwin-arm64/-/sharp-libvips-darwin-arm64-1.0.4.tgz", + "integrity": "sha512-XblONe153h0O2zuFfTAbQYAX2JhYmDHeWikp1LM9Hul9gVPjFY427k6dFEcOL72O01QxQsWi761svJ/ev9xEDg==", "cpu": [ "arm64" ], @@ -415,9 +427,9 @@ } }, "node_modules/@img/sharp-libvips-darwin-x64": { - "version": "1.2.4", - "resolved": "https://registry.npmjs.org/@img/sharp-libvips-darwin-x64/-/sharp-libvips-darwin-x64-1.2.4.tgz", - "integrity": "sha512-1IOd5xfVhlGwX+zXv2N93k0yMONvUlANylbJw1eTah8K/Jtpi15KC+WSiaX/nBmbm2HxRM1gZ0nSdjSsrZbGKg==", + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/@img/sharp-libvips-darwin-x64/-/sharp-libvips-darwin-x64-1.0.4.tgz", + "integrity": "sha512-xnGR8YuZYfJGmWPvmlunFaWJsb9T/AO2ykoP3Fz/0X5XV2aoYBPkX6xqCQvUTKKiLddarLaxpzNe+b1hjeWHAQ==", "cpu": [ "x64" ], @@ -431,9 +443,9 @@ } }, "node_modules/@img/sharp-libvips-linux-arm": { - "version": 
"1.2.4", - "resolved": "https://registry.npmjs.org/@img/sharp-libvips-linux-arm/-/sharp-libvips-linux-arm-1.2.4.tgz", - "integrity": "sha512-bFI7xcKFELdiNCVov8e44Ia4u2byA+l3XtsAj+Q8tfCwO6BQ8iDojYdvoPMqsKDkuoOo+X6HZA0s0q11ANMQ8A==", + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/@img/sharp-libvips-linux-arm/-/sharp-libvips-linux-arm-1.0.5.tgz", + "integrity": "sha512-gvcC4ACAOPRNATg/ov8/MnbxFDJqf/pDePbBnuBDcjsI8PssmjoKMAz4LtLaVi+OnSb5FK/yIOamqDwGmXW32g==", "cpu": [ "arm" ], @@ -447,9 +459,9 @@ } }, "node_modules/@img/sharp-libvips-linux-arm64": { - "version": "1.2.4", - "resolved": "https://registry.npmjs.org/@img/sharp-libvips-linux-arm64/-/sharp-libvips-linux-arm64-1.2.4.tgz", - "integrity": "sha512-excjX8DfsIcJ10x1Kzr4RcWe1edC9PquDRRPx3YVCvQv+U5p7Yin2s32ftzikXojb1PIFc/9Mt28/y+iRklkrw==", + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/@img/sharp-libvips-linux-arm64/-/sharp-libvips-linux-arm64-1.0.4.tgz", + "integrity": "sha512-9B+taZ8DlyyqzZQnoeIvDVR/2F4EbMepXMc/NdVbkzsJbzkUjhXv/70GQJ7tdLA4YJgNP25zukcxpX2/SueNrA==", "cpu": [ "arm64" ], @@ -495,9 +507,9 @@ } }, "node_modules/@img/sharp-libvips-linux-s390x": { - "version": "1.2.4", - "resolved": "https://registry.npmjs.org/@img/sharp-libvips-linux-s390x/-/sharp-libvips-linux-s390x-1.2.4.tgz", - "integrity": "sha512-qmp9VrzgPgMoGZyPvrQHqk02uyjA0/QrTO26Tqk6l4ZV0MPWIW6LTkqOIov+J1yEu7MbFQaDpwdwJKhbJvuRxQ==", + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/@img/sharp-libvips-linux-s390x/-/sharp-libvips-linux-s390x-1.0.4.tgz", + "integrity": "sha512-u7Wz6ntiSSgGSGcjZ55im6uvTrOxSIS8/dgoVMoiGE9I6JAfU50yH5BoDlYA1tcuGS7g/QNtetJnxA6QEsCVTA==", "cpu": [ "s390x" ], @@ -511,9 +523,9 @@ } }, "node_modules/@img/sharp-libvips-linux-x64": { - "version": "1.2.4", - "resolved": "https://registry.npmjs.org/@img/sharp-libvips-linux-x64/-/sharp-libvips-linux-x64-1.2.4.tgz", - "integrity": "sha512-tJxiiLsmHc9Ax1bz3oaOYBURTXGIRDODBqhveVHonrHJ9/+k89qbLl0bcJns+e4t4rvaNBxaEZsFtSfAdquPrw==", + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/@img/sharp-libvips-linux-x64/-/sharp-libvips-linux-x64-1.0.4.tgz", + "integrity": "sha512-MmWmQ3iPFZr0Iev+BAgVMb3ZyC4KeFc3jFxnNbEPas60e1cIfevbtuyf9nDGIzOaW9PdnDciJm+wFFaTlj5xYw==", "cpu": [ "x64" ], @@ -527,9 +539,9 @@ } }, "node_modules/@img/sharp-libvips-linuxmusl-arm64": { - "version": "1.2.4", - "resolved": "https://registry.npmjs.org/@img/sharp-libvips-linuxmusl-arm64/-/sharp-libvips-linuxmusl-arm64-1.2.4.tgz", - "integrity": "sha512-FVQHuwx1IIuNow9QAbYUzJ+En8KcVm9Lk5+uGUQJHaZmMECZmOlix9HnH7n1TRkXMS0pGxIJokIVB9SuqZGGXw==", + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/@img/sharp-libvips-linuxmusl-arm64/-/sharp-libvips-linuxmusl-arm64-1.0.4.tgz", + "integrity": "sha512-9Ti+BbTYDcsbp4wfYib8Ctm1ilkugkA/uscUn6UXK1ldpC1JjiXbLfFZtRlBhjPZ5o1NCLiDbg8fhUPKStHoTA==", "cpu": [ "arm64" ], @@ -543,9 +555,9 @@ } }, "node_modules/@img/sharp-libvips-linuxmusl-x64": { - "version": "1.2.4", - "resolved": "https://registry.npmjs.org/@img/sharp-libvips-linuxmusl-x64/-/sharp-libvips-linuxmusl-x64-1.2.4.tgz", - "integrity": "sha512-+LpyBk7L44ZIXwz/VYfglaX/okxezESc6UxDSoyo2Ks6Jxc4Y7sGjpgU9s4PMgqgjj1gZCylTieNamqA1MF7Dg==", + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/@img/sharp-libvips-linuxmusl-x64/-/sharp-libvips-linuxmusl-x64-1.0.4.tgz", + "integrity": "sha512-viYN1KX9m+/hGkJtvYYp+CCLgnJXwiQB39damAO7WMdKWlIhmYTfHjwSbQeUK/20vY154mwezd9HflVFM1wVSw==", "cpu": [ "x64" ], @@ -559,9 +571,9 @@ } }, "node_modules/@img/sharp-linux-arm": { - "version": 
"0.34.5", - "resolved": "https://registry.npmjs.org/@img/sharp-linux-arm/-/sharp-linux-arm-0.34.5.tgz", - "integrity": "sha512-9dLqsvwtg1uuXBGZKsxem9595+ujv0sJ6Vi8wcTANSFpwV/GONat5eCkzQo/1O6zRIkh0m/8+5BjrRr7jDUSZw==", + "version": "0.33.5", + "resolved": "https://registry.npmjs.org/@img/sharp-linux-arm/-/sharp-linux-arm-0.33.5.tgz", + "integrity": "sha512-JTS1eldqZbJxjvKaAkxhZmBqPRGmxgu+qFKSInv8moZ2AmT5Yib3EQ1c6gp493HvrvV8QgdOXdyaIBrhvFhBMQ==", "cpu": [ "arm" ], @@ -577,13 +589,13 @@ "url": "https://opencollective.com/libvips" }, "optionalDependencies": { - "@img/sharp-libvips-linux-arm": "1.2.4" + "@img/sharp-libvips-linux-arm": "1.0.5" } }, "node_modules/@img/sharp-linux-arm64": { - "version": "0.34.5", - "resolved": "https://registry.npmjs.org/@img/sharp-linux-arm64/-/sharp-linux-arm64-0.34.5.tgz", - "integrity": "sha512-bKQzaJRY/bkPOXyKx5EVup7qkaojECG6NLYswgktOZjaXecSAeCWiZwwiFf3/Y+O1HrauiE3FVsGxFg8c24rZg==", + "version": "0.33.5", + "resolved": "https://registry.npmjs.org/@img/sharp-linux-arm64/-/sharp-linux-arm64-0.33.5.tgz", + "integrity": "sha512-JMVv+AMRyGOHtO1RFBiJy/MBsgz0x4AWrT6QoEVVTyh1E39TrCUpTRI7mx9VksGX4awWASxqCYLCV4wBZHAYxA==", "cpu": [ "arm64" ], @@ -599,7 +611,7 @@ "url": "https://opencollective.com/libvips" }, "optionalDependencies": { - "@img/sharp-libvips-linux-arm64": "1.2.4" + "@img/sharp-libvips-linux-arm64": "1.0.4" } }, "node_modules/@img/sharp-linux-ppc64": { @@ -647,9 +659,9 @@ } }, "node_modules/@img/sharp-linux-s390x": { - "version": "0.34.5", - "resolved": "https://registry.npmjs.org/@img/sharp-linux-s390x/-/sharp-linux-s390x-0.34.5.tgz", - "integrity": "sha512-nQtCk0PdKfho3eC5MrbQoigJ2gd1CgddUMkabUj+rBevs8tZ2cULOx46E7oyX+04WGfABgIwmMC0VqieTiR4jg==", + "version": "0.33.5", + "resolved": "https://registry.npmjs.org/@img/sharp-linux-s390x/-/sharp-linux-s390x-0.33.5.tgz", + "integrity": "sha512-y/5PCd+mP4CA/sPDKl2961b+C9d+vPAveS33s6Z3zfASk2j5upL6fXVPZi7ztePZ5CuH+1kW8JtvxgbuXHRa4Q==", "cpu": [ "s390x" ], @@ -665,13 +677,13 @@ "url": "https://opencollective.com/libvips" }, "optionalDependencies": { - "@img/sharp-libvips-linux-s390x": "1.2.4" + "@img/sharp-libvips-linux-s390x": "1.0.4" } }, "node_modules/@img/sharp-linux-x64": { - "version": "0.34.5", - "resolved": "https://registry.npmjs.org/@img/sharp-linux-x64/-/sharp-linux-x64-0.34.5.tgz", - "integrity": "sha512-MEzd8HPKxVxVenwAa+JRPwEC7QFjoPWuS5NZnBt6B3pu7EG2Ge0id1oLHZpPJdn3OQK+BQDiw9zStiHBTJQQQQ==", + "version": "0.33.5", + "resolved": "https://registry.npmjs.org/@img/sharp-linux-x64/-/sharp-linux-x64-0.33.5.tgz", + "integrity": "sha512-opC+Ok5pRNAzuvq1AG0ar+1owsu842/Ab+4qvU879ippJBHvyY5n2mxF1izXqkPYlGuP/M556uh53jRLJmzTWA==", "cpu": [ "x64" ], @@ -687,13 +699,13 @@ "url": "https://opencollective.com/libvips" }, "optionalDependencies": { - "@img/sharp-libvips-linux-x64": "1.2.4" + "@img/sharp-libvips-linux-x64": "1.0.4" } }, "node_modules/@img/sharp-linuxmusl-arm64": { - "version": "0.34.5", - "resolved": "https://registry.npmjs.org/@img/sharp-linuxmusl-arm64/-/sharp-linuxmusl-arm64-0.34.5.tgz", - "integrity": "sha512-fprJR6GtRsMt6Kyfq44IsChVZeGN97gTD331weR1ex1c1rypDEABN6Tm2xa1wE6lYb5DdEnk03NZPqA7Id21yg==", + "version": "0.33.5", + "resolved": "https://registry.npmjs.org/@img/sharp-linuxmusl-arm64/-/sharp-linuxmusl-arm64-0.33.5.tgz", + "integrity": "sha512-XrHMZwGQGvJg2V/oRSUfSAfjfPxO+4DkiRh6p2AFjLQztWUuY/o8Mq0eMQVIY7HJ1CDQUJlxGGZRw1a5bqmd1g==", "cpu": [ "arm64" ], @@ -709,13 +721,13 @@ "url": "https://opencollective.com/libvips" }, "optionalDependencies": { - "@img/sharp-libvips-linuxmusl-arm64": "1.2.4" + 
"@img/sharp-libvips-linuxmusl-arm64": "1.0.4" } }, "node_modules/@img/sharp-linuxmusl-x64": { - "version": "0.34.5", - "resolved": "https://registry.npmjs.org/@img/sharp-linuxmusl-x64/-/sharp-linuxmusl-x64-0.34.5.tgz", - "integrity": "sha512-Jg8wNT1MUzIvhBFxViqrEhWDGzqymo3sV7z7ZsaWbZNDLXRJZoRGrjulp60YYtV4wfY8VIKcWidjojlLcWrd8Q==", + "version": "0.33.5", + "resolved": "https://registry.npmjs.org/@img/sharp-linuxmusl-x64/-/sharp-linuxmusl-x64-0.33.5.tgz", + "integrity": "sha512-WT+d/cgqKkkKySYmqoZ8y3pxx7lx9vVejxW/W4DOFMYVSkErR+w7mf2u8m/y4+xHe7yY9DAXQMWQhpnMuFfScw==", "cpu": [ "x64" ], @@ -731,20 +743,20 @@ "url": "https://opencollective.com/libvips" }, "optionalDependencies": { - "@img/sharp-libvips-linuxmusl-x64": "1.2.4" + "@img/sharp-libvips-linuxmusl-x64": "1.0.4" } }, "node_modules/@img/sharp-wasm32": { - "version": "0.34.5", - "resolved": "https://registry.npmjs.org/@img/sharp-wasm32/-/sharp-wasm32-0.34.5.tgz", - "integrity": "sha512-OdWTEiVkY2PHwqkbBI8frFxQQFekHaSSkUIJkwzclWZe64O1X4UlUjqqqLaPbUpMOQk6FBu/HtlGXNblIs0huw==", + "version": "0.33.5", + "resolved": "https://registry.npmjs.org/@img/sharp-wasm32/-/sharp-wasm32-0.33.5.tgz", + "integrity": "sha512-ykUW4LVGaMcU9lu9thv85CbRMAwfeadCJHRsg2GmeRa/cJxsVY9Rbd57JcMxBkKHag5U/x7TSBpScF4U8ElVzg==", "cpu": [ "wasm32" ], "license": "Apache-2.0 AND LGPL-3.0-or-later AND MIT", "optional": true, "dependencies": { - "@emnapi/runtime": "^1.7.0" + "@emnapi/runtime": "^1.2.0" }, "engines": { "node": "^18.17.0 || ^20.3.0 || >=21.0.0" @@ -773,9 +785,9 @@ } }, "node_modules/@img/sharp-win32-ia32": { - "version": "0.34.5", - "resolved": "https://registry.npmjs.org/@img/sharp-win32-ia32/-/sharp-win32-ia32-0.34.5.tgz", - "integrity": "sha512-FV9m/7NmeCmSHDD5j4+4pNI8Cp3aW+JvLoXcTUo0IqyjSfAZJ8dIUmijx1qaJsIiU+Hosw6xM5KijAWRJCSgNg==", + "version": "0.33.5", + "resolved": "https://registry.npmjs.org/@img/sharp-win32-ia32/-/sharp-win32-ia32-0.33.5.tgz", + "integrity": "sha512-T36PblLaTwuVJ/zw/LaH0PdZkRz5rd3SmMHX8GSmR7vtNSP5Z6bQkExdSK7xGWyxLw4sUknBuugTelgw2faBbQ==", "cpu": [ "ia32" ], @@ -792,9 +804,9 @@ } }, "node_modules/@img/sharp-win32-x64": { - "version": "0.34.5", - "resolved": "https://registry.npmjs.org/@img/sharp-win32-x64/-/sharp-win32-x64-0.34.5.tgz", - "integrity": "sha512-+29YMsqY2/9eFEiW93eqWnuLcWcufowXewwSNIT6UwZdUUCrM3oFjMWH/Z6/TMmb4hlFenmfAVbpWeup2jryCw==", + "version": "0.33.5", + "resolved": "https://registry.npmjs.org/@img/sharp-win32-x64/-/sharp-win32-x64-0.33.5.tgz", + "integrity": "sha512-MpY/o8/8kj+EcnxwvrP4aTJSWw/aZ7JIGR4aBeZkZw5B7/Jn+tY9/VNwtcoGmdT7GfggGIU4kygOMSbYnOrAbg==", "cpu": [ "x64" ], @@ -2568,6 +2580,29 @@ "react": "^18 || ^19" } }, + "node_modules/@tokenizer/inflate": { + "version": "0.4.1", + "resolved": "https://registry.npmjs.org/@tokenizer/inflate/-/inflate-0.4.1.tgz", + "integrity": "sha512-2mAv+8pkG6GIZiF1kNg1jAjh27IDxEPKwdGul3snfztFerfPGI1LjDezZp3i7BElXompqEtPmoPx6c2wgtWsOA==", + "license": "MIT", + "dependencies": { + "debug": "^4.4.3", + "token-types": "^6.1.1" + }, + "engines": { + "node": ">=18" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/Borewit" + } + }, + "node_modules/@tokenizer/token": { + "version": "0.3.0", + "resolved": "https://registry.npmjs.org/@tokenizer/token/-/token-0.3.0.tgz", + "integrity": "sha512-OvjF+z51L3ov0OyAU0duzsYuvO01PH7x4t6DJx+guahgTnBHkhJdG7soQeTSFLWN3efnHyibZ4Z8l2EuWwJN3A==", + "license": "MIT" + }, "node_modules/@tybys/wasm-util": { "version": "0.10.1", "resolved": "https://registry.npmjs.org/@tybys/wasm-util/-/wasm-util-0.10.1.tgz", @@ -3752,11 +3787,23 
@@ "node": ">=6" } }, + "node_modules/color": { + "version": "4.2.3", + "resolved": "https://registry.npmjs.org/color/-/color-4.2.3.tgz", + "integrity": "sha512-1rXeuUUiGGrykh+CeBdu5Ie7OJwinCgQY0bc7GCRxy5xVHy+moaqkpL/jqQq0MtQOeYcrqEz4abc5f0KtU7W4A==", + "license": "MIT", + "dependencies": { + "color-convert": "^2.0.1", + "color-string": "^1.9.0" + }, + "engines": { + "node": ">=12.5.0" + } + }, "node_modules/color-convert": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", - "dev": true, "license": "MIT", "dependencies": { "color-name": "~1.1.4" @@ -3769,9 +3816,18 @@ "version": "1.1.4", "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", - "dev": true, "license": "MIT" }, + "node_modules/color-string": { + "version": "1.9.1", + "resolved": "https://registry.npmjs.org/color-string/-/color-string-1.9.1.tgz", + "integrity": "sha512-shrVawQFojnZv6xM40anx4CkoDP+fZsw/ZerEMsW/pyzsRbElpsL/DBVW7q3ExxwusdNXI3lXpuhEZkzs8p5Eg==", + "license": "MIT", + "dependencies": { + "color-name": "^1.0.0", + "simple-swizzle": "^0.2.2" + } + }, "node_modules/comma-separated-tokens": { "version": "2.0.3", "resolved": "https://registry.npmjs.org/comma-separated-tokens/-/comma-separated-tokens-2.0.3.tgz", @@ -3967,7 +4023,6 @@ "version": "2.1.2", "resolved": "https://registry.npmjs.org/detect-libc/-/detect-libc-2.1.2.tgz", "integrity": "sha512-Btj2BOOO83o3WyH59e8MgXsxEQVcarkUOpEYrubB0urwnN10yQ364rsiByU11nZlqWYZm05i/of7io4mzihBtQ==", - "devOptional": true, "license": "Apache-2.0", "engines": { "node": ">=8" @@ -4746,6 +4801,24 @@ "node": ">=16.0.0" } }, + "node_modules/file-type": { + "version": "21.1.1", + "resolved": "https://registry.npmjs.org/file-type/-/file-type-21.1.1.tgz", + "integrity": "sha512-ifJXo8zUqbQ/bLbl9sFoqHNTNWbnPY1COImFfM6CCy7z+E+jC1eY9YfOKkx0fckIg+VljAy2/87T61fp0+eEkg==", + "license": "MIT", + "dependencies": { + "@tokenizer/inflate": "^0.4.1", + "strtok3": "^10.3.4", + "token-types": "^6.1.1", + "uint8array-extras": "^1.4.0" + }, + "engines": { + "node": ">=20" + }, + "funding": { + "url": "https://github.com/sindresorhus/file-type?sponsor=1" + } + }, "node_modules/fill-range": { "version": "7.1.1", "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.1.1.tgz", @@ -5195,6 +5268,26 @@ "url": "https://opencollective.com/unified" } }, + "node_modules/ieee754": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/ieee754/-/ieee754-1.2.1.tgz", + "integrity": "sha512-dcyqhDvX1C46lXZcVqCpK+FtMRQVdIMN6/Df5js2zouUsqG7I6sFxitIC+7KYK29KdXOLHdu9zL4sFnoVQnqaA==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "license": "BSD-3-Clause" + }, "node_modules/ignore": { "version": "5.3.2", "resolved": "https://registry.npmjs.org/ignore/-/ignore-5.3.2.tgz", @@ -5295,6 +5388,12 @@ "url": "https://github.com/sponsors/ljharb" } }, + "node_modules/is-arrayish": { + "version": "0.3.4", + "resolved": "https://registry.npmjs.org/is-arrayish/-/is-arrayish-0.3.4.tgz", + "integrity": "sha512-m6UrgzFVUYawGBh1dUsWR5M2Clqic9RVXC/9f8ceNlv2IcO9j9J/z8UoCLPqtsPBFNzEpfR3xftohbfqDx8EQA==", + "license": "MIT" + }, 
"node_modules/is-async-function": { "version": "2.1.1", "resolved": "https://registry.npmjs.org/is-async-function/-/is-async-function-2.1.1.tgz", @@ -7229,111 +7328,517 @@ "react-dom": "^16.8 || ^17 || ^18 || ^19 || ^19.0.0-rc" } }, - "node_modules/next/node_modules/postcss": { - "version": "8.4.31", - "resolved": "https://registry.npmjs.org/postcss/-/postcss-8.4.31.tgz", - "integrity": "sha512-PS08Iboia9mts/2ygV3eLpY5ghnUcfLV/EXTOW1E2qYxJKGGBUtNjN76FYHnMs36RmARn41bC0AZmn+rR0OVpQ==", - "funding": [ - { - "type": "opencollective", - "url": "https://opencollective.com/postcss/" - }, - { - "type": "tidelift", - "url": "https://tidelift.com/funding/github/npm/postcss" - }, - { - "type": "github", - "url": "https://github.com/sponsors/ai" - } + "node_modules/next/node_modules/@img/sharp-darwin-arm64": { + "version": "0.34.5", + "resolved": "https://registry.npmjs.org/@img/sharp-darwin-arm64/-/sharp-darwin-arm64-0.34.5.tgz", + "integrity": "sha512-imtQ3WMJXbMY4fxb/Ndp6HBTNVtWCUI0WdobyheGf5+ad6xX8VIDO8u2xE4qc/fr08CKG/7dDseFtn6M6g/r3w==", + "cpu": [ + "arm64" + ], + "license": "Apache-2.0", + "optional": true, + "os": [ + "darwin" ], - "license": "MIT", - "dependencies": { - "nanoid": "^3.3.6", - "picocolors": "^1.0.0", - "source-map-js": "^1.0.2" - }, - "engines": { - "node": "^10 || ^12 || >=14" - } - }, - "node_modules/object-assign": { - "version": "4.1.1", - "resolved": "https://registry.npmjs.org/object-assign/-/object-assign-4.1.1.tgz", - "integrity": "sha512-rJgTQnkUnH1sFw8yT6VSU3zD3sWmu6sZhIseY8VX+GRu3P6F7Fu+JNDoXfklElbLJSnc3FUQHVe4cU5hj+BcUg==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/object-inspect": { - "version": "1.13.4", - "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.13.4.tgz", - "integrity": "sha512-W67iLl4J2EXEGTbfeHCffrjDfitvLANg0UlX3wFUUSTx92KXRFegMHUVgSqE+wvhAbi4WqjGg9czysTV2Epbew==", - "dev": true, - "license": "MIT", "engines": { - "node": ">= 0.4" + "node": "^18.17.0 || ^20.3.0 || >=21.0.0" }, "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/object-keys": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/object-keys/-/object-keys-1.1.1.tgz", - "integrity": "sha512-NuAESUOUMrlIXOfHKzD6bpPu3tYt3xvjNdRIQ+FeT0lNb4K8WR70CaDxhuNguS2XG+GjkyMwOzsN5ZktImfhLA==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">= 0.4" + "url": "https://opencollective.com/libvips" + }, + "optionalDependencies": { + "@img/sharp-libvips-darwin-arm64": "1.2.4" } }, - "node_modules/object.assign": { - "version": "4.1.7", - "resolved": "https://registry.npmjs.org/object.assign/-/object.assign-4.1.7.tgz", - "integrity": "sha512-nK28WOo+QIjBkDduTINE4JkF/UJJKyf2EJxvJKfblDpyg0Q+pkOHNTL0Qwy6NP6FhE/EnzV73BxxqcJaXY9anw==", - "dev": true, - "license": "MIT", - "dependencies": { - "call-bind": "^1.0.8", - "call-bound": "^1.0.3", - "define-properties": "^1.2.1", - "es-object-atoms": "^1.0.0", - "has-symbols": "^1.1.0", - "object-keys": "^1.1.1" - }, + "node_modules/next/node_modules/@img/sharp-darwin-x64": { + "version": "0.34.5", + "resolved": "https://registry.npmjs.org/@img/sharp-darwin-x64/-/sharp-darwin-x64-0.34.5.tgz", + "integrity": "sha512-YNEFAF/4KQ/PeW0N+r+aVVsoIY0/qxxikF2SWdp+NRkmMB7y9LBZAVqQ4yhGCm/H3H270OSykqmQMKLBhBJDEw==", + "cpu": [ + "x64" + ], + "license": "Apache-2.0", + "optional": true, + "os": [ + "darwin" + ], "engines": { - "node": ">= 0.4" + "node": "^18.17.0 || ^20.3.0 || >=21.0.0" }, "funding": { - "url": "https://github.com/sponsors/ljharb" - } 
- }, - "node_modules/object.entries": { - "version": "1.1.9", - "resolved": "https://registry.npmjs.org/object.entries/-/object.entries-1.1.9.tgz", - "integrity": "sha512-8u/hfXFRBD1O0hPUjioLhoWFHRmt6tKA4/vZPyckBr18l1KE9uHrFaFaUi8MDRTpi4uak2goyPTSNJLXX2k2Hw==", - "dev": true, - "license": "MIT", - "dependencies": { - "call-bind": "^1.0.8", - "call-bound": "^1.0.4", - "define-properties": "^1.2.1", - "es-object-atoms": "^1.1.1" + "url": "https://opencollective.com/libvips" }, - "engines": { - "node": ">= 0.4" + "optionalDependencies": { + "@img/sharp-libvips-darwin-x64": "1.2.4" } }, - "node_modules/object.fromentries": { - "version": "2.0.8", - "resolved": "https://registry.npmjs.org/object.fromentries/-/object.fromentries-2.0.8.tgz", - "integrity": "sha512-k6E21FzySsSK5a21KRADBd/NGneRegFO5pLHfdQLpRDETUNJueLXs3WCzyQ3tFRDYgbq3KHGXfTbi2bs8WQ6rQ==", - "dev": true, - "license": "MIT", - "dependencies": { + "node_modules/next/node_modules/@img/sharp-libvips-darwin-arm64": { + "version": "1.2.4", + "resolved": "https://registry.npmjs.org/@img/sharp-libvips-darwin-arm64/-/sharp-libvips-darwin-arm64-1.2.4.tgz", + "integrity": "sha512-zqjjo7RatFfFoP0MkQ51jfuFZBnVE2pRiaydKJ1G/rHZvnsrHAOcQALIi9sA5co5xenQdTugCvtb1cuf78Vf4g==", + "cpu": [ + "arm64" + ], + "license": "LGPL-3.0-or-later", + "optional": true, + "os": [ + "darwin" + ], + "funding": { + "url": "https://opencollective.com/libvips" + } + }, + "node_modules/next/node_modules/@img/sharp-libvips-darwin-x64": { + "version": "1.2.4", + "resolved": "https://registry.npmjs.org/@img/sharp-libvips-darwin-x64/-/sharp-libvips-darwin-x64-1.2.4.tgz", + "integrity": "sha512-1IOd5xfVhlGwX+zXv2N93k0yMONvUlANylbJw1eTah8K/Jtpi15KC+WSiaX/nBmbm2HxRM1gZ0nSdjSsrZbGKg==", + "cpu": [ + "x64" + ], + "license": "LGPL-3.0-or-later", + "optional": true, + "os": [ + "darwin" + ], + "funding": { + "url": "https://opencollective.com/libvips" + } + }, + "node_modules/next/node_modules/@img/sharp-libvips-linux-arm": { + "version": "1.2.4", + "resolved": "https://registry.npmjs.org/@img/sharp-libvips-linux-arm/-/sharp-libvips-linux-arm-1.2.4.tgz", + "integrity": "sha512-bFI7xcKFELdiNCVov8e44Ia4u2byA+l3XtsAj+Q8tfCwO6BQ8iDojYdvoPMqsKDkuoOo+X6HZA0s0q11ANMQ8A==", + "cpu": [ + "arm" + ], + "license": "LGPL-3.0-or-later", + "optional": true, + "os": [ + "linux" + ], + "funding": { + "url": "https://opencollective.com/libvips" + } + }, + "node_modules/next/node_modules/@img/sharp-libvips-linux-arm64": { + "version": "1.2.4", + "resolved": "https://registry.npmjs.org/@img/sharp-libvips-linux-arm64/-/sharp-libvips-linux-arm64-1.2.4.tgz", + "integrity": "sha512-excjX8DfsIcJ10x1Kzr4RcWe1edC9PquDRRPx3YVCvQv+U5p7Yin2s32ftzikXojb1PIFc/9Mt28/y+iRklkrw==", + "cpu": [ + "arm64" + ], + "license": "LGPL-3.0-or-later", + "optional": true, + "os": [ + "linux" + ], + "funding": { + "url": "https://opencollective.com/libvips" + } + }, + "node_modules/next/node_modules/@img/sharp-libvips-linux-s390x": { + "version": "1.2.4", + "resolved": "https://registry.npmjs.org/@img/sharp-libvips-linux-s390x/-/sharp-libvips-linux-s390x-1.2.4.tgz", + "integrity": "sha512-qmp9VrzgPgMoGZyPvrQHqk02uyjA0/QrTO26Tqk6l4ZV0MPWIW6LTkqOIov+J1yEu7MbFQaDpwdwJKhbJvuRxQ==", + "cpu": [ + "s390x" + ], + "license": "LGPL-3.0-or-later", + "optional": true, + "os": [ + "linux" + ], + "funding": { + "url": "https://opencollective.com/libvips" + } + }, + "node_modules/next/node_modules/@img/sharp-libvips-linux-x64": { + "version": "1.2.4", + "resolved": 
"https://registry.npmjs.org/@img/sharp-libvips-linux-x64/-/sharp-libvips-linux-x64-1.2.4.tgz", + "integrity": "sha512-tJxiiLsmHc9Ax1bz3oaOYBURTXGIRDODBqhveVHonrHJ9/+k89qbLl0bcJns+e4t4rvaNBxaEZsFtSfAdquPrw==", + "cpu": [ + "x64" + ], + "license": "LGPL-3.0-or-later", + "optional": true, + "os": [ + "linux" + ], + "funding": { + "url": "https://opencollective.com/libvips" + } + }, + "node_modules/next/node_modules/@img/sharp-libvips-linuxmusl-arm64": { + "version": "1.2.4", + "resolved": "https://registry.npmjs.org/@img/sharp-libvips-linuxmusl-arm64/-/sharp-libvips-linuxmusl-arm64-1.2.4.tgz", + "integrity": "sha512-FVQHuwx1IIuNow9QAbYUzJ+En8KcVm9Lk5+uGUQJHaZmMECZmOlix9HnH7n1TRkXMS0pGxIJokIVB9SuqZGGXw==", + "cpu": [ + "arm64" + ], + "license": "LGPL-3.0-or-later", + "optional": true, + "os": [ + "linux" + ], + "funding": { + "url": "https://opencollective.com/libvips" + } + }, + "node_modules/next/node_modules/@img/sharp-libvips-linuxmusl-x64": { + "version": "1.2.4", + "resolved": "https://registry.npmjs.org/@img/sharp-libvips-linuxmusl-x64/-/sharp-libvips-linuxmusl-x64-1.2.4.tgz", + "integrity": "sha512-+LpyBk7L44ZIXwz/VYfglaX/okxezESc6UxDSoyo2Ks6Jxc4Y7sGjpgU9s4PMgqgjj1gZCylTieNamqA1MF7Dg==", + "cpu": [ + "x64" + ], + "license": "LGPL-3.0-or-later", + "optional": true, + "os": [ + "linux" + ], + "funding": { + "url": "https://opencollective.com/libvips" + } + }, + "node_modules/next/node_modules/@img/sharp-linux-arm": { + "version": "0.34.5", + "resolved": "https://registry.npmjs.org/@img/sharp-linux-arm/-/sharp-linux-arm-0.34.5.tgz", + "integrity": "sha512-9dLqsvwtg1uuXBGZKsxem9595+ujv0sJ6Vi8wcTANSFpwV/GONat5eCkzQo/1O6zRIkh0m/8+5BjrRr7jDUSZw==", + "cpu": [ + "arm" + ], + "license": "Apache-2.0", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": "^18.17.0 || ^20.3.0 || >=21.0.0" + }, + "funding": { + "url": "https://opencollective.com/libvips" + }, + "optionalDependencies": { + "@img/sharp-libvips-linux-arm": "1.2.4" + } + }, + "node_modules/next/node_modules/@img/sharp-linux-arm64": { + "version": "0.34.5", + "resolved": "https://registry.npmjs.org/@img/sharp-linux-arm64/-/sharp-linux-arm64-0.34.5.tgz", + "integrity": "sha512-bKQzaJRY/bkPOXyKx5EVup7qkaojECG6NLYswgktOZjaXecSAeCWiZwwiFf3/Y+O1HrauiE3FVsGxFg8c24rZg==", + "cpu": [ + "arm64" + ], + "license": "Apache-2.0", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": "^18.17.0 || ^20.3.0 || >=21.0.0" + }, + "funding": { + "url": "https://opencollective.com/libvips" + }, + "optionalDependencies": { + "@img/sharp-libvips-linux-arm64": "1.2.4" + } + }, + "node_modules/next/node_modules/@img/sharp-linux-s390x": { + "version": "0.34.5", + "resolved": "https://registry.npmjs.org/@img/sharp-linux-s390x/-/sharp-linux-s390x-0.34.5.tgz", + "integrity": "sha512-nQtCk0PdKfho3eC5MrbQoigJ2gd1CgddUMkabUj+rBevs8tZ2cULOx46E7oyX+04WGfABgIwmMC0VqieTiR4jg==", + "cpu": [ + "s390x" + ], + "license": "Apache-2.0", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": "^18.17.0 || ^20.3.0 || >=21.0.0" + }, + "funding": { + "url": "https://opencollective.com/libvips" + }, + "optionalDependencies": { + "@img/sharp-libvips-linux-s390x": "1.2.4" + } + }, + "node_modules/next/node_modules/@img/sharp-linux-x64": { + "version": "0.34.5", + "resolved": "https://registry.npmjs.org/@img/sharp-linux-x64/-/sharp-linux-x64-0.34.5.tgz", + "integrity": "sha512-MEzd8HPKxVxVenwAa+JRPwEC7QFjoPWuS5NZnBt6B3pu7EG2Ge0id1oLHZpPJdn3OQK+BQDiw9zStiHBTJQQQQ==", + "cpu": [ + "x64" + ], + "license": "Apache-2.0", + "optional": true, + 
"os": [ + "linux" + ], + "engines": { + "node": "^18.17.0 || ^20.3.0 || >=21.0.0" + }, + "funding": { + "url": "https://opencollective.com/libvips" + }, + "optionalDependencies": { + "@img/sharp-libvips-linux-x64": "1.2.4" + } + }, + "node_modules/next/node_modules/@img/sharp-linuxmusl-arm64": { + "version": "0.34.5", + "resolved": "https://registry.npmjs.org/@img/sharp-linuxmusl-arm64/-/sharp-linuxmusl-arm64-0.34.5.tgz", + "integrity": "sha512-fprJR6GtRsMt6Kyfq44IsChVZeGN97gTD331weR1ex1c1rypDEABN6Tm2xa1wE6lYb5DdEnk03NZPqA7Id21yg==", + "cpu": [ + "arm64" + ], + "license": "Apache-2.0", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": "^18.17.0 || ^20.3.0 || >=21.0.0" + }, + "funding": { + "url": "https://opencollective.com/libvips" + }, + "optionalDependencies": { + "@img/sharp-libvips-linuxmusl-arm64": "1.2.4" + } + }, + "node_modules/next/node_modules/@img/sharp-linuxmusl-x64": { + "version": "0.34.5", + "resolved": "https://registry.npmjs.org/@img/sharp-linuxmusl-x64/-/sharp-linuxmusl-x64-0.34.5.tgz", + "integrity": "sha512-Jg8wNT1MUzIvhBFxViqrEhWDGzqymo3sV7z7ZsaWbZNDLXRJZoRGrjulp60YYtV4wfY8VIKcWidjojlLcWrd8Q==", + "cpu": [ + "x64" + ], + "license": "Apache-2.0", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": "^18.17.0 || ^20.3.0 || >=21.0.0" + }, + "funding": { + "url": "https://opencollective.com/libvips" + }, + "optionalDependencies": { + "@img/sharp-libvips-linuxmusl-x64": "1.2.4" + } + }, + "node_modules/next/node_modules/@img/sharp-wasm32": { + "version": "0.34.5", + "resolved": "https://registry.npmjs.org/@img/sharp-wasm32/-/sharp-wasm32-0.34.5.tgz", + "integrity": "sha512-OdWTEiVkY2PHwqkbBI8frFxQQFekHaSSkUIJkwzclWZe64O1X4UlUjqqqLaPbUpMOQk6FBu/HtlGXNblIs0huw==", + "cpu": [ + "wasm32" + ], + "license": "Apache-2.0 AND LGPL-3.0-or-later AND MIT", + "optional": true, + "dependencies": { + "@emnapi/runtime": "^1.7.0" + }, + "engines": { + "node": "^18.17.0 || ^20.3.0 || >=21.0.0" + }, + "funding": { + "url": "https://opencollective.com/libvips" + } + }, + "node_modules/next/node_modules/@img/sharp-win32-ia32": { + "version": "0.34.5", + "resolved": "https://registry.npmjs.org/@img/sharp-win32-ia32/-/sharp-win32-ia32-0.34.5.tgz", + "integrity": "sha512-FV9m/7NmeCmSHDD5j4+4pNI8Cp3aW+JvLoXcTUo0IqyjSfAZJ8dIUmijx1qaJsIiU+Hosw6xM5KijAWRJCSgNg==", + "cpu": [ + "ia32" + ], + "license": "Apache-2.0 AND LGPL-3.0-or-later", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": "^18.17.0 || ^20.3.0 || >=21.0.0" + }, + "funding": { + "url": "https://opencollective.com/libvips" + } + }, + "node_modules/next/node_modules/@img/sharp-win32-x64": { + "version": "0.34.5", + "resolved": "https://registry.npmjs.org/@img/sharp-win32-x64/-/sharp-win32-x64-0.34.5.tgz", + "integrity": "sha512-+29YMsqY2/9eFEiW93eqWnuLcWcufowXewwSNIT6UwZdUUCrM3oFjMWH/Z6/TMmb4hlFenmfAVbpWeup2jryCw==", + "cpu": [ + "x64" + ], + "license": "Apache-2.0 AND LGPL-3.0-or-later", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": "^18.17.0 || ^20.3.0 || >=21.0.0" + }, + "funding": { + "url": "https://opencollective.com/libvips" + } + }, + "node_modules/next/node_modules/postcss": { + "version": "8.4.31", + "resolved": "https://registry.npmjs.org/postcss/-/postcss-8.4.31.tgz", + "integrity": "sha512-PS08Iboia9mts/2ygV3eLpY5ghnUcfLV/EXTOW1E2qYxJKGGBUtNjN76FYHnMs36RmARn41bC0AZmn+rR0OVpQ==", + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/postcss/" + }, + { + "type": "tidelift", + "url": 
"https://tidelift.com/funding/github/npm/postcss" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "license": "MIT", + "dependencies": { + "nanoid": "^3.3.6", + "picocolors": "^1.0.0", + "source-map-js": "^1.0.2" + }, + "engines": { + "node": "^10 || ^12 || >=14" + } + }, + "node_modules/next/node_modules/sharp": { + "version": "0.34.5", + "resolved": "https://registry.npmjs.org/sharp/-/sharp-0.34.5.tgz", + "integrity": "sha512-Ou9I5Ft9WNcCbXrU9cMgPBcCK8LiwLqcbywW3t4oDV37n1pzpuNLsYiAV8eODnjbtQlSDwZ2cUEeQz4E54Hltg==", + "hasInstallScript": true, + "license": "Apache-2.0", + "optional": true, + "dependencies": { + "@img/colour": "^1.0.0", + "detect-libc": "^2.1.2", + "semver": "^7.7.3" + }, + "engines": { + "node": "^18.17.0 || ^20.3.0 || >=21.0.0" + }, + "funding": { + "url": "https://opencollective.com/libvips" + }, + "optionalDependencies": { + "@img/sharp-darwin-arm64": "0.34.5", + "@img/sharp-darwin-x64": "0.34.5", + "@img/sharp-libvips-darwin-arm64": "1.2.4", + "@img/sharp-libvips-darwin-x64": "1.2.4", + "@img/sharp-libvips-linux-arm": "1.2.4", + "@img/sharp-libvips-linux-arm64": "1.2.4", + "@img/sharp-libvips-linux-ppc64": "1.2.4", + "@img/sharp-libvips-linux-riscv64": "1.2.4", + "@img/sharp-libvips-linux-s390x": "1.2.4", + "@img/sharp-libvips-linux-x64": "1.2.4", + "@img/sharp-libvips-linuxmusl-arm64": "1.2.4", + "@img/sharp-libvips-linuxmusl-x64": "1.2.4", + "@img/sharp-linux-arm": "0.34.5", + "@img/sharp-linux-arm64": "0.34.5", + "@img/sharp-linux-ppc64": "0.34.5", + "@img/sharp-linux-riscv64": "0.34.5", + "@img/sharp-linux-s390x": "0.34.5", + "@img/sharp-linux-x64": "0.34.5", + "@img/sharp-linuxmusl-arm64": "0.34.5", + "@img/sharp-linuxmusl-x64": "0.34.5", + "@img/sharp-wasm32": "0.34.5", + "@img/sharp-win32-arm64": "0.34.5", + "@img/sharp-win32-ia32": "0.34.5", + "@img/sharp-win32-x64": "0.34.5" + } + }, + "node_modules/object-assign": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/object-assign/-/object-assign-4.1.1.tgz", + "integrity": "sha512-rJgTQnkUnH1sFw8yT6VSU3zD3sWmu6sZhIseY8VX+GRu3P6F7Fu+JNDoXfklElbLJSnc3FUQHVe4cU5hj+BcUg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/object-inspect": { + "version": "1.13.4", + "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.13.4.tgz", + "integrity": "sha512-W67iLl4J2EXEGTbfeHCffrjDfitvLANg0UlX3wFUUSTx92KXRFegMHUVgSqE+wvhAbi4WqjGg9czysTV2Epbew==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/object-keys": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/object-keys/-/object-keys-1.1.1.tgz", + "integrity": "sha512-NuAESUOUMrlIXOfHKzD6bpPu3tYt3xvjNdRIQ+FeT0lNb4K8WR70CaDxhuNguS2XG+GjkyMwOzsN5ZktImfhLA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/object.assign": { + "version": "4.1.7", + "resolved": "https://registry.npmjs.org/object.assign/-/object.assign-4.1.7.tgz", + "integrity": "sha512-nK28WOo+QIjBkDduTINE4JkF/UJJKyf2EJxvJKfblDpyg0Q+pkOHNTL0Qwy6NP6FhE/EnzV73BxxqcJaXY9anw==", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bind": "^1.0.8", + "call-bound": "^1.0.3", + "define-properties": "^1.2.1", + "es-object-atoms": "^1.0.0", + "has-symbols": "^1.1.0", + "object-keys": "^1.1.1" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + 
"node_modules/object.entries": { + "version": "1.1.9", + "resolved": "https://registry.npmjs.org/object.entries/-/object.entries-1.1.9.tgz", + "integrity": "sha512-8u/hfXFRBD1O0hPUjioLhoWFHRmt6tKA4/vZPyckBr18l1KE9uHrFaFaUi8MDRTpi4uak2goyPTSNJLXX2k2Hw==", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bind": "^1.0.8", + "call-bound": "^1.0.4", + "define-properties": "^1.2.1", + "es-object-atoms": "^1.1.1" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/object.fromentries": { + "version": "2.0.8", + "resolved": "https://registry.npmjs.org/object.fromentries/-/object.fromentries-2.0.8.tgz", + "integrity": "sha512-k6E21FzySsSK5a21KRADBd/NGneRegFO5pLHfdQLpRDETUNJueLXs3WCzyQ3tFRDYgbq3KHGXfTbi2bs8WQ6rQ==", + "dev": true, + "license": "MIT", + "dependencies": { "call-bind": "^1.0.7", "define-properties": "^1.2.1", "es-abstract": "^1.23.2", @@ -8052,7 +8557,6 @@ "version": "7.7.3", "resolved": "https://registry.npmjs.org/semver/-/semver-7.7.3.tgz", "integrity": "sha512-SdsKMrI9TdgjdweUSR9MweHA4EJ8YxHn8DFaDisvhVlUOe4BF1tLD7GAj0lIqWVl+dPb/rExr0Btby5loQm20Q==", - "devOptional": true, "license": "ISC", "bin": { "semver": "bin/semver.js" @@ -8111,16 +8615,15 @@ } }, "node_modules/sharp": { - "version": "0.34.5", - "resolved": "https://registry.npmjs.org/sharp/-/sharp-0.34.5.tgz", - "integrity": "sha512-Ou9I5Ft9WNcCbXrU9cMgPBcCK8LiwLqcbywW3t4oDV37n1pzpuNLsYiAV8eODnjbtQlSDwZ2cUEeQz4E54Hltg==", + "version": "0.33.5", + "resolved": "https://registry.npmjs.org/sharp/-/sharp-0.33.5.tgz", + "integrity": "sha512-haPVm1EkS9pgvHrQ/F3Xy+hgcuMV0Wm9vfIBSiwZ05k+xgb0PkBQpGsAA/oWdDobNaZTH5ppvHtzCFbnSEwHVw==", "hasInstallScript": true, "license": "Apache-2.0", - "optional": true, "dependencies": { - "@img/colour": "^1.0.0", - "detect-libc": "^2.1.2", - "semver": "^7.7.3" + "color": "^4.2.3", + "detect-libc": "^2.0.3", + "semver": "^7.6.3" }, "engines": { "node": "^18.17.0 || ^20.3.0 || >=21.0.0" @@ -8129,30 +8632,25 @@ "url": "https://opencollective.com/libvips" }, "optionalDependencies": { - "@img/sharp-darwin-arm64": "0.34.5", - "@img/sharp-darwin-x64": "0.34.5", - "@img/sharp-libvips-darwin-arm64": "1.2.4", - "@img/sharp-libvips-darwin-x64": "1.2.4", - "@img/sharp-libvips-linux-arm": "1.2.4", - "@img/sharp-libvips-linux-arm64": "1.2.4", - "@img/sharp-libvips-linux-ppc64": "1.2.4", - "@img/sharp-libvips-linux-riscv64": "1.2.4", - "@img/sharp-libvips-linux-s390x": "1.2.4", - "@img/sharp-libvips-linux-x64": "1.2.4", - "@img/sharp-libvips-linuxmusl-arm64": "1.2.4", - "@img/sharp-libvips-linuxmusl-x64": "1.2.4", - "@img/sharp-linux-arm": "0.34.5", - "@img/sharp-linux-arm64": "0.34.5", - "@img/sharp-linux-ppc64": "0.34.5", - "@img/sharp-linux-riscv64": "0.34.5", - "@img/sharp-linux-s390x": "0.34.5", - "@img/sharp-linux-x64": "0.34.5", - "@img/sharp-linuxmusl-arm64": "0.34.5", - "@img/sharp-linuxmusl-x64": "0.34.5", - "@img/sharp-wasm32": "0.34.5", - "@img/sharp-win32-arm64": "0.34.5", - "@img/sharp-win32-ia32": "0.34.5", - "@img/sharp-win32-x64": "0.34.5" + "@img/sharp-darwin-arm64": "0.33.5", + "@img/sharp-darwin-x64": "0.33.5", + "@img/sharp-libvips-darwin-arm64": "1.0.4", + "@img/sharp-libvips-darwin-x64": "1.0.4", + "@img/sharp-libvips-linux-arm": "1.0.5", + "@img/sharp-libvips-linux-arm64": "1.0.4", + "@img/sharp-libvips-linux-s390x": "1.0.4", + "@img/sharp-libvips-linux-x64": "1.0.4", + "@img/sharp-libvips-linuxmusl-arm64": "1.0.4", + "@img/sharp-libvips-linuxmusl-x64": "1.0.4", + "@img/sharp-linux-arm": "0.33.5", + "@img/sharp-linux-arm64": "0.33.5", + "@img/sharp-linux-s390x": 
"0.33.5", + "@img/sharp-linux-x64": "0.33.5", + "@img/sharp-linuxmusl-arm64": "0.33.5", + "@img/sharp-linuxmusl-x64": "0.33.5", + "@img/sharp-wasm32": "0.33.5", + "@img/sharp-win32-ia32": "0.33.5", + "@img/sharp-win32-x64": "0.33.5" } }, "node_modules/shebang-command": { @@ -8254,6 +8752,15 @@ "url": "https://github.com/sponsors/ljharb" } }, + "node_modules/simple-swizzle": { + "version": "0.2.4", + "resolved": "https://registry.npmjs.org/simple-swizzle/-/simple-swizzle-0.2.4.tgz", + "integrity": "sha512-nAu1WFPQSMNr2Zn9PGSZK9AGn4t/y97lEm+MXTtUDwfP0ksAIX4nO+6ruD9Jwut4C49SB1Ws+fbXsm/yScWOHw==", + "license": "MIT", + "dependencies": { + "is-arrayish": "^0.3.1" + } + }, "node_modules/source-map-js": { "version": "1.2.1", "resolved": "https://registry.npmjs.org/source-map-js/-/source-map-js-1.2.1.tgz", @@ -8444,6 +8951,22 @@ "url": "https://github.com/sponsors/sindresorhus" } }, + "node_modules/strtok3": { + "version": "10.3.4", + "resolved": "https://registry.npmjs.org/strtok3/-/strtok3-10.3.4.tgz", + "integrity": "sha512-KIy5nylvC5le1OdaaoCJ07L+8iQzJHGH6pWDuzS+d07Cu7n1MZ2x26P8ZKIWfbK02+XIL8Mp4RkWeqdUCrDMfg==", + "license": "MIT", + "dependencies": { + "@tokenizer/token": "^0.3.0" + }, + "engines": { + "node": ">=18" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/Borewit" + } + }, "node_modules/style-to-js": { "version": "1.1.21", "resolved": "https://registry.npmjs.org/style-to-js/-/style-to-js-1.1.21.tgz", @@ -8603,6 +9126,24 @@ "node": ">=8.0" } }, + "node_modules/token-types": { + "version": "6.1.1", + "resolved": "https://registry.npmjs.org/token-types/-/token-types-6.1.1.tgz", + "integrity": "sha512-kh9LVIWH5CnL63Ipf0jhlBIy0UsrMj/NJDfpsy1SqOXlLKEVyXXYrnFxFT1yOOYVGBSApeVnjPw/sBz5BfEjAQ==", + "license": "MIT", + "dependencies": { + "@borewit/text-codec": "^0.1.0", + "@tokenizer/token": "^0.3.0", + "ieee754": "^1.2.1" + }, + "engines": { + "node": ">=14.16" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/Borewit" + } + }, "node_modules/trim-lines": { "version": "3.0.1", "resolved": "https://registry.npmjs.org/trim-lines/-/trim-lines-3.0.1.tgz", @@ -8770,6 +9311,18 @@ "node": ">=14.17" } }, + "node_modules/uint8array-extras": { + "version": "1.5.0", + "resolved": "https://registry.npmjs.org/uint8array-extras/-/uint8array-extras-1.5.0.tgz", + "integrity": "sha512-rvKSBiC5zqCCiDZ9kAOszZcDvdAHwwIKJG33Ykj43OKcWsnmcBRL09YTU4nOeHZ8Y2a7l1MgTd08SBe9A8Qj6A==", + "license": "MIT", + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, "node_modules/unbox-primitive": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/unbox-primitive/-/unbox-primitive-1.1.0.tgz", diff --git a/components/frontend/package.json b/components/frontend/package.json index 6d72bea77..ab31b1edf 100644 --- a/components/frontend/package.json +++ b/components/frontend/package.json @@ -26,6 +26,7 @@ "class-variance-authority": "^0.7.1", "clsx": "^2.1.1", "date-fns": "^4.1.0", + "file-type": "^21.1.1", "highlight.js": "^11.11.1", "lucide-react": "^0.542.0", "next": "15.5.7", @@ -37,6 +38,7 @@ "react-resizable-panels": "^3.0.6", "rehype-highlight": "^7.0.2", "remark-gfm": "^4.0.1", + "sharp": "^0.33.0", "tailwind-merge": "^3.3.1", "zod": "^4.1.5" }, diff --git a/components/frontend/src/app/api/projects/[name]/agentic-sessions/[sessionName]/workspace/[...path]/route.ts b/components/frontend/src/app/api/projects/[name]/agentic-sessions/[sessionName]/workspace/[...path]/route.ts index 2d9737965..2ea4215c4 
100644 --- a/components/frontend/src/app/api/projects/[name]/agentic-sessions/[sessionName]/workspace/[...path]/route.ts +++ b/components/frontend/src/app/api/projects/[name]/agentic-sessions/[sessionName]/workspace/[...path]/route.ts @@ -33,4 +33,19 @@ export async function PUT( return new Response(respBody, { status: resp.status, headers: { 'Content-Type': 'application/json' } }) } +export async function DELETE( + request: Request, + { params }: { params: Promise<{ name: string; sessionName: string; path: string[] }> }, +) { + const { name, sessionName, path } = await params + const headers = await buildForwardHeadersAsync(request) + const rel = path.join('/') + const resp = await fetch(`${BACKEND_URL}/projects/${encodeURIComponent(name)}/agentic-sessions/${encodeURIComponent(sessionName)}/workspace/${encodeURIComponent(rel)}`, { + method: 'DELETE', + headers, + }) + const respBody = await resp.text() + return new Response(respBody, { status: resp.status, headers: { 'Content-Type': 'application/json' } }) +} + diff --git a/components/frontend/src/app/api/projects/[name]/agentic-sessions/[sessionName]/workspace/upload/route.ts b/components/frontend/src/app/api/projects/[name]/agentic-sessions/[sessionName]/workspace/upload/route.ts new file mode 100644 index 000000000..298026029 --- /dev/null +++ b/components/frontend/src/app/api/projects/[name]/agentic-sessions/[sessionName]/workspace/upload/route.ts @@ -0,0 +1,561 @@ +import { buildForwardHeadersAsync } from '@/lib/auth'; +import { BACKEND_URL } from '@/lib/config'; +import { NextRequest } from 'next/server'; +import { fileTypeFromBuffer } from 'file-type'; + +// Maximum file sizes based on type +// SDK has 1MB JSON limit, base64 adds ~33% overhead, plus JSON structure overhead +// Conservative compression target: 350KB raw → ~467KB base64 → ~490KB total (safe margin) +// Text files don't get base64 encoded, so they can be larger (700KB safe limit) +// These limits are configurable via environment variables to allow different values per environment +const MAX_DOCUMENT_SIZE = parseInt(process.env.MAX_UPLOAD_SIZE_DOCUMENTS || '716800'); // Default 700KB for documents +const MAX_IMAGE_SIZE = parseInt(process.env.MAX_UPLOAD_SIZE_IMAGES || '3145728'); // Default 3MB upload limit +const IMAGE_COMPRESSION_TARGET = parseInt(process.env.IMAGE_COMPRESSION_TARGET || '358400'); // Default 350KB target + +// Determine if a file is an image based on content type +const isImageFile = (contentType: string): boolean => { + return contentType.startsWith('image/'); +}; + +// Get the appropriate max file size based on content type +const getMaxFileSize = (contentType: string): number => { + return isImageFile(contentType) ? MAX_IMAGE_SIZE : MAX_DOCUMENT_SIZE; +}; + +// Format size limit for error messages +const formatSizeLimit = (contentType: string): string => { + const maxSize = getMaxFileSize(contentType); + const sizeInKB = Math.round(maxSize / 1024); + const fileType = isImageFile(contentType) ? 
'images' : 'documents'; + return `${sizeInKB}KB for ${fileType}`; +}; + +// Sanitize filename to prevent path traversal and malicious characters +// Removes path separators (/, \, ..), null bytes, and limits length +function sanitizeFilename(filename: string): string { + // Remove path separators and null bytes + return filename.replace(/[\/\\\0]/g, '_').substring(0, 255); +} + +// Validate URL to prevent SSRF attacks +// Returns true if URL is safe to fetch, false otherwise +function isValidUrl(urlString: string): boolean { + try { + const url = new URL(urlString); + + // Only allow http and https protocols + if (!['http:', 'https:'].includes(url.protocol)) { + return false; + } + + // Block private IP ranges and localhost + const hostname = url.hostname.toLowerCase(); + + // Block localhost + if (hostname === 'localhost' || hostname === '127.0.0.1' || hostname === '::1') { + return false; + } + + // Block private IPv4 ranges (10.0.0.0/8, 172.16.0.0/12, 192.168.0.0/16) + const ipv4Regex = /^(\d{1,3})\.(\d{1,3})\.(\d{1,3})\.(\d{1,3})$/; + const ipv4Match = hostname.match(ipv4Regex); + if (ipv4Match) { + const [, a, b, c, d] = ipv4Match.map(Number); + // Check if any octet is invalid + if (a > 255 || b > 255 || c > 255 || d > 255) { + return false; + } + // Block private ranges + if (a === 10 || (a === 172 && b >= 16 && b <= 31) || (a === 192 && b === 168)) { + return false; + } + // Block link-local (169.254.0.0/16) + if (a === 169 && b === 254) { + return false; + } + } + + // Block link-local IPv6 (fe80::/10) + if (hostname.startsWith('fe80:') || hostname.startsWith('[fe80:')) { + return false; + } + + return true; + } catch { + return false; + } +} + +// Validate file content type via magic bytes +// Returns the verified MIME type (detected from magic bytes, or the claimed type for text-like files without magic bytes); throws if the type cannot be verified or does not match the claimed type +// This prevents Content-Type header spoofing attacks +async function validateFileType(buffer: ArrayBuffer, claimedType: string): Promise<string> { + try { + // Convert ArrayBuffer to Uint8Array for file-type library + const uint8Array = new Uint8Array(buffer); + + // Detect actual file type from magic bytes + const detected = await fileTypeFromBuffer(uint8Array); + + if (!detected) { + // If detection fails, treat as plain text/binary + // Allow common text-based types that don't have magic bytes (JSON, XML, CSV, YAML, JavaScript, etc.) + const textTypes = [ + 'text/', + 'application/json', + 'application/xml', + 'application/javascript', + 'application/yaml', + 'application/x-yaml', + 'text/yaml', + 'text/csv', + 'application/csv', + 'application/octet-stream', + ]; + + if (textTypes.some(t => claimedType.startsWith(t)) || claimedType === 'application/octet-stream') { + return claimedType; + } + // For other types, reject if we can't verify + throw new Error('Unable to verify file type. File may be corrupted or unsupported.'); + } + + // Normalize both types for comparison (remove parameters like charset) + const normalizedClaimed = claimedType.split(';')[0].trim().toLowerCase(); + const normalizedDetected = detected.mime.toLowerCase(); + + // Check if types match + if (normalizedClaimed !== normalizedDetected) { + // Special case: allow jpeg/jpg variations + const isJpegVariant = (type: string) => type === 'image/jpeg' || type === 'image/jpg'; + if (isJpegVariant(normalizedClaimed) && isJpegVariant(normalizedDetected)) { + return detected.mime; + } + + // Types don't match - reject + throw new Error( + `Content-Type mismatch: claimed '${normalizedClaimed}' but detected '${normalizedDetected}'. 
` + + `This may indicate a malicious file or incorrect Content-Type header.` + ); + } + + // Use the detected type (more trustworthy than header) + return detected.mime; + } catch (error) { + // Re-throw validation errors + if (error instanceof Error) { + throw error; + } + throw new Error('File type validation failed'); + } +} + +// Compress image if it exceeds size limit +// Returns compressed image buffer or original if already small enough, along with compression metadata +// IMPORTANT: Compression preserves original format (PNG stays PNG, JPEG stays JPEG, etc.) +async function compressImageIfNeeded( + buffer: ArrayBuffer, + contentType: string, + maxSize: number +): Promise<{ buffer: ArrayBuffer; compressed: boolean; originalSize: number; finalSize: number; contentType: string }> { + const originalSize = buffer.byteLength; + + // Only compress actual images (not SVGs or other formats that won't benefit) + if (!contentType.match(/^image\/(jpeg|jpg|png|webp)$/i)) { + return { buffer, compressed: false, originalSize, finalSize: originalSize, contentType }; + } + + // If already under limit, return as-is + if (buffer.byteLength <= maxSize) { + return { buffer, compressed: false, originalSize, finalSize: originalSize, contentType }; + } + + // Use sharp library for server-side image processing + try { + const sharp = (await import('sharp')).default; + + // Determine output format and compression options based on original type + const isJpeg = contentType.match(/^image\/(jpeg|jpg)$/i); + const isPng = contentType.match(/^image\/png$/i); + const isWebp = contentType.match(/^image\/webp$/i); + + let compressed: Buffer; + let quality = 80; + + // Compress with format-specific settings + if (isJpeg) { + // JPEG: use quality compression + compressed = await sharp(Buffer.from(buffer)) + .jpeg({ quality, mozjpeg: true }) + .toBuffer(); + + // Reduce quality iteratively if still too large + while (compressed.byteLength > maxSize && quality > 20) { + quality -= 10; + compressed = await sharp(Buffer.from(buffer)) + .jpeg({ quality, mozjpeg: true }) + .toBuffer(); + } + } else if (isPng) { + // PNG: use compression level (lossless) and quality (lossy palette reduction) + compressed = await sharp(Buffer.from(buffer)) + .png({ quality, compressionLevel: 9, palette: true }) + .toBuffer(); + + // Reduce quality iteratively if still too large + while (compressed.byteLength > maxSize && quality > 20) { + quality -= 10; + compressed = await sharp(Buffer.from(buffer)) + .png({ quality, compressionLevel: 9, palette: true }) + .toBuffer(); + } + } else if (isWebp) { + // WebP: use quality compression + compressed = await sharp(Buffer.from(buffer)) + .webp({ quality }) + .toBuffer(); + + // Reduce quality iteratively if still too large + while (compressed.byteLength > maxSize && quality > 20) { + quality -= 10; + compressed = await sharp(Buffer.from(buffer)) + .webp({ quality }) + .toBuffer(); + } + } else { + // Fallback: shouldn't reach here due to earlier check + throw new Error(`Unsupported image format: ${contentType}`); + } + + // If still too large after quality reduction, resize dimensions + if (compressed.byteLength > maxSize) { + const metadata = await sharp(Buffer.from(buffer)).metadata(); + const width = metadata.width || 1920; + const height = metadata.height || 1080; + + // Reduce by 25% iteratively + let scale = 0.75; + while (compressed.byteLength > maxSize && scale > 0.25) { + const sharpInstance = sharp(Buffer.from(buffer)) + .resize(Math.floor(width * scale), Math.floor(height * scale), { + 
fit: 'inside', + withoutEnlargement: true, + }); + + // Apply format-specific compression after resize + if (isJpeg) { + compressed = await sharpInstance.jpeg({ quality: 70, mozjpeg: true }).toBuffer(); + } else if (isPng) { + compressed = await sharpInstance.png({ quality: 70, compressionLevel: 9, palette: true }).toBuffer(); + } else if (isWebp) { + compressed = await sharpInstance.webp({ quality: 70 }).toBuffer(); + } + + scale -= 0.1; + } + } + + const finalSize = compressed.byteLength; + console.log( + `Compressed ${contentType} image: ${originalSize} bytes -> ${finalSize} bytes (${Math.round((finalSize / originalSize) * 100)}%)` + ); + + // Convert Node.js Buffer to ArrayBuffer by creating a new ArrayBuffer and copying data + const arrayBuffer = new ArrayBuffer(finalSize); + const view = new Uint8Array(arrayBuffer); + view.set(compressed); + // Return with original contentType preserved + return { buffer: arrayBuffer, compressed: true, originalSize, finalSize, contentType }; + } catch (error) { + console.error('Failed to compress image:', error); + // If compression fails, throw error rather than uploading oversized file + throw new Error('Image too large and compression failed'); + } +} + +// Helper function to compress and validate file buffer +// Handles both images (with compression) and non-images (size validation only) +async function compressAndValidate( + buffer: ArrayBuffer, + contentType: string +): Promise<{ buffer: ArrayBuffer; contentType: string; compressionInfo: { compressed: boolean; originalSize: number; finalSize: number } }> { + const maxSize = getMaxFileSize(contentType); + + // For images, compress if needed instead of rejecting + if (isImageFile(contentType)) { + try { + const result = await compressImageIfNeeded(buffer, contentType, IMAGE_COMPRESSION_TARGET); + return { + buffer: result.buffer, + contentType: result.contentType, + compressionInfo: { + compressed: result.compressed, + originalSize: result.originalSize, + finalSize: result.finalSize, + }, + }; + } catch (error) { + console.error('Image compression failed:', error); + throw new Error(`Image too large and could not be compressed. Please reduce image size and try again.`); + } + } else { + // For non-images, enforce strict size limit + if (buffer.byteLength > maxSize) { + throw new Error(`File too large. 
Maximum size is ${formatSizeLimit(contentType)}`); + } + // No compression needed for non-images + const compressionInfo = { + compressed: false, + originalSize: buffer.byteLength, + finalSize: buffer.byteLength, + }; + return { buffer, contentType, compressionInfo }; + } +} + +// Helper function to upload file to workspace with retry logic +// Handles 202 Accepted responses (content service starting) with retries +async function uploadFileToWorkspace( + buffer: ArrayBuffer, + filename: string, + contentType: string, + headers: HeadersInit, + name: string, + sessionName: string +): Promise { + const maxRetries = 3; + const retryDelay = 2000; // 2 seconds + + for (let retries = 0; retries < maxRetries; retries++) { + const resp = await fetch( + `${BACKEND_URL}/projects/${encodeURIComponent(name)}/agentic-sessions/${encodeURIComponent(sessionName)}/workspace/file-uploads/${encodeURIComponent(filename)}`, + { + method: 'PUT', + headers: { + ...headers, + 'Content-Type': contentType, + }, + body: buffer, + } + ); + + // If 202 Accepted (content service starting), wait and retry + if (resp.status === 202) { + if (retries < maxRetries - 1) { + await new Promise((resolve) => setTimeout(resolve, retryDelay)); + continue; + } + } + + return resp; + } + + // Should never reach here, but TypeScript needs a return + throw new Error('Upload failed after all retries'); +} + +export async function POST( + request: NextRequest, + { params }: { params: Promise<{ name: string; sessionName: string }> }, +) { + const { name, sessionName } = await params; + const headers = await buildForwardHeadersAsync(request); + + try { + const formData = await request.formData(); + const uploadType = formData.get('type') as string; + + if (uploadType === 'local') { + // Handle local file upload + const file = formData.get('file') as File; + if (!file) { + return new Response(JSON.stringify({ error: 'No file provided' }), { + status: 400, + headers: { 'Content-Type': 'application/json' }, + }); + } + + // Sanitize filename to prevent path traversal attacks + const rawFilename = (formData.get('filename') as string) || file.name; + const filename = sanitizeFilename(rawFilename); + const claimedContentType = file.type || 'application/octet-stream'; + const fileArrayBuffer = await file.arrayBuffer(); + + // Validate file type via magic bytes to prevent malicious file uploads + let validatedContentType: string; + try { + validatedContentType = await validateFileType(fileArrayBuffer, claimedContentType); + } catch (error) { + console.error('File type validation failed:', error); + return new Response( + JSON.stringify({ + error: 'File type validation failed' + }), + { + status: 400, + headers: { 'Content-Type': 'application/json' }, + } + ); + } + + // Compress and validate file size + let fileBuffer: ArrayBuffer; + let finalContentType: string; + let compressionInfo: { compressed: boolean; originalSize: number; finalSize: number }; + + try { + const result = await compressAndValidate(fileArrayBuffer, validatedContentType); + fileBuffer = result.buffer; + finalContentType = result.contentType; + compressionInfo = result.compressionInfo; + } catch (error) { + return new Response( + JSON.stringify({ error: error instanceof Error ? 
error.message : 'File validation failed' }), + { + status: 413, // Payload Too Large + headers: { 'Content-Type': 'application/json' }, + } + ); + } + + // Upload to workspace with retry logic + const resp = await uploadFileToWorkspace(fileBuffer, filename, finalContentType, headers, name, sessionName); + + if (!resp.ok) { + const errorText = await resp.text(); + console.error('Upload failed:', errorText); + return new Response(JSON.stringify({ error: 'Failed to upload file' }), { + status: resp.status, + headers: { 'Content-Type': 'application/json' }, + }); + } + + return new Response( + JSON.stringify({ + success: true, + filename, + compressed: compressionInfo.compressed, + originalSize: compressionInfo.originalSize, + finalSize: compressionInfo.finalSize, + }), + { + status: 200, + headers: { 'Content-Type': 'application/json' }, + } + ); + } else if (uploadType === 'url') { + // Handle URL-based file upload + const fileUrl = formData.get('url') as string; + const rawFilename = formData.get('filename') as string; + + if (!fileUrl || !rawFilename) { + return new Response(JSON.stringify({ error: 'URL and filename are required' }), { + status: 400, + headers: { 'Content-Type': 'application/json' }, + }); + } + + // Sanitize filename to prevent path traversal attacks + const filename = sanitizeFilename(rawFilename); + + // Validate URL to prevent SSRF attacks + if (!isValidUrl(fileUrl)) { + return new Response( + JSON.stringify({ + error: 'Invalid URL: only http/https protocols are allowed and private IPs are blocked' + }), + { + status: 400, + headers: { 'Content-Type': 'application/json' }, + } + ); + } + + // Download the file from URL + const fileResp = await fetch(fileUrl); + if (!fileResp.ok) { + return new Response(JSON.stringify({ error: 'Failed to download file from URL' }), { + status: 400, + headers: { 'Content-Type': 'application/json' }, + }); + } + + const claimedContentType = fileResp.headers.get('content-type') || 'application/octet-stream'; + const fileArrayBuffer = await fileResp.arrayBuffer(); + + // Validate file type via magic bytes to prevent Content-Type spoofing + let validatedContentType: string; + try { + validatedContentType = await validateFileType(fileArrayBuffer, claimedContentType); + } catch (error) { + console.error('File type validation failed:', error); + return new Response( + JSON.stringify({ + error: 'File type validation failed' + }), + { + status: 400, + headers: { 'Content-Type': 'application/json' }, + } + ); + } + + // Compress and validate file size + let fileBuffer: ArrayBuffer; + let finalContentType: string; + let compressionInfo: { compressed: boolean; originalSize: number; finalSize: number }; + + try { + const result = await compressAndValidate(fileArrayBuffer, validatedContentType); + fileBuffer = result.buffer; + finalContentType = result.contentType; + compressionInfo = result.compressionInfo; + } catch (error) { + return new Response( + JSON.stringify({ error: error instanceof Error ? 
error.message : 'File validation failed' }), + { + status: 413, // Payload Too Large + headers: { 'Content-Type': 'application/json' }, + } + ); + } + + // Upload to workspace with retry logic + const resp = await uploadFileToWorkspace(fileBuffer, filename, finalContentType, headers, name, sessionName); + + if (!resp.ok) { + const errorText = await resp.text(); + console.error('Upload failed:', errorText); + return new Response(JSON.stringify({ error: 'Failed to upload file' }), { + status: resp.status, + headers: { 'Content-Type': 'application/json' }, + }); + } + + return new Response( + JSON.stringify({ + success: true, + filename, + compressed: compressionInfo.compressed, + originalSize: compressionInfo.originalSize, + finalSize: compressionInfo.finalSize, + }), + { + status: 200, + headers: { 'Content-Type': 'application/json' }, + } + ); + } else { + return new Response(JSON.stringify({ error: 'Invalid upload type' }), { + status: 400, + headers: { 'Content-Type': 'application/json' }, + }); + } + } catch (error) { + console.error('File upload error:', error); + return new Response(JSON.stringify({ error: 'Internal server error' }), { + status: 500, + headers: { 'Content-Type': 'application/json' }, + }); + } +} diff --git a/components/frontend/src/app/projects/[name]/sessions/[sessionName]/components/accordions/repositories-accordion.tsx b/components/frontend/src/app/projects/[name]/sessions/[sessionName]/components/accordions/repositories-accordion.tsx index 192933d1e..5724a08a7 100644 --- a/components/frontend/src/app/projects/[name]/sessions/[sessionName]/components/accordions/repositories-accordion.tsx +++ b/components/frontend/src/app/projects/[name]/sessions/[sessionName]/components/accordions/repositories-accordion.tsx @@ -1,7 +1,7 @@ "use client"; import { useState } from "react"; -import { GitBranch, X, Link, Loader2 } from "lucide-react"; +import { GitBranch, X, Link, Loader2, CloudUpload } from "lucide-react"; import { AccordionItem, AccordionTrigger, AccordionContent } from "@/components/ui/accordion"; import { Badge } from "@/components/ui/badge"; import { Button } from "@/components/ui/button"; @@ -11,20 +11,33 @@ type Repository = { branch?: string; }; +type UploadedFile = { + name: string; + path: string; + size?: number; +}; + type RepositoriesAccordionProps = { repositories?: Repository[]; + uploadedFiles?: UploadedFile[]; onAddRepository: () => void; onRemoveRepository: (repoName: string) => void; + onRemoveFile?: (fileName: string) => void; }; export function RepositoriesAccordion({ repositories = [], + uploadedFiles = [], onAddRepository, onRemoveRepository, + onRemoveFile, }: RepositoriesAccordionProps) { const [removingRepo, setRemovingRepo] = useState(null); + const [removingFile, setRemovingFile] = useState(null); + + const totalContextItems = repositories.length + uploadedFiles.length; - const handleRemove = async (repoName: string) => { + const handleRemoveRepo = async (repoName: string) => { if (confirm(`Remove repository ${repoName}?`)) { setRemovingRepo(repoName); try { @@ -35,15 +48,27 @@ export function RepositoriesAccordion({ } }; + const handleRemoveFile = async (fileName: string) => { + if (!onRemoveFile) return; + if (confirm(`Remove file ${fileName}?`)) { + setRemovingFile(fileName); + try { + await onRemoveFile(fileName); + } finally { + setRemovingFile(null); + } + } + }; + return (
Context - {repositories.length > 0 && ( + {totalContextItems > 0 && ( - {repositories.length} + {totalContextItems} )}
@@ -54,8 +79,8 @@ export function RepositoriesAccordion({ Add additional context to improve AI responses.

- {/* Repository List */} - {repositories.length === 0 ? ( + {/* Context Items List (Repos + Uploaded Files) */} + {totalContextItems === 0 ? (
@@ -68,22 +93,23 @@ export function RepositoriesAccordion({
) : (
+ {/* Repositories */} {repositories.map((repo, idx) => { const repoName = repo.url.split('/').pop()?.replace('.git', '') || `repo-${idx}`; const isRemoving = removingRepo === repoName; - + return ( -
+
{repoName}
{repo.url}
-
); })} + + {/* Uploaded Files */} + {uploadedFiles.map((file, idx) => { + const isRemoving = removingFile === file.name; + const fileSizeKB = file.size ? (file.size / 1024).toFixed(1) : null; + + return ( +
+ +
+
{file.name}
+ {fileSizeKB && ( +
{fileSizeKB} KB
+ )} +
+ {onRemoveFile && ( + + )} +
+ ); + })} +
+ + {onUploadFile && ( + <> + +
+ +

+ Upload files directly to your workspace for use as context +

+ +
+ + )}
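The URL branch of the upload route earlier in this diff leans on `sanitizeFilename` and `isValidUrl`, which are defined elsewhere in the route file and are not visible in this hunk. A minimal sketch of what such guards might look like, assuming Node's `URL` and `net` APIs; the exact checks below are illustrative, not the PR's actual implementation:

```typescript
import { isIP } from "net";

// Hypothetical sketch: keep only the final path segment and a conservative
// character set so the stored name cannot traverse out of file-uploads/.
export function sanitizeFilename(raw: string): string {
  const base = raw.split(/[\\/]/).pop() ?? "upload";
  return base.replace(/[^a-zA-Z0-9._-]/g, "_").slice(0, 255) || "upload";
}

// Hypothetical sketch: allow only http/https and reject loopback, link-local,
// and RFC 1918 hosts to reduce SSRF risk. A production check would also
// resolve DNS and validate the resulting addresses.
export function isValidUrl(raw: string): boolean {
  let url: URL;
  try {
    url = new URL(raw);
  } catch {
    return false;
  }
  if (url.protocol !== "http:" && url.protocol !== "https:") return false;

  const host = url.hostname;
  if (host === "localhost" || host.endsWith(".local")) return false;

  if (isIP(host) === 4) {
    const isPrivate =
      /^127\./.test(host) ||
      /^10\./.test(host) ||
      /^192\.168\./.test(host) ||
      /^169\.254\./.test(host) ||
      /^172\.(1[6-9]|2\d|3[01])\./.test(host);
    if (isPrivate) return false;
  }
  return true;
}
```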
diff --git a/components/frontend/src/app/projects/[name]/sessions/[sessionName]/components/modals/upload-file-modal.tsx b/components/frontend/src/app/projects/[name]/sessions/[sessionName]/components/modals/upload-file-modal.tsx new file mode 100644 index 000000000..6e6ee9218 --- /dev/null +++ b/components/frontend/src/app/projects/[name]/sessions/[sessionName]/components/modals/upload-file-modal.tsx @@ -0,0 +1,275 @@ +"use client"; + +import { useState, useRef } from "react"; +import { Loader2, Link, FileUp } from "lucide-react"; +import { + Dialog, + DialogContent, + DialogHeader, + DialogTitle, + DialogDescription, + DialogFooter, +} from "@/components/ui/dialog"; +import { Button } from "@/components/ui/button"; +import { Input } from "@/components/ui/input"; +import { Label } from "@/components/ui/label"; +import { Tabs, TabsContent, TabsList, TabsTrigger } from "@/components/ui/tabs"; +import { Alert, AlertDescription } from "@/components/ui/alert"; + +// Maximum file sizes based on type +// Documents (text files): 700KB limit - no base64 encoding overhead +// Images: 3MB upload limit - realistic compression to 350KB target +const MAX_DOCUMENT_SIZE = 700 * 1024; // 700KB for documents +const MAX_IMAGE_SIZE = 3 * 1024 * 1024; // 3MB for images (server will compress to 350KB) + +// Determine if a file is an image based on MIME type +const isImageFile = (fileType: string): boolean => { + return fileType.startsWith('image/'); +}; + +// Get the appropriate max file size based on file type +const getMaxFileSize = (fileType: string): number => { + return isImageFile(fileType) ? MAX_IMAGE_SIZE : MAX_DOCUMENT_SIZE; +}; + +const formatFileSize = (bytes: number): string => { + if (bytes < 1024) return `${bytes} B`; + if (bytes < 1024 * 1024) return `${(bytes / 1024).toFixed(2)} KB`; + if (bytes < 1024 * 1024 * 1024) return `${(bytes / (1024 * 1024)).toFixed(2)} MB`; + return `${(bytes / (1024 * 1024 * 1024)).toFixed(2)} GB`; +}; + +type UploadFileModalProps = { + open: boolean; + onOpenChange: (open: boolean) => void; + onUploadFile: (source: { + type: "local" | "url"; + file?: File; + url?: string; + filename?: string; + }) => Promise; + isLoading?: boolean; +}; + +export function UploadFileModal({ + open, + onOpenChange, + onUploadFile, + isLoading = false, +}: UploadFileModalProps) { + const [activeTab, setActiveTab] = useState<"local" | "url">("local"); + const [fileUrl, setFileUrl] = useState(""); + const [selectedFile, setSelectedFile] = useState(null); + const [isStartingService, setIsStartingService] = useState(false); + const [fileSizeError, setFileSizeError] = useState(null); + const [isValidating, setIsValidating] = useState(false); + const fileInputRef = useRef(null); + + const handleSubmit = async () => { + setIsStartingService(false); + + if (activeTab === "local") { + if (!selectedFile) return; + try { + await onUploadFile({ type: "local", file: selectedFile }); + } catch (error) { + // Check if error is about content service starting + if (error instanceof Error && error.message.includes("starting")) { + setIsStartingService(true); + } + throw error; + } + } else { + if (!fileUrl.trim()) return; + + // Extract filename from URL + const urlParts = fileUrl.split("/"); + const filename = urlParts[urlParts.length - 1] || "downloaded-file"; + + try { + await onUploadFile({ type: "url", url: fileUrl.trim(), filename }); + } catch (error) { + // Check if error is about content service starting + if (error instanceof Error && error.message.includes("starting")) { + 
setIsStartingService(true); + } + throw error; + } + } + + // Reset form on success + setFileUrl(""); + setSelectedFile(null); + setIsStartingService(false); + setFileSizeError(null); + if (fileInputRef.current) { + fileInputRef.current.value = ""; + } + }; + + const handleCancel = () => { + setFileUrl(""); + setSelectedFile(null); + setIsStartingService(false); + setFileSizeError(null); + setIsValidating(false); + setActiveTab("local"); + if (fileInputRef.current) { + fileInputRef.current.value = ""; + } + onOpenChange(false); + }; + + const handleFileSelect = async (e: React.ChangeEvent) => { + const file = e.target.files?.[0]; + if (!file) return; + + // Show loading state while validating + setIsValidating(true); + setFileSizeError(null); + setSelectedFile(null); + + // Use setTimeout to allow UI to update with loading state + setTimeout(() => { + const fileType = file.type || 'application/octet-stream'; + const maxSize = getMaxFileSize(fileType); + const fileTypeLabel = isImageFile(fileType) ? 'images' : 'documents'; + + // Check file size based on type + if (file.size > maxSize) { + setFileSizeError( + `File size (${formatFileSize(file.size)}) exceeds maximum allowed size of ${formatFileSize(maxSize)} for ${fileTypeLabel}` + ); + setSelectedFile(null); + if (fileInputRef.current) { + fileInputRef.current.value = ""; + } + } else { + setFileSizeError(null); + setSelectedFile(file); + } + setIsValidating(false); + }, 0); + }; + + const isSubmitDisabled = () => { + if (isLoading || isValidating) return true; + if (activeTab === "local") return !selectedFile; + if (activeTab === "url") return !fileUrl.trim(); + return true; + }; + + return ( + + + + Upload File + + Upload files to your workspace from your local machine or a URL. Files will be available in + the file-uploads folder. Maximum file size: {formatFileSize(MAX_IMAGE_SIZE)} for images, {formatFileSize(MAX_DOCUMENT_SIZE)} for documents. + + + + {fileSizeError && ( + + {fileSizeError} + + )} + + {isValidating && ( + + + + Validating file... + + + )} + + {isStartingService && ( + + + + Content service is starting. This may take a few seconds. Your upload will automatically retry. + + + )} + + { + setActiveTab(v as "local" | "url"); + setFileSizeError(null); // Clear error when switching tabs + }} + className="w-full" + > + + + + Local File + + + + From URL + + + + +
+ + + {selectedFile && !isValidating && ( +

+ Selected: {selectedFile.name} ({(selectedFile.size / 1024).toFixed(1)} KB) +

+ )} +
+
+ + +
+ + setFileUrl(e.target.value)} + disabled={isLoading || isValidating} + /> +

+ The file will be downloaded and uploaded to your workspace +

+
+
+
+ + + + + +
+
+ ); +} diff --git a/components/frontend/src/app/projects/[name]/sessions/[sessionName]/lib/types.ts b/components/frontend/src/app/projects/[name]/sessions/[sessionName]/lib/types.ts index 6e6c7a07e..377e18773 100644 --- a/components/frontend/src/app/projects/[name]/sessions/[sessionName]/lib/types.ts +++ b/components/frontend/src/app/projects/[name]/sessions/[sessionName]/lib/types.ts @@ -32,7 +32,7 @@ export type GitStatus = { }; export type DirectoryOption = { - type: 'artifacts' | 'repo' | 'workflow'; + type: 'artifacts' | 'repo' | 'workflow' | 'file-uploads'; name: string; path: string; }; diff --git a/components/frontend/src/app/projects/[name]/sessions/[sessionName]/page.tsx b/components/frontend/src/app/projects/[name]/sessions/[sessionName]/page.tsx index 7a681ca1f..2091b249e 100644 --- a/components/frontend/src/app/projects/[name]/sessions/[sessionName]/page.tsx +++ b/components/frontend/src/app/projects/[name]/sessions/[sessionName]/page.tsx @@ -65,6 +65,7 @@ import { getPhaseColor } from "@/utils/session-helpers"; // Extracted components import { AddContextModal } from "./components/modals/add-context-modal"; +import { UploadFileModal } from "./components/modals/upload-file-modal"; import { CustomWorkflowDialog } from "./components/modals/custom-workflow-dialog"; import { ManageRemoteDialog } from "./components/modals/manage-remote-dialog"; import { CommitChangesDialog } from "./components/modals/commit-changes-dialog"; @@ -117,6 +118,7 @@ export default function ProjectSessionDetailPage({ const [backHref, setBackHref] = useState(null); const [openAccordionItems, setOpenAccordionItems] = useState(["workflows"]); const [contextModalOpen, setContextModalOpen] = useState(false); + const [uploadModalOpen, setUploadModalOpen] = useState(false); const [repoChanging, setRepoChanging] = useState(false); const [firstMessageLoaded, setFirstMessageLoaded] = useState(false); const [mobileMenuOpen, setMobileMenuOpen] = useState(false); @@ -255,6 +257,81 @@ export default function ProjectSessionDetailPage({ }, }); + // File upload mutation + const uploadFileMutation = useMutation({ + mutationFn: async (source: { + type: "local" | "url"; + file?: File; + url?: string; + filename?: string; + }) => { + const formData = new FormData(); + formData.append("type", source.type); + + if (source.type === "local" && source.file) { + formData.append("file", source.file); + formData.append("filename", source.file.name); + } else if (source.type === "url" && source.url && source.filename) { + formData.append("url", source.url); + formData.append("filename", source.filename); + } + + const response = await fetch( + `/api/projects/${projectName}/agentic-sessions/${sessionName}/workspace/upload`, + { + method: "POST", + body: formData, + }, + ); + + if (!response.ok) { + const error = await response.json(); + throw new Error(error.error || "Upload failed"); + } + + return response.json(); + }, + onSuccess: async (data) => { + successToast(`File "${data.filename}" uploaded successfully`); + // Refresh workspace to show uploaded file + await refetchFileUploadsList(); + await refetchDirectoryFiles(); + await refetchArtifactsFiles(); + setUploadModalOpen(false); + }, + onError: (error: Error) => { + errorToast(error.message || "Failed to upload file"); + }, + }); + + // File removal mutation + const removeFileMutation = useMutation({ + mutationFn: async (fileName: string) => { + const response = await fetch( + `/api/projects/${projectName}/agentic-sessions/${sessionName}/workspace/file-uploads/${fileName}`, + { 
+ method: "DELETE", + }, + ); + + if (!response.ok) { + const error = await response.json(); + throw new Error(error.error || "Failed to remove file"); + } + + return response.json(); + }, + onSuccess: async () => { + successToast("File removed successfully"); + // Refresh file lists + await refetchFileUploadsList(); + await refetchDirectoryFiles(); + }, + onError: (error: Error) => { + errorToast(error.message || "Failed to remove file"); + }, + }); + // Fetch OOTB workflows const { data: ootbWorkflows = [] } = useOOTBWorkflows(projectName); @@ -324,6 +401,15 @@ export default function ProjectSessionDetailPage({ { enabled: openAccordionItems.includes("artifacts") }, ); + // File uploads list (for Context accordion) + const { data: fileUploadsList = [], refetch: refetchFileUploadsList } = + useWorkspaceList( + projectName, + sessionName, + "file-uploads", + { enabled: openAccordionItems.includes("context") }, + ); + // Track if we've already initialized from session const initializedFromSessionRef = useRef(false); @@ -380,6 +466,7 @@ export default function ProjectSessionDetailPage({ const directoryOptions = useMemo(() => { const options: DirectoryOption[] = [ { type: "artifacts", name: "Shared Artifacts", path: "artifacts" }, + { type: "file-uploads", name: "File Uploads", path: "file-uploads" }, ]; if (session?.spec?.repos) { @@ -761,10 +848,18 @@ export default function ProjectSessionDetailPage({ ({ + name: f.name, + path: f.path, + size: f.size, + }))} onAddRepository={() => setContextModalOpen(true)} onRemoveRepository={(repoName) => removeRepoMutation.mutate(repoName) } + onRemoveFile={(fileName) => + removeFileMutation.mutate(fileName) + } /> )} + {opt.type === "file-uploads" && ( + + )} {opt.type === "repo" && ( )} @@ -935,14 +1033,26 @@ export default function ProjectSessionDetailPage({
) : ( - +
+ + +
)} @@ -1240,9 +1350,19 @@ export default function ProjectSessionDetailPage({ await addRepoMutation.mutateAsync({ url, branch }); setContextModalOpen(false); }} + onUploadFile={() => setUploadModalOpen(true)} isLoading={addRepoMutation.isPending} /> + { + await uploadFileMutation.mutateAsync(source); + }} + isLoading={uploadFileMutation.isPending} + /> + void; onSelect: (node: FileTreeNode) => void; onToggle: (node: FileTreeNode) => void; + onUpload?: () => void; k8sResources?: { pvcName?: string; pvcExists?: boolean; @@ -26,7 +27,7 @@ export type WorkspaceTabProps = { onRetrySpawn?: () => void; }; -const WorkspaceTab: React.FC = ({ session, wsLoading, wsUnavailable, wsTree, wsSelectedPath, onRefresh, onSelect, onToggle, k8sResources, contentPodError, onRetrySpawn }) => { +const WorkspaceTab: React.FC = ({ session, wsLoading, wsUnavailable, wsTree, wsSelectedPath, onRefresh, onSelect, onToggle, onUpload, k8sResources, contentPodError, onRetrySpawn }) => { if (wsLoading) { return (
@@ -90,9 +91,17 @@ const WorkspaceTab: React.FC = ({ session, wsLoading, wsUnava

{wsTree.length} items

)}
- +
+ {onUpload && ( + + )} + +
{wsTree.length === 0 ? ( diff --git a/components/operator/internal/handlers/sessions.go b/components/operator/internal/handlers/sessions.go index 1059c807c..af15eb656 100644 --- a/components/operator/internal/handlers/sessions.go +++ b/components/operator/internal/handlers/sessions.go @@ -292,12 +292,15 @@ func handleAgenticSessionEvent(obj *unstructured.Unstructured) error { } // === TEMP CONTENT POD RECONCILIATION === - // Manage temporary content pods for workspace access on stopped sessions + // Manage temporary content pods for workspace access when runner is not active tempContentRequested := annotations != nil && annotations[tempContentRequestedAnnotation] == "true" tempPodName := fmt.Sprintf("temp-content-%s", name) - // Only manage temp pods for stopped/completed/failed sessions + // Manage temp pods for: + // - Pending sessions (for pre-upload before runner starts) + // - Stopped/Completed/Failed sessions (for post-session workspace access) + // Do NOT create temp pods for Running/Creating sessions (they have ambient-content service) if phase == "Stopped" || phase == "Completed" || phase == "Failed" { if tempContentRequested { // User wants workspace access - ensure temp pod exists @@ -330,6 +333,33 @@ func handleAgenticSessionEvent(obj *unstructured.Unstructured) error { return nil } + // For Pending sessions: allow temp pod creation for file uploads, but don't return early + // This ensures Job creation can proceed when user starts the session + if phase == "Pending" { + if tempContentRequested { + // User wants to upload files - ensure temp pod exists + if err := reconcileTempContentPodWithPatch(sessionNamespace, name, tempPodName, currentObj, statusPatch); err != nil { + log.Printf("[TempPod] Failed to reconcile temp pod for Pending session: %v", err) + } + // Apply status changes but CONTINUE to allow Job creation logic below + if statusPatch.HasChanges() { + if err := statusPatch.Apply(); err != nil { + log.Printf("[TempPod] Warning: failed to apply status patch: %v", err) + } + } + // Do NOT return - continue to Job creation logic + } else { + // Temp pod not requested - delete if it exists + _, err := config.K8sClient.CoreV1().Pods(sessionNamespace).Get(context.TODO(), tempPodName, v1.GetOptions{}) + if err == nil { + log.Printf("[TempPod] Deleting temp pod from Pending session: %s", tempPodName) + if err := config.K8sClient.CoreV1().Pods(sessionNamespace).Delete(context.TODO(), tempPodName, v1.DeleteOptions{}); err != nil && !errors.IsNotFound(err) { + log.Printf("[TempPod] Failed to delete temp pod: %v", err) + } + } + } + } + // === CONTINUE WITH PHASE-BASED RECONCILIATION === // Early exit: If desired-phase is "Stopped", do not recreate jobs or reconcile @@ -772,6 +802,44 @@ func handleAgenticSessionEvent(obj *unstructured.Unstructured) error { log.Printf("Langfuse disabled, skipping secret copy") } + // CRITICAL: Delete temp content pod before creating Job to avoid PVC mount conflict + // The PVC is ReadWriteOnce, so only one pod can mount it at a time + tempPodName = fmt.Sprintf("temp-content-%s", name) + if _, err := config.K8sClient.CoreV1().Pods(sessionNamespace).Get(context.TODO(), tempPodName, v1.GetOptions{}); err == nil { + log.Printf("[PVCConflict] Deleting temp pod %s before creating Job (ReadWriteOnce PVC)", tempPodName) + + // Force immediate termination with zero grace period + gracePeriod := int64(0) + deleteOptions := v1.DeleteOptions{ + GracePeriodSeconds: &gracePeriod, + } + if err := 
config.K8sClient.CoreV1().Pods(sessionNamespace).Delete(context.TODO(), tempPodName, deleteOptions); err != nil && !errors.IsNotFound(err) { + log.Printf("[PVCConflict] Warning: failed to delete temp pod: %v", err) + } + + // Wait for temp pod to fully terminate to prevent PVC mount conflicts + // This is critical because ReadWriteOnce PVCs cannot be mounted by multiple pods + // With gracePeriod=0, this should complete in 1-3 seconds + log.Printf("[PVCConflict] Waiting for temp pod %s to fully terminate...", tempPodName) + maxWaitSeconds := 10 // Reduced from 30 since we're force-deleting + for i := 0; i < maxWaitSeconds*4; i++ { // Poll 4x per second for faster detection + _, err := config.K8sClient.CoreV1().Pods(sessionNamespace).Get(context.TODO(), tempPodName, v1.GetOptions{}) + if errors.IsNotFound(err) { + elapsed := float64(i) * 0.25 + log.Printf("[PVCConflict] Temp pod fully terminated after %.2f seconds", elapsed) + break + } + if i == (maxWaitSeconds*4)-1 { + log.Printf("[PVCConflict] Warning: temp pod still exists after %d seconds, proceeding anyway", maxWaitSeconds) + } + time.Sleep(250 * time.Millisecond) // Poll every 250ms instead of 1s + } + + // Clear temp pod annotations since we're starting the session + _ = clearAnnotation(sessionNamespace, name, tempContentRequestedAnnotation) + _ = clearAnnotation(sessionNamespace, name, tempContentLastAccessedAnnotation) + } + // Create a Kubernetes Job for this AgenticSession jobName := fmt.Sprintf("%s-job", name) @@ -2152,7 +2220,8 @@ func reconcileTempContentPodWithPatch(sessionNamespace, sessionName, tempPodName }}, }, Spec: corev1.PodSpec{ - RestartPolicy: corev1.RestartPolicyNever, + RestartPolicy: corev1.RestartPolicyNever, + TerminationGracePeriodSeconds: int64Ptr(0), // Enable instant termination Containers: []corev1.Container{{ Name: "content", Image: appConfig.ContentServiceImage, diff --git a/components/runners/claude-code-runner/pyproject.toml b/components/runners/claude-code-runner/pyproject.toml index e52e89e6f..918fd3fe5 100644 --- a/components/runners/claude-code-runner/pyproject.toml +++ b/components/runners/claude-code-runner/pyproject.toml @@ -12,7 +12,7 @@ dependencies = [ "aiohttp>=3.8.0", "pyjwt>=2.8.0", "anthropic[vertex]>=0.68.0", - "claude-agent-sdk>=0.1.4", + "claude-agent-sdk>=0.1.12", "langfuse>=3.0.0", "mcp-atlassian>=0.11.9", ] diff --git a/components/runners/claude-code-runner/wrapper.py b/components/runners/claude-code-runner/wrapper.py index 38f32b675..90943f07a 100644 --- a/components/runners/claude-code-runner/wrapper.py +++ b/components/runners/claude-code-runner/wrapper.py @@ -312,6 +312,12 @@ async def _run_claude_agent_sdk(self, prompt: str): if artifacts_path not in add_dirs: add_dirs.append(artifacts_path) logging.info("Added artifacts directory as additional directory") + + # Add file-uploads directory + file_uploads_path = str(Path(self.context.workspace_path) / "file-uploads") + if file_uploads_path not in add_dirs: + add_dirs.append(file_uploads_path) + logging.info("Added file-uploads directory as additional directory") elif repos_cfg: # Multi-repo mode: Prefer explicit MAIN_REPO_NAME, else use MAIN_REPO_INDEX, else default to 0 main_name = (os.getenv('MAIN_REPO_NAME') or '').strip() @@ -341,6 +347,12 @@ async def _run_claude_agent_sdk(self, prompt: str): if artifacts_path not in add_dirs: add_dirs.append(artifacts_path) logging.info("Added artifacts directory as additional directory") + + # Add file-uploads directory for repos mode too + file_uploads_path = 
str(Path(self.context.workspace_path) / "file-uploads") + if file_uploads_path not in add_dirs: + add_dirs.append(file_uploads_path) + logging.info("Added file-uploads directory as additional directory") else: # No workflow and no repos: start in artifacts directory for ad-hoc work cwd_path = str(Path(self.context.workspace_path) / "artifacts") @@ -1880,6 +1892,38 @@ def _build_workspace_context_prompt(self, repos_cfg, workflow_name, artifacts_pa prompt += f"Working directory: workflows/{workflow_name}/\n" prompt += "This directory contains workflow logic and automation scripts.\n\n" + # File uploads directory - PRIORITIZE THIS for user context + prompt += "## User-Uploaded Files (IMPORTANT)\n" + prompt += "Location: file-uploads/\n" + prompt += "Purpose: User-uploaded context files (screenshots, documents, images, PDFs, specs, designs).\n" + prompt += "ALWAYS check this directory when starting a new task - it often contains critical context.\n" + prompt += "Files here were uploaded by the user via the UI and are available for you to read and reference.\n" + + # List existing files if directory exists + file_uploads_path = Path(self.context.workspace_path) / "file-uploads" + if file_uploads_path.exists() and file_uploads_path.is_dir(): + try: + files = sorted([f.name for f in file_uploads_path.iterdir() if f.is_file()]) + if files: + prompt += f"\nCurrently uploaded files ({len(files)}):\n" + for filename in files: + prompt += f" - {filename}\n" + prompt += "READ THESE FILES if they're relevant to the user's task!\n" + else: + prompt += "\nNo files currently uploaded.\n" + except Exception as e: + logging.warning(f"Failed to list file-uploads directory: {e}") + prompt += "\n(Unable to list uploaded files)\n" + else: + prompt += "\nNo files currently uploaded.\n" + + prompt += "\nCommon use cases:\n" + prompt += " - Screenshots showing UI issues or design mockups\n" + prompt += " - Specification documents and requirements\n" + prompt += " - Reference images or diagrams\n" + prompt += " - Error logs or debug output\n" + prompt += "This directory persists across sessions - check it proactively when unclear about task context.\n\n" + # Artifacts directory prompt += "## Shared Artifacts Directory\n" prompt += f"Location: {artifacts_path}\n"
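Taken together, the frontend pieces above call the new upload route with a multipart POST and receive the compression metadata that the route returns. A minimal standalone sketch of a caller, assuming the same endpoint shape used in page.tsx; the project and session names are placeholders:

```typescript
type UploadResponse = {
  success: boolean;
  filename: string;
  compressed: boolean;
  originalSize: number;
  finalSize: number;
};

// Placeholder identifiers for illustration only.
const project = "my-project";
const session = "my-session";

export async function uploadLocalFile(file: File): Promise<UploadResponse> {
  const formData = new FormData();
  formData.append("type", "local");
  formData.append("file", file);
  formData.append("filename", file.name);

  const resp = await fetch(
    `/api/projects/${project}/agentic-sessions/${session}/workspace/upload`,
    { method: "POST", body: formData },
  );
  if (!resp.ok) {
    const body = (await resp.json().catch(() => ({}))) as { error?: string };
    throw new Error(body.error || `Upload failed with status ${resp.status}`);
  }
  return (await resp.json()) as UploadResponse;
}
```

On success the route reports the stored filename plus whether the file was compressed and its original/final sizes, matching the response built in the handler earlier in this diff.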