From 926ac8c01a5dc2acb0ec436704aac25b588cbd82 Mon Sep 17 00:00:00 2001 From: Christopher Petito Date: Thu, 5 Feb 2026 14:58:04 +0100 Subject: [PATCH] improve @ attachment speed so we don't hang in large or deeply nested dirs Signed-off-by: Christopher Petito --- pkg/fsx/fs.go | 124 ++++++ pkg/fsx/walk_test.go | 374 ++++++++++++++++++ pkg/tui/components/completion/completion.go | 99 ++++- .../components/completion/completion_test.go | 249 +++++++++++- .../editor/completions/completion.go | 14 + pkg/tui/components/editor/completions/file.go | 153 ++++++- pkg/tui/components/editor/editor.go | 180 ++++++++- 7 files changed, 1168 insertions(+), 25 deletions(-) create mode 100644 pkg/fsx/walk_test.go diff --git a/pkg/fsx/fs.go b/pkg/fsx/fs.go index bc4303c59..34d5d20bc 100644 --- a/pkg/fsx/fs.go +++ b/pkg/fsx/fs.go @@ -1,8 +1,11 @@ package fsx import ( + "context" + "io/fs" "os" "path/filepath" + "strings" ) type TreeNode struct { @@ -91,3 +94,124 @@ func CollectFilesFromTree(node *TreeNode, basePath string, files *[]string) { } } } + +// WalkFilesOptions configures the bounded file walker. +type WalkFilesOptions struct { + // MaxFiles is the maximum number of files to return (0 = no limit, but defaults to DefaultMaxFiles). + MaxFiles int + // MaxDepth is the maximum directory depth to descend (0 = unlimited). + // Depth 1 means only root directory, depth 2 means root + immediate children, etc. + MaxDepth int + // ShouldIgnore is an optional function to filter out paths (return true to skip). + ShouldIgnore func(path string) bool +} + +// DefaultMaxFiles is the default cap for file walking to prevent runaway scans. +const DefaultMaxFiles = 20000 + +// heavyDirs are directory names that are skipped by default even outside VCS repos. 
+var heavyDirs = map[string]bool{ + ".git": true, + "node_modules": true, + "vendor": true, + "__pycache__": true, + ".venv": true, + "venv": true, + ".tox": true, + "dist": true, + "build": true, + ".cache": true, + ".gradle": true, + ".idea": true, + ".vscode": true, +} + +// WalkFiles walks the directory tree starting at root and returns a list of file paths. +// It is bounded by MaxFiles (defaults to DefaultMaxFiles) and skips hidden directories +// and known heavy directories like node_modules, vendor, etc. +// The walk respects context cancellation. +func WalkFiles(ctx context.Context, root string, opts WalkFilesOptions) ([]string, error) { + maxFiles := opts.MaxFiles + if maxFiles <= 0 { + maxFiles = DefaultMaxFiles + } + + // Clean root path for consistent depth calculation + root = filepath.Clean(root) + rootDepth := strings.Count(root, string(filepath.Separator)) + + var files []string + + err := filepath.WalkDir(root, func(path string, d fs.DirEntry, err error) error { + // Check context cancellation + if ctx.Err() != nil { + return ctx.Err() + } + + // If we hit the max, stop walking + if len(files) >= maxFiles { + return fs.SkipAll + } + + if err != nil { + // For root directory errors (like ENOENT), return the error + if path == root { + return err + } + // Skip subdirectories we can't read + if d != nil && d.IsDir() { + return fs.SkipDir + } + return nil + } + + // Check depth limit + if opts.MaxDepth > 0 { + pathDepth := strings.Count(filepath.Clean(path), string(filepath.Separator)) - rootDepth + if d.IsDir() && pathDepth >= opts.MaxDepth { + return fs.SkipDir + } + } + + name := d.Name() + + // Skip hidden files/directories (starting with .) + if strings.HasPrefix(name, ".") && name != "." 
{ + if d.IsDir() { + return fs.SkipDir + } + return nil + } + + // Skip known heavy directories + if d.IsDir() && heavyDirs[name] { + return fs.SkipDir + } + + // Apply custom ignore function + if opts.ShouldIgnore != nil && opts.ShouldIgnore(path) { + if d.IsDir() { + return fs.SkipDir + } + return nil + } + + // Only collect files, not directories + if !d.IsDir() { + // Store path relative to root for cleaner display + relPath, relErr := filepath.Rel(root, path) + if relErr != nil { + relPath = path + } + files = append(files, relPath) + } + + return nil + }) + + if err != nil && err != context.Canceled && err != context.DeadlineExceeded { + return files, err + } + + return files, nil +} diff --git a/pkg/fsx/walk_test.go b/pkg/fsx/walk_test.go new file mode 100644 index 000000000..d598c5f0d --- /dev/null +++ b/pkg/fsx/walk_test.go @@ -0,0 +1,374 @@ +package fsx + +import ( + "context" + "os" + "path/filepath" + "strings" + "testing" + "time" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestWalkFiles_Basic(t *testing.T) { + t.Parallel() + + tmpDir := t.TempDir() + + // Create structure: + // tmpDir/ + // src/ + // main.go + // util.go + // lib/ + // helper.go + + dirs := []string{ + filepath.Join(tmpDir, "src"), + filepath.Join(tmpDir, "lib"), + } + for _, d := range dirs { + require.NoError(t, os.MkdirAll(d, 0o755)) + } + + files := map[string]string{ + filepath.Join(tmpDir, "src", "main.go"): "package main", + filepath.Join(tmpDir, "src", "util.go"): "package main", + filepath.Join(tmpDir, "lib", "helper.go"): "package lib", + } + for path, content := range files { + require.NoError(t, os.WriteFile(path, []byte(content), 0o644)) + } + + t.Run("collects all files", func(t *testing.T) { + t.Parallel() + + got, err := WalkFiles(t.Context(), tmpDir, WalkFilesOptions{}) + require.NoError(t, err) + assert.Len(t, got, 3, "should find all 3 files") + }) + + t.Run("returns relative paths", func(t *testing.T) { + t.Parallel() 
+ + got, err := WalkFiles(t.Context(), tmpDir, WalkFilesOptions{}) + require.NoError(t, err) + + for _, f := range got { + assert.False(t, filepath.IsAbs(f), "path should be relative: %s", f) + assert.False(t, strings.HasPrefix(f, tmpDir), "path should not start with tmpDir") + } + }) +} + +func TestWalkFiles_MaxFiles(t *testing.T) { + t.Parallel() + + tmpDir := t.TempDir() + + // Create 10 files + for i := range 10 { + f := filepath.Join(tmpDir, "file"+string(rune('a'+i))+".txt") + require.NoError(t, os.WriteFile(f, []byte("content"), 0o644)) + } + + t.Run("respects MaxFiles limit", func(t *testing.T) { + t.Parallel() + + got, err := WalkFiles(t.Context(), tmpDir, WalkFilesOptions{MaxFiles: 5}) + require.NoError(t, err) + assert.Len(t, got, 5, "should return exactly 5 files") + }) + + t.Run("returns all if MaxFiles is larger", func(t *testing.T) { + t.Parallel() + + got, err := WalkFiles(t.Context(), tmpDir, WalkFilesOptions{MaxFiles: 100}) + require.NoError(t, err) + assert.Len(t, got, 10, "should return all 10 files") + }) +} + +func TestWalkFiles_MaxDepth(t *testing.T) { + t.Parallel() + + tmpDir := t.TempDir() + + // Create structure with 3 levels: + // tmpDir/ + // level1.txt (depth 1) + // dir1/ + // level2.txt (depth 2) + // dir2/ + // level3.txt (depth 3) + // dir3/ + // level4.txt (depth 4) + + dirs := []string{ + filepath.Join(tmpDir, "dir1", "dir2", "dir3"), + } + for _, d := range dirs { + require.NoError(t, os.MkdirAll(d, 0o755)) + } + + files := map[string]string{ + filepath.Join(tmpDir, "level1.txt"): "level 1", + filepath.Join(tmpDir, "dir1", "level2.txt"): "level 2", + filepath.Join(tmpDir, "dir1", "dir2", "level3.txt"): "level 3", + filepath.Join(tmpDir, "dir1", "dir2", "dir3", "level4.txt"): "level 4", + } + for path, content := range files { + require.NoError(t, os.WriteFile(path, []byte(content), 0o644)) + } + + t.Run("MaxDepth 1 gets only root files", func(t *testing.T) { + t.Parallel() + + got, err := WalkFiles(t.Context(), tmpDir, 
WalkFilesOptions{MaxDepth: 1}) + require.NoError(t, err) + assert.Len(t, got, 1, "should only find level1.txt") + assert.Contains(t, got[0], "level1.txt") + }) + + t.Run("MaxDepth 2 gets 2 levels", func(t *testing.T) { + t.Parallel() + + got, err := WalkFiles(t.Context(), tmpDir, WalkFilesOptions{MaxDepth: 2}) + require.NoError(t, err) + assert.Len(t, got, 2, "should find level1.txt and level2.txt") + }) + + t.Run("MaxDepth 3 gets 3 levels", func(t *testing.T) { + t.Parallel() + + got, err := WalkFiles(t.Context(), tmpDir, WalkFilesOptions{MaxDepth: 3}) + require.NoError(t, err) + assert.Len(t, got, 3, "should find 3 files") + }) + + t.Run("MaxDepth 0 means unlimited", func(t *testing.T) { + t.Parallel() + + got, err := WalkFiles(t.Context(), tmpDir, WalkFilesOptions{MaxDepth: 0}) + require.NoError(t, err) + assert.Len(t, got, 4, "should find all 4 files") + }) +} + +func TestWalkFiles_HiddenDirectories(t *testing.T) { + t.Parallel() + + tmpDir := t.TempDir() + + // Create structure with hidden directories: + // tmpDir/ + // .git/ + // config + // .cache/ + // data + // src/ + // main.go + + dirs := []string{ + filepath.Join(tmpDir, ".git"), + filepath.Join(tmpDir, ".cache"), + filepath.Join(tmpDir, "src"), + } + for _, d := range dirs { + require.NoError(t, os.MkdirAll(d, 0o755)) + } + + files := map[string]string{ + filepath.Join(tmpDir, ".git", "config"): "[core]", + filepath.Join(tmpDir, ".cache", "data"): "cached", + filepath.Join(tmpDir, "src", "main.go"): "package main", + } + for path, content := range files { + require.NoError(t, os.WriteFile(path, []byte(content), 0o644)) + } + + t.Run("skips hidden directories", func(t *testing.T) { + t.Parallel() + + got, err := WalkFiles(t.Context(), tmpDir, WalkFilesOptions{}) + require.NoError(t, err) + + assert.Len(t, got, 1, "should only find src/main.go") + assert.Contains(t, got[0], "main.go") + + for _, f := range got { + assert.False(t, strings.HasPrefix(filepath.Base(f), ".")) + assert.NotContains(t, f, 
".git") + assert.NotContains(t, f, ".cache") + } + }) +} + +func TestWalkFiles_HeavyDirectories(t *testing.T) { + t.Parallel() + + tmpDir := t.TempDir() + + // Create structure with heavy directories: + // tmpDir/ + // node_modules/ + // package.js + // vendor/ + // lib.go + // __pycache__/ + // cache.pyc + // src/ + // main.go + + dirs := []string{ + filepath.Join(tmpDir, "node_modules"), + filepath.Join(tmpDir, "vendor"), + filepath.Join(tmpDir, "__pycache__"), + filepath.Join(tmpDir, "src"), + } + for _, d := range dirs { + require.NoError(t, os.MkdirAll(d, 0o755)) + } + + files := map[string]string{ + filepath.Join(tmpDir, "node_modules", "pkg.js"): "module.exports = {}", + filepath.Join(tmpDir, "vendor", "lib.go"): "package vendor", + filepath.Join(tmpDir, "__pycache__", "cache.pyc"): "bytecode", + filepath.Join(tmpDir, "src", "main.go"): "package main", + } + for path, content := range files { + require.NoError(t, os.WriteFile(path, []byte(content), 0o644)) + } + + t.Run("skips heavy directories", func(t *testing.T) { + t.Parallel() + + got, err := WalkFiles(t.Context(), tmpDir, WalkFilesOptions{}) + require.NoError(t, err) + + assert.Len(t, got, 1, "should only find src/main.go") + assert.Contains(t, got[0], "main.go") + + for _, f := range got { + assert.NotContains(t, f, "node_modules") + assert.NotContains(t, f, "vendor") + assert.NotContains(t, f, "__pycache__") + } + }) +} + +func TestWalkFiles_ShouldIgnore(t *testing.T) { + t.Parallel() + + tmpDir := t.TempDir() + + // Create structure + dirs := []string{ + filepath.Join(tmpDir, "src"), + filepath.Join(tmpDir, "tests"), + } + for _, d := range dirs { + require.NoError(t, os.MkdirAll(d, 0o755)) + } + + files := map[string]string{ + filepath.Join(tmpDir, "src", "main.go"): "package main", + filepath.Join(tmpDir, "src", "main_test.go"): "package main", + filepath.Join(tmpDir, "tests", "e2e.go"): "package tests", + } + for path, content := range files { + require.NoError(t, os.WriteFile(path, 
[]byte(content), 0o644)) + } + + t.Run("ShouldIgnore filters files", func(t *testing.T) { + t.Parallel() + + shouldIgnore := func(path string) bool { + return strings.HasSuffix(path, "_test.go") + } + + got, err := WalkFiles(t.Context(), tmpDir, WalkFilesOptions{ + ShouldIgnore: shouldIgnore, + }) + require.NoError(t, err) + + assert.Len(t, got, 2, "should exclude _test.go files") + for _, f := range got { + assert.False(t, strings.HasSuffix(f, "_test.go")) + } + }) + + t.Run("ShouldIgnore filters directories", func(t *testing.T) { + t.Parallel() + + shouldIgnore := func(path string) bool { + return strings.Contains(path, "tests") + } + + got, err := WalkFiles(t.Context(), tmpDir, WalkFilesOptions{ + ShouldIgnore: shouldIgnore, + }) + require.NoError(t, err) + + for _, f := range got { + assert.NotContains(t, f, "tests") + } + }) +} + +func TestWalkFiles_ContextCancellation(t *testing.T) { + t.Parallel() + + tmpDir := t.TempDir() + + // Create many files + for i := range 100 { + f := filepath.Join(tmpDir, "file"+string(rune(i%26+'a'))+string(rune(i/26+'0'))+".txt") + require.NoError(t, os.WriteFile(f, []byte("content"), 0o644)) + } + + t.Run("respects context cancellation", func(t *testing.T) { + t.Parallel() + + ctx, cancel := context.WithCancel(t.Context()) + cancel() // Cancel immediately + + got, err := WalkFiles(ctx, tmpDir, WalkFilesOptions{}) + // Should either return an error or return partial results + // The important thing is it doesn't hang + _ = got + _ = err + }) + + t.Run("returns partial results on timeout", func(t *testing.T) { + t.Parallel() + + ctx, cancel := context.WithTimeout(t.Context(), 1*time.Millisecond) + defer cancel() + + // This should return quickly due to timeout + _, err := WalkFiles(ctx, tmpDir, WalkFilesOptions{}) + // May or may not error depending on timing + _ = err + }) +} + +func TestWalkFiles_EmptyDirectory(t *testing.T) { + t.Parallel() + + tmpDir := t.TempDir() + + got, err := WalkFiles(t.Context(), tmpDir, 
WalkFilesOptions{}) + require.NoError(t, err) + assert.Empty(t, got, "should return empty for empty directory") +} + +func TestWalkFiles_NonExistentDirectory(t *testing.T) { + t.Parallel() + + got, err := WalkFiles(t.Context(), "/nonexistent/path/that/does/not/exist", WalkFilesOptions{}) + require.Error(t, err, "should return error for non-existent root directory") + assert.Empty(t, got) +} diff --git a/pkg/tui/components/completion/completion.go b/pkg/tui/components/completion/completion.go index 7edfa04fa..08551fab0 100644 --- a/pkg/tui/components/completion/completion.go +++ b/pkg/tui/components/completion/completion.go @@ -61,6 +61,24 @@ type SelectionChangedMsg struct { Value string } +// AppendItemsMsg appends items to the current completion list without closing the popup. +// Useful for async loading of completion items. +type AppendItemsMsg struct { + Items []Item +} + +// ReplaceItemsMsg replaces non-pinned items in the completion list. +// Pinned items (like "Browse files…") are preserved. +// Useful for full async load that supersedes initial results. +type ReplaceItemsMsg struct { + Items []Item +} + +// SetLoadingMsg sets the loading state for the completion popup. +type SetLoadingMsg struct { + Loading bool +} + type matchResult struct { item Item score int @@ -119,6 +137,7 @@ type manager struct { scrollOffset int visible bool matchMode MatchMode + loading bool // true when async loading is in progress } // New creates a new completion component @@ -168,8 +187,44 @@ func (c *manager) Update(msg tea.Msg) (layout.Model, tea.Cmd) { case CloseMsg: c.visible = false + c.loading = false + return c, nil + + case SetLoadingMsg: + c.loading = msg.Loading return c, nil + case AppendItemsMsg: + // Append new items to the existing list + c.items = append(c.items, msg.Items...) 
+ // Re-filter with current query + c.filterItems(c.query) + // Make popup visible if we now have items + if len(c.filteredItems) > 0 && !c.visible { + c.visible = true + } + cmd := c.notifySelectionChanged() + return c, cmd + + case ReplaceItemsMsg: + // Keep pinned items, replace everything else + var pinnedItems []Item + for _, item := range c.items { + if item.Pinned { + pinnedItems = append(pinnedItems, item) + } + } + // Combine pinned items with new items + c.items = append(pinnedItems, msg.Items...) + // Re-filter with current query + c.filterItems(c.query) + // Make popup visible if we have items + if len(c.filteredItems) > 0 && !c.visible { + c.visible = true + } + cmd := c.notifySelectionChanged() + return c, cmd + case tea.KeyPressMsg: switch { case key.Matches(msg, c.keyMap.Up): @@ -186,8 +241,8 @@ func (c *manager) Update(msg tea.Msg) (layout.Model, tea.Cmd) { if c.selected < len(c.filteredItems)-1 { c.selected++ } - if c.selected >= c.scrollOffset+10 { - c.scrollOffset = c.selected - 9 + if c.selected >= c.scrollOffset+maxItems { + c.scrollOffset = c.selected - maxItems + 1 } cmd := c.notifySelectionChanged() return c, cmd @@ -232,7 +287,11 @@ func (c *manager) View() string { var lines []string if len(c.filteredItems) == 0 { - lines = append(lines, styles.CompletionNoResultsStyle.Render("No command found")) + if c.loading { + lines = append(lines, styles.CompletionNoResultsStyle.Render("Loading…")) + } else { + lines = append(lines, styles.CompletionNoResultsStyle.Render("No results")) + } } else { visibleStart := c.scrollOffset visibleEnd := min(c.scrollOffset+maxItems, len(c.filteredItems)) @@ -297,8 +356,23 @@ func (c *manager) notifySelectionChanged() tea.Cmd { } func (c *manager) filterItems(query string) { + // Pinned items are always shown at the top, in their original order. 
+ var pinnedItems []Item + for _, item := range c.items { + if item.Pinned { + pinnedItems = append(pinnedItems, item) + } + } + if query == "" { - c.filteredItems = c.items + // Preserve original order for non-pinned items. + c.filteredItems = make([]Item, 0, len(c.items)) + c.filteredItems = append(c.filteredItems, pinnedItems...) + for _, item := range c.items { + if !item.Pinned { + c.filteredItems = append(c.filteredItems, item) + } + } // Reset selection when clearing the query if c.selected >= len(c.filteredItems) { c.selected = max(0, len(c.filteredItems)-1) @@ -307,10 +381,12 @@ func (c *manager) filterItems(query string) { } lowerQuery := strings.ToLower(query) - var pinnedItems []Item var matches []matchResult for _, item := range c.items { + if item.Pinned { + continue + } var matched bool var score int @@ -340,15 +416,10 @@ func (c *manager) filterItems(query string) { } if matched { - if item.Pinned { - // Pinned items keep their original order at the top - pinnedItems = append(pinnedItems, item) - } else { - matches = append(matches, matchResult{ - item: item, - score: score, - }) - } + matches = append(matches, matchResult{ + item: item, + score: score, + }) } } diff --git a/pkg/tui/components/completion/completion_test.go b/pkg/tui/components/completion/completion_test.go index f0e159bd3..8dc42685e 100644 --- a/pkg/tui/components/completion/completion_test.go +++ b/pkg/tui/components/completion/completion_test.go @@ -85,6 +85,253 @@ func TestCompletionManagerStaysOpenWithNoResults(t *testing.T) { m.Update(QueryMsg{Query: "xyz"}) view := m.View() - assert.Contains(t, view, "No command found", "should show no results message") + assert.Contains(t, view, "No results", "should show no results message") + }) +} + +func TestCompletionManagerAppendItems(t *testing.T) { + t.Parallel() + + t.Run("AppendItemsMsg adds items to existing list", func(t *testing.T) { + t.Parallel() + + m := New().(*manager) + + // Open with initial items + m.Update(OpenMsg{ + 
Items: []Item{ + {Label: "initial", Value: "/initial"}, + }, + }) + assert.Len(t, m.items, 1) + assert.Len(t, m.filteredItems, 1) + + // Append more items + m.Update(AppendItemsMsg{ + Items: []Item{ + {Label: "appended1", Value: "/appended1"}, + {Label: "appended2", Value: "/appended2"}, + }, + }) + + assert.Len(t, m.items, 3, "should have 3 total items") + assert.Len(t, m.filteredItems, 3, "filtered should also have 3 items") + }) + + t.Run("AppendItemsMsg respects current query filter", func(t *testing.T) { + t.Parallel() + + m := New().(*manager) + + // Open with initial items + m.Update(OpenMsg{ + Items: []Item{ + {Label: "foo", Value: "/foo"}, + }, + }) + + // Set a query filter + m.Update(QueryMsg{Query: "bar"}) + assert.Empty(t, m.filteredItems, "no items match 'bar'") + + // Append items that match the filter + m.Update(AppendItemsMsg{ + Items: []Item{ + {Label: "bar", Value: "/bar"}, + {Label: "barbaz", Value: "/barbaz"}, + {Label: "nomatch", Value: "/nomatch"}, + }, + }) + + assert.Len(t, m.items, 4, "should have 4 total items") + assert.Len(t, m.filteredItems, 2, "only 2 items match 'bar'") + }) + + t.Run("AppendItemsMsg makes popup visible if items match", func(t *testing.T) { + t.Parallel() + + m := New().(*manager) + + // Open with no items initially + m.Update(OpenMsg{Items: []Item{}}) + assert.False(t, m.visible, "should not be visible with no items") + + // Append items + m.Update(AppendItemsMsg{ + Items: []Item{ + {Label: "new", Value: "/new"}, + }, + }) + + assert.True(t, m.visible, "should become visible after appending items") + }) +} + +func TestCompletionManagerReplaceItems(t *testing.T) { + t.Parallel() + + t.Run("ReplaceItemsMsg replaces non-pinned items", func(t *testing.T) { + t.Parallel() + + m := New().(*manager) + + // Open with initial items (some pinned, some not) + m.Update(OpenMsg{ + Items: []Item{ + {Label: "Browse files…", Value: "", Pinned: true}, + {Label: "old1.txt", Value: "@old1.txt"}, + {Label: "old2.txt", Value: 
"@old2.txt"}, + }, + }) + assert.Len(t, m.items, 3) + + // Replace with new items + m.Update(ReplaceItemsMsg{ + Items: []Item{ + {Label: "new1.txt", Value: "@new1.txt"}, + {Label: "new2.txt", Value: "@new2.txt"}, + {Label: "new3.txt", Value: "@new3.txt"}, + }, + }) + + // Should have pinned item + 3 new items + assert.Len(t, m.items, 4, "should have 4 items total") + + // First item should be pinned + assert.Equal(t, "Browse files…", m.items[0].Label) + assert.True(t, m.items[0].Pinned) + + // Rest should be new items + assert.Equal(t, "new1.txt", m.items[1].Label) + assert.Equal(t, "new2.txt", m.items[2].Label) + assert.Equal(t, "new3.txt", m.items[3].Label) + }) + + t.Run("ReplaceItemsMsg preserves multiple pinned items", func(t *testing.T) { + t.Parallel() + + m := New().(*manager) + + // Open with multiple pinned items + m.Update(OpenMsg{ + Items: []Item{ + {Label: "paste-1", Value: "@paste-1", Pinned: true}, + {Label: "paste-2", Value: "@paste-2", Pinned: true}, + {Label: "Browse files…", Value: "", Pinned: true}, + {Label: "old.txt", Value: "@old.txt"}, + }, + }) + + // Replace with new items + m.Update(ReplaceItemsMsg{ + Items: []Item{ + {Label: "new.txt", Value: "@new.txt"}, + }, + }) + + // Should have 3 pinned items + 1 new item + assert.Len(t, m.items, 4, "should preserve all pinned items") + + pinnedCount := 0 + for _, item := range m.items { + if item.Pinned { + pinnedCount++ + } + } + assert.Equal(t, 3, pinnedCount, "should have 3 pinned items") + }) +} + +func TestCompletionManagerLoading(t *testing.T) { + t.Parallel() + + t.Run("SetLoadingMsg updates loading state", func(t *testing.T) { + t.Parallel() + + m := New().(*manager) + assert.False(t, m.loading) + + m.Update(SetLoadingMsg{Loading: true}) + assert.True(t, m.loading) + + m.Update(SetLoadingMsg{Loading: false}) + assert.False(t, m.loading) + }) + + t.Run("CloseMsg resets loading state", func(t *testing.T) { + t.Parallel() + + m := New().(*manager) + m.Update(SetLoadingMsg{Loading: true}) + 
assert.True(t, m.loading) + + m.Update(CloseMsg{}) + assert.False(t, m.loading) + }) + + t.Run("view shows Loading message when loading with no items", func(t *testing.T) { + t.Parallel() + + m := New().(*manager) + m.width = 80 + m.height = 24 + + // Open with no items but set loading + m.Update(OpenMsg{Items: []Item{}}) + m.visible = true // force visible for test + m.Update(SetLoadingMsg{Loading: true}) + + view := m.View() + assert.Contains(t, view, "Loading", "should show loading message") + }) +} + +func TestCompletionManagerPinnedItems(t *testing.T) { + t.Parallel() + + t.Run("pinned items always appear regardless of query", func(t *testing.T) { + t.Parallel() + + m := New().(*manager) + + // Open with mixed pinned and regular items + m.Update(OpenMsg{ + Items: []Item{ + {Label: "Browse files…", Value: "", Pinned: true}, + {Label: "main.go", Value: "@main.go"}, + {Label: "utils.go", Value: "@utils.go"}, + }, + }) + + // Query that doesn't match any regular items + m.Update(QueryMsg{Query: "xyz"}) + + // Pinned item should still appear + assert.Len(t, m.filteredItems, 1, "pinned item should always appear") + assert.Equal(t, "Browse files…", m.filteredItems[0].Label) + assert.True(t, m.filteredItems[0].Pinned) + }) + + t.Run("pinned items appear at top of results", func(t *testing.T) { + t.Parallel() + + m := New().(*manager) + + // Open with pinned item last in list + m.Update(OpenMsg{ + Items: []Item{ + {Label: "main.go", Value: "@main.go"}, + {Label: "utils.go", Value: "@utils.go"}, + {Label: "Browse files…", Value: "", Pinned: true}, + }, + }) + + // Query that matches regular items + m.Update(QueryMsg{Query: "main"}) + + // Pinned item should be first + assert.Len(t, m.filteredItems, 2, "pinned + matching item") + assert.Equal(t, "Browse files…", m.filteredItems[0].Label, "pinned should be first") + assert.Equal(t, "main.go", m.filteredItems[1].Label, "matching item should be second") }) } diff --git a/pkg/tui/components/editor/completions/completion.go 
b/pkg/tui/components/editor/completions/completion.go index bd1948241..110db6763 100644 --- a/pkg/tui/components/editor/completions/completion.go +++ b/pkg/tui/components/editor/completions/completion.go @@ -1,6 +1,8 @@ package completions import ( + "context" + "github.com/docker/cagent/pkg/app" "github.com/docker/cagent/pkg/tui/components/completion" ) @@ -14,6 +16,18 @@ type Completion interface { MatchMode() completion.MatchMode } +// AsyncLoader is an optional interface for completions that support async loading. +// This allows the editor to load items in the background without blocking the UI. +type AsyncLoader interface { + // LoadInitialItemsAsync loads a shallow set of items quickly (e.g., 2 levels deep, ~100 files). + // Returns a channel that receives initial items for immediate display. + LoadInitialItemsAsync(ctx context.Context) <-chan []completion.Item + + // LoadItemsAsync loads all items in a background goroutine with context support. + // It returns a channel that receives the items when loading is complete. 
+ LoadItemsAsync(ctx context.Context) <-chan []completion.Item +} + func Completions(a *app.App) []Completion { return []Completion{ NewCommandCompletion(a), diff --git a/pkg/tui/components/editor/completions/file.go b/pkg/tui/components/editor/completions/file.go index 8959cc677..32dc19694 100644 --- a/pkg/tui/components/editor/completions/file.go +++ b/pkg/tui/components/editor/completions/file.go @@ -1,12 +1,20 @@ package completions import ( + "context" + "sort" "sync" "github.com/docker/cagent/pkg/fsx" "github.com/docker/cagent/pkg/tui/components/completion" ) +// Initial loading limits for snappy UX +const ( + initialMaxFiles = 100 + initialMaxDepth = 2 +) + type fileCompletion struct { mu sync.Mutex items []completion.Item @@ -39,20 +47,26 @@ func (c *fileCompletion) Items() []completion.Item { } // Try to create VCS matcher for current directory - vcsMatcher, err := fsx.NewVCSMatcher(".") + vcsMatcher, _ := fsx.NewVCSMatcher(".") // Prepare shouldIgnore function var shouldIgnore func(string) bool - if err == nil && vcsMatcher != nil { + if vcsMatcher != nil { shouldIgnore = vcsMatcher.ShouldIgnore } - files, err := fsx.ListDirectory(".", shouldIgnore) + // Use bounded walker to avoid scanning huge directories + files, err := fsx.WalkFiles(context.Background(), ".", fsx.WalkFilesOptions{ + ShouldIgnore: shouldIgnore, + }) if err != nil { // Do not mark as loaded on error, allow retry return nil } + // Sort files by name + sort.Strings(files) + items := make([]completion.Item, len(files)) for i, f := range files { items[i] = completion.Item{ @@ -67,6 +81,139 @@ func (c *fileCompletion) Items() []completion.Item { return c.items } +// LoadInitialItemsAsync loads a shallow set of items quickly for immediate display. +// It scans 2 levels deep with a max of 100 files for a snappy initial UX. 
+func (c *fileCompletion) LoadInitialItemsAsync(ctx context.Context) <-chan []completion.Item { + ch := make(chan []completion.Item, 1) + + go func() { + defer close(ch) + + // Check if we already have full items cached + c.mu.Lock() + if c.loaded { + items := c.items + c.mu.Unlock() + select { + case ch <- items: + case <-ctx.Done(): + } + return + } + c.mu.Unlock() + + // Try to create VCS matcher for current directory + vcsMatcher, _ := fsx.NewVCSMatcher(".") + + var shouldIgnore func(string) bool + if vcsMatcher != nil { + shouldIgnore = vcsMatcher.ShouldIgnore + } + + // Shallow scan: 2 levels deep, max 100 files + files, err := fsx.WalkFiles(ctx, ".", fsx.WalkFilesOptions{ + MaxFiles: initialMaxFiles, + MaxDepth: initialMaxDepth, + ShouldIgnore: shouldIgnore, + }) + if err != nil || ctx.Err() != nil { + select { + case ch <- nil: + case <-ctx.Done(): + } + return + } + + // Sort files by name + sort.Strings(files) + + items := make([]completion.Item, len(files)) + for i, f := range files { + items[i] = completion.Item{ + Label: f, + Value: "@" + f, + } + } + + // Don't cache initial items - we'll cache full items later + select { + case ch <- items: + case <-ctx.Done(): + } + }() + + return ch +} + +// LoadItemsAsync loads all file items in a background goroutine with context support. +// It returns a channel that receives the items when loading is complete. 
+func (c *fileCompletion) LoadItemsAsync(ctx context.Context) <-chan []completion.Item { + ch := make(chan []completion.Item, 1) + + go func() { + defer close(ch) + + c.mu.Lock() + // Return cached items if already loaded + if c.loaded { + items := c.items + c.mu.Unlock() + select { + case ch <- items: + case <-ctx.Done(): + } + return + } + c.mu.Unlock() + + // Try to create VCS matcher for current directory + vcsMatcher, _ := fsx.NewVCSMatcher(".") + + // Prepare shouldIgnore function + var shouldIgnore func(string) bool + if vcsMatcher != nil { + shouldIgnore = vcsMatcher.ShouldIgnore + } + + // Full scan with default limits + files, err := fsx.WalkFiles(ctx, ".", fsx.WalkFilesOptions{ + ShouldIgnore: shouldIgnore, + }) + if err != nil || ctx.Err() != nil { + // Return nil on error or cancellation + select { + case ch <- nil: + case <-ctx.Done(): + } + return + } + + // Sort files by name + sort.Strings(files) + + items := make([]completion.Item, len(files)) + for i, f := range files { + items[i] = completion.Item{ + Label: f, + Value: "@" + f, + } + } + + // Cache the results + c.mu.Lock() + c.items = items + c.loaded = true + c.mu.Unlock() + + select { + case ch <- items: + case <-ctx.Done(): + } + }() + + return ch +} + func (c *fileCompletion) MatchMode() completion.MatchMode { return completion.MatchFuzzy } diff --git a/pkg/tui/components/editor/editor.go b/pkg/tui/components/editor/editor.go index da4d4d79d..75f0cf0e7 100644 --- a/pkg/tui/components/editor/editor.go +++ b/pkg/tui/components/editor/editor.go @@ -1,6 +1,7 @@ package editor import ( + "context" "fmt" "log/slog" "os" @@ -84,6 +85,13 @@ type Editor interface { SendContent() tea.Cmd } +// fileLoadResultMsg is sent when async file loading completes. 
+type fileLoadResultMsg struct { + loadID uint64 + items []completion.Item + isFullLoad bool // true for a full scan result, false for the initial shallow scan +} + // editor implements [Editor] type editor struct { textarea textarea.Model @@ -117,6 +125,15 @@ type editor struct { recording bool // recordingDotPhase tracks the animation phase for the recording dots cursor recordingDotPhase int + + // fileLoadID is incremented each time we start a new file load to ignore stale results + fileLoadID uint64 + // fileLoadStarted tracks whether we've started initial loading for the current completion + fileLoadStarted bool + // fileFullLoadStarted tracks whether we've started full file loading (triggered by typing) + fileFullLoadStarted bool + // fileLoadCancel cancels any in-progress file loading + fileLoadCancel context.CancelFunc } // New creates a new editor component @@ -601,6 +618,11 @@ func (e *editor) Update(msg tea.Msg) (layout.Model, tea.Cmd) { return e, cmd case completion.SelectedMsg: + // Action items carry an Execute command; run it instead of inserting text into the editor. + if msg.Execute != nil { + e.clearSuggestion() + return e, msg.Execute() + } if e.currentCompletion.AutoSubmit() { // For auto-submit completions (like commands), use the selected // command value (e.g., "/exit") instead of what the user typed @@ -632,7 +654,36 @@ func (e *editor) Update(msg tea.Msg) (layout.Model, tea.Cmd) { e.currentCompletion = nil e.clearSuggestion() e.refreshSuggestion() + // Completion closed: reset per-completion load flags and cancel any in-flight file scan so stale results are dropped. + e.fileLoadStarted = false + e.fileFullLoadStarted = false + if e.fileLoadCancel != nil { + e.fileLoadCancel() + e.fileLoadCancel = nil + } return e, e.textarea.Focus() + + case fileLoadResultMsg: + // Ignore stale results from older loads. + if msg.loadID != e.fileLoadID { + return e, nil + } + + // Always stop the loading indicator for the active load, even if it was cancelled/errored. 
+ if msg.items == nil { + return e, core.CmdHandler(completion.SetLoadingMsg{Loading: false}) + } + // A full result replaces the item list (pinned entries presumably survive ReplaceItemsMsg — TODO confirm in the completion component); a shallow result is appended. + var itemsCmd tea.Cmd + if msg.isFullLoad { + itemsCmd = core.CmdHandler(completion.ReplaceItemsMsg{Items: msg.items}) + } else { + itemsCmd = core.CmdHandler(completion.AppendItemsMsg{Items: msg.items}) + } + return e, tea.Batch( + core.CmdHandler(completion.SetLoadingMsg{Loading: false}), + itemsCmd, + ) case completion.SelectionChangedMsg: // Show the selected completion item as a suggestion in the editor if msg.Value != "" && e.currentCompletion != nil { @@ -901,31 +952,146 @@ func (e *editor) updateCompletionQuery() tea.Cmd { if e.currentCompletion != nil && strings.HasPrefix(currentWord, e.currentCompletion.Trigger()) { e.completionWord = strings.TrimPrefix(currentWord, e.currentCompletion.Trigger()) - return core.CmdHandler(completion.QueryMsg{Query: e.completionWord}) + + // For @ completion, kick off the full background scan the first time the user types after the trigger. + var loadCmd tea.Cmd + if e.currentCompletion.Trigger() == "@" && e.completionWord != "" && !e.fileFullLoadStarted { + loadCmd = e.startFullFileLoad() + } + + queryCmd := core.CmdHandler(completion.QueryMsg{Query: e.completionWord}) + if loadCmd != nil { + return tea.Batch(queryCmd, loadCmd) + } + return queryCmd } e.completionWord = "" return core.CmdHandler(completion.CloseMsg{}) } +// startFullFileLoad starts full background file loading and returns a command that will +// emit a fileLoadResultMsg when complete. This is triggered when the user starts typing. 
+func (e *editor) startFullFileLoad() tea.Cmd { + e.fileFullLoadStarted = true + e.fileLoadID++ + loadID := e.fileLoadID + + // Cancel any in-flight load; its result is also discarded by the loadID check in Update. + if e.fileLoadCancel != nil { + e.fileLoadCancel() + } + + ctx, cancel := context.WithCancel(context.Background()) + e.fileLoadCancel = cancel + + // Find the @ completion that implements completions.AsyncLoader. + var asyncLoader completions.AsyncLoader + for _, c := range e.completions { + if c.Trigger() == "@" { + if al, ok := c.(completions.AsyncLoader); ok { + asyncLoader = al + break + } + } + } + + if asyncLoader == nil { + return nil + } + + // Show the popup's loading indicator; the fileLoadResultMsg handler clears it. + loadingCmd := core.CmdHandler(completion.SetLoadingMsg{Loading: true}) + + // The blocking channel receive runs inside the tea command goroutine, not the update loop. + asyncCmd := func() tea.Msg { + ch := asyncLoader.LoadItemsAsync(ctx) + items := <-ch + return fileLoadResultMsg{loadID: loadID, items: items, isFullLoad: true} + } + + return tea.Batch(loadingCmd, asyncCmd) +} + func (e *editor) startCompletion(c completions.Completion) tea.Cmd { e.currentCompletion = c - items := c.Items() - // Prepend paste placeholders for @ trigger so users can easily reference them + // For @ trigger, open instantly with paste items + "Browse files…" and start async file loading if c.Trigger() == "@" { - pasteItems := e.getPasteCompletionItems() - if len(pasteItems) > 0 { - items = append(pasteItems, items...) 
- } + items := e.getPasteCompletionItems() + // Add "Browse files…" action that opens the file picker dialog + items = append(items, completion.Item{ + Label: "Browse files…", + Description: "Open file picker", + Value: "", // No value to insert + Execute: func() tea.Cmd { + return core.CmdHandler(messages.AttachFileMsg{FilePath: ""}) + }, + Pinned: true, + }) + + openCmd := core.CmdHandler(completion.OpenMsg{ + Items: items, + MatchMode: c.MatchMode(), + }) + + // Start initial shallow file loading immediately + loadCmd := e.startInitialFileLoad() + + return tea.Batch(openCmd, loadCmd) } + items := c.Items() + return core.CmdHandler(completion.OpenMsg{ Items: items, MatchMode: c.MatchMode(), }) } +// startInitialFileLoad starts a shallow file scan for immediate display. +// It requests a small, shallow scan for a snappy initial UX (limits live in LoadInitialItemsAsync — presumably ~100 files from 2 levels deep; TODO confirm). +func (e *editor) startInitialFileLoad() tea.Cmd { + e.fileLoadStarted = true + e.fileLoadID++ + loadID := e.fileLoadID + + // Cancel any in-flight load; its result is also discarded by the loadID check in Update. + if e.fileLoadCancel != nil { + e.fileLoadCancel() + } + + ctx, cancel := context.WithCancel(context.Background()) + e.fileLoadCancel = cancel + + // Find the @ completion that implements completions.AsyncLoader. + var asyncLoader completions.AsyncLoader + for _, c := range e.completions { + if c.Trigger() == "@" { + if al, ok := c.(completions.AsyncLoader); ok { + asyncLoader = al + break + } + } + } + + if asyncLoader == nil { + return nil + } + + // Show the popup's loading indicator; the fileLoadResultMsg handler clears it. + loadingCmd := core.CmdHandler(completion.SetLoadingMsg{Loading: true}) + + // The blocking channel receive runs inside the tea command goroutine, not the update loop. + asyncCmd := func() tea.Msg { + ch := asyncLoader.LoadInitialItemsAsync(ctx) + items := <-ch + return fileLoadResultMsg{loadID: loadID, items: items, isFullLoad: false} + } + + return tea.Batch(loadingCmd, asyncCmd) +} + // getPasteCompletionItems returns completion items for paste attachments only. func (e *editor) getPasteCompletionItems() []completion.Item { var items []completion.Item