diff --git a/README.md b/README.md index e75f1db..177e0af 100644 --- a/README.md +++ b/README.md @@ -92,7 +92,11 @@ Create `config.json`: ```json { - "documents_dir": "./documents", + "document_patterns": [ + "./documents", + "./notes/**/*.md", + "./projects/backend/**/*.md" + ], "db_path": "./vectors.db", "chunk_size": 500, "search_top_k": 5, @@ -109,7 +113,11 @@ Create `config.json`: ### Configuration Options -- `documents_dir`: Directory containing markdown files +- `document_patterns`: Array of document paths and glob patterns + - Supports directory paths: `"./documents"` + - Supports glob patterns: `"./docs/**/*.md"` (recursive) + - Multiple patterns: Index files from different locations + - **Note**: Old `documents_dir` field is still supported (automatically migrated) - `db_path`: Vector database file path - `chunk_size`: Document chunk size in characters - `search_top_k`: Number of search results to return @@ -118,6 +126,19 @@ Create `config.json`: - `model.name`: Embedding model name - `model.dimensions`: Vector dimensions +### Pattern Examples + +```json +{ + "document_patterns": [ + "./documents", // All .md files in documents/ + "./notes/**/*.md", // Recursive search in notes/ + "./projects/*/docs/*.md", // docs/ in each project + "/path/to/external/docs" // Absolute path + ] +} +``` + ## MCP Tools DevRag provides the following tools via Model Context Protocol: @@ -168,7 +189,11 @@ Configure for your project's docs directory: ```json { - "documents_dir": "./docs", + "document_patterns": [ + "./docs", + "./api-docs/**/*.md", + "./wiki/**/*.md" + ], "db_path": "./.devrag/vectors.db" } ``` @@ -415,7 +440,11 @@ Claude Codeで: ```json { - "documents_dir": "./documents", + "document_patterns": [ + "./documents", + "./notes/**/*.md", + "./projects/backend/**/*.md" + ], "db_path": "./vectors.db", "chunk_size": 500, "search_top_k": 5, @@ -432,7 +461,11 @@ Claude Codeで: ### 設定項目 -- `documents_dir`: マークダウンファイルを配置するディレクトリ +- `document_patterns`: 
ドキュメントのパスとglobパターンの配列 + - ディレクトリパス対応: `"./documents"` + - globパターン対応: `"./docs/**/*.md"` (再帰的) + - 複数パターン: 異なる場所からファイルをインデックス化 + - **注意**: 旧形式の`documents_dir`もサポート(自動的に移行) - `db_path`: ベクトルデータベースのパス - `chunk_size`: ドキュメントのチャンクサイズ(文字数) - `search_top_k`: 検索結果の返却件数 @@ -441,6 +474,19 @@ Claude Codeで: - `model.name`: 埋め込みモデル名 - `model.dimensions`: ベクトル次元数 +### パターン例 + +```json +{ + "document_patterns": [ + "./documents", // documents/内の全.mdファイル + "./notes/**/*.md", // notes/内を再帰的に検索 + "./projects/*/docs/*.md", // 各プロジェクトのdocs/ + "/path/to/external/docs" // 絶対パス + ] +} +``` + ## MCPツール Model Context Protocolを通じて以下のツールを提供: @@ -491,7 +537,11 @@ Model Context Protocolを通じて以下のツールを提供: ```json { - "documents_dir": "./docs", + "document_patterns": [ + "./docs", + "./api-docs/**/*.md", + "./wiki/**/*.md" + ], "db_path": "./.devrag/vectors.db" } ``` diff --git a/cmd/main.go b/cmd/main.go index f6af472..8febf4c 100644 --- a/cmd/main.go +++ b/cmd/main.go @@ -28,7 +28,7 @@ func main() { } fmt.Fprintf(os.Stderr, "[INFO] Configuration loaded successfully\n") - fmt.Fprintf(os.Stderr, "[INFO] Documents directory: %s\n", cfg.DocumentsDir) + fmt.Fprintf(os.Stderr, "[INFO] Document patterns: %v\n", cfg.DocumentPatterns) fmt.Fprintf(os.Stderr, "[INFO] Database path: %s\n", cfg.DBPath) fmt.Fprintf(os.Stderr, "[INFO] Model: %s (dimensions: %d)\n", cfg.Model.Name, cfg.Model.Dimensions) fmt.Fprintf(os.Stderr, "[INFO] Device: %s\n", cfg.Compute.Device) @@ -46,10 +46,12 @@ func main() { // 4. 
Initialize components - // Ensure documents directory exists - if err := os.MkdirAll(cfg.DocumentsDir, 0755); err != nil { - fmt.Fprintf(os.Stderr, "[FATAL] Failed to create documents directory: %v\n", err) - os.Exit(1) + // Ensure base directories exist + baseDirs := cfg.GetBaseDirectories() + for _, dir := range baseDirs { + if err := os.MkdirAll(dir, 0755); err != nil { + fmt.Fprintf(os.Stderr, "[WARN] Failed to create directory %s: %v\n", dir, err) + } } // Initialize database diff --git a/internal/config/config.go b/internal/config/config.go index 206b897..b828f9c 100644 --- a/internal/config/config.go +++ b/internal/config/config.go @@ -4,14 +4,18 @@ import ( "encoding/json" "fmt" "os" + "path/filepath" + "strings" ) type Config struct { - DocumentsDir string `json:"documents_dir"` - DBPath string `json:"db_path"` - ChunkSize int `json:"chunk_size"` - SearchTopK int `json:"search_top_k"` - Compute struct { + // Deprecated: Use DocumentPatterns instead + DocumentsDir string `json:"documents_dir,omitempty"` + DocumentPatterns []string `json:"document_patterns,omitempty"` + DBPath string `json:"db_path"` + ChunkSize int `json:"chunk_size"` + SearchTopK int `json:"search_top_k"` + Compute struct { Device string `json:"device"` FallbackToCPU bool `json:"fallback_to_cpu"` } `json:"compute"` @@ -24,10 +28,10 @@ type Config struct { // DefaultConfig returns default configuration func DefaultConfig() *Config { cfg := &Config{ - DocumentsDir: "./documents", - DBPath: "./vectors.db", - ChunkSize: 500, - SearchTopK: 5, + DocumentPatterns: []string{"./documents"}, + DBPath: "./vectors.db", + ChunkSize: 500, + SearchTopK: 5, } cfg.Compute.Device = "auto" cfg.Compute.FallbackToCPU = true @@ -61,6 +65,14 @@ func Load() (*Config, error) { return nil, fmt.Errorf("failed to read config: %w", err) } + // First unmarshal to check which fields are present + var rawConfig map[string]interface{} + if err := json.Unmarshal(data, &rawConfig); err != nil { + fmt.Fprintf(os.Stderr, 
"[WARN] Invalid JSON in config.json: %v\n", err) + fmt.Fprintf(os.Stderr, "[WARN] Using default configuration\n") + return DefaultConfig(), nil + } + cfg := DefaultConfig() if err := json.Unmarshal(data, cfg); err != nil { fmt.Fprintf(os.Stderr, "[WARN] Invalid JSON in config.json: %v\n", err) @@ -68,6 +80,21 @@ func Load() (*Config, error) { return cfg, nil } + // Migrate old format to new format + _, hasOldField := rawConfig["documents_dir"] + _, hasNewField := rawConfig["document_patterns"] + + if hasOldField && !hasNewField { + fmt.Fprintf(os.Stderr, "[INFO] Migrating from documents_dir to document_patterns\n") + cfg.DocumentPatterns = []string{cfg.DocumentsDir} + cfg.DocumentsDir = "" // Clear deprecated field + } + + // Validate that at least one pattern is configured + if len(cfg.DocumentPatterns) == 0 { + cfg.DocumentPatterns = []string{"./documents"} + } + fmt.Fprintf(os.Stderr, "[INFO] Loaded configuration from %s\n", configFile) return cfg, nil } @@ -97,5 +124,210 @@ func (c *Config) Validate() error { if c.Model.Dimensions <= 0 { return fmt.Errorf("model.dimensions must be positive") } + if len(c.DocumentPatterns) == 0 { + return fmt.Errorf("at least one document pattern must be specified") + } return nil } + +// GetDocumentFiles expands all document patterns and returns matching markdown files +func (c *Config) GetDocumentFiles() ([]string, error) { + files := make(map[string]bool) // Use map to deduplicate + + for _, pattern := range c.DocumentPatterns { + matches, err := c.expandPattern(pattern) + if err != nil { + fmt.Fprintf(os.Stderr, "[WARN] Failed to expand pattern %s: %v\n", pattern, err) + continue + } + for _, match := range matches { + files[match] = true + } + } + + // Convert map to slice + result := make([]string, 0, len(files)) + for file := range files { + result = append(result, file) + } + + return result, nil +} + +// expandPattern expands a single pattern to matching markdown files +// Supports both directory paths (e.g., "./docs") 
and glob patterns (e.g., "./docs/**/*.md") +func (c *Config) expandPattern(pattern string) ([]string, error) { + var files []string + + // Check if pattern looks like a directory (no wildcards and no .md extension) + if !strings.Contains(pattern, "*") && !strings.Contains(pattern, "?") { + // Treat as directory - walk it for all .md files + err := filepath.Walk(pattern, func(path string, info os.FileInfo, err error) error { + if err != nil { + return nil // Continue despite errors + } + if !info.IsDir() && filepath.Ext(path) == ".md" { + files = append(files, path) + } + return nil + }) + if err != nil { + return nil, err + } + return files, nil + } + + // Pattern contains wildcards - need to handle ** specially + if strings.Contains(pattern, "**") { + return c.expandDoubleStarPattern(pattern) + } + + // Simple glob pattern (no **) + matches, err := filepath.Glob(pattern) + if err != nil { + return nil, err + } + + // Filter to only markdown files + for _, match := range matches { + info, err := os.Stat(match) + if err != nil { + continue + } + if !info.IsDir() && filepath.Ext(match) == ".md" { + files = append(files, match) + } + } + + return files, nil +} + +// expandDoubleStarPattern handles patterns with ** (recursive directory matching) +func (c *Config) expandDoubleStarPattern(pattern string) ([]string, error) { + var files []string + + // Split pattern at ** + parts := strings.SplitN(pattern, "**", 2) + if len(parts) != 2 { + return nil, fmt.Errorf("invalid ** pattern: %s", pattern) + } + + baseDir := parts[0] + suffix := parts[1] + + // Clean up baseDir + if baseDir == "" { + baseDir = "." 
+ } else { + baseDir = strings.TrimSuffix(baseDir, string(filepath.Separator)) + } + + // Walk the base directory + err := filepath.Walk(baseDir, func(path string, info os.FileInfo, err error) error { + if err != nil { + return nil // Continue despite errors + } + + if info.IsDir() { + return nil + } + + // Check if path matches the suffix pattern + if c.matchesSuffix(path, baseDir, suffix) { + files = append(files, path) + } + + return nil + }) + + if err != nil { + return nil, err + } + + return files, nil +} + +// matchesSuffix checks if a file path matches the suffix pattern after ** +func (c *Config) matchesSuffix(path, baseDir, suffix string) bool { + // Remove baseDir from path + relPath, err := filepath.Rel(baseDir, path) + if err != nil { + return false + } + + // If suffix is empty or just a separator, match all .md files + if suffix == "" || suffix == string(filepath.Separator) || suffix == "/" { + return filepath.Ext(path) == ".md" + } + + // Clean suffix + suffix = strings.TrimPrefix(suffix, string(filepath.Separator)) + suffix = strings.TrimPrefix(suffix, "/") + + // If suffix is just *.md, match all markdown files + if suffix == "*.md" { + return filepath.Ext(path) == ".md" + } + + // Check if relPath matches the suffix pattern + matched, err := filepath.Match(suffix, filepath.Base(relPath)) + if err != nil { + return false + } + + if matched && filepath.Ext(path) == ".md" { + return true + } + + // For patterns like "subdir/*.md", check full relative path + matched, err = filepath.Match(suffix, relPath) + if err != nil { + return false + } + + return matched && filepath.Ext(path) == ".md" +} + +// GetBaseDirectories returns the base directories from all patterns +// This is useful for path validation +func (c *Config) GetBaseDirectories() []string { + dirs := make(map[string]bool) + + for _, pattern := range c.DocumentPatterns { + // Extract base directory from pattern + baseDir := c.extractBaseDir(pattern) + if baseDir != "" { + absDir, err := 
filepath.Abs(baseDir) + if err == nil { + dirs[absDir] = true + } + } + } + + result := make([]string, 0, len(dirs)) + for dir := range dirs { + result = append(result, dir) + } + + return result +} + +// extractBaseDir extracts the base directory from a pattern +func (c *Config) extractBaseDir(pattern string) string { + // Find the first wildcard + wildcardIndex := strings.IndexAny(pattern, "*?") + if wildcardIndex == -1 { + // No wildcards - entire pattern is base directory + return pattern + } + + // Get the directory part before the wildcard + baseDir := pattern[:wildcardIndex] + baseDir = filepath.Dir(baseDir) + + if baseDir == "." { + return baseDir + } + + return baseDir +} diff --git a/internal/config/config_test.go b/internal/config/config_test.go index 8a1abb5..6485319 100644 --- a/internal/config/config_test.go +++ b/internal/config/config_test.go @@ -2,6 +2,7 @@ package config import ( "os" + "path/filepath" "testing" ) @@ -18,8 +19,8 @@ func TestLoadConfig_NoFile(t *testing.T) { t.Fatalf("Expected no error, got %v", err) } - if cfg.DocumentsDir != "./documents" { - t.Errorf("Expected default documents_dir, got %s", cfg.DocumentsDir) + if len(cfg.DocumentPatterns) != 1 || cfg.DocumentPatterns[0] != "./documents" { + t.Errorf("Expected default document_patterns [./documents], got %v", cfg.DocumentPatterns) } if cfg.ChunkSize != 500 { @@ -57,7 +58,7 @@ func TestLoadConfig_Valid(t *testing.T) { // Create test config testConfig := `{ - "documents_dir": "./test_docs", + "document_patterns": ["./test_docs", "./other_docs/**/*.md"], "db_path": "./test.db", "chunk_size": 300, "search_top_k": 10, @@ -80,8 +81,14 @@ func TestLoadConfig_Valid(t *testing.T) { t.Fatalf("Expected no error, got %v", err) } - if cfg.DocumentsDir != "./test_docs" { - t.Errorf("Expected documents_dir './test_docs', got %s", cfg.DocumentsDir) + if len(cfg.DocumentPatterns) != 2 { + t.Errorf("Expected 2 document patterns, got %d", len(cfg.DocumentPatterns)) + } + if cfg.DocumentPatterns[0] 
!= "./test_docs" { + t.Errorf("Expected first pattern './test_docs', got %s", cfg.DocumentPatterns[0]) + } + if cfg.DocumentPatterns[1] != "./other_docs/**/*.md" { + t.Errorf("Expected second pattern './other_docs/**/*.md', got %s", cfg.DocumentPatterns[1]) } if cfg.DBPath != "./test.db" { @@ -224,8 +231,8 @@ func TestDefaultConfig(t *testing.T) { } // Verify all default values - if cfg.DocumentsDir != "./documents" { - t.Errorf("Wrong default documents_dir: %s", cfg.DocumentsDir) + if len(cfg.DocumentPatterns) != 1 || cfg.DocumentPatterns[0] != "./documents" { + t.Errorf("Wrong default document_patterns: %v", cfg.DocumentPatterns) } if cfg.DBPath != "./vectors.db" { t.Errorf("Wrong default db_path: %s", cfg.DBPath) @@ -237,3 +244,187 @@ func TestDefaultConfig(t *testing.T) { t.Errorf("Wrong default search_top_k: %d", cfg.SearchTopK) } } + +func TestLoadConfig_BackwardsCompatibility(t *testing.T) { + // Test that old documents_dir format is migrated to document_patterns + tmpDir := t.TempDir() + originalDir, _ := os.Getwd() + defer os.Chdir(originalDir) + + os.Chdir(tmpDir) + + // Create test config with old format + oldConfig := `{ + "documents_dir": "./old_docs", + "db_path": "./test.db", + "chunk_size": 300, + "search_top_k": 10, + "compute": { + "device": "cpu", + "fallback_to_cpu": false + }, + "model": { + "name": "test-model", + "dimensions": 256 + } +}` + + if err := os.WriteFile("config.json", []byte(oldConfig), 0644); err != nil { + t.Fatal(err) + } + + cfg, err := Load() + if err != nil { + t.Fatalf("Expected no error, got %v", err) + } + + // Verify migration + if len(cfg.DocumentPatterns) != 1 { + t.Errorf("Expected 1 document pattern after migration, got %d", len(cfg.DocumentPatterns)) + } + if cfg.DocumentPatterns[0] != "./old_docs" { + t.Errorf("Expected pattern './old_docs', got %s", cfg.DocumentPatterns[0]) + } + if cfg.DocumentsDir != "" { + t.Errorf("Expected deprecated DocumentsDir to be cleared, got %s", cfg.DocumentsDir) + } +} + +func 
TestGetDocumentFiles(t *testing.T) { + // Create temporary test directory structure + tmpDir := t.TempDir() + + // Create test files + testFiles := []string{ + "docs/file1.md", + "docs/subdir/file2.md", + "notes/file3.md", + "other/file4.txt", // not markdown + } + + for _, file := range testFiles { + fullPath := filepath.Join(tmpDir, file) + dir := filepath.Dir(fullPath) + if err := os.MkdirAll(dir, 0755); err != nil { + t.Fatal(err) + } + if err := os.WriteFile(fullPath, []byte("# Test"), 0644); err != nil { + t.Fatal(err) + } + } + + tests := []struct { + name string + patterns []string + expectedCount int + shouldContain []string + shouldNotContain []string + }{ + { + name: "single directory pattern", + patterns: []string{tmpDir + "/docs"}, + expectedCount: 2, + shouldContain: []string{"file1.md", "file2.md"}, + }, + { + name: "glob pattern with **", + patterns: []string{tmpDir + "/docs/**/*.md"}, + expectedCount: 2, + shouldContain: []string{"file1.md", "file2.md"}, + }, + { + name: "multiple patterns", + patterns: []string{tmpDir + "/docs", tmpDir + "/notes"}, + expectedCount: 3, + shouldContain: []string{"file1.md", "file2.md", "file3.md"}, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + cfg := &Config{ + DocumentPatterns: tt.patterns, + } + + files, err := cfg.GetDocumentFiles() + if err != nil { + t.Fatalf("GetDocumentFiles failed: %v", err) + } + + if len(files) != tt.expectedCount { + t.Errorf("Expected %d files, got %d: %v", tt.expectedCount, len(files), files) + } + + // Check that expected files are present + for _, expected := range tt.shouldContain { + found := false + for _, file := range files { + if contains(file, expected) { + found = true + break + } + } + if !found { + t.Errorf("Expected to find file containing '%s', but it was not found in %v", expected, files) + } + } + }) + } +} + +func TestGetBaseDirectories(t *testing.T) { + tmpDir := t.TempDir() + + tests := []struct { + name string + patterns []string + 
expectedCount int + }{ + { + name: "single directory", + patterns: []string{tmpDir + "/docs"}, + expectedCount: 1, + }, + { + name: "glob pattern", + patterns: []string{tmpDir + "/docs/**/*.md"}, + expectedCount: 1, + }, + { + name: "multiple patterns with subdirectory", + patterns: []string{tmpDir + "/docs/**/*.md", tmpDir + "/docs/subdir/*.md"}, + expectedCount: 2, // Different base directories (docs and docs/subdir) + }, + { + name: "multiple patterns different bases", + patterns: []string{tmpDir + "/docs", tmpDir + "/notes"}, + expectedCount: 2, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + cfg := &Config{ + DocumentPatterns: tt.patterns, + } + + dirs := cfg.GetBaseDirectories() + if len(dirs) != tt.expectedCount { + t.Errorf("Expected %d base directories, got %d: %v", tt.expectedCount, len(dirs), dirs) + } + }) + } +} + +func contains(s, substr string) bool { + return len(s) >= len(substr) && (s == substr || len(s) > len(substr) && (s[len(s)-len(substr):] == substr || s[:len(substr)] == substr || containsMiddle(s, substr))) +} + +func containsMiddle(s, substr string) bool { + for i := 0; i <= len(s)-len(substr); i++ { + if s[i:i+len(substr)] == substr { + return true + } + } + return false +} diff --git a/internal/indexer/sync.go b/internal/indexer/sync.go index c7e07cf..1d3e1bb 100644 --- a/internal/indexer/sync.go +++ b/internal/indexer/sync.go @@ -39,31 +39,25 @@ func (idx *Indexer) Sync() (*SyncResult, error) { dbFileMap[filename] = modTime } - // Step 2: Scan filesystem (filepath -> mtime) + // Step 2: Scan filesystem using document patterns fsFiles := make(map[string]time.Time) - err = filepath.Walk(idx.config.DocumentsDir, func(path string, info os.FileInfo, err error) error { - if err != nil { - fmt.Fprintf(os.Stderr, "[WARN] Error accessing %s: %v\n", path, err) - return nil // Continue walking despite errors - } - // Skip directories - if info.IsDir() { - return nil - } + // Get all markdown files matching the 
configured patterns + matchedFiles, err := idx.config.GetDocumentFiles() + if err != nil { + return nil, fmt.Errorf("failed to get document files: %w", err) + } - // Only process markdown files - if filepath.Ext(path) != ".md" { - return nil + // Get modification times for all matched files + for _, path := range matchedFiles { + info, err := os.Stat(path) + if err != nil { + fmt.Fprintf(os.Stderr, "[WARN] Error accessing %s: %v\n", path, err) + continue } // Store file path and modification time fsFiles[path] = info.ModTime() - return nil - }) - - if err != nil { - return nil, fmt.Errorf("failed to scan filesystem: %w", err) } fmt.Fprintf(os.Stderr, "[INFO] Found %d markdown files in filesystem\n", len(fsFiles)) diff --git a/internal/mcp/tools.go b/internal/mcp/tools.go index 51ec9ca..29664ae 100644 --- a/internal/mcp/tools.go +++ b/internal/mcp/tools.go @@ -79,7 +79,7 @@ func (s *MCPServer) handleIndexMarkdown(ctx context.Context, request mcp.CallToo } // Validate path (prevent path traversal) - if err := validatePath(filePath, s.config.DocumentsDir); err != nil { + if err := validatePath(filePath, s.config.GetBaseDirectories()); err != nil { return mcp.NewToolResultError(fmt.Sprintf("invalid path: %v", err)), nil } @@ -149,9 +149,8 @@ func (s *MCPServer) handleDeleteDocument(ctx context.Context, request mcp.CallTo return mcp.NewToolResultError(fmt.Sprintf("failed to delete from database: %v", err)), nil } - // Delete file - filePath := filepath.Join(s.config.DocumentsDir, filename) - if err := os.Remove(filePath); err != nil { + // Delete file (filename is the full path) + if err := os.Remove(filename); err != nil { fmt.Fprintf(os.Stderr, "[WARN] Failed to delete file: %v\n", err) } @@ -186,9 +185,8 @@ func (s *MCPServer) handleReindexDocument(ctx context.Context, request mcp.CallT return mcp.NewToolResultError(fmt.Sprintf("failed to delete document: %v", err)), nil } - // Reindex - filePath := filepath.Join(s.config.DocumentsDir, filename) - if err := 
s.indexer.IndexFile(filePath); err != nil { + // Reindex (filename is the full path) + if err := s.indexer.IndexFile(filename); err != nil { return mcp.NewToolResultError(fmt.Sprintf("failed to reindex: %v", err)), nil } @@ -234,7 +232,7 @@ func (s *MCPServer) handleAddFrontmatter(ctx context.Context, request mcp.CallTo } // Validate path - if err := validatePath(filePath, s.config.DocumentsDir); err != nil { + if err := validatePath(filePath, s.config.GetBaseDirectories()); err != nil { return mcp.NewToolResultError(fmt.Sprintf("invalid path: %v", err)), nil } @@ -306,7 +304,7 @@ func (s *MCPServer) handleUpdateFrontmatter(ctx context.Context, request mcp.Cal } // Validate path - if err := validatePath(filePath, s.config.DocumentsDir); err != nil { + if err := validatePath(filePath, s.config.GetBaseDirectories()); err != nil { return mcp.NewToolResultError(fmt.Sprintf("invalid path: %v", err)), nil } @@ -343,26 +341,31 @@ } // validatePath prevents path traversal attacks -func validatePath(filePath, baseDir string) error { +// It checks if the file is within any of the configured base directories +func validatePath(filePath string, baseDirs []string) error { absPath, err := filepath.Abs(filePath) if err != nil { return err } - absBase, err := filepath.Abs(baseDir) - if err != nil { - return err - } + // Check if path is within any of the base directories + for _, baseDir := range baseDirs { + absBase, err := filepath.Abs(baseDir) + if err != nil { + continue + } - relPath, err := filepath.Rel(absBase, absPath) + if err != nil { - return err + continue + } - // Check if path escapes base directory - if len(relPath) > 0 && relPath[0] == '.' { - return fmt.Errorf("path traversal detected: %s", filePath) + // Inside this base iff relPath is "." or does not escape via ".."; a plain leading dot (e.g. ".hidden.md") must NOT be rejected + if relPath == "." || (relPath != ".." && !(len(relPath) >= 3 && relPath[:3] == ".."+string(filepath.Separator))) { + // Path is within this base directory + return nil + } } - return nil + return fmt.Errorf("path not within any configured document directory: %s", filePath) }