diff --git a/.github/workflows/go.yml b/.github/workflows/go.yml index d3fbfab..33b3794 100644 --- a/.github/workflows/go.yml +++ b/.github/workflows/go.yml @@ -40,3 +40,6 @@ jobs: - name: Test run: make test + + - name: Check coverage + run: make check-coverage diff --git a/AGENTS.md b/AGENTS.md new file mode 100644 index 0000000..2db1fcf --- /dev/null +++ b/AGENTS.md @@ -0,0 +1,92 @@ +# Project Guidelines + +## Overview + +CLI tool for managing local backups using `rsync` as the engine. +Built in Go with `cobra` for CLI, `afero` for filesystem abstraction, and YAML for configuration. Local-only — no remote rsync support. + +## Code Style + +- Go 1.25; follow idiomatic Go conventions +- Format with `go fmt`; lint with `golangci-lint` (config in `.golangci.yml`) +- All linters enabled by default — check `.golangci.yml` for disabled ones +- Keep packages focused: `cmd/` for CLI wiring, `internal/` for core logic +- Prefer dependency injection over global state for testability +- Use interfaces at consumption boundaries (see `internal/exec.go`, `internal/job_command.go`) +- All output in commands routed through `cmd.OutOrStdout()` or injected `io.Writer` — never raw `fmt.Printf` +- All commands use `RunE` with wrapped errors + +## Architecture + +``` +backup/ + main.go # Entrypoint — calls cmd.BuildRootCommand().Execute() + cmd/ # Cobra commands: list, run, simulate, config (show/validate), check-coverage, version + root.go # BuildRootCommand() / BuildRootCommandWithFs(fs) — injects afero.Fs + test/ # Cobra command integration tests + internal/ # Core logic: config, job execution, rsync wrapper, coverage checker + test/ # Unit tests + mockery-generated mocks +``` + +### Key Types & Interfaces + +- **`Exec`** (interface): Command execution abstraction (`OsExec` for real, `MockExec` for tests) +- **`JobCommand`** (interface): `Run(job Job) JobStatus` + `GetVersionInfo()` — implemented by `ListCommand`, `SimulateCommand`, `SyncCommand` +- **`SharedCommand`** (struct): 
Base for all commands — holds `BinPath`, `BaseLogPath`, `Shell Exec`, `Output io.Writer` +- **`CoverageChecker`** (struct): Analyzes path coverage with injected `*log.Logger` and `afero.Fs` +- **`Config`**: YAML-based (`Config`, `Job`, `Path`, variables with `${var}` substitution) + +### Dependency Injection + +- `Exec` injected into all command constructors (`NewListCommand`, `NewSimulateCommand`, `NewSyncCommand`) +- `io.Writer` injected into `SharedCommand` for output capture +- `afero.Fs` injected into `BuildRootCommandWithFs()` → `buildCheckCoverageCommand(fs)` +- `*log.Logger` injected into `CoverageChecker` and `Config.Apply()` +- Commands use `cmd.OutOrStdout()` for testable output + +## Build and Test + +```sh +make build # Build to dist/backup +make test # go test -race ./... -v +make lint # golangci-lint run ./... +make lint-fix # Auto-fix lint issues +make format # go fmt ./... +make tidy # gofmt -s + go mod tidy +make sanity-check # format + clean + tidy +make check-coverage # Fail if coverage < 98% (COVERAGE_THRESHOLD in Makefile) +make report-coverage # Generate HTML coverage report +``` + +## Testing Conventions + +- See `TESTING_GUIDE.md` for patterns and examples +- Use **dependency injection** — inject interfaces, not concrete types +- **Mocks**: Generated with [mockery](https://github.com/vektra/mockery) (config: `.mockery.yml`) + - Mock files live in `internal/test/` as `mock__test.go` + - Mock structs named `Mock` (e.g., `MockExec`, `MockJobCommand`) + - See `MOCKERY_INTEGRATION.md` for setup details +- Use `testify` for assertions (`require` / `assert`) +- Test files live in `/test/` subdirectories +- Prefer table-driven tests for multiple input scenarios +- Use `afero.NewMemMapFs()` in tests — never hit the real filesystem +- Use `bytes.Buffer` or `io.Discard` for output capture in tests +- CI enforces coverage threshold via `make check-coverage` + +## CI Pipeline + +CI runs on every push/PR to `main` (`.github/workflows/go.yml`): +1. 
Sanity check (format + clean + mod tidy) +2. Lint (golangci-lint) +3. Build +4. Test (with `-race` flag) +5. Coverage threshold enforcement (98%, see `COVERAGE_THRESHOLD` in the Makefile) + +## Conventions + +- No remote rsync — only locally mounted paths +- Job-level granularity: each backup job can be listed, simulated, or run independently +- Dry-run/simulate mode available for all operations +- Logging goes to both an injected `io.Writer` (user output) and `*log.Logger` (file logging) under `logs/` +- Custom YAML unmarshaling handles job defaults (see `internal/job.go`) +- CI runs sanity checks, lint, and build on every push/PR (`.github/workflows/go.yml`) diff --git a/MOCKERY_INTEGRATION.md b/MOCKERY_INTEGRATION.md new file mode 100644 index 0000000..2ef7cd9 --- /dev/null +++ b/MOCKERY_INTEGRATION.md @@ -0,0 +1,114 @@ +# Mockery Integration Guide + +This document explains how mockery is integrated for generating mocks from interfaces. + +## Installation + +```bash +go install github.com/vektra/mockery/v3@latest +``` + +## Configuration + +The project uses `.mockery.yml` to control mock generation: + +```yaml +all: false +dir: '{{.InterfaceDir}}/test' +filename: mock_{{.InterfaceName | lower}}_test.go +force-file-write: true +formatter: goimports +generate: true +include-auto-generated: false +log-level: info +structname: 'Mock{{.InterfaceName}}' +pkgname: 'internal_test' +recursive: false +template: testify +packages: + backup-rsync/backup/internal: + interfaces: + Exec: + JobCommand: +``` + +Key points: +- **Output directory**: `/test/` (alongside other test files) +- **Filename**: `mock__test.go` +- **Struct naming**: `Mock` (e.g., `MockExec`, `MockJobCommand`) +- **Package**: `internal_test` (external test package) +- **Template**: `testify` for expectation-based mocking + +## Generated Mocks + +| Mock | Source Interface | File | +|---|---|---| +| `MockExec` | `Exec` | `backup/internal/test/mock_exec_test.go` | +| `MockJobCommand` | `JobCommand` | `backup/internal/test/mock_jobcommand_test.go` | 
+## Usage Examples + +### MockJobCommand — Testing Config.Apply + +```go +func TestConfigApply_VersionInfoSuccess(t *testing.T) { + mockCmd := NewMockJobCommand(t) + var output bytes.Buffer + logger := log.New(&bytes.Buffer{}, "", 0) + + cfg := Config{ + Jobs: []Job{ + {Name: "job1", Source: "/src/", Target: "/dst/", Enabled: true}, + {Name: "job2", Source: "/src2/", Target: "/dst2/", Enabled: false}, + }, + } + + mockCmd.EXPECT().GetVersionInfo().Return("rsync version 3.2.3", "/usr/bin/rsync", nil).Once() + mockCmd.EXPECT().Run(mock.AnythingOfType("internal.Job")).Return(Success).Once() + + err := cfg.Apply(mockCmd, logger, &output) + require.NoError(t, err) + assert.Contains(t, output.String(), "Status [job1]: SUCCESS") +} +``` + +### MockExec — Testing Command Execution + +```go +func TestSyncCommand_Run_Success(t *testing.T) { + mockExec := NewMockExec(t) + var output bytes.Buffer + + cmd := NewSyncCommand("/usr/bin/rsync", "/tmp/logs", mockExec, &output) + job := Job{Name: "docs", Source: "/src/", Target: "/dst/", Enabled: true, Delete: true} + + mockExec.EXPECT().Execute("/usr/bin/rsync", mock.Anything).Return([]byte("done"), nil).Once() + + status := cmd.Run(job) + assert.Equal(t, Success, status) +} +``` + +### Testing Disabled Jobs (no mock expectations needed) + +```go +func TestJobApply_DisabledJob(t *testing.T) { + mockCmd := NewMockJobCommand(t) + disabledJob := Job{Name: "skip_me", Enabled: false} + + // No expectations set — Run should NOT be called + status := disabledJob.Apply(mockCmd) + assert.Equal(t, Skipped, status) + // MockJobCommand automatically verifies Run was not called +} +``` + +## Regenerating Mocks + +When interfaces change, regenerate with: + +```bash +mockery +``` + +This updates all mocks according to `.mockery.yml`. Generated files are committed to the repository. 
\ No newline at end of file diff --git a/Makefile b/Makefile index 48404b3..b4b88c2 100644 --- a/Makefile +++ b/Makefile @@ -3,8 +3,9 @@ # Build command with common flags BUILD_CMD = CGO_ENABLED=0 go build -trimpath -ldflags="-s -w" -tags=prod PACKAGE = ./backup/main.go +COVERAGE_THRESHOLD = 98 -.PHONY: build clean test lint tidy checksums release sanity-check check-mod-tidy lint-config-check lint-fix format check-clean +.PHONY: build clean test lint tidy checksums release sanity-check check-mod-tidy lint-config-check lint-fix format check-clean check-coverage format: go fmt ./... @@ -33,7 +34,7 @@ sanity-check: format check-clean check-mod-tidy @echo "OK: All sanity checks passed." test: - go test ./... -v + go test -race ./... -v tidy: gofmt -s -w . @@ -73,6 +74,17 @@ report-size: build go install github.com/Zxilly/go-size-analyzer/cmd/gsa@latest gsa --web --listen=":8910" --open dist/backup +check-coverage: + @go test ./... -count=1 -coverprofile=/tmp/coverage.out -coverpkg=./backup/... + @COVERAGE=$$(go tool cover -func=/tmp/coverage.out | grep '^total:' | awk '{print int($$3)}'); \ + echo "Total coverage: $${COVERAGE}%"; \ + if [ "$${COVERAGE}" -lt "$(COVERAGE_THRESHOLD)" ]; then \ + echo "FAIL: Coverage $${COVERAGE}% is below threshold $(COVERAGE_THRESHOLD)%"; \ + exit 1; \ + else \ + echo "OK: Coverage $${COVERAGE}% meets threshold $(COVERAGE_THRESHOLD)%"; \ + fi + report-coverage: @mkdir -p coverage @go test ./... -count=1 -coverprofile=coverage/coverage.out -coverpkg=./backup/... diff --git a/TESTING_GUIDE.md b/TESTING_GUIDE.md new file mode 100644 index 0000000..1758263 --- /dev/null +++ b/TESTING_GUIDE.md @@ -0,0 +1,161 @@ +# Testing Guide + +## Overview + +All tests use dependency injection — no global state mutation. 
Key patterns: + +- **`testify`** for assertions (`require` for fatal checks, `assert` for non-fatal) +- **`mockery`** for generated mocks (`MockExec`, `MockJobCommand`) +- **`afero`** for in-memory filesystem abstraction +- **`bytes.Buffer`** for capturing output +- **Table-driven tests** for multiple input scenarios +- Test files live in `/test/` subdirectories + +## Test Architecture + +``` +backup/ + cmd/test/ + commands_test.go # CLI integration tests (all commands) + root_test.go # Root command help output + internal/test/ + check_test.go # CoverageChecker tests (afero-based) + config_test.go # Config loading, validation, Apply + helper_test.go # NormalizePath, CreateMainLogger + job_test.go # Job.Apply with MockJobCommand + rsync_test.go # Command constructors, Run methods, GetVersionInfo + mock_exec_test.go # Generated mock for Exec interface + mock_jobcommand_test.go # Generated mock for JobCommand interface +``` + +## Dependency Injection Points + +| Dependency | Interface/Type | Real | Test | +|---|---|---|---| +| Command execution | `internal.Exec` | `OsExec` | `MockExec` or `stubExec` | +| Job runner | `internal.JobCommand` | `ListCommand`, `SyncCommand`, `SimulateCommand` | `MockJobCommand` | +| Filesystem | `afero.Fs` | `afero.NewOsFs()` | `afero.NewMemMapFs()` | +| Output | `io.Writer` | `os.Stdout` / `cmd.OutOrStdout()` | `bytes.Buffer` | +| Logging | `*log.Logger` | File-backed logger | `log.New(&buf, "", 0)` | +| Time | `time.Time` | `time.Now()` | Fixed `time.Date(...)` | + +## Command-Level Tests (cmd/test/) + +Commands are tested through cobra's `Execute()` with captured stdout: + +```go +// Stub for Exec interface — lightweight alternative to MockExec for cmd tests +type stubExec struct { + output []byte + err error +} + +func (s *stubExec) Execute(_ string, _ ...string) ([]byte, error) { + return s.output, s.err +} + +// Helper: run a command with full dependency injection +func executeCommandWithDeps(t *testing.T, fs afero.Fs, shell 
internal.Exec, args ...string) (string, error) { + t.Helper() + rootCmd := cmd.BuildRootCommandWithDeps(fs, shell) + var stdout bytes.Buffer + rootCmd.SetOut(&stdout) + rootCmd.SetErr(&bytes.Buffer{}) + rootCmd.SetArgs(args) + err := rootCmd.Execute() + return stdout.String(), err +} +``` + +Usage: + +```go +func TestRun_ValidConfig(t *testing.T) { + cfgPath := writeConfigFile(t, `...yaml...`) + shell := &stubExec{output: []byte("rsync version 3.2.7 protocol version 31\n")} + + stdout, err := executeCommandWithDeps(t, afero.NewMemMapFs(), shell, "run", "--config", cfgPath) + + require.NoError(t, err) + assert.Contains(t, stdout, "Job: docs") + assert.Contains(t, stdout, "Status [docs]: SUCCESS") +} +``` + +Three builder levels available: +- `BuildRootCommand()` — production defaults (real OS filesystem, real exec) +- `BuildRootCommandWithFs(fs)` — custom filesystem, real exec +- `BuildRootCommandWithDeps(fs, shell)` — full control for testing + +## Internal Tests — Mockery Mocks + +Generated mocks use the expectation pattern: + +```go +func TestConfigApply_VersionInfoSuccess(t *testing.T) { + mockCmd := NewMockJobCommand(t) + var output bytes.Buffer + logger := log.New(&bytes.Buffer{}, "", 0) + + cfg := Config{ + Jobs: []Job{ + {Name: "job1", Source: "/src/", Target: "/dst/", Enabled: true}, + }, + } + + mockCmd.EXPECT().GetVersionInfo().Return("rsync version 3.2.3", "/usr/bin/rsync", nil).Once() + mockCmd.EXPECT().Run(mock.AnythingOfType("internal.Job")).Return(Success).Once() + + err := cfg.Apply(mockCmd, logger, &output) + require.NoError(t, err) +} +``` + +## CoverageChecker Tests (afero) + +The `CoverageChecker` uses `afero.Fs` so tests never hit the real filesystem: + +```go +func newTestChecker() (*CoverageChecker, *bytes.Buffer) { + var buf bytes.Buffer + checker := &CoverageChecker{ + Logger: log.New(&buf, "", 0), + Fs: afero.NewMemMapFs(), + } + return checker, &buf +} +``` + +## Deterministic Time + +`CreateMainLogger` accepts `time.Time` for predictable 
log paths: + +```go +func fixedTime() time.Time { + return time.Date(2025, 6, 15, 14, 30, 45, 0, time.UTC) +} + +func TestCreateMainLogger_DeterministicLogPath(t *testing.T) { + _, logPath, cleanup, err := CreateMainLogger("backup.yaml", true, fixedTime()) + require.NoError(t, err) + defer cleanup() + assert.Equal(t, "logs/sync-2025-06-15T14-30-45-backup-sim", logPath) +} +``` + +## Running Tests + +```sh +make test # go test -race ./... -v +make check-coverage # Fail if coverage < 90% +make report-coverage # Generate HTML coverage report +``` + +## Key Principles + +1. **Inject, don't hardcode** — all external dependencies go through interfaces +2. **Never hit the real filesystem** in unit tests — use `afero.NewMemMapFs()` +3. **Use `require` for errors, `assert` for values** — `require` stops the test on failure +4. **Table-driven tests** for multiple input/output scenarios +5. **Scope mocks to individual tests** — each test creates its own mock instance +6. **Defer cleanup** — `CreateMainLogger` returns a cleanup function; always `defer` it \ No newline at end of file diff --git a/backup/cmd/check-coverage.go b/backup/cmd/check-coverage.go index b7f7738..2af8113 100644 --- a/backup/cmd/check-coverage.go +++ b/backup/cmd/check-coverage.go @@ -2,6 +2,8 @@ package cmd import ( "fmt" + "log" + "os" "backup-rsync/backup/internal" @@ -9,22 +11,33 @@ import ( "github.com/spf13/cobra" ) -func buildCheckCoverageCommand() *cobra.Command { - var fs = afero.NewOsFs() - +func buildCheckCoverageCommand(fs afero.Fs) *cobra.Command { return &cobra.Command{ Use: "check-coverage", Short: "Check path coverage", - Run: func(cmd *cobra.Command, args []string) { + RunE: func(cmd *cobra.Command, args []string) error { configPath, _ := cmd.Flags().GetString("config") - cfg := internal.LoadResolvedConfig(configPath) - uncoveredPaths := internal.ListUncoveredPaths(fs, cfg) - fmt.Println("Uncovered paths:") + cfg, err := internal.LoadResolvedConfig(configPath) + if err != nil { + return 
fmt.Errorf("loading config: %w", err) + } + + checker := &internal.CoverageChecker{ + Logger: log.New(os.Stderr, "", log.LstdFlags), + Fs: fs, + } + + uncoveredPaths := checker.ListUncoveredPaths(cfg) + + out := cmd.OutOrStdout() + fmt.Fprintln(out, "Uncovered paths:") for _, path := range uncoveredPaths { - fmt.Println(path) + fmt.Fprintln(out, path) } + + return nil }, } } diff --git a/backup/cmd/config.go b/backup/cmd/config.go index c3ecdd3..95c684f 100644 --- a/backup/cmd/config.go +++ b/backup/cmd/config.go @@ -16,20 +16,34 @@ func buildConfigCommand() *cobra.Command { var showVerb = &cobra.Command{ Use: "show", Short: "Show resolved configuration", - Run: func(cmd *cobra.Command, args []string) { + RunE: func(cmd *cobra.Command, args []string) error { configPath, _ := cmd.Flags().GetString("config") - cfg := internal.LoadResolvedConfig(configPath) - fmt.Printf("Resolved Configuration:\n%s\n", cfg) + + cfg, err := internal.LoadResolvedConfig(configPath) + if err != nil { + return fmt.Errorf("loading config: %w", err) + } + + fmt.Fprintf(cmd.OutOrStdout(), "Resolved Configuration:\n%s\n", cfg) + + return nil }, } var validateVerb = &cobra.Command{ Use: "validate", Short: "Validate configuration", - Run: func(cmd *cobra.Command, args []string) { + RunE: func(cmd *cobra.Command, args []string) error { configPath, _ := cmd.Flags().GetString("config") - internal.LoadResolvedConfig(configPath) - fmt.Println("Configuration is valid.") + + _, err := internal.LoadResolvedConfig(configPath) + if err != nil { + return fmt.Errorf("validating config: %w", err) + } + + fmt.Fprintln(cmd.OutOrStdout(), "Configuration is valid.") + + return nil }, } diff --git a/backup/cmd/list.go b/backup/cmd/list.go index 2e957a3..341105a 100644 --- a/backup/cmd/list.go +++ b/backup/cmd/list.go @@ -2,24 +2,32 @@ package cmd import ( "backup-rsync/backup/internal" + "fmt" "io" "log" "github.com/spf13/cobra" ) -func buildListCommand() *cobra.Command { +func buildListCommand(shell 
internal.Exec) *cobra.Command { return &cobra.Command{ Use: "list", Short: "List the commands that will be executed", - Run: func(cmd *cobra.Command, args []string) { + RunE: func(cmd *cobra.Command, args []string) error { configPath, _ := cmd.Flags().GetString("config") rsyncPath, _ := cmd.Flags().GetString("rsync-path") - cfg := internal.LoadResolvedConfig(configPath) - command := internal.NewListCommand(rsyncPath) + + cfg, err := internal.LoadResolvedConfig(configPath) + if err != nil { + return fmt.Errorf("loading config: %w", err) + } + + out := cmd.OutOrStdout() + command := internal.NewListCommand(rsyncPath, shell, out) logger := log.New(io.Discard, "", 0) - cfg.Apply(command, logger) + + return cfg.Apply(command, logger, out) }, } } diff --git a/backup/cmd/root.go b/backup/cmd/root.go index 2e2d794..3bb1dcb 100644 --- a/backup/cmd/root.go +++ b/backup/cmd/root.go @@ -1,10 +1,24 @@ package cmd import ( + "backup-rsync/backup/internal" + + "github.com/spf13/afero" "github.com/spf13/cobra" ) +// BuildRootCommand creates the root cobra command with production defaults. func BuildRootCommand() *cobra.Command { + return BuildRootCommandWithDeps(afero.NewOsFs(), &internal.OsExec{}) +} + +// BuildRootCommandWithFs creates the root command with a custom filesystem. +func BuildRootCommandWithFs(fs afero.Fs) *cobra.Command { + return BuildRootCommandWithDeps(fs, &internal.OsExec{}) +} + +// BuildRootCommandWithDeps creates the root command with full dependency injection. 
+func BuildRootCommandWithDeps(fs afero.Fs, shell internal.Exec) *cobra.Command { rootCmd := &cobra.Command{ Use: "backup", Short: "A tool for managing backups", @@ -15,12 +29,12 @@ func BuildRootCommand() *cobra.Command { rootCmd.PersistentFlags().String("rsync-path", "/usr/bin/rsync", "Path to the rsync binary") rootCmd.AddCommand( - buildListCommand(), - buildRunCommand(), - buildSimulateCommand(), + buildListCommand(shell), + buildRunCommand(shell), + buildSimulateCommand(shell), buildConfigCommand(), - buildCheckCoverageCommand(), - buildVersionCommand(), + buildCheckCoverageCommand(fs), + buildVersionCommand(shell), ) return rootCmd diff --git a/backup/cmd/run.go b/backup/cmd/run.go index cc133ea..6d1ec21 100644 --- a/backup/cmd/run.go +++ b/backup/cmd/run.go @@ -2,23 +2,36 @@ package cmd import ( "backup-rsync/backup/internal" + "fmt" + "time" "github.com/spf13/cobra" ) -func buildRunCommand() *cobra.Command { +func buildRunCommand(shell internal.Exec) *cobra.Command { return &cobra.Command{ Use: "run", Short: "Execute the sync jobs", - Run: func(cmd *cobra.Command, args []string) { + RunE: func(cmd *cobra.Command, args []string) error { configPath, _ := cmd.Flags().GetString("config") rsyncPath, _ := cmd.Flags().GetString("rsync-path") - cfg := internal.LoadResolvedConfig(configPath) - logger, logPath := internal.CreateMainLogger(configPath, false) - command := internal.NewSyncCommand(rsyncPath, logPath) + cfg, err := internal.LoadResolvedConfig(configPath) + if err != nil { + return fmt.Errorf("loading config: %w", err) + } - cfg.Apply(command, logger) + logger, logPath, cleanup, err := internal.CreateMainLogger(configPath, false, time.Now()) + if err != nil { + return fmt.Errorf("creating logger: %w", err) + } + + defer cleanup() + + out := cmd.OutOrStdout() + command := internal.NewSyncCommand(rsyncPath, logPath, shell, out) + + return cfg.Apply(command, logger, out) }, } } diff --git a/backup/cmd/simulate.go b/backup/cmd/simulate.go index 
eb4ab83..f63bfd4 100644 --- a/backup/cmd/simulate.go +++ b/backup/cmd/simulate.go @@ -2,23 +2,36 @@ package cmd import ( "backup-rsync/backup/internal" + "fmt" + "time" "github.com/spf13/cobra" ) -func buildSimulateCommand() *cobra.Command { +func buildSimulateCommand(shell internal.Exec) *cobra.Command { return &cobra.Command{ Use: "simulate", Short: "Simulate the sync jobs", - Run: func(cmd *cobra.Command, args []string) { + RunE: func(cmd *cobra.Command, args []string) error { configPath, _ := cmd.Flags().GetString("config") rsyncPath, _ := cmd.Flags().GetString("rsync-path") - cfg := internal.LoadResolvedConfig(configPath) - logger, logPath := internal.CreateMainLogger(configPath, true) - command := internal.NewSimulateCommand(rsyncPath, logPath) + cfg, err := internal.LoadResolvedConfig(configPath) + if err != nil { + return fmt.Errorf("loading config: %w", err) + } - cfg.Apply(command, logger) + logger, logPath, cleanup, err := internal.CreateMainLogger(configPath, true, time.Now()) + if err != nil { + return fmt.Errorf("creating logger: %w", err) + } + + defer cleanup() + + out := cmd.OutOrStdout() + command := internal.NewSimulateCommand(rsyncPath, logPath, shell, out) + + return cfg.Apply(command, logger, out) }, } } diff --git a/backup/cmd/test/commands_test.go b/backup/cmd/test/commands_test.go new file mode 100644 index 0000000..b8b72f8 --- /dev/null +++ b/backup/cmd/test/commands_test.go @@ -0,0 +1,401 @@ +package cmd_test + +import ( + "bytes" + "os" + "path/filepath" + "testing" + + "backup-rsync/backup/cmd" + "backup-rsync/backup/internal" + + "github.com/spf13/afero" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +type stubExec struct { + output []byte + err error +} + +func (s *stubExec) Execute(_ string, _ ...string) ([]byte, error) { + return s.output, s.err +} + +func writeConfigFile(t *testing.T, content string) string { + t.Helper() + + dir := t.TempDir() + path := filepath.Join(dir, "test.yaml") + + err 
:= os.WriteFile(path, []byte(content), 0600) + require.NoError(t, err) + + return path +} + +func executeCommand(t *testing.T, args ...string) (string, error) { + t.Helper() + + return executeCommandWithFs(t, afero.NewMemMapFs(), args...) +} + +func executeCommandWithFs(t *testing.T, fs afero.Fs, args ...string) (string, error) { + t.Helper() + + rootCmd := cmd.BuildRootCommandWithFs(fs) + + var stdout bytes.Buffer + + rootCmd.SetOut(&stdout) + rootCmd.SetErr(&bytes.Buffer{}) + rootCmd.SetArgs(args) + + err := rootCmd.Execute() + + return stdout.String(), err +} + +func executeCommandWithDeps(t *testing.T, fs afero.Fs, shell internal.Exec, args ...string) (string, error) { + t.Helper() + + rootCmd := cmd.BuildRootCommandWithDeps(fs, shell) + + var stdout bytes.Buffer + + rootCmd.SetOut(&stdout) + rootCmd.SetErr(&bytes.Buffer{}) + rootCmd.SetArgs(args) + + err := rootCmd.Execute() + + return stdout.String(), err +} + +// --- config show --- + +func TestConfigShow_ValidConfig(t *testing.T) { + cfgPath := writeConfigFile(t, ` +sources: + - path: "/home" +targets: + - path: "/backup" +jobs: + - name: "docs" + source: "/home/docs/" + target: "/backup/docs/" +`) + + stdout, err := executeCommand(t, "config", "show", "--config", cfgPath) + + require.NoError(t, err) + assert.Contains(t, stdout, "docs") + assert.Contains(t, stdout, "/home/docs") + assert.Contains(t, stdout, "/backup/docs") +} + +func TestConfigShow_MissingFile(t *testing.T) { + _, err := executeCommand(t, "config", "show", "--config", "/nonexistent/config.yaml") + + require.Error(t, err) + assert.Contains(t, err.Error(), "loading config") +} + +func TestConfigShow_InvalidYAML(t *testing.T) { + cfgPath := writeConfigFile(t, `{{{invalid yaml`) + + _, err := executeCommand(t, "config", "show", "--config", cfgPath) + + require.Error(t, err) + assert.Contains(t, err.Error(), "loading config") +} + +// --- config validate --- + +func TestConfigValidate_ValidConfig(t *testing.T) { + cfgPath := writeConfigFile(t, ` 
+sources: + - path: "/home" +targets: + - path: "/backup" +jobs: + - name: "docs" + source: "/home/docs/" + target: "/backup/docs/" +`) + + stdout, err := executeCommand(t, "config", "validate", "--config", cfgPath) + + require.NoError(t, err) + assert.Contains(t, stdout, "Configuration is valid.") +} + +func TestConfigValidate_MissingFile(t *testing.T) { + _, err := executeCommand(t, "config", "validate", "--config", "/nonexistent/config.yaml") + + require.Error(t, err) + assert.Contains(t, err.Error(), "validating config") +} + +func TestConfigValidate_DuplicateJobNames(t *testing.T) { + cfgPath := writeConfigFile(t, ` +sources: + - path: "/home" +targets: + - path: "/backup" +jobs: + - name: "same" + source: "/home/a/" + target: "/backup/a/" + - name: "same" + source: "/home/b/" + target: "/backup/b/" +`) + + _, err := executeCommand(t, "config", "validate", "--config", cfgPath) + + require.Error(t, err) + assert.Contains(t, err.Error(), "validating config") +} + +// --- list --- + +func TestList_MissingConfig(t *testing.T) { + _, err := executeCommand(t, "list", "--config", "/nonexistent/config.yaml") + + require.Error(t, err) + assert.Contains(t, err.Error(), "loading config") +} + +// --- run --- + +func TestRun_MissingConfig(t *testing.T) { + _, err := executeCommand(t, "run", "--config", "/nonexistent/config.yaml") + + require.Error(t, err) + assert.Contains(t, err.Error(), "loading config") +} + +func TestRun_CreateLoggerError(t *testing.T) { + cfgPath := writeConfigFile(t, ` +sources: + - path: "/home" +targets: + - path: "/backup" +jobs: + - name: "docs" + source: "/home/docs/" + target: "/backup/docs/" +`) + + // Block log directory creation by placing a file named "logs" + tmpDir := t.TempDir() + t.Chdir(tmpDir) + + require.NoError(t, os.WriteFile(filepath.Join(tmpDir, "logs"), []byte("block"), 0600)) + + shell := &stubExec{output: []byte("rsync version 3.2.7 protocol version 31\n")} + + _, err := executeCommandWithDeps(t, afero.NewMemMapFs(), shell, 
"run", "--config", cfgPath) + + require.Error(t, err) + assert.Contains(t, err.Error(), "creating logger") +} + +func TestRun_ValidConfig(t *testing.T) { + cfgPath := writeConfigFile(t, ` +sources: + - path: "/home" +targets: + - path: "/backup" +jobs: + - name: "docs" + source: "/home/docs/" + target: "/backup/docs/" + enabled: true +`) + + shell := &stubExec{output: []byte("rsync version 3.2.7 protocol version 31\n")} + + stdout, err := executeCommandWithDeps(t, afero.NewMemMapFs(), shell, "run", "--config", cfgPath) + + require.NoError(t, err) + assert.Contains(t, stdout, "Job: docs") + assert.Contains(t, stdout, "Status [docs]: SUCCESS") +} + +// --- simulate --- + +func TestSimulate_MissingConfig(t *testing.T) { + _, err := executeCommand(t, "simulate", "--config", "/nonexistent/config.yaml") + + require.Error(t, err) + assert.Contains(t, err.Error(), "loading config") +} + +func TestSimulate_CreateLoggerError(t *testing.T) { + cfgPath := writeConfigFile(t, ` +sources: + - path: "/home" +targets: + - path: "/backup" +jobs: + - name: "docs" + source: "/home/docs/" + target: "/backup/docs/" +`) + + tmpDir := t.TempDir() + t.Chdir(tmpDir) + + require.NoError(t, os.WriteFile(filepath.Join(tmpDir, "logs"), []byte("block"), 0600)) + + shell := &stubExec{output: []byte("rsync version 3.2.7 protocol version 31\n")} + + _, err := executeCommandWithDeps(t, afero.NewMemMapFs(), shell, "simulate", "--config", cfgPath) + + require.Error(t, err) + assert.Contains(t, err.Error(), "creating logger") +} + +func TestSimulate_ValidConfig(t *testing.T) { + cfgPath := writeConfigFile(t, ` +sources: + - path: "/home" +targets: + - path: "/backup" +jobs: + - name: "docs" + source: "/home/docs/" + target: "/backup/docs/" + enabled: true +`) + + shell := &stubExec{output: []byte("rsync version 3.2.7 protocol version 31\n")} + + stdout, err := executeCommandWithDeps(t, afero.NewMemMapFs(), shell, "simulate", "--config", cfgPath) + + require.NoError(t, err) + assert.Contains(t, stdout, 
"Job: docs") + assert.Contains(t, stdout, "Status [docs]: SUCCESS") +} + +// --- version --- + +func TestVersion_InvalidRsyncPath(t *testing.T) { + _, err := executeCommand(t, "version", "--rsync-path", "not-absolute") + + require.Error(t, err) + assert.Contains(t, err.Error(), "getting version info") +} + +func TestVersion_NonExistentRsyncPath(t *testing.T) { + _, err := executeCommand(t, "version", "--rsync-path", "/nonexistent/rsync") + + require.Error(t, err) + assert.Contains(t, err.Error(), "getting version info") +} + +// --- check-coverage --- + +func TestCheckCoverage_MissingConfig(t *testing.T) { + _, err := executeCommand(t, "check-coverage", "--config", "/nonexistent/config.yaml") + + require.Error(t, err) + assert.Contains(t, err.Error(), "loading config") +} + +func TestCheckCoverage_WithUncoveredPaths(t *testing.T) { + cfgPath := writeConfigFile(t, ` +sources: + - path: "/src" +targets: + - path: "/dst" +jobs: + - name: "docs" + source: "/src/docs/" + target: "/dst/docs/" +`) + + fs := afero.NewMemMapFs() + _ = fs.MkdirAll("/src/docs", 0755) + _ = fs.MkdirAll("/src/photos", 0755) + + stdout, err := executeCommandWithFs(t, fs, "check-coverage", "--config", cfgPath) + + require.NoError(t, err) + assert.Contains(t, stdout, "Uncovered paths:") + assert.Contains(t, stdout, "/src") +} + +func TestCheckCoverage_ValidConfig(t *testing.T) { + cfgPath := writeConfigFile(t, ` +sources: + - path: "/src" +targets: + - path: "/dst" +jobs: + - name: "docs" + source: "/src/docs/" + target: "/dst/docs/" +`) + + fs := afero.NewMemMapFs() + _ = fs.MkdirAll("/src/docs", 0755) + + stdout, err := executeCommandWithFs(t, fs, "check-coverage", "--config", cfgPath) + + require.NoError(t, err) + assert.Contains(t, stdout, "Uncovered paths:") +} + +// --- version (positive path) --- + +func TestVersion_ValidRsync(t *testing.T) { + // Only run if rsync is actually installed + _, err := os.Stat("/usr/bin/rsync") + if os.IsNotExist(err) { + t.Skip("rsync not installed") + } + + 
stdout, err := executeCommand(t, "version", "--rsync-path", "/usr/bin/rsync") + + require.NoError(t, err) + assert.Contains(t, stdout, "Rsync Binary Path: /usr/bin/rsync") + assert.Contains(t, stdout, "Version Info:") +} + +func TestVersion_WithMockExec(t *testing.T) { + shell := &stubExec{output: []byte("rsync version 3.2.7 protocol version 31\n")} + + stdout, err := executeCommandWithDeps(t, afero.NewMemMapFs(), shell, "version", "--rsync-path", "/usr/bin/rsync") + + require.NoError(t, err) + assert.Contains(t, stdout, "Rsync Binary Path: /usr/bin/rsync") + assert.Contains(t, stdout, "rsync version 3.2.7") +} + +// --- list (positive path) --- + +func TestList_ValidConfig(t *testing.T) { + cfgPath := writeConfigFile(t, ` +sources: + - path: "/home" +targets: + - path: "/backup" +jobs: + - name: "docs" + source: "/home/docs/" + target: "/backup/docs/" + enabled: true +`) + + shell := &stubExec{output: []byte("rsync version 3.2.7 protocol version 31\n")} + + stdout, err := executeCommandWithDeps(t, afero.NewMemMapFs(), shell, "list", "--config", cfgPath) + + require.NoError(t, err) + assert.Contains(t, stdout, "Job: docs") + assert.Contains(t, stdout, "Status [docs]: SUCCESS") +} diff --git a/backup/cmd/version.go b/backup/cmd/version.go index e6e4c69..06af080 100644 --- a/backup/cmd/version.go +++ b/backup/cmd/version.go @@ -8,23 +8,24 @@ import ( "github.com/spf13/cobra" ) -func buildVersionCommand() *cobra.Command { +func buildVersionCommand(shell internal.Exec) *cobra.Command { return &cobra.Command{ Use: "version", Short: "Prints the rsync version, protocol version, and full path to the rsync binary.", - Run: func(cmd *cobra.Command, args []string) { + RunE: func(cmd *cobra.Command, args []string) error { rsyncPath, _ := cmd.Flags().GetString("rsync-path") - rsync := internal.NewSyncCommand(rsyncPath, "") + out := cmd.OutOrStdout() + rsync := internal.NewSyncCommand(rsyncPath, "", shell, out) output, _, err := rsync.GetVersionInfo() if err != nil { - 
fmt.Printf("%v\n", err) - - return + return fmt.Errorf("getting version info: %w", err) } - fmt.Printf("Rsync Binary Path: %s\n", rsyncPath) - fmt.Printf("Version Info: %s", output) + fmt.Fprintf(out, "Rsync Binary Path: %s\n", rsyncPath) + fmt.Fprintf(out, "Version Info: %s", output) + + return nil }, } } diff --git a/backup/internal/check.go b/backup/internal/check.go index 362b53c..104599a 100644 --- a/backup/internal/check.go +++ b/backup/internal/check.go @@ -10,23 +10,18 @@ import ( "github.com/spf13/afero" ) -func isExcluded(path string, job Job) bool { - for _, exclusion := range job.Exclusions { - exclusionPath := filepath.Join(job.Source, exclusion) - if strings.HasPrefix(NormalizePath(path), exclusionPath) { - return true - } - } - - return false +// CoverageChecker analyzes path coverage against a configuration. +type CoverageChecker struct { + Logger *log.Logger + Fs afero.Fs } -func IsExcludedGlobally(path string, sources []Path) bool { +func (c *CoverageChecker) IsExcludedGlobally(path string, sources []Path) bool { for _, source := range sources { for _, exclusion := range source.Exclusions { exclusionPath := filepath.Join(source.Path, exclusion) if strings.HasPrefix(NormalizePath(path), exclusionPath) { - log.Printf("EXCLUDED: Path '%s' is globally excluded by '%s' in source '%s'", path, exclusion, source.Path) + c.Logger.Printf("EXCLUDED: Path '%s' is globally excluded by '%s' in source '%s'", path, exclusion, source.Path) return true } @@ -36,25 +31,24 @@ func IsExcludedGlobally(path string, sources []Path) bool { return false } -func isCoveredByJob(path string, job Job) bool { - if NormalizePath(job.Source) == NormalizePath(path) { - log.Printf("COVERED: Path '%s' is covered by job '%s'", path, job.Name) - - return true - } +func (c *CoverageChecker) ListUncoveredPaths(cfg Config) []string { + var result []string - if isExcluded(path, job) { - log.Printf("EXCLUDED: Path '%s' is excluded by job '%s'", path, job.Name) + seen := 
make(map[string]bool) - return true + for _, source := range cfg.Sources { + c.checkPath(source.Path, cfg, &result, seen) } - return false + sort.Strings(result) // Ensure consistent ordering for test comparison + + return result } -func isCovered(path string, jobs []Job) bool { - for _, job := range jobs { - if isCoveredByJob(path, job) { +func (c *CoverageChecker) isExcluded(path string, job Job) bool { + for _, exclusion := range job.Exclusions { + exclusionPath := filepath.Join(job.Source, exclusion) + if strings.HasPrefix(NormalizePath(path), exclusionPath) { return true } } @@ -62,23 +56,35 @@ func isCovered(path string, jobs []Job) bool { return false } -func ListUncoveredPaths(fs afero.Fs, cfg Config) []string { - var result []string +func (c *CoverageChecker) isCoveredByJob(path string, job Job) bool { + if NormalizePath(job.Source) == NormalizePath(path) { + c.Logger.Printf("COVERED: Path '%s' is covered by job '%s'", path, job.Name) - seen := make(map[string]bool) + return true + } - for _, source := range cfg.Sources { - checkPath(fs, source.Path, cfg, &result, seen) + if c.isExcluded(path, job) { + c.Logger.Printf("EXCLUDED: Path '%s' is excluded by job '%s'", path, job.Name) + + return true } - sort.Strings(result) // Ensure consistent ordering for test comparison + return false +} - return result +func (c *CoverageChecker) isCovered(path string, jobs []Job) bool { + for _, job := range jobs { + if c.isCoveredByJob(path, job) { + return true + } + } + + return false } -func checkPath(fs afero.Fs, path string, cfg Config, result *[]string, seen map[string]bool) { +func (c *CoverageChecker) checkPath(path string, cfg Config, result *[]string, seen map[string]bool) { if seen[path] { - log.Printf("SKIP: Path '%s' already seen", path) + c.Logger.Printf("SKIP: Path '%s' already seen", path) return } @@ -86,42 +92,43 @@ func checkPath(fs afero.Fs, path string, cfg Config, result *[]string, seen map[ seen[path] = true // Skip if globally excluded - if 
IsExcludedGlobally(path, cfg.Sources) { - log.Printf("SKIP: Path '%s' is globally excluded", path) + if c.IsExcludedGlobally(path, cfg.Sources) { + c.Logger.Printf("SKIP: Path '%s' is globally excluded", path) return } // Skip if covered by a job - if isCovered(path, cfg.Jobs) { - log.Printf("SKIP: Path '%s' is covered by a job", path) + if c.isCovered(path, cfg.Jobs) { + c.Logger.Printf("SKIP: Path '%s' is covered by a job", path) return } // Check if it's effectively covered through descendants - if isEffectivelyCovered(fs, path, cfg) { - log.Printf("SKIP: Path '%s' is effectively covered", path) + if c.isEffectivelyCovered(path, cfg) { + c.Logger.Printf("SKIP: Path '%s' is effectively covered", path) return } // Add uncovered path - log.Printf("ADD: Path '%s' is uncovered", path) + c.Logger.Printf("ADD: Path '%s' is uncovered", path) *result = append(*result, path) } -// Check if a directory is effectively covered (all its descendants are covered or excluded). -func isEffectivelyCovered(fs afero.Fs, path string, cfg Config) bool { - children, err := getChildDirectories(fs, path) +// isEffectivelyCovered checks if a directory is effectively covered +// (all its descendants are covered or excluded). 
+func (c *CoverageChecker) isEffectivelyCovered(path string, cfg Config) bool { + children, err := getChildDirectories(c.Fs, path) if err != nil { - log.Printf("ERROR: could not get child directories of '%s': %v", path, err) + c.Logger.Printf("ERROR: could not get child directories of '%s': %v", path, err) return false } if len(children) == 0 { - log.Printf("NOT COVERED: Path '%s' has no children", path) + c.Logger.Printf("NOT COVERED: Path '%s' has no children", path) return false // Leaf directories are not effectively covered unless directly covered } @@ -129,15 +136,15 @@ func isEffectivelyCovered(fs afero.Fs, path string, cfg Config) bool { allCovered := true for _, child := range children { - if !IsExcludedGlobally(child, cfg.Sources) && !isCovered(child, cfg.Jobs) && !isEffectivelyCovered(fs, child, cfg) { - log.Printf("UNCOVERED CHILD: Path '%s' has uncovered child '%s'", path, child) + if !c.IsExcludedGlobally(child, cfg.Sources) && !c.isCovered(child, cfg.Jobs) && !c.isEffectivelyCovered(child, cfg) { + c.Logger.Printf("UNCOVERED CHILD: Path '%s' has uncovered child '%s'", path, child) allCovered = false } } if allCovered { - log.Printf("COVERED: Path '%s' is effectively covered", path) + c.Logger.Printf("COVERED: Path '%s' is effectively covered", path) } return allCovered diff --git a/backup/internal/config.go b/backup/internal/config.go index 8c8f1e9..6324c39 100644 --- a/backup/internal/config.go +++ b/backup/internal/config.go @@ -18,6 +18,7 @@ var ( ErrInvalidPath = errors.New("invalid path") ErrPathValidation = errors.New("path validation failed") ErrOverlappingPath = errors.New("overlapping path detected") + ErrJobFailure = errors.New("one or more jobs failed") ) // Config represents the overall backup configuration. 
@@ -29,12 +30,15 @@ type Config struct { } func (cfg Config) String() string { - out, _ := yaml.Marshal(cfg) + out, err := yaml.Marshal(cfg) + if err != nil { + return fmt.Sprintf("error marshaling config: %v", err) + } return string(out) } -func (cfg Config) Apply(rsync JobCommand, logger *log.Logger) { +func (cfg Config) Apply(rsync JobCommand, logger *log.Logger, output io.Writer) error { versionInfo, fullpath, err := rsync.GetVersionInfo() if err != nil { logger.Printf("Failed to fetch rsync version: %v", err) @@ -43,11 +47,32 @@ func (cfg Config) Apply(rsync JobCommand, logger *log.Logger) { logger.Printf("Rsync Version Info: %s", versionInfo) } + var succeeded, failed, skipped int + for _, job := range cfg.Jobs { status := job.Apply(rsync) logger.Printf("STATUS [%s]: %s", job.Name, status) - fmt.Printf("Status [%s]: %s\n", job.Name, status) + fmt.Fprintf(output, "Status [%s]: %s\n", job.Name, status) + + switch status { + case Success: + succeeded++ + case Failure: + failed++ + case Skipped: + skipped++ + } } + + summary := fmt.Sprintf("Summary: %d succeeded, %d failed, %d skipped", succeeded, failed, skipped) + logger.Print(summary) + fmt.Fprintln(output, summary) + + if failed > 0 { + return fmt.Errorf("%w: %d of %d jobs", ErrJobFailure, failed, len(cfg.Jobs)) + } + + return nil } func LoadConfig(reader io.Reader) (Config, error) { @@ -153,7 +178,6 @@ func validateJobPaths(jobs []Job, pathType string, getPath func(job Job) string) if pathType == "source" { for _, exclusion := range job2.Exclusions { exclusionPath := NormalizePath(filepath.Join(job2.Source, exclusion)) - // log.Printf("job2: %s %s\n", job2.Name, exclusionPath) if strings.HasPrefix(path1, exclusionPath) { excluded = true @@ -173,40 +197,39 @@ func validateJobPaths(jobs []Job, pathType string, getPath func(job Job) string) return nil } -func LoadResolvedConfig(configPath string) Config { +func LoadResolvedConfig(configPath string) (Config, error) { configFile, err := os.Open(configPath) if err 
!= nil { - log.Fatalf("Failed to open config: %v", err) + return Config{}, fmt.Errorf("failed to open config: %w", err) } + defer configFile.Close() cfg, err := LoadConfig(configFile) - _ = configFile.Close() - if err != nil { - log.Fatalf("Failed to parse YAML: %v", err) + return Config{}, fmt.Errorf("failed to parse YAML: %w", err) } err = ValidateJobNames(cfg.Jobs) if err != nil { - log.Fatalf("Job validation failed: %v", err) + return Config{}, fmt.Errorf("job validation failed: %w", err) } resolvedCfg := ResolveConfig(cfg) err = ValidatePaths(resolvedCfg) if err != nil { - log.Fatalf("Path validation failed: %v", err) + return Config{}, fmt.Errorf("path validation failed: %w", err) } err = validateJobPaths(resolvedCfg.Jobs, "source", func(job Job) string { return job.Source }) if err != nil { - log.Fatalf("Job source path validation failed: %v", err) + return Config{}, fmt.Errorf("job source path validation failed: %w", err) } err = validateJobPaths(resolvedCfg.Jobs, "target", func(job Job) string { return job.Target }) if err != nil { - log.Fatalf("Job target path validation failed: %v", err) + return Config{}, fmt.Errorf("job target path validation failed: %w", err) } - return resolvedCfg + return resolvedCfg, nil } diff --git a/backup/internal/exec.go b/backup/internal/exec.go index e72c984..3f94f50 100644 --- a/backup/internal/exec.go +++ b/backup/internal/exec.go @@ -7,6 +7,7 @@ import ( "strings" ) +// Exec abstracts command execution for testability. 
type Exec interface { Execute(name string, args ...string) ([]byte, error) } diff --git a/backup/internal/helper.go b/backup/internal/helper.go index ce5d0da..080576c 100644 --- a/backup/internal/helper.go +++ b/backup/internal/helper.go @@ -2,6 +2,7 @@ package internal import ( + "fmt" "log" "os" "path/filepath" @@ -22,10 +23,10 @@ func NormalizePath(path string) string { const LogFilePermission = 0644 const LogDirPermission = 0755 -func getLogPath(simulate bool, configPath string) string { +func getLogPath(simulate bool, configPath string, now time.Time) string { filename := filepath.Base(configPath) filename = strings.TrimSuffix(filename, ".yaml") - logPath := "logs/sync-" + time.Now().Format("2006-01-02T15-04-05") + "-" + filename + logPath := "logs/sync-" + now.Format("2006-01-02T15-04-05") + "-" + filename if simulate { logPath += "-sim" @@ -34,21 +35,25 @@ func getLogPath(simulate bool, configPath string) string { return logPath } -func CreateMainLogger(configPath string, simulate bool) (*log.Logger, string) { - logPath := getLogPath(simulate, configPath) +func CreateMainLogger(configPath string, simulate bool, now time.Time) (*log.Logger, string, func() error, error) { + logPath := getLogPath(simulate, configPath, now) overallLogPath := logPath + "/summary.log" err := os.MkdirAll(logPath, LogDirPermission) if err != nil { - log.Fatalf("Failed to create log directory: %v", err) + return nil, "", nil, fmt.Errorf("failed to create log directory: %w", err) } overallLogFile, err := os.OpenFile(overallLogPath, os.O_CREATE|os.O_WRONLY|os.O_APPEND, LogFilePermission) if err != nil { - log.Fatalf("Failed to open overall log file: %v", err) + return nil, "", nil, fmt.Errorf("failed to open overall log file: %w", err) } logger := log.New(overallLogFile, "", log.LstdFlags) - return logger, logPath + cleanup := func() error { + return overallLogFile.Close() + } + + return logger, logPath, cleanup, nil } diff --git a/backup/internal/job_command.go 
b/backup/internal/job_command.go index f7c772f..80e7be7 100644 --- a/backup/internal/job_command.go +++ b/backup/internal/job_command.go @@ -1,13 +1,18 @@ package internal +// JobStatus represents the outcome of a job execution. type JobStatus string const ( + // Success indicates the job completed successfully. Success JobStatus = "SUCCESS" + // Failure indicates the job failed. Failure JobStatus = "FAILURE" + // Skipped indicates the job was skipped (e.g., disabled). Skipped JobStatus = "SKIPPED" ) +// JobCommand defines the interface for running backup jobs. type JobCommand interface { Run(job Job) JobStatus GetVersionInfo() (string, string, error) diff --git a/backup/internal/rsync.go b/backup/internal/rsync.go index 03b12a6..73d3b43 100644 --- a/backup/internal/rsync.go +++ b/backup/internal/rsync.go @@ -3,6 +3,7 @@ package internal import ( "errors" "fmt" + "io" "os" "path/filepath" "strings" @@ -13,23 +14,39 @@ var ErrInvalidRsyncPath = errors.New("rsync path must be an absolute path") const RsyncVersionFlag = "--version" +// SharedCommand holds common state for all rsync command types. type SharedCommand struct { BinPath string BaseLogPath string - Shell Exec + Shell Exec + Output io.Writer +} + +// NewSharedCommand creates a SharedCommand with the given dependencies. 
+func NewSharedCommand(binPath string, logPath string, shell Exec, output io.Writer) SharedCommand { + return SharedCommand{ + BinPath: binPath, + BaseLogPath: logPath, + Shell: shell, + Output: output, + } +} + +func (c SharedCommand) JobLogPath(job Job) string { + return fmt.Sprintf("%s/job-%s.log", c.BaseLogPath, job.Name) } func (c SharedCommand) PrintArgs(job Job, args []string) { - fmt.Printf("Job: %s\n", job.Name) - fmt.Printf("Command: %s %s\n", c.BinPath, strings.Join(args, " ")) + fmt.Fprintf(c.Output, "Job: %s\n", job.Name) + fmt.Fprintf(c.Output, "Command: %s %s\n", c.BinPath, strings.Join(args, " ")) } func (c SharedCommand) RunWithArgs(job Job, args []string) JobStatus { c.PrintArgs(job, args) out, err := c.Shell.Execute(c.BinPath, args...) - fmt.Printf("Output:\n%s\n", string(out)) + fmt.Fprintf(c.Output, "Output:\n%s\n", string(out)) if err != nil { return Failure @@ -47,7 +64,7 @@ func (c SharedCommand) RunWithArgsAndCaptureOutput(job Job, args []string, logPa if logPath != "" { writeErr := os.WriteFile(logPath, out, LogFilePermission) if writeErr != nil { - fmt.Printf("Warning: Failed to write output to log file %s: %v\n", logPath, writeErr) + fmt.Fprintf(c.Output, "Warning: Failed to write output to log file %s: %v\n", logPath, writeErr) } } @@ -78,6 +95,7 @@ func (c SharedCommand) GetVersionInfo() (string, string, error) { return string(output), rsyncPath, nil } +// ArgumentsForJob builds the rsync argument list for a given job. func ArgumentsForJob(job Job, logPath string, simulate bool) []string { args := []string{"-aiv", "--stats"} diff --git a/backup/internal/rsync_list.go b/backup/internal/rsync_list.go index b181af0..23b39fe 100644 --- a/backup/internal/rsync_list.go +++ b/backup/internal/rsync_list.go @@ -1,24 +1,21 @@ package internal -import ( - "fmt" -) +import "io" +// ListCommand prints the rsync commands that would be executed without running them. 
type ListCommand struct { SharedCommand } -func NewListCommand(binPath string) ListCommand { +// NewListCommand creates a ListCommand with the given dependencies. +func NewListCommand(binPath string, shell Exec, output io.Writer) ListCommand { return ListCommand{ - SharedCommand: SharedCommand{ - BinPath: binPath, - BaseLogPath: "", - Shell: &OsExec{}, - }, + SharedCommand: NewSharedCommand(binPath, "", shell, output), } } + func (c ListCommand) Run(job Job) JobStatus { - logPath := fmt.Sprintf("%s/job-%s.log", c.BaseLogPath, job.Name) + logPath := c.JobLogPath(job) args := ArgumentsForJob(job, logPath, false) c.PrintArgs(job, args) diff --git a/backup/internal/rsync_simulate.go b/backup/internal/rsync_simulate.go index ddcb527..aa9319d 100644 --- a/backup/internal/rsync_simulate.go +++ b/backup/internal/rsync_simulate.go @@ -1,25 +1,21 @@ package internal -import ( - "fmt" -) +import "io" +// SimulateCommand runs rsync in dry-run mode and captures output. type SimulateCommand struct { SharedCommand } -func NewSimulateCommand(binPath string, logPath string) SimulateCommand { +// NewSimulateCommand creates a SimulateCommand with the given dependencies. 
+func NewSimulateCommand(binPath string, logPath string, shell Exec, output io.Writer) SimulateCommand { return SimulateCommand{ - SharedCommand: SharedCommand{ - BinPath: binPath, - BaseLogPath: logPath, - Shell: &OsExec{}, - }, + SharedCommand: NewSharedCommand(binPath, logPath, shell, output), } } func (c SimulateCommand) Run(job Job) JobStatus { - logPath := fmt.Sprintf("%s/job-%s.log", c.BaseLogPath, job.Name) + logPath := c.JobLogPath(job) // Don't use --log-file in simulate mode as rsync doesn't log file changes to it in dry-run args := ArgumentsForJob(job, "", true) diff --git a/backup/internal/rsync_sync.go b/backup/internal/rsync_sync.go index ea22044..fb1b1f5 100644 --- a/backup/internal/rsync_sync.go +++ b/backup/internal/rsync_sync.go @@ -1,25 +1,21 @@ package internal -import ( - "fmt" -) +import "io" +// SyncCommand runs rsync to perform the actual backup. type SyncCommand struct { SharedCommand } -func NewSyncCommand(binPath string, logPath string) SyncCommand { +// NewSyncCommand creates a SyncCommand with the given dependencies. 
+func NewSyncCommand(binPath string, logPath string, shell Exec, output io.Writer) SyncCommand { return SyncCommand{ - SharedCommand: SharedCommand{ - BinPath: binPath, - BaseLogPath: logPath, - Shell: &OsExec{}, - }, + SharedCommand: NewSharedCommand(binPath, logPath, shell, output), } } func (c SyncCommand) Run(job Job) JobStatus { - logPath := fmt.Sprintf("%s/job-%s.log", c.BaseLogPath, job.Name) + logPath := c.JobLogPath(job) args := ArgumentsForJob(job, logPath, false) return c.RunWithArgs(job, args) diff --git a/backup/internal/test/check_test.go b/backup/internal/test/check_test.go index 3c4a139..236cee6 100644 --- a/backup/internal/test/check_test.go +++ b/backup/internal/test/check_test.go @@ -2,6 +2,7 @@ package internal_test import ( "bytes" + "io" "log" "path/filepath" "sort" @@ -13,6 +14,20 @@ import ( "github.com/stretchr/testify/assert" ) +func newTestChecker(fs afero.Fs, logBuf *bytes.Buffer) *CoverageChecker { + return &CoverageChecker{ + Logger: log.New(logBuf, "", 0), + Fs: fs, + } +} + +func newSilentChecker(fs afero.Fs) *CoverageChecker { + return &CoverageChecker{ + Logger: log.New(io.Discard, "", 0), + Fs: fs, + } +} + func TestIsExcludedGlobally_PathGloballyExcluded(t *testing.T) { sources := []Path{ { @@ -26,14 +41,14 @@ func TestIsExcludedGlobally_PathGloballyExcluded(t *testing.T) { } var logBuffer bytes.Buffer - log.SetOutput(&logBuffer) + + checker := newTestChecker(nil, &logBuffer) path := "/home/data/projects/P1" - expectsError := true expectedLog := "Path '/home/data/projects/P1' is globally excluded by '/projects/P1/' in source '/home/data/'" - result := IsExcludedGlobally(path, sources) - assert.Equal(t, expectsError, result) + result := checker.IsExcludedGlobally(path, sources) + assert.True(t, result) assert.Contains(t, logBuffer.String(), expectedLog) } @@ -49,14 +64,12 @@ func TestIsExcludedGlobally_PathNotExcluded(t *testing.T) { }, } - var logBuffer bytes.Buffer - log.SetOutput(&logBuffer) + checker := newSilentChecker(nil) 
path := "/home/data/projects/Other" - expectsError := false - result := IsExcludedGlobally(path, sources) - assert.Equal(t, expectsError, result) + result := checker.IsExcludedGlobally(path, sources) + assert.False(t, result) } func TestIsExcludedGlobally_PathExcludedInAnotherSource(t *testing.T) { @@ -72,14 +85,14 @@ func TestIsExcludedGlobally_PathExcludedInAnotherSource(t *testing.T) { } var logBuffer bytes.Buffer - log.SetOutput(&logBuffer) + + checker := newTestChecker(nil, &logBuffer) path := "/home/user/cache" - expectsError := true expectedLog := "Path '/home/user/cache' is globally excluded by '/cache/' in source '/home/user/'" - result := IsExcludedGlobally(path, sources) - assert.Equal(t, expectsError, result) + result := checker.IsExcludedGlobally(path, sources) + assert.True(t, result) assert.Contains(t, logBuffer.String(), expectedLog) } @@ -102,8 +115,10 @@ func runListUncoveredPathsTest( } } + checker := newSilentChecker(fs) + // Call the function - uncoveredPaths := ListUncoveredPaths(fs, cfg) + uncoveredPaths := checker.ListUncoveredPaths(cfg) // Assertions sort.Strings(uncoveredPaths) @@ -198,22 +213,125 @@ func TestListUncoveredPathsVariationsSubfoldersCovered(t *testing.T) { func TestListUncoveredPathsVariationsSubfoldersPartiallyCovered(t *testing.T) { t.Skip("Skipping test for partially covered subfolders") - // // Variation: one source covered, one uncovered subfolder - // runListUncoveredPathsTest(t, - // map[string][]string{ - // "/home/data": {"family"}, - // "/home/data/family": {"me", "you"}, - // "/home/data/family/me": {"a"}, - // "/home/data/family/you": {"a"}, - // }, - // Config{ - // Sources: []Path{ - // {Path: "/home/data"}, - // }, - // Jobs: []Job{ - // {Name: "JobMe", Source: "/home/data/family/me"}, - // }, - // }, - // []string{"/home/data/family/you"}, - // ) +} + +// Test that a job with exclusions properly marks child paths as excluded. 
+func TestListUncoveredPaths_JobExclusion(t *testing.T) { + runListUncoveredPathsTest(t, + map[string][]string{ + "/data": {"docs", "cache"}, + "/data/docs": {}, + "/data/cache": {}, + }, + Config{ + Sources: []Path{ + {Path: "/data"}, + }, + Jobs: []Job{ + {Name: "backup", Source: "/data/", Exclusions: []string{"cache"}}, + }, + }, + []string{}, + ) +} + +// Test that duplicate source paths are processed only once. +func TestListUncoveredPaths_DuplicateSourcesSkipped(t *testing.T) { + fs := afero.NewMemMapFs() + _ = fs.MkdirAll("/data", 0755) + + var logBuf bytes.Buffer + + checker := newTestChecker(fs, &logBuf) + + cfg := Config{ + Sources: []Path{ + {Path: "/data"}, + {Path: "/data"}, + }, + Jobs: []Job{ + {Name: "backup", Source: "/data"}, + }, + } + + result := checker.ListUncoveredPaths(cfg) + + assert.Empty(t, result) + assert.Contains(t, logBuf.String(), "SKIP: Path '/data' already seen") +} + +// Test getChildDirectories error path (unreadable directory). +func TestListUncoveredPaths_UnreadableDirectory(t *testing.T) { + fs := afero.NewMemMapFs() + // Don't create /data, so ReadDir will fail + + var logBuf bytes.Buffer + + checker := newTestChecker(fs, &logBuf) + + cfg := Config{ + Sources: []Path{ + {Path: "/data"}, + }, + Jobs: []Job{}, + } + + result := checker.ListUncoveredPaths(cfg) + + assert.Equal(t, []string{"/data"}, result) + assert.Contains(t, logBuf.String(), "ADD: Path '/data' is uncovered") +} + +// Test that a child path matching a job exclusion is marked as excluded +// (covers isExcluded true + isCoveredByJob excluded log). 
+func TestListUncoveredPaths_ChildPathExcludedByJob(t *testing.T) { + fs := afero.NewMemMapFs() + _ = fs.MkdirAll("/data/stuff/docs", 0755) + _ = fs.MkdirAll("/data/stuff/cache", 0755) + + var logBuf bytes.Buffer + + checker := newTestChecker(fs, &logBuf) + + cfg := Config{ + Sources: []Path{ + {Path: "/data/stuff"}, + }, + Jobs: []Job{ + // Source "/data" with exclusion "stuff/cache" so exclusionPath = "/data/stuff/cache" + {Name: "data-backup", Source: "/data", Exclusions: []string{"stuff/cache"}}, + // Covers the /data/stuff/docs child directly + {Name: "docs", Source: "/data/stuff/docs"}, + }, + } + + result := checker.ListUncoveredPaths(cfg) + + assert.Empty(t, result) + assert.Contains(t, logBuf.String(), "EXCLUDED: Path '/data/stuff/cache' is excluded by job 'data-backup'") +} + +// Test that a source path that is globally excluded is skipped in checkPath. +func TestListUncoveredPaths_GloballyExcludedSourceSkipped(t *testing.T) { + fs := afero.NewMemMapFs() + _ = fs.MkdirAll("/data/cache", 0755) + + var logBuf bytes.Buffer + + checker := newTestChecker(fs, &logBuf) + + cfg := Config{ + Sources: []Path{ + {Path: "/data", Exclusions: []string{"cache"}}, + {Path: "/data/cache"}, + }, + Jobs: []Job{ + {Name: "backup", Source: "/data"}, + }, + } + + result := checker.ListUncoveredPaths(cfg) + + assert.Empty(t, result) + assert.Contains(t, logBuf.String(), "SKIP: Path '/data/cache' is globally excluded") } diff --git a/backup/internal/test/config_test.go b/backup/internal/test/config_test.go index a3ae015..8556a96 100644 --- a/backup/internal/test/config_test.go +++ b/backup/internal/test/config_test.go @@ -2,9 +2,13 @@ package internal_test import ( "bytes" + "log" + "os" + "path/filepath" "testing" "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/mock" "github.com/stretchr/testify/require" "gopkg.in/yaml.v3" @@ -360,3 +364,215 @@ func TestResolveConfig(t *testing.T) { assert.Equal(t, "/home/user/Pictures", resolvedCfg.Jobs[1].Source) 
assert.Equal(t, "/backup/user/Pictures", resolvedCfg.Jobs[1].Target) } + +// writeTestConfig writes YAML content to a temp file and returns its path. +func writeTestConfig(t *testing.T, content string) string { + t.Helper() + + dir := t.TempDir() + path := filepath.Join(dir, "test.yaml") + err := os.WriteFile(path, []byte(content), 0600) + require.NoError(t, err) + + return path +} + +func TestLoadResolvedConfig_FileNotFound(t *testing.T) { + _, err := LoadResolvedConfig("/nonexistent/path/config.yaml") + require.Error(t, err) + assert.Contains(t, err.Error(), "failed to open config") +} + +func TestLoadResolvedConfig_InvalidYAML(t *testing.T) { + path := writeTestConfig(t, "{{invalid yaml") + + _, err := LoadResolvedConfig(path) + require.Error(t, err) + assert.Contains(t, err.Error(), "failed to parse YAML") +} + +func TestLoadResolvedConfig_DuplicateJobNames(t *testing.T) { + yaml := ` +sources: + - path: "/src" +targets: + - path: "/tgt" +jobs: + - name: "dup" + source: "/src/a" + target: "/tgt/a" + - name: "dup" + source: "/src/b" + target: "/tgt/b" +` + path := writeTestConfig(t, yaml) + + _, err := LoadResolvedConfig(path) + require.Error(t, err) + assert.Contains(t, err.Error(), "job validation failed") + assert.Contains(t, err.Error(), "duplicate job name: dup") +} + +func TestLoadResolvedConfig_InvalidSourcePath(t *testing.T) { + yaml := ` +sources: + - path: "/home" +targets: + - path: "/backup" +jobs: + - name: "job1" + source: "/invalid/source" + target: "/backup/stuff" +` + path := writeTestConfig(t, yaml) + + _, err := LoadResolvedConfig(path) + require.Error(t, err) + assert.Contains(t, err.Error(), "path validation failed") +} + +func TestLoadResolvedConfig_OverlappingSourcePaths(t *testing.T) { + yaml := ` +sources: + - path: "/home" +targets: + - path: "/backup" +jobs: + - name: "parent" + source: "/home/user" + target: "/backup/user" + - name: "child" + source: "/home/user/docs" + target: "/backup/docs" +` + path := writeTestConfig(t, yaml) + + 
_, err := LoadResolvedConfig(path) + require.Error(t, err) + assert.Contains(t, err.Error(), "job source path validation failed") +} + +func TestLoadResolvedConfig_OverlappingSourcePathsAllowedByExclusion(t *testing.T) { + yaml := ` +sources: + - path: "/home" +targets: + - path: "/backup" +jobs: + - name: "parent" + source: "/home/user" + target: "/backup/user" + exclusions: + - "docs" + - name: "child" + source: "/home/user/docs" + target: "/backup/docs" +` + path := writeTestConfig(t, yaml) + + cfg, err := LoadResolvedConfig(path) + require.NoError(t, err) + assert.Len(t, cfg.Jobs, 2) +} + +func TestLoadResolvedConfig_OverlappingTargetPaths(t *testing.T) { + yaml := ` +sources: + - path: "/home" +targets: + - path: "/backup" +jobs: + - name: "job1" + source: "/home/docs" + target: "/backup/all" + - name: "job2" + source: "/home/photos" + target: "/backup/all/photos" +` + path := writeTestConfig(t, yaml) + + _, err := LoadResolvedConfig(path) + require.Error(t, err) + assert.Contains(t, err.Error(), "job target path validation failed") +} + +func TestLoadResolvedConfig_ValidConfig(t *testing.T) { + yaml := ` +sources: + - path: "/home" +targets: + - path: "/backup" +variables: + base: "/backup" +jobs: + - name: "docs" + source: "/home/docs" + target: "${base}/docs" +` + path := writeTestConfig(t, yaml) + + cfg, err := LoadResolvedConfig(path) + require.NoError(t, err) + assert.Len(t, cfg.Jobs, 1) + assert.Equal(t, "/backup/docs", cfg.Jobs[0].Target) +} + +func TestConfigApply_VersionInfoSuccess(t *testing.T) { + mockCmd := NewMockJobCommand(t) + + var output bytes.Buffer + + var logBuf bytes.Buffer + + logger := log.New(&logBuf, "", 0) + + cfg := Config{ + Jobs: []Job{ + {Name: "job1", Source: "/src/", Target: "/dst/", Enabled: true}, + {Name: "job2", Source: "/src2/", Target: "/dst2/", Enabled: false}, + }, + } + + mockCmd.EXPECT().GetVersionInfo().Return("rsync version 3.2.3", "/usr/bin/rsync", nil).Once() + 
mockCmd.EXPECT().Run(mock.AnythingOfType("internal.Job")).Return(Success).Once() + + err := cfg.Apply(mockCmd, logger, &output) + + require.NoError(t, err) + assert.Contains(t, logBuf.String(), "Rsync Binary Path: /usr/bin/rsync") + assert.Contains(t, logBuf.String(), "Rsync Version Info: rsync version 3.2.3") + assert.Contains(t, logBuf.String(), "STATUS [job1]: SUCCESS") + assert.Contains(t, logBuf.String(), "STATUS [job2]: SKIPPED") + assert.Contains(t, output.String(), "Status [job1]: SUCCESS") + assert.Contains(t, output.String(), "Status [job2]: SKIPPED") + assert.Contains(t, output.String(), "Summary: 1 succeeded, 0 failed, 1 skipped") +} + +func TestConfigApply_VersionInfoError(t *testing.T) { + mockCmd := NewMockJobCommand(t) + + var output bytes.Buffer + + var logBuf bytes.Buffer + + logger := log.New(&logBuf, "", 0) + + cfg := Config{ + Jobs: []Job{ + {Name: "backup", Source: "/data/", Target: "/bak/", Enabled: true}, + }, + } + + mockCmd.EXPECT().GetVersionInfo().Return("", "", errCommandNotFound).Once() + mockCmd.EXPECT().Run(mock.AnythingOfType("internal.Job")).Return(Failure).Once() + + err := cfg.Apply(mockCmd, logger, &output) + + require.Error(t, err) + require.ErrorIs(t, err, ErrJobFailure) + assert.Contains(t, logBuf.String(), "Failed to fetch rsync version: command not found") + assert.NotContains(t, logBuf.String(), "Rsync Binary Path") + assert.Contains(t, logBuf.String(), "STATUS [backup]: FAILURE") + assert.Contains(t, output.String(), "Status [backup]: FAILURE") + assert.Contains(t, output.String(), "Summary: 0 succeeded, 1 failed, 0 skipped") +} diff --git a/backup/internal/test/helper_test.go b/backup/internal/test/helper_test.go index 906bce4..67dd58b 100644 --- a/backup/internal/test/helper_test.go +++ b/backup/internal/test/helper_test.go @@ -1,11 +1,14 @@ package internal_test import ( + "os" "testing" + "time" . 
"backup-rsync/backup/internal" "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" ) func TestNormalizePath(t *testing.T) { @@ -25,34 +28,87 @@ func TestNormalizePath(t *testing.T) { } } +func fixedTime() time.Time { + return time.Date(2025, 6, 15, 14, 30, 45, 0, time.UTC) +} + func TestCreateMainLogger_Title_IsPresent(t *testing.T) { - logger, logPath := CreateMainLogger("title", true) + logger, logPath, cleanup, err := CreateMainLogger("title", true, fixedTime()) + require.NoError(t, err) + + defer cleanup() + assert.Contains(t, logPath, "title") assert.NotNil(t, logger) } func TestCreateMainLogger_IsSimulate_HasSimSuffix(t *testing.T) { - logger, logPath := CreateMainLogger("", true) + logger, logPath, cleanup, err := CreateMainLogger("", true, fixedTime()) + require.NoError(t, err) + + defer cleanup() + assert.Contains(t, logPath, "-sim") assert.NotNil(t, logger) } func TestCreateMainLogger_NotSimulate_HasNoSimSuffix(t *testing.T) { - logger, logPath := CreateMainLogger("", false) + logger, logPath, cleanup, err := CreateMainLogger("", false, fixedTime()) + require.NoError(t, err) + + defer cleanup() + assert.NotContains(t, logPath, "-sim") assert.NotNil(t, logger) } -func TestCreateLogPath_IsSimulate_ContainsTimestamp(t *testing.T) { - _, logPath := CreateMainLogger("", true) - // Check if the logPath contains a timestamp in the format '2006-01-02T15-04-05' - timestampRegex := `\d{4}-\d{2}-\d{2}T\d{2}-\d{2}-\d{2}` - assert.Regexp(t, timestampRegex, logPath) +func TestCreateMainLogger_DeterministicLogPath(t *testing.T) { + _, logPath, cleanup, err := CreateMainLogger("backup.yaml", true, fixedTime()) + require.NoError(t, err) + + defer cleanup() + + assert.Equal(t, "logs/sync-2025-06-15T14-30-45-backup-sim", logPath) } -func TestCreateLogPath_NotSimulate_ContainsTimestamp(t *testing.T) { - _, logPath := CreateMainLogger("", false) - // Check if the logPath contains a timestamp in the format '2006-01-02T15-04-05' - timestampRegex := 
`\d{4}-\d{2}-\d{2}T\d{2}-\d{2}-\d{2}` - assert.Regexp(t, timestampRegex, logPath) +func TestCreateMainLogger_DeterministicLogPath_NoSimulate(t *testing.T) { + _, logPath, cleanup, err := CreateMainLogger("sync.yaml", false, fixedTime()) + require.NoError(t, err) + + defer cleanup() + + assert.Equal(t, "logs/sync-2025-06-15T14-30-45-sync", logPath) +} + +func TestCreateMainLogger_MkdirError(t *testing.T) { + // Use t.Chdir to a temp dir so we control the filesystem + tmpDir := t.TempDir() + t.Chdir(tmpDir) + + // Create "logs" as a regular file to block MkdirAll + err := os.WriteFile("logs", []byte("block"), 0600) + require.NoError(t, err) + + _, _, cleanup, err := CreateMainLogger("test.yaml", false, fixedTime()) + _ = cleanup + + require.Error(t, err) + assert.Contains(t, err.Error(), "failed to create log directory") +} + +func TestCreateMainLogger_OpenFileError(t *testing.T) { + tmpDir := t.TempDir() + t.Chdir(tmpDir) + + // Pre-create the log path directory and make summary.log a directory to block OpenFile + logDir := "logs/sync-2025-06-15T14-30-45-test" + + err := os.MkdirAll(logDir+"/summary.log", 0750) + require.NoError(t, err) + + _, _, cleanup, err := CreateMainLogger("test.yaml", false, fixedTime()) + _ = cleanup + + require.Error(t, err) + assert.Contains(t, err.Error(), "failed to open overall log file") } diff --git a/backup/internal/test/job_test.go b/backup/internal/test/job_test.go index fe0b759..3186f81 100644 --- a/backup/internal/test/job_test.go +++ b/backup/internal/test/job_test.go @@ -5,6 +5,8 @@ import ( "testing" "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "gopkg.in/yaml.v3" ) func newJob() Job { @@ -56,3 +58,18 @@ func TestApply_JobSucceeds_RunIsCalledAndReturnsSuccess(t *testing.T) { assert.Equal(t, Success, status) } + +func TestUnmarshalYAML_InvalidNode(t *testing.T) { + // A scalar node cannot be decoded into the JobYAML struct + node := &yaml.Node{ + Kind: yaml.ScalarNode, + Value: "not a mapping", 
+ } + + var job Job + + err := node.Decode(&job) + + require.Error(t, err) + assert.Contains(t, err.Error(), "failed to decode YAML node") +} diff --git a/backup/internal/test/rsync_test.go b/backup/internal/test/rsync_test.go index 249864d..90598b9 100644 --- a/backup/internal/test/rsync_test.go +++ b/backup/internal/test/rsync_test.go @@ -2,7 +2,9 @@ package internal_test import ( . "backup-rsync/backup/internal" + "bytes" "errors" + "io" "strings" "testing" @@ -57,6 +59,7 @@ func TestGetVersionInfo_Success(t *testing.T) { rsync := SharedCommand{ BinPath: rsyncPath, Shell: mockExec, + Output: io.Discard, } // Set expectation for Execute call @@ -76,6 +79,7 @@ func TestGetVersionInfo_CommandError(t *testing.T) { rsync := SharedCommand{ BinPath: rsyncPath, Shell: mockExec, + Output: io.Discard, } // Set expectation for Execute call to return error @@ -95,6 +99,7 @@ func TestGetVersionInfo_InvalidOutput(t *testing.T) { rsync := SharedCommand{ BinPath: rsyncPath, Shell: mockExec, + Output: io.Discard, } // Set expectation for Execute call to return invalid output @@ -114,6 +119,7 @@ func TestGetVersionInfo_EmptyPath(t *testing.T) { rsync := SharedCommand{ BinPath: "", Shell: mockExec, + Output: io.Discard, } // No expectations set - should fail before calling Execute due to path validation @@ -131,6 +137,7 @@ func TestGetVersionInfo_IncompletePath(t *testing.T) { rsync := SharedCommand{ BinPath: "bin/rsync", Shell: mockExec, + Output: io.Discard, } // No expectations set - should fail before calling Execute due to path validation @@ -142,3 +149,156 @@ func TestGetVersionInfo_IncompletePath(t *testing.T) { assert.Empty(t, versionInfo) assert.Empty(t, fullpath) } + +func newTestJob() Job { + return Job{ + Name: "test-job", + Source: "/home/user/docs/", + Target: "/backup/user/docs/", + Delete: true, + Enabled: true, + Exclusions: []string{"*.tmp"}, + } +} + +func TestNewSharedCommand(t *testing.T) { + mockExec := NewMockExec(t) + cmd := NewSharedCommand(rsyncPath, 
"/logs/base", mockExec, io.Discard) + + assert.Equal(t, rsyncPath, cmd.BinPath) + assert.Equal(t, "/logs/base", cmd.BaseLogPath) + assert.Equal(t, mockExec, cmd.Shell) + assert.Equal(t, io.Discard, cmd.Output) +} + +func TestJobLogPath(t *testing.T) { + cmd := NewSharedCommand(rsyncPath, "/logs/sync-2025", nil, io.Discard) + job := newTestJob() + + logPath := cmd.JobLogPath(job) + + assert.Equal(t, "/logs/sync-2025/job-test-job.log", logPath) +} + +func TestNewListCommand(t *testing.T) { + mockExec := NewMockExec(t) + cmd := NewListCommand(rsyncPath, mockExec, io.Discard) + + assert.Equal(t, rsyncPath, cmd.BinPath) + assert.Empty(t, cmd.BaseLogPath) + assert.Equal(t, mockExec, cmd.Shell) +} + +func TestListCommand_Run_ReturnsSuccess(t *testing.T) { + mockExec := NewMockExec(t) + + var buf bytes.Buffer + + cmd := NewListCommand(rsyncPath, mockExec, &buf) + job := newTestJob() + + status := cmd.Run(job) + + assert.Equal(t, Success, status) + assert.Contains(t, buf.String(), "Job: test-job") + assert.Contains(t, buf.String(), rsyncPath) +} + +func TestNewSyncCommand(t *testing.T) { + mockExec := NewMockExec(t) + cmd := NewSyncCommand(rsyncPath, "/logs/base", mockExec, io.Discard) + + assert.Equal(t, rsyncPath, cmd.BinPath) + assert.Equal(t, "/logs/base", cmd.BaseLogPath) + assert.Equal(t, mockExec, cmd.Shell) +} + +func TestSyncCommand_Run_Success(t *testing.T) { + mockExec := NewMockExec(t) + + var buf bytes.Buffer + + cmd := NewSyncCommand(rsyncPath, "/logs/base", mockExec, &buf) + job := newTestJob() + + mockExec.EXPECT().Execute(rsyncPath, mock.AnythingOfType("[]string")). 
+ Return([]byte("sync output"), nil).Once() + + status := cmd.Run(job) + + assert.Equal(t, Success, status) + assert.Contains(t, buf.String(), "Job: test-job") + assert.Contains(t, buf.String(), "Output:\nsync output") +} + +func TestSyncCommand_Run_Failure(t *testing.T) { + mockExec := NewMockExec(t) + cmd := NewSyncCommand(rsyncPath, "/logs/base", mockExec, io.Discard) + job := newTestJob() + + mockExec.EXPECT().Execute(rsyncPath, mock.AnythingOfType("[]string")). + Return(nil, errCommandNotFound).Once() + + status := cmd.Run(job) + + assert.Equal(t, Failure, status) +} + +func TestNewSimulateCommand(t *testing.T) { + mockExec := NewMockExec(t) + cmd := NewSimulateCommand(rsyncPath, "/logs/base", mockExec, io.Discard) + + assert.Equal(t, rsyncPath, cmd.BinPath) + assert.Equal(t, "/logs/base", cmd.BaseLogPath) + assert.Equal(t, mockExec, cmd.Shell) +} + +func TestSimulateCommand_Run_Success(t *testing.T) { + mockExec := NewMockExec(t) + logDir := t.TempDir() + + var buf bytes.Buffer + + cmd := NewSimulateCommand(rsyncPath, logDir, mockExec, &buf) + job := newTestJob() + + mockExec.EXPECT().Execute(rsyncPath, mock.AnythingOfType("[]string")). + Return([]byte("simulated output"), nil).Once() + + status := cmd.Run(job) + + assert.Equal(t, Success, status) + assert.Contains(t, buf.String(), "Job: test-job") +} + +func TestSimulateCommand_Run_Failure(t *testing.T) { + mockExec := NewMockExec(t) + logDir := t.TempDir() + cmd := NewSimulateCommand(rsyncPath, logDir, mockExec, io.Discard) + job := newTestJob() + + mockExec.EXPECT().Execute(rsyncPath, mock.AnythingOfType("[]string")). 
+ Return(nil, errCommandNotFound).Once() + + status := cmd.Run(job) + + assert.Equal(t, Failure, status) +} + +func TestSimulateCommand_Run_LogWriteError(t *testing.T) { + mockExec := NewMockExec(t) + + var buf bytes.Buffer + + // Use a non-existent directory so WriteFile fails + cmd := NewSimulateCommand(rsyncPath, "/nonexistent/path", mockExec, &buf) + job := newTestJob() + + mockExec.EXPECT().Execute(rsyncPath, mock.AnythingOfType("[]string")). + Return([]byte("output"), nil).Once() + + status := cmd.Run(job) + + assert.Equal(t, Success, status) + assert.Contains(t, buf.String(), "Warning: Failed to write output to log file") +} diff --git a/go.mod b/go.mod index 1f51c78..d0c05f1 100644 --- a/go.mod +++ b/go.mod @@ -1,6 +1,6 @@ module backup-rsync -go 1.24.9 +go 1.25 require ( github.com/spf13/afero v1.15.0