diff --git a/.github/actions/go-check-setup/action.yml b/.github/actions/go-check-setup/action.yml index 3c14a5ca..3ee224ab 100644 --- a/.github/actions/go-check-setup/action.yml +++ b/.github/actions/go-check-setup/action.yml @@ -13,8 +13,13 @@ runs: restore-keys: | ${{ matrix.os }}-golang-${{ matrix.go }}- + - name: Setup Go + uses: actions/setup-go@v5 + with: + go-version-file: 'go.mod' + - name: Lint - uses: golangci/golangci-lint-action@v3 + uses: golangci/golangci-lint-action@v7 with: - version: v1.55.2 + version: v2.1.5 args: --timeout=10m diff --git a/.github/workflows/auto-generate.yml b/.github/workflows/auto-generate.yml index ec94250f..3b05a27a 100644 --- a/.github/workflows/auto-generate.yml +++ b/.github/workflows/auto-generate.yml @@ -13,9 +13,9 @@ jobs: ref: ${{ github.head_ref }} - name: Setup Go - uses: actions/setup-go@v4 + uses: actions/setup-go@v5 with: - go-version: '1.20.x' + go-version-file: 'go.mod' - name: Run go generate run: | diff --git a/.github/workflows/go-check.yml b/.github/workflows/go-check.yml index 23292b90..6972415d 100644 --- a/.github/workflows/go-check.yml +++ b/.github/workflows/go-check.yml @@ -15,4 +15,4 @@ concurrency: jobs: go-check: - uses: ipdxco/unified-github-workflows/.github/workflows/go-check.yml@v1.0.17 + uses: ipdxco/unified-github-workflows/.github/workflows/go-check.yml@v1.0.22 diff --git a/.github/workflows/release-binaries.yml b/.github/workflows/release-binaries.yml index 044a5005..13fbdc35 100644 --- a/.github/workflows/release-binaries.yml +++ b/.github/workflows/release-binaries.yml @@ -16,9 +16,9 @@ jobs: with: fetch-depth: 0 - name: Set up Go - uses: actions/setup-go@v4 + uses: actions/setup-go@v5 with: - go-version: "1.20.x" + go-version-file: 'go.mod' - name: Release Binaries uses: goreleaser/goreleaser-action@v4 with: diff --git a/.gitignore b/.gitignore index f7d83b87..0ac67489 100644 --- a/.gitignore +++ b/.gitignore @@ -21,6 +21,8 @@ /baga* singularity.db* +# Devcontainer +.devcontainer/ node_modules /.pnp diff --git a/.golangci.yml b/.golangci.yml index 148ca873..56fa45fb 100644 --- a/.golangci.yml +++ b/.golangci.yml @@ -1,76 +1,87 @@ -run: - tests: false - skip-dirs: - - replication/internal - - cmd/embed - - docs - - dashboard/model2ts - - handler/datasource/generate - - handler/storage/gen - skip-files: - - cmd/testutil.go - -linters: - enable-all: true - disable: - - typecheck - - interfacer - - structcheck - - golint - - ifshort - - scopelint - - varcheck - - varnamelen - - maligned - - deadcode - - structcheck - - gci - - goimports - - gofumpt - - nolintlint - - ireturn - - nosnakecase - - nlreturn - - godox - - gomoddirectives - - rowserrcheck - - sqlclosecheck - - wastedassign - - gocognit - - wsl - - musttag - - exhaustivestruct - - cyclop - - gomnd - - gochecknoglobals - - funlen - - gocyclo - - exhaustruct - - wrapcheck - - nestif - - containedctx - - maintidx - - nonamedreturns - - nilnil - - prealloc - - gochecknoinits - - dupl - - forbidigo - - godot - - depguard - - nakedret - - tagalign - - lll - - dupword - - interfacebloat - - goconst - -linters-settings: - errcheck: - exclude-functions: - - (github.com/libp2p/go-libp2p/network.MuxedStream).SetDeadline - - (github.com/data-preservation-programs/singularity/service.DatasetWorker).cleanup - revive: - rules: - - name: var-naming - disabled: true +version: "2" +run: + tests: false +linters: + default: all + disable: + - containedctx + - cyclop + - depguard + - dupl + - dupword + - exhaustruct + - forbidigo + - funlen + - gochecknoglobals + - 
gochecknoinits + - gocognit + - goconst + - gocyclo + - godot + - godox + - gomoddirectives + - interfacebloat + - ireturn + - lll + - maintidx + - mnd + - musttag + - nakedret + - nestif + - nilnil + - nlreturn + - nolintlint + - nonamedreturns + - prealloc + - rowserrcheck + - sqlclosecheck + - tagalign + - varnamelen + - wastedassign + - wrapcheck + - wsl + - contextcheck + - forcetypeassert + - funcorder + - exhaustive + - intrange + settings: + gosec: + excludes: + - G115 # we do a lot of uint64 conversions unfortunately + errcheck: + exclude-functions: + - path/filepath.Walk + - (github.com/libp2p/go-libp2p/network.MuxedStream).SetDeadline + - (github.com/data-preservation-programs/singularity/service.DatasetWorker).cleanup + revive: + rules: + - name: var-naming + disabled: true + recvcheck: + disable-builtin: true + exclusions: + - "*.Value" + - "*.String" + - "*.MarshalBinary" + - "*.MarshalJSON" + exclusions: + generated: lax + presets: + - comments + - common-false-positives + - legacy + - std-error-handling + paths: + - third_party$ + - builtin$ + - examples$ +formatters: + enable: + - gofmt + exclusions: + generated: lax + paths: + - third_party$ + - builtin$ + - examples$ diff --git a/Makefile b/Makefile index 64c657c4..597f2c4f 100644 --- a/Makefile +++ b/Makefile @@ -13,8 +13,8 @@ check-go: @which go > /dev/null || (echo "Go is not installed. Please install Go." && exit 1) install-lint-deps: - @which golangci-lint > /dev/null || (echo "Required golangci-lint not found. Installing it..." && GO111MODULE=on go get github.com/golangci/golangci-lint/cmd/golangci-lint@latest) - @which staticcheck > /dev/null || (echo "Required staticcheck not found. Installing it..." && GO111MODULE=on go get honnef.co/go/tools/cmd/staticcheck) + @which golangci-lint > /dev/null || (echo "Required golangci-lint not found. Installing it..." && go install github.com/golangci/golangci-lint/cmd/golangci-lint@latest) + @which staticcheck > /dev/null || (echo "Required staticcheck not found. Installing it..." && go install honnef.co/go/tools/cmd/staticcheck@latest) install-test-deps: @which gotestsum > /dev/null || (echo "Installing gotestsum..." && GO111MODULE=on go get gotest.tools/gotestsum@latest) @@ -29,8 +29,10 @@ generate: check-go go generate ./... lint: check-go install-lint-deps + @echo "Verifying golangci-lint configuration..." + golangci-lint config verify gofmt -s -w . - golangci-lint run --no-config --fix --disable-all -E tagalign --timeout 10m + golangci-lint run --no-config --fix --default=none -E tagalign --timeout 10m golangci-lint run --fix --timeout 10m staticcheck ./... 
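The remainder of the diff repeatedly swaps golang.org/x/exp/slices for the standard-library slices package (available since Go 1.21, now that setup-go reads the toolchain version from go.mod) and rewrites counted loops as integer range loops (Go 1.22). A minimal sketch of both idioms, using throwaway names rather than anything from the repository:

package main

import (
	"fmt"
	"slices" // standard library since Go 1.21; drop-in replacement for golang.org/x/exp/slices here
)

func main() {
	states := []string{"created", "paused", "error"}

	// Same Contains call as before, now from the standard library.
	fmt.Println(slices.Contains(states, "paused"))

	// Classic `for i := 0; i < n; i++` rewritten as a Go 1.22 integer range,
	// mirroring the changes in cmd/testutil.go and handler/download.go.
	nThreads := 4
	for i := range nThreads {
		fmt.Println("thread", i)
	}
}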
diff --git a/analytics/analytics.go b/analytics/analytics.go index 6affaab1..6451c493 100644 --- a/analytics/analytics.go +++ b/analytics/analytics.go @@ -63,8 +63,10 @@ func Init(ctx context.Context, db *gorm.DB) error { return nil } -var Instance string -var Identity string +var ( + Instance string + Identity string +) type Collector struct { mu sync.Mutex diff --git a/api/api.go b/api/api.go index bdcc33d7..41c63e33 100644 --- a/api/api.go +++ b/api/api.go @@ -11,7 +11,10 @@ import ( "strconv" "time" + "github.com/cockroachdb/errors" "github.com/data-preservation-programs/singularity/analytics" + "github.com/data-preservation-programs/singularity/database" + _ "github.com/data-preservation-programs/singularity/docs/swagger" "github.com/data-preservation-programs/singularity/handler/admin" "github.com/data-preservation-programs/singularity/handler/dataprep" "github.com/data-preservation-programs/singularity/handler/deal" @@ -29,17 +32,13 @@ import ( "github.com/data-preservation-programs/singularity/service/contentprovider" "github.com/data-preservation-programs/singularity/util" "github.com/filecoin-project/lassie/pkg/lassie" - "github.com/libp2p/go-libp2p/core/host" - "github.com/ybbus/jsonrpc/v3" - - "github.com/cockroachdb/errors" - "github.com/data-preservation-programs/singularity/database" - _ "github.com/data-preservation-programs/singularity/docs/swagger" logging "github.com/ipfs/go-log/v2" "github.com/labstack/echo/v4" "github.com/labstack/echo/v4/middleware" + "github.com/libp2p/go-libp2p/core/host" echoSwagger "github.com/swaggo/echo-swagger" "github.com/urfave/cli/v2" + "github.com/ybbus/jsonrpc/v3" "gorm.io/gorm" ) @@ -61,7 +60,7 @@ type Server struct { scheduleHandler schedule.Handler } -func (s Server) Name() string { +func (s *Server) Name() string { return "api" } @@ -75,7 +74,7 @@ func (s Server) Name() string { // @Failure 404 {string} string "Not Found" // @Failure 500 {string} string "Internal Server Error" // @Router /piece/{id}/metadata [get] -func (s Server) getMetadataHandler(c echo.Context) error { +func (s *Server) getMetadataHandler(c echo.Context) error { return contentprovider.GetMetadataHandler(c, s.db) } @@ -112,18 +111,18 @@ type APIParams struct { ConnString string } -func InitServer(ctx context.Context, params APIParams) (Server, error) { +func InitServer(ctx context.Context, params APIParams) (*Server, error) { db, closer, err := database.OpenWithLogger(params.ConnString) if err != nil { - return Server{}, errors.WithStack(err) + return nil, errors.WithStack(err) } h, err := util.InitHost(nil) if err != nil { - return Server{}, errors.Wrap(err, "failed to init host") + return nil, errors.Wrap(err, "failed to init host") } lassie, err := lassie.NewLassie(ctx, lassie.WithHost(h)) if err != nil { - return Server{}, errors.Wrap(err, "failed to init lassie") + return nil, errors.Wrap(err, "failed to init lassie") } infoFetcher := replication.MinerInfoFetcher{ Client: util.NewLotusClient(params.LotusAPI, params.LotusToken), @@ -136,7 +135,7 @@ func InitServer(ctx context.Context, params APIParams) (Server, error) { endpointfinder.WithErrorLruSize(128), endpointfinder.WithErrorLruTimeout(time.Minute*5), ) - return Server{ + return &Server{ db: db, host: h, listener: params.Listener, @@ -184,7 +183,7 @@ func InitServer(ctx context.Context, params APIParams) (Server, error) { // This method assumes a specific ordering and kind of parameters in the handler functions. 
// It is designed to simplify the process of defining Echo handlers but has limitations // in terms of the variety of supported handler function signatures. -func (s Server) toEchoHandler(handlerFunc any) echo.HandlerFunc { +func (s *Server) toEchoHandler(handlerFunc any) echo.HandlerFunc { return func(c echo.Context) error { handlerFuncValue := reflect.ValueOf(handlerFunc) handlerFuncType := handlerFuncValue.Type() @@ -295,7 +294,7 @@ func (s Server) toEchoHandler(handlerFunc any) echo.HandlerFunc { } } -func (s Server) setupRoutes(e *echo.Echo) { +func (s *Server) setupRoutes(e *echo.Echo) { // Admin e.POST("/api/identity", s.toEchoHandler(s.adminHandler.SetIdentityHandler)) // Storage @@ -402,7 +401,7 @@ var logger = logging.Logger("api") // 3. Completion of analytics event flushing. // - A channel (service.Fail) that reports errors that occur while the server is running. // - An error if there is an issue during the initialization phase, otherwise nil. -func (s Server) Start(ctx context.Context, exitErr chan<- error) error { +func (s *Server) Start(ctx context.Context, exitErr chan<- error) error { err := analytics.Init(ctx, s.db) if err != nil { return errors.WithStack(err) diff --git a/cmd/download.go b/cmd/download.go index f45574e2..8eac0c11 100644 --- a/cmd/download.go +++ b/cmd/download.go @@ -1,6 +1,8 @@ package cmd import ( + "slices" + "github.com/cockroachdb/errors" "github.com/data-preservation-programs/singularity/cmd/cliutil" "github.com/data-preservation-programs/singularity/cmd/storage" @@ -9,7 +11,6 @@ import ( "github.com/data-preservation-programs/singularity/storagesystem" "github.com/ipfs/go-log" "github.com/urfave/cli/v2" - "golang.org/x/exp/slices" ) var DownloadCmd = &cli.Command{ diff --git a/cmd/ez/prep.go b/cmd/ez/prep.go index 64d9e2f2..ee1e2fd8 100644 --- a/cmd/ez/prep.go +++ b/cmd/ez/prep.go @@ -94,7 +94,7 @@ var PrepCmd = &cli.Command{ outputDir := c.String("output-dir") var outputStorages []string if outputDir != "" { - err = os.MkdirAll(outputDir, 0755) + err = os.MkdirAll(outputDir, 0o755) if err != nil { return errors.Wrap(err, "failed to create output directory") } diff --git a/cmd/run/api.go b/cmd/run/api.go index eb22d571..a7c84c9e 100644 --- a/cmd/run/api.go +++ b/cmd/run/api.go @@ -5,17 +5,15 @@ import ( "github.com/urfave/cli/v2" ) -var ( - APICmd = &cli.Command{ - Name: "api", - Usage: "Run the singularity API", - Flags: []cli.Flag{ - &cli.StringFlag{ - Name: "bind", - Usage: "Bind address for the API server", - Value: ":9090", - }, +var APICmd = &cli.Command{ + Name: "api", + Usage: "Run the singularity API", + Flags: []cli.Flag{ + &cli.StringFlag{ + Name: "bind", + Usage: "Bind address for the API server", + Value: ":9090", }, - Action: api.Run, - } -) + }, + Action: api.Run, +} diff --git a/cmd/run/downloadserver.go b/cmd/run/downloadserver.go index 9029fbd6..cf63c3c0 100644 --- a/cmd/run/downloadserver.go +++ b/cmd/run/downloadserver.go @@ -8,7 +8,7 @@ import ( "github.com/data-preservation-programs/singularity/service/downloadserver" "github.com/data-preservation-programs/singularity/storagesystem" "github.com/urfave/cli/v2" - "golang.org/x/exp/slices" + "slices" ) var DownloadServerCmd = &cli.Command{ diff --git a/cmd/storage/create.go b/cmd/storage/create.go index 60aa5c1a..de75df7b 100644 --- a/cmd/storage/create.go +++ b/cmd/storage/create.go @@ -3,6 +3,7 @@ package storage import ( "net/url" "path/filepath" + "slices" "strings" "github.com/cockroachdb/errors" @@ -17,7 +18,6 @@ import ( "github.com/rclone/rclone/fs" 
"github.com/rjNemo/underscore" "github.com/urfave/cli/v2" - "golang.org/x/exp/slices" ) var defaultClientConfig = fs.NewConfig() diff --git a/cmd/storage/update.go b/cmd/storage/update.go index 9d5013d7..00ae71b2 100644 --- a/cmd/storage/update.go +++ b/cmd/storage/update.go @@ -14,8 +14,8 @@ import ( "github.com/gotidy/ptr" "github.com/rjNemo/underscore" "github.com/urfave/cli/v2" - "golang.org/x/exp/slices" "gorm.io/gorm" + "slices" ) var HTTPClientConfigFlagsForUpdate = []cli.Flag{ diff --git a/cmd/testutil.go b/cmd/testutil.go index 711af055..c7343858 100644 --- a/cmd/testutil.go +++ b/cmd/testutil.go @@ -16,6 +16,8 @@ import ( "testing" "time" + "slices" + "github.com/cockroachdb/errors" "github.com/data-preservation-programs/singularity/pack" "github.com/fatih/color" @@ -25,7 +27,6 @@ import ( "github.com/rjNemo/underscore" "github.com/stretchr/testify/require" "github.com/urfave/cli/v2" - "golang.org/x/exp/slices" ) type RunnerMode string @@ -199,7 +200,7 @@ func Download(ctx context.Context, url string, nThreads int) ([]byte, error) { ctx, cancel := context.WithCancel(ctx) defer cancel() // Make a HEAD request to get the size of the file - req, err := http.NewRequestWithContext(ctx, "HEAD", url, nil) + req, err := http.NewRequestWithContext(ctx, http.MethodHead, url, nil) if err != nil { return nil, errors.WithStack(err) } @@ -228,7 +229,7 @@ func Download(ctx context.Context, url string, nThreads int) ([]byte, error) { var wg sync.WaitGroup parts := make([][]byte, nThreads) errChan := make(chan error, nThreads) - for i := 0; i < nThreads; i++ { + for i := range nThreads { wg.Add(1) go func(i int) { defer wg.Done() @@ -239,7 +240,7 @@ func Download(ctx context.Context, url string, nThreads int) ([]byte, error) { end += extraSize // add the remainder to the last part } - req, err := http.NewRequestWithContext(ctx, "GET", url, nil) + req, err := http.NewRequestWithContext(ctx, http.MethodGet, url, nil) if err != nil { errChan <- errors.WithStack(err) return @@ -292,7 +293,9 @@ func Download(ctx context.Context, url string, nThreads int) ([]byte, error) { return result.Bytes(), nil } + func CompareDirectories(t *testing.T, dir1, dir2 string) { + t.Helper() filesInDir2 := make(map[string]struct{}) err := filepath.Walk(dir1, func(path1 string, info1 os.FileInfo, err error) error { diff --git a/docs/gen/clireference/main.go b/docs/gen/clireference/main.go index 680afcab..90979379 100644 --- a/docs/gen/clireference/main.go +++ b/docs/gen/clireference/main.go @@ -8,11 +8,12 @@ import ( "path" "strings" + "slices" + "github.com/cockroachdb/errors" "github.com/data-preservation-programs/singularity/cmd" "github.com/mattn/go-shellwords" "github.com/urfave/cli/v2" - "golang.org/x/exp/slices" ) var overrides = map[string]string{ @@ -36,7 +37,7 @@ func main() { if err != nil { panic(err) } - err = os.WriteFile("docs/en/cli-reference/README.md", []byte(sb.String()), 0644) + err = os.WriteFile("docs/en/cli-reference/README.md", []byte(sb.String()), 0644) //nolint:gosec if err != nil { panic(err) } @@ -65,7 +66,7 @@ func main() { } lines = append(lines[:beginIndex+1], append([]string{"", summary.String()}, lines[endIndex:]...)...) 
- err = os.WriteFile("docs/en/SUMMARY.md", []byte(strings.Join(lines, "\n")), 0644) + err = os.WriteFile("docs/en/SUMMARY.md", []byte(strings.Join(lines, "\n")), 0644) //nolint:gosec if err != nil { panic(err) } @@ -104,13 +105,13 @@ func saveMarkdown(command *cli.Command, outDir string, args []string) { sb.WriteString(stdout) sb.WriteString("```\n") sb.WriteString("{% endcode %}\n") - err = os.WriteFile(outFile, []byte(sb.String()), 0644) + err = os.WriteFile(outFile, []byte(sb.String()), 0644) //nolint:gosec if err != nil { panic(err) } var margin string - for i := 0; i < len(args)-1; i++ { + for range len(args) - 1 { margin += " " } diff --git a/docs/gen/translate/main.go b/docs/gen/translate/main.go index 86a396f0..2e3cbbae 100644 --- a/docs/gen/translate/main.go +++ b/docs/gen/translate/main.go @@ -35,7 +35,7 @@ func main() { var wg sync.WaitGroup for _, language := range languages { wg.Add(1) - language := language + go func() { defer wg.Done() client := openai.NewClient(token) @@ -120,7 +120,7 @@ func main() { if err != nil { panic(err) } - err = os.WriteFile(outPath, []byte(strings.Join(results, "\n")), 0644) + err = os.WriteFile(outPath, []byte(strings.Join(results, "\n")), 0644) //nolint:gosec if err != nil { panic(err) } diff --git a/docs/gen/webapireference/main.go b/docs/gen/webapireference/main.go index cb895c10..16455bf4 100644 --- a/docs/gen/webapireference/main.go +++ b/docs/gen/webapireference/main.go @@ -6,7 +6,7 @@ import ( "os" "strings" - "golang.org/x/exp/slices" + "slices" ) type SwaggerSpec struct { @@ -54,7 +54,7 @@ func main() { contentMap[tag] = &strings.Builder{} contentMap[tag].WriteString("# " + tag + "\n\n") } - contentMap[tag].WriteString(fmt.Sprintf("{%% swagger src=\"https://raw.githubusercontent.com/data-preservation-programs/singularity/main/docs/swagger/swagger.yaml\" path=\"%s\" method=\"%s\" %%}\n", pathName, method)) + fmt.Fprintf(contentMap[tag], "{%% swagger src=\"https://raw.githubusercontent.com/data-preservation-programs/singularity/main/docs/swagger/swagger.yaml\" path=\"%s\" method=\"%s\" %%}\n", pathName, method) contentMap[tag].WriteString("[https://raw.githubusercontent.com/data-preservation-programs/singularity/main/docs/swagger/swagger.yaml](https://raw.githubusercontent.com/data-preservation-programs/singularity/main/docs/swagger/swagger.yaml)\n") contentMap[tag].WriteString("{% endswagger %}\n\n") } @@ -72,7 +72,7 @@ func main() { slices.Sort(contentMapSorted) for _, tag := range contentMapSorted { builder := contentMap[tag] - err := os.WriteFile("./docs/en/web-api-reference/"+convertStringToHyphenated(tag)+".md", []byte(builder.String()), 0644) + err := os.WriteFile("./docs/en/web-api-reference/"+convertStringToHyphenated(tag)+".md", []byte(builder.String()), 0644) //nolint:gosec if err != nil { panic(err) } @@ -95,7 +95,7 @@ func main() { slices.Sort(summaries) summaries = append(summaries, "* [Specification](https://raw.githubusercontent.com/data-preservation-programs/singularity/main/docs/swagger/swagger.yaml)", "") lines = append(lines[:beginIndex+1], append([]string{"", strings.Join(summaries, "\n")}, lines[endIndex:]...)...) 
- err = os.WriteFile("docs/en/SUMMARY.md", []byte(strings.Join(lines, "\n")), 0644) + err = os.WriteFile("docs/en/SUMMARY.md", []byte(strings.Join(lines, "\n")), 0644) //nolint:gosec if err != nil { panic(err) } diff --git a/go.mod b/go.mod index 40675a4d..612b2d2d 100644 --- a/go.mod +++ b/go.mod @@ -75,7 +75,6 @@ require ( go.mongodb.org/mongo-driver v1.12.1 go.uber.org/multierr v1.11.0 go.uber.org/zap v1.27.0 - golang.org/x/exp v0.0.0-20250128182459-e0ece0dbea4c golang.org/x/text v0.22.0 gorm.io/driver/mysql v1.5.0 gorm.io/driver/postgres v1.5.0 @@ -350,6 +349,7 @@ require ( go.uber.org/fx v1.23.0 // indirect go.uber.org/mock v0.5.0 // indirect golang.org/x/crypto v0.32.0 // indirect + golang.org/x/exp v0.0.0-20250128182459-e0ece0dbea4c // indirect golang.org/x/mod v0.23.0 // indirect golang.org/x/net v0.34.0 // indirect golang.org/x/oauth2 v0.24.0 // indirect diff --git a/handler/dataprep/listschedules.go b/handler/dataprep/listschedules.go index 666692a1..d5c2e07f 100644 --- a/handler/dataprep/listschedules.go +++ b/handler/dataprep/listschedules.go @@ -25,7 +25,8 @@ import ( func (DefaultHandler) ListSchedulesHandler( ctx context.Context, db *gorm.DB, - id string) ([]model.Schedule, error) { + id string, +) ([]model.Schedule, error) { db = db.WithContext(ctx) var preparation model.Preparation diff --git a/handler/dataprep/remove.go b/handler/dataprep/remove.go index 83e7fad5..d3000957 100644 --- a/handler/dataprep/remove.go +++ b/handler/dataprep/remove.go @@ -57,7 +57,6 @@ func (DefaultHandler) RemovePreparationHandler(ctx context.Context, db *gorm.DB, return db.Delete(&preparation).Error }) }) - if err != nil { return errors.WithStack(err) } diff --git a/handler/deal/schedule/create.go b/handler/deal/schedule/create.go index 5dae3611..6c5fc0ed 100644 --- a/handler/deal/schedule/create.go +++ b/handler/deal/schedule/create.go @@ -7,15 +7,14 @@ import ( "strings" "time" - "github.com/data-preservation-programs/singularity/handler/handlererror" - "github.com/rjNemo/underscore" - "github.com/robfig/cron/v3" - "github.com/cockroachdb/errors" "github.com/data-preservation-programs/singularity/database" + "github.com/data-preservation-programs/singularity/handler/handlererror" "github.com/data-preservation-programs/singularity/model" "github.com/dustin/go-humanize" "github.com/ipfs/go-cid" + "github.com/rjNemo/underscore" + "github.com/robfig/cron/v3" "github.com/ybbus/jsonrpc/v3" "gorm.io/gorm" ) diff --git a/handler/deal/schedule/pause.go b/handler/deal/schedule/pause.go index 4d350fae..9c97a0cd 100644 --- a/handler/deal/schedule/pause.go +++ b/handler/deal/schedule/pause.go @@ -2,12 +2,12 @@ package schedule import ( "context" + "slices" "github.com/cockroachdb/errors" "github.com/data-preservation-programs/singularity/database" "github.com/data-preservation-programs/singularity/handler/handlererror" "github.com/data-preservation-programs/singularity/model" - "golang.org/x/exp/slices" "gorm.io/gorm" ) diff --git a/handler/deal/schedule/remove.go b/handler/deal/schedule/remove.go index fa0508ea..2d26ff08 100644 --- a/handler/deal/schedule/remove.go +++ b/handler/deal/schedule/remove.go @@ -2,12 +2,12 @@ package schedule import ( "context" + "slices" "github.com/cockroachdb/errors" "github.com/data-preservation-programs/singularity/database" "github.com/data-preservation-programs/singularity/handler/handlererror" "github.com/data-preservation-programs/singularity/model" - "golang.org/x/exp/slices" "gorm.io/gorm" ) diff --git a/handler/deal/schedule/resume.go 
b/handler/deal/schedule/resume.go index 839fbcc6..944986ab 100644 --- a/handler/deal/schedule/resume.go +++ b/handler/deal/schedule/resume.go @@ -7,8 +7,8 @@ import ( "github.com/data-preservation-programs/singularity/database" "github.com/data-preservation-programs/singularity/handler/handlererror" "github.com/data-preservation-programs/singularity/model" - "golang.org/x/exp/slices" "gorm.io/gorm" + "slices" ) var resumableStates = []model.ScheduleState{ diff --git a/handler/deal/send-manual.go b/handler/deal/send-manual.go index 220eef88..498cc7bb 100644 --- a/handler/deal/send-manual.go +++ b/handler/deal/send-manual.go @@ -7,12 +7,11 @@ import ( "strings" "time" - "github.com/data-preservation-programs/singularity/handler/handlererror" - "github.com/dustin/go-humanize" - "github.com/cockroachdb/errors" + "github.com/data-preservation-programs/singularity/handler/handlererror" "github.com/data-preservation-programs/singularity/model" "github.com/data-preservation-programs/singularity/replication" + "github.com/dustin/go-humanize" "github.com/ipfs/go-cid" "gorm.io/gorm" ) diff --git a/handler/download.go b/handler/download.go index 7b688df1..358ad04b 100644 --- a/handler/download.go +++ b/handler/download.go @@ -83,7 +83,7 @@ func download(cctx *cli.Context, reader *store.PieceReader, outPath string, conc errChan := make(chan error, 1) - for i := 0; i < concurrency; i++ { + for i := range concurrency { wg.Add(1) go func(i int) { defer wg.Done() @@ -115,7 +115,7 @@ func download(cctx *cli.Context, reader *store.PieceReader, outPath string, conc reader := io.LimitReader(clonedReader, end-start) buffer := make([]byte, 4096) if !cctx.Bool("quiet") { - _, _ = cctx.App.Writer.Write([]byte(fmt.Sprintf("[Thread %d] Downloading part %d - %d\n", i, end, start))) + _, _ = fmt.Fprintf(cctx.App.Writer, "[Thread %d] Downloading part %d - %d\n", i, end, start) } for { if ctx.Err() != nil { @@ -144,7 +144,7 @@ func download(cctx *cli.Context, reader *store.PieceReader, outPath string, conc start += int64(n) } if !cctx.Bool("quiet") { - _, _ = cctx.App.Writer.Write([]byte(fmt.Sprintf("[Thread %d] Completed\n", i))) + _, _ = fmt.Fprintf(cctx.App.Writer, "[Thread %d] Completed\n", i) } }(i) } diff --git a/handler/file/prepare.go b/handler/file/prepare.go index b561c626..fb4f3e0d 100644 --- a/handler/file/prepare.go +++ b/handler/file/prepare.go @@ -13,7 +13,8 @@ import ( func (DefaultHandler) PrepareToPackFileHandler( ctx context.Context, db *gorm.DB, - fileID uint64) (int64, error) { + fileID uint64, +) (int64, error) { db = db.WithContext(ctx) var file model.File err := db.Preload("Attachment.Preparation").Where("id = ?", fileID).First(&file).Error diff --git a/handler/file/push.go b/handler/file/push.go index 518146ef..36ab01bb 100644 --- a/handler/file/push.go +++ b/handler/file/push.go @@ -67,7 +67,6 @@ func (DefaultHandler) PushFileHandler( } file, fileRanges, err := push.PushFile(ctx, db, obj, attachment, map[string]model.DirectoryID{}) - if err != nil { return nil, errors.WithStack(err) } diff --git a/handler/job/daggen.go b/handler/job/daggen.go index 12557498..68033d55 100644 --- a/handler/job/daggen.go +++ b/handler/job/daggen.go @@ -24,7 +24,8 @@ func (DefaultHandler) StartDagGenHandler( ctx context.Context, db *gorm.DB, id string, - name string) (*model.Job, error) { + name string, +) (*model.Job, error) { return StartJobHandler(ctx, db, id, name, model.DagGen) } @@ -58,7 +59,8 @@ func (DefaultHandler) PauseDagGenHandler( ctx context.Context, db *gorm.DB, id string, - name string) (*model.Job, 
error) { + name string, +) (*model.Job, error) { return PauseJobHandler(ctx, db, id, name, model.DagGen) } diff --git a/handler/job/pack.go b/handler/job/pack.go index 0452db6d..8e0761f6 100644 --- a/handler/job/pack.go +++ b/handler/job/pack.go @@ -3,6 +3,8 @@ package job import ( "context" + "slices" + "github.com/cockroachdb/errors" "github.com/data-preservation-programs/singularity/database" "github.com/data-preservation-programs/singularity/handler/handlererror" @@ -10,12 +12,13 @@ import ( "github.com/data-preservation-programs/singularity/pack" "github.com/data-preservation-programs/singularity/scan" "github.com/data-preservation-programs/singularity/util" - "golang.org/x/exp/slices" "gorm.io/gorm" ) -var startableStatesForPack = []model.JobState{model.Paused, model.Created, model.Error} -var pausableStatesForPack = []model.JobState{model.Processing, model.Ready} +var ( + startableStatesForPack = []model.JobState{model.Paused, model.Created, model.Error} + pausableStatesForPack = []model.JobState{model.Processing, model.Ready} +) // StartPackHandler initiates pack jobs for a given source storage. // @@ -40,7 +43,8 @@ func (DefaultHandler) StartPackHandler( db *gorm.DB, id string, name string, - jobID int64) ([]model.Job, error) { + jobID int64, +) ([]model.Job, error) { db = db.WithContext(ctx) sourceAttachment, err := validateSourceStorage(ctx, db, id, name) if err != nil { @@ -147,7 +151,8 @@ func (DefaultHandler) PausePackHandler( db *gorm.DB, id string, name string, - jobID int64) ([]model.Job, error) { + jobID int64, +) ([]model.Job, error) { db = db.WithContext(ctx) sourceAttachment, err := validateSourceStorage(ctx, db, id, name) if err != nil { @@ -216,7 +221,8 @@ func (DefaultHandler) PausePackHandler( func (DefaultHandler) PackHandler( ctx context.Context, db *gorm.DB, - jobID uint64) (*model.Car, error) { + jobID uint64, +) (*model.Car, error) { db = db.WithContext(ctx) var packJob model.Job err := db. 
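The schedule and job handlers around this point keep allow-lists of states (startableStatesForPack, pausableStatesForScan, resumableStates) and test membership with slices.Contains, with the var declarations now grouped into blocks. A self-contained sketch of that pattern, assuming a placeholder JobState type and hypothetical error messages in place of the real model types:

package job

import (
	"slices"

	"github.com/cockroachdb/errors"
)

// JobState is a stand-in for model.JobState; the real states live in the model package.
type JobState string

const (
	Created    JobState = "created"
	Paused     JobState = "paused"
	Processing JobState = "processing"
	Ready      JobState = "ready"
	Error      JobState = "error"
)

// Grouped var block, matching the style applied to startableStatesForPack and
// pausableStatesForPack in handler/job/pack.go.
var (
	startableStates = []JobState{Paused, Created, Error}
	pausableStates  = []JobState{Processing, Ready}
)

func startJob(state JobState) error {
	if !slices.Contains(startableStates, state) {
		return errors.Errorf("job in state %s cannot be started", state) // hypothetical message
	}
	return nil
}

func pauseJob(state JobState) error {
	if !slices.Contains(pausableStates, state) {
		return errors.Errorf("job in state %s cannot be paused", state) // hypothetical message
	}
	return nil
}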
diff --git a/handler/job/scan.go b/handler/job/scan.go index ca71c2f9..18ba5e6e 100644 --- a/handler/job/scan.go +++ b/handler/job/scan.go @@ -7,8 +7,8 @@ import ( "github.com/data-preservation-programs/singularity/database" "github.com/data-preservation-programs/singularity/handler/handlererror" "github.com/data-preservation-programs/singularity/model" - "golang.org/x/exp/slices" "gorm.io/gorm" + "slices" ) var pausableStatesForScan = []model.JobState{model.Processing, model.Ready} @@ -53,7 +53,8 @@ func StartJobHandler( db *gorm.DB, id string, name string, - jobType model.JobType) (*model.Job, error) { + jobType model.JobType, +) (*model.Job, error) { db = db.WithContext(ctx) sourceAttachment, err := validateSourceStorage(ctx, db, id, name) if err != nil { @@ -95,7 +96,8 @@ func (DefaultHandler) StartScanHandler( ctx context.Context, db *gorm.DB, id string, - name string) (*model.Job, error) { + name string, +) (*model.Job, error) { return StartJobHandler(ctx, db, id, name, model.Scan) } @@ -134,7 +136,8 @@ func PauseJobHandler( db *gorm.DB, id string, name string, - jobType model.JobType) (*model.Job, error) { + jobType model.JobType, +) (*model.Job, error) { db = db.WithContext(ctx) sourceAttachment, err := validateSourceStorage(ctx, db, id, name) if err != nil { @@ -163,7 +166,8 @@ func (DefaultHandler) PauseScanHandler( ctx context.Context, db *gorm.DB, id string, - name string) (*model.Job, error) { + name string, +) (*model.Job, error) { return PauseJobHandler(ctx, db, id, name, model.Scan) } diff --git a/handler/storage/list.go b/handler/storage/list.go index 06e126cf..4dc2b300 100644 --- a/handler/storage/list.go +++ b/handler/storage/list.go @@ -19,7 +19,8 @@ import ( // - An error, if any occurred during the operation. func (DefaultHandler) ListStoragesHandler( ctx context.Context, - db *gorm.DB) ([]model.Storage, error) { + db *gorm.DB, +) ([]model.Storage, error) { db = db.WithContext(ctx) var storages []model.Storage if err := db.Preload("PreparationsAsSource").Preload("PreparationsAsOutput").Find(&storages).Error; err != nil { diff --git a/handler/storage/remove.go b/handler/storage/remove.go index e4d32457..742554a6 100644 --- a/handler/storage/remove.go +++ b/handler/storage/remove.go @@ -25,7 +25,8 @@ import ( func (DefaultHandler) RemoveHandler( ctx context.Context, db *gorm.DB, - name string) error { + name string, +) error { db = db.WithContext(ctx) err := database.DoRetry(ctx, func() error { return db.Transaction(func(db *gorm.DB) error { diff --git a/handler/tool/extractcar.go b/handler/tool/extractcar.go index adcb07a9..1f0ba031 100644 --- a/handler/tool/extractcar.go +++ b/handler/tool/extractcar.go @@ -128,7 +128,6 @@ func ExtractCarHandler(ctx *cli.Context, inputDir string, output string, c cid.C } return nil }) - if err != nil { return errors.Wrap(err, "failed to walk input directory") } @@ -157,7 +156,7 @@ func getOutPathForFile(outPath string, c cid.Cid) (string, error) { stat, err := os.Stat(outPath) // If the user supply /a/b.txt but the file does not exist, then we need to mkdir -p /a if errors.Is(err, oserror.ErrNotExist) { - err = os.MkdirAll(filepath.Dir(outPath), 0755) + err = os.MkdirAll(filepath.Dir(outPath), 0o755) if err != nil { return "", errors.Wrapf(err, "failed to create output directory %s", filepath.Dir(outPath)) } @@ -190,8 +189,8 @@ func writeToOutput(ctx *cli.Context, dagServ ipld.DAGService, outPath string, c return errors.Wrapf(err, "failed to get output path for CID %s", c) } } - _, _ = ctx.App.Writer.Write([]byte(fmt.Sprintf("Writing 
to %s\n", outPath))) - return os.WriteFile(outPath, node.RawData(), 0600) + _, _ = fmt.Fprintf(ctx.App.Writer, "Writing to %s\n", outPath) + return os.WriteFile(outPath, node.RawData(), 0o600) case cid.DagProtobuf: fsnode, err := unixfs.ExtractFSNode(node) if err != nil { @@ -214,7 +213,7 @@ func writeToOutput(ctx *cli.Context, dagServ ipld.DAGService, outPath string, c return errors.Wrapf(err, "failed to create output file %s", outPath) } defer f.Close() - _, _ = ctx.App.Writer.Write([]byte(fmt.Sprintf("Writing to %s\n", outPath))) + _, _ = fmt.Fprintf(ctx.App.Writer, "Writing to %s\n", outPath) _, err = reader.WriteTo(f) if err != nil { return errors.Wrapf(err, "failed to write to output file %s", outPath) @@ -224,8 +223,8 @@ func writeToOutput(ctx *cli.Context, dagServ ipld.DAGService, outPath string, c if err != nil { return errors.Wrapf(err, "failed to create directory from node for CID %s", c) } - _, _ = ctx.App.Writer.Write([]byte(fmt.Sprintf("Create Dir %s\n", outPath))) - err = os.MkdirAll(outPath, 0755) + _, _ = fmt.Fprintf(ctx.App.Writer, "Create Dir %s\n", outPath) + err = os.MkdirAll(outPath, 0o755) if err != nil { return errors.Wrapf(err, "failed to create output directory %s", outPath) } diff --git a/handler/wallet/attach.go b/handler/wallet/attach.go index fb7603a4..e2b374bf 100644 --- a/handler/wallet/attach.go +++ b/handler/wallet/attach.go @@ -49,7 +49,6 @@ func (DefaultHandler) AttachHandler( err = database.DoRetry(ctx, func() error { return db.Model(&preparation).Association("Wallets").Append(&w) }) - if err != nil { return nil, errors.WithStack(err) } diff --git a/handler/wallet/detach.go b/handler/wallet/detach.go index 6b1ace1e..7dee510f 100644 --- a/handler/wallet/detach.go +++ b/handler/wallet/detach.go @@ -41,7 +41,6 @@ func (DefaultHandler) DetachHandler( found, err := underscore.Find(preparation.Wallets, func(w model.Wallet) bool { return w.ID == wallet || w.Address == wallet }) - if err != nil { return nil, errors.Wrapf(handlererror.ErrNotFound, "wallet %s not attached to preparation %d", wallet, preparationID) } @@ -49,7 +48,6 @@ func (DefaultHandler) DetachHandler( err = database.DoRetry(ctx, func() error { return db.Model(&preparation).Association("Wallets").Delete(&found) }) - if err != nil { return nil, errors.WithStack(err) } diff --git a/migrate/migrate-dataset.go b/migrate/migrate-dataset.go index 64d341d1..db3c5c74 100644 --- a/migrate/migrate-dataset.go +++ b/migrate/migrate-dataset.go @@ -2,7 +2,6 @@ package migrate import ( "context" - "fmt" "log" "path/filepath" "strings" @@ -112,7 +111,7 @@ func migrateDataset(ctx context.Context, mg *mongo.Client, db *gorm.DB, scanning log.Printf("failed to parse data cid %s\n", generation.DataCID) dataCID = cid.Undef } - fileName := fmt.Sprintf("%s.car", generation.PieceCID) + fileName := generation.PieceCID + ".car" if generation.FilenameOverride != "" { fileName = generation.FilenameOverride } diff --git a/migrate/migrate-schedule.go b/migrate/migrate-schedule.go index e8a1a1ec..07f4f3bb 100644 --- a/migrate/migrate-schedule.go +++ b/migrate/migrate-schedule.go @@ -70,7 +70,10 @@ func MigrateSchedule(c *cli.Context) error { } findResult := mg.Database("singularity").Collection("scanningrequests").FindOne(ctx, bson.M{"_id": oid}) if findResult.Err() != nil { - return errors.Wrapf(err, "failed to find dataset %s", replication.DatasetID) + if err != nil { + return errors.Wrapf(err, "failed to find dataset %s", replication.DatasetID) + } + return errors.Errorf("failed to find dataset %s", replication.DatasetID) } 
err = findResult.Decode(&scanning) diff --git a/migrate/types.go b/migrate/types.go index f6584193..9de6c98e 100644 --- a/migrate/types.go +++ b/migrate/types.go @@ -6,9 +6,11 @@ import ( "go.mongodb.org/mongo-driver/bson/primitive" ) -type ScanningRequestStatus string -type GenerationRequestStatus string -type ReplicationRequestStatus string +type ( + ScanningRequestStatus string + GenerationRequestStatus string + ReplicationRequestStatus string +) const ( ScanningStatusActive ScanningRequestStatus = "active" diff --git a/model/basetypes.go b/model/basetypes.go index c6936ce0..2341d85f 100644 --- a/model/basetypes.go +++ b/model/basetypes.go @@ -8,15 +8,18 @@ import ( "strings" "time" + "slices" + "github.com/cockroachdb/errors" "github.com/ipfs/go-cid" - "golang.org/x/exp/slices" ) -var ErrInvalidCIDEntry = errors.New("invalid CID entry in the database") -var ErrInvalidStringSliceEntry = errors.New("invalid string slice entry in the database") -var ErrInvalidStringMapEntry = errors.New("invalid string map entry in the database") -var ErrInvalidHTTPConfigEntry = errors.New("invalid ClientConfig entry in the database") +var ( + ErrInvalidCIDEntry = errors.New("invalid CID entry in the database") + ErrInvalidStringSliceEntry = errors.New("invalid string slice entry in the database") + ErrInvalidStringMapEntry = errors.New("invalid string map entry in the database") + ErrInvalidHTTPConfigEntry = errors.New("invalid ClientConfig entry in the database") +) type StringSlice []string @@ -135,6 +138,7 @@ func (c *CID) Scan(src any) error { func (ss StringSlice) Value() (driver.Value, error) { return json.Marshal(ss) } + func (m ConfigMap) Value() (driver.Value, error) { return json.Marshal(m) } @@ -190,7 +194,7 @@ func (m ConfigMap) String() string { return strings.Join(values, " ") } -func (c ClientConfig) Value() (driver.Value, error) { +func (c ClientConfig) Value() (driver.Value, error) { //nolint:recvcheck return json.Marshal(c) } diff --git a/model/preparation.go b/model/preparation.go index 4efd7237..3414d998 100644 --- a/model/preparation.go +++ b/model/preparation.go @@ -310,8 +310,10 @@ func (c CarBlock) BlockLength() int32 { } if c.RawBlock != nil { + //nolint:gosec // G115: Safe conversion, length of blocks will not exceed int32 max value c.blockLength = int32(len(c.RawBlock)) } else { + //nolint:gosec // G115: Safe conversion, CID byte length and varint length will not exceed int32 max value c.blockLength = c.CarBlockLength - int32(cid.Cid(c.CID).ByteLen()) - int32(len(c.Varint)) } diff --git a/pack/assembler.go b/pack/assembler.go index 1a95f670..e796ff45 100644 --- a/pack/assembler.go +++ b/pack/assembler.go @@ -70,7 +70,8 @@ func (a *Assembler) Close() error { // NewAssembler initializes a new Assembler instance with the given parameters. 
func NewAssembler(ctx context.Context, reader storagesystem.Reader, - fileRanges []model.FileRange, noInline bool, skipInaccessibleFiles bool) *Assembler { + fileRanges []model.FileRange, noInline bool, skipInaccessibleFiles bool, +) *Assembler { return &Assembler{ ctx: ctx, reader: reader, @@ -214,8 +215,8 @@ func (a *Assembler) prefetch() error { } // read more than 0 bytes, or the first block of an empty file - // nolint:goerr113 - if err == nil || err == io.ErrUnexpectedEOF || err == io.EOF { + // nolint:err113 + if err == nil || errors.Is(err, io.ErrUnexpectedEOF) || err == io.EOF { var cidValue cid.Cid var vint []byte if err == io.EOF { @@ -245,9 +246,17 @@ func (a *Assembler) prefetch() error { if !a.noInline { a.carBlocks = append(a.carBlocks, carBlocks...) } + + // Check for negative file size + size := n + if size < 0 { + logger.Warnf("Encountered unknown size file (%s)", a.fileRanges[a.index].File.Path) + size = 0 + } + a.pendingLinks = append(a.pendingLinks, format.Link{ Cid: cidValue, - Size: uint64(n), + Size: uint64(size), //nolint:gosec }) if err == nil { diff --git a/pack/daggen/directory.go b/pack/daggen/directory.go index af030105..2069afb4 100644 --- a/pack/daggen/directory.go +++ b/pack/daggen/directory.go @@ -16,8 +16,10 @@ import ( "github.com/klauspost/compress/zstd" ) -var compressor, _ = zstd.NewWriter(nil, zstd.WithEncoderLevel(zstd.SpeedDefault)) -var decompressor, _ = zstd.NewReader(nil, zstd.WithDecoderConcurrency(0)) +var ( + compressor, _ = zstd.NewWriter(nil, zstd.WithEncoderLevel(zstd.SpeedDefault)) + decompressor, _ = zstd.NewReader(nil, zstd.WithDecoderConcurrency(0)) +) type DirectoryDetail struct { Dir *model.Directory diff --git a/pack/pack.go b/pack/pack.go index 3f9f7de4..53355590 100644 --- a/pack/pack.go +++ b/pack/pack.go @@ -5,25 +5,24 @@ import ( "io" "time" + "github.com/cockroachdb/errors" "github.com/data-preservation-programs/singularity/analytics" "github.com/data-preservation-programs/singularity/database" + "github.com/data-preservation-programs/singularity/model" "github.com/data-preservation-programs/singularity/pack/daggen" "github.com/data-preservation-programs/singularity/pack/packutil" "github.com/data-preservation-programs/singularity/storagesystem" "github.com/data-preservation-programs/singularity/util" - "github.com/google/uuid" - "github.com/rjNemo/underscore" - "gorm.io/gorm" - "gorm.io/gorm/clause" - - "github.com/cockroachdb/errors" - "github.com/data-preservation-programs/singularity/model" commcid "github.com/filecoin-project/go-fil-commcid" commp "github.com/filecoin-project/go-fil-commp-hashhash" + "github.com/google/uuid" blocks "github.com/ipfs/go-block-format" "github.com/ipfs/go-cid" format "github.com/ipfs/go-ipld-format" "github.com/ipfs/go-log/v2" + "github.com/rjNemo/underscore" + "gorm.io/gorm" + "gorm.io/gorm/clause" ) var logger = log.Logger("pack") diff --git a/pack/packutil/util.go b/pack/packutil/util.go index 84a363c3..9692ea04 100644 --- a/pack/packutil/util.go +++ b/pack/packutil/util.go @@ -8,11 +8,11 @@ import ( "github.com/data-preservation-programs/singularity/util" "github.com/ipfs/boxo/ipld/merkledag" "github.com/ipfs/boxo/ipld/unixfs" - "github.com/ipfs/boxo/ipld/unixfs/pb" + unixfs_pb "github.com/ipfs/boxo/ipld/unixfs/pb" util2 "github.com/ipfs/boxo/util" - "github.com/ipfs/go-block-format" + blocks "github.com/ipfs/go-block-format" "github.com/ipfs/go-cid" - "github.com/ipfs/go-ipld-format" + format "github.com/ipfs/go-ipld-format" "github.com/multiformats/go-varint" ) @@ -22,8 +22,10 @@ var 
EmptyFileVarint = varint.ToUvarint(uint64(len(EmptyFileCid.Bytes()))) var EmptyCarHeader, _ = util.GenerateCarHeader(EmptyFileCid) -const ChunkSize int64 = 1 << 20 -const NumLinkPerNode = 1024 +const ( + ChunkSize int64 = 1 << 20 + NumLinkPerNode = 1024 +) // createParentNode creates a new parent ProtoNode for a given set of links. // It constructs a UnixFS node with the type Data_File and adds the sizes of diff --git a/pack/push/pushfile.go b/pack/push/pushfile.go index 8e8f777e..dad5105b 100644 --- a/pack/push/pushfile.go +++ b/pack/push/pushfile.go @@ -51,7 +51,8 @@ func PushFile( db *gorm.DB, obj fs.ObjectInfo, attachment model.SourceAttachment, - directoryCache map[string]model.DirectoryID) (*model.File, []model.FileRange, error) { + directoryCache map[string]model.DirectoryID, +) (*model.File, []model.FileRange, error) { logger.Debugw("pushing file", "file", obj.Remote(), "preparation", attachment.PreparationID, "storage", attachment.StorageID) db = db.WithContext(ctx) splitSize := MaxSizeToSplitSize(attachment.Preparation.MaxSize) @@ -140,7 +141,8 @@ func EnsureParentDirectories( ctx context.Context, db *gorm.DB, file *model.File, rootDirID model.DirectoryID, - directoryCache map[string]model.DirectoryID) error { + directoryCache map[string]model.DirectoryID, +) error { if file.DirectoryID != nil { return nil } diff --git a/replication/makedeal.go b/replication/makedeal.go index 91ac9636..b074b2d7 100644 --- a/replication/makedeal.go +++ b/replication/makedeal.go @@ -8,6 +8,8 @@ import ( "strings" "time" + "slices" + "github.com/cockroachdb/errors" "github.com/data-preservation-programs/singularity/analytics" "github.com/data-preservation-programs/singularity/model" @@ -30,7 +32,6 @@ import ( "github.com/libp2p/go-libp2p/core/protocol" "github.com/multiformats/go-multiaddr" "github.com/ybbus/jsonrpc/v3" - "golang.org/x/exp/slices" ) const ( @@ -588,16 +589,18 @@ func (d DealMakerImpl) MakeDeal(ctx context.Context, walletObj model.Wallet, } dealModel := &model.Deal{ - State: model.DealProposed, - ClientID: walletObj.ID, - Provider: dealConfig.Provider, - Label: cid.Cid(car.RootCID).String(), - PieceCID: car.PieceCID, - PieceSize: car.PieceSize, + State: model.DealProposed, + ClientID: walletObj.ID, + Provider: dealConfig.Provider, + Label: cid.Cid(car.RootCID).String(), + PieceCID: car.PieceCID, + PieceSize: car.PieceSize, + //nolint:gosec // G115: Safe conversion, max int32 epoch won't occur until year 4062 StartEpoch: int32(startEpoch), - EndEpoch: int32(endEpoch), - Price: dealConfig.GetPrice(car.PieceSize, dealConfig.Duration).String(), - Verified: dealConfig.Verified, + //nolint:gosec // G115: Safe conversion, max int32 epoch won't occur until year 4062 + EndEpoch: int32(endEpoch), + Price: dealConfig.GetPrice(car.PieceSize, dealConfig.Duration).String(), + Verified: dealConfig.Verified, } if slices.Contains(protocols, StorageProposalV120) { dealID := uuid.New() diff --git a/replication/wallet.go b/replication/wallet.go index c57fa5e2..74e457c6 100644 --- a/replication/wallet.go +++ b/replication/wallet.go @@ -69,7 +69,8 @@ type DatacapWalletChooser struct { } func NewDatacapWalletChooser(db *gorm.DB, cacheTTL time.Duration, - lotusAPI string, lotusToken string, min uint64) DatacapWalletChooser { + lotusAPI string, lotusToken string, min uint64, //nolint:predeclared // We're ok with using the same name as the predeclared identifier here +) DatacapWalletChooser { cache := ttlcache.New[string, int64]( ttlcache.WithTTL[string, int64](cacheTTL), 
ttlcache.WithDisableTouchOnHit[string, int64]()) diff --git a/retriever/endpointfinder/endpointfinder.go b/retriever/endpointfinder/endpointfinder.go index 09d2e399..3d591779 100644 --- a/retriever/endpointfinder/endpointfinder.go +++ b/retriever/endpointfinder/endpointfinder.go @@ -118,7 +118,7 @@ func (ef *EndpointFinder) FindHTTPEndpoints(ctx context.Context, sps []string) ( } } - for i := 0; i < toLookup; i++ { + for range toLookup { select { case providerAddrs := <-addrChan: if providerAddrs.addrs != nil { diff --git a/retriever/retriever.go b/retriever/retriever.go index e534ad5c..407e1ca2 100644 --- a/retriever/retriever.go +++ b/retriever/retriever.go @@ -101,7 +101,7 @@ func (r *Retriever) Retrieve(ctx context.Context, c cid.Cid, rangeStart int64, r // collect errors var err error - for i := 0; i < 2; i++ { + for range 2 { select { case <-ctx.Done(): return ctx.Err() diff --git a/scan/scan.go b/scan/scan.go index 98bb8216..5110d022 100644 --- a/scan/scan.go +++ b/scan/scan.go @@ -38,7 +38,7 @@ var logger = log.Logger("scan") func Scan(ctx context.Context, db *gorm.DB, attachment model.SourceAttachment) error { db = db.WithContext(ctx) directoryCache := make(map[string]model.DirectoryID) - var remaining = push.NewFileRangeSet() + remaining := push.NewFileRangeSet() var remainingFileRanges []model.FileRange err := db.Joins("File"). Where("attachment_id = ? AND file_ranges.job_id is null", attachment.ID). @@ -130,7 +130,8 @@ func addFileRangesAndCreatePackJob( attachmentID model.SourceAttachmentID, remaining *push.FileRangeSet, maxSize int64, - fileRanges ...model.FileRange) error { + fileRanges ...model.FileRange, +) error { for _, fileRange := range fileRanges { fit := remaining.AddIfFits(fileRange, maxSize) if fit { diff --git a/service/contentprovider/contentprovider.go b/service/contentprovider/contentprovider.go index c3306091..2913c2a2 100644 --- a/service/contentprovider/contentprovider.go +++ b/service/contentprovider/contentprovider.go @@ -7,10 +7,9 @@ import ( "github.com/cockroachdb/errors" "github.com/data-preservation-programs/singularity/service" "github.com/data-preservation-programs/singularity/util" + logging "github.com/ipfs/go-log/v2" "github.com/libp2p/go-libp2p/core/crypto" "github.com/multiformats/go-multiaddr" - - logging "github.com/ipfs/go-log/v2" "gorm.io/gorm" ) diff --git a/service/contentprovider/http.go b/service/contentprovider/http.go index c3291cea..43c190b7 100644 --- a/service/contentprovider/http.go +++ b/service/contentprovider/http.go @@ -200,7 +200,7 @@ func GetMetadataHandler(c echo.Context, db *gorm.DB) error { metadata, err := getPieceMetadata(ctx, db, car) if err != nil { - return c.String(http.StatusInternalServerError, fmt.Sprintf("Error: %s", err.Error())) + return c.String(http.StatusInternalServerError, "Error: "+err.Error()) } // Remove all credentials diff --git a/service/datasetworker/datasetworker.go b/service/datasetworker/datasetworker.go index 6e3e6ad4..06983aa9 100644 --- a/service/datasetworker/datasetworker.go +++ b/service/datasetworker/datasetworker.go @@ -25,9 +25,11 @@ type Worker struct { stateMonitor *StateMonitor } -const defaultMinInterval = 5 * time.Second -const defaultMaxInterval = 160 * time.Second -const cleanupTimeout = 5 * time.Second +const ( + defaultMinInterval = 5 * time.Second + defaultMaxInterval = 160 * time.Second + cleanupTimeout = 5 * time.Second +) type Config struct { Concurrency int @@ -175,7 +177,7 @@ func (w Worker) Run(ctx context.Context) error { }() threads := make([]service.Server, 
w.config.Concurrency) - for i := 0; i < w.config.Concurrency; i++ { + for i := range w.config.Concurrency { id := uuid.New() thread := &Thread{ id: id, diff --git a/service/dealpusher/dealpusher.go b/service/dealpusher/dealpusher.go index 5d2e1e7e..a9f1a037 100644 --- a/service/dealpusher/dealpusher.go +++ b/service/dealpusher/dealpusher.go @@ -7,20 +7,19 @@ import ( "time" "github.com/avast/retry-go" + "github.com/cockroachdb/errors" "github.com/data-preservation-programs/singularity/analytics" "github.com/data-preservation-programs/singularity/database" - "github.com/ipfs/go-cid" - "github.com/libp2p/go-libp2p/core/host" - "github.com/rjNemo/underscore" - "github.com/robfig/cron/v3" - - "github.com/cockroachdb/errors" "github.com/data-preservation-programs/singularity/model" "github.com/data-preservation-programs/singularity/replication" "github.com/data-preservation-programs/singularity/service/healthcheck" "github.com/data-preservation-programs/singularity/util" "github.com/google/uuid" + "github.com/ipfs/go-cid" "github.com/ipfs/go-log/v2" + "github.com/libp2p/go-libp2p/core/host" + "github.com/rjNemo/underscore" + "github.com/robfig/cron/v3" "gorm.io/gorm" ) @@ -267,7 +266,8 @@ func (d *DealPusher) runSchedule(ctx context.Context, schedule *model.Schedule) var total sumResult err = db.Model(&model.Deal{}). Where("schedule_id = ? AND state IN (?)", schedule.ID, []model.DealState{ - model.DealActive, model.DealProposed, model.DealPublished}).Select("COUNT(*) AS deal_number, SUM(piece_size) AS deal_size").Scan(&total).Error + model.DealActive, model.DealProposed, model.DealPublished, + }).Select("COUNT(*) AS deal_number, SUM(piece_size) AS deal_size").Scan(&total).Error if err != nil { return model.ScheduleError, errors.Wrap(err, "failed to count total active and pending deals") } @@ -424,7 +424,8 @@ func (d *DealPusher) runSchedule(ctx context.Context, schedule *model.Schedule) } func NewDealPusher(db *gorm.DB, lotusURL string, - lotusToken string, numAttempts uint, maxReplicas uint) (*DealPusher, error) { + lotusToken string, numAttempts uint, maxReplicas uint, +) (*DealPusher, error) { if numAttempts <= 1 { numAttempts = 1 } diff --git a/service/dealtracker/dealtracker.go b/service/dealtracker/dealtracker.go index e146d339..36a2714c 100644 --- a/service/dealtracker/dealtracker.go +++ b/service/dealtracker/dealtracker.go @@ -29,9 +29,11 @@ import ( var ErrAlreadyRunning = errors.New("another worker already running") -const healthRegisterRetryInterval = time.Minute -const cleanupTimeout = 5 * time.Second -const logStatsInterval = 15 * time.Second +const ( + healthRegisterRetryInterval = time.Minute + cleanupTimeout = 5 * time.Second + logStatsInterval = 15 * time.Second +) type Deal struct { Proposal DealProposal @@ -105,7 +107,8 @@ func NewDealTracker( dealZstURL string, lotusURL string, lotusToken string, - once bool) DealTracker { + once bool, +) DealTracker { return DealTracker{ workerID: uuid.New(), dbNoContext: db, diff --git a/service/downloadserver/downloadserver.go b/service/downloadserver/downloadserver.go index 2056987a..4327ac88 100644 --- a/service/downloadserver/downloadserver.go +++ b/service/downloadserver/downloadserver.go @@ -157,7 +157,8 @@ func GetMetadata( api string, config map[string]string, clientConfig model.ClientConfig, - pieceCid string) (*contentprovider.PieceMetadata, int, error) { + pieceCid string, +) (*contentprovider.PieceMetadata, int, error) { api = strings.TrimSuffix(api, "/") req, err := http.NewRequestWithContext(ctx, http.MethodGet, 
api+"/piece/metadata/"+pieceCid, nil) if err != nil { diff --git a/service/healthcheck/healthcheck.go b/service/healthcheck/healthcheck.go index a4a57d56..fb148f9f 100644 --- a/service/healthcheck/healthcheck.go +++ b/service/healthcheck/healthcheck.go @@ -14,8 +14,10 @@ import ( "gorm.io/gorm/clause" ) -var staleThreshold = time.Minute * 5 -var reportInterval = time.Minute +var ( + staleThreshold = time.Minute * 5 + reportInterval = time.Minute +) var cleanupInterval = time.Minute * 5 @@ -169,7 +171,6 @@ func ReportHealth(ctx context.Context, db *gorm.DB, workerID uuid.UUID, workerTy DoUpdates: clause.AssignmentColumns([]string{"last_heartbeat", "type", "hostname"}), }).Create(&worker).Error }) - if err != nil { logger.Errorw("failed to send heartbeat", "error", err) } diff --git a/service/service.go b/service/service.go index 101d7510..e3196b82 100644 --- a/service/service.go +++ b/service/service.go @@ -8,7 +8,6 @@ import ( "syscall" "github.com/cockroachdb/errors" - "github.com/ipfs/go-log/v2" "github.com/urfave/cli/v2" ) diff --git a/storagesystem/rclone.go b/storagesystem/rclone.go index c3913b94..195caccc 100644 --- a/storagesystem/rclone.go +++ b/storagesystem/rclone.go @@ -8,6 +8,8 @@ import ( "sync" "time" + "slices" + "github.com/cockroachdb/errors" "github.com/data-preservation-programs/singularity/model" "github.com/gammazero/workerpool" @@ -15,16 +17,17 @@ import ( "github.com/rclone/rclone/fs" "github.com/rclone/rclone/fs/config/configmap" "github.com/rclone/rclone/fs/object" - "golang.org/x/exp/slices" ) var logger = log.Logger("storage") var _ Handler = &RCloneHandler{} -var ErrGetUsageNotSupported = errors.New("The backend does not support getting usage quota") -var ErrBackendNotSupported = errors.New("This backend is not supported") -var ErrMoveNotSupported = errors.New("The backend does not support moving files") +var ( + ErrGetUsageNotSupported = errors.New("The backend does not support getting usage quota") + ErrBackendNotSupported = errors.New("This backend is not supported") + ErrMoveNotSupported = errors.New("The backend does not support moving files") +) type RCloneHandler struct { name string @@ -95,7 +98,6 @@ func (h RCloneHandler) scan(ctx context.Context, path string, ch chan<- Entry, w var subCount int for _, entry := range entries { - entry := entry switch v := entry.(type) { case fs.Directory: select { diff --git a/storagesystem/types.go b/storagesystem/types.go index 050b031b..d523f02f 100644 --- a/storagesystem/types.go +++ b/storagesystem/types.go @@ -54,7 +54,7 @@ import ( "github.com/rclone/rclone/lib/encoder" "github.com/rjNemo/underscore" "github.com/urfave/cli/v2" - "golang.org/x/exp/slices" + "slices" ) // Entry is a struct that represents a single file or directory during a data source scan. 
@@ -243,8 +243,10 @@ func (p ProviderOptions) ToCLICommand(short string, long string, description str return command } -var Backends []Backend -var BackendMap = make(map[string]Backend) +var ( + Backends []Backend + BackendMap = make(map[string]Backend) +) func init() { for _, regInfo := range fs.Registry { diff --git a/storagesystem/util.go b/storagesystem/util.go index a8f0cadf..b3449fa6 100644 --- a/storagesystem/util.go +++ b/storagesystem/util.go @@ -91,8 +91,10 @@ func GetHash(ctx context.Context, object fs.ObjectInfo) (string, error) { var ErrStorageNotAvailable = errors.New("storage not available") -var freeSpaceWarningThreshold = 0.05 -var freeSpaceErrorThreshold = 0.01 +var ( + freeSpaceWarningThreshold = 0.05 + freeSpaceErrorThreshold = 0.01 +) // GetRandomOutputWriter selects a storage from the provided storages list based on its available // space and returns an associated Writer to interact with that storage. diff --git a/store/item_reference.go b/store/item_reference.go index b646b72c..2b7bf8e4 100644 --- a/store/item_reference.go +++ b/store/item_reference.go @@ -10,7 +10,6 @@ import ( "github.com/data-preservation-programs/singularity/util" blocks "github.com/ipfs/go-block-format" "github.com/ipfs/go-cid" - format "github.com/ipfs/go-ipld-format" "gorm.io/gorm" ) diff --git a/store/piece_store.go b/store/piece_store.go index e111de14..5ad724ed 100644 --- a/store/piece_store.go +++ b/store/piece_store.go @@ -5,30 +5,31 @@ import ( "io" "sort" + "github.com/cockroachdb/errors" + "github.com/data-preservation-programs/singularity/model" "github.com/data-preservation-programs/singularity/storagesystem" "github.com/data-preservation-programs/singularity/util" + "github.com/ipfs/go-cid" "github.com/ipfs/go-log/v2" "github.com/multiformats/go-varint" "github.com/rclone/rclone/fs" - - "github.com/cockroachdb/errors" - "github.com/data-preservation-programs/singularity/model" - "github.com/ipfs/go-cid" ) -var logger = log.Logger("piece_store") -var ErrNoCarBlocks = errors.New("no Blocks provided") -var ErrInvalidStartOffset = errors.New("first block must start at car Header") -var ErrInvalidEndOffset = errors.New("last block must end at car end") -var ErrIncontiguousBlocks = errors.New("Blocks must be contiguous") -var ErrInvalidVarintLength = errors.New("varint read does not match varint length") -var ErrVarintDoesNotMatchBlockLength = errors.New("varint does not match block length") -var ErrFileNotProvided = errors.New("file not provided") -var ErrInvalidWhence = errors.New("invalid whence") -var ErrNegativeOffset = errors.New("negative offset") -var ErrOffsetOutOfRange = errors.New("position past end of file") -var ErrTruncated = errors.New("original file has been truncated") -var ErrFileHasChanged = errors.New("file has changed") +var ( + logger = log.Logger("piece_store") + ErrNoCarBlocks = errors.New("no Blocks provided") + ErrInvalidStartOffset = errors.New("first block must start at car Header") + ErrInvalidEndOffset = errors.New("last block must end at car end") + ErrIncontiguousBlocks = errors.New("Blocks must be contiguous") + ErrInvalidVarintLength = errors.New("varint read does not match varint length") + ErrVarintDoesNotMatchBlockLength = errors.New("varint does not match block length") + ErrFileNotProvided = errors.New("file not provided") + ErrInvalidWhence = errors.New("invalid whence") + ErrNegativeOffset = errors.New("negative offset") + ErrOffsetOutOfRange = errors.New("position past end of file") + ErrTruncated = errors.New("original file has been truncated") + 
ErrFileHasChanged = errors.New("file has changed") +) // PieceReader is a struct that represents a reader for pieces of data. // @@ -177,7 +178,7 @@ func NewPieceReader( return nil, errors.Wrapf(ErrInvalidEndOffset, "expected %d, got %d", car.FileSize, lastBlock.CarOffset+int64(lastBlock.CarBlockLength)) } - for i := 0; i < len(carBlocks); i++ { + for i := range carBlocks { if i != len(carBlocks)-1 { if carBlocks[i].CarOffset+int64(carBlocks[i].CarBlockLength) != carBlocks[i+1].CarOffset { return nil, errors.Wrapf(ErrIncontiguousBlocks, "previous offset %d, next offset %d", carBlocks[i].CarOffset+int64(carBlocks[i].CarBlockLength), carBlocks[i+1].CarOffset) diff --git a/testdb/main.go b/testdb/main.go index a4cb1d28..26e92636 100644 --- a/testdb/main.go +++ b/testdb/main.go @@ -185,7 +185,7 @@ func createPreparation(ctx context.Context, db *gorm.DB) error { FileRanges: nil, } - for i := 0; i < 100; i++ { + for i := range 100 { largeFile.FileRanges = append(largeFile.FileRanges, model.FileRange{ Offset: int64(i << 34), Length: 1 << 34, @@ -203,7 +203,7 @@ func createPreparation(ctx context.Context, db *gorm.DB) error { } // Setup a file with multiple versions - for i := 0; i < 10; i++ { + for range 10 { size := r.Int63n(1 << 20) rCID := randomCID() err = db.Create(&model.File{ @@ -258,7 +258,7 @@ func createPreparation(ctx context.Context, db *gorm.DB) error { } // Some Car files without association with the preparation - for i := 0; i < 5; i++ { + for range 5 { pieceCID, err := randomPieceCID() if err != nil { return errors.WithStack(err) @@ -324,7 +324,8 @@ func createPreparation(ctx context.Context, db *gorm.DB) error { model.DealProposed, model.DealPublished, model.DealSlashed, - model.DealActive} + model.DealActive, + } state := states[r.Intn(len(states))] deal := model.Deal{ State: state, @@ -334,7 +335,9 @@ func createPreparation(ctx context.Context, db *gorm.DB) error { PieceCID: car.PieceCID, PieceSize: car.PieceSize, DealID: nil, + //nolint:gosec // G115: Safe conversion, max int32 epoch won't occur until year 4062 StartEpoch: int32(10000 + r.Intn(10000)), + //nolint:gosec // G115: Safe conversion, max int32 epoch won't occur until year 4062 EndEpoch: int32(20000 + r.Intn(10000)), Price: "0", Verified: true, @@ -342,6 +345,7 @@ func createPreparation(ctx context.Context, db *gorm.DB) error { ClientID: wallet.ID, } if state == model.DealActive { + //nolint:gosec // G115: Safe conversion, max int32 epoch won't occur until year 4062 deal.SectorStartEpoch = int32(10000 + r.Intn(10000)) } if state == model.DealProposed || state == model.DealPublished { diff --git a/util/car.go b/util/car.go index 9acffefa..a1b46c14 100644 --- a/util/car.go +++ b/util/car.go @@ -3,7 +3,7 @@ package util import ( "github.com/cockroachdb/errors" "github.com/ipfs/go-cid" - "github.com/ipfs/go-ipld-cbor" + cbornode "github.com/ipfs/go-ipld-cbor" "github.com/ipld/go-car" "github.com/multiformats/go-varint" ) diff --git a/util/host.go b/util/host.go index 15bb3d0e..485f2294 100644 --- a/util/host.go +++ b/util/host.go @@ -28,7 +28,8 @@ const yamuxID = "/yamux/1.0.0" func InitHost(opts []libp2p.Option, listenAddrs ...multiaddr.Multiaddr) (host.Host, error) { opts = append([]libp2p.Option{ libp2p.Identity(nil), - libp2p.ResourceManager(&network.NullResourceManager{})}, + libp2p.ResourceManager(&network.NullResourceManager{}), + }, opts...) if len(listenAddrs) > 0 { opts = append([]libp2p.Option{libp2p.ListenAddrs(listenAddrs...)}, opts...) 
@@ -38,17 +39,20 @@ func InitHost(opts []libp2p.Option, listenAddrs ...multiaddr.Multiaddr) (host.Ho libp2p.Transport(tcp.NewTCPTransport, tcp.WithMetrics()), libp2p.Transport(websocket.New), libp2p.Transport(quic.NewTransport), - libp2p.Transport(webtransport.New)}, + libp2p.Transport(webtransport.New), + }, opts...) // add security opts = append([]libp2p.Option{ libp2p.Security(tls.ID, tls.New), - libp2p.Security(noise.ID, noise.New)}, + libp2p.Security(noise.ID, noise.New), + }, opts...) // add muxers opts = append([]libp2p.Option{ - libp2p.Muxer(yamuxID, yamuxTransport())}, + libp2p.Muxer(yamuxID, yamuxTransport()), + }, opts...) //nolint:wrapcheck diff --git a/util/testutil/testutils.go b/util/testutil/testutils.go index 6195979a..10881e82 100644 --- a/util/testutil/testutils.go +++ b/util/testutil/testutils.go @@ -4,14 +4,13 @@ import ( "context" "crypto/rand" "io" + rand2 "math/rand" "net" "os" "strings" "testing" "time" - rand2 "math/rand" - "github.com/cockroachdb/errors" "github.com/data-preservation-programs/singularity/database" "github.com/data-preservation-programs/singularity/model" @@ -26,7 +25,7 @@ const pattern = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789" func GenerateFixedBytes(length int) []byte { patternLen := len(pattern) result := make([]byte, length) - for i := 0; i < length; i++ { + for i := range length { result[i] = pattern[i%patternLen] } return result
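Where narrowing or sign-changing conversions remain (int32 epochs, block lengths, the uint64 link size in pack/assembler.go), the diff either adds a justified //nolint:gosec // G115 annotation or clamps the value first. A small illustration of the clamp-then-annotate approach, using a hypothetical helper rather than the actual Assembler code:

package pack

// linkSize converts a read count into the uint64 expected by an IPLD link,
// clamping negative counts the way pack/assembler.go now does before its
// uint64 conversion. The function name and shape are illustrative only.
func linkSize(n int) uint64 {
	if n < 0 {
		n = 0 // unknown-size file: avoid the huge value a raw signed-to-unsigned conversion would produce
	}
	return uint64(n) //nolint:gosec // G115: non-negative after the clamp above
}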