Merged: changes from all commits
2 changes: 1 addition & 1 deletion src/common/dao/pgsql_test.go
@@ -27,7 +27,7 @@ func TestMaxOpenConns(t *testing.T) {

queryNum := 200
results := make([]bool, queryNum)
- for i := 0; i < queryNum; i++ {
+ for i := range queryNum {
wg.Add(1)
go func(i int) {
defer wg.Done()
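Note on the idiom above: since Go 1.22, ranging over an integer n iterates from 0 to n-1, so it is a drop-in replacement for the classic three-clause counter loop. A minimal, self-contained sketch (illustrative only, not part of this PR):

package main

import "fmt"

func main() {
	n := 3
	// Go 1.22+: range over an int yields 0, 1, ..., n-1,
	// equivalent to: for i := 0; i < n; i++
	for i := range n {
		fmt.Println("index", i)
	}
	// The loop variable can be dropped entirely when only the
	// iteration count matters.
	for range n {
		fmt.Println("tick")
	}
}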
2 changes: 1 addition & 1 deletion src/common/dao/testutils.go
@@ -142,7 +142,7 @@ func ArrayEqual(arrayA, arrayB []int) bool {
return false
}
size := len(arrayA)
- for i := 0; i < size; i++ {
+ for i := range size {
if arrayA[i] != arrayB[i] {
return false
}
2 changes: 1 addition & 1 deletion src/common/rbac/project/evaluator_test.go
@@ -119,7 +119,7 @@ func BenchmarkProjectEvaluator(b *testing.B) {
resource := NewNamespace(public.ProjectID).Resource(rbac.ResourceRepository)

b.ResetTimer()
- for i := 0; i < b.N; i++ {
+ for b.Loop() {
evaluator.HasPermission(context.TODO(), resource, rbac.ActionPull)
}
}
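Note on b.Loop above: testing.B.Loop was added in Go 1.24 as a replacement for the manual b.N loop; it reports true until the framework has collected enough iterations. A minimal sketch (illustrative only; sortInts is a made-up workload, not from this PR):

package demo_test

import (
	"sort"
	"testing"
)

// sortInts is a stand-in workload for the benchmark example.
func sortInts() {
	s := []int{5, 3, 1, 4, 2}
	sort.Ints(s)
}

func BenchmarkSortInts(b *testing.B) {
	// Go 1.24+: b.Loop() returns true until enough iterations have run;
	// it replaces "for i := 0; i < b.N; i++".
	for b.Loop() {
		sortInts()
	}
}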
12 changes: 1 addition & 11 deletions src/common/utils/utils.go
@@ -75,7 +75,7 @@ func GenerateRandomStringWithLen(length int) string {
if err != nil {
log.Warningf("Error reading random bytes: %v", err)
}
- for i := 0; i < length; i++ {
+ for i := range length {
result[i] = chars[int(result[i])%l]
}
return string(result)
@@ -337,13 +337,3 @@ func MostMatchSorter(a, b string, matchWord string) bool {
func IsLocalPath(path string) bool {
return len(path) == 0 || (strings.HasPrefix(path, "/") && !strings.HasPrefix(path, "//"))
}

- // StringInSlice check if the string is in the slice
- func StringInSlice(str string, slice []string) bool {
- for _, s := range slice {
- if s == str {
- return true
- }
- }
- return false
- }
3 changes: 1 addition & 2 deletions src/controller/artifact/annotation/v1alpha1.go
@@ -66,8 +66,7 @@ func parseV1alpha1SkipList(artifact *artifact.Artifact, manifest *v1.Manifest) {
skipListAnnotationKey := fmt.Sprintf("%s.%s.%s", AnnotationPrefix, V1alpha1, SkipList)
skipList, ok := manifest.Config.Annotations[skipListAnnotationKey]
if ok {
- skipKeyList := strings.Split(skipList, ",")
- for _, skipKey := range skipKeyList {
+ for skipKey := range strings.SplitSeq(skipList, ",") {
delete(metadata, skipKey)
}
artifact.ExtraAttrs = metadata
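Note on strings.SplitSeq above: added in Go 1.24, it returns an iterator over the substrings instead of allocating an intermediate slice, so it can be ranged over directly. A minimal sketch (illustrative only):

package main

import (
	"fmt"
	"strings"
)

func main() {
	// Go 1.24+: SplitSeq yields each piece lazily; no []string is built.
	for part := range strings.SplitSeq("key1,key2,key3", ",") {
		fmt.Println(part)
	}
}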
@@ -156,7 +156,7 @@ func TestAddNode(t *testing.T) {
// Verify the path exists.
current := root
parts := filepath.Clean(tt.path)
- for _, part := range strings.Split(parts, string(filepath.Separator)) {
+ for part := range strings.SplitSeq(parts, string(filepath.Separator)) {
if part == "" {
continue
}
2 changes: 1 addition & 1 deletion src/controller/blob/controller_test.go
@@ -232,7 +232,7 @@ func (suite *ControllerTestSuite) TestGet() {

func (suite *ControllerTestSuite) TestSync() {
var references []distribution.Descriptor
- for i := 0; i < 5; i++ {
+ for i := range 5 {
references = append(references, distribution.Descriptor{
MediaType: fmt.Sprintf("media type %d", i),
Digest: suite.Digest(),
2 changes: 1 addition & 1 deletion src/controller/config/controller.go
@@ -206,7 +206,7 @@ func maxValueLimitedByLength(length int) int64 {
var value int64
// the times for multiple, should *10 for every time
times := 1
- for i := 0; i < length; i++ {
+ for range length {
value = value + int64(9*times)
times = times * 10
}
2 changes: 1 addition & 1 deletion src/controller/event/handler/webhook/scan/scan.go
@@ -129,7 +129,7 @@ func constructScanImagePayload(ctx context.Context, event *event.ScanImageEvent,
// Wait for reasonable time to make sure the report is ready
// Interval=500ms and total time = 5s
// If the report is still not ready in the total time, then failed at then
- for i := 0; i < 10; i++ {
+ for range 10 {
// First check in case it is ready
if re, err := scan.DefaultController.GetReport(ctx, art, []string{v1.MimeTypeNativeReport, v1.MimeTypeGenericVulnerabilityReport}); err == nil {
if len(re) > 0 && len(re[0].Report) > 0 {
2 changes: 1 addition & 1 deletion src/controller/health/controller.go
@@ -48,7 +48,7 @@ func (c *controller) GetHealth(_ context.Context) *OverallHealthStatus {
for name, checker := range registry {
go check(name, checker, timeout, ch)
}
- for i := 0; i < len(registry); i++ {
+ for range len(registry) {
componentStatus := <-ch
if len(componentStatus.Error) != 0 {
isHealthy = false
8 changes: 2 additions & 6 deletions src/controller/jobmonitor/monitor.go
@@ -17,6 +17,7 @@ package jobmonitor
import (
"context"
"fmt"
+ "slices"
"strings"
"time"

@@ -278,12 +279,7 @@ func (w *monitorController) ListQueues(ctx context.Context) ([]*jm.Queue, error)
}

func skippedUnusedJobType(jobType string) bool {
- for _, t := range skippedJobTypes {
- if jobType == t {
- return true
- }
- }
- return false
+ return slices.Contains(skippedJobTypes, jobType)
}

func (w *monitorController) PauseJobQueues(ctx context.Context, jobType string) error {
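Note on slices.Contains above: available since Go 1.21, it replaces the hand-rolled linear search (the same simplification that removed utils.StringInSlice earlier in this diff). A minimal sketch (illustrative only; the values are made up, not Harbor's actual skipped job types):

package main

import (
	"fmt"
	"slices"
)

func main() {
	// Hypothetical list; the real skippedJobTypes values are not shown in this diff.
	skippedJobTypes := []string{"DEMO", "IMAGE_REPLICATE"}

	fmt.Println(slices.Contains(skippedJobTypes, "DEMO"))    // true
	fmt.Println(slices.Contains(skippedJobTypes, "GARBAGE")) // false
}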
4 changes: 2 additions & 2 deletions src/controller/proxy/manifestcache.go
@@ -131,7 +131,7 @@ func (m *ManifestListCache) push(ctx context.Context, repo, reference string, ma
// if time exceed, then push a updated manifest list which contains existing manifest
var newMan distribution.Manifest
var err error
- for n := 0; n < maxManifestListWait; n++ {
+ for range maxManifestListWait {
log.Debugf("waiting for the manifest ready, repo %v, tag:%v", repo, reference)
time.Sleep(sleepIntervalSec * time.Second)
newMan, err = m.updateManifestList(ctx, repo, man)
@@ -177,7 +177,7 @@ type ManifestCache struct {
// CacheContent ...
func (m *ManifestCache) CacheContent(ctx context.Context, remoteRepo string, man distribution.Manifest, art lib.ArtifactInfo, r RemoteInterface, _ string) {
var waitBlobs []distribution.Descriptor
- for n := 0; n < maxManifestWait; n++ {
+ for n := range maxManifestWait {
time.Sleep(sleepIntervalSec * time.Second)
waitBlobs = m.local.CheckDependencies(ctx, art.Repository, man)
if len(waitBlobs) == 0 {
4 changes: 2 additions & 2 deletions src/controller/quota/util_test.go
@@ -78,13 +78,13 @@ func (suite *RefreshForProjectsTestSuite) TestRefreshForProjects() {

startProjectID := rand.Int63()
var firstPageProjects, secondPageProjects []*models.Project
- for i := 0; i < 50; i++ {
+ for i := range 50 {
firstPageProjects = append(firstPageProjects, &models.Project{
ProjectID: startProjectID + int64(i),
})
}

- for i := 0; i < 10; i++ {
+ for i := range 10 {
secondPageProjects = append(secondPageProjects, &models.Project{
ProjectID: startProjectID + 50 + int64(i),
})
5 changes: 1 addition & 4 deletions src/controller/replication/transfer/image/transfer.go
@@ -423,10 +423,7 @@ func (t *transfer) copyBlobByChunk(srcRepo, dstRepo, digest string, sizeFromDesc
// update the start and end for upload
*start = *end + 1
// since both ends are closed intervals, it is necessary to subtract one byte
- *end = *start + replicationChunkSize - 1
- if *end >= endRange {
- *end = endRange
- }
+ *end = min(*start+replicationChunkSize-1, endRange)

t.logger.Infof("copying the blob chunk: %d-%d/%d", *start, *end, sizeFromDescriptor)
_, data, err := t.src.PullBlobChunk(srcRepo, digest, sizeFromDescriptor, *start, *end)
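Note on min above: min and max became predeclared builtins in Go 1.21, so the clamp-with-if pattern collapses to a single expression. A minimal sketch of the chunk-end computation (illustrative only; the sizes are made-up stand-ins for replicationChunkSize and endRange):

package main

import "fmt"

func main() {
	var (
		start     int64 = 100
		chunkSize int64 = 64  // stand-in for replicationChunkSize
		endRange  int64 = 150 // last valid byte of the blob
	)
	// Closed-interval chunk end, clamped so it never runs past endRange.
	end := min(start+chunkSize-1, endRange)
	fmt.Println(end) // 150 (163 clamped down to 150)
}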
7 changes: 3 additions & 4 deletions src/controller/scan/checker.go
@@ -16,6 +16,7 @@ package scan

import (
"context"
+ "slices"

"github.com/goharbor/harbor/src/controller/artifact"
"github.com/goharbor/harbor/src/controller/artifact/processor/image"
@@ -125,10 +126,8 @@ func hasCapability(r *models.Registration, a *artifact.Artifact) bool {
// use allowlist here because currently only docker image is supported by the scanner
// https://github.com/goharbor/pluggable-scanner-spec/issues/2
allowlist := []string{image.ArtifactTypeImage}
- for _, t := range allowlist {
- if a.Type == t {
- return r.HasCapability(a.ManifestMediaType)
- }
+ if slices.Contains(allowlist, a.Type) {
+ return r.HasCapability(a.ManifestMediaType)
}

return false
9 changes: 2 additions & 7 deletions src/controller/scanner/base_controller.go
@@ -17,6 +17,7 @@ package scanner
import (
"context"
"fmt"
+ "slices"
"sync"
"time"

@@ -383,13 +384,7 @@ var (
)

func isReservedName(name string) bool {
- for _, reservedName := range reservedNames {
- if name == reservedName {
- return true
- }
- }
-
- return false
+ return slices.Contains(reservedNames, name)
}

// MetadataResult metadata or error saved in cache
2 changes: 1 addition & 1 deletion src/core/auth/ldap/ldap.go
@@ -150,7 +150,7 @@ func (l *Auth) attachGroupParallel(ctx context.Context, ldapUsers []model.User,
g := new(errgroup.Group)
g.SetLimit(workerCount)

- for i := 0; i < workerCount; i++ {
+ for i := range workerCount {
curIndex := i
g.Go(func() error {
userGroups := make([]ugModel.UserGroup, 0)
7 changes: 3 additions & 4 deletions src/core/service/token/token_test.go
@@ -24,6 +24,7 @@ import (
"os"
"path"
"runtime"
+ "slices"
"testing"

"github.com/docker/distribution/registry/auth/token"
@@ -239,10 +240,8 @@ func (f *fakeSecurityContext) IsSolutionUser() bool {
}
func (f *fakeSecurityContext) Can(ctx context.Context, action rbac.Action, resource rbac.Resource) bool {
if actions, ok := f.rcActions[resource]; ok {
- for _, a := range actions {
- if a == action {
- return true
- }
+ if slices.Contains(actions, action) {
+ return true
}
}
return false
9 changes: 3 additions & 6 deletions src/core/session/codec.go
@@ -16,6 +16,7 @@ package session

import (
"encoding/gob"
+ "maps"

"github.com/beego/beego/v2/server/web/session"

@@ -51,14 +52,10 @@ func (*gobCodec) Decode(data []byte, v any) error {

switch in := v.(type) {
case map[any]any:
- for k, v := range vm {
- in[k] = v
- }
+ maps.Copy(in, vm)
case *map[any]any:
m := *in
- for k, v := range vm {
- m[k] = v
- }
+ maps.Copy(m, vm)
default:
return errors.Errorf("object type invalid, %#v", v)
}
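Note on maps.Copy above: available since Go 1.21, it copies every key/value pair from the source map into the destination, overwriting keys that already exist, which is what the removed loops did. A minimal sketch (illustrative only):

package main

import (
	"fmt"
	"maps"
)

func main() {
	dst := map[any]any{"user": "old", "kept": true}
	src := map[any]any{"user": "new", "extra": 1}

	// Equivalent to: for k, v := range src { dst[k] = v }
	maps.Copy(dst, src)

	fmt.Println(dst["user"], dst["kept"], dst["extra"]) // new true 1
}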
14 changes: 4 additions & 10 deletions src/jobservice/job/impl/context.go
@@ -18,6 +18,7 @@ import (
"context"
"errors"
"fmt"
+ "maps"
"math"
"sync"
"time"
@@ -116,9 +117,7 @@ func (c *Context) Build(tracker job.Tracker) (job.Context, error) {

// Copy properties
if len(c.properties) > 0 {
- for k, v := range c.properties {
- jContext.properties[k] = v
- }
+ maps.Copy(jContext.properties, c.properties)
}

// Refresh config properties
@@ -128,9+127,7 @@ func (c *Context) Build(tracker job.Tracker) (job.Context, error) {
}

props := c.cfgMgr.GetAll(c.sysContext)
- for k, v := range props {
- jContext.properties[k] = v
- }
+ maps.Copy(jContext.properties, props)

// Set loggers for job
c.lock.Lock()
@@ -199,10 +196,7 @@ func createLoggers(jobID string) (logger.Interface, error) {
if lc.Name == logger.NameFile || lc.Name == logger.NameDB {
// Need extra param
fSettings := map[string]any{}
- for k, v := range lc.Settings {
- // Copy settings
- fSettings[k] = v
- }
+ maps.Copy(fSettings, lc.Settings)
if lc.Name == logger.NameFile {
// Append file name param
fSettings["filename"] = fmt.Sprintf("%s.log", jobID)
5 changes: 2 additions & 3 deletions src/jobservice/job/impl/default_context.go
@@ -17,6 +17,7 @@ package impl
import (
"context"
"errors"
+ "maps"

o "github.com/beego/beego/v2/client/orm"

@@ -61,9 +62,7 @@ func (dc *DefaultContext) Build(t job.Tracker) (job.Context, error) {

// Copy properties
if len(dc.properties) > 0 {
- for k, v := range dc.properties {
- jContext.properties[k] = v
- }
+ maps.Copy(jContext.properties, dc.properties)
}

// Set loggers for job
5 changes: 1 addition & 4 deletions src/jobservice/job/impl/gc/garbage_collection.go
@@ -299,10 +299,7 @@ func (gc *GarbageCollector) sweep(ctx job.Context) error {
blobChunkCount := (total + blobChunkSize - 1) / blobChunkSize
blobChunks := make([][]*blobModels.Blob, blobChunkCount)
for i, start := 0, 0; i < blobChunkCount; i, start = i+1, start+blobChunkSize {
- end := start + blobChunkSize
- if end > total {
- end = total
- }
+ end := min(start+blobChunkSize, total)
blobChunks[i] = gc.deleteSet[start:end]
}

2 changes: 1 addition & 1 deletion src/jobservice/job/impl/gc/util_test.go
@@ -59,7 +59,7 @@ func TestDelKeys(t *testing.T) {
// helper function
// mock the data in the redis
mock := func(count int, prefix string) {
- for i := 0; i < count; i++ {
+ for i := range count {
err = c.Save(context.TODO(), fmt.Sprintf("%s-%d", prefix, i), "", 0)
assert.NoError(t, err)
}
6 changes: 3 additions & 3 deletions src/jobservice/job/impl/scandataexport/scan_data_export.go
@@ -17,6 +17,7 @@ package scandataexport
import (
"encoding/json"
"fmt"
+ "maps"
"os"
"path/filepath"
"strconv"
@@ -173,9 +174,8 @@ func (sde *ScanDataExport) updateExecAttributes(ctx job.Context, params job.Para
}
// copy old extra
attrsToUpdate := exec.ExtraAttrs
- for k, v := range attrs {
- attrsToUpdate[k] = v
- }
+ maps.Copy(attrsToUpdate, attrs)

return sde.execMgr.UpdateExtraAttrs(ctx.SystemContext(), execID, attrsToUpdate)
}
