diff --git a/cmd/frontend/db/schema.md b/cmd/frontend/db/schema.md
index 5ff3dadca543..ce1de66bc599 100644
--- a/cmd/frontend/db/schema.md
+++ b/cmd/frontend/db/schema.md
@@ -374,13 +374,13 @@ Indexes:
     Column     |  Type   |                         Modifiers
 ---------------+---------+-----------------------------------------------------------
  id            | integer | not null default nextval('lsif_commits_id_seq'::regclass)
- repository    | text    | not null
  commit        | text    | not null
  parent_commit | text    |
+ repository_id | integer | not null
 Indexes:
     "lsif_commits_pkey" PRIMARY KEY, btree (id)
-    "lsif_commits_repo_commit_parent_commit_unique" UNIQUE, btree (repository, commit, parent_commit)
-    "lsif_commits_parent_commit" btree (repository, parent_commit)
+    "lsif_commits_repository_id_commit_parent_commit_unique" UNIQUE, btree (repository_id, commit, parent_commit)
+    "lsif_commits_repository_id_parent_commit" btree (repository_id, parent_commit)
 Check constraints:
     "lsif_commits_commit_valid_chars" CHECK (commit ~ '^[a-z0-9]{40}$'::text)
     "lsif_commits_parent_commit_valid_chars" CHECK (parent_commit ~ '^[a-z0-9]{40}$'::text)
@@ -424,30 +424,30 @@ # Table "public.lsif_uploads"
 ```
-       Column        |           Type           |                        Modifiers
---------------------+--------------------------+---------------------------------------------------------
- id                 | integer                  | not null default nextval('lsif_dumps_id_seq'::regclass)
- repository         | text                     | not null
- commit             | text                     | not null
- root               | text                     | not null default ''::text
- visible_at_tip     | boolean                  | not null default false
- uploaded_at        | timestamp with time zone | not null default now()
- filename           | text                     | not null
- state              | lsif_upload_state        | not null default 'queued'::lsif_upload_state
- failure_summary    | text                     |
- failure_stacktrace | text                     |
- started_at         | timestamp with time zone |
- finished_at        | timestamp with time zone |
- tracing_context    | text                     | not null
+          Column           |           Type           |                        Modifiers
+---------------------------+--------------------------+---------------------------------------------------------
+ id                        | integer                  | not null default nextval('lsif_dumps_id_seq'::regclass)
+ repository_name_at_upload | text                     | not null
+ commit                    | text                     | not null
+ root                      | text                     | not null default ''::text
+ visible_at_tip            | boolean                  | not null default false
+ uploaded_at               | timestamp with time zone | not null default now()
+ filename                  | text                     | not null
+ state                     | lsif_upload_state        | not null default 'queued'::lsif_upload_state
+ failure_summary           | text                     |
+ failure_stacktrace        | text                     |
+ started_at                | timestamp with time zone |
+ finished_at               | timestamp with time zone |
+ tracing_context           | text                     | not null
+ repository_id             | integer                  | not null
 Indexes:
     "lsif_uploads_pkey" PRIMARY KEY, btree (id)
-    "lsif_uploads_repository_commit_root" UNIQUE, btree (repository, commit, root) WHERE state = 'completed'::lsif_upload_state
+    "lsif_uploads_repository_id_commit_root" UNIQUE, btree (repository_id, commit, root) WHERE state = 'completed'::lsif_upload_state
     "lsif_uploads_state" btree (state)
     "lsif_uploads_uploaded_at" btree (uploaded_at)
-    "lsif_uploads_visible_repository_commit" btree (repository, commit) WHERE visible_at_tip
+    "lsif_uploads_visible_repository_id_commit" btree (repository_id, commit) WHERE visible_at_tip
 Check constraints:
     "lsif_uploads_commit_valid_chars" CHECK (commit ~ '^[a-z0-9]{40}$'::text)
-    "lsif_uploads_repository_check" CHECK (repository <> ''::text)
 Referenced by:
     TABLE "lsif_packages" CONSTRAINT "lsif_packages_dump_id_fkey" FOREIGN KEY (dump_id) REFERENCES lsif_uploads(id) ON DELETE CASCADE
     TABLE "lsif_references" CONSTRAINT "lsif_references_dump_id_fkey" FOREIGN KEY (dump_id) REFERENCES lsif_uploads(id) ON DELETE CASCADE
diff --git a/cmd/frontend/graphqlbackend/codeintel.go b/cmd/frontend/graphqlbackend/codeintel.go
index 3f9335eee48d..a280cb4fc9cc 100644
--- a/cmd/frontend/graphqlbackend/codeintel.go
+++ b/cmd/frontend/graphqlbackend/codeintel.go
@@ -6,6 +6,7 @@ import (
 	graphql "github.com/graph-gophers/graphql-go"
 
 	"github.com/sourcegraph/sourcegraph/cmd/frontend/graphqlbackend/graphqlutil"
+	"github.com/sourcegraph/sourcegraph/internal/api"
 )
 
 // NewCodeIntelResolver will be set by enterprise
@@ -34,7 +35,6 @@ type LSIFRepositoryUploadsQueryArgs struct {
 type LSIFUploadResolver interface {
 	ID() graphql.ID
 	ProjectRoot(ctx context.Context) (*GitTreeEntryResolver, error)
-	InputRepoName() string
 	InputCommit() string
 	InputRoot() string
 	State() string
@@ -64,7 +64,8 @@ type LSIFQueryResolver interface {
 }
 
 type LSIFQueryArgs struct {
-	RepoName string
+	RepoID   api.RepoID
+	RepoName api.RepoName
 	Commit   GitObjectID
 	Path     string
 	UploadID int64
diff --git a/cmd/frontend/graphqlbackend/git_tree_entry.go b/cmd/frontend/graphqlbackend/git_tree_entry.go
index 276ce12a3d2f..963c500d69f4 100644
--- a/cmd/frontend/graphqlbackend/git_tree_entry.go
+++ b/cmd/frontend/graphqlbackend/git_tree_entry.go
@@ -214,7 +214,8 @@ func (r *GitTreeEntryResolver) LSIF(ctx context.Context) (LSIFQueryResolver, err
 	}
 
 	return EnterpriseResolvers.codeIntelResolver.LSIF(ctx, &LSIFQueryArgs{
-		RepoName: r.Repository().Name(),
+		RepoID:   r.Repository().Type().ID,
+		RepoName: r.Repository().Type().Name,
 		Commit:   r.Commit().OID(),
 		Path:     r.Path(),
 	})
diff --git a/cmd/frontend/graphqlbackend/schema.go b/cmd/frontend/graphqlbackend/schema.go
index d96098d3cca0..fcb46ebe7bd7 100644
--- a/cmd/frontend/graphqlbackend/schema.go
+++ b/cmd/frontend/graphqlbackend/schema.go
@@ -4162,9 +4162,6 @@ type LSIFUpload implements Node {
     # The project for which this upload provides code intelligence.
     projectRoot: GitTree
 
-    # The original repository name supplied at upload time.
-    inputRepoName: String!
-
     # The original 40-character commit supplied at upload time.
     inputCommit: String!
 
diff --git a/cmd/frontend/graphqlbackend/schema.graphql b/cmd/frontend/graphqlbackend/schema.graphql
index c65c56495d1d..a597e737deab 100755
--- a/cmd/frontend/graphqlbackend/schema.graphql
+++ b/cmd/frontend/graphqlbackend/schema.graphql
@@ -4169,9 +4169,6 @@ type LSIFUpload implements Node {
     # The project for which this upload provides code intelligence.
    projectRoot: GitTree
 
-    # The original repository name supplied at upload time.
-    inputRepoName: String!
-
     # The original 40-character commit supplied at upload time.
     inputCommit: String!
 
diff --git a/enterprise/internal/codeintel/lsifserver/client/query.go b/enterprise/internal/codeintel/lsifserver/client/query.go
index e7190f37faba..97c7b934b7c8 100644
--- a/enterprise/internal/codeintel/lsifserver/client/query.go
+++ b/enterprise/internal/codeintel/lsifserver/client/query.go
@@ -8,16 +8,19 @@ import (
 	"github.com/sourcegraph/go-lsp"
 
 	"github.com/sourcegraph/sourcegraph/cmd/frontend/graphqlbackend"
+	"github.com/sourcegraph/sourcegraph/internal/api"
 	"github.com/sourcegraph/sourcegraph/internal/lsif"
 )
 
 func (c *Client) Exists(ctx context.Context, args *struct {
-	RepoName string
+	RepoID   api.RepoID
+	RepoName api.RepoName
 	Commit   string
 	Path     string
 }) (*lsif.LSIFUpload, error) {
 	query := queryValues{}
-	query.Set("repository", args.RepoName)
+	query.SetInt("repositoryId", int64(args.RepoID))
+	query.Set("repositoryName", string(args.RepoName))
 	query.Set("commit", args.Commit)
 	query.Set("path", args.Path)
@@ -39,7 +42,8 @@
 }
 
 func (c *Client) Upload(ctx context.Context, args *struct {
-	RepoName string
+	RepoID   api.RepoID
+	RepoName api.RepoName
 	Commit   graphqlbackend.GitObjectID
 	Root     string
 	Blocking *bool
@@ -47,7 +51,8 @@ func (c *Client) Upload(ctx context.Context, args *struct {
 	Body     io.ReadCloser
 }) (int64, bool, error) {
 	query := queryValues{}
-	query.Set("repository", args.RepoName)
+	query.SetInt("repositoryId", int64(args.RepoID))
+	query.Set("repositoryName", string(args.RepoName))
 	query.Set("commit", string(args.Commit))
 	query.Set("root", args.Root)
 	query.SetOptionalBool("blocking", args.Blocking)
@@ -70,10 +75,12 @@ func (c *Client) Upload(ctx context.Context, args *struct {
 	}
 
 	return payload.ID, meta.statusCode == http.StatusAccepted, nil
+
 }
 
 func (c *Client) Definitions(ctx context.Context, args *struct {
-	RepoName  string
+	RepoID    api.RepoID
+	RepoName  api.RepoName
 	Commit    graphqlbackend.GitObjectID
 	Path      string
 	Line      int32
@@ -82,7 +89,8 @@ func (c *Client) Definitions(ctx context.Context, args *struct {
 }) ([]*lsif.LSIFLocation, string, error) {
 	return c.locationQuery(ctx, &struct {
 		Operation string
-		RepoName  string
+		RepoID    api.RepoID
+		RepoName  api.RepoName
 		Commit    graphqlbackend.GitObjectID
 		Path      string
 		Line      int32
@@ -92,6 +100,7 @@ func (c *Client) Definitions(ctx context.Context, args *struct {
 		Cursor    *string
 	}{
 		Operation: "definitions",
+		RepoID:    args.RepoID,
 		RepoName:  args.RepoName,
 		Commit:    args.Commit,
 		Path:      args.Path,
@@ -102,7 +111,8 @@
 }
 
 func (c *Client) References(ctx context.Context, args *struct {
-	RepoName  string
+	RepoID    api.RepoID
+	RepoName  api.RepoName
 	Commit    graphqlbackend.GitObjectID
 	Path      string
 	Line      int32
@@ -113,7 +123,8 @@ func (c *Client) References(ctx context.Context, args *struct {
 }) ([]*lsif.LSIFLocation, string, error) {
 	return c.locationQuery(ctx, &struct {
 		Operation string
-		RepoName  string
+		RepoID    api.RepoID
+		RepoName  api.RepoName
 		Commit    graphqlbackend.GitObjectID
 		Path      string
 		Line      int32
@@ -123,6 +134,7 @@ func (c *Client) References(ctx context.Context, args *struct {
 		Cursor    *string
 	}{
 		Operation: "references",
+		RepoID:    args.RepoID,
 		RepoName:  args.RepoName,
 		Commit:    args.Commit,
 		Path:      args.Path,
@@ -136,7 +148,8 @@
 
 func (c *Client) locationQuery(ctx context.Context, args *struct {
 	Operation string
-	RepoName  string
+	RepoID    api.RepoID
+	RepoName  api.RepoName
 	Commit    graphqlbackend.GitObjectID
 	Path      string
 	Line      int32
@@ -146,7 +159,8 @@ func (c *Client) locationQuery(ctx context.Context, args *struct {
 	Cursor    *string
 }) ([]*lsif.LSIFLocation, string, error) {
 	query := queryValues{}
-	query.Set("repository", args.RepoName)
+	query.SetInt("repositoryId", int64(args.RepoID))
+	query.Set("repositoryName", string(args.RepoName))
 	query.Set("commit", string(args.Commit))
 	query.Set("path", args.Path)
 	query.SetInt("line", int64(args.Line))
@@ -173,7 +187,8 @@ func (c *Client) locationQuery(ctx context.Context, args *struct {
 }
 
 func (c *Client) Hover(ctx context.Context, args *struct {
-	RepoName  string
+	RepoID    api.RepoID
+	RepoName  api.RepoName
 	Commit    graphqlbackend.GitObjectID
 	Path      string
 	Line      int32
@@ -181,7 +196,8 @@ func (c *Client) Hover(ctx context.Context, args *struct {
 	UploadID  int64
 }) (string, lsp.Range, error) {
 	query := queryValues{}
-	query.Set("repository", args.RepoName)
+	query.SetInt("repositoryId", int64(args.RepoID))
+	query.Set("repositoryName", string(args.RepoName))
 	query.Set("commit", string(args.Commit))
 	query.Set("path", args.Path)
 	query.SetInt("line", int64(args.Line))
diff --git a/enterprise/internal/codeintel/lsifserver/client/upload.go b/enterprise/internal/codeintel/lsifserver/client/upload.go
index 6750e790b05e..dd62e1b236c2 100644
--- a/enterprise/internal/codeintel/lsifserver/client/upload.go
+++ b/enterprise/internal/codeintel/lsifserver/client/upload.go
@@ -3,14 +3,15 @@ package client
 import (
 	"context"
 	"fmt"
-	"net/url"
 	"strings"
 
+	"github.com/sourcegraph/sourcegraph/internal/api"
 	"github.com/sourcegraph/sourcegraph/internal/lsif"
 )
 
 func (c *Client) GetUploads(ctx context.Context, args *struct {
-	RepoName        string
+	RepoID          api.RepoID
+	RepoName        api.RepoName
 	Query           *string
 	State           *string
 	IsLatestForRepo *bool
@@ -27,7 +28,7 @@ func (c *Client) GetUploads(ctx context.Context, args *struct {
 	}
 
 	req := &lsifRequest{
-		path:   fmt.Sprintf("/uploads/%s", url.PathEscape(args.RepoName)),
+		path:   fmt.Sprintf("/uploads/repository/%d", args.RepoID),
 		cursor: args.Cursor,
 		query:  query,
 	}
diff --git a/enterprise/internal/codeintel/lsifserver/proxy/proxy.go b/enterprise/internal/codeintel/lsifserver/proxy/proxy.go
index a1dbd26ec88e..a977265a881c 100644
--- a/enterprise/internal/codeintel/lsifserver/proxy/proxy.go
+++ b/enterprise/internal/codeintel/lsifserver/proxy/proxy.go
@@ -14,6 +14,7 @@ import (
 	"github.com/sourcegraph/sourcegraph/cmd/frontend/backend"
 	"github.com/sourcegraph/sourcegraph/cmd/frontend/graphqlbackend"
 	"github.com/sourcegraph/sourcegraph/cmd/frontend/httpapi"
+	"github.com/sourcegraph/sourcegraph/cmd/frontend/types"
 	"github.com/sourcegraph/sourcegraph/enterprise/internal/codeintel/lsifserver"
 	"github.com/sourcegraph/sourcegraph/enterprise/internal/codeintel/lsifserver/client"
 	"github.com/sourcegraph/sourcegraph/internal/api"
@@ -43,7 +44,8 @@ func uploadProxyHandler(p *httputil.ReverseProxy) func(http.ResponseWriter, *htt
 		root := q.Get("root")
 
 		ctx := r.Context()
-		if !ensureRepoAndCommitExist(ctx, w, repoName, commit) {
+		repo, ok := ensureRepoAndCommitExist(ctx, w, repoName, commit)
+		if !ok {
 			return
 		}
 
@@ -55,14 +57,16 @@ func uploadProxyHandler(p *httputil.ReverseProxy) func(http.ResponseWriter, *htt
 		}
 
 		uploadID, queued, err := client.DefaultClient.Upload(ctx, &struct {
-			RepoName string
+			RepoID   api.RepoID
+			RepoName api.RepoName
 			Commit   graphqlbackend.GitObjectID
 			Root     string
 			Blocking *bool
 			MaxWait  *int32
 			Body     io.ReadCloser
 		}{
-			RepoName: repoName,
+			RepoID:   repo.ID,
+			RepoName: repo.Name,
 			Commit:   graphqlbackend.GitObjectID(commit),
 			Root:     root,
 			Body:     r.Body,
@@ -90,29 +94,29 @@ func uploadProxyHandler(p *httputil.ReverseProxy) func(http.ResponseWriter, *htt
 	}
 }
 
-func ensureRepoAndCommitExist(ctx context.Context, w http.ResponseWriter, repoName, commit string) bool {
+func ensureRepoAndCommitExist(ctx context.Context, w http.ResponseWriter, repoName, commit string) (*types.Repo, bool) {
 	repo, err := backend.Repos.GetByName(ctx, api.RepoName(repoName))
 	if err != nil {
 		if errcode.IsNotFound(err) {
 			http.Error(w, fmt.Sprintf("unknown repository %q", repoName), http.StatusNotFound)
-			return false
+			return nil, false
 		}
 
 		http.Error(w, err.Error(), http.StatusInternalServerError)
-		return false
+		return nil, false
 	}
 
 	if _, err := backend.Repos.ResolveRev(ctx, repo, commit); err != nil {
 		if gitserver.IsRevisionNotFound(err) {
 			http.Error(w, fmt.Sprintf("unknown commit %q", commit), http.StatusNotFound)
-			return false
+			return nil, false
 		}
 
 		http.Error(w, err.Error(), http.StatusInternalServerError)
-		return false
+		return nil, false
 	}
 
-	return true
+	return repo, true
 }
 
 func enforceAuth(ctx context.Context, w http.ResponseWriter, r *http.Request, repoName string) bool {
diff --git a/enterprise/internal/codeintel/resolvers/location.go b/enterprise/internal/codeintel/resolvers/location.go
index 6012fdeae6ed..91a739a35161 100644
--- a/enterprise/internal/codeintel/resolvers/location.go
+++ b/enterprise/internal/codeintel/resolvers/location.go
@@ -6,21 +6,10 @@ import (
 
 	"github.com/sourcegraph/sourcegraph/cmd/frontend/graphqlbackend"
 	"github.com/sourcegraph/sourcegraph/cmd/frontend/graphqlbackend/graphqlutil"
+	"github.com/sourcegraph/sourcegraph/internal/api"
 	"github.com/sourcegraph/sourcegraph/internal/lsif"
 )
 
-type LocationsQueryOptions struct {
-	Operation string
-	RepoName  string
-	Commit    graphqlbackend.GitObjectID
-	Path      string
-	Line      int32
-	Character int32
-	UploadID  int64
-	Limit     *int32
-	NextURL   *string
-}
-
 type locationConnectionResolver struct {
 	locations []*lsif.LSIFLocation
 	nextURL   string
@@ -30,12 +19,12 @@ var _ graphqlbackend.LocationConnectionResolver = &locationConnectionResolver{}
 
 func (r *locationConnectionResolver) Nodes(ctx context.Context) ([]graphqlbackend.LocationResolver, error) {
 	collectionResolver := &repositoryCollectionResolver{
-		commitCollectionResolvers: map[string]*commitCollectionResolver{},
+		commitCollectionResolvers: map[api.RepoID]*commitCollectionResolver{},
 	}
 
 	var l []graphqlbackend.LocationResolver
 	for _, location := range r.locations {
-		treeResolver, err := collectionResolver.resolve(ctx, location.Repository, location.Commit, location.Path)
+		treeResolver, err := collectionResolver.resolve(ctx, location.RepositoryID, location.Commit, location.Path)
 		if err != nil {
 			return nil, err
 		}
diff --git a/enterprise/internal/codeintel/resolvers/query.go b/enterprise/internal/codeintel/resolvers/query.go
index 6c0793d68bf2..22b4d624d046 100644
--- a/enterprise/internal/codeintel/resolvers/query.go
+++ b/enterprise/internal/codeintel/resolvers/query.go
@@ -6,11 +6,13 @@ import (
 
 	"github.com/sourcegraph/sourcegraph/cmd/frontend/graphqlbackend"
 	"github.com/sourcegraph/sourcegraph/enterprise/internal/codeintel/lsifserver/client"
+	"github.com/sourcegraph/sourcegraph/internal/api"
 	"github.com/sourcegraph/sourcegraph/internal/lsif"
 )
 
 type lsifQueryResolver struct {
-	repoName string
+	repoID   api.RepoID
+	repoName api.RepoName
 	commit   graphqlbackend.GitObjectID
 	path     string
 	upload   *lsif.LSIFUpload
@@ -19,18 +21,20 @@ type lsifQueryResolver struct {
 var _ graphqlbackend.LSIFQueryResolver = &lsifQueryResolver{}
 
 func (r *lsifQueryResolver) Commit(ctx context.Context) (*graphqlbackend.GitCommitResolver, error) {
-	return resolveCommit(ctx, r.repoName, r.upload.Commit)
+	return resolveCommit(ctx, r.repoID, r.upload.Commit)
 }
 
 func (r *lsifQueryResolver) Definitions(ctx context.Context, args *graphqlbackend.LSIFQueryPositionArgs) (graphqlbackend.LocationConnectionResolver, error) {
 	opts := &struct {
-		RepoName  string
+		RepoID    api.RepoID
+		RepoName  api.RepoName
 		Commit    graphqlbackend.GitObjectID
 		Path      string
 		Line      int32
 		Character int32
 		UploadID  int64
 	}{
+		RepoID:   r.repoID,
 		RepoName: r.repoName,
 		Commit:   r.commit,
 		Path:     r.path,
@@ -52,7 +56,8 @@ func (r *lsifQueryResolver) Definitions(ctx context.Context, args *graphqlbacken
 
 func (r *lsifQueryResolver) References(ctx context.Context, args *graphqlbackend.LSIFPagedQueryPositionArgs) (graphqlbackend.LocationConnectionResolver, error) {
 	opts := &struct {
-		RepoName  string
+		RepoID    api.RepoID
+		RepoName  api.RepoName
 		Commit    graphqlbackend.GitObjectID
 		Path      string
 		Line      int32
@@ -61,6 +66,7 @@ func (r *lsifQueryResolver) References(ctx context.Context, args *graphqlbackend
 		Limit     *int32
 		Cursor    *string
 	}{
+		RepoID:   r.repoID,
 		RepoName: r.repoName,
 		Commit:   r.commit,
 		Path:     r.path,
@@ -93,13 +99,15 @@ func (r *lsifQueryResolver) References(ctx context.Context, args *graphqlbackend
 
 func (r *lsifQueryResolver) Hover(ctx context.Context, args *graphqlbackend.LSIFQueryPositionArgs) (graphqlbackend.HoverResolver, error) {
 	text, lspRange, err := client.DefaultClient.Hover(ctx, &struct {
-		RepoName  string
+		RepoID    api.RepoID
+		RepoName  api.RepoName
 		Commit    graphqlbackend.GitObjectID
 		Path      string
 		Line      int32
 		Character int32
 		UploadID  int64
 	}{
+		RepoID:   r.repoID,
 		RepoName: r.repoName,
 		Commit:   r.commit,
 		Path:     r.path,
diff --git a/enterprise/internal/codeintel/resolvers/resolver.go b/enterprise/internal/codeintel/resolvers/resolver.go
index 3a43394e7797..ffca3bb6988a 100644
--- a/enterprise/internal/codeintel/resolvers/resolver.go
+++ b/enterprise/internal/codeintel/resolvers/resolver.go
@@ -8,6 +8,7 @@ import (
 	"github.com/sourcegraph/sourcegraph/cmd/frontend/backend"
 	"github.com/sourcegraph/sourcegraph/cmd/frontend/graphqlbackend"
 	"github.com/sourcegraph/sourcegraph/enterprise/internal/codeintel/lsifserver/client"
+	"github.com/sourcegraph/sourcegraph/internal/api"
 )
 
 type Resolver struct{}
@@ -91,10 +92,12 @@ func (r *Resolver) LSIFUploads(ctx context.Context, args *graphqlbackend.LSIFRep
 
 func (r *Resolver) LSIF(ctx context.Context, args *graphqlbackend.LSIFQueryArgs) (graphqlbackend.LSIFQueryResolver, error) {
 	upload, err := client.DefaultClient.Exists(ctx, &struct {
-		RepoName string
+		RepoID   api.RepoID
+		RepoName api.RepoName
 		Commit   string
 		Path     string
 	}{
+		RepoID:   args.RepoID,
 		RepoName: args.RepoName,
 		Commit:   string(args.Commit),
 		Path:     args.Path,
@@ -109,6 +112,7 @@ func (r *Resolver) LSIF(ctx context.Context, args *graphqlbackend.LSIFQueryArgs)
 	}
 
 	return &lsifQueryResolver{
+		repoID:   args.RepoID,
 		repoName: args.RepoName,
 		commit:   args.Commit,
 		path:     args.Path,
diff --git a/enterprise/internal/codeintel/resolvers/tree.go b/enterprise/internal/codeintel/resolvers/tree.go
index 9d1c5ffecec1..08580f5a13bd 100644
--- a/enterprise/internal/codeintel/resolvers/tree.go
+++ b/enterprise/internal/codeintel/resolvers/tree.go
@@ -11,8 +11,8 @@ import (
 )
 
-// resolveRepository returns a repository resolver for the given name.
-func resolveRepository(ctx context.Context, repoName string) (*graphqlbackend.RepositoryResolver, error) {
-	repo, err := backend.Repos.GetByName(ctx, api.RepoName(repoName))
+// resolveRepository returns a repository resolver for the given repository identifier.
+func resolveRepository(ctx context.Context, repoID api.RepoID) (*graphqlbackend.RepositoryResolver, error) {
+	repo, err := backend.Repos.Get(ctx, repoID)
 	if err != nil {
 		return nil, err
 	}
@@ -23,8 +23,8 @@ func resolveRepository(ctx context.Context, repoName string) (*graphqlbackend.Re
 // resolveCommit returns the GitCommitResolver for the given repository and commit. If the
 // commit does not exist for the repository, a nil resolver is returned. Any other error is
 // returned unmodified.
-func resolveCommit(ctx context.Context, repoName, commit string) (*graphqlbackend.GitCommitResolver, error) {
-	repositoryResolver, err := resolveRepository(ctx, repoName)
+func resolveCommit(ctx context.Context, repoID api.RepoID, commit string) (*graphqlbackend.GitCommitResolver, error) {
+	repositoryResolver, err := resolveRepository(ctx, repoID)
 	if err != nil {
 		return nil, err
 	}
@@ -55,8 +55,8 @@ func resolveCommitFrom(ctx context.Context, repositoryResolver *graphqlbackend.R
 // resolvePath returns the GitTreeResolver for the given repository, commit, and path. If the
 // commit does not exist for the repository, a nil resolver is returned. Any other error is
 // returned unmodified.
-func resolvePath(ctx context.Context, repoName, commit, path string) (*graphqlbackend.GitTreeEntryResolver, error) {
-	commitResolver, err := resolveCommit(ctx, repoName, commit)
+func resolvePath(ctx context.Context, repoID api.RepoID, commit, path string) (*graphqlbackend.GitTreeEntryResolver, error) {
+	commitResolver, err := resolveCommit(ctx, repoID, commit)
 	if err != nil {
 		return nil, err
 	}
diff --git a/enterprise/internal/codeintel/resolvers/tree_collection.go b/enterprise/internal/codeintel/resolvers/tree_collection.go
index 2cb2d7512560..3e82976984cd 100644
--- a/enterprise/internal/codeintel/resolvers/tree_collection.go
+++ b/enterprise/internal/codeintel/resolvers/tree_collection.go
@@ -4,17 +4,18 @@ import (
 	"context"
 
 	"github.com/sourcegraph/sourcegraph/cmd/frontend/graphqlbackend"
+	"github.com/sourcegraph/sourcegraph/internal/api"
 )
 
 type repositoryCollectionResolver struct {
-	commitCollectionResolvers map[string]*commitCollectionResolver
+	commitCollectionResolvers map[api.RepoID]*commitCollectionResolver
 }
 
 // resolve returns a GitTreeEntryResolver for the given repository, commit, and path. This will cache
 // the repository, commit, and path resolvers if they have been previously constructed with this same
 // struct instance. If the commit resolver cannot be constructed, a nil resolver is returned.
-func (r *repositoryCollectionResolver) resolve(ctx context.Context, repoName, commit, path string) (*graphqlbackend.GitTreeEntryResolver, error) {
-	commitCollectionResolver, err := r.resolveRepository(ctx, repoName)
+func (r *repositoryCollectionResolver) resolve(ctx context.Context, repoID api.RepoID, commit, path string) (*graphqlbackend.GitTreeEntryResolver, error) {
+	commitCollectionResolver, err := r.resolveRepository(ctx, repoID)
 	if err != nil {
 		return nil, err
 	}
@@ -28,12 +29,12 @@ func (r *repositoryCollectionResolver) resolve(ctx context.Context, repoName, co
 }
 
 // resolveRepository returns a commitCollectionResolver with the given resolved repository.
-func (r *repositoryCollectionResolver) resolveRepository(ctx context.Context, repoName string) (*commitCollectionResolver, error) {
-	if payload, ok := r.commitCollectionResolvers[repoName]; ok {
+func (r *repositoryCollectionResolver) resolveRepository(ctx context.Context, repoID api.RepoID) (*commitCollectionResolver, error) {
+	if payload, ok := r.commitCollectionResolvers[repoID]; ok {
 		return payload, nil
 	}
 
-	repositoryResolver, err := resolveRepository(ctx, repoName)
+	repositoryResolver, err := resolveRepository(ctx, repoID)
 	if err != nil {
 		return nil, err
 	}
@@ -43,7 +44,7 @@ func (r *repositoryCollectionResolver) resolveRepository(ctx context.Context, re
 		pathCollectionResolvers:   map[string]*pathCollectionResolver{},
 	}
 
-	r.commitCollectionResolvers[repoName] = payload
+	r.commitCollectionResolvers[repoID] = payload
 	return payload, nil
 }
diff --git a/enterprise/internal/codeintel/resolvers/upload.go b/enterprise/internal/codeintel/resolvers/upload.go
index 8bd5a6abab70..cb1d2bec6b58 100644
--- a/enterprise/internal/codeintel/resolvers/upload.go
+++ b/enterprise/internal/codeintel/resolvers/upload.go
@@ -12,6 +12,7 @@ import (
 	"github.com/sourcegraph/sourcegraph/cmd/frontend/graphqlbackend"
 	"github.com/sourcegraph/sourcegraph/cmd/frontend/graphqlbackend/graphqlutil"
 	"github.com/sourcegraph/sourcegraph/enterprise/internal/codeintel/lsifserver/client"
+	"github.com/sourcegraph/sourcegraph/internal/api"
 	"github.com/sourcegraph/sourcegraph/internal/lsif"
 )
 
@@ -27,11 +28,7 @@ func (r *lsifUploadResolver) ID() graphql.ID {
 }
 
 func (r *lsifUploadResolver) ProjectRoot(ctx context.Context) (*graphqlbackend.GitTreeEntryResolver, error) {
-	return resolvePath(ctx, r.lsifUpload.Repository, r.lsifUpload.Commit, r.lsifUpload.Root)
-}
-
-func (r *lsifUploadResolver) InputRepoName() string {
-	return r.lsifUpload.Repository
+	return resolvePath(ctx, r.lsifUpload.RepositoryID, r.lsifUpload.Commit, r.lsifUpload.Root)
 }
 
 func (r *lsifUploadResolver) InputCommit() string {
@@ -162,14 +159,16 @@ func (r *lsifUploadConnectionResolver) compute(ctx context.Context) ([]*lsif.LSI
 	}
 
 	r.uploads, r.nextURL, r.totalCount, r.err = client.DefaultClient.GetUploads(ctx, &struct {
-		RepoName        string
+		RepoID          api.RepoID
+		RepoName        api.RepoName
 		Query           *string
 		State           *string
 		IsLatestForRepo *bool
 		Limit           *int32
 		Cursor          *string
 	}{
-		RepoName:        r.repositoryResolver.Name(),
+		RepoID:          r.repositoryResolver.Type().ID,
+		RepoName:        r.repositoryResolver.Type().Name,
 		Query:           r.opt.Query,
 		State:           r.opt.State,
 		IsLatestForRepo: r.opt.IsLatestForRepo,
diff --git a/internal/lsif/types.go b/internal/lsif/types.go
index 2fcbbd00fae3..66223bb25e2d 100644
--- a/internal/lsif/types.go
+++ b/internal/lsif/types.go
@@ -4,11 +4,12 @@ import (
 	"time"
 
 	"github.com/sourcegraph/go-lsp"
+	"github.com/sourcegraph/sourcegraph/internal/api"
 )
 
 type LSIFUpload struct {
 	ID                int64      `json:"id"`
-	Repository        string     `json:"repository"`
+	RepositoryID      api.RepoID `json:"repositoryId"`
 	Commit            string     `json:"commit"`
 	Root              string     `json:"root"`
 	Filename          string     `json:"filename"`
@@ -22,8 +23,8 @@ type LSIFUpload struct {
 }
 
 type LSIFLocation struct {
-	Repository string    `json:"repository"`
-	Commit     string    `json:"commit"`
-	Path       string    `json:"path"`
-	Range      lsp.Range `json:"range"`
+	RepositoryID api.RepoID `json:"repositoryId"`
+	Commit       string     `json:"commit"`
+	Path         string     `json:"path"`
+	Range        lsp.Range  `json:"range"`
 }
diff --git a/lsif/docs/api.yaml b/lsif/docs/api.yaml
index d5193f042d2b..375e75608728 100644
--- a/lsif/docs/api.yaml
+++ b/lsif/docs/api.yaml
@@ -27,12 +27,12 @@ paths:
             type: string
             format: binary
       parameters:
-        - name: repository
+        - name: repositoryId
           in: query
-          description: The repository name.
+          description: The repository identifier.
           required: true
           schema:
-            type: string
+            type: number
         - name: commit
          in: query
          description: The 40-character commit hash.
          required: true
@@ -77,10 +77,17 @@ paths:
       tags:
         - LSIF
       parameters:
-        - name: repository
+        - name: repositoryId
           in: query
-          description: The repository name.
+          description: The repository identifier.
           required: true
+          schema:
+            type: number
+        - name: repositoryName
+          in: query
+          description: The url-encoded repository name.
+          required: false
+          example: github.com%2Fsourcegraph%2Flsif-go
           schema:
             type: string
         - name: commit
@@ -109,10 +116,17 @@ paths:
       tags:
         - LSIF
       parameters:
-        - name: repository
+        - name: repositoryId
           in: query
-          description: The repository name.
+          description: The repository identifier.
           required: true
+          schema:
+            type: number
+        - name: repositoryName
+          in: query
+          description: The url-encoded repository name.
+          required: false
+          example: github.com%2Fsourcegraph%2Flsif-go
           schema:
             type: string
         - name: commit
@@ -140,10 +154,16 @@ paths:
       tags:
         - LSIF
       parameters:
-        - name: repository
+        - name: repositoryId
           in: query
-          description: The url-encoded repository name.
+          description: The repository identifier.
           required: true
+          schema:
+            type: number
+        - name: repositoryName
+          in: query
+          description: The url-encoded repository name.
+          required: false
           example: github.com%2Fsourcegraph%2Flsif-go
           schema:
             type: string
@@ -192,10 +212,16 @@ paths:
       tags:
         - LSIF
       parameters:
-        - name: repository
+        - name: repositoryId
           in: query
-          description: The url-encoded repository name.
+          description: The repository identifier.
           required: true
+          schema:
+            type: number
+        - name: repositoryName
+          in: query
+          description: The url-encoded repository name.
+          required: false
           example: github.com%2Fsourcegraph%2Flsif-go
           schema:
             type: string
@@ -262,10 +288,16 @@ paths:
       tags:
        - LSIF
       parameters:
-        - name: repository
+        - name: repositoryId
           in: query
-          description: The url-encoded repository name.
+          description: The repository identifier.
           required: true
+          schema:
+            type: number
+        - name: repositoryName
+          in: query
+          description: The url-encoded repository name.
+          required: false
           example: github.com%2Fsourcegraph%2Flsif-go
           schema:
             type: string
@@ -308,21 +340,21 @@ paths:
                 $ref: '#/components/schemas/Hover'
         '404':
           description: Not found
-  /uploads/{repository}:
+  /uploads/repository/{repositoryId}:
     get:
-      description: Get LSIF uploads by state.
+      description: Get LSIF uploads for a repository.
       tags:
         - Uploads
       parameters:
-        - name: repository
-          in: query
-          description: The repository name.
+        - name: repositoryId
+          in: path
+          description: The repository identifier.
           required: true
           schema:
-            type: string
+            type: number
         - name: query
           in: query
-          description: A search query applied over repository, commit, root, failure reason, and failure stacktrace properties.
+          description: A search query applied over commit, root, failure reason, and failure stacktrace properties.
           required: false
           schema:
             type: string
diff --git a/lsif/src/server/backend/backend.ts b/lsif/src/server/backend/backend.ts
index 9c48c1b2784d..5cdde3a4fc81 100644
--- a/lsif/src/server/backend/backend.ts
+++ b/lsif/src/server/backend/backend.ts
@@ -121,20 +121,29 @@ export class Backend {
     /**
      * Determine if data exists for a particular document.
      *
-     * @param repository The repository name.
+     * @param repositoryId The repository identifier.
+     * @param repositoryName The repository name.
      * @param commit The commit.
      * @param path The path of the document.
      * @param dumpId The identifier of the dump to load. If not supplied, the closest dump will be used.
      * @param ctx The tracing context.
      */
     public async exists(
-        repository: string,
+        repositoryId: number,
+        repositoryName: string | undefined,
         commit: string,
         path: string,
         dumpId?: number,
         ctx: TracingContext = {}
     ): Promise<pgModels.LsifDump | undefined> {
-        const closestDatabaseAndDump = await this.loadClosestDatabase(repository, commit, path, dumpId, ctx)
+        const closestDatabaseAndDump = await this.loadClosestDatabase(
+            repositoryId,
+            repositoryName,
+            commit,
+            path,
+            dumpId,
+            ctx
+        )
         if (!closestDatabaseAndDump) {
             return undefined
         }
@@ -146,7 +155,8 @@ export class Backend {
      * Return the location for the symbol at the given position. Returns undefined if no dump can
      * be loaded to answer this query.
      *
-     * @param repository The repository name.
+     * @param repositoryId The repository identifier.
+     * @param repositoryName The repository name.
      * @param commit The commit.
      * @param path The path of the document to which the position belongs.
      * @param position The current hover position.
      * @param dumpId The identifier of the dump to load. If not supplied, the closest dump will be used.
      * @param ctx The tracing context.
      */
     public async definitions(
-        repository: string,
+        repositoryId: number,
+        repositoryName: string | undefined,
         commit: string,
         path: string,
         position: lsp.Position,
         dumpId?: number,
         ctx: TracingContext = {}
     ): Promise<InternalLocation[] | undefined> {
-        const result = await this.internalDefinitions(repository, commit, path, position, dumpId, ctx)
+        const result = await this.internalDefinitions(repositoryId, repositoryName, commit, path, position, dumpId, ctx)
         if (result === undefined) {
             return undefined
         }
@@ -173,7 +184,8 @@ export class Backend {
      * Return a list of locations which reference the symbol at the given position. Returns
      * undefined if no dump can be loaded to answer this query.
      *
-     * @param repository The repository name.
+     * @param repositoryId The repository identifier.
+     * @param repositoryName The repository name.
      * @param commit The commit.
      * @param path The path of the document to which the position belongs.
      * @param position The current hover position.
@@ -182,7 +194,8 @@ export class Backend {
      * @param ctx The tracing context.
      */
     public async references(
-        repository: string,
+        repositoryId: number,
+        repositoryName: string | undefined,
         commit: string,
         path: string,
         position: lsp.Position,
@@ -190,14 +203,24 @@ export class Backend {
         dumpId?: number,
         ctx: TracingContext = {}
     ): Promise<{ locations: InternalLocation[]; cursor?: ReferencePaginationCursor } | undefined> {
-        return this.internalReferences(repository, commit, path, position, paginationContext, dumpId, ctx)
+        return this.internalReferences(
+            repositoryId,
+            repositoryName,
+            commit,
+            path,
+            position,
+            paginationContext,
+            dumpId,
+            ctx
+        )
     }
 
     /**
      * Return the hover content for the symbol at the given position. Returns undefined if no dump can
      * be loaded to answer this query.
      *
-     * @param repository The repository name.
+     * @param repositoryId The repository identifier.
+     * @param repositoryName The repository name.
      * @param commit The commit.
      * @param path The path of the document to which the position belongs.
      * @param position The current hover position.
@@ -205,17 +228,25 @@ export class Backend {
      * @param ctx The tracing context.
      */
     public async hover(
-        repository: string,
+        repositoryId: number,
+        repositoryName: string | undefined,
         commit: string,
         path: string,
         position: lsp.Position,
         dumpId?: number,
         ctx: TracingContext = {}
     ): Promise<{ text: string; range: lsp.Range } | null | undefined> {
-        const closestDatabaseAndDump = await this.loadClosestDatabase(repository, commit, path, dumpId, ctx)
+        const closestDatabaseAndDump = await this.loadClosestDatabase(
+            repositoryId,
+            repositoryName,
+            commit,
+            path,
+            dumpId,
+            ctx
+        )
         if (!closestDatabaseAndDump) {
             if (ctx.logger) {
-                ctx.logger.warn('No database could be loaded', { repository, commit, path })
+                ctx.logger.warn('No database could be loaded', { repositoryId, repositoryName, commit, path })
             }
 
             return undefined
@@ -233,7 +264,7 @@ export class Backend {
         // can happen when the indexer only gives a moniker but does not
         // give hover data for externally defined symbols.
 
-        const result = await this.internalDefinitions(repository, commit, path, position, dumpId, ctx)
+        const result = await this.internalDefinitions(repositoryId, repositoryName, commit, path, position, dumpId, ctx)
         if (result === undefined || result.locations.length === 0) {
             return null
         }
@@ -246,17 +277,25 @@ export class Backend {
     }
 
     private async internalDefinitions(
-        repository: string,
+        repositoryId: number,
+        repositoryName: string | undefined,
         commit: string,
         path: string,
         position: lsp.Position,
         dumpId?: number,
         ctx: TracingContext = {}
     ): Promise<{ dump: pgModels.LsifDump; locations: InternalLocation[] } | undefined> {
-        const closestDatabaseAndDump = await this.loadClosestDatabase(repository, commit, path, dumpId, ctx)
+        const closestDatabaseAndDump = await this.loadClosestDatabase(
+            repositoryId,
+            repositoryName,
+            commit,
+            path,
+            dumpId,
+            ctx
+        )
         if (!closestDatabaseAndDump) {
             if (ctx.logger) {
-                ctx.logger.warn('No database could be loaded', { repository, commit, path })
+                ctx.logger.warn('No database could be loaded', { repositoryId, repositoryName, commit, path })
             }
 
             return undefined
@@ -319,7 +358,8 @@ export class Backend {
     }
 
     private async internalReferences(
-        repository: string,
+        repositoryId: number,
+        repositoryName: string | undefined,
         commit: string,
         path: string,
         position: lsp.Position,
@@ -337,7 +377,7 @@ export class Backend {
 
             // Continue from previous page
             const results = await this.performRemoteReferences(
-                repository,
+                repositoryId,
                 commit,
                 paginationContext.limit,
                 paginationContext.cursor,
@@ -351,10 +391,17 @@ export class Backend {
             return { dump, locations: [] }
         }
 
-        const closestDatabaseAndDump = await this.loadClosestDatabase(repository, commit, path, dumpId, ctx)
+        const closestDatabaseAndDump = await this.loadClosestDatabase(
+            repositoryId,
+            repositoryName,
+            commit,
+            path,
+            dumpId,
+            ctx
+        )
         if (!closestDatabaseAndDump) {
             if (ctx.logger) {
-                ctx.logger.warn('No database could be loaded', { repository, commit, path })
+                ctx.logger.warn('No database could be loaded', { repositoryId, repositoryName, commit, path })
             }
 
             return undefined
@@ -428,7 +475,7 @@ export class Backend {
         }
 
         const results = await this.performRemoteReferences(
-            repository,
+            repositoryId,
             commit,
             paginationContext.limit,
             cursor,
@@ -513,7 +560,7 @@ export class Backend {
         logSpan(ctx, 'package_entity', {
             moniker,
             packageInformation,
-            packageRepository: packageEntity.dump.repository,
+            packageRepositoryId: packageEntity.dump.repositoryId,
             packageCommit: packageEntity.dump.commit,
         })
 
@@ -527,14 +574,14 @@ export class Backend {
      * other repositories. The offset into the set of results (as well as the target set of dumps)
      * depends on the exact values of the pagination cursor. This method returns the new cursor.
      *
-     * @param repository The repository name.
+     * @param repositoryId The repository identifier.
      * @param commit The target commit.
      * @param limit The maximum number of dumps to open.
     * @param cursor The pagination cursor.
     * @param ctx The tracing context.
     */
    private async performRemoteReferences(
-        repository: string,
+        repositoryId: number,
        commit: string,
        limit: number,
        cursor: ReferencePaginationCursor,
        ctx: TracingContext = {}
    ): Promise<{ locations: InternalLocation[]; newCursor?: ReferencePaginationCursor } | undefined> {
@@ -546,7 +593,7 @@ export class Backend {
         if (cursor.phase === 'same-repo') {
             const { locations, totalCount, newOffset } = await this.sameRepositoryRemoteReferences(
                 cursor.dumpId,
-                repository,
+                repositoryId,
                 commit,
                 moniker,
                 packageInformation,
@@ -566,7 +613,7 @@ export class Backend {
             // Determine if there are any valid remote dumps we will open if
             // we move onto a next page.
             const { totalCount: remoteTotalCount } = await this.dependencyManager.getReferences({
-                repository,
+                repositoryId,
                 scheme: moniker.scheme,
                 name: packageInformation.name,
                 version: packageInformation.version,
@@ -595,7 +642,7 @@ export class Backend {
 
             const { locations, totalCount, newOffset } = await this.remoteReferences(
                 cursor.dumpId,
-                repository,
+                repositoryId,
                 moniker,
                 packageInformation,
                 limit,
@@ -626,7 +673,7 @@ export class Backend {
      * queried for the target moniker.
      *
      * @param dumpId The ID of the dump for which this database answers queries.
-     * @param repository The repository for which this database answers queries.
+     * @param repositoryId The repository identifier for which this database answers queries.
      * @param moniker The target moniker.
      * @param packageInformation The target package.
      * @param limit The maximum number of remote dumps to search.
@@ -635,7 +682,7 @@ export class Backend {
      */
     private async remoteReferences(
         dumpId: pgModels.DumpId,
-        repository: string,
+        repositoryId: number,
         moniker: Pick<sqliteModels.MonikerData, 'scheme' | 'identifier'>,
         packageInformation: Pick<sqliteModels.PackageInformationData, 'name' | 'version'>,
         limit: number,
@@ -643,7 +690,7 @@ export class Backend {
         ctx: TracingContext = {}
     ): Promise<{ locations: InternalLocation[]; totalCount: number; newOffset: number }> {
         const { references, totalCount, newOffset } = await this.dependencyManager.getReferences({
-            repository,
+            repositoryId,
             scheme: moniker.scheme,
             identifier: moniker.identifier,
             name: packageInformation.name,
@@ -664,7 +711,7 @@ export class Backend {
      * dumps are opened, and their references tables are queried for the target moniker.
      *
      * @param dumpId The ID of the dump for which this database answers queries.
-     * @param repository The repository for which this database answers queries.
+     * @param repositoryId The repository identifier for which this database answers queries.
      * @param commit The commit of the references query.
      * @param moniker The target moniker.
      * @param packageInformation The target package.
@@ -674,7 +721,7 @@ export class Backend {
      */
     private async sameRepositoryRemoteReferences(
         dumpId: pgModels.DumpId,
-        repository: string,
+        repositoryId: number,
         commit: string,
         moniker: Pick<sqliteModels.MonikerData, 'scheme' | 'identifier'>,
         packageInformation: Pick<sqliteModels.PackageInformationData, 'name' | 'version'>,
@@ -683,7 +730,7 @@ export class Backend {
         ctx: TracingContext = {}
     ): Promise<{ locations: InternalLocation[]; totalCount: number; newOffset: number }> {
         const { references, totalCount, newOffset } = await this.dependencyManager.getSameRepoRemoteReferences({
-            repository,
+            repositoryId,
             commit,
             scheme: moniker.scheme,
             identifier: moniker.identifier,
@@ -713,7 +760,7 @@ export class Backend {
         ctx: TracingContext = {}
     ): Promise<InternalLocation[]> {
         logSpan(ctx, 'package_references', {
-            references: dumps.map(d => ({ repository: d.repository, commit: d.commit })),
+            references: dumps.map(d => ({ repositoryId: d.repositoryId, commit: d.commit })),
         })
 
         let locations: InternalLocation[] = []
@@ -741,14 +788,16 @@ export class Backend {
      * be used in all downstream requests so that the original commit and the effective commit
      * are both known.
      *
-     * @param repository The repository name.
+     * @param repositoryId The repository identifier.
+     * @param repositoryName The repository name.
      * @param commit The target commit.
      * @param file One of the files in the dump.
      * @param dumpId The identifier of the dump to load. If not supplied, the closest dump will be used.
      * @param ctx The tracing context.
      */
     private async loadClosestDatabase(
-        repository: string,
+        repositoryId: number,
+        repositoryName: string | undefined,
         commit: string,
         file: string,
         dumpId?: number,
@@ -759,7 +808,16 @@ export class Backend {
         // data for this commit.
         const dump = await (dumpId
             ? this.dumpManager.getDumpById(dumpId)
-            : this.dumpManager.findClosestDump(repository, commit, file, ctx, this.fetchConfiguration().gitServers))
+            : repositoryName
+            ? this.dumpManager.findClosestDump(
+                  repositoryId,
+                  repositoryName,
+                  commit,
+                  file,
+                  ctx,
+                  this.fetchConfiguration().gitServers
+              )
+            : undefined)
 
         if (dump) {
             return { database: this.createDatabase(dump), dump, ctx: addTags(ctx, { closestCommit: dump.commit }) }
@@ -779,7 +837,7 @@ export class Backend {
             this.documentCache,
             this.resultChunkCache,
             dump,
-            dbFilename(this.storageRoot, dump.id, dump.repository, dump.commit)
+            dbFilename(this.storageRoot, dump.id)
         )
     }
 }
diff --git a/lsif/src/server/backend/database.test.ts b/lsif/src/server/backend/database.test.ts
index 5199a4d124e2..32c36ff8b429 100644
--- a/lsif/src/server/backend/database.test.ts
+++ b/lsif/src/server/backend/database.test.ts
@@ -116,7 +116,8 @@ describe('mapRangesToInternalLocations', () => {
     it('should map ranges to locations', () => {
         const dump = {
             id: 42,
-            repository: 'test-repo',
+            repositoryId: 1234,
+            repositoryNameAtUpload: 'foo/bar',
             commit: 'deadbeef',
             root: '',
             filename: '',
diff --git a/lsif/src/server/routes/lsif.ts b/lsif/src/server/routes/lsif.ts
index 980f3997e459..67559f62cb8b 100644
--- a/lsif/src/server/routes/lsif.ts
+++ b/lsif/src/server/routes/lsif.ts
@@ -64,7 +64,8 @@ export function createLsifRouter(
         ): TracingContext => addTags({ logger, span: req.span }, tags)
 
     interface UploadQueryArgs {
-        repository: string
+        repositoryId: number
+        repositoryName: string
         commit: string
         root?: string
         blocking?: boolean
@@ -74,7 +75,8 @@ export function createLsifRouter(
     router.post(
         '/upload',
         validation.validationMiddleware([
-            validation.validateNonEmptyString('repository'),
+            validation.validateInt('repositoryId'),
+            validation.validateNonEmptyString('repositoryName'),
             validation.validateNonEmptyString('commit').matches(commitPattern),
             validation.validateOptionalString('root'),
             validation.validateOptionalBoolean('blocking'),
@@ -82,18 +84,29 @@ export function createLsifRouter(
         ]),
         wrap(
             async (req: express.Request, res: express.Response): Promise<void> => {
-                const { repository, commit, root: rootRaw, blocking, maxWait }: UploadQueryArgs = req.query
+                const {
+                    repositoryId,
+                    repositoryName,
+                    commit,
+                    root: rootRaw,
+                    blocking,
+                    maxWait,
+                }: UploadQueryArgs = req.query
                 const root = sanitizeRoot(rootRaw)
-                const ctx = createTracingContext(req, { repository, commit, root })
+                const ctx = createTracingContext(req, { repositoryId, commit, root })
                 const filename = nodepath.join(settings.STORAGE_ROOT, constants.UPLOADS_DIR, uuid.v4())
                 const output = fs.createWriteStream(filename)
                 await logAndTraceCall(ctx, 'Uploading dump', () => pipeline(req, output))
 
                 // Add upload record
-                const upload = await uploadManager.enqueue({ repository, commit, root, filename }, tracer, ctx.span)
+                const upload = await uploadManager.enqueue(
+                    { repositoryId, repositoryName, commit, root, filename },
+                    tracer,
+                    ctx.span
+                )
 
                 if (blocking) {
-                    logger.debug('Blocking on upload conversion', { repository, commit, root })
+                    logger.debug('Blocking on upload conversion', { repositoryId, commit, root })
 
                     if (await uploadManager.waitForUploadToConvert(upload.id, maxWait)) {
                         // Upload converted successfully while blocked, send success
@@ -111,7 +124,8 @@ export function createLsifRouter(
     )
 
     interface ExistsQueryArgs {
-        repository: string
+        repositoryId: number
+        repositoryName: string | undefined
         commit: string
         path: string
     }
@@ -119,22 +133,24 @@ export function createLsifRouter(
     router.get(
         '/exists',
         validation.validationMiddleware([
-            validation.validateNonEmptyString('repository'),
+            validation.validateInt('repositoryId'),
+            validation.validateOptionalString('repositoryName'),
             validation.validateNonEmptyString('commit').matches(commitPattern),
             validation.validateNonEmptyString('path'),
         ]),
         wrap(
             async (req: express.Request, res: express.Response): Promise<void> => {
-                const { repository, commit, path }: ExistsQueryArgs = req.query
-                const ctx = createTracingContext(req, { repository, commit })
-                const upload = await backend.exists(repository, commit, path, undefined, ctx)
+                const { repositoryId, repositoryName, commit, path }: ExistsQueryArgs = req.query
+                const ctx = createTracingContext(req, { repositoryId, commit })
+                const upload = await backend.exists(repositoryId, repositoryName, commit, path, undefined, ctx)
                 res.json({ upload })
             }
         )
     )
 
     interface FilePositionArgs {
-        repository: string
+        repositoryId: number
+        repositoryName: string | undefined
         commit: string
         path: string
         line: number
@@ -145,7 +161,8 @@ export function createLsifRouter(
     router.get(
         '/definitions',
         validation.validationMiddleware([
-            validation.validateNonEmptyString('repository'),
+            validation.validateInt('repositoryId'),
+            validation.validateOptionalString('repositoryName'),
             validation.validateNonEmptyString('commit'),
             validation.validateNonEmptyString('path'),
             validation.validateInt('line'),
@@ -154,11 +171,20 @@ export function createLsifRouter(
         ]),
         wrap(
             async (req: express.Request, res: express.Response): Promise<void> => {
-                const { repository, commit, path, line, character, uploadId }: FilePositionArgs = req.query
-                const ctx = createTracingContext(req, { repository, commit, path })
+                const {
+                    repositoryId,
+                    repositoryName,
+                    commit,
+                    path,
+                    line,
+                    character,
+                    uploadId,
+                }: FilePositionArgs = req.query
+                const ctx = createTracingContext(req, { repositoryId, commit, path })
 
                 const locations = await backend.definitions(
-                    repository,
+                    repositoryId,
+                    repositoryName,
                     commit,
                     path,
                     { line, character },
@@ -171,7 +197,7 @@ export function createLsifRouter(
 
                 res.send({
                     locations: locations.map(l => ({
-                        repository: l.dump.repository,
+                        repositoryId: l.dump.repositoryId,
                         commit: l.dump.commit,
                         path: l.path,
                         range: l.range,
@@ -189,7 +215,8 @@ export function createLsifRouter(
     router.get(
         '/references',
         validation.validationMiddleware([
-            validation.validateNonEmptyString('repository'),
+            validation.validateInt('repositoryId'),
+            validation.validateOptionalString('repositoryName'),
             validation.validateNonEmptyString('commit'),
             validation.validateNonEmptyString('path'),
             validation.validateInt('line'),
@@ -200,12 +227,22 @@ export function createLsifRouter(
         ]),
         wrap(
             async (req: express.Request, res: express.Response): Promise<void> => {
-                const { repository, commit, path, line, character, uploadId, cursor }: ReferencesQueryArgs = req.query
+                const {
+                    repositoryId,
+                    repositoryName,
+                    commit,
+                    path,
+                    line,
+                    character,
+                    uploadId,
+                    cursor,
+                }: ReferencesQueryArgs = req.query
                 const { limit } = extractLimitOffset(req.query, settings.DEFAULT_REFERENCES_NUM_REMOTE_DUMPS)
-                const ctx = createTracingContext(req, { repository, commit, path })
+                const ctx = createTracingContext(req, { repositoryId, commit, path })
 
                 const result = await backend.references(
-                    repository,
+                    repositoryId,
+                    repositoryName,
                     commit,
                     path,
                     { line, character },
@@ -225,7 +262,7 @@ export function createLsifRouter(
 
                 res.json({
                     locations: locations.map(l => ({
-                        repository: l.dump.repository,
+                        repositoryId: l.dump.repositoryId,
                         commit: l.dump.commit,
                         path: l.path,
                         range: l.range,
@@ -238,7 +275,8 @@ export function createLsifRouter(
     router.get(
         '/hover',
         validation.validationMiddleware([
-            validation.validateNonEmptyString('repository'),
+            validation.validateInt('repositoryId'),
+            validation.validateOptionalString('repositoryName'),
             validation.validateNonEmptyString('commit'),
             validation.validateNonEmptyString('path'),
             validation.validateInt('line'),
@@ -247,10 +285,26 @@ export function createLsifRouter(
         ]),
         wrap(
             async (req: express.Request, res: express.Response): Promise<void> => {
-                const { repository, commit, path, line, character, uploadId }: FilePositionArgs = req.query
-                const ctx = createTracingContext(req, { repository, commit, path })
+                const {
+                    repositoryId,
+                    repositoryName,
+                    commit,
+                    path,
+                    line,
+                    character,
+                    uploadId,
+                }: FilePositionArgs = req.query
+                const ctx = createTracingContext(req, { repositoryId, commit, path })
 
-                const result = await backend.hover(repository, commit, path, { line, character }, uploadId, ctx)
+                const result = await backend.hover(
+                    repositoryId,
+                    repositoryName,
+                    commit,
+                    path,
+                    { line, character },
+                    uploadId,
+                    ctx
+                )
                 if (result === undefined) {
                     throw Object.assign(new Error('LSIF upload not found'), { status: 404 })
                 }
diff --git a/lsif/src/server/routes/uploads.ts b/lsif/src/server/routes/uploads.ts
index 4337a0166604..852744bc8f68 100644
--- a/lsif/src/server/routes/uploads.ts
+++ b/lsif/src/server/routes/uploads.ts
@@ -55,7 +55,7 @@ export function createUploadRouter(uploadManager: UploadManager): express.Router
     )
 
     router.get(
-        '/uploads/:repository',
+        '/uploads/repository/:id([0-9]+)',
         validation.validationMiddleware([
             validation.validateQuery,
             validation.validateLsifUploadState,
@@ -68,7 +68,7 @@ export function createUploadRouter(uploadManager: UploadManager): express.Router
             const { query, state, visibleAtTip }: UploadsQueryArgs = req.query
             const { limit, offset } = extractLimitOffset(req.query, settings.DEFAULT_UPLOAD_PAGE_SIZE)
             const { uploads, totalCount } = await uploadManager.getUploads(
-                decodeURIComponent(req.params.repository),
+                parseInt(req.params.id, 10),
                 state,
                 query,
                 !!visibleAtTip,
diff --git a/lsif/src/server/server.ts b/lsif/src/server/server.ts
index 5843d2ce0b90..9e8967fb4467 100644
--- a/lsif/src/server/server.ts
+++ b/lsif/src/server/server.ts
@@ -3,18 +3,16 @@ import * as fs from 'mz/fs'
 import * as metrics from './metrics'
 import * as path from 'path'
 import * as settings from './settings'
-import * as pgModels from '../shared/models/pg'
 import express from 'express'
 import promClient from 'prom-client'
 import { Backend } from './backend/backend'
-import { Connection } from 'typeorm'
 import { createLogger } from '../shared/logging'
 import { createLsifRouter } from './routes/lsif'
 import { createMetaRouter } from './routes/meta'
 import { createPostgresConnection } from '../shared/database/postgres'
 import { createTracer } from '../shared/tracing'
 import { createUploadRouter } from './routes/uploads'
-import { dbFilename, dbFilenameOld, ensureDirectory } from '../shared/paths'
+import { dbFilename, ensureDirectory, idFromFilename } from '../shared/paths'
 import { default as tracingMiddleware } from 'express-opentracing'
 import { errorHandler } from './middleware/errors'
 import { logger as loggingMiddleware } from 'express-winston'
@@ -59,9 +57,9 @@ async function main(logger: Logger): Promise<void> {
     const dependencyManager = new DependencyManager(connection)
     const backend = new Backend(settings.STORAGE_ROOT, dumpManager, dependencyManager, fetchConfiguration)
 
-    // Temporary migrations
-    await moveDatabaseFilesToSubdir() // TODO - remove after 3.12
-    await ensureFilenamesAreIDs(connection) // TODO - remove after 3.10
+    // Temporary migration
+    // TODO - remove after 3.15
+    await migrateFilenames()
 
     // Start background tasks
     startTasks(connection, dumpManager, uploadManager, logger)
@@ -95,37 +93,26 @@ async function main(logger: Logger): Promise<void> {
 }
 
 /**
- * Move all db files in storage root to a subdirectory.
+ * If it hasn't been done already, migrate from the old pre-3.13 filename format
+ * `$ID-$REPO@$COMMIT.lsif.db` to the new format `$ID.lsif.db`.
  */
-async function moveDatabaseFilesToSubdir(): Promise<void> {
-    for (const filename of await fs.readdir(settings.STORAGE_ROOT)) {
-        if (filename.endsWith('.db')) {
-            await fs.rename(
-                path.join(settings.STORAGE_ROOT, filename),
-                path.join(settings.STORAGE_ROOT, constants.DBS_DIR, filename)
-            )
-        }
-    }
-}
-
-/**
- * If it hasn't been done already, migrate from the old pre-3.9 filename format
- * `$REPO@$COMMIT.lsif.db` to the new format `$ID.lsif.db`.
- */
-async function ensureFilenamesAreIDs(db: Connection): Promise<void> {
-    const doneFile = path.join(settings.STORAGE_ROOT, 'id-based-filenames')
+async function migrateFilenames(): Promise<void> {
+    const doneFile = path.join(settings.STORAGE_ROOT, 'id-only-based-filenames')
     if (await fs.exists(doneFile)) {
         // Already migrated.
         return
     }
 
-    for (const dump of await db.getRepository(pgModels.LsifDump).find()) {
-        const oldFile = dbFilenameOld(settings.STORAGE_ROOT, dump.repository, dump.commit)
-        const newFile = dbFilename(settings.STORAGE_ROOT, dump.id, dump.repository, dump.commit)
-        if (!(await fs.exists(oldFile))) {
+    for (const basename of await fs.readdir(path.join(settings.STORAGE_ROOT, constants.DBS_DIR))) {
+        const id = idFromFilename(basename)
+        if (!id) {
             continue
         }
-        await fs.rename(oldFile, newFile)
+
+        await fs.rename(
+            path.join(settings.STORAGE_ROOT, constants.DBS_DIR, basename),
+            dbFilename(settings.STORAGE_ROOT, id)
+        )
     }
 
     // Create an empty done file to record that all files have been renamed.
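Reviewer note: a quick sanity sketch of the rename performed by `migrateFilenames` above, mirroring the logic of `idFromFilename` introduced in `lsif/src/shared/paths.ts` (further down in this patch). The sample filenames are made up:

```ts
import * as path from 'path'

// Mirrors idFromFilename: strip the final extension, then take
// everything before the first hyphen and parse it as an integer.
function idFromFilename(filename: string): number | undefined {
    const id = parseInt(path.parse(filename).name.split('-')[0], 10)
    return isNaN(id) ? undefined : id
}

// Old format `$ID-$REPO@$COMMIT.lsif.db` (repo name is URI-encoded):
idFromFilename('42-github.com%2Fsourcegraph%2Flsif-go@deadbeef.lsif.db') // => 42
// New format `$ID.lsif.db`:
idFromFilename('42.lsif.db') // => 42
// Names without a leading numeric id yield undefined and are skipped by callers:
idFromFilename('not-a-dump.db') // => undefined
```

Because the leading id is preserved in both formats, the migration only has to rename files, never consult the database, which is why the old `ensureFilenamesAreIDs` connection parameter could be dropped.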
diff --git a/lsif/src/server/tasks/uploads.ts b/lsif/src/server/tasks/uploads.ts
index eb08f5b20fbb..5f69548f5dd0 100644
--- a/lsif/src/server/tasks/uploads.ts
+++ b/lsif/src/server/tasks/uploads.ts
@@ -9,7 +9,7 @@ import { TracingContext } from '../../shared/tracing'
 import { UploadManager } from '../../shared/store/uploads'
 import { withLock } from '../../shared/store/locks'
 import { DumpManager } from '../../shared/store/dumps'
-import { dbFilename } from '../../shared/paths'
+import { dbFilename, idFromFilename } from '../../shared/paths'
 import { Connection } from 'typeorm'
 
 /**
@@ -96,13 +96,13 @@ export function purgeOldDumps(
         }
 
         logger.info('Pruning dump', {
-            repository: dump.repository,
+            repositoryId: dump.repositoryId,
             commit: dump.commit,
             root: dump.root,
         })
 
         // Delete this dump and subtract its size from the current dir size
-        const filename = dbFilename(storageRoot, dump.id, dump.repository, dump.commit)
+        const filename = dbFilename(storageRoot, dump.id)
         currentSizeBytes -= await filesize(filename)
 
         // This delete cascades to the packages and references tables as well
@@ -136,10 +136,12 @@ async function removeDeadDumps(
     )) {
         const pathsById = new Map<number, string>()
         for (const basename of basenames) {
-            const id = parseInt(basename.split('-')[0], 10)
-            if (!isNaN(id)) {
-                pathsById.set(id, path.join(storageRoot, constants.DBS_DIR, basename))
+            const id = idFromFilename(basename)
+            if (!id) {
+                continue
             }
+
+            pathsById.set(id, path.join(storageRoot, constants.DBS_DIR, basename))
         }
 
         const states = await dumpManager.getUploadStates(Array.from(pathsById.keys()))
diff --git a/lsif/src/shared/database/postgres.ts b/lsif/src/shared/database/postgres.ts
index 0ff7e8aed09c..50fef766a7e3 100644
--- a/lsif/src/shared/database/postgres.ts
+++ b/lsif/src/shared/database/postgres.ts
@@ -16,7 +16,7 @@ import { TlsOptions } from 'tls'
  * version prior to making use of the DB (which the frontend may still be
 * migrating).
 */
-const MINIMUM_MIGRATION_VERSION = 1528395634
+const MINIMUM_MIGRATION_VERSION = 1528395637
 
 /**
  * How many times to try to check the current database migration version on startup.
diff --git a/lsif/src/shared/models/pg.ts b/lsif/src/shared/models/pg.ts
index 92c14855e8fa..58709362894b 100644
--- a/lsif/src/shared/models/pg.ts
+++ b/lsif/src/shared/models/pg.ts
@@ -32,10 +32,18 @@ export class LsifUpload {
     public id!: DumpId
 
     /**
-     * The name of the source repository.
+     * The internal identifier of the source repository.
     */
-    @Column('text')
-    public repository!: string
+    @Column('int', { name: 'repository_id' })
+    public repositoryId!: number
+
+    /**
+     * The name of the source repository at the time of upload. This field should
+     * only be used to interface with gitserver when processing the upload. Every
+     * other operation should be performed only in terms of repository ids.
+     */
+    @Column('text', { name: 'repository_name_at_upload' })
+    public repositoryNameAtUpload!: string
 
     /**
      * The source commit.
@@ -137,10 +145,10 @@ export class Commit {
     public id!: number
 
     /**
-     * The name of the source repository.
+     * The internal identifier of the source repository.
     */
-    @Column('text')
-    public repository!: string
+    @Column('int', { name: 'repository_id' })
+    public repositoryId!: number
 
     /**
      * The source commit.
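Reviewer note: to illustrate the intended division of labor between the two new columns, with lookups keyed by `repositoryId` and `repositoryNameAtUpload` reserved for gitserver interactions during processing, here is a sketch assuming a TypeORM `Connection` and the entity import path; the helper name is hypothetical:

```ts
import { Connection } from 'typeorm'
import * as pgModels from './pg' // assumed relative import of the models above

// Fetch all uploads for a repository by its internal identifier. Looking
// rows up by name would break as soon as a repository is renamed, which
// is the motivation for this whole migration.
function getUploadsForRepository(connection: Connection, repositoryId: number): Promise<pgModels.LsifUpload[]> {
    return connection.getRepository(pgModels.LsifUpload).find({ where: { repositoryId } })
}
```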
diff --git a/lsif/src/shared/models/queries.ts b/lsif/src/shared/models/queries.ts index 4ca66c6ed289..c61aef922ddb 100644 --- a/lsif/src/shared/models/queries.ts +++ b/lsif/src/shared/models/queries.ts @@ -2,33 +2,33 @@ import { MAX_TRAVERSAL_LIMIT } from '../constants' /** * Return a recursive CTE `lineage` that returns ancestors of the commit for the given - * repository. This assumes that the repo name is $1 and the commit is $2. + * repository. This assumes that the repository identifier is $1 and the commit is $2. */ export function ancestorLineage(): string { return ` - RECURSIVE lineage(id, repository, "commit", parent) AS ( - SELECT c.* FROM lsif_commits c WHERE c.repository = $1 AND c."commit" = $2 + RECURSIVE lineage(id, "commit", parent, repository_id) AS ( + SELECT c.* FROM lsif_commits c WHERE c.repository_id = $1 AND c."commit" = $2 UNION - SELECT c.* FROM lineage a JOIN lsif_commits c ON a.repository = c.repository AND a.parent = c."commit" + SELECT c.* FROM lineage a JOIN lsif_commits c ON a.repository_id = c.repository_id AND a.parent = c."commit" ) ` } /** * Return a recursive CTE `lineage` that returns ancestors and descendants of the commit for - * the given repository. This assumes that the repo name is $1 and the commit is $2. This - * happens to evaluate in Postgres as a lazy generator, which allows us to pull the "next" + * the given repository. This assumes that the repository identifier is $1 and the commit is $2. + * This happens to evaluate in Postgres as a lazy generator, which allows us to pull the "next" * closest commit in either direction from the source commit as needed. */ export function bidirectionalLineage(): string { return ` - RECURSIVE lineage(id, repository, "commit", parent_commit, direction) AS ( + RECURSIVE lineage(id, "commit", parent_commit, repository_id, direction) AS ( SELECT l.* FROM ( -- seed recursive set with commit looking in ancestor direction - SELECT c.*, 'A' FROM lsif_commits c WHERE c.repository = $1 AND c."commit" = $2 + SELECT c.*, 'A' FROM lsif_commits c WHERE c.repository_id = $1 AND c."commit" = $2 UNION -- seed recursive set with commit looking in descendant direction - SELECT c.*, 'D' FROM lsif_commits c WHERE c.repository = $1 AND c."commit" = $2 + SELECT c.*, 'D' FROM lsif_commits c WHERE c.repository_id = $1 AND c."commit" = $2 ) l UNION @@ -36,10 +36,10 @@ export function bidirectionalLineage(): string { SELECT * FROM ( WITH l_inner AS (SELECT * FROM lineage) -- get next ancestors (multiple parents for merge commits) - SELECT c.*, 'A' FROM l_inner l JOIN lsif_commits c ON l.direction = 'A' AND c.repository = l.repository AND c."commit" = l.parent_commit + SELECT c.*, 'A' FROM l_inner l JOIN lsif_commits c ON l.direction = 'A' AND c.repository_id = l.repository_id AND c."commit" = l.parent_commit UNION -- get next descendants - SELECT c.*, 'D' FROM l_inner l JOIN lsif_commits c ON l.direction = 'D' and c.repository = l.repository AND c.parent_commit = l."commit" + SELECT c.*, 'D' FROM l_inner l JOIN lsif_commits c ON l.direction = 'D' and c.repository_id = l.repository_id AND c.parent_commit = l."commit" ) subquery ) ` @@ -62,7 +62,7 @@ export function lineageWithDumps(limit: number = MAX_TRAVERSAL_LIMIT): string { -- Correlate commits to dumps and filter out commits without LSIF data lineage_with_dumps AS ( SELECT a.*, d.root, d.id as dump_id FROM limited_lineage a - JOIN lsif_dumps d ON d.repository = a.repository AND d."commit" = a."commit" + JOIN lsif_dumps d ON d.repository_id = a.repository_id AND d."commit" = 
a."commit" ) ` } diff --git a/lsif/src/shared/paths.ts b/lsif/src/shared/paths.ts index 1a74375f0c20..7f7f913fb9f4 100644 --- a/lsif/src/shared/paths.ts +++ b/lsif/src/shared/paths.ts @@ -8,19 +8,26 @@ import * as path from 'path' * @param storageRoot The path where SQLite databases are stored. * @param id The ID of the dump. */ -export function dbFilename(storageRoot: string, id: number, repository: string, commit: string): string { - return path.join(storageRoot, constants.DBS_DIR, `${id}-${encodeURIComponent(repository)}@${commit}.lsif.db`) +export function dbFilename(storageRoot: string, id: number): string { + return path.join(storageRoot, constants.DBS_DIR, `${id}.lsif.db`) } /** - * Construct the path of the SQLite database file for the given repository and commit. + * Returns the identifier of the database file. Handles both of the + * following formats: * - * @param storageRoot The path where SQLite databases are stored. - * @param repository The repository name. - * @param commit The repository commit. + * - `{id}.lsif.db` + * - `{id}-{repo}-{commit}.lsif.db` + * + * @param filename The filename. */ -export function dbFilenameOld(storageRoot: string, repository: string, commit: string): string { - return path.join(storageRoot, `${encodeURIComponent(repository)}@${commit}.lsif.db`) +export function idFromFilename(filename: string): number | undefined { + const id = parseInt(path.parse(filename).name.split('-')[0], 10) + if (!isNaN(id)) { + return id + } + + return undefined } /** diff --git a/lsif/src/shared/store/dependencies.ts b/lsif/src/shared/store/dependencies.ts index 4da0281cd873..7dd57ecc9ada 100644 --- a/lsif/src/shared/store/dependencies.ts +++ b/lsif/src/shared/store/dependencies.ts @@ -101,7 +101,7 @@ export class DependencyManager { * @param args Parameter bag. */ public getReferences({ - repository, + repositoryId, scheme, name, version, @@ -110,8 +110,8 @@ export class DependencyManager { offset, ctx = {}, }: { - /** The source repository of the search. */ - repository: string + /** The identifier of the source repository of the search. */ + repositoryId: number /** The package manager scheme (e.g. npm, pip). */ scheme: string /** The package name. */ @@ -138,7 +138,7 @@ export class DependencyManager { .createQueryBuilder('reference') .leftJoinAndSelect('reference.dump', 'dump') .where({ scheme, name, version }) - .andWhere('dump.repository != :repository', { repository }) + .andWhere('dump.repository_id != :repositoryId', { repositoryId }) .andWhere('dump.visible_at_tip = true') // Get total number of items in this set of results @@ -147,7 +147,7 @@ export class DependencyManager { // Construct method to select a page of possible references const getPage = (pageOffset: number): Promise => baseQuery - .orderBy('dump.repository') + .orderBy('dump.repository_id') .addOrderBy('dump.root') .limit(limit) .offset(pageOffset) @@ -179,7 +179,7 @@ export class DependencyManager { * @param args Parameter bag. */ public getSameRepoRemoteReferences({ - repository, + repositoryId, commit, scheme, name, @@ -189,8 +189,8 @@ export class DependencyManager { offset, ctx = {}, }: { - /** The source repository of the search. */ - repository: string + /** The identifier of the source repository of the search. */ + repositoryId: number /** The commit of the references query. */ commit: string /** The package manager scheme (e.g. npm, pip). */ @@ -238,7 +238,7 @@ export class DependencyManager { // and the getPage queries. 
The results of this query do not change based on // the page size or offset, so we query it separately here and pass the result // as a parameter. - const visible_ids = extractIds(await entityManager.query(visibleIdsQuery, [repository, commit])) + const visible_ids = extractIds(await entityManager.query(visibleIdsQuery, [repositoryId, commit])) // Get total number of items in this set of results const rawCount: { count: string }[] = await entityManager.query(countQuery, [ diff --git a/lsif/src/shared/store/dumps.ts b/lsif/src/shared/store/dumps.ts index 7d2c9fc16e96..ca04fc8cc91f 100644 --- a/lsif/src/shared/store/dumps.ts +++ b/lsif/src/shared/store/dumps.ts @@ -4,7 +4,7 @@ import { addrFor, getCommitsNear, getHead } from '../gitserver/gitserver' import { Brackets, Connection, EntityManager } from 'typeorm' import { dbFilename, tryDeleteFile } from '../paths' import { logAndTraceCall, TracingContext } from '../tracing' -import { instrumentQuery, instrumentQueryOrTransaction } from '../database/postgres' +import { instrumentQuery, instrumentQueryOrTransaction, withInstrumentedTransaction } from '../database/postgres' import { TableInserter } from '../database/inserter' import { visibleDumps, lineageWithDumps, ancestorLineage, bidirectionalLineage } from '../models/queries' @@ -31,17 +31,17 @@ export class DumpManager { /** * Find the dump for the given repository and commit. * - * @param repository The repository. + * @param repositoryId The repository identifier. * @param commit The commit. * @param file A filename that should be included in the dump. */ - public getDump(repository: string, commit: string, file: string): Promise<pgModels.LsifDump | undefined> { + public getDump(repositoryId: number, commit: string, file: string): Promise<pgModels.LsifDump | undefined> { return instrumentQuery(() => this.connection .getRepository(pgModels.LsifDump) .createQueryBuilder() .select() - .where({ repository, commit }) + .where({ repositoryId, commit }) .andWhere(":file LIKE (root || '%')", { file }) .getOne() ) @@ -81,15 +81,15 @@ export class DumpManager { /** * Find the visible dumps. This method is used for testing. * - * @param repository The repository. + * @param repositoryId The repository identifier. */ - public getVisibleDumps(repository: string): Promise<pgModels.LsifDump[]> { + public getVisibleDumps(repositoryId: number): Promise<pgModels.LsifDump[]> { return instrumentQuery(() => this.connection .getRepository(pgModels.LsifDump) .createQueryBuilder() .select() - .where({ repository, visibleAtTip: true }) + .where({ repositoryId, visibleAtTip: true }) .getMany() ) } @@ -117,14 +117,16 @@ export class DumpManager { * Return the dump 'closest' to the given target commit (a direct descendant or ancestor of * the target commit). If no closest commit can be determined, this method returns undefined. * - * @param repository The repository name. + * @param repositoryId The repository identifier. + * @param repositoryName The repository name. * @param commit The target commit. * @param file One of the files in the dump. * @param ctx The tracing context. * @param gitserverUrls The set of ordered gitserver urls. */ public async findClosestDump( - repository: string, + repositoryId: number, + repositoryName: string, commit: string, file: string, ctx: TracingContext = {}, @@ -136,8 +138,8 @@ export class DumpManager { // the necessary data for the following query.
if (gitserverUrls) { await this.updateCommits( - repository, - await this.discoverCommits({ repository, commit, gitserverUrls, ctx }), + repositoryId, + await this.discoverCommits({ repositoryId, repositoryName, commit, gitserverUrls, ctx }), ctx ) } @@ -148,22 +150,19 @@ export class DumpManager { ${bidirectionalLineage()}, ${lineageWithDumps()} - SELECT * from lsif_dumps WHERE id IN ( - SELECT d.dump_id FROM lineage_with_dumps d - WHERE $3 LIKE (d.root || '%') - ORDER BY d.n LIMIT 1 - ); + SELECT d.dump_id FROM lineage_with_dumps d + WHERE $3 LIKE (d.root || '%') + ORDER BY d.n LIMIT 1 ` - const results: pgModels.LsifDump[] = await instrumentQuery(() => - this.connection.query(query, [repository, commit, file]) - ) + return withInstrumentedTransaction(this.connection, async entityManager => { + const results: { dump_id: number }[] = await entityManager.query(query, [repositoryId, commit, file]) + if (results.length > 0) { + return entityManager.getRepository(pgModels.LsifDump).findOne(results[0].dump_id) + } - if (results.length === 0) { return undefined - } - - return results[0] + }) }) } @@ -176,13 +175,13 @@ export class DumpManager { * that has a root that overlaps with the filtered dump. The other such dump is * necessarily a dump associated with a closer commit for the same root. * - * @param repository The repository name. + * @param repositoryId The repository identifier. * @param commit The head of the default branch. * @param ctx The tracing context. * @param entityManager The EntityManager to use as part of a transaction. */ public updateDumpsVisibleFromTip( - repository: string, + repositoryId: number, commit: string, ctx: TracingContext = {}, entityManager: EntityManager = this.connection.createEntityManager() ): Promise<void> { @@ -197,11 +196,11 @@ -- (2) setting the visibility flag of all currently visible dumps UPDATE lsif_dumps d SET visible_at_tip = id IN (SELECT * from visible_ids) - WHERE d.repository = $1 AND (d.id IN (SELECT * from visible_ids) OR d.visible_at_tip) + WHERE d.repository_id = $1 AND (d.id IN (SELECT * from visible_ids) OR d.visible_at_tip) ` return logAndTraceCall(ctx, 'Updating dumps visible from tip', () => - instrumentQuery(() => entityManager.query(query, [repository, commit])) + instrumentQuery(() => entityManager.query(query, [repositoryId, commit])) ) } @@ -210,13 +209,13 @@ * a set of parent commits. Commits without a parent should have an empty set of parents, but * should still be present in the map. * - * @param repository The repository name. + * @param repositoryId The repository identifier. * @param commits The commit parentage data. * @param ctx The tracing context. * @param entityManager The EntityManager to use as part of a transaction. */ public updateCommits( - repository: string, + repositoryId: number, commits: Map<string, Set<string>>, ctx: TracingContext = {}, entityManager?: EntityManager @@ -233,11 +232,11 @@ for (const [commit, parentCommits] of commits) { if (parentCommits.size === 0) { - await commitInserter.insert({ repository, commit, parentCommit: null }) + await commitInserter.insert({ repositoryId, commit, parentCommit: null }) } for (const parentCommit of parentCommits) { - await commitInserter.insert({ repository, commit, parentCommit }) + await commitInserter.insert({ repositoryId, commit, parentCommit }) } } @@ -256,13 +255,16 @@ export class DumpManager { * @param args Parameter bag.
*/ public async discoverCommits({ - repository, + repositoryId, + repositoryName, commit, gitserverUrls, ctx = {}, }: { + /** The repository identifier. */ + repositoryId: number /** The repository name. */ - repository: string + repositoryName: string /** The commit from which the gitserver queries should start. */ commit: string /** The set of ordered gitserver urls. */ @@ -271,20 +273,20 @@ ctx?: TracingContext }): Promise<Map<string, Set<string>>> { const matchingRepos = await instrumentQuery(() => - this.connection.getRepository(pgModels.LsifUpload).count({ where: { repository } }) + this.connection.getRepository(pgModels.LsifUpload).count({ where: { repositoryId } }) ) if (matchingRepos === 0) { return new Map() } const matchingCommits = await instrumentQuery(() => - this.connection.getRepository(pgModels.Commit).count({ where: { repository, commit } }) + this.connection.getRepository(pgModels.Commit).count({ where: { repositoryId, commit } }) ) if (matchingCommits > 0) { return new Map() } - return getCommitsNear(addrFor(repository, gitserverUrls), repository, commit, ctx) + return getCommitsNear(addrFor(repositoryName, gitserverUrls), repositoryName, commit, ctx) } /** @@ -293,19 +295,19 @@ * @param args Parameter bag. */ public discoverTip({ - repository, + repositoryName, gitserverUrls, ctx = {}, }: { /** The repository name. */ - repository: string + repositoryName: string /** The set of ordered gitserver urls. */ gitserverUrls: string[] /** The tracing context. */ ctx?: TracingContext }): Promise<string | undefined> { return logAndTraceCall(ctx, 'Getting repository metadata', () => - getHead(addrFor(repository, gitserverUrls), repository, ctx) + getHead(addrFor(repositoryName, gitserverUrls), repositoryName, ctx) ) } @@ -313,14 +315,14 @@ * Delete existing dumps from the same repo@commit that overlap with the current root * (where the existing root is a prefix of the current root, or vice versa). * - * @param repository The repository. + * @param repositoryId The repository identifier. * @param commit The commit. * @param root The root of all files that are in this dump. * @param ctx The tracing context. * @param entityManager The EntityManager to use as part of a transaction. */ public async deleteOverlappingDumps( - repository: string, + repositoryId: number, commit: string, root: string, ctx: TracingContext = {}, @@ -332,7 +334,7 @@ .getRepository(pgModels.LsifUpload) .createQueryBuilder() .delete() - .where({ repository, commit, state: 'completed' }) + .where({ repositoryId, commit, state: 'completed' }) .andWhere( new Brackets(qb => qb.where(":root LIKE (root || '%')", { root }).orWhere("root LIKE (:root || '%')", { root }) ) @@ -355,7 +357,7 @@ entityManager: EntityManager = this.connection.createEntityManager() ): Promise<void> { // Delete the SQLite file on disk (ignore errors if the file doesn't exist) - const path = dbFilename(this.storageRoot, dump.id, dump.repository, dump.commit) + const path = dbFilename(this.storageRoot, dump.id) await tryDeleteFile(path) // Delete the dump record. Do this AFTER the file is deleted because the retention diff --git a/lsif/src/shared/store/uploads.ts b/lsif/src/shared/store/uploads.ts index da0d81abc68e..706673ade813 100644 --- a/lsif/src/shared/store/uploads.ts +++ b/lsif/src/shared/store/uploads.ts @@ -40,7 +40,7 @@ export class UploadManager { /** * Get the uploads in the given state. * - * @param repository The repository.
+ * @param repositoryId The repository identifier. * @param state The state. * @param query A search query. * @param visibleAtTip If true, only return dumps visible at tip. @@ -48,7 +48,7 @@ export class UploadManager { * @param offset The number of uploads to skip. */ public async getUploads( - repository: string, + repositoryId: number, state: pgModels.LsifUploadState | undefined, query: string, visibleAtTip: boolean, @@ -59,7 +59,7 @@ export class UploadManager { let queryBuilder = this.connection .getRepository(pgModels.LsifUpload) .createQueryBuilder('upload') - .where({ repository }) + .where({ repositoryId }) .orderBy('uploaded_at', 'DESC') .limit(limit) .offset(offset) @@ -69,7 +69,7 @@ export class UploadManager { } if (query) { - const clauses = ['repository', 'commit', 'root', 'failure_summary', 'failure_stacktrace'].map( + const clauses = ['commit', 'root', 'failure_summary', 'failure_stacktrace'].map( field => `"${field}" LIKE '%' || :query || '%'` ) @@ -166,13 +166,16 @@ export class UploadManager { */ public async enqueue( { - repository, + repositoryId, + repositoryName, commit, root, filename, }: { - /** The repository. */ - repository: string + /** The repository identifier. */ + repositoryId: number + /** The repository name. */ + repositoryName: string /** The commit. */ commit: string /** The root. */ @@ -189,7 +192,8 @@ export class UploadManager { } const upload = new pgModels.LsifUpload() - upload.repository = repository + upload.repositoryId = repositoryId + upload.repositoryNameAtUpload = repositoryName upload.commit = commit upload.root = root upload.filename = filename diff --git a/lsif/src/tests/integration/backend/linked-reference-results.test.ts b/lsif/src/tests/integration/backend/linked-reference-results.test.ts index f28261d90ceb..81479c63fc06 100644 --- a/lsif/src/tests/integration/backend/linked-reference-results.test.ts +++ b/lsif/src/tests/integration/backend/linked-reference-results.test.ts @@ -2,12 +2,19 @@ import * as util from '../integration-test-util' describe('Backend', () => { const ctx = new util.BackendTestContext() - const repository = 'main' + const repositoryId = 100 + const repositoryName = 'main' const commit = util.createCommit() beforeAll(async () => { await ctx.init() - await ctx.convertTestData(repository, commit, '', 'linked-reference-results/data/main.lsif.gz') + await ctx.convertTestData( + repositoryId, + repositoryName, + commit, + '', + 'linked-reference-results/data/main.lsif.gz' + ) }) afterAll(async () => { @@ -30,15 +37,17 @@ describe('Backend', () => { for (const position of positions) { const { locations } = util.filterNodeModules( util.mapLocations( - (await ctx.backend.references(repository, commit, 'src/index.ts', position)) || { locations: [] } + (await ctx.backend.references(repositoryId, repositoryName, commit, 'src/index.ts', position)) || { + locations: [], + } ) ) - expect(locations).toContainEqual(util.createLocation(repository, commit, 'src/index.ts', 1, 4, 1, 7)) // abstract def in I - expect(locations).toContainEqual(util.createLocation(repository, commit, 'src/index.ts', 5, 4, 5, 7)) // concrete def in A - expect(locations).toContainEqual(util.createLocation(repository, commit, 'src/index.ts', 9, 4, 9, 7)) // concrete def in B - expect(locations).toContainEqual(util.createLocation(repository, commit, 'src/index.ts', 13, 2, 13, 5)) // use via I - expect(locations).toContainEqual(util.createLocation(repository, commit, 'src/index.ts', 16, 2, 16, 5)) // use via B + 
expect(locations).toContainEqual(util.createLocation(repositoryId, commit, 'src/index.ts', 1, 4, 1, 7)) // abstract def in I + expect(locations).toContainEqual(util.createLocation(repositoryId, commit, 'src/index.ts', 5, 4, 5, 7)) // concrete def in A + expect(locations).toContainEqual(util.createLocation(repositoryId, commit, 'src/index.ts', 9, 4, 9, 7)) // concrete def in B + expect(locations).toContainEqual(util.createLocation(repositoryId, commit, 'src/index.ts', 13, 2, 13, 5)) // use via I + expect(locations).toContainEqual(util.createLocation(repositoryId, commit, 'src/index.ts', 16, 2, 16, 5)) // use via B // Ensure no additional references expect(locations?.length).toEqual(5) diff --git a/lsif/src/tests/integration/backend/reference-pagination-monorepo.test.ts b/lsif/src/tests/integration/backend/reference-pagination-monorepo.test.ts index d1ba0cb248ff..02d22de45a11 100644 --- a/lsif/src/tests/integration/backend/reference-pagination-monorepo.test.ts +++ b/lsif/src/tests/integration/backend/reference-pagination-monorepo.test.ts @@ -2,11 +2,12 @@ import * as util from '../integration-test-util' import { lsp } from 'lsif-protocol' import { MAX_TRAVERSAL_LIMIT } from '../../../shared/constants' import { ReferencePaginationContext } from '../../../server/backend/backend' +import { extractRepos } from './util' describe('Backend', () => { const ctx = new util.BackendTestContext() - const repository = 'monorepo' - + const repositoryId = 100 + const repositoryName = 'monorepo' const c0 = util.createCommit(0) const c1 = util.createCommit(1) const c2 = util.createCommit(2) @@ -58,7 +59,8 @@ await Promise.all( dumps.map(({ commit, project, suffix }) => ctx.convertTestData( - repository, + repositoryId, + repositoryName, commit, `${project}/`, `reference-pagination-monorepo/data/${project}${suffix}.lsif.gz`, @@ -68,7 +70,7 @@ ) ) await ctx.dumpManager.updateCommits( - repository, + repositoryId, new Map<string, Set<string>>( Array.from({ length: MAX_TRAVERSAL_LIMIT * 2 + 1 }, (_, i) => [ util.createCommit(i), @@ -90,19 +92,19 @@ const checkRefs = (locations: lsp.Location[], commit: string, root: string) => { expect(locations).toContainEqual( - util.createLocation('monorepo', commit, `${root}/src/index.ts`, 0, 9, 0, 12) + util.createLocation(repositoryId, commit, `${root}/src/index.ts`, 0, 9, 0, 12) ) expect(locations).toContainEqual( - util.createLocation('monorepo', commit, `${root}/src/index.ts`, 3, 0, 3, 3) + util.createLocation(repositoryId, commit, `${root}/src/index.ts`, 3, 0, 3, 3) ) expect(locations).toContainEqual( - util.createLocation('monorepo', commit, `${root}/src/index.ts`, 3, 7, 3, 10) + util.createLocation(repositoryId, commit, `${root}/src/index.ts`, 3, 7, 3, 10) ) expect(locations).toContainEqual( - util.createLocation('monorepo', commit, `${root}/src/index.ts`, 3, 14, 3, 17) + util.createLocation(repositoryId, commit, `${root}/src/index.ts`, 3, 14, 3, 17) ) expect(locations).toContainEqual( - util.createLocation('monorepo', commit, `${root}/src/index.ts`, 3, 21, 3, 24) + util.createLocation(repositoryId, commit, `${root}/src/index.ts`, 3, 21, 3, 24) ) } @@ -143,7 +145,7 @@ const fetch = async () => util.filterNodeModules( util.mapLocations( - (await backend.references(repository, commit, 'a/src/index.ts', { + (await backend.references(repositoryId, repositoryName, commit, 'a/src/index.ts', { line: 0, character: 17, })) || { locations: [] } @@ -153,7 +155,9 @@ describe('Backend', () =>
{ const { locations, cursor } = await fetch() expect(cursor).toBeUndefined() - expect(locations).toContainEqual(util.createLocation('monorepo', defCommit, 'a/src/index.ts', 0, 16, 0, 19)) + expect(locations).toContainEqual( + util.createLocation(repositoryId, defCommit, 'a/src/index.ts', 0, 16, 0, 19) + ) for (const { root, commit: refCommit } of refs) { checkRefs(locations, refCommit, root) } @@ -167,16 +171,33 @@ fail('failed beforeAll') } + const ids = { + ext1: 101, + ext2: 103, + ext3: 104, + ext4: 105, + ext5: 106, + } + // Add external references - const repos = ['ext1', 'ext2', 'ext3', 'ext4', 'ext5'] - const filename = 'reference-pagination-monorepo/data/f-ref.lsif.gz' - await Promise.all(repos.map(r => ctx.convertTestData(r, util.createCommit(0), 'f/', filename))) + await Promise.all( + Object.values(ids).map(externalRepositoryId => + ctx.convertTestData( + externalRepositoryId, + repositoryName, + util.createCommit(0), + 'f/', + 'reference-pagination-monorepo/data/f-ref.lsif.gz' + ) + ) + ) const fetch = async (paginationContext?: ReferencePaginationContext) => util.filterNodeModules( util.mapLocations( (await backend.references( - repository, + repositoryId, + repositoryName, c3, 'a/src/index.ts', { @@ -205,17 +226,13 @@ expect(cursor1).toBeUndefined() expect(cursor6).toBeUndefined() - const extractRepos = (references: lsp.Location[]): string[] => - // extract the repo name from git://{repo}?{commit}#{path}, or return '' (indicating a local repo) - Array.from(new Set(references.map(r => (r.uri.match(/git:\/\/([^?]+)\?.+/) || ['', ''])[1]))).sort() - // Ensure paging gets us expected results per page - expect(extractRepos(locations0)).toEqual(['monorepo']) - expect(extractRepos(locations1)).toEqual(['ext1', 'ext2', 'ext3', 'ext4', 'ext5']) - expect(extractRepos(locations2)).toEqual(['monorepo']) - expect(extractRepos(locations3)).toEqual(['monorepo']) - expect(extractRepos(locations4)).toEqual(['ext1', 'ext2']) - expect(extractRepos(locations5)).toEqual(['ext3', 'ext4']) - expect(extractRepos(locations6)).toEqual(['ext5']) + expect(extractRepos(locations0)).toEqual([repositoryId]) + expect(extractRepos(locations1)).toEqual([ids.ext1, ids.ext2, ids.ext3, ids.ext4, ids.ext5]) + expect(extractRepos(locations2)).toEqual([repositoryId]) + expect(extractRepos(locations3)).toEqual([repositoryId]) + expect(extractRepos(locations4)).toEqual([ids.ext1, ids.ext2]) + expect(extractRepos(locations5)).toEqual([ids.ext3, ids.ext4]) + expect(extractRepos(locations6)).toEqual([ids.ext5]) }) }) diff --git a/lsif/src/tests/integration/backend/reference-pagination.test.ts b/lsif/src/tests/integration/backend/reference-pagination.test.ts index bb3f5cccd026..43ac3a57d79c 100644 --- a/lsif/src/tests/integration/backend/reference-pagination.test.ts +++ b/lsif/src/tests/integration/backend/reference-pagination.test.ts @@ -1,16 +1,37 @@ import * as util from '../integration-test-util' -import { lsp } from 'lsif-protocol' import { ReferencePaginationContext } from '../../../server/backend/backend' +import { extractRepos } from './util' describe('Backend', () => { const ctx = new util.BackendTestContext() const commit = util.createCommit() + const ids = { + a: 100, + b1: 101, + b2: 103, + b3: 104, + b4: 105, + b5: 106, + b6: 107, + b7: 108, + b8: 109, + b9: 110, + // note: lexicographic order + b10: 102, + } + beforeAll(async () => { await ctx.init() await Promise.all( - ['a', 'b1', 'b2', 'b3', 'b4', 'b5', 'b6', 'b7', 'b8', 'b9', 'b10'].map(r => -
ctx.convertTestData(r, commit, '', `reference-pagination/data/${r}.lsif.gz`) + Object.entries(ids).map(([repositoryName, repositoryId]) => + ctx.convertTestData( + repositoryId, + repositoryName, + commit, + '', + `reference-pagination/data/${repositoryName}.lsif.gz` + ) ) ) }) @@ -29,6 +50,7 @@ describe('Backend', () => { util.filterNodeModules( util.mapLocations( (await backend.references( + ids.a, 'a', commit, 'src/index.ts', @@ -54,14 +76,10 @@ describe('Backend', () => { expect(cursor0).toBeUndefined() expect(cursor4).toBeUndefined() - const extractRepos = (references: lsp.Location[]): string[] => - // extract the repo name from git://{repo}?{commit}#{path}, or return '' (indicating a local repo) - Array.from(new Set(references.map(r => (r.uri.match(/git:\/\/([^?]+)\?.+/) || ['', ''])[1]))).sort() - // Ensure paging gets us expected results per page - expect(extractRepos(locations1)).toEqual(['a', 'b1', 'b10', 'b2']) - expect(extractRepos(locations2)).toEqual(['b3', 'b4', 'b5']) - expect(extractRepos(locations3)).toEqual(['b6', 'b7', 'b8']) - expect(extractRepos(locations4)).toEqual(['b9']) + expect(extractRepos(locations1)).toEqual([ids.a, ids.b1, ids.b10, ids.b2]) + expect(extractRepos(locations2)).toEqual([ids.b3, ids.b4, ids.b5]) + expect(extractRepos(locations3)).toEqual([ids.b6, ids.b7, ids.b8]) + expect(extractRepos(locations4)).toEqual([ids.b9]) }) }) diff --git a/lsif/src/tests/integration/backend/simple.test.ts b/lsif/src/tests/integration/backend/simple.test.ts index f03bb923e8d7..8bd72333d141 100644 --- a/lsif/src/tests/integration/backend/simple.test.ts +++ b/lsif/src/tests/integration/backend/simple.test.ts @@ -2,12 +2,13 @@ import * as util from '../integration-test-util' describe('Backend', () => { const ctx = new util.BackendTestContext() - const repository = 'main' + const repositoryId = 100 + const repositoryName = 'main' const commit = util.createCommit() beforeAll(async () => { await ctx.init() - await ctx.convertTestData(repository, commit, '', '/simple/data/main.lsif.gz') + await ctx.convertTestData(repositoryId, repositoryName, commit, '', '/simple/data/main.lsif.gz') }) afterAll(async () => { @@ -19,9 +20,12 @@ describe('Backend', () => { fail('failed beforeAll') } - const definitions = await ctx.backend.definitions(repository, commit, 'src/a.ts', { line: 0, character: 17 }) + const definitions = await ctx.backend.definitions(repositoryId, repositoryName, commit, 'src/a.ts', { + line: 0, + character: 17, + }) expect(definitions?.map(util.mapLocation)).toEqual([ - util.createLocation(repository, commit, 'src/a.ts', 0, 16, 0, 19), + util.createLocation(repositoryId, commit, 'src/a.ts', 0, 16, 0, 19), ]) }) @@ -30,9 +34,12 @@ describe('Backend', () => { fail('failed beforeAll') } - const definitions = await ctx.backend.definitions(repository, commit, 'src/b.ts', { line: 2, character: 1 }) + const definitions = await ctx.backend.definitions(repositoryId, repositoryName, commit, 'src/b.ts', { + line: 2, + character: 1, + }) expect(definitions?.map(util.mapLocation)).toEqual([ - util.createLocation(repository, commit, 'src/a.ts', 0, 16, 0, 19), + util.createLocation(repositoryId, commit, 'src/a.ts', 0, 16, 0, 19), ]) }) @@ -43,17 +50,20 @@ describe('Backend', () => { const { locations } = util.filterNodeModules( util.mapLocations( - (await ctx.backend.references(repository, commit, 'src/a.ts', { line: 0, character: 17 })) || { + (await ctx.backend.references(repositoryId, repositoryName, commit, 'src/a.ts', { + line: 0, + character: 17, + })) || { locations: [], } ) 
) - expect(locations).toContainEqual(util.createLocation(repository, commit, 'src/a.ts', 0, 16, 0, 19)) // def - expect(locations).toContainEqual(util.createLocation(repository, commit, 'src/b.ts', 0, 9, 0, 12)) // import - expect(locations).toContainEqual(util.createLocation(repository, commit, 'src/b.ts', 2, 0, 2, 3)) // use - expect(locations).toContainEqual(util.createLocation(repository, commit, 'src/b.ts', 2, 7, 2, 10)) // use - expect(locations).toContainEqual(util.createLocation(repository, commit, 'src/b.ts', 2, 14, 2, 17)) // use + expect(locations).toContainEqual(util.createLocation(repositoryId, commit, 'src/a.ts', 0, 16, 0, 19)) // def + expect(locations).toContainEqual(util.createLocation(repositoryId, commit, 'src/b.ts', 0, 9, 0, 12)) // import + expect(locations).toContainEqual(util.createLocation(repositoryId, commit, 'src/b.ts', 2, 0, 2, 3)) // use + expect(locations).toContainEqual(util.createLocation(repositoryId, commit, 'src/b.ts', 2, 7, 2, 10)) // use + expect(locations).toContainEqual(util.createLocation(repositoryId, commit, 'src/b.ts', 2, 14, 2, 17)) // use expect(locations).toHaveLength(5) }) }) diff --git a/lsif/src/tests/integration/backend/util.ts b/lsif/src/tests/integration/backend/util.ts new file mode 100644 index 000000000000..ad267ede58d7 --- /dev/null +++ b/lsif/src/tests/integration/backend/util.ts @@ -0,0 +1,18 @@ +import { lsp } from 'lsif-protocol' + +/** + * Extract the repo name from `git://{repo}?{commit}#{path}`. + * + * @param uri The location URI. + */ +const extractRepo = (uri: string): string => { + const match = uri.match(/git:\/\/([^?]+)\?.+/) + if (!match) { + return '' + } + + return match[1] +} + +export const extractRepos = (references: lsp.Location[]): number[] => + Array.from(new Set(references.map(r => extractRepo(r.uri)).map(v => parseInt(v, 10)))).sort((a, b) => a - b) diff --git a/lsif/src/tests/integration/backend/xrepo.test.ts b/lsif/src/tests/integration/backend/xrepo.test.ts index 664a27d06a7c..0f9d683d94f9 100644 --- a/lsif/src/tests/integration/backend/xrepo.test.ts +++ b/lsif/src/tests/integration/backend/xrepo.test.ts @@ -4,11 +4,21 @@ describe('Backend', () => { const ctx = new util.BackendTestContext() const commit = util.createCommit() + const ids = { + a: 100, + b1: 101, + b2: 103, + b3: 104, + c1: 105, + c2: 106, + c3: 107, + } + beforeAll(async () => { await ctx.init() await Promise.all( - ['a', 'b1', 'b2', 'b3', 'c1', 'c2', 'c3'].map(r => - ctx.convertTestData(r, commit, '', `xrepo/data/${r}.lsif.gz`) + Object.entries(ids).map(([repositoryName, repositoryId]) => + ctx.convertTestData(repositoryId, repositoryName, commit, '', `xrepo/data/${repositoryName}.lsif.gz`) ) ) }) @@ -22,12 +32,12 @@ fail('failed beforeAll') } - const definitions = await ctx.backend.definitions('a', commit, 'src/index.ts', { + const definitions = await ctx.backend.definitions(ids.a, 'a', commit, 'src/index.ts', { line: 11, character: 18, }) expect(definitions?.map(util.mapLocation)).toEqual([ - util.createLocation('a', commit, 'src/index.ts', 0, 16, 0, 19), + util.createLocation(ids.a, commit, 'src/index.ts', 0, 16, 0, 19), ]) }) @@ -36,12 +46,12 @@ fail('failed beforeAll') } - const definitions = await ctx.backend.definitions('b1', commit, 'src/index.ts', { + const definitions = await ctx.backend.definitions(ids.b1, 'b1', commit, 'src/index.ts', { line: 3, character: 12, }) expect(definitions?.map(util.mapLocation)).toEqual([ - util.createLocation('a', commit, 'src/index.ts', 0, 16, 0, 19), +
util.createLocation(ids.a, commit, 'src/index.ts', 0, 16, 0, 19), ]) }) @@ -50,12 +60,12 @@ describe('Backend', () => { fail('failed beforeAll') } - const definitions = await ctx.backend.definitions('b1', commit, 'src/index.ts', { + const definitions = await ctx.backend.definitions(ids.b1, 'b1', commit, 'src/index.ts', { line: 3, character: 16, }) expect(definitions?.map(util.mapLocation)).toEqual([ - util.createLocation('a', commit, 'src/index.ts', 4, 16, 4, 19), + util.createLocation(ids.a, commit, 'src/index.ts', 4, 16, 4, 19), ]) }) @@ -66,23 +76,23 @@ describe('Backend', () => { const { locations } = util.filterNodeModules( util.mapLocations( - (await ctx.backend.references('a', util.createCommit(0), 'src/index.ts', { + (await ctx.backend.references(ids.a, 'a', util.createCommit(0), 'src/index.ts', { line: 4, character: 19, })) || { locations: [] } ) ) - expect(locations).toContainEqual(util.createLocation('a', commit, 'src/index.ts', 4, 16, 4, 19)) // def - expect(locations).toContainEqual(util.createLocation('b1', commit, 'src/index.ts', 0, 14, 0, 17)) // import - expect(locations).toContainEqual(util.createLocation('b1', commit, 'src/index.ts', 3, 15, 3, 18)) // 1st use - expect(locations).toContainEqual(util.createLocation('b1', commit, 'src/index.ts', 3, 26, 3, 29)) // 2nd use - expect(locations).toContainEqual(util.createLocation('b2', commit, 'src/index.ts', 0, 14, 0, 17)) // import - expect(locations).toContainEqual(util.createLocation('b2', commit, 'src/index.ts', 3, 15, 3, 18)) // 1st use - expect(locations).toContainEqual(util.createLocation('b2', commit, 'src/index.ts', 3, 26, 3, 29)) // 2nd use - expect(locations).toContainEqual(util.createLocation('b3', commit, 'src/index.ts', 0, 14, 0, 17)) // import - expect(locations).toContainEqual(util.createLocation('b3', commit, 'src/index.ts', 3, 15, 3, 18)) // 1st use - expect(locations).toContainEqual(util.createLocation('b3', commit, 'src/index.ts', 3, 26, 3, 29)) // 2nd use + expect(locations).toContainEqual(util.createLocation(ids.a, commit, 'src/index.ts', 4, 16, 4, 19)) // def + expect(locations).toContainEqual(util.createLocation(ids.b1, commit, 'src/index.ts', 0, 14, 0, 17)) // import + expect(locations).toContainEqual(util.createLocation(ids.b1, commit, 'src/index.ts', 3, 15, 3, 18)) // 1st use + expect(locations).toContainEqual(util.createLocation(ids.b1, commit, 'src/index.ts', 3, 26, 3, 29)) // 2nd use + expect(locations).toContainEqual(util.createLocation(ids.b2, commit, 'src/index.ts', 0, 14, 0, 17)) // import + expect(locations).toContainEqual(util.createLocation(ids.b2, commit, 'src/index.ts', 3, 15, 3, 18)) // 1st use + expect(locations).toContainEqual(util.createLocation(ids.b2, commit, 'src/index.ts', 3, 26, 3, 29)) // 2nd use + expect(locations).toContainEqual(util.createLocation(ids.b3, commit, 'src/index.ts', 0, 14, 0, 17)) // import + expect(locations).toContainEqual(util.createLocation(ids.b3, commit, 'src/index.ts', 3, 15, 3, 18)) // 1st use + expect(locations).toContainEqual(util.createLocation(ids.b3, commit, 'src/index.ts', 3, 26, 3, 29)) // 2nd use // Ensure no additional references expect(locations?.length).toEqual(10) @@ -95,23 +105,23 @@ describe('Backend', () => { const { locations } = util.filterNodeModules( util.mapLocations( - (await ctx.backend.references('b1', util.createCommit(0), 'src/index.ts', { + (await ctx.backend.references(ids.b1, 'b1', util.createCommit(0), 'src/index.ts', { line: 3, character: 16, })) || { locations: [] } ) ) - 
expect(locations).toContainEqual(util.createLocation('a', commit, 'src/index.ts', 4, 16, 4, 19)) // def - expect(locations).toContainEqual(util.createLocation('b1', commit, 'src/index.ts', 0, 14, 0, 17)) // import - expect(locations).toContainEqual(util.createLocation('b1', commit, 'src/index.ts', 3, 15, 3, 18)) // 1st use - expect(locations).toContainEqual(util.createLocation('b1', commit, 'src/index.ts', 3, 26, 3, 29)) // 2nd use - expect(locations).toContainEqual(util.createLocation('b2', commit, 'src/index.ts', 0, 14, 0, 17)) // import - expect(locations).toContainEqual(util.createLocation('b2', commit, 'src/index.ts', 3, 15, 3, 18)) // 1st use - expect(locations).toContainEqual(util.createLocation('b2', commit, 'src/index.ts', 3, 26, 3, 29)) // 2nd use - expect(locations).toContainEqual(util.createLocation('b3', commit, 'src/index.ts', 0, 14, 0, 17)) // import - expect(locations).toContainEqual(util.createLocation('b3', commit, 'src/index.ts', 3, 15, 3, 18)) // 1st use - expect(locations).toContainEqual(util.createLocation('b3', commit, 'src/index.ts', 3, 26, 3, 29)) // 2nd use + expect(locations).toContainEqual(util.createLocation(ids.a, commit, 'src/index.ts', 4, 16, 4, 19)) // def + expect(locations).toContainEqual(util.createLocation(ids.b1, commit, 'src/index.ts', 0, 14, 0, 17)) // import + expect(locations).toContainEqual(util.createLocation(ids.b1, commit, 'src/index.ts', 3, 15, 3, 18)) // 1st use + expect(locations).toContainEqual(util.createLocation(ids.b1, commit, 'src/index.ts', 3, 26, 3, 29)) // 2nd use + expect(locations).toContainEqual(util.createLocation(ids.b2, commit, 'src/index.ts', 0, 14, 0, 17)) // import + expect(locations).toContainEqual(util.createLocation(ids.b2, commit, 'src/index.ts', 3, 15, 3, 18)) // 1st use + expect(locations).toContainEqual(util.createLocation(ids.b2, commit, 'src/index.ts', 3, 26, 3, 29)) // 2nd use + expect(locations).toContainEqual(util.createLocation(ids.b3, commit, 'src/index.ts', 0, 14, 0, 17)) // import + expect(locations).toContainEqual(util.createLocation(ids.b3, commit, 'src/index.ts', 3, 15, 3, 18)) // 1st use + expect(locations).toContainEqual(util.createLocation(ids.b3, commit, 'src/index.ts', 3, 26, 3, 29)) // 2nd use // Ensure no additional references expect(locations?.length).toEqual(10) @@ -124,33 +134,33 @@ describe('Backend', () => { const { locations } = util.filterNodeModules( util.mapLocations( - (await ctx.backend.references('a', util.createCommit(0), 'src/index.ts', { + (await ctx.backend.references(ids.a, 'a', util.createCommit(0), 'src/index.ts', { line: 0, character: 17, })) || { locations: [] } ) ) - expect(locations).toContainEqual(util.createLocation('a', commit, 'src/index.ts', 0, 16, 0, 19)) // def - expect(locations).toContainEqual(util.createLocation('a', commit, 'src/index.ts', 11, 18, 11, 21)) // 1st use - expect(locations).toContainEqual(util.createLocation('b1', commit, 'src/index.ts', 0, 9, 0, 12)) // import - expect(locations).toContainEqual(util.createLocation('b1', commit, 'src/index.ts', 3, 11, 3, 14)) // 1st use - expect(locations).toContainEqual(util.createLocation('b2', commit, 'src/index.ts', 0, 9, 0, 12)) // import - expect(locations).toContainEqual(util.createLocation('b2', commit, 'src/index.ts', 3, 11, 3, 14)) // 1st use - expect(locations).toContainEqual(util.createLocation('b3', commit, 'src/index.ts', 0, 9, 0, 12)) // import - expect(locations).toContainEqual(util.createLocation('b3', commit, 'src/index.ts', 3, 11, 3, 14)) // 1st use - 
expect(locations).toContainEqual(util.createLocation('c1', commit, 'src/index.ts', 0, 9, 0, 12)) // import - expect(locations).toContainEqual(util.createLocation('c1', commit, 'src/index.ts', 3, 11, 3, 14)) // 1st use - expect(locations).toContainEqual(util.createLocation('c1', commit, 'src/index.ts', 3, 15, 3, 18)) // 2nd use - expect(locations).toContainEqual(util.createLocation('c1', commit, 'src/index.ts', 3, 26, 3, 29)) // 3rd use - expect(locations).toContainEqual(util.createLocation('c2', commit, 'src/index.ts', 0, 9, 0, 12)) // import - expect(locations).toContainEqual(util.createLocation('c2', commit, 'src/index.ts', 3, 11, 3, 14)) // 1st use - expect(locations).toContainEqual(util.createLocation('c2', commit, 'src/index.ts', 3, 15, 3, 18)) // 2nd use - expect(locations).toContainEqual(util.createLocation('c2', commit, 'src/index.ts', 3, 26, 3, 29)) // 3rd use - expect(locations).toContainEqual(util.createLocation('c3', commit, 'src/index.ts', 0, 9, 0, 12)) // import - expect(locations).toContainEqual(util.createLocation('c3', commit, 'src/index.ts', 3, 11, 3, 14)) // 1st use - expect(locations).toContainEqual(util.createLocation('c3', commit, 'src/index.ts', 3, 15, 3, 18)) // 2nd use - expect(locations).toContainEqual(util.createLocation('c3', commit, 'src/index.ts', 3, 26, 3, 29)) // 3rd use + expect(locations).toContainEqual(util.createLocation(ids.a, commit, 'src/index.ts', 0, 16, 0, 19)) // def + expect(locations).toContainEqual(util.createLocation(ids.a, commit, 'src/index.ts', 11, 18, 11, 21)) // 1st use + expect(locations).toContainEqual(util.createLocation(ids.b1, commit, 'src/index.ts', 0, 9, 0, 12)) // import + expect(locations).toContainEqual(util.createLocation(ids.b1, commit, 'src/index.ts', 3, 11, 3, 14)) // 1st use + expect(locations).toContainEqual(util.createLocation(ids.b2, commit, 'src/index.ts', 0, 9, 0, 12)) // import + expect(locations).toContainEqual(util.createLocation(ids.b2, commit, 'src/index.ts', 3, 11, 3, 14)) // 1st use + expect(locations).toContainEqual(util.createLocation(ids.b3, commit, 'src/index.ts', 0, 9, 0, 12)) // import + expect(locations).toContainEqual(util.createLocation(ids.b3, commit, 'src/index.ts', 3, 11, 3, 14)) // 1st use + expect(locations).toContainEqual(util.createLocation(ids.c1, commit, 'src/index.ts', 0, 9, 0, 12)) // import + expect(locations).toContainEqual(util.createLocation(ids.c1, commit, 'src/index.ts', 3, 11, 3, 14)) // 1st use + expect(locations).toContainEqual(util.createLocation(ids.c1, commit, 'src/index.ts', 3, 15, 3, 18)) // 2nd use + expect(locations).toContainEqual(util.createLocation(ids.c1, commit, 'src/index.ts', 3, 26, 3, 29)) // 3rd use + expect(locations).toContainEqual(util.createLocation(ids.c2, commit, 'src/index.ts', 0, 9, 0, 12)) // import + expect(locations).toContainEqual(util.createLocation(ids.c2, commit, 'src/index.ts', 3, 11, 3, 14)) // 1st use + expect(locations).toContainEqual(util.createLocation(ids.c2, commit, 'src/index.ts', 3, 15, 3, 18)) // 2nd use + expect(locations).toContainEqual(util.createLocation(ids.c2, commit, 'src/index.ts', 3, 26, 3, 29)) // 3rd use + expect(locations).toContainEqual(util.createLocation(ids.c3, commit, 'src/index.ts', 0, 9, 0, 12)) // import + expect(locations).toContainEqual(util.createLocation(ids.c3, commit, 'src/index.ts', 3, 11, 3, 14)) // 1st use + expect(locations).toContainEqual(util.createLocation(ids.c3, commit, 'src/index.ts', 3, 15, 3, 18)) // 2nd use + expect(locations).toContainEqual(util.createLocation(ids.c3, commit, 'src/index.ts', 3, 26, 3, 
29)) // 3rd use // Ensure no additional references expect(locations?.length).toEqual(20) @@ -163,33 +173,33 @@ describe('Backend', () => { const { locations } = util.filterNodeModules( util.mapLocations( - (await ctx.backend.references('c1', util.createCommit(0), 'src/index.ts', { + (await ctx.backend.references(ids.c1, 'c1', util.createCommit(0), 'src/index.ts', { line: 3, character: 16, })) || { locations: [] } ) ) - expect(locations).toContainEqual(util.createLocation('a', commit, 'src/index.ts', 0, 16, 0, 19)) // def - expect(locations).toContainEqual(util.createLocation('a', commit, 'src/index.ts', 11, 18, 11, 21)) // 1st use - expect(locations).toContainEqual(util.createLocation('b1', commit, 'src/index.ts', 0, 9, 0, 12)) // import - expect(locations).toContainEqual(util.createLocation('b1', commit, 'src/index.ts', 3, 11, 3, 14)) // 1st use - expect(locations).toContainEqual(util.createLocation('b2', commit, 'src/index.ts', 0, 9, 0, 12)) // import - expect(locations).toContainEqual(util.createLocation('b2', commit, 'src/index.ts', 3, 11, 3, 14)) // 1st use - expect(locations).toContainEqual(util.createLocation('b3', commit, 'src/index.ts', 0, 9, 0, 12)) // import - expect(locations).toContainEqual(util.createLocation('b3', commit, 'src/index.ts', 3, 11, 3, 14)) // 1st use - expect(locations).toContainEqual(util.createLocation('c1', commit, 'src/index.ts', 0, 9, 0, 12)) // import - expect(locations).toContainEqual(util.createLocation('c1', commit, 'src/index.ts', 3, 11, 3, 14)) // 1st use - expect(locations).toContainEqual(util.createLocation('c1', commit, 'src/index.ts', 3, 15, 3, 18)) // 2nd use - expect(locations).toContainEqual(util.createLocation('c1', commit, 'src/index.ts', 3, 26, 3, 29)) // 3rd use - expect(locations).toContainEqual(util.createLocation('c2', commit, 'src/index.ts', 0, 9, 0, 12)) // import - expect(locations).toContainEqual(util.createLocation('c2', commit, 'src/index.ts', 3, 11, 3, 14)) // 1st use - expect(locations).toContainEqual(util.createLocation('c2', commit, 'src/index.ts', 3, 15, 3, 18)) // 2nd use - expect(locations).toContainEqual(util.createLocation('c2', commit, 'src/index.ts', 3, 26, 3, 29)) // 3rd use - expect(locations).toContainEqual(util.createLocation('c3', commit, 'src/index.ts', 0, 9, 0, 12)) // import - expect(locations).toContainEqual(util.createLocation('c3', commit, 'src/index.ts', 3, 11, 3, 14)) // 1st use - expect(locations).toContainEqual(util.createLocation('c3', commit, 'src/index.ts', 3, 15, 3, 18)) // 2nd use - expect(locations).toContainEqual(util.createLocation('c3', commit, 'src/index.ts', 3, 26, 3, 29)) // 3rd use + expect(locations).toContainEqual(util.createLocation(ids.a, commit, 'src/index.ts', 0, 16, 0, 19)) // def + expect(locations).toContainEqual(util.createLocation(ids.a, commit, 'src/index.ts', 11, 18, 11, 21)) // 1st use + expect(locations).toContainEqual(util.createLocation(ids.b1, commit, 'src/index.ts', 0, 9, 0, 12)) // import + expect(locations).toContainEqual(util.createLocation(ids.b1, commit, 'src/index.ts', 3, 11, 3, 14)) // 1st use + expect(locations).toContainEqual(util.createLocation(ids.b2, commit, 'src/index.ts', 0, 9, 0, 12)) // import + expect(locations).toContainEqual(util.createLocation(ids.b2, commit, 'src/index.ts', 3, 11, 3, 14)) // 1st use + expect(locations).toContainEqual(util.createLocation(ids.b3, commit, 'src/index.ts', 0, 9, 0, 12)) // import + expect(locations).toContainEqual(util.createLocation(ids.b3, commit, 'src/index.ts', 3, 11, 3, 14)) // 1st use + 
expect(locations).toContainEqual(util.createLocation(ids.c1, commit, 'src/index.ts', 0, 9, 0, 12)) // import + expect(locations).toContainEqual(util.createLocation(ids.c1, commit, 'src/index.ts', 3, 11, 3, 14)) // 1st use + expect(locations).toContainEqual(util.createLocation(ids.c1, commit, 'src/index.ts', 3, 15, 3, 18)) // 2nd use + expect(locations).toContainEqual(util.createLocation(ids.c1, commit, 'src/index.ts', 3, 26, 3, 29)) // 3rd use + expect(locations).toContainEqual(util.createLocation(ids.c2, commit, 'src/index.ts', 0, 9, 0, 12)) // import + expect(locations).toContainEqual(util.createLocation(ids.c2, commit, 'src/index.ts', 3, 11, 3, 14)) // 1st use + expect(locations).toContainEqual(util.createLocation(ids.c2, commit, 'src/index.ts', 3, 15, 3, 18)) // 2nd use + expect(locations).toContainEqual(util.createLocation(ids.c2, commit, 'src/index.ts', 3, 26, 3, 29)) // 3rd use + expect(locations).toContainEqual(util.createLocation(ids.c3, commit, 'src/index.ts', 0, 9, 0, 12)) // import + expect(locations).toContainEqual(util.createLocation(ids.c3, commit, 'src/index.ts', 3, 11, 3, 14)) // 1st use + expect(locations).toContainEqual(util.createLocation(ids.c3, commit, 'src/index.ts', 3, 15, 3, 18)) // 2nd use + expect(locations).toContainEqual(util.createLocation(ids.c3, commit, 'src/index.ts', 3, 26, 3, 29)) // 3rd use // Ensure no additional references expect(locations?.length).toEqual(20) diff --git a/lsif/src/tests/integration/integration-test-util.ts b/lsif/src/tests/integration/integration-test-util.ts index 53c7fce4cafb..5e452c42b9ac 100644 --- a/lsif/src/tests/integration/integration-test-util.ts +++ b/lsif/src/tests/integration/integration-test-util.ts @@ -129,21 +129,24 @@ export async function truncatePostgresTables(connection: Connection): Promise<void> { - await dumpManager.deleteOverlappingDumps(repository, commit, root, {}) + await dumpManager.deleteOverlappingDumps(repositoryId, commit, root, {}) const upload = new pgModels.LsifUpload() - upload.repository = repository + upload.repositoryId = repositoryId + upload.repositoryNameAtUpload = repositoryName upload.commit = commit upload.root = root upload.filename = '' @@ -154,7 +157,7 @@ export async function insertDump( const dump = new pgModels.LsifDump() dump.id = upload.id - dump.repository = repository + dump.repositoryId = repositoryId dump.commit = commit dump.root = root return dump @@ -169,7 +172,8 @@ export async function insertDump( * @param dumpManager The dumps manager instance. * @param dependencyManager The dependency manager instance. * @param storageRoot The temporary storage root. - * @param repository The repository name. + * @param repositoryId The repository identifier. + * @param repositoryName The repository name. * @param commit The commit. * @param root The root of the dump. * @param filename The filename of the (gzipped) LSIF dump.
@@ -180,7 +184,8 @@ export async function convertTestData( dumpManager: DumpManager, dependencyManager: DependencyManager, storageRoot: string, - repository: string, + repositoryId: number, + repositoryName: string, commit: string, root: string, filename: string, @@ -192,16 +197,16 @@ const tmp = path.join(storageRoot, constants.TEMP_DIR, uuid.v4()) const { packages, references } = await convertLsif(fullFilename, tmp) - const dump = await insertDump(connection, dumpManager, repository, commit, root) + const dump = await insertDump(connection, dumpManager, repositoryId, repositoryName, commit, root) await dependencyManager.addPackagesAndReferences(dump.id, packages, references) - await fs.rename(tmp, dbFilename(storageRoot, dump.id, repository, commit)) + await fs.rename(tmp, dbFilename(storageRoot, dump.id)) if (updateCommits) { await dumpManager.updateCommits( - repository, + repositoryId, new Map<string, Set<string>>([[commit, new Set()]]) ) - await dumpManager.updateDumpsVisibleFromTip(repository, commit) + await dumpManager.updateDumpsVisibleFromTip(repositoryId, commit) } } @@ -267,14 +272,16 @@ * given storage root and will insert dump, package, and reference data into * the given Postgres database. * - * @param repository The repository name. + * @param repositoryId The repository identifier. + * @param repositoryName The repository name. * @param commit The commit. * @param root The root of the dump. * @param filename The filename of the (gzipped) LSIF dump. * @param updateCommits Whether or not to update commits. */ public convertTestData( - repository: string, + repositoryId: number, + repositoryName: string, commit: string, root: string, filename: string, @@ -289,7 +296,8 @@ this.dumpManager, this.dependencyManager, this.storageRoot, - repository, + repositoryId, + repositoryName, commit, root, filename, @@ -314,7 +322,7 @@ /** * Create an LSP location with a remote URI. * - * @param repository The repository name. + * @param repositoryId The repository identifier. * @param commit The commit. * @param documentPath The document path. * @param startLine The starting line. @@ -323,7 +331,7 @@ * @param endCharacter The ending character.
*/ export function createLocation( - repository: string, + repositoryId: number, commit: string, documentPath: string, startLine: number, @@ -331,7 +339,7 @@ export function createLocation( endLine: number, endCharacter: number ): lsp.Location { - const url = new URL(`git://${repository}`) + const url = new URL(`git://${repositoryId}`) url.search = commit url.hash = documentPath @@ -354,7 +362,7 @@ export function createLocation( */ export function mapLocation(location: InternalLocation): lsp.Location { return createLocation( - location.dump.repository, + location.dump.repositoryId, location.dump.commit, location.path, location.range.start.line, diff --git a/lsif/src/tests/integration/store/dependencies.test.ts b/lsif/src/tests/integration/store/dependencies.test.ts index fac4342e6503..35921d353f95 100644 --- a/lsif/src/tests/integration/store/dependencies.test.ts +++ b/lsif/src/tests/integration/store/dependencies.test.ts @@ -13,6 +13,10 @@ describe('DependencyManager', () => { let dumpManager!: DumpManager let dependencyManager!: DependencyManager + const repositoryId1 = 100 + const repositoryId2 = 101 + const repositoryName = 'foo' + beforeAll(async () => { ;({ connection, cleanup } = await util.createCleanPostgresDatabase()) storageRoot = await util.createStorageRoot() @@ -51,7 +55,7 @@ describe('DependencyManager', () => { root: string, identifiers: string[] ): Promise => { - const dump = await util.insertDump(connection, dumpManager, 'foo', commit, root) + const dump = await util.insertDump(connection, dumpManager, repositoryId1, repositoryName, commit, root) await dependencyManager.addPackagesAndReferences( dump.id, @@ -81,7 +85,7 @@ describe('DependencyManager', () => { const getReferencedDumpIds = async () => { const { references } = await dependencyManager.getReferences({ - repository: '', + repositoryId: repositoryId2, scheme: 'npm', name: 'p1', version: '0.1.0', @@ -94,7 +98,7 @@ describe('DependencyManager', () => { } await dumpManager.updateCommits( - 'foo', + repositoryId1, new Map>([ [ca, new Set()], [cb, new Set([ca])], @@ -104,7 +108,7 @@ describe('DependencyManager', () => { [cf, new Set([ce])], ]) ) - await dumpManager.updateDumpsVisibleFromTip('foo', cf) + await dumpManager.updateDumpsVisibleFromTip(repositoryId1, cf) // only references containing identifier y expect(await getReferencedDumpIds()).toEqual([dumpa.id, dumpb.id, dumpf.id]) @@ -120,7 +124,7 @@ describe('DependencyManager', () => { root: string, identifiers: string[] ): Promise => { - const dump = await util.insertDump(connection, dumpManager, 'foo', commit, root) + const dump = await util.insertDump(connection, dumpManager, repositoryId1, repositoryName, commit, root) await dependencyManager.addPackagesAndReferences( dump.id, @@ -159,7 +163,7 @@ describe('DependencyManager', () => { } const { references } = await dependencyManager.getReferences({ - repository: 'bar', + repositoryId: repositoryId2, scheme: 'npm', name: 'p1', version: '0.1.0', @@ -191,9 +195,9 @@ describe('DependencyManager', () => { }, ] - const dumpa = await util.insertDump(connection, dumpManager, 'foo', ca, '') - const dumpb = await util.insertDump(connection, dumpManager, 'foo', cb, '') - const dumpc = await util.insertDump(connection, dumpManager, 'foo', cc, '') + const dumpa = await util.insertDump(connection, dumpManager, repositoryId1, repositoryName, ca, '') + const dumpb = await util.insertDump(connection, dumpManager, repositoryId1, repositoryName, cb, '') + const dumpc = await util.insertDump(connection, dumpManager, 
repositoryId1, repositoryName, cc, '') await dependencyManager.addPackagesAndReferences(dumpa.id, [], references) await dependencyManager.addPackagesAndReferences(dumpb.id, [], references) @@ -202,7 +206,7 @@ describe('DependencyManager', () => { const getReferencedDumpIds = async () => ( await dependencyManager.getReferences({ - repository: '', + repositoryId: repositoryId2, scheme: 'npm', name: 'p1', version: '0.1.0', diff --git a/lsif/src/tests/integration/store/dumps.test.ts b/lsif/src/tests/integration/store/dumps.test.ts index d9b689afbe4d..db9c3b11d6e6 100644 --- a/lsif/src/tests/integration/store/dumps.test.ts +++ b/lsif/src/tests/integration/store/dumps.test.ts @@ -14,6 +14,12 @@ describe('DumpManager', () => { let storageRoot!: string let dumpManager!: DumpManager + let counter = 100 + const nextId = () => { + counter++ + return counter + } + beforeAll(async () => { ;({ connection, cleanup } = await util.createCleanPostgresDatabase()) storageRoot = await util.createStorageRoot() @@ -45,6 +51,8 @@ describe('DumpManager', () => { // | | | // +-- [c] -- d --+ +--- h + const repositoryId = nextId() + const repositoryName = 'foo' const ca = util.createCommit() const cb = util.createCommit() const cc = util.createCommit() @@ -56,7 +64,7 @@ describe('DumpManager', () => { // Add relations await dumpManager.updateCommits( - 'foo', + repositoryId, new Map>([ [ca, new Set()], [cb, new Set([ca])], @@ -71,18 +79,18 @@ describe('DumpManager', () => { ) // Add dumps - await util.insertDump(connection, dumpManager, 'foo', ca, '') - await util.insertDump(connection, dumpManager, 'foo', cc, '') - await util.insertDump(connection, dumpManager, 'foo', cg, '') - - const d1 = await dumpManager.findClosestDump('foo', ca, 'file') - const d2 = await dumpManager.findClosestDump('foo', cb, 'file') - const d3 = await dumpManager.findClosestDump('foo', cc, 'file') - const d4 = await dumpManager.findClosestDump('foo', cd, 'file') - const d5 = await dumpManager.findClosestDump('foo', cf, 'file') - const d6 = await dumpManager.findClosestDump('foo', cg, 'file') - const d7 = await dumpManager.findClosestDump('foo', ce, 'file') - const d8 = await dumpManager.findClosestDump('foo', ch, 'file') + await util.insertDump(connection, dumpManager, repositoryId, repositoryName, ca, '') + await util.insertDump(connection, dumpManager, repositoryId, repositoryName, cc, '') + await util.insertDump(connection, dumpManager, repositoryId, repositoryName, cg, '') + + const d1 = await dumpManager.findClosestDump(repositoryId, repositoryName, ca, 'file') + const d2 = await dumpManager.findClosestDump(repositoryId, repositoryName, cb, 'file') + const d3 = await dumpManager.findClosestDump(repositoryId, repositoryName, cc, 'file') + const d4 = await dumpManager.findClosestDump(repositoryId, repositoryName, cd, 'file') + const d5 = await dumpManager.findClosestDump(repositoryId, repositoryName, cf, 'file') + const d6 = await dumpManager.findClosestDump(repositoryId, repositoryName, cg, 'file') + const d7 = await dumpManager.findClosestDump(repositoryId, repositoryName, ce, 'file') + const d8 = await dumpManager.findClosestDump(repositoryId, repositoryName, ch, 'file') // Test closest commit expect(d1?.commit).toEqual(ca) @@ -110,6 +118,8 @@ describe('DumpManager', () => { // | // +-- g -- h + const repositoryId = nextId() + const repositoryName = 'foo' const ca = util.createCommit() const cb = util.createCommit() const cc = util.createCommit() @@ -121,7 +131,7 @@ describe('DumpManager', () => { // Add relations await 
dumpManager.updateCommits( - 'foo', + repositoryId, new Map>([ [ca, new Set()], [cb, new Set([ca])], @@ -135,21 +145,21 @@ describe('DumpManager', () => { ) // Add dumps - await util.insertDump(connection, dumpManager, 'foo', cb, '') + await util.insertDump(connection, dumpManager, repositoryId, repositoryName, cb, '') - const d1 = await dumpManager.findClosestDump('foo', ca, 'file') - const d2 = await dumpManager.findClosestDump('foo', cb, 'file') - const d3 = await dumpManager.findClosestDump('foo', cc, 'file') + const d1 = await dumpManager.findClosestDump(repositoryId, repositoryName, ca, 'file') + const d2 = await dumpManager.findClosestDump(repositoryId, repositoryName, cb, 'file') + const d3 = await dumpManager.findClosestDump(repositoryId, repositoryName, cc, 'file') // Test closest commit expect(d1?.commit).toEqual(cb) expect(d2?.commit).toEqual(cb) expect(d3?.commit).toEqual(cb) - expect(await dumpManager.findClosestDump('foo', cd, 'file')).toBeUndefined() - expect(await dumpManager.findClosestDump('foo', ce, 'file')).toBeUndefined() - expect(await dumpManager.findClosestDump('foo', cf, 'file')).toBeUndefined() - expect(await dumpManager.findClosestDump('foo', cg, 'file')).toBeUndefined() - expect(await dumpManager.findClosestDump('foo', ch, 'file')).toBeUndefined() + expect(await dumpManager.findClosestDump(repositoryId, repositoryName, cd, 'file')).toBeUndefined() + expect(await dumpManager.findClosestDump(repositoryId, repositoryName, ce, 'file')).toBeUndefined() + expect(await dumpManager.findClosestDump(repositoryId, repositoryName, cf, 'file')).toBeUndefined() + expect(await dumpManager.findClosestDump(repositoryId, repositoryName, cg, 'file')).toBeUndefined() + expect(await dumpManager.findClosestDump(repositoryId, repositoryName, ch, 'file')).toBeUndefined() }) it('should return empty string as closest commit with no reachable lsif data', async () => { @@ -163,13 +173,15 @@ describe('DumpManager', () => { // // Where LSIF dumps exist at b at roots: root1/ and root2/. 
+ const repositoryId = nextId() + const repositoryName = 'foo' const ca = util.createCommit() const cb = util.createCommit() - const fields = ['repository', 'commit', 'root'] + const fields = ['repositoryId', 'commit', 'root'] // Add relations await dumpManager.updateCommits( - 'foo', + repositoryId, new Map>([ [ca, new Set()], [cb, new Set([ca])], @@ -177,37 +189,47 @@ describe('DumpManager', () => { ) // Add dumps - await util.insertDump(connection, dumpManager, 'foo', cb, 'root1/') - await util.insertDump(connection, dumpManager, 'foo', cb, 'root2/') + await util.insertDump(connection, dumpManager, repositoryId, repositoryName, cb, 'root1/') + await util.insertDump(connection, dumpManager, repositoryId, repositoryName, cb, 'root2/') // Test closest commit - expect(await dumpManager.findClosestDump('foo', ca, 'blah')).toBeUndefined() - expect(pick(await dumpManager.findClosestDump('foo', cb, 'root1/file.ts'), ...fields)).toEqual({ - repository: 'foo', + expect(await dumpManager.findClosestDump(repositoryId, repositoryName, ca, 'blah')).toBeUndefined() + expect( + pick(await dumpManager.findClosestDump(repositoryId, repositoryName, cb, 'root1/file.ts'), ...fields) + ).toEqual({ + repositoryId, commit: cb, root: 'root1/', }) - expect(pick(await dumpManager.findClosestDump('foo', cb, 'root2/file.ts'), ...fields)).toEqual({ - repository: 'foo', + expect( + pick(await dumpManager.findClosestDump(repositoryId, repositoryName, cb, 'root2/file.ts'), ...fields) + ).toEqual({ + repositoryId, commit: cb, root: 'root2/', }) - expect(pick(await dumpManager.findClosestDump('foo', ca, 'root2/file.ts'), ...fields)).toEqual({ - repository: 'foo', + expect( + pick(await dumpManager.findClosestDump(repositoryId, repositoryName, ca, 'root2/file.ts'), ...fields) + ).toEqual({ + repositoryId, commit: cb, root: 'root2/', }) - expect(await dumpManager.findClosestDump('foo', ca, 'root3/file.ts')).toBeUndefined() + expect(await dumpManager.findClosestDump(repositoryId, repositoryName, ca, 'root3/file.ts')).toBeUndefined() - await util.insertDump(connection, dumpManager, 'foo', cb, '') - expect(pick(await dumpManager.findClosestDump('foo', ca, 'root2/file.ts'), ...fields)).toEqual({ - repository: 'foo', + await util.insertDump(connection, dumpManager, repositoryId, repositoryName, cb, '') + expect( + pick(await dumpManager.findClosestDump(repositoryId, repositoryName, ca, 'root2/file.ts'), ...fields) + ).toEqual({ + repositoryId, commit: cb, root: '', }) - expect(pick(await dumpManager.findClosestDump('foo', ca, 'root3/file.ts'), ...fields)).toEqual({ - repository: 'foo', + expect( + pick(await dumpManager.findClosestDump(repositoryId, repositoryName, ca, 'root3/file.ts'), ...fields) + ).toEqual({ + repositoryId, commit: cb, root: '', }) @@ -225,6 +247,8 @@ describe('DumpManager', () => { // Note: we use 'a' as a suffix for commit numbers on construction so that // we can distinguish `1` and `11` (`1a1a1a...` and `11a11a11a..`). 
+ const repositoryId = nextId() + const repositoryName = 'foo' const c0 = util.createCommit(0) const c1 = util.createCommit(1) const cpen = util.createCommit(MAX_TRAVERSAL_LIMIT / 2 - 1) @@ -238,14 +262,14 @@ describe('DumpManager', () => { ) // Add relations - await dumpManager.updateCommits('foo', commits) + await dumpManager.updateCommits(repositoryId, commits) // Add dumps - await util.insertDump(connection, dumpManager, 'foo', c0, '') + await util.insertDump(connection, dumpManager, repositoryId, repositoryName, c0, '') - const d1 = await dumpManager.findClosestDump('foo', c0, 'file') - const d2 = await dumpManager.findClosestDump('foo', c1, 'file') - const d3 = await dumpManager.findClosestDump('foo', cpen, 'file') + const d1 = await dumpManager.findClosestDump(repositoryId, repositoryName, c0, 'file') + const d2 = await dumpManager.findClosestDump(repositoryId, repositoryName, c1, 'file') + const d3 = await dumpManager.findClosestDump(repositoryId, repositoryName, cpen, 'file') // Test closest commit expect(d1?.commit).toEqual(c0) @@ -268,13 +292,13 @@ describe('DumpManager', () => { // | 99 | 99 | // | 100 | 1 | (limit reached) - expect(await dumpManager.findClosestDump('foo', cmax, 'file')).toBeUndefined() + expect(await dumpManager.findClosestDump(repositoryId, repositoryName, cmax, 'file')).toBeUndefined() // Add closer dump - await util.insertDump(connection, dumpManager, 'foo', c1, '') + await util.insertDump(connection, dumpManager, repositoryId, repositoryName, c1, '') // Now commit 1 should be found - const dump = await dumpManager.findClosestDump('foo', cmax, 'file') + const dump = await dumpManager.findClosestDump(repositoryId, repositoryName, cmax, 'file') expect(dump?.commit).toEqual(c1) }) @@ -287,6 +311,8 @@ describe('DumpManager', () => { // // a -- b -- c -- d -- e -- f -- g + const repositoryId = nextId() + const repositoryName = 'foo' const ca = util.createCommit() const cb = util.createCommit() const cc = util.createCommit() @@ -297,7 +323,7 @@ describe('DumpManager', () => { // Add relations await dumpManager.updateCommits( - 'foo', + repositoryId, new Map>([ [ca, new Set()], [cb, new Set([ca])], @@ -310,15 +336,15 @@ describe('DumpManager', () => { ) // Add dumps - await util.insertDump(connection, dumpManager, 'foo', ca, 'r1') - await util.insertDump(connection, dumpManager, 'foo', cb, 'r2') - await util.insertDump(connection, dumpManager, 'foo', cc, '') // overwrites r1, r2 - const d1 = await util.insertDump(connection, dumpManager, 'foo', cd, 'r3') // overwrites '' - const d2 = await util.insertDump(connection, dumpManager, 'foo', cf, 'r4') - await util.insertDump(connection, dumpManager, 'foo', cg, 'r5') // not traversed - - await dumpManager.updateDumpsVisibleFromTip('foo', cf) - const visibleDumps = await dumpManager.getVisibleDumps('foo') + await util.insertDump(connection, dumpManager, repositoryId, repositoryName, ca, 'r1') + await util.insertDump(connection, dumpManager, repositoryId, repositoryName, cb, 'r2') + await util.insertDump(connection, dumpManager, repositoryId, repositoryName, cc, '') // overwrites r1, r2 + const d1 = await util.insertDump(connection, dumpManager, repositoryId, repositoryName, cd, 'r3') // overwrites '' + const d2 = await util.insertDump(connection, dumpManager, repositoryId, repositoryName, cf, 'r4') + await util.insertDump(connection, dumpManager, repositoryId, repositoryName, cg, 'r5') // not traversed + + await dumpManager.updateDumpsVisibleFromTip(repositoryId, cf) + const visibleDumps = await 
dumpManager.getVisibleDumps(repositoryId) expect(visibleDumps.map((dump: pgModels.LsifDump) => dump.id).sort()).toEqual([d1.id, d2.id]) }) @@ -335,6 +361,8 @@ describe('DumpManager', () => { // | | // +-- [f] --- g --------------+ + const repositoryId = nextId() + const repositoryName = 'foo' const ca = util.createCommit() const cb = util.createCommit() const cc = util.createCommit() @@ -347,7 +375,7 @@ describe('DumpManager', () => { // Add relations await dumpManager.updateCommits( - 'foo', + repositoryId, new Map>([ [ca, new Set()], [cb, new Set([ca])], @@ -362,16 +390,16 @@ describe('DumpManager', () => { ) // Add dumps - await util.insertDump(connection, dumpManager, 'foo', cb, 'r2') - const dump1 = await util.insertDump(connection, dumpManager, 'foo', ce, 'r2/a') // overwrites r2 in commit b - const dump2 = await util.insertDump(connection, dumpManager, 'foo', ce, 'r2/b') - await util.insertDump(connection, dumpManager, 'foo', cf, 'r1/a') - await util.insertDump(connection, dumpManager, 'foo', cf, 'r1/b') - const dump3 = await util.insertDump(connection, dumpManager, 'foo', ch, 'r1') // overwrites r1/{a,b} in commit f - const dump4 = await util.insertDump(connection, dumpManager, 'foo', ci, 'r3') - - await dumpManager.updateDumpsVisibleFromTip('foo', ci) - const visibleDumps = await dumpManager.getVisibleDumps('foo') + await util.insertDump(connection, dumpManager, repositoryId, repositoryName, cb, 'r2') + const dump1 = await util.insertDump(connection, dumpManager, repositoryId, repositoryName, ce, 'r2/a') // overwrites r2 in commit b + const dump2 = await util.insertDump(connection, dumpManager, repositoryId, repositoryName, ce, 'r2/b') + await util.insertDump(connection, dumpManager, repositoryId, repositoryName, cf, 'r1/a') + await util.insertDump(connection, dumpManager, repositoryId, repositoryName, cf, 'r1/b') + const dump3 = await util.insertDump(connection, dumpManager, repositoryId, repositoryName, ch, 'r1') // overwrites r1/{a,b} in commit f + const dump4 = await util.insertDump(connection, dumpManager, repositoryId, repositoryName, ci, 'r3') + + await dumpManager.updateDumpsVisibleFromTip(repositoryId, ci) + const visibleDumps = await dumpManager.getVisibleDumps(repositoryId) expect(visibleDumps.map((dump: pgModels.LsifDump) => dump.id).sort()).toEqual([ dump1.id, dump2.id, @@ -392,6 +420,8 @@ describe('DumpManager', () => { // Note: we use 'a' as a suffix for commit numbers on construction so that // we can distinguish `1` and `11` (`1a1a1a...` and `11a11a11a...`). 
+ const repositoryId = nextId() + const repositoryName = 'foo' const c0 = util.createCommit(0) const c1 = util.createCommit(1) const cpen = util.createCommit(MAX_TRAVERSAL_LIMIT - 1) @@ -405,35 +435,43 @@ describe('DumpManager', () => { ) // Add relations - await dumpManager.updateCommits('foo', commits) + await dumpManager.updateCommits(repositoryId, commits) // Add dumps - const dump1 = await util.insertDump(connection, dumpManager, 'foo', cmax, '') + const dump1 = await util.insertDump(connection, dumpManager, repositoryId, repositoryName, cmax, '') - await dumpManager.updateDumpsVisibleFromTip('foo', cmax) - let visibleDumps = await dumpManager.getVisibleDumps('foo') + await dumpManager.updateDumpsVisibleFromTip(repositoryId, cmax) + let visibleDumps = await dumpManager.getVisibleDumps(repositoryId) expect(visibleDumps.map((dump: pgModels.LsifDump) => dump.id).sort()).toEqual([dump1.id]) - await dumpManager.updateDumpsVisibleFromTip('foo', c1) - visibleDumps = await dumpManager.getVisibleDumps('foo') + await dumpManager.updateDumpsVisibleFromTip(repositoryId, c1) + visibleDumps = await dumpManager.getVisibleDumps(repositoryId) expect(visibleDumps.map((dump: pgModels.LsifDump) => dump.id).sort()).toEqual([dump1.id]) - await dumpManager.updateDumpsVisibleFromTip('foo', c0) - visibleDumps = await dumpManager.getVisibleDumps('foo') + await dumpManager.updateDumpsVisibleFromTip(repositoryId, c0) + visibleDumps = await dumpManager.getVisibleDumps(repositoryId) expect(visibleDumps.map((dump: pgModels.LsifDump) => dump.id).sort()).toEqual([]) // Add closer dump - const dump2 = await util.insertDump(connection, dumpManager, 'foo', cpen, '') + const dump2 = await util.insertDump(connection, dumpManager, repositoryId, repositoryName, cpen, '') // Now commit cpen should be found - await dumpManager.updateDumpsVisibleFromTip('foo', c0) - visibleDumps = await dumpManager.getVisibleDumps('foo') + await dumpManager.updateDumpsVisibleFromTip(repositoryId, c0) + visibleDumps = await dumpManager.getVisibleDumps(repositoryId) expect(visibleDumps.map((dump: pgModels.LsifDump) => dump.id).sort()).toEqual([dump2.id]) }) }) describe('discoverAndUpdateCommit', () => { + let counter = 200 + const nextId = () => { + counter++ + return counter + } + it('should update tracked commits', async () => { + const repositoryId = nextId() + const repositoryName = 'test-repo' // hashes to gitserver1 const ca = util.createCommit() const cb = util.createCommit() const cc = util.createCommit() @@ -446,12 +484,13 @@ describe('discoverAndUpdateCommit', () => { try { const dumpManager = new DumpManager(connection, '') - await util.insertDump(connection, dumpManager, 'test-repo', ca, '') + await util.insertDump(connection, dumpManager, repositoryId, repositoryName, ca, '') await dumpManager.updateCommits( - 'test-repo', + repositoryId, await dumpManager.discoverCommits({ - repository: 'test-repo', // hashes to gitserver1 + repositoryId, + repositoryName, commit: cc, gitserverUrls: ['gitserver0', 'gitserver1', 'gitserver2'], }) @@ -469,6 +508,8 @@ describe('discoverAndUpdateCommit', () => { }) it('should early-out if commit is tracked', async () => { + const repositoryId = nextId() + const repositoryName = 'test-repo' // hashes to gitserver1 const ca = util.createCommit() const cb = util.createCommit() @@ -476,9 +517,9 @@ describe('discoverAndUpdateCommit', () => { try { const dumpManager = new DumpManager(connection, '') - await util.insertDump(connection, dumpManager, 'test-repo', ca, '') + await util.insertDump(connection, 
dumpManager, repositoryId, repositoryName, ca, '') await dumpManager.updateCommits( - 'test-repo', + repositoryId, new Map>([[cb, new Set()]]) ) @@ -487,9 +528,10 @@ describe('discoverAndUpdateCommit', () => { // in an exception being thrown. await dumpManager.updateCommits( - 'test-repo', + repositoryId, await dumpManager.discoverCommits({ - repository: 'test-repo', // hashes to gitserver1 + repositoryId, + repositoryName, commit: cb, gitserverUrls: ['gitserver0', 'gitserver1', 'gitserver2'], }) @@ -500,6 +542,8 @@ describe('discoverAndUpdateCommit', () => { }) it('should early-out if repository is unknown', async () => { + const repositoryId = nextId() + const repositoryName = 'test-repo' // hashes to gitserver1 const ca = util.createCommit() const { connection, cleanup } = await util.createCleanPostgresDatabase() @@ -512,9 +556,10 @@ describe('discoverAndUpdateCommit', () => { // in an exception being thrown. await dumpManager.updateCommits( - 'test-repo', + repositoryId, await dumpManager.discoverCommits({ - repository: 'test-repo', // hashes to gitserver1 + repositoryId, + repositoryName, commit: ca, gitserverUrls: ['gitserver0', 'gitserver1', 'gitserver2'], }) @@ -526,7 +571,15 @@ describe('discoverAndUpdateCommit', () => { }) describe('discoverAndUpdateTips', () => { + let counter = 300 + const nextId = () => { + counter++ + return counter + } + it('should update tips', async () => { + const repositoryId = nextId() + const repositoryName = 'test-repo' // hashes to gitserver1 const ca = util.createCommit() const cb = util.createCommit() const cc = util.createCommit() @@ -534,7 +587,7 @@ describe('discoverAndUpdateTips', () => { const ce = util.createCommit() nock('http://gitserver0') - .post('/exec', { repo: 'test-repo', args: ['rev-parse', 'HEAD'] }) + .post('/exec', { repo: repositoryName, args: ['rev-parse', 'HEAD'] }) .reply(200, ce) const { connection, cleanup } = await util.createCleanPostgresDatabase() @@ -542,7 +595,7 @@ describe('discoverAndUpdateTips', () => { try { const dumpManager = new DumpManager(connection, '') await dumpManager.updateCommits( - 'test-repo', + repositoryId, new Map>([ [ca, new Set()], [cb, new Set([ca])], @@ -551,22 +604,22 @@ describe('discoverAndUpdateTips', () => { [ce, new Set([cd])], ]) ) - await util.insertDump(connection, dumpManager, 'test-repo', ca, 'foo') - await util.insertDump(connection, dumpManager, 'test-repo', cb, 'foo') - await util.insertDump(connection, dumpManager, 'test-repo', cc, 'bar') + await util.insertDump(connection, dumpManager, repositoryId, repositoryName, ca, 'foo') + await util.insertDump(connection, dumpManager, repositoryId, repositoryName, cb, 'foo') + await util.insertDump(connection, dumpManager, repositoryId, repositoryName, cc, 'bar') const tipCommit = await dumpManager.discoverTip({ - repository: 'test-repo', + repositoryName, gitserverUrls: ['gitserver0'], }) if (!tipCommit) { throw new Error('Expected a tip commit') } - await dumpManager.updateDumpsVisibleFromTip('test-repo', tipCommit) + await dumpManager.updateDumpsVisibleFromTip(repositoryId, tipCommit) - const d1 = await dumpManager.getDump('test-repo', ca, 'foo/test.ts') - const d2 = await dumpManager.getDump('test-repo', cb, 'foo/test.ts') - const d3 = await dumpManager.getDump('test-repo', cc, 'bar/test.ts') + const d1 = await dumpManager.getDump(repositoryId, ca, 'foo/test.ts') + const d2 = await dumpManager.getDump(repositoryId, cb, 'foo/test.ts') + const d3 = await dumpManager.getDump(repositoryId, cc, 'bar/test.ts') 
            expect(d1?.visibleAtTip).toBeFalsy()
            expect(d2?.visibleAtTip).toBeTruthy()
@@ -578,6 +631,12 @@ })
 })
 
 describe('discoverTips', () => {
+    let counter = 400
+    const nextId = () => {
+        counter++
+        return counter
+    }
+
     it('should route requests to correct gitserver', async () => {
         // Distribution of repository names to gitservers
         const requests = {
@@ -596,26 +655,28 @@ describe('discoverTips', () => {
         }
 
         // Map repo to the payloads above
-        const expected = new Map<string, string>()
+        const expected = new Map<number, string>()
         for (let i = 0; i < 15; i++) {
-            expected.set(`test-repo-${i}`, `c${i}`)
+            expected.set(i, `c${i}`)
         }
 
         const { connection, cleanup } = await util.createCleanPostgresDatabase()
 
         try {
+            const repositoryId = nextId()
+            const repositoryName = 'foo'
             const dumpManager = new DumpManager(connection, '')
 
             for (let i = 0; i < 15; i++) {
-                await util.insertDump(connection, dumpManager, `test-repo-${i}`, util.createCommit(), '')
+                await util.insertDump(connection, dumpManager, repositoryId, repositoryName, util.createCommit(), '')
             }
 
-            const tips = new Map<string, string | undefined>()
+            const tips = new Map<number, string | undefined>()
             for (let i = 0; i < 15; i++) {
                 tips.set(
-                    `test-repo-${i}`,
+                    i,
                     await dumpManager.discoverTip({
-                        repository: `test-repo-${i}`,
+                        repositoryName: `test-repo-${i}`,
                         gitserverUrls: ['gitserver0', 'gitserver1', 'gitserver2'],
                     })
                 )
diff --git a/lsif/src/worker/conversion/conversion.ts b/lsif/src/worker/conversion/conversion.ts
index 4d98b0a73edd..f527c04d6685 100644
--- a/lsif/src/worker/conversion/conversion.ts
+++ b/lsif/src/worker/conversion/conversion.ts
@@ -44,10 +44,10 @@ export async function convertDatabase(
     )
 
     // Move the temp file where it can be found by the server
-    await fs.rename(tempFile, dbFilename(settings.STORAGE_ROOT, upload.id, upload.repository, upload.commit))
+    await fs.rename(tempFile, dbFilename(settings.STORAGE_ROOT, upload.id))
 
     logger.info('Converted upload', {
-        repository: upload.repository,
+        repositoryId: upload.repositoryId,
         commit: upload.commit,
         root: upload.root,
     })
@@ -80,13 +80,21 @@ export async function updateCommitsAndDumpsVisibleFromTip(
 ): Promise<void> {
     const gitserverUrls = fetchConfiguration().gitServers
 
-    const tipCommit = await dumpManager.discoverTip({ repository: upload.repository, gitserverUrls, ctx })
+    const repositoryId = upload.repositoryId
+    const repositoryName = upload.repositoryNameAtUpload
+
+    const tipCommit = await dumpManager.discoverTip({
+        repositoryName,
+        gitserverUrls,
+        ctx,
+    })
     if (tipCommit === undefined) {
         throw new Error('No tip commit available for repository')
     }
 
     const commits = await dumpManager.discoverCommits({
-        repository: upload.repository,
+        repositoryId,
+        repositoryName,
         commit: upload.commit,
         gitserverUrls,
         ctx,
@@ -100,7 +108,8 @@ export async function updateCommitsAndDumpsVisibleFromTip(
     // the tip and all dumps will be invisible.
 
     const tipCommits = await dumpManager.discoverCommits({
-        repository: upload.repository,
+        repositoryId,
+        repositoryName,
         commit: tipCommit,
         gitserverUrls,
         ctx,
@@ -114,6 +123,6 @@ export async function updateCommitsAndDumpsVisibleFromTip(
         }
     }
 
-    await dumpManager.updateCommits(upload.repository, commits, ctx, entityManager)
-    await dumpManager.updateDumpsVisibleFromTip(upload.repository, tipCommit, ctx, entityManager)
+    await dumpManager.updateCommits(upload.repositoryId, commits, ctx, entityManager)
+    await dumpManager.updateDumpsVisibleFromTip(upload.repositoryId, tipCommit, ctx, entityManager)
 }
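The conversion hunks above carry the core invariant of this change: gitserver can only be addressed by repository name (hence `upload.repositoryNameAtUpload`), while every Postgres read and write is keyed by the stable repository id. A condensed sketch of that split, using simplified, assumed signatures rather than the real DumpManager interface (which also threads tracing context and an entity manager):

```typescript
// Narrowed, illustrative view of the two calls involved; not the real DumpManager.
interface TipVisibilityStore {
    // Addressed by repository *name*: used only to query the right gitserver shard.
    discoverTip(args: { repositoryName: string; gitserverUrls: string[] }): Promise<string | undefined>
    // Keyed by repository *id*: all cross-repository Postgres state uses the id.
    updateDumpsVisibleFromTip(repositoryId: number, tipCommit: string): Promise<void>
}

export async function refreshVisibleDumps(
    store: TipVisibilityStore,
    upload: { repositoryId: number; repositoryNameAtUpload: string },
    gitserverUrls: string[]
): Promise<void> {
    const tipCommit = await store.discoverTip({
        repositoryName: upload.repositoryNameAtUpload,
        gitserverUrls,
    })
    if (tipCommit === undefined) {
        throw new Error('No tip commit available for repository')
    }
    await store.updateDumpsVisibleFromTip(upload.repositoryId, tipCommit)
}
```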
diff --git a/lsif/src/worker/worker.ts b/lsif/src/worker/worker.ts
index fd0f01caee0d..d612181be3ec 100644
--- a/lsif/src/worker/worker.ts
+++ b/lsif/src/worker/worker.ts
@@ -80,7 +80,7 @@ async function main(logger: Logger): Promise<void> {
         // delete the files on disk right away. These files will be cleaned up by a worker in
         // a future cleanup task.
         await dumpManager.deleteOverlappingDumps(
-            upload.repository,
+            upload.repositoryId,
             upload.commit,
             upload.root,
             { logger, span },
diff --git a/migrations/1528395637_lsif_repository_ids.down.sql b/migrations/1528395637_lsif_repository_ids.down.sql
new file mode 100644
index 000000000000..cf02e27af50b
--- /dev/null
+++ b/migrations/1528395637_lsif_repository_ids.down.sql
@@ -0,0 +1,48 @@
+-- Note: `commit` is a reserved word, so it's quoted.
+
+BEGIN;
+
+--
+-- lsif_uploads
+--
+
+-- Restore old column
+ALTER TABLE lsif_uploads RENAME COLUMN repository_name_at_upload TO repository;
+CREATE UNIQUE INDEX lsif_uploads_repository_commit_root ON lsif_uploads(repository, "commit", root) WHERE state = 'completed'::lsif_upload_state;
+CREATE INDEX lsif_uploads_visible_repository_commit ON lsif_uploads(repository, "commit") WHERE visible_at_tip;
+ALTER TABLE lsif_uploads ADD CONSTRAINT "lsif_uploads_repository_check" CHECK (repository <> ''::text);
+
+-- Re-populate old column
+UPDATE lsif_uploads u SET repository = (SELECT name FROM repo r WHERE r.id = u.repository_id LIMIT 1);
+ALTER TABLE lsif_uploads ALTER COLUMN repository SET NOT NULL;
+
+-- Drop view dependent on new column
+DROP VIEW lsif_dumps;
+
+-- Drop new column
+DROP INDEX lsif_uploads_repository_id_commit_root;
+DROP INDEX lsif_uploads_visible_repository_id_commit;
+ALTER TABLE lsif_uploads DROP repository_id;
+
+-- Recreate view with the original column names
+CREATE VIEW lsif_dumps AS SELECT u.*, u.finished_at as processed_at FROM lsif_uploads u WHERE state = 'completed';
+
+--
+-- lsif_commits
+--
+
+-- Restore old column
+ALTER TABLE lsif_commits ADD repository text;
+CREATE UNIQUE INDEX lsif_commits_repo_commit_parent_commit_unique ON lsif_commits(repository, "commit", parent_commit);
+CREATE INDEX lsif_commits_parent_commit ON lsif_commits(repository, parent_commit);
+
+-- Re-populate old column
+UPDATE lsif_commits c SET repository = (SELECT name FROM repo r WHERE r.id = c.repository_id LIMIT 1);
+ALTER TABLE lsif_commits ALTER COLUMN repository SET NOT NULL;
+
+-- Drop new column
+DROP INDEX lsif_commits_repository_id_commit_parent_commit_unique;
+DROP INDEX lsif_commits_repository_id_parent_commit;
+ALTER TABLE lsif_commits DROP repository_id;
+
+COMMIT;
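The down migration restores name-keyed rows by joining back through the `repo` table; a repository renamed or deleted since upload cannot round-trip exactly, which is also why the up migration below deletes rows it cannot correlate. On the TypeScript side these columns surface through the TypeORM entities, which are not part of this diff; the mapping below is a hypothetical sketch whose property names follow the `repositoryId` and `repositoryNameAtUpload` usage in the worker code above.

```typescript
import { Column, Entity, PrimaryGeneratedColumn } from 'typeorm'

// Hypothetical sketch of the migrated lsif_uploads columns as a TypeORM entity.
@Entity({ name: 'lsif_uploads' })
export class LsifUploadSketch {
    @PrimaryGeneratedColumn('increment', { type: 'int' })
    public id!: number

    // Stable repository identifier; replaces the old `repository` name column.
    @Column('int', { name: 'repository_id' })
    public repositoryId!: number

    // Retained for display only, since repositories can be renamed after upload.
    @Column('text', { name: 'repository_name_at_upload' })
    public repositoryNameAtUpload!: string
}
```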
diff --git a/migrations/1528395637_lsif_repository_ids.up.sql b/migrations/1528395637_lsif_repository_ids.up.sql
new file mode 100644
index 000000000000..c8e40b0862c5
--- /dev/null
+++ b/migrations/1528395637_lsif_repository_ids.up.sql
@@ -0,0 +1,52 @@
+-- Note: `commit` is a reserved word, so it's quoted.
+
+BEGIN;
+
+--
+-- lsif_uploads
+--
+
+-- Rename column
+ALTER TABLE lsif_uploads RENAME COLUMN repository TO repository_name_at_upload;
+
+-- Add new repository identifier column
+ALTER TABLE lsif_uploads ADD repository_id int;
+CREATE UNIQUE INDEX lsif_uploads_repository_id_commit_root ON lsif_uploads(repository_id, "commit", root) WHERE state = 'completed'::lsif_upload_state;
+CREATE INDEX lsif_uploads_visible_repository_id_commit ON lsif_uploads(repository_id, "commit") WHERE visible_at_tip;
+
+-- Populate new column and delete any uploads that we can't correlate
+UPDATE lsif_uploads u SET repository_id = (SELECT id FROM repo r WHERE r.name = u.repository_name_at_upload LIMIT 1);
+DELETE FROM lsif_uploads WHERE repository_id IS NULL;
+ALTER TABLE lsif_uploads ALTER COLUMN repository_id SET NOT NULL;
+
+-- Drop view dependent on old column
+DROP VIEW lsif_dumps;
+
+-- Drop old column constraints/indexes
+DROP INDEX lsif_uploads_repository_commit_root;
+DROP INDEX lsif_uploads_visible_repository_commit;
+ALTER TABLE lsif_uploads DROP CONSTRAINT lsif_uploads_repository_check;
+
+-- Recreate view with new column names
+CREATE VIEW lsif_dumps AS SELECT u.*, u.finished_at as processed_at FROM lsif_uploads u WHERE state = 'completed';
+
+--
+-- lsif_commits
+--
+
+-- Add new column
+ALTER TABLE lsif_commits ADD repository_id int;
+CREATE UNIQUE INDEX lsif_commits_repository_id_commit_parent_commit_unique ON lsif_commits(repository_id, "commit", parent_commit);
+CREATE INDEX lsif_commits_repository_id_parent_commit ON lsif_commits(repository_id, parent_commit);
+
+-- Populate new column and delete any commits that we can't correlate
+UPDATE lsif_commits c SET repository_id = (SELECT id FROM repo r WHERE r.name = c.repository LIMIT 1);
+DELETE FROM lsif_commits WHERE repository_id IS NULL;
+ALTER TABLE lsif_commits ALTER COLUMN repository_id SET NOT NULL;
+
+-- Drop old column
+DROP INDEX lsif_commits_repo_commit_parent_commit_unique;
+DROP INDEX lsif_commits_parent_commit;
+ALTER TABLE lsif_commits DROP repository;
+
+COMMIT;
diff --git a/migrations/bindata.go b/migrations/bindata.go
index c72b1e355fe2..bac162cfb8dd 100644
--- a/migrations/bindata.go
+++ b/migrations/bindata.go
@@ -66,6 +66,8 @@
 // 1528395635_check_campaign_name_not_blank.up.sql (189B)
 // 1528395636_add_published_at_to_changeset_jobs.down.sql (80B)
 // 1528395636_add_published_at_to_changeset_jobs.up.sql (81B)
+// 1528395637_lsif_repository_ids.down.sql (1.825kB)
+// 1528395637_lsif_repository_ids.up.sql (2.062kB)
 
 package migrations
 
@@ -1454,6 +1456,46 @@ func _1528395636_add_published_at_to_changeset_jobsUpSql() (*asset, error) {
 	return a, nil
 }
 
+var __1528395637_lsif_repository_idsDownSql =
[]byte("\x1f\x8b\x08\x00\x00\x00\x00\x00\x00\xff\x9c\x54\x4d\x6f\x9b\x40\x14\xbc\xf3\x2b\x46\xbe\x38\xae\x8c\xa5\x5e\xed\xa6\x92\x03\xdb\x06\x15\x43\x8a\x71\xd3\x1b\xa1\xec\x8b\xb2\x2a\x66\xc9\xee\x12\xb7\xff\xbe\x02\x9b\x14\xfc\x81\xd2\x1e\x61\xdf\xbc\x7d\xf3\x66\x66\x6d\x1b\x81\x34\x34\xc7\x43\x26\xb7\x5b\x61\x1e\x20\x34\x52\x28\xd2\xa4\x5e\x88\x63\x27\x15\x9f\x42\x4b\x08\x33\xd6\x78\xae\xa4\x21\x3e\xb3\xac\x1b\xf6\xd9\x0b\x16\x96\x65\xdb\x96\x6d\x23\xd7\xe2\x31\xa9\xca\x5c\xa6\x5c\xd7\xbf\xea\x7f\x11\x69\x23\x15\x41\xe6\x1c\x99\xcc\xab\x6d\x61\x2d\xfd\x98\x45\x88\x97\x37\x3e\xeb\x41\x10\xb1\x60\xb9\x62\x70\x42\x7f\xb3\x0a\xa0\xa8\x94\x5a\x18\xa9\x7e\x27\x45\xba\xa5\x24\x35\x87\x42\xc4\x61\xe7\x70\x61\x39\x11\x5b\xc6\x0c\x9b\xc0\xfb\xba\x61\xf0\x02\x97\x7d\xef\xf5\x4d\x3a\x9d\xf6\xec\x12\x25\xa5\x81\x2c\x7a\x65\x57\x7f\xcb\xa6\x18\xed\x0b\x47\x53\xd4\xa5\x13\xdc\xdf\xb2\x88\x41\x9b\xd4\x10\xae\x31\xce\xe4\xb6\xcc\xc9\x10\x1f\xcf\xe7\x9d\x1e\x49\x53\xf0\x3a\xd1\x99\x51\x5e\x84\x16\x3f\x72\x3a\x1d\xe9\x4d\xd3\xb4\x73\xb4\x5d\x52\x93\x18\x51\x2e\x2e\x6f\x74\xe9\xba\x70\xc2\x60\x1d\x47\x4b\x2f\x88\x31\xba\xb8\x96\x27\xca\x7e\x8e\xe0\xdc\x32\xe7\x0b\x3a\x57\xe3\xc3\x47\x8c\xc7\xf3\xb9\xa1\x5f\x66\xb2\x38\x08\x6a\x97\xb2\xac\xf2\x7a\x13\x1d\x51\x37\x77\x6e\x4d\xb9\x77\x7b\x85\x35\x8b\x3b\x52\xe1\x1a\x57\x6b\xe6\x33\x27\x46\xad\x28\x3e\x45\xe1\xaa\x39\x86\x3a\x10\x53\x33\xc1\x71\x8d\x6a\xd6\x99\x4d\x70\xf8\xde\xca\x8b\xf1\x7e\x32\xc4\xb4\x39\x38\xb5\x8e\xe0\xcd\x10\x41\x18\x23\xd8\xf8\xfe\x9e\x83\xab\x64\x89\x17\x41\x3b\x70\x2a\xa9\xe0\x54\x34\xfb\x2f\x68\xd7\xd2\x71\xa3\xf0\x0e\xdf\x3c\x76\xbf\xbf\x85\x57\xdb\x52\x77\xb0\xc7\x95\xc3\xa6\x13\xbc\xeb\xbb\xc5\x45\xc8\x19\x73\xbc\x42\x07\xa8\x37\xed\x7a\x98\x56\xa9\x4c\x51\x2d\x53\xc3\x74\x27\xcc\x53\x67\xee\x46\x01\xdd\x3a\xf5\x88\x29\x96\x6b\x1c\x84\xaa\x66\xef\xa6\xa8\x66\x8f\xa2\x10\xfa\x89\x78\x92\x1a\xa4\x1a\xa5\x92\x19\x69\xbd\xff\x6e\x64\x3c\x12\xfe\x62\x5e\xfa\x8f\xc5\x9e\xdb\xd0\x63\x61\xdb\x88\x43\x37\x3c\x65\x7f\x80\x36\x16\xef\x58\xac\x76\xea\xc0\x93\x70\x40\x35\x2b\x6e\x45\x29\x53\x45\x85\x69\xbf\xaa\x42\x3c\x57\x84\x30\xe8\x01\x2e\x3c\x0e\x3d\xe8\xe4\x5c\xf0\xdb\x0b\x7b\x95\x83\xdd\x8f\x7b\xbe\x31\x74\xed\x3e\xb2\xff\x0d\x5d\xf6\xf6\xd0\xbd\xee\xfe\x9f\x43\x37\x10\x9c\xae\x34\x27\xc1\x39\xa7\xd1\x69\x90\xce\xb7\xe8\x61\x07\xd8\x9c\xcd\x91\x13\xae\x56\x5e\xbc\xb0\xfe\x04\x00\x00\xff\xff\x68\x95\x0e\x6d\x21\x07\x00\x00") + +func _1528395637_lsif_repository_idsDownSqlBytes() ([]byte, error) { + return bindataRead( + __1528395637_lsif_repository_idsDownSql, + "1528395637_lsif_repository_ids.down.sql", + ) +} + +func _1528395637_lsif_repository_idsDownSql() (*asset, error) { + bytes, err := _1528395637_lsif_repository_idsDownSqlBytes() + if err != nil { + return nil, err + } + + info := bindataFileInfo{name: "1528395637_lsif_repository_ids.down.sql", size: 0, mode: os.FileMode(0), modTime: time.Unix(0, 0)} + a := &asset{bytes: bytes, info: info, digest: [32]uint8{0xb8, 0x6d, 0x5f, 0x28, 0x78, 0x7e, 0x3a, 0xe7, 0xd6, 0x23, 0x89, 0x9a, 0xe3, 0xcd, 0xa3, 0x65, 0x76, 0xdc, 0xf9, 0x81, 0x2d, 0xae, 0xdc, 0xff, 0x69, 0xad, 0x3f, 0x75, 0xd5, 0xff, 0x72, 0xd8}} + return a, nil +} + +var __1528395637_lsif_repository_idsUpSql = 
[]byte("\x1f\x8b\x08\x00\x00\x00\x00\x00\x00\xff\xa4\x54\x4d\x73\x9b\x30\x14\xbc\xf3\x2b\xde\xe4\xe2\xa4\x63\xbb\xd3\x6b\x98\x1c\x88\x51\x5b\x66\x30\xa4\x18\x37\xbd\x11\x15\xbd\x8c\x35\xc5\x12\x91\x44\xdc\xfc\xfb\x8e\xf8\x70\xc0\x31\x4e\x3c\x3d\x02\xbb\xef\x2d\xab\xd5\xce\x66\x10\x49\x83\xd7\xf0\x90\xcb\xed\x96\x9b\x07\xe0\x1a\x28\x28\xd4\xa8\x9e\x91\xc1\x4e\x2a\x36\x05\x2d\x81\x9b\x89\x86\xa7\x4a\x1a\x64\x73\xc7\xb9\x25\xdf\x82\xc8\x75\x9c\xd9\xcc\x99\xcd\xa0\xd0\xfc\x31\xab\xca\x42\x52\xa6\xed\x2b\xfb\x2e\x41\x41\xb7\x08\xb9\x2c\xaa\xad\x70\xbc\x30\x25\x09\xa4\xde\x6d\x48\x06\x68\x48\x48\xe4\x2d\x09\x2c\xe2\x70\xbd\x8c\x40\x61\x29\x35\x37\x52\xbd\x40\x1a\xf7\x9e\x32\x3b\x2b\xa3\xa6\xa5\xd5\x8b\xc1\x63\x0c\x04\xee\xfa\x24\xce\x50\x18\xfe\xc8\x51\xbd\xbb\xd8\xf3\xfd\xfe\x02\xce\x80\x0b\xe3\x3a\x8b\x84\x78\x29\x81\x75\x14\xfc\x58\x13\x08\x22\x9f\xfc\x1a\xf0\xb2\x01\x27\x6b\x4c\xcb\x94\x94\x06\xe2\x68\x80\xbc\x1c\x20\xa7\x70\xd1\x60\x2f\xa6\x60\xd1\x57\x70\xff\x9d\x24\x04\xb4\xa1\x06\xe1\x06\x26\xb9\xdc\x96\x05\x1a\x64\x93\xeb\xeb\xde\x98\xac\x06\xec\x75\x1d\x11\xf4\xcc\x35\xff\x5d\xe0\x51\x61\x1f\xd5\xd4\xa9\xe9\x66\x51\x93\x19\x5e\x36\x36\xdf\xc9\xb2\x2a\xac\x48\xeb\x75\xe3\x2a\x50\xc1\x80\xa1\x95\x0b\x54\xbc\x40\xe7\xa9\xd9\x50\x03\x3b\x84\x9c\x8a\x89\x81\x5c\x2a\x85\x96\xe9\xac\xef\x7c\x2b\x7e\xe0\x7f\x05\x2b\x92\x1e\x9c\xc0\x0d\x5c\xae\x48\x48\x16\x29\x70\x06\x5f\x93\x78\x59\x7f\x07\xd5\xaa\x53\xf3\x3a\x52\x37\x50\xcd\x47\xa3\x01\x61\xb0\x0c\x52\xf8\x72\xe5\x3a\x3e\x09\x49\x4a\x9a\x39\x83\xd5\xed\xb4\xc1\xea\x60\x05\xd1\x3a\x0c\xdd\x13\x81\xa9\x3f\xbc\x09\xaa\x25\xdb\x3f\x89\xe2\xb4\x9d\x60\x4d\xf3\x95\x2c\xe1\x99\xe3\x0e\x18\x96\x28\x6c\x2c\x41\x0a\x90\x05\xeb\x82\xe9\x27\xf1\x1d\xfc\x0c\xc8\x7d\xb3\x85\x55\xdb\x52\xf7\xb8\xaf\x48\xc8\xa5\xd0\x46\x51\x2e\x8c\xfe\xcc\x05\xc3\xbf\xa8\x1b\xf6\xe9\x74\xf6\xa2\xe9\x8e\xe2\x8f\x84\xa7\xe1\x9d\x30\xa2\x9e\xb5\x88\xa3\x55\x9a\x78\x41\x94\x8e\x0b\xd8\x60\xfe\xc7\x6d\xcb\x20\x57\x68\x33\x54\x5b\xb2\xe3\x66\xd3\x4f\x93\x3d\x41\xdd\x25\xfc\xc0\x12\xf0\x56\xd0\x66\xa2\x9a\x7f\x9a\x42\x35\x7f\xe4\x82\xeb\x0d\xb2\x8c\x1a\xa0\x1a\x4a\x25\x73\xd4\xba\x79\x7e\x7b\xd6\xd5\xf8\x3d\x1b\xd6\x57\xf3\xdb\xfb\xfa\xea\xca\x65\xac\x46\x5a\xf8\xd9\x35\xd2\xf2\x8e\xd7\x48\x49\x15\x0a\xd3\x3d\x55\x82\x3f\x55\xb8\xbf\xc2\x2d\x73\xbc\x56\x06\xec\xab\x63\x95\x71\x7c\xf9\x80\xf7\xde\xba\xc3\x25\x1f\x6c\x88\xce\xae\x8f\x34\x44\x87\xcd\xff\xa3\x21\xf2\x5e\x43\x9c\xaa\x84\x6e\xd7\x79\x95\xb0\x3f\xfc\xb3\x2b\xe1\xb0\x00\x46\x0e\xe7\x54\x20\xde\x5e\xe6\x8e\x39\x40\x9f\x90\x5d\xf3\x5f\xe5\xba\x8e\xb3\x88\x97\xcb\x20\x75\x9d\x7f\x01\x00\x00\xff\xff\x80\x12\x3a\xd6\x0e\x08\x00\x00") + +func _1528395637_lsif_repository_idsUpSqlBytes() ([]byte, error) { + return bindataRead( + __1528395637_lsif_repository_idsUpSql, + "1528395637_lsif_repository_ids.up.sql", + ) +} + +func _1528395637_lsif_repository_idsUpSql() (*asset, error) { + bytes, err := _1528395637_lsif_repository_idsUpSqlBytes() + if err != nil { + return nil, err + } + + info := bindataFileInfo{name: "1528395637_lsif_repository_ids.up.sql", size: 0, mode: os.FileMode(0), modTime: time.Unix(0, 0)} + a := &asset{bytes: bytes, info: info, digest: [32]uint8{0x15, 0xca, 0x8c, 0xea, 0x9, 0x92, 0xcd, 0xfe, 0xd5, 0x1d, 0x6, 0xf8, 0xfd, 0x9f, 0x86, 0xf3, 0x83, 0xd6, 0x55, 0xfb, 0x15, 0x47, 0x58, 0x6f, 0xe7, 0xff, 0x70, 0xdc, 0xa9, 0xac, 0x73, 0x18}} + return a, nil +} + // Asset loads and returns the asset for the given name. // It returns an error if the asset could not be found or // could not be loaded. 
@@ -1611,6 +1653,8 @@ var _bindata = map[string]func() (*asset, error){
 	"1528395635_check_campaign_name_not_blank.up.sql": _1528395635_check_campaign_name_not_blankUpSql,
 	"1528395636_add_published_at_to_changeset_jobs.down.sql": _1528395636_add_published_at_to_changeset_jobsDownSql,
 	"1528395636_add_published_at_to_changeset_jobs.up.sql": _1528395636_add_published_at_to_changeset_jobsUpSql,
+	"1528395637_lsif_repository_ids.down.sql": _1528395637_lsif_repository_idsDownSql,
+	"1528395637_lsif_repository_ids.up.sql": _1528395637_lsif_repository_idsUpSql,
 }

 // AssetDir returns the file names below a certain
@@ -1720,6 +1764,8 @@ var _bintree = &bintree{nil, map[string]*bintree{
 	"1528395635_check_campaign_name_not_blank.up.sql": {_1528395635_check_campaign_name_not_blankUpSql, map[string]*bintree{}},
 	"1528395636_add_published_at_to_changeset_jobs.down.sql": {_1528395636_add_published_at_to_changeset_jobsDownSql, map[string]*bintree{}},
 	"1528395636_add_published_at_to_changeset_jobs.up.sql": {_1528395636_add_published_at_to_changeset_jobsUpSql, map[string]*bintree{}},
+	"1528395637_lsif_repository_ids.down.sql": {_1528395637_lsif_repository_idsDownSql, map[string]*bintree{}},
+	"1528395637_lsif_repository_ids.up.sql": {_1528395637_lsif_repository_idsUpSql, map[string]*bintree{}},
 }}

 // RestoreAsset restores an asset under the given directory.
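The page below no longer reads a repository name off the upload record (the GraphQL field is deleted at the bottom of this diff); instead the parent settings area must pass the resolved repository in as a prop. A hypothetical wiring sketch, with an assumed route path since the real route table is outside this diff:

```tsx
import * as React from 'react'
import { Route, RouteComponentProps } from 'react-router'
import { RepoSettingsLsifUploadPage } from './RepoSettingsLsifUploadPage'

// Assumed parent wiring: the settings area already holds the resolved
// repository (GQL.IRepository, imported as elsewhere) and threads it through.
export const lsifUploadRoute = (repo: GQL.IRepository): JSX.Element => (
    <Route
        path="/settings/code-intelligence/lsif-uploads/:id"
        render={(props: RouteComponentProps<{ id: string }>) => (
            <RepoSettingsLsifUploadPage repo={repo} {...props} />
        )}
    />
)
```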
diff --git a/web/src/enterprise/repo/settings/RepoSettingsLsifUploadPage.tsx b/web/src/enterprise/repo/settings/RepoSettingsLsifUploadPage.tsx
index 7e1aa8e77c58..caa83baca0a1 100644
--- a/web/src/enterprise/repo/settings/RepoSettingsLsifUploadPage.tsx
+++ b/web/src/enterprise/repo/settings/RepoSettingsLsifUploadPage.tsx
@@ -15,12 +15,15 @@ import { RouteComponentProps } from 'react-router'
 import { Timestamp } from '../../../components/time/Timestamp'
 import { useObservable } from '../../../util/useObservable'
 
-interface Props extends RouteComponentProps<{ id: string }> {}
+interface Props extends RouteComponentProps<{ id: string }> {
+    repo: GQL.IRepository
+}
 
 /**
  * A page displaying metadata about an LSIF upload.
  */
 export const RepoSettingsLsifUploadPage: FunctionComponent<Props> = ({
+    repo,
     match: {
         params: { id },
     },
@@ -34,28 +37,24 @@ export const RepoSettingsLsifUploadPage: FunctionComponent<Props> = ({
 
     return (
         <div className="repo-settings-lsif-upload-page">
-            {!uploadOrError ? (
-                <LoadingSpinner className="icon-inline" />
-            ) : isErrorLike(uploadOrError) ? (
+            {isErrorLike(uploadOrError) ? (
                 <ErrorAlert error={uploadOrError} />
+            ) : !uploadOrError ? (
+                <LoadingSpinner className="icon-inline" />
             ) : (
                 <>
-                    <div>
+                    <div className="mb-1">
                         <h2 className="mb-0">
-                            Upload for{' '}
+                            Upload for commit{' '}
                             {uploadOrError.projectRoot
-                                ? lsifUploadDescription(
-                                      uploadOrError.projectRoot.commit.repository.name,
-                                      uploadOrError.projectRoot.commit.abbreviatedOID,
-                                      uploadOrError.projectRoot.path
-                                  )
-                                : lsifUploadDescription(
-                                      uploadOrError.inputRepoName,
-                                      uploadOrError.inputCommit.substring(0, 7),
-                                      uploadOrError.inputRoot
-                                  )}
+                                ? uploadOrError.projectRoot.commit.abbreviatedOID
+                                : uploadOrError.inputCommit.substring(0, 7)}
+                            {uploadOrError.inputRoot !== '' &&
+                                ` rooted at ${
+                                    uploadOrError.projectRoot ? uploadOrError.projectRoot.path : uploadOrError.inputRoot
+                                }`}
                         </h2>
                     </div>
@@ -88,7 +87,7 @@ export const RepoSettingsLsifUploadPage: FunctionComponent<Props> = ({
                                         {uploadOrError.projectRoot.commit.repository.name}
                                     ) : (
-                                        uploadOrError.inputRepoName
+                                        repo.name
                                     )}
@@ -159,7 +158,3 @@ export const RepoSettingsLsifUploadPage: FunctionComponent<Props> = ({
         </div>
     )
 }
-
-export function lsifUploadDescription(repoName: string, commit: string, root: string): string {
-    return `${repoName}@${commit}${root === '' ? '' : ` rooted at ${root}`}`
-}
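The removed `lsifUploadDescription` helper is not replaced one for one: its formatting is inlined into the heading JSX, minus the repository name, which now renders from the `repo` prop. As a pure function, the inlined heading logic is equivalent to this sketch (hypothetical name):

```typescript
// Equivalent pure-function form of the inlined heading text above.
export function uploadHeading(abbreviatedCommit: string, root: string): string {
    return `Upload for commit ${abbreviatedCommit}${root === '' ? '' : ` rooted at ${root}`}`
}
```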
diff --git a/web/src/enterprise/repo/settings/backend.tsx b/web/src/enterprise/repo/settings/backend.tsx
index 4d3eb3754a7e..f64532a3114e 100644
--- a/web/src/enterprise/repo/settings/backend.tsx
+++ b/web/src/enterprise/repo/settings/backend.tsx
@@ -45,7 +45,6 @@ export function fetchLsifUploads({
                     path
                     url
                 }
-                inputRepoName
                 inputCommit
                 inputRoot
                 uploadedAt
@@ -103,7 +102,6 @@ export function fetchLsifUpload({ id }: { id: string }): Observable