author    Robert Findley <rfindley@google.com>  2023-01-12 13:51:49 -0500
committer Robert Findley <rfindley@google.com>  2023-01-26 21:09:09 +0000
commit    f10e7d56a323823a26e612712857b47fab7ec65a (patch)
tree      59ca16cf9df5880bb3743337244e9185e338a50a
parent    827075753a9d1db28951d9fa52deee63cdf5f857 (diff)
download  golang-x-tools-f10e7d56a323823a26e612712857b47fab7ec65a.tar.gz
gopls/internal/lsp/cache: remove package dependence on packages.Config
The cache.pkg type was a mix of metadata-related information and type checking
information, resulting in unnecessary relationships between type-checking
results (which are shared) and loading results (which are not shared). As a
result, the experimentalPackageCacheKey was more or less a hope that these
relationships were valid.

Avoid this relationship altogether by separating the shared type-checking
result from other derived calculations. This makes the
experimentalPackageCacheKey obsolete and lays the groundwork for type-checking
from export data.

Additionally:
- revisit the package cache key to ensure it covers all inputs into
  type-checking, and make it more similar to the analysis key
- remove methods from the source.Package API that return source.Package: we
  can't have edges between packages if they are going to be standalone
- remove the experimentalPackageCacheKey setting
- add a test for go list errors
- use the proper types.Sizes when type-checking
- address a comment from an earlier CL in completion_test.go

Fixes golang/go#57853

Change-Id: I238913c7c8305cb534db77ebec5f062e96ed2503
Reviewed-on: https://go-review.googlesource.com/c/tools/+/461944
Run-TryBot: Robert Findley <rfindley@google.com>
Reviewed-by: Alan Donovan <adonovan@google.com>
gopls-CI: kokoro <noreply+kokoro@google.com>
TryBot-Result: Gopher Robot <gobot@golang.org>
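The core idea of the change is that shared, cacheable type-checking results must not reference unshared load results. A minimal, self-contained sketch of that separation, using hypothetical names (loadResult, checkResult, pkg) rather than the actual gopls types:

package main

import "fmt"

// loadResult stands in for per-snapshot go/packages metadata (not shared).
type loadResult struct {
	id, dir string
	errs    []string
}

// checkResult stands in for the shared, cacheable type-checking output.
type checkResult struct {
	id      string
	exports []string
}

// pkg pairs the two without letting the cached half depend on load data.
type pkg struct {
	load  *loadResult
	check *checkResult // may be shared across snapshots
}

func main() {
	shared := &checkResult{id: "example.com/p", exports: []string{"F"}}
	p1 := pkg{load: &loadResult{id: "example.com/p", dir: "/ws1"}, check: shared}
	p2 := pkg{load: &loadResult{id: "example.com/p", dir: "/ws2"}, check: shared}
	fmt.Println(p1.check == p2.check) // true: one shared type-checking result
}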
-rw-r--r--  gopls/doc/settings.md  14
-rw-r--r--  gopls/internal/lsp/cache/analysis.go  11
-rw-r--r--  gopls/internal/lsp/cache/cache.go  2
-rw-r--r--  gopls/internal/lsp/cache/check.go  293
-rw-r--r--  gopls/internal/lsp/cache/errors.go  74
-rw-r--r--  gopls/internal/lsp/cache/errors_test.go (renamed from gopls/internal/lsp/cache/error_test.go)  0
-rw-r--r--  gopls/internal/lsp/cache/load.go  14
-rw-r--r--  gopls/internal/lsp/cache/pkg.go  205
-rw-r--r--  gopls/internal/lsp/cache/snapshot.go  88
-rw-r--r--  gopls/internal/lsp/code_action.go  8
-rw-r--r--  gopls/internal/lsp/diagnostics.go  9
-rw-r--r--  gopls/internal/lsp/mod/diagnostics.go  6
-rw-r--r--  gopls/internal/lsp/semantic.go  50
-rwxr-xr-x  gopls/internal/lsp/source/api_json.go  8
-rw-r--r--  gopls/internal/lsp/source/completion/completion.go  30
-rw-r--r--  gopls/internal/lsp/source/completion/format.go  4
-rw-r--r--  gopls/internal/lsp/source/completion/literal.go  10
-rw-r--r--  gopls/internal/lsp/source/completion/statements.go  31
-rw-r--r--  gopls/internal/lsp/source/diagnostics.go  19
-rw-r--r--  gopls/internal/lsp/source/highlight.go  2
-rw-r--r--  gopls/internal/lsp/source/hover.go  22
-rw-r--r--  gopls/internal/lsp/source/identifier.go  18
-rw-r--r--  gopls/internal/lsp/source/implementation.go  26
-rw-r--r--  gopls/internal/lsp/source/implementation2.go  2
-rw-r--r--  gopls/internal/lsp/source/options.go  22
-rw-r--r--  gopls/internal/lsp/source/references.go  6
-rw-r--r--  gopls/internal/lsp/source/references2.go  4
-rw-r--r--  gopls/internal/lsp/source/rename.go  12
-rw-r--r--  gopls/internal/lsp/source/rename_check.go  13
-rw-r--r--  gopls/internal/lsp/source/signature_help.go  2
-rw-r--r--  gopls/internal/lsp/source/types_format.go  8
-rw-r--r--  gopls/internal/lsp/source/util.go  87
-rw-r--r--  gopls/internal/lsp/source/view.go  24
-rw-r--r--  gopls/internal/lsp/source/xrefs/xrefs.go  12
-rw-r--r--  gopls/internal/regtest/bench/completion_test.go  13
-rw-r--r--  gopls/internal/regtest/diagnostics/diagnostics_test.go  1
-rw-r--r--  gopls/internal/regtest/diagnostics/golist_test.go  68
-rw-r--r--  gopls/internal/regtest/misc/hover_test.go  4
38 files changed, 711 insertions, 511 deletions
diff --git a/gopls/doc/settings.md b/gopls/doc/settings.md
index 1b8f6d779..52a753910 100644
--- a/gopls/doc/settings.md
+++ b/gopls/doc/settings.md
@@ -116,20 +116,6 @@ a go.mod file, narrowing the scope to that directory if it exists.
Default: `true`.
-#### **experimentalPackageCacheKey** *bool*
-
-**This setting is experimental and may be deleted.**
-
-experimentalPackageCacheKey controls whether to use a coarser cache key
-for package type information to increase cache hits. This setting removes
-the user's environment, build flags, and working directory from the cache
-key, which should be a safe change as all relevant inputs into the type
-checking pass are already hashed into the key. This is temporarily guarded
-by an experiment because caching behavior is subtle and difficult to
-comprehensively test.
-
-Default: `true`.
-
#### **allowModfileModifications** *bool*
**This setting is experimental and may be deleted.**
diff --git a/gopls/internal/lsp/cache/analysis.go b/gopls/internal/lsp/cache/analysis.go
index e9f1c7f7c..0005b0e44 100644
--- a/gopls/internal/lsp/cache/analysis.go
+++ b/gopls/internal/lsp/cache/analysis.go
@@ -520,10 +520,8 @@ func analysisCacheKey(analyzers []*analysis.Analyzer, m *source.Metadata, compil
sz := m.TypesSizes.(*types.StdSizes)
fmt.Fprintf(hasher, "sizes: %d %d\n", sz.WordSize, sz.MaxAlign)
- // metadata errors
- for _, err := range m.Errors {
- fmt.Fprintf(hasher, "error: %q", err)
- }
+ // metadata errors: used for 'compiles' field
+ fmt.Fprintf(hasher, "errors: %d", len(m.Errors))
// module Go version
if m.Module != nil && m.Module.GoVersion != "" {
@@ -542,8 +540,8 @@ func analysisCacheKey(analyzers []*analysis.Analyzer, m *source.Metadata, compil
depIDs = append(depIDs, string(depID))
}
sort.Strings(depIDs)
- for _, id := range depIDs {
- vdep := vdeps[PackageID(id)]
+ for _, depID := range depIDs {
+ vdep := vdeps[PackageID(depID)]
fmt.Fprintf(hasher, "dep: %s\n", vdep.PkgPath)
fmt.Fprintf(hasher, "export: %s\n", vdep.DeepExportHash)
@@ -766,6 +764,7 @@ func typeCheckForAnalysis(fset *token.FileSet, parsed []*source.ParsedGoFile, m
}
cfg := &types.Config{
+ Sizes: m.TypesSizes,
Error: func(e error) {
pkg.compiles = false // type error
pkg.typeErrors = append(pkg.typeErrors, e.(types.Error))
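The analysis key now hashes the word size and max alignment, and the same sizes are passed to the type checker, since layout-dependent results differ between targets. A small standalone illustration using go/types directly (not gopls code):

package main

import (
	"fmt"
	"go/types"
)

func main() {
	// gopls now passes the loaded package's sizes to the type checker
	// (types.Config{Sizes: ...}) and hashes WordSize/MaxAlign into its
	// cache keys, because layout-dependent results differ per target.
	for _, arch := range []string{"386", "amd64"} {
		sizes := types.SizesFor("gc", arch)
		fmt.Printf("%-5s Sizeof(int)=%d Alignof(int64)=%d\n",
			arch,
			sizes.Sizeof(types.Typ[types.Int]),
			sizes.Alignof(types.Typ[types.Int64]))
	}
	// 386:   Sizeof(int)=4 Alignof(int64)=4
	// amd64: Sizeof(int)=8 Alignof(int64)=8
}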
diff --git a/gopls/internal/lsp/cache/cache.go b/gopls/internal/lsp/cache/cache.go
index ef56b12ef..9da185ccc 100644
--- a/gopls/internal/lsp/cache/cache.go
+++ b/gopls/internal/lsp/cache/cache.go
@@ -209,7 +209,7 @@ func (c *Cache) PackageStats(withNames bool) template.HTML {
typsCost := typesCost(v.pkg.types.Scope())
typInfoCost := typesInfoCost(v.pkg.typesInfo)
stat := packageStat{
- id: v.pkg.m.ID,
+ id: v.pkg.id,
mode: v.pkg.mode,
types: typsCost,
typesInfo: typInfoCost,
diff --git a/gopls/internal/lsp/cache/check.go b/gopls/internal/lsp/cache/check.go
index 722691d41..03691e50e 100644
--- a/gopls/internal/lsp/cache/check.go
+++ b/gopls/internal/lsp/cache/check.go
@@ -5,21 +5,21 @@
package cache
import (
- "bytes"
"context"
+ "crypto/sha256"
"errors"
"fmt"
"go/ast"
"go/types"
"path/filepath"
"regexp"
+ "sort"
"strings"
"sync"
"golang.org/x/mod/module"
"golang.org/x/sync/errgroup"
"golang.org/x/tools/go/ast/astutil"
- "golang.org/x/tools/go/packages"
"golang.org/x/tools/gopls/internal/lsp/protocol"
"golang.org/x/tools/gopls/internal/lsp/source"
"golang.org/x/tools/gopls/internal/lsp/source/methodsets"
@@ -42,12 +42,14 @@ type packageKey struct {
type packageHandleKey source.Hash
-// A packageHandle is a handle to the future result of type-checking a package.
-// The resulting package is obtained from the await() method.
+// A packageHandle holds package information, some of which may not be fully
+// evaluated.
+//
+// The only methods on packageHandle that are safe to call before calling await
+// are Metadata and await itself.
type packageHandle struct {
- promise *memoize.Promise // [typeCheckResult]
-
- // m is the metadata associated with the package.
+ // TODO(rfindley): remove metadata from packageHandle. It is only used for
+ // bug detection.
m *source.Metadata
// key is the hashed key for the package.
@@ -58,12 +60,29 @@ type packageHandle struct {
// enough. (The key for analysis actions could similarly
// hash only Facts of direct dependencies.)
key packageHandleKey
+
+ // The shared type-checking promise.
+ promise *memoize.Promise // [typeCheckResult]
+}
+
+// typeCheckInputs contains the inputs of a call to typeCheckImpl, which
+// type-checks a package.
+type typeCheckInputs struct {
+ id PackageID
+ pkgPath PackagePath
+ name PackageName
+ mode source.ParseMode
+ goFiles, compiledGoFiles []source.FileHandle
+ sizes types.Sizes
+ deps map[PackageID]*packageHandle
+ depsByImpPath map[ImportPath]PackageID
+ goVersion string // packages.Module.GoVersion, e.g. "1.18"
}
// typeCheckResult contains the result of a call to
// typeCheckImpl, which type-checks a package.
type typeCheckResult struct {
- pkg *pkg
+ pkg *syntaxPackage
err error
}
@@ -101,7 +120,6 @@ func (s *snapshot) buildPackageHandle(ctx context.Context, id PackageID, mode so
// for each package is computed by at most one thread, then do
// the recursive key building of dependencies in parallel.
deps := make(map[PackageID]*packageHandle)
- var depKey source.Hash // XOR of all unique deps
for _, depID := range m.DepsByPkgPath {
depHandle, err := s.buildPackageHandle(ctx, depID, s.workspaceParseMode(depID))
// Don't use invalid metadata for dependencies if the top-level
@@ -121,9 +139,6 @@ func (s *snapshot) buildPackageHandle(ctx context.Context, id PackageID, mode so
// checking the entire package. Leave depKeys[i] unset.
continue
}
-
- depKey.XORWith(source.Hash(depHandle.key))
-
deps[depID] = depHandle
}
@@ -139,12 +154,29 @@ func (s *snapshot) buildPackageHandle(ctx context.Context, id PackageID, mode so
return nil, err
}
+ goVersion := ""
+ if m.Module != nil && m.Module.GoVersion != "" {
+ goVersion = m.Module.GoVersion
+ }
+
+ inputs := typeCheckInputs{
+ id: m.ID,
+ pkgPath: m.PkgPath,
+ name: m.Name,
+ mode: mode,
+ goFiles: goFiles,
+ compiledGoFiles: compiledGoFiles,
+ sizes: m.TypesSizes,
+ deps: deps,
+ depsByImpPath: m.DepsByImpPath,
+ goVersion: goVersion,
+ }
+
// All the file reading has now been done.
// Create a handle for the result of type checking.
- experimentalKey := s.View().Options().ExperimentalPackageCacheKey
- phKey := computePackageKey(m.ID, compiledGoFiles, m, depKey, mode, experimentalKey)
+ phKey := computePackageKey(s, inputs)
promise, release := s.store.Promise(phKey, func(ctx context.Context, arg interface{}) interface{} {
- pkg, err := typeCheckImpl(ctx, arg.(*snapshot), goFiles, compiledGoFiles, m, mode, deps)
+ pkg, err := typeCheckImpl(ctx, arg.(*snapshot), inputs)
return typeCheckResult{pkg, err}
})
@@ -165,10 +197,11 @@ func (s *snapshot) buildPackageHandle(ctx context.Context, id PackageID, mode so
// handles for type-checking its immediate deps, at which
// point there will be no need to even access s.meta.)
if s.meta.metadata[ph.m.ID] != ph.m {
+ // TODO(rfindley): this should be bug.Errorf.
return nil, fmt.Errorf("stale metadata for %s", ph.m.ID)
}
- // Check cache again in case another thread got there first.
+ // Check cache again in case another goroutine got there first.
if prev, ok := s.packages.Get(packageKey); ok {
prevPH := prev.(*packageHandle)
release()
@@ -223,61 +256,64 @@ func (s *snapshot) workspaceParseMode(id PackageID) source.ParseMode {
// computePackageKey returns a key representing the act of type checking
// a package named id containing the specified files, metadata, and
// combined dependency hash.
-func computePackageKey(id PackageID, files []source.FileHandle, m *source.Metadata, depsKey source.Hash, mode source.ParseMode, experimentalKey bool) packageHandleKey {
- // TODO(adonovan): opt: no need to materalize the bytes; hash them directly.
- // Also, use field separators to avoid spurious collisions.
- b := bytes.NewBuffer(nil)
- b.WriteString(string(id))
- if m.Module != nil {
- b.WriteString(m.Module.GoVersion) // go version affects type check errors.
- }
- if !experimentalKey {
- // cfg was used to produce the other hashed inputs (package ID, parsed Go
- // files, and deps). It should not otherwise affect the inputs to the type
- // checker, so this experiment omits it. This should increase cache hits on
- // the daemon as cfg contains the environment and working directory.
- hc := hashConfig(m.Config)
- b.Write(hc[:])
- }
- b.WriteByte(byte(mode))
- b.Write(depsKey[:])
- for _, file := range files {
- b.WriteString(file.FileIdentity().String())
- }
- // Metadata errors are interpreted and memoized on the computed package, so
- // we must hash them into the key here.
- //
- // TODO(rfindley): handle metadata diagnostics independently from
- // type-checking diagnostics.
- for _, err := range m.Errors {
- b.WriteString(err.Msg)
- b.WriteString(err.Pos)
- b.WriteRune(rune(err.Kind))
- }
- return packageHandleKey(source.HashOf(b.Bytes()))
-}
+func computePackageKey(s *snapshot, inputs typeCheckInputs) packageHandleKey {
+ hasher := sha256.New()
+
+ // In principle, a key must be the hash of an
+ // unambiguous encoding of all the relevant data.
+ // If it's ambiguous, we risk collisions.
+
+ // package identifiers
+ fmt.Fprintf(hasher, "package: %s %s %s\n", inputs.id, inputs.name, inputs.pkgPath)
+
+ // module Go version
+ fmt.Fprintf(hasher, "go %s\n", inputs.goVersion)
+
+ // parse mode
+ fmt.Fprintf(hasher, "mode %d\n", inputs.mode)
-// hashConfig returns the hash for the *packages.Config.
-func hashConfig(config *packages.Config) source.Hash {
- // TODO(adonovan): opt: don't materialize the bytes; hash them directly.
- // Also, use sound field separators to avoid collisions.
- var b bytes.Buffer
+ // import map
+ importPaths := make([]string, 0, len(inputs.depsByImpPath))
+ for impPath := range inputs.depsByImpPath {
+ importPaths = append(importPaths, string(impPath))
+ }
+ sort.Strings(importPaths)
+ for _, impPath := range importPaths {
+ fmt.Fprintf(hasher, "import %s %s", impPath, string(inputs.depsByImpPath[ImportPath(impPath)]))
+ }
- // Dir, Mode, Env, BuildFlags are the parts of the config that can change.
- b.WriteString(config.Dir)
- b.WriteRune(rune(config.Mode))
+ // deps, in PackageID order
+ depIDs := make([]string, 0, len(inputs.deps))
+ for depID := range inputs.deps {
+ depIDs = append(depIDs, string(depID))
+ }
+ sort.Strings(depIDs)
+ for _, depID := range depIDs {
+ dep := inputs.deps[PackageID(depID)]
+ fmt.Fprintf(hasher, "dep: %s key:%s\n", dep.m.PkgPath, dep.key)
+ }
- for _, e := range config.Env {
- b.WriteString(e)
+ // file names and contents
+ fmt.Fprintf(hasher, "compiledGoFiles: %d\n", len(inputs.compiledGoFiles))
+ for _, fh := range inputs.compiledGoFiles {
+ fmt.Fprintln(hasher, fh.FileIdentity())
}
- for _, f := range config.BuildFlags {
- b.WriteString(f)
+ fmt.Fprintf(hasher, "goFiles: %d\n", len(inputs.goFiles))
+ for _, fh := range inputs.goFiles {
+ fmt.Fprintln(hasher, fh.FileIdentity())
}
- return source.HashOf(b.Bytes())
+
+ // types sizes
+ sz := inputs.sizes.(*types.StdSizes)
+ fmt.Fprintf(hasher, "sizes: %d %d\n", sz.WordSize, sz.MaxAlign)
+
+ var hash [sha256.Size]byte
+ hasher.Sum(hash[:0])
+ return packageHandleKey(hash)
}
// await waits for typeCheckImpl to complete and returns its result.
-func (ph *packageHandle) await(ctx context.Context, s *snapshot) (*pkg, error) {
+func (ph *packageHandle) await(ctx context.Context, s *snapshot) (*syntaxPackage, error) {
v, err := s.awaitPromise(ctx, ph.promise)
if err != nil {
return nil, err
@@ -286,15 +322,7 @@ func (ph *packageHandle) await(ctx context.Context, s *snapshot) (*pkg, error) {
return data.pkg, data.err
}
-func (ph *packageHandle) CompiledGoFiles() []span.URI {
- return ph.m.CompiledGoFiles
-}
-
-func (ph *packageHandle) ID() string {
- return string(ph.m.ID)
-}
-
-func (ph *packageHandle) cached() (*pkg, error) {
+func (ph *packageHandle) cached() (*syntaxPackage, error) {
v := ph.promise.Cached()
if v == nil {
return nil, fmt.Errorf("no cached type information for %s", ph.m.PkgPath)
@@ -306,13 +334,13 @@ func (ph *packageHandle) cached() (*pkg, error) {
// typeCheckImpl type checks the parsed source files in compiledGoFiles.
// (The resulting pkg also holds the parsed but not type-checked goFiles.)
// deps holds the future results of type-checking the direct dependencies.
-func typeCheckImpl(ctx context.Context, snapshot *snapshot, goFiles, compiledGoFiles []source.FileHandle, m *source.Metadata, mode source.ParseMode, deps map[PackageID]*packageHandle) (*pkg, error) {
+func typeCheckImpl(ctx context.Context, snapshot *snapshot, inputs typeCheckInputs) (*syntaxPackage, error) {
// Start type checking of direct dependencies,
// in parallel and asynchronously.
// As the type checker imports each of these
// packages, it will wait for its completion.
var wg sync.WaitGroup
- for _, dep := range deps {
+ for _, dep := range inputs.deps {
wg.Add(1)
go func(dep *packageHandle) {
dep.await(ctx, snapshot) // ignore result
@@ -328,56 +356,39 @@ func typeCheckImpl(ctx context.Context, snapshot *snapshot, goFiles, compiledGoF
defer wg.Wait()
var filter *unexportedFilter
- if mode == source.ParseExported {
+ if inputs.mode == source.ParseExported {
filter = &unexportedFilter{uses: map[string]bool{}}
}
- pkg, err := doTypeCheck(ctx, snapshot, goFiles, compiledGoFiles, m, mode, deps, filter)
+ pkg, err := doTypeCheck(ctx, snapshot, inputs, filter)
if err != nil {
return nil, err
}
- if mode == source.ParseExported {
+ if inputs.mode == source.ParseExported {
// The AST filtering is a little buggy and may remove things it
// shouldn't. If we only got undeclared name errors, try one more
// time keeping those names.
missing, unexpected := filter.ProcessErrors(pkg.typeErrors)
if len(unexpected) == 0 && len(missing) != 0 {
- pkg, err = doTypeCheck(ctx, snapshot, goFiles, compiledGoFiles, m, mode, deps, filter)
+ pkg, err = doTypeCheck(ctx, snapshot, inputs, filter)
if err != nil {
return nil, err
}
missing, unexpected = filter.ProcessErrors(pkg.typeErrors)
}
if len(unexpected) != 0 || len(missing) != 0 {
- pkg, err = doTypeCheck(ctx, snapshot, goFiles, compiledGoFiles, m, mode, deps, nil)
+ pkg, err = doTypeCheck(ctx, snapshot, inputs, nil)
if err != nil {
return nil, err
}
}
}
- // If this is a replaced module in the workspace, the version is
- // meaningless, and we don't want clients to access it.
- if m.Module != nil {
- pkg.version = &module.Version{
- Path: m.Module.Path,
- Version: m.Module.Version,
- }
- }
// We don't care about a package's errors unless we have parsed it in full.
- if mode != source.ParseFull {
+ if inputs.mode != source.ParseFull {
return pkg, nil
}
- for _, e := range m.Errors {
- diags, err := goPackagesErrorDiagnostics(snapshot, pkg, e)
- if err != nil {
- event.Error(ctx, "unable to compute positions for list errors", err, tag.Package.Of(string(pkg.ID())))
- continue
- }
- pkg.diagnostics = append(pkg.diagnostics, diags...)
- }
-
// Our heuristic for whether to show type checking errors is:
// + If any file was 'fixed', don't show type checking errors as we
// can't guarantee that they reference accurate locations in the source.
@@ -394,7 +405,7 @@ func typeCheckImpl(ctx context.Context, snapshot *snapshot, goFiles, compiledGoF
for _, e := range pkg.parseErrors {
diags, err := parseErrorDiagnostics(snapshot, pkg, e)
if err != nil {
- event.Error(ctx, "unable to compute positions for parse errors", err, tag.Package.Of(string(pkg.ID())))
+ event.Error(ctx, "unable to compute positions for parse errors", err, tag.Package.Of(string(inputs.id)))
continue
}
for _, diag := range diags {
@@ -412,7 +423,7 @@ func typeCheckImpl(ctx context.Context, snapshot *snapshot, goFiles, compiledGoF
for _, e := range expandErrors(unexpanded, snapshot.View().Options().RelatedInformationSupported) {
diags, err := typeErrorDiagnostics(snapshot, pkg, e)
if err != nil {
- event.Error(ctx, "unable to compute positions for type errors", err, tag.Package.Of(string(pkg.ID())))
+ event.Error(ctx, "unable to compute positions for type errors", err, tag.Package.Of(string(inputs.id)))
continue
}
pkg.typeErrors = append(pkg.typeErrors, e.primary)
@@ -426,27 +437,20 @@ func typeCheckImpl(ctx context.Context, snapshot *snapshot, goFiles, compiledGoF
}
}
- depsErrors, err := snapshot.depsErrors(ctx, pkg)
- if err != nil {
- return nil, err
- }
- pkg.diagnostics = append(pkg.diagnostics, depsErrors...)
-
return pkg, nil
}
var goVersionRx = regexp.MustCompile(`^go([1-9][0-9]*)\.(0|[1-9][0-9]*)$`)
-func doTypeCheck(ctx context.Context, snapshot *snapshot, goFiles, compiledGoFiles []source.FileHandle, m *source.Metadata, mode source.ParseMode, deps map[PackageID]*packageHandle, astFilter *unexportedFilter) (*pkg, error) {
- ctx, done := event.Start(ctx, "cache.typeCheck", tag.Package.Of(string(m.ID)))
+func doTypeCheck(ctx context.Context, snapshot *snapshot, inputs typeCheckInputs, astFilter *unexportedFilter) (*syntaxPackage, error) {
+ ctx, done := event.Start(ctx, "cache.typeCheck", tag.Package.Of(string(inputs.id)))
defer done()
- pkg := &pkg{
- m: m,
- mode: mode,
+ pkg := &syntaxPackage{
+ id: inputs.id,
+ mode: inputs.mode,
fset: snapshot.FileSet(), // must match parse call below (snapshot.ParseGo for now)
- deps: make(map[PackageID]*pkg),
- types: types.NewPackage(string(m.PkgPath), string(m.Name)),
+ types: types.NewPackage(string(inputs.pkgPath), string(inputs.name)),
typesInfo: &types.Info{
Types: make(map[ast.Expr]types.TypeAndValue),
Defs: make(map[*ast.Ident]types.Object),
@@ -461,9 +465,9 @@ func doTypeCheck(ctx context.Context, snapshot *snapshot, goFiles, compiledGoFil
// Parse the non-compiled GoFiles. (These aren't presented to
// the type checker but are part of the returned pkg.)
// TODO(adonovan): opt: parallelize parsing.
- for _, fh := range goFiles {
- goMode := mode
- if mode == source.ParseExported {
+ for _, fh := range inputs.goFiles {
+ goMode := inputs.mode
+ if inputs.mode == source.ParseExported {
// This package is being loaded only for type information,
// to which non-compiled Go files are irrelevant,
// so parse only the header.
@@ -477,40 +481,32 @@ func doTypeCheck(ctx context.Context, snapshot *snapshot, goFiles, compiledGoFil
}
// Parse the CompiledGoFiles: those seen by the compiler/typechecker.
- if err := parseCompiledGoFiles(ctx, compiledGoFiles, snapshot, mode, pkg, astFilter); err != nil {
+ if err := parseCompiledGoFiles(ctx, inputs.compiledGoFiles, snapshot, inputs.mode, pkg, astFilter); err != nil {
return nil, err
}
// Use the default type information for the unsafe package.
- if m.PkgPath == "unsafe" {
+ if inputs.pkgPath == "unsafe" {
// Don't type check Unsafe: it's unnecessary, and doing so exposes a data
// race to Unsafe.completed.
// TODO(adonovan): factor (tail-merge) with the normal control path.
pkg.types = types.Unsafe
pkg.methodsets = methodsets.NewIndex(pkg.fset, pkg.types)
- pkg.xrefs = xrefs.Index(pkg)
+ pkg.xrefs = xrefs.Index(pkg.compiledGoFiles, pkg.types, pkg.typesInfo)
return pkg, nil
}
- if len(m.CompiledGoFiles) == 0 {
- // No files most likely means go/packages failed. Try to attach error
- // messages to the file as much as possible.
- var found bool
- for _, e := range m.Errors {
- srcDiags, err := goPackagesErrorDiagnostics(snapshot, pkg, e)
- if err != nil {
- continue
- }
- found = true
- pkg.diagnostics = append(pkg.diagnostics, srcDiags...)
- }
- if found {
- return pkg, nil
- }
- return nil, fmt.Errorf("no parsed files for package %s, expected: %v, errors: %v", pkg.m.PkgPath, pkg.compiledGoFiles, m.Errors)
+ if len(pkg.compiledGoFiles) == 0 {
+ // No files most likely means go/packages failed.
+ //
+ // TODO(rfindley): in the past, we would capture go list errors in this
+ // case, to present go list errors to the user. However we had no tests for
+ // this behavior. It is unclear if anything better can be done here.
+ return nil, fmt.Errorf("no parsed files for package %s", inputs.pkgPath)
}
cfg := &types.Config{
+ Sizes: inputs.sizes,
Error: func(e error) {
pkg.typeErrors = append(pkg.typeErrors, e.(types.Error))
},
@@ -519,30 +515,30 @@ func doTypeCheck(ctx context.Context, snapshot *snapshot, goFiles, compiledGoFil
// based on the metadata before we start type checking,
// reporting them via types.Importer places the errors
// at the correct source location.
- id, ok := pkg.m.DepsByImpPath[ImportPath(path)]
+ id, ok := inputs.depsByImpPath[ImportPath(path)]
if !ok {
// If the import declaration is broken,
// go list may fail to report metadata about it.
// See TestFixImportDecl for an example.
return nil, fmt.Errorf("missing metadata for import of %q", path)
}
- dep, ok := deps[id] // id may be ""
+ dep, ok := inputs.deps[id] // id may be ""
if !ok {
return nil, snapshot.missingPkgError(path)
}
- if !source.IsValidImport(m.PkgPath, dep.m.PkgPath) {
+ if !source.IsValidImport(inputs.pkgPath, dep.m.PkgPath) {
return nil, fmt.Errorf("invalid use of internal package %s", path)
}
depPkg, err := dep.await(ctx, snapshot)
if err != nil {
return nil, err
}
- pkg.deps[depPkg.m.ID] = depPkg
return depPkg.types, nil
}),
}
- if pkg.m.Module != nil && pkg.m.Module.GoVersion != "" {
- goVersion := "go" + pkg.m.Module.GoVersion
+
+ if inputs.goVersion != "" {
+ goVersion := "go" + inputs.goVersion
// types.NewChecker panics if GoVersion is invalid. An unparsable mod
// file should probably stop us before we get here, but double check
// just in case.
@@ -551,7 +547,7 @@ func doTypeCheck(ctx context.Context, snapshot *snapshot, goFiles, compiledGoFil
}
}
- if mode != source.ParseFull {
+ if inputs.mode != source.ParseFull {
cfg.DisableUnusedImportCheck = true
cfg.IgnoreFuncBodies = true
}
@@ -574,7 +570,7 @@ func doTypeCheck(ctx context.Context, snapshot *snapshot, goFiles, compiledGoFil
pkg.methodsets = methodsets.NewIndex(pkg.fset, pkg.types)
// Build global index of outbound cross-references.
- pkg.xrefs = xrefs.Index(pkg)
+ pkg.xrefs = xrefs.Index(pkg.compiledGoFiles, pkg.types, pkg.typesInfo)
// If the context was cancelled, we may have returned a ton of transient
// errors to the type checker. Swallow them.
@@ -584,7 +580,7 @@ func doTypeCheck(ctx context.Context, snapshot *snapshot, goFiles, compiledGoFil
return pkg, nil
}
-func parseCompiledGoFiles(ctx context.Context, compiledGoFiles []source.FileHandle, snapshot *snapshot, mode source.ParseMode, pkg *pkg, astFilter *unexportedFilter) error {
+func parseCompiledGoFiles(ctx context.Context, compiledGoFiles []source.FileHandle, snapshot *snapshot, mode source.ParseMode, pkg *syntaxPackage, astFilter *unexportedFilter) error {
// TODO(adonovan): opt: parallelize this loop, which takes 1-25ms.
for _, fh := range compiledGoFiles {
var pgf *source.ParsedGoFile
@@ -633,11 +629,16 @@ func parseCompiledGoFiles(ctx context.Context, compiledGoFiles []source.FileHand
return nil
}
-func (s *snapshot) depsErrors(ctx context.Context, pkg *pkg) ([]*source.Diagnostic, error) {
+// depsErrors creates diagnostics for each metadata error (e.g. import cycle).
+// These may be attached to import declarations in the transitive source files
+// of pkg, or to 'requires' declarations in the package's go.mod file.
+//
+// TODO(rfindley): move this to errors.go
+func (s *snapshot) depsErrors(ctx context.Context, pkg *syntaxPackage, depsErrors []*packagesinternal.PackageError) ([]*source.Diagnostic, error) {
// Select packages that can't be found, and were imported in non-workspace packages.
// Workspace packages already show their own errors.
var relevantErrors []*packagesinternal.PackageError
- for _, depsError := range pkg.m.DepsErrors {
+ for _, depsError := range depsErrors {
// Up to Go 1.15, the missing package was included in the stack, which
// was presumably a bug. We want the next one up.
directImporterIdx := len(depsError.ImportStack) - 1
@@ -665,7 +666,7 @@ func (s *snapshot) depsErrors(ctx context.Context, pkg *pkg) ([]*source.Diagnost
allImports := map[string][]fileImport{}
for _, cgf := range pkg.compiledGoFiles {
// TODO(adonovan): modify Imports() to accept a single token.File (cgf.Tok).
- for _, group := range astutil.Imports(s.FileSet(), cgf.File) {
+ for _, group := range astutil.Imports(pkg.fset, cgf.File) {
for _, imp := range group {
if imp.Path == nil {
continue
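The new computePackageKey above hashes every type-checking input with explicit field separators and sorted map keys so the encoding is deterministic and unambiguous. A self-contained sketch of the same pattern, with made-up inputs:

package main

import (
	"crypto/sha256"
	"fmt"
	"sort"
)

// key hashes a set of inputs into a stable identity. Field labels and
// newlines act as separators; map keys are sorted so iteration order
// cannot change the result.
func key(id, goVersion string, deps map[string]string, files []string) [sha256.Size]byte {
	h := sha256.New()
	fmt.Fprintf(h, "package: %s\n", id)
	fmt.Fprintf(h, "go %s\n", goVersion)

	depIDs := make([]string, 0, len(deps))
	for depID := range deps {
		depIDs = append(depIDs, depID)
	}
	sort.Strings(depIDs)
	for _, depID := range depIDs {
		fmt.Fprintf(h, "dep: %s key:%s\n", depID, deps[depID])
	}

	fmt.Fprintf(h, "files: %d\n", len(files))
	for _, f := range files {
		fmt.Fprintln(h, f) // e.g. a content hash, like FileIdentity
	}

	var out [sha256.Size]byte
	h.Sum(out[:0])
	return out
}

func main() {
	k := key("example.com/p", "1.20",
		map[string]string{"example.com/q": "abc123"},
		[]string{"p.go:deadbeef"})
	fmt.Printf("%x\n", k)
}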
diff --git a/gopls/internal/lsp/cache/errors.go b/gopls/internal/lsp/cache/errors.go
index 8af1ac355..b0babc7b0 100644
--- a/gopls/internal/lsp/cache/errors.go
+++ b/gopls/internal/lsp/cache/errors.go
@@ -28,30 +28,20 @@ import (
"golang.org/x/tools/internal/typesinternal"
)
-func goPackagesErrorDiagnostics(snapshot *snapshot, pkg *pkg, e packages.Error) ([]*source.Diagnostic, error) {
- if msg, spn, ok := parseGoListImportCycleError(snapshot, e, pkg); ok {
- rng, err := spanToRange(pkg, spn)
- if err != nil {
- return nil, err
- }
- return []*source.Diagnostic{{
- URI: spn.URI(),
- Range: rng,
- Severity: protocol.SeverityError,
- Source: source.ListError,
- Message: msg,
- }}, nil
+func goPackagesErrorDiagnostics(e packages.Error, pkg *syntaxPackage, fromDir string) (diags []*source.Diagnostic, rerr error) {
+ if diag, ok := parseGoListImportCycleError(e, pkg); ok {
+ return []*source.Diagnostic{diag}, nil
}
var spn span.Span
if e.Pos == "" {
- spn = parseGoListError(e.Msg, pkg.m.Config.Dir)
+ spn = parseGoListError(e.Msg, fromDir)
// We may not have been able to parse a valid span. Apply the errors to all files.
if _, err := spanToRange(pkg, spn); err != nil {
var diags []*source.Diagnostic
- for _, cgf := range pkg.compiledGoFiles {
+ for _, pgf := range pkg.compiledGoFiles {
diags = append(diags, &source.Diagnostic{
- URI: cgf.URI,
+ URI: pgf.URI,
Severity: protocol.SeverityError,
Source: source.ListError,
Message: e.Msg,
@@ -60,9 +50,20 @@ func goPackagesErrorDiagnostics(snapshot *snapshot, pkg *pkg, e packages.Error)
return diags, nil
}
} else {
- spn = span.ParseInDir(e.Pos, pkg.m.Config.Dir)
+ spn = span.ParseInDir(e.Pos, fromDir)
}
+ // TODO(rfindley): in some cases the go command outputs invalid spans, for
+ // example (from TestGoListErrors):
+ //
+ // package a
+ // import
+ //
+ // In this case, the go command will complain about a.go:2:8, which is after
+ // the trailing newline but still considered to be on the second line, most
+ // likely because *token.File lacks information about newline termination.
+ //
+ // We could do better here by handling that case.
rng, err := spanToRange(pkg, spn)
if err != nil {
return nil, err
@@ -76,7 +77,7 @@ func goPackagesErrorDiagnostics(snapshot *snapshot, pkg *pkg, e packages.Error)
}}, nil
}
-func parseErrorDiagnostics(snapshot *snapshot, pkg *pkg, errList scanner.ErrorList) ([]*source.Diagnostic, error) {
+func parseErrorDiagnostics(snapshot *snapshot, pkg *syntaxPackage, errList scanner.ErrorList) ([]*source.Diagnostic, error) {
// The first parser error is likely the root cause of the problem.
if errList.Len() <= 0 {
return nil, fmt.Errorf("no errors in %v", errList)
@@ -102,7 +103,7 @@ func parseErrorDiagnostics(snapshot *snapshot, pkg *pkg, errList scanner.ErrorLi
var importErrorRe = regexp.MustCompile(`could not import ([^\s]+)`)
var unsupportedFeatureRe = regexp.MustCompile(`.*require.* go(\d+\.\d+) or later`)
-func typeErrorDiagnostics(snapshot *snapshot, pkg *pkg, e extendedError) ([]*source.Diagnostic, error) {
+func typeErrorDiagnostics(snapshot *snapshot, pkg *syntaxPackage, e extendedError) ([]*source.Diagnostic, error) {
code, loc, err := typeErrorData(pkg, e.primary)
if err != nil {
return nil, err
@@ -286,7 +287,7 @@ func relatedInformation(diag *gobDiagnostic) []source.RelatedInformation {
return out
}
-func typeErrorData(pkg *pkg, terr types.Error) (typesinternal.ErrorCode, protocol.Location, error) {
+func typeErrorData(pkg *syntaxPackage, terr types.Error) (typesinternal.ErrorCode, protocol.Location, error) {
ecode, start, end, ok := typesinternal.ReadGo116ErrorData(terr)
if !ok {
start, end = terr.Pos, terr.Pos
@@ -299,7 +300,7 @@ func typeErrorData(pkg *pkg, terr types.Error) (typesinternal.ErrorCode, protoco
}
// go/types errors retain their FileSet.
// Sanity-check that we're using the right one.
- fset := pkg.FileSet()
+ fset := pkg.fset
if fset != terr.Fset {
return 0, protocol.Location{}, bug.Errorf("wrong FileSet for type error")
}
@@ -320,7 +321,7 @@ func typeErrorData(pkg *pkg, terr types.Error) (typesinternal.ErrorCode, protoco
// spanToRange converts a span.Span to a protocol.Range,
// assuming that the span belongs to the package whose diagnostics are being computed.
-func spanToRange(pkg *pkg, spn span.Span) (protocol.Range, error) {
+func spanToRange(pkg *syntaxPackage, spn span.Span) (protocol.Range, error) {
pgf, err := pkg.File(spn.URI())
if err != nil {
return protocol.Range{}, err
@@ -344,36 +345,39 @@ func parseGoListError(input, wd string) span.Span {
return span.ParseInDir(input[:msgIndex], wd)
}
-func parseGoListImportCycleError(snapshot *snapshot, e packages.Error, pkg *pkg) (string, span.Span, bool) {
+func parseGoListImportCycleError(e packages.Error, pkg *syntaxPackage) (*source.Diagnostic, bool) {
re := regexp.MustCompile(`(.*): import stack: \[(.+)\]`)
matches := re.FindStringSubmatch(strings.TrimSpace(e.Msg))
if len(matches) < 3 {
- return e.Msg, span.Span{}, false
+ return nil, false
}
msg := matches[1]
importList := strings.Split(matches[2], " ")
// Since the error is relative to the current package, the import that is causing
// the import cycle error is the second one in the list.
if len(importList) < 2 {
- return msg, span.Span{}, false
+ return nil, false
}
// Imports have quotation marks around them.
circImp := strconv.Quote(importList[1])
- for _, cgf := range pkg.compiledGoFiles {
+ for _, pgf := range pkg.compiledGoFiles {
// Search file imports for the import that is causing the import cycle.
- for _, imp := range cgf.File.Imports {
+ for _, imp := range pgf.File.Imports {
if imp.Path.Value == circImp {
- start, end, err := safetoken.Offsets(cgf.Tok, imp.Pos(), imp.End())
- if err != nil {
- return msg, span.Span{}, false
- }
- spn, err := cgf.Mapper.OffsetSpan(start, end)
+ rng, err := pgf.PosMappedRange(imp.Pos(), imp.End())
if err != nil {
- return msg, span.Span{}, false
+ return nil, false
}
- return msg, spn, true
+
+ return &source.Diagnostic{
+ URI: pgf.URI,
+ Range: rng.Range(),
+ Severity: protocol.SeverityError,
+ Source: source.ListError,
+ Message: msg,
+ }, true
}
}
}
- return msg, span.Span{}, false
+ return nil, false
}
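parseGoListImportCycleError extracts the offending import from the go list message so the diagnostic can be attached to the matching import declaration. A standalone sketch of just the parsing step; the message text below is only illustrative:

package main

import (
	"fmt"
	"regexp"
	"strconv"
	"strings"
)

var cycleRe = regexp.MustCompile(`(.*): import stack: \[(.+)\]`)

func main() {
	msg := "import cycle not allowed: import stack: [example.com/a example.com/b example.com/a]"
	m := cycleRe.FindStringSubmatch(strings.TrimSpace(msg))
	if len(m) < 3 {
		fmt.Println("not an import cycle error")
		return
	}
	imports := strings.Split(m[2], " ")
	if len(imports) < 2 {
		return
	}
	// The second entry is the import that closes the cycle; quoting it
	// lets us match the literal in an *ast.ImportSpec (imp.Path.Value).
	fmt.Println(m[1], "->", strconv.Quote(imports[1]))
}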
diff --git a/gopls/internal/lsp/cache/error_test.go b/gopls/internal/lsp/cache/errors_test.go
index 43cc03f78..43cc03f78 100644
--- a/gopls/internal/lsp/cache/error_test.go
+++ b/gopls/internal/lsp/cache/errors_test.go
diff --git a/gopls/internal/lsp/cache/load.go b/gopls/internal/lsp/cache/load.go
index 1d69504a9..9b321cec5 100644
--- a/gopls/internal/lsp/cache/load.go
+++ b/gopls/internal/lsp/cache/load.go
@@ -447,21 +447,13 @@ func buildMetadata(ctx context.Context, pkg *packages.Package, cfg *packages.Con
Name: PackageName(pkg.Name),
ForTest: PackagePath(packagesinternal.GetForTest(pkg)),
TypesSizes: pkg.TypesSizes,
- Config: cfg,
+ LoadDir: cfg.Dir,
Module: pkg.Module,
+ Errors: pkg.Errors,
DepsErrors: packagesinternal.GetDepsErrors(pkg),
}
- updates[id] = m
- for _, err := range pkg.Errors {
- // Filter out parse errors from go list. We'll get them when we
- // actually parse, and buggy overlay support may generate spurious
- // errors. (See TestNewModule_Issue38207.)
- if strings.Contains(err.Msg, "expected '") {
- continue
- }
- m.Errors = append(m.Errors, err)
- }
+ updates[id] = m
for _, filename := range pkg.CompiledGoFiles {
uri := span.URIFromPath(filename)
diff --git a/gopls/internal/lsp/cache/pkg.go b/gopls/internal/lsp/cache/pkg.go
index 37aae50a0..bb4823c03 100644
--- a/gopls/internal/lsp/cache/pkg.go
+++ b/gopls/internal/lsp/cache/pkg.go
@@ -5,19 +5,22 @@
package cache
import (
+ "context"
"fmt"
"go/ast"
"go/scanner"
"go/token"
"go/types"
+ "strings"
- "golang.org/x/mod/module"
"golang.org/x/tools/go/types/objectpath"
"golang.org/x/tools/gopls/internal/lsp/protocol"
"golang.org/x/tools/gopls/internal/lsp/source"
"golang.org/x/tools/gopls/internal/lsp/source/methodsets"
"golang.org/x/tools/gopls/internal/lsp/source/xrefs"
"golang.org/x/tools/gopls/internal/span"
+ "golang.org/x/tools/internal/event"
+ "golang.org/x/tools/internal/event/tag"
"golang.org/x/tools/internal/memoize"
)
@@ -29,30 +32,93 @@ type (
ImportPath = source.ImportPath
)
-// pkg contains parse trees and type information for a package.
-type pkg struct {
+// A Package is the union of snapshot-local information (Metadata) and shared
+// type-checking information (a syntaxPackage).
+//
+// TODO(rfindley): for now, we do not persist the post-processing of
+// loadDiagnostics, because the value of the snapshot.packages map is just the
+// package handle. Fix this.
+type Package struct {
m *source.Metadata
- mode source.ParseMode
+ pkg *syntaxPackage
+ loadDiagnostics *memoize.Promise // post-processed errors from loading
+}
+
+func newPackage(m *source.Metadata, pkg *syntaxPackage) *Package {
+ p := &Package{
+ m: m,
+ pkg: pkg,
+ }
+ if len(m.Errors) > 0 || len(m.DepsErrors) > 0 {
+ p.loadDiagnostics = memoize.NewPromise(fmt.Sprintf("loadDiagnostics(%s)", m.ID), func(ctx context.Context, arg interface{}) interface{} {
+ s := arg.(*snapshot)
+ var diags []*source.Diagnostic
+ for _, packagesErr := range p.m.Errors {
+ // Filter out parse errors from go list. We'll get them when we
+ // actually parse, and buggy overlay support may generate spurious
+ // errors. (See TestNewModule_Issue38207.)
+ if strings.Contains(packagesErr.Msg, "expected '") {
+ continue
+ }
+ pkgDiags, err := goPackagesErrorDiagnostics(packagesErr, p.pkg, p.m.LoadDir)
+ if err != nil {
+ // There are certain cases where the go command returns invalid
+ // positions, so we cannot panic or even bug.Reportf here.
+ event.Error(ctx, "unable to compute positions for list errors", err, tag.Package.Of(string(p.m.ID)))
+ continue
+ }
+ diags = append(diags, pkgDiags...)
+ }
+
+ // TODO(rfindley): this is buggy: an insignificant change to a modfile
+ // (or an unsaved modfile) could affect the position of deps errors,
+ // without invalidating the package.
+ depsDiags, err := s.depsErrors(ctx, p.pkg, p.m.DepsErrors)
+ if err != nil {
+ if ctx.Err() == nil {
+ // TODO(rfindley): consider making this a bug.Reportf. depsErrors should
+ // not normally fail.
+ event.Error(ctx, "unable to compute deps errors", err, tag.Package.Of(string(p.m.ID)))
+ }
+ return nil
+ }
+ diags = append(diags, depsDiags...)
+ return diags
+ })
+ }
+ return p
+}
+
+// syntaxPackage contains parse trees and type information for a package.
+type syntaxPackage struct {
+ // -- identifiers --
+ id PackageID
+ mode source.ParseMode
+
+ // -- outputs --
fset *token.FileSet // for now, same as the snapshot's FileSet
goFiles []*source.ParsedGoFile
compiledGoFiles []*source.ParsedGoFile
diagnostics []*source.Diagnostic
- deps map[PackageID]*pkg // use m.DepsBy{Pkg,Imp}Path to look up ID
- version *module.Version // may be nil; may differ from m.Module.Version
parseErrors []scanner.ErrorList
typeErrors []types.Error
types *types.Package
typesInfo *types.Info
- hasFixedFiles bool // if true, AST was sufficiently mangled that we should hide type errors
- xrefs []byte // serializable index of outbound cross-references
-
- analyses memoize.Store // maps analyzer.Name to Promise[actionResult]
- methodsets *methodsets.Index // index of method sets of package-level types
+ hasFixedFiles bool // if true, AST was sufficiently mangled that we should hide type errors
+ xrefs []byte // serializable index of outbound cross-references
+ analyses memoize.Store // maps analyzer.Name to Promise[actionResult]
+ methodsets *methodsets.Index // index of method sets of package-level types
}
-func (p *pkg) String() string { return string(p.ID()) }
+func (p *Package) String() string { return string(p.m.ID) }
+
+func (p *Package) Metadata() *source.Metadata {
+ return p.m
+}
// A loadScope defines a package loading scope for use with go/packages.
+//
+// TODO(rfindley): move this to load.go.
type loadScope interface {
aScope()
}
@@ -70,127 +136,96 @@ func (packageLoadScope) aScope() {}
func (moduleLoadScope) aScope() {}
func (viewLoadScope) aScope() {}
-func (p *pkg) ID() PackageID { return p.m.ID }
-func (p *pkg) Name() PackageName { return p.m.Name }
-func (p *pkg) PkgPath() PackagePath { return p.m.PkgPath }
+func (p *Package) ParseMode() source.ParseMode {
+ return p.pkg.mode
+}
-func (p *pkg) ParseMode() source.ParseMode {
- return p.mode
+func (p *Package) CompiledGoFiles() []*source.ParsedGoFile {
+ return p.pkg.compiledGoFiles
}
-func (p *pkg) CompiledGoFiles() []*source.ParsedGoFile {
- return p.compiledGoFiles
+func (p *Package) File(uri span.URI) (*source.ParsedGoFile, error) {
+ return p.pkg.File(uri)
}
-func (p *pkg) File(uri span.URI) (*source.ParsedGoFile, error) {
- for _, cgf := range p.compiledGoFiles {
+func (pkg *syntaxPackage) File(uri span.URI) (*source.ParsedGoFile, error) {
+ for _, cgf := range pkg.compiledGoFiles {
if cgf.URI == uri {
return cgf, nil
}
}
- for _, gf := range p.goFiles {
+ for _, gf := range pkg.goFiles {
if gf.URI == uri {
return gf, nil
}
}
- return nil, fmt.Errorf("no parsed file for %s in %v", uri, p.m.ID)
+ return nil, fmt.Errorf("no parsed file for %s in %v", uri, pkg.id)
}
-func (p *pkg) GetSyntax() []*ast.File {
+func (p *Package) GetSyntax() []*ast.File {
var syntax []*ast.File
- for _, pgf := range p.compiledGoFiles {
+ for _, pgf := range p.pkg.compiledGoFiles {
syntax = append(syntax, pgf.File)
}
return syntax
}
-func (p *pkg) FileSet() *token.FileSet {
- return p.fset
+func (p *Package) FileSet() *token.FileSet {
+ return p.pkg.fset
}
-func (p *pkg) GetTypes() *types.Package {
- return p.types
+func (p *Package) GetTypes() *types.Package {
+ return p.pkg.types
}
-func (p *pkg) GetTypesInfo() *types.Info {
- return p.typesInfo
+func (p *Package) GetTypesInfo() *types.Info {
+ return p.pkg.typesInfo
}
-func (p *pkg) GetTypesSizes() types.Sizes {
- return p.m.TypesSizes
+func (p *Package) HasParseErrors() bool {
+ return len(p.pkg.parseErrors) != 0
}
-func (p *pkg) ForTest() string {
- return string(p.m.ForTest)
+func (p *Package) HasTypeErrors() bool {
+ return len(p.pkg.typeErrors) != 0
}
-// DirectDep returns the directly imported dependency of this package,
-// given its PackagePath. (If you have an ImportPath, e.g. a string
-// from an import declaration, use ResolveImportPath instead.
-// They may differ in case of vendoring.)
-func (p *pkg) DirectDep(pkgPath PackagePath) (source.Package, error) {
- if id, ok := p.m.DepsByPkgPath[pkgPath]; ok {
- if imp := p.deps[id]; imp != nil {
- return imp, nil
+func (p *Package) DiagnosticsForFile(ctx context.Context, s source.Snapshot, uri span.URI) ([]*source.Diagnostic, error) {
+ var diags []*source.Diagnostic
+ for _, diag := range p.pkg.diagnostics {
+ if diag.URI == uri {
+ diags = append(diags, diag)
}
}
- return nil, fmt.Errorf("package does not import package with path %s", pkgPath)
-}
-// ResolveImportPath returns the directly imported dependency of this package,
-// given its ImportPath. See also DirectDep.
-func (p *pkg) ResolveImportPath(importPath ImportPath) (source.Package, error) {
- if id, ok := p.m.DepsByImpPath[importPath]; ok && id != "" {
- if imp := p.deps[id]; imp != nil {
- return imp, nil
+ if p.loadDiagnostics != nil {
+ res, err := p.loadDiagnostics.Get(ctx, s)
+ if err != nil {
+ return nil, err
}
- }
- return nil, fmt.Errorf("package does not import %s", importPath)
-}
-
-func (p *pkg) Imports() []source.Package {
- var result []source.Package // unordered
- for _, dep := range p.deps {
- result = append(result, dep)
- }
- return result
-}
-
-func (p *pkg) Version() *module.Version {
- return p.version
-}
-
-func (p *pkg) HasListOrParseErrors() bool {
- return len(p.m.Errors) != 0 || len(p.parseErrors) != 0
-}
-
-func (p *pkg) HasTypeErrors() bool {
- return len(p.typeErrors) != 0
-}
-
-func (p *pkg) DiagnosticsForFile(uri span.URI) []*source.Diagnostic {
- var res []*source.Diagnostic
- for _, diag := range p.diagnostics {
- if diag.URI == uri {
- res = append(res, diag)
+ for _, diag := range res.([]*source.Diagnostic) {
+ if diag.URI == uri {
+ diags = append(diags, diag)
+ }
}
}
- return res
+
+ return diags, nil
}
// ReferencesTo returns the location of each reference within package p
// to one of the target objects denoted by the pair (package path, object path).
-func (p *pkg) ReferencesTo(targets map[PackagePath]map[objectpath.Path]struct{}) []protocol.Location {
+func (p *Package) ReferencesTo(targets map[PackagePath]map[objectpath.Path]struct{}) []protocol.Location {
// TODO(adonovan): In future, p.xrefs will be retrieved from a
// section of the cache file produced by type checking.
// (Other sections will include the package's export data,
// "implements" relations, exported symbols, etc.)
// For now we just hang it off the pkg.
- return xrefs.Lookup(p.m, p.xrefs, targets)
+ return xrefs.Lookup(p.m, p.pkg.xrefs, targets)
}
-func (p *pkg) MethodSetsIndex() *methodsets.Index {
+func (p *Package) MethodSetsIndex() *methodsets.Index {
// TODO(adonovan): In future, p.methodsets will be retrieved from a
// section of the cache file produced by type checking.
- return p.methodsets
+ return p.pkg.methodsets
}
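newPackage above defers the post-processing of load and deps errors behind a promise that is forced only from DiagnosticsForFile. A self-contained sketch of the same compute-once-on-demand shape, using sync.Once in place of gopls's memoize package:

package main

import (
	"fmt"
	"sync"
)

// lazyDiags computes diagnostics at most once, on first request,
// rather than eagerly at package construction time.
type lazyDiags struct {
	once    sync.Once
	compute func() []string
	result  []string
}

func (l *lazyDiags) get() []string {
	l.once.Do(func() { l.result = l.compute() })
	return l.result
}

func main() {
	p := &lazyDiags{compute: func() []string {
		fmt.Println("post-processing load errors...") // runs once
		return []string{"a.go:1:1: import cycle not allowed"}
	}}
	fmt.Println(p.get())
	fmt.Println(p.get()) // cached; compute does not run again
}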
diff --git a/gopls/internal/lsp/cache/snapshot.go b/gopls/internal/lsp/cache/snapshot.go
index 51d7a7964..995fba34c 100644
--- a/gopls/internal/lsp/cache/snapshot.go
+++ b/gopls/internal/lsp/cache/snapshot.go
@@ -118,7 +118,7 @@ type snapshot struct {
// analyses maps an analysisKey (which identifies a package
// and a set of analyzers) to the handle for the future result
// of loading the package and analyzing it.
- analyses *persistent.Map // from analysisKey to analysisHandle
+ analyses *persistent.Map // from analysisKey to analysisPromise
// workspacePackages contains the workspace's packages, which are loaded
// when the view is created.
@@ -626,10 +626,20 @@ func (s *snapshot) buildOverlay() map[string][]byte {
}
// TypeCheck type-checks the specified packages in the given mode.
+//
+// The resulting packages slice always contains len(ids) entries, though some
+// of them may be nil if (and only if) the resulting error is non-nil.
+//
+// An error is returned if any of the packages fail to type-check. This is
+// different from having type-checking errors: a failure to type-check
+// indicates context cancellation or otherwise significant failure to perform
+// the type-checking operation.
func (s *snapshot) TypeCheck(ctx context.Context, mode source.TypecheckMode, ids ...PackageID) ([]source.Package, error) {
// Build all the handles...
- var phs []*packageHandle
- for _, id := range ids {
+ phs := make([]*packageHandle, len(ids))
+ pkgs := make([]source.Package, len(ids))
+ var firstErr error
+ for i, id := range ids {
parseMode := source.ParseFull
if mode == source.TypecheckWorkspace {
parseMode = s.workspaceParseMode(id)
@@ -637,21 +647,36 @@ func (s *snapshot) TypeCheck(ctx context.Context, mode source.TypecheckMode, ids
ph, err := s.buildPackageHandle(ctx, id, parseMode)
if err != nil {
- return nil, err
+ if firstErr == nil {
+ firstErr = err
+ }
+ if ctx.Err() != nil {
+ return pkgs, firstErr
+ }
+ continue
}
- phs = append(phs, ph)
+ phs[i] = ph
}
// ...then await them all.
- var pkgs []source.Package
- for _, ph := range phs {
- pkg, err := ph.await(ctx, s)
+ for i, ph := range phs {
+ if ph == nil {
+ continue
+ }
+ p, err := ph.await(ctx, s)
if err != nil {
- return nil, err
+ if firstErr == nil {
+ firstErr = err
+ }
+ if ctx.Err() != nil {
+ return pkgs, firstErr
+ }
+ continue
}
- pkgs = append(pkgs, pkg)
+ pkgs[i] = newPackage(ph.m, p)
}
- return pkgs, nil
+
+ return pkgs, firstErr
}
func (s *snapshot) MetadataForFile(ctx context.Context, uri span.URI) ([]*source.Metadata, error) {
@@ -1058,7 +1083,7 @@ func (s *snapshot) AllMetadata(ctx context.Context) ([]*source.Metadata, error)
return meta, nil
}
-func (s *snapshot) CachedImportPaths(ctx context.Context) (map[PackagePath]source.Package, error) {
+func (s *snapshot) CachedImportPaths(ctx context.Context) (map[PackagePath]*types.Package, error) {
// Don't reload workspace package metadata.
// This function is meant to only return currently cached information.
s.AwaitInitialized(ctx)
@@ -1066,24 +1091,41 @@ func (s *snapshot) CachedImportPaths(ctx context.Context) (map[PackagePath]sourc
s.mu.Lock()
defer s.mu.Unlock()
- results := map[PackagePath]source.Package{}
+ pkgs := make(map[PackagePath]*syntaxPackage)
+
+ // Find all cached packages that are imported a nonzero number of times.
+ //
+ // TODO(rfindley): this is pre-existing behavior, and a test fails if we
+ // don't do the importCount filter, but why do we care if a package is
+ // imported a nonzero number of times?
+ imported := make(map[PackagePath]bool)
s.packages.Range(func(_, v interface{}) {
- cachedPkg, err := v.(*packageHandle).cached()
+ ph := v.(*packageHandle)
+ for dep := range ph.m.DepsByPkgPath {
+ imported[dep] = true
+ }
+ if ph.m.Name == "main" {
+ return
+ }
+ pkg, err := ph.cached()
if err != nil {
return
}
- for _, newPkg := range cachedPkg.deps {
- pkgPath := newPkg.PkgPath()
- if oldPkg, ok := results[pkgPath]; ok {
- // Using the same trick as NarrowestPackage, prefer non-variants.
- if len(newPkg.compiledGoFiles) < len(oldPkg.(*pkg).compiledGoFiles) {
- results[pkgPath] = newPkg
- }
- } else {
- results[pkgPath] = newPkg
+ if old, ok := pkgs[ph.m.PkgPath]; ok {
+ if len(pkg.compiledGoFiles) < len(old.compiledGoFiles) {
+ pkgs[ph.m.PkgPath] = pkg
}
+ } else {
+ pkgs[ph.m.PkgPath] = pkg
}
})
+ results := make(map[PackagePath]*types.Package)
+ for pkgPath, pkg := range pkgs {
+ if imported[pkgPath] {
+ results[pkgPath] = pkg.types
+ }
+ }
+
return results, nil
}
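TypeCheck now returns one result slot per requested ID and records the first error rather than failing fast, so callers can still index results positionally. A minimal sketch of that pattern with generic names:

package main

import (
	"errors"
	"fmt"
)

// checkAll preserves one output slot per input, so callers can index
// results by position even when some inputs fail.
func checkAll(ids []string, check func(string) (string, error)) ([]string, error) {
	results := make([]string, len(ids))
	var firstErr error
	for i, id := range ids {
		r, err := check(id)
		if err != nil {
			if firstErr == nil {
				firstErr = err
			}
			continue // leave results[i] zero-valued
		}
		results[i] = r
	}
	return results, firstErr
}

func main() {
	out, err := checkAll([]string{"a", "bad", "c"}, func(id string) (string, error) {
		if id == "bad" {
			return "", errors.New("no such package")
		}
		return "pkg:" + id, nil
	})
	fmt.Println(out, err) // [pkg:a  pkg:c] no such package
}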
diff --git a/gopls/internal/lsp/code_action.go b/gopls/internal/lsp/code_action.go
index ec918b7c7..c91def600 100644
--- a/gopls/internal/lsp/code_action.go
+++ b/gopls/internal/lsp/code_action.go
@@ -162,12 +162,16 @@ func (s *Server) codeAction(ctx context.Context, params *protocol.CodeActionPara
if err != nil {
return nil, err
}
- analysisDiags, err := source.Analyze(ctx, snapshot, pkg.ID(), true)
+ pkgDiags, err := pkg.DiagnosticsForFile(ctx, snapshot, uri)
+ if err != nil {
+ return nil, err
+ }
+ analysisDiags, err := source.Analyze(ctx, snapshot, pkg.Metadata().ID, true)
if err != nil {
return nil, err
}
var fileDiags []*source.Diagnostic
- source.CombineDiagnostics(pkg, fh.URI(), analysisDiags, &fileDiags, &fileDiags)
+ source.CombineDiagnostics(pkgDiags, analysisDiags[uri], &fileDiags, &fileDiags)
// Split diagnostics into fixes, which must match incoming diagnostics,
// and non-fixes, which must match the requested range. Build actions
diff --git a/gopls/internal/lsp/diagnostics.go b/gopls/internal/lsp/diagnostics.go
index ef30b6f77..a58f40497 100644
--- a/gopls/internal/lsp/diagnostics.go
+++ b/gopls/internal/lsp/diagnostics.go
@@ -401,8 +401,15 @@ func (s *Server) diagnosePkg(ctx context.Context, snapshot source.Snapshot, m *s
if snapshot.IsBuiltin(ctx, cgf.URI) {
continue
}
+
+ pkgDiags, err := pkg.DiagnosticsForFile(ctx, snapshot, cgf.URI)
+ if err != nil {
+ event.Error(ctx, "warning: getting package diagnostics", err, append(source.SnapshotLabels(snapshot), tag.Package.Of(string(m.ID)))...)
+ return
+ }
+
var tdiags, adiags []*source.Diagnostic
- source.CombineDiagnostics(pkg, cgf.URI, analysisDiags, &tdiags, &adiags)
+ source.CombineDiagnostics(pkgDiags, analysisDiags[cgf.URI], &tdiags, &adiags)
s.storeDiagnostics(snapshot, cgf.URI, typeCheckSource, tdiags, true)
s.storeDiagnostics(snapshot, cgf.URI, analysisSource, adiags, true)
}
diff --git a/gopls/internal/lsp/mod/diagnostics.go b/gopls/internal/lsp/mod/diagnostics.go
index 3cbe3529e..25860ead3 100644
--- a/gopls/internal/lsp/mod/diagnostics.go
+++ b/gopls/internal/lsp/mod/diagnostics.go
@@ -107,7 +107,11 @@ func ModDiagnostics(ctx context.Context, snapshot source.Snapshot, fh source.Fil
return nil, err
}
for _, pkg := range pkgs {
- diagnostics = append(diagnostics, pkg.DiagnosticsForFile(fh.URI())...)
+ pkgDiags, err := pkg.DiagnosticsForFile(ctx, snapshot, fh.URI())
+ if err != nil {
+ return nil, err
+ }
+ diagnostics = append(diagnostics, pkgDiags...)
}
}
diff --git a/gopls/internal/lsp/semantic.go b/gopls/internal/lsp/semantic.go
index 728f61de7..93328773b 100644
--- a/gopls/internal/lsp/semantic.go
+++ b/gopls/internal/lsp/semantic.go
@@ -76,10 +76,11 @@ func (s *Server) computeSemanticTokens(ctx context.Context, td protocol.TextDocu
// this is a little cumbersome to avoid both exporting 'encoded' and its methods
// and to avoid import cycles
e := &encoded{
- ctx: ctx,
- rng: rng,
- tokTypes: s.session.Options().SemanticTypes,
- tokMods: s.session.Options().SemanticMods,
+ ctx: ctx,
+ metadataSource: snapshot,
+ rng: rng,
+ tokTypes: s.session.Options().SemanticTypes,
+ tokMods: s.session.Options().SemanticMods,
}
add := func(line, start uint32, len uint32) {
e.add(line, start, len, tokMacro, nil)
@@ -103,16 +104,17 @@ func (s *Server) computeSemanticTokens(ctx context.Context, td protocol.TextDocu
return nil, err
}
e := &encoded{
- ctx: ctx,
- pgf: pgf,
- rng: rng,
- ti: pkg.GetTypesInfo(),
- pkg: pkg,
- fset: pkg.FileSet(),
- tokTypes: s.session.Options().SemanticTypes,
- tokMods: s.session.Options().SemanticMods,
- noStrings: vv.Options().NoSemanticString,
- noNumbers: vv.Options().NoSemanticNumber,
+ ctx: ctx,
+ metadataSource: snapshot,
+ pgf: pgf,
+ rng: rng,
+ ti: pkg.GetTypesInfo(),
+ pkg: pkg,
+ fset: pkg.FileSet(),
+ tokTypes: s.session.Options().SemanticTypes,
+ tokMods: s.session.Options().SemanticMods,
+ noStrings: vv.Options().NoSemanticString,
+ noNumbers: vv.Options().NoSemanticNumber,
}
if err := e.init(); err != nil {
// e.init should never return an error, unless there's some
@@ -216,7 +218,11 @@ type encoded struct {
noStrings bool
noNumbers bool
- ctx context.Context
+ ctx context.Context
+ // metadataSource is used to resolve imports
+ metadataSource interface {
+ Metadata(source.PackageID) *source.Metadata
+ }
tokTypes, tokMods []string
pgf *source.ParsedGoFile
rng *protocol.Range
@@ -908,20 +914,24 @@ func (e *encoded) importSpec(d *ast.ImportSpec) {
return
}
// Import strings are implementation defined. Try to match with parse information.
- imported, err := e.pkg.ResolveImportPath(importPath)
- if err != nil {
+ depID := e.pkg.Metadata().DepsByImpPath[importPath]
+ if depID == "" {
+ return
+ }
+ depMD := e.metadataSource.Metadata(depID)
+ if depMD == nil {
// unexpected, but impact is that maybe some import is not colored
return
}
// Check whether the original literal contains the package's declared name.
- j := strings.LastIndex(d.Path.Value, string(imported.Name()))
+ j := strings.LastIndex(d.Path.Value, string(depMD.Name))
if j == -1 {
- // name doesn't show up, for whatever reason, so nothing to report
+ // Package name does not match import path, so there is nothing to report.
return
}
// Report virtual declaration at the position of the substring.
start := d.Path.Pos() + token.Pos(j)
- e.token(start, len(imported.Name()), tokNamespace, nil)
+ e.token(start, len(depMD.Name), tokNamespace, nil)
}
// log unexpected state
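With edges between packages removed, the semantic token code resolves an import path to its package name through metadata alone: import path to dependency ID, then ID to metadata. A compact sketch of the two-step lookup over hypothetical data:

package main

import "fmt"

type metadata struct {
	Name          string
	DepsByImpPath map[string]string // import path -> package ID ("" if unresolved)
}

// nameForImport mimics the lookup: import path -> dependency ID -> metadata.
func nameForImport(pkg metadata, byID map[string]metadata, importPath string) (string, bool) {
	depID := pkg.DepsByImpPath[importPath]
	if depID == "" {
		return "", false
	}
	depMD, ok := byID[depID]
	if !ok {
		return "", false
	}
	return depMD.Name, true
}

func main() {
	byID := map[string]metadata{"example.com/util": {Name: "util"}}
	p := metadata{
		Name:          "main",
		DepsByImpPath: map[string]string{"example.com/util": "example.com/util"},
	}
	fmt.Println(nameForImport(p, byID, "example.com/util")) // util true
}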
diff --git a/gopls/internal/lsp/source/api_json.go b/gopls/internal/lsp/source/api_json.go
index e97c1474a..3fa7ca222 100755
--- a/gopls/internal/lsp/source/api_json.go
+++ b/gopls/internal/lsp/source/api_json.go
@@ -57,14 +57,6 @@ var GeneratedAPIJSON = &APIJSON{
Hierarchy: "build",
},
{
- Name: "experimentalPackageCacheKey",
- Type: "bool",
- Doc: "experimentalPackageCacheKey controls whether to use a coarser cache key\nfor package type information to increase cache hits. This setting removes\nthe user's environment, build flags, and working directory from the cache\nkey, which should be a safe change as all relevant inputs into the type\nchecking pass are already hashed into the key. This is temporarily guarded\nby an experiment because caching behavior is subtle and difficult to\ncomprehensively test.\n",
- Default: "true",
- Status: "experimental",
- Hierarchy: "build",
- },
- {
Name: "allowModfileModifications",
Type: "bool",
Doc: "allowModfileModifications disables -mod=readonly, allowing imports from\nout-of-scope modules. This option will eventually be removed.\n",
diff --git a/gopls/internal/lsp/source/completion/completion.go b/gopls/internal/lsp/source/completion/completion.go
index 056b2893c..3343a1a59 100644
--- a/gopls/internal/lsp/source/completion/completion.go
+++ b/gopls/internal/lsp/source/completion/completion.go
@@ -1088,10 +1088,12 @@ func (c *completer) selector(ctx context.Context, sel *ast.SelectorExpr) error {
// Is sel a qualified identifier?
if id, ok := sel.X.(*ast.Ident); ok {
if pkgName, ok := c.pkg.GetTypesInfo().Uses[id].(*types.PkgName); ok {
- pkg, _ := c.pkg.DirectDep(source.PackagePath(pkgName.Imported().Path()))
- // If the package is not imported, try searching for unimported
- // completions.
- if pkg == nil && c.opts.unimported {
+ // If this package path is not a known dep, it means that it resolves to
+ // an import path that couldn't be resolved by go/packages.
+ //
+ // Try to complete from the package cache.
+ pkgPath := source.PackagePath(pkgName.Imported().Path())
+ if _, ok := c.pkg.Metadata().DepsByPkgPath[pkgPath]; !ok && c.opts.unimported {
if err := c.unimportedMembers(ctx, id); err != nil {
return err
}
@@ -1133,7 +1135,7 @@ func (c *completer) unimportedMembers(ctx context.Context, id *ast.Ident) error
var paths []string
for path, pkg := range known {
- if pkg.GetTypes().Name() != id.Name {
+ if pkg.Name() != id.Name {
continue
}
paths = append(paths, string(path))
@@ -1155,16 +1157,16 @@ func (c *completer) unimportedMembers(ctx context.Context, id *ast.Ident) error
for _, path := range paths {
pkg := known[source.PackagePath(path)]
- if pkg.GetTypes().Name() != id.Name {
+ if pkg.Name() != id.Name {
continue
}
imp := &importInfo{
importPath: path,
}
- if imports.ImportPathToAssumedName(path) != pkg.GetTypes().Name() {
- imp.name = pkg.GetTypes().Name()
+ if imports.ImportPathToAssumedName(path) != pkg.Name() {
+ imp.name = pkg.Name()
}
- c.packageMembers(pkg.GetTypes(), unimportedScore(relevances[path]), imp, func(cand candidate) {
+ c.packageMembers(pkg, unimportedScore(relevances[path]), imp, func(cand candidate) {
c.deepState.enqueue(cand)
})
if len(c.items) >= unimportedMemberTarget {
@@ -1479,7 +1481,7 @@ func (c *completer) unimportedPackages(ctx context.Context, seen map[string]stru
}
var paths []string // actually PackagePaths
for path, pkg := range known {
- if !strings.HasPrefix(pkg.GetTypes().Name(), prefix) {
+ if !strings.HasPrefix(pkg.Name(), prefix) {
continue
}
paths = append(paths, string(path))
@@ -1508,21 +1510,21 @@ func (c *completer) unimportedPackages(ctx context.Context, seen map[string]stru
for _, path := range paths {
pkg := known[source.PackagePath(path)]
- if _, ok := seen[pkg.GetTypes().Name()]; ok {
+ if _, ok := seen[pkg.Name()]; ok {
continue
}
imp := &importInfo{
importPath: path,
}
- if imports.ImportPathToAssumedName(path) != pkg.GetTypes().Name() {
- imp.name = pkg.GetTypes().Name()
+ if imports.ImportPathToAssumedName(path) != pkg.Name() {
+ imp.name = pkg.Name()
}
if count >= maxUnimportedPackageNames {
return nil
}
c.deepState.enqueue(candidate{
// Pass an empty *types.Package to disable deep completions.
- obj: types.NewPkgName(0, nil, pkg.GetTypes().Name(), types.NewPackage(path, string(pkg.Name()))),
+ obj: types.NewPkgName(0, nil, pkg.Name(), types.NewPackage(path, string(pkg.Name()))),
score: unimportedScore(relevances[path]),
imp: imp,
})
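
Since CachedImportPaths now yields *types.Package values rather than source.Package, the candidate object above is built straight from go/types constructors. A small runnable sketch of that construction follows; the package path and name are hypothetical.

package main

import (
	"fmt"
	"go/token"
	"go/types"
)

func main() {
	path := "example.com/fake/strcase" // hypothetical unimported package path
	name := "strcase"                  // its declared package name

	// Mirror the completion candidate: a PkgName backed by a freshly created
	// *types.Package, which carries no populated scope and therefore does not
	// feed deep completion.
	obj := types.NewPkgName(token.NoPos, nil, name, types.NewPackage(path, name))
	fmt.Println(obj.Name(), obj.Imported().Path()) // strcase example.com/fake/strcase
}
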
diff --git a/gopls/internal/lsp/source/completion/format.go b/gopls/internal/lsp/source/completion/format.go
index bb3ccd72d..16da642fe 100644
--- a/gopls/internal/lsp/source/completion/format.go
+++ b/gopls/internal/lsp/source/completion/format.go
@@ -81,7 +81,7 @@ func (c *completer) item(ctx context.Context, cand candidate) (CompletionItem, e
if _, ok := obj.Type().(*types.Struct); ok {
detail = "struct{...}" // for anonymous structs
} else if obj.IsField() {
- detail = source.FormatVarType(c.pkg, obj, c.qf)
+ detail = source.FormatVarType(ctx, c.snapshot, c.pkg, obj, c.qf)
}
if obj.IsField() {
kind = protocol.FieldCompletion
@@ -238,7 +238,7 @@ Suffixes:
uri := span.URIFromPath(pos.Filename)
// Find the source file of the candidate.
- pkg, err := source.FindPackageFromPos(c.pkg, obj.Pos())
+ pkg, err := source.FindPackageFromPos(ctx, c.snapshot, c.pkg, obj.Pos())
if err != nil {
return item, nil
}
diff --git a/gopls/internal/lsp/source/completion/literal.go b/gopls/internal/lsp/source/completion/literal.go
index 870b97ab2..9d4b0e6fb 100644
--- a/gopls/internal/lsp/source/completion/literal.go
+++ b/gopls/internal/lsp/source/completion/literal.go
@@ -162,7 +162,7 @@ func (c *completer) literal(ctx context.Context, literalType types.Type, imp *im
if score := c.matcher.Score("func"); !cand.hasMod(reference) && score > 0 && (expType == nil || !types.IsInterface(expType)) {
switch t := literalType.Underlying().(type) {
case *types.Signature:
- c.functionLiteral(t, float64(score))
+ c.functionLiteral(ctx, t, float64(score))
}
}
}
@@ -175,7 +175,7 @@ const literalCandidateScore = highScore / 2
// functionLiteral adds a function literal completion item for the
// given signature.
-func (c *completer) functionLiteral(sig *types.Signature, matchScore float64) {
+func (c *completer) functionLiteral(ctx context.Context, sig *types.Signature, matchScore float64) {
snip := &snippet.Builder{}
snip.WriteText("func(")
@@ -202,7 +202,7 @@ func (c *completer) functionLiteral(sig *types.Signature, matchScore float64) {
// If the param has no name in the signature, guess a name based
// on the type. Use an empty qualifier to ignore the package.
// For example, we want to name "http.Request" "r", not "hr".
- name = source.FormatVarType(c.pkg, p, func(p *types.Package) string {
+ name = source.FormatVarType(ctx, c.snapshot, c.pkg, p, func(p *types.Package) string {
return ""
})
name = abbreviateTypeName(name)
@@ -264,7 +264,7 @@ func (c *completer) functionLiteral(sig *types.Signature, matchScore float64) {
// of "i int, j int".
if i == sig.Params().Len()-1 || !types.Identical(p.Type(), sig.Params().At(i+1).Type()) {
snip.WriteText(" ")
- typeStr := source.FormatVarType(c.pkg, p, c.qf)
+ typeStr := source.FormatVarType(ctx, c.snapshot, c.pkg, p, c.qf)
if sig.Variadic() && i == sig.Params().Len()-1 {
typeStr = strings.Replace(typeStr, "[]", "...", 1)
}
@@ -314,7 +314,7 @@ func (c *completer) functionLiteral(sig *types.Signature, matchScore float64) {
snip.WriteText(name + " ")
}
- text := source.FormatVarType(c.pkg, r, c.qf)
+ text := source.FormatVarType(ctx, c.snapshot, c.pkg, r, c.qf)
if tp, _ := r.Type().(*typeparams.TypeParam); tp != nil && !c.typeParamInScope(tp) {
snip.WritePlaceholder(func(snip *snippet.Builder) {
snip.WriteText(text)
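
The comment in functionLiteral above notes that unnamed parameters get a name guessed from their type, formatted with an empty qualifier so that "http.Request" yields "r" rather than "hr". A hypothetical initialism-style abbreviation (not gopls's actual abbreviateTypeName) makes the effect of the qualifier concrete:

package main

import (
	"fmt"
	"strings"
	"unicode"
)

// abbrev keeps the first letter and any subsequent capitals, lowercased.
// It is only a stand-in to show why the qualifier matters.
func abbrev(s string) string {
	var b strings.Builder
	for i, r := range s {
		if i == 0 || unicode.IsUpper(r) {
			b.WriteRune(unicode.ToLower(r))
		}
	}
	return b.String()
}

func main() {
	fmt.Println(abbrev("Request"))      // "r"  — type formatted with an empty qualifier
	fmt.Println(abbrev("http.Request")) // "hr" — why the package qualifier must be dropped first
}
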
diff --git a/gopls/internal/lsp/source/completion/statements.go b/gopls/internal/lsp/source/completion/statements.go
index dcce76ff7..809cb808a 100644
--- a/gopls/internal/lsp/source/completion/statements.go
+++ b/gopls/internal/lsp/source/completion/statements.go
@@ -330,24 +330,31 @@ func getTestVar(enclosingFunc *funcInfo, pkg source.Package) string {
return ""
}
+ var testingPkg *types.Package
+ for _, p := range pkg.GetTypes().Imports() {
+ if p.Path() == "testing" {
+ testingPkg = p
+ break
+ }
+ }
+ if testingPkg == nil {
+ return ""
+ }
+ tbObj := testingPkg.Scope().Lookup("TB")
+ if tbObj == nil {
+ return ""
+ }
+ iface, ok := tbObj.Type().Underlying().(*types.Interface)
+ if !ok {
+ return ""
+ }
+
sig := enclosingFunc.sig
for i := 0; i < sig.Params().Len(); i++ {
param := sig.Params().At(i)
if param.Name() == "_" {
continue
}
- testingPkg, err := pkg.DirectDep("testing")
- if err != nil {
- continue
- }
- tbObj := testingPkg.GetTypes().Scope().Lookup("TB")
- if tbObj == nil {
- continue
- }
- iface, ok := tbObj.Type().Underlying().(*types.Interface)
- if !ok {
- continue
- }
if !types.Implements(param.Type(), iface) {
continue
}
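
The getTestVar change hoists the lookup of the testing.TB interface out of the parameter loop, since the lookup does not depend on the parameter. The same lookup-then-Implements pattern can be sketched with plain go/types; this sketch uses the "source" importer so it stays self-contained, at the cost of type-checking the testing package from source, which is slow.

package main

import (
	"fmt"
	"go/ast"
	"go/importer"
	"go/parser"
	"go/token"
	"go/types"
)

const src = `package p

import "testing"

func helper(t *testing.T, name string) {}
`

func main() {
	fset := token.NewFileSet()
	f, err := parser.ParseFile(fset, "p.go", src, 0)
	if err != nil {
		panic(err)
	}
	conf := types.Config{Importer: importer.ForCompiler(fset, "source", nil)}
	pkg, err := conf.Check("p", fset, []*ast.File{f}, nil)
	if err != nil {
		panic(err)
	}

	// Hoisted lookup: find testing.TB once, outside any per-parameter loop.
	var iface *types.Interface
	for _, imp := range pkg.Imports() {
		if imp.Path() == "testing" {
			if tb := imp.Scope().Lookup("TB"); tb != nil {
				iface, _ = tb.Type().Underlying().(*types.Interface)
			}
			break
		}
	}
	if iface == nil {
		fmt.Println("testing.TB not found")
		return
	}

	// Per-parameter check, as in the loop above.
	sig := pkg.Scope().Lookup("helper").Type().(*types.Signature)
	for i := 0; i < sig.Params().Len(); i++ {
		p := sig.Params().At(i)
		if types.Implements(p.Type(), iface) {
			fmt.Printf("parameter %q satisfies testing.TB\n", p.Name())
		}
	}
}
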
diff --git a/gopls/internal/lsp/source/diagnostics.go b/gopls/internal/lsp/source/diagnostics.go
index a7350b00e..042db1785 100644
--- a/gopls/internal/lsp/source/diagnostics.go
+++ b/gopls/internal/lsp/source/diagnostics.go
@@ -82,18 +82,22 @@ func FileDiagnostics(ctx context.Context, snapshot Snapshot, uri span.URI) (File
if err != nil {
return nil, nil, err
}
- adiags, err := Analyze(ctx, snapshot, pkg.ID(), false)
+ pkgDiags, err := pkg.DiagnosticsForFile(ctx, snapshot, uri)
+ if err != nil {
+ return nil, nil, err
+ }
+ adiags, err := Analyze(ctx, snapshot, pkg.Metadata().ID, false)
if err != nil {
return nil, nil, err
}
var fileDiags []*Diagnostic // combine load/parse/type + analysis diagnostics
- CombineDiagnostics(pkg, fh.URI(), adiags, &fileDiags, &fileDiags)
+ CombineDiagnostics(pkgDiags, adiags[uri], &fileDiags, &fileDiags)
return fh, fileDiags, nil
}
-// CombineDiagnostics combines and filters list/parse/type diagnostics
-// from pkg.DiagnosticsForFile(uri) with analysisDiagnostics[uri], and
-// appends the two lists to *outT and *outA, respectively.
+// CombineDiagnostics combines and filters list/parse/type diagnostics from
+// tdiags with adiags, and appends the two lists to *outT and *outA,
+// respectively.
//
// Type-error analyzers produce diagnostics that are redundant
// with type checker diagnostics, but more detailed (e.g. fixes).
@@ -111,7 +115,7 @@ func FileDiagnostics(ctx context.Context, snapshot Snapshot, uri span.URI) (File
// easily choose whether to keep the results separate or combined.
//
// The arguments are not modified.
-func CombineDiagnostics(pkg Package, uri span.URI, analysisDiagnostics map[span.URI][]*Diagnostic, outT, outA *[]*Diagnostic) {
+func CombineDiagnostics(tdiags []*Diagnostic, adiags []*Diagnostic, outT, outA *[]*Diagnostic) {
// Build index of (list+parse+)type errors.
type key struct {
@@ -119,14 +123,13 @@ func CombineDiagnostics(pkg Package, uri span.URI, analysisDiagnostics map[span.
message string
}
index := make(map[key]int) // maps (Range,Message) to index in tdiags slice
- tdiags := pkg.DiagnosticsForFile(uri)
for i, diag := range tdiags {
index[key{diag.Range, diag.Message}] = i
}
// Filter out analysis diagnostics that match type errors,
// retaining their suggested fix (etc) fields.
- for _, diag := range analysisDiagnostics[uri] {
+ for _, diag := range adiags {
if i, ok := index[key{diag.Range, diag.Message}]; ok {
copy := *tdiags[i]
copy.SuggestedFixes = diag.SuggestedFixes
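
CombineDiagnostics now receives the two diagnostic slices directly instead of a Package and URI. The merge itself is an index-and-graft by (range, message); here is a toy model of that algorithm, with strings standing in for protocol ranges and simplified output handling.

package main

import "fmt"

// diag is a minimal stand-in for source.Diagnostic.
type diag struct {
	Rng, Msg string // stand-ins for protocol.Range and the message text
	Fixes    []string
}

// combine keeps type-checker diagnostics (tdiags), but when an analysis
// diagnostic (adiags) reports the same range and message, its suggested
// fixes are grafted onto the type error and the duplicate is dropped.
func combine(tdiags, adiags []diag) (outT, outA []diag) {
	type key struct{ rng, msg string }
	index := make(map[key]int) // (range, message) -> index in tdiags
	for i, d := range tdiags {
		index[key{d.Rng, d.Msg}] = i
	}
	for _, d := range adiags {
		if i, ok := index[key{d.Rng, d.Msg}]; ok {
			merged := tdiags[i]
			merged.Fixes = d.Fixes
			tdiags[i] = merged
			continue // redundant with a type error
		}
		outA = append(outA, d)
	}
	return append(outT, tdiags...), outA
}

func main() {
	tdiags := []diag{{Rng: "3:5", Msg: "undeclared name: foo"}}
	adiags := []diag{{Rng: "3:5", Msg: "undeclared name: foo", Fixes: []string{"add import"}}}
	t, a := combine(tdiags, adiags)
	fmt.Println(len(t), t[0].Fixes, len(a)) // 1 [add import] 0
}
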
diff --git a/gopls/internal/lsp/source/highlight.go b/gopls/internal/lsp/source/highlight.go
index 4fe3ae8b3..d0f77e62a 100644
--- a/gopls/internal/lsp/source/highlight.go
+++ b/gopls/internal/lsp/source/highlight.go
@@ -54,7 +54,7 @@ func Highlight(ctx context.Context, snapshot Snapshot, fh FileHandle, position p
}
var ranges []protocol.Range
for rng := range result {
- mRng, err := posToMappedRange(pkg, rng.start, rng.end)
+ mRng, err := posToMappedRange(ctx, snapshot, pkg, rng.start, rng.end)
if err != nil {
return nil, err
}
diff --git a/gopls/internal/lsp/source/hover.go b/gopls/internal/lsp/source/hover.go
index 9eee53ad3..c27065662 100644
--- a/gopls/internal/lsp/source/hover.go
+++ b/gopls/internal/lsp/source/hover.go
@@ -327,7 +327,7 @@ func hoverIdentifier(ctx context.Context, i *IdentifierInfo) (*HoverJSON, error)
// Check if the identifier is test-only (and is therefore not part of a
// package's API). This is true if the request originated in a test package,
// and if the declaration is also found in the same test package.
- if i.pkg != nil && obj.Pkg() != nil && i.pkg.ForTest() != "" {
+ if i.pkg != nil && obj.Pkg() != nil && i.pkg.Metadata().ForTest != "" {
if _, err := i.pkg.File(i.Declaration.MappedRange[0].URI()); err == nil {
return h, nil
}
@@ -443,18 +443,22 @@ func moduleAtVersion(path string, i *IdentifierInfo) (string, string, bool) {
if strings.ToLower(i.Snapshot.View().Options().LinkTarget) != "pkg.go.dev" {
return "", "", false
}
- impPkg, err := i.pkg.DirectDep(PackagePath(path))
- if err != nil {
+ impID, ok := i.pkg.Metadata().DepsByPkgPath[PackagePath(path)]
+ if !ok {
+ return "", "", false
+ }
+ impMeta := i.Snapshot.Metadata(impID)
+ if impMeta == nil {
return "", "", false
}
- if impPkg.Version() == nil {
+ module := impMeta.Module
+ if module == nil {
return "", "", false
}
- version, modpath := impPkg.Version().Version, impPkg.Version().Path
- if modpath == "" || version == "" {
+ if module.Path == "" || module.Version == "" {
return "", "", false
}
- return modpath, version, true
+ return module.Path, module.Version, true
}
// objectString is a wrapper around the types.ObjectString function.
@@ -534,7 +538,9 @@ func FindHoverContext(ctx context.Context, s Snapshot, pkg Package, obj types.Ob
if err != nil {
return nil, err
}
- imp, err := pkg.ResolveImportPath(ImportPath(importPath))
+ // TODO(rfindley): avoid type-checking here, by re-parsing the package with
+ // ParseHeader.
+ imp, err := ResolveImportPath(ctx, s, pkg.Metadata().ID, ImportPath(importPath))
if err != nil {
return nil, err
}
diff --git a/gopls/internal/lsp/source/identifier.go b/gopls/internal/lsp/source/identifier.go
index 39743b2a5..0be26b565 100644
--- a/gopls/internal/lsp/source/identifier.go
+++ b/gopls/internal/lsp/source/identifier.go
@@ -98,7 +98,7 @@ func findIdentifier(ctx context.Context, snapshot Snapshot, pkg Package, pgf *Pa
file := pgf.File
// Handle import specs separately, as there is no formal position for a
// package declaration.
- if result, err := importSpec(snapshot, pkg, pgf, pos); result != nil || err != nil {
+ if result, err := importSpec(ctx, snapshot, pkg, pgf, pos); result != nil || err != nil {
return result, err
}
path := pathEnclosingObjNode(file, pos)
@@ -155,7 +155,7 @@ func findIdentifier(ctx context.Context, snapshot Snapshot, pkg Package, pgf *Pa
result.Name = result.ident.Name
var err error
- if result.MappedRange, err = posToMappedRange(pkg, result.ident.Pos(), result.ident.End()); err != nil {
+ if result.MappedRange, err = posToMappedRange(ctx, snapshot, pkg, result.ident.Pos(), result.ident.End()); err != nil {
return nil, err
}
@@ -266,7 +266,7 @@ func findIdentifier(ctx context.Context, snapshot Snapshot, pkg Package, pgf *Pa
// findFileInDeps, which is also called below. Refactor
// objToMappedRange to separate the find-file from the
// lookup-position steps to avoid the redundancy.
- rng, err := objToMappedRange(pkg, result.Declaration.obj)
+ rng, err := objToMappedRange(ctx, snapshot, pkg, result.Declaration.obj)
if err != nil {
return nil, err
}
@@ -274,7 +274,7 @@ func findIdentifier(ctx context.Context, snapshot Snapshot, pkg Package, pgf *Pa
declPos := result.Declaration.obj.Pos()
objURI := span.URIFromPath(pkg.FileSet().File(declPos).Name())
- declFile, declPkg, err := findFileInDeps(pkg, objURI)
+ declFile, declPkg, err := findFileInDeps(ctx, snapshot, pkg, objURI)
if err != nil {
return nil, err
}
@@ -301,7 +301,7 @@ func findIdentifier(ctx context.Context, snapshot Snapshot, pkg Package, pgf *Pa
if hasErrorType(result.Type.Object) {
return result, nil
}
- if result.Type.MappedRange, err = objToMappedRange(pkg, result.Type.Object); err != nil {
+ if result.Type.MappedRange, err = objToMappedRange(ctx, snapshot, pkg, result.Type.Object); err != nil {
return nil, err
}
}
@@ -449,7 +449,7 @@ func hasErrorType(obj types.Object) bool {
}
// importSpec handles positions inside of an *ast.ImportSpec.
-func importSpec(snapshot Snapshot, pkg Package, pgf *ParsedGoFile, pos token.Pos) (*IdentifierInfo, error) {
+func importSpec(ctx context.Context, snapshot Snapshot, pkg Package, pgf *ParsedGoFile, pos token.Pos) (*IdentifierInfo, error) {
var imp *ast.ImportSpec
for _, spec := range pgf.File.Imports {
if spec.Path.Pos() <= pos && pos < spec.Path.End() {
@@ -463,7 +463,7 @@ func importSpec(snapshot Snapshot, pkg Package, pgf *ParsedGoFile, pos token.Pos
if err != nil {
return nil, fmt.Errorf("import path not quoted: %s (%v)", imp.Path.Value, err)
}
- imported, err := pkg.ResolveImportPath(ImportPath(importPath))
+ imported, err := ResolveImportPath(ctx, snapshot, pkg.Metadata().ID, ImportPath(importPath))
if err != nil {
return nil, err
}
@@ -472,13 +472,13 @@ func importSpec(snapshot Snapshot, pkg Package, pgf *ParsedGoFile, pos token.Pos
Name: importPath, // should this perhaps be imported.PkgPath()?
pkg: pkg,
}
- if result.MappedRange, err = posToMappedRange(pkg, imp.Path.Pos(), imp.Path.End()); err != nil {
+ if result.MappedRange, err = posToMappedRange(ctx, snapshot, pkg, imp.Path.Pos(), imp.Path.End()); err != nil {
return nil, err
}
// Consider the "declaration" of an import spec to be the imported package.
// Return all of the files in the package as the definition of the import spec.
for _, dst := range imported.GetSyntax() {
- rng, err := posToMappedRange(pkg, dst.Pos(), dst.End())
+ rng, err := posToMappedRange(ctx, snapshot, pkg, dst.Pos(), dst.End())
if err != nil {
return nil, err
}
diff --git a/gopls/internal/lsp/source/implementation.go b/gopls/internal/lsp/source/implementation.go
index be29973ae..50520267e 100644
--- a/gopls/internal/lsp/source/implementation.go
+++ b/gopls/internal/lsp/source/implementation.go
@@ -324,22 +324,28 @@ func qualifiedObjsAtLocation(ctx context.Context, s Snapshot, key positionKey, s
}
// Get all of the transitive dependencies of the search package.
- pkgs := make(map[*types.Package]Package)
- var addPkg func(pkg Package)
- addPkg = func(pkg Package) {
- pkgs[pkg.GetTypes()] = pkg
- for _, imp := range pkg.Imports() {
- if _, ok := pkgs[imp.GetTypes()]; !ok {
- addPkg(imp)
- }
+ pkgSet := map[*types.Package]Package{
+ searchpkg.GetTypes(): searchpkg,
+ }
+ deps := recursiveDeps(s, searchpkg.Metadata())[1:]
+ // Ignore the error from type checking, but check if the context was
+ // canceled (which would have caused TypeCheck to exit early).
+ depPkgs, _ := s.TypeCheck(ctx, TypecheckWorkspace, deps...)
+ if ctx.Err() != nil {
+ return nil, ctx.Err()
+ }
+ for _, dep := range depPkgs {
+		// Since we ignored the error from type checking, dep may be nil.
+ if dep != nil {
+ pkgSet[dep.GetTypes()] = dep
}
}
- addPkg(searchpkg)
+
for _, obj := range objs {
if obj.Parent() == types.Universe {
return nil, fmt.Errorf("%q: %w", obj.Name(), errBuiltin)
}
- pkg, ok := pkgs[obj.Pkg()]
+ pkg, ok := pkgSet[obj.Pkg()]
if !ok {
event.Error(ctx, fmt.Sprintf("no package for obj %s: %v", obj, obj.Pkg()), err)
continue
diff --git a/gopls/internal/lsp/source/implementation2.go b/gopls/internal/lsp/source/implementation2.go
index 09659ccd6..a776c0389 100644
--- a/gopls/internal/lsp/source/implementation2.go
+++ b/gopls/internal/lsp/source/implementation2.go
@@ -166,7 +166,7 @@ func implementations2(ctx context.Context, snapshot Snapshot, fh FileHandle, pp
}
globalIDs := make([]PackageID, 0, len(globalMetas))
for _, m := range globalMetas {
- if m.PkgPath == declPkg.PkgPath() {
+ if m.PkgPath == declPkg.Metadata().PkgPath {
continue // declaring package is handled by local implementation
}
globalIDs = append(globalIDs, m.ID)
diff --git a/gopls/internal/lsp/source/options.go b/gopls/internal/lsp/source/options.go
index a32ed128a..bb0b5ecb6 100644
--- a/gopls/internal/lsp/source/options.go
+++ b/gopls/internal/lsp/source/options.go
@@ -116,12 +116,11 @@ func DefaultOptions() *Options {
},
UserOptions: UserOptions{
BuildOptions: BuildOptions{
- ExpandWorkspaceToModule: true,
- ExperimentalPackageCacheKey: true,
- MemoryMode: ModeNormal,
- DirectoryFilters: []string{"-**/node_modules"},
- TemplateExtensions: []string{},
- StandaloneTags: []string{"ignore"},
+ ExpandWorkspaceToModule: true,
+ MemoryMode: ModeNormal,
+ DirectoryFilters: []string{"-**/node_modules"},
+ TemplateExtensions: []string{},
+ StandaloneTags: []string{"ignore"},
},
UIOptions: UIOptions{
DiagnosticOptions: DiagnosticOptions{
@@ -269,15 +268,6 @@ type BuildOptions struct {
// a go.mod file, narrowing the scope to that directory if it exists.
ExpandWorkspaceToModule bool `status:"experimental"`
- // ExperimentalPackageCacheKey controls whether to use a coarser cache key
- // for package type information to increase cache hits. This setting removes
- // the user's environment, build flags, and working directory from the cache
- // key, which should be a safe change as all relevant inputs into the type
- // checking pass are already hashed into the key. This is temporarily guarded
- // by an experiment because caching behavior is subtle and difficult to
- // comprehensively test.
- ExperimentalPackageCacheKey bool `status:"experimental"`
-
// AllowModfileModifications disables -mod=readonly, allowing imports from
// out-of-scope modules. This option will eventually be removed.
AllowModfileModifications bool `status:"experimental"`
@@ -1113,7 +1103,7 @@ func (o *Options) set(name string, value interface{}, seen map[string]struct{})
result.deprecated("")
case "experimentalPackageCacheKey":
- result.setBool(&o.ExperimentalPackageCacheKey)
+ result.deprecated("")
case "allowModfileModifications":
result.setBool(&o.AllowModfileModifications)
diff --git a/gopls/internal/lsp/source/references.go b/gopls/internal/lsp/source/references.go
index f14d1d601..1fc7fde78 100644
--- a/gopls/internal/lsp/source/references.go
+++ b/gopls/internal/lsp/source/references.go
@@ -93,7 +93,7 @@ func references(ctx context.Context, snapshot Snapshot, qos []qualifiedObject, i
// transitively for (e.g.) capitalized local variables.
// We could do better by checking for an objectpath.
transitive := qo.obj.Pkg().Scope().Lookup(qo.obj.Name()) != qo.obj
- rdeps, err := snapshot.ReverseDependencies(ctx, qo.pkg.ID(), transitive)
+ rdeps, err := snapshot.ReverseDependencies(ctx, qo.pkg.Metadata().ID, transitive)
if err != nil {
return nil, err
}
@@ -134,14 +134,14 @@ func references(ctx context.Context, snapshot Snapshot, qos []qualifiedObject, i
}
key, found := packagePositionKey(pkg, ident.Pos())
if !found {
- bug.Reportf("ident %v (pos: %v) not found in package %v", ident.Name, ident.Pos(), pkg.Name())
+ bug.Reportf("ident %v (pos: %v) not found in package %v", ident.Name, ident.Pos(), pkg.Metadata().ID)
continue
}
if seen[key] {
continue
}
seen[key] = true
- rng, err := posToMappedRange(pkg, ident.Pos(), ident.End())
+ rng, err := posToMappedRange(ctx, snapshot, pkg, ident.Pos(), ident.End())
if err != nil {
return nil, err
}
diff --git a/gopls/internal/lsp/source/references2.go b/gopls/internal/lsp/source/references2.go
index 4b6ba4cd7..1a01eb98a 100644
--- a/gopls/internal/lsp/source/references2.go
+++ b/gopls/internal/lsp/source/references2.go
@@ -316,7 +316,7 @@ func ordinaryReferences(ctx context.Context, snapshot Snapshot, uri span.URI, pp
ref := &ReferenceInfoV2{
IsDeclaration: isDecl,
Location: loc,
- PkgPath: pkg.PkgPath(),
+ PkgPath: pkg.Metadata().PkgPath,
Name: obj.Name(),
}
refsMu.Lock()
@@ -402,7 +402,7 @@ func expandMethodSearch(ctx context.Context, snapshot Snapshot, method *types.Fu
// Expand global search scope to include rdeps of this pkg.
if len(results) > 0 {
- rdeps, err := snapshot.ReverseDependencies(ctx, pkg.ID(), true)
+ rdeps, err := snapshot.ReverseDependencies(ctx, pkg.Metadata().ID, true)
if err != nil {
return err
}
diff --git a/gopls/internal/lsp/source/rename.go b/gopls/internal/lsp/source/rename.go
index 1d2a03b0f..385dff4a7 100644
--- a/gopls/internal/lsp/source/rename.go
+++ b/gopls/internal/lsp/source/rename.go
@@ -30,6 +30,7 @@ import (
type renamer struct {
ctx context.Context
+ snapshot Snapshot
refs []*ReferenceInfo
objsToUpdate map[types.Object]bool
hadConflicts bool
@@ -125,15 +126,15 @@ func PrepareRename(ctx context.Context, snapshot Snapshot, f FileHandle, pp prot
if err := checkRenamable(obj); err != nil {
return nil, nil, err
}
- result, err := computePrepareRenameResp(snapshot, pkg, node, obj.Name())
+ result, err := computePrepareRenameResp(ctx, snapshot, pkg, node, obj.Name())
if err != nil {
return nil, nil, err
}
return result, nil, nil
}
-func computePrepareRenameResp(snapshot Snapshot, pkg Package, node ast.Node, text string) (*PrepareItem, error) {
- mr, err := posToMappedRange(pkg, node.Pos(), node.End())
+func computePrepareRenameResp(ctx context.Context, snapshot Snapshot, pkg Package, node ast.Node, text string) (*PrepareItem, error) {
+ mr, err := posToMappedRange(ctx, snapshot, pkg, node.Pos(), node.End())
if err != nil {
return nil, err
}
@@ -592,6 +593,7 @@ func renameObj(ctx context.Context, s Snapshot, newName string, qos []qualifiedO
}
r := renamer{
ctx: ctx,
+ snapshot: s,
refs: refs,
objsToUpdate: make(map[types.Object]bool),
from: obj.Name(),
@@ -735,7 +737,7 @@ func (r *renamer) update() (map[span.URI][]diff.Edit, error) {
// docComment returns the doc for an identifier.
func (r *renamer) docComment(pkg Package, id *ast.Ident) *ast.CommentGroup {
- _, tokFile, nodes, _ := pathEnclosingInterval(pkg, id.Pos(), id.End())
+ _, tokFile, nodes, _ := pathEnclosingInterval(r.ctx, r.snapshot, pkg, id.Pos(), id.End())
for _, node := range nodes {
switch decl := node.(type) {
case *ast.FuncDecl:
@@ -788,7 +790,7 @@ func (r *renamer) docComment(pkg Package, id *ast.Ident) *ast.CommentGroup {
func (r *renamer) updatePkgName(pkgName *types.PkgName) (*diff.Edit, error) {
// Modify ImportSpec syntax to add or remove the Name as needed.
pkg := r.packages[pkgName.Pkg()]
- _, tokFile, path, _ := pathEnclosingInterval(pkg, pkgName.Pos(), pkgName.Pos())
+ _, tokFile, path, _ := pathEnclosingInterval(r.ctx, r.snapshot, pkg, pkgName.Pos(), pkgName.Pos())
if len(path) < 2 {
return nil, fmt.Errorf("no path enclosing interval for %s", pkgName.Name())
}
diff --git a/gopls/internal/lsp/source/rename_check.go b/gopls/internal/lsp/source/rename_check.go
index 0285fc0c9..e442dae8d 100644
--- a/gopls/internal/lsp/source/rename_check.go
+++ b/gopls/internal/lsp/source/rename_check.go
@@ -7,6 +7,7 @@
package source
import (
+ "context"
"fmt"
"go/ast"
"go/token"
@@ -371,7 +372,7 @@ func (r *renamer) checkStructField(from *types.Var) {
if !ok {
return
}
- pkg, _, path, _ := pathEnclosingInterval(fromPkg, from.Pos(), from.Pos())
+ pkg, _, path, _ := pathEnclosingInterval(r.ctx, r.snapshot, fromPkg, from.Pos(), from.Pos())
if pkg == nil || path == nil {
return
}
@@ -790,10 +791,10 @@ func (r *renamer) satisfy() map[satisfy.Constraint]bool {
// type-checker.
//
// Only proceed if all packages have no errors.
- if pkg.HasListOrParseErrors() || pkg.HasTypeErrors() {
+ if pkg.HasParseErrors() || pkg.HasTypeErrors() {
r.errorf(token.NoPos, // we don't have a position for this error.
"renaming %q to %q not possible because %q has errors",
- r.from, r.to, pkg.PkgPath())
+ r.from, r.to, pkg.Metadata().PkgPath)
return nil
}
f.Find(pkg.GetTypesInfo(), pkg.GetSyntax())
@@ -826,7 +827,9 @@ func someUse(info *types.Info, obj types.Object) *ast.Ident {
// exact is defined as for astutil.PathEnclosingInterval.
//
// The zero value is returned if not found.
-func pathEnclosingInterval(pkg Package, start, end token.Pos) (resPkg Package, tokFile *token.File, path []ast.Node, exact bool) {
+//
+// TODO(rfindley): this has some redundancy with FindPackageFromPos, etc. Refactor.
+func pathEnclosingInterval(ctx context.Context, s Snapshot, pkg Package, start, end token.Pos) (resPkg Package, tokFile *token.File, path []ast.Node, exact bool) {
pkgs := []Package{pkg}
for _, f := range pkg.GetSyntax() {
for _, imp := range f.Imports {
@@ -837,7 +840,7 @@ func pathEnclosingInterval(pkg Package, start, end token.Pos) (resPkg Package, t
if importPath == "" {
continue
}
- imported, err := pkg.ResolveImportPath(importPath)
+ imported, err := ResolveImportPath(ctx, s, pkg.Metadata().ID, importPath)
if err != nil {
return nil, nil, nil, false
}
diff --git a/gopls/internal/lsp/source/signature_help.go b/gopls/internal/lsp/source/signature_help.go
index 3f81c27b0..e65548149 100644
--- a/gopls/internal/lsp/source/signature_help.go
+++ b/gopls/internal/lsp/source/signature_help.go
@@ -94,7 +94,7 @@ FindCall:
comment *ast.CommentGroup
)
if obj != nil {
- declPkg, err := FindPackageFromPos(pkg, obj.Pos())
+ declPkg, err := FindPackageFromPos(ctx, snapshot, pkg, obj.Pos())
if err != nil {
return nil, 0, err
}
diff --git a/gopls/internal/lsp/source/types_format.go b/gopls/internal/lsp/source/types_format.go
index e3a884d85..7a2605c72 100644
--- a/gopls/internal/lsp/source/types_format.go
+++ b/gopls/internal/lsp/source/types_format.go
@@ -206,7 +206,7 @@ func NewSignature(ctx context.Context, s Snapshot, pkg Package, sig *types.Signa
params := make([]string, 0, sig.Params().Len())
for i := 0; i < sig.Params().Len(); i++ {
el := sig.Params().At(i)
- typ := FormatVarType(pkg, el, qf)
+ typ := FormatVarType(ctx, s, pkg, el, qf)
p := typ
if el.Name() != "" {
p = el.Name() + " " + typ
@@ -221,7 +221,7 @@ func NewSignature(ctx context.Context, s Snapshot, pkg Package, sig *types.Signa
needResultParens = true
}
el := sig.Results().At(i)
- typ := FormatVarType(pkg, el, qf)
+ typ := FormatVarType(ctx, s, pkg, el, qf)
if el.Name() == "" {
results = append(results, typ)
} else {
@@ -254,8 +254,8 @@ func NewSignature(ctx context.Context, s Snapshot, pkg Package, sig *types.Signa
// FormatVarType formats a *types.Var, accounting for type aliases.
// To do this, it looks in the AST of the file in which the object is declared.
// On any errors, it always falls back to types.TypeString.
-func FormatVarType(srcpkg Package, obj *types.Var, qf types.Qualifier) string {
- pkg, err := FindPackageFromPos(srcpkg, obj.Pos())
+func FormatVarType(ctx context.Context, snapshot Snapshot, srcpkg Package, obj *types.Var, qf types.Qualifier) string {
+ pkg, err := FindPackageFromPos(ctx, snapshot, srcpkg, obj.Pos())
if err != nil {
return types.TypeString(obj.Type(), qf)
}
diff --git a/gopls/internal/lsp/source/util.go b/gopls/internal/lsp/source/util.go
index ec4c6bdbe..eacd9ce0b 100644
--- a/gopls/internal/lsp/source/util.go
+++ b/gopls/internal/lsp/source/util.go
@@ -51,7 +51,7 @@ func IsGenerated(ctx context.Context, snapshot Snapshot, uri span.URI) bool {
return false
}
-func objToMappedRange(pkg Package, obj types.Object) (protocol.MappedRange, error) {
+func objToMappedRange(ctx context.Context, snapshot Snapshot, pkg Package, obj types.Object) (protocol.MappedRange, error) {
nameLen := len(obj.Name())
if pkgName, ok := obj.(*types.PkgName); ok {
// An imported Go package has a package-local, unqualified name.
@@ -68,7 +68,7 @@ func objToMappedRange(pkg Package, obj types.Object) (protocol.MappedRange, erro
nameLen = len(pkgName.Imported().Path()) + len(`""`)
}
}
- return posToMappedRange(pkg, obj.Pos(), obj.Pos()+token.Pos(nameLen))
+ return posToMappedRange(ctx, snapshot, pkg, obj.Pos(), obj.Pos()+token.Pos(nameLen))
}
// posToMappedRange returns the MappedRange for the given [start, end) span,
@@ -77,7 +77,7 @@ func objToMappedRange(pkg Package, obj types.Object) (protocol.MappedRange, erro
// TODO(adonovan): many of the callers need only the ParsedGoFile so
// that they can call pgf.PosRange(pos, end) to get a Range; they
// don't actually need a MappedRange.
-func posToMappedRange(pkg Package, pos, end token.Pos) (protocol.MappedRange, error) {
+func posToMappedRange(ctx context.Context, snapshot Snapshot, pkg Package, pos, end token.Pos) (protocol.MappedRange, error) {
if !pos.IsValid() {
return protocol.MappedRange{}, fmt.Errorf("invalid start position")
}
@@ -86,7 +86,7 @@ func posToMappedRange(pkg Package, pos, end token.Pos) (protocol.MappedRange, er
}
logicalFilename := pkg.FileSet().File(pos).Name() // ignore line directives
- pgf, _, err := findFileInDeps(pkg, span.URIFromPath(logicalFilename))
+ pgf, _, err := findFileInDeps(ctx, snapshot, pkg, span.URIFromPath(logicalFilename))
if err != nil {
return protocol.MappedRange{}, err
}
@@ -99,13 +99,13 @@ func posToMappedRange(pkg Package, pos, end token.Pos) (protocol.MappedRange, er
// TODO(rfindley): is this the best factoring of this API? This function is
// really a trivial wrapper around findFileInDeps, which may be a more useful
// function to expose.
-func FindPackageFromPos(pkg Package, pos token.Pos) (Package, error) {
+func FindPackageFromPos(ctx context.Context, snapshot Snapshot, pkg Package, pos token.Pos) (Package, error) {
if !pos.IsValid() {
return nil, fmt.Errorf("invalid position")
}
fileName := pkg.FileSet().File(pos).Name()
uri := span.URIFromPath(fileName)
- _, pkg, err := findFileInDeps(pkg, uri)
+ _, pkg, err := findFileInDeps(ctx, snapshot, pkg, uri)
return pkg, err
}
@@ -223,25 +223,52 @@ func CompareDiagnostic(a, b *Diagnostic) int {
}
// findFileInDeps finds uri in pkg or its dependencies.
-func findFileInDeps(pkg Package, uri span.URI) (*ParsedGoFile, Package, error) {
- queue := []Package{pkg}
- seen := make(map[PackageID]bool)
-
- for len(queue) > 0 {
- pkg := queue[0]
- queue = queue[1:]
- seen[pkg.ID()] = true
-
+func findFileInDeps(ctx context.Context, snapshot Snapshot, pkg Package, uri span.URI) (*ParsedGoFile, Package, error) {
+ pkgs := []Package{pkg}
+ deps := recursiveDeps(snapshot, pkg.Metadata())[1:]
+ // Ignore the error from type checking, but check if the context was
+ // canceled (which would have caused TypeCheck to exit early).
+ depPkgs, _ := snapshot.TypeCheck(ctx, TypecheckWorkspace, deps...)
+ if ctx.Err() != nil {
+ return nil, nil, ctx.Err()
+ }
+ for _, dep := range depPkgs {
+		// Since we ignored the error from type checking, dep may be nil.
+ if dep != nil {
+ pkgs = append(pkgs, dep)
+ }
+ }
+ for _, pkg := range pkgs {
if pgf, err := pkg.File(uri); err == nil {
return pgf, pkg, nil
}
- for _, dep := range pkg.Imports() {
- if !seen[dep.ID()] {
- queue = append(queue, dep)
- }
+ }
+ return nil, nil, fmt.Errorf("no file for %s in deps of package %s", uri, pkg.Metadata().ID)
+}
+
+// recursiveDeps finds unique transitive dependencies of m, including m itself.
+//
+// Invariant: for the resulting slice res, res[0] == m.ID.
+//
+// TODO(rfindley): consider replacing this with a snapshot.ForwardDependencies
+// method, or exposing the metadata graph itself.
+func recursiveDeps(s interface{ Metadata(PackageID) *Metadata }, m *Metadata) []PackageID {
+ seen := make(map[PackageID]bool)
+ var ids []PackageID
+ var add func(*Metadata)
+ add = func(m *Metadata) {
+ if seen[m.ID] {
+ return
+ }
+ seen[m.ID] = true
+ ids = append(ids, m.ID)
+ for _, dep := range m.DepsByPkgPath {
+ m := s.Metadata(dep)
+ add(m)
}
}
- return nil, nil, fmt.Errorf("no file for %s in package %s", uri, pkg.ID())
+ add(m)
+ return ids
}
// UnquoteImportPath returns the unquoted import path of s,
@@ -463,3 +490,23 @@ func embeddedIdent(x ast.Expr) *ast.Ident {
}
return nil
}
+
+// ResolveImportPath returns the directly imported dependency of the package with id fromID,
+// given its ImportPath, type-checked in its workspace parse mode.
+//
+// TODO(rfindley): eliminate this function, in favor of inlining where it is used.
+func ResolveImportPath(ctx context.Context, snapshot Snapshot, fromID PackageID, importPath ImportPath) (Package, error) {
+ meta := snapshot.Metadata(fromID)
+ if meta == nil {
+ return nil, fmt.Errorf("unknown package %s", fromID)
+ }
+ depID, ok := meta.DepsByImpPath[importPath]
+ if !ok {
+ return nil, fmt.Errorf("package does not import %s", importPath)
+ }
+ pkgs, err := snapshot.TypeCheck(ctx, TypecheckWorkspace, depID)
+ if err != nil {
+ return nil, fmt.Errorf("type checking dep: %v", err)
+ }
+ return pkgs[0], nil
+}
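
recursiveDeps and ResolveImportPath both lean on the metadata graph rather than on edges between type-checked packages. Below is a self-contained model of the dependency walk, using a toy metadata type keyed by string IDs; the real code keys DepsByPkgPath by package path and looks metadata up on a Snapshot.

package main

import "fmt"

type metadata struct {
	ID   string
	Deps []string // dependency IDs
}

type graph map[string]*metadata

func (g graph) Metadata(id string) *metadata { return g[id] }

// recursiveDeps collects each reachable ID exactly once, starting package first.
func recursiveDeps(g graph, m *metadata) []string {
	seen := make(map[string]bool)
	var ids []string
	var add func(*metadata)
	add = func(m *metadata) {
		if m == nil || seen[m.ID] {
			return
		}
		seen[m.ID] = true
		ids = append(ids, m.ID)
		for _, dep := range m.Deps {
			add(g.Metadata(dep))
		}
	}
	add(m)
	return ids // invariant: ids[0] == m.ID
}

func main() {
	g := graph{
		"a": {ID: "a", Deps: []string{"b", "c"}},
		"b": {ID: "b", Deps: []string{"c"}},
		"c": {ID: "c"},
	}
	fmt.Println(recursiveDeps(g, g["a"])) // [a b c]
}
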
diff --git a/gopls/internal/lsp/source/view.go b/gopls/internal/lsp/source/view.go
index 81a91ccd9..3744e7c9f 100644
--- a/gopls/internal/lsp/source/view.go
+++ b/gopls/internal/lsp/source/view.go
@@ -17,7 +17,6 @@ import (
"io"
"golang.org/x/mod/modfile"
- "golang.org/x/mod/module"
"golang.org/x/tools/go/analysis"
"golang.org/x/tools/go/packages"
"golang.org/x/tools/go/types/objectpath"
@@ -171,7 +170,7 @@ type Snapshot interface {
//
// To reduce latency, it does not wait for type-checking to complete.
// It is intended for use only in completions.
- CachedImportPaths(ctx context.Context) (map[PackagePath]Package, error)
+ CachedImportPaths(ctx context.Context) (map[PackagePath]*types.Package, error)
// ActiveMetadata returns a new, unordered slice containing
// metadata for all packages considered 'active' in the workspace.
@@ -460,9 +459,7 @@ type Metadata struct {
DepsByPkgPath map[PackagePath]PackageID // values are unique and non-empty
Module *packages.Module
DepsErrors []*packagesinternal.PackageError
-
- // Config is the *packages.Config associated with the loaded package.
- Config *packages.Config
+ LoadDir string // directory from which go/packages was run
}
// IsIntermediateTestVariant reports whether the given package is an
@@ -750,13 +747,7 @@ type (
// Package represents a Go package that has been parsed and type-checked.
// It maintains only the relevant fields of a *go/packages.Package.
type Package interface {
- // Metadata:
- ID() PackageID
- Name() PackageName
- PkgPath() PackagePath
- GetTypesSizes() types.Sizes
- ForTest() string
- Version() *module.Version
+ Metadata() *Metadata
// Results of parsing:
FileSet() *token.FileSet
@@ -764,17 +755,14 @@ type Package interface {
CompiledGoFiles() []*ParsedGoFile // (borrowed)
File(uri span.URI) (*ParsedGoFile, error)
GetSyntax() []*ast.File // (borrowed)
- HasListOrParseErrors() bool
+ HasParseErrors() bool
// Results of type checking:
GetTypes() *types.Package
GetTypesInfo() *types.Info
- DirectDep(path PackagePath) (Package, error)
- ResolveImportPath(path ImportPath) (Package, error)
- Imports() []Package // new slice of all direct dependencies, unordered
HasTypeErrors() bool
- DiagnosticsForFile(uri span.URI) []*Diagnostic // new array of list/parse/type errors
- ReferencesTo(map[PackagePath]map[objectpath.Path]unit) []protocol.Location // new sorted array of xrefs
+ DiagnosticsForFile(ctx context.Context, s Snapshot, uri span.URI) ([]*Diagnostic, error)
+ ReferencesTo(map[PackagePath]map[objectpath.Path]unit) []protocol.Location
MethodSetsIndex() *methodsets.Index
}
diff --git a/gopls/internal/lsp/source/xrefs/xrefs.go b/gopls/internal/lsp/source/xrefs/xrefs.go
index 5f781f788..6a8b391e9 100644
--- a/gopls/internal/lsp/source/xrefs/xrefs.go
+++ b/gopls/internal/lsp/source/xrefs/xrefs.go
@@ -23,7 +23,7 @@ import (
// Index constructs a serializable index of outbound cross-references
// for the specified type-checked package.
-func Index(pkg source.Package) []byte {
+func Index(files []*source.ParsedGoFile, pkg *types.Package, info *types.Info) []byte {
// pkgObjects maps each referenced package Q to a mapping:
// from each referenced symbol in Q to the ordered list
// of references to that symbol from this package.
@@ -41,7 +41,7 @@ func Index(pkg source.Package) []byte {
return objects
}
- for fileIndex, pgf := range pkg.CompiledGoFiles() {
+ for fileIndex, pgf := range files {
nodeRange := func(n ast.Node) protocol.Range {
rng, err := pgf.PosRange(n.Pos(), n.End())
@@ -58,9 +58,9 @@ func Index(pkg source.Package) []byte {
// uses a symbol exported from another package.
// (The built-in error.Error method has no package.)
if n.IsExported() {
- if obj, ok := pkg.GetTypesInfo().Uses[n]; ok &&
+ if obj, ok := info.Uses[n]; ok &&
obj.Pkg() != nil &&
- obj.Pkg() != pkg.GetTypes() {
+ obj.Pkg() != pkg {
objects := getObjects(obj.Pkg())
gobObj, ok := objects[obj]
@@ -87,9 +87,9 @@ func Index(pkg source.Package) []byte {
// string to the imported package.
var obj types.Object
if n.Name != nil {
- obj = pkg.GetTypesInfo().Defs[n.Name]
+ obj = info.Defs[n.Name]
} else {
- obj = pkg.GetTypesInfo().Implicits[n]
+ obj = info.Implicits[n]
}
if obj == nil {
return true // missing import
diff --git a/gopls/internal/regtest/bench/completion_test.go b/gopls/internal/regtest/bench/completion_test.go
index aafc970f1..1a464ca8a 100644
--- a/gopls/internal/regtest/bench/completion_test.go
+++ b/gopls/internal/regtest/bench/completion_test.go
@@ -7,7 +7,6 @@ package bench
import (
"context"
"fmt"
- "strings"
"testing"
"golang.org/x/tools/gopls/internal/lsp/protocol"
@@ -87,14 +86,12 @@ func benchmarkCompletion(options completionBenchOptions, b *testing.B) {
// the given file.
func endRangeInBuffer(env *Env, name string) protocol.Range {
buffer := env.BufferText(name)
- lines := strings.Split(buffer, "\n")
- numLines := len(lines)
-
- end := protocol.Position{
- Line: uint32(numLines - 1),
- Character: uint32(len([]rune(lines[numLines-1]))),
+ m := protocol.NewMapper("", []byte(buffer))
+ rng, err := m.OffsetRange(len(buffer), len(buffer))
+ if err != nil {
+ env.T.Fatal(err)
}
- return protocol.Range{Start: end, End: end}
+ return rng
}
// Benchmark struct completion in tools codebase.
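
endRangeInBuffer now delegates to protocol.NewMapper and OffsetRange instead of splitting the buffer by hand, which also gets the UTF-16 column (the LSP default) right. A rough, self-contained sketch of the position the mapper computes for the end of a buffer; it ignores \r\n handling, validation, and alternative position encodings.

package main

import (
	"fmt"
	"strings"
	"unicode/utf16"
)

// endPosition returns the 0-based line and UTF-16 column of the end of buf,
// roughly what OffsetRange(len(buf), len(buf)) yields under UTF-16 encoding.
func endPosition(buf string) (line, char uint32) {
	lines := strings.Split(buf, "\n")
	last := lines[len(lines)-1]
	return uint32(len(lines) - 1), uint32(len(utf16.Encode([]rune(last))))
}

func main() {
	fmt.Println(endPosition("x := \"héllo 🚀\""))           // 0 15 (🚀 counts as two UTF-16 units)
	fmt.Println(endPosition("package p\n\nfunc F() {}\n")) // 3 0
}
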
diff --git a/gopls/internal/regtest/diagnostics/diagnostics_test.go b/gopls/internal/regtest/diagnostics/diagnostics_test.go
index 800274b5c..f96a0aa04 100644
--- a/gopls/internal/regtest/diagnostics/diagnostics_test.go
+++ b/gopls/internal/regtest/diagnostics/diagnostics_test.go
@@ -862,6 +862,7 @@ package foo_
`
Run(t, mod, func(t *testing.T, env *Env) {
env.OpenFile("foo/bar_test.go")
+ env.AfterChange()
env.RegexpReplace("foo/bar_test.go", "package foo_", "package foo_test")
env.AfterChange(
NoDiagnostics(ForFile("foo/bar_test.go")),
diff --git a/gopls/internal/regtest/diagnostics/golist_test.go b/gopls/internal/regtest/diagnostics/golist_test.go
new file mode 100644
index 000000000..ec54a92ce
--- /dev/null
+++ b/gopls/internal/regtest/diagnostics/golist_test.go
@@ -0,0 +1,68 @@
+// Copyright 2023 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package diagnostics
+
+import (
+ "testing"
+
+ . "golang.org/x/tools/gopls/internal/lsp/regtest"
+ "golang.org/x/tools/gopls/internal/lsp/source"
+)
+
+func TestGoListErrors(t *testing.T) {
+ const src = `
+-- go.mod --
+module a.com
+
+go 1.18
+-- a/a.go --
+package a
+
+import
+-- c/c.go --
+package c
+
+/*
+int fortythree() { return 42; }
+*/
+import "C"
+
+func Foo() {
+ print(C.fortytwo())
+}
+-- p/p.go --
+package p
+
+import "a.com/q"
+
+const P = q.Q + 1
+-- q/q.go --
+package q
+
+import "a.com/p"
+
+const Q = p.P + 1
+`
+
+ Run(t, src, func(t *testing.T, env *Env) {
+ env.OnceMet(
+ InitialWorkspaceLoad,
+ Diagnostics(
+ env.AtRegexp("a/a.go", "import\n()"),
+ FromSource(string(source.ParseError)),
+ ),
+ Diagnostics(
+ AtPosition("c/c.go", 0, 0),
+ FromSource(string(source.ListError)),
+ WithMessage("may indicate failure to perform cgo processing"),
+ ),
+ Diagnostics(
+ env.AtRegexp("p/p.go", `"a.com/q"`),
+ FromSource(string(source.ListError)),
+ WithMessage("import cycle not allowed"),
+ ),
+ )
+ })
+}
diff --git a/gopls/internal/regtest/misc/hover_test.go b/gopls/internal/regtest/misc/hover_test.go
index dafcc6203..013c4d160 100644
--- a/gopls/internal/regtest/misc/hover_test.go
+++ b/gopls/internal/regtest/misc/hover_test.go
@@ -204,6 +204,10 @@ func main() {
env.OpenFile("main.go")
for _, test := range tests {
got, _ := env.Hover("main.go", env.RegexpSearch("main.go", test.hoverPackage))
+ if got == nil {
+ t.Error("nil hover for", test.hoverPackage)
+ continue
+ }
if !strings.Contains(got.Value, test.want) {
t.Errorf("Hover: got:\n%q\nwant:\n%q", got.Value, test.want)
}