Diffstat (limited to 'gopls/internal/lsp/source')
-rw-r--r--  gopls/internal/lsp/source/add_import.go | 26
-rwxr-xr-x  gopls/internal/lsp/source/api_json.go | 1118
-rw-r--r--  gopls/internal/lsp/source/call_hierarchy.go | 311
-rw-r--r--  gopls/internal/lsp/source/code_lens.go | 248
-rw-r--r--  gopls/internal/lsp/source/comment.go | 384
-rw-r--r--  gopls/internal/lsp/source/comment_go118_test.go | 371
-rw-r--r--  gopls/internal/lsp/source/comment_go119.go | 56
-rw-r--r--  gopls/internal/lsp/source/completion/builtin.go | 147
-rw-r--r--  gopls/internal/lsp/source/completion/completion.go | 3252
-rw-r--r--  gopls/internal/lsp/source/completion/deep_completion.go | 362
-rw-r--r--  gopls/internal/lsp/source/completion/deep_completion_test.go | 33
-rw-r--r--  gopls/internal/lsp/source/completion/definition.go | 160
-rw-r--r--  gopls/internal/lsp/source/completion/format.go | 338
-rw-r--r--  gopls/internal/lsp/source/completion/fuzz.go | 142
-rw-r--r--  gopls/internal/lsp/source/completion/keywords.go | 154
-rw-r--r--  gopls/internal/lsp/source/completion/labels.go | 112
-rw-r--r--  gopls/internal/lsp/source/completion/literal.go | 592
-rw-r--r--  gopls/internal/lsp/source/completion/package.go | 351
-rw-r--r--  gopls/internal/lsp/source/completion/package_test.go | 81
-rw-r--r--  gopls/internal/lsp/source/completion/postfix_snippets.go | 471
-rw-r--r--  gopls/internal/lsp/source/completion/printf.go | 172
-rw-r--r--  gopls/internal/lsp/source/completion/printf_test.go | 72
-rw-r--r--  gopls/internal/lsp/source/completion/snippet.go | 116
-rw-r--r--  gopls/internal/lsp/source/completion/statements.go | 361
-rw-r--r--  gopls/internal/lsp/source/completion/util.go | 344
-rw-r--r--  gopls/internal/lsp/source/completion/util_test.go | 28
-rw-r--r--  gopls/internal/lsp/source/definition.go | 229
-rw-r--r--  gopls/internal/lsp/source/diagnostics.go | 138
-rw-r--r--  gopls/internal/lsp/source/extract.go | 1331
-rw-r--r--  gopls/internal/lsp/source/fix.go | 138
-rw-r--r--  gopls/internal/lsp/source/folding_range.go | 193
-rw-r--r--  gopls/internal/lsp/source/format.go | 391
-rw-r--r--  gopls/internal/lsp/source/format_test.go | 75
-rw-r--r--  gopls/internal/lsp/source/gc_annotations.go | 221
-rw-r--r--  gopls/internal/lsp/source/highlight.go | 484
-rw-r--r--  gopls/internal/lsp/source/hover.go | 951
-rw-r--r--  gopls/internal/lsp/source/identifier.go | 174
-rw-r--r--  gopls/internal/lsp/source/identifier_test.go | 103
-rw-r--r--  gopls/internal/lsp/source/implementation.go | 482
-rw-r--r--  gopls/internal/lsp/source/inlay_hint.go | 394
-rw-r--r--  gopls/internal/lsp/source/known_packages.go | 140
-rw-r--r--  gopls/internal/lsp/source/linkname.go | 136
-rw-r--r--  gopls/internal/lsp/source/methodsets/methodsets.go | 508
-rw-r--r--  gopls/internal/lsp/source/options.go | 1631
-rw-r--r--  gopls/internal/lsp/source/options_test.go | 206
-rw-r--r--  gopls/internal/lsp/source/references.go | 582
-rw-r--r--  gopls/internal/lsp/source/rename.go | 1244
-rw-r--r--  gopls/internal/lsp/source/rename_check.go | 921
-rw-r--r--  gopls/internal/lsp/source/signature_help.go | 185
-rw-r--r--  gopls/internal/lsp/source/stub.go | 238
-rw-r--r--  gopls/internal/lsp/source/symbols.go | 227
-rw-r--r--  gopls/internal/lsp/source/type_definition.go | 55
-rw-r--r--  gopls/internal/lsp/source/types_format.go | 517
-rw-r--r--  gopls/internal/lsp/source/util.go | 555
-rw-r--r--  gopls/internal/lsp/source/view.go | 857
-rw-r--r--  gopls/internal/lsp/source/workspace_symbol.go | 632
-rw-r--r--  gopls/internal/lsp/source/workspace_symbol_test.go | 136
-rw-r--r--  gopls/internal/lsp/source/xrefs/xrefs.go | 216
58 files changed, 24092 insertions(+), 0 deletions(-)
diff --git a/gopls/internal/lsp/source/add_import.go b/gopls/internal/lsp/source/add_import.go
new file mode 100644
index 000000000..cd8ec7ab7
--- /dev/null
+++ b/gopls/internal/lsp/source/add_import.go
@@ -0,0 +1,26 @@
+// Copyright 2020 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package source
+
+import (
+ "context"
+
+ "golang.org/x/tools/gopls/internal/lsp/protocol"
+ "golang.org/x/tools/internal/imports"
+)
+
+// AddImport adds a single import statement to the given file.
+func AddImport(ctx context.Context, snapshot Snapshot, fh FileHandle, importPath string) ([]protocol.TextEdit, error) {
+ pgf, err := snapshot.ParseGo(ctx, fh, ParseFull)
+ if err != nil {
+ return nil, err
+ }
+ return ComputeOneImportFixEdits(snapshot, pgf, &imports.ImportFix{
+ StmtInfo: imports.ImportInfo{
+ ImportPath: importPath,
+ },
+ FixType: imports.AddImport,
+ })
+}
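
For readers who want to perform the same transformation outside gopls, the public golang.org/x/tools/go/ast/astutil package offers an equivalent one-shot import insertion. The sketch below is illustrative only: it is not the code path AddImport takes (AddImport returns protocol.TextEdits computed via the internal imports package rather than rewriting the file), and the input source is invented.

	package main

	import (
		"go/format"
		"go/parser"
		"go/token"
		"os"

		"golang.org/x/tools/go/ast/astutil"
	)

	func main() {
		// Invented example input: a file that uses fmt without importing it.
		src := "package main\n\nfunc main() { fmt.Println(\"hi\") }\n"

		fset := token.NewFileSet()
		f, err := parser.ParseFile(fset, "main.go", src, parser.ParseComments)
		if err != nil {
			panic(err)
		}

		// astutil.AddImport inserts the path into an existing import group,
		// creating an import declaration if none exists. This is the same
		// effect AddImport above achieves by returning protocol.TextEdits.
		astutil.AddImport(fset, f, "fmt")

		// Print the rewritten file with the new import in place.
		if err := format.Node(os.Stdout, fset, f); err != nil {
			panic(err)
		}
	}

Returning edits instead of rewritten text, as AddImport does, lets an LSP client apply the change through its normal document-synchronization flow (workspace/applyEdit) rather than reloading the file from disk.
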
diff --git a/gopls/internal/lsp/source/api_json.go b/gopls/internal/lsp/source/api_json.go
new file mode 100755
index 000000000..e655eb01b
--- /dev/null
+++ b/gopls/internal/lsp/source/api_json.go
@@ -0,0 +1,1118 @@
+// Code generated by "golang.org/x/tools/gopls/doc/generate"; DO NOT EDIT.
+
+package source
+
+var GeneratedAPIJSON = &APIJSON{
+ Options: map[string][]*OptionJSON{
+ "User": {
+ {
+ Name: "buildFlags",
+ Type: "[]string",
+ Doc: "buildFlags is the set of flags passed on to the build system when invoked.\nIt is applied to queries like `go list`, which is used when discovering files.\nThe most common use is to set `-tags`.\n",
+ Default: "[]",
+ Hierarchy: "build",
+ },
+ {
+ Name: "env",
+ Type: "map[string]string",
+ Doc: "env adds environment variables to external commands run by `gopls`, most notably `go list`.\n",
+ Default: "{}",
+ Hierarchy: "build",
+ },
+ {
+ Name: "directoryFilters",
+ Type: "[]string",
+ Doc: "directoryFilters can be used to exclude unwanted directories from the\nworkspace. By default, all directories are included. Filters are an\noperator, `+` to include and `-` to exclude, followed by a path prefix\nrelative to the workspace folder. They are evaluated in order, and\nthe last filter that applies to a path controls whether it is included.\nThe path prefix can be empty, so an initial `-` excludes everything.\n\nDirectoryFilters also supports the `**` operator to match 0 or more directories.\n\nExamples:\n\nExclude node_modules at current depth: `-node_modules`\n\nExclude node_modules at any depth: `-**/node_modules`\n\nInclude only project_a: `-` (exclude everything), `+project_a`\n\nInclude only project_a, but not node_modules inside it: `-`, `+project_a`, `-project_a/node_modules`\n",
+ Default: "[\"-**/node_modules\"]",
+ Hierarchy: "build",
+ },
+ {
+ Name: "templateExtensions",
+ Type: "[]string",
+ Doc: "templateExtensions gives the extensions of file names that are treateed\nas template files. (The extension\nis the part of the file name after the final dot.)\n",
+ Default: "[]",
+ Hierarchy: "build",
+ },
+ {
+ Name: "memoryMode",
+ Type: "enum",
+ Doc: "memoryMode controls the tradeoff `gopls` makes between memory usage and\ncorrectness.\n\nValues other than `Normal` are untested and may break in surprising ways.\n",
+ EnumValues: []EnumValue{
+ {
+ Value: "\"DegradeClosed\"",
+ Doc: "`\"DegradeClosed\"`: In DegradeClosed mode, `gopls` will collect less information about\npackages without open files. As a result, features like Find\nReferences and Rename will miss results in such packages.\n",
+ },
+ {Value: "\"Normal\""},
+ },
+ Default: "\"Normal\"",
+ Status: "experimental",
+ Hierarchy: "build",
+ },
+ {
+ Name: "expandWorkspaceToModule",
+ Type: "bool",
+ Doc: "expandWorkspaceToModule instructs `gopls` to adjust the scope of the\nworkspace to find the best available module root. `gopls` first looks for\na go.mod file in any parent directory of the workspace folder, expanding\nthe scope to that directory if it exists. If no viable parent directory is\nfound, gopls will check if there is exactly one child directory containing\na go.mod file, narrowing the scope to that directory if it exists.\n",
+ Default: "true",
+ Status: "experimental",
+ Hierarchy: "build",
+ },
+ {
+ Name: "allowModfileModifications",
+ Type: "bool",
+ Doc: "allowModfileModifications disables -mod=readonly, allowing imports from\nout-of-scope modules. This option will eventually be removed.\n",
+ Default: "false",
+ Status: "experimental",
+ Hierarchy: "build",
+ },
+ {
+ Name: "allowImplicitNetworkAccess",
+ Type: "bool",
+ Doc: "allowImplicitNetworkAccess disables GOPROXY=off, allowing implicit module\ndownloads rather than requiring user action. This option will eventually\nbe removed.\n",
+ Default: "false",
+ Status: "experimental",
+ Hierarchy: "build",
+ },
+ {
+ Name: "standaloneTags",
+ Type: "[]string",
+ Doc: "standaloneTags specifies a set of build constraints that identify\nindividual Go source files that make up the entire main package of an\nexecutable.\n\nA common example of standalone main files is the convention of using the\ndirective `//go:build ignore` to denote files that are not intended to be\nincluded in any package, for example because they are invoked directly by\nthe developer using `go run`.\n\nGopls considers a file to be a standalone main file if and only if it has\npackage name \"main\" and has a build directive of the exact form\n\"//go:build tag\" or \"// +build tag\", where tag is among the list of tags\nconfigured by this setting. Notably, if the build constraint is more\ncomplicated than a simple tag (such as the composite constraint\n`//go:build tag && go1.18`), the file is not considered to be a standalone\nmain file.\n\nThis setting is only supported when gopls is built with Go 1.16 or later.\n",
+ Default: "[\"ignore\"]",
+ Hierarchy: "build",
+ },
+ {
+ Name: "hoverKind",
+ Type: "enum",
+ Doc: "hoverKind controls the information that appears in the hover text.\nSingleLine and Structured are intended for use only by authors of editor plugins.\n",
+ EnumValues: []EnumValue{
+ {Value: "\"FullDocumentation\""},
+ {Value: "\"NoDocumentation\""},
+ {Value: "\"SingleLine\""},
+ {
+ Value: "\"Structured\"",
+ Doc: "`\"Structured\"` is an experimental setting that returns a structured hover format.\nThis format separates the signature from the documentation, so that the client\ncan do more manipulation of these fields.\n\nThis should only be used by clients that support this behavior.\n",
+ },
+ {Value: "\"SynopsisDocumentation\""},
+ },
+ Default: "\"FullDocumentation\"",
+ Hierarchy: "ui.documentation",
+ },
+ {
+ Name: "linkTarget",
+ Type: "string",
+ Doc: "linkTarget controls where documentation links go.\nIt might be one of:\n\n* `\"godoc.org\"`\n* `\"pkg.go.dev\"`\n\nIf company chooses to use its own `godoc.org`, its address can be used as well.\n\nModules matching the GOPRIVATE environment variable will not have\ndocumentation links in hover.\n",
+ Default: "\"pkg.go.dev\"",
+ Hierarchy: "ui.documentation",
+ },
+ {
+ Name: "linksInHover",
+ Type: "bool",
+ Doc: "linksInHover toggles the presence of links to documentation in hover.\n",
+ Default: "true",
+ Hierarchy: "ui.documentation",
+ },
+ {
+ Name: "usePlaceholders",
+ Type: "bool",
+ Doc: "placeholders enables placeholders for function parameters or struct\nfields in completion responses.\n",
+ Default: "false",
+ Hierarchy: "ui.completion",
+ },
+ {
+ Name: "completionBudget",
+ Type: "time.Duration",
+ Doc: "completionBudget is the soft latency goal for completion requests. Most\nrequests finish in a couple milliseconds, but in some cases deep\ncompletions can take much longer. As we use up our budget we\ndynamically reduce the search scope to ensure we return timely\nresults. Zero means unlimited.\n",
+ Default: "\"100ms\"",
+ Status: "debug",
+ Hierarchy: "ui.completion",
+ },
+ {
+ Name: "matcher",
+ Type: "enum",
+ Doc: "matcher sets the algorithm that is used when calculating completion\ncandidates.\n",
+ EnumValues: []EnumValue{
+ {Value: "\"CaseInsensitive\""},
+ {Value: "\"CaseSensitive\""},
+ {Value: "\"Fuzzy\""},
+ },
+ Default: "\"Fuzzy\"",
+ Status: "advanced",
+ Hierarchy: "ui.completion",
+ },
+ {
+ Name: "experimentalPostfixCompletions",
+ Type: "bool",
+ Doc: "experimentalPostfixCompletions enables artificial method snippets\nsuch as \"someSlice.sort!\".\n",
+ Default: "true",
+ Status: "experimental",
+ Hierarchy: "ui.completion",
+ },
+ {
+ Name: "importShortcut",
+ Type: "enum",
+ Doc: "importShortcut specifies whether import statements should link to\ndocumentation or go to definitions.\n",
+ EnumValues: []EnumValue{
+ {Value: "\"Both\""},
+ {Value: "\"Definition\""},
+ {Value: "\"Link\""},
+ },
+ Default: "\"Both\"",
+ Hierarchy: "ui.navigation",
+ },
+ {
+ Name: "symbolMatcher",
+ Type: "enum",
+ Doc: "symbolMatcher sets the algorithm that is used when finding workspace symbols.\n",
+ EnumValues: []EnumValue{
+ {Value: "\"CaseInsensitive\""},
+ {Value: "\"CaseSensitive\""},
+ {Value: "\"FastFuzzy\""},
+ {Value: "\"Fuzzy\""},
+ },
+ Default: "\"FastFuzzy\"",
+ Status: "advanced",
+ Hierarchy: "ui.navigation",
+ },
+ {
+ Name: "symbolStyle",
+ Type: "enum",
+ Doc: "symbolStyle controls how symbols are qualified in symbol responses.\n\nExample Usage:\n\n```json5\n\"gopls\": {\n...\n \"symbolStyle\": \"Dynamic\",\n...\n}\n```\n",
+ EnumValues: []EnumValue{
+ {
+ Value: "\"Dynamic\"",
+ Doc: "`\"Dynamic\"` uses whichever qualifier results in the highest scoring\nmatch for the given symbol query. Here a \"qualifier\" is any \"/\" or \".\"\ndelimited suffix of the fully qualified symbol. i.e. \"to/pkg.Foo.Field\" or\njust \"Foo.Field\".\n",
+ },
+ {
+ Value: "\"Full\"",
+ Doc: "`\"Full\"` is fully qualified symbols, i.e.\n\"path/to/pkg.Foo.Field\".\n",
+ },
+ {
+ Value: "\"Package\"",
+ Doc: "`\"Package\"` is package qualified symbols i.e.\n\"pkg.Foo.Field\".\n",
+ },
+ },
+ Default: "\"Dynamic\"",
+ Status: "advanced",
+ Hierarchy: "ui.navigation",
+ },
+ {
+ Name: "analyses",
+ Type: "map[string]bool",
+ Doc: "analyses specify analyses that the user would like to enable or disable.\nA map of the names of analysis passes that should be enabled/disabled.\nA full list of analyzers that gopls uses can be found in\n[analyzers.md](https://github.com/golang/tools/blob/master/gopls/doc/analyzers.md).\n\nExample Usage:\n\n```json5\n...\n\"analyses\": {\n \"unreachable\": false, // Disable the unreachable analyzer.\n \"unusedparams\": true // Enable the unusedparams analyzer.\n}\n...\n```\n",
+ EnumKeys: EnumKeys{
+ ValueType: "bool",
+ Keys: []EnumKey{
+ {
+ Name: "\"asmdecl\"",
+ Doc: "report mismatches between assembly files and Go declarations",
+ Default: "true",
+ },
+ {
+ Name: "\"assign\"",
+ Doc: "check for useless assignments\n\nThis checker reports assignments of the form x = x or a[i] = a[i].\nThese are almost always useless, and even when they aren't they are\nusually a mistake.",
+ Default: "true",
+ },
+ {
+ Name: "\"atomic\"",
+ Doc: "check for common mistakes using the sync/atomic package\n\nThe atomic checker looks for assignment statements of the form:\n\n\tx = atomic.AddUint64(&x, 1)\n\nwhich are not atomic.",
+ Default: "true",
+ },
+ {
+ Name: "\"atomicalign\"",
+ Doc: "check for non-64-bits-aligned arguments to sync/atomic functions",
+ Default: "true",
+ },
+ {
+ Name: "\"bools\"",
+ Doc: "check for common mistakes involving boolean operators",
+ Default: "true",
+ },
+ {
+ Name: "\"buildtag\"",
+ Doc: "check //go:build and // +build directives",
+ Default: "true",
+ },
+ {
+ Name: "\"cgocall\"",
+ Doc: "detect some violations of the cgo pointer passing rules\n\nCheck for invalid cgo pointer passing.\nThis looks for code that uses cgo to call C code passing values\nwhose types are almost always invalid according to the cgo pointer\nsharing rules.\nSpecifically, it warns about attempts to pass a Go chan, map, func,\nor slice to C, either directly, or via a pointer, array, or struct.",
+ Default: "true",
+ },
+ {
+ Name: "\"composites\"",
+ Doc: "check for unkeyed composite literals\n\nThis analyzer reports a diagnostic for composite literals of struct\ntypes imported from another package that do not use the field-keyed\nsyntax. Such literals are fragile because the addition of a new field\n(even if unexported) to the struct will cause compilation to fail.\n\nAs an example,\n\n\terr = &net.DNSConfigError{err}\n\nshould be replaced by:\n\n\terr = &net.DNSConfigError{Err: err}\n",
+ Default: "true",
+ },
+ {
+ Name: "\"copylocks\"",
+ Doc: "check for locks erroneously passed by value\n\nInadvertently copying a value containing a lock, such as sync.Mutex or\nsync.WaitGroup, may cause both copies to malfunction. Generally such\nvalues should be referred to through a pointer.",
+ Default: "true",
+ },
+ {
+ Name: "\"deepequalerrors\"",
+ Doc: "check for calls of reflect.DeepEqual on error values\n\nThe deepequalerrors checker looks for calls of the form:\n\n reflect.DeepEqual(err1, err2)\n\nwhere err1 and err2 are errors. Using reflect.DeepEqual to compare\nerrors is discouraged.",
+ Default: "true",
+ },
+ {
+ Name: "\"directive\"",
+ Doc: "check Go toolchain directives such as //go:debug\n\nThis analyzer checks for problems with known Go toolchain directives\nin all Go source files in a package directory, even those excluded by\n//go:build constraints, and all non-Go source files too.\n\nFor //go:debug (see https://go.dev/doc/godebug), the analyzer checks\nthat the directives are placed only in Go source files, only above the\npackage comment, and only in package main or *_test.go files.\n\nSupport for other known directives may be added in the future.\n\nThis analyzer does not check //go:build, which is handled by the\nbuildtag analyzer.\n",
+ Default: "true",
+ },
+ {
+ Name: "\"embed\"",
+ Doc: "check for //go:embed directive import\n\nThis analyzer checks that the embed package is imported when source code contains //go:embed comment directives.\nThe embed package must be imported for //go:embed directives to function.import _ \"embed\".",
+ Default: "true",
+ },
+ {
+ Name: "\"errorsas\"",
+ Doc: "report passing non-pointer or non-error values to errors.As\n\nThe errorsas analysis reports calls to errors.As where the type\nof the second argument is not a pointer to a type implementing error.",
+ Default: "true",
+ },
+ {
+ Name: "\"fieldalignment\"",
+ Doc: "find structs that would use less memory if their fields were sorted\n\nThis analyzer find structs that can be rearranged to use less memory, and provides\na suggested edit with the most compact order.\n\nNote that there are two different diagnostics reported. One checks struct size,\nand the other reports \"pointer bytes\" used. Pointer bytes is how many bytes of the\nobject that the garbage collector has to potentially scan for pointers, for example:\n\n\tstruct { uint32; string }\n\nhave 16 pointer bytes because the garbage collector has to scan up through the string's\ninner pointer.\n\n\tstruct { string; *uint32 }\n\nhas 24 pointer bytes because it has to scan further through the *uint32.\n\n\tstruct { string; uint32 }\n\nhas 8 because it can stop immediately after the string pointer.\n\nBe aware that the most compact order is not always the most efficient.\nIn rare cases it may cause two variables each updated by its own goroutine\nto occupy the same CPU cache line, inducing a form of memory contention\nknown as \"false sharing\" that slows down both goroutines.\n",
+ Default: "false",
+ },
+ {
+ Name: "\"httpresponse\"",
+ Doc: "check for mistakes using HTTP responses\n\nA common mistake when using the net/http package is to defer a function\ncall to close the http.Response Body before checking the error that\ndetermines whether the response is valid:\n\n\tresp, err := http.Head(url)\n\tdefer resp.Body.Close()\n\tif err != nil {\n\t\tlog.Fatal(err)\n\t}\n\t// (defer statement belongs here)\n\nThis checker helps uncover latent nil dereference bugs by reporting a\ndiagnostic for such mistakes.",
+ Default: "true",
+ },
+ {
+ Name: "\"ifaceassert\"",
+ Doc: "detect impossible interface-to-interface type assertions\n\nThis checker flags type assertions v.(T) and corresponding type-switch cases\nin which the static type V of v is an interface that cannot possibly implement\nthe target interface T. This occurs when V and T contain methods with the same\nname but different signatures. Example:\n\n\tvar v interface {\n\t\tRead()\n\t}\n\t_ = v.(io.Reader)\n\nThe Read method in v has a different signature than the Read method in\nio.Reader, so this assertion cannot succeed.\n",
+ Default: "true",
+ },
+ {
+ Name: "\"infertypeargs\"",
+ Doc: "check for unnecessary type arguments in call expressions\n\nExplicit type arguments may be omitted from call expressions if they can be\ninferred from function arguments, or from other type arguments:\n\n\tfunc f[T any](T) {}\n\t\n\tfunc _() {\n\t\tf[string](\"foo\") // string could be inferred\n\t}\n",
+ Default: "true",
+ },
+ {
+ Name: "\"loopclosure\"",
+ Doc: "check references to loop variables from within nested functions\n\nThis analyzer reports places where a function literal references the\niteration variable of an enclosing loop, and the loop calls the function\nin such a way (e.g. with go or defer) that it may outlive the loop\niteration and possibly observe the wrong value of the variable.\n\nIn this example, all the deferred functions run after the loop has\ncompleted, so all observe the final value of v.\n\n for _, v := range list {\n defer func() {\n use(v) // incorrect\n }()\n }\n\nOne fix is to create a new variable for each iteration of the loop:\n\n for _, v := range list {\n v := v // new var per iteration\n defer func() {\n use(v) // ok\n }()\n }\n\nThe next example uses a go statement and has a similar problem.\nIn addition, it has a data race because the loop updates v\nconcurrent with the goroutines accessing it.\n\n for _, v := range elem {\n go func() {\n use(v) // incorrect, and a data race\n }()\n }\n\nA fix is the same as before. The checker also reports problems\nin goroutines started by golang.org/x/sync/errgroup.Group.\nA hard-to-spot variant of this form is common in parallel tests:\n\n func Test(t *testing.T) {\n for _, test := range tests {\n t.Run(test.name, func(t *testing.T) {\n t.Parallel()\n use(test) // incorrect, and a data race\n })\n }\n }\n\nThe t.Parallel() call causes the rest of the function to execute\nconcurrent with the loop.\n\nThe analyzer reports references only in the last statement,\nas it is not deep enough to understand the effects of subsequent\nstatements that might render the reference benign.\n(\"Last statement\" is defined recursively in compound\nstatements such as if, switch, and select.)\n\nSee: https://golang.org/doc/go_faq.html#closures_and_goroutines",
+ Default: "true",
+ },
+ {
+ Name: "\"lostcancel\"",
+ Doc: "check cancel func returned by context.WithCancel is called\n\nThe cancellation function returned by context.WithCancel, WithTimeout,\nand WithDeadline must be called or the new context will remain live\nuntil its parent context is cancelled.\n(The background context is never cancelled.)",
+ Default: "true",
+ },
+ {
+ Name: "\"nilfunc\"",
+ Doc: "check for useless comparisons between functions and nil\n\nA useless comparison is one like f == nil as opposed to f() == nil.",
+ Default: "true",
+ },
+ {
+ Name: "\"nilness\"",
+ Doc: "check for redundant or impossible nil comparisons\n\nThe nilness checker inspects the control-flow graph of each function in\na package and reports nil pointer dereferences, degenerate nil\npointers, and panics with nil values. A degenerate comparison is of the form\nx==nil or x!=nil where x is statically known to be nil or non-nil. These are\noften a mistake, especially in control flow related to errors. Panics with nil\nvalues are checked because they are not detectable by\n\n\tif r := recover(); r != nil {\n\nThis check reports conditions such as:\n\n\tif f == nil { // impossible condition (f is a function)\n\t}\n\nand:\n\n\tp := &v\n\t...\n\tif p != nil { // tautological condition\n\t}\n\nand:\n\n\tif p == nil {\n\t\tprint(*p) // nil dereference\n\t}\n\nand:\n\n\tif p == nil {\n\t\tpanic(p)\n\t}\n",
+ Default: "false",
+ },
+ {
+ Name: "\"printf\"",
+ Doc: "check consistency of Printf format strings and arguments\n\nThe check applies to known functions (for example, those in package fmt)\nas well as any detected wrappers of known functions.\n\nA function that wants to avail itself of printf checking but is not\nfound by this analyzer's heuristics (for example, due to use of\ndynamic calls) can insert a bogus call:\n\n\tif false {\n\t\t_ = fmt.Sprintf(format, args...) // enable printf checking\n\t}\n\nThe -funcs flag specifies a comma-separated list of names of additional\nknown formatting functions or methods. If the name contains a period,\nit must denote a specific function using one of the following forms:\n\n\tdir/pkg.Function\n\tdir/pkg.Type.Method\n\t(*dir/pkg.Type).Method\n\nOtherwise the name is interpreted as a case-insensitive unqualified\nidentifier such as \"errorf\". Either way, if a listed name ends in f, the\nfunction is assumed to be Printf-like, taking a format string before the\nargument list. Otherwise it is assumed to be Print-like, taking a list\nof arguments with no format string.\n",
+ Default: "true",
+ },
+ {
+ Name: "\"shadow\"",
+ Doc: "check for possible unintended shadowing of variables\n\nThis analyzer check for shadowed variables.\nA shadowed variable is a variable declared in an inner scope\nwith the same name and type as a variable in an outer scope,\nand where the outer variable is mentioned after the inner one\nis declared.\n\n(This definition can be refined; the module generates too many\nfalse positives and is not yet enabled by default.)\n\nFor example:\n\n\tfunc BadRead(f *os.File, buf []byte) error {\n\t\tvar err error\n\t\tfor {\n\t\t\tn, err := f.Read(buf) // shadows the function variable 'err'\n\t\t\tif err != nil {\n\t\t\t\tbreak // causes return of wrong value\n\t\t\t}\n\t\t\tfoo(buf)\n\t\t}\n\t\treturn err\n\t}\n",
+ Default: "false",
+ },
+ {
+ Name: "\"shift\"",
+ Doc: "check for shifts that equal or exceed the width of the integer",
+ Default: "true",
+ },
+ {
+ Name: "\"simplifycompositelit\"",
+ Doc: "check for composite literal simplifications\n\nAn array, slice, or map composite literal of the form:\n\t[]T{T{}, T{}}\nwill be simplified to:\n\t[]T{{}, {}}\n\nThis is one of the simplifications that \"gofmt -s\" applies.",
+ Default: "true",
+ },
+ {
+ Name: "\"simplifyrange\"",
+ Doc: "check for range statement simplifications\n\nA range of the form:\n\tfor x, _ = range v {...}\nwill be simplified to:\n\tfor x = range v {...}\n\nA range of the form:\n\tfor _ = range v {...}\nwill be simplified to:\n\tfor range v {...}\n\nThis is one of the simplifications that \"gofmt -s\" applies.",
+ Default: "true",
+ },
+ {
+ Name: "\"simplifyslice\"",
+ Doc: "check for slice simplifications\n\nA slice expression of the form:\n\ts[a:len(s)]\nwill be simplified to:\n\ts[a:]\n\nThis is one of the simplifications that \"gofmt -s\" applies.",
+ Default: "true",
+ },
+ {
+ Name: "\"sortslice\"",
+ Doc: "check the argument type of sort.Slice\n\nsort.Slice requires an argument of a slice type. Check that\nthe interface{} value passed to sort.Slice is actually a slice.",
+ Default: "true",
+ },
+ {
+ Name: "\"stdmethods\"",
+ Doc: "check signature of methods of well-known interfaces\n\nSometimes a type may be intended to satisfy an interface but may fail to\ndo so because of a mistake in its method signature.\nFor example, the result of this WriteTo method should be (int64, error),\nnot error, to satisfy io.WriterTo:\n\n\ttype myWriterTo struct{...}\n func (myWriterTo) WriteTo(w io.Writer) error { ... }\n\nThis check ensures that each method whose name matches one of several\nwell-known interface methods from the standard library has the correct\nsignature for that interface.\n\nChecked method names include:\n\tFormat GobEncode GobDecode MarshalJSON MarshalXML\n\tPeek ReadByte ReadFrom ReadRune Scan Seek\n\tUnmarshalJSON UnreadByte UnreadRune WriteByte\n\tWriteTo\n",
+ Default: "true",
+ },
+ {
+ Name: "\"stringintconv\"",
+ Doc: "check for string(int) conversions\n\nThis checker flags conversions of the form string(x) where x is an integer\n(but not byte or rune) type. Such conversions are discouraged because they\nreturn the UTF-8 representation of the Unicode code point x, and not a decimal\nstring representation of x as one might expect. Furthermore, if x denotes an\ninvalid code point, the conversion cannot be statically rejected.\n\nFor conversions that intend on using the code point, consider replacing them\nwith string(rune(x)). Otherwise, strconv.Itoa and its equivalents return the\nstring representation of the value in the desired base.\n",
+ Default: "true",
+ },
+ {
+ Name: "\"structtag\"",
+ Doc: "check that struct field tags conform to reflect.StructTag.Get\n\nAlso report certain struct tags (json, xml) used with unexported fields.",
+ Default: "true",
+ },
+ {
+ Name: "\"testinggoroutine\"",
+ Doc: "report calls to (*testing.T).Fatal from goroutines started by a test.\n\nFunctions that abruptly terminate a test, such as the Fatal, Fatalf, FailNow, and\nSkip{,f,Now} methods of *testing.T, must be called from the test goroutine itself.\nThis checker detects calls to these functions that occur within a goroutine\nstarted by the test. For example:\n\nfunc TestFoo(t *testing.T) {\n go func() {\n t.Fatal(\"oops\") // error: (*T).Fatal called from non-test goroutine\n }()\n}\n",
+ Default: "true",
+ },
+ {
+ Name: "\"tests\"",
+ Doc: "check for common mistaken usages of tests and examples\n\nThe tests checker walks Test, Benchmark and Example functions checking\nmalformed names, wrong signatures and examples documenting non-existent\nidentifiers.\n\nPlease see the documentation for package testing in golang.org/pkg/testing\nfor the conventions that are enforced for Tests, Benchmarks, and Examples.",
+ Default: "true",
+ },
+ {
+ Name: "\"timeformat\"",
+ Doc: "check for calls of (time.Time).Format or time.Parse with 2006-02-01\n\nThe timeformat checker looks for time formats with the 2006-02-01 (yyyy-dd-mm)\nformat. Internationally, \"yyyy-dd-mm\" does not occur in common calendar date\nstandards, and so it is more likely that 2006-01-02 (yyyy-mm-dd) was intended.\n",
+ Default: "true",
+ },
+ {
+ Name: "\"unmarshal\"",
+ Doc: "report passing non-pointer or non-interface values to unmarshal\n\nThe unmarshal analysis reports calls to functions such as json.Unmarshal\nin which the argument type is not a pointer or an interface.",
+ Default: "true",
+ },
+ {
+ Name: "\"unreachable\"",
+ Doc: "check for unreachable code\n\nThe unreachable analyzer finds statements that execution can never reach\nbecause they are preceded by an return statement, a call to panic, an\ninfinite loop, or similar constructs.",
+ Default: "true",
+ },
+ {
+ Name: "\"unsafeptr\"",
+ Doc: "check for invalid conversions of uintptr to unsafe.Pointer\n\nThe unsafeptr analyzer reports likely incorrect uses of unsafe.Pointer\nto convert integers to pointers. A conversion from uintptr to\nunsafe.Pointer is invalid if it implies that there is a uintptr-typed\nword in memory that holds a pointer value, because that word will be\ninvisible to stack copying and to the garbage collector.",
+ Default: "true",
+ },
+ {
+ Name: "\"unusedparams\"",
+ Doc: "check for unused parameters of functions\n\nThe unusedparams analyzer checks functions to see if there are\nany parameters that are not being used.\n\nTo reduce false positives it ignores:\n- methods\n- parameters that do not have a name or are underscored\n- functions in test files\n- functions with empty bodies or those with just a return stmt",
+ Default: "false",
+ },
+ {
+ Name: "\"unusedresult\"",
+ Doc: "check for unused results of calls to some functions\n\nSome functions like fmt.Errorf return a result and have no side effects,\nso it is always a mistake to discard the result. This analyzer reports\ncalls to certain functions in which the result of the call is ignored.\n\nThe set of functions may be controlled using flags.",
+ Default: "true",
+ },
+ {
+ Name: "\"unusedwrite\"",
+ Doc: "checks for unused writes\n\nThe analyzer reports instances of writes to struct fields and\narrays that are never read. Specifically, when a struct object\nor an array is copied, its elements are copied implicitly by\nthe compiler, and any element write to this copy does nothing\nwith the original object.\n\nFor example:\n\n\ttype T struct { x int }\n\tfunc f(input []T) {\n\t\tfor i, v := range input { // v is a copy\n\t\t\tv.x = i // unused write to field x\n\t\t}\n\t}\n\nAnother example is about non-pointer receiver:\n\n\ttype T struct { x int }\n\tfunc (t T) f() { // t is a copy\n\t\tt.x = i // unused write to field x\n\t}\n",
+ Default: "false",
+ },
+ {
+ Name: "\"useany\"",
+ Doc: "check for constraints that could be simplified to \"any\"",
+ Default: "false",
+ },
+ {
+ Name: "\"fillreturns\"",
+ Doc: "suggest fixes for errors due to an incorrect number of return values\n\nThis checker provides suggested fixes for type errors of the\ntype \"wrong number of return values (want %d, got %d)\". For example:\n\tfunc m() (int, string, *bool, error) {\n\t\treturn\n\t}\nwill turn into\n\tfunc m() (int, string, *bool, error) {\n\t\treturn 0, \"\", nil, nil\n\t}\n\nThis functionality is similar to https://github.com/sqs/goreturns.\n",
+ Default: "true",
+ },
+ {
+ Name: "\"nonewvars\"",
+ Doc: "suggested fixes for \"no new vars on left side of :=\"\n\nThis checker provides suggested fixes for type errors of the\ntype \"no new vars on left side of :=\". For example:\n\tz := 1\n\tz := 2\nwill turn into\n\tz := 1\n\tz = 2\n",
+ Default: "true",
+ },
+ {
+ Name: "\"noresultvalues\"",
+ Doc: "suggested fixes for unexpected return values\n\nThis checker provides suggested fixes for type errors of the\ntype \"no result values expected\" or \"too many return values\".\nFor example:\n\tfunc z() { return nil }\nwill turn into\n\tfunc z() { return }\n",
+ Default: "true",
+ },
+ {
+ Name: "\"undeclaredname\"",
+ Doc: "suggested fixes for \"undeclared name: <>\"\n\nThis checker provides suggested fixes for type errors of the\ntype \"undeclared name: <>\". It will either insert a new statement,\nsuch as:\n\n\"<> := \"\n\nor a new function declaration, such as:\n\nfunc <>(inferred parameters) {\n\tpanic(\"implement me!\")\n}\n",
+ Default: "true",
+ },
+ {
+ Name: "\"unusedvariable\"",
+ Doc: "check for unused variables\n\nThe unusedvariable analyzer suggests fixes for unused variables errors.\n",
+ Default: "false",
+ },
+ {
+ Name: "\"fillstruct\"",
+ Doc: "note incomplete struct initializations\n\nThis analyzer provides diagnostics for any struct literals that do not have\nany fields initialized. Because the suggested fix for this analysis is\nexpensive to compute, callers should compute it separately, using the\nSuggestedFix function below.\n",
+ Default: "true",
+ },
+ {
+ Name: "\"stubmethods\"",
+ Doc: "stub methods analyzer\n\nThis analyzer generates method stubs for concrete types\nin order to implement a target interface",
+ Default: "true",
+ },
+ },
+ },
+ Default: "{}",
+ Hierarchy: "ui.diagnostic",
+ },
+ {
+ Name: "staticcheck",
+ Type: "bool",
+ Doc: "staticcheck enables additional analyses from staticcheck.io.\nThese analyses are documented on\n[Staticcheck's website](https://staticcheck.io/docs/checks/).\n",
+ Default: "false",
+ Status: "experimental",
+ Hierarchy: "ui.diagnostic",
+ },
+ {
+ Name: "annotations",
+ Type: "map[string]bool",
+ Doc: "annotations specifies the various kinds of optimization diagnostics\nthat should be reported by the gc_details command.\n",
+ EnumKeys: EnumKeys{
+ ValueType: "bool",
+ Keys: []EnumKey{
+ {
+ Name: "\"bounds\"",
+ Doc: "`\"bounds\"` controls bounds checking diagnostics.\n",
+ Default: "true",
+ },
+ {
+ Name: "\"escape\"",
+ Doc: "`\"escape\"` controls diagnostics about escape choices.\n",
+ Default: "true",
+ },
+ {
+ Name: "\"inline\"",
+ Doc: "`\"inline\"` controls diagnostics about inlining choices.\n",
+ Default: "true",
+ },
+ {
+ Name: "\"nil\"",
+ Doc: "`\"nil\"` controls nil checks.\n",
+ Default: "true",
+ },
+ },
+ },
+ Default: "{\"bounds\":true,\"escape\":true,\"inline\":true,\"nil\":true}",
+ Status: "experimental",
+ Hierarchy: "ui.diagnostic",
+ },
+ {
+ Name: "vulncheck",
+ Type: "enum",
+ Doc: "vulncheck enables vulnerability scanning.\n",
+ EnumValues: []EnumValue{
+ {
+ Value: "\"Imports\"",
+ Doc: "`\"Imports\"`: In Imports mode, `gopls` will report vulnerabilities that affect packages\ndirectly and indirectly used by the analyzed main module.\n",
+ },
+ {
+ Value: "\"Off\"",
+ Doc: "`\"Off\"`: Disable vulnerability analysis.\n",
+ },
+ },
+ Default: "\"Off\"",
+ Status: "experimental",
+ Hierarchy: "ui.diagnostic",
+ },
+ {
+ Name: "diagnosticsDelay",
+ Type: "time.Duration",
+ Doc: "diagnosticsDelay controls the amount of time that gopls waits\nafter the most recent file modification before computing deep diagnostics.\nSimple diagnostics (parsing and type-checking) are always run immediately\non recently modified packages.\n\nThis option must be set to a valid duration string, for example `\"250ms\"`.\n",
+ Default: "\"250ms\"",
+ Status: "advanced",
+ Hierarchy: "ui.diagnostic",
+ },
+ {
+ Name: "hints",
+ Type: "map[string]bool",
+ Doc: "hints specify inlay hints that users want to see. A full list of hints\nthat gopls uses can be found in\n[inlayHints.md](https://github.com/golang/tools/blob/master/gopls/doc/inlayHints.md).\n",
+ EnumKeys: EnumKeys{Keys: []EnumKey{
+ {
+ Name: "\"assignVariableTypes\"",
+ Doc: "Enable/disable inlay hints for variable types in assign statements:\n```go\n\ti/* int*/, j/* int*/ := 0, len(r)-1\n```",
+ Default: "false",
+ },
+ {
+ Name: "\"compositeLiteralFields\"",
+ Doc: "Enable/disable inlay hints for composite literal field names:\n```go\n\t{/*in: */\"Hello, world\", /*want: */\"dlrow ,olleH\"}\n```",
+ Default: "false",
+ },
+ {
+ Name: "\"compositeLiteralTypes\"",
+ Doc: "Enable/disable inlay hints for composite literal types:\n```go\n\tfor _, c := range []struct {\n\t\tin, want string\n\t}{\n\t\t/*struct{ in string; want string }*/{\"Hello, world\", \"dlrow ,olleH\"},\n\t}\n```",
+ Default: "false",
+ },
+ {
+ Name: "\"constantValues\"",
+ Doc: "Enable/disable inlay hints for constant values:\n```go\n\tconst (\n\t\tKindNone Kind = iota/* = 0*/\n\t\tKindPrint/* = 1*/\n\t\tKindPrintf/* = 2*/\n\t\tKindErrorf/* = 3*/\n\t)\n```",
+ Default: "false",
+ },
+ {
+ Name: "\"functionTypeParameters\"",
+ Doc: "Enable/disable inlay hints for implicit type parameters on generic functions:\n```go\n\tmyFoo/*[int, string]*/(1, \"hello\")\n```",
+ Default: "false",
+ },
+ {
+ Name: "\"parameterNames\"",
+ Doc: "Enable/disable inlay hints for parameter names:\n```go\n\tparseInt(/* str: */ \"123\", /* radix: */ 8)\n```",
+ Default: "false",
+ },
+ {
+ Name: "\"rangeVariableTypes\"",
+ Doc: "Enable/disable inlay hints for variable types in range statements:\n```go\n\tfor k/* int*/, v/* string*/ := range []string{} {\n\t\tfmt.Println(k, v)\n\t}\n```",
+ Default: "false",
+ },
+ }},
+ Default: "{}",
+ Status: "experimental",
+ Hierarchy: "ui.inlayhint",
+ },
+ {
+ Name: "codelenses",
+ Type: "map[string]bool",
+ Doc: "codelenses overrides the enabled/disabled state of code lenses. See the\n\"Code Lenses\" section of the\n[Settings page](https://github.com/golang/tools/blob/master/gopls/doc/settings.md#code-lenses)\nfor the list of supported lenses.\n\nExample Usage:\n\n```json5\n\"gopls\": {\n...\n \"codelenses\": {\n \"generate\": false, // Don't show the `go generate` lens.\n \"gc_details\": true // Show a code lens toggling the display of gc's choices.\n }\n...\n}\n```\n",
+ EnumKeys: EnumKeys{
+ ValueType: "bool",
+ Keys: []EnumKey{
+ {
+ Name: "\"gc_details\"",
+ Doc: "Toggle the calculation of gc annotations.",
+ Default: "false",
+ },
+ {
+ Name: "\"generate\"",
+ Doc: "Runs `go generate` for a given directory.",
+ Default: "true",
+ },
+ {
+ Name: "\"regenerate_cgo\"",
+ Doc: "Regenerates cgo definitions.",
+ Default: "true",
+ },
+ {
+ Name: "\"run_govulncheck\"",
+ Doc: "Run vulnerability check (`govulncheck`).",
+ Default: "false",
+ },
+ {
+ Name: "\"test\"",
+ Doc: "Runs `go test` for a specific set of test or benchmark functions.",
+ Default: "false",
+ },
+ {
+ Name: "\"tidy\"",
+ Doc: "Runs `go mod tidy` for a module.",
+ Default: "true",
+ },
+ {
+ Name: "\"upgrade_dependency\"",
+ Doc: "Upgrades a dependency in the go.mod file for a module.",
+ Default: "true",
+ },
+ {
+ Name: "\"vendor\"",
+ Doc: "Runs `go mod vendor` for a module.",
+ Default: "true",
+ },
+ },
+ },
+ Default: "{\"gc_details\":false,\"generate\":true,\"regenerate_cgo\":true,\"tidy\":true,\"upgrade_dependency\":true,\"vendor\":true}",
+ Hierarchy: "ui",
+ },
+ {
+ Name: "semanticTokens",
+ Type: "bool",
+ Doc: "semanticTokens controls whether the LSP server will send\nsemantic tokens to the client.\n",
+ Default: "false",
+ Status: "experimental",
+ Hierarchy: "ui",
+ },
+ {
+ Name: "noSemanticString",
+ Type: "bool",
+ Doc: "noSemanticString turns off the sending of the semantic token 'string'\n",
+ Default: "false",
+ Status: "experimental",
+ Hierarchy: "ui",
+ },
+ {
+ Name: "noSemanticNumber",
+ Type: "bool",
+ Doc: "noSemanticNumber turns off the sending of the semantic token 'number'\n",
+ Default: "false",
+ Status: "experimental",
+ Hierarchy: "ui",
+ },
+ {
+ Name: "local",
+ Type: "string",
+ Doc: "local is the equivalent of the `goimports -local` flag, which puts\nimports beginning with this string after third-party packages. It should\nbe the prefix of the import path whose imports should be grouped\nseparately.\n",
+ Default: "\"\"",
+ Hierarchy: "formatting",
+ },
+ {
+ Name: "gofumpt",
+ Type: "bool",
+ Doc: "gofumpt indicates if we should run gofumpt formatting.\n",
+ Default: "false",
+ Hierarchy: "formatting",
+ },
+ {
+ Name: "verboseOutput",
+ Type: "bool",
+ Doc: "verboseOutput enables additional debug logging.\n",
+ Default: "false",
+ Status: "debug",
+ },
+ },
+ },
+ Commands: []*CommandJSON{
+ {
+ Command: "gopls.add_dependency",
+ Title: "Add a dependency",
+ Doc: "Adds a dependency to the go.mod file for a module.",
+ ArgDoc: "{\n\t// The go.mod file URI.\n\t\"URI\": string,\n\t// Additional args to pass to the go command.\n\t\"GoCmdArgs\": []string,\n\t// Whether to add a require directive.\n\t\"AddRequire\": bool,\n}",
+ },
+ {
+ Command: "gopls.add_import",
+ Title: "Add an import",
+ Doc: "Ask the server to add an import path to a given Go file. The method will\ncall applyEdit on the client so that clients don't have to apply the edit\nthemselves.",
+ ArgDoc: "{\n\t// ImportPath is the target import path that should\n\t// be added to the URI file\n\t\"ImportPath\": string,\n\t// URI is the file that the ImportPath should be\n\t// added to\n\t\"URI\": string,\n}",
+ },
+ {
+ Command: "gopls.apply_fix",
+ Title: "Apply a fix",
+ Doc: "Applies a fix to a region of source code.",
+ ArgDoc: "{\n\t// The fix to apply.\n\t\"Fix\": string,\n\t// The file URI for the document to fix.\n\t\"URI\": string,\n\t// The document range to scan for fixes.\n\t\"Range\": {\n\t\t\"start\": {\n\t\t\t\"line\": uint32,\n\t\t\t\"character\": uint32,\n\t\t},\n\t\t\"end\": {\n\t\t\t\"line\": uint32,\n\t\t\t\"character\": uint32,\n\t\t},\n\t},\n}",
+ },
+ {
+ Command: "gopls.check_upgrades",
+ Title: "Check for upgrades",
+ Doc: "Checks for module upgrades.",
+ ArgDoc: "{\n\t// The go.mod file URI.\n\t\"URI\": string,\n\t// The modules to check.\n\t\"Modules\": []string,\n}",
+ },
+ {
+ Command: "gopls.edit_go_directive",
+ Title: "Run go mod edit -go=version",
+ Doc: "Runs `go mod edit -go=version` for a module.",
+ ArgDoc: "{\n\t// Any document URI within the relevant module.\n\t\"URI\": string,\n\t// The version to pass to `go mod edit -go`.\n\t\"Version\": string,\n}",
+ },
+ {
+ Command: "gopls.fetch_vulncheck_result",
+ Title: "Get known vulncheck result",
+ Doc: "Fetch the result of latest vulnerability check (`govulncheck`).",
+ ArgDoc: "{\n\t// The file URI.\n\t\"URI\": string,\n}",
+ ResultDoc: "map[golang.org/x/tools/gopls/internal/lsp/protocol.DocumentURI]*golang.org/x/tools/gopls/internal/govulncheck.Result",
+ },
+ {
+ Command: "gopls.gc_details",
+ Title: "Toggle gc_details",
+ Doc: "Toggle the calculation of gc annotations.",
+ ArgDoc: "string",
+ },
+ {
+ Command: "gopls.generate",
+ Title: "Run go generate",
+ Doc: "Runs `go generate` for a given directory.",
+ ArgDoc: "{\n\t// URI for the directory to generate.\n\t\"Dir\": string,\n\t// Whether to generate recursively (go generate ./...)\n\t\"Recursive\": bool,\n}",
+ },
+ {
+ Command: "gopls.go_get_package",
+ Title: "go get a package",
+ Doc: "Runs `go get` to fetch a package.",
+ ArgDoc: "{\n\t// Any document URI within the relevant module.\n\t\"URI\": string,\n\t// The package to go get.\n\t\"Pkg\": string,\n\t\"AddRequire\": bool,\n}",
+ },
+ {
+ Command: "gopls.list_imports",
+ Title: "List imports of a file and its package",
+ Doc: "Retrieve a list of imports in the given Go file, and the package it\nbelongs to.",
+ ArgDoc: "{\n\t// The file URI.\n\t\"URI\": string,\n}",
+ ResultDoc: "{\n\t// Imports is a list of imports in the requested file.\n\t\"Imports\": []{\n\t\t\"Path\": string,\n\t\t\"Name\": string,\n\t},\n\t// PackageImports is a list of all imports in the requested file's package.\n\t\"PackageImports\": []{\n\t\t\"Path\": string,\n\t},\n}",
+ },
+ {
+ Command: "gopls.list_known_packages",
+ Title: "List known packages",
+ Doc: "Retrieve a list of packages that are importable from the given URI.",
+ ArgDoc: "{\n\t// The file URI.\n\t\"URI\": string,\n}",
+ ResultDoc: "{\n\t// Packages is a list of packages relative\n\t// to the URIArg passed by the command request.\n\t// In other words, it omits paths that are already\n\t// imported or cannot be imported due to compiler\n\t// restrictions.\n\t\"Packages\": []string,\n}",
+ },
+ {
+ Command: "gopls.mem_stats",
+ Title: "fetch memory statistics",
+ Doc: "Call runtime.GC multiple times and return memory statistics as reported by\nruntime.MemStats.\n\nThis command is used for benchmarking, and may change in the future.",
+ ResultDoc: "{\n\t\"HeapAlloc\": uint64,\n\t\"HeapInUse\": uint64,\n}",
+ },
+ {
+ Command: "gopls.regenerate_cgo",
+ Title: "Regenerate cgo",
+ Doc: "Regenerates cgo definitions.",
+ ArgDoc: "{\n\t// The file URI.\n\t\"URI\": string,\n}",
+ },
+ {
+ Command: "gopls.remove_dependency",
+ Title: "Remove a dependency",
+ Doc: "Removes a dependency from the go.mod file of a module.",
+ ArgDoc: "{\n\t// The go.mod file URI.\n\t\"URI\": string,\n\t// The module path to remove.\n\t\"ModulePath\": string,\n\t\"OnlyDiagnostic\": bool,\n}",
+ },
+ {
+ Command: "gopls.reset_go_mod_diagnostics",
+ Title: "Reset go.mod diagnostics",
+ Doc: "Reset diagnostics in the go.mod file of a module.",
+ ArgDoc: "{\n\t\"URIArg\": {\n\t\t\"URI\": string,\n\t},\n\t// Optional: source of the diagnostics to reset.\n\t// If not set, all resettable go.mod diagnostics will be cleared.\n\t\"DiagnosticSource\": string,\n}",
+ },
+ {
+ Command: "gopls.run_govulncheck",
+ Title: "Run govulncheck.",
+ Doc: "Run vulnerability check (`govulncheck`).",
+ ArgDoc: "{\n\t// Any document in the directory from which govulncheck will run.\n\t\"URI\": string,\n\t// Package pattern. E.g. \"\", \".\", \"./...\".\n\t\"Pattern\": string,\n}",
+ ResultDoc: "{\n\t// Token holds the progress token for LSP workDone reporting of the vulncheck\n\t// invocation.\n\t\"Token\": interface{},\n}",
+ },
+ {
+ Command: "gopls.run_tests",
+ Title: "Run test(s)",
+ Doc: "Runs `go test` for a specific set of test or benchmark functions.",
+ ArgDoc: "{\n\t// The test file containing the tests to run.\n\t\"URI\": string,\n\t// Specific test names to run, e.g. TestFoo.\n\t\"Tests\": []string,\n\t// Specific benchmarks to run, e.g. BenchmarkFoo.\n\t\"Benchmarks\": []string,\n}",
+ },
+ {
+ Command: "gopls.start_debugging",
+ Title: "Start the gopls debug server",
+ Doc: "Start the gopls debug server if it isn't running, and return the debug\naddress.",
+ ArgDoc: "{\n\t// Optional: the address (including port) for the debug server to listen on.\n\t// If not provided, the debug server will bind to \"localhost:0\", and the\n\t// full debug URL will be contained in the result.\n\t// \n\t// If there is more than one gopls instance along the serving path (i.e. you\n\t// are using a daemon), each gopls instance will attempt to start debugging.\n\t// If Addr specifies a port, only the daemon will be able to bind to that\n\t// port, and each intermediate gopls instance will fail to start debugging.\n\t// For this reason it is recommended not to specify a port (or equivalently,\n\t// to specify \":0\").\n\t// \n\t// If the server was already debugging this field has no effect, and the\n\t// result will contain the previously configured debug URL(s).\n\t\"Addr\": string,\n}",
+ ResultDoc: "{\n\t// The URLs to use to access the debug servers, for all gopls instances in\n\t// the serving path. For the common case of a single gopls instance (i.e. no\n\t// daemon), this will be exactly one address.\n\t// \n\t// In the case of one or more gopls instances forwarding the LSP to a daemon,\n\t// URLs will contain debug addresses for each server in the serving path, in\n\t// serving order. The daemon debug address will be the last entry in the\n\t// slice. If any intermediate gopls instance fails to start debugging, no\n\t// error will be returned but the debug URL for that server in the URLs slice\n\t// will be empty.\n\t\"URLs\": []string,\n}",
+ },
+ {
+ Command: "gopls.test",
+ Title: "Run test(s) (legacy)",
+ Doc: "Runs `go test` for a specific set of test or benchmark functions.",
+ ArgDoc: "string,\n[]string,\n[]string",
+ },
+ {
+ Command: "gopls.tidy",
+ Title: "Run go mod tidy",
+ Doc: "Runs `go mod tidy` for a module.",
+ ArgDoc: "{\n\t// The file URIs.\n\t\"URIs\": []string,\n}",
+ },
+ {
+ Command: "gopls.toggle_gc_details",
+ Title: "Toggle gc_details",
+ Doc: "Toggle the calculation of gc annotations.",
+ ArgDoc: "{\n\t// The file URI.\n\t\"URI\": string,\n}",
+ },
+ {
+ Command: "gopls.update_go_sum",
+ Title: "Update go.sum",
+ Doc: "Updates the go.sum file for a module.",
+ ArgDoc: "{\n\t// The file URIs.\n\t\"URIs\": []string,\n}",
+ },
+ {
+ Command: "gopls.upgrade_dependency",
+ Title: "Upgrade a dependency",
+ Doc: "Upgrades a dependency in the go.mod file for a module.",
+ ArgDoc: "{\n\t// The go.mod file URI.\n\t\"URI\": string,\n\t// Additional args to pass to the go command.\n\t\"GoCmdArgs\": []string,\n\t// Whether to add a require directive.\n\t\"AddRequire\": bool,\n}",
+ },
+ {
+ Command: "gopls.vendor",
+ Title: "Run go mod vendor",
+ Doc: "Runs `go mod vendor` for a module.",
+ ArgDoc: "{\n\t// The file URI.\n\t\"URI\": string,\n}",
+ },
+ },
+ Lenses: []*LensJSON{
+ {
+ Lens: "gc_details",
+ Title: "Toggle gc_details",
+ Doc: "Toggle the calculation of gc annotations.",
+ },
+ {
+ Lens: "generate",
+ Title: "Run go generate",
+ Doc: "Runs `go generate` for a given directory.",
+ },
+ {
+ Lens: "regenerate_cgo",
+ Title: "Regenerate cgo",
+ Doc: "Regenerates cgo definitions.",
+ },
+ {
+ Lens: "run_govulncheck",
+ Title: "Run govulncheck.",
+ Doc: "Run vulnerability check (`govulncheck`).",
+ },
+ {
+ Lens: "test",
+ Title: "Run test(s) (legacy)",
+ Doc: "Runs `go test` for a specific set of test or benchmark functions.",
+ },
+ {
+ Lens: "tidy",
+ Title: "Run go mod tidy",
+ Doc: "Runs `go mod tidy` for a module.",
+ },
+ {
+ Lens: "upgrade_dependency",
+ Title: "Upgrade a dependency",
+ Doc: "Upgrades a dependency in the go.mod file for a module.",
+ },
+ {
+ Lens: "vendor",
+ Title: "Run go mod vendor",
+ Doc: "Runs `go mod vendor` for a module.",
+ },
+ },
+ Analyzers: []*AnalyzerJSON{
+ {
+ Name: "asmdecl",
+ Doc: "report mismatches between assembly files and Go declarations",
+ Default: true,
+ },
+ {
+ Name: "assign",
+ Doc: "check for useless assignments\n\nThis checker reports assignments of the form x = x or a[i] = a[i].\nThese are almost always useless, and even when they aren't they are\nusually a mistake.",
+ Default: true,
+ },
+ {
+ Name: "atomic",
+ Doc: "check for common mistakes using the sync/atomic package\n\nThe atomic checker looks for assignment statements of the form:\n\n\tx = atomic.AddUint64(&x, 1)\n\nwhich are not atomic.",
+ Default: true,
+ },
+ {
+ Name: "atomicalign",
+ Doc: "check for non-64-bits-aligned arguments to sync/atomic functions",
+ Default: true,
+ },
+ {
+ Name: "bools",
+ Doc: "check for common mistakes involving boolean operators",
+ Default: true,
+ },
+ {
+ Name: "buildtag",
+ Doc: "check //go:build and // +build directives",
+ Default: true,
+ },
+ {
+ Name: "cgocall",
+ Doc: "detect some violations of the cgo pointer passing rules\n\nCheck for invalid cgo pointer passing.\nThis looks for code that uses cgo to call C code passing values\nwhose types are almost always invalid according to the cgo pointer\nsharing rules.\nSpecifically, it warns about attempts to pass a Go chan, map, func,\nor slice to C, either directly, or via a pointer, array, or struct.",
+ Default: true,
+ },
+ {
+ Name: "composites",
+ Doc: "check for unkeyed composite literals\n\nThis analyzer reports a diagnostic for composite literals of struct\ntypes imported from another package that do not use the field-keyed\nsyntax. Such literals are fragile because the addition of a new field\n(even if unexported) to the struct will cause compilation to fail.\n\nAs an example,\n\n\terr = &net.DNSConfigError{err}\n\nshould be replaced by:\n\n\terr = &net.DNSConfigError{Err: err}\n",
+ Default: true,
+ },
+ {
+ Name: "copylocks",
+ Doc: "check for locks erroneously passed by value\n\nInadvertently copying a value containing a lock, such as sync.Mutex or\nsync.WaitGroup, may cause both copies to malfunction. Generally such\nvalues should be referred to through a pointer.",
+ Default: true,
+ },
+ {
+ Name: "deepequalerrors",
+ Doc: "check for calls of reflect.DeepEqual on error values\n\nThe deepequalerrors checker looks for calls of the form:\n\n reflect.DeepEqual(err1, err2)\n\nwhere err1 and err2 are errors. Using reflect.DeepEqual to compare\nerrors is discouraged.",
+ Default: true,
+ },
+ {
+ Name: "directive",
+ Doc: "check Go toolchain directives such as //go:debug\n\nThis analyzer checks for problems with known Go toolchain directives\nin all Go source files in a package directory, even those excluded by\n//go:build constraints, and all non-Go source files too.\n\nFor //go:debug (see https://go.dev/doc/godebug), the analyzer checks\nthat the directives are placed only in Go source files, only above the\npackage comment, and only in package main or *_test.go files.\n\nSupport for other known directives may be added in the future.\n\nThis analyzer does not check //go:build, which is handled by the\nbuildtag analyzer.\n",
+ Default: true,
+ },
+ {
+ Name: "embed",
+ Doc: "check for //go:embed directive import\n\nThis analyzer checks that the embed package is imported when source code contains //go:embed comment directives.\nThe embed package must be imported for //go:embed directives to function.import _ \"embed\".",
+ Default: true,
+ },
+ {
+ Name: "errorsas",
+ Doc: "report passing non-pointer or non-error values to errors.As\n\nThe errorsas analysis reports calls to errors.As where the type\nof the second argument is not a pointer to a type implementing error.",
+ Default: true,
+ },
+ {
+ Name: "fieldalignment",
+ Doc: "find structs that would use less memory if their fields were sorted\n\nThis analyzer find structs that can be rearranged to use less memory, and provides\na suggested edit with the most compact order.\n\nNote that there are two different diagnostics reported. One checks struct size,\nand the other reports \"pointer bytes\" used. Pointer bytes is how many bytes of the\nobject that the garbage collector has to potentially scan for pointers, for example:\n\n\tstruct { uint32; string }\n\nhave 16 pointer bytes because the garbage collector has to scan up through the string's\ninner pointer.\n\n\tstruct { string; *uint32 }\n\nhas 24 pointer bytes because it has to scan further through the *uint32.\n\n\tstruct { string; uint32 }\n\nhas 8 because it can stop immediately after the string pointer.\n\nBe aware that the most compact order is not always the most efficient.\nIn rare cases it may cause two variables each updated by its own goroutine\nto occupy the same CPU cache line, inducing a form of memory contention\nknown as \"false sharing\" that slows down both goroutines.\n",
+ },
+ {
+ Name: "httpresponse",
+ Doc: "check for mistakes using HTTP responses\n\nA common mistake when using the net/http package is to defer a function\ncall to close the http.Response Body before checking the error that\ndetermines whether the response is valid:\n\n\tresp, err := http.Head(url)\n\tdefer resp.Body.Close()\n\tif err != nil {\n\t\tlog.Fatal(err)\n\t}\n\t// (defer statement belongs here)\n\nThis checker helps uncover latent nil dereference bugs by reporting a\ndiagnostic for such mistakes.",
+ Default: true,
+ },
+ {
+ Name: "ifaceassert",
+ Doc: "detect impossible interface-to-interface type assertions\n\nThis checker flags type assertions v.(T) and corresponding type-switch cases\nin which the static type V of v is an interface that cannot possibly implement\nthe target interface T. This occurs when V and T contain methods with the same\nname but different signatures. Example:\n\n\tvar v interface {\n\t\tRead()\n\t}\n\t_ = v.(io.Reader)\n\nThe Read method in v has a different signature than the Read method in\nio.Reader, so this assertion cannot succeed.\n",
+ Default: true,
+ },
+ {
+ Name: "infertypeargs",
+ Doc: "check for unnecessary type arguments in call expressions\n\nExplicit type arguments may be omitted from call expressions if they can be\ninferred from function arguments, or from other type arguments:\n\n\tfunc f[T any](T) {}\n\t\n\tfunc _() {\n\t\tf[string](\"foo\") // string could be inferred\n\t}\n",
+ Default: true,
+ },
+ {
+ Name: "loopclosure",
+ Doc: "check references to loop variables from within nested functions\n\nThis analyzer reports places where a function literal references the\niteration variable of an enclosing loop, and the loop calls the function\nin such a way (e.g. with go or defer) that it may outlive the loop\niteration and possibly observe the wrong value of the variable.\n\nIn this example, all the deferred functions run after the loop has\ncompleted, so all observe the final value of v.\n\n for _, v := range list {\n defer func() {\n use(v) // incorrect\n }()\n }\n\nOne fix is to create a new variable for each iteration of the loop:\n\n for _, v := range list {\n v := v // new var per iteration\n defer func() {\n use(v) // ok\n }()\n }\n\nThe next example uses a go statement and has a similar problem.\nIn addition, it has a data race because the loop updates v\nconcurrent with the goroutines accessing it.\n\n for _, v := range elem {\n go func() {\n use(v) // incorrect, and a data race\n }()\n }\n\nA fix is the same as before. The checker also reports problems\nin goroutines started by golang.org/x/sync/errgroup.Group.\nA hard-to-spot variant of this form is common in parallel tests:\n\n func Test(t *testing.T) {\n for _, test := range tests {\n t.Run(test.name, func(t *testing.T) {\n t.Parallel()\n use(test) // incorrect, and a data race\n })\n }\n }\n\nThe t.Parallel() call causes the rest of the function to execute\nconcurrent with the loop.\n\nThe analyzer reports references only in the last statement,\nas it is not deep enough to understand the effects of subsequent\nstatements that might render the reference benign.\n(\"Last statement\" is defined recursively in compound\nstatements such as if, switch, and select.)\n\nSee: https://golang.org/doc/go_faq.html#closures_and_goroutines",
+ Default: true,
+ },
+ {
+ Name: "lostcancel",
+ Doc: "check cancel func returned by context.WithCancel is called\n\nThe cancellation function returned by context.WithCancel, WithTimeout,\nand WithDeadline must be called or the new context will remain live\nuntil its parent context is cancelled.\n(The background context is never cancelled.)",
+ Default: true,
+ },
+ {
+ Name: "nilfunc",
+ Doc: "check for useless comparisons between functions and nil\n\nA useless comparison is one like f == nil as opposed to f() == nil.",
+ Default: true,
+ },
+ {
+ Name: "nilness",
+ Doc: "check for redundant or impossible nil comparisons\n\nThe nilness checker inspects the control-flow graph of each function in\na package and reports nil pointer dereferences, degenerate nil\npointers, and panics with nil values. A degenerate comparison is of the form\nx==nil or x!=nil where x is statically known to be nil or non-nil. These are\noften a mistake, especially in control flow related to errors. Panics with nil\nvalues are checked because they are not detectable by\n\n\tif r := recover(); r != nil {\n\nThis check reports conditions such as:\n\n\tif f == nil { // impossible condition (f is a function)\n\t}\n\nand:\n\n\tp := &v\n\t...\n\tif p != nil { // tautological condition\n\t}\n\nand:\n\n\tif p == nil {\n\t\tprint(*p) // nil dereference\n\t}\n\nand:\n\n\tif p == nil {\n\t\tpanic(p)\n\t}\n",
+ },
+ {
+ Name: "printf",
+ Doc: "check consistency of Printf format strings and arguments\n\nThe check applies to known functions (for example, those in package fmt)\nas well as any detected wrappers of known functions.\n\nA function that wants to avail itself of printf checking but is not\nfound by this analyzer's heuristics (for example, due to use of\ndynamic calls) can insert a bogus call:\n\n\tif false {\n\t\t_ = fmt.Sprintf(format, args...) // enable printf checking\n\t}\n\nThe -funcs flag specifies a comma-separated list of names of additional\nknown formatting functions or methods. If the name contains a period,\nit must denote a specific function using one of the following forms:\n\n\tdir/pkg.Function\n\tdir/pkg.Type.Method\n\t(*dir/pkg.Type).Method\n\nOtherwise the name is interpreted as a case-insensitive unqualified\nidentifier such as \"errorf\". Either way, if a listed name ends in f, the\nfunction is assumed to be Printf-like, taking a format string before the\nargument list. Otherwise it is assumed to be Print-like, taking a list\nof arguments with no format string.\n",
+ Default: true,
+ },
+ {
+ Name: "shadow",
+ Doc: "check for possible unintended shadowing of variables\n\nThis analyzer check for shadowed variables.\nA shadowed variable is a variable declared in an inner scope\nwith the same name and type as a variable in an outer scope,\nand where the outer variable is mentioned after the inner one\nis declared.\n\n(This definition can be refined; the module generates too many\nfalse positives and is not yet enabled by default.)\n\nFor example:\n\n\tfunc BadRead(f *os.File, buf []byte) error {\n\t\tvar err error\n\t\tfor {\n\t\t\tn, err := f.Read(buf) // shadows the function variable 'err'\n\t\t\tif err != nil {\n\t\t\t\tbreak // causes return of wrong value\n\t\t\t}\n\t\t\tfoo(buf)\n\t\t}\n\t\treturn err\n\t}\n",
+ },
+ {
+ Name: "shift",
+ Doc: "check for shifts that equal or exceed the width of the integer",
+ Default: true,
+ },
+ {
+ Name: "simplifycompositelit",
+ Doc: "check for composite literal simplifications\n\nAn array, slice, or map composite literal of the form:\n\t[]T{T{}, T{}}\nwill be simplified to:\n\t[]T{{}, {}}\n\nThis is one of the simplifications that \"gofmt -s\" applies.",
+ Default: true,
+ },
+ {
+ Name: "simplifyrange",
+ Doc: "check for range statement simplifications\n\nA range of the form:\n\tfor x, _ = range v {...}\nwill be simplified to:\n\tfor x = range v {...}\n\nA range of the form:\n\tfor _ = range v {...}\nwill be simplified to:\n\tfor range v {...}\n\nThis is one of the simplifications that \"gofmt -s\" applies.",
+ Default: true,
+ },
+ {
+ Name: "simplifyslice",
+ Doc: "check for slice simplifications\n\nA slice expression of the form:\n\ts[a:len(s)]\nwill be simplified to:\n\ts[a:]\n\nThis is one of the simplifications that \"gofmt -s\" applies.",
+ Default: true,
+ },
+ {
+ Name: "sortslice",
+ Doc: "check the argument type of sort.Slice\n\nsort.Slice requires an argument of a slice type. Check that\nthe interface{} value passed to sort.Slice is actually a slice.",
+ Default: true,
+ },
+ {
+ Name: "stdmethods",
+ Doc: "check signature of methods of well-known interfaces\n\nSometimes a type may be intended to satisfy an interface but may fail to\ndo so because of a mistake in its method signature.\nFor example, the result of this WriteTo method should be (int64, error),\nnot error, to satisfy io.WriterTo:\n\n\ttype myWriterTo struct{...}\n func (myWriterTo) WriteTo(w io.Writer) error { ... }\n\nThis check ensures that each method whose name matches one of several\nwell-known interface methods from the standard library has the correct\nsignature for that interface.\n\nChecked method names include:\n\tFormat GobEncode GobDecode MarshalJSON MarshalXML\n\tPeek ReadByte ReadFrom ReadRune Scan Seek\n\tUnmarshalJSON UnreadByte UnreadRune WriteByte\n\tWriteTo\n",
+ Default: true,
+ },
+ {
+ Name: "stringintconv",
+ Doc: "check for string(int) conversions\n\nThis checker flags conversions of the form string(x) where x is an integer\n(but not byte or rune) type. Such conversions are discouraged because they\nreturn the UTF-8 representation of the Unicode code point x, and not a decimal\nstring representation of x as one might expect. Furthermore, if x denotes an\ninvalid code point, the conversion cannot be statically rejected.\n\nFor conversions that intend on using the code point, consider replacing them\nwith string(rune(x)). Otherwise, strconv.Itoa and its equivalents return the\nstring representation of the value in the desired base.\n",
+ Default: true,
+ },
+ {
+ Name: "structtag",
+ Doc: "check that struct field tags conform to reflect.StructTag.Get\n\nAlso report certain struct tags (json, xml) used with unexported fields.",
+ Default: true,
+ },
+ {
+ Name: "testinggoroutine",
+ Doc: "report calls to (*testing.T).Fatal from goroutines started by a test.\n\nFunctions that abruptly terminate a test, such as the Fatal, Fatalf, FailNow, and\nSkip{,f,Now} methods of *testing.T, must be called from the test goroutine itself.\nThis checker detects calls to these functions that occur within a goroutine\nstarted by the test. For example:\n\nfunc TestFoo(t *testing.T) {\n go func() {\n t.Fatal(\"oops\") // error: (*T).Fatal called from non-test goroutine\n }()\n}\n",
+ Default: true,
+ },
+ {
+ Name: "tests",
+ Doc: "check for common mistaken usages of tests and examples\n\nThe tests checker walks Test, Benchmark and Example functions checking\nmalformed names, wrong signatures and examples documenting non-existent\nidentifiers.\n\nPlease see the documentation for package testing in golang.org/pkg/testing\nfor the conventions that are enforced for Tests, Benchmarks, and Examples.",
+ Default: true,
+ },
+ {
+ Name: "timeformat",
+ Doc: "check for calls of (time.Time).Format or time.Parse with 2006-02-01\n\nThe timeformat checker looks for time formats with the 2006-02-01 (yyyy-dd-mm)\nformat. Internationally, \"yyyy-dd-mm\" does not occur in common calendar date\nstandards, and so it is more likely that 2006-01-02 (yyyy-mm-dd) was intended.\n",
+ Default: true,
+ },
+ {
+ Name: "unmarshal",
+ Doc: "report passing non-pointer or non-interface values to unmarshal\n\nThe unmarshal analysis reports calls to functions such as json.Unmarshal\nin which the argument type is not a pointer or an interface.",
+ Default: true,
+ },
+ {
+ Name: "unreachable",
+ Doc: "check for unreachable code\n\nThe unreachable analyzer finds statements that execution can never reach\nbecause they are preceded by an return statement, a call to panic, an\ninfinite loop, or similar constructs.",
+ Default: true,
+ },
+ {
+ Name: "unsafeptr",
+ Doc: "check for invalid conversions of uintptr to unsafe.Pointer\n\nThe unsafeptr analyzer reports likely incorrect uses of unsafe.Pointer\nto convert integers to pointers. A conversion from uintptr to\nunsafe.Pointer is invalid if it implies that there is a uintptr-typed\nword in memory that holds a pointer value, because that word will be\ninvisible to stack copying and to the garbage collector.",
+ Default: true,
+ },
+ {
+ Name: "unusedparams",
+ Doc: "check for unused parameters of functions\n\nThe unusedparams analyzer checks functions to see if there are\nany parameters that are not being used.\n\nTo reduce false positives it ignores:\n- methods\n- parameters that do not have a name or are underscored\n- functions in test files\n- functions with empty bodies or those with just a return stmt",
+ },
+ {
+ Name: "unusedresult",
+ Doc: "check for unused results of calls to some functions\n\nSome functions like fmt.Errorf return a result and have no side effects,\nso it is always a mistake to discard the result. This analyzer reports\ncalls to certain functions in which the result of the call is ignored.\n\nThe set of functions may be controlled using flags.",
+ Default: true,
+ },
+ {
+ Name: "unusedwrite",
+ Doc: "checks for unused writes\n\nThe analyzer reports instances of writes to struct fields and\narrays that are never read. Specifically, when a struct object\nor an array is copied, its elements are copied implicitly by\nthe compiler, and any element write to this copy does nothing\nwith the original object.\n\nFor example:\n\n\ttype T struct { x int }\n\tfunc f(input []T) {\n\t\tfor i, v := range input { // v is a copy\n\t\t\tv.x = i // unused write to field x\n\t\t}\n\t}\n\nAnother example is about non-pointer receiver:\n\n\ttype T struct { x int }\n\tfunc (t T) f() { // t is a copy\n\t\tt.x = i // unused write to field x\n\t}\n",
+ },
+ {
+ Name: "useany",
+ Doc: "check for constraints that could be simplified to \"any\"",
+ },
+ {
+ Name: "fillreturns",
+ Doc: "suggest fixes for errors due to an incorrect number of return values\n\nThis checker provides suggested fixes for type errors of the\ntype \"wrong number of return values (want %d, got %d)\". For example:\n\tfunc m() (int, string, *bool, error) {\n\t\treturn\n\t}\nwill turn into\n\tfunc m() (int, string, *bool, error) {\n\t\treturn 0, \"\", nil, nil\n\t}\n\nThis functionality is similar to https://github.com/sqs/goreturns.\n",
+ Default: true,
+ },
+ {
+ Name: "nonewvars",
+ Doc: "suggested fixes for \"no new vars on left side of :=\"\n\nThis checker provides suggested fixes for type errors of the\ntype \"no new vars on left side of :=\". For example:\n\tz := 1\n\tz := 2\nwill turn into\n\tz := 1\n\tz = 2\n",
+ Default: true,
+ },
+ {
+ Name: "noresultvalues",
+ Doc: "suggested fixes for unexpected return values\n\nThis checker provides suggested fixes for type errors of the\ntype \"no result values expected\" or \"too many return values\".\nFor example:\n\tfunc z() { return nil }\nwill turn into\n\tfunc z() { return }\n",
+ Default: true,
+ },
+ {
+ Name: "undeclaredname",
+ Doc: "suggested fixes for \"undeclared name: <>\"\n\nThis checker provides suggested fixes for type errors of the\ntype \"undeclared name: <>\". It will either insert a new statement,\nsuch as:\n\n\"<> := \"\n\nor a new function declaration, such as:\n\nfunc <>(inferred parameters) {\n\tpanic(\"implement me!\")\n}\n",
+ Default: true,
+ },
+ {
+ Name: "unusedvariable",
+ Doc: "check for unused variables\n\nThe unusedvariable analyzer suggests fixes for unused variables errors.\n",
+ },
+ {
+ Name: "fillstruct",
+ Doc: "note incomplete struct initializations\n\nThis analyzer provides diagnostics for any struct literals that do not have\nany fields initialized. Because the suggested fix for this analysis is\nexpensive to compute, callers should compute it separately, using the\nSuggestedFix function below.\n",
+ Default: true,
+ },
+ {
+ Name: "stubmethods",
+ Doc: "stub methods analyzer\n\nThis analyzer generates method stubs for concrete types\nin order to implement a target interface",
+ Default: true,
+ },
+ },
+ Hints: []*HintJSON{
+ {
+ Name: "assignVariableTypes",
+ Doc: "Enable/disable inlay hints for variable types in assign statements:\n```go\n\ti/* int*/, j/* int*/ := 0, len(r)-1\n```",
+ },
+ {
+ Name: "compositeLiteralFields",
+ Doc: "Enable/disable inlay hints for composite literal field names:\n```go\n\t{/*in: */\"Hello, world\", /*want: */\"dlrow ,olleH\"}\n```",
+ },
+ {
+ Name: "compositeLiteralTypes",
+ Doc: "Enable/disable inlay hints for composite literal types:\n```go\n\tfor _, c := range []struct {\n\t\tin, want string\n\t}{\n\t\t/*struct{ in string; want string }*/{\"Hello, world\", \"dlrow ,olleH\"},\n\t}\n```",
+ },
+ {
+ Name: "constantValues",
+ Doc: "Enable/disable inlay hints for constant values:\n```go\n\tconst (\n\t\tKindNone Kind = iota/* = 0*/\n\t\tKindPrint/* = 1*/\n\t\tKindPrintf/* = 2*/\n\t\tKindErrorf/* = 3*/\n\t)\n```",
+ },
+ {
+ Name: "functionTypeParameters",
+ Doc: "Enable/disable inlay hints for implicit type parameters on generic functions:\n```go\n\tmyFoo/*[int, string]*/(1, \"hello\")\n```",
+ },
+ {
+ Name: "parameterNames",
+ Doc: "Enable/disable inlay hints for parameter names:\n```go\n\tparseInt(/* str: */ \"123\", /* radix: */ 8)\n```",
+ },
+ {
+ Name: "rangeVariableTypes",
+ Doc: "Enable/disable inlay hints for variable types in range statements:\n```go\n\tfor k/* int*/, v/* string*/ := range []string{} {\n\t\tfmt.Println(k, v)\n\t}\n```",
+ },
+ },
+}
diff --git a/gopls/internal/lsp/source/call_hierarchy.go b/gopls/internal/lsp/source/call_hierarchy.go
new file mode 100644
index 000000000..2bdf7df40
--- /dev/null
+++ b/gopls/internal/lsp/source/call_hierarchy.go
@@ -0,0 +1,311 @@
+// Copyright 2020 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package source
+
+import (
+ "context"
+ "errors"
+ "fmt"
+ "go/ast"
+ "go/token"
+ "go/types"
+ "path/filepath"
+
+ "golang.org/x/tools/go/ast/astutil"
+ "golang.org/x/tools/gopls/internal/lsp/protocol"
+ "golang.org/x/tools/gopls/internal/lsp/safetoken"
+ "golang.org/x/tools/gopls/internal/span"
+ "golang.org/x/tools/internal/bug"
+ "golang.org/x/tools/internal/event"
+ "golang.org/x/tools/internal/event/tag"
+)
+
+// PrepareCallHierarchy returns an array of CallHierarchyItem for a file and the position within the file.
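+//
+// For example (an illustrative sketch; ctx, snapshot, fh, and pos are
+// assumed to be in scope), invoking it at the declaration of a function f
+// yields a single item:
+//
+//	items, _ := PrepareCallHierarchy(ctx, snapshot, fh, pos)
+//	// len(items) == 1, items[0].Name == "f", items[0].Kind == protocol.Function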
+func PrepareCallHierarchy(ctx context.Context, snapshot Snapshot, fh FileHandle, pp protocol.Position) ([]protocol.CallHierarchyItem, error) {
+ ctx, done := event.Start(ctx, "source.PrepareCallHierarchy")
+ defer done()
+
+ pkg, pgf, err := PackageForFile(ctx, snapshot, fh.URI(), NarrowestPackage)
+ if err != nil {
+ return nil, err
+ }
+ pos, err := pgf.PositionPos(pp)
+ if err != nil {
+ return nil, err
+ }
+
+ _, obj, _ := referencedObject(pkg, pgf, pos)
+ if obj == nil {
+ return nil, nil
+ }
+
+ if _, ok := obj.Type().Underlying().(*types.Signature); !ok {
+ return nil, nil
+ }
+
+ declLoc, err := mapPosition(ctx, pkg.FileSet(), snapshot, obj.Pos(), adjustedObjEnd(obj))
+ if err != nil {
+ return nil, err
+ }
+ rng := declLoc.Range
+
+ callHierarchyItem := protocol.CallHierarchyItem{
+ Name: obj.Name(),
+ Kind: protocol.Function,
+ Tags: []protocol.SymbolTag{},
+ Detail: fmt.Sprintf("%s • %s", obj.Pkg().Path(), filepath.Base(declLoc.URI.SpanURI().Filename())),
+ URI: declLoc.URI,
+ Range: rng,
+ SelectionRange: rng,
+ }
+ return []protocol.CallHierarchyItem{callHierarchyItem}, nil
+}
+
+// IncomingCalls returns an array of CallHierarchyIncomingCall for a file and the position within the file.
+func IncomingCalls(ctx context.Context, snapshot Snapshot, fh FileHandle, pos protocol.Position) ([]protocol.CallHierarchyIncomingCall, error) {
+ ctx, done := event.Start(ctx, "source.IncomingCalls")
+ defer done()
+
+ refs, err := references(ctx, snapshot, fh, pos, false)
+ if err != nil {
+ if errors.Is(err, ErrNoIdentFound) || errors.Is(err, errNoObjectFound) {
+ return nil, nil
+ }
+ return nil, err
+ }
+
+ // Group references by their enclosing function declaration.
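+	// For example (illustrative), two references to the target from within
+	// the same caller f produce a single CallHierarchyIncomingCall whose
+	// From is f and whose FromRanges holds both reference ranges.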
+ incomingCalls := make(map[protocol.Location]*protocol.CallHierarchyIncomingCall)
+ for _, ref := range refs {
+ callItem, err := enclosingNodeCallItem(ctx, snapshot, ref.pkgPath, ref.location)
+ if err != nil {
+ event.Error(ctx, "error getting enclosing node", err, tag.Method.Of(string(ref.pkgPath)))
+ continue
+ }
+ loc := protocol.Location{
+ URI: callItem.URI,
+ Range: callItem.Range,
+ }
+ call, ok := incomingCalls[loc]
+ if !ok {
+ call = &protocol.CallHierarchyIncomingCall{From: callItem}
+ incomingCalls[loc] = call
+ }
+ call.FromRanges = append(call.FromRanges, ref.location.Range)
+ }
+
+ // Flatten the map of pointers into a slice of values.
+ incomingCallItems := make([]protocol.CallHierarchyIncomingCall, 0, len(incomingCalls))
+ for _, callItem := range incomingCalls {
+ incomingCallItems = append(incomingCallItems, *callItem)
+ }
+ return incomingCallItems, nil
+}
+
+// enclosingNodeCallItem creates a CallHierarchyItem representing the function call at loc.
+func enclosingNodeCallItem(ctx context.Context, snapshot Snapshot, pkgPath PackagePath, loc protocol.Location) (protocol.CallHierarchyItem, error) {
+ // Parse the file containing the reference.
+ fh, err := snapshot.GetFile(ctx, loc.URI.SpanURI())
+ if err != nil {
+ return protocol.CallHierarchyItem{}, err
+ }
+ // TODO(adonovan): opt: before parsing, trim the bodies of functions
+ // that don't contain the reference, using either a scanner-based
+ // implementation such as https://go.dev/play/p/KUrObH1YkX8
+ // (~31% speedup), or a byte-oriented implementation (2x speedup).
+ pgf, err := snapshot.ParseGo(ctx, fh, ParseFull)
+ if err != nil {
+ return protocol.CallHierarchyItem{}, err
+ }
+ start, end, err := pgf.RangePos(loc.Range)
+ if err != nil {
+ return protocol.CallHierarchyItem{}, err
+ }
+
+ // Find the enclosing function, if any, and the number of func literals in between.
+ var funcDecl *ast.FuncDecl
+ var funcLit *ast.FuncLit // innermost function literal
+ var litCount int
+ path, _ := astutil.PathEnclosingInterval(pgf.File, start, end)
+outer:
+ for _, node := range path {
+ switch n := node.(type) {
+ case *ast.FuncDecl:
+ funcDecl = n
+ break outer
+ case *ast.FuncLit:
+ litCount++
+ if litCount > 1 {
+ continue
+ }
+ funcLit = n
+ }
+ }
+
+ nameIdent := path[len(path)-1].(*ast.File).Name
+ kind := protocol.Package
+ if funcDecl != nil {
+ nameIdent = funcDecl.Name
+ kind = protocol.Function
+ }
+
+ nameStart, nameEnd := nameIdent.Pos(), nameIdent.End()
+ if funcLit != nil {
+ nameStart, nameEnd = funcLit.Type.Func, funcLit.Type.Params.Pos()
+ kind = protocol.Function
+ }
+ rng, err := pgf.PosRange(nameStart, nameEnd)
+ if err != nil {
+ return protocol.CallHierarchyItem{}, err
+ }
+
+ name := nameIdent.Name
+ for i := 0; i < litCount; i++ {
+ name += ".func()"
+ }
+
+ return protocol.CallHierarchyItem{
+ Name: name,
+ Kind: kind,
+ Tags: []protocol.SymbolTag{},
+ Detail: fmt.Sprintf("%s • %s", pkgPath, filepath.Base(fh.URI().Filename())),
+ URI: loc.URI,
+ Range: rng,
+ SelectionRange: rng,
+ }, nil
+}
+
+// OutgoingCalls returns an array of CallHierarchyOutgoingCall for a file and the position within the file.
+func OutgoingCalls(ctx context.Context, snapshot Snapshot, fh FileHandle, pp protocol.Position) ([]protocol.CallHierarchyOutgoingCall, error) {
+ ctx, done := event.Start(ctx, "source.OutgoingCalls")
+ defer done()
+
+ pkg, pgf, err := PackageForFile(ctx, snapshot, fh.URI(), NarrowestPackage)
+ if err != nil {
+ return nil, err
+ }
+ pos, err := pgf.PositionPos(pp)
+ if err != nil {
+ return nil, err
+ }
+
+ _, obj, _ := referencedObject(pkg, pgf, pos)
+ if obj == nil {
+ return nil, nil
+ }
+
+ if _, ok := obj.Type().Underlying().(*types.Signature); !ok {
+ return nil, nil
+ }
+
+ // Skip builtins.
+ if obj.Pkg() == nil {
+ return nil, nil
+ }
+
+ if !obj.Pos().IsValid() {
+ return nil, bug.Errorf("internal error: object %s.%s missing position", obj.Pkg().Path(), obj.Name())
+ }
+
+ declFile := pkg.FileSet().File(obj.Pos())
+ if declFile == nil {
+ return nil, bug.Errorf("file not found for %d", obj.Pos())
+ }
+
+ uri := span.URIFromPath(declFile.Name())
+ offset, err := safetoken.Offset(declFile, obj.Pos())
+ if err != nil {
+ return nil, err
+ }
+
+	// Type-check the declaring package in full, since we need to
+	// inspect the body of the function declaration.
+ declPkg, declPGF, err := PackageForFile(ctx, snapshot, uri, NarrowestPackage)
+ if err != nil {
+ return nil, err
+ }
+
+ declPos, err := safetoken.Pos(declPGF.Tok, offset)
+ if err != nil {
+ return nil, err
+ }
+
+ declNode, _, _ := findDeclInfo([]*ast.File{declPGF.File}, declPos)
+ if declNode == nil {
+ // TODO(rfindley): why don't we return an error here, or even bug.Errorf?
+ return nil, nil
+ // return nil, bug.Errorf("failed to find declaration for object %s.%s", obj.Pkg().Path(), obj.Name())
+ }
+
+ type callRange struct {
+ start, end token.Pos
+ }
+ callRanges := []callRange{}
+ ast.Inspect(declNode, func(n ast.Node) bool {
+ if call, ok := n.(*ast.CallExpr); ok {
+ var start, end token.Pos
+ switch n := call.Fun.(type) {
+ case *ast.SelectorExpr:
+ start, end = n.Sel.NamePos, call.Lparen
+ case *ast.Ident:
+ start, end = n.NamePos, call.Lparen
+ case *ast.FuncLit:
+				// While we don't add the function literal as an 'outgoing' call,
+				// we still want to traverse into it.
+ return true
+ default:
+				// Ignore any other kind of call expression, such as a call
+				// whose function is itself the result of a call; those are
+				// not 'outgoing' calls to a named function.
+ return false
+ }
+ callRanges = append(callRanges, callRange{start: start, end: end})
+ }
+ return true
+ })
+
+ outgoingCalls := map[token.Pos]*protocol.CallHierarchyOutgoingCall{}
+ for _, callRange := range callRanges {
+ _, obj, _ := referencedObject(declPkg, declPGF, callRange.start)
+ if obj == nil {
+ continue
+ }
+
+ // ignore calls to builtin functions
+ if obj.Pkg() == nil {
+ continue
+ }
+
+ outgoingCall, ok := outgoingCalls[obj.Pos()]
+ if !ok {
+ loc, err := mapPosition(ctx, declPkg.FileSet(), snapshot, obj.Pos(), obj.Pos()+token.Pos(len(obj.Name())))
+ if err != nil {
+ return nil, err
+ }
+ outgoingCall = &protocol.CallHierarchyOutgoingCall{
+ To: protocol.CallHierarchyItem{
+ Name: obj.Name(),
+ Kind: protocol.Function,
+ Tags: []protocol.SymbolTag{},
+ Detail: fmt.Sprintf("%s • %s", obj.Pkg().Path(), filepath.Base(loc.URI.SpanURI().Filename())),
+ URI: loc.URI,
+ Range: loc.Range,
+ SelectionRange: loc.Range,
+ },
+ }
+ outgoingCalls[obj.Pos()] = outgoingCall
+ }
+
+ rng, err := declPGF.PosRange(callRange.start, callRange.end)
+ if err != nil {
+ return nil, err
+ }
+ outgoingCall.FromRanges = append(outgoingCall.FromRanges, rng)
+ }
+
+ outgoingCallItems := make([]protocol.CallHierarchyOutgoingCall, 0, len(outgoingCalls))
+ for _, callItem := range outgoingCalls {
+ outgoingCallItems = append(outgoingCallItems, *callItem)
+ }
+ return outgoingCallItems, nil
+}
diff --git a/gopls/internal/lsp/source/code_lens.go b/gopls/internal/lsp/source/code_lens.go
new file mode 100644
index 000000000..ef1c3aa54
--- /dev/null
+++ b/gopls/internal/lsp/source/code_lens.go
@@ -0,0 +1,248 @@
+// Copyright 2020 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package source
+
+import (
+ "context"
+ "go/ast"
+ "go/token"
+ "go/types"
+ "path/filepath"
+ "regexp"
+ "strings"
+
+ "golang.org/x/tools/gopls/internal/lsp/command"
+ "golang.org/x/tools/gopls/internal/lsp/protocol"
+ "golang.org/x/tools/gopls/internal/span"
+)
+
+type LensFunc func(context.Context, Snapshot, FileHandle) ([]protocol.CodeLens, error)
+
+// LensFuncs returns the supported LensFuncs for Go files.
+func LensFuncs() map[command.Command]LensFunc {
+ return map[command.Command]LensFunc{
+ command.Generate: goGenerateCodeLens,
+ command.Test: runTestCodeLens,
+ command.RegenerateCgo: regenerateCgoLens,
+ command.GCDetails: toggleDetailsCodeLens,
+ }
+}
+
+var (
+ testRe = regexp.MustCompile("^Test[^a-z]")
+ benchmarkRe = regexp.MustCompile("^Benchmark[^a-z]")
+)
+
+func runTestCodeLens(ctx context.Context, snapshot Snapshot, fh FileHandle) ([]protocol.CodeLens, error) {
+ codeLens := make([]protocol.CodeLens, 0)
+
+ fns, err := TestsAndBenchmarks(ctx, snapshot, fh)
+ if err != nil {
+ return nil, err
+ }
+ puri := protocol.URIFromSpanURI(fh.URI())
+ for _, fn := range fns.Tests {
+ cmd, err := command.NewTestCommand("run test", puri, []string{fn.Name}, nil)
+ if err != nil {
+ return nil, err
+ }
+ rng := protocol.Range{Start: fn.Rng.Start, End: fn.Rng.Start}
+ codeLens = append(codeLens, protocol.CodeLens{Range: rng, Command: &cmd})
+ }
+
+ for _, fn := range fns.Benchmarks {
+ cmd, err := command.NewTestCommand("run benchmark", puri, nil, []string{fn.Name})
+ if err != nil {
+ return nil, err
+ }
+ rng := protocol.Range{Start: fn.Rng.Start, End: fn.Rng.Start}
+ codeLens = append(codeLens, protocol.CodeLens{Range: rng, Command: &cmd})
+ }
+
+ if len(fns.Benchmarks) > 0 {
+ pgf, err := snapshot.ParseGo(ctx, fh, ParseFull)
+ if err != nil {
+ return nil, err
+ }
+		// Add a code lens at the top of the file that runs all benchmarks in the file.
+ rng, err := pgf.PosRange(pgf.File.Package, pgf.File.Package)
+ if err != nil {
+ return nil, err
+ }
+ var benches []string
+ for _, fn := range fns.Benchmarks {
+ benches = append(benches, fn.Name)
+ }
+ cmd, err := command.NewTestCommand("run file benchmarks", puri, nil, benches)
+ if err != nil {
+ return nil, err
+ }
+ codeLens = append(codeLens, protocol.CodeLens{Range: rng, Command: &cmd})
+ }
+ return codeLens, nil
+}
+
+type testFn struct {
+ Name string
+ Rng protocol.Range
+}
+
+type testFns struct {
+ Tests []testFn
+ Benchmarks []testFn
+}
+
+func TestsAndBenchmarks(ctx context.Context, snapshot Snapshot, fh FileHandle) (testFns, error) {
+ var out testFns
+
+ if !strings.HasSuffix(fh.URI().Filename(), "_test.go") {
+ return out, nil
+ }
+ pkg, pgf, err := PackageForFile(ctx, snapshot, fh.URI(), NarrowestPackage)
+ if err != nil {
+ return out, err
+ }
+
+ for _, d := range pgf.File.Decls {
+ fn, ok := d.(*ast.FuncDecl)
+ if !ok {
+ continue
+ }
+
+ rng, err := pgf.NodeRange(fn)
+ if err != nil {
+ return out, err
+ }
+
+ if matchTestFunc(fn, pkg, testRe, "T") {
+ out.Tests = append(out.Tests, testFn{fn.Name.Name, rng})
+ }
+
+ if matchTestFunc(fn, pkg, benchmarkRe, "B") {
+ out.Benchmarks = append(out.Benchmarks, testFn{fn.Name.Name, rng})
+ }
+ }
+
+ return out, nil
+}
+
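+// matchTestFunc reports whether fn looks like a test or benchmark
+// function: its name must match nameRe, and its single parameter must be
+// a pointer to testing.<paramID>. Illustrative examples (the functions
+// are hypothetical):
+//
+//	func TestFoo(t *testing.T)      // matches testRe with paramID "T"
+//	func BenchmarkBar(b *testing.B) // matches benchmarkRe with paramID "B"
+//	func Testing(t *testing.T)      // rejected: lower-case 'i' after "Test"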
+func matchTestFunc(fn *ast.FuncDecl, pkg Package, nameRe *regexp.Regexp, paramID string) bool {
+ // Make sure that the function name matches a test function.
+ if !nameRe.MatchString(fn.Name.Name) {
+ return false
+ }
+ info := pkg.GetTypesInfo()
+ if info == nil {
+ return false
+ }
+ obj := info.ObjectOf(fn.Name)
+ if obj == nil {
+ return false
+ }
+ sig, ok := obj.Type().(*types.Signature)
+ if !ok {
+ return false
+ }
+ // Test functions should have only one parameter.
+ if sig.Params().Len() != 1 {
+ return false
+ }
+
+ // Check the type of the only parameter
+ paramTyp, ok := sig.Params().At(0).Type().(*types.Pointer)
+ if !ok {
+ return false
+ }
+ named, ok := paramTyp.Elem().(*types.Named)
+ if !ok {
+ return false
+ }
+ namedObj := named.Obj()
+ if namedObj.Pkg().Path() != "testing" {
+ return false
+ }
+ return namedObj.Id() == paramID
+}
+
+func goGenerateCodeLens(ctx context.Context, snapshot Snapshot, fh FileHandle) ([]protocol.CodeLens, error) {
+ pgf, err := snapshot.ParseGo(ctx, fh, ParseFull)
+ if err != nil {
+ return nil, err
+ }
+ const ggDirective = "//go:generate"
+ for _, c := range pgf.File.Comments {
+ for _, l := range c.List {
+ if !strings.HasPrefix(l.Text, ggDirective) {
+ continue
+ }
+ rng, err := pgf.PosRange(l.Pos(), l.Pos()+token.Pos(len(ggDirective)))
+ if err != nil {
+ return nil, err
+ }
+ dir := protocol.URIFromSpanURI(span.URIFromPath(filepath.Dir(fh.URI().Filename())))
+ nonRecursiveCmd, err := command.NewGenerateCommand("run go generate", command.GenerateArgs{Dir: dir, Recursive: false})
+ if err != nil {
+ return nil, err
+ }
+ recursiveCmd, err := command.NewGenerateCommand("run go generate ./...", command.GenerateArgs{Dir: dir, Recursive: true})
+ if err != nil {
+ return nil, err
+ }
+ return []protocol.CodeLens{
+ {Range: rng, Command: &recursiveCmd},
+ {Range: rng, Command: &nonRecursiveCmd},
+ }, nil
+
+ }
+ }
+ return nil, nil
+}
+
+func regenerateCgoLens(ctx context.Context, snapshot Snapshot, fh FileHandle) ([]protocol.CodeLens, error) {
+ pgf, err := snapshot.ParseGo(ctx, fh, ParseFull)
+ if err != nil {
+ return nil, err
+ }
+ var c *ast.ImportSpec
+ for _, imp := range pgf.File.Imports {
+ if imp.Path.Value == `"C"` {
+ c = imp
+ }
+ }
+ if c == nil {
+ return nil, nil
+ }
+ rng, err := pgf.NodeRange(c)
+ if err != nil {
+ return nil, err
+ }
+ puri := protocol.URIFromSpanURI(fh.URI())
+ cmd, err := command.NewRegenerateCgoCommand("regenerate cgo definitions", command.URIArg{URI: puri})
+ if err != nil {
+ return nil, err
+ }
+ return []protocol.CodeLens{{Range: rng, Command: &cmd}}, nil
+}
+
+func toggleDetailsCodeLens(ctx context.Context, snapshot Snapshot, fh FileHandle) ([]protocol.CodeLens, error) {
+ pgf, err := snapshot.ParseGo(ctx, fh, ParseFull)
+ if err != nil {
+ return nil, err
+ }
+ if !pgf.File.Package.IsValid() {
+ // Without a package name we have nowhere to put the codelens, so give up.
+ return nil, nil
+ }
+ rng, err := pgf.PosRange(pgf.File.Package, pgf.File.Package)
+ if err != nil {
+ return nil, err
+ }
+ puri := protocol.URIFromSpanURI(fh.URI())
+ cmd, err := command.NewGCDetailsCommand("Toggle gc annotation details", puri)
+ if err != nil {
+ return nil, err
+ }
+ return []protocol.CodeLens{{Range: rng, Command: &cmd}}, nil
+}
diff --git a/gopls/internal/lsp/source/comment.go b/gopls/internal/lsp/source/comment.go
new file mode 100644
index 000000000..beed328ae
--- /dev/null
+++ b/gopls/internal/lsp/source/comment.go
@@ -0,0 +1,384 @@
+// Copyright 2019 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+//go:build !go1.19
+// +build !go1.19
+
+package source
+
+import (
+ "bytes"
+ "io"
+ "regexp"
+ "strings"
+ "unicode"
+ "unicode/utf8"
+)
+
+// CommentToMarkdown converts comment text to formatted markdown.
+// The comment was prepared by DocReader,
+// so it is known not to have leading, trailing blank lines
+// nor to have trailing spaces at the end of lines.
+// The comment markers have already been removed.
+//
+// Each line is converted into a markdown line, and empty lines are simply
+// converted to newlines. Headings are prefixed with `### ` to make them
+// markdown headings.
+//
+// A span of indented lines retains a 4 space prefix block, with the common indent
+// prefix removed unless empty, in which case it will be converted to a newline.
+//
+// URLs in the comment text are converted into links.
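+//
+// For example (an illustrative sketch; markdown escaping of punctuation
+// is elided), the comment text
+//
+//	See https://go.dev for details.
+//
+//		f(x)
+//
+// becomes a paragraph in which the URL is rewritten as a markdown link,
+// followed by a four-space-indented code block containing f(x).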
+func CommentToMarkdown(text string, _ *Options) string {
+ buf := &bytes.Buffer{}
+ commentToMarkdown(buf, text)
+ return buf.String()
+}
+
+var (
+ mdNewline = []byte("\n")
+ mdHeader = []byte("### ")
+ mdIndent = []byte(" ")
+ mdLinkStart = []byte("[")
+ mdLinkDiv = []byte("](")
+ mdLinkEnd = []byte(")")
+)
+
+func commentToMarkdown(w io.Writer, text string) {
+ blocks := blocks(text)
+ for i, b := range blocks {
+ switch b.op {
+ case opPara:
+ for _, line := range b.lines {
+ emphasize(w, line, true)
+ }
+ case opHead:
+			// A heading block should consist of exactly one line,
+			// but check the number of lines anyway, just in case.
+ if len(b.lines) == 0 {
+ // Skip this block.
+ continue
+ }
+ header := b.lines[0]
+
+ w.Write(mdHeader)
+ commentEscape(w, header, true)
+ // Header doesn't end with \n unlike the lines of other blocks.
+ w.Write(mdNewline)
+ case opPre:
+ for _, line := range b.lines {
+ if isBlank(line) {
+ w.Write(mdNewline)
+ continue
+ }
+ w.Write(mdIndent)
+ w.Write([]byte(line))
+ }
+ }
+
+ if i < len(blocks)-1 {
+ w.Write(mdNewline)
+ }
+ }
+}
+
+const (
+ ulquo = "“"
+ urquo = "”"
+)
+
+var (
+ markdownEscape = regexp.MustCompile(`([\\\x60*{}[\]()#+\-.!_>~|"$%&'\/:;<=?@^])`)
+
+ unicodeQuoteReplacer = strings.NewReplacer("``", ulquo, "''", urquo)
+)
+
+// commentEscape escapes comment text for markdown. If nice is set,
+// also turn double ` and ' into “ and ”.
+func commentEscape(w io.Writer, text string, nice bool) {
+ if nice {
+ text = convertQuotes(text)
+ }
+ text = escapeRegex(text)
+ w.Write([]byte(text))
+}
+
+func convertQuotes(text string) string {
+ return unicodeQuoteReplacer.Replace(text)
+}
+
+func escapeRegex(text string) string {
+ return markdownEscape.ReplaceAllString(text, `\$1`)
+}
+
+func emphasize(w io.Writer, line string, nice bool) {
+ for {
+ m := matchRx.FindStringSubmatchIndex(line)
+ if m == nil {
+ break
+ }
+		// len(m) >= 6 (two parenthesized sub-regexps in matchRx, 1st one is urlRx)
+
+ // write text before match
+ commentEscape(w, line[0:m[0]], nice)
+
+ // adjust match for URLs
+ match := line[m[0]:m[1]]
+ if strings.Contains(match, "://") {
+ m0, m1 := m[0], m[1]
+ for _, s := range []string{"()", "{}", "[]"} {
+ open, close := s[:1], s[1:] // E.g., "(" and ")"
+ // require opening parentheses before closing parentheses (#22285)
+ if i := strings.Index(match, close); i >= 0 && i < strings.Index(match, open) {
+ m1 = m0 + i
+ match = line[m0:m1]
+ }
+ // require balanced pairs of parentheses (#5043)
+ for i := 0; strings.Count(match, open) != strings.Count(match, close) && i < 10; i++ {
+ m1 = strings.LastIndexAny(line[:m1], s)
+ match = line[m0:m1]
+ }
+ }
+ if m1 != m[1] {
+ // redo matching with shortened line for correct indices
+ m = matchRx.FindStringSubmatchIndex(line[:m[0]+len(match)])
+ }
+ }
+
+		// The following code has been modified from go/doc since words is
+		// always nil. All HTML formatting has also been transformed into
+		// markdown formatting.
+
+ // analyze match
+ url := ""
+ if m[2] >= 0 {
+ url = match
+ }
+
+ // write match
+ if len(url) > 0 {
+ w.Write(mdLinkStart)
+ }
+
+ commentEscape(w, match, nice)
+
+ if len(url) > 0 {
+ w.Write(mdLinkDiv)
+ w.Write([]byte(urlReplacer.Replace(url)))
+ w.Write(mdLinkEnd)
+ }
+
+ // advance
+ line = line[m[1]:]
+ }
+ commentEscape(w, line, nice)
+}
+
+// Everything from here on is a copy of go/doc/comment.go
+
+const (
+ // Regexp for Go identifiers
+ identRx = `[\pL_][\pL_0-9]*`
+
+ // Regexp for URLs
+ // Match parens, and check later for balance - see #5043, #22285
+ // Match .,:;?! within path, but not at end - see #18139, #16565
+ // This excludes some rare yet valid urls ending in common punctuation
+ // in order to allow sentences ending in URLs.
+
+ // protocol (required) e.g. http
+ protoPart = `(https?|ftp|file|gopher|mailto|nntp)`
+ // host (required) e.g. www.example.com or [::1]:8080
+ hostPart = `([a-zA-Z0-9_@\-.\[\]:]+)`
+ // path+query+fragment (optional) e.g. /path/index.html?q=foo#bar
+ pathPart = `([.,:;?!]*[a-zA-Z0-9$'()*+&#=@~_/\-\[\]%])*`
+
+ urlRx = protoPart + `://` + hostPart + pathPart
+)
+
+var (
+ matchRx = regexp.MustCompile(`(` + urlRx + `)|(` + identRx + `)`)
+ urlReplacer = strings.NewReplacer(`(`, `\(`, `)`, `\)`)
+)
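+
+// As an illustrative sketch of how emphasize uses matchRx:
+//
+//	m := matchRx.FindStringSubmatchIndex("https://go.dev/doc and more")
+//	// m[2] >= 0 here: the first parenthesized group (the URL alternative)
+//	// participated, so emphasize renders the match as a markdown link
+//	// rather than as escaped text.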
+
+func indentLen(s string) int {
+ i := 0
+ for i < len(s) && (s[i] == ' ' || s[i] == '\t') {
+ i++
+ }
+ return i
+}
+
+func isBlank(s string) bool {
+ return len(s) == 0 || (len(s) == 1 && s[0] == '\n')
+}
+
+func commonPrefix(a, b string) string {
+ i := 0
+ for i < len(a) && i < len(b) && a[i] == b[i] {
+ i++
+ }
+ return a[0:i]
+}
+
+func unindent(block []string) {
+ if len(block) == 0 {
+ return
+ }
+
+ // compute maximum common white prefix
+ prefix := block[0][0:indentLen(block[0])]
+ for _, line := range block {
+ if !isBlank(line) {
+ prefix = commonPrefix(prefix, line)
+ }
+ }
+ n := len(prefix)
+
+ // remove
+ for i, line := range block {
+ if !isBlank(line) {
+ block[i] = line[n:]
+ }
+ }
+}
+
+// heading returns the trimmed line if it passes as a section heading;
+// otherwise it returns the empty string.
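+//
+// For example, mirroring cases from the tests:
+//
+//	heading("Section")          // "Section"
+//	heading("A typical usage:") // "" (the trailing ':' is rejected)
+//	heading("section")          // "" (must start with an upper-case letter)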
+func heading(line string) string {
+ line = strings.TrimSpace(line)
+ if len(line) == 0 {
+ return ""
+ }
+
+ // a heading must start with an uppercase letter
+ r, _ := utf8.DecodeRuneInString(line)
+ if !unicode.IsLetter(r) || !unicode.IsUpper(r) {
+ return ""
+ }
+
+ // it must end in a letter or digit:
+ r, _ = utf8.DecodeLastRuneInString(line)
+ if !unicode.IsLetter(r) && !unicode.IsDigit(r) {
+ return ""
+ }
+
+ // exclude lines with illegal characters. we allow "(),"
+ if strings.ContainsAny(line, ";:!?+*/=[]{}_^°&§~%#@<\">\\") {
+ return ""
+ }
+
+ // allow "'" for possessive "'s" only
+ for b := line; ; {
+ i := strings.IndexRune(b, '\'')
+ if i < 0 {
+ break
+ }
+ if i+1 >= len(b) || b[i+1] != 's' || (i+2 < len(b) && b[i+2] != ' ') {
+ return "" // not followed by "s "
+ }
+ b = b[i+2:]
+ }
+
+ // allow "." when followed by non-space
+ for b := line; ; {
+ i := strings.IndexRune(b, '.')
+ if i < 0 {
+ break
+ }
+ if i+1 >= len(b) || b[i+1] == ' ' {
+ return "" // not followed by non-space
+ }
+ b = b[i+1:]
+ }
+
+ return line
+}
+
+type op int
+
+const (
+ opPara op = iota
+ opHead
+ opPre
+)
+
+type block struct {
+ op op
+ lines []string
+}
+
+func blocks(text string) []block {
+ var (
+ out []block
+ para []string
+
+ lastWasBlank = false
+ lastWasHeading = false
+ )
+
+ close := func() {
+ if para != nil {
+ out = append(out, block{opPara, para})
+ para = nil
+ }
+ }
+
+ lines := strings.SplitAfter(text, "\n")
+ unindent(lines)
+ for i := 0; i < len(lines); {
+ line := lines[i]
+ if isBlank(line) {
+ // close paragraph
+ close()
+ i++
+ lastWasBlank = true
+ continue
+ }
+ if indentLen(line) > 0 {
+ // close paragraph
+ close()
+
+ // count indented or blank lines
+ j := i + 1
+ for j < len(lines) && (isBlank(lines[j]) || indentLen(lines[j]) > 0) {
+ j++
+ }
+ // but not trailing blank lines
+ for j > i && isBlank(lines[j-1]) {
+ j--
+ }
+ pre := lines[i:j]
+ i = j
+
+ unindent(pre)
+
+ // put those lines in a pre block
+ out = append(out, block{opPre, pre})
+ lastWasHeading = false
+ continue
+ }
+
+ if lastWasBlank && !lastWasHeading && i+2 < len(lines) &&
+ isBlank(lines[i+1]) && !isBlank(lines[i+2]) && indentLen(lines[i+2]) == 0 {
+ // current line is non-blank, surrounded by blank lines
+ // and the next non-blank line is not indented: this
+ // might be a heading.
+ if head := heading(line); head != "" {
+ close()
+ out = append(out, block{opHead, []string{head}})
+ i += 2
+ lastWasHeading = true
+ continue
+ }
+ }
+
+ // open paragraph
+ lastWasBlank = false
+ lastWasHeading = false
+ para = append(para, lines[i])
+ i++
+ }
+ close()
+
+ return out
+}
diff --git a/gopls/internal/lsp/source/comment_go118_test.go b/gopls/internal/lsp/source/comment_go118_test.go
new file mode 100644
index 000000000..60bd14b9f
--- /dev/null
+++ b/gopls/internal/lsp/source/comment_go118_test.go
@@ -0,0 +1,371 @@
+// Copyright 2019 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+//go:build !go1.19
+// +build !go1.19
+
+package source
+
+import (
+ "bytes"
+ "reflect"
+ "strings"
+ "testing"
+)
+
+// This file is a copy of go/doc/comment_test.go, with the exception of
+// the test cases for TestEmphasize and TestCommentEscape.
+
+var headingTests = []struct {
+ line string
+ ok bool
+}{
+ {"Section", true},
+ {"A typical usage", true},
+ {"ΔΛΞ is Greek", true},
+ {"Foo 42", true},
+ {"", false},
+ {"section", false},
+ {"A typical usage:", false},
+ {"This code:", false},
+ {"δ is Greek", false},
+ {"Foo §", false},
+ {"Fermat's Last Sentence", true},
+ {"Fermat's", true},
+ {"'sX", false},
+ {"Ted 'Too' Bar", false},
+ {"Use n+m", false},
+ {"Scanning:", false},
+ {"N:M", false},
+}
+
+func TestIsHeading(t *testing.T) {
+ for _, tt := range headingTests {
+ if h := heading(tt.line); (len(h) > 0) != tt.ok {
+ t.Errorf("isHeading(%q) = %v, want %v", tt.line, h, tt.ok)
+ }
+ }
+}
+
+var blocksTests = []struct {
+ in string
+ out []block
+ text string
+}{
+ {
+ in: `Para 1.
+Para 1 line 2.
+
+Para 2.
+
+Section
+
+Para 3.
+
+ pre
+ pre1
+
+Para 4.
+
+ pre
+ pre1
+
+ pre2
+
+Para 5.
+
+
+ pre
+
+
+ pre1
+ pre2
+
+Para 6.
+ pre
+ pre2
+`,
+ out: []block{
+ {opPara, []string{"Para 1.\n", "Para 1 line 2.\n"}},
+ {opPara, []string{"Para 2.\n"}},
+ {opHead, []string{"Section"}},
+ {opPara, []string{"Para 3.\n"}},
+ {opPre, []string{"pre\n", "pre1\n"}},
+ {opPara, []string{"Para 4.\n"}},
+ {opPre, []string{"pre\n", "pre1\n", "\n", "pre2\n"}},
+ {opPara, []string{"Para 5.\n"}},
+ {opPre, []string{"pre\n", "\n", "\n", "pre1\n", "pre2\n"}},
+ {opPara, []string{"Para 6.\n"}},
+ {opPre, []string{"pre\n", "pre2\n"}},
+ },
+ text: `. Para 1. Para 1 line 2.
+
+. Para 2.
+
+
+. Section
+
+. Para 3.
+
+$ pre
+$ pre1
+
+. Para 4.
+
+$ pre
+$ pre1
+
+$ pre2
+
+. Para 5.
+
+$ pre
+
+
+$ pre1
+$ pre2
+
+. Para 6.
+
+$ pre
+$ pre2
+`,
+ },
+ {
+ in: "Para.\n\tshould not be ``escaped''",
+ out: []block{
+ {opPara, []string{"Para.\n"}},
+ {opPre, []string{"should not be ``escaped''"}},
+ },
+ text: ". Para.\n\n$ should not be ``escaped''",
+ },
+ {
+ in: "// A very long line of 46 char for line wrapping.",
+ out: []block{
+ {opPara, []string{"// A very long line of 46 char for line wrapping."}},
+ },
+ text: `. // A very long line of 46 char for line
+. // wrapping.
+`,
+ },
+ {
+ in: `/* A very long line of 46 char for line wrapping.
+A very long line of 46 char for line wrapping. */`,
+ out: []block{
+ {opPara, []string{"/* A very long line of 46 char for line wrapping.\n", "A very long line of 46 char for line wrapping. */"}},
+ },
+ text: `. /* A very long line of 46 char for line
+. wrapping. A very long line of 46 char
+. for line wrapping. */
+`,
+ },
+}
+
+func TestBlocks(t *testing.T) {
+ for i, tt := range blocksTests {
+ b := blocks(tt.in)
+ if !reflect.DeepEqual(b, tt.out) {
+ t.Errorf("#%d: mismatch\nhave: %v\nwant: %v", i, b, tt.out)
+ }
+ }
+}
+
+// This has been modified from go/doc to use markdown links instead of HTML ones,
+// and markdown escaping instead of HTML escaping.
+var emphasizeTests = []struct {
+ in, out string
+}{
+ {"", ""},
+ {"http://[::1]:8080/foo.txt", `[http\:\/\/\[\:\:1\]\:8080\/foo\.txt](http://[::1]:8080/foo.txt)`},
+ {"before (https://www.google.com) after", `before \([https\:\/\/www\.google\.com](https://www.google.com)\) after`},
+ {"before https://www.google.com:30/x/y/z:b::c. After", `before [https\:\/\/www\.google\.com\:30\/x\/y\/z\:b\:\:c](https://www.google.com:30/x/y/z:b::c)\. After`},
+ {"http://www.google.com/path/:;!-/?query=%34b#093124", `[http\:\/\/www\.google\.com\/path\/\:\;\!\-\/\?query\=\%34b\#093124](http://www.google.com/path/:;!-/?query=%34b#093124)`},
+ {"http://www.google.com/path/:;!-/?query=%34bar#093124", `[http\:\/\/www\.google\.com\/path\/\:\;\!\-\/\?query\=\%34bar\#093124](http://www.google.com/path/:;!-/?query=%34bar#093124)`},
+ {"http://www.google.com/index.html! After", `[http\:\/\/www\.google\.com\/index\.html](http://www.google.com/index.html)\! After`},
+ {"http://www.google.com/", `[http\:\/\/www\.google\.com\/](http://www.google.com/)`},
+ {"https://www.google.com/", `[https\:\/\/www\.google\.com\/](https://www.google.com/)`},
+ {"http://www.google.com/path.", `[http\:\/\/www\.google\.com\/path](http://www.google.com/path)\.`},
+ {"http://en.wikipedia.org/wiki/Camellia_(cipher)", `[http\:\/\/en\.wikipedia\.org\/wiki\/Camellia\_\(cipher\)](http://en.wikipedia.org/wiki/Camellia_\(cipher\))`},
+ {"(http://www.google.com/)", `\([http\:\/\/www\.google\.com\/](http://www.google.com/)\)`},
+ {"http://gmail.com)", `[http\:\/\/gmail\.com](http://gmail.com)\)`},
+ {"((http://gmail.com))", `\(\([http\:\/\/gmail\.com](http://gmail.com)\)\)`},
+ {"http://gmail.com ((http://gmail.com)) ()", `[http\:\/\/gmail\.com](http://gmail.com) \(\([http\:\/\/gmail\.com](http://gmail.com)\)\) \(\)`},
+ {"Foo bar http://example.com/ quux!", `Foo bar [http\:\/\/example\.com\/](http://example.com/) quux\!`},
+ {"Hello http://example.com/%2f/ /world.", `Hello [http\:\/\/example\.com\/\%2f\/](http://example.com/%2f/) \/world\.`},
+ {"Lorem http: ipsum //host/path", `Lorem http\: ipsum \/\/host\/path`},
+ {"javascript://is/not/linked", `javascript\:\/\/is\/not\/linked`},
+ {"http://foo", `[http\:\/\/foo](http://foo)`},
+ {"art by [[https://www.example.com/person/][Person Name]]", `art by \[\[[https\:\/\/www\.example\.com\/person\/](https://www.example.com/person/)\]\[Person Name\]\]`},
+ {"please visit (http://golang.org/)", `please visit \([http\:\/\/golang\.org\/](http://golang.org/)\)`},
+ {"please visit http://golang.org/hello())", `please visit [http\:\/\/golang\.org\/hello\(\)](http://golang.org/hello\(\))\)`},
+ {"http://git.qemu.org/?p=qemu.git;a=blob;f=qapi-schema.json;hb=HEAD", `[http\:\/\/git\.qemu\.org\/\?p\=qemu\.git\;a\=blob\;f\=qapi\-schema\.json\;hb\=HEAD](http://git.qemu.org/?p=qemu.git;a=blob;f=qapi-schema.json;hb=HEAD)`},
+ {"https://foo.bar/bal/x(])", `[https\:\/\/foo\.bar\/bal\/x\(](https://foo.bar/bal/x\()\]\)`},
+ {"foo [ http://bar(])", `foo \[ [http\:\/\/bar\(](http://bar\()\]\)`},
+}
+
+func TestEmphasize(t *testing.T) {
+ for i, tt := range emphasizeTests {
+ var buf bytes.Buffer
+ emphasize(&buf, tt.in, true)
+ out := buf.String()
+ if out != tt.out {
+ t.Errorf("#%d: mismatch\nhave: %v\nwant: %v", i, out, tt.out)
+ }
+ }
+}
+
+func TestCommentEscape(t *testing.T) {
+ //ldquo -> ulquo and rdquo -> urquo
+ commentTests := []struct {
+ in, out string
+ }{
+ {"typically invoked as ``go tool asm'',", "typically invoked as " + ulquo + "go tool asm" + urquo + ","},
+ {"For more detail, run ``go help test'' and ``go help testflag''", "For more detail, run " + ulquo + "go help test" + urquo + " and " + ulquo + "go help testflag" + urquo}}
+ for i, tt := range commentTests {
+ var buf strings.Builder
+ commentEscape(&buf, tt.in, true)
+ out := buf.String()
+ if out != tt.out {
+ t.Errorf("#%d: mismatch\nhave: %q\nwant: %q", i, out, tt.out)
+ }
+ }
+}
+
+func TestCommentToMarkdown(t *testing.T) {
+ tests := []struct {
+ in, out string
+ }{
+ {
+ in: "F declaration.\n",
+ out: "F declaration\\.\n",
+ },
+ {
+ in: `
+F declaration. Lorem ipsum dolor sit amet.
+Etiam mattis eros at orci mollis molestie.
+`,
+ out: `
+F declaration\. Lorem ipsum dolor sit amet\.
+Etiam mattis eros at orci mollis molestie\.
+`,
+ },
+ {
+ in: `
+F declaration.
+
+Lorem ipsum dolor sit amet.
+Sed id dui turpis.
+
+
+
+
+Aenean tempus velit non auctor eleifend.
+Aenean efficitur a sem id ultricies.
+
+
+Phasellus efficitur mauris et viverra bibendum.
+`,
+ out: `
+F declaration\.
+
+Lorem ipsum dolor sit amet\.
+Sed id dui turpis\.
+
+Aenean tempus velit non auctor eleifend\.
+Aenean efficitur a sem id ultricies\.
+
+Phasellus efficitur mauris et viverra bibendum\.
+`,
+ },
+ {
+ in: `
+F declaration.
+
+Aenean tempus velit non auctor eleifend.
+
+Section
+
+Lorem ipsum dolor sit amet, consectetur adipiscing elit.
+
+ func foo() {}
+
+
+ func bar() {}
+
+Fusce lorem lacus.
+
+ func foo() {}
+
+ func bar() {}
+
+Maecenas in lobortis lectus.
+
+ func foo() {}
+
+ func bar() {}
+
+Phasellus efficitur mauris et viverra bibendum.
+`,
+ out: `
+F declaration\.
+
+Aenean tempus velit non auctor eleifend\.
+
+### Section
+
+Lorem ipsum dolor sit amet, consectetur adipiscing elit\.
+
+ func foo() {}
+
+
+ func bar() {}
+
+Fusce lorem lacus\.
+
+ func foo() {}
+
+ func bar() {}
+
+Maecenas in lobortis lectus\.
+
+ func foo() {}
+
+ func bar() {}
+
+Phasellus efficitur mauris et viverra bibendum\.
+`,
+ },
+ {
+ in: `
+F declaration.
+
+ func foo() {
+ fmt.Println("foo")
+ }
+ func bar() {
+ fmt.Println("bar")
+ }
+`,
+ out: `
+F declaration\.
+
+ func foo() {
+ fmt.Println("foo")
+ }
+ func bar() {
+ fmt.Println("bar")
+ }
+`,
+ },
+ }
+ for i, tt := range tests {
+		// Comments start with a newline for better readability, so we trim them.
+ tt.in = strings.TrimPrefix(tt.in, "\n")
+ tt.out = strings.TrimPrefix(tt.out, "\n")
+
+ if out := CommentToMarkdown(tt.in, nil); out != tt.out {
+ t.Errorf("#%d: mismatch\nhave: %q\nwant: %q", i, out, tt.out)
+ }
+ }
+}
diff --git a/gopls/internal/lsp/source/comment_go119.go b/gopls/internal/lsp/source/comment_go119.go
new file mode 100644
index 000000000..c379a4a4f
--- /dev/null
+++ b/gopls/internal/lsp/source/comment_go119.go
@@ -0,0 +1,56 @@
+// Copyright 2022 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+//go:build go1.19
+// +build go1.19
+
+package source
+
+// Starting with go1.19, the formatting of comments has changed, and there
+// is a new package (go/doc/comment) for processing them.
+// As long as gopls has to compile under earlier versions, tests
+// have to pass with both the old and new code, which produce
+// slightly different results. (The old code escapes many characters the
+// new code does not, and the new code sometimes adds a blank line.)
+// Each of the affected test files (cmd/test/definition.go,
+// source/comment_test.go, and source/source_test.go) checks the results
+// with a function, tests.CheckSameMarkdown, that accepts both the old
+// and the new results.
+
+// When gopls no longer needs to compile with go1.18, the old comment.go
+// should be replaced by this file, and the golden test files should be
+// updated. (checkSameMarkdown could then be replaced by a simple
+// comparison.)
+
+import (
+ "fmt"
+ "go/doc/comment"
+)
+
+// CommentToMarkdown converts comment text to formatted markdown.
+// The comment was prepared by DocReader,
+// so it is known not to have leading, trailing blank lines
+// nor to have trailing spaces at the end of lines.
+// The comment markers have already been removed.
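+//
+// Doc links are resolved against options.LinkTarget. For example,
+// assuming LinkTarget is "pkg.go.dev", a [fmt.Println] doc link is
+// rendered as a markdown link to https://pkg.go.dev/fmt#Println.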
+func CommentToMarkdown(text string, options *Options) string {
+ var p comment.Parser
+ doc := p.Parse(text)
+ var pr comment.Printer
+ // The default produces {#Hdr-...} tags for headings.
+	// VS Code displays them, which is undesirable.
+ // The godoc for comment.Printer says the tags
+ // avoid a security problem.
+ pr.HeadingID = func(*comment.Heading) string { return "" }
+ pr.DocLinkURL = func(link *comment.DocLink) string {
+ msg := fmt.Sprintf("https://%s/%s", options.LinkTarget, link.ImportPath)
+ if link.Name != "" {
+ msg += "#"
+ if link.Recv != "" {
+ msg += link.Recv + "."
+ }
+ msg += link.Name
+ }
+ return msg
+ }
+ easy := pr.Markdown(doc)
+ return string(easy)
+}
diff --git a/gopls/internal/lsp/source/completion/builtin.go b/gopls/internal/lsp/source/completion/builtin.go
new file mode 100644
index 000000000..39732d864
--- /dev/null
+++ b/gopls/internal/lsp/source/completion/builtin.go
@@ -0,0 +1,147 @@
+// Copyright 2020 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package completion
+
+import (
+ "context"
+ "go/ast"
+ "go/types"
+)
+
+// builtinArgKind determines the expected object kind for a builtin
+// argument. It attempts to use the AST hints from builtin.go where
+// possible.
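+//
+// For example (an illustrative sketch), when completing the first
+// argument of make, the "Type" hint on the parameter in builtin.go
+// combined with the builtin's name yields kindChan|kindSlice|kindMap,
+// so candidates of those kinds are preferred.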
+func (c *completer) builtinArgKind(ctx context.Context, obj types.Object, call *ast.CallExpr) objKind {
+ builtin, err := c.snapshot.BuiltinFile(ctx)
+ if err != nil {
+ return 0
+ }
+ exprIdx := exprAtPos(c.pos, call.Args)
+
+ builtinObj := builtin.File.Scope.Lookup(obj.Name())
+ if builtinObj == nil {
+ return 0
+ }
+ decl, ok := builtinObj.Decl.(*ast.FuncDecl)
+ if !ok || exprIdx >= len(decl.Type.Params.List) {
+ return 0
+ }
+
+ switch ptyp := decl.Type.Params.List[exprIdx].Type.(type) {
+ case *ast.ChanType:
+ return kindChan
+ case *ast.ArrayType:
+ return kindSlice
+ case *ast.MapType:
+ return kindMap
+ case *ast.Ident:
+ switch ptyp.Name {
+ case "Type":
+ switch obj.Name() {
+ case "make":
+ return kindChan | kindSlice | kindMap
+ case "len":
+ return kindSlice | kindMap | kindArray | kindString | kindChan
+ case "cap":
+ return kindSlice | kindArray | kindChan
+ }
+ }
+ }
+
+ return 0
+}
+
+// builtinArgType infers the type of an argument to a builtin
+// function. parentInf is the inferred type info for the builtin
+// call's parent node.
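+//
+// For example (an illustrative sketch), given
+//
+//	var s []int
+//	s = append(s, <>)
+//
+// completing at <> infers objType int (the element type of s) and marks
+// the inference variadic.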
+func (c *completer) builtinArgType(obj types.Object, call *ast.CallExpr, parentInf candidateInference) candidateInference {
+ var (
+ exprIdx = exprAtPos(c.pos, call.Args)
+
+ // Propagate certain properties from our parent's inference.
+ inf = candidateInference{
+ typeName: parentInf.typeName,
+ modifiers: parentInf.modifiers,
+ }
+ )
+
+ switch obj.Name() {
+ case "append":
+ if exprIdx <= 0 {
+ // Infer first append() arg type as apparent return type of
+ // append().
+ inf.objType = parentInf.objType
+ if parentInf.variadic {
+ inf.objType = types.NewSlice(inf.objType)
+ }
+ break
+ }
+
+ // For non-initial append() args, infer slice type from the first
+ // append() arg, or from parent context.
+ if len(call.Args) > 0 {
+ inf.objType = c.pkg.GetTypesInfo().TypeOf(call.Args[0])
+ }
+ if inf.objType == nil {
+ inf.objType = parentInf.objType
+ }
+ if inf.objType == nil {
+ break
+ }
+
+ inf.objType = deslice(inf.objType)
+
+ // Check if we are completing the variadic append() param.
+ inf.variadic = exprIdx == 1 && len(call.Args) <= 2
+
+ // Penalize the first append() argument as a candidate. You
+ // don't normally append a slice to itself.
+ if sliceChain := objChain(c.pkg.GetTypesInfo(), call.Args[0]); len(sliceChain) > 0 {
+ inf.penalized = append(inf.penalized, penalizedObj{objChain: sliceChain, penalty: 0.9})
+ }
+ case "delete":
+ if exprIdx > 0 && len(call.Args) > 0 {
+ // Try to fill in expected type of map key.
+ firstArgType := c.pkg.GetTypesInfo().TypeOf(call.Args[0])
+ if firstArgType != nil {
+ if mt, ok := firstArgType.Underlying().(*types.Map); ok {
+ inf.objType = mt.Key()
+ }
+ }
+ }
+ case "copy":
+ var t1, t2 types.Type
+ if len(call.Args) > 0 {
+ t1 = c.pkg.GetTypesInfo().TypeOf(call.Args[0])
+ if len(call.Args) > 1 {
+ t2 = c.pkg.GetTypesInfo().TypeOf(call.Args[1])
+ }
+ }
+
+ // Fill in expected type of either arg if the other is already present.
+ if exprIdx == 1 && t1 != nil {
+ inf.objType = t1
+ } else if exprIdx == 0 && t2 != nil {
+ inf.objType = t2
+ }
+ case "new":
+ inf.typeName.wantTypeName = true
+ if parentInf.objType != nil {
+ // Expected type for "new" is the de-pointered parent type.
+ if ptr, ok := parentInf.objType.Underlying().(*types.Pointer); ok {
+ inf.objType = ptr.Elem()
+ }
+ }
+ case "make":
+ if exprIdx == 0 {
+ inf.typeName.wantTypeName = true
+ inf.objType = parentInf.objType
+ } else {
+ inf.objType = types.Typ[types.UntypedInt]
+ }
+ }
+
+ return inf
+}
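+
+// For example, when completing the second argument of
+//
+// s := []int{1}
+// s = append(s, <>)
+//
+// exprIdx is 1, so builtinArgType infers int (the desliced type of s) as
+// objType, marks the inference variadic, and penalizes s itself as a
+// candidate, since one rarely appends a slice to itself.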
diff --git a/gopls/internal/lsp/source/completion/completion.go b/gopls/internal/lsp/source/completion/completion.go
new file mode 100644
index 000000000..f8c7654f6
--- /dev/null
+++ b/gopls/internal/lsp/source/completion/completion.go
@@ -0,0 +1,3252 @@
+// Copyright 2018 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Package completion provides core functionality for code completion in Go
+// editors and tools.
+package completion
+
+import (
+ "bytes"
+ "context"
+ "fmt"
+ "go/ast"
+ "go/constant"
+ "go/parser"
+ "go/scanner"
+ "go/token"
+ "go/types"
+ "math"
+ "sort"
+ "strconv"
+ "strings"
+ "sync"
+ "sync/atomic"
+ "time"
+ "unicode"
+
+ "golang.org/x/sync/errgroup"
+ "golang.org/x/tools/go/ast/astutil"
+ "golang.org/x/tools/gopls/internal/lsp/protocol"
+ "golang.org/x/tools/gopls/internal/lsp/safetoken"
+ "golang.org/x/tools/gopls/internal/lsp/snippet"
+ "golang.org/x/tools/gopls/internal/lsp/source"
+ "golang.org/x/tools/gopls/internal/span"
+ "golang.org/x/tools/internal/event"
+ "golang.org/x/tools/internal/fuzzy"
+ "golang.org/x/tools/internal/imports"
+ "golang.org/x/tools/internal/typeparams"
+)
+
+// A CompletionItem represents a possible completion suggested by the algorithm.
+type CompletionItem struct {
+
+ // Invariant: CompletionItem does not refer to syntax or types.
+
+ // Label is the primary text the user sees for this completion item.
+ Label string
+
+ // Detail is supplemental information to present to the user.
+ // This often contains the type or return type of the completion item.
+ Detail string
+
+ // InsertText is the text to insert if this item is selected.
+ // The prefix that has already been typed is not trimmed from it.
+ // The insert text does not contain snippets.
+ InsertText string
+
+ Kind protocol.CompletionItemKind
+ Tags []protocol.CompletionItemTag
+ Deprecated bool // Deprecated, prefer Tags if available
+
+ // An optional array of additional TextEdits that are applied when
+ // selecting this completion.
+ //
+ // Additional text edits should be used to change text unrelated to the current cursor position
+ // (for example adding an import statement at the top of the file if the completion item will
+ // insert an unqualified type).
+ AdditionalTextEdits []protocol.TextEdit
+
+ // Depth is how many levels were searched to find this completion.
+ // For example when completing "foo<>", "fooBar" is depth 0, and
+ // "fooBar.Baz" is depth 1.
+ Depth int
+
+ // Score is the internal relevance score.
+ // A higher score indicates that this completion item is more relevant.
+ Score float64
+
+ // snippet is the LSP snippet for the completion item. The LSP
+ // specification contains details about LSP snippets. For example, a
+ // snippet for a function with the following signature:
+ //
+ // func foo(a, b, c int)
+ //
+ // would be:
+ //
+ // foo(${1:a int}, ${2: b int}, ${3: c int})
+ //
+ // If Placeholders is false in the CompletionOptions, the above
+ // snippet would instead be:
+ //
+ // foo(${1:})
+ snippet *snippet.Builder
+
+ // Documentation is the documentation for the completion item.
+ Documentation string
+
+ // isSlice reports whether the underlying type of the object
+ // from which this candidate was derived is a slice.
+ // (Used to complete append() calls.)
+ isSlice bool
+}
+
+// completionOptions holds completion specific configuration.
+type completionOptions struct {
+ unimported bool
+ documentation bool
+ fullDocumentation bool
+ placeholders bool
+ literal bool
+ snippets bool
+ postfix bool
+ matcher source.Matcher
+ budget time.Duration
+}
+
+// Snippet is a convenience function that returns the snippet if available,
+// otherwise the InsertText. It determines the text inserted for an item,
+// depending on whether the callee wants placeholders or not.
+func (i *CompletionItem) Snippet() string {
+ if i.snippet != nil {
+ return i.snippet.String()
+ }
+ return i.InsertText
+}
+
+// Scoring constants are used for weighting the relevance of different candidates.
+const (
+ // stdScore is the base score for all completion items.
+ stdScore float64 = 1.0
+
+ // highScore indicates a very relevant completion item.
+ highScore float64 = 10.0
+
+ // lowScore indicates an irrelevant or not useful completion item.
+ lowScore float64 = 0.01
+)
+
+// matcher matches a candidate's label against the user input. The
+// returned score reflects the quality of the match. A score of zero
+// indicates no match, and a score of one means a perfect match.
+type matcher interface {
+ Score(candidateLabel string) (score float32)
+}
+
+// prefixMatcher implements case sensitive prefix matching.
+type prefixMatcher string
+
+func (pm prefixMatcher) Score(candidateLabel string) float32 {
+ if strings.HasPrefix(candidateLabel, string(pm)) {
+ return 1
+ }
+ return -1
+}
+
+// insensitivePrefixMatcher implements case insensitive prefix matching.
+type insensitivePrefixMatcher string
+
+func (ipm insensitivePrefixMatcher) Score(candidateLabel string) float32 {
+ if strings.HasPrefix(strings.ToLower(candidateLabel), string(ipm)) {
+ return 1
+ }
+ return -1
+}
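+
+// For example:
+//
+// prefixMatcher("Foo").Score("FooBar") // 1 (match)
+// prefixMatcher("foo").Score("FooBar") // -1 (case mismatch)
+// insensitivePrefixMatcher("foo").Score("FooBar") // 1 (case-folded match)
+//
+// Note that insensitivePrefixMatcher assumes its pattern is already
+// lowercase, as arranged by setMatcherFromPrefix below.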
+
+// completer contains the necessary information for a single completion request.
+type completer struct {
+ snapshot source.Snapshot
+ pkg source.Package
+ qf types.Qualifier // for qualifying typed expressions
+ mq source.MetadataQualifier // for syntactic qualifying
+ opts *completionOptions
+
+ // completionContext contains information about the trigger for this
+ // completion request.
+ completionContext completionContext
+
+ // fh is a handle to the file associated with this completion request.
+ fh source.FileHandle
+
+ // filename is the name of the file associated with this completion request.
+ filename string
+
+ // file is the AST of the file associated with this completion request.
+ file *ast.File
+
+ // (tokFile, pos) is the position at which the request was triggered.
+ tokFile *token.File
+ pos token.Pos
+
+ // path is the path of AST nodes enclosing the position.
+ path []ast.Node
+
+ // seen is the map that ensures we do not return duplicate results.
+ seen map[types.Object]bool
+
+ // items is the list of completion items returned.
+ items []CompletionItem
+
+ // completionCallbacks is a list of callbacks to collect completions that
+ // require expensive operations. This includes operations where we search
+ // through the entire module cache.
+ completionCallbacks []func(opts *imports.Options) error
+
+ // surrounding describes the identifier surrounding the position.
+ surrounding *Selection
+
+ // inference contains information we've inferred about ideal
+ // candidates such as the candidate's type.
+ inference candidateInference
+
+ // enclosingFunc contains information about the function enclosing
+ // the position.
+ enclosingFunc *funcInfo
+
+ // enclosingCompositeLiteral contains information about the composite literal
+ // enclosing the position.
+ enclosingCompositeLiteral *compLitInfo
+
+ // deepState contains the current state of our deep completion search.
+ deepState deepCompletionState
+
+ // matcher matches the candidates against the surrounding prefix.
+ matcher matcher
+
+ // methodSetCache caches the types.NewMethodSet call, which is relatively
+ // expensive and can be called many times for the same type while searching
+ // for deep completions.
+ methodSetCache map[methodSetKey]*types.MethodSet
+
+ // mapper converts the positions in the file from which the completion originated.
+ mapper *protocol.Mapper
+
+ // startTime is when we started processing this completion request. It does
+ // not include any time the request spent in the queue.
+ startTime time.Time
+
+ // scopes contains all scopes defined by nodes in our path,
+ // including nil values for nodes that don't define a scope. It
+ // also includes our package scope and the universe scope at the
+ // end.
+ scopes []*types.Scope
+}
+
+// funcInfo holds info about a function object.
+type funcInfo struct {
+ // sig is the function declaration enclosing the position.
+ sig *types.Signature
+
+ // body is the function's body.
+ body *ast.BlockStmt
+}
+
+type compLitInfo struct {
+ // cl is the *ast.CompositeLit enclosing the position.
+ cl *ast.CompositeLit
+
+ // clType is the type of cl.
+ clType types.Type
+
+ // kv is the *ast.KeyValueExpr enclosing the position, if any.
+ kv *ast.KeyValueExpr
+
+ // inKey is true if we are certain the position is in the key side
+ // of a key-value pair.
+ inKey bool
+
+ // maybeInFieldName is true if inKey is false and it is possible
+ // we are completing a struct field name. For example,
+ // "SomeStruct{<>}" will be inKey=false, but maybeInFieldName=true
+ // because we _could_ be completing a field name.
+ maybeInFieldName bool
+}
+
+type importInfo struct {
+ importPath string
+ name string
+}
+
+type methodSetKey struct {
+ typ types.Type
+ addressable bool
+}
+
+type completionContext struct {
+ // triggerCharacter is the character used to trigger completion at current
+ // position, if any.
+ triggerCharacter string
+
+ // triggerKind is information about how a completion was triggered.
+ triggerKind protocol.CompletionTriggerKind
+
+ // commentCompletion is true if we are completing a comment.
+ commentCompletion bool
+
+ // packageCompletion is true if we are completing a package name.
+ packageCompletion bool
+}
+
+// A Selection represents the cursor position and surrounding identifier.
+type Selection struct {
+ content string
+ tokFile *token.File
+ start, end, cursor token.Pos // relative to tokFile
+ mapper *protocol.Mapper
+}
+
+func (p Selection) Content() string {
+ return p.content
+}
+
+func (p Selection) Range() (protocol.Range, error) {
+ return p.mapper.PosRange(p.tokFile, p.start, p.end)
+}
+
+func (p Selection) Prefix() string {
+ return p.content[:p.cursor-p.start]
+}
+
+func (p Selection) Suffix() string {
+ return p.content[p.cursor-p.start:]
+}
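+
+// For example, for a Selection over the identifier "foobar" with the cursor
+// between "foo" and "bar", Prefix returns "foo" and Suffix returns "bar".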
+
+func (c *completer) setSurrounding(ident *ast.Ident) {
+ if c.surrounding != nil {
+ return
+ }
+ if !(ident.Pos() <= c.pos && c.pos <= ident.End()) {
+ return
+ }
+
+ c.surrounding = &Selection{
+ content: ident.Name,
+ cursor: c.pos,
+ // Overwrite the prefix only.
+ tokFile: c.tokFile,
+ start: ident.Pos(),
+ end: ident.End(),
+ mapper: c.mapper,
+ }
+
+ c.setMatcherFromPrefix(c.surrounding.Prefix())
+}
+
+func (c *completer) setMatcherFromPrefix(prefix string) {
+ switch c.opts.matcher {
+ case source.Fuzzy:
+ c.matcher = fuzzy.NewMatcher(prefix)
+ case source.CaseSensitive:
+ c.matcher = prefixMatcher(prefix)
+ default:
+ c.matcher = insensitivePrefixMatcher(strings.ToLower(prefix))
+ }
+}
+
+func (c *completer) getSurrounding() *Selection {
+ if c.surrounding == nil {
+ c.surrounding = &Selection{
+ content: "",
+ cursor: c.pos,
+ tokFile: c.tokFile,
+ start: c.pos,
+ end: c.pos,
+ mapper: c.mapper,
+ }
+ }
+ return c.surrounding
+}
+
+// candidate represents a completion candidate.
+type candidate struct {
+ // obj is the types.Object to complete to.
+ // TODO(adonovan): eliminate dependence on go/types throughout this struct.
+ obj types.Object
+
+ // score is used to rank candidates.
+ score float64
+
+ // name is the deep object name path, e.g. "foo.bar"
+ name string
+
+ // detail is additional information about this item. If not specified,
+ // defaults to type string for the object.
+ detail string
+
+ // path holds the path from the search root (excluding the candidate
+ // itself) for a deep candidate.
+ path []types.Object
+
+ // pathInvokeMask is a bit mask tracking whether each entry in path
+ // should be formatted with "()" (i.e. whether it is a function
+ // invocation).
+ pathInvokeMask uint16
+
+ // mods contains modifications that should be applied to the
+ // candidate when inserted. For example, "foo" may be inserted as
+ // "*foo" or "foo()".
+ mods []typeModKind
+
+ // addressable is true if a pointer can be taken to the candidate.
+ addressable bool
+
+ // convertTo is a type that this candidate should be cast to. For
+ // example, if convertTo is float64, "foo" should be formatted as
+ // "float64(foo)".
+ convertTo types.Type
+
+ // imp is the import that needs to be added to this package in order
+ // for this candidate to be valid. nil if no import needed.
+ imp *importInfo
+}
+
+func (c candidate) hasMod(mod typeModKind) bool {
+ for _, m := range c.mods {
+ if m == mod {
+ return true
+ }
+ }
+ return false
+}
+
+// ErrIsDefinition is an error that informs the user they got no
+// completions because they tried to complete the name of a new object
+// being defined.
+type ErrIsDefinition struct {
+ objStr string
+}
+
+func (e ErrIsDefinition) Error() string {
+ msg := "this is a definition"
+ if e.objStr != "" {
+ msg += " of " + e.objStr
+ }
+ return msg
+}
+
+// Completion returns a list of possible candidates for completion, given
+// a file and a position.
+//
+// The selection is computed based on the preceding identifier and can be used by
+// the client to score the quality of the completion. For instance, some clients
+// may tolerate imperfect matches as valid completion results, since users may make typos.
+func Completion(ctx context.Context, snapshot source.Snapshot, fh source.FileHandle, protoPos protocol.Position, protoContext protocol.CompletionContext) ([]CompletionItem, *Selection, error) {
+ ctx, done := event.Start(ctx, "completion.Completion")
+ defer done()
+
+ startTime := time.Now()
+
+ pkg, pgf, err := source.PackageForFile(ctx, snapshot, fh.URI(), source.NarrowestPackage)
+ if err != nil || pgf.File.Package == token.NoPos {
+ // If we can't parse this file or find position for the package
+ // keyword, it may be missing a package declaration. Try offering
+ // suggestions for the package declaration.
+ // Note that this would be the case even if the keyword 'package' is
+ // present but no package name exists.
+ items, surrounding, innerErr := packageClauseCompletions(ctx, snapshot, fh, protoPos)
+ if innerErr != nil {
+ // Return the error for PackageForFile, since it's more relevant in this situation.
+ return nil, nil, fmt.Errorf("getting file %s for Completion: %w (package completions: %v)", fh.URI(), err, innerErr)
+ }
+ return items, surrounding, nil
+ }
+ pos, err := pgf.PositionPos(protoPos)
+ if err != nil {
+ return nil, nil, err
+ }
+ // Completion is based on what precedes the cursor.
+ // Find the path to the position before pos.
+ path, _ := astutil.PathEnclosingInterval(pgf.File, pos-1, pos-1)
+ if path == nil {
+ return nil, nil, fmt.Errorf("cannot find node enclosing position")
+ }
+
+ // Check if completion at this position is valid. If not, return early.
+ switch n := path[0].(type) {
+ case *ast.BasicLit:
+ // Skip completion inside literals except for ImportSpec
+ if len(path) > 1 {
+ if _, ok := path[1].(*ast.ImportSpec); ok {
+ break
+ }
+ }
+ return nil, nil, nil
+ case *ast.CallExpr:
+ if n.Ellipsis.IsValid() && pos > n.Ellipsis && pos <= n.Ellipsis+token.Pos(len("...")) {
+ // Don't offer completions inside or directly after "...". For
+ // example, don't offer completions at "<>" in "foo(bar...<>").
+ return nil, nil, nil
+ }
+ case *ast.Ident:
+ // reject defining identifiers
+ if obj, ok := pkg.GetTypesInfo().Defs[n]; ok {
+ if v, ok := obj.(*types.Var); ok && v.IsField() && v.Embedded() {
+ // An anonymous field is also a reference to a type.
+ } else if pgf.File.Name == n {
+ // Don't skip completions if Ident is for package name.
+ break
+ } else {
+ objStr := ""
+ if obj != nil {
+ qual := types.RelativeTo(pkg.GetTypes())
+ objStr = types.ObjectString(obj, qual)
+ }
+ ans, sel := definition(path, obj, pgf)
+ if ans != nil {
+ sort.Slice(ans, func(i, j int) bool {
+ return ans[i].Score > ans[j].Score
+ })
+ return ans, sel, nil
+ }
+ return nil, nil, ErrIsDefinition{objStr: objStr}
+ }
+ }
+ }
+
+ // Collect all surrounding scopes, innermost first.
+ scopes := source.CollectScopes(pkg.GetTypesInfo(), path, pos)
+ scopes = append(scopes, pkg.GetTypes().Scope(), types.Universe)
+
+ opts := snapshot.View().Options()
+ c := &completer{
+ pkg: pkg,
+ snapshot: snapshot,
+ qf: source.Qualifier(pgf.File, pkg.GetTypes(), pkg.GetTypesInfo()),
+ mq: source.MetadataQualifierForFile(snapshot, pgf.File, pkg.Metadata()),
+ completionContext: completionContext{
+ triggerCharacter: protoContext.TriggerCharacter,
+ triggerKind: protoContext.TriggerKind,
+ },
+ fh: fh,
+ filename: fh.URI().Filename(),
+ tokFile: pgf.Tok,
+ file: pgf.File,
+ path: path,
+ pos: pos,
+ seen: make(map[types.Object]bool),
+ enclosingFunc: enclosingFunction(path, pkg.GetTypesInfo()),
+ enclosingCompositeLiteral: enclosingCompositeLiteral(path, pos, pkg.GetTypesInfo()),
+ deepState: deepCompletionState{
+ enabled: opts.DeepCompletion,
+ },
+ opts: &completionOptions{
+ matcher: opts.Matcher,
+ unimported: opts.CompleteUnimported,
+ documentation: opts.CompletionDocumentation && opts.HoverKind != source.NoDocumentation,
+ fullDocumentation: opts.HoverKind == source.FullDocumentation,
+ placeholders: opts.UsePlaceholders,
+ literal: opts.LiteralCompletions && opts.InsertTextFormat == protocol.SnippetTextFormat,
+ budget: opts.CompletionBudget,
+ snippets: opts.InsertTextFormat == protocol.SnippetTextFormat,
+ postfix: opts.ExperimentalPostfixCompletions,
+ },
+ // default to a matcher that always matches
+ matcher: prefixMatcher(""),
+ methodSetCache: make(map[methodSetKey]*types.MethodSet),
+ mapper: pgf.Mapper,
+ startTime: startTime,
+ scopes: scopes,
+ }
+
+ var cancel context.CancelFunc
+ if c.opts.budget == 0 {
+ ctx, cancel = context.WithCancel(ctx)
+ } else {
+ // timeoutDuration is the completion budget remaining. If less than
+ // 10ms, set to 10ms
+ timeoutDuration := time.Until(c.startTime.Add(c.opts.budget))
+ if timeoutDuration < 10*time.Millisecond {
+ timeoutDuration = 10 * time.Millisecond
+ }
+ ctx, cancel = context.WithTimeout(ctx, timeoutDuration)
+ }
+ defer cancel()
+
+ if surrounding := c.containingIdent(pgf.Src); surrounding != nil {
+ c.setSurrounding(surrounding)
+ }
+
+ c.inference = expectedCandidate(ctx, c)
+
+ err = c.collectCompletions(ctx)
+ if err != nil {
+ return nil, nil, err
+ }
+
+ // Deep search collected candidates and their members for more candidates.
+ c.deepSearch(ctx)
+
+ for _, callback := range c.completionCallbacks {
+ if err := c.snapshot.RunProcessEnvFunc(ctx, callback); err != nil {
+ return nil, nil, err
+ }
+ }
+
+ // Search candidates populated by expensive operations like
+ // unimportedMembers etc. for more completion items.
+ c.deepSearch(ctx)
+
+ // Statement candidates offer an entire statement in certain contexts, as
+ // opposed to a single object. Add statement candidates last because they
+ // depend on other candidates having already been collected.
+ c.addStatementCandidates()
+
+ c.sortItems()
+ return c.items, c.getSurrounding(), nil
+}
+
+// collectCompletions adds possible completion candidates to either the deep
+// search queue or completion items directly for different completion contexts.
+func (c *completer) collectCompletions(ctx context.Context) error {
+ // Inside import blocks, return completions for unimported packages.
+ for _, importSpec := range c.file.Imports {
+ if !(importSpec.Path.Pos() <= c.pos && c.pos <= importSpec.Path.End()) {
+ continue
+ }
+ return c.populateImportCompletions(ctx, importSpec)
+ }
+
+ // Inside comments, offer completions for the name of the relevant symbol.
+ for _, comment := range c.file.Comments {
+ if comment.Pos() < c.pos && c.pos <= comment.End() {
+ c.populateCommentCompletions(ctx, comment)
+ return nil
+ }
+ }
+
+ // Struct literals are handled entirely separately.
+ if c.wantStructFieldCompletions() {
+ // If we are definitely completing a struct field name, deep completions
+ // don't make sense.
+ if c.enclosingCompositeLiteral.inKey {
+ c.deepState.enabled = false
+ }
+ return c.structLiteralFieldName(ctx)
+ }
+
+ if lt := c.wantLabelCompletion(); lt != labelNone {
+ c.labels(lt)
+ return nil
+ }
+
+ if c.emptySwitchStmt() {
+ // Empty switch statements only admit "default" and "case" keywords.
+ c.addKeywordItems(map[string]bool{}, highScore, CASE, DEFAULT)
+ return nil
+ }
+
+ switch n := c.path[0].(type) {
+ case *ast.Ident:
+ if c.file.Name == n {
+ return c.packageNameCompletions(ctx, c.fh.URI(), n)
+ } else if sel, ok := c.path[1].(*ast.SelectorExpr); ok && sel.Sel == n {
+ // Is this the Sel part of a selector?
+ return c.selector(ctx, sel)
+ }
+ return c.lexical(ctx)
+ // The function name hasn't been typed yet, but the parens are there:
+ // recv.‸(arg)
+ case *ast.TypeAssertExpr:
+ // Create a fake selector expression.
+ return c.selector(ctx, &ast.SelectorExpr{X: n.X})
+ case *ast.SelectorExpr:
+ return c.selector(ctx, n)
+ // At the file scope, only keywords are allowed.
+ case *ast.BadDecl, *ast.File:
+ c.addKeywordCompletions()
+ default:
+ // fallback to lexical completions
+ return c.lexical(ctx)
+ }
+
+ return nil
+}
+
+// containingIdent returns the *ast.Ident containing pos, if any. It
+// synthesizes an *ast.Ident to allow completion in the face of
+// certain syntax errors.
+func (c *completer) containingIdent(src []byte) *ast.Ident {
+ // In the normal case, our leaf AST node is the identifier being completed.
+ if ident, ok := c.path[0].(*ast.Ident); ok {
+ return ident
+ }
+
+ pos, tkn, lit := c.scanToken(src)
+ if !pos.IsValid() {
+ return nil
+ }
+
+ fakeIdent := &ast.Ident{Name: lit, NamePos: pos}
+
+ if _, isBadDecl := c.path[0].(*ast.BadDecl); isBadDecl {
+ // You don't get *ast.Idents at the file level, so look for bad
+ // decls and use the manually extracted token.
+ return fakeIdent
+ } else if c.emptySwitchStmt() {
+ // Only keywords are allowed in empty switch statements.
+ // *ast.Idents are not parsed, so we must use the manually
+ // extracted token.
+ return fakeIdent
+ } else if tkn.IsKeyword() {
+ // Otherwise, manually extract the prefix if our containing token
+ // is a keyword. This improves completion after an "accidental
+ // keyword", e.g. completing to "variance" in "someFunc(var<>)".
+ return fakeIdent
+ }
+
+ return nil
+}
+
+// scanToken scans the file's contents for the token containing pos.
+func (c *completer) scanToken(contents []byte) (token.Pos, token.Token, string) {
+ tok := c.pkg.FileSet().File(c.pos)
+
+ var s scanner.Scanner
+ s.Init(tok, contents, nil, 0)
+ for {
+ tknPos, tkn, lit := s.Scan()
+ if tkn == token.EOF || tknPos >= c.pos {
+ return token.NoPos, token.ILLEGAL, ""
+ }
+
+ if len(lit) > 0 && tknPos <= c.pos && c.pos <= tknPos+token.Pos(len(lit)) {
+ return tknPos, tkn, lit
+ }
+ }
+}
+
+func (c *completer) sortItems() {
+ sort.SliceStable(c.items, func(i, j int) bool {
+ // Sort by score first.
+ if c.items[i].Score != c.items[j].Score {
+ return c.items[i].Score > c.items[j].Score
+ }
+
+ // Then sort by label so order stays consistent. This also has the
+ // effect of preferring shorter candidates.
+ return c.items[i].Label < c.items[j].Label
+ })
+}
+
+// emptySwitchStmt reports whether pos is in an empty switch or select
+// statement.
+func (c *completer) emptySwitchStmt() bool {
+ block, ok := c.path[0].(*ast.BlockStmt)
+ if !ok || len(block.List) > 0 || len(c.path) == 1 {
+ return false
+ }
+
+ switch c.path[1].(type) {
+ case *ast.SwitchStmt, *ast.TypeSwitchStmt, *ast.SelectStmt:
+ return true
+ default:
+ return false
+ }
+}
+
+// populateImportCompletions yields completions for an import path around the cursor.
+//
+// Completions are suggested at the directory depth of the given import path so
+// that we don't overwhelm the user with a large list of possibilities. As an
+// example, a completion for the prefix "golang" results in "golang.org/".
+// Completions for "golang.org/" yield its subdirectories
+// (i.e. "golang.org/x/"). The user is meant to accept completion suggestions
+// until they reach a complete import path.
+func (c *completer) populateImportCompletions(ctx context.Context, searchImport *ast.ImportSpec) error {
+ if !strings.HasPrefix(searchImport.Path.Value, `"`) {
+ return nil
+ }
+
+ // deepSearch is not valuable for import completions.
+ c.deepState.enabled = false
+
+ importPath := searchImport.Path.Value
+
+ // Extract the text between the quotes (if any) in an import spec.
+ // prefix is the part of import path before the cursor.
+ prefixEnd := c.pos - searchImport.Path.Pos()
+ prefix := strings.Trim(importPath[:prefixEnd], `"`)
+
+ // The number of directories in the import path gives us the depth at
+ // which to search.
+ depth := len(strings.Split(prefix, "/")) - 1
+
+ content := importPath
+ start, end := searchImport.Path.Pos(), searchImport.Path.End()
+ namePrefix, nameSuffix := `"`, `"`
+ // If a starting quote is present, adjust surrounding to either after the
+ // cursor or after the first slash (/), except if cursor is at the starting
+ // quote. Otherwise we provide a completion including the starting quote.
+ if strings.HasPrefix(importPath, `"`) && c.pos > searchImport.Path.Pos() {
+ content = content[1:]
+ start++
+ if depth > 0 {
+ // Adjust textEdit start to the replacement range. For example: if the
+ // current path is "golang.org/x/to<>ols/internal/", where <> is the
+ // cursor position, the start of the replacement range would be after
+ // "golang.org/x/".
+ path := strings.SplitAfter(prefix, "/")
+ numChars := len(strings.Join(path[:len(path)-1], ""))
+ content = content[numChars:]
+ start += token.Pos(numChars)
+ }
+ namePrefix = ""
+ }
+
+ // We won't provide an ending quote if one is already present, except if
+ // the cursor is after the ending quote but still in the import spec. This
+ // is because the cursor has to be in our textEdit range.
+ if strings.HasSuffix(importPath, `"`) && c.pos < searchImport.Path.End() {
+ end--
+ content = content[:len(content)-1]
+ nameSuffix = ""
+ }
+
+ c.surrounding = &Selection{
+ content: content,
+ cursor: c.pos,
+ tokFile: c.tokFile,
+ start: start,
+ end: end,
+ mapper: c.mapper,
+ }
+
+ seenImports := make(map[string]struct{})
+ for _, importSpec := range c.file.Imports {
+ if importSpec.Path.Value == importPath {
+ continue
+ }
+ seenImportPath, err := strconv.Unquote(importSpec.Path.Value)
+ if err != nil {
+ return err
+ }
+ seenImports[seenImportPath] = struct{}{}
+ }
+
+ var mu sync.Mutex // guard c.items locally, since searchImports is called in parallel
+ seen := make(map[string]struct{})
+ searchImports := func(pkg imports.ImportFix) {
+ path := pkg.StmtInfo.ImportPath
+ if _, ok := seenImports[path]; ok {
+ return
+ }
+
+ // Any package path containing fewer directories than the search
+ // prefix is not a match.
+ pkgDirList := strings.Split(path, "/")
+ if len(pkgDirList) < depth+1 {
+ return
+ }
+ pkgToConsider := strings.Join(pkgDirList[:depth+1], "/")
+
+ name := pkgDirList[depth]
+ // if we're adding an opening quote to completion too, set name to full
+ // package path since we'll need to overwrite that range.
+ if namePrefix == `"` {
+ name = pkgToConsider
+ }
+
+ score := pkg.Relevance
+ if len(pkgDirList)-1 == depth {
+ score *= highScore
+ } else {
+ // For incomplete package paths, add a terminal slash to indicate that the
+ // user should keep triggering completions.
+ name += "/"
+ pkgToConsider += "/"
+ }
+
+ if _, ok := seen[pkgToConsider]; ok {
+ return
+ }
+ seen[pkgToConsider] = struct{}{}
+
+ mu.Lock()
+ defer mu.Unlock()
+
+ name = namePrefix + name + nameSuffix
+ obj := types.NewPkgName(0, nil, name, types.NewPackage(pkgToConsider, name))
+ c.deepState.enqueue(candidate{
+ obj: obj,
+ detail: fmt.Sprintf("%q", pkgToConsider),
+ score: score,
+ })
+ }
+
+ c.completionCallbacks = append(c.completionCallbacks, func(opts *imports.Options) error {
+ return imports.GetImportPaths(ctx, searchImports, prefix, c.filename, c.pkg.GetTypes().Name(), opts.Env)
+ })
+ return nil
+}
+
+// populateCommentCompletions yields completions for comments preceding or in declarations.
+func (c *completer) populateCommentCompletions(ctx context.Context, comment *ast.CommentGroup) {
+ // If the completion was triggered by a period, ignore it. These types of
+ // completions will not be useful in comments.
+ if c.completionContext.triggerCharacter == "." {
+ return
+ }
+
+ // Using the comment's end position, find the containing token.File.
+ file := c.pkg.FileSet().File(comment.End())
+ if file == nil {
+ return
+ }
+
+ // Deep completion doesn't work properly in comments since we don't
+ // have a type object to complete further.
+ c.deepState.enabled = false
+ c.completionContext.commentCompletion = true
+
+ // Documentation isn't useful in comments, since it might end up being the
+ // comment itself.
+ c.opts.documentation = false
+
+ commentLine := file.Line(comment.End())
+
+ // The comment is valid; set surrounding to the word boundaries around the cursor.
+ c.setSurroundingForComment(comment)
+
+ // Offer completions for declarations on the comment's line, on the
+ // next line, or enclosing the comment.
+ for _, n := range c.file.Decls {
+ declLine := file.Line(n.Pos())
+ // Skip if the comment is not in, directly above, or on the same line as the declaration.
+ if declLine != commentLine && declLine != commentLine+1 &&
+ !(n.Pos() <= comment.Pos() && comment.End() <= n.End()) {
+ continue
+ }
+ switch node := n.(type) {
+ // handle const, vars, and types
+ case *ast.GenDecl:
+ for _, spec := range node.Specs {
+ switch spec := spec.(type) {
+ case *ast.ValueSpec:
+ for _, name := range spec.Names {
+ if name.String() == "_" {
+ continue
+ }
+ obj := c.pkg.GetTypesInfo().ObjectOf(name)
+ c.deepState.enqueue(candidate{obj: obj, score: stdScore})
+ }
+ case *ast.TypeSpec:
+ // add TypeSpec fields to completion
+ switch typeNode := spec.Type.(type) {
+ case *ast.StructType:
+ c.addFieldItems(ctx, typeNode.Fields)
+ case *ast.FuncType:
+ c.addFieldItems(ctx, typeNode.Params)
+ c.addFieldItems(ctx, typeNode.Results)
+ case *ast.InterfaceType:
+ c.addFieldItems(ctx, typeNode.Methods)
+ }
+
+ if spec.Name.String() == "_" {
+ continue
+ }
+
+ obj := c.pkg.GetTypesInfo().ObjectOf(spec.Name)
+ // A type name should score higher than fields, but not highScore by
+ // default, since a field near the comment cursor gets highScore.
+ score := stdScore * 1.1
+ // If type declaration is on the line after comment, give it a highScore.
+ if declLine == commentLine+1 {
+ score = highScore
+ }
+
+ c.deepState.enqueue(candidate{obj: obj, score: score})
+ }
+ }
+ // handle functions
+ case *ast.FuncDecl:
+ c.addFieldItems(ctx, node.Recv)
+ c.addFieldItems(ctx, node.Type.Params)
+ c.addFieldItems(ctx, node.Type.Results)
+
+ // collect receiver struct fields
+ if node.Recv != nil {
+ for _, fields := range node.Recv.List {
+ for _, name := range fields.Names {
+ obj := c.pkg.GetTypesInfo().ObjectOf(name)
+ if obj == nil {
+ continue
+ }
+
+ recvType := obj.Type().Underlying()
+ if ptr, ok := recvType.(*types.Pointer); ok {
+ recvType = ptr.Elem()
+ }
+ recvStruct, ok := recvType.Underlying().(*types.Struct)
+ if !ok {
+ continue
+ }
+ for i := 0; i < recvStruct.NumFields(); i++ {
+ field := recvStruct.Field(i)
+ c.deepState.enqueue(candidate{obj: field, score: lowScore})
+ }
+ }
+ }
+ }
+
+ if node.Name.String() == "_" {
+ continue
+ }
+
+ obj := c.pkg.GetTypesInfo().ObjectOf(node.Name)
+ if obj == nil || obj.Pkg() != nil && obj.Pkg() != c.pkg.GetTypes() {
+ continue
+ }
+
+ c.deepState.enqueue(candidate{obj: obj, score: highScore})
+ }
+ }
+}
+
+// setSurroundingForComment sets the word boundaries surrounding the cursor in a comment.
+func (c *completer) setSurroundingForComment(comments *ast.CommentGroup) {
+ var cursorComment *ast.Comment
+ for _, comment := range comments.List {
+ if c.pos >= comment.Pos() && c.pos <= comment.End() {
+ cursorComment = comment
+ break
+ }
+ }
+ // if cursor isn't in the comment
+ if cursorComment == nil {
+ return
+ }
+
+ // index of cursor in comment text
+ cursorOffset := int(c.pos - cursorComment.Pos())
+ start, end := cursorOffset, cursorOffset
+ for start > 0 && isValidIdentifierChar(cursorComment.Text[start-1]) {
+ start--
+ }
+ for end < len(cursorComment.Text) && isValidIdentifierChar(cursorComment.Text[end]) {
+ end++
+ }
+
+ c.surrounding = &Selection{
+ content: cursorComment.Text[start:end],
+ cursor: c.pos,
+ tokFile: c.tokFile,
+ start: token.Pos(int(cursorComment.Slash) + start),
+ end: token.Pos(int(cursorComment.Slash) + end),
+ mapper: c.mapper,
+ }
+ c.setMatcherFromPrefix(c.surrounding.Prefix())
+}
+
+// isValidIdentifierChar reports whether a byte is a valid Go identifier
+// character, i.e. a Unicode letter, digit, or underscore.
+func isValidIdentifierChar(char byte) bool {
+ charRune := rune(char)
+ return unicode.In(charRune, unicode.Letter, unicode.Digit) || char == '_'
+}
+
+// addFieldItems adds struct fields, interface methods, and function
+// declaration fields to the completion candidates.
+func (c *completer) addFieldItems(ctx context.Context, fields *ast.FieldList) {
+ if fields == nil {
+ return
+ }
+
+ cursor := c.surrounding.cursor
+ for _, field := range fields.List {
+ for _, name := range field.Names {
+ if name.String() == "_" {
+ continue
+ }
+ obj := c.pkg.GetTypesInfo().ObjectOf(name)
+ if obj == nil {
+ continue
+ }
+
+ // if we're in a field comment/doc, score that field as more relevant
+ score := stdScore
+ if field.Comment != nil && field.Comment.Pos() <= cursor && cursor <= field.Comment.End() {
+ score = highScore
+ } else if field.Doc != nil && field.Doc.Pos() <= cursor && cursor <= field.Doc.End() {
+ score = highScore
+ }
+
+ c.deepState.enqueue(candidate{obj: obj, score: score})
+ }
+ }
+}
+
+func (c *completer) wantStructFieldCompletions() bool {
+ clInfo := c.enclosingCompositeLiteral
+ if clInfo == nil {
+ return false
+ }
+
+ return clInfo.isStruct() && (clInfo.inKey || clInfo.maybeInFieldName)
+}
+
+func (c *completer) wantTypeName() bool {
+ return !c.completionContext.commentCompletion && c.inference.typeName.wantTypeName
+}
+
+// See https://golang.org/issue/36001. Unimported completions are expensive.
+const (
+ maxUnimportedPackageNames = 5
+ unimportedMemberTarget = 100
+)
+
+// selector finds completions for the specified selector expression.
+func (c *completer) selector(ctx context.Context, sel *ast.SelectorExpr) error {
+ c.inference.objChain = objChain(c.pkg.GetTypesInfo(), sel.X)
+
+ // True selector?
+ if tv, ok := c.pkg.GetTypesInfo().Types[sel.X]; ok {
+ c.methodsAndFields(tv.Type, tv.Addressable(), nil, c.deepState.enqueue)
+ c.addPostfixSnippetCandidates(ctx, sel)
+ return nil
+ }
+
+ id, ok := sel.X.(*ast.Ident)
+ if !ok {
+ return nil
+ }
+
+ // Treat sel as a qualified identifier.
+ var filter func(*source.Metadata) bool
+ needImport := false
+ if pkgName, ok := c.pkg.GetTypesInfo().Uses[id].(*types.PkgName); ok {
+ // Qualified identifier with import declaration.
+ imp := pkgName.Imported()
+
+ // Known direct dependency? Expand using type information.
+ if _, ok := c.pkg.Metadata().DepsByPkgPath[source.PackagePath(imp.Path())]; ok {
+ c.packageMembers(imp, stdScore, nil, c.deepState.enqueue)
+ return nil
+ }
+
+ // Imported declaration with missing type information.
+ // Fall through to shallow completion of unimported package members.
+ // Match candidate packages by path.
+ // TODO(adonovan): simplify by merging with else case and matching on name only?
+ filter = func(m *source.Metadata) bool {
+ return strings.TrimPrefix(string(m.PkgPath), "vendor/") == imp.Path()
+ }
+ } else {
+ // Qualified identifier without import declaration.
+ // Match candidate packages by name.
+ filter = func(m *source.Metadata) bool {
+ return string(m.Name) == id.Name
+ }
+ needImport = true
+ }
+
+ // Search unimported packages.
+ if !c.opts.unimported {
+ return nil // feature disabled
+ }
+
+ // The deep completion algorithm is exceedingly complex and
+ // deeply coupled to the now obsolete notions that all
+ // token.Pos values can be interpreted by a single FileSet
+ // belonging to the Snapshot and that all types.Object values
+ // are canonicalized by a single types.Importer mapping.
+ // These invariants are no longer true now that gopls uses
+ // an incremental approach, parsing and type-checking each
+ // package separately.
+ //
+ // Consequently, completion of symbols defined in packages that
+ // are not currently imported by the query file cannot use the
+ // deep completion machinery which is based on type information.
+ // Instead it must use only syntax information from a quick
+ // parse of top-level declarations (but not function bodies).
+ //
+ // TODO(adonovan): rewrite the deep completion machinery to
+ // not assume global Pos/Object realms and then use export
+ // data instead of the quick parse approach taken here.
+
+ // First, we search among packages in the workspace.
+ // We'll use a fast parse to extract package members
+ // from those that match the name/path criterion.
+ all, err := c.snapshot.AllMetadata(ctx)
+ if err != nil {
+ return err
+ }
+ var paths []string
+ known := make(map[source.PackagePath][]*source.Metadata) // may include test variant
+ for _, m := range all {
+ if m.IsIntermediateTestVariant() || m.Name == "main" || !filter(m) {
+ continue
+ }
+ known[m.PkgPath] = append(known[m.PkgPath], m)
+ paths = append(paths, string(m.PkgPath))
+ }
+
+ // Rank import paths as goimports would.
+ var relevances map[string]float64
+ if len(paths) > 0 {
+ if err := c.snapshot.RunProcessEnvFunc(ctx, func(opts *imports.Options) error {
+ var err error
+ relevances, err = imports.ScoreImportPaths(ctx, opts.Env, paths)
+ return err
+ }); err != nil {
+ return err
+ }
+ sort.Slice(paths, func(i, j int) bool {
+ return relevances[paths[i]] > relevances[paths[j]]
+ })
+ }
+
+ // quickParse does a quick parse of a single file of package m,
+ // extracts exported package members and adds candidates to c.items.
+ var itemsMu sync.Mutex // guards c.items
+ var enough int32 // atomic bool
+ quickParse := func(uri span.URI, m *source.Metadata) error {
+ if atomic.LoadInt32(&enough) != 0 {
+ return nil
+ }
+
+ fh, err := c.snapshot.GetFile(ctx, uri)
+ if err != nil {
+ return err
+ }
+ content, err := fh.Read()
+ if err != nil {
+ return err
+ }
+ path := string(m.PkgPath)
+ forEachPackageMember(content, func(tok token.Token, id *ast.Ident, fn *ast.FuncDecl) {
+ if atomic.LoadInt32(&enough) != 0 {
+ return
+ }
+
+ if !id.IsExported() ||
+ sel.Sel.Name != "_" && !strings.HasPrefix(id.Name, sel.Sel.Name) {
+ return // not a match
+ }
+
+ // The only detail is the kind and package: `var (from "example.com/foo")`
+ // TODO(adonovan): pretty-print FuncDecl.FuncType or TypeSpec.Type?
+ item := CompletionItem{
+ Label: id.Name,
+ Detail: fmt.Sprintf("%s (from %q)", strings.ToLower(tok.String()), m.PkgPath),
+ InsertText: id.Name,
+ Score: unimportedScore(relevances[path]),
+ }
+ switch tok {
+ case token.FUNC:
+ item.Kind = protocol.FunctionCompletion
+ case token.VAR:
+ item.Kind = protocol.VariableCompletion
+ case token.CONST:
+ item.Kind = protocol.ConstantCompletion
+ case token.TYPE:
+ // Without types, we can't distinguish Class from Interface.
+ item.Kind = protocol.ClassCompletion
+ }
+
+ if needImport {
+ imp := &importInfo{importPath: path}
+ if imports.ImportPathToAssumedName(path) != string(m.Name) {
+ imp.name = string(m.Name)
+ }
+ item.AdditionalTextEdits, _ = c.importEdits(imp)
+ }
+
+ // For functions, add a parameter snippet.
+ if fn != nil {
+ var sn snippet.Builder
+ sn.WriteText(id.Name)
+ sn.WriteText("(")
+ var nparams int
+ for _, field := range fn.Type.Params.List {
+ if field.Names != nil {
+ nparams += len(field.Names)
+ } else {
+ nparams++
+ }
+ }
+ for i := 0; i < nparams; i++ {
+ if i > 0 {
+ sn.WriteText(", ")
+ }
+ sn.WritePlaceholder(nil)
+ }
+ sn.WriteText(")")
+ item.snippet = &sn
+ }
+
+ itemsMu.Lock()
+ c.items = append(c.items, item)
+ if len(c.items) >= unimportedMemberTarget {
+ atomic.StoreInt32(&enough, 1)
+ }
+ itemsMu.Unlock()
+ })
+ return nil
+ }
+
+ // Extract the package-level candidates using a quick parse.
+ var g errgroup.Group
+ for _, path := range paths {
+ for _, m := range known[source.PackagePath(path)] {
+ m := m
+ for _, uri := range m.CompiledGoFiles {
+ uri := uri
+ g.Go(func() error {
+ return quickParse(uri, m)
+ })
+ }
+ }
+ }
+ if err := g.Wait(); err != nil {
+ return err
+ }
+
+ // In addition, we search in the module cache using goimports.
+ ctx, cancel := context.WithCancel(ctx)
+ var mu sync.Mutex
+ add := func(pkgExport imports.PackageExport) {
+ mu.Lock()
+ defer mu.Unlock()
+ // TODO(adonovan): what if the actual package has a vendor/ prefix?
+ if _, ok := known[source.PackagePath(pkgExport.Fix.StmtInfo.ImportPath)]; ok {
+ return // We got this one above.
+ }
+
+ // Continue with untyped proposals.
+ pkg := types.NewPackage(pkgExport.Fix.StmtInfo.ImportPath, pkgExport.Fix.IdentName)
+ for _, export := range pkgExport.Exports {
+ score := unimportedScore(pkgExport.Fix.Relevance)
+ c.deepState.enqueue(candidate{
+ obj: types.NewVar(0, pkg, export, nil),
+ score: score,
+ imp: &importInfo{
+ importPath: pkgExport.Fix.StmtInfo.ImportPath,
+ name: pkgExport.Fix.StmtInfo.Name,
+ },
+ })
+ }
+ if len(c.items) >= unimportedMemberTarget {
+ cancel()
+ }
+ }
+
+ c.completionCallbacks = append(c.completionCallbacks, func(opts *imports.Options) error {
+ defer cancel()
+ return imports.GetPackageExports(ctx, add, id.Name, c.filename, c.pkg.GetTypes().Name(), opts.Env)
+ })
+ return nil
+}
+
+// unimportedScore returns a score for an unimported package that is generally
+// lower than other candidates.
+func unimportedScore(relevance float64) float64 {
+ return (stdScore + .1*relevance) / 2
+}
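+
+// For example, a highly relevant unimported package with relevance 7 scores
+// (1.0 + 0.7) / 2 = 0.85, still below the stdScore of 1.0 given to
+// candidates that are already in scope.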
+
+func (c *completer) packageMembers(pkg *types.Package, score float64, imp *importInfo, cb func(candidate)) {
+ scope := pkg.Scope()
+ for _, name := range scope.Names() {
+ obj := scope.Lookup(name)
+ cb(candidate{
+ obj: obj,
+ score: score,
+ imp: imp,
+ addressable: isVar(obj),
+ })
+ }
+}
+
+func (c *completer) methodsAndFields(typ types.Type, addressable bool, imp *importInfo, cb func(candidate)) {
+ mset := c.methodSetCache[methodSetKey{typ, addressable}]
+ if mset == nil {
+ if addressable && !types.IsInterface(typ) && !isPointer(typ) {
+ // Add methods of *T, which includes methods with receiver T.
+ mset = types.NewMethodSet(types.NewPointer(typ))
+ } else {
+ // Add methods of T.
+ mset = types.NewMethodSet(typ)
+ }
+ c.methodSetCache[methodSetKey{typ, addressable}] = mset
+ }
+
+ if isStarTestingDotF(typ) && addressable {
+ // is that a sufficient test? (or is more care needed?)
+ if c.fuzz(typ, mset, imp, cb, c.pkg.FileSet()) {
+ return
+ }
+ }
+
+ for i := 0; i < mset.Len(); i++ {
+ cb(candidate{
+ obj: mset.At(i).Obj(),
+ score: stdScore,
+ imp: imp,
+ addressable: addressable || isPointer(typ),
+ })
+ }
+
+ // Add fields of T.
+ eachField(typ, func(v *types.Var) {
+ cb(candidate{
+ obj: v,
+ score: stdScore - 0.01,
+ imp: imp,
+ addressable: addressable || isPointer(typ),
+ })
+ })
+}
+
+// isStarTestingDotF reports whether typ is *testing.F.
+func isStarTestingDotF(typ types.Type) bool {
+ ptr, _ := typ.(*types.Pointer)
+ if ptr == nil {
+ return false
+ }
+ named, _ := ptr.Elem().(*types.Named)
+ if named == nil {
+ return false
+ }
+ obj := named.Obj()
+ // obj.Pkg is nil for the error type.
+ return obj != nil && obj.Pkg() != nil && obj.Pkg().Path() == "testing" && obj.Name() == "F"
+}
+
+// lexical finds completions in the lexical environment.
+func (c *completer) lexical(ctx context.Context) error {
+ var (
+ builtinIota = types.Universe.Lookup("iota")
+ builtinNil = types.Universe.Lookup("nil")
+
+ // TODO(rfindley): only allow "comparable" where it is valid (in constraint
+ // position or embedded in interface declarations).
+ // builtinComparable = types.Universe.Lookup("comparable")
+ )
+
+ // Track seen variables to avoid showing completions for shadowed variables.
+ // This works since we look at scopes from innermost to outermost.
+ seen := make(map[string]struct{})
+
+ // Process scopes innermost first.
+ for i, scope := range c.scopes {
+ if scope == nil {
+ continue
+ }
+
+ Names:
+ for _, name := range scope.Names() {
+ declScope, obj := scope.LookupParent(name, c.pos)
+ if declScope != scope {
+ continue // Name was declared in some enclosing scope, or not at all.
+ }
+
+ // If obj's type is invalid, find the AST node that defines the lexical block
+ // containing the declaration of obj. Don't resolve types for packages.
+ if !isPkgName(obj) && !typeIsValid(obj.Type()) {
+ // Match the scope to its ast.Node. If the scope is the package scope,
+ // use the *ast.File as the starting node.
+ var node ast.Node
+ if i < len(c.path) {
+ node = c.path[i]
+ } else if i == len(c.path) { // use the *ast.File for package scope
+ node = c.path[i-1]
+ }
+ if node != nil {
+ if resolved := resolveInvalid(c.pkg.FileSet(), obj, node, c.pkg.GetTypesInfo()); resolved != nil {
+ obj = resolved
+ }
+ }
+ }
+
+ // Don't use LHS of decl in RHS.
+ for _, ident := range enclosingDeclLHS(c.path) {
+ if obj.Pos() == ident.Pos() {
+ continue Names
+ }
+ }
+
+ // Don't suggest "iota" outside of const decls.
+ if obj == builtinIota && !c.inConstDecl() {
+ continue
+ }
+
+ // Rank outer scopes lower than inner.
+ score := stdScore * math.Pow(.99, float64(i))
+
+ // Downrank "nil" a bit so it is ranked below more interesting candidates.
+ if obj == builtinNil {
+ score /= 2
+ }
+
+ // If we haven't already added a candidate for an object with this name.
+ if _, ok := seen[obj.Name()]; !ok {
+ seen[obj.Name()] = struct{}{}
+ c.deepState.enqueue(candidate{
+ obj: obj,
+ score: score,
+ addressable: isVar(obj),
+ })
+ }
+ }
+ }
+
+ if c.inference.objType != nil {
+ if named, _ := source.Deref(c.inference.objType).(*types.Named); named != nil {
+ // If we expected a named type, check the type's package for
+ // completion items. This is useful when the current file hasn't
+ // imported the type's package yet.
+
+ if named.Obj() != nil && named.Obj().Pkg() != nil {
+ pkg := named.Obj().Pkg()
+
+ // Make sure the package name isn't already in use by another
+ // object, and that this file doesn't import the package yet.
+ // TODO(adonovan): what if pkg.Path has vendor/ prefix?
+ if _, ok := seen[pkg.Name()]; !ok && pkg != c.pkg.GetTypes() && !alreadyImports(c.file, source.ImportPath(pkg.Path())) {
+ seen[pkg.Name()] = struct{}{}
+ obj := types.NewPkgName(0, nil, pkg.Name(), pkg)
+ imp := &importInfo{
+ importPath: pkg.Path(),
+ }
+ if imports.ImportPathToAssumedName(pkg.Path()) != pkg.Name() {
+ imp.name = pkg.Name()
+ }
+ c.deepState.enqueue(candidate{
+ obj: obj,
+ score: stdScore,
+ imp: imp,
+ })
+ }
+ }
+ }
+ }
+
+ if c.opts.unimported {
+ if err := c.unimportedPackages(ctx, seen); err != nil {
+ return err
+ }
+ }
+
+ if c.inference.typeName.isTypeParam {
+ // If we are completing a type param, offer each structural type.
+ // This ensures we suggest "[]int" and "[]float64" for a constraint
+ // with type union "[]int | []float64".
+ if t, _ := c.inference.objType.(*types.Interface); t != nil {
+ terms, _ := typeparams.InterfaceTermSet(t)
+ for _, term := range terms {
+ c.injectType(ctx, term.Type())
+ }
+ }
+ } else {
+ c.injectType(ctx, c.inference.objType)
+ }
+
+ // Add keyword completion items appropriate in the current context.
+ c.addKeywordCompletions()
+
+ return nil
+}
+
+// injectType manufactures candidates based on the given type. This is
+// intended for types not discoverable via lexical search, such as
+// composite and/or generic types. For example, if the type is "[]int",
+// this method makes sure you get candidates "[]int{}" and "[]int"
+// (the latter applies when completing a type name).
+func (c *completer) injectType(ctx context.Context, t types.Type) {
+ if t == nil {
+ return
+ }
+
+ t = source.Deref(t)
+
+ // If we have an expected type and it is _not_ a named type, handle
+ // it specially. Non-named types like "[]int" will never be
+ // considered via a lexical search, so we need to directly inject
+ // them. Also allow generic types since lexical search does not
+ // infer instantiated versions of them.
+ if named, _ := t.(*types.Named); named == nil || typeparams.ForNamed(named).Len() > 0 {
+ // If our expected type is "[]int", this will add a literal
+ // candidate of "[]int{}".
+ c.literal(ctx, t, nil)
+
+ if _, isBasic := t.(*types.Basic); !isBasic {
+ // If we expect a non-basic type name (e.g. "[]int"), hack up
+ // a named type whose name is literally "[]int". This allows
+ // us to reuse our object based completion machinery.
+ fakeNamedType := candidate{
+ obj: types.NewTypeName(token.NoPos, nil, types.TypeString(t, c.qf), t),
+ score: stdScore,
+ }
+ // Make sure the type name matches before considering
+ // candidate. This cuts down on useless candidates.
+ if c.matchingTypeName(&fakeNamedType) {
+ c.deepState.enqueue(fakeNamedType)
+ }
+ }
+ }
+}
+
+func (c *completer) unimportedPackages(ctx context.Context, seen map[string]struct{}) error {
+ var prefix string
+ if c.surrounding != nil {
+ prefix = c.surrounding.Prefix()
+ }
+
+ // Don't suggest unimported packages if we have absolutely nothing
+ // to go on.
+ if prefix == "" {
+ return nil
+ }
+
+ count := 0
+
+ // Search packages across the entire workspace.
+ all, err := c.snapshot.AllMetadata(ctx)
+ if err != nil {
+ return err
+ }
+ pkgNameByPath := make(map[source.PackagePath]string)
+ var paths []string // actually PackagePaths
+ for _, m := range all {
+ if m.ForTest != "" {
+ continue // skip all test variants
+ }
+ if m.Name == "main" {
+ continue // main is non-importable
+ }
+ if !strings.HasPrefix(string(m.Name), prefix) {
+ continue // not a match
+ }
+ paths = append(paths, string(m.PkgPath))
+ pkgNameByPath[m.PkgPath] = string(m.Name)
+ }
+
+ // Rank candidates using goimports' algorithm.
+ var relevances map[string]float64
+ if len(paths) != 0 {
+ if err := c.snapshot.RunProcessEnvFunc(ctx, func(opts *imports.Options) error {
+ var err error
+ relevances, err = imports.ScoreImportPaths(ctx, opts.Env, paths)
+ return err
+ }); err != nil {
+ return err
+ }
+ }
+ sort.Slice(paths, func(i, j int) bool {
+ if relevances[paths[i]] != relevances[paths[j]] {
+ return relevances[paths[i]] > relevances[paths[j]]
+ }
+
+ // Fall back to lexical sort to keep truncated set of candidates
+ // in a consistent order.
+ return paths[i] < paths[j]
+ })
+
+ for _, path := range paths {
+ name := pkgNameByPath[source.PackagePath(path)]
+ if _, ok := seen[name]; ok {
+ continue
+ }
+ imp := &importInfo{
+ importPath: path,
+ }
+ if imports.ImportPathToAssumedName(path) != name {
+ imp.name = name
+ }
+ if count >= maxUnimportedPackageNames {
+ return nil
+ }
+ c.deepState.enqueue(candidate{
+ // Pass an empty *types.Package to disable deep completions.
+ obj: types.NewPkgName(0, nil, name, types.NewPackage(path, name)),
+ score: unimportedScore(relevances[path]),
+ imp: imp,
+ })
+ count++
+ }
+
+ ctx, cancel := context.WithCancel(ctx)
+
+ var mu sync.Mutex
+ add := func(pkg imports.ImportFix) {
+ mu.Lock()
+ defer mu.Unlock()
+ if _, ok := seen[pkg.IdentName]; ok {
+ return
+ }
+ if _, ok := relevances[pkg.StmtInfo.ImportPath]; ok {
+ return
+ }
+
+ if count >= maxUnimportedPackageNames {
+ cancel()
+ return
+ }
+
+ // Do not add the unimported packages to seen: we can have multiple
+ // packages of the same name as completion suggestions, and only one
+ // will be chosen.
+ obj := types.NewPkgName(0, nil, pkg.IdentName, types.NewPackage(pkg.StmtInfo.ImportPath, pkg.IdentName))
+ c.deepState.enqueue(candidate{
+ obj: obj,
+ score: unimportedScore(pkg.Relevance),
+ imp: &importInfo{
+ importPath: pkg.StmtInfo.ImportPath,
+ name: pkg.StmtInfo.Name,
+ },
+ })
+ count++
+ }
+ c.completionCallbacks = append(c.completionCallbacks, func(opts *imports.Options) error {
+ defer cancel()
+ return imports.GetAllCandidates(ctx, add, prefix, c.filename, c.pkg.GetTypes().Name(), opts.Env)
+ })
+ return nil
+}
+
+// alreadyImports reports whether f has an import with the specified path.
+func alreadyImports(f *ast.File, path source.ImportPath) bool {
+ for _, s := range f.Imports {
+ if source.UnquoteImportPath(s) == path {
+ return true
+ }
+ }
+ return false
+}
+
+func (c *completer) inConstDecl() bool {
+ for _, n := range c.path {
+ if decl, ok := n.(*ast.GenDecl); ok && decl.Tok == token.CONST {
+ return true
+ }
+ }
+ return false
+}
+
+// structLiteralFieldName finds completions for struct field names inside a struct literal.
+func (c *completer) structLiteralFieldName(ctx context.Context) error {
+ clInfo := c.enclosingCompositeLiteral
+
+ // Mark fields of the composite literal that have already been set,
+ // except for the current field.
+ addedFields := make(map[*types.Var]bool)
+ for _, el := range clInfo.cl.Elts {
+ if kvExpr, ok := el.(*ast.KeyValueExpr); ok {
+ if clInfo.kv == kvExpr {
+ continue
+ }
+
+ if key, ok := kvExpr.Key.(*ast.Ident); ok {
+ if used, ok := c.pkg.GetTypesInfo().Uses[key]; ok {
+ if usedVar, ok := used.(*types.Var); ok {
+ addedFields[usedVar] = true
+ }
+ }
+ }
+ }
+ }
+
+ deltaScore := 0.0001
+ switch t := clInfo.clType.(type) {
+ case *types.Struct:
+ for i := 0; i < t.NumFields(); i++ {
+ field := t.Field(i)
+ if !addedFields[field] {
+ c.deepState.enqueue(candidate{
+ obj: field,
+ score: highScore - float64(i)*deltaScore,
+ })
+ }
+ }
+
+ // Add lexical completions if we aren't certain we are in the key part of a
+ // key-value pair.
+ if clInfo.maybeInFieldName {
+ return c.lexical(ctx)
+ }
+ default:
+ return c.lexical(ctx)
+ }
+
+ return nil
+}
+
+func (cl *compLitInfo) isStruct() bool {
+ _, ok := cl.clType.(*types.Struct)
+ return ok
+}
+
+// enclosingCompositeLiteral returns information about the composite literal enclosing the
+// position.
+func enclosingCompositeLiteral(path []ast.Node, pos token.Pos, info *types.Info) *compLitInfo {
+ for _, n := range path {
+ switch n := n.(type) {
+ case *ast.CompositeLit:
+ // The enclosing node will be a composite literal if the user has just
+ // opened the curly brace (e.g. &x{<>) or the completion request is triggered
+ // from an already completed composite literal expression (e.g. &x{foo: 1, <>})
+ //
+ // The position is not part of the composite literal unless it falls within the
+ // curly braces (e.g. "foo.Foo<>Struct{}").
+ if !(n.Lbrace < pos && pos <= n.Rbrace) {
+ // Keep searching since we may yet be inside a composite literal.
+ // For example "Foo{B: Ba<>{}}".
+ break
+ }
+
+ tv, ok := info.Types[n]
+ if !ok {
+ return nil
+ }
+
+ clInfo := compLitInfo{
+ cl: n,
+ clType: source.Deref(tv.Type).Underlying(),
+ }
+
+ var (
+ expr ast.Expr
+ hasKeys bool
+ )
+ for _, el := range n.Elts {
+ // Remember the expression that the position falls in, if any.
+ if el.Pos() <= pos && pos <= el.End() {
+ expr = el
+ }
+
+ if kv, ok := el.(*ast.KeyValueExpr); ok {
+ hasKeys = true
+ // If expr == el then we know the position falls in this expression,
+ // so also record kv as the enclosing *ast.KeyValueExpr.
+ if expr == el {
+ clInfo.kv = kv
+ break
+ }
+ }
+ }
+
+ if clInfo.kv != nil {
+ // If in a *ast.KeyValueExpr, we know we are in the key if the position
+ // is to the left of the colon (e.g. "Foo{F<>: V}").
+ clInfo.inKey = pos <= clInfo.kv.Colon
+ } else if hasKeys {
+ // If we aren't in a *ast.KeyValueExpr but the composite literal has
+ // other *ast.KeyValueExprs, we must be on the key side of a new
+ // *ast.KeyValueExpr (e.g. "Foo{F: V, <>}").
+ clInfo.inKey = true
+ } else {
+ switch clInfo.clType.(type) {
+ case *types.Struct:
+ if len(n.Elts) == 0 {
+ // If the struct literal is empty, next could be a struct field
+ // name or an expression (e.g. "Foo{<>}" could become "Foo{F:}"
+ // or "Foo{someVar}").
+ clInfo.maybeInFieldName = true
+ } else if len(n.Elts) == 1 {
+ // If there is one expression and the position is in that expression
+ // and the expression is an identifier, we may be writing a field
+ // name or an expression (e.g. "Foo{F<>}").
+ _, clInfo.maybeInFieldName = expr.(*ast.Ident)
+ }
+ case *types.Map:
+ // If we aren't in a *ast.KeyValueExpr we must be adding a new key
+ // to the map.
+ clInfo.inKey = true
+ }
+ }
+
+ return &clInfo
+ default:
+ if breaksExpectedTypeInference(n, pos) {
+ return nil
+ }
+ }
+ }
+
+ return nil
+}
+
+// enclosingFunction returns the signature and body of the function
+// enclosing the given position.
+func enclosingFunction(path []ast.Node, info *types.Info) *funcInfo {
+ for _, node := range path {
+ switch t := node.(type) {
+ case *ast.FuncDecl:
+ if obj, ok := info.Defs[t.Name]; ok {
+ return &funcInfo{
+ sig: obj.Type().(*types.Signature),
+ body: t.Body,
+ }
+ }
+ case *ast.FuncLit:
+ if typ, ok := info.Types[t]; ok {
+ if sig, _ := typ.Type.(*types.Signature); sig == nil {
+ // golang/go#49397: it should not be possible, but we somehow arrived
+ // here with a non-signature type, most likely due to AST mangling
+ // such that node.Type is not a FuncType.
+ return nil
+ }
+ return &funcInfo{
+ sig: typ.Type.(*types.Signature),
+ body: t.Body,
+ }
+ }
+ }
+ }
+ return nil
+}
+
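+// expectedCompositeLiteralType returns the expected type of the element
+// at the query position within the enclosing composite literal. For
+// example (a sketch):
+//
+//	[]string{<>}           // expected type is string
+//	map[int]bool{<>: true} // expected type is int (completing the key)
+//
+// It returns nil if no type can be inferred.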
+func (c *completer) expectedCompositeLiteralType() types.Type {
+ clInfo := c.enclosingCompositeLiteral
+ switch t := clInfo.clType.(type) {
+ case *types.Slice:
+ if clInfo.inKey {
+ return types.Typ[types.UntypedInt]
+ }
+ return t.Elem()
+ case *types.Array:
+ if clInfo.inKey {
+ return types.Typ[types.UntypedInt]
+ }
+ return t.Elem()
+ case *types.Map:
+ if clInfo.inKey {
+ return t.Key()
+ }
+ return t.Elem()
+ case *types.Struct:
+ // If we are completing a key (i.e. field name), there is no expected type.
+ if clInfo.inKey {
+ return nil
+ }
+
+ // If we are in a key-value pair, but not in the key, then we must be on the
+ // value side. The expected type of the value will be determined from the key.
+ if clInfo.kv != nil {
+ if key, ok := clInfo.kv.Key.(*ast.Ident); ok {
+ for i := 0; i < t.NumFields(); i++ {
+ if field := t.Field(i); field.Name() == key.Name {
+ return field.Type()
+ }
+ }
+ }
+ } else {
+ // If we aren't in a key-value pair and aren't in the key, we must be using
+ // implicit field names.
+
+ // The order of the literal fields must match the order in the struct definition.
+ // Find the element that the position belongs to and suggest that field's type.
+ if i := exprAtPos(c.pos, clInfo.cl.Elts); i < t.NumFields() {
+ return t.Field(i).Type()
+ }
+ }
+ }
+ return nil
+}
+
+// typeMod represents an operator that changes the expected type.
+type typeMod struct {
+ mod typeModKind
+ arrayLen int64
+}
+
+type typeModKind int
+
+const (
+ dereference typeModKind = iota // pointer indirection: "*"
+ reference // adds level of pointer: "&" for values, "*" for type names
+ chanRead // channel read operator: "<-"
+ sliceType // make a slice type: "[]" in "[]int"
+ arrayType // make an array type: "[2]" in "[2]int"
+ invoke // make a function call: "()" in "foo()"
+ takeSlice // take slice of array: "[:]" in "foo[:]"
+ takeDotDotDot // turn slice into variadic args: "..." in "foo..."
+ index // index into slice/array: "[0]" in "foo[0]"
+)
+
+type objKind int
+
+const (
+ kindAny objKind = 0
+ kindArray objKind = 1 << iota
+ kindSlice
+ kindChan
+ kindMap
+ kindStruct
+ kindString
+ kindInt
+ kindBool
+ kindBytes
+ kindPtr
+ kindFloat
+ kindComplex
+ kindError
+ kindStringer
+ kindFunc
+)
+
+// penalizedObj represents an object that should be disfavored as a
+// completion candidate.
+type penalizedObj struct {
+ // objChain is the full "chain", e.g. "foo.bar().baz" becomes
+ // []types.Object{foo, bar, baz}.
+ objChain []types.Object
+ // penalty is score penalty in the range (0, 1).
+ penalty float64
+}
+
+// candidateInference holds information we have inferred about a type that can be
+// used at the current position.
+type candidateInference struct {
+ // objType is the desired type of an object used at the query position.
+ objType types.Type
+
+ // objKind is a mask of expected kinds of types such as "map", "slice", etc.
+ objKind objKind
+
+ // variadic is true if we are completing the initial variadic
+ // parameter. For example:
+ // append([]T{}, <>) // objType=T variadic=true
+ // append([]T{}, T{}, <>) // objType=T variadic=false
+ variadic bool
+
+ // modifiers are prefixes such as "*", "&" or "<-" that influence how
+ // a candidate type relates to the expected type.
+ modifiers []typeMod
+
+ // convertibleTo is a type our candidate type must be convertible to.
+ convertibleTo types.Type
+
+ // typeName holds information about the expected type name at
+ // position, if any.
+ typeName typeNameInference
+
+ // assignees are the types that would receive a function call's
+ // results at the position. For example:
+ //
+ // foo := 123
+ // foo, bar := <>
+ //
+ // at "<>", the assignees are [int, <invalid>].
+ assignees []types.Type
+
+ // variadicAssignees is true if we could be completing an inner
+ // function call that fills out an outer function call's variadic
+ // params. For example:
+ //
+ // func foo(int, ...string) {}
+ //
+ // foo(<>) // variadicAssignees=true
+ // foo(bar<>) // variadicAssignees=true
+ // foo(bar, baz<>) // variadicAssignees=false
+ variadicAssignees bool
+
+ // penalized holds expressions that should be disfavored as
+ // candidates. For example, it tracks expressions already used in a
+ // switch statement's other cases. Each expression is tracked using
+ // its entire object "chain" allowing differentiation between
+ // "a.foo" and "b.foo" when "a" and "b" are the same type.
+ penalized []penalizedObj
+
+ // objChain contains the chain of objects representing the
+ // surrounding *ast.SelectorExpr. For example, if we are completing
+ // "foo.bar.ba<>", objChain will contain []types.Object{foo, bar}.
+ objChain []types.Object
+}
+
+// typeNameInference holds information about the expected type name at
+// position.
+type typeNameInference struct {
+ // wantTypeName is true if we expect the name of a type.
+ wantTypeName bool
+
+ // modifiers are prefixes such as "*", "&" or "<-" that influence how
+ // a candidate type relates to the expected type.
+ modifiers []typeMod
+
+ // assertableFrom is a type that must be assertable to our candidate type.
+ assertableFrom types.Type
+
+ // wantComparable is true if we want a comparable type.
+ wantComparable bool
+
+ // seenTypeSwitchCases tracks types that have already been used by
+ // the containing type switch.
+ seenTypeSwitchCases []types.Type
+
+ // compLitType is true if we are completing a composite literal type
+ // name, e.g "foo<>{}".
+ compLitType bool
+
+ // isTypeParam is true if we are completing a type instantiation parameter
+ isTypeParam bool
+}
+
+// expectedCandidate returns information about the expected candidate
+// for an expression at the query position.
+func expectedCandidate(ctx context.Context, c *completer) (inf candidateInference) {
+ inf.typeName = expectTypeName(c)
+
+ if c.enclosingCompositeLiteral != nil {
+ inf.objType = c.expectedCompositeLiteralType()
+ }
+
+Nodes:
+ for i, node := range c.path {
+ switch node := node.(type) {
+ case *ast.BinaryExpr:
+ // Determine which side of the operator the query position is
+ // on, and infer from the type of the opposite operand.
+ e := node.X
+ if c.pos < node.OpPos {
+ e = node.Y
+ }
+ if tv, ok := c.pkg.GetTypesInfo().Types[e]; ok {
+ switch node.Op {
+ case token.LAND, token.LOR:
+ // Don't infer "bool" type for "&&" or "||". Often you want
+ // to compose a boolean expression from non-boolean
+ // candidates.
+ default:
+ inf.objType = tv.Type
+ }
+ break Nodes
+ }
+ case *ast.AssignStmt:
+ // Only rank completions if the position is on the right side of the token.
+ if c.pos > node.TokPos {
+ i := exprAtPos(c.pos, node.Rhs)
+ if i >= len(node.Lhs) {
+ i = len(node.Lhs) - 1
+ }
+ if tv, ok := c.pkg.GetTypesInfo().Types[node.Lhs[i]]; ok {
+ inf.objType = tv.Type
+ }
+
+ // If we have a single expression on the RHS, record the LHS
+ // assignees so we can favor multi-return function calls with
+ // matching result values.
+ if len(node.Rhs) <= 1 {
+ for _, lhs := range node.Lhs {
+ inf.assignees = append(inf.assignees, c.pkg.GetTypesInfo().TypeOf(lhs))
+ }
+ } else {
+ // Otherwise, record our single assignee, even if its type is
+ // not available. We use this info to downrank functions
+ // with the wrong number of result values.
+ inf.assignees = append(inf.assignees, c.pkg.GetTypesInfo().TypeOf(node.Lhs[i]))
+ }
+ }
+ return inf
+ case *ast.ValueSpec:
+ if node.Type != nil && c.pos > node.Type.End() {
+ inf.objType = c.pkg.GetTypesInfo().TypeOf(node.Type)
+ }
+ return inf
+ case *ast.CallExpr:
+ // Only consider CallExpr args if position falls between parens.
+ if node.Lparen < c.pos && c.pos <= node.Rparen {
+ // For type conversions like "int64(foo)" we can only infer our
+ // desired type is convertible to int64.
+ if typ := typeConversion(node, c.pkg.GetTypesInfo()); typ != nil {
+ inf.convertibleTo = typ
+ break Nodes
+ }
+
+ sig, _ := c.pkg.GetTypesInfo().Types[node.Fun].Type.(*types.Signature)
+
+ if sig != nil && typeparams.ForSignature(sig).Len() > 0 {
+ // If we are completing a generic func call, re-check the call expression.
+ // This allows type param inference to work in cases like:
+ //
+ // func foo[T any](T) {}
+ // foo[int](<>) // <- get "int" completions instead of "T"
+ //
+ // TODO: remove this after https://go.dev/issue/52503
+ info := &types.Info{Types: make(map[ast.Expr]types.TypeAndValue)}
+ types.CheckExpr(c.pkg.FileSet(), c.pkg.GetTypes(), node.Fun.Pos(), node.Fun, info)
+ sig, _ = info.Types[node.Fun].Type.(*types.Signature)
+ }
+
+ if sig != nil {
+ inf = c.expectedCallParamType(inf, node, sig)
+ }
+
+ if funIdent, ok := node.Fun.(*ast.Ident); ok {
+ obj := c.pkg.GetTypesInfo().ObjectOf(funIdent)
+
+ if obj != nil && obj.Parent() == types.Universe {
+ // Defer call to builtinArgType so we can provide it the
+ // inferred type from its parent node.
+ defer func() {
+ inf = c.builtinArgType(obj, node, inf)
+ inf.objKind = c.builtinArgKind(ctx, obj, node)
+ }()
+
+ // The expected type of builtin arguments like append() is
+ // the expected type of the builtin call itself. For
+ // example:
+ //
+ // var foo []int = append(<>)
+ //
+ // To find the expected type at <> we "skip" the append()
+ // node and get the expected type one level up, which is
+ // []int.
+ continue Nodes
+ }
+ }
+
+ return inf
+ }
+ case *ast.ReturnStmt:
+ if c.enclosingFunc != nil {
+ sig := c.enclosingFunc.sig
+ // Find signature result that corresponds to our return statement.
+ if resultIdx := exprAtPos(c.pos, node.Results); resultIdx < len(node.Results) {
+ if resultIdx < sig.Results().Len() {
+ inf.objType = sig.Results().At(resultIdx).Type()
+ }
+ }
+ }
+ return inf
+ case *ast.CaseClause:
+ if swtch, ok := findSwitchStmt(c.path[i+1:], c.pos, node).(*ast.SwitchStmt); ok {
+ if tv, ok := c.pkg.GetTypesInfo().Types[swtch.Tag]; ok {
+ inf.objType = tv.Type
+
+ // Record which objects have already been used in the case
+ // statements so we don't suggest them again.
+ for _, cc := range swtch.Body.List {
+ for _, caseExpr := range cc.(*ast.CaseClause).List {
+ // Don't record the expression we are currently completing.
+ if caseExpr.Pos() < c.pos && c.pos <= caseExpr.End() {
+ continue
+ }
+
+ if objs := objChain(c.pkg.GetTypesInfo(), caseExpr); len(objs) > 0 {
+ inf.penalized = append(inf.penalized, penalizedObj{objChain: objs, penalty: 0.1})
+ }
+ }
+ }
+ }
+ }
+ return inf
+ case *ast.SliceExpr:
+ // Make sure position falls within the brackets (e.g. "foo[a:<>]").
+ if node.Lbrack < c.pos && c.pos <= node.Rbrack {
+ inf.objType = types.Typ[types.UntypedInt]
+ }
+ return inf
+ case *ast.IndexExpr:
+ // Make sure position falls within the brackets (e.g. "foo[<>]").
+ if node.Lbrack < c.pos && c.pos <= node.Rbrack {
+ if tv, ok := c.pkg.GetTypesInfo().Types[node.X]; ok {
+ switch t := tv.Type.Underlying().(type) {
+ case *types.Map:
+ inf.objType = t.Key()
+ case *types.Slice, *types.Array:
+ inf.objType = types.Typ[types.UntypedInt]
+ }
+
+ if ct := expectedConstraint(tv.Type, 0); ct != nil {
+ inf.objType = ct
+ inf.typeName.wantTypeName = true
+ inf.typeName.isTypeParam = true
+ }
+ }
+ }
+ return inf
+ case *typeparams.IndexListExpr:
+ if node.Lbrack < c.pos && c.pos <= node.Rbrack {
+ if tv, ok := c.pkg.GetTypesInfo().Types[node.X]; ok {
+ if ct := expectedConstraint(tv.Type, exprAtPos(c.pos, node.Indices)); ct != nil {
+ inf.objType = ct
+ inf.typeName.wantTypeName = true
+ inf.typeName.isTypeParam = true
+ }
+ }
+ }
+ return inf
+ case *ast.SendStmt:
+ // Make sure we are on the right side of the arrow (e.g. "foo <- <>").
+ if c.pos > node.Arrow+1 {
+ if tv, ok := c.pkg.GetTypesInfo().Types[node.Chan]; ok {
+ if ch, ok := tv.Type.Underlying().(*types.Chan); ok {
+ inf.objType = ch.Elem()
+ }
+ }
+ }
+ return inf
+ case *ast.RangeStmt:
+ if source.NodeContains(node.X, c.pos) {
+ inf.objKind |= kindSlice | kindArray | kindMap | kindString
+ if node.Value == nil {
+ inf.objKind |= kindChan
+ }
+ }
+ return inf
+ case *ast.StarExpr:
+ inf.modifiers = append(inf.modifiers, typeMod{mod: dereference})
+ case *ast.UnaryExpr:
+ switch node.Op {
+ case token.AND:
+ inf.modifiers = append(inf.modifiers, typeMod{mod: reference})
+ case token.ARROW:
+ inf.modifiers = append(inf.modifiers, typeMod{mod: chanRead})
+ }
+ case *ast.DeferStmt, *ast.GoStmt:
+ inf.objKind |= kindFunc
+ return inf
+ default:
+ if breaksExpectedTypeInference(node, c.pos) {
+ return inf
+ }
+ }
+ }
+
+ return inf
+}
+
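+// expectedCallParamType infers type information for the argument at the
+// query position from the called function's signature. For example (a
+// sketch), in "strings.Repeat(<>)" the expected type is string, and the
+// assignees are [string, int] since a multi-valued call could fill both
+// parameters.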
+func (c *completer) expectedCallParamType(inf candidateInference, node *ast.CallExpr, sig *types.Signature) candidateInference {
+ numParams := sig.Params().Len()
+ if numParams == 0 {
+ return inf
+ }
+
+ exprIdx := exprAtPos(c.pos, node.Args)
+
+ // If we have one or zero arg expressions, we may be
+ // completing to a function call that returns multiple
+ // values, in turn getting passed in to the surrounding
+ // call. Record the assignees so we can favor function
+ // calls that return matching values.
+ if len(node.Args) <= 1 && exprIdx == 0 {
+ for i := 0; i < sig.Params().Len(); i++ {
+ inf.assignees = append(inf.assignees, sig.Params().At(i).Type())
+ }
+
+ // Record that we may be completing into variadic parameters.
+ inf.variadicAssignees = sig.Variadic()
+ }
+
+ // Make sure not to run past the end of expected parameters.
+ if exprIdx >= numParams {
+ inf.objType = sig.Params().At(numParams - 1).Type()
+ } else {
+ inf.objType = sig.Params().At(exprIdx).Type()
+ }
+
+ if sig.Variadic() && exprIdx >= (numParams-1) {
+ // If we are completing a variadic param, deslice the variadic type.
+ inf.objType = deslice(inf.objType)
+ // Record whether we are completing the initial variadic param.
+ inf.variadic = exprIdx == numParams-1 && len(node.Args) <= numParams
+
+ // Check if we can infer object kind from printf verb.
+ inf.objKind |= printfArgKind(c.pkg.GetTypesInfo(), node, exprIdx)
+ }
+
+ // If our expected type is an uninstantiated generic type param,
+ // swap to the constraint which will do a decent job filtering
+ // candidates.
+ if tp, _ := inf.objType.(*typeparams.TypeParam); tp != nil {
+ inf.objType = tp.Constraint()
+ }
+
+ return inf
+}
+
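+// expectedConstraint returns the constraint of the idx'th type parameter
+// of t, if t is a generic named type or function. For example (a sketch),
+// given "func foo[T io.Reader](r T)", expectedConstraint of foo's type at
+// idx 0 is io.Reader.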
+func expectedConstraint(t types.Type, idx int) types.Type {
+ var tp *typeparams.TypeParamList
+ if named, _ := t.(*types.Named); named != nil {
+ tp = typeparams.ForNamed(named)
+ } else if sig, _ := t.Underlying().(*types.Signature); sig != nil {
+ tp = typeparams.ForSignature(sig)
+ }
+ if tp == nil || idx >= tp.Len() {
+ return nil
+ }
+ return tp.At(idx).Constraint()
+}
+
+// objChain decomposes e into a chain of objects if possible. For
+// example, "foo.bar().baz" will yield []types.Object{foo, bar, baz}.
+// If any part can't be turned into an object, return nil.
+func objChain(info *types.Info, e ast.Expr) []types.Object {
+ var objs []types.Object
+
+ for e != nil {
+ switch n := e.(type) {
+ case *ast.Ident:
+ obj := info.ObjectOf(n)
+ if obj == nil {
+ return nil
+ }
+ objs = append(objs, obj)
+ e = nil
+ case *ast.SelectorExpr:
+ obj := info.ObjectOf(n.Sel)
+ if obj == nil {
+ return nil
+ }
+ objs = append(objs, obj)
+ e = n.X
+ case *ast.CallExpr:
+ if len(n.Args) > 0 {
+ return nil
+ }
+ e = n.Fun
+ default:
+ return nil
+ }
+ }
+
+ // Reverse order so the layout matches the syntactic order.
+ for i := 0; i < len(objs)/2; i++ {
+ objs[i], objs[len(objs)-1-i] = objs[len(objs)-1-i], objs[i]
+ }
+
+ return objs
+}
+
+// applyTypeModifiers applies the list of type modifiers to a type.
+// It returns nil if the modifiers could not be applied.
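+// For example (a sketch), applying the "*" modifier to a candidate of
+// type *int yields int, and applying "&" to an addressable int yields
+// *int.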
+func (ci candidateInference) applyTypeModifiers(typ types.Type, addressable bool) types.Type {
+ for _, mod := range ci.modifiers {
+ switch mod.mod {
+ case dereference:
+ // For every "*" indirection operator, remove a pointer layer
+ // from candidate type.
+ if ptr, ok := typ.Underlying().(*types.Pointer); ok {
+ typ = ptr.Elem()
+ } else {
+ return nil
+ }
+ case reference:
+ // For every "&" address operator, add another pointer layer to
+ // candidate type, if the candidate is addressable.
+ if addressable {
+ typ = types.NewPointer(typ)
+ } else {
+ return nil
+ }
+ case chanRead:
+ // For every "<-" operator, remove a layer of channelness.
+ if ch, ok := typ.(*types.Chan); ok {
+ typ = ch.Elem()
+ } else {
+ return nil
+ }
+ }
+ }
+
+ return typ
+}
+
+// applyTypeNameModifiers applies the list of type modifiers to a type name.
+func (ci candidateInference) applyTypeNameModifiers(typ types.Type) types.Type {
+ for _, mod := range ci.typeName.modifiers {
+ switch mod.mod {
+ case reference:
+ typ = types.NewPointer(typ)
+ case arrayType:
+ typ = types.NewArray(typ, mod.arrayLen)
+ case sliceType:
+ typ = types.NewSlice(typ)
+ }
+ }
+ return typ
+}
+
+// matchesVariadic returns true if we are completing a variadic
+// parameter and candType is a compatible slice type.
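+// For example (a sketch), when completing "append(foo, <>)" with
+// objType=int, a candidate of type []int matches because it could be
+// passed directly using "...".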
+func (ci candidateInference) matchesVariadic(candType types.Type) bool {
+ return ci.variadic && ci.objType != nil && assignableTo(candType, types.NewSlice(ci.objType))
+}
+
+// findSwitchStmt returns an *ast.CaseClause's corresponding *ast.SwitchStmt or
+// *ast.TypeSwitchStmt. path should start from the case clause's first ancestor.
+func findSwitchStmt(path []ast.Node, pos token.Pos, c *ast.CaseClause) ast.Stmt {
+ // Make sure position falls within a "case <>:" clause.
+ if exprAtPos(pos, c.List) >= len(c.List) {
+ return nil
+ }
+ // A case clause is always nested within a block statement in a switch statement.
+ if len(path) < 2 {
+ return nil
+ }
+ if _, ok := path[0].(*ast.BlockStmt); !ok {
+ return nil
+ }
+ switch s := path[1].(type) {
+ case *ast.SwitchStmt:
+ return s
+ case *ast.TypeSwitchStmt:
+ return s
+ default:
+ return nil
+ }
+}
+
+// breaksExpectedTypeInference reports whether an expression node's type is unrelated
+// to its child expression node types. For example, "Foo{Bar: x.Baz(<>)}" should
+// expect a function argument, not a composite literal value.
+func breaksExpectedTypeInference(n ast.Node, pos token.Pos) bool {
+ switch n := n.(type) {
+ case *ast.CompositeLit:
+ // Doesn't break inference if pos is in type name.
+ // For example: "Foo<>{Bar: 123}"
+ return !source.NodeContains(n.Type, pos)
+ case *ast.CallExpr:
+ // Doesn't break inference if pos is in func name.
+ // For example: "Foo<>(123)"
+ return !source.NodeContains(n.Fun, pos)
+ case *ast.FuncLit, *ast.IndexExpr, *ast.SliceExpr:
+ return true
+ default:
+ return false
+ }
+}
+
+// expectTypeName returns information about the expected type name at position.
+func expectTypeName(c *completer) typeNameInference {
+ var inf typeNameInference
+
+Nodes:
+ for i, p := range c.path {
+ switch n := p.(type) {
+ case *ast.FieldList:
+ // Expect a type name if pos is in a FieldList. This applies to
+ // FuncType params/results, FuncDecl receiver, StructType, and
+ // InterfaceType. We don't need to worry about the field name
+ // because completion bails out early if pos is in an *ast.Ident
+ // that defines an object.
+ inf.wantTypeName = true
+ break Nodes
+ case *ast.CaseClause:
+ // Expect type names in type switch case clauses.
+ if swtch, ok := findSwitchStmt(c.path[i+1:], c.pos, n).(*ast.TypeSwitchStmt); ok {
+ // The case clause types must be assertable from the type switch parameter.
+ ast.Inspect(swtch.Assign, func(n ast.Node) bool {
+ if ta, ok := n.(*ast.TypeAssertExpr); ok {
+ inf.assertableFrom = c.pkg.GetTypesInfo().TypeOf(ta.X)
+ return false
+ }
+ return true
+ })
+ inf.wantTypeName = true
+
+ // Track the types that have already been used in this
+ // switch's case statements so we don't recommend them.
+ for _, e := range swtch.Body.List {
+ for _, typeExpr := range e.(*ast.CaseClause).List {
+ // Skip if type expression contains pos. We don't want to
+ // count it as already used if the user is completing it.
+ if typeExpr.Pos() < c.pos && c.pos <= typeExpr.End() {
+ continue
+ }
+
+ if t := c.pkg.GetTypesInfo().TypeOf(typeExpr); t != nil {
+ inf.seenTypeSwitchCases = append(inf.seenTypeSwitchCases, t)
+ }
+ }
+ }
+
+ break Nodes
+ }
+ return typeNameInference{}
+ case *ast.TypeAssertExpr:
+ // Expect type names in type assert expressions.
+ if n.Lparen < c.pos && c.pos <= n.Rparen {
+ // The type in parens must be assertable from the expression type.
+ inf.assertableFrom = c.pkg.GetTypesInfo().TypeOf(n.X)
+ inf.wantTypeName = true
+ break Nodes
+ }
+ return typeNameInference{}
+ case *ast.StarExpr:
+ inf.modifiers = append(inf.modifiers, typeMod{mod: reference})
+ case *ast.CompositeLit:
+ // We want a type name if position is in the "Type" part of a
+ // composite literal (e.g. "Foo<>{}").
+ if n.Type != nil && n.Type.Pos() <= c.pos && c.pos <= n.Type.End() {
+ inf.wantTypeName = true
+ inf.compLitType = true
+
+ if i < len(c.path)-1 {
+ // Track the preceding "&" operator. Technically it applies to
+ // the composite literal and not the type name, but it
+ // affects our type completion nonetheless.
+ if u, ok := c.path[i+1].(*ast.UnaryExpr); ok && u.Op == token.AND {
+ inf.modifiers = append(inf.modifiers, typeMod{mod: reference})
+ }
+ }
+ }
+ break Nodes
+ case *ast.ArrayType:
+ // If we are inside the "Elt" part of an array type, we want a type name.
+ if n.Elt.Pos() <= c.pos && c.pos <= n.Elt.End() {
+ inf.wantTypeName = true
+ if n.Len == nil {
+ // No "Len" expression means a slice type.
+ inf.modifiers = append(inf.modifiers, typeMod{mod: sliceType})
+ } else {
+ // Try to get the array type using the constant value of "Len".
+ tv, ok := c.pkg.GetTypesInfo().Types[n.Len]
+ if ok && tv.Value != nil && tv.Value.Kind() == constant.Int {
+ if arrayLen, ok := constant.Int64Val(tv.Value); ok {
+ inf.modifiers = append(inf.modifiers, typeMod{mod: arrayType, arrayLen: arrayLen})
+ }
+ }
+ }
+
+ // ArrayTypes can be nested, so keep going if our parent is an
+ // ArrayType.
+ if i < len(c.path)-1 {
+ if _, ok := c.path[i+1].(*ast.ArrayType); ok {
+ continue Nodes
+ }
+ }
+
+ break Nodes
+ }
+ case *ast.MapType:
+ inf.wantTypeName = true
+ if n.Key != nil {
+ inf.wantComparable = source.NodeContains(n.Key, c.pos)
+ } else {
+ // If the key is empty, assume we are completing the key if
+ // pos is directly after the "map[".
+ inf.wantComparable = c.pos == n.Pos()+token.Pos(len("map["))
+ }
+ break Nodes
+ case *ast.ValueSpec:
+ inf.wantTypeName = source.NodeContains(n.Type, c.pos)
+ break Nodes
+ case *ast.TypeSpec:
+ inf.wantTypeName = source.NodeContains(n.Type, c.pos)
+ default:
+ if breaksExpectedTypeInference(p, c.pos) {
+ return typeNameInference{}
+ }
+ }
+ }
+
+ return inf
+}
+
+func (c *completer) fakeObj(T types.Type) *types.Var {
+ return types.NewVar(token.NoPos, c.pkg.GetTypes(), "", T)
+}
+
+// derivableTypes iterates types you can derive from t. For example,
+// from "foo" we might derive "&foo", and "foo()".
+func derivableTypes(t types.Type, addressable bool, f func(t types.Type, addressable bool, mod typeModKind) bool) bool {
+ switch t := t.Underlying().(type) {
+ case *types.Signature:
+ // If t is a func type with a single result, offer the result type.
+ if t.Results().Len() == 1 && f(t.Results().At(0).Type(), false, invoke) {
+ return true
+ }
+ case *types.Array:
+ if f(t.Elem(), true, index) {
+ return true
+ }
+ // Try converting array to slice.
+ if f(types.NewSlice(t.Elem()), false, takeSlice) {
+ return true
+ }
+ case *types.Pointer:
+ if f(t.Elem(), false, dereference) {
+ return true
+ }
+ case *types.Slice:
+ if f(t.Elem(), true, index) {
+ return true
+ }
+ case *types.Map:
+ if f(t.Elem(), false, index) {
+ return true
+ }
+ case *types.Chan:
+ if f(t.Elem(), false, chanRead) {
+ return true
+ }
+ }
+
+ // Check if t is addressable and a pointer to t matches our type inference.
+ if addressable && f(types.NewPointer(t), false, reference) {
+ return true
+ }
+
+ return false
+}
+
+// anyCandType reports whether f returns true for any candidate type
+// derivable from c. It searches up to three levels of type
+// modification. For example, given "foo" we could discover "***foo"
+// or "*foo()".
+func (c *candidate) anyCandType(f func(t types.Type, addressable bool) bool) bool {
+ if c.obj == nil || c.obj.Type() == nil {
+ return false
+ }
+
+ const maxDepth = 3
+
+ var searchTypes func(t types.Type, addressable bool, mods []typeModKind) bool
+ searchTypes = func(t types.Type, addressable bool, mods []typeModKind) bool {
+ if f(t, addressable) {
+ if len(mods) > 0 {
+ newMods := make([]typeModKind, len(mods)+len(c.mods))
+ copy(newMods, mods)
+ copy(newMods[len(mods):], c.mods)
+ c.mods = newMods
+ }
+ return true
+ }
+
+ if len(mods) == maxDepth {
+ return false
+ }
+
+ return derivableTypes(t, addressable, func(t types.Type, addressable bool, mod typeModKind) bool {
+ return searchTypes(t, addressable, append(mods, mod))
+ })
+ }
+
+ return searchTypes(c.obj.Type(), c.addressable, make([]typeModKind, 0, maxDepth))
+}
+
+// matchingCandidate reports whether cand matches our type inferences.
+// It mutates cand's score in certain cases.
+func (c *completer) matchingCandidate(cand *candidate) bool {
+ if c.completionContext.commentCompletion {
+ return false
+ }
+
+ // Bail out early if we are completing a field name in a composite literal.
+ if v, ok := cand.obj.(*types.Var); ok && v.IsField() && c.wantStructFieldCompletions() {
+ return true
+ }
+
+ if isTypeName(cand.obj) {
+ return c.matchingTypeName(cand)
+ } else if c.wantTypeName() {
+ // If we want a type, a non-type object never matches.
+ return false
+ }
+
+ if c.inference.candTypeMatches(cand) {
+ return true
+ }
+
+ candType := cand.obj.Type()
+ if candType == nil {
+ return false
+ }
+
+ if sig, ok := candType.Underlying().(*types.Signature); ok {
+ if c.inference.assigneesMatch(cand, sig) {
+ // Invoke the candidate if its results are multi-assignable.
+ cand.mods = append(cand.mods, invoke)
+ return true
+ }
+ }
+
+ // Default to invoking *types.Func candidates. This is so function
+ // completions in an empty statement (or other cases with no expected type)
+ // are invoked by default.
+ if isFunc(cand.obj) {
+ cand.mods = append(cand.mods, invoke)
+ }
+
+ return false
+}
+
+// candTypeMatches reports whether cand makes a good completion
+// candidate given the candidate inference. cand's score may be
+// mutated to downrank the candidate in certain situations.
+func (ci *candidateInference) candTypeMatches(cand *candidate) bool {
+ var (
+ expTypes = make([]types.Type, 0, 2)
+ variadicType types.Type
+ )
+ if ci.objType != nil {
+ expTypes = append(expTypes, ci.objType)
+
+ if ci.variadic {
+ variadicType = types.NewSlice(ci.objType)
+ expTypes = append(expTypes, variadicType)
+ }
+ }
+
+ return cand.anyCandType(func(candType types.Type, addressable bool) bool {
+ // Take into account any type modifiers on the expected type.
+ candType = ci.applyTypeModifiers(candType, addressable)
+ if candType == nil {
+ return false
+ }
+
+ if ci.convertibleTo != nil && convertibleTo(candType, ci.convertibleTo) {
+ return true
+ }
+
+ for _, expType := range expTypes {
+ if isEmptyInterface(expType) {
+ continue
+ }
+
+ matches := ci.typeMatches(expType, candType)
+ if !matches {
+ // If candType doesn't otherwise match, consider if we can
+ // convert candType directly to expType.
+ if considerTypeConversion(candType, expType, cand.path) {
+ cand.convertTo = expType
+ // Give a major score penalty so we always prefer directly
+ // assignable candidates, all else equal.
+ cand.score *= 0.5
+ return true
+ }
+
+ continue
+ }
+
+ if expType == variadicType {
+ cand.mods = append(cand.mods, takeDotDotDot)
+ }
+
+ // Lower candidate score for untyped conversions. This avoids
+ // ranking untyped constants above candidates with an exact type
+ // match. Don't lower score of builtin constants, e.g. "true".
+ if isUntyped(candType) && !types.Identical(candType, expType) && cand.obj.Parent() != types.Universe {
+ // Bigger penalty for deep completions into other packages to
+ // avoid random constants from other packages popping up all
+ // the time.
+ if len(cand.path) > 0 && isPkgName(cand.path[0]) {
+ cand.score *= 0.5
+ } else {
+ cand.score *= 0.75
+ }
+ }
+
+ return true
+ }
+
+ // If we don't have a specific expected type, fall back to coarser
+ // object kind checks.
+ if ci.objType == nil || isEmptyInterface(ci.objType) {
+ // If we were able to apply type modifiers to our candidate type,
+ // count that as a match. For example:
+ //
+ // var foo chan int
+ // <-fo<>
+ //
+ // We were able to apply the "<-" type modifier to "foo", so "foo"
+ // matches.
+ if len(ci.modifiers) > 0 {
+ return true
+ }
+
+ // If we didn't have an exact type match, check if our object kind
+ // matches.
+ if ci.kindMatches(candType) {
+ if ci.objKind == kindFunc {
+ cand.mods = append(cand.mods, invoke)
+ }
+ return true
+ }
+ }
+
+ return false
+ })
+}
+
+// considerTypeConversion returns true if we should offer a completion
+// automatically converting "from" to "to".
+func considerTypeConversion(from, to types.Type, path []types.Object) bool {
+ // Don't offer to convert deep completions from other packages.
+ // Otherwise there are many random package level consts/vars that
+ // pop up as candidates all the time.
+ if len(path) > 0 && isPkgName(path[0]) {
+ return false
+ }
+
+ if _, ok := from.(*typeparams.TypeParam); ok {
+ return false
+ }
+
+ if !convertibleTo(from, to) {
+ return false
+ }
+
+ // Don't offer to convert ints to strings since that probably
+ // doesn't do what the user wants.
+ if isBasicKind(from, types.IsInteger) && isBasicKind(to, types.IsString) {
+ return false
+ }
+
+ return true
+}
+
+// typeMatches reports whether an object of candType makes a good
+// completion candidate given the expected type expType.
+func (ci *candidateInference) typeMatches(expType, candType types.Type) bool {
+ // Handle untyped values specially since AssignableTo gives false negatives
+ // for them (see https://golang.org/issue/32146).
+ if candBasic, ok := candType.Underlying().(*types.Basic); ok {
+ if expBasic, ok := expType.Underlying().(*types.Basic); ok {
+ // Note that the candidate and/or the expected can be untyped.
+ // In "fo<> == 100" the expected type is untyped, and the
+ // candidate could also be an untyped constant.
+
+ // Sort by is_untyped and then by is_int to simplify the logic below.
+ a, b := candBasic.Info(), expBasic.Info()
+ if a&types.IsUntyped == 0 || (b&types.IsInteger > 0 && b&types.IsUntyped > 0) {
+ a, b = b, a
+ }
+
+ // If at least one is untyped...
+ if a&types.IsUntyped > 0 {
+ switch {
+ // Untyped integers are compatible with floats.
+ case a&types.IsInteger > 0 && b&types.IsFloat > 0:
+ return true
+
+ // Check if their constant kind (bool|int|float|complex|string) matches.
+ // This doesn't take into account the constant value, so there will be some
+ // false positives due to integer sign and overflow.
+ case a&types.IsConstType == b&types.IsConstType:
+ return true
+ }
+ }
+ }
+ }
+
+ // AssignableTo covers the case where the types are equal, but also handles
+ // cases like assigning a concrete type to an interface type.
+ return assignableTo(candType, expType)
+}
+
+// kindMatches reports whether candType's kind matches our expected
+// kind (e.g. slice, map, etc.).
+func (ci *candidateInference) kindMatches(candType types.Type) bool {
+ return ci.objKind > 0 && ci.objKind&candKind(candType) > 0
+}
+
+// assigneesMatch reports whether an invocation of sig matches the
+// number and type of any assignees.
+func (ci *candidateInference) assigneesMatch(cand *candidate, sig *types.Signature) bool {
+ if len(ci.assignees) == 0 {
+ return false
+ }
+
+ // Uniresult functions are always usable and are handled by the
+ // normal, non-assignees type matching logic.
+ if sig.Results().Len() == 1 {
+ return false
+ }
+
+ // Don't prefer completing into func(...interface{}) calls since all
+ // functions would match.
+ if ci.variadicAssignees && len(ci.assignees) == 1 && isEmptyInterface(deslice(ci.assignees[0])) {
+ return false
+ }
+
+ var numberOfResultsCouldMatch bool
+ if ci.variadicAssignees {
+ numberOfResultsCouldMatch = sig.Results().Len() >= len(ci.assignees)-1
+ } else {
+ numberOfResultsCouldMatch = sig.Results().Len() == len(ci.assignees)
+ }
+
+ // If our signature doesn't return the right number of values, it's
+ // not a match, so downrank it. For example:
+ //
+ // var foo func() (int, int)
+ // a, b, c := <> // downrank "foo()" since it only returns two values
+ if !numberOfResultsCouldMatch {
+ cand.score /= 2
+ return false
+ }
+
+ // If at least one assignee has a valid type, and all valid
+ // assignees match the corresponding sig result value, the signature
+ // is a match.
+ allMatch := false
+ for i := 0; i < sig.Results().Len(); i++ {
+ var assignee types.Type
+
+ // If we are completing into variadic parameters, deslice the
+ // expected variadic type.
+ if ci.variadicAssignees && i >= len(ci.assignees)-1 {
+ assignee = ci.assignees[len(ci.assignees)-1]
+ if elem := deslice(assignee); elem != nil {
+ assignee = elem
+ }
+ } else {
+ assignee = ci.assignees[i]
+ }
+
+ if assignee == nil || assignee == types.Typ[types.Invalid] {
+ continue
+ }
+
+ allMatch = ci.typeMatches(assignee, sig.Results().At(i).Type())
+ if !allMatch {
+ break
+ }
+ }
+ return allMatch
+}
+
+func (c *completer) matchingTypeName(cand *candidate) bool {
+ if !c.wantTypeName() {
+ return false
+ }
+
+ typeMatches := func(candType types.Type) bool {
+ // Take into account any type name modifier prefixes.
+ candType = c.inference.applyTypeNameModifiers(candType)
+
+ if from := c.inference.typeName.assertableFrom; from != nil {
+ // Don't suggest the starting type in type assertions. For example,
+ // if "foo" is an io.Writer, don't suggest "foo.(io.Writer)".
+ if types.Identical(from, candType) {
+ return false
+ }
+
+ if intf, ok := from.Underlying().(*types.Interface); ok {
+ if !types.AssertableTo(intf, candType) {
+ return false
+ }
+ }
+ }
+
+ if c.inference.typeName.wantComparable && !types.Comparable(candType) {
+ return false
+ }
+
+ // Skip this type if it has already been used in another type
+ // switch case.
+ for _, seen := range c.inference.typeName.seenTypeSwitchCases {
+ if types.Identical(candType, seen) {
+ return false
+ }
+ }
+
+ // We can expect a type name and have an expected type in cases like:
+ //
+ // var foo []int
+ // foo = []i<>
+ //
+ // Where our expected type is "[]int", and we expect a type name.
+ if c.inference.objType != nil {
+ return assignableTo(candType, c.inference.objType)
+ }
+
+ // Default to saying any type name is a match.
+ return true
+ }
+
+ t := cand.obj.Type()
+
+ if typeMatches(t) {
+ return true
+ }
+
+ if !types.IsInterface(t) && typeMatches(types.NewPointer(t)) {
+ if c.inference.typeName.compLitType {
+ // If we are completing a composite literal type as in
+ // "foo<>{}", to make a pointer we must prepend "&".
+ cand.mods = append(cand.mods, reference)
+ } else {
+ // If we are completing a normal type name such as "foo<>", to
+ // make a pointer we must prepend "*".
+ cand.mods = append(cand.mods, dereference)
+ }
+ return true
+ }
+
+ return false
+}
+
+var (
+ // "interface { Error() string }" (i.e. error)
+ errorIntf = types.Universe.Lookup("error").Type().Underlying().(*types.Interface)
+
+ // "interface { String() string }" (i.e. fmt.Stringer)
+ stringerIntf = types.NewInterfaceType([]*types.Func{
+ types.NewFunc(token.NoPos, nil, "String", types.NewSignature(
+ nil,
+ nil,
+ types.NewTuple(types.NewParam(token.NoPos, nil, "", types.Typ[types.String])),
+ false,
+ )),
+ }, nil).Complete()
+
+ byteType = types.Universe.Lookup("byte").Type()
+)
+
+// candKind returns the objKind of candType, if any.
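+// For example (a sketch), a candidate of type []byte yields
+// kindSlice|kindBytes, and any type implementing the error interface
+// additionally has kindError set.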
+func candKind(candType types.Type) objKind {
+ var kind objKind
+
+ switch t := candType.Underlying().(type) {
+ case *types.Array:
+ kind |= kindArray
+ if t.Elem() == byteType {
+ kind |= kindBytes
+ }
+ case *types.Slice:
+ kind |= kindSlice
+ if t.Elem() == byteType {
+ kind |= kindBytes
+ }
+ case *types.Chan:
+ kind |= kindChan
+ case *types.Map:
+ kind |= kindMap
+ case *types.Pointer:
+ kind |= kindPtr
+
+ // Some builtins handle array pointers as arrays, so just report a pointer
+ // to an array as an array.
+ if _, isArray := t.Elem().Underlying().(*types.Array); isArray {
+ kind |= kindArray
+ }
+ case *types.Basic:
+ switch info := t.Info(); {
+ case info&types.IsString > 0:
+ kind |= kindString
+ case info&types.IsInteger > 0:
+ kind |= kindInt
+ case info&types.IsFloat > 0:
+ kind |= kindFloat
+ case info&types.IsComplex > 0:
+ kind |= kindComplex
+ case info&types.IsBoolean > 0:
+ kind |= kindBool
+ }
+ case *types.Signature:
+ return kindFunc
+ }
+
+ if types.Implements(candType, errorIntf) {
+ kind |= kindError
+ }
+
+ if types.Implements(candType, stringerIntf) {
+ kind |= kindStringer
+ }
+
+ return kind
+}
+
+// innermostScope returns the innermost scope for c.pos.
+func (c *completer) innermostScope() *types.Scope {
+ for _, s := range c.scopes {
+ if s != nil {
+ return s
+ }
+ }
+ return nil
+}
+
+// isSlice reports whether the object's underlying type is a slice.
+func isSlice(obj types.Object) bool {
+ if obj != nil && obj.Type() != nil {
+ if _, ok := obj.Type().Underlying().(*types.Slice); ok {
+ return true
+ }
+ }
+ return false
+}
+
+// forEachPackageMember calls f(tok, id, fn) for each package-level
+// TYPE/VAR/CONST/FUNC declaration in the Go source file, based on a
+// quick partial parse. fn is non-nil only for function declarations.
+// The AST position information is garbage.
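+// For example (a sketch), given "var x, y int" at package level, f is
+// called as f(token.VAR, x, nil) and f(token.VAR, y, nil).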
+func forEachPackageMember(content []byte, f func(tok token.Token, id *ast.Ident, fn *ast.FuncDecl)) {
+ purged := purgeFuncBodies(content)
+ file, _ := parser.ParseFile(token.NewFileSet(), "", purged, 0)
+ for _, decl := range file.Decls {
+ switch decl := decl.(type) {
+ case *ast.GenDecl:
+ for _, spec := range decl.Specs {
+ switch spec := spec.(type) {
+ case *ast.ValueSpec: // var/const
+ for _, id := range spec.Names {
+ f(decl.Tok, id, nil)
+ }
+ case *ast.TypeSpec:
+ f(decl.Tok, spec.Name, nil)
+ }
+ }
+ case *ast.FuncDecl:
+ if decl.Recv == nil {
+ f(token.FUNC, decl.Name, decl)
+ }
+ }
+ }
+}
+
+// purgeFuncBodies returns a copy of src in which the contents of each
+// outermost {...} region except struct and interface types have been
+// deleted. It does not preserve newlines. This reduces the amount of
+// work required to parse the top-level declarations.
+func purgeFuncBodies(src []byte) []byte {
+ // Destroy the content of any {...}-bracketed regions that are
+ // not immediately preceded by a "struct" or "interface"
+ // token. That includes function bodies, composite literals,
+ // switch/select bodies, and all blocks of statements.
+ // This will lead to non-void functions that don't have return
+ // statements, which of course is a type error, but that's ok.
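+ //
+ // For example (a sketch), "func f() int { return g() }" becomes
+ // "func f() int {}", while "type T struct { x int }" is left intact.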
+
+ var out bytes.Buffer
+ file := token.NewFileSet().AddFile("", -1, len(src))
+ var sc scanner.Scanner
+ sc.Init(file, src, nil, 0)
+ var prev token.Token
+ var cursor int // last consumed src offset
+ var braces []token.Pos // stack of unclosed braces or -1 for struct/interface type
+ for {
+ pos, tok, _ := sc.Scan()
+ if tok == token.EOF {
+ break
+ }
+ switch tok {
+ case token.COMMENT:
+ // TODO(adonovan): opt: skip, to save an estimated 20% of time.
+
+ case token.LBRACE:
+ if prev == token.STRUCT || prev == token.INTERFACE {
+ pos = -1
+ }
+ braces = append(braces, pos)
+
+ case token.RBRACE:
+ if last := len(braces) - 1; last >= 0 {
+ top := braces[last]
+ braces = braces[:last]
+ if top < 0 {
+ // struct/interface type: leave alone
+ } else if len(braces) == 0 { // toplevel only
+ // Delete {...} body.
+ start, _ := safetoken.Offset(file, top)
+ end, _ := safetoken.Offset(file, pos)
+ out.Write(src[cursor : start+len("{")])
+ cursor = end
+ }
+ }
+ }
+ prev = tok
+ }
+ out.Write(src[cursor:])
+ return out.Bytes()
+}
diff --git a/gopls/internal/lsp/source/completion/deep_completion.go b/gopls/internal/lsp/source/completion/deep_completion.go
new file mode 100644
index 000000000..a72d56191
--- /dev/null
+++ b/gopls/internal/lsp/source/completion/deep_completion.go
@@ -0,0 +1,362 @@
+// Copyright 2019 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package completion
+
+import (
+ "context"
+ "go/types"
+ "strings"
+ "time"
+)
+
+// MaxDeepCompletions limits deep completion results because in most cases
+// there are too many to be useful.
+const MaxDeepCompletions = 3
+
+// deepCompletionState stores our state as we search for deep completions.
+// "deep completion" refers to searching into objects' fields and methods to
+// find more completion candidates.
+type deepCompletionState struct {
+ // enabled indicates whether deep completion is permitted.
+ enabled bool
+
+ // queueClosed is used to disable adding new sub-fields to search queue
+ // once we're running out of our time budget.
+ queueClosed bool
+
+ // thisQueue holds the current breadth first search queue.
+ thisQueue []candidate
+
+ // nextQueue holds the next breadth first search iteration's queue.
+ nextQueue []candidate
+
+ // highScores tracks the highest deep candidate scores we have found
+ // so far. This is used to avoid work for low scoring deep candidates.
+ highScores [MaxDeepCompletions]float64
+
+ // candidateCount is the count of unique deep candidates encountered
+ // so far.
+ candidateCount int
+}
+
+// enqueue adds a candidate to the search queue.
+func (s *deepCompletionState) enqueue(cand candidate) {
+ s.nextQueue = append(s.nextQueue, cand)
+}
+
+// dequeue removes and returns the last element of the search queue.
+func (s *deepCompletionState) dequeue() *candidate {
+ var cand *candidate
+ cand, s.thisQueue = &s.thisQueue[len(s.thisQueue)-1], s.thisQueue[:len(s.thisQueue)-1]
+ return cand
+}
+
+// scorePenalty computes a deep candidate score penalty. A candidate is
+// penalized based on depth to favor shallower candidates. We also give a
+// slight bonus to unexported objects and a slight additional penalty to
+// function objects.
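+//
+// For example (hypothetical values), a candidate reached via the path
+// [foo, Bar], where "foo" is an unexported var and "Bar" is a method,
+// accrues (1-0.1)+(1+0.1) = 2, normalized to a penalty of 0.2.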
+func (s *deepCompletionState) scorePenalty(cand *candidate) float64 {
+ var deepPenalty float64
+ for _, dc := range cand.path {
+ deepPenalty++
+
+ if !dc.Exported() {
+ deepPenalty -= 0.1
+ }
+
+ if _, isSig := dc.Type().Underlying().(*types.Signature); isSig {
+ deepPenalty += 0.1
+ }
+ }
+
+ // Normalize penalty to a max depth of 10.
+ return deepPenalty / 10
+}
+
+// isHighScore returns whether score is among the top MaxDeepCompletions deep
+// candidate scores encountered so far. If so, it adds score to highScores,
+// possibly displacing an existing high score.
+func (s *deepCompletionState) isHighScore(score float64) bool {
+ // Invariant: s.highScores is sorted with highest score first. Unclaimed
+ // positions are trailing zeros.
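+ //
+ // For example (hypothetical values), if highScores is [10, 8, 5] and
+ // score is 9, highScores becomes [10, 9, 8] and we return true.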
+
+ // If we beat an existing score then take its spot.
+ for i, deepScore := range s.highScores {
+ if score <= deepScore {
+ continue
+ }
+
+ if deepScore != 0 && i != len(s.highScores)-1 {
+ // If this wasn't an empty slot then we need to scooch everyone
+ // down one spot.
+ copy(s.highScores[i+1:], s.highScores[i:])
+ }
+ s.highScores[i] = score
+ return true
+ }
+
+ return false
+}
+
+// newPath returns the path from the search root to obj, formed by
+// appending obj to the given candidate's path.
+func (s *deepCompletionState) newPath(cand candidate, obj types.Object) []types.Object {
+ path := make([]types.Object, len(cand.path)+1)
+ copy(path, cand.path)
+ path[len(path)-1] = obj
+
+ return path
+}
+
+// deepSearch searches the queued candidates and, if deep completion is
+// enabled, their subordinate objects, adding valid candidates to the
+// completion items.
+func (c *completer) deepSearch(ctx context.Context) {
+ defer func() {
+ // We can return early before completing the search, so be sure to
+ // clear out our queues to not impact any further invocations.
+ c.deepState.thisQueue = c.deepState.thisQueue[:0]
+ c.deepState.nextQueue = c.deepState.nextQueue[:0]
+ }()
+
+ for len(c.deepState.nextQueue) > 0 {
+ c.deepState.thisQueue, c.deepState.nextQueue = c.deepState.nextQueue, c.deepState.thisQueue[:0]
+
+ outer:
+ for _, cand := range c.deepState.thisQueue {
+ obj := cand.obj
+
+ if obj == nil {
+ continue
+ }
+
+ // At the top level, dedupe by object.
+ if len(cand.path) == 0 {
+ if c.seen[obj] {
+ continue
+ }
+ c.seen[obj] = true
+ }
+
+ // If obj is not accessible because it lives in another package and is
+ // not exported, don't treat it as a completion candidate unless it's
+ // a package completion candidate.
+ if !c.completionContext.packageCompletion &&
+ obj.Pkg() != nil && obj.Pkg() != c.pkg.GetTypes() && !obj.Exported() {
+ continue
+ }
+
+ // If we want a type name, don't offer non-type name candidates.
+ // However, do offer package names since they can contain type names,
+ // and do offer any candidate without a type since we aren't sure if it
+ // is a type name or not (i.e. unimported candidate).
+ if c.wantTypeName() && obj.Type() != nil && !isTypeName(obj) && !isPkgName(obj) {
+ continue
+ }
+
+ // When searching deep, make sure we don't have a cycle in our chain.
+ // We don't dedupe by object because we want to allow both "foo.Baz"
+ // and "bar.Baz" even though "Baz" is represented the same types.Object
+ // in both.
+ for _, seenObj := range cand.path {
+ if seenObj == obj {
+ continue outer
+ }
+ }
+
+ c.addCandidate(ctx, &cand)
+
+ c.deepState.candidateCount++
+ if c.opts.budget > 0 && c.deepState.candidateCount%100 == 0 {
+ spent := float64(time.Since(c.startTime)) / float64(c.opts.budget)
+ select {
+ case <-ctx.Done():
+ return
+ default:
+ // If we are almost out of budgeted time, no further elements
+ // should be added to the queue. This ensures the remaining time is
+ // used for processing the current queue.
+ if !c.deepState.queueClosed && spent >= 0.85 {
+ c.deepState.queueClosed = true
+ }
+ }
+ }
+
+ // If deep search is disabled or the queue is closed, don't
+ // enqueue any subordinate candidates.
+ if !c.deepState.enabled || c.deepState.queueClosed {
+ continue
+ }
+
+ // Searching members for a type name doesn't make sense.
+ if isTypeName(obj) {
+ continue
+ }
+ if obj.Type() == nil {
+ continue
+ }
+
+ // Don't search embedded fields because they were already included in their
+ // parent's fields.
+ if v, ok := obj.(*types.Var); ok && v.Embedded() {
+ continue
+ }
+
+ if sig, ok := obj.Type().Underlying().(*types.Signature); ok {
+ // If obj is a function that takes no arguments and returns one
+ // value, keep searching across the function call.
+ if sig.Params().Len() == 0 && sig.Results().Len() == 1 {
+ path := c.deepState.newPath(cand, obj)
+ // The result of a function call is not addressable.
+ c.methodsAndFields(sig.Results().At(0).Type(), false, cand.imp, func(newCand candidate) {
+ newCand.pathInvokeMask = cand.pathInvokeMask | (1 << uint64(len(cand.path)))
+ newCand.path = path
+ c.deepState.enqueue(newCand)
+ })
+ }
+ }
+
+ path := c.deepState.newPath(cand, obj)
+ switch obj := obj.(type) {
+ case *types.PkgName:
+ c.packageMembers(obj.Imported(), stdScore, cand.imp, func(newCand candidate) {
+ newCand.pathInvokeMask = cand.pathInvokeMask
+ newCand.path = path
+ c.deepState.enqueue(newCand)
+ })
+ default:
+ c.methodsAndFields(obj.Type(), cand.addressable, cand.imp, func(newCand candidate) {
+ newCand.pathInvokeMask = cand.pathInvokeMask
+ newCand.path = path
+ c.deepState.enqueue(newCand)
+ })
+ }
+ }
+ }
+}
+
+// addCandidate adds a completion candidate to suggestions, without searching
+// its members for more candidates.
+func (c *completer) addCandidate(ctx context.Context, cand *candidate) {
+ obj := cand.obj
+ if c.matchingCandidate(cand) {
+ cand.score *= highScore
+
+ if p := c.penalty(cand); p > 0 {
+ cand.score *= (1 - p)
+ }
+ } else if isTypeName(obj) {
+ // If obj is a *types.TypeName that didn't otherwise match, check
+ // if a literal object of this type makes a good candidate.
+
+ // We only care about named types (i.e. don't want builtin types).
+ if _, isNamed := obj.Type().(*types.Named); isNamed {
+ c.literal(ctx, obj.Type(), cand.imp)
+ }
+ }
+
+ // Lower score of method calls so we prefer fields and vars over calls.
+ if cand.hasMod(invoke) {
+ if sig, ok := obj.Type().Underlying().(*types.Signature); ok && sig.Recv() != nil {
+ cand.score *= 0.9
+ }
+ }
+
+ // Prefer private objects over public ones.
+ if !obj.Exported() && obj.Parent() != types.Universe {
+ cand.score *= 1.1
+ }
+
+ // Slight penalty for index modifier (e.g. changing "foo" to
+ // "foo[]") to curb false positives.
+ if cand.hasMod(index) {
+ cand.score *= 0.9
+ }
+
+ // Favor shallow matches by lowering score according to depth.
+ cand.score -= cand.score * c.deepState.scorePenalty(cand)
+
+ if cand.score < 0 {
+ cand.score = 0
+ }
+
+ cand.name = deepCandName(cand)
+ if item, err := c.item(ctx, *cand); err == nil {
+ c.items = append(c.items, item)
+ }
+}
+
+// deepCandName produces the full candidate name including any
+// ancestor objects. For example, "foo.bar().baz" for candidate "baz".
+func deepCandName(cand *candidate) string {
+ totalLen := len(cand.obj.Name())
+ for i, obj := range cand.path {
+ totalLen += len(obj.Name()) + 1
+ if cand.pathInvokeMask&(1<<uint16(i)) > 0 {
+ totalLen += 2
+ }
+ }
+
+ var buf strings.Builder
+ buf.Grow(totalLen)
+
+ for i, obj := range cand.path {
+ buf.WriteString(obj.Name())
+ if cand.pathInvokeMask&(1<<uint16(i)) > 0 {
+ buf.WriteByte('(')
+ buf.WriteByte(')')
+ }
+ buf.WriteByte('.')
+ }
+
+ buf.WriteString(cand.obj.Name())
+
+ return buf.String()
+}
+
+// penalty reports a score penalty for cand in the range (0, 1).
+// For example, a candidate is penalized if it has already been used
+// in another switch case statement.
+func (c *completer) penalty(cand *candidate) float64 {
+ for _, p := range c.inference.penalized {
+ if c.objChainMatches(cand, p.objChain) {
+ return p.penalty
+ }
+ }
+
+ return 0
+}
+
+// objChainMatches reports whether cand combined with the surrounding
+// object prefix matches chain.
+func (c *completer) objChainMatches(cand *candidate, chain []types.Object) bool {
+ // For example, when completing:
+ //
+ // foo.ba<>
+ //
+ // If we are considering the deep candidate "bar.baz", cand is baz,
+ // objChain is [foo] and cand.path is [bar]. We would match the
+ // chain [foo, bar, baz].
+ if len(chain) != len(c.inference.objChain)+len(cand.path)+1 {
+ return false
+ }
+
+ if chain[len(chain)-1] != cand.obj {
+ return false
+ }
+
+ for i, o := range c.inference.objChain {
+ if chain[i] != o {
+ return false
+ }
+ }
+
+ for i, o := range cand.path {
+ if chain[i+len(c.inference.objChain)] != o {
+ return false
+ }
+ }
+
+ return true
+}
diff --git a/gopls/internal/lsp/source/completion/deep_completion_test.go b/gopls/internal/lsp/source/completion/deep_completion_test.go
new file mode 100644
index 000000000..27009af1b
--- /dev/null
+++ b/gopls/internal/lsp/source/completion/deep_completion_test.go
@@ -0,0 +1,33 @@
+// Copyright 2020 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package completion
+
+import (
+ "testing"
+)
+
+func TestDeepCompletionIsHighScore(t *testing.T) {
+ // Test that deepCompletionState.isHighScore properly tracks the top
+ // N=MaxDeepCompletions scores.
+
+ var s deepCompletionState
+
+ if !s.isHighScore(1) {
+ // No other scores yet, anything is a winner.
+ t.Error("1 should be high score")
+ }
+
+ // Fill up with higher scores.
+ for i := 0; i < MaxDeepCompletions; i++ {
+ if !s.isHighScore(10) {
+ t.Error("10 should be high score")
+ }
+ }
+
+ // High scores should be filled with 10s so 2 is not a high score.
+ if s.isHighScore(2) {
+ t.Error("2 shouldn't be high score")
+ }
+}
diff --git a/gopls/internal/lsp/source/completion/definition.go b/gopls/internal/lsp/source/completion/definition.go
new file mode 100644
index 000000000..d7f51f002
--- /dev/null
+++ b/gopls/internal/lsp/source/completion/definition.go
@@ -0,0 +1,160 @@
+// Copyright 2022 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package completion
+
+import (
+ "go/ast"
+ "go/types"
+ "strings"
+ "unicode"
+ "unicode/utf8"
+
+ "golang.org/x/tools/gopls/internal/lsp/protocol"
+ "golang.org/x/tools/gopls/internal/lsp/snippet"
+ "golang.org/x/tools/gopls/internal/lsp/source"
+)
+
+// Some function definitions in test files can be completed: so far,
+// TestFoo(t *testing.T), TestMain(m *testing.M),
+// BenchmarkFoo(b *testing.B), and FuzzFoo(f *testing.F).
+
+// path[0] is known to be *ast.Ident
+func definition(path []ast.Node, obj types.Object, pgf *source.ParsedGoFile) ([]CompletionItem, *Selection) {
+ if _, ok := obj.(*types.Func); !ok {
+ return nil, nil // not a function at all
+ }
+ if !strings.HasSuffix(pgf.URI.Filename(), "_test.go") {
+ return nil, nil // not a test file
+ }
+
+ name := path[0].(*ast.Ident).Name
+ if len(name) == 0 {
+ // can't happen
+ return nil, nil
+ }
+ start := path[0].Pos()
+ end := path[0].End()
+ sel := &Selection{
+ content: "",
+ cursor: start,
+ tokFile: pgf.Tok,
+ start: start,
+ end: end,
+ mapper: pgf.Mapper,
+ }
+ var ans []CompletionItem
+ var hasParens bool
+ n, ok := path[1].(*ast.FuncDecl)
+ if !ok {
+ return nil, nil // can't happen
+ }
+ if n.Recv != nil {
+ return nil, nil // a method, not a function
+ }
+ t := n.Type.Params
+ if t.Closing != t.Opening {
+ hasParens = true
+ }
+
+ // Always suggest TestMain, if possible
+ if strings.HasPrefix("TestMain", name) {
+ if hasParens {
+ ans = append(ans, defItem("TestMain", obj))
+ } else {
+ ans = append(ans, defItem("TestMain(m *testing.M)", obj))
+ }
+ }
+
+ // If a snippet is possible, suggest it
+ if strings.HasPrefix("Test", name) {
+ if hasParens {
+ ans = append(ans, defItem("Test", obj))
+ } else {
+ ans = append(ans, defSnippet("Test", "(t *testing.T)", obj))
+ }
+ return ans, sel
+ } else if strings.HasPrefix("Benchmark", name) {
+ if hasParens {
+ ans = append(ans, defItem("Benchmark", obj))
+ } else {
+ ans = append(ans, defSnippet("Benchmark", "(b *testing.B)", obj))
+ }
+ return ans, sel
+ } else if strings.HasPrefix("Fuzz", name) {
+ if hasParens {
+ ans = append(ans, defItem("Fuzz", obj))
+ } else {
+ ans = append(ans, defSnippet("Fuzz", "(f *testing.F)", obj))
+ }
+ return ans, sel
+ }
+
+ // Fill in the argument for what the user has already typed
+ if got := defMatches(name, "Test", path, "(t *testing.T)"); got != "" {
+ ans = append(ans, defItem(got, obj))
+ } else if got := defMatches(name, "Benchmark", path, "(b *testing.B)"); got != "" {
+ ans = append(ans, defItem(got, obj))
+ } else if got := defMatches(name, "Fuzz", path, "(f *testing.F)"); got != "" {
+ ans = append(ans, defItem(got, obj))
+ }
+ return ans, sel
+}
+
+// defMatches returns text for defItem, never for defSnippet
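+// For example (a sketch), defMatches("TestFoo", "Test", path,
+// "(t *testing.T)") returns "TestFoo(t *testing.T)" when the declared
+// function has no parameters yet.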
+func defMatches(name, pat string, path []ast.Node, arg string) string {
+ if !strings.HasPrefix(name, pat) {
+ return ""
+ }
+ c, _ := utf8.DecodeRuneInString(name[len(pat):])
+ if unicode.IsLower(c) {
+ return ""
+ }
+ fd, ok := path[1].(*ast.FuncDecl)
+ if !ok {
+ // we don't know what's going on
+ return ""
+ }
+ fp := fd.Type.Params
+ if len(fp.List) > 0 {
+ // signature already there, nothing to suggest
+ return ""
+ }
+ if fp.Opening != fp.Closing {
+ // the parens are already present; completion works on words,
+ // so it is not easy to insert the arg
+ return ""
+ }
+ // suggesting signature too
+ return name + arg
+}
+
+func defSnippet(prefix, suffix string, obj types.Object) CompletionItem {
+ var sn snippet.Builder
+ sn.WriteText(prefix)
+ sn.WritePlaceholder(func(b *snippet.Builder) { b.WriteText("Xxx") })
+ sn.WriteText(suffix + " {\n\t")
+ sn.WriteFinalTabstop()
+ sn.WriteText("\n}")
+ return CompletionItem{
+ Label: prefix + "Xxx" + suffix,
+ Detail: "tab, type the rest of the name, then tab",
+ Kind: protocol.FunctionCompletion,
+ Depth: 0,
+ Score: 10,
+ snippet: &sn,
+ Documentation: prefix + " test function",
+ isSlice: isSlice(obj),
+ }
+}
+
+func defItem(val string, obj types.Object) CompletionItem {
+ return CompletionItem{
+ Label: val,
+ InsertText: val,
+ Kind: protocol.FunctionCompletion,
+ Depth: 0,
+ Score: 9, // prefer the snippets when available
+ Documentation: "complete the function name",
+ isSlice: isSlice(obj),
+ }
+}
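
Note: a sketch of what the defSnippet completion above expands to once accepted, assuming a client that renders LSP snippet placeholders. Completing "TestXxx(t *testing.T)" inside a half-typed "func Te" declaration yields roughly:

	func TestXxx(t *testing.T) {
		// the "Xxx" placeholder is selected for typing the rest of the
		// name; the final tabstop then drops the cursor into the body
	}
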
diff --git a/gopls/internal/lsp/source/completion/format.go b/gopls/internal/lsp/source/completion/format.go
new file mode 100644
index 000000000..c2d2c0bc0
--- /dev/null
+++ b/gopls/internal/lsp/source/completion/format.go
@@ -0,0 +1,338 @@
+// Copyright 2019 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package completion
+
+import (
+ "context"
+ "errors"
+ "fmt"
+ "go/ast"
+ "go/doc"
+ "go/types"
+ "strings"
+
+ "golang.org/x/tools/gopls/internal/lsp/protocol"
+ "golang.org/x/tools/gopls/internal/lsp/safetoken"
+ "golang.org/x/tools/gopls/internal/lsp/snippet"
+ "golang.org/x/tools/gopls/internal/lsp/source"
+ "golang.org/x/tools/gopls/internal/span"
+ "golang.org/x/tools/internal/event"
+ "golang.org/x/tools/internal/imports"
+ "golang.org/x/tools/internal/typeparams"
+)
+
+var (
+ errNoMatch = errors.New("not a surrounding match")
+ errLowScore = errors.New("not a high scoring candidate")
+)
+
+// item formats a candidate as a CompletionItem.
+func (c *completer) item(ctx context.Context, cand candidate) (CompletionItem, error) {
+ obj := cand.obj
+
+ // if the object isn't a valid match against the surrounding, return early.
+ matchScore := c.matcher.Score(cand.name)
+ if matchScore <= 0 {
+ return CompletionItem{}, errNoMatch
+ }
+ cand.score *= float64(matchScore)
+
+ // Ignore deep candidates that won't be in the MaxDeepCompletions anyway.
+ if len(cand.path) != 0 && !c.deepState.isHighScore(cand.score) {
+ return CompletionItem{}, errLowScore
+ }
+
+ // Handle builtin types separately.
+ if obj.Parent() == types.Universe {
+ return c.formatBuiltin(ctx, cand)
+ }
+
+ var (
+ label = cand.name
+ detail = types.TypeString(obj.Type(), c.qf)
+ insert = label
+ kind = protocol.TextCompletion
+ snip snippet.Builder
+ protocolEdits []protocol.TextEdit
+ )
+ if obj.Type() == nil {
+ detail = ""
+ }
+ if isTypeName(obj) && c.wantTypeParams() {
+ x := cand.obj.(*types.TypeName)
+ if named, ok := x.Type().(*types.Named); ok {
+ tp := typeparams.ForNamed(named)
+ label += source.FormatTypeParams(tp)
+ insert = label // maintain invariant above (label == insert)
+ }
+ }
+
+ snip.WriteText(insert)
+
+ switch obj := obj.(type) {
+ case *types.TypeName:
+ detail, kind = source.FormatType(obj.Type(), c.qf)
+ case *types.Const:
+ kind = protocol.ConstantCompletion
+ case *types.Var:
+ if _, ok := obj.Type().(*types.Struct); ok {
+ detail = "struct{...}" // for anonymous structs
+ } else if obj.IsField() {
+ var err error
+ detail, err = source.FormatVarType(ctx, c.snapshot, c.pkg, obj, c.qf, c.mq)
+ if err != nil {
+ return CompletionItem{}, err
+ }
+ }
+ if obj.IsField() {
+ kind = protocol.FieldCompletion
+ c.structFieldSnippet(cand, detail, &snip)
+ } else {
+ kind = protocol.VariableCompletion
+ }
+ if obj.Type() == nil {
+ break
+ }
+ case *types.Func:
+ sig, ok := obj.Type().Underlying().(*types.Signature)
+ if !ok {
+ break
+ }
+ kind = protocol.FunctionCompletion
+ if sig != nil && sig.Recv() != nil {
+ kind = protocol.MethodCompletion
+ }
+ case *types.PkgName:
+ kind = protocol.ModuleCompletion
+ detail = fmt.Sprintf("%q", obj.Imported().Path())
+ case *types.Label:
+ kind = protocol.ConstantCompletion
+ detail = "label"
+ }
+
+ var prefix string
+ for _, mod := range cand.mods {
+ switch mod {
+ case reference:
+ prefix = "&" + prefix
+ case dereference:
+ prefix = "*" + prefix
+ case chanRead:
+ prefix = "<-" + prefix
+ }
+ }
+
+ var (
+ suffix string
+ funcType = obj.Type()
+ )
+Suffixes:
+ for _, mod := range cand.mods {
+ switch mod {
+ case invoke:
+ if sig, ok := funcType.Underlying().(*types.Signature); ok {
+ s, err := source.NewSignature(ctx, c.snapshot, c.pkg, sig, nil, c.qf, c.mq)
+ if err != nil {
+ return CompletionItem{}, err
+ }
+ c.functionCallSnippet("", s.TypeParams(), s.Params(), &snip)
+ if sig.Results().Len() == 1 {
+ funcType = sig.Results().At(0).Type()
+ }
+ detail = "func" + s.Format()
+ }
+
+ if !c.opts.snippets {
+ // Without snippets the candidate will not include "()". Don't
+ // add further suffixes since they will be invalid. For
+ // example, with snippets "foo()..." would become "foo..."
+ // without snippets if we added the dotDotDot.
+ break Suffixes
+ }
+ case takeSlice:
+ suffix += "[:]"
+ case takeDotDotDot:
+ suffix += "..."
+ case index:
+ snip.WriteText("[")
+ snip.WritePlaceholder(nil)
+ snip.WriteText("]")
+ }
+ }
+
+ // If this candidate needs an additional import statement,
+ // add the additional text edits needed.
+ if cand.imp != nil {
+ addlEdits, err := c.importEdits(cand.imp)
+
+ if err != nil {
+ return CompletionItem{}, err
+ }
+
+ protocolEdits = append(protocolEdits, addlEdits...)
+ if kind != protocol.ModuleCompletion {
+ if detail != "" {
+ detail += " "
+ }
+ detail += fmt.Sprintf("(from %q)", cand.imp.importPath)
+ }
+ }
+
+ if cand.convertTo != nil {
+ typeName := types.TypeString(cand.convertTo, c.qf)
+
+ switch cand.convertTo.(type) {
+ // We need extra parens when casting to these types. For example,
+ // we need "(*int)(foo)", not "*int(foo)".
+ case *types.Pointer, *types.Signature:
+ typeName = "(" + typeName + ")"
+ }
+
+ prefix = typeName + "(" + prefix
+ suffix = ")"
+ }
+
+ if prefix != "" {
+ // If we are in a selector, add an edit to place prefix before selector.
+ if sel := enclosingSelector(c.path, c.pos); sel != nil {
+ edits, err := c.editText(sel.Pos(), sel.Pos(), prefix)
+ if err != nil {
+ return CompletionItem{}, err
+ }
+ protocolEdits = append(protocolEdits, edits...)
+ } else {
+ // If there is no selector, just stick the prefix at the start.
+ insert = prefix + insert
+ snip.PrependText(prefix)
+ }
+ }
+
+ if suffix != "" {
+ insert += suffix
+ snip.WriteText(suffix)
+ }
+
+ detail = strings.TrimPrefix(detail, "untyped ")
+ // override computed detail with provided detail, if something is provided.
+ if cand.detail != "" {
+ detail = cand.detail
+ }
+ item := CompletionItem{
+ Label: label,
+ InsertText: insert,
+ AdditionalTextEdits: protocolEdits,
+ Detail: detail,
+ Kind: kind,
+ Score: cand.score,
+ Depth: len(cand.path),
+ snippet: &snip,
+ isSlice: isSlice(obj),
+ }
+ // Return early if the user doesn't want documentation for completion items.
+ if !c.opts.documentation {
+ return item, nil
+ }
+ pos := safetoken.StartPosition(c.pkg.FileSet(), obj.Pos())
+
+ // We ignore errors here, because some types, like "unsafe" or "error",
+ // may not have valid positions that we can use to get documentation.
+ if !pos.IsValid() {
+ return item, nil
+ }
+
+ comment, err := source.HoverDocForObject(ctx, c.snapshot, c.pkg.FileSet(), obj)
+ if err != nil {
+ event.Error(ctx, fmt.Sprintf("failed to find Hover for %q", obj.Name()), err)
+ return item, nil
+ }
+ if c.opts.fullDocumentation {
+ item.Documentation = comment.Text()
+ } else {
+ item.Documentation = doc.Synopsis(comment.Text())
+ }
+ // The desired pattern is `^// Deprecated`, but the prefix has been removed
+ // TODO(rfindley): It doesn't look like this does the right thing for
+ // multi-line comments.
+ if strings.HasPrefix(comment.Text(), "Deprecated") {
+ if c.snapshot.View().Options().CompletionTags {
+ item.Tags = []protocol.CompletionItemTag{protocol.ComplDeprecated}
+ } else if c.snapshot.View().Options().CompletionDeprecated {
+ item.Deprecated = true
+ }
+ }
+
+ return item, nil
+}
+
+// importEdits produces the text edits necessary to add the given import to the current file.
+func (c *completer) importEdits(imp *importInfo) ([]protocol.TextEdit, error) {
+ if imp == nil {
+ return nil, nil
+ }
+
+ pgf, err := c.pkg.File(span.URIFromPath(c.filename))
+ if err != nil {
+ return nil, err
+ }
+
+ return source.ComputeOneImportFixEdits(c.snapshot, pgf, &imports.ImportFix{
+ StmtInfo: imports.ImportInfo{
+ ImportPath: imp.importPath,
+ Name: imp.name,
+ },
+ // IdentName is unused on this path and is difficult to get.
+ FixType: imports.AddImport,
+ })
+}
+
+func (c *completer) formatBuiltin(ctx context.Context, cand candidate) (CompletionItem, error) {
+ obj := cand.obj
+ item := CompletionItem{
+ Label: obj.Name(),
+ InsertText: obj.Name(),
+ Score: cand.score,
+ }
+ switch obj.(type) {
+ case *types.Const:
+ item.Kind = protocol.ConstantCompletion
+ case *types.Builtin:
+ item.Kind = protocol.FunctionCompletion
+ sig, err := source.NewBuiltinSignature(ctx, c.snapshot, obj.Name())
+ if err != nil {
+ return CompletionItem{}, err
+ }
+ item.Detail = "func" + sig.Format()
+ item.snippet = &snippet.Builder{}
+ c.functionCallSnippet(obj.Name(), sig.TypeParams(), sig.Params(), item.snippet)
+ case *types.TypeName:
+ if types.IsInterface(obj.Type()) {
+ item.Kind = protocol.InterfaceCompletion
+ } else {
+ item.Kind = protocol.ClassCompletion
+ }
+ case *types.Nil:
+ item.Kind = protocol.VariableCompletion
+ }
+ return item, nil
+}
+
+// wantTypeParams reports whether the type params (if any) should be part of
+// the completion, which is only possible for types.Named and types.Signature
+// (so far, only in receivers, e.g. func (s *GENERIC[K, V]) ..., which is a types.Named).
+func (c *completer) wantTypeParams() bool {
+ // Need to be lexically in a receiver, and a child of an IndexListExpr
+ // (but IndexListExpr only exists with go1.18)
+ start := c.path[0].Pos()
+ for i, nd := range c.path {
+ if fd, ok := nd.(*ast.FuncDecl); ok {
+ if i > 0 && fd.Recv != nil && start < fd.Recv.End() {
+ return true
+ } else {
+ return false
+ }
+ }
+ }
+ return false
+}
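
Note: a minimal standalone sketch (not part of this patch) of why the cand.convertTo handling above parenthesizes pointer and signature types before applying the conversion:

	package main

	func main() {
		i := 42
		_ = (*int)(&i) // what the completion emits: a conversion to *int
		// _ = *int(&i) // would not compile: parsed as *(int(&i))
	}
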
diff --git a/gopls/internal/lsp/source/completion/fuzz.go b/gopls/internal/lsp/source/completion/fuzz.go
new file mode 100644
index 000000000..08e7654c7
--- /dev/null
+++ b/gopls/internal/lsp/source/completion/fuzz.go
@@ -0,0 +1,142 @@
+// Copyright 2022 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package completion
+
+import (
+ "fmt"
+ "go/ast"
+ "go/token"
+ "go/types"
+ "strings"
+
+ "golang.org/x/tools/gopls/internal/lsp/protocol"
+)
+
+// golang/go#51089
+// *testing.F deserves special treatment as member use is constrained:
+// The arguments to f.Fuzz are determined by the arguments to a previous f.Add
+// Inside f.Fuzz only f.Failed and f.Name are allowed.
+// PJW: are there other packages where we can deduce usage constraints?
+
+// fuzz returns true if it finds fuzz-specific completions; in that case they
+// are the only completions to offer.
+func (c *completer) fuzz(typ types.Type, mset *types.MethodSet, imp *importInfo, cb func(candidate), fset *token.FileSet) bool {
+ // 1. inside f.Fuzz? (only f.Failed and f.Name)
+ // 2. possible completing f.Fuzz?
+ // [Ident, SelectorExpr, CallExpr, ExprStmt, BlockStmt, FuncDecl(Fuzz...)]
+ // 3. before f.Fuzz, same (for 2., offer choice when looking at an F)
+
+ // does the path contain FuncLit as arg to f.Fuzz CallExpr?
+ inside := false
+Loop:
+ for i, n := range c.path {
+ switch v := n.(type) {
+ case *ast.CallExpr:
+ if len(v.Args) != 1 {
+ continue Loop
+ }
+ if _, ok := v.Args[0].(*ast.FuncLit); !ok {
+ continue
+ }
+ if s, ok := v.Fun.(*ast.SelectorExpr); !ok || s.Sel.Name != "Fuzz" {
+ continue
+ }
+ if i > 2 { // avoid t.Fuzz itself in tests
+ inside = true
+ break Loop
+ }
+ }
+ }
+ if inside {
+ for i := 0; i < mset.Len(); i++ {
+ o := mset.At(i).Obj()
+ if o.Name() == "Failed" || o.Name() == "Name" {
+ cb(candidate{
+ obj: o,
+ score: stdScore,
+ imp: imp,
+ addressable: true,
+ })
+ }
+ }
+ return true
+ }
+ // if it could be f.Fuzz, look for the preceding f.Add
+ id, ok := c.path[0].(*ast.Ident)
+ if ok && strings.HasPrefix("Fuzz", id.Name) {
+ var add *ast.CallExpr
+ f := func(n ast.Node) bool {
+ if n == nil {
+ return true
+ }
+ call, ok := n.(*ast.CallExpr)
+ if !ok {
+ return true
+ }
+ s, ok := call.Fun.(*ast.SelectorExpr)
+ if !ok {
+ return true
+ }
+ if s.Sel.Name != "Add" {
+ return true
+ }
+ // Sel.X should be of type *testing.F
+ got := c.pkg.GetTypesInfo().Types[s.X]
+ if got.Type.String() == "*testing.F" {
+ add = call
+ }
+ return false // because we're done...
+ }
+ // look at the enclosing FuzzFoo functions
+ if len(c.path) < 2 {
+ return false
+ }
+ n := c.path[len(c.path)-2]
+ if _, ok := n.(*ast.FuncDecl); !ok {
+ // from the root, the path should be ast.File, ast.FuncDecl, ...,
+ // but it isn't, so give up
+ return false
+ }
+ ast.Inspect(n, f)
+ if add == nil {
+ // looks like f.Fuzz without a preceding f.Add.
+ // let the regular completion handle it.
+ return false
+ }
+
+ lbl := "Fuzz(func(t *testing.T"
+ for i, a := range add.Args {
+ info := c.pkg.GetTypesInfo().TypeOf(a)
+ if info == nil {
+ return false // shouldn't happen, but better safe than panic
+ }
+ lbl += fmt.Sprintf(", %c %s", 'a'+i, info)
+ }
+ lbl += ")"
+ xx := CompletionItem{
+ Label: lbl,
+ InsertText: lbl,
+ Kind: protocol.FunctionCompletion,
+ Depth: 0,
+ Score: 10, // pretty confident the user should see this
+ Documentation: "argument types from f.Add",
+ isSlice: false,
+ }
+ c.items = append(c.items, xx)
+ for i := 0; i < mset.Len(); i++ {
+ o := mset.At(i).Obj()
+ if o.Name() != "Fuzz" {
+ cb(candidate{
+ obj: o,
+ score: stdScore,
+ imp: imp,
+ addressable: true,
+ })
+ }
+ }
+ return true // done
+ }
+ // let the standard processing take care of it instead
+ return false
+}
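
Note: a hypothetical fuzz target illustrating the pattern this completer exploits. The parameter list of the f.Fuzz callback mirrors the arguments of the preceding f.Add, which is exactly how the "Fuzz(func(t *testing.T, ...)" label above is assembled:

	package p

	import "testing"

	func FuzzParse(f *testing.F) {
		f.Add("seed", 7) // the seed entry fixes the callback's parameter types
		f.Fuzz(func(t *testing.T, s string, n int) {
			// only f.Name and f.Failed may be used on f in here,
			// hence the restricted member completions above
			_, _ = s, n
		})
	}
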
diff --git a/gopls/internal/lsp/source/completion/keywords.go b/gopls/internal/lsp/source/completion/keywords.go
new file mode 100644
index 000000000..a068ca2d5
--- /dev/null
+++ b/gopls/internal/lsp/source/completion/keywords.go
@@ -0,0 +1,154 @@
+// Copyright 2020 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package completion
+
+import (
+ "go/ast"
+
+ "golang.org/x/tools/gopls/internal/lsp/protocol"
+ "golang.org/x/tools/gopls/internal/lsp/source"
+)
+
+const (
+ BREAK = "break"
+ CASE = "case"
+ CHAN = "chan"
+ CONST = "const"
+ CONTINUE = "continue"
+ DEFAULT = "default"
+ DEFER = "defer"
+ ELSE = "else"
+ FALLTHROUGH = "fallthrough"
+ FOR = "for"
+ FUNC = "func"
+ GO = "go"
+ GOTO = "goto"
+ IF = "if"
+ IMPORT = "import"
+ INTERFACE = "interface"
+ MAP = "map"
+ PACKAGE = "package"
+ RANGE = "range"
+ RETURN = "return"
+ SELECT = "select"
+ STRUCT = "struct"
+ SWITCH = "switch"
+ TYPE = "type"
+ VAR = "var"
+)
+
+// addKeywordCompletions offers keyword candidates appropriate at the position.
+func (c *completer) addKeywordCompletions() {
+ seen := make(map[string]bool)
+
+ if c.wantTypeName() && c.inference.objType == nil {
+ // If we want a type name but don't have an expected obj type,
+ // include "interface", "struct", "func", "chan", and "map".
+
+ // "interface" and "struct" are more common declaring named types.
+ // Give them a higher score if we are in a type declaration.
+ structIntf, funcChanMap := stdScore, highScore
+ if len(c.path) > 1 {
+ if _, namedDecl := c.path[1].(*ast.TypeSpec); namedDecl {
+ structIntf, funcChanMap = highScore, stdScore
+ }
+ }
+
+ c.addKeywordItems(seen, structIntf, STRUCT, INTERFACE)
+ c.addKeywordItems(seen, funcChanMap, FUNC, CHAN, MAP)
+ }
+
+ // If we are at the file scope, only offer decl keywords. We don't
+ // get *ast.Idents at the file scope because non-keyword identifiers
+ // turn into *ast.BadDecl, not *ast.Ident.
+ if len(c.path) == 1 || isASTFile(c.path[1]) {
+ c.addKeywordItems(seen, stdScore, TYPE, CONST, VAR, FUNC, IMPORT)
+ return
+ } else if _, ok := c.path[0].(*ast.Ident); !ok {
+ // Otherwise only offer keywords if the client is completing an identifier.
+ return
+ }
+
+ if len(c.path) > 2 {
+ // Offer "range" if we are in ast.ForStmt.Init. This is what the
+ // AST looks like before "range" is typed, e.g. "for i := r<>".
+ if loop, ok := c.path[2].(*ast.ForStmt); ok && source.NodeContains(loop.Init, c.pos) {
+ c.addKeywordItems(seen, stdScore, RANGE)
+ }
+ }
+
+ // Only suggest keywords if we are beginning a statement.
+ switch n := c.path[1].(type) {
+ case *ast.BlockStmt, *ast.ExprStmt:
+ // OK - our ident must be at beginning of statement.
+ case *ast.CommClause:
+ // Make sure we aren't in the Comm statement.
+ if !n.Colon.IsValid() || c.pos <= n.Colon {
+ return
+ }
+ case *ast.CaseClause:
+ // Make sure we aren't in the case List.
+ if !n.Colon.IsValid() || c.pos <= n.Colon {
+ return
+ }
+ default:
+ return
+ }
+
+ // Filter out keywords depending on scope
+ // Skip the first one because we want to look at the enclosing scopes
+ path := c.path[1:]
+ for i, n := range path {
+ switch node := n.(type) {
+ case *ast.CaseClause:
+ // only recommend "fallthrough" and "break" within the bodies of a case clause
+ if c.pos > node.Colon {
+ c.addKeywordItems(seen, stdScore, BREAK)
+ // "fallthrough" is only valid in switch statements.
+ // A case clause is always nested within a block statement in a switch statement,
+ // that block statement is nested within either a TypeSwitchStmt or a SwitchStmt.
+ if i+2 >= len(path) {
+ continue
+ }
+ if _, ok := path[i+2].(*ast.SwitchStmt); ok {
+ c.addKeywordItems(seen, stdScore, FALLTHROUGH)
+ }
+ }
+ case *ast.CommClause:
+ if c.pos > node.Colon {
+ c.addKeywordItems(seen, stdScore, BREAK)
+ }
+ case *ast.TypeSwitchStmt, *ast.SelectStmt, *ast.SwitchStmt:
+ c.addKeywordItems(seen, stdScore, CASE, DEFAULT)
+ case *ast.ForStmt, *ast.RangeStmt:
+ c.addKeywordItems(seen, stdScore, BREAK, CONTINUE)
+ // This is a bit weak; functions allow many keywords
+ case *ast.FuncDecl:
+ if node.Body != nil && c.pos > node.Body.Lbrace {
+ c.addKeywordItems(seen, stdScore, DEFER, RETURN, FOR, GO, SWITCH, SELECT, IF, ELSE, VAR, CONST, GOTO, TYPE)
+ }
+ }
+ }
+}
+
+// addKeywordItems dedupes and adds completion items for the specified
+// keywords with the specified score.
+func (c *completer) addKeywordItems(seen map[string]bool, score float64, kws ...string) {
+ for _, kw := range kws {
+ if seen[kw] {
+ continue
+ }
+ seen[kw] = true
+
+ if matchScore := c.matcher.Score(kw); matchScore > 0 {
+ c.items = append(c.items, CompletionItem{
+ Label: kw,
+ Kind: protocol.KeywordCompletion,
+ InsertText: kw,
+ Score: score * float64(matchScore),
+ })
+ }
+ }
+}
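
Note: a compilable sketch of the "fallthrough" rule encoded above. The keyword is offered only when the case clause's grandparent is a plain *ast.SwitchStmt, because it is illegal in type switches:

	package p

	func keywordContexts(n int, v interface{}) {
		switch n {
		case 0:
			fallthrough // legal: value switch
		case 1:
		}
		switch v.(type) {
		case int:
			// a "fallthrough" here would not compile:
			// type switches forbid it
		}
	}
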
diff --git a/gopls/internal/lsp/source/completion/labels.go b/gopls/internal/lsp/source/completion/labels.go
new file mode 100644
index 000000000..e4fd961e3
--- /dev/null
+++ b/gopls/internal/lsp/source/completion/labels.go
@@ -0,0 +1,112 @@
+// Copyright 2019 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package completion
+
+import (
+ "go/ast"
+ "go/token"
+ "math"
+)
+
+type labelType int
+
+const (
+ labelNone labelType = iota
+ labelBreak
+ labelContinue
+ labelGoto
+)
+
+// wantLabelCompletion returns the label type if we want (only) label
+// completions at the position, and labelNone otherwise.
+func (c *completer) wantLabelCompletion() labelType {
+ if _, ok := c.path[0].(*ast.Ident); ok && len(c.path) > 1 {
+ // We want a label if we are an *ast.Ident child of a statement
+ // that accepts a label, e.g. "break Lo<>".
+ return takesLabel(c.path[1])
+ }
+
+ return labelNone
+}
+
+// takesLabel returns the corresponding labelType if n is a statement
+// that accepts a label, otherwise labelNone.
+func takesLabel(n ast.Node) labelType {
+ if bs, ok := n.(*ast.BranchStmt); ok {
+ switch bs.Tok {
+ case token.BREAK:
+ return labelBreak
+ case token.CONTINUE:
+ return labelContinue
+ case token.GOTO:
+ return labelGoto
+ }
+ }
+ return labelNone
+}
+
+// labels adds completion items for labels defined in the enclosing
+// function.
+func (c *completer) labels(lt labelType) {
+ if c.enclosingFunc == nil {
+ return
+ }
+
+ addLabel := func(score float64, l *ast.LabeledStmt) {
+ labelObj := c.pkg.GetTypesInfo().ObjectOf(l.Label)
+ if labelObj != nil {
+ c.deepState.enqueue(candidate{obj: labelObj, score: score})
+ }
+ }
+
+ switch lt {
+ case labelBreak, labelContinue:
+ // "break" and "continue" only accept labels from enclosing statements.
+
+ for i, p := range c.path {
+ switch p := p.(type) {
+ case *ast.FuncLit:
+ // Labels are function scoped, so don't continue out of functions.
+ return
+ case *ast.LabeledStmt:
+ switch p.Stmt.(type) {
+ case *ast.ForStmt, *ast.RangeStmt:
+ // Loop labels can be used for "break" or "continue".
+ addLabel(highScore*math.Pow(.99, float64(i)), p)
+ case *ast.SwitchStmt, *ast.SelectStmt, *ast.TypeSwitchStmt:
+ // Switch and select labels can be used only for "break".
+ if lt == labelBreak {
+ addLabel(highScore*math.Pow(.99, float64(i)), p)
+ }
+ }
+ }
+ }
+ case labelGoto:
+ // Goto accepts any label in the same function not in a nested
+ // block. It also doesn't take labels that would jump across
+ // variable definitions, but ignore that case for now.
+ ast.Inspect(c.enclosingFunc.body, func(n ast.Node) bool {
+ if n == nil {
+ return false
+ }
+
+ switch n := n.(type) {
+ // Only search into block-like nodes enclosing our "goto".
+ // This prevents us from finding labels in nested blocks.
+ case *ast.BlockStmt, *ast.CommClause, *ast.CaseClause:
+ for _, p := range c.path {
+ if n == p {
+ return true
+ }
+ }
+ return false
+ case *ast.LabeledStmt:
+ addLabel(highScore, n)
+ }
+
+ return true
+ })
+ }
+}
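
Note: a compilable sketch of the label scoping rules above. Loop labels work with both "break" and "continue", switch and select labels only with "break", and "goto" may target any label in the same function that is not inside a nested block:

	package p

	func labelRules() {
	outer:
		for i := 0; i < 3; i++ {
			for j := 0; j < 3; j++ {
				if j == 1 {
					continue outer // loop label: ok for "continue"
				}
				if i == 2 {
					break outer // loop label: ok for "break"
				}
			}
		}
	sw:
		switch {
		default:
			break sw // switch label: "break" only
		}
		goto done
	done:
		return
	}
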
diff --git a/gopls/internal/lsp/source/completion/literal.go b/gopls/internal/lsp/source/completion/literal.go
new file mode 100644
index 000000000..06ed559be
--- /dev/null
+++ b/gopls/internal/lsp/source/completion/literal.go
@@ -0,0 +1,592 @@
+// Copyright 2019 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package completion
+
+import (
+ "context"
+ "fmt"
+ "go/types"
+ "strings"
+ "unicode"
+
+ "golang.org/x/tools/gopls/internal/lsp/protocol"
+ "golang.org/x/tools/gopls/internal/lsp/snippet"
+ "golang.org/x/tools/gopls/internal/lsp/source"
+ "golang.org/x/tools/internal/event"
+ "golang.org/x/tools/internal/typeparams"
+)
+
+// literal generates composite literal, function literal, and make()
+// completion items.
+func (c *completer) literal(ctx context.Context, literalType types.Type, imp *importInfo) {
+ if !c.opts.literal {
+ return
+ }
+
+ expType := c.inference.objType
+
+ if c.inference.matchesVariadic(literalType) {
+ // Don't offer literal slice candidates for variadic arguments.
+ // For example, don't offer "[]interface{}{}" in "fmt.Print(<>)".
+ return
+ }
+
+ // Avoid literal candidates if the expected type is an empty
+ // interface. It isn't very useful to suggest a literal candidate of
+ // every possible type.
+ if expType != nil && isEmptyInterface(expType) {
+ return
+ }
+
+ // We handle unnamed literal completions explicitly before searching
+ // for candidates. Avoid named-type literal completions for
+ // unnamed-type expected type since that results in duplicate
+ // candidates. For example, in
+ //
+ // type mySlice []int
+ // var []int = <>
+ //
+ // don't offer "mySlice{}" since we have already added a candidate
+ // of "[]int{}".
+ if _, named := literalType.(*types.Named); named && expType != nil {
+ if _, named := source.Deref(expType).(*types.Named); !named {
+ return
+ }
+ }
+
+ // Check if an object of type literalType would match our expected type.
+ cand := candidate{
+ obj: c.fakeObj(literalType),
+ }
+
+ switch literalType.Underlying().(type) {
+ // These literal types are addressable (e.g. "&[]int{}"), others are
+ // not (e.g. can't do "&(func(){})").
+ case *types.Struct, *types.Array, *types.Slice, *types.Map:
+ cand.addressable = true
+ }
+
+ if !c.matchingCandidate(&cand) || cand.convertTo != nil {
+ return
+ }
+
+ var (
+ qf = c.qf
+ sel = enclosingSelector(c.path, c.pos)
+ )
+
+ // Don't qualify the type name if we are in a selector expression
+ // since the package name is already present.
+ if sel != nil {
+ qf = func(_ *types.Package) string { return "" }
+ }
+
+ snip, typeName := c.typeNameSnippet(literalType, qf)
+
+ // A type name of "[]int" doesn't work very well with the matcher
+ // since "[" isn't a valid identifier prefix. Here we strip off the
+ // slice (and array) prefix yielding just "int".
+ matchName := typeName
+ switch t := literalType.(type) {
+ case *types.Slice:
+ matchName = types.TypeString(t.Elem(), qf)
+ case *types.Array:
+ matchName = types.TypeString(t.Elem(), qf)
+ }
+
+ addlEdits, err := c.importEdits(imp)
+ if err != nil {
+ event.Error(ctx, "error adding import for literal candidate", err)
+ return
+ }
+
+ // If prefix matches the type name, client may want a composite literal.
+ if score := c.matcher.Score(matchName); score > 0 {
+ if cand.hasMod(reference) {
+ if sel != nil {
+ // If we are in a selector we must place the "&" before the selector.
+ // For example, "foo.B<>" must complete to "&foo.Bar{}", not
+ // "foo.&Bar{}".
+ edits, err := c.editText(sel.Pos(), sel.Pos(), "&")
+ if err != nil {
+ event.Error(ctx, "error making edit for literal pointer completion", err)
+ return
+ }
+ addlEdits = append(addlEdits, edits...)
+ } else {
+ // Otherwise we can stick the "&" directly before the type name.
+ typeName = "&" + typeName
+ snip.PrependText("&")
+ }
+ }
+
+ switch t := literalType.Underlying().(type) {
+ case *types.Struct, *types.Array, *types.Slice, *types.Map:
+ c.compositeLiteral(t, snip.Clone(), typeName, float64(score), addlEdits)
+ case *types.Signature:
+ // Add a literal completion for a signature type that implements
+ // an interface. For example, offer "http.HandlerFunc()" when
+ // expected type is "http.Handler".
+ if expType != nil && types.IsInterface(expType) {
+ c.basicLiteral(t, snip.Clone(), typeName, float64(score), addlEdits)
+ }
+ case *types.Basic:
+ // Add a literal completion for basic types that implement our
+ // expected interface (e.g. named string type http.Dir
+ // implements http.FileSystem), or are identical to our expected
+ // type (i.e. yielding a type conversion such as "float64()").
+ if expType != nil && (types.IsInterface(expType) || types.Identical(expType, literalType)) {
+ c.basicLiteral(t, snip.Clone(), typeName, float64(score), addlEdits)
+ }
+ }
+ }
+
+ // If prefix matches "make", client may want a "make()"
+ // invocation. We also include the type name to allow for more
+ // flexible fuzzy matching.
+ if score := c.matcher.Score("make." + matchName); !cand.hasMod(reference) && score > 0 {
+ switch literalType.Underlying().(type) {
+ case *types.Slice:
+ // The second argument to "make()" for slices is required, so default to "0".
+ c.makeCall(snip.Clone(), typeName, "0", float64(score), addlEdits)
+ case *types.Map, *types.Chan:
+ // Maps and channels don't require the second argument, so omit
+ // to keep things simple for now.
+ c.makeCall(snip.Clone(), typeName, "", float64(score), addlEdits)
+ }
+ }
+
+ // If prefix matches "func", client may want a function literal.
+ if score := c.matcher.Score("func"); !cand.hasMod(reference) && score > 0 && (expType == nil || !types.IsInterface(expType)) {
+ switch t := literalType.Underlying().(type) {
+ case *types.Signature:
+ c.functionLiteral(ctx, t, float64(score))
+ }
+ }
+}
+
+// literalCandidateScore is the base score for literal candidates.
+// Literal candidates match the expected type so they should be high
+// scoring, but we want them ranked below lexical objects of the
+// correct type, so scale down highScore.
+const literalCandidateScore = highScore / 2
+
+// functionLiteral adds a function literal completion item for the
+// given signature.
+func (c *completer) functionLiteral(ctx context.Context, sig *types.Signature, matchScore float64) {
+ snip := &snippet.Builder{}
+ snip.WriteText("func(")
+
+ // First we generate names for each param and keep a seen count so
+ // we know if we need to uniquify param names. For example,
+ // "func(int)" will become "func(i int)", but "func(int, int64)"
+ // will become "func(i1 int, i2 int64)".
+ var (
+ paramNames = make([]string, sig.Params().Len())
+ paramNameCount = make(map[string]int)
+ hasTypeParams bool
+ )
+ for i := 0; i < sig.Params().Len(); i++ {
+ var (
+ p = sig.Params().At(i)
+ name = p.Name()
+ )
+
+ if tp, _ := p.Type().(*typeparams.TypeParam); tp != nil && !c.typeParamInScope(tp) {
+ hasTypeParams = true
+ }
+
+ if name == "" {
+ // If the param has no name in the signature, guess a name based
+ // on the type. Use an empty qualifier to ignore the package.
+ // For example, we want to name "http.Request" "r", not "hr".
+ typeName, err := source.FormatVarType(ctx, c.snapshot, c.pkg, p,
+ func(p *types.Package) string { return "" },
+ func(source.PackageName, source.ImportPath, source.PackagePath) string { return "" })
+ if err != nil {
+ // In general, the only error we should encounter while formatting is
+ // context cancellation.
+ if ctx.Err() == nil {
+ event.Error(ctx, "formatting var type", err)
+ }
+ return
+ }
+ name = abbreviateTypeName(typeName)
+ }
+ paramNames[i] = name
+ if name != "_" {
+ paramNameCount[name]++
+ }
+ }
+
+ for n, c := range paramNameCount {
+ // Any names we saw more than once will need a unique suffix added
+ // on. Reset the count to 1 to act as the suffix for the first
+ // name.
+ if c >= 2 {
+ paramNameCount[n] = 1
+ } else {
+ delete(paramNameCount, n)
+ }
+ }
+
+ for i := 0; i < sig.Params().Len(); i++ {
+ if hasTypeParams && !c.opts.placeholders {
+ // If there are type params in the args then the user must
+ // choose the concrete types. If placeholders are disabled just
+ // drop them between the parens and let them fill things in.
+ snip.WritePlaceholder(nil)
+ break
+ }
+
+ if i > 0 {
+ snip.WriteText(", ")
+ }
+
+ var (
+ p = sig.Params().At(i)
+ name = paramNames[i]
+ )
+
+ // Uniquify names by adding on an incrementing numeric suffix.
+ if idx, found := paramNameCount[name]; found {
+ paramNameCount[name]++
+ name = fmt.Sprintf("%s%d", name, idx)
+ }
+
+ if name != p.Name() && c.opts.placeholders {
+ // If we didn't use the signature's param name verbatim then we
+ // may have chosen a poor name. Give the user a placeholder so
+ // they can easily fix the name.
+ snip.WritePlaceholder(func(b *snippet.Builder) {
+ b.WriteText(name)
+ })
+ } else {
+ snip.WriteText(name)
+ }
+
+ // If the following param's type is identical to this one, omit
+ // this param's type string. For example, emit "i, j int" instead
+ // of "i int, j int".
+ if i == sig.Params().Len()-1 || !types.Identical(p.Type(), sig.Params().At(i+1).Type()) {
+ snip.WriteText(" ")
+ typeStr, err := source.FormatVarType(ctx, c.snapshot, c.pkg, p, c.qf, c.mq)
+ if err != nil {
+ // In general, the only error we should encounter while formatting is
+ // context cancellation.
+ if ctx.Err() == nil {
+ event.Error(ctx, "formatting var type", err)
+ }
+ return
+ }
+ if sig.Variadic() && i == sig.Params().Len()-1 {
+ typeStr = strings.Replace(typeStr, "[]", "...", 1)
+ }
+
+ if tp, _ := p.Type().(*typeparams.TypeParam); tp != nil && !c.typeParamInScope(tp) {
+ snip.WritePlaceholder(func(snip *snippet.Builder) {
+ snip.WriteText(typeStr)
+ })
+ } else {
+ snip.WriteText(typeStr)
+ }
+ }
+ }
+ snip.WriteText(")")
+
+ results := sig.Results()
+ if results.Len() > 0 {
+ snip.WriteText(" ")
+ }
+
+ resultsNeedParens := results.Len() > 1 ||
+ results.Len() == 1 && results.At(0).Name() != ""
+
+ var resultHasTypeParams bool
+ for i := 0; i < results.Len(); i++ {
+ if tp, _ := results.At(i).Type().(*typeparams.TypeParam); tp != nil && !c.typeParamInScope(tp) {
+ resultHasTypeParams = true
+ }
+ }
+
+ if resultsNeedParens {
+ snip.WriteText("(")
+ }
+ for i := 0; i < results.Len(); i++ {
+ if resultHasTypeParams && !c.opts.placeholders {
+ // Leave an empty tabstop if placeholders are disabled and there
+ // are type args that need specifying.
+ snip.WritePlaceholder(nil)
+ break
+ }
+
+ if i > 0 {
+ snip.WriteText(", ")
+ }
+ r := results.At(i)
+ if name := r.Name(); name != "" {
+ snip.WriteText(name + " ")
+ }
+
+ text, err := source.FormatVarType(ctx, c.snapshot, c.pkg, r, c.qf, c.mq)
+ if err != nil {
+ // In general, the only error we should encounter while formatting is
+ // context cancellation.
+ if ctx.Err() == nil {
+ event.Error(ctx, "formatting var type", err)
+ }
+ return
+ }
+ if tp, _ := r.Type().(*typeparams.TypeParam); tp != nil && !c.typeParamInScope(tp) {
+ snip.WritePlaceholder(func(snip *snippet.Builder) {
+ snip.WriteText(text)
+ })
+ } else {
+ snip.WriteText(text)
+ }
+ }
+ if resultsNeedParens {
+ snip.WriteText(")")
+ }
+
+ snip.WriteText(" {")
+ snip.WriteFinalTabstop()
+ snip.WriteText("}")
+
+ c.items = append(c.items, CompletionItem{
+ Label: "func(...) {}",
+ Score: matchScore * literalCandidateScore,
+ Kind: protocol.VariableCompletion,
+ snippet: snip,
+ })
+}
+
+// conventionalAcronyms contains conventional acronyms for type names
+// in lower case. For example, "ctx" for "context" and "err" for "error".
+var conventionalAcronyms = map[string]string{
+ "context": "ctx",
+ "error": "err",
+ "tx": "tx",
+ "responsewriter": "w",
+}
+
+// abbreviateTypeName abbreviates type names into acronyms. For
+// example, "fooBar" is abbreviated "fb". Care is taken to ignore
+// non-identifier runes. For example, "[]int" becomes "i", and
+// "struct { i int }" becomes "s".
+func abbreviateTypeName(s string) string {
+ var (
+ b strings.Builder
+ useNextUpper bool
+ )
+
+ // Trim off leading and trailing non-letters. We trim everything between "[" and
+ // "]" to handle array types like "[someConst]int".
+ var inBracket bool
+ s = strings.TrimFunc(s, func(r rune) bool {
+ if inBracket {
+ inBracket = r != ']'
+ return true
+ }
+
+ if r == '[' {
+ inBracket = true
+ }
+
+ return !unicode.IsLetter(r)
+ })
+
+ if acr, ok := conventionalAcronyms[strings.ToLower(s)]; ok {
+ return acr
+ }
+
+ for i, r := range s {
+ // Stop if we encounter a non-identifier rune.
+ if !unicode.IsLetter(r) && !unicode.IsNumber(r) {
+ break
+ }
+
+ if i == 0 {
+ b.WriteRune(unicode.ToLower(r))
+ }
+
+ if unicode.IsUpper(r) {
+ if useNextUpper {
+ b.WriteRune(unicode.ToLower(r))
+ useNextUpper = false
+ }
+ } else {
+ useNextUpper = true
+ }
+ }
+
+ return b.String()
+}
+
+// compositeLiteral adds a composite literal completion item for the given typeName.
+func (c *completer) compositeLiteral(T types.Type, snip *snippet.Builder, typeName string, matchScore float64, edits []protocol.TextEdit) {
+ snip.WriteText("{")
+ // Don't put the tab stop inside the composite literal curlies "{}"
+ // for structs that have no accessible fields.
+ if strct, ok := T.(*types.Struct); !ok || fieldsAccessible(strct, c.pkg.GetTypes()) {
+ snip.WriteFinalTabstop()
+ }
+ snip.WriteText("}")
+
+ nonSnippet := typeName + "{}"
+
+ c.items = append(c.items, CompletionItem{
+ Label: nonSnippet,
+ InsertText: nonSnippet,
+ Score: matchScore * literalCandidateScore,
+ Kind: protocol.VariableCompletion,
+ AdditionalTextEdits: edits,
+ snippet: snip,
+ })
+}
+
+// basicLiteral adds a literal completion item for the given basic
+// type name typeName.
+func (c *completer) basicLiteral(T types.Type, snip *snippet.Builder, typeName string, matchScore float64, edits []protocol.TextEdit) {
+ // Never give type conversions like "untyped int()".
+ if isUntyped(T) {
+ return
+ }
+
+ snip.WriteText("(")
+ snip.WriteFinalTabstop()
+ snip.WriteText(")")
+
+ nonSnippet := typeName + "()"
+
+ c.items = append(c.items, CompletionItem{
+ Label: nonSnippet,
+ InsertText: nonSnippet,
+ Detail: T.String(),
+ Score: matchScore * literalCandidateScore,
+ Kind: protocol.VariableCompletion,
+ AdditionalTextEdits: edits,
+ snippet: snip,
+ })
+}
+
+// makeCall adds a completion item for a "make()" call given a specific type.
+func (c *completer) makeCall(snip *snippet.Builder, typeName string, secondArg string, matchScore float64, edits []protocol.TextEdit) {
+ // Keep it simple and don't add any placeholders for optional "make()" arguments.
+
+ snip.PrependText("make(")
+ if secondArg != "" {
+ snip.WriteText(", ")
+ snip.WritePlaceholder(func(b *snippet.Builder) {
+ if c.opts.placeholders {
+ b.WriteText(secondArg)
+ }
+ })
+ }
+ snip.WriteText(")")
+
+ var nonSnippet strings.Builder
+ nonSnippet.WriteString("make(" + typeName)
+ if secondArg != "" {
+ nonSnippet.WriteString(", ")
+ nonSnippet.WriteString(secondArg)
+ }
+ nonSnippet.WriteByte(')')
+
+ c.items = append(c.items, CompletionItem{
+ Label: nonSnippet.String(),
+ InsertText: nonSnippet.String(),
+ Score: matchScore * literalCandidateScore,
+ Kind: protocol.FunctionCompletion,
+ AdditionalTextEdits: edits,
+ snippet: snip,
+ })
+}
+
+// typeNameSnippet creates a snippet for a type name in which unspecified type
+// params become placeholders.
+func (c *completer) typeNameSnippet(literalType types.Type, qf types.Qualifier) (*snippet.Builder, string) {
+ var (
+ snip snippet.Builder
+ typeName string
+ named, _ = literalType.(*types.Named)
+ )
+
+ if named != nil && named.Obj() != nil && typeparams.ForNamed(named).Len() > 0 && !c.fullyInstantiated(named) {
+ // We are not "fully instantiated" meaning we have type params that must be specified.
+ if pkg := qf(named.Obj().Pkg()); pkg != "" {
+ typeName = pkg + "."
+ }
+
+ // We do this to get "someType" instead of "someType[T]".
+ typeName += named.Obj().Name()
+ snip.WriteText(typeName + "[")
+
+ if c.opts.placeholders {
+ for i := 0; i < typeparams.ForNamed(named).Len(); i++ {
+ if i > 0 {
+ snip.WriteText(", ")
+ }
+ snip.WritePlaceholder(func(snip *snippet.Builder) {
+ snip.WriteText(types.TypeString(typeparams.ForNamed(named).At(i), qf))
+ })
+ }
+ } else {
+ snip.WritePlaceholder(nil)
+ }
+ snip.WriteText("]")
+ typeName += "[...]"
+ } else {
+ // We don't have unspecified type params so use default type formatting.
+ typeName = types.TypeString(literalType, qf)
+ snip.WriteText(typeName)
+ }
+
+ return &snip, typeName
+}
+
+// fullyInstantiated reports whether all of t's type params have
+// specified type args.
+func (c *completer) fullyInstantiated(t *types.Named) bool {
+ tps := typeparams.ForNamed(t)
+ tas := typeparams.NamedTypeArgs(t)
+
+ if tps.Len() != tas.Len() {
+ return false
+ }
+
+ for i := 0; i < tas.Len(); i++ {
+ switch ta := tas.At(i).(type) {
+ case *typeparams.TypeParam:
+ // A *TypeParam only counts as specified if it is currently in
+ // scope (i.e. we are in a generic definition).
+ if !c.typeParamInScope(ta) {
+ return false
+ }
+ case *types.Named:
+ if !c.fullyInstantiated(ta) {
+ return false
+ }
+ }
+ }
+ return true
+}
+
+// typeParamInScope returns whether tp's object is in scope at c.pos.
+// This tells you whether you are in a generic definition and can
+// assume tp has been specified.
+func (c *completer) typeParamInScope(tp *typeparams.TypeParam) bool {
+ obj := tp.Obj()
+ if obj == nil {
+ return false
+ }
+
+ scope := c.innermostScope()
+ if scope == nil {
+ return false
+ }
+
+ _, foundObj := scope.LookupParent(obj.Name(), c.pos)
+ return obj == foundObj
+}
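
Note: a test-style sketch (hypothetical; it would live in this package next to abbreviateTypeName, with the usual testing import) pinning down the abbreviation behavior described in the comments above, including the conventional acronyms and the bracket trimming:

	func TestAbbreviateTypeName(t *testing.T) {
		cases := map[string]string{
			"fooBar":         "fb",  // first rune plus subsequent upper-case runes
			"[]int":          "i",   // slice prefix trimmed
			"[someConst]int": "i",   // array length trimmed by the bracket handling
			"Context":        "ctx", // conventional acronym
			"error":          "err", // conventional acronym
		}
		for in, want := range cases {
			if got := abbreviateTypeName(in); got != want {
				t.Errorf("abbreviateTypeName(%q) = %q, want %q", in, got, want)
			}
		}
	}
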
diff --git a/gopls/internal/lsp/source/completion/package.go b/gopls/internal/lsp/source/completion/package.go
new file mode 100644
index 000000000..f3bc30688
--- /dev/null
+++ b/gopls/internal/lsp/source/completion/package.go
@@ -0,0 +1,351 @@
+// Copyright 2020 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package completion
+
+import (
+ "bytes"
+ "context"
+ "errors"
+ "fmt"
+ "go/ast"
+ "go/parser"
+ "go/scanner"
+ "go/token"
+ "go/types"
+ "path/filepath"
+ "strings"
+ "unicode"
+
+ "golang.org/x/tools/gopls/internal/lsp/protocol"
+ "golang.org/x/tools/gopls/internal/lsp/safetoken"
+ "golang.org/x/tools/gopls/internal/lsp/source"
+ "golang.org/x/tools/gopls/internal/span"
+ "golang.org/x/tools/internal/fuzzy"
+)
+
+// packageClauseCompletions offers completions for a package declaration when
+// one is not present in the given file.
+func packageClauseCompletions(ctx context.Context, snapshot source.Snapshot, fh source.FileHandle, position protocol.Position) ([]CompletionItem, *Selection, error) {
+ // We know that the AST for this file will be empty due to the missing
+ // package declaration, but parse it anyway to get a mapper.
+ // TODO(adonovan): opt: there's no need to parse just to get a mapper.
+ pgf, err := snapshot.ParseGo(ctx, fh, source.ParseFull)
+ if err != nil {
+ return nil, nil, err
+ }
+
+ offset, err := pgf.Mapper.PositionOffset(position)
+ if err != nil {
+ return nil, nil, err
+ }
+ surrounding, err := packageCompletionSurrounding(pgf, offset)
+ if err != nil {
+ return nil, nil, fmt.Errorf("invalid position for package completion: %w", err)
+ }
+
+ packageSuggestions, err := packageSuggestions(ctx, snapshot, fh.URI(), "")
+ if err != nil {
+ return nil, nil, err
+ }
+
+ var items []CompletionItem
+ for _, pkg := range packageSuggestions {
+ insertText := fmt.Sprintf("package %s", pkg.name)
+ items = append(items, CompletionItem{
+ Label: insertText,
+ Kind: protocol.ModuleCompletion,
+ InsertText: insertText,
+ Score: pkg.score,
+ })
+ }
+
+ return items, surrounding, nil
+}
+
+// packageCompletionSurrounding returns the surrounding for package completion
+// if package completions can be suggested at a given cursor offset. A valid location
+// for package completion is above any declarations or import statements.
+func packageCompletionSurrounding(pgf *source.ParsedGoFile, offset int) (*Selection, error) {
+ m := pgf.Mapper
+ // If the file lacks a package declaration, the parser will return an empty
+ // AST. As a work-around, try to parse an expression from the file contents.
+ fset := token.NewFileSet()
+ expr, _ := parser.ParseExprFrom(fset, m.URI.Filename(), pgf.Src, parser.Mode(0))
+ if expr == nil {
+ return nil, fmt.Errorf("unparseable file (%s)", m.URI)
+ }
+ tok := fset.File(expr.Pos())
+ cursor := tok.Pos(offset)
+
+ // If we were able to parse out an identifier as the first expression from
+ // the file, it may be the beginning of a package declaration ("pack ").
+ // We can offer package completions if the cursor is in the identifier.
+ if name, ok := expr.(*ast.Ident); ok {
+ if cursor >= name.Pos() && cursor <= name.End() {
+ if !strings.HasPrefix(PACKAGE, name.Name) {
+ return nil, fmt.Errorf("cursor in non-matching ident")
+ }
+ return &Selection{
+ content: name.Name,
+ cursor: cursor,
+ tokFile: tok,
+ start: name.Pos(),
+ end: name.End(),
+ mapper: m,
+ }, nil
+ }
+ }
+
+ // The file is invalid, but it contains an expression that we were able to
+ // parse. We will use this expression to construct the cursor's
+ // "surrounding".
+
+ // First, consider the possibility that we have a valid "package" keyword
+ // with an empty package name ("package "). "package" is parsed as an
+ // *ast.BadDecl since it is a keyword. This logic would allow "package" to
+ // appear on any line of the file as long as it's the first code expression
+ // in the file.
+ lines := strings.Split(string(pgf.Src), "\n")
+ cursorLine := tok.Line(cursor)
+ if cursorLine <= 0 || cursorLine > len(lines) {
+ return nil, fmt.Errorf("invalid line number")
+ }
+ if safetoken.StartPosition(fset, expr.Pos()).Line == cursorLine {
+ words := strings.Fields(lines[cursorLine-1])
+ if len(words) > 0 && words[0] == PACKAGE {
+ content := PACKAGE
+ // Account for spaces if there are any.
+ if len(words) > 1 {
+ content += " "
+ }
+
+ start := expr.Pos()
+ end := token.Pos(int(expr.Pos()) + len(content) + 1)
+ // We have verified that we have a valid 'package' keyword as our
+ // first expression. Ensure that cursor is in this keyword or
+ // otherwise fallback to the general case.
+ if cursor >= start && cursor <= end {
+ return &Selection{
+ content: content,
+ cursor: cursor,
+ tokFile: tok,
+ start: start,
+ end: end,
+ mapper: m,
+ }, nil
+ }
+ }
+ }
+
+ // If the cursor is after the start of the expression, no package
+ // declaration will be valid.
+ if cursor > expr.Pos() {
+ return nil, fmt.Errorf("cursor after expression")
+ }
+
+ // If the cursor is in a comment, don't offer any completions.
+ if cursorInComment(tok, cursor, m.Content) {
+ return nil, fmt.Errorf("cursor in comment")
+ }
+
+ // The surrounding range in this case is the cursor.
+ return &Selection{
+ content: "",
+ tokFile: tok,
+ start: cursor,
+ end: cursor,
+ cursor: cursor,
+ mapper: m,
+ }, nil
+}
+
+func cursorInComment(file *token.File, cursor token.Pos, src []byte) bool {
+ var s scanner.Scanner
+ s.Init(file, src, func(_ token.Position, _ string) {}, scanner.ScanComments)
+ for {
+ pos, tok, lit := s.Scan()
+ if pos <= cursor && cursor <= token.Pos(int(pos)+len(lit)) {
+ return tok == token.COMMENT
+ }
+ if tok == token.EOF {
+ break
+ }
+ }
+ return false
+}
+
+// packageNameCompletions returns name completions for a package clause using
+// the current name as prefix.
+func (c *completer) packageNameCompletions(ctx context.Context, fileURI span.URI, name *ast.Ident) error {
+ cursor := int(c.pos - name.NamePos)
+ if cursor < 0 || cursor > len(name.Name) {
+ return errors.New("cursor is not in package name identifier")
+ }
+
+ c.completionContext.packageCompletion = true
+
+ prefix := name.Name[:cursor]
+ packageSuggestions, err := packageSuggestions(ctx, c.snapshot, fileURI, prefix)
+ if err != nil {
+ return err
+ }
+
+ for _, pkg := range packageSuggestions {
+ c.deepState.enqueue(pkg)
+ }
+ return nil
+}
+
+// packageSuggestions returns a list of packages from workspace packages that
+// have the given prefix and are used in the same directory as the given
+// file. This also includes test packages for these packages (<pkg>_test) and
+// the directory name itself.
+func packageSuggestions(ctx context.Context, snapshot source.Snapshot, fileURI span.URI, prefix string) (packages []candidate, err error) {
+ active, err := snapshot.ActiveMetadata(ctx)
+ if err != nil {
+ return nil, err
+ }
+
+ toCandidate := func(name string, score float64) candidate {
+ obj := types.NewPkgName(0, nil, name, types.NewPackage("", name))
+ return candidate{obj: obj, name: name, detail: name, score: score}
+ }
+
+ matcher := fuzzy.NewMatcher(prefix)
+
+ // Always try to suggest a main package
+ defer func() {
+ if score := float64(matcher.Score("main")); score > 0 {
+ packages = append(packages, toCandidate("main", score*lowScore))
+ }
+ }()
+
+ dirPath := filepath.Dir(fileURI.Filename())
+ dirName := filepath.Base(dirPath)
+ if !isValidDirName(dirName) {
+ return packages, nil
+ }
+ pkgName := convertDirNameToPkgName(dirName)
+
+ seenPkgs := make(map[source.PackageName]struct{})
+
+ // The `go` command by default only allows one package per directory, but we
+ // support multiple package suggestions since gopls is build-system agnostic.
+ for _, m := range active {
+ if m.Name == "main" || m.Name == "" {
+ continue
+ }
+ if _, ok := seenPkgs[m.Name]; ok {
+ continue
+ }
+
+ // Only add packages that are previously used in the current directory.
+ var relevantPkg bool
+ for _, uri := range m.CompiledGoFiles {
+ if filepath.Dir(uri.Filename()) == dirPath {
+ relevantPkg = true
+ break
+ }
+ }
+ if !relevantPkg {
+ continue
+ }
+
+ // Add a found package used in current directory as a high relevance
+ // suggestion and the test package for it as a medium relevance
+ // suggestion.
+ if score := float64(matcher.Score(string(m.Name))); score > 0 {
+ packages = append(packages, toCandidate(string(m.Name), score*highScore))
+ }
+ seenPkgs[m.Name] = struct{}{}
+
+ testPkgName := m.Name + "_test"
+ if _, ok := seenPkgs[testPkgName]; ok || strings.HasSuffix(string(m.Name), "_test") {
+ continue
+ }
+ if score := float64(matcher.Score(string(testPkgName))); score > 0 {
+ packages = append(packages, toCandidate(string(testPkgName), score*stdScore))
+ }
+ seenPkgs[testPkgName] = struct{}{}
+ }
+
+ // Add current directory name as a low relevance suggestion.
+ if _, ok := seenPkgs[pkgName]; !ok {
+ if score := float64(matcher.Score(string(pkgName))); score > 0 {
+ packages = append(packages, toCandidate(string(pkgName), score*lowScore))
+ }
+
+ testPkgName := pkgName + "_test"
+ if score := float64(matcher.Score(string(testPkgName))); score > 0 {
+ packages = append(packages, toCandidate(string(testPkgName), score*lowScore))
+ }
+ }
+
+ return packages, nil
+}
+
+// isValidDirName checks whether the passed directory name can be used in
+// a package path. Requirements for a package path can be found here:
+// https://golang.org/ref/mod#go-mod-file-ident.
+func isValidDirName(dirName string) bool {
+ if dirName == "" {
+ return false
+ }
+
+ for i, ch := range dirName {
+ if isLetter(ch) || isDigit(ch) {
+ continue
+ }
+ if i == 0 {
+ // Directory name can start only with '_'. '.' is not allowed in module paths.
+ // '-' and '~' are not allowed because elements of package paths must be
+ // safe command-line arguments.
+ if ch == '_' {
+ continue
+ }
+ } else {
+ // Module path elements can't end with '.'
+ if isAllowedPunctuation(ch) && (i != len(dirName)-1 || ch != '.') {
+ continue
+ }
+ }
+
+ return false
+ }
+ return true
+}
+
+// convertDirNameToPkgName converts a valid directory name to a valid package name.
+// It leaves only letters and digits. All letters are mapped to lower case.
+func convertDirNameToPkgName(dirName string) source.PackageName {
+ var buf bytes.Buffer
+ for _, ch := range dirName {
+ switch {
+ case isLetter(ch):
+ buf.WriteRune(unicode.ToLower(ch))
+
+ case buf.Len() != 0 && isDigit(ch):
+ buf.WriteRune(ch)
+ }
+ }
+ return source.PackageName(buf.String())
+}
+
+// isLetter and isDigit allow only ASCII characters because
+// "Each path element is a non-empty string made of up ASCII letters,
+// ASCII digits, and limited ASCII punctuation"
+// (see https://golang.org/ref/mod#go-mod-file-ident).
+
+func isLetter(ch rune) bool {
+ return 'a' <= ch && ch <= 'z' || 'A' <= ch && ch <= 'Z'
+}
+
+func isDigit(ch rune) bool {
+ return '0' <= ch && ch <= '9'
+}
+
+func isAllowedPunctuation(ch rune) bool {
+ return ch == '_' || ch == '-' || ch == '~' || ch == '.'
+}
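
Note: a worked example of the ranking packageSuggestions produces, for a hypothetical file in a directory named "go-utils" whose sibling files already declare package utils:

	// package utils        (package already used in this directory: highScore)
	// package utils_test   (its test package: stdScore)
	// package goutils      (directory name via convertDirNameToPkgName: lowScore)
	// package goutils_test (lowScore)
	// package main         (always considered, appended last: lowScore)

Each candidate is still filtered by the fuzzy matcher against whatever prefix the user has typed.
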
diff --git a/gopls/internal/lsp/source/completion/package_test.go b/gopls/internal/lsp/source/completion/package_test.go
new file mode 100644
index 000000000..614359fa5
--- /dev/null
+++ b/gopls/internal/lsp/source/completion/package_test.go
@@ -0,0 +1,81 @@
+// Copyright 2021 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package completion
+
+import (
+ "testing"
+
+ "golang.org/x/tools/gopls/internal/lsp/source"
+)
+
+func TestIsValidDirName(t *testing.T) {
+ tests := []struct {
+ dirName string
+ valid bool
+ }{
+ {dirName: "", valid: false},
+ //
+ {dirName: "a", valid: true},
+ {dirName: "abcdef", valid: true},
+ {dirName: "AbCdEf", valid: true},
+ //
+ {dirName: "1a35", valid: true},
+ {dirName: "a16", valid: true},
+ //
+ {dirName: "_a", valid: true},
+ {dirName: "a_", valid: true},
+ //
+ {dirName: "~a", valid: false},
+ {dirName: "a~", valid: true},
+ //
+ {dirName: "-a", valid: false},
+ {dirName: "a-", valid: true},
+ //
+ {dirName: ".a", valid: false},
+ {dirName: "a.", valid: false},
+ //
+ {dirName: "a~_b--c.-e", valid: true},
+ {dirName: "~a~_b--c.-e", valid: false},
+ {dirName: "a~_b--c.-e--~", valid: true},
+ {dirName: "a~_b--2134dc42.-e6--~", valid: true},
+ {dirName: "abc`def", valid: false},
+ {dirName: "тест", valid: false},
+ {dirName: "你好", valid: false},
+ }
+ for _, tt := range tests {
+ valid := isValidDirName(tt.dirName)
+ if tt.valid != valid {
+ t.Errorf("%s: expected %v, got %v", tt.dirName, tt.valid, valid)
+ }
+ }
+}
+
+func TestConvertDirNameToPkgName(t *testing.T) {
+ tests := []struct {
+ dirName string
+ pkgName source.PackageName
+ }{
+ {dirName: "a", pkgName: "a"},
+ {dirName: "abcdef", pkgName: "abcdef"},
+ {dirName: "AbCdEf", pkgName: "abcdef"},
+ {dirName: "1a35", pkgName: "a35"},
+ {dirName: "14a35", pkgName: "a35"},
+ {dirName: "a16", pkgName: "a16"},
+ {dirName: "_a", pkgName: "a"},
+ {dirName: "a_", pkgName: "a"},
+ {dirName: "a~", pkgName: "a"},
+ {dirName: "a-", pkgName: "a"},
+ {dirName: "a~_b--c.-e", pkgName: "abce"},
+ {dirName: "a~_b--c.-e--~", pkgName: "abce"},
+ {dirName: "a~_b--2134dc42.-e6--~", pkgName: "ab2134dc42e6"},
+ }
+ for _, tt := range tests {
+ pkgName := convertDirNameToPkgName(tt.dirName)
+ if tt.pkgName != pkgName {
+ t.Errorf("%s: expected %v, got %v", tt.dirName, tt.pkgName, pkgName)
+ continue
+ }
+ }
+}
diff --git a/gopls/internal/lsp/source/completion/postfix_snippets.go b/gopls/internal/lsp/source/completion/postfix_snippets.go
new file mode 100644
index 000000000..0737ec246
--- /dev/null
+++ b/gopls/internal/lsp/source/completion/postfix_snippets.go
@@ -0,0 +1,471 @@
+// Copyright 2020 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package completion
+
+import (
+ "context"
+ "fmt"
+ "go/ast"
+ "go/token"
+ "go/types"
+ "log"
+ "reflect"
+ "strings"
+ "sync"
+ "text/template"
+
+ "golang.org/x/tools/gopls/internal/lsp/protocol"
+ "golang.org/x/tools/gopls/internal/lsp/snippet"
+ "golang.org/x/tools/gopls/internal/lsp/source"
+ "golang.org/x/tools/internal/event"
+ "golang.org/x/tools/internal/imports"
+)
+
+// Postfix snippets are artificial methods that allow the user to
+// compose common operations in an "argument oriented" fashion. For
+// example, instead of "sort.Slice(someSlice, ...)" a user can expand
+// "someSlice.sort!".
+
+// postfixTmpl represents a postfix snippet completion candidate.
+type postfixTmpl struct {
+ // label is the completion candidate's label presented to the user.
+ label string
+
+ // details is passed along to the client as the candidate's details.
+ details string
+
+ // body is the template text. See postfixTmplArgs for details on the
+ // facilities available to the template.
+ body string
+
+ tmpl *template.Template
+}
+
+// postfixTmplArgs are the template execution arguments available to
+// the postfix snippet templates.
+type postfixTmplArgs struct {
+ // StmtOK is true if it is valid to replace the selector with a
+ // statement. For example:
+ //
+ // func foo() {
+ // bar.sort! // statement okay
+ //
+ // someMethod(bar.sort!) // statement not okay
+ // }
+ StmtOK bool
+
+ // X is the textual SelectorExpr.X. For example, when completing
+ // "foo.bar.print!", "X" is "foo.bar".
+ X string
+
+ // Obj is the types.Object of SelectorExpr.X, if any.
+ Obj types.Object
+
+ // Type is the type of "foo.bar" in "foo.bar.print!".
+ Type types.Type
+
+ scope *types.Scope
+ snip snippet.Builder
+ importIfNeeded func(pkgPath string, scope *types.Scope) (name string, edits []protocol.TextEdit, err error)
+ edits []protocol.TextEdit
+ qf types.Qualifier
+ varNames map[string]bool
+}
+
+var postfixTmpls = []postfixTmpl{{
+ label: "sort",
+ details: "sort.Slice()",
+ body: `{{if and (eq .Kind "slice") .StmtOK -}}
+{{.Import "sort"}}.Slice({{.X}}, func({{.VarName nil "i"}}, {{.VarName nil "j"}} int) bool {
+ {{.Cursor}}
+})
+{{- end}}`,
+}, {
+ label: "last",
+ details: "s[len(s)-1]",
+ body: `{{if and (eq .Kind "slice") .Obj -}}
+{{.X}}[len({{.X}})-1]
+{{- end}}`,
+}, {
+ label: "reverse",
+ details: "reverse slice",
+ body: `{{if and (eq .Kind "slice") .StmtOK -}}
+{{$i := .VarName nil "i"}}{{$j := .VarName nil "j" -}}
+for {{$i}}, {{$j}} := 0, len({{.X}})-1; {{$i}} < {{$j}}; {{$i}}, {{$j}} = {{$i}}+1, {{$j}}-1 {
+ {{.X}}[{{$i}}], {{.X}}[{{$j}}] = {{.X}}[{{$j}}], {{.X}}[{{$i}}]
+}
+{{end}}`,
+}, {
+ label: "range",
+ details: "range over slice",
+ body: `{{if and (eq .Kind "slice") .StmtOK -}}
+for {{.VarName nil "i"}}, {{.VarName .ElemType "v"}} := range {{.X}} {
+ {{.Cursor}}
+}
+{{- end}}`,
+}, {
+ label: "append",
+ details: "append and re-assign slice",
+ body: `{{if and (eq .Kind "slice") .StmtOK .Obj -}}
+{{.X}} = append({{.X}}, {{.Cursor}})
+{{- end}}`,
+}, {
+ label: "append",
+ details: "append to slice",
+ body: `{{if and (eq .Kind "slice") (not .StmtOK) -}}
+append({{.X}}, {{.Cursor}})
+{{- end}}`,
+}, {
+ label: "copy",
+ details: "duplicate slice",
+ body: `{{if and (eq .Kind "slice") .StmtOK .Obj -}}
+{{$v := (.VarName nil (printf "%sCopy" .X))}}{{$v}} := make([]{{.TypeName .ElemType}}, len({{.X}}))
+copy({{$v}}, {{.X}})
+{{end}}`,
+}, {
+ label: "range",
+ details: "range over map",
+ body: `{{if and (eq .Kind "map") .StmtOK -}}
+for {{.VarName .KeyType "k"}}, {{.VarName .ElemType "v"}} := range {{.X}} {
+ {{.Cursor}}
+}
+{{- end}}`,
+}, {
+ label: "clear",
+ details: "clear map contents",
+ body: `{{if and (eq .Kind "map") .StmtOK -}}
+{{$k := (.VarName .KeyType "k")}}for {{$k}} := range {{.X}} {
+ delete({{.X}}, {{$k}})
+}
+{{end}}`,
+}, {
+ label: "keys",
+ details: "create slice of keys",
+ body: `{{if and (eq .Kind "map") .StmtOK -}}
+{{$keysVar := (.VarName nil "keys")}}{{$keysVar}} := make([]{{.TypeName .KeyType}}, 0, len({{.X}}))
+{{$k := (.VarName .KeyType "k")}}for {{$k}} := range {{.X}} {
+ {{$keysVar}} = append({{$keysVar}}, {{$k}})
+}
+{{end}}`,
+}, {
+ label: "range",
+ details: "range over channel",
+ body: `{{if and (eq .Kind "chan") .StmtOK -}}
+for {{.VarName .ElemType "e"}} := range {{.X}} {
+ {{.Cursor}}
+}
+{{- end}}`,
+}, {
+ label: "var",
+ details: "assign to variables",
+ body: `{{if and (eq .Kind "tuple") .StmtOK -}}
+{{$a := .}}{{range $i, $v := .Tuple}}{{if $i}}, {{end}}{{$a.VarName $v.Type $v.Name}}{{end}} := {{.X}}
+{{- end}}`,
+}, {
+ label: "var",
+ details: "assign to variable",
+ body: `{{if and (ne .Kind "tuple") .StmtOK -}}
+{{.VarName .Type ""}} := {{.X}}
+{{- end}}`,
+}, {
+ label: "print",
+ details: "print to stdout",
+ body: `{{if and (ne .Kind "tuple") .StmtOK -}}
+{{.Import "fmt"}}.Printf("{{.EscapeQuotes .X}}: %v\n", {{.X}})
+{{- end}}`,
+}, {
+ label: "print",
+ details: "print to stdout",
+ body: `{{if and (eq .Kind "tuple") .StmtOK -}}
+{{.Import "fmt"}}.Println({{.X}})
+{{- end}}`,
+}, {
+ label: "split",
+ details: "split string",
+ body: `{{if (eq (.TypeName .Type) "string") -}}
+{{.Import "strings"}}.Split({{.X}}, "{{.Cursor}}")
+{{- end}}`,
+}, {
+ label: "join",
+ details: "join string slice",
+ body: `{{if and (eq .Kind "slice") (eq (.TypeName .ElemType) "string") -}}
+{{.Import "strings"}}.Join({{.X}}, "{{.Cursor}}")
+{{- end}}`,
+}}
+
+// Cursor indicates where the client's cursor should end up after the
+// snippet is done.
+func (a *postfixTmplArgs) Cursor() string {
+ a.snip.WriteFinalTabstop()
+ return ""
+}
+
+// Import makes sure the package corresponding to path is imported,
+// returning the identifier to use to refer to the package.
+func (a *postfixTmplArgs) Import(path string) (string, error) {
+ name, edits, err := a.importIfNeeded(path, a.scope)
+ if err != nil {
+ return "", fmt.Errorf("couldn't import %q: %w", path, err)
+ }
+ a.edits = append(a.edits, edits...)
+ return name, nil
+}
+
+func (a *postfixTmplArgs) EscapeQuotes(v string) string {
+ return strings.ReplaceAll(v, `"`, `\\"`)
+}
+
+// ElemType returns the Elem() type of a.Type, if applicable.
+func (a *postfixTmplArgs) ElemType() types.Type {
+ if e, _ := a.Type.(interface{ Elem() types.Type }); e != nil {
+ return e.Elem()
+ }
+ return nil
+}
+
+// Kind returns the underlying kind of type, e.g. "slice", "struct",
+// etc.
+func (a *postfixTmplArgs) Kind() string {
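+	// Derive the kind from the name of the dynamic go/types type,
+	// e.g. *types.Slice -> "slice".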
+ t := reflect.TypeOf(a.Type.Underlying())
+ return strings.ToLower(strings.TrimPrefix(t.String(), "*types."))
+}
+
+// KeyType returns the type of X's key. KeyType panics if X is not a
+// map.
+func (a *postfixTmplArgs) KeyType() types.Type {
+ return a.Type.Underlying().(*types.Map).Key()
+}
+
+// Tuple returns the tuple result vars if X is a call expression.
+func (a *postfixTmplArgs) Tuple() []*types.Var {
+ tuple, _ := a.Type.(*types.Tuple)
+ if tuple == nil {
+ return nil
+ }
+
+	vars := make([]*types.Var, 0, tuple.Len())
+	for i := 0; i < tuple.Len(); i++ {
+		vars = append(vars, tuple.At(i))
+	}
+	return vars
+}
+
+// TypeName returns the textual representation of type t.
+func (a *postfixTmplArgs) TypeName(t types.Type) (string, error) {
+ if t == nil || t == types.Typ[types.Invalid] {
+ return "", fmt.Errorf("invalid type: %v", t)
+ }
+ return types.TypeString(t, a.qf), nil
+}
+
+// VarName returns a suitable variable name for the type t. If t
+// implements the error interface, "err" is used. If t is not a named
+// type then nonNamedDefault is used. Otherwise a name is made by
+// abbreviating the type name. If the resultant name is already in
+// scope, an integer is appended to make a unique name.
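+//
+// For example, a value of a named type "Buffer" would typically be
+// named "b", becoming "b2" if "b" is already in use.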
+func (a *postfixTmplArgs) VarName(t types.Type, nonNamedDefault string) string {
+ if t == nil {
+ t = types.Typ[types.Invalid]
+ }
+
+ var name string
+ // go/types predicates are undefined on types.Typ[types.Invalid].
+ if !types.Identical(t, types.Typ[types.Invalid]) && types.Implements(t, errorIntf) {
+ name = "err"
+ } else if _, isNamed := source.Deref(t).(*types.Named); !isNamed {
+ name = nonNamedDefault
+ }
+
+ if name == "" {
+ name = types.TypeString(t, func(p *types.Package) string {
+ return ""
+ })
+ name = abbreviateTypeName(name)
+ }
+
+ if dot := strings.LastIndex(name, "."); dot > -1 {
+ name = name[dot+1:]
+ }
+
+ uniqueName := name
+ for i := 2; ; i++ {
+ if s, _ := a.scope.LookupParent(uniqueName, token.NoPos); s == nil && !a.varNames[uniqueName] {
+ break
+ }
+ uniqueName = fmt.Sprintf("%s%d", name, i)
+ }
+
+ a.varNames[uniqueName] = true
+
+ return uniqueName
+}
+
+func (c *completer) addPostfixSnippetCandidates(ctx context.Context, sel *ast.SelectorExpr) {
+ if !c.opts.postfix {
+ return
+ }
+
+ initPostfixRules()
+
+ if sel == nil || sel.Sel == nil {
+ return
+ }
+
+ selType := c.pkg.GetTypesInfo().TypeOf(sel.X)
+ if selType == nil {
+ return
+ }
+
+ // Skip empty tuples since there is no value to operate on.
+ if tuple, ok := selType.Underlying().(*types.Tuple); ok && tuple == nil {
+ return
+ }
+
+ tokFile := c.pkg.FileSet().File(c.pos)
+
+ // Only replace sel with a statement if sel is already a statement.
+ var stmtOK bool
+ for i, n := range c.path {
+ if n == sel && i < len(c.path)-1 {
+ switch p := c.path[i+1].(type) {
+ case *ast.ExprStmt:
+ stmtOK = true
+ case *ast.AssignStmt:
+ // In cases like:
+ //
+ // foo.<>
+ // bar = 123
+ //
+ // detect that "foo." makes up the entire statement since the
+ // apparent selector spans lines.
+ stmtOK = tokFile.Line(c.pos) < tokFile.Line(p.TokPos)
+ }
+ break
+ }
+ }
+
+ scope := c.pkg.GetTypes().Scope().Innermost(c.pos)
+ if scope == nil {
+ return
+ }
+
+ // afterDot is the position after selector dot, e.g. "|" in
+ // "foo.|print".
+ afterDot := sel.Sel.Pos()
+
+ // We must detect dangling selectors such as:
+ //
+ // foo.<>
+ // bar
+ //
+ // and adjust afterDot so that we don't mistakenly delete the
+ // newline thinking "bar" is part of our selector.
+ if startLine := tokFile.Line(sel.Pos()); startLine != tokFile.Line(afterDot) {
+ if tokFile.Line(c.pos) != startLine {
+ return
+ }
+ afterDot = c.pos
+ }
+
+ for _, rule := range postfixTmpls {
+ // When completing foo.print<>, "print" is naturally overwritten,
+ // but we need to also remove "foo." so the snippet has a clean
+ // slate.
+ edits, err := c.editText(sel.Pos(), afterDot, "")
+ if err != nil {
+ event.Error(ctx, "error calculating postfix edits", err)
+ return
+ }
+
+ tmplArgs := postfixTmplArgs{
+ X: source.FormatNode(c.pkg.FileSet(), sel.X),
+ StmtOK: stmtOK,
+ Obj: exprObj(c.pkg.GetTypesInfo(), sel.X),
+ Type: selType,
+ qf: c.qf,
+ importIfNeeded: c.importIfNeeded,
+ scope: scope,
+ varNames: make(map[string]bool),
+ }
+
+ // Feed the template straight into the snippet builder. This
+ // allows templates to build snippets as they are executed.
+ err = rule.tmpl.Execute(&tmplArgs.snip, &tmplArgs)
+ if err != nil {
+ event.Error(ctx, "error executing postfix template", err)
+ continue
+ }
+
+ if strings.TrimSpace(tmplArgs.snip.String()) == "" {
+ continue
+ }
+
+ score := c.matcher.Score(rule.label)
+ if score <= 0 {
+ continue
+ }
+
+ c.items = append(c.items, CompletionItem{
+ Label: rule.label + "!",
+ Detail: rule.details,
+ Score: float64(score) * 0.01,
+ Kind: protocol.SnippetCompletion,
+ snippet: &tmplArgs.snip,
+ AdditionalTextEdits: append(edits, tmplArgs.edits...),
+ })
+ }
+}
+
+var postfixRulesOnce sync.Once
+
+func initPostfixRules() {
+ postfixRulesOnce.Do(func() {
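+		// Compile each rule's template once, storing the parsed
+		// template back into its slice element.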
+ var idx int
+ for _, rule := range postfixTmpls {
+ var err error
+ rule.tmpl, err = template.New("postfix_snippet").Parse(rule.body)
+ if err != nil {
+ log.Panicf("error parsing postfix snippet template: %v", err)
+ }
+ postfixTmpls[idx] = rule
+ idx++
+ }
+ postfixTmpls = postfixTmpls[:idx]
+ })
+}
+
+// importIfNeeded returns the package identifier and any necessary
+// edits to import package pkgPath.
+func (c *completer) importIfNeeded(pkgPath string, scope *types.Scope) (string, []protocol.TextEdit, error) {
+ defaultName := imports.ImportPathToAssumedName(pkgPath)
+
+ // Check if file already imports pkgPath.
+ for _, s := range c.file.Imports {
+ // TODO(adonovan): what if pkgPath has a vendor/ suffix?
+ // This may be the cause of go.dev/issue/56291.
+ if source.UnquoteImportPath(s) == source.ImportPath(pkgPath) {
+ if s.Name == nil {
+ return defaultName, nil, nil
+ }
+ if s.Name.Name != "_" {
+ return s.Name.Name, nil, nil
+ }
+ }
+ }
+
+ // Give up if the package's name is already in use by another object.
+ if _, obj := scope.LookupParent(defaultName, token.NoPos); obj != nil {
+ return "", nil, fmt.Errorf("import name %q of %q already in use", defaultName, pkgPath)
+ }
+
+ edits, err := c.importEdits(&importInfo{
+ importPath: pkgPath,
+ })
+ if err != nil {
+ return "", nil, err
+ }
+
+ return defaultName, edits, nil
+}
diff --git a/gopls/internal/lsp/source/completion/printf.go b/gopls/internal/lsp/source/completion/printf.go
new file mode 100644
index 000000000..432011755
--- /dev/null
+++ b/gopls/internal/lsp/source/completion/printf.go
@@ -0,0 +1,172 @@
+// Copyright 2020 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package completion
+
+import (
+ "go/ast"
+ "go/constant"
+ "go/types"
+ "strconv"
+ "strings"
+ "unicode/utf8"
+)
+
+// printfArgKind returns the expected objKind when completing a
+// printf-like operand. call is the printf-like function call, and
+// argIdx is the index of call.Args being completed.
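+//
+// For example, when completing the second argument in
+// fmt.Printf("%d", <>), printfArgKind returns kindInt.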
+func printfArgKind(info *types.Info, call *ast.CallExpr, argIdx int) objKind {
+ // Printf-like function name must end in "f".
+ fn := exprObj(info, call.Fun)
+ if fn == nil || !strings.HasSuffix(fn.Name(), "f") {
+ return kindAny
+ }
+
+ sig, _ := fn.Type().(*types.Signature)
+ if sig == nil {
+ return kindAny
+ }
+
+ // Must be variadic and take at least two params.
+ numParams := sig.Params().Len()
+ if !sig.Variadic() || numParams < 2 || argIdx < numParams-1 {
+ return kindAny
+ }
+
+ // Param preceding variadic args must be a (format) string.
+ if !types.Identical(sig.Params().At(numParams-2).Type(), types.Typ[types.String]) {
+ return kindAny
+ }
+
+ // Format string must be a constant.
+ strArg := info.Types[call.Args[numParams-2]].Value
+ if strArg == nil || strArg.Kind() != constant.String {
+ return kindAny
+ }
+
+ return formatOperandKind(constant.StringVal(strArg), argIdx-(numParams-1)+1)
+}
+
+// formatOperandKind returns the objKind corresponding to format's
+// operandIdx'th operand.
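+//
+// For example, formatOperandKind("%*.*f", 1) and
+// formatOperandKind("%*.*f", 2) are kindInt (the dynamic width and
+// precision), while formatOperandKind("%*.*f", 3) is
+// kindFloat|kindComplex.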
+func formatOperandKind(format string, operandIdx int) objKind {
+ var (
+ prevOperandIdx int
+ kind = kindAny
+ )
+ for {
+ i := strings.Index(format, "%")
+ if i == -1 {
+ break
+ }
+
+ var operands []formatOperand
+ format, operands = parsePrintfVerb(format[i+1:], prevOperandIdx)
+
+		// Check if any of this verb's operands correspond to our
+		// target operandIdx.
+ for _, v := range operands {
+ if v.idx == operandIdx {
+ if kind == kindAny {
+ kind = v.kind
+ } else if v.kind != kindAny {
+ // If multiple verbs refer to the same operand, take the
+ // intersection of their kinds.
+ kind &= v.kind
+ }
+ }
+
+ prevOperandIdx = v.idx
+ }
+ }
+ return kind
+}
+
+type formatOperand struct {
+ // idx is the one-based printf operand index.
+ idx int
+ // kind is a mask of expected kinds of objects for this operand.
+ kind objKind
+}
+
+// parsePrintfVerb parses the leading printf verb in f. The opening
+// "%" must already be trimmed from f. prevIdx is the previous
+// operand's index, or zero if this is the first verb. The format
+// string is returned with the leading verb removed. Multiple operands
+// can be returned in the case of dynamic widths such as "%*.*f".
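+//
+// For example, parsePrintfVerb("d rest", 0) returns (" rest",
+// []formatOperand{{idx: 1, kind: kindInt}}).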
+func parsePrintfVerb(f string, prevIdx int) (string, []formatOperand) {
+ var verbs []formatOperand
+
+ addVerb := func(k objKind) {
+ verbs = append(verbs, formatOperand{
+ idx: prevIdx + 1,
+ kind: k,
+ })
+ prevIdx++
+ }
+
+ for len(f) > 0 {
+		// Trim the first rune off f so we are guaranteed to make progress.
+ r, l := utf8.DecodeRuneInString(f)
+ f = f[l:]
+
+ // We care about three things:
+ // 1. The verb, which maps directly to object kind.
+ // 2. Explicit operand indices like "%[2]s".
+ // 3. Dynamic widths using "*".
+ switch r {
+ case '%':
+ return f, nil
+ case '*':
+ addVerb(kindInt)
+ continue
+ case '[':
+ // Parse operand index as in "%[2]s".
+ i := strings.Index(f, "]")
+ if i == -1 {
+ return f, nil
+ }
+
+ idx, err := strconv.Atoi(f[:i])
+ f = f[i+1:]
+ if err != nil {
+ return f, nil
+ }
+
+ prevIdx = idx - 1
+ continue
+ case 'v', 'T':
+ addVerb(kindAny)
+ case 't':
+ addVerb(kindBool)
+ case 'c', 'd', 'o', 'O', 'U':
+ addVerb(kindInt)
+ case 'e', 'E', 'f', 'F', 'g', 'G':
+ addVerb(kindFloat | kindComplex)
+ case 'b':
+ addVerb(kindInt | kindFloat | kindComplex | kindBytes)
+ case 'q', 's':
+ addVerb(kindString | kindBytes | kindStringer | kindError)
+ case 'x', 'X':
+ // Omit kindStringer and kindError though technically allowed.
+ addVerb(kindString | kindBytes | kindInt | kindFloat | kindComplex)
+ case 'p':
+ addVerb(kindPtr | kindSlice)
+ case 'w':
+ addVerb(kindError)
+ case '+', '-', '#', ' ', '.', '0', '1', '2', '3', '4', '5', '6', '7', '8', '9':
+ // Flag or numeric width/precision value.
+ continue
+ default:
+ // Assume unrecognized rune is a custom fmt.Formatter verb.
+ addVerb(kindAny)
+ }
+
+ if len(verbs) > 0 {
+ break
+ }
+ }
+
+ return f, verbs
+}
diff --git a/gopls/internal/lsp/source/completion/printf_test.go b/gopls/internal/lsp/source/completion/printf_test.go
new file mode 100644
index 000000000..19d295b8d
--- /dev/null
+++ b/gopls/internal/lsp/source/completion/printf_test.go
@@ -0,0 +1,72 @@
+// Copyright 2020 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package completion
+
+import (
+ "fmt"
+ "testing"
+)
+
+func TestFormatOperandKind(t *testing.T) {
+ cases := []struct {
+ f string
+ idx int
+ kind objKind
+ }{
+ {"", 1, kindAny},
+ {"%", 1, kindAny},
+ {"%%%", 1, kindAny},
+ {"%[1", 1, kindAny},
+ {"%[?%s", 2, kindAny},
+ {"%[abc]v", 1, kindAny},
+
+ {"%v", 1, kindAny},
+ {"%T", 1, kindAny},
+ {"%t", 1, kindBool},
+ {"%d", 1, kindInt},
+ {"%c", 1, kindInt},
+ {"%o", 1, kindInt},
+ {"%O", 1, kindInt},
+ {"%U", 1, kindInt},
+ {"%e", 1, kindFloat | kindComplex},
+ {"%E", 1, kindFloat | kindComplex},
+ {"%f", 1, kindFloat | kindComplex},
+ {"%F", 1, kindFloat | kindComplex},
+ {"%g", 1, kindFloat | kindComplex},
+ {"%G", 1, kindFloat | kindComplex},
+ {"%b", 1, kindInt | kindFloat | kindComplex | kindBytes},
+ {"%q", 1, kindString | kindBytes | kindStringer | kindError},
+ {"%s", 1, kindString | kindBytes | kindStringer | kindError},
+ {"%x", 1, kindString | kindBytes | kindInt | kindFloat | kindComplex},
+ {"%X", 1, kindString | kindBytes | kindInt | kindFloat | kindComplex},
+ {"%p", 1, kindPtr | kindSlice},
+ {"%w", 1, kindError},
+
+ {"%1.2f", 1, kindFloat | kindComplex},
+ {"%*f", 1, kindInt},
+ {"%*f", 2, kindFloat | kindComplex},
+ {"%*.*f", 1, kindInt},
+ {"%*.*f", 2, kindInt},
+ {"%*.*f", 3, kindFloat | kindComplex},
+ {"%[3]*.[2]*[1]f", 1, kindFloat | kindComplex},
+ {"%[3]*.[2]*[1]f", 2, kindInt},
+ {"%[3]*.[2]*[1]f", 3, kindInt},
+
+ {"foo %% %d", 1, kindInt},
+ {"%#-12.34f", 1, kindFloat | kindComplex},
+ {"% d", 1, kindInt},
+
+ {"%s %[1]X %d", 1, kindString | kindBytes},
+ {"%s %[1]X %d", 2, kindInt},
+ }
+
+ for _, c := range cases {
+ t.Run(fmt.Sprintf("%q#%d", c.f, c.idx), func(t *testing.T) {
+ if got := formatOperandKind(c.f, c.idx); got != c.kind {
+ t.Errorf("expected %d (%[1]b), got %d (%[2]b)", c.kind, got)
+ }
+ })
+ }
+}
diff --git a/gopls/internal/lsp/source/completion/snippet.go b/gopls/internal/lsp/source/completion/snippet.go
new file mode 100644
index 000000000..f4ea767e9
--- /dev/null
+++ b/gopls/internal/lsp/source/completion/snippet.go
@@ -0,0 +1,116 @@
+// Copyright 2019 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package completion
+
+import (
+ "go/ast"
+
+ "golang.org/x/tools/gopls/internal/lsp/safetoken"
+ "golang.org/x/tools/gopls/internal/lsp/snippet"
+)
+
+// structFieldSnippet calculates the snippet for struct literal field names.
+func (c *completer) structFieldSnippet(cand candidate, detail string, snip *snippet.Builder) {
+ if !c.wantStructFieldCompletions() {
+ return
+ }
+
+ // If we are in a deep completion then we can't be completing a field
+ // name (e.g. "Foo{f<>}" completing to "Foo{f.Bar}" should not generate
+ // a snippet).
+ if len(cand.path) > 0 {
+ return
+ }
+
+ clInfo := c.enclosingCompositeLiteral
+
+ // If we are already in a key-value expression, we don't want a snippet.
+ if clInfo.kv != nil {
+ return
+ }
+
+ // A plain snippet turns "Foo{Ba<>" into "Foo{Bar: <>".
+ snip.WriteText(": ")
+ snip.WritePlaceholder(func(b *snippet.Builder) {
+ // A placeholder snippet turns "Foo{Ba<>" into "Foo{Bar: <*int*>".
+ if c.opts.placeholders {
+ b.WriteText(detail)
+ }
+ })
+
+ fset := c.pkg.FileSet()
+
+ // If the cursor position is on a different line from the literal's opening brace,
+ // we are in a multiline literal. Ignore line directives.
+ if safetoken.StartPosition(fset, c.pos).Line != safetoken.StartPosition(fset, clInfo.cl.Lbrace).Line {
+ snip.WriteText(",")
+ }
+}
+
+// functionCallSnippet calculates the snippet for function calls.
+func (c *completer) functionCallSnippet(name string, tparams, params []string, snip *snippet.Builder) {
+ // If there is no suffix then we need to reuse existing call parens
+ // "()" if present. If there is an identifier suffix then we always
+ // need to include "()" since we don't overwrite the suffix.
+ if c.surrounding != nil && c.surrounding.Suffix() == "" && len(c.path) > 1 {
+ // If we are the left side (i.e. "Fun") part of a call expression,
+ // we don't want a snippet since there are already parens present.
+ switch n := c.path[1].(type) {
+ case *ast.CallExpr:
+ // The Lparen != Rparen check detects fudged CallExprs we
+ // inserted when fixing the AST. In this case, we do still need
+ // to insert the calling "()" parens.
+ if n.Fun == c.path[0] && n.Lparen != n.Rparen {
+ return
+ }
+ case *ast.SelectorExpr:
+ if len(c.path) > 2 {
+ if call, ok := c.path[2].(*ast.CallExpr); ok && call.Fun == c.path[1] && call.Lparen != call.Rparen {
+ return
+ }
+ }
+ }
+ }
+
+ snip.WriteText(name)
+
+ if len(tparams) > 0 {
+ snip.WriteText("[")
+ if c.opts.placeholders {
+ for i, tp := range tparams {
+ if i > 0 {
+ snip.WriteText(", ")
+ }
+ snip.WritePlaceholder(func(b *snippet.Builder) {
+ b.WriteText(tp)
+ })
+ }
+ } else {
+ snip.WritePlaceholder(nil)
+ }
+ snip.WriteText("]")
+ }
+
+ snip.WriteText("(")
+
+ if c.opts.placeholders {
+		// A placeholder snippet turns "someFun<>" into "someFunc(<*i int*>, <*s string*>)".
+ for i, p := range params {
+ if i > 0 {
+ snip.WriteText(", ")
+ }
+ snip.WritePlaceholder(func(b *snippet.Builder) {
+ b.WriteText(p)
+ })
+ }
+ } else {
+ // A plain snippet turns "someFun<>" into "someFunc(<>)".
+ if len(params) > 0 {
+ snip.WritePlaceholder(nil)
+ }
+ }
+
+ snip.WriteText(")")
+}
diff --git a/gopls/internal/lsp/source/completion/statements.go b/gopls/internal/lsp/source/completion/statements.go
new file mode 100644
index 000000000..707375fa1
--- /dev/null
+++ b/gopls/internal/lsp/source/completion/statements.go
@@ -0,0 +1,361 @@
+// Copyright 2020 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package completion
+
+import (
+ "fmt"
+ "go/ast"
+ "go/token"
+ "go/types"
+
+ "golang.org/x/tools/gopls/internal/lsp/protocol"
+ "golang.org/x/tools/gopls/internal/lsp/snippet"
+ "golang.org/x/tools/gopls/internal/lsp/source"
+)
+
+// addStatementCandidates adds full statement completion candidates
+// appropriate for the current context.
+func (c *completer) addStatementCandidates() {
+ c.addErrCheck()
+ c.addAssignAppend()
+}
+
+// addAssignAppend offers a completion candidate of the form:
+//
+// someSlice = append(someSlice, )
+//
+// It will offer the "append" completion in either of two situations:
+//
+// 1. Position is in RHS of assign, prefix matches "append", and
+// corresponding LHS object is a slice. For example,
+// "foo = ap<>" completes to "foo = append(foo, )".
+//
+// 2. Prefix is an ident or selector in an *ast.ExprStmt (i.e.
+// beginning of statement), and our best matching candidate is a
+// slice. For example: "foo.ba" completes to "foo.bar = append(foo.bar, )".
+func (c *completer) addAssignAppend() {
+ if len(c.path) < 3 {
+ return
+ }
+
+ ident, _ := c.path[0].(*ast.Ident)
+ if ident == nil {
+ return
+ }
+
+ var (
+ // sliceText is the full name of our slice object, e.g. "s.abc" in
+ // "s.abc = app<>".
+ sliceText string
+ // needsLHS is true if we need to prepend the LHS slice name and
+ // "=" to our candidate.
+ needsLHS = false
+ fset = c.pkg.FileSet()
+ )
+
+ switch n := c.path[1].(type) {
+ case *ast.AssignStmt:
+ // We are already in an assignment. Make sure our prefix matches "append".
+ if c.matcher.Score("append") <= 0 {
+ return
+ }
+
+ exprIdx := exprAtPos(c.pos, n.Rhs)
+ if exprIdx == len(n.Rhs) || exprIdx > len(n.Lhs)-1 {
+ return
+ }
+
+ lhsType := c.pkg.GetTypesInfo().TypeOf(n.Lhs[exprIdx])
+ if lhsType == nil {
+ return
+ }
+
+ // Make sure our corresponding LHS object is a slice.
+ if _, isSlice := lhsType.Underlying().(*types.Slice); !isSlice {
+ return
+ }
+
+		// The name of our slice is whatever's in the LHS expression.
+ sliceText = source.FormatNode(fset, n.Lhs[exprIdx])
+ case *ast.SelectorExpr:
+ // Make sure we are a selector at the beginning of a statement.
+		if _, parentIsExprStmt := c.path[2].(*ast.ExprStmt); !parentIsExprStmt {
+ return
+ }
+
+ // So far we only know the first part of our slice name. For
+ // example in "s.a<>" we only know our slice begins with "s."
+ // since the user could still be typing.
+ sliceText = source.FormatNode(fset, n.X) + "."
+ needsLHS = true
+ case *ast.ExprStmt:
+ needsLHS = true
+ default:
+ return
+ }
+
+ var (
+ label string
+ snip snippet.Builder
+ score = highScore
+ )
+
+ if needsLHS {
+ // Offer the long form assign + append candidate if our best
+ // candidate is a slice.
+ bestItem := c.topCandidate()
+ if bestItem == nil || !bestItem.isSlice {
+ return
+ }
+
+		// Don't rank the long form assign + append candidate above the
+		// slice itself.
+ score = bestItem.Score - 0.01
+
+ // Fill in rest of sliceText now that we have the object name.
+ sliceText += bestItem.Label
+
+ // Fill in the candidate's LHS bits.
+ label = fmt.Sprintf("%s = ", bestItem.Label)
+ snip.WriteText(label)
+ }
+
+ snip.WriteText(fmt.Sprintf("append(%s, ", sliceText))
+ snip.WritePlaceholder(nil)
+ snip.WriteText(")")
+
+ c.items = append(c.items, CompletionItem{
+ Label: label + fmt.Sprintf("append(%s, )", sliceText),
+ Kind: protocol.FunctionCompletion,
+ Score: score,
+ snippet: &snip,
+ })
+}
+
+// topCandidate returns the strictly highest scoring candidate
+// collected so far. If the top two candidates have the same score,
+// nil is returned.
+func (c *completer) topCandidate() *CompletionItem {
+ var bestItem, secondBestItem *CompletionItem
+ for i := range c.items {
+ if bestItem == nil || c.items[i].Score > bestItem.Score {
+ bestItem = &c.items[i]
+ } else if secondBestItem == nil || c.items[i].Score > secondBestItem.Score {
+ secondBestItem = &c.items[i]
+ }
+ }
+
+ // If secondBestItem has the same score, bestItem isn't
+ // the strict best.
+ if secondBestItem != nil && secondBestItem.Score == bestItem.Score {
+ return nil
+ }
+
+ return bestItem
+}
+
+// addErrCheck offers a completion candidate of the form:
+//
+// if err != nil {
+// return nil, err
+// }
+//
+// In the case of test functions, it offers a completion candidate of the form:
+//
+// if err != nil {
+// t.Fatal(err)
+// }
+//
+// The position must be in a function that returns an error, and the
+// statement preceding the position must be an assignment where the
+// final LHS object is an error. addErrCheck will synthesize
+// zero values as necessary to make the return statement valid.
+func (c *completer) addErrCheck() {
+ if len(c.path) < 2 || c.enclosingFunc == nil || !c.opts.placeholders {
+ return
+ }
+
+ var (
+ errorType = types.Universe.Lookup("error").Type()
+ result = c.enclosingFunc.sig.Results()
+ testVar = getTestVar(c.enclosingFunc, c.pkg)
+ isTest = testVar != ""
+ doesNotReturnErr = result.Len() == 0 || !types.Identical(result.At(result.Len()-1).Type(), errorType)
+ )
+ // Make sure our enclosing function is a Test func or returns an error.
+ if !isTest && doesNotReturnErr {
+ return
+ }
+
+ prevLine := prevStmt(c.pos, c.path)
+ if prevLine == nil {
+ return
+ }
+
+	// Make sure our preceding statement was an assignment.
+ assign, _ := prevLine.(*ast.AssignStmt)
+ if assign == nil || len(assign.Lhs) == 0 {
+ return
+ }
+
+ lastAssignee := assign.Lhs[len(assign.Lhs)-1]
+
+ // Make sure the final assignee is an error.
+ if !types.Identical(c.pkg.GetTypesInfo().TypeOf(lastAssignee), errorType) {
+ return
+ }
+
+ var (
+ // errVar is e.g. "err" in "foo, err := bar()".
+ errVar = source.FormatNode(c.pkg.FileSet(), lastAssignee)
+
+ // Whether we need to include the "if" keyword in our candidate.
+ needsIf = true
+ )
+
+	// If the returned error from the previous statement is "_", it is not a
+	// real object, so we cannot reference it in the check we generate (nor
+	// pass it to t.Fatal in a test).
+ if errVar == "_" {
+ return
+ }
+
+ // Below we try to detect if the user has already started typing "if
+ // err" so we can replace what they've typed with our complete
+ // statement.
+ switch n := c.path[0].(type) {
+ case *ast.Ident:
+ switch c.path[1].(type) {
+ case *ast.ExprStmt:
+ // This handles:
+ //
+ // f, err := os.Open("foo")
+ // i<>
+
+ // Make sure they are typing "if".
+ if c.matcher.Score("if") <= 0 {
+ return
+ }
+ case *ast.IfStmt:
+ // This handles:
+ //
+ // f, err := os.Open("foo")
+ // if er<>
+
+ // Make sure they are typing the error's name.
+ if c.matcher.Score(errVar) <= 0 {
+ return
+ }
+
+ needsIf = false
+ default:
+ return
+ }
+ case *ast.IfStmt:
+ // This handles:
+ //
+ // f, err := os.Open("foo")
+ // if <>
+
+ // Avoid false positives by ensuring the if's cond is a bad
+ // expression. For example, don't offer the completion in cases
+ // like "if <> somethingElse".
+ if _, bad := n.Cond.(*ast.BadExpr); !bad {
+ return
+ }
+
+ // If "if" is our direct prefix, we need to include it in our
+ // candidate since the existing "if" will be overwritten.
+ needsIf = c.pos == n.Pos()+token.Pos(len("if"))
+ }
+
+ // Build up a snippet that looks like:
+ //
+ // if err != nil {
+ // return <zero value>, ..., ${1:err}
+ // }
+ //
+ // We make the error a placeholder so it is easy to alter the error.
+ var snip snippet.Builder
+ if needsIf {
+ snip.WriteText("if ")
+ }
+ snip.WriteText(fmt.Sprintf("%s != nil {\n\t", errVar))
+
+ var label string
+ if isTest {
+ snip.WriteText(fmt.Sprintf("%s.Fatal(%s)", testVar, errVar))
+ label = fmt.Sprintf("%[1]s != nil { %[2]s.Fatal(%[1]s) }", errVar, testVar)
+ } else {
+ snip.WriteText("return ")
+ for i := 0; i < result.Len()-1; i++ {
+ snip.WriteText(formatZeroValue(result.At(i).Type(), c.qf))
+ snip.WriteText(", ")
+ }
+ snip.WritePlaceholder(func(b *snippet.Builder) {
+ b.WriteText(errVar)
+ })
+ label = fmt.Sprintf("%[1]s != nil { return %[1]s }", errVar)
+ }
+
+ snip.WriteText("\n}")
+
+ if needsIf {
+ label = "if " + label
+ }
+
+ c.items = append(c.items, CompletionItem{
+ Label: label,
+ // There doesn't seem to be a more appropriate kind.
+ Kind: protocol.KeywordCompletion,
+ Score: highScore,
+ snippet: &snip,
+ })
+}
+
+// getTestVar checks the function signature's input parameters and returns
+// the name of the first parameter that implements "testing.TB". For example,
+// func someFunc(t *testing.T) returns the string "t", and
+// func someFunc(b *testing.B) returns "b". An empty string indicates that
+// the function signature does not take a testing.TB parameter, or that the
+// parameter is unnamed or named "_", as in func someFunc(*testing.T).
+func getTestVar(enclosingFunc *funcInfo, pkg source.Package) string {
+ if enclosingFunc == nil || enclosingFunc.sig == nil {
+ return ""
+ }
+
+ var testingPkg *types.Package
+ for _, p := range pkg.GetTypes().Imports() {
+ if p.Path() == "testing" {
+ testingPkg = p
+ break
+ }
+ }
+ if testingPkg == nil {
+ return ""
+ }
+ tbObj := testingPkg.Scope().Lookup("TB")
+ if tbObj == nil {
+ return ""
+ }
+ iface, ok := tbObj.Type().Underlying().(*types.Interface)
+ if !ok {
+ return ""
+ }
+
+ sig := enclosingFunc.sig
+ for i := 0; i < sig.Params().Len(); i++ {
+ param := sig.Params().At(i)
+ if param.Name() == "_" {
+ continue
+ }
+ if !types.Implements(param.Type(), iface) {
+ continue
+ }
+ return param.Name()
+ }
+
+ return ""
+}
diff --git a/gopls/internal/lsp/source/completion/util.go b/gopls/internal/lsp/source/completion/util.go
new file mode 100644
index 000000000..4b6ec09a0
--- /dev/null
+++ b/gopls/internal/lsp/source/completion/util.go
@@ -0,0 +1,344 @@
+// Copyright 2020 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package completion
+
+import (
+ "go/ast"
+ "go/token"
+ "go/types"
+
+ "golang.org/x/tools/go/types/typeutil"
+ "golang.org/x/tools/gopls/internal/lsp/protocol"
+ "golang.org/x/tools/gopls/internal/lsp/safetoken"
+ "golang.org/x/tools/gopls/internal/lsp/source"
+ "golang.org/x/tools/internal/diff"
+ "golang.org/x/tools/internal/typeparams"
+)
+
+// exprAtPos returns the index of the expression containing pos, or
+// len(args) if no expression contains pos.
+func exprAtPos(pos token.Pos, args []ast.Expr) int {
+ for i, expr := range args {
+ if expr.Pos() <= pos && pos <= expr.End() {
+ return i
+ }
+ }
+ return len(args)
+}
+
+// eachField invokes fn for each field that can be selected from a
+// value of type T.
+func eachField(T types.Type, fn func(*types.Var)) {
+ // TODO(adonovan): this algorithm doesn't exclude ambiguous
+ // selections that match more than one field/method.
+ // types.NewSelectionSet should do that for us.
+
+ // for termination on recursive types
+ var seen typeutil.Map
+
+ var visit func(T types.Type)
+ visit = func(T types.Type) {
+ if T, ok := source.Deref(T).Underlying().(*types.Struct); ok {
+ if seen.At(T) != nil {
+ return
+ }
+
+ for i := 0; i < T.NumFields(); i++ {
+ f := T.Field(i)
+ fn(f)
+ if f.Anonymous() {
+ seen.Set(T, true)
+ visit(f.Type())
+ }
+ }
+ }
+ }
+ visit(T)
+}
+
+// typeIsValid reports whether typ doesn't contain any Invalid types.
+func typeIsValid(typ types.Type) bool {
+	// Check named types separately, because calling Underlying() on them
+	// could cause problems with recursive types.
+ if _, ok := typ.(*types.Named); ok {
+ return true
+ }
+
+ switch typ := typ.Underlying().(type) {
+ case *types.Basic:
+ return typ.Kind() != types.Invalid
+ case *types.Array:
+ return typeIsValid(typ.Elem())
+ case *types.Slice:
+ return typeIsValid(typ.Elem())
+ case *types.Pointer:
+ return typeIsValid(typ.Elem())
+ case *types.Map:
+ return typeIsValid(typ.Key()) && typeIsValid(typ.Elem())
+ case *types.Chan:
+ return typeIsValid(typ.Elem())
+ case *types.Signature:
+ return typeIsValid(typ.Params()) && typeIsValid(typ.Results())
+ case *types.Tuple:
+ for i := 0; i < typ.Len(); i++ {
+ if !typeIsValid(typ.At(i).Type()) {
+ return false
+ }
+ }
+ return true
+ case *types.Struct, *types.Interface:
+ // Don't bother checking structs, interfaces for validity.
+ return true
+ default:
+ return false
+ }
+}
+
+// resolveInvalid traverses the node of the AST that defines the scope
+// containing the declaration of obj, and attempts to find a user-friendly
+// name for its invalid type. The resulting Object and its Type are fake.
+func resolveInvalid(fset *token.FileSet, obj types.Object, node ast.Node, info *types.Info) types.Object {
+ var resultExpr ast.Expr
+ ast.Inspect(node, func(node ast.Node) bool {
+ switch n := node.(type) {
+ case *ast.ValueSpec:
+ for _, name := range n.Names {
+ if info.Defs[name] == obj {
+ resultExpr = n.Type
+ }
+ }
+ return false
+ case *ast.Field: // This case handles parameters and results of a FuncDecl or FuncLit.
+ for _, name := range n.Names {
+ if info.Defs[name] == obj {
+ resultExpr = n.Type
+ }
+ }
+ return false
+ default:
+ return true
+ }
+ })
+ // Construct a fake type for the object and return a fake object with this type.
+ typename := source.FormatNode(fset, resultExpr)
+ typ := types.NewNamed(types.NewTypeName(token.NoPos, obj.Pkg(), typename, nil), types.Typ[types.Invalid], nil)
+ return types.NewVar(obj.Pos(), obj.Pkg(), obj.Name(), typ)
+}
+
+func isPointer(T types.Type) bool {
+ _, ok := T.(*types.Pointer)
+ return ok
+}
+
+func isVar(obj types.Object) bool {
+ _, ok := obj.(*types.Var)
+ return ok
+}
+
+func isTypeName(obj types.Object) bool {
+ _, ok := obj.(*types.TypeName)
+ return ok
+}
+
+func isFunc(obj types.Object) bool {
+ _, ok := obj.(*types.Func)
+ return ok
+}
+
+func isEmptyInterface(T types.Type) bool {
+ intf, _ := T.(*types.Interface)
+ return intf != nil && intf.NumMethods() == 0 && typeparams.IsMethodSet(intf)
+}
+
+func isUntyped(T types.Type) bool {
+ if basic, ok := T.(*types.Basic); ok {
+ return basic.Info()&types.IsUntyped > 0
+ }
+ return false
+}
+
+func isPkgName(obj types.Object) bool {
+ _, ok := obj.(*types.PkgName)
+ return ok
+}
+
+func isASTFile(n ast.Node) bool {
+ _, ok := n.(*ast.File)
+ return ok
+}
+
+func deslice(T types.Type) types.Type {
+ if slice, ok := T.Underlying().(*types.Slice); ok {
+ return slice.Elem()
+ }
+ return nil
+}
+
+// enclosingSelector returns the enclosing *ast.SelectorExpr when pos is in
+// the selector.
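+//
+// For example, when completing "foo.ba<>", path[0] is the *ast.Ident "ba",
+// path[1] is the *ast.SelectorExpr "foo.ba", and the latter is returned.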
+func enclosingSelector(path []ast.Node, pos token.Pos) *ast.SelectorExpr {
+ if len(path) == 0 {
+ return nil
+ }
+
+ if sel, ok := path[0].(*ast.SelectorExpr); ok {
+ return sel
+ }
+
+ if _, ok := path[0].(*ast.Ident); ok && len(path) > 1 {
+ if sel, ok := path[1].(*ast.SelectorExpr); ok && pos >= sel.Sel.Pos() {
+ return sel
+ }
+ }
+
+ return nil
+}
+
+// enclosingDeclLHS returns LHS idents from containing value spec or
+// assign statement.
+func enclosingDeclLHS(path []ast.Node) []*ast.Ident {
+ for _, n := range path {
+ switch n := n.(type) {
+ case *ast.ValueSpec:
+ return n.Names
+ case *ast.AssignStmt:
+ ids := make([]*ast.Ident, 0, len(n.Lhs))
+ for _, e := range n.Lhs {
+ if id, ok := e.(*ast.Ident); ok {
+ ids = append(ids, id)
+ }
+ }
+ return ids
+ }
+ }
+
+ return nil
+}
+
+// exprObj returns the types.Object associated with the *ast.Ident or
+// *ast.SelectorExpr e.
+func exprObj(info *types.Info, e ast.Expr) types.Object {
+ var ident *ast.Ident
+ switch expr := e.(type) {
+ case *ast.Ident:
+ ident = expr
+ case *ast.SelectorExpr:
+ ident = expr.Sel
+ default:
+ return nil
+ }
+
+ return info.ObjectOf(ident)
+}
+
+// typeConversion returns the type being converted to if call is a type
+// conversion expression.
+func typeConversion(call *ast.CallExpr, info *types.Info) types.Type {
+ // Type conversion (e.g. "float64(foo)").
+ if fun, _ := exprObj(info, call.Fun).(*types.TypeName); fun != nil {
+ return fun.Type()
+ }
+
+ return nil
+}
+
+// fieldsAccessible returns whether s has at least one field accessible by p.
+func fieldsAccessible(s *types.Struct, p *types.Package) bool {
+ for i := 0; i < s.NumFields(); i++ {
+ f := s.Field(i)
+ if f.Exported() || f.Pkg() == p {
+ return true
+ }
+ }
+ return false
+}
+
+// prevStmt returns the statement that precedes the statement containing pos.
+// For example:
+//
+// foo := 1
+// bar(1 + 2<>)
+//
+// If "<>" is pos, prevStmt returns "foo := 1"
+func prevStmt(pos token.Pos, path []ast.Node) ast.Stmt {
+ var blockLines []ast.Stmt
+ for i := 0; i < len(path) && blockLines == nil; i++ {
+ switch n := path[i].(type) {
+ case *ast.BlockStmt:
+ blockLines = n.List
+ case *ast.CommClause:
+ blockLines = n.Body
+ case *ast.CaseClause:
+ blockLines = n.Body
+ }
+ }
+
+ for i := len(blockLines) - 1; i >= 0; i-- {
+ if blockLines[i].End() < pos {
+ return blockLines[i]
+ }
+ }
+
+ return nil
+}
+
+// formatZeroValue produces Go code representing the zero value of T. It
+// returns the empty string if T is invalid.
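+//
+// For example, it produces "0" for numeric types, `""` for strings, "nil"
+// for pointers, and "T{}" for a struct or array type T.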
+func formatZeroValue(T types.Type, qf types.Qualifier) string {
+ switch u := T.Underlying().(type) {
+ case *types.Basic:
+ switch {
+ case u.Info()&types.IsNumeric > 0:
+ return "0"
+ case u.Info()&types.IsString > 0:
+ return `""`
+ case u.Info()&types.IsBoolean > 0:
+ return "false"
+ default:
+ return ""
+ }
+ case *types.Pointer, *types.Interface, *types.Chan, *types.Map, *types.Slice, *types.Signature:
+ return "nil"
+ default:
+ return types.TypeString(T, qf) + "{}"
+ }
+}
+
+// isBasicKind reports whether t's underlying type is a basic type whose
+// info flags include k.
+func isBasicKind(t types.Type, k types.BasicInfo) bool {
+ b, _ := t.Underlying().(*types.Basic)
+ return b != nil && b.Info()&k > 0
+}
+
+func (c *completer) editText(from, to token.Pos, newText string) ([]protocol.TextEdit, error) {
+ start, end, err := safetoken.Offsets(c.tokFile, from, to)
+ if err != nil {
+ return nil, err // can't happen: from/to came from c
+ }
+ return source.ToProtocolEdits(c.mapper, []diff.Edit{{
+ Start: start,
+ End: end,
+ New: newText,
+ }})
+}
+
+// assignableTo is like types.AssignableTo, but returns false if
+// either type is invalid.
+func assignableTo(x, to types.Type) bool {
+ if x == types.Typ[types.Invalid] || to == types.Typ[types.Invalid] {
+ return false
+ }
+
+ return types.AssignableTo(x, to)
+}
+
+// convertibleTo is like types.ConvertibleTo, but returns false if
+// either type is invalid.
+func convertibleTo(x, to types.Type) bool {
+ if x == types.Typ[types.Invalid] || to == types.Typ[types.Invalid] {
+ return false
+ }
+
+ return types.ConvertibleTo(x, to)
+}
diff --git a/gopls/internal/lsp/source/completion/util_test.go b/gopls/internal/lsp/source/completion/util_test.go
new file mode 100644
index 000000000..c94d279fb
--- /dev/null
+++ b/gopls/internal/lsp/source/completion/util_test.go
@@ -0,0 +1,28 @@
+// Copyright 2020 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package completion
+
+import (
+ "go/types"
+ "testing"
+)
+
+func TestFormatZeroValue(t *testing.T) {
+ tests := []struct {
+ typ types.Type
+ want string
+ }{
+ {types.Typ[types.String], `""`},
+ {types.Typ[types.Byte], "0"},
+ {types.Typ[types.Invalid], ""},
+ {types.Universe.Lookup("error").Type(), "nil"},
+ }
+
+ for _, test := range tests {
+ if got := formatZeroValue(test.typ, nil); got != test.want {
+ t.Errorf("formatZeroValue(%v) = %q, want %q", test.typ, got, test.want)
+ }
+ }
+}
diff --git a/gopls/internal/lsp/source/definition.go b/gopls/internal/lsp/source/definition.go
new file mode 100644
index 000000000..cb87eb061
--- /dev/null
+++ b/gopls/internal/lsp/source/definition.go
@@ -0,0 +1,229 @@
+// Copyright 2023 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package source
+
+import (
+ "context"
+ "fmt"
+ "go/ast"
+ "go/token"
+ "go/types"
+
+ "golang.org/x/tools/gopls/internal/lsp/protocol"
+ "golang.org/x/tools/gopls/internal/span"
+ "golang.org/x/tools/internal/bug"
+ "golang.org/x/tools/internal/event"
+)
+
+// Definition handles the textDocument/definition request for Go files.
+func Definition(ctx context.Context, snapshot Snapshot, fh FileHandle, position protocol.Position) ([]protocol.Location, error) {
+ ctx, done := event.Start(ctx, "source.Definition")
+ defer done()
+
+ pkg, pgf, err := PackageForFile(ctx, snapshot, fh.URI(), NarrowestPackage)
+ if err != nil {
+ return nil, err
+ }
+ pos, err := pgf.PositionPos(position)
+ if err != nil {
+ return nil, err
+ }
+
+ // Handle the case where the cursor is in an import.
+ importLocations, err := importDefinition(ctx, snapshot, pkg, pgf, pos)
+ if err != nil {
+ return nil, err
+ }
+ if len(importLocations) > 0 {
+ return importLocations, nil
+ }
+
+ // Handle the case where the cursor is in the package name.
+ // We use "<= End" to accept a query immediately after the package name.
+ if pgf.File != nil && pgf.File.Name.Pos() <= pos && pos <= pgf.File.Name.End() {
+ // If there's no package documentation, just use current file.
+ declFile := pgf
+ for _, pgf := range pkg.CompiledGoFiles() {
+ if pgf.File.Name != nil && pgf.File.Doc != nil {
+ declFile = pgf
+ break
+ }
+ }
+ loc, err := declFile.NodeLocation(declFile.File.Name)
+ if err != nil {
+ return nil, err
+ }
+ return []protocol.Location{loc}, nil
+ }
+
+ // The general case: the cursor is on an identifier.
+ _, obj, _ := referencedObject(pkg, pgf, pos)
+ if obj == nil {
+ return nil, nil
+ }
+
+ // Handle built-in identifiers.
+ if obj.Parent() == types.Universe {
+ builtin, err := snapshot.BuiltinFile(ctx)
+ if err != nil {
+ return nil, err
+ }
+ // Note that builtinObj is an ast.Object, not types.Object :)
+ builtinObj := builtin.File.Scope.Lookup(obj.Name())
+ if builtinObj == nil {
+ // Every builtin should have documentation.
+ return nil, bug.Errorf("internal error: no builtin object for %s", obj.Name())
+ }
+ decl, ok := builtinObj.Decl.(ast.Node)
+ if !ok {
+ return nil, bug.Errorf("internal error: no declaration for %s", obj.Name())
+ }
+ // The builtin package isn't in the dependency graph, so the usual
+ // utilities won't work here.
+ loc, err := builtin.PosLocation(decl.Pos(), decl.Pos()+token.Pos(len(obj.Name())))
+ if err != nil {
+ return nil, err
+ }
+ return []protocol.Location{loc}, nil
+ }
+
+ // Finally, map the object position.
+ var locs []protocol.Location
+ if !obj.Pos().IsValid() {
+ return nil, bug.Errorf("internal error: no position for %v", obj.Name())
+ }
+ loc, err := mapPosition(ctx, pkg.FileSet(), snapshot, obj.Pos(), adjustedObjEnd(obj))
+ if err != nil {
+ return nil, err
+ }
+ locs = append(locs, loc)
+ return locs, nil
+}
+
+// referencedObject returns the identifier and object referenced at the
+// specified position, which must be within the file pgf, for the purposes of
+// definition/hover/call hierarchy operations. It returns a nil object if no
+// object was found at the given position.
+//
+// If the returned identifier is a type-switch implicit (i.e. the x in x :=
+// e.(type)), the third result will be the type of the expression being
+// switched on (the type of e in the example). This facilitates workarounds for
+// limitations of the go/types API, which does not report an object for the
+// identifier x.
+//
+// For embedded fields, referencedObject returns the type name object rather
+// than the var (field) object.
+//
+// TODO(rfindley): this function exists to preserve the pre-existing behavior
+// of source.Identifier. Eliminate this helper in favor of sharing
+// functionality with objectsAt, after choosing suitable primitives.
+func referencedObject(pkg Package, pgf *ParsedGoFile, pos token.Pos) (*ast.Ident, types.Object, types.Type) {
+ path := pathEnclosingObjNode(pgf.File, pos)
+ if len(path) == 0 {
+ return nil, nil, nil
+ }
+ var obj types.Object
+ info := pkg.GetTypesInfo()
+ switch n := path[0].(type) {
+ case *ast.Ident:
+ obj = info.ObjectOf(n)
+ // If n is the var's declaring ident in a type switch
+ // [i.e. the x in x := foo.(type)], it will not have an object. In this
+ // case, set obj to the first implicit object (if any), and return the type
+ // of the expression being switched on.
+ //
+ // The type switch may have no case clauses and thus no
+		// implicit objects; this is a type error ("unused x").
+ if obj == nil {
+ if implicits, typ := typeSwitchImplicits(info, path); len(implicits) > 0 {
+ return n, implicits[0], typ
+ }
+ }
+
+ // If the original position was an embedded field, we want to jump
+ // to the field's type definition, not the field's definition.
+ if v, ok := obj.(*types.Var); ok && v.Embedded() {
+ // types.Info.Uses contains the embedded field's *types.TypeName.
+ if typeName := info.Uses[n]; typeName != nil {
+ obj = typeName
+ }
+ }
+ return n, obj, nil
+ }
+ return nil, nil, nil
+}
+
+// importDefinition returns locations defining a package referenced by the
+// import spec containing pos.
+//
+// If pos is not inside an import spec, it returns nil, nil.
+func importDefinition(ctx context.Context, s Snapshot, pkg Package, pgf *ParsedGoFile, pos token.Pos) ([]protocol.Location, error) {
+ var imp *ast.ImportSpec
+ for _, spec := range pgf.File.Imports {
+ // We use "<= End" to accept a query immediately after an ImportSpec.
+ if spec.Path.Pos() <= pos && pos <= spec.Path.End() {
+ imp = spec
+ }
+ }
+ if imp == nil {
+ return nil, nil
+ }
+
+ importPath := UnquoteImportPath(imp)
+ impID := pkg.Metadata().DepsByImpPath[importPath]
+ if impID == "" {
+ return nil, fmt.Errorf("failed to resolve import %q", importPath)
+ }
+ impMetadata := s.Metadata(impID)
+ if impMetadata == nil {
+ return nil, fmt.Errorf("missing information for package %q", impID)
+ }
+
+ var locs []protocol.Location
+ for _, f := range impMetadata.CompiledGoFiles {
+ fh, err := s.GetFile(ctx, f)
+ if err != nil {
+ if ctx.Err() != nil {
+ return nil, ctx.Err()
+ }
+ continue
+ }
+ pgf, err := s.ParseGo(ctx, fh, ParseHeader)
+ if err != nil {
+ if ctx.Err() != nil {
+ return nil, ctx.Err()
+ }
+ continue
+ }
+ loc, err := pgf.NodeLocation(pgf.File)
+ if err != nil {
+ return nil, err
+ }
+ locs = append(locs, loc)
+ }
+
+ if len(locs) == 0 {
+ return nil, fmt.Errorf("package %q has no readable files", impID) // incl. unsafe
+ }
+
+ return locs, nil
+}
+
+// TODO(rfindley): avoid the duplicate column mapping here, by associating a
+// column mapper with each file handle.
+func mapPosition(ctx context.Context, fset *token.FileSet, s FileSource, start, end token.Pos) (protocol.Location, error) {
+ file := fset.File(start)
+ uri := span.URIFromPath(file.Name())
+ fh, err := s.GetFile(ctx, uri)
+ if err != nil {
+ return protocol.Location{}, err
+ }
+ content, err := fh.Read()
+ if err != nil {
+ return protocol.Location{}, err
+ }
+ m := protocol.NewMapper(fh.URI(), content)
+ return m.PosLocation(file, start, end)
+}
diff --git a/gopls/internal/lsp/source/diagnostics.go b/gopls/internal/lsp/source/diagnostics.go
new file mode 100644
index 000000000..e3f35988e
--- /dev/null
+++ b/gopls/internal/lsp/source/diagnostics.go
@@ -0,0 +1,138 @@
+// Copyright 2018 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package source
+
+import (
+ "context"
+
+ "golang.org/x/tools/gopls/internal/lsp/protocol"
+ "golang.org/x/tools/gopls/internal/span"
+)
+
+type SuggestedFix struct {
+ Title string
+ Edits map[span.URI][]protocol.TextEdit
+ Command *protocol.Command
+ ActionKind protocol.CodeActionKind
+}
+
+// Analyze reports go/analysis-framework diagnostics in the specified package.
+func Analyze(ctx context.Context, snapshot Snapshot, pkgid PackageID, includeConvenience bool) (map[span.URI][]*Diagnostic, error) {
+ // Exit early if the context has been canceled. This also protects us
+ // from a race on Options, see golang/go#36699.
+ if ctx.Err() != nil {
+ return nil, ctx.Err()
+ }
+
+ options := snapshot.View().Options()
+ categories := []map[string]*Analyzer{
+ options.DefaultAnalyzers,
+ options.StaticcheckAnalyzers,
+ options.TypeErrorAnalyzers,
+ }
+ if includeConvenience { // e.g. for codeAction
+ categories = append(categories, options.ConvenienceAnalyzers) // e.g. fillstruct
+ }
+
+ var analyzers []*Analyzer
+ for _, cat := range categories {
+ for _, a := range cat {
+ analyzers = append(analyzers, a)
+ }
+ }
+
+ analysisDiagnostics, err := snapshot.Analyze(ctx, pkgid, analyzers)
+ if err != nil {
+ return nil, err
+ }
+
+ // Report diagnostics and errors from root analyzers.
+ reports := make(map[span.URI][]*Diagnostic)
+ for _, diag := range analysisDiagnostics {
+ reports[diag.URI] = append(reports[diag.URI], diag)
+ }
+ return reports, nil
+}
+
+// FileDiagnostics reports diagnostics in the specified file,
+// as used by the "gopls check" command.
+//
+// TODO(adonovan): factor in common with (*Server).codeAction, which
+// executes { PackageForFile; Analyze } too?
+//
+// TODO(adonovan): opt: this function is called in a loop from the
+// "gopls/diagnoseFiles" nonstandard request handler. It would be more
+// efficient to compute the set of packages and TypeCheck and
+// Analyze them all at once.
+func FileDiagnostics(ctx context.Context, snapshot Snapshot, uri span.URI) (FileHandle, []*Diagnostic, error) {
+ fh, err := snapshot.GetFile(ctx, uri)
+ if err != nil {
+ return nil, nil, err
+ }
+ pkg, _, err := PackageForFile(ctx, snapshot, uri, NarrowestPackage)
+ if err != nil {
+ return nil, nil, err
+ }
+ pkgDiags, err := pkg.DiagnosticsForFile(ctx, snapshot, uri)
+ if err != nil {
+ return nil, nil, err
+ }
+ adiags, err := Analyze(ctx, snapshot, pkg.Metadata().ID, false)
+ if err != nil {
+ return nil, nil, err
+ }
+ var fileDiags []*Diagnostic // combine load/parse/type + analysis diagnostics
+ CombineDiagnostics(pkgDiags, adiags[uri], &fileDiags, &fileDiags)
+ return fh, fileDiags, nil
+}
+
+// CombineDiagnostics combines and filters list/parse/type diagnostics from
+// tdiags with adiags, and appends the two lists to *outT and *outA,
+// respectively.
+//
+// Type-error analyzers produce diagnostics that are redundant
+// with type checker diagnostics, but more detailed (e.g. fixes).
+// Rather than report two diagnostics for the same problem,
+// we combine them by augmenting the type-checker diagnostic
+// and discarding the analyzer diagnostic.
+//
+// If an analysis diagnostic has the same range and message as
+// a list/parse/type diagnostic, the suggested fix information
+// (et al) of the latter is merged into a copy of the former.
+// This handles the case where a type-error analyzer suggests
+// a fix to a type error, and avoids duplication.
+//
+// The use of out-slices, though irregular, allows the caller to
+// easily choose whether to keep the results separate or combined.
+//
+// The arguments are not modified.
+func CombineDiagnostics(tdiags []*Diagnostic, adiags []*Diagnostic, outT, outA *[]*Diagnostic) {
+
+ // Build index of (list+parse+)type errors.
+ type key struct {
+ Range protocol.Range
+ message string
+ }
+ index := make(map[key]int) // maps (Range,Message) to index in tdiags slice
+ for i, diag := range tdiags {
+ index[key{diag.Range, diag.Message}] = i
+ }
+
+ // Filter out analysis diagnostics that match type errors,
+ // retaining their suggested fix (etc) fields.
+ for _, diag := range adiags {
+ if i, ok := index[key{diag.Range, diag.Message}]; ok {
+ copy := *tdiags[i]
+ copy.SuggestedFixes = diag.SuggestedFixes
+ copy.Tags = diag.Tags
+ tdiags[i] = &copy
+ continue
+ }
+
+ *outA = append(*outA, diag)
+ }
+
+ *outT = append(*outT, tdiags...)
+}
diff --git a/gopls/internal/lsp/source/extract.go b/gopls/internal/lsp/source/extract.go
new file mode 100644
index 000000000..56e8a5e23
--- /dev/null
+++ b/gopls/internal/lsp/source/extract.go
@@ -0,0 +1,1331 @@
+// Copyright 2020 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package source
+
+import (
+ "bytes"
+ "fmt"
+ "go/ast"
+ "go/format"
+ "go/parser"
+ "go/token"
+ "go/types"
+ "sort"
+ "strings"
+ "text/scanner"
+
+ "golang.org/x/tools/go/analysis"
+ "golang.org/x/tools/go/ast/astutil"
+ "golang.org/x/tools/gopls/internal/lsp/safetoken"
+ "golang.org/x/tools/internal/analysisinternal"
+ "golang.org/x/tools/internal/bug"
+)
+
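+// extractVariable replaces the expression in the given range with a
+// reference to one or more newly declared variables (e.g. "x := <expr>"),
+// inserting the declaration before a suitable enclosing statement.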
+func extractVariable(fset *token.FileSet, start, end token.Pos, src []byte, file *ast.File, _ *types.Package, info *types.Info) (*analysis.SuggestedFix, error) {
+ tokFile := fset.File(file.Pos())
+ expr, path, ok, err := CanExtractVariable(start, end, file)
+ if !ok {
+ return nil, fmt.Errorf("extractVariable: cannot extract %s: %v", safetoken.StartPosition(fset, start), err)
+ }
+
+ // Create new AST node for extracted code.
+ var lhsNames []string
+ switch expr := expr.(type) {
+ // TODO: stricter rules for selectorExpr.
+ case *ast.BasicLit, *ast.CompositeLit, *ast.IndexExpr, *ast.SliceExpr,
+ *ast.UnaryExpr, *ast.BinaryExpr, *ast.SelectorExpr:
+ lhsName, _ := generateAvailableIdentifier(expr.Pos(), file, path, info, "x", 0)
+ lhsNames = append(lhsNames, lhsName)
+ case *ast.CallExpr:
+ tup, ok := info.TypeOf(expr).(*types.Tuple)
+ if !ok {
+ // If the call expression only has one return value, we can treat it the
+ // same as our standard extract variable case.
+ lhsName, _ := generateAvailableIdentifier(expr.Pos(), file, path, info, "x", 0)
+ lhsNames = append(lhsNames, lhsName)
+ break
+ }
+ idx := 0
+ for i := 0; i < tup.Len(); i++ {
+ // Generate a unique variable for each return value.
+ var lhsName string
+ lhsName, idx = generateAvailableIdentifier(expr.Pos(), file, path, info, "x", idx)
+ lhsNames = append(lhsNames, lhsName)
+ }
+ default:
+ return nil, fmt.Errorf("cannot extract %T", expr)
+ }
+
+ insertBeforeStmt := analysisinternal.StmtToInsertVarBefore(path)
+ if insertBeforeStmt == nil {
+ return nil, fmt.Errorf("cannot find location to insert extraction")
+ }
+ indent, err := calculateIndentation(src, tokFile, insertBeforeStmt)
+ if err != nil {
+ return nil, err
+ }
+ newLineIndent := "\n" + indent
+
+ lhs := strings.Join(lhsNames, ", ")
+ assignStmt := &ast.AssignStmt{
+ Lhs: []ast.Expr{ast.NewIdent(lhs)},
+ Tok: token.DEFINE,
+ Rhs: []ast.Expr{expr},
+ }
+ var buf bytes.Buffer
+ if err := format.Node(&buf, fset, assignStmt); err != nil {
+ return nil, err
+ }
+ assignment := strings.ReplaceAll(buf.String(), "\n", newLineIndent) + newLineIndent
+
+ return &analysis.SuggestedFix{
+ TextEdits: []analysis.TextEdit{
+ {
+ Pos: insertBeforeStmt.Pos(),
+ End: insertBeforeStmt.Pos(),
+ NewText: []byte(assignment),
+ },
+ {
+ Pos: start,
+ End: end,
+ NewText: []byte(lhs),
+ },
+ },
+ }, nil
+}
+
+// CanExtractVariable reports whether the code in the given range can be
+// extracted to a variable.
+func CanExtractVariable(start, end token.Pos, file *ast.File) (ast.Expr, []ast.Node, bool, error) {
+ if start == end {
+ return nil, nil, false, fmt.Errorf("start and end are equal")
+ }
+ path, _ := astutil.PathEnclosingInterval(file, start, end)
+ if len(path) == 0 {
+ return nil, nil, false, fmt.Errorf("no path enclosing interval")
+ }
+ for _, n := range path {
+ if _, ok := n.(*ast.ImportSpec); ok {
+ return nil, nil, false, fmt.Errorf("cannot extract variable in an import block")
+ }
+ }
+ node := path[0]
+ if start != node.Pos() || end != node.End() {
+ return nil, nil, false, fmt.Errorf("range does not map to an AST node")
+ }
+ expr, ok := node.(ast.Expr)
+ if !ok {
+ return nil, nil, false, fmt.Errorf("node is not an expression")
+ }
+ switch expr.(type) {
+ case *ast.BasicLit, *ast.CompositeLit, *ast.IndexExpr, *ast.CallExpr,
+ *ast.SliceExpr, *ast.UnaryExpr, *ast.BinaryExpr, *ast.SelectorExpr:
+ return expr, path, true, nil
+ }
+ return nil, nil, false, fmt.Errorf("cannot extract an %T to a variable", expr)
+}
+
+// calculateIndentation returns the indentation to use when inserting lines
+// of code before insertBeforeStmt. To ensure the inserted lines have
+// consistent formatting, it mirrors the indentation of the line on which
+// the insertion occurs.
+func calculateIndentation(content []byte, tok *token.File, insertBeforeStmt ast.Node) (string, error) {
+ line := tok.Line(insertBeforeStmt.Pos())
+ lineOffset, stmtOffset, err := safetoken.Offsets(tok, tok.LineStart(line), insertBeforeStmt.Pos())
+ if err != nil {
+ return "", err
+ }
+ return string(content[lineOffset:stmtOffset]), nil
+}
+
+// generateAvailableIdentifier adjusts the new identifier until there are no
+// collisions in scope. Possible collisions include other function and
+// variable names. It returns the chosen name and the next index to try for
+// prefix.
+func generateAvailableIdentifier(pos token.Pos, file *ast.File, path []ast.Node, info *types.Info, prefix string, idx int) (string, int) {
+ scopes := CollectScopes(info, path, pos)
+ return generateIdentifier(idx, prefix, func(name string) bool {
+ return file.Scope.Lookup(name) != nil || !isValidName(name, scopes)
+ })
+}
+
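+// generateIdentifier returns a name derived from prefix, appending a numeric
+// suffix when idx > 0 or as needed to avoid a collision reported by
+// hasCollision, along with the next suffix index to try.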
+func generateIdentifier(idx int, prefix string, hasCollision func(string) bool) (string, int) {
+ name := prefix
+ if idx != 0 {
+ name += fmt.Sprintf("%d", idx)
+ }
+ for hasCollision(name) {
+ idx++
+ name = fmt.Sprintf("%v%d", prefix, idx)
+ }
+ return name, idx + 1
+}
+
+// isValidName checks for variable collision in scope.
+func isValidName(name string, scopes []*types.Scope) bool {
+ for _, scope := range scopes {
+ if scope == nil {
+ continue
+ }
+ if scope.Lookup(name) != nil {
+ return false
+ }
+ }
+ return true
+}
+
+// returnVariable keeps track of the information we need to properly introduce a new variable
+// that we will return in the extracted function.
+type returnVariable struct {
+ // name is the identifier that is used on the left-hand side of the call to
+ // the extracted function.
+ name ast.Expr
+ // decl is the declaration of the variable. It is used in the type signature of the
+ // extracted function and for variable declarations.
+ decl *ast.Field
+ // zeroVal is the "zero value" of the type of the variable. It is used in a return
+ // statement in the extracted function.
+ zeroVal ast.Expr
+}
+
+// extractMethod refactors the selected block of code into a new method.
+func extractMethod(fset *token.FileSet, start, end token.Pos, src []byte, file *ast.File, pkg *types.Package, info *types.Info) (*analysis.SuggestedFix, error) {
+ return extractFunctionMethod(fset, start, end, src, file, pkg, info, true)
+}
+
+// extractFunction refactors the selected block of code into a new function.
+func extractFunction(fset *token.FileSet, start, end token.Pos, src []byte, file *ast.File, pkg *types.Package, info *types.Info) (*analysis.SuggestedFix, error) {
+ return extractFunctionMethod(fset, start, end, src, file, pkg, info, false)
+}
+
+// extractFunctionMethod refactors the selected block of code into a new function/method.
+// It also replaces the selected block of code with a call to the extracted
+// function. First, we manually adjust the selection range. We remove trailing
+// and leading whitespace characters to ensure the range is precisely bounded
+// by AST nodes. Next, we determine the variables that will be the parameters
+// and return values of the extracted function/method. Lastly, we construct the call
+// of the function/method and insert this call as well as the extracted function/method into
+// their proper locations.
+func extractFunctionMethod(fset *token.FileSet, start, end token.Pos, src []byte, file *ast.File, pkg *types.Package, info *types.Info, isMethod bool) (*analysis.SuggestedFix, error) {
+ errorPrefix := "extractFunction"
+ if isMethod {
+ errorPrefix = "extractMethod"
+ }
+
+ tok := fset.File(file.Pos())
+ if tok == nil {
+ return nil, bug.Errorf("no file for position")
+ }
+ p, ok, methodOk, err := CanExtractFunction(tok, start, end, src, file)
+ if (!ok && !isMethod) || (!methodOk && isMethod) {
+ return nil, fmt.Errorf("%s: cannot extract %s: %v", errorPrefix,
+ safetoken.StartPosition(fset, start), err)
+ }
+ tok, path, start, end, outer, node := p.tok, p.path, p.start, p.end, p.outer, p.node
+ fileScope := info.Scopes[file]
+ if fileScope == nil {
+ return nil, fmt.Errorf("%s: file scope is empty", errorPrefix)
+ }
+ pkgScope := fileScope.Parent()
+ if pkgScope == nil {
+ return nil, fmt.Errorf("%s: package scope is empty", errorPrefix)
+ }
+
+ // A return statement is non-nested if its parent node is equal to the parent node
+ // of the first node in the selection. These cases must be handled separately because
+ // non-nested return statements are guaranteed to execute.
+ var retStmts []*ast.ReturnStmt
+ var hasNonNestedReturn bool
+ startParent := findParent(outer, node)
+ ast.Inspect(outer, func(n ast.Node) bool {
+ if n == nil {
+ return false
+ }
+ if n.Pos() < start || n.End() > end {
+ return n.Pos() <= end
+ }
+ ret, ok := n.(*ast.ReturnStmt)
+ if !ok {
+ return true
+ }
+ if findParent(outer, n) == startParent {
+ hasNonNestedReturn = true
+ }
+ retStmts = append(retStmts, ret)
+ return false
+ })
+ containsReturnStatement := len(retStmts) > 0
+
+ // Now that we have determined the correct range for the selection block,
+ // we must determine the signature of the extracted function. We will then replace
+ // the block with an assignment statement that calls the extracted function with
+ // the appropriate parameters and return values.
+ variables, err := collectFreeVars(info, file, fileScope, pkgScope, start, end, path[0])
+ if err != nil {
+ return nil, err
+ }
+
+ var (
+ receiverUsed bool
+ receiver *ast.Field
+ receiverName string
+ receiverObj types.Object
+ )
+ if isMethod {
+ if outer == nil || outer.Recv == nil || len(outer.Recv.List) == 0 {
+		return nil, fmt.Errorf("%s: cannot extract method: no receiver", errorPrefix)
+ }
+ receiver = outer.Recv.List[0]
+ if len(receiver.Names) == 0 || receiver.Names[0] == nil {
+		return nil, fmt.Errorf("%s: cannot extract method: receiver has no name", errorPrefix)
+ }
+ recvName := receiver.Names[0]
+ receiverName = recvName.Name
+ receiverObj = info.ObjectOf(recvName)
+ }
+
+ var (
+ params, returns []ast.Expr // used when calling the extracted function
+ paramTypes, returnTypes []*ast.Field // used in the signature of the extracted function
+ uninitialized []types.Object // vars we will need to initialize before the call
+ )
+
+ // Avoid duplicates while traversing vars and uninitialized.
+ seenVars := make(map[types.Object]ast.Expr)
+ seenUninitialized := make(map[types.Object]struct{})
+
+ // Some variables on the left-hand side of our assignment statement may be free. If our
+ // selection begins in the same scope in which the free variable is defined, we can
+ // redefine it in our assignment statement. See the following example, where 'b' and
+ // 'err' (both free variables) can be redefined in the second funcCall() while maintaining
+ // correctness.
+ //
+ //
+ // Not Redefined:
+ //
+ // a, err := funcCall()
+ // var b int
+ // b, err = funcCall()
+ //
+ // Redefined:
+ //
+ // a, err := funcCall()
+ // b, err := funcCall()
+ //
+ // We track the number of free variables that can be redefined to maintain our preference
+ // of using "x, y, z := fn()" style assignment statements.
+ var canRedefineCount int
+
+ // Each identifier in the selected block must become (1) a parameter to the
+ // extracted function, (2) a return value of the extracted function, or (3) a local
+ // variable in the extracted function. Determine the outcome(s) for each variable
+ // based on whether it is free, altered within the selected block, and used outside
+ // of the selected block.
+ for _, v := range variables {
+ if _, ok := seenVars[v.obj]; ok {
+ continue
+ }
+ if v.obj.Name() == "_" {
+ // The blank identifier is always a local variable
+ continue
+ }
+ typ := analysisinternal.TypeExpr(file, pkg, v.obj.Type())
+ if typ == nil {
+ return nil, fmt.Errorf("nil AST expression for type: %v", v.obj.Name())
+ }
+ seenVars[v.obj] = typ
+ identifier := ast.NewIdent(v.obj.Name())
+ // An identifier must meet three conditions to become a return value of the
+ // extracted function. (1) its value must be defined or reassigned within
+ // the selection (isAssigned), (2) it must be used at least once after the
+ // selection (isUsed), and (3) its first use after the selection
+	// cannot be its own reassignment or redefinition (see varOverridden).
+ if v.obj.Parent() == nil {
+		return nil, fmt.Errorf("no parent scope for %v", v.obj.Name())
+ }
+ isUsed, firstUseAfter := objUsed(info, end, v.obj.Parent().End(), v.obj)
+ if v.assigned && isUsed && !varOverridden(info, firstUseAfter, v.obj, v.free, outer) {
+ returnTypes = append(returnTypes, &ast.Field{Type: typ})
+ returns = append(returns, identifier)
+ if !v.free {
+ uninitialized = append(uninitialized, v.obj)
+ } else if v.obj.Parent().Pos() == startParent.Pos() {
+ canRedefineCount++
+ }
+ }
+ // An identifier must meet two conditions to become a parameter of the
+ // extracted function. (1) it must be free (isFree), and (2) its first
+ // use within the selection cannot be its own definition (isDefined).
+ if v.free && !v.defined {
+ // Skip the selector for a method.
+ if isMethod && v.obj == receiverObj {
+ receiverUsed = true
+ continue
+ }
+ params = append(params, identifier)
+ paramTypes = append(paramTypes, &ast.Field{
+ Names: []*ast.Ident{identifier},
+ Type: typ,
+ })
+ }
+ }
+
+ // Find the function literal that encloses the selection. The enclosing function literal
+ // may not be the enclosing function declaration (i.e. 'outer'). For example, in the
+ // following block:
+ //
+ // func main() {
+ // ast.Inspect(node, func(n ast.Node) bool {
+ // v := 1 // this line extracted
+ // return true
+ // })
+ // }
+ //
+ // 'outer' is main(). However, the extracted selection most directly belongs to
+ // the anonymous function literal, the second argument of ast.Inspect(). We use the
+ // enclosing function literal to determine the proper return types for return statements
+ // within the selection. We still need the enclosing function declaration because this is
+ // the top-level declaration. We inspect the top-level declaration to look for variables
+ // as well as for code replacement.
+ enclosing := outer.Type
+ for _, p := range path {
+ if p == enclosing {
+ break
+ }
+ if fl, ok := p.(*ast.FuncLit); ok {
+ enclosing = fl.Type
+ break
+ }
+ }
+
+ // We put the selection in a constructed file. We can then traverse and edit
+ // the extracted selection without modifying the original AST.
+ startOffset, endOffset, err := safetoken.Offsets(tok, start, end)
+ if err != nil {
+ return nil, err
+ }
+ selection := src[startOffset:endOffset]
+ extractedBlock, err := parseBlockStmt(fset, selection)
+ if err != nil {
+ return nil, err
+ }
+
+ // We need to account for return statements in the selected block, as they will complicate
+ // the logical flow of the extracted function. See the following example, where ** denotes
+ // the range to be extracted.
+ //
+ // Before:
+ //
+ // func _() int {
+ // a := 1
+ // b := 2
+ // **if a == b {
+ // return a
+ // }**
+ // ...
+ // }
+ //
+ // After:
+ //
+ // func _() int {
+ // a := 1
+ // b := 2
+ // cond0, ret0 := x0(a, b)
+ // if cond0 {
+ // return ret0
+ // }
+ // ...
+ // }
+ //
+ // func x0(a int, b int) (bool, int) {
+ // if a == b {
+ // return true, a
+ // }
+ // return false, 0
+ // }
+ //
+ // We handle returns by adding an additional boolean return value to the extracted function.
+ // This bool reports whether the original function would have returned. Because the
+ // extracted selection contains a return statement, we must also add the types in the
+ // return signature of the enclosing function to the return signature of the
+ // extracted function. We then add an extra if statement checking this boolean value
+ // in the original function. If the condition is met, the original function should
+ // return a value, mimicking the functionality of the original return statement(s)
+ // in the selection.
+ //
+	// If there is a return that is guaranteed to execute (hasNonNestedReturn=true), then
+ // we don't need to include this additional condition check and can simply return.
+ //
+ // Before:
+ //
+ // func _() int {
+ // a := 1
+ // b := 2
+ // **if a == b {
+ // return a
+ // }
+ // return b**
+ // }
+ //
+ // After:
+ //
+ // func _() int {
+ // a := 1
+ // b := 2
+ // return x0(a, b)
+ // }
+ //
+ // func x0(a int, b int) int {
+ // if a == b {
+ // return a
+ // }
+ // return b
+ // }
+
+ var retVars []*returnVariable
+ var ifReturn *ast.IfStmt
+ if containsReturnStatement {
+ if !hasNonNestedReturn {
+ // The selected block contained return statements, so we have to modify the
+ // signature of the extracted function as described above. Adjust all of
+ // the return statements in the extracted function to reflect this change in
+ // signature.
+ if err := adjustReturnStatements(returnTypes, seenVars, fset, file,
+ pkg, extractedBlock); err != nil {
+ return nil, err
+ }
+ }
+ // Collect the additional return values and types needed to accommodate return
+ // statements in the selection. Update the type signature of the extracted
+ // function and construct the if statement that will be inserted in the enclosing
+ // function.
+ retVars, ifReturn, err = generateReturnInfo(enclosing, pkg, path, file, info, fset, start, hasNonNestedReturn)
+ if err != nil {
+ return nil, err
+ }
+ }
+
+ // Add a return statement to the end of the new function. This return statement must include
+ // the values for the types of the original extracted function signature and (if a return
+ // statement is present in the selection) enclosing function signature.
+	// This only needs to be done if the selection does not have a non-nested return;
+	// otherwise, it already terminates with a return statement.
+ hasReturnValues := len(returns)+len(retVars) > 0
+ if hasReturnValues && !hasNonNestedReturn {
+ extractedBlock.List = append(extractedBlock.List, &ast.ReturnStmt{
+ Results: append(returns, getZeroVals(retVars)...),
+ })
+ }
+
+ // Construct the appropriate call to the extracted function.
+	// We must meet two conditions to use ":=" instead of "=". (1) there must be at least
+ // one variable on the lhs that is uninitialized (non-free) prior to the assignment.
+ // (2) all of the initialized (free) variables on the lhs must be able to be redefined.
+ sym := token.ASSIGN
+ canDefineCount := len(uninitialized) + canRedefineCount
+ canDefine := len(uninitialized)+len(retVars) > 0 && canDefineCount == len(returns)
+ if canDefine {
+ sym = token.DEFINE
+ }
+ var name, funName string
+ if isMethod {
+ name = "newMethod"
+ // TODO(suzmue): generate a name that does not conflict for "newMethod".
+ funName = name
+ } else {
+ name = "newFunction"
+ funName, _ = generateAvailableIdentifier(start, file, path, info, name, 0)
+ }
+ extractedFunCall := generateFuncCall(hasNonNestedReturn, hasReturnValues, params,
+ append(returns, getNames(retVars)...), funName, sym, receiverName)
+
+ // Build the extracted function.
+ newFunc := &ast.FuncDecl{
+ Name: ast.NewIdent(funName),
+ Type: &ast.FuncType{
+ Params: &ast.FieldList{List: paramTypes},
+ Results: &ast.FieldList{List: append(returnTypes, getDecls(retVars)...)},
+ },
+ Body: extractedBlock,
+ }
+ if isMethod {
+ var names []*ast.Ident
+ if receiverUsed {
+ names = append(names, ast.NewIdent(receiverName))
+ }
+ newFunc.Recv = &ast.FieldList{
+ List: []*ast.Field{{
+ Names: names,
+ Type: receiver.Type,
+ }},
+ }
+ }
+
+ // Create variable declarations for any identifiers that need to be initialized prior to
+ // calling the extracted function. We do not manually initialize variables if every return
+ // value is uninitialized. We can use := to initialize the variables in this situation.
+ var declarations []ast.Stmt
+ if canDefineCount != len(returns) {
+ declarations = initializeVars(uninitialized, retVars, seenUninitialized, seenVars)
+ }
+
+ var declBuf, replaceBuf, newFuncBuf, ifBuf, commentBuf bytes.Buffer
+ if err := format.Node(&declBuf, fset, declarations); err != nil {
+ return nil, err
+ }
+ if err := format.Node(&replaceBuf, fset, extractedFunCall); err != nil {
+ return nil, err
+ }
+ if ifReturn != nil {
+ if err := format.Node(&ifBuf, fset, ifReturn); err != nil {
+ return nil, err
+ }
+ }
+ if err := format.Node(&newFuncBuf, fset, newFunc); err != nil {
+ return nil, err
+ }
+ // Find all the comments within the range and print them to be put somewhere.
+ // TODO(suzmue): print these in the extracted function at the correct place.
+ for _, cg := range file.Comments {
+ if cg.Pos().IsValid() && cg.Pos() < end && cg.Pos() >= start {
+ for _, c := range cg.List {
+ fmt.Fprintln(&commentBuf, c.Text)
+ }
+ }
+ }
+
+ // We're going to replace the whole enclosing function,
+ // so preserve the text before and after the selected block.
+ outerStart, outerEnd, err := safetoken.Offsets(tok, outer.Pos(), outer.End())
+ if err != nil {
+ return nil, err
+ }
+ before := src[outerStart:startOffset]
+ after := src[endOffset:outerEnd]
+ indent, err := calculateIndentation(src, tok, node)
+ if err != nil {
+ return nil, err
+ }
+ newLineIndent := "\n" + indent
+
+ var fullReplacement strings.Builder
+ fullReplacement.Write(before)
+ if commentBuf.Len() > 0 {
+ comments := strings.ReplaceAll(commentBuf.String(), "\n", newLineIndent)
+ fullReplacement.WriteString(comments)
+ }
+ if declBuf.Len() > 0 { // add any initializations, if needed
+ initializations := strings.ReplaceAll(declBuf.String(), "\n", newLineIndent) +
+ newLineIndent
+ fullReplacement.WriteString(initializations)
+ }
+ fullReplacement.Write(replaceBuf.Bytes()) // call the extracted function
+ if ifBuf.Len() > 0 { // add the if statement below the function call, if needed
+ ifstatement := newLineIndent +
+ strings.ReplaceAll(ifBuf.String(), "\n", newLineIndent)
+ fullReplacement.WriteString(ifstatement)
+ }
+ fullReplacement.Write(after)
+ fullReplacement.WriteString("\n\n") // add newlines after the enclosing function
+ fullReplacement.Write(newFuncBuf.Bytes()) // insert the extracted function
+
+ return &analysis.SuggestedFix{
+ TextEdits: []analysis.TextEdit{{
+ Pos: outer.Pos(),
+ End: outer.End(),
+ NewText: []byte(fullReplacement.String()),
+ }},
+ }, nil
+}
+
+// adjustRangeForCommentsAndWhiteSpace adjusts the given range to exclude unnecessary leading or
+// trailing whitespace characters from the selection, as well as leading or
+// trailing comments. In the following example, each line of the if statement
+// is indented once. There are also two extra spaces after the closing bracket
+// before the line break, and a trailing comment.
+//
+// \tif (true) {
+// \t _ = 1
+// \t} // hello \n
+//
+// By default, a valid range begins at 'if' and ends at the first whitespace character
+// after the '}'. But, users are likely to highlight full lines rather than adjusting
+// their cursors for whitespace. To support this use case, we must manually adjust the
+// ranges to match the correct AST node. In this particular example, we would adjust
+// rng.Start forward to the start of 'if' and rng.End backward to after '}'.
+func adjustRangeForCommentsAndWhiteSpace(tok *token.File, start, end token.Pos, content []byte, file *ast.File) (token.Pos, token.Pos, error) {
+	// Adjust the start of the range to be after leading whitespace and comments.
+ prevStart := token.NoPos
+ startComment := sort.Search(len(file.Comments), func(i int) bool {
+ // Find the index for the first comment that ends after range start.
+ return file.Comments[i].End() > start
+ })
+ for prevStart != start {
+ prevStart = start
+ // If start is within a comment, move start to the end
+ // of the comment group.
+ if startComment < len(file.Comments) && file.Comments[startComment].Pos() <= start && start < file.Comments[startComment].End() {
+ start = file.Comments[startComment].End()
+ startComment++
+ }
+ // Move forwards to find a non-whitespace character.
+ offset, err := safetoken.Offset(tok, start)
+ if err != nil {
+ return 0, 0, err
+ }
+ for offset < len(content) && isGoWhiteSpace(content[offset]) {
+ offset++
+ }
+ start = tok.Pos(offset)
+ }
+
+	// Adjust the end of the range to be before trailing whitespace and comments.
+ prevEnd := token.NoPos
+ endComment := sort.Search(len(file.Comments), func(i int) bool {
+ // Find the index for the first comment that ends after the range end.
+ return file.Comments[i].End() >= end
+ })
+ // Search will return n if not found, so we need to adjust if there are no
+ // comments that would match.
+ if endComment == len(file.Comments) {
+ endComment = -1
+ }
+ for prevEnd != end {
+ prevEnd = end
+ // If end is within a comment, move end to the start
+ // of the comment group.
+ if endComment >= 0 && file.Comments[endComment].Pos() < end && end <= file.Comments[endComment].End() {
+ end = file.Comments[endComment].Pos()
+ endComment--
+ }
+ // Move backwards to find a non-whitespace character.
+ offset, err := safetoken.Offset(tok, end)
+ if err != nil {
+ return 0, 0, err
+ }
+ for offset > 0 && isGoWhiteSpace(content[offset-1]) {
+ offset--
+ }
+ end = tok.Pos(offset)
+ }
+
+ return start, end, nil
+}
+
+// isGoWhiteSpace reports whether b is considered whitespace in Go, as defined
+// by scanner.GoWhitespace.
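+// (That mask is 1<<'\t' | 1<<'\n' | 1<<'\r' | 1<<' '.)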
+func isGoWhiteSpace(b byte) bool {
+ return uint64(scanner.GoWhitespace)&(1<<uint(b)) != 0
+}
+
+// findParent finds the parent AST node of the given target node, if the target is a
+// descendant of the starting node.
+func findParent(start ast.Node, target ast.Node) ast.Node {
+ var parent ast.Node
+ analysisinternal.WalkASTWithParent(start, func(n, p ast.Node) bool {
+ if n == target {
+ parent = p
+ return false
+ }
+ return true
+ })
+ return parent
+}
+
+// variable describes the status of a variable within a selection.
+type variable struct {
+ obj types.Object
+
+ // free reports whether the variable is a free variable, meaning it should
+ // be a parameter to the extracted function.
+ free bool
+
+ // assigned reports whether the variable is assigned to in the selection.
+ assigned bool
+
+ // defined reports whether the variable is defined in the selection.
+ defined bool
+}
+
+// collectFreeVars maps each identifier in the given range to whether it is "free."
+// A variable in that range is "free" if it is declared outside of the range,
+// but not at file or package scope. These free
+// variables will be used as arguments in the extracted function. It also returns a
+// list of identifiers that may need to be returned by the extracted function.
+// Some of the code in this function has been adapted from tools/cmd/guru/freevars.go.
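+//
+// For example (illustrative): if the selection is **y := x + 1** and x is
+// declared earlier in the enclosing function, then x is free and becomes a
+// parameter of the extracted function, while y is defined within the
+// selection and is not free.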
+func collectFreeVars(info *types.Info, file *ast.File, fileScope, pkgScope *types.Scope, start, end token.Pos, node ast.Node) ([]*variable, error) {
+ // id returns non-nil if n denotes an object that is referenced by the span
+ // and defined either within the span or in the lexical environment. The bool
+ // return value acts as an indicator for where it was defined.
+ id := func(n *ast.Ident) (types.Object, bool) {
+ obj := info.Uses[n]
+ if obj == nil {
+ return info.Defs[n], false
+ }
+ if obj.Name() == "_" {
+ return nil, false // exclude objects denoting '_'
+ }
+ if _, ok := obj.(*types.PkgName); ok {
+ return nil, false // imported package
+ }
+ if !(file.Pos() <= obj.Pos() && obj.Pos() <= file.End()) {
+ return nil, false // not defined in this file
+ }
+ scope := obj.Parent()
+ if scope == nil {
+ return nil, false // e.g. interface method, struct field
+ }
+ if scope == fileScope || scope == pkgScope {
+ return nil, false // defined at file or package scope
+ }
+ if start <= obj.Pos() && obj.Pos() <= end {
+ return obj, false // defined within selection => not free
+ }
+ return obj, true
+ }
+ // sel returns non-nil if n denotes a selection o.x.y that is referenced by the
+ // span and defined either within the span or in the lexical environment. The bool
+ // return value acts as an indicator for where it was defined.
+ var sel func(n *ast.SelectorExpr) (types.Object, bool)
+ sel = func(n *ast.SelectorExpr) (types.Object, bool) {
+ switch x := astutil.Unparen(n.X).(type) {
+ case *ast.SelectorExpr:
+ return sel(x)
+ case *ast.Ident:
+ return id(x)
+ }
+ return nil, false
+ }
+ seen := make(map[types.Object]*variable)
+ firstUseIn := make(map[types.Object]token.Pos)
+ var vars []types.Object
+ ast.Inspect(node, func(n ast.Node) bool {
+ if n == nil {
+ return false
+ }
+ if start <= n.Pos() && n.End() <= end {
+ var obj types.Object
+ var isFree, prune bool
+ switch n := n.(type) {
+ case *ast.Ident:
+ obj, isFree = id(n)
+ case *ast.SelectorExpr:
+ obj, isFree = sel(n)
+ prune = true
+ }
+ if obj != nil {
+ seen[obj] = &variable{
+ obj: obj,
+ free: isFree,
+ }
+ vars = append(vars, obj)
+ // Find the first time that the object is used in the selection.
+ first, ok := firstUseIn[obj]
+ if !ok || n.Pos() < first {
+ firstUseIn[obj] = n.Pos()
+ }
+ if prune {
+ return false
+ }
+ }
+ }
+ return n.Pos() <= end
+ })
+
+ // Find identifiers that are initialized or whose values are altered at some
+ // point in the selected block. For example, in a selected block from lines 2-4,
+ // variables x, y, and z are included in assigned. However, in a selected block
+ // from lines 3-4, only variables y and z are included in assigned.
+ //
+ // 1: var a int
+ // 2: var x int
+ // 3: y := 3
+ // 4: z := x + a
+ //
+ ast.Inspect(node, func(n ast.Node) bool {
+ if n == nil {
+ return false
+ }
+ if n.Pos() < start || n.End() > end {
+ return n.Pos() <= end
+ }
+ switch n := n.(type) {
+ case *ast.AssignStmt:
+ for _, assignment := range n.Lhs {
+ lhs, ok := assignment.(*ast.Ident)
+ if !ok {
+ continue
+ }
+ obj, _ := id(lhs)
+ if obj == nil {
+ continue
+ }
+ if _, ok := seen[obj]; !ok {
+ continue
+ }
+ seen[obj].assigned = true
+ if n.Tok != token.DEFINE {
+ continue
+ }
+ // Find identifiers that are defined prior to being used
+ // elsewhere in the selection.
+ // TODO: Include identifiers that are assigned prior to being
+ // used elsewhere in the selection. Then, change the assignment
+ // to a definition in the extracted function.
+ if firstUseIn[obj] != lhs.Pos() {
+ continue
+ }
+ // Ensure that the object is not used in its own re-definition.
+ // For example:
+ // var f float64
+ // f, e := math.Frexp(f)
+ for _, expr := range n.Rhs {
+ if referencesObj(info, expr, obj) {
+ continue
+ }
+ if _, ok := seen[obj]; !ok {
+ continue
+ }
+ seen[obj].defined = true
+ break
+ }
+ }
+ return false
+ case *ast.DeclStmt:
+ gen, ok := n.Decl.(*ast.GenDecl)
+ if !ok {
+ return false
+ }
+ for _, spec := range gen.Specs {
+ vSpecs, ok := spec.(*ast.ValueSpec)
+ if !ok {
+ continue
+ }
+ for _, vSpec := range vSpecs.Names {
+ obj, _ := id(vSpec)
+ if obj == nil {
+ continue
+ }
+ if _, ok := seen[obj]; !ok {
+ continue
+ }
+ seen[obj].assigned = true
+ }
+ }
+ return false
+ case *ast.IncDecStmt:
+ if ident, ok := n.X.(*ast.Ident); !ok {
+ return false
+ } else if obj, _ := id(ident); obj == nil {
+ return false
+ } else {
+ if _, ok := seen[obj]; !ok {
+ return false
+ }
+ seen[obj].assigned = true
+ }
+ }
+ return true
+ })
+ var variables []*variable
+ for _, obj := range vars {
+ v, ok := seen[obj]
+ if !ok {
+ return nil, fmt.Errorf("no seen types.Object for %v", obj)
+ }
+ variables = append(variables, v)
+ }
+ return variables, nil
+}
+
+// referencesObj checks whether the given object appears in the given expression.
+func referencesObj(info *types.Info, expr ast.Expr, obj types.Object) bool {
+ var hasObj bool
+ ast.Inspect(expr, func(n ast.Node) bool {
+ if n == nil {
+ return false
+ }
+ ident, ok := n.(*ast.Ident)
+ if !ok {
+ return true
+ }
+ objUse := info.Uses[ident]
+ if obj == objUse {
+ hasObj = true
+ return false
+ }
+ return false
+ })
+ return hasObj
+}
+
+type fnExtractParams struct {
+ tok *token.File
+ start, end token.Pos
+ path []ast.Node
+ outer *ast.FuncDecl
+ node ast.Node
+}
+
+// CanExtractFunction reports whether the code in the given range can be
+// extracted to a function. The additional boolean result reports whether the
+// enclosing declaration is a method, so that the code could instead be
+// extracted to a method.
+func CanExtractFunction(tok *token.File, start, end token.Pos, src []byte, file *ast.File) (*fnExtractParams, bool, bool, error) {
+ if start == end {
+ return nil, false, false, fmt.Errorf("start and end are equal")
+ }
+ var err error
+ start, end, err = adjustRangeForCommentsAndWhiteSpace(tok, start, end, src, file)
+ if err != nil {
+ return nil, false, false, err
+ }
+ path, _ := astutil.PathEnclosingInterval(file, start, end)
+ if len(path) == 0 {
+ return nil, false, false, fmt.Errorf("no path enclosing interval")
+ }
+	// The node that encloses the selection must be a statement.
+ // TODO: Support function extraction for an expression.
+ _, ok := path[0].(ast.Stmt)
+ if !ok {
+ return nil, false, false, fmt.Errorf("node is not a statement")
+ }
+
+ // Find the function declaration that encloses the selection.
+ var outer *ast.FuncDecl
+ for _, p := range path {
+ if p, ok := p.(*ast.FuncDecl); ok {
+ outer = p
+ break
+ }
+ }
+ if outer == nil {
+ return nil, false, false, fmt.Errorf("no enclosing function")
+ }
+
+ // Find the nodes at the start and end of the selection.
+ var startNode, endNode ast.Node
+ ast.Inspect(outer, func(n ast.Node) bool {
+ if n == nil {
+ return false
+ }
+ // Do not override 'start' with a node that begins at the same location
+ // but is nested further from 'outer'.
+ if startNode == nil && n.Pos() == start && n.End() <= end {
+ startNode = n
+ }
+ if endNode == nil && n.End() == end && n.Pos() >= start {
+ endNode = n
+ }
+ return n.Pos() <= end
+ })
+ if startNode == nil || endNode == nil {
+ return nil, false, false, fmt.Errorf("range does not map to AST nodes")
+ }
+ // If the region is a blockStmt, use the first and last nodes in the block
+ // statement.
+ // <rng.start>{ ... }<rng.end> => { <rng.start>...<rng.end> }
+ if blockStmt, ok := startNode.(*ast.BlockStmt); ok {
+ if len(blockStmt.List) == 0 {
+ return nil, false, false, fmt.Errorf("range maps to empty block statement")
+ }
+ startNode, endNode = blockStmt.List[0], blockStmt.List[len(blockStmt.List)-1]
+ start, end = startNode.Pos(), endNode.End()
+ }
+ return &fnExtractParams{
+ tok: tok,
+ start: start,
+ end: end,
+ path: path,
+ outer: outer,
+ node: startNode,
+ }, true, outer.Recv != nil, nil
+}
+
+// objUsed checks if the object is used within the range. It returns the first
+// occurrence of the object in the range, if it exists.
+func objUsed(info *types.Info, start, end token.Pos, obj types.Object) (bool, *ast.Ident) {
+ var firstUse *ast.Ident
+ for id, objUse := range info.Uses {
+ if obj != objUse {
+ continue
+ }
+ if id.Pos() < start || id.End() > end {
+ continue
+ }
+ if firstUse == nil || id.Pos() < firstUse.Pos() {
+ firstUse = id
+ }
+ }
+ return firstUse != nil, firstUse
+}
+
+// varOverridden traverses the given AST node until we find the given identifier. Then, we
+// examine the occurrence of the given identifier and check for (1) whether the identifier
+// is being redefined. If the identifier is free, we also check for (2) whether the identifier
+// is being reassigned. We will not include an identifier in the return statement of the
+// extracted function if it meets one of the above conditions.
+func varOverridden(info *types.Info, firstUse *ast.Ident, obj types.Object, isFree bool, node ast.Node) bool {
+	var isOverridden bool
+ ast.Inspect(node, func(n ast.Node) bool {
+ if n == nil {
+ return false
+ }
+ assignment, ok := n.(*ast.AssignStmt)
+ if !ok {
+ return true
+ }
+ // A free variable is initialized prior to the selection. We can always reassign
+ // this variable after the selection because it has already been defined.
+ // Conversely, a non-free variable is initialized within the selection. Thus, we
+ // cannot reassign this variable after the selection unless it is initialized and
+ // returned by the extracted function.
+ if !isFree && assignment.Tok == token.ASSIGN {
+ return false
+ }
+ for _, assigned := range assignment.Lhs {
+ ident, ok := assigned.(*ast.Ident)
+ // Check if we found the first use of the identifier.
+ if !ok || ident != firstUse {
+ continue
+ }
+ objUse := info.Uses[ident]
+ if objUse == nil || objUse != obj {
+ continue
+ }
+ // Ensure that the object is not used in its own definition.
+ // For example:
+ // var f float64
+ // f, e := math.Frexp(f)
+ for _, expr := range assignment.Rhs {
+ if referencesObj(info, expr, obj) {
+ return false
+ }
+ }
+			isOverridden = true
+ return false
+ }
+ return false
+ })
+	return isOverridden
+}
+
+// parseBlockStmt generates an AST file from the given text and returns the
+// portion of the file that represents the text.
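+//
+// For example, given the text "x := 1", we parse
+// "package main\nfunc _() { x := 1 }" and return the *ast.BlockStmt
+// representing "{ x := 1 }".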
+func parseBlockStmt(fset *token.FileSet, src []byte) (*ast.BlockStmt, error) {
+ text := "package main\nfunc _() { " + string(src) + " }"
+ extract, err := parser.ParseFile(fset, "", text, 0)
+ if err != nil {
+ return nil, err
+ }
+ if len(extract.Decls) == 0 {
+ return nil, fmt.Errorf("parsed file does not contain any declarations")
+ }
+ decl, ok := extract.Decls[0].(*ast.FuncDecl)
+ if !ok {
+ return nil, fmt.Errorf("parsed file does not contain expected function declaration")
+ }
+ if decl.Body == nil {
+ return nil, fmt.Errorf("extracted function has no body")
+ }
+ return decl.Body, nil
+}
+
+// generateReturnInfo generates the information we need to adjust the return statements and
+// signature of the extracted function. We prepare names, signatures, and "zero values" that
+// represent the new variables. We also use this information to construct the if statement that
+// is inserted below the call to the extracted function.
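+//
+// For example (illustrative, assuming no name collisions): if the enclosing
+// function returns (int, error) and the selection has no non-nested return,
+// the added variables are "shouldReturn bool", "returnValue int", and
+// "returnValue1 error", and the generated if statement is
+//
+//	if shouldReturn {
+//		return returnValue, returnValue1
+//	}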
+func generateReturnInfo(enclosing *ast.FuncType, pkg *types.Package, path []ast.Node, file *ast.File, info *types.Info, fset *token.FileSet, pos token.Pos, hasNonNestedReturns bool) ([]*returnVariable, *ast.IfStmt, error) {
+ var retVars []*returnVariable
+ var cond *ast.Ident
+ if !hasNonNestedReturns {
+ // Generate information for the added bool value.
+ name, _ := generateAvailableIdentifier(pos, file, path, info, "shouldReturn", 0)
+ cond = &ast.Ident{Name: name}
+ retVars = append(retVars, &returnVariable{
+ name: cond,
+ decl: &ast.Field{Type: ast.NewIdent("bool")},
+ zeroVal: ast.NewIdent("false"),
+ })
+ }
+ // Generate information for the values in the return signature of the enclosing function.
+ if enclosing.Results != nil {
+ idx := 0
+ for _, field := range enclosing.Results.List {
+ typ := info.TypeOf(field.Type)
+ if typ == nil {
+ return nil, nil, fmt.Errorf(
+ "failed type conversion, AST expression: %T", field.Type)
+ }
+ expr := analysisinternal.TypeExpr(file, pkg, typ)
+ if expr == nil {
+ return nil, nil, fmt.Errorf("nil AST expression")
+ }
+ var name string
+ name, idx = generateAvailableIdentifier(pos, file,
+ path, info, "returnValue", idx)
+ retVars = append(retVars, &returnVariable{
+ name: ast.NewIdent(name),
+ decl: &ast.Field{Type: expr},
+ zeroVal: analysisinternal.ZeroValue(file, pkg, typ),
+ })
+ }
+ }
+ var ifReturn *ast.IfStmt
+ if !hasNonNestedReturns {
+ // Create the return statement for the enclosing function. We must exclude the variable
+ // for the condition of the if statement (cond) from the return statement.
+ ifReturn = &ast.IfStmt{
+ Cond: cond,
+ Body: &ast.BlockStmt{
+ List: []ast.Stmt{&ast.ReturnStmt{Results: getNames(retVars)[1:]}},
+ },
+ }
+ }
+ return retVars, ifReturn, nil
+}
+
+// adjustReturnStatements adds "zero values" of the given types to each return statement
+// in the given AST node.
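+//
+// For example (mirroring the earlier Before/After example): if the extracted
+// function itself contributes no return values of its own, a statement
+// "return a" in the selection becomes "return true, a", where true is the
+// added condition value.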
+func adjustReturnStatements(returnTypes []*ast.Field, seenVars map[types.Object]ast.Expr, fset *token.FileSet, file *ast.File, pkg *types.Package, extractedBlock *ast.BlockStmt) error {
+ var zeroVals []ast.Expr
+ // Create "zero values" for each type.
+ for _, returnType := range returnTypes {
+ var val ast.Expr
+ for obj, typ := range seenVars {
+ if typ != returnType.Type {
+ continue
+ }
+ val = analysisinternal.ZeroValue(file, pkg, obj.Type())
+ break
+ }
+ if val == nil {
+ return fmt.Errorf(
+ "could not find matching AST expression for %T", returnType.Type)
+ }
+ zeroVals = append(zeroVals, val)
+ }
+ // Add "zero values" to each return statement.
+ // The bool reports whether the enclosing function should return after calling the
+ // extracted function. We set the bool to 'true' because, if these return statements
+ // execute, the extracted function terminates early, and the enclosing function must
+ // return as well.
+ zeroVals = append(zeroVals, ast.NewIdent("true"))
+ ast.Inspect(extractedBlock, func(n ast.Node) bool {
+ if n == nil {
+ return false
+ }
+ if n, ok := n.(*ast.ReturnStmt); ok {
+ n.Results = append(zeroVals, n.Results...)
+ return false
+ }
+ return true
+ })
+ return nil
+}
+
+// generateFuncCall constructs a call expression for the extracted function, described by the
+// given parameters and return variables.
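+//
+// For example (illustrative): with name "x0", params (a, b), returns
+// (cond0, ret0), and token ":=", the generated node is
+// "cond0, ret0 := x0(a, b)"; with hasNonNestedReturn set, it is
+// "return x0(a, b)" instead.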
+func generateFuncCall(hasNonNestedReturn, hasReturnVals bool, params, returns []ast.Expr, name string, token token.Token, selector string) ast.Node {
+ var replace ast.Node
+ callExpr := &ast.CallExpr{
+ Fun: ast.NewIdent(name),
+ Args: params,
+ }
+ if selector != "" {
+ callExpr = &ast.CallExpr{
+ Fun: &ast.SelectorExpr{
+ X: ast.NewIdent(selector),
+ Sel: ast.NewIdent(name),
+ },
+ Args: params,
+ }
+ }
+ if hasReturnVals {
+ if hasNonNestedReturn {
+ // Create a return statement that returns the result of the function call.
+ replace = &ast.ReturnStmt{
+ Return: 0,
+ Results: []ast.Expr{callExpr},
+ }
+ } else {
+ // Assign the result of the function call.
+ replace = &ast.AssignStmt{
+ Lhs: returns,
+ Tok: token,
+ Rhs: []ast.Expr{callExpr},
+ }
+ }
+ } else {
+ replace = callExpr
+ }
+ return replace
+}
+
+// initializeVars creates variable declarations, if needed.
+// Our preference is to replace the selected block with an "x, y, z := fn()" style
+// assignment statement. We can use this style when all of the variables in the
+// extracted function's return statement are either not defined prior to the extracted block
+// or can be safely redefined. However, for example, if z is already defined
+// in a different scope, we replace the selected block with:
+//
+// var x int
+// var y string
+// x, y, z = fn()
+func initializeVars(uninitialized []types.Object, retVars []*returnVariable, seenUninitialized map[types.Object]struct{}, seenVars map[types.Object]ast.Expr) []ast.Stmt {
+ var declarations []ast.Stmt
+ for _, obj := range uninitialized {
+ if _, ok := seenUninitialized[obj]; ok {
+ continue
+ }
+ seenUninitialized[obj] = struct{}{}
+ valSpec := &ast.ValueSpec{
+ Names: []*ast.Ident{ast.NewIdent(obj.Name())},
+ Type: seenVars[obj],
+ }
+ genDecl := &ast.GenDecl{
+ Tok: token.VAR,
+ Specs: []ast.Spec{valSpec},
+ }
+ declarations = append(declarations, &ast.DeclStmt{Decl: genDecl})
+ }
+ // Each variable added from a return statement in the selection
+ // must be initialized.
+ for i, retVar := range retVars {
+ n := retVar.name.(*ast.Ident)
+ valSpec := &ast.ValueSpec{
+ Names: []*ast.Ident{n},
+ Type: retVars[i].decl.Type,
+ }
+ genDecl := &ast.GenDecl{
+ Tok: token.VAR,
+ Specs: []ast.Spec{valSpec},
+ }
+ declarations = append(declarations, &ast.DeclStmt{Decl: genDecl})
+ }
+ return declarations
+}
+
+// getNames returns the names from the given list of returnVariable.
+func getNames(retVars []*returnVariable) []ast.Expr {
+ var names []ast.Expr
+ for _, retVar := range retVars {
+ names = append(names, retVar.name)
+ }
+ return names
+}
+
+// getZeroVals returns the "zero values" from the given list of returnVariable.
+func getZeroVals(retVars []*returnVariable) []ast.Expr {
+ var zvs []ast.Expr
+ for _, retVar := range retVars {
+ zvs = append(zvs, retVar.zeroVal)
+ }
+ return zvs
+}
+
+// getDecls returns the declarations from the given list of returnVariable.
+func getDecls(retVars []*returnVariable) []*ast.Field {
+ var decls []*ast.Field
+ for _, retVar := range retVars {
+ decls = append(decls, retVar.decl)
+ }
+ return decls
+}
diff --git a/gopls/internal/lsp/source/fix.go b/gopls/internal/lsp/source/fix.go
new file mode 100644
index 000000000..2ed55c44d
--- /dev/null
+++ b/gopls/internal/lsp/source/fix.go
@@ -0,0 +1,138 @@
+// Copyright 2020 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package source
+
+import (
+ "context"
+ "fmt"
+ "go/ast"
+ "go/token"
+ "go/types"
+
+ "golang.org/x/tools/go/analysis"
+ "golang.org/x/tools/gopls/internal/lsp/analysis/fillstruct"
+ "golang.org/x/tools/gopls/internal/lsp/analysis/undeclaredname"
+ "golang.org/x/tools/gopls/internal/lsp/protocol"
+ "golang.org/x/tools/gopls/internal/span"
+ "golang.org/x/tools/internal/bug"
+)
+
+type (
+ // SuggestedFixFunc is a function used to get the suggested fixes for a given
+ // gopls command, some of which are provided by go/analysis.Analyzers. Some of
+ // the analyzers in internal/lsp/analysis are not efficient enough to include
+ // suggested fixes with their diagnostics, so we have to compute them
+ // separately. Such analyzers should provide a function with a signature of
+ // SuggestedFixFunc.
+ //
+ // The returned FileSet must map all token.Pos found in the suggested text
+ // edits.
+ SuggestedFixFunc func(ctx context.Context, snapshot Snapshot, fh FileHandle, pRng protocol.Range) (*token.FileSet, *analysis.SuggestedFix, error)
+ singleFileFixFunc func(fset *token.FileSet, start, end token.Pos, src []byte, file *ast.File, pkg *types.Package, info *types.Info) (*analysis.SuggestedFix, error)
+)
+
+const (
+ FillStruct = "fill_struct"
+ StubMethods = "stub_methods"
+ UndeclaredName = "undeclared_name"
+ ExtractVariable = "extract_variable"
+ ExtractFunction = "extract_function"
+ ExtractMethod = "extract_method"
+)
+
+// suggestedFixes maps a suggested fix command id to its handler.
+var suggestedFixes = map[string]SuggestedFixFunc{
+ FillStruct: singleFile(fillstruct.SuggestedFix),
+ UndeclaredName: singleFile(undeclaredname.SuggestedFix),
+ ExtractVariable: singleFile(extractVariable),
+ ExtractFunction: singleFile(extractFunction),
+ ExtractMethod: singleFile(extractMethod),
+ StubMethods: stubSuggestedFixFunc,
+}
+
+// singleFile calls analyzers that expect inputs for a single file.
+func singleFile(sf singleFileFixFunc) SuggestedFixFunc {
+ return func(ctx context.Context, snapshot Snapshot, fh FileHandle, pRng protocol.Range) (*token.FileSet, *analysis.SuggestedFix, error) {
+ pkg, pgf, err := PackageForFile(ctx, snapshot, fh.URI(), NarrowestPackage)
+ if err != nil {
+ return nil, nil, err
+ }
+ start, end, err := pgf.RangePos(pRng)
+ if err != nil {
+ return nil, nil, err
+ }
+ fix, err := sf(pkg.FileSet(), start, end, pgf.Src, pgf.File, pkg.GetTypes(), pkg.GetTypesInfo())
+ return pkg.FileSet(), fix, err
+ }
+}
+
+func SuggestedFixFromCommand(cmd protocol.Command, kind protocol.CodeActionKind) SuggestedFix {
+ return SuggestedFix{
+ Title: cmd.Title,
+ Command: &cmd,
+ ActionKind: kind,
+ }
+}
+
+// ApplyFix applies the command's suggested fix to the given file and
+// range, returning the resulting edits.
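+//
+// A hypothetical call site (for illustration only; ctx, snapshot, fh, and
+// rng are assumed to be in scope):
+//
+//	edits, err := ApplyFix(ctx, ExtractFunction, snapshot, fh, rng)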
+func ApplyFix(ctx context.Context, fix string, snapshot Snapshot, fh FileHandle, pRng protocol.Range) ([]protocol.TextDocumentEdit, error) {
+ handler, ok := suggestedFixes[fix]
+ if !ok {
+ return nil, fmt.Errorf("no suggested fix function for %s", fix)
+ }
+ fset, suggestion, err := handler(ctx, snapshot, fh, pRng)
+ if err != nil {
+ return nil, err
+ }
+ if suggestion == nil {
+ return nil, nil
+ }
+ editsPerFile := map[span.URI]*protocol.TextDocumentEdit{}
+ for _, edit := range suggestion.TextEdits {
+ tokFile := fset.File(edit.Pos)
+ if tokFile == nil {
+ return nil, bug.Errorf("no file for edit position")
+ }
+ end := edit.End
+ if !end.IsValid() {
+ end = edit.Pos
+ }
+ fh, err := snapshot.GetFile(ctx, span.URIFromPath(tokFile.Name()))
+ if err != nil {
+ return nil, err
+ }
+ te, ok := editsPerFile[fh.URI()]
+ if !ok {
+ te = &protocol.TextDocumentEdit{
+ TextDocument: protocol.OptionalVersionedTextDocumentIdentifier{
+ Version: fh.Version(),
+ TextDocumentIdentifier: protocol.TextDocumentIdentifier{
+ URI: protocol.URIFromSpanURI(fh.URI()),
+ },
+ },
+ }
+ editsPerFile[fh.URI()] = te
+ }
+ content, err := fh.Read()
+ if err != nil {
+ return nil, err
+ }
+ m := protocol.NewMapper(fh.URI(), content)
+ rng, err := m.PosRange(tokFile, edit.Pos, end)
+ if err != nil {
+ return nil, err
+ }
+ te.Edits = append(te.Edits, protocol.TextEdit{
+ Range: rng,
+ NewText: string(edit.NewText),
+ })
+ }
+ var edits []protocol.TextDocumentEdit
+ for _, edit := range editsPerFile {
+ edits = append(edits, *edit)
+ }
+ return edits, nil
+}
diff --git a/gopls/internal/lsp/source/folding_range.go b/gopls/internal/lsp/source/folding_range.go
new file mode 100644
index 000000000..41f7b5bf5
--- /dev/null
+++ b/gopls/internal/lsp/source/folding_range.go
@@ -0,0 +1,193 @@
+// Copyright 2019 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package source
+
+import (
+ "context"
+ "go/ast"
+ "go/token"
+ "sort"
+ "strings"
+
+ "golang.org/x/tools/gopls/internal/lsp/protocol"
+ "golang.org/x/tools/internal/bug"
+)
+
+// FoldingRangeInfo holds range and kind info of folding for an ast.Node
+type FoldingRangeInfo struct {
+ MappedRange protocol.MappedRange
+ Kind protocol.FoldingRangeKind
+}
+
+// FoldingRange gets all of the folding range for f.
+func FoldingRange(ctx context.Context, snapshot Snapshot, fh FileHandle, lineFoldingOnly bool) (ranges []*FoldingRangeInfo, err error) {
+ // TODO(suzmue): consider limiting the number of folding ranges returned, and
+ // implement a way to prioritize folding ranges in that case.
+ pgf, err := snapshot.ParseGo(ctx, fh, ParseFull)
+ if err != nil {
+ return nil, err
+ }
+
+ // With parse errors, we wouldn't be able to produce accurate folding info.
+ // LSP protocol (3.16) currently does not have a way to handle this case
+ // (https://github.com/microsoft/language-server-protocol/issues/1200).
+ // We cannot return an error either because we are afraid some editors
+ // may not handle errors nicely. As a workaround, we now return an empty
+	// result and let the client handle this case by double-checking the file
+ // contents (i.e. if the file is not empty and the folding range result
+ // is empty, raise an internal error).
+ if pgf.ParseErr != nil {
+ return nil, nil
+ }
+
+ // Get folding ranges for comments separately as they are not walked by ast.Inspect.
+ ranges = append(ranges, commentsFoldingRange(pgf)...)
+
+ visit := func(n ast.Node) bool {
+ rng := foldingRangeFunc(pgf, n, lineFoldingOnly)
+ if rng != nil {
+ ranges = append(ranges, rng)
+ }
+ return true
+ }
+ // Walk the ast and collect folding ranges.
+ ast.Inspect(pgf.File, visit)
+
+ sort.Slice(ranges, func(i, j int) bool {
+ irng := ranges[i].MappedRange.Range()
+ jrng := ranges[j].MappedRange.Range()
+ return protocol.CompareRange(irng, jrng) < 0
+ })
+
+ return ranges, nil
+}
+
+// foldingRangeFunc calculates the line folding range for ast.Node n
+func foldingRangeFunc(pgf *ParsedGoFile, n ast.Node, lineFoldingOnly bool) *FoldingRangeInfo {
+ // TODO(suzmue): include trailing empty lines before the closing
+ // parenthesis/brace.
+ var kind protocol.FoldingRangeKind
+ var start, end token.Pos
+ switch n := n.(type) {
+ case *ast.BlockStmt:
+		// Fold between the positions of, or the lines between, "{" and "}".
+ var startList, endList token.Pos
+ if num := len(n.List); num != 0 {
+ startList, endList = n.List[0].Pos(), n.List[num-1].End()
+ }
+ start, end = validLineFoldingRange(pgf.Tok, n.Lbrace, n.Rbrace, startList, endList, lineFoldingOnly)
+ case *ast.CaseClause:
+ // Fold from position of ":" to end.
+ start, end = n.Colon+1, n.End()
+ case *ast.CommClause:
+ // Fold from position of ":" to end.
+ start, end = n.Colon+1, n.End()
+ case *ast.CallExpr:
+ // Fold from position of "(" to position of ")".
+ start, end = n.Lparen+1, n.Rparen
+ case *ast.FieldList:
+		// Fold between the positions of, or the lines between, the opening and closing parenthesis/brace.
+ var startList, endList token.Pos
+ if num := len(n.List); num != 0 {
+ startList, endList = n.List[0].Pos(), n.List[num-1].End()
+ }
+ start, end = validLineFoldingRange(pgf.Tok, n.Opening, n.Closing, startList, endList, lineFoldingOnly)
+ case *ast.GenDecl:
+ // If this is an import declaration, set the kind to be protocol.Imports.
+ if n.Tok == token.IMPORT {
+ kind = protocol.Imports
+ }
+		// Fold between the positions of, or the lines between, "(" and ")".
+ var startSpecs, endSpecs token.Pos
+ if num := len(n.Specs); num != 0 {
+ startSpecs, endSpecs = n.Specs[0].Pos(), n.Specs[num-1].End()
+ }
+ start, end = validLineFoldingRange(pgf.Tok, n.Lparen, n.Rparen, startSpecs, endSpecs, lineFoldingOnly)
+ case *ast.BasicLit:
+ // Fold raw string literals from position of "`" to position of "`".
+ if n.Kind == token.STRING && len(n.Value) >= 2 && n.Value[0] == '`' && n.Value[len(n.Value)-1] == '`' {
+ start, end = n.Pos(), n.End()
+ }
+ case *ast.CompositeLit:
+		// Fold between the positions of, or the lines between, "{" and "}".
+ var startElts, endElts token.Pos
+ if num := len(n.Elts); num != 0 {
+ startElts, endElts = n.Elts[0].Pos(), n.Elts[num-1].End()
+ }
+ start, end = validLineFoldingRange(pgf.Tok, n.Lbrace, n.Rbrace, startElts, endElts, lineFoldingOnly)
+ }
+
+ // Check that folding positions are valid.
+ if !start.IsValid() || !end.IsValid() {
+ return nil
+ }
+	// In line folding mode, do not fold if the start and end lines are the same.
+ if lineFoldingOnly && pgf.Tok.Line(start) == pgf.Tok.Line(end) {
+ return nil
+ }
+ mrng, err := pgf.PosMappedRange(start, end)
+ if err != nil {
+ bug.Errorf("%w", err) // can't happen
+ }
+ return &FoldingRangeInfo{
+ MappedRange: mrng,
+ Kind: kind,
+ }
+}
+
+// validLineFoldingRange returns the start and end token.Pos for a folding
+// range if the range is valid, and token.NoPos otherwise (which fails the
+// token.Pos.IsValid check).
+func validLineFoldingRange(tokFile *token.File, open, close, start, end token.Pos, lineFoldingOnly bool) (token.Pos, token.Pos) {
+ if lineFoldingOnly {
+ if !open.IsValid() || !close.IsValid() {
+ return token.NoPos, token.NoPos
+ }
+
+ // Don't want to fold if the start/end is on the same line as the open/close
+ // as an example, the example below should *not* fold:
+ // var x = [2]string{"d",
+ // "e" }
+ if tokFile.Line(open) == tokFile.Line(start) ||
+ tokFile.Line(close) == tokFile.Line(end) {
+ return token.NoPos, token.NoPos
+ }
+
+ return open + 1, end
+ }
+ return open + 1, close
+}
+
+// commentsFoldingRange returns the folding ranges for all comment blocks in file.
+// The folding range starts at the end of the first line of the comment block,
+// ends at the end of the comment block, and has kind protocol.Comment.
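+//
+// For example (illustrative): for the comment block
+//
+//	// line one
+//	// line two
+//
+// the returned range starts at the end of "// line one" and ends at the end
+// of "// line two".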
+func commentsFoldingRange(pgf *ParsedGoFile) (comments []*FoldingRangeInfo) {
+ tokFile := pgf.Tok
+ for _, commentGrp := range pgf.File.Comments {
+ startGrpLine, endGrpLine := tokFile.Line(commentGrp.Pos()), tokFile.Line(commentGrp.End())
+ if startGrpLine == endGrpLine {
+ // Don't fold single line comments.
+ continue
+ }
+
+ firstComment := commentGrp.List[0]
+ startPos, endLinePos := firstComment.Pos(), firstComment.End()
+ startCmmntLine, endCmmntLine := tokFile.Line(startPos), tokFile.Line(endLinePos)
+ if startCmmntLine != endCmmntLine {
+ // If the first comment spans multiple lines, then we want to have the
+ // folding range start at the end of the first line.
+ endLinePos = token.Pos(int(startPos) + len(strings.Split(firstComment.Text, "\n")[0]))
+ }
+ mrng, err := pgf.PosMappedRange(endLinePos, commentGrp.End())
+ if err != nil {
+ bug.Errorf("%w", err) // can't happen
+ }
+ comments = append(comments, &FoldingRangeInfo{
+ // Fold from the end of the first line comment to the end of the comment block.
+ MappedRange: mrng,
+ Kind: protocol.Comment,
+ })
+ }
+ return comments
+}
diff --git a/gopls/internal/lsp/source/format.go b/gopls/internal/lsp/source/format.go
new file mode 100644
index 000000000..b8206edc9
--- /dev/null
+++ b/gopls/internal/lsp/source/format.go
@@ -0,0 +1,391 @@
+// Copyright 2018 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Package source provides core features for use by Go editors and tools.
+package source
+
+import (
+ "bytes"
+ "context"
+ "fmt"
+ "go/ast"
+ "go/format"
+ "go/parser"
+ "go/token"
+ "strings"
+ "text/scanner"
+
+ "golang.org/x/tools/gopls/internal/lsp/protocol"
+ "golang.org/x/tools/gopls/internal/lsp/safetoken"
+ "golang.org/x/tools/internal/diff"
+ "golang.org/x/tools/internal/event"
+ "golang.org/x/tools/internal/imports"
+)
+
+// Format formats a file with a given range.
+func Format(ctx context.Context, snapshot Snapshot, fh FileHandle) ([]protocol.TextEdit, error) {
+ ctx, done := event.Start(ctx, "source.Format")
+ defer done()
+
+	// Generated files shouldn't be edited, so don't format them.
+ if IsGenerated(ctx, snapshot, fh.URI()) {
+ return nil, fmt.Errorf("can't format %q: file is generated", fh.URI().Filename())
+ }
+
+ pgf, err := snapshot.ParseGo(ctx, fh, ParseFull)
+ if err != nil {
+ return nil, err
+ }
+ // Even if this file has parse errors, it might still be possible to format it.
+ // Using format.Node on an AST with errors may result in code being modified.
+ // Attempt to format the source of this file instead.
+ if pgf.ParseErr != nil {
+ formatted, err := formatSource(ctx, fh)
+ if err != nil {
+ return nil, err
+ }
+ return computeTextEdits(ctx, snapshot, pgf, string(formatted))
+ }
+
+ // format.Node changes slightly from one release to another, so the version
+ // of Go used to build the LSP server will determine how it formats code.
+	// This should be acceptable for all users, who will likely be prompted to rebuild
+ // the LSP server on each Go release.
+ buf := &bytes.Buffer{}
+ fset := FileSetFor(pgf.Tok)
+ if err := format.Node(buf, fset, pgf.File); err != nil {
+ return nil, err
+ }
+ formatted := buf.String()
+
+ // Apply additional formatting, if any is supported. Currently, the only
+ // supported additional formatter is gofumpt.
+ if format := snapshot.View().Options().GofumptFormat; snapshot.View().Options().Gofumpt && format != nil {
+ // gofumpt can customize formatting based on language version and module
+ // path, if available.
+ //
+ // Try to derive this information, but fall-back on the default behavior.
+ //
+ // TODO: under which circumstances can we fail to find module information?
+ // Can this, for example, result in inconsistent formatting across saves,
+ // due to pending calls to packages.Load?
+ var langVersion, modulePath string
+ mds, err := snapshot.MetadataForFile(ctx, fh.URI())
+ if err == nil && len(mds) > 0 {
+ if mi := mds[0].Module; mi != nil {
+ langVersion = mi.GoVersion
+ modulePath = mi.Path
+ }
+ }
+ b, err := format(ctx, langVersion, modulePath, buf.Bytes())
+ if err != nil {
+ return nil, err
+ }
+ formatted = string(b)
+ }
+ return computeTextEdits(ctx, snapshot, pgf, formatted)
+}
+
+func formatSource(ctx context.Context, fh FileHandle) ([]byte, error) {
+ _, done := event.Start(ctx, "source.formatSource")
+ defer done()
+
+ data, err := fh.Read()
+ if err != nil {
+ return nil, err
+ }
+ return format.Source(data)
+}
+
+type ImportFix struct {
+ Fix *imports.ImportFix
+ Edits []protocol.TextEdit
+}
+
+// AllImportsFixes formats f for each possible fix to the imports.
+// In addition to returning the result of applying all edits,
+// it returns a list of fixes that could be applied to the file, with the
+// corresponding TextEdits that would be needed to apply that fix.
+func AllImportsFixes(ctx context.Context, snapshot Snapshot, fh FileHandle) (allFixEdits []protocol.TextEdit, editsPerFix []*ImportFix, err error) {
+ ctx, done := event.Start(ctx, "source.AllImportsFixes")
+ defer done()
+
+ pgf, err := snapshot.ParseGo(ctx, fh, ParseFull)
+ if err != nil {
+ return nil, nil, err
+ }
+ if err := snapshot.RunProcessEnvFunc(ctx, func(opts *imports.Options) error {
+ allFixEdits, editsPerFix, err = computeImportEdits(snapshot, pgf, opts)
+ return err
+ }); err != nil {
+ return nil, nil, fmt.Errorf("AllImportsFixes: %v", err)
+ }
+ return allFixEdits, editsPerFix, nil
+}
+
+// computeImportEdits computes a set of edits that perform one or all of the
+// necessary import fixes.
+func computeImportEdits(snapshot Snapshot, pgf *ParsedGoFile, options *imports.Options) (allFixEdits []protocol.TextEdit, editsPerFix []*ImportFix, err error) {
+ filename := pgf.URI.Filename()
+
+ // Build up basic information about the original file.
+ allFixes, err := imports.FixImports(filename, pgf.Src, options)
+ if err != nil {
+ return nil, nil, err
+ }
+
+ allFixEdits, err = computeFixEdits(snapshot, pgf, options, allFixes)
+ if err != nil {
+ return nil, nil, err
+ }
+
+ // Apply all of the import fixes to the file.
+ // Add the edits for each fix to the result.
+ for _, fix := range allFixes {
+ edits, err := computeFixEdits(snapshot, pgf, options, []*imports.ImportFix{fix})
+ if err != nil {
+ return nil, nil, err
+ }
+ editsPerFix = append(editsPerFix, &ImportFix{
+ Fix: fix,
+ Edits: edits,
+ })
+ }
+ return allFixEdits, editsPerFix, nil
+}
+
+// ComputeOneImportFixEdits returns text edits for a single import fix.
+func ComputeOneImportFixEdits(snapshot Snapshot, pgf *ParsedGoFile, fix *imports.ImportFix) ([]protocol.TextEdit, error) {
+ options := &imports.Options{
+ LocalPrefix: snapshot.View().Options().Local,
+ // Defaults.
+ AllErrors: true,
+ Comments: true,
+ Fragment: true,
+ FormatOnly: false,
+ TabIndent: true,
+ TabWidth: 8,
+ }
+ return computeFixEdits(snapshot, pgf, options, []*imports.ImportFix{fix})
+}
+
+func computeFixEdits(snapshot Snapshot, pgf *ParsedGoFile, options *imports.Options, fixes []*imports.ImportFix) ([]protocol.TextEdit, error) {
+ // trim the original data to match fixedData
+ left, err := importPrefix(pgf.Src)
+ if err != nil {
+ return nil, err
+ }
+	extra := !strings.Contains(left, "\n") // the single line may contain more than just imports
+ if extra {
+ left = string(pgf.Src)
+ }
+ if len(left) > 0 && left[len(left)-1] != '\n' {
+ left += "\n"
+ }
+ // Apply the fixes and re-parse the file so that we can locate the
+ // new imports.
+ flags := parser.ImportsOnly
+ if extra {
+		// we used all of pgf.Src above, so use all of it here too
+ flags = 0
+ }
+ fixedData, err := imports.ApplyFixes(fixes, "", pgf.Src, options, flags)
+ if err != nil {
+ return nil, err
+ }
+ if fixedData == nil || fixedData[len(fixedData)-1] != '\n' {
+ fixedData = append(fixedData, '\n') // ApplyFixes may miss the newline, go figure.
+ }
+ edits := snapshot.View().Options().ComputeEdits(left, string(fixedData))
+ return protocolEditsFromSource([]byte(left), edits)
+}
+
+// importPrefix returns the prefix of the given file content through the final
+// import statement. If there are no imports, the prefix is the package
+// statement and any comment groups below it.
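+//
+// For example (illustrative; see TestImportPrefix in format_test.go for more
+// cases):
+//
+//	importPrefix([]byte("package foo\n\nimport \"fmt\"\n"))
+//	// returns "package foo\n\nimport \"fmt\"" (no trailing newline)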
+func importPrefix(src []byte) (string, error) {
+ fset := token.NewFileSet()
+ // do as little parsing as possible
+ f, err := parser.ParseFile(fset, "", src, parser.ImportsOnly|parser.ParseComments)
+ if err != nil { // This can happen if 'package' is misspelled
+ return "", fmt.Errorf("importPrefix: failed to parse: %s", err)
+ }
+ tok := fset.File(f.Pos())
+ var importEnd int
+ for _, d := range f.Decls {
+ if x, ok := d.(*ast.GenDecl); ok && x.Tok == token.IMPORT {
+ if e, err := safetoken.Offset(tok, d.End()); err != nil {
+ return "", fmt.Errorf("importPrefix: %s", err)
+ } else if e > importEnd {
+ importEnd = e
+ }
+ }
+ }
+
+ maybeAdjustToLineEnd := func(pos token.Pos, isCommentNode bool) int {
+ offset, err := safetoken.Offset(tok, pos)
+ if err != nil {
+ return -1
+ }
+
+ // Don't go past the end of the file.
+ if offset > len(src) {
+ offset = len(src)
+ }
+ // The go/ast package does not account for different line endings, and
+ // specifically, in the text of a comment, it will strip out \r\n line
+ // endings in favor of \n. To account for these differences, we try to
+ // return a position on the next line whenever possible.
+ switch line := tok.Line(tok.Pos(offset)); {
+ case line < tok.LineCount():
+ nextLineOffset, err := safetoken.Offset(tok, tok.LineStart(line+1))
+ if err != nil {
+ return -1
+ }
+ // If we found a position that is at the end of a line, move the
+ // offset to the start of the next line.
+ if offset+1 == nextLineOffset {
+ offset = nextLineOffset
+ }
+ case isCommentNode, offset+1 == tok.Size():
+ // If the last line of the file is a comment, or we are at the end
+ // of the file, the prefix is the entire file.
+ offset = len(src)
+ }
+ return offset
+ }
+ if importEnd == 0 {
+ pkgEnd := f.Name.End()
+ importEnd = maybeAdjustToLineEnd(pkgEnd, false)
+ }
+ for _, cgroup := range f.Comments {
+ for _, c := range cgroup.List {
+ if end, err := safetoken.Offset(tok, c.End()); err != nil {
+ return "", err
+ } else if end > importEnd {
+ startLine := safetoken.Position(tok, c.Pos()).Line
+ endLine := safetoken.Position(tok, c.End()).Line
+
+ // Work around golang/go#41197 by checking if the comment might
+ // contain "\r", and if so, find the actual end position of the
+ // comment by scanning the content of the file.
+ startOffset, err := safetoken.Offset(tok, c.Pos())
+ if err != nil {
+ return "", err
+ }
+ if startLine != endLine && bytes.Contains(src[startOffset:], []byte("\r")) {
+ if commentEnd := scanForCommentEnd(src[startOffset:]); commentEnd > 0 {
+ end = startOffset + commentEnd
+ }
+ }
+ importEnd = maybeAdjustToLineEnd(tok.Pos(end), true)
+ }
+ }
+ }
+ if importEnd > len(src) {
+ importEnd = len(src)
+ }
+ return string(src[:importEnd]), nil
+}
+
+// scanForCommentEnd returns the offset of the end of the multi-line comment
+// at the start of the given byte slice.
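+//
+// For example (illustrative), scanForCommentEnd([]byte("/* a\r\n b */ x"))
+// returns 11, the offset just past the closing "*/".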
+func scanForCommentEnd(src []byte) int {
+ var s scanner.Scanner
+ s.Init(bytes.NewReader(src))
+ s.Mode ^= scanner.SkipComments
+
+ t := s.Scan()
+ if t == scanner.Comment {
+ return s.Pos().Offset
+ }
+ return 0
+}
+
+func computeTextEdits(ctx context.Context, snapshot Snapshot, pgf *ParsedGoFile, formatted string) ([]protocol.TextEdit, error) {
+ _, done := event.Start(ctx, "source.computeTextEdits")
+ defer done()
+
+ edits := snapshot.View().Options().ComputeEdits(string(pgf.Src), formatted)
+ return ToProtocolEdits(pgf.Mapper, edits)
+}
+
+// protocolEditsFromSource converts text edits to LSP edits using the original
+// source.
+func protocolEditsFromSource(src []byte, edits []diff.Edit) ([]protocol.TextEdit, error) {
+ m := protocol.NewMapper("", src)
+ var result []protocol.TextEdit
+ for _, edit := range edits {
+ rng, err := m.OffsetRange(edit.Start, edit.End)
+ if err != nil {
+ return nil, err
+ }
+
+ if rng.Start == rng.End && edit.New == "" {
+ // Degenerate case, which may result from a diff tool wanting to delete
+ // '\r' in line endings. Filter it out.
+ continue
+ }
+ result = append(result, protocol.TextEdit{
+ Range: rng,
+ NewText: edit.New,
+ })
+ }
+ return result, nil
+}
+
+// ToProtocolEdits converts diff.Edits to LSP TextEdits.
+// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification/#textEditArray
+func ToProtocolEdits(m *protocol.Mapper, edits []diff.Edit) ([]protocol.TextEdit, error) {
+ // LSP doesn't require TextEditArray to be sorted:
+ // this is the receiver's concern. But govim, and perhaps
+ // other clients have historically relied on the order.
+ edits = append([]diff.Edit(nil), edits...)
+ diff.SortEdits(edits)
+
+ result := make([]protocol.TextEdit, len(edits))
+ for i, edit := range edits {
+ rng, err := m.OffsetRange(edit.Start, edit.End)
+ if err != nil {
+ return nil, err
+ }
+ result[i] = protocol.TextEdit{
+ Range: rng,
+ NewText: edit.New,
+ }
+ }
+ return result, nil
+}
+
+// FromProtocolEdits converts LSP TextEdits to diff.Edits.
+// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification/#textEditArray
+func FromProtocolEdits(m *protocol.Mapper, edits []protocol.TextEdit) ([]diff.Edit, error) {
+ if edits == nil {
+ return nil, nil
+ }
+ result := make([]diff.Edit, len(edits))
+ for i, edit := range edits {
+ start, end, err := m.RangeOffsets(edit.Range)
+ if err != nil {
+ return nil, err
+ }
+ result[i] = diff.Edit{
+ Start: start,
+ End: end,
+ New: edit.NewText,
+ }
+ }
+ return result, nil
+}
+
+// ApplyProtocolEdits applies the patch (edits) to m.Content and returns the result.
+// It also returns the edits converted to diff-package form.
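+//
+// For example (a minimal illustrative sketch):
+//
+//	m := protocol.NewMapper("", []byte("ab\n"))
+//	edit := protocol.TextEdit{
+//		Range: protocol.Range{
+//			Start: protocol.Position{Line: 0, Character: 0},
+//			End:   protocol.Position{Line: 0, Character: 1},
+//		},
+//		NewText: "x",
+//	}
+//	out, _, _ := ApplyProtocolEdits(m, []protocol.TextEdit{edit})
+//	// out == []byte("xb\n")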
+func ApplyProtocolEdits(m *protocol.Mapper, edits []protocol.TextEdit) ([]byte, []diff.Edit, error) {
+ diffEdits, err := FromProtocolEdits(m, edits)
+ if err != nil {
+ return nil, nil, err
+ }
+ out, err := diff.ApplyBytes(m.Content, diffEdits)
+ return out, diffEdits, err
+}
diff --git a/gopls/internal/lsp/source/format_test.go b/gopls/internal/lsp/source/format_test.go
new file mode 100644
index 000000000..fac80c311
--- /dev/null
+++ b/gopls/internal/lsp/source/format_test.go
@@ -0,0 +1,75 @@
+// Copyright 2020 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package source
+
+import (
+ "strings"
+ "testing"
+
+ "golang.org/x/tools/gopls/internal/lsp/tests/compare"
+)
+
+func TestImportPrefix(t *testing.T) {
+ for i, tt := range []struct {
+ input, want string
+ }{
+ {"package foo", "package foo"},
+ {"package foo\n", "package foo\n"},
+ {"package foo\n\nfunc f(){}\n", "package foo\n"},
+ {"package foo\n\nimport \"fmt\"\n", "package foo\n\nimport \"fmt\""},
+ {"package foo\nimport (\n\"fmt\"\n)\n", "package foo\nimport (\n\"fmt\"\n)"},
+ {"\n\n\npackage foo\n", "\n\n\npackage foo\n"},
+ {"// hi \n\npackage foo //xx\nfunc _(){}\n", "// hi \n\npackage foo //xx\n"},
+ {"package foo //hi\n", "package foo //hi\n"},
+ {"//hi\npackage foo\n//a\n\n//b\n", "//hi\npackage foo\n//a\n\n//b\n"},
+ {
+ "package a\n\nimport (\n \"fmt\"\n)\n//hi\n",
+ "package a\n\nimport (\n \"fmt\"\n)\n//hi\n",
+ },
+ {`package a /*hi*/`, `package a /*hi*/`},
+ {"package main\r\n\r\nimport \"go/types\"\r\n\r\n/*\r\n\r\n */\r\n", "package main\r\n\r\nimport \"go/types\"\r\n\r\n/*\r\n\r\n */\r\n"},
+ {"package x; import \"os\"; func f() {}\n\n", "package x; import \"os\""},
+ {"package x; func f() {fmt.Println()}\n\n", "package x"},
+ } {
+ got, err := importPrefix([]byte(tt.input))
+ if err != nil {
+ t.Fatal(err)
+ }
+ if d := compare.Text(tt.want, got); d != "" {
+ t.Errorf("%d: failed for %q:\n%s", i, tt.input, d)
+ }
+ }
+}
+
+func TestCRLFFile(t *testing.T) {
+ for i, tt := range []struct {
+ input, want string
+ }{
+ {
+ input: `package main
+
+/*
+Hi description
+*/
+func Hi() {
+}
+`,
+ want: `package main
+
+/*
+Hi description
+*/`,
+ },
+ } {
+ got, err := importPrefix([]byte(strings.ReplaceAll(tt.input, "\n", "\r\n")))
+ if err != nil {
+ t.Fatal(err)
+ }
+ want := strings.ReplaceAll(tt.want, "\n", "\r\n")
+ if d := compare.Text(want, got); d != "" {
+ t.Errorf("%d: failed for %q:\n%s", i, tt.input, d)
+ }
+ }
+}
diff --git a/gopls/internal/lsp/source/gc_annotations.go b/gopls/internal/lsp/source/gc_annotations.go
new file mode 100644
index 000000000..fbdfc3f7b
--- /dev/null
+++ b/gopls/internal/lsp/source/gc_annotations.go
@@ -0,0 +1,221 @@
+// Copyright 2020 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package source
+
+import (
+ "bytes"
+ "context"
+ "encoding/json"
+ "fmt"
+ "io/ioutil"
+ "os"
+ "path/filepath"
+ "strings"
+
+ "golang.org/x/tools/gopls/internal/lsp/protocol"
+ "golang.org/x/tools/gopls/internal/span"
+ "golang.org/x/tools/internal/gocommand"
+)
+
+type Annotation string
+
+const (
+ // Nil controls nil checks.
+ Nil Annotation = "nil"
+
+ // Escape controls diagnostics about escape choices.
+ Escape Annotation = "escape"
+
+ // Inline controls diagnostics about inlining choices.
+ Inline Annotation = "inline"
+
+ // Bounds controls bounds checking diagnostics.
+ Bounds Annotation = "bounds"
+)
+
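+// GCOptimizationDetails rebuilds the package described by m with the
+// compiler's -json logging flag and reports its optimization decisions
+// (inlining, escape analysis, nil checks, bounds checks) as diagnostics.
+// The invocation below is roughly equivalent to running (paths illustrative):
+//
+//	go build -gcflags=-json=0,/tmp/gopls-1234.details .
+//
+// and then parsing the JSON files the compiler writes to that directory.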
+func GCOptimizationDetails(ctx context.Context, snapshot Snapshot, m *Metadata) (map[span.URI][]*Diagnostic, error) {
+ if len(m.CompiledGoFiles) == 0 {
+ return nil, nil
+ }
+ pkgDir := filepath.Dir(m.CompiledGoFiles[0].Filename())
+ outDir := filepath.Join(os.TempDir(), fmt.Sprintf("gopls-%d.details", os.Getpid()))
+
+ if err := os.MkdirAll(outDir, 0700); err != nil {
+ return nil, err
+ }
+ tmpFile, err := ioutil.TempFile(os.TempDir(), "gopls-x")
+ if err != nil {
+ return nil, err
+ }
+ defer os.Remove(tmpFile.Name())
+
+ outDirURI := span.URIFromPath(outDir)
+ // GC details doesn't handle Windows URIs in the form of "file:///C:/...",
+ // so rewrite them to "file://C:/...". See golang/go#41614.
+ if !strings.HasPrefix(outDir, "/") {
+ outDirURI = span.URI(strings.Replace(string(outDirURI), "file:///", "file://", 1))
+ }
+ inv := &gocommand.Invocation{
+ Verb: "build",
+ Args: []string{
+ fmt.Sprintf("-gcflags=-json=0,%s", outDirURI),
+ fmt.Sprintf("-o=%s", tmpFile.Name()),
+ ".",
+ },
+ WorkingDir: pkgDir,
+ }
+ _, err = snapshot.RunGoCommandDirect(ctx, Normal, inv)
+ if err != nil {
+ return nil, err
+ }
+ files, err := findJSONFiles(outDir)
+ if err != nil {
+ return nil, err
+ }
+ reports := make(map[span.URI][]*Diagnostic)
+ opts := snapshot.View().Options()
+ var parseError error
+ for _, fn := range files {
+ uri, diagnostics, err := parseDetailsFile(fn, opts)
+ if err != nil {
+			// We expect errors for all but one of the files; remember the last one.
+ parseError = err
+ }
+ fh := snapshot.FindFile(uri)
+ if fh == nil {
+ continue
+ }
+ if pkgDir != filepath.Dir(fh.URI().Filename()) {
+			// https://github.com/golang/go/issues/42198:
+			// the compiler sometimes generates detail diagnostics for files
+			// outside the package; once reported, they can never be retracted,
+			// so skip them.
+ continue
+ }
+ reports[fh.URI()] = diagnostics
+ }
+ return reports, parseError
+}
+
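+// parseDetailsFile parses one JSON file produced by the compiler's -json
+// flag. Each file is a stream of JSON objects: a metadata header naming the
+// source file, followed by LSP-style diagnostics, e.g. (schema illustrative):
+//
+//	{"version":0,"file":"/p/a.go",...}
+//	{"range":{...},"code":"canInlineFunction","message":"cost: 7","source":"go compiler"}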
+func parseDetailsFile(filename string, options *Options) (span.URI, []*Diagnostic, error) {
+ buf, err := ioutil.ReadFile(filename)
+ if err != nil {
+ return "", nil, err
+ }
+ var (
+ uri span.URI
+ i int
+ diagnostics []*Diagnostic
+ )
+ type metadata struct {
+ File string `json:"file,omitempty"`
+ }
+ for dec := json.NewDecoder(bytes.NewReader(buf)); dec.More(); {
+ // The first element always contains metadata.
+ if i == 0 {
+ i++
+ m := new(metadata)
+ if err := dec.Decode(m); err != nil {
+ return "", nil, err
+ }
+ if !strings.HasSuffix(m.File, ".go") {
+ continue // <autogenerated>
+ }
+ uri = span.URIFromPath(m.File)
+ continue
+ }
+ d := new(protocol.Diagnostic)
+ if err := dec.Decode(d); err != nil {
+ return "", nil, err
+ }
+ msg := d.Code.(string)
+ if msg != "" {
+ msg = fmt.Sprintf("%s(%s)", msg, d.Message)
+ }
+ if !showDiagnostic(msg, d.Source, options) {
+ continue
+ }
+ var related []protocol.DiagnosticRelatedInformation
+ for _, ri := range d.RelatedInformation {
+ // TODO(rfindley): The compiler uses LSP-like JSON to encode gc details,
+ // however the positions it uses are 1-based UTF-8:
+ // https://github.com/golang/go/blob/master/src/cmd/compile/internal/logopt/log_opts.go
+ //
+ // Here, we adjust for 0-based positions, but do not translate UTF-8 to UTF-16.
+ related = append(related, protocol.DiagnosticRelatedInformation{
+ Location: protocol.Location{
+ URI: ri.Location.URI,
+ Range: zeroIndexedRange(ri.Location.Range),
+ },
+ Message: ri.Message,
+ })
+ }
+ diagnostic := &Diagnostic{
+ URI: uri,
+ Range: zeroIndexedRange(d.Range),
+ Message: msg,
+ Severity: d.Severity,
+ Source: OptimizationDetailsError, // d.Source is always "go compiler" as of 1.16, use our own
+ Tags: d.Tags,
+ Related: related,
+ }
+ diagnostics = append(diagnostics, diagnostic)
+ i++
+ }
+ return uri, diagnostics, nil
+}
+
+// showDiagnostic reports whether a given diagnostic should be shown to the end
+// user, given the current options.
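+//
+// For example (illustrative), with "annotations": {"inline": true} in the
+// gopls settings, inlining messages such as "canInlineFunction" are shown,
+// while escape, nil-check, and bounds-check messages are suppressed.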
+func showDiagnostic(msg, source string, o *Options) bool {
+ if source != "go compiler" {
+ return false
+ }
+ if o.Annotations == nil {
+ return true
+ }
+ switch {
+ case strings.HasPrefix(msg, "canInline") ||
+ strings.HasPrefix(msg, "cannotInline") ||
+ strings.HasPrefix(msg, "inlineCall"):
+ return o.Annotations[Inline]
+ case strings.HasPrefix(msg, "escape") || msg == "leak":
+ return o.Annotations[Escape]
+ case strings.HasPrefix(msg, "nilcheck"):
+ return o.Annotations[Nil]
+ case strings.HasPrefix(msg, "isInBounds") ||
+ strings.HasPrefix(msg, "isSliceInBounds"):
+ return o.Annotations[Bounds]
+ }
+ return false
+}
+
+// The range produced by the compiler is 1-indexed, so subtract 1 from both
+// the line and the character.
+func zeroIndexedRange(rng protocol.Range) protocol.Range {
+ return protocol.Range{
+ Start: protocol.Position{
+ Line: rng.Start.Line - 1,
+ Character: rng.Start.Character - 1,
+ },
+ End: protocol.Position{
+ Line: rng.End.Line - 1,
+ Character: rng.End.Character - 1,
+ },
+ }
+}
+
+func findJSONFiles(dir string) ([]string, error) {
+ ans := []string{}
+ f := func(path string, fi os.FileInfo, _ error) error {
+ if fi.IsDir() {
+ return nil
+ }
+ if strings.HasSuffix(path, ".json") {
+ ans = append(ans, path)
+ }
+ return nil
+ }
+ err := filepath.Walk(dir, f)
+ return ans, err
+}
diff --git a/gopls/internal/lsp/source/highlight.go b/gopls/internal/lsp/source/highlight.go
new file mode 100644
index 000000000..a190f4896
--- /dev/null
+++ b/gopls/internal/lsp/source/highlight.go
@@ -0,0 +1,484 @@
+// Copyright 2019 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package source
+
+import (
+ "context"
+ "fmt"
+ "go/ast"
+ "go/token"
+ "go/types"
+ "strings"
+
+ "golang.org/x/tools/go/ast/astutil"
+ "golang.org/x/tools/gopls/internal/lsp/protocol"
+ "golang.org/x/tools/internal/event"
+)
+
+func Highlight(ctx context.Context, snapshot Snapshot, fh FileHandle, position protocol.Position) ([]protocol.Range, error) {
+ ctx, done := event.Start(ctx, "source.Highlight")
+ defer done()
+
+ // We always want fully parsed files for highlight, regardless
+ // of whether the file belongs to a workspace package.
+ pkg, pgf, err := PackageForFile(ctx, snapshot, fh.URI(), NarrowestPackage)
+ if err != nil {
+ return nil, fmt.Errorf("getting package for Highlight: %w", err)
+ }
+
+ pos, err := pgf.PositionPos(position)
+ if err != nil {
+ return nil, err
+ }
+ path, _ := astutil.PathEnclosingInterval(pgf.File, pos, pos)
+ if len(path) == 0 {
+ return nil, fmt.Errorf("no enclosing position found for %v:%v", position.Line, position.Character)
+ }
+ // If start == end for astutil.PathEnclosingInterval, the 1-char interval
+ // following start is used instead. As a result, we might not get an exact
+ // match so we should check the 1-char interval to the left of the passed
+ // in position to see if that is an exact match.
+ if _, ok := path[0].(*ast.Ident); !ok {
+ if p, _ := astutil.PathEnclosingInterval(pgf.File, pos-1, pos-1); p != nil {
+ switch p[0].(type) {
+ case *ast.Ident, *ast.SelectorExpr:
+ path = p // use preceding ident/selector
+ }
+ }
+ }
+ result, err := highlightPath(path, pgf.File, pkg.GetTypesInfo())
+ if err != nil {
+ return nil, err
+ }
+ var ranges []protocol.Range
+ for rng := range result {
+ rng, err := pgf.PosRange(rng.start, rng.end)
+ if err != nil {
+ return nil, err
+ }
+ ranges = append(ranges, rng)
+ }
+ return ranges, nil
+}
+
+func highlightPath(path []ast.Node, file *ast.File, info *types.Info) (map[posRange]struct{}, error) {
+ result := make(map[posRange]struct{})
+ switch node := path[0].(type) {
+ case *ast.BasicLit:
+ if len(path) > 1 {
+ if _, ok := path[1].(*ast.ImportSpec); ok {
+ err := highlightImportUses(path, info, result)
+ return result, err
+ }
+ }
+ highlightFuncControlFlow(path, result)
+ case *ast.ReturnStmt, *ast.FuncDecl, *ast.FuncType:
+ highlightFuncControlFlow(path, result)
+ case *ast.Ident:
+ // Check if ident is inside return or func decl.
+ highlightFuncControlFlow(path, result)
+ highlightIdentifier(node, file, info, result)
+ case *ast.ForStmt, *ast.RangeStmt:
+ highlightLoopControlFlow(path, info, result)
+ case *ast.SwitchStmt:
+ highlightSwitchFlow(path, info, result)
+ case *ast.BranchStmt:
+		// BREAK can exit a loop, switch, or select, while CONTINUE can only exit a
+		// loop, so these need to be handled separately. They can also refer to any
+		// enclosing loop/switch/select if they have a label.
+		// TODO: add support for GOTO and FALLTHROUGH as well.
+ switch node.Tok {
+ case token.BREAK:
+ if node.Label != nil {
+ highlightLabeledFlow(path, info, node, result)
+ } else {
+ highlightUnlabeledBreakFlow(path, info, result)
+ }
+ case token.CONTINUE:
+ if node.Label != nil {
+ highlightLabeledFlow(path, info, node, result)
+ } else {
+ highlightLoopControlFlow(path, info, result)
+ }
+ }
+ default:
+ // If the cursor is in an unidentified area, return empty results.
+ return nil, nil
+ }
+ return result, nil
+}
+
+type posRange struct {
+ start, end token.Pos
+}
+
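+// highlightFuncControlFlow adds highlight ranges to the result map for the
+// control-flow keywords and exit points of the function enclosing path[0].
+// For example (illustrative), with the cursor on "return" in
+//
+//	func f() (int, error) { return 1, nil }
+//
+// both the "func" keyword and the entire return statement are highlighted,
+// while with the cursor on a single result value only the corresponding
+// value in each return statement and in the signature is highlighted.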
+func highlightFuncControlFlow(path []ast.Node, result map[posRange]struct{}) {
+ var enclosingFunc ast.Node
+ var returnStmt *ast.ReturnStmt
+ var resultsList *ast.FieldList
+ inReturnList := false
+
+Outer:
+ // Reverse walk the path till we get to the func block.
+ for i, n := range path {
+ switch node := n.(type) {
+ case *ast.KeyValueExpr:
+			// If the cursor is in a key: value expr, we don't want control flow highlighting.
+ return
+ case *ast.CallExpr:
+ // If cursor is an arg in a callExpr, we don't want control flow highlighting.
+ if i > 0 {
+ for _, arg := range node.Args {
+ if arg == path[i-1] {
+ return
+ }
+ }
+ }
+ case *ast.Field:
+ inReturnList = true
+ case *ast.FuncLit:
+ enclosingFunc = n
+ resultsList = node.Type.Results
+ break Outer
+ case *ast.FuncDecl:
+ enclosingFunc = n
+ resultsList = node.Type.Results
+ break Outer
+ case *ast.ReturnStmt:
+ returnStmt = node
+			// If the cursor is not directly in a *ast.ReturnStmt, then we need to
+			// know if it is within one of the values being returned.
+ inReturnList = inReturnList || path[0] != returnStmt
+ }
+ }
+ // Cursor is not in a function.
+ if enclosingFunc == nil {
+ return
+ }
+ // If the cursor is on a "return" or "func" keyword, we should highlight all of the exit
+ // points of the function, including the "return" and "func" keywords.
+ highlightAllReturnsAndFunc := path[0] == returnStmt || path[0] == enclosingFunc
+ switch path[0].(type) {
+ case *ast.Ident, *ast.BasicLit:
+ // Cursor is in an identifier and not in a return statement or in the results list.
+ if returnStmt == nil && !inReturnList {
+ return
+ }
+ case *ast.FuncType:
+ highlightAllReturnsAndFunc = true
+ }
+ // The user's cursor may be within the return statement of a function,
+ // or within the result section of a function's signature.
+ var nodes []ast.Node
+ if returnStmt != nil {
+ for _, n := range returnStmt.Results {
+ nodes = append(nodes, n)
+ }
+ } else if resultsList != nil {
+ for _, n := range resultsList.List {
+ nodes = append(nodes, n)
+ }
+ }
+ _, index := nodeAtPos(nodes, path[0].Pos())
+
+ // Highlight the correct argument in the function declaration return types.
+ if resultsList != nil && -1 < index && index < len(resultsList.List) {
+ rng := posRange{
+ start: resultsList.List[index].Pos(),
+ end: resultsList.List[index].End(),
+ }
+ result[rng] = struct{}{}
+ }
+ // Add the "func" part of the func declaration.
+ if highlightAllReturnsAndFunc {
+ r := posRange{
+ start: enclosingFunc.Pos(),
+ end: enclosingFunc.Pos() + token.Pos(len("func")),
+ }
+ result[r] = struct{}{}
+ }
+ ast.Inspect(enclosingFunc, func(n ast.Node) bool {
+ // Don't traverse any other functions.
+ switch n.(type) {
+ case *ast.FuncDecl, *ast.FuncLit:
+ return enclosingFunc == n
+ }
+ ret, ok := n.(*ast.ReturnStmt)
+ if !ok {
+ return true
+ }
+ var toAdd ast.Node
+		// Add the entire return statement; this applies when highlighting the word "return" or "func".
+ if highlightAllReturnsAndFunc {
+ toAdd = n
+ }
+ // Add the relevant field within the entire return statement.
+ if -1 < index && index < len(ret.Results) {
+ toAdd = ret.Results[index]
+ }
+ if toAdd != nil {
+ result[posRange{start: toAdd.Pos(), end: toAdd.End()}] = struct{}{}
+ }
+ return false
+ })
+}
+
+// highlightUnlabeledBreakFlow highlights the innermost enclosing for/range/switch or select statement.
+func highlightUnlabeledBreakFlow(path []ast.Node, info *types.Info, result map[posRange]struct{}) {
+ // Reverse walk the path until we find closest loop, select, or switch.
+ for _, n := range path {
+ switch n.(type) {
+ case *ast.ForStmt, *ast.RangeStmt:
+ highlightLoopControlFlow(path, info, result)
+ return // only highlight the innermost statement
+ case *ast.SwitchStmt:
+ highlightSwitchFlow(path, info, result)
+ return
+ case *ast.SelectStmt:
+ // TODO: add highlight when breaking a select.
+ return
+ }
+ }
+}
+
+// highlightLabeledFlow highlights the enclosing labeled for, range,
+// or switch statement denoted by a labeled break or continue stmt.
+func highlightLabeledFlow(path []ast.Node, info *types.Info, stmt *ast.BranchStmt, result map[posRange]struct{}) {
+ use := info.Uses[stmt.Label]
+ if use == nil {
+ return
+ }
+ for _, n := range path {
+ if label, ok := n.(*ast.LabeledStmt); ok && info.Defs[label.Label] == use {
+ switch label.Stmt.(type) {
+ case *ast.ForStmt, *ast.RangeStmt:
+ highlightLoopControlFlow([]ast.Node{label.Stmt, label}, info, result)
+ case *ast.SwitchStmt:
+ highlightSwitchFlow([]ast.Node{label.Stmt, label}, info, result)
+ }
+ return
+ }
+ }
+}
+
+func labelFor(path []ast.Node) *ast.Ident {
+ if len(path) > 1 {
+ if n, ok := path[1].(*ast.LabeledStmt); ok {
+ return n.Label
+ }
+ }
+ return nil
+}
+
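+// highlightLoopControlFlow highlights the enclosing for/range statement and
+// the branch statements that refer to it. For example (illustrative), with
+// the cursor on the "for" keyword in
+//
+//	for i := 0; i < 10; i++ {
+//		if i%2 == 0 {
+//			continue
+//		}
+//	}
+//
+// both the "for" keyword and the "continue" statement are highlighted.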
+func highlightLoopControlFlow(path []ast.Node, info *types.Info, result map[posRange]struct{}) {
+ var loop ast.Node
+ var loopLabel *ast.Ident
+ stmtLabel := labelFor(path)
+Outer:
+ // Reverse walk the path till we get to the for loop.
+ for i := range path {
+ switch n := path[i].(type) {
+ case *ast.ForStmt, *ast.RangeStmt:
+ loopLabel = labelFor(path[i:])
+
+ if stmtLabel == nil || loopLabel == stmtLabel {
+ loop = n
+ break Outer
+ }
+ }
+ }
+ if loop == nil {
+ return
+ }
+
+ // Add the for statement.
+ rng := posRange{
+ start: loop.Pos(),
+ end: loop.Pos() + token.Pos(len("for")),
+ }
+ result[rng] = struct{}{}
+
+ // Traverse AST to find branch statements within the same for-loop.
+ ast.Inspect(loop, func(n ast.Node) bool {
+ switch n.(type) {
+ case *ast.ForStmt, *ast.RangeStmt:
+ return loop == n
+ case *ast.SwitchStmt, *ast.SelectStmt:
+ return false
+ }
+ b, ok := n.(*ast.BranchStmt)
+ if !ok {
+ return true
+ }
+ if b.Label == nil || info.Uses[b.Label] == info.Defs[loopLabel] {
+ result[posRange{start: b.Pos(), end: b.End()}] = struct{}{}
+ }
+ return true
+ })
+
+ // Find continue statements in the same loop or switches/selects.
+ ast.Inspect(loop, func(n ast.Node) bool {
+ switch n.(type) {
+ case *ast.ForStmt, *ast.RangeStmt:
+ return loop == n
+ }
+
+ if n, ok := n.(*ast.BranchStmt); ok && n.Tok == token.CONTINUE {
+ result[posRange{start: n.Pos(), end: n.End()}] = struct{}{}
+ }
+ return true
+ })
+
+ // We don't need to check other for loops if we aren't looking for labeled statements.
+ if loopLabel == nil {
+ return
+ }
+
+ // Find labeled branch statements in any loop.
+ ast.Inspect(loop, func(n ast.Node) bool {
+ b, ok := n.(*ast.BranchStmt)
+ if !ok {
+ return true
+ }
+		// Add branch statements whose label matches the loop's label.
+ if b.Label != nil && info.Uses[b.Label] == info.Defs[loopLabel] {
+ result[posRange{start: b.Pos(), end: b.End()}] = struct{}{}
+ }
+ return true
+ })
+}
+
+func highlightSwitchFlow(path []ast.Node, info *types.Info, result map[posRange]struct{}) {
+ var switchNode ast.Node
+ var switchNodeLabel *ast.Ident
+ stmtLabel := labelFor(path)
+Outer:
+ // Reverse walk the path till we get to the switch statement.
+ for i := range path {
+ switch n := path[i].(type) {
+ case *ast.SwitchStmt:
+ switchNodeLabel = labelFor(path[i:])
+ if stmtLabel == nil || switchNodeLabel == stmtLabel {
+ switchNode = n
+ break Outer
+ }
+ }
+ }
+	// Cursor is not in a switch statement.
+ if switchNode == nil {
+ return
+ }
+
+ // Add the switch statement.
+ rng := posRange{
+ start: switchNode.Pos(),
+ end: switchNode.Pos() + token.Pos(len("switch")),
+ }
+ result[rng] = struct{}{}
+
+ // Traverse AST to find break statements within the same switch.
+ ast.Inspect(switchNode, func(n ast.Node) bool {
+ switch n.(type) {
+ case *ast.SwitchStmt:
+ return switchNode == n
+ case *ast.ForStmt, *ast.RangeStmt, *ast.SelectStmt:
+ return false
+ }
+
+ b, ok := n.(*ast.BranchStmt)
+ if !ok || b.Tok != token.BREAK {
+ return true
+ }
+
+ if b.Label == nil || info.Uses[b.Label] == info.Defs[switchNodeLabel] {
+ result[posRange{start: b.Pos(), end: b.End()}] = struct{}{}
+ }
+ return true
+ })
+
+ // We don't need to check other switches if we aren't looking for labeled statements.
+ if switchNodeLabel == nil {
+ return
+ }
+
+ // Find labeled break statements in any switch
+ ast.Inspect(switchNode, func(n ast.Node) bool {
+ b, ok := n.(*ast.BranchStmt)
+ if !ok || b.Tok != token.BREAK {
+ return true
+ }
+
+ if b.Label != nil && info.Uses[b.Label] == info.Defs[switchNodeLabel] {
+ result[posRange{start: b.Pos(), end: b.End()}] = struct{}{}
+ }
+
+ return true
+ })
+}
+
+func highlightImportUses(path []ast.Node, info *types.Info, result map[posRange]struct{}) error {
+ basicLit, ok := path[0].(*ast.BasicLit)
+ if !ok {
+		return fmt.Errorf("highlightImportUses called with an ast.Node of type %T", path[0])
+ }
+ ast.Inspect(path[len(path)-1], func(node ast.Node) bool {
+ if imp, ok := node.(*ast.ImportSpec); ok && imp.Path == basicLit {
+ result[posRange{start: node.Pos(), end: node.End()}] = struct{}{}
+ return false
+ }
+ n, ok := node.(*ast.Ident)
+ if !ok {
+ return true
+ }
+ obj, ok := info.ObjectOf(n).(*types.PkgName)
+ if !ok {
+ return true
+ }
+ if !strings.Contains(basicLit.Value, obj.Name()) {
+ return true
+ }
+ result[posRange{start: n.Pos(), end: n.End()}] = struct{}{}
+ return false
+ })
+ return nil
+}
+
+func highlightIdentifier(id *ast.Ident, file *ast.File, info *types.Info, result map[posRange]struct{}) {
+	// TODO(rfindley): idObj may be nil. Note that returning early in this case
+	// causes tests to fail (because the nObj == idObj check below succeeded
+	// even for nil == nil!)
+ //
+ // Revisit this. If ObjectOf is nil, there are type errors, and it seems
+ // reasonable for identifier highlighting not to work.
+ idObj := info.ObjectOf(id)
+ pkgObj, isImported := idObj.(*types.PkgName)
+ ast.Inspect(file, func(node ast.Node) bool {
+ if imp, ok := node.(*ast.ImportSpec); ok && isImported {
+ highlightImport(pkgObj, imp, result)
+ }
+ n, ok := node.(*ast.Ident)
+ if !ok {
+ return true
+ }
+ if n.Name != id.Name {
+ return false
+ }
+ if nObj := info.ObjectOf(n); nObj == idObj {
+ result[posRange{start: n.Pos(), end: n.End()}] = struct{}{}
+ }
+ return false
+ })
+}
+
+func highlightImport(obj *types.PkgName, imp *ast.ImportSpec, result map[posRange]struct{}) {
+ if imp.Name != nil || imp.Path == nil {
+ return
+ }
+ if !strings.Contains(imp.Path.Value, obj.Name()) {
+ return
+ }
+ result[posRange{start: imp.Path.Pos(), end: imp.Path.End()}] = struct{}{}
+}
diff --git a/gopls/internal/lsp/source/hover.go b/gopls/internal/lsp/source/hover.go
new file mode 100644
index 000000000..136a3022b
--- /dev/null
+++ b/gopls/internal/lsp/source/hover.go
@@ -0,0 +1,951 @@
+// Copyright 2019 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package source
+
+import (
+ "context"
+ "encoding/json"
+ "fmt"
+ "go/ast"
+ "go/constant"
+ "go/doc"
+ "go/format"
+ "go/token"
+ "go/types"
+ "strconv"
+ "strings"
+ "time"
+ "unicode/utf8"
+
+ "golang.org/x/text/unicode/runenames"
+ "golang.org/x/tools/go/ast/astutil"
+ "golang.org/x/tools/go/types/typeutil"
+ "golang.org/x/tools/gopls/internal/lsp/protocol"
+ "golang.org/x/tools/gopls/internal/lsp/safetoken"
+ "golang.org/x/tools/gopls/internal/span"
+ "golang.org/x/tools/internal/bug"
+ "golang.org/x/tools/internal/event"
+ "golang.org/x/tools/internal/typeparams"
+)
+
+// HoverJSON contains information used by hover. It is also the JSON returned
+// for the "structured" hover format.
+type HoverJSON struct {
+ // Synopsis is a single sentence synopsis of the symbol's documentation.
+ Synopsis string `json:"synopsis"`
+
+ // FullDocumentation is the symbol's full documentation.
+ FullDocumentation string `json:"fullDocumentation"`
+
+ // Signature is the symbol's signature.
+ Signature string `json:"signature"`
+
+ // SingleLine is a single line describing the symbol.
+ // This is recommended only for use in clients that show a single line for hover.
+ SingleLine string `json:"singleLine"`
+
+ // SymbolName is the human-readable name to use for the symbol in links.
+ SymbolName string `json:"symbolName"`
+
+ // LinkPath is the pkg.go.dev link for the given symbol.
+ // For example, the "go/ast" part of "pkg.go.dev/go/ast#Node".
+ LinkPath string `json:"linkPath"`
+
+ // LinkAnchor is the pkg.go.dev link anchor for the given symbol.
+ // For example, the "Node" part of "pkg.go.dev/go/ast#Node".
+ LinkAnchor string `json:"linkAnchor"`
+}
+
+// Hover implements the "textDocument/hover" RPC for Go files.
+func Hover(ctx context.Context, snapshot Snapshot, fh FileHandle, position protocol.Position) (*protocol.Hover, error) {
+ ctx, done := event.Start(ctx, "source.Hover")
+ defer done()
+
+ rng, h, err := hover(ctx, snapshot, fh, position)
+ if err != nil {
+ return nil, err
+ }
+ if h == nil {
+ return nil, nil
+ }
+ hover, err := formatHover(h, snapshot.View().Options())
+ if err != nil {
+ return nil, err
+ }
+ return &protocol.Hover{
+ Contents: protocol.MarkupContent{
+ Kind: snapshot.View().Options().PreferredContentFormat,
+ Value: hover,
+ },
+ Range: rng,
+ }, nil
+}
+
+// hover computes hover information at the given position. If we do not support
+// hovering at the position, it returns _, nil, nil: an error is only returned
+// if the position is valid but we fail to compute hover information.
+func hover(ctx context.Context, snapshot Snapshot, fh FileHandle, pp protocol.Position) (protocol.Range, *HoverJSON, error) {
+ pkg, pgf, err := PackageForFile(ctx, snapshot, fh.URI(), NarrowestPackage)
+ if err != nil {
+ return protocol.Range{}, nil, err
+ }
+ pos, err := pgf.PositionPos(pp)
+ if err != nil {
+ return protocol.Range{}, nil, err
+ }
+
+ // Handle hovering over import paths, which do not have an associated
+ // identifier.
+ for _, spec := range pgf.File.Imports {
+ // We are inclusive of the end point here to allow hovering when the cursor
+ // is just after the import path.
+ if spec.Path.Pos() <= pos && pos <= spec.Path.End() {
+ return hoverImport(ctx, snapshot, pkg, pgf, spec)
+ }
+ }
+
+ // Handle hovering over the package name, which does not have an associated
+ // object.
+ // As with import paths, we allow hovering just after the package name.
+	if pgf.File.Name != nil && pgf.File.Name.Pos() <= pos && pos <= pgf.File.Name.End() {
+ return hoverPackageName(pkg, pgf)
+ }
+
+ // Handle hovering over (non-import-path) literals.
+ if path, _ := astutil.PathEnclosingInterval(pgf.File, pos, pos); len(path) > 0 {
+ if lit, _ := path[0].(*ast.BasicLit); lit != nil {
+ return hoverLit(pgf, lit, pos)
+ }
+ }
+
+ // The general case: compute hover information for the object referenced by
+ // the identifier at pos.
+ ident, obj, selectedType := referencedObject(pkg, pgf, pos)
+ if obj == nil || ident == nil {
+ return protocol.Range{}, nil, nil // no object to hover
+ }
+
+ rng, err := pgf.NodeRange(ident)
+ if err != nil {
+ return protocol.Range{}, nil, err
+ }
+
+ // By convention, we qualify hover information relative to the package
+ // from which the request originated.
+ qf := Qualifier(pgf.File, pkg.GetTypes(), pkg.GetTypesInfo())
+
+ // Handle type switch identifiers as a special case, since they don't have an
+ // object.
+ //
+ // There's not much useful information to provide.
+ if selectedType != nil {
+ fakeObj := types.NewVar(obj.Pos(), obj.Pkg(), obj.Name(), selectedType)
+ signature := objectString(fakeObj, qf, nil)
+ return rng, &HoverJSON{
+ Signature: signature,
+ SingleLine: signature,
+ SymbolName: fakeObj.Name(),
+ }, nil
+ }
+
+ // Handle builtins, which don't have a package or position.
+ if obj.Pkg() == nil {
+ h, err := hoverBuiltin(ctx, snapshot, obj)
+ return rng, h, err
+ }
+
+ // For all other objects, consider the full syntax of their declaration in
+ // order to correctly compute their documentation, signature, and link.
+ declPGF, declPos, err := parseFull(ctx, snapshot, pkg.FileSet(), obj.Pos())
+ if err != nil {
+ return protocol.Range{}, nil, fmt.Errorf("re-parsing declaration of %s: %v", obj.Name(), err)
+ }
+ decl, spec, field := findDeclInfo([]*ast.File{declPGF.File}, declPos)
+ comment := chooseDocComment(decl, spec, field)
+ docText := comment.Text()
+
+ // By default, types.ObjectString provides a reasonable signature.
+ signature := objectString(obj, qf, nil)
+ // TODO(rfindley): we could do much better for inferred signatures.
+ if inferred := inferredSignature(pkg.GetTypesInfo(), ident); inferred != nil {
+ signature = objectString(obj, qf, inferred)
+ }
+
+ // For "objects defined by a type spec", the signature produced by
+ // objectString is insufficient:
+ // (1) large structs are formatted poorly, with no newlines
+ // (2) we lose inline comments
+ //
+ // Furthermore, we include a summary of their method set.
+ //
+ // TODO(rfindley): this should use FormatVarType to get proper qualification
+ // of identifiers, and we should revisit the formatting of method set.
+ _, isTypeName := obj.(*types.TypeName)
+ _, isTypeParam := obj.Type().(*typeparams.TypeParam)
+ if isTypeName && !isTypeParam {
+ spec, ok := spec.(*ast.TypeSpec)
+ if !ok {
+ return protocol.Range{}, nil, bug.Errorf("type name %q without type spec", obj.Name())
+ }
+ spec2 := *spec
+ // Don't duplicate comments when formatting type specs.
+ spec2.Doc = nil
+ spec2.Comment = nil
+ var b strings.Builder
+ b.WriteString("type ")
+ fset := FileSetFor(declPGF.Tok)
+ if err := format.Node(&b, fset, &spec2); err != nil {
+ return protocol.Range{}, nil, err
+ }
+
+ // Display the declared methods accessible from the identifier.
+ //
+ // (The format.Node call above displays any struct fields, public
+ // or private, in syntactic form. We choose not to recursively
+ // enumerate any fields and methods promoted from them.)
+ if !types.IsInterface(obj.Type()) {
+ sep := "\n\n"
+ for _, m := range typeutil.IntuitiveMethodSet(obj.Type(), nil) {
+ // Show direct methods that are either exported, or defined in the
+ // current package.
+ if (m.Obj().Exported() || m.Obj().Pkg() == pkg.GetTypes()) && len(m.Index()) == 1 {
+ b.WriteString(sep)
+ sep = "\n"
+ b.WriteString(objectString(m.Obj(), qf, nil))
+ }
+ }
+ }
+ signature = b.String()
+ }
+
+ // Compute link data (on pkg.go.dev or other documentation host).
+ //
+ // If linkPath is empty, the symbol is not linkable.
+ var (
+ linkName string // => link title, always non-empty
+ linkPath string // => link path
+ anchor string // link anchor
+ linkMeta *Metadata // metadata for the linked package
+ )
+ {
+ linkMeta = findFileInDeps(snapshot, pkg.Metadata(), declPGF.URI)
+ if linkMeta == nil {
+ return protocol.Range{}, nil, bug.Errorf("no metadata for %s", declPGF.URI)
+ }
+
+ // For package names, we simply link to their imported package.
+ if pkgName, ok := obj.(*types.PkgName); ok {
+ linkName = pkgName.Name()
+ linkPath = pkgName.Imported().Path()
+ impID := linkMeta.DepsByPkgPath[PackagePath(pkgName.Imported().Path())]
+ linkMeta = snapshot.Metadata(impID)
+ if linkMeta == nil {
+ return protocol.Range{}, nil, bug.Errorf("no metadata for %s", declPGF.URI)
+ }
+ } else {
+ // For all others, check whether the object is in the package scope, or
+ // an exported field or method of an object in the package scope.
+ //
+ // We try to match pkgsite's heuristics for what is linkable, and what is
+ // not.
+ var recv types.Object
+ switch obj := obj.(type) {
+ case *types.Func:
+ sig := obj.Type().(*types.Signature)
+ if sig.Recv() != nil {
+ tname := typeToObject(sig.Recv().Type())
+ if tname != nil { // beware typed nil
+ recv = tname
+ }
+ }
+ case *types.Var:
+ if obj.IsField() {
+ if spec, ok := spec.(*ast.TypeSpec); ok {
+ typeName := spec.Name
+ scopeObj, _ := obj.Pkg().Scope().Lookup(typeName.Name).(*types.TypeName)
+ if scopeObj != nil {
+ if st, _ := scopeObj.Type().Underlying().(*types.Struct); st != nil {
+ for i := 0; i < st.NumFields(); i++ {
+ if obj == st.Field(i) {
+ recv = scopeObj
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+
+ // Even if the object is not available in package documentation, it may
+ // be embedded in a documented receiver. Detect this by searching
+ // enclosing selector expressions.
+ //
+ // TODO(rfindley): pkgsite doesn't document fields from embedding, just
+ // methods.
+ if recv == nil || !recv.Exported() {
+ path := pathEnclosingObjNode(pgf.File, pos)
+ if enclosing := searchForEnclosing(pkg.GetTypesInfo(), path); enclosing != nil {
+ recv = enclosing
+ } else {
+ recv = nil // note: just recv = ... could result in a typed nil.
+ }
+ }
+
+ pkg := obj.Pkg()
+ if recv != nil {
+ linkName = fmt.Sprintf("(%s.%s).%s", pkg.Name(), recv.Name(), obj.Name())
+ if obj.Exported() && recv.Exported() && pkg.Scope().Lookup(recv.Name()) == recv {
+ linkPath = pkg.Path()
+ anchor = fmt.Sprintf("%s.%s", recv.Name(), obj.Name())
+ }
+ } else {
+ linkName = fmt.Sprintf("%s.%s", pkg.Name(), obj.Name())
+ if obj.Exported() && pkg.Scope().Lookup(obj.Name()) == obj {
+ linkPath = pkg.Path()
+ anchor = obj.Name()
+ }
+ }
+ }
+ }
+
+ if snapshot.View().IsGoPrivatePath(linkPath) || linkMeta.ForTest != "" {
+ linkPath = ""
+ } else if linkMeta.Module != nil && linkMeta.Module.Version != "" {
+ mod := linkMeta.Module
+ linkPath = strings.Replace(linkPath, mod.Path, mod.Path+"@"+mod.Version, 1)
+ }
+
+ return rng, &HoverJSON{
+ Synopsis: doc.Synopsis(docText),
+ FullDocumentation: docText,
+ SingleLine: objectString(obj, qf, nil),
+ SymbolName: linkName,
+ Signature: signature,
+ LinkPath: linkPath,
+ LinkAnchor: anchor,
+ }, nil
+}
+
+// hoverBuiltin computes hover information when hovering over a builtin
+// identifier.
+func hoverBuiltin(ctx context.Context, snapshot Snapshot, obj types.Object) (*HoverJSON, error) {
+ // TODO(rfindley): link to the correct version of Go documentation.
+ builtin, err := snapshot.BuiltinFile(ctx)
+ if err != nil {
+ return nil, err
+ }
+
+ // TODO(rfindley): add a test for jump to definition of error.Error (which is
+ // probably failing, considering it lacks special handling).
+ if obj.Name() == "Error" {
+ signature := obj.String()
+ return &HoverJSON{
+ Signature: signature,
+ SingleLine: signature,
+ // TODO(rfindley): these are better than the current behavior.
+ // SymbolName: "(error).Error",
+ // LinkPath: "builtin",
+ // LinkAnchor: "error.Error",
+ }, nil
+ }
+
+ builtinObj := builtin.File.Scope.Lookup(obj.Name())
+ if builtinObj == nil {
+ // All builtins should have a declaration in the builtin file.
+ return nil, bug.Errorf("no builtin object for %s", obj.Name())
+ }
+ node, _ := builtinObj.Decl.(ast.Node)
+ if node == nil {
+ return nil, bug.Errorf("no declaration for %s", obj.Name())
+ }
+
+ var comment *ast.CommentGroup
+ path, _ := astutil.PathEnclosingInterval(builtin.File, node.Pos(), node.End())
+ for _, n := range path {
+ switch n := n.(type) {
+ case *ast.GenDecl:
+ // Separate documentation and signature.
+ comment = n.Doc
+ node2 := *n
+ node2.Doc = nil
+ node = &node2
+ case *ast.FuncDecl:
+ // Ditto.
+ comment = n.Doc
+ node2 := *n
+ node2.Doc = nil
+ node = &node2
+ }
+ }
+
+ signature := FormatNodeFile(builtin.Tok, node)
+ // Replace fake types with their common equivalent.
+ // TODO(rfindley): we should instead use obj.Type(), which would have the
+ // *actual* types of the builtin call.
+ signature = replacer.Replace(signature)
+
+ docText := comment.Text()
+ return &HoverJSON{
+ Synopsis: doc.Synopsis(docText),
+ FullDocumentation: docText,
+ Signature: signature,
+ SingleLine: obj.String(),
+ SymbolName: obj.Name(),
+ LinkPath: "builtin",
+ LinkAnchor: obj.Name(),
+ }, nil
+}
+
+// hoverImport computes hover information when hovering over the import path of
+// imp in the file pgf of pkg.
+//
+// If we do not have metadata for the hovered import, it returns an error.
+func hoverImport(ctx context.Context, snapshot Snapshot, pkg Package, pgf *ParsedGoFile, imp *ast.ImportSpec) (protocol.Range, *HoverJSON, error) {
+ rng, err := pgf.NodeRange(imp.Path)
+ if err != nil {
+ return protocol.Range{}, nil, err
+ }
+
+ importPath := UnquoteImportPath(imp)
+ if importPath == "" {
+ return protocol.Range{}, nil, fmt.Errorf("invalid import path")
+ }
+ impID := pkg.Metadata().DepsByImpPath[importPath]
+ if impID == "" {
+ return protocol.Range{}, nil, fmt.Errorf("no package data for import %q", importPath)
+ }
+ impMetadata := snapshot.Metadata(impID)
+ if impMetadata == nil {
+ return protocol.Range{}, nil, bug.Errorf("failed to resolve import ID %q", impID)
+ }
+
+ // Find the first file with a package doc comment.
+ var comment *ast.CommentGroup
+ for _, f := range impMetadata.CompiledGoFiles {
+ fh, err := snapshot.GetFile(ctx, f)
+ if err != nil {
+ if ctx.Err() != nil {
+ return protocol.Range{}, nil, ctx.Err()
+ }
+ continue
+ }
+ pgf, err := snapshot.ParseGo(ctx, fh, ParseHeader)
+ if err != nil {
+ if ctx.Err() != nil {
+ return protocol.Range{}, nil, ctx.Err()
+ }
+ continue
+ }
+ if pgf.File.Doc != nil {
+ comment = pgf.File.Doc
+ break
+ }
+ }
+
+ docText := comment.Text()
+ return rng, &HoverJSON{
+ Synopsis: doc.Synopsis(docText),
+ FullDocumentation: docText,
+ }, nil
+}
+
+// hoverPackageName computes hover information for the package name of the file
+// pgf in pkg.
+func hoverPackageName(pkg Package, pgf *ParsedGoFile) (protocol.Range, *HoverJSON, error) {
+ var comment *ast.CommentGroup
+ for _, pgf := range pkg.CompiledGoFiles() {
+ if pgf.File.Doc != nil {
+ comment = pgf.File.Doc
+ break
+ }
+ }
+ rng, err := pgf.NodeRange(pgf.File.Name)
+ if err != nil {
+ return protocol.Range{}, nil, err
+ }
+ docText := comment.Text()
+ return rng, &HoverJSON{
+ Synopsis: doc.Synopsis(docText),
+ FullDocumentation: docText,
+ // Note: including a signature is redundant, since the cursor is already on the
+ // package name.
+ }, nil
+}
+
+// hoverLit computes hover information when hovering over the basic literal lit
+// in the file pgf. The provided pos must be the exact position of the cursor,
+// as it is used to extract the hovered rune in strings.
+//
+// For example, hovering over "\u2211" in "foo \u2211 bar" yields:
+//
+// '∑', U+2211, N-ARY SUMMATION
+func hoverLit(pgf *ParsedGoFile, lit *ast.BasicLit, pos token.Pos) (protocol.Range, *HoverJSON, error) {
+ var r rune
+ var start, end token.Pos
+ // Extract a rune from the current position.
+ // 'Ω', "...Ω...", or 0x03A9 => 'Ω', U+03A9, GREEK CAPITAL LETTER OMEGA
+ switch lit.Kind {
+ case token.CHAR:
+ s, err := strconv.Unquote(lit.Value)
+ if err != nil {
+			// If the conversion fails, it's because of invalid syntax; therefore
+			// there is no rune to be found.
+ return protocol.Range{}, nil, nil
+ }
+ r, _ = utf8.DecodeRuneInString(s)
+ if r == utf8.RuneError {
+ return protocol.Range{}, nil, fmt.Errorf("rune error")
+ }
+ start, end = lit.Pos(), lit.End()
+ case token.INT:
+ // TODO(rfindley): add support for hex/octal/binary->int conversion here.
+
+		// It's an integer; scan only if it is a hex literal whose bit size
+		// ranges from 8 to 32.
+ if !(strings.HasPrefix(lit.Value, "0x") && len(lit.Value[2:]) >= 2 && len(lit.Value[2:]) <= 8) {
+ return protocol.Range{}, nil, nil
+ }
+ v, err := strconv.ParseUint(lit.Value[2:], 16, 32)
+ if err != nil {
+ return protocol.Range{}, nil, fmt.Errorf("parsing int: %v", err)
+ }
+ r = rune(v)
+ if r == utf8.RuneError {
+ return protocol.Range{}, nil, fmt.Errorf("rune error")
+ }
+ start, end = lit.Pos(), lit.End()
+ case token.STRING:
+		// It's a string; scan only if it contains a unicode escape sequence
+		// under or before the current cursor position.
+ litOffset, err := safetoken.Offset(pgf.Tok, lit.Pos())
+ if err != nil {
+ return protocol.Range{}, nil, err
+ }
+ offset, err := safetoken.Offset(pgf.Tok, pos)
+ if err != nil {
+ return protocol.Range{}, nil, err
+ }
+ for i := offset - litOffset; i > 0; i-- {
+ // Start at the cursor position and search backward for the beginning of a rune escape sequence.
+ rr, _ := utf8.DecodeRuneInString(lit.Value[i:])
+ if rr == utf8.RuneError {
+ return protocol.Range{}, nil, fmt.Errorf("rune error")
+ }
+ if rr == '\\' {
+ // Got the beginning, decode it.
+ var tail string
+ r, _, tail, err = strconv.UnquoteChar(lit.Value[i:], '"')
+ if err != nil {
+					// If the conversion fails, it's because of invalid syntax;
+					// therefore there is no rune to be found.
+ return protocol.Range{}, nil, nil
+ }
+ // Only the rune escape sequence part of the string has to be highlighted, recompute the range.
+ runeLen := len(lit.Value) - (int(i) + len(tail))
+ start = token.Pos(int(lit.Pos()) + int(i))
+ end = token.Pos(int(start) + runeLen)
+ break
+ }
+ }
+ }
+ if r == 0 {
+ return protocol.Range{}, nil, nil
+ }
+ rng, err := pgf.PosRange(start, end)
+ if err != nil {
+ return protocol.Range{}, nil, err
+ }
+
+ var desc string
+ runeName := runenames.Name(r)
+ if len(runeName) > 0 && runeName[0] == '<' {
+ // Check if the rune looks like an HTML tag. If so, trim the surrounding <>
+ // characters to work around https://github.com/microsoft/vscode/issues/124042.
+ runeName = strings.TrimRight(runeName[1:], ">")
+ }
+ if strconv.IsPrint(r) {
+ desc = fmt.Sprintf("'%s', U+%04X, %s", string(r), uint32(r), runeName)
+ } else {
+ desc = fmt.Sprintf("U+%04X, %s", uint32(r), runeName)
+ }
+ return rng, &HoverJSON{
+ Synopsis: desc,
+ FullDocumentation: desc,
+ }, nil
+}
+
+// objectString is a wrapper around the types.ObjectString function.
+// It handles adding more information to the object string.
+//
+// TODO(rfindley): this function does too much. We should lift the special
+// handling to callsites.
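+//
+// For example (illustrative), for a constant declared as
+//
+//	const timeout = 2 * time.Minute
+//
+// the result may read "const timeout time.Duration = 120000000000 // 2m0s".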
+func objectString(obj types.Object, qf types.Qualifier, inferred *types.Signature) string {
+ // If the signature type was inferred, prefer the inferred signature with a
+ // comment showing the generic signature.
+ if sig, _ := obj.Type().(*types.Signature); sig != nil && typeparams.ForSignature(sig).Len() > 0 && inferred != nil {
+ obj2 := types.NewFunc(obj.Pos(), obj.Pkg(), obj.Name(), inferred)
+ str := types.ObjectString(obj2, qf)
+ // Try to avoid overly long lines.
+ if len(str) > 60 {
+ str += "\n"
+ } else {
+ str += " "
+ }
+ str += "// " + types.TypeString(sig, qf)
+ return str
+ }
+ str := types.ObjectString(obj, qf)
+ switch obj := obj.(type) {
+ case *types.Const:
+ str = fmt.Sprintf("%s = %s", str, obj.Val())
+
+ // Try to add a formatted duration as an inline comment
+ typ, ok := obj.Type().(*types.Named)
+ if !ok {
+ break
+ }
+ pkg := typ.Obj().Pkg()
+ if pkg.Path() == "time" && typ.Obj().Name() == "Duration" {
+ if d, ok := constant.Int64Val(obj.Val()); ok {
+ str += " // " + time.Duration(d).String()
+ }
+ }
+ }
+ return str
+}
+
+// HoverDocForObject returns the best doc comment for obj (for which
+// fset provides file/line information).
+//
+// TODO(rfindley): there appears to be zero(!) tests for this functionality.
+func HoverDocForObject(ctx context.Context, snapshot Snapshot, fset *token.FileSet, obj types.Object) (*ast.CommentGroup, error) {
+ if _, isTypeName := obj.(*types.TypeName); isTypeName {
+ if _, isTypeParam := obj.Type().(*typeparams.TypeParam); isTypeParam {
+ return nil, nil
+ }
+ }
+
+ pgf, pos, err := parseFull(ctx, snapshot, fset, obj.Pos())
+ if err != nil {
+ return nil, fmt.Errorf("re-parsing: %v", err)
+ }
+
+ decl, spec, field := findDeclInfo([]*ast.File{pgf.File}, pos)
+ return chooseDocComment(decl, spec, field), nil
+}
+
+func chooseDocComment(decl ast.Decl, spec ast.Spec, field *ast.Field) *ast.CommentGroup {
+ if field != nil {
+ if field.Doc != nil {
+ return field.Doc
+ }
+ if field.Comment != nil {
+ return field.Comment
+ }
+ return nil
+ }
+ switch decl := decl.(type) {
+ case *ast.FuncDecl:
+ return decl.Doc
+ case *ast.GenDecl:
+ switch spec := spec.(type) {
+ case *ast.ValueSpec:
+ if spec.Doc != nil {
+ return spec.Doc
+ }
+ if decl.Doc != nil {
+ return decl.Doc
+ }
+ return spec.Comment
+ case *ast.TypeSpec:
+ if spec.Doc != nil {
+ return spec.Doc
+ }
+ if decl.Doc != nil {
+ return decl.Doc
+ }
+ return spec.Comment
+ }
+ }
+ return nil
+}
+
+// parseFull fully parses the file corresponding to position pos (for
+// which fset provides file/line information).
+//
+// It returns the resulting ParsedGoFile as well as new pos contained in the
+// parsed file.
+func parseFull(ctx context.Context, snapshot Snapshot, fset *token.FileSet, pos token.Pos) (*ParsedGoFile, token.Pos, error) {
+ f := fset.File(pos)
+ if f == nil {
+ return nil, 0, bug.Errorf("internal error: no file for position %d", pos)
+ }
+
+ uri := span.URIFromPath(f.Name())
+ fh, err := snapshot.GetFile(ctx, uri)
+ if err != nil {
+ return nil, 0, err
+ }
+
+ pgf, err := snapshot.ParseGo(ctx, fh, ParseFull)
+ if err != nil {
+ return nil, 0, err
+ }
+
+ offset, err := safetoken.Offset(f, pos)
+ if err != nil {
+ return nil, 0, bug.Errorf("offset out of bounds in %q", uri)
+ }
+
+ fullPos, err := safetoken.Pos(pgf.Tok, offset)
+ if err != nil {
+ return nil, 0, err
+ }
+
+ return pgf, fullPos, nil
+}
+
+// extractFieldList recursively tries to extract a field list.
+// If it is not found, nil is returned.
+func extractFieldList(specType ast.Expr) *ast.FieldList {
+ switch t := specType.(type) {
+ case *ast.StructType:
+ return t.Fields
+ case *ast.InterfaceType:
+ return t.Methods
+ case *ast.ArrayType:
+ return extractFieldList(t.Elt)
+ case *ast.MapType:
+		// The map value is more likely to be a struct.
+ if fields := extractFieldList(t.Value); fields != nil {
+ return fields
+ }
+ return extractFieldList(t.Key)
+ case *ast.ChanType:
+ return extractFieldList(t.Value)
+ }
+ return nil
+}
+
+func formatHover(h *HoverJSON, options *Options) (string, error) {
+ signature := formatSignature(h, options)
+
+ switch options.HoverKind {
+ case SingleLine:
+ return h.SingleLine, nil
+ case NoDocumentation:
+ return signature, nil
+ case Structured:
+ b, err := json.Marshal(h)
+ if err != nil {
+ return "", err
+ }
+ return string(b), nil
+ }
+
+ link := formatLink(h, options)
+ doc := formatDoc(h, options)
+
+ var b strings.Builder
+ parts := []string{signature, doc, link}
+ for i, el := range parts {
+ if el != "" {
+ b.WriteString(el)
+
+ // If any elements of the remainder of the list are non-empty,
+ // write an extra newline.
+ if anyNonEmpty(parts[i+1:]) {
+ if options.PreferredContentFormat == protocol.Markdown {
+ b.WriteString("\n\n")
+ } else {
+ b.WriteRune('\n')
+ }
+ }
+ }
+ }
+ return b.String(), nil
+}
+
+func formatSignature(h *HoverJSON, options *Options) string {
+ signature := h.Signature
+ if signature != "" && options.PreferredContentFormat == protocol.Markdown {
+ signature = fmt.Sprintf("```go\n%s\n```", signature)
+ }
+ return signature
+}
+
+func formatLink(h *HoverJSON, options *Options) string {
+ if !options.LinksInHover || options.LinkTarget == "" || h.LinkPath == "" {
+ return ""
+ }
+ plainLink := BuildLink(options.LinkTarget, h.LinkPath, h.LinkAnchor)
+ switch options.PreferredContentFormat {
+ case protocol.Markdown:
+ return fmt.Sprintf("[`%s` on %s](%s)", h.SymbolName, options.LinkTarget, plainLink)
+ case protocol.PlainText:
+ return ""
+ default:
+ return plainLink
+ }
+}
+
+// BuildLink constructs a URL with the given target, path, and anchor.
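+//
+// For example, BuildLink("pkg.go.dev", "go/ast", "Node") returns
+// "https://pkg.go.dev/go/ast#Node".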
+func BuildLink(target, path, anchor string) string {
+ link := fmt.Sprintf("https://%s/%s", target, path)
+ if anchor == "" {
+ return link
+ }
+ return link + "#" + anchor
+}
+
+func formatDoc(h *HoverJSON, options *Options) string {
+ var doc string
+ switch options.HoverKind {
+ case SynopsisDocumentation:
+ doc = h.Synopsis
+ case FullDocumentation:
+ doc = h.FullDocumentation
+ }
+ if options.PreferredContentFormat == protocol.Markdown {
+ return CommentToMarkdown(doc, options)
+ }
+ return doc
+}
+
+func anyNonEmpty(x []string) bool {
+ for _, el := range x {
+ if el != "" {
+ return true
+ }
+ }
+ return false
+}
+
+// findDeclInfo returns the syntax nodes involved in the declaration of the
+// types.Object with position pos, searching the given list of file syntax
+// trees.
+//
+// Pos may be the position of the name-defining identifier in a FuncDecl,
+// ValueSpec, TypeSpec, Field, or as a special case the position of
+// Ellipsis.Elt in an ellipsis field.
+//
+// If found, the resulting decl, spec, and field will be the inner-most
+// instance of each node type surrounding pos.
+//
+// If field is non-nil, pos is the position of a field Var. If field is nil and
+// spec is non-nil, pos is the position of a Var, Const, or TypeName object. If
+// both field and spec are nil and decl is non-nil, pos is the position of a
+// Func object.
+//
+// It returns a nil decl if no object-defining node is found at pos.
+//
+// TODO(rfindley): this function has tricky semantics, and may be worth unit
+// testing and/or refactoring.
+func findDeclInfo(files []*ast.File, pos token.Pos) (decl ast.Decl, spec ast.Spec, field *ast.Field) {
+ // panic(found{}) breaks off the traversal and
+ // causes the function to return normally.
+ type found struct{}
+ defer func() {
+ switch x := recover().(type) {
+ case nil:
+ case found:
+ default:
+ panic(x)
+ }
+ }()
+
+ // Visit the files in search of the node at pos.
+ stack := make([]ast.Node, 0, 20)
+ // Allocate the closure once, outside the loop.
+ f := func(n ast.Node) bool {
+ if n != nil {
+ stack = append(stack, n) // push
+ } else {
+ stack = stack[:len(stack)-1] // pop
+ return false
+ }
+
+ // Skip subtrees (incl. files) that don't contain the search point.
+ if !(n.Pos() <= pos && pos < n.End()) {
+ return false
+ }
+
+ switch n := n.(type) {
+ case *ast.Field:
+ findEnclosingDeclAndSpec := func() {
+ for i := len(stack) - 1; i >= 0; i-- {
+ switch n := stack[i].(type) {
+ case ast.Spec:
+ spec = n
+ case ast.Decl:
+ decl = n
+ return
+ }
+ }
+ }
+
+ // Check each field name since you can have
+ // multiple names for the same type expression.
+ for _, id := range n.Names {
+ if id.Pos() == pos {
+ field = n
+ findEnclosingDeclAndSpec()
+ panic(found{})
+ }
+ }
+
+ // Check *ast.Field itself. This handles embedded
+ // fields which have no associated *ast.Ident name.
+ if n.Pos() == pos {
+ field = n
+ findEnclosingDeclAndSpec()
+ panic(found{})
+ }
+
+ // Also check "X" in "...X". This makes it easy to format variadic
+ // signature params properly.
+ //
+ // TODO(rfindley): I don't understand this comment. How does finding the
+ // field in this case make it easier to format variadic signature params?
+ if ell, ok := n.Type.(*ast.Ellipsis); ok && ell.Elt != nil && ell.Elt.Pos() == pos {
+ field = n
+ findEnclosingDeclAndSpec()
+ panic(found{})
+ }
+
+ case *ast.FuncDecl:
+ if n.Name.Pos() == pos {
+ decl = n
+ panic(found{})
+ }
+
+ case *ast.GenDecl:
+ for _, s := range n.Specs {
+ switch s := s.(type) {
+ case *ast.TypeSpec:
+ if s.Name.Pos() == pos {
+ decl = n
+ spec = s
+ panic(found{})
+ }
+ case *ast.ValueSpec:
+ for _, id := range s.Names {
+ if id.Pos() == pos {
+ decl = n
+ spec = s
+ panic(found{})
+ }
+ }
+ }
+ }
+ }
+ return true
+ }
+ for _, file := range files {
+ ast.Inspect(file, f)
+ }
+
+ return nil, nil, nil
+}
diff --git a/gopls/internal/lsp/source/identifier.go b/gopls/internal/lsp/source/identifier.go
new file mode 100644
index 000000000..15fe13a94
--- /dev/null
+++ b/gopls/internal/lsp/source/identifier.go
@@ -0,0 +1,174 @@
+// Copyright 2018 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package source
+
+import (
+ "errors"
+ "go/ast"
+ "go/types"
+
+ "golang.org/x/tools/internal/typeparams"
+)
+
+// ErrNoIdentFound is the error returned when no identifier is found at a particular position.
+var ErrNoIdentFound = errors.New("no identifier found")
+
+// inferredSignature determines the resolved non-generic signature for an
+// identifier in an instantiation expression.
+//
+// If no such signature exists, it returns nil.
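+//
+// For example (illustrative): given "func id[T any](x T) T" and the
+// call "id(42)", the identifier id resolves to the non-generic
+// signature "func(x int) int".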
+func inferredSignature(info *types.Info, id *ast.Ident) *types.Signature {
+ inst := typeparams.GetInstances(info)[id]
+ sig, _ := inst.Type.(*types.Signature)
+ return sig
+}
+
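+// searchForEnclosing returns, for a selection of a promoted field or
+// method, the deepest exported named type traversed by the selector's
+// implicit field path, or nil if there is none.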
+func searchForEnclosing(info *types.Info, path []ast.Node) *types.TypeName {
+ for _, n := range path {
+ switch n := n.(type) {
+ case *ast.SelectorExpr:
+ if sel, ok := info.Selections[n]; ok {
+ recv := Deref(sel.Recv())
+
+ // Keep track of the last exported type seen.
+ var exported *types.TypeName
+ if named, ok := recv.(*types.Named); ok && named.Obj().Exported() {
+ exported = named.Obj()
+ }
+ // We don't want the last element, as that's the field or
+ // method itself.
+ for _, index := range sel.Index()[:len(sel.Index())-1] {
+ if r, ok := recv.Underlying().(*types.Struct); ok {
+ recv = Deref(r.Field(index).Type())
+ if named, ok := recv.(*types.Named); ok && named.Obj().Exported() {
+ exported = named.Obj()
+ }
+ }
+ }
+ return exported
+ }
+ }
+ }
+ return nil
+}
+
+// typeToObject returns the relevant type name for the given type, after
+// unwrapping pointers, arrays, slices, channels, and function signatures with
+// a single non-error result.
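+//
+// For example (illustrative): for a function type "func() (*Foo, error)"
+// it returns Foo's type name, since the pointer is unwrapped and the
+// error result is skipped, leaving exactly one named result type.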
+func typeToObject(typ types.Type) *types.TypeName {
+ switch typ := typ.(type) {
+ case *types.Named:
+ // TODO(rfindley): this should use typeparams.NamedTypeOrigin.
+ return typ.Obj()
+ case *types.Pointer:
+ return typeToObject(typ.Elem())
+ case *types.Array:
+ return typeToObject(typ.Elem())
+ case *types.Slice:
+ return typeToObject(typ.Elem())
+ case *types.Chan:
+ return typeToObject(typ.Elem())
+ case *types.Signature:
+ // Try to find a return value of a named type. If there's only one
+ // such value, jump to its type definition.
+ var res *types.TypeName
+
+ results := typ.Results()
+ for i := 0; i < results.Len(); i++ {
+ obj := typeToObject(results.At(i).Type())
+ if obj == nil || hasErrorType(obj) {
+ // Skip builtins.
+ continue
+ }
+ if res != nil {
+ // The function/method must have only one return value of a named type.
+ return nil
+ }
+
+ res = obj
+ }
+ return res
+ default:
+ return nil
+ }
+}
+
+func hasErrorType(obj types.Object) bool {
+ return types.IsInterface(obj.Type()) && obj.Pkg() == nil && obj.Name() == "error"
+}
+
+// typeSwitchImplicits returns all the implicit type switch objects that
+// correspond to the leaf *ast.Ident. It also returns the original type
+// associated with the identifier (outside of a case clause).
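+//
+// For example (illustrative): in
+//
+//	switch a := x.(type) {
+//	case int:
+//	case string:
+//	}
+//
+// the result contains the two implicit case-clause objects for "a"
+// (one typed int, one string), along with the type of x.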
+func typeSwitchImplicits(info *types.Info, path []ast.Node) ([]types.Object, types.Type) {
+ ident, _ := path[0].(*ast.Ident)
+ if ident == nil {
+ return nil, nil
+ }
+
+ var (
+ ts *ast.TypeSwitchStmt
+ assign *ast.AssignStmt
+ cc *ast.CaseClause
+ obj = info.ObjectOf(ident)
+ )
+
+ // Walk our ancestors to determine if our leaf ident refers to a
+ // type switch variable, e.g. the "a" from "switch a := b.(type)".
+Outer:
+ for i := 1; i < len(path); i++ {
+ switch n := path[i].(type) {
+ case *ast.AssignStmt:
+ // Check if ident is the "a" in "a := foo.(type)". The "a" in
+ // this case has no types.Object, so check for ident equality.
+ if len(n.Lhs) == 1 && n.Lhs[0] == ident {
+ assign = n
+ }
+ case *ast.CaseClause:
+ // Check if ident is a use of "a" within a case clause. Each
+ // case clause implicitly maps "a" to a different types.Object,
+ // so check if ident's object is the case clause's implicit
+ // object.
+ if obj != nil && info.Implicits[n] == obj {
+ cc = n
+ }
+ case *ast.TypeSwitchStmt:
+ // Look for the type switch that owns our previously found
+ // *ast.AssignStmt or *ast.CaseClause.
+ if n.Assign == assign {
+ ts = n
+ break Outer
+ }
+
+ for _, stmt := range n.Body.List {
+ if stmt == cc {
+ ts = n
+ break Outer
+ }
+ }
+ }
+ }
+ if ts == nil {
+ return nil, nil
+ }
+ // Our leaf ident refers to a type switch variable. Fan out to the
+ // type switch's implicit case clause objects.
+ var objs []types.Object
+ for _, cc := range ts.Body.List {
+ if ccObj := info.Implicits[cc]; ccObj != nil {
+ objs = append(objs, ccObj)
+ }
+ }
+ // The right-hand side of a type switch should only have one
+ // element, and we need to track its type in order to generate
+ // hover information for implicit type switch variables.
+ var typ types.Type
+ if assign, ok := ts.Assign.(*ast.AssignStmt); ok && len(assign.Rhs) == 1 {
+	if rhs, ok := assign.Rhs[0].(*ast.TypeAssertExpr); ok {
+ typ = info.TypeOf(rhs.X)
+ }
+ }
+ return objs, typ
+}
diff --git a/gopls/internal/lsp/source/identifier_test.go b/gopls/internal/lsp/source/identifier_test.go
new file mode 100644
index 000000000..7756fe402
--- /dev/null
+++ b/gopls/internal/lsp/source/identifier_test.go
@@ -0,0 +1,103 @@
+// Copyright 2020 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package source
+
+import (
+ "bytes"
+ "go/ast"
+ "go/parser"
+ "go/token"
+ "go/types"
+ "testing"
+)
+
+func TestSearchForEnclosing(t *testing.T) {
+ tests := []struct {
+ desc string
+ // For convenience, consider the first occurrence of the identifier "X" in
+ // src.
+ src string
+ // By convention, "" means no type found.
+ wantTypeName string
+ }{
+ {
+			// TODO(rfindley): is this correct, or do we want to resolve I2 here?
+ desc: "embedded interface in interface",
+ src: `package a; var y = i1.X; type i1 interface {I2}; type I2 interface{X()}`,
+ wantTypeName: "",
+ },
+ {
+ desc: "embedded interface in struct",
+ src: `package a; var y = t.X; type t struct {I}; type I interface{X()}`,
+ wantTypeName: "I",
+ },
+ {
+ desc: "double embedding",
+ src: `package a; var y = t1.X; type t1 struct {t2}; type t2 struct {I}; type I interface{X()}`,
+ wantTypeName: "I",
+ },
+ }
+
+ for _, test := range tests {
+ test := test
+ t.Run(test.desc, func(t *testing.T) {
+ fset := token.NewFileSet()
+ file, err := parser.ParseFile(fset, "a.go", test.src, parser.AllErrors)
+ if err != nil {
+ t.Fatal(err)
+ }
+ column := 1 + bytes.IndexRune([]byte(test.src), 'X')
+ pos := posAt(1, column, fset, "a.go")
+ path := pathEnclosingObjNode(file, pos)
+ if path == nil {
+ t.Fatalf("no ident found at (1, %d)", column)
+ }
+ info := newInfo()
+ if _, err = (*types.Config)(nil).Check("p", fset, []*ast.File{file}, info); err != nil {
+ t.Fatal(err)
+ }
+ obj := searchForEnclosing(info, path)
+ if obj == nil {
+ if test.wantTypeName != "" {
+ t.Errorf("searchForEnclosing(...) = <nil>, want %q", test.wantTypeName)
+ }
+ return
+ }
+ if got := obj.Name(); got != test.wantTypeName {
+ t.Errorf("searchForEnclosing(...) = %q, want %q", got, test.wantTypeName)
+ }
+ })
+ }
+}
+
+// posAt returns the token.Pos corresponding to the 1-based (line, column)
+// coordinates in the file fname of fset.
+func posAt(line, column int, fset *token.FileSet, fname string) token.Pos {
+ var tok *token.File
+ fset.Iterate(func(tf *token.File) bool {
+ if tf.Name() == fname {
+ tok = tf
+ return false
+ }
+ return true
+ })
+ if tok == nil {
+ return token.NoPos
+ }
+ start := tok.LineStart(line)
+ return start + token.Pos(column-1)
+}
+
+// newInfo returns a types.Info with all maps populated.
+func newInfo() *types.Info {
+ return &types.Info{
+ Types: make(map[ast.Expr]types.TypeAndValue),
+ Defs: make(map[*ast.Ident]types.Object),
+ Uses: make(map[*ast.Ident]types.Object),
+ Implicits: make(map[ast.Node]types.Object),
+ Selections: make(map[*ast.SelectorExpr]*types.Selection),
+ Scopes: make(map[ast.Node]*types.Scope),
+ }
+}
diff --git a/gopls/internal/lsp/source/implementation.go b/gopls/internal/lsp/source/implementation.go
new file mode 100644
index 000000000..72ec90d28
--- /dev/null
+++ b/gopls/internal/lsp/source/implementation.go
@@ -0,0 +1,482 @@
+// Copyright 2019 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package source
+
+import (
+ "context"
+ "errors"
+ "fmt"
+ "go/ast"
+ "go/token"
+ "go/types"
+ "reflect"
+ "sort"
+ "strings"
+ "sync"
+
+ "golang.org/x/sync/errgroup"
+ "golang.org/x/tools/gopls/internal/lsp/protocol"
+ "golang.org/x/tools/gopls/internal/lsp/safetoken"
+ "golang.org/x/tools/gopls/internal/lsp/source/methodsets"
+ "golang.org/x/tools/gopls/internal/span"
+ "golang.org/x/tools/internal/event"
+)
+
+// This file defines the new implementation of the 'implementation'
+// operator that does not require type-checker data structures for an
+// unbounded number of packages.
+//
+// TODO(adonovan):
+// - Audit to ensure robustness in face of type errors.
+// - Support 'error' and 'error.Error', which were also lacking from the old implementation.
+// - Eliminate false positives due to 'tricky' cases of the global algorithm.
+// - Ensure we have test coverage of:
+// type aliases
+// nil, PkgName, Builtin (all errors)
+// any (empty result)
+// method of unnamed interface type (e.g. var x interface { f() })
+// (the global algorithm may find implementations of this type
+// but will not include it in the index.)
+
+// Implementation returns a new sorted array of locations of
+// declarations of types that implement (or are implemented by) the
+// type referred to at the given position.
+//
+// If the position denotes a method, the computation is applied to its
+// receiver type and then its corresponding methods are returned.
+func Implementation(ctx context.Context, snapshot Snapshot, f FileHandle, pp protocol.Position) ([]protocol.Location, error) {
+ ctx, done := event.Start(ctx, "source.Implementation")
+ defer done()
+
+ locs, err := implementations2(ctx, snapshot, f, pp)
+ if err != nil {
+ return nil, err
+ }
+
+ // Sort and de-duplicate locations.
+ sort.Slice(locs, func(i, j int) bool {
+ return protocol.CompareLocation(locs[i], locs[j]) < 0
+ })
+ out := locs[:0]
+ for _, loc := range locs {
+ if len(out) == 0 || out[len(out)-1] != loc {
+ out = append(out, loc)
+ }
+ }
+ locs = out
+
+ return locs, nil
+}
+
+func implementations2(ctx context.Context, snapshot Snapshot, fh FileHandle, pp protocol.Position) ([]protocol.Location, error) {
+
+ // Type-check the query package, find the query identifier,
+ // and locate the type or method declaration it refers to.
+ declPosn, err := typeDeclPosition(ctx, snapshot, fh.URI(), pp)
+ if err != nil {
+ return nil, err
+ }
+
+ // Type-check the declaring package (incl. variants) for use
+ // by the "local" search, which uses type information to
+ // enumerate all types within the package that satisfy the
+ // query type, even those defined local to a function.
+ declURI := span.URIFromPath(declPosn.Filename)
+ declMetas, err := snapshot.MetadataForFile(ctx, declURI)
+ if err != nil {
+ return nil, err
+ }
+ if len(declMetas) == 0 {
+ return nil, fmt.Errorf("no packages for file %s", declURI)
+ }
+ ids := make([]PackageID, len(declMetas))
+ for i, m := range declMetas {
+ ids[i] = m.ID
+ }
+ localPkgs, err := snapshot.TypeCheck(ctx, ids...)
+ if err != nil {
+ return nil, err
+ }
+ // The narrowest package will do, since the local search is based
+ // on position and the global search is based on fingerprint.
+ // (Neither is based on object identity.)
+ declPkg := localPkgs[0]
+ declFile, err := declPkg.File(declURI)
+ if err != nil {
+ return nil, err // "can't happen"
+ }
+
+ // Find declaration of corresponding object
+ // in this package based on (URI, offset).
+ pos, err := safetoken.Pos(declFile.Tok, declPosn.Offset)
+ if err != nil {
+ return nil, err
+ }
+ // TODO(adonovan): simplify: use objectsAt?
+ path := pathEnclosingObjNode(declFile.File, pos)
+ if path == nil {
+ return nil, ErrNoIdentFound // checked earlier
+ }
+ id, ok := path[0].(*ast.Ident)
+ if !ok {
+ return nil, ErrNoIdentFound // checked earlier
+ }
+ obj := declPkg.GetTypesInfo().ObjectOf(id) // may be nil
+
+ // Is the selected identifier a type name or method?
+ // (For methods, report the corresponding method names.)
+ var queryType types.Type
+ var queryMethodID string
+ switch obj := obj.(type) {
+ case *types.TypeName:
+ queryType = obj.Type()
+ case *types.Func:
+ // For methods, use the receiver type, which may be anonymous.
+ if recv := obj.Type().(*types.Signature).Recv(); recv != nil {
+ queryType = recv.Type()
+ queryMethodID = obj.Id()
+ }
+ }
+ if queryType == nil {
+ return nil, fmt.Errorf("%s is not a type or method", id.Name)
+ }
+
+ // Compute the method-set fingerprint used as a key to the global search.
+ key, hasMethods := methodsets.KeyOf(queryType)
+ if !hasMethods {
+ // A type with no methods yields an empty result.
+ // (No point reporting that every type satisfies 'any'.)
+ return nil, nil
+ }
+
+ // The global search needs to look at every package in the workspace;
+ // see package ./methodsets.
+ //
+ // For now we do all the type checking before beginning the search.
+ // TODO(adonovan): opt: search in parallel topological order
+ // so that we can overlap index lookup with typechecking.
+ // I suspect a number of algorithms on the result of TypeCheck could
+ // be optimized by being applied as soon as each package is available.
+ globalMetas, err := snapshot.AllMetadata(ctx)
+ if err != nil {
+ return nil, err
+ }
+ globalIDs := make([]PackageID, 0, len(globalMetas))
+ for _, m := range globalMetas {
+ if m.PkgPath == declPkg.Metadata().PkgPath {
+ continue // declaring package is handled by local implementation
+ }
+ globalIDs = append(globalIDs, m.ID)
+ }
+ indexes, err := snapshot.MethodSets(ctx, globalIDs...)
+ if err != nil {
+ return nil, err
+ }
+
+ // Search local and global packages in parallel.
+ var (
+ group errgroup.Group
+ locsMu sync.Mutex
+ locs []protocol.Location
+ )
+ // local search
+ for _, localPkg := range localPkgs {
+ localPkg := localPkg
+ group.Go(func() error {
+ localLocs, err := localImplementations(ctx, snapshot, localPkg, queryType, queryMethodID)
+ if err != nil {
+ return err
+ }
+ locsMu.Lock()
+ locs = append(locs, localLocs...)
+ locsMu.Unlock()
+ return nil
+ })
+ }
+ // global search
+ for _, index := range indexes {
+ index := index
+ group.Go(func() error {
+ for _, res := range index.Search(key, queryMethodID) {
+ loc := res.Location
+ // Map offsets to protocol.Locations in parallel (may involve I/O).
+ group.Go(func() error {
+ ploc, err := offsetToLocation(ctx, snapshot, loc.Filename, loc.Start, loc.End)
+ if err != nil {
+ return err
+ }
+ locsMu.Lock()
+ locs = append(locs, ploc)
+ locsMu.Unlock()
+ return nil
+ })
+ }
+ return nil
+ })
+ }
+ if err := group.Wait(); err != nil {
+ return nil, err
+ }
+
+ return locs, nil
+}
+
+// offsetToLocation converts an offset-based position to a protocol.Location,
+// which requires reading the file.
+func offsetToLocation(ctx context.Context, snapshot Snapshot, filename string, start, end int) (protocol.Location, error) {
+ uri := span.URIFromPath(filename)
+ fh, err := snapshot.GetFile(ctx, uri)
+ if err != nil {
+ return protocol.Location{}, err // cancelled, perhaps
+ }
+ content, err := fh.Read()
+ if err != nil {
+ return protocol.Location{}, err // nonexistent or deleted ("can't happen")
+ }
+ m := protocol.NewMapper(uri, content)
+ return m.OffsetLocation(start, end)
+}
+
+// typeDeclPosition returns the position of the declaration of the
+// type (or one of its methods) referred to at (uri, ppos).
+func typeDeclPosition(ctx context.Context, snapshot Snapshot, uri span.URI, ppos protocol.Position) (token.Position, error) {
+ var noPosn token.Position
+
+ pkg, pgf, err := PackageForFile(ctx, snapshot, uri, WidestPackage)
+ if err != nil {
+ return noPosn, err
+ }
+ pos, err := pgf.PositionPos(ppos)
+ if err != nil {
+ return noPosn, err
+ }
+
+ // This function inherits the limitation of its predecessor in
+ // requiring the selection to be an identifier (of a type or
+ // method). But there's no fundamental reason why one could
+ // not pose this query about any selected piece of syntax that
+ // has a type and thus a method set.
+ // (If LSP was more thorough about passing text selections as
+ // intervals to queries, you could ask about the method set of a
+ // subexpression such as x.f().)
+
+ // TODO(adonovan): simplify: use objectsAt?
+ path := pathEnclosingObjNode(pgf.File, pos)
+ if path == nil {
+ return noPosn, ErrNoIdentFound
+ }
+ id, ok := path[0].(*ast.Ident)
+ if !ok {
+ return noPosn, ErrNoIdentFound
+ }
+
+ // Is the object a type or method? Reject other kinds.
+ obj := pkg.GetTypesInfo().Uses[id]
+ if obj == nil {
+ // Check uses first (unlike ObjectOf) so that T in
+ // struct{T} is treated as a reference to a type,
+ // not a declaration of a field.
+ obj = pkg.GetTypesInfo().Defs[id]
+ }
+ switch obj := obj.(type) {
+ case *types.TypeName:
+ // ok
+ case *types.Func:
+ if obj.Type().(*types.Signature).Recv() == nil {
+ return noPosn, fmt.Errorf("%s is a function, not a method", id.Name)
+ }
+ case nil:
+ return noPosn, fmt.Errorf("%s denotes unknown object", id.Name)
+ default:
+ // e.g. *types.Var -> "var".
+ kind := strings.ToLower(strings.TrimPrefix(reflect.TypeOf(obj).String(), "*types."))
+ return noPosn, fmt.Errorf("%s is a %s, not a type", id.Name, kind)
+ }
+
+ declPosn := safetoken.StartPosition(pkg.FileSet(), obj.Pos())
+ return declPosn, nil
+}
+
+// localImplementations searches within pkg for declarations of all
+// types that are assignable to/from the query type, and returns a new
+// unordered array of their locations.
+//
+// If methodID is non-empty, the function instead returns the location
+// of each type's method (if any) of that ID.
+//
+// ("Local" refers to the search within the same package, but this
+// function's results may include type declarations that are local to
+// a function body. The global search index excludes such types
+// because reliably naming such types is hard.)
+func localImplementations(ctx context.Context, snapshot Snapshot, pkg Package, queryType types.Type, methodID string) ([]protocol.Location, error) {
+ queryType = methodsets.EnsurePointer(queryType)
+
+ // Scan through all type declarations in the syntax.
+ var locs []protocol.Location
+ var methodLocs []methodsets.Location
+ for _, pgf := range pkg.CompiledGoFiles() {
+ ast.Inspect(pgf.File, func(n ast.Node) bool {
+ spec, ok := n.(*ast.TypeSpec)
+ if !ok {
+ return true // not a type declaration
+ }
+ def := pkg.GetTypesInfo().Defs[spec.Name]
+ if def == nil {
+ return true // "can't happen" for types
+ }
+ if def.(*types.TypeName).IsAlias() {
+ return true // skip type aliases to avoid duplicate reporting
+ }
+ candidateType := methodsets.EnsurePointer(def.Type())
+
+			// The historical behavior enshrined by this
+			// function rejects cases where both are
+			// (nontrivial) interface types, even though
+			// that seems like useful information.
+ // TODO(adonovan): UX: report I/I pairs too?
+ // The same question appears in the global algorithm (methodsets).
+ if !concreteImplementsIntf(candidateType, queryType) {
+ return true // not assignable
+ }
+
+ // Ignore types with empty method sets.
+ // (No point reporting that every type satisfies 'any'.)
+ mset := types.NewMethodSet(candidateType)
+ if mset.Len() == 0 {
+ return true
+ }
+
+ if methodID == "" {
+ // Found matching type.
+ locs = append(locs, mustLocation(pgf, spec.Name))
+ return true
+ }
+
+ // Find corresponding method.
+ //
+ // We can't use LookupFieldOrMethod because it requires
+ // the methodID's types.Package, which we don't know.
+ // We could recursively search pkg.Imports for it,
+ // but it's easier to walk the method set.
+ for i := 0; i < mset.Len(); i++ {
+ method := mset.At(i).Obj()
+ if method.Id() == methodID {
+ posn := safetoken.StartPosition(pkg.FileSet(), method.Pos())
+ methodLocs = append(methodLocs, methodsets.Location{
+ Filename: posn.Filename,
+ Start: posn.Offset,
+ End: posn.Offset + len(method.Name()),
+ })
+ break
+ }
+ }
+ return true
+ })
+ }
+
+ // Finally convert method positions to protocol form by reading the files.
+ for _, mloc := range methodLocs {
+ loc, err := offsetToLocation(ctx, snapshot, mloc.Filename, mloc.Start, mloc.End)
+ if err != nil {
+ return nil, err
+ }
+ locs = append(locs, loc)
+ }
+
+ return locs, nil
+}
+
+// concreteImplementsIntf returns true if a is an interface type implemented by
+// concrete type b, or vice versa.
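+//
+// For example (illustrative): concreteImplementsIntf(*bytes.Buffer, io.Writer)
+// holds (in either argument order), whereas it never holds for two
+// interface types or two concrete types.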
+func concreteImplementsIntf(a, b types.Type) bool {
+ aIsIntf, bIsIntf := types.IsInterface(a), types.IsInterface(b)
+
+ // Make sure exactly one is an interface type.
+ if aIsIntf == bIsIntf {
+ return false
+ }
+
+ // Rearrange if needed so "a" is the concrete type.
+ if aIsIntf {
+ a, b = b, a
+ }
+
+ // TODO(adonovan): this should really use GenericAssignableTo
+ // to report (e.g.) "ArrayList[T] implements List[T]", but
+ // GenericAssignableTo doesn't work correctly on pointers to
+ // generic named types. Thus the legacy implementation and the
+ // "local" part of implementation2 fail to report generics.
+ // The global algorithm based on subsets does the right thing.
+ return types.AssignableTo(a, b)
+}
+
+var (
+ // TODO(adonovan): why do various RPC handlers related to
+ // IncomingCalls return (nil, nil) on the protocol in response
+ // to this error? That seems like a violation of the protocol.
+ // Is it perhaps a workaround for VSCode behavior?
+ errNoObjectFound = errors.New("no object found")
+)
+
+// pathEnclosingObjNode returns the AST path to the object-defining
+// node associated with pos. "Object-defining" means either an
+// *ast.Ident mapped directly to a types.Object or an ast.Node mapped
+// implicitly to a types.Object.
+func pathEnclosingObjNode(f *ast.File, pos token.Pos) []ast.Node {
+ var (
+ path []ast.Node
+ found bool
+ )
+
+ ast.Inspect(f, func(n ast.Node) bool {
+ if found {
+ return false
+ }
+
+ if n == nil {
+ path = path[:len(path)-1]
+ return false
+ }
+
+ path = append(path, n)
+
+ switch n := n.(type) {
+ case *ast.Ident:
+ // Include the position directly after identifier. This handles
+ // the common case where the cursor is right after the
+ // identifier the user is currently typing. Previously we
+ // handled this by calling astutil.PathEnclosingInterval twice,
+ // once for "pos" and once for "pos-1".
+ found = n.Pos() <= pos && pos <= n.End()
+ case *ast.ImportSpec:
+ if n.Path.Pos() <= pos && pos < n.Path.End() {
+ found = true
+ // If import spec has a name, add name to path even though
+ // position isn't in the name.
+ if n.Name != nil {
+ path = append(path, n.Name)
+ }
+ }
+ case *ast.StarExpr:
+ // Follow star expressions to the inner identifier.
+ if pos == n.Star {
+ pos = n.X.Pos()
+ }
+ }
+
+ return !found
+ })
+
+ if len(path) == 0 {
+ return nil
+ }
+
+ // Reverse path so leaf is first element.
+ for i := 0; i < len(path)/2; i++ {
+ path[i], path[len(path)-1-i] = path[len(path)-1-i], path[i]
+ }
+
+ return path
+}
diff --git a/gopls/internal/lsp/source/inlay_hint.go b/gopls/internal/lsp/source/inlay_hint.go
new file mode 100644
index 000000000..671d405dc
--- /dev/null
+++ b/gopls/internal/lsp/source/inlay_hint.go
@@ -0,0 +1,394 @@
+// Copyright 2022 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package source
+
+import (
+ "context"
+ "fmt"
+ "go/ast"
+ "go/constant"
+ "go/token"
+ "go/types"
+ "strings"
+
+ "golang.org/x/tools/gopls/internal/lsp/protocol"
+ "golang.org/x/tools/internal/event"
+ "golang.org/x/tools/internal/typeparams"
+)
+
+const (
+ maxLabelLength = 28
+)
+
+type InlayHintFunc func(node ast.Node, m *protocol.Mapper, tf *token.File, info *types.Info, q *types.Qualifier) []protocol.InlayHint
+
+type Hint struct {
+ Name string
+ Doc string
+ Run InlayHintFunc
+}
+
+const (
+ ParameterNames = "parameterNames"
+ AssignVariableTypes = "assignVariableTypes"
+ ConstantValues = "constantValues"
+ RangeVariableTypes = "rangeVariableTypes"
+ CompositeLiteralTypes = "compositeLiteralTypes"
+ CompositeLiteralFieldNames = "compositeLiteralFields"
+ FunctionTypeParameters = "functionTypeParameters"
+)
+
+var AllInlayHints = map[string]*Hint{
+ AssignVariableTypes: {
+ Name: AssignVariableTypes,
+ Doc: "Enable/disable inlay hints for variable types in assign statements:\n```go\n\ti/* int*/, j/* int*/ := 0, len(r)-1\n```",
+ Run: assignVariableTypes,
+ },
+ ParameterNames: {
+ Name: ParameterNames,
+ Doc: "Enable/disable inlay hints for parameter names:\n```go\n\tparseInt(/* str: */ \"123\", /* radix: */ 8)\n```",
+ Run: parameterNames,
+ },
+ ConstantValues: {
+ Name: ConstantValues,
+ Doc: "Enable/disable inlay hints for constant values:\n```go\n\tconst (\n\t\tKindNone Kind = iota/* = 0*/\n\t\tKindPrint/* = 1*/\n\t\tKindPrintf/* = 2*/\n\t\tKindErrorf/* = 3*/\n\t)\n```",
+ Run: constantValues,
+ },
+ RangeVariableTypes: {
+ Name: RangeVariableTypes,
+ Doc: "Enable/disable inlay hints for variable types in range statements:\n```go\n\tfor k/* int*/, v/* string*/ := range []string{} {\n\t\tfmt.Println(k, v)\n\t}\n```",
+ Run: rangeVariableTypes,
+ },
+ CompositeLiteralTypes: {
+ Name: CompositeLiteralTypes,
+ Doc: "Enable/disable inlay hints for composite literal types:\n```go\n\tfor _, c := range []struct {\n\t\tin, want string\n\t}{\n\t\t/*struct{ in string; want string }*/{\"Hello, world\", \"dlrow ,olleH\"},\n\t}\n```",
+ Run: compositeLiteralTypes,
+ },
+ CompositeLiteralFieldNames: {
+ Name: CompositeLiteralFieldNames,
+ Doc: "Enable/disable inlay hints for composite literal field names:\n```go\n\t{/*in: */\"Hello, world\", /*want: */\"dlrow ,olleH\"}\n```",
+ Run: compositeLiteralFields,
+ },
+ FunctionTypeParameters: {
+ Name: FunctionTypeParameters,
+ Doc: "Enable/disable inlay hints for implicit type parameters on generic functions:\n```go\n\tmyFoo/*[int, string]*/(1, \"hello\")\n```",
+ Run: funcTypeParams,
+ },
+}
+
+func InlayHint(ctx context.Context, snapshot Snapshot, fh FileHandle, pRng protocol.Range) ([]protocol.InlayHint, error) {
+ ctx, done := event.Start(ctx, "source.InlayHint")
+ defer done()
+
+ pkg, pgf, err := PackageForFile(ctx, snapshot, fh.URI(), NarrowestPackage)
+ if err != nil {
+ return nil, fmt.Errorf("getting file for InlayHint: %w", err)
+ }
+
+ // Collect a list of the inlay hints that are enabled.
+ inlayHintOptions := snapshot.View().Options().InlayHintOptions
+ var enabledHints []InlayHintFunc
+ for hint, enabled := range inlayHintOptions.Hints {
+ if !enabled {
+ continue
+ }
+ if h, ok := AllInlayHints[hint]; ok {
+ enabledHints = append(enabledHints, h.Run)
+ }
+ }
+ if len(enabledHints) == 0 {
+ return nil, nil
+ }
+
+ info := pkg.GetTypesInfo()
+ q := Qualifier(pgf.File, pkg.GetTypes(), info)
+
+ // Set the range to the full file if the range is not valid.
+ start, end := pgf.File.Pos(), pgf.File.End()
+ if pRng.Start.Line < pRng.End.Line || pRng.Start.Character < pRng.End.Character {
+ // Adjust start and end for the specified range.
+ var err error
+ start, end, err = pgf.RangePos(pRng)
+ if err != nil {
+ return nil, err
+ }
+ }
+
+ var hints []protocol.InlayHint
+ ast.Inspect(pgf.File, func(node ast.Node) bool {
+ // If not in range, we can stop looking.
+ if node == nil || node.End() < start || node.Pos() > end {
+ return false
+ }
+ for _, fn := range enabledHints {
+ hints = append(hints, fn(node, pgf.Mapper, pgf.Tok, info, &q)...)
+ }
+ return true
+ })
+ return hints, nil
+}
+
+func parameterNames(node ast.Node, m *protocol.Mapper, tf *token.File, info *types.Info, _ *types.Qualifier) []protocol.InlayHint {
+ callExpr, ok := node.(*ast.CallExpr)
+ if !ok {
+ return nil
+ }
+ signature, ok := info.TypeOf(callExpr.Fun).(*types.Signature)
+ if !ok {
+ return nil
+ }
+
+ var hints []protocol.InlayHint
+ for i, v := range callExpr.Args {
+ start, err := m.PosPosition(tf, v.Pos())
+ if err != nil {
+ continue
+ }
+ params := signature.Params()
+ // When a function has variadic params, we skip args after
+ // params.Len().
+ if i > params.Len()-1 {
+ break
+ }
+ param := params.At(i)
+ // param.Name is empty for built-ins like append
+ if param.Name() == "" {
+ continue
+ }
+		// Skip the parameter name hint if the arg matches
+		// the parameter name.
+ if i, ok := v.(*ast.Ident); ok && i.Name == param.Name() {
+ continue
+ }
+
+ label := param.Name()
+ if signature.Variadic() && i == params.Len()-1 {
+ label = label + "..."
+ }
+ hints = append(hints, protocol.InlayHint{
+ Position: start,
+ Label: buildLabel(label + ":"),
+ Kind: protocol.Parameter,
+ PaddingRight: true,
+ })
+ }
+ return hints
+}
+
+func funcTypeParams(node ast.Node, m *protocol.Mapper, tf *token.File, info *types.Info, _ *types.Qualifier) []protocol.InlayHint {
+ ce, ok := node.(*ast.CallExpr)
+ if !ok {
+ return nil
+ }
+ id, ok := ce.Fun.(*ast.Ident)
+ if !ok {
+ return nil
+ }
+ inst := typeparams.GetInstances(info)[id]
+ if inst.TypeArgs == nil {
+ return nil
+ }
+ start, err := m.PosPosition(tf, id.End())
+ if err != nil {
+ return nil
+ }
+ var args []string
+ for i := 0; i < inst.TypeArgs.Len(); i++ {
+ args = append(args, inst.TypeArgs.At(i).String())
+ }
+ if len(args) == 0 {
+ return nil
+ }
+ return []protocol.InlayHint{{
+ Position: start,
+ Label: buildLabel("[" + strings.Join(args, ", ") + "]"),
+ Kind: protocol.Type,
+ }}
+}
+
+func assignVariableTypes(node ast.Node, m *protocol.Mapper, tf *token.File, info *types.Info, q *types.Qualifier) []protocol.InlayHint {
+ stmt, ok := node.(*ast.AssignStmt)
+ if !ok || stmt.Tok != token.DEFINE {
+ return nil
+ }
+
+ var hints []protocol.InlayHint
+ for _, v := range stmt.Lhs {
+ if h := variableType(v, m, tf, info, q); h != nil {
+ hints = append(hints, *h)
+ }
+ }
+ return hints
+}
+
+func rangeVariableTypes(node ast.Node, m *protocol.Mapper, tf *token.File, info *types.Info, q *types.Qualifier) []protocol.InlayHint {
+ rStmt, ok := node.(*ast.RangeStmt)
+ if !ok {
+ return nil
+ }
+ var hints []protocol.InlayHint
+ if h := variableType(rStmt.Key, m, tf, info, q); h != nil {
+ hints = append(hints, *h)
+ }
+ if h := variableType(rStmt.Value, m, tf, info, q); h != nil {
+ hints = append(hints, *h)
+ }
+ return hints
+}
+
+func variableType(e ast.Expr, m *protocol.Mapper, tf *token.File, info *types.Info, q *types.Qualifier) *protocol.InlayHint {
+ typ := info.TypeOf(e)
+ if typ == nil {
+ return nil
+ }
+ end, err := m.PosPosition(tf, e.End())
+ if err != nil {
+ return nil
+ }
+ return &protocol.InlayHint{
+ Position: end,
+ Label: buildLabel(types.TypeString(typ, *q)),
+ Kind: protocol.Type,
+ PaddingLeft: true,
+ }
+}
+
+func constantValues(node ast.Node, m *protocol.Mapper, tf *token.File, info *types.Info, _ *types.Qualifier) []protocol.InlayHint {
+ genDecl, ok := node.(*ast.GenDecl)
+ if !ok || genDecl.Tok != token.CONST {
+ return nil
+ }
+
+ var hints []protocol.InlayHint
+ for _, v := range genDecl.Specs {
+ spec, ok := v.(*ast.ValueSpec)
+ if !ok {
+ continue
+ }
+ end, err := m.PosPosition(tf, v.End())
+ if err != nil {
+ continue
+ }
+ // Show hints when values are missing or at least one value is not
+ // a basic literal.
+ showHints := len(spec.Values) == 0
+ checkValues := len(spec.Names) == len(spec.Values)
+ var values []string
+ for i, w := range spec.Names {
+ obj, ok := info.ObjectOf(w).(*types.Const)
+ if !ok || obj.Val().Kind() == constant.Unknown {
+ return nil
+ }
+ if checkValues {
+ switch spec.Values[i].(type) {
+ case *ast.BadExpr:
+ return nil
+ case *ast.BasicLit:
+ default:
+ if obj.Val().Kind() != constant.Bool {
+ showHints = true
+ }
+ }
+ }
+ values = append(values, fmt.Sprintf("%v", obj.Val()))
+ }
+ if !showHints || len(values) == 0 {
+ continue
+ }
+ hints = append(hints, protocol.InlayHint{
+ Position: end,
+ Label: buildLabel("= " + strings.Join(values, ", ")),
+ PaddingLeft: true,
+ })
+ }
+ return hints
+}
+
+func compositeLiteralFields(node ast.Node, m *protocol.Mapper, tf *token.File, info *types.Info, q *types.Qualifier) []protocol.InlayHint {
+ compLit, ok := node.(*ast.CompositeLit)
+ if !ok {
+ return nil
+ }
+ typ := info.TypeOf(compLit)
+ if typ == nil {
+ return nil
+ }
+ if t, ok := typ.(*types.Pointer); ok {
+ typ = t.Elem()
+ }
+ strct, ok := typ.Underlying().(*types.Struct)
+ if !ok {
+ return nil
+ }
+
+ var hints []protocol.InlayHint
+ var allEdits []protocol.TextEdit
+ for i, v := range compLit.Elts {
+ if _, ok := v.(*ast.KeyValueExpr); !ok {
+ start, err := m.PosPosition(tf, v.Pos())
+ if err != nil {
+ continue
+ }
+ if i > strct.NumFields()-1 {
+ break
+ }
+ hints = append(hints, protocol.InlayHint{
+ Position: start,
+ Label: buildLabel(strct.Field(i).Name() + ":"),
+ Kind: protocol.Parameter,
+ PaddingRight: true,
+ })
+ allEdits = append(allEdits, protocol.TextEdit{
+ Range: protocol.Range{Start: start, End: start},
+ NewText: strct.Field(i).Name() + ": ",
+ })
+ }
+ }
+ // It is not allowed to have a mix of keyed and unkeyed fields, so
+ // have the text edits add keys to all fields.
+ for i := range hints {
+ hints[i].TextEdits = allEdits
+ }
+ return hints
+}
+
+func compositeLiteralTypes(node ast.Node, m *protocol.Mapper, tf *token.File, info *types.Info, q *types.Qualifier) []protocol.InlayHint {
+ compLit, ok := node.(*ast.CompositeLit)
+ if !ok {
+ return nil
+ }
+ typ := info.TypeOf(compLit)
+ if typ == nil {
+ return nil
+ }
+ if compLit.Type != nil {
+ return nil
+ }
+ prefix := ""
+ if t, ok := typ.(*types.Pointer); ok {
+ typ = t.Elem()
+ prefix = "&"
+ }
+ // The type for this composite literal is implicit, add an inlay hint.
+ start, err := m.PosPosition(tf, compLit.Lbrace)
+ if err != nil {
+ return nil
+ }
+ return []protocol.InlayHint{{
+ Position: start,
+ Label: buildLabel(fmt.Sprintf("%s%s", prefix, types.TypeString(typ, *q))),
+ Kind: protocol.Type,
+ }}
+}
+
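+// buildLabel returns a single-part label for s, truncated to
+// maxLabelLength characters plus an ellipsis when truncation would
+// actually shorten it.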
+func buildLabel(s string) []protocol.InlayHintLabelPart {
+ label := protocol.InlayHintLabelPart{
+ Value: s,
+ }
+ if len(s) > maxLabelLength+len("...") {
+ label.Value = s[:maxLabelLength] + "..."
+ }
+ return []protocol.InlayHintLabelPart{label}
+}
diff --git a/gopls/internal/lsp/source/known_packages.go b/gopls/internal/lsp/source/known_packages.go
new file mode 100644
index 000000000..07b4c30a8
--- /dev/null
+++ b/gopls/internal/lsp/source/known_packages.go
@@ -0,0 +1,140 @@
+// Copyright 2020 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package source
+
+import (
+ "context"
+ "fmt"
+ "go/parser"
+ "go/token"
+ "sort"
+ "strings"
+ "sync"
+ "time"
+
+ "golang.org/x/tools/internal/event"
+ "golang.org/x/tools/internal/imports"
+)
+
+// KnownPackagePaths returns a new list of package paths of all known
+// packages in the package graph that could potentially be imported by
+// the given file. The list is ordered lexicographically, except that
+// all dot-free paths (standard packages) appear before dotful ones.
+//
+// It is part of the gopls.list_known_packages command.
+func KnownPackagePaths(ctx context.Context, snapshot Snapshot, fh FileHandle) ([]PackagePath, error) {
+ // This algorithm is expressed in terms of Metadata, not Packages,
+ // so it doesn't cause or wait for type checking.
+
+ // Find a Metadata containing the file.
+ metas, err := snapshot.MetadataForFile(ctx, fh.URI())
+ if err != nil {
+ return nil, err // e.g. context cancelled
+ }
+ if len(metas) == 0 {
+		return nil, fmt.Errorf("no loaded package contains file %s", fh.URI())
+ }
+ current := metas[0] // pick one arbitrarily (they should all have the same package path)
+
+ // Parse the file's imports so we can compute which
+ // PackagePaths are imported by this specific file.
+ src, err := fh.Read()
+ if err != nil {
+ return nil, err
+ }
+ file, err := parser.ParseFile(token.NewFileSet(), fh.URI().Filename(), src, parser.ImportsOnly)
+ if err != nil {
+ return nil, err
+ }
+ imported := make(map[PackagePath]bool)
+ for _, imp := range file.Imports {
+ if id := current.DepsByImpPath[UnquoteImportPath(imp)]; id != "" {
+ if m := snapshot.Metadata(id); m != nil {
+ imported[m.PkgPath] = true
+ }
+ }
+ }
+
+ // Now find candidates among known packages.
+ knownPkgs, err := snapshot.AllMetadata(ctx)
+ if err != nil {
+ return nil, err
+ }
+ seen := make(map[PackagePath]bool)
+ for _, knownPkg := range knownPkgs {
+ // package main cannot be imported
+ if knownPkg.Name == "main" {
+ continue
+ }
+ // test packages cannot be imported
+ if knownPkg.ForTest != "" {
+ continue
+ }
+ // No need to import what the file already imports.
+ // This check is based on PackagePath, not PackageID,
+ // so that all test variants are filtered out too.
+ if imported[knownPkg.PkgPath] {
+ continue
+ }
+ // make sure internal packages are importable by the file
+ if !IsValidImport(current.PkgPath, knownPkg.PkgPath) {
+ continue
+ }
+ // naive check on cyclical imports
+ if isDirectlyCyclical(current, knownPkg) {
+ continue
+ }
+ // AllMetadata may have multiple variants of a pkg.
+ seen[knownPkg.PkgPath] = true
+ }
+
+ // Augment the set by invoking the goimports algorithm.
+ if err := snapshot.RunProcessEnvFunc(ctx, func(o *imports.Options) error {
+ ctx, cancel := context.WithTimeout(ctx, time.Millisecond*80)
+ defer cancel()
+ var seenMu sync.Mutex
+ wrapped := func(ifix imports.ImportFix) {
+ seenMu.Lock()
+ defer seenMu.Unlock()
+ // TODO(adonovan): what if the actual package path has a vendor/ prefix?
+ seen[PackagePath(ifix.StmtInfo.ImportPath)] = true
+ }
+ return imports.GetAllCandidates(ctx, wrapped, "", fh.URI().Filename(), string(current.Name), o.Env)
+ }); err != nil {
+ // If goimports failed, proceed with just the candidates from the metadata.
+ event.Error(ctx, "imports.GetAllCandidates", err)
+ }
+
+ // Sort lexicographically, but with std before non-std packages.
+ paths := make([]PackagePath, 0, len(seen))
+ for path := range seen {
+ paths = append(paths, path)
+ }
+ sort.Slice(paths, func(i, j int) bool {
+ importI, importJ := paths[i], paths[j]
+ iHasDot := strings.Contains(string(importI), ".")
+ jHasDot := strings.Contains(string(importJ), ".")
+ if iHasDot != jHasDot {
+ return jHasDot // dot-free paths (standard packages) compare less
+ }
+ return importI < importJ
+ })
+
+ return paths, nil
+}
+
+// isDirectlyCyclical checks if imported directly imports pkg.
+// It does not (yet) offer a full cyclical check because showing a user
+// a list of importable packages already generates a very large list
+// and having a few false positives in there could be worth the
+// performance snappiness.
+//
+// TODO(adonovan): ensure that the metadata graph is always acyclic!
+// Many algorithms will get confused or even stuck in the
+// presence of cycles. Then replace this function by 'false'.
+func isDirectlyCyclical(pkg, imported *Metadata) bool {
+ _, ok := imported.DepsByPkgPath[pkg.PkgPath]
+ return ok
+}
diff --git a/gopls/internal/lsp/source/linkname.go b/gopls/internal/lsp/source/linkname.go
new file mode 100644
index 000000000..c8afcdf2d
--- /dev/null
+++ b/gopls/internal/lsp/source/linkname.go
@@ -0,0 +1,136 @@
+// Copyright 2023 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package source
+
+import (
+ "context"
+ "errors"
+ "fmt"
+ "go/token"
+ "strings"
+
+ "golang.org/x/tools/gopls/internal/lsp/protocol"
+ "golang.org/x/tools/gopls/internal/lsp/safetoken"
+ "golang.org/x/tools/gopls/internal/span"
+)
+
+// ErrNoLinkname is returned by LinknameDefinition when no linkname
+// directive is found at a particular position.
+// As such it indicates that other definitions could be worth checking.
+var ErrNoLinkname = errors.New("no linkname directive found")
+
+// LinknameDefinition finds the definition of the linkname directive in fh at pos.
+// If there is no linkname directive at pos, returns ErrNoLinkname.
+func LinknameDefinition(ctx context.Context, snapshot Snapshot, fh FileHandle, pos protocol.Position) ([]protocol.Location, error) {
+ pkgPath, name := parseLinkname(ctx, snapshot, fh, pos)
+ if pkgPath == "" {
+ return nil, ErrNoLinkname
+ }
+ return findLinkname(ctx, snapshot, fh, pos, PackagePath(pkgPath), name)
+}
+
+// parseLinkname attempts to parse a go:linkname declaration at the given pos.
+// If successful, it returns the package path and object name referenced by the second
+// argument of the linkname directive.
+//
+// If the position is not in the second argument of a go:linkname directive, or parsing fails, it returns "", "".
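+//
+// For example (illustrative): for the directive
+// "//go:linkname fastrand runtime.fastrand", with pos inside the second
+// argument, it returns ("runtime", "fastrand").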
+func parseLinkname(ctx context.Context, snapshot Snapshot, fh FileHandle, pos protocol.Position) (pkgPath, name string) {
+ pgf, err := snapshot.ParseGo(ctx, fh, ParseFull)
+ if err != nil {
+ return "", ""
+ }
+
+ span, err := pgf.Mapper.PositionPoint(pos)
+ if err != nil {
+ return "", ""
+ }
+ atLine := span.Line()
+ atColumn := span.Column()
+
+ // Looking for pkgpath in '//go:linkname f pkgpath.g'.
+ // (We ignore 1-arg linkname directives.)
+ directive, column := findLinknameOnLine(pgf, atLine)
+ parts := strings.Fields(directive)
+ if len(parts) != 3 {
+ return "", ""
+ }
+
+ // Inside 2nd arg [start, end]?
+ end := column + len(directive)
+ start := end - len(parts[2])
+ if !(start <= atColumn && atColumn <= end) {
+ return "", ""
+ }
+ linkname := parts[2]
+
+ // Split the pkg path from the name.
+ dot := strings.LastIndexByte(linkname, '.')
+ if dot < 0 {
+ return "", ""
+ }
+ return linkname[:dot], linkname[dot+1:]
+}
+
+// findLinknameOnLine returns the first linkname directive on line and the column it starts at.
+// Returns "", 0 if no linkname directive is found on the line.
+func findLinknameOnLine(pgf *ParsedGoFile, line int) (string, int) {
+ for _, grp := range pgf.File.Comments {
+ for _, com := range grp.List {
+ if strings.HasPrefix(com.Text, "//go:linkname") {
+ p := safetoken.Position(pgf.Tok, com.Pos())
+ if p.Line == line {
+ return com.Text, p.Column
+ }
+ }
+ }
+ }
+ return "", 0
+}
+
+// findLinkname searches dependencies of packages containing fh for an object
+// with linker name matching the given package path and name.
+func findLinkname(ctx context.Context, snapshot Snapshot, fh FileHandle, pos protocol.Position, pkgPath PackagePath, name string) ([]protocol.Location, error) {
+ // Typically the linkname refers to a forward dependency
+ // or a reverse dependency, but in general it may refer
+ // to any package in the workspace.
+ var pkgMeta *Metadata
+ metas, err := snapshot.AllMetadata(ctx)
+ if err != nil {
+ return nil, err
+ }
+ metas = RemoveIntermediateTestVariants(metas)
+ for _, meta := range metas {
+ if meta.PkgPath == pkgPath {
+ pkgMeta = meta
+ break
+ }
+ }
+ if pkgMeta == nil {
+ return nil, fmt.Errorf("cannot find package %q", pkgPath)
+ }
+
+ // When found, type check the desired package (snapshot.TypeCheck in TypecheckFull mode),
+ pkgs, err := snapshot.TypeCheck(ctx, pkgMeta.ID)
+ if err != nil {
+ return nil, err
+ }
+ pkg := pkgs[0]
+
+ obj := pkg.GetTypes().Scope().Lookup(name)
+ if obj == nil {
+ return nil, fmt.Errorf("package %q does not define %s", pkgPath, name)
+ }
+
+ objURI := safetoken.StartPosition(pkg.FileSet(), obj.Pos())
+ pgf, err := pkg.File(span.URIFromPath(objURI.Filename))
+ if err != nil {
+ return nil, err
+ }
+ loc, err := pgf.PosLocation(obj.Pos(), obj.Pos()+token.Pos(len(name)))
+ if err != nil {
+ return nil, err
+ }
+ return []protocol.Location{loc}, nil
+}
diff --git a/gopls/internal/lsp/source/methodsets/methodsets.go b/gopls/internal/lsp/source/methodsets/methodsets.go
new file mode 100644
index 000000000..af836a5a4
--- /dev/null
+++ b/gopls/internal/lsp/source/methodsets/methodsets.go
@@ -0,0 +1,508 @@
+// Copyright 2023 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Package methodsets defines an incremental, serializable index of
+// method-set information that allows efficient 'implements' queries
+// across packages of the workspace without using the type checker.
+//
+// This package provides only the "global" (all workspace) search; the
+// "local" search within a given package uses a different
+// implementation based on type-checker data structures for a single
+// package plus variants; see ../implementation2.go.
+// The local algorithm is more precise as it tests function-local types too.
+//
+// A global index of function-local types is challenging since they
+// may reference other local types, for which we would need to invent
+// stable names, an unsolved problem described in passing in Go issue
+// 57497. The global algorithm also does not index anonymous interface
+// types, even outside function bodies.
+//
+// Consequently, global results are not symmetric: applying the
+// operation twice may not get you back where you started.
+package methodsets
+
+// DESIGN
+//
+// See https://go.dev/cl/452060 for a minimal exposition of the algorithm.
+//
+// For each method, we compute a fingerprint: a string representing
+// the method name and type such that equal fingerprint strings mean
+// identical method types.
+//
+// For efficiency, the fingerprint is reduced to a single bit
+// of a uint64, so that the method set can be represented as
+// the union of those method bits (a uint64 bitmask).
+// Assignability thus reduces to a subset check on bitmasks
+// followed by equality checks on fingerprints.
+//
+// In earlier experiments, using 128-bit masks instead of 64 reduced
+// the number of candidates by about 2x. Using (like a Bloom filter) a
+// different hash function to compute a second 64-bit mask and
+// performing a second mask test reduced it by about 4x.
+// Neither had much effect on the running time, presumably because a
+// single 64-bit mask is quite effective. See CL 452060 for details.
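+//
+// Illustratively, an interface whose mask is 0b0110 can be satisfied
+// only by a type whose mask has at least those bits set (e.g. 0b1110);
+// only for such candidates are the fingerprints themselves compared.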
+
+import (
+ "bytes"
+ "encoding/gob"
+ "fmt"
+ "go/token"
+ "go/types"
+ "hash/crc32"
+ "log"
+ "strconv"
+ "strings"
+
+ "golang.org/x/tools/go/types/objectpath"
+ "golang.org/x/tools/gopls/internal/lsp/safetoken"
+ "golang.org/x/tools/internal/typeparams"
+ "golang.org/x/tools/internal/typesinternal"
+)
+
+// An Index records the non-empty method sets of all package-level
+// types in a package in a form that permits assignability queries
+// without the type checker.
+type Index struct {
+ pkg gobPackage
+}
+
+// Decode decodes the given gob-encoded data as an Index.
+func Decode(data []byte) *Index {
+ var pkg gobPackage
+ mustDecode(data, &pkg)
+ return &Index{pkg}
+}
+
+// Encode encodes the receiver as gob-encoded data.
+func (index *Index) Encode() []byte {
+ return mustEncode(index.pkg)
+}
+
+func mustEncode(x interface{}) []byte {
+ var buf bytes.Buffer
+ if err := gob.NewEncoder(&buf).Encode(x); err != nil {
+ log.Fatalf("internal error encoding %T: %v", x, err)
+ }
+ return buf.Bytes()
+}
+
+func mustDecode(data []byte, ptr interface{}) {
+ if err := gob.NewDecoder(bytes.NewReader(data)).Decode(ptr); err != nil {
+ log.Fatalf("internal error decoding %T: %v", ptr, err)
+ }
+}
+
+// NewIndex returns a new index of method-set information for all
+// package-level types in the specified package.
+func NewIndex(fset *token.FileSet, pkg *types.Package) *Index {
+ return new(indexBuilder).build(fset, pkg)
+}
+
+// A Location records the extent of an identifier in byte-offset form.
+//
+// Conversion to protocol (UTF-16) form is done by the caller after a
+// search, not during index construction.
+type Location struct {
+ Filename string
+ Start, End int // byte offsets
+}
+
+// A Key represents the method set of a given type in a form suitable
+// to pass to the (*Index).Search method of many different Indexes.
+type Key struct {
+ mset gobMethodSet // note: lacks position information
+}
+
+// KeyOf returns the search key for the method sets of a given type.
+// It returns false if the type has no methods.
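+//
+// Typical use (illustrative):
+//
+//	if key, ok := KeyOf(t); ok {
+//		results := index.Search(key, "")
+//		// ... map results to locations ...
+//	}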
+func KeyOf(t types.Type) (Key, bool) {
+ mset := methodSetInfo(t, nil)
+ if mset.Mask == 0 {
+ return Key{}, false // no methods
+ }
+ return Key{mset}, true
+}
+
+// A Result reports a matching type or method in a method-set search.
+type Result struct {
+ Location Location // location of the type or method
+
+ // methods only:
+ PkgPath string // path of declaring package (may differ due to embedding)
+ ObjectPath objectpath.Path // path of method within declaring package
+}
+
+// Search reports each type that implements (or is implemented by) the
+// type that produced the search key. If methodID is nonempty, only
+// that method of each type is reported.
+//
+// The result does not include the error.Error method.
+// TODO(adonovan): give this special case a more systematic treatment.
+func (index *Index) Search(key Key, methodID string) []Result {
+ var results []Result
+ for _, candidate := range index.pkg.MethodSets {
+ // Traditionally this feature doesn't report
+ // interface/interface elements of the relation.
+ // I think that's a mistake.
+ // TODO(adonovan): UX: change it, here and in the local implementation.
+ if candidate.IsInterface && key.mset.IsInterface {
+ continue
+ }
+ if !satisfies(candidate, key.mset) && !satisfies(key.mset, candidate) {
+ continue
+ }
+
+ if candidate.Tricky {
+ // If any interface method is tricky then extra
+ // checking may be needed to eliminate a false positive.
+ // TODO(adonovan): implement it.
+ }
+
+ if methodID == "" {
+ results = append(results, Result{Location: index.location(candidate.Posn)})
+ } else {
+ for _, m := range candidate.Methods {
+ // Here we exploit knowledge of the shape of the fingerprint string.
+ if strings.HasPrefix(m.Fingerprint, methodID) &&
+ m.Fingerprint[len(methodID)] == '(' {
+
+ // Don't report error.Error among the results:
+ // it has no true source location, no package,
+ // and is excluded from the xrefs index.
+ if m.PkgPath == 0 || m.ObjectPath == 0 {
+ if methodID != "Error" {
+						panic("missing info for " + methodID)
+ }
+ continue
+ }
+
+ results = append(results, Result{
+ Location: index.location(m.Posn),
+ PkgPath: index.pkg.Strings[m.PkgPath],
+ ObjectPath: objectpath.Path(index.pkg.Strings[m.ObjectPath]),
+ })
+ break
+ }
+ }
+ }
+ }
+ return results
+}
+
+// satisfies does a fast check for whether x satisfies y.
+func satisfies(x, y gobMethodSet) bool {
+ return y.IsInterface && x.Mask&y.Mask == y.Mask && subset(y, x)
+}
+
+// subset reports whether method set x is a subset of y.
+func subset(x, y gobMethodSet) bool {
+outer:
+ for _, mx := range x.Methods {
+ for _, my := range y.Methods {
+ if mx.Sum == my.Sum && mx.Fingerprint == my.Fingerprint {
+ continue outer // found; try next x method
+ }
+ }
+ return false // method of x not found in y
+ }
+ return true // all methods of x found in y
+}
+
+func (index *Index) location(posn gobPosition) Location {
+ return Location{
+ Filename: index.pkg.Strings[posn.File],
+ Start: posn.Offset,
+ End: posn.Offset + posn.Len,
+ }
+}
+
+// An indexBuilder builds an index for a single package.
+type indexBuilder struct {
+ gobPackage
+ stringIndex map[string]int
+}
+
+// build adds to the index all package-level named types of the specified package.
+func (b *indexBuilder) build(fset *token.FileSet, pkg *types.Package) *Index {
+ _ = b.string("") // 0 => ""
+
+ objectPos := func(obj types.Object) gobPosition {
+ posn := safetoken.StartPosition(fset, obj.Pos())
+ return gobPosition{b.string(posn.Filename), posn.Offset, len(obj.Name())}
+ }
+
+ objectpathFor := typesinternal.NewObjectpathFunc()
+
+	// setIndexInfo sets the (Posn, PkgPath, ObjectPath) fields for each method declaration.
+ setIndexInfo := func(m *gobMethod, method *types.Func) {
+ // error.Error has empty Position, PkgPath, and ObjectPath.
+ if method.Pkg() == nil {
+ return
+ }
+
+ m.Posn = objectPos(method)
+ m.PkgPath = b.string(method.Pkg().Path())
+
+ // Instantiations of generic methods don't have an
+ // object path, so we use the generic.
+ if p, err := objectpathFor(typeparams.OriginMethod(method)); err != nil {
+ panic(err) // can't happen for a method of a package-level type
+ } else {
+ m.ObjectPath = b.string(string(p))
+ }
+ }
+
+ // We ignore aliases, though in principle they could define a
+ // struct{...} or interface{...} type, or an instantiation of
+ // a generic, that has a novel method set.
+ scope := pkg.Scope()
+ for _, name := range scope.Names() {
+ if tname, ok := scope.Lookup(name).(*types.TypeName); ok && !tname.IsAlias() {
+ if mset := methodSetInfo(tname.Type(), setIndexInfo); mset.Mask != 0 {
+ mset.Posn = objectPos(tname)
+ // Only record types with non-trivial method sets.
+ b.MethodSets = append(b.MethodSets, mset)
+ }
+ }
+ }
+
+ return &Index{pkg: b.gobPackage}
+}
+
+// string returns a small integer that encodes the string.
+func (b *indexBuilder) string(s string) int {
+ i, ok := b.stringIndex[s]
+ if !ok {
+ i = len(b.Strings)
+ if b.stringIndex == nil {
+ b.stringIndex = make(map[string]int)
+ }
+ b.stringIndex[s] = i
+ b.Strings = append(b.Strings, s)
+ }
+ return i
+}
+
+// methodSetInfo returns the method-set fingerprint of a type.
+// It calls the optional setIndexInfo function for each gobMethod.
+// This is used during index construction, but not search (KeyOf),
+// to store extra information.
+func methodSetInfo(t types.Type, setIndexInfo func(*gobMethod, *types.Func)) gobMethodSet {
+ // For non-interface types, use *T
+ // (if T is not already a pointer)
+ // since it may have more methods.
+ mset := types.NewMethodSet(EnsurePointer(t))
+
+ // Convert the method set into a compact summary.
+ var mask uint64
+ tricky := false
+ methods := make([]gobMethod, mset.Len())
+ for i := 0; i < mset.Len(); i++ {
+ m := mset.At(i).Obj().(*types.Func)
+ fp, isTricky := fingerprint(m)
+ if isTricky {
+ tricky = true
+ }
+ sum := crc32.ChecksumIEEE([]byte(fp))
+ methods[i] = gobMethod{Fingerprint: fp, Sum: sum}
+ if setIndexInfo != nil {
+ setIndexInfo(&methods[i], m) // set Position, PkgPath, ObjectPath
+ }
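+		// XOR-fold the checksum into 6 bits, selecting 1 of 64 mask bits.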
+ mask |= 1 << uint64(((sum>>24)^(sum>>16)^(sum>>8)^sum)&0x3f)
+ }
+ return gobMethodSet{
+ IsInterface: types.IsInterface(t),
+ Tricky: tricky,
+ Mask: mask,
+ Methods: methods,
+ }
+}
+
+// EnsurePointer wraps T in a types.Pointer if T is a named, non-interface type.
+// This is useful to make sure you consider a named type's full method set.
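+//
+// For example (illustrative): for a named struct type T it returns *T,
+// whose method set includes both value- and pointer-receiver methods;
+// interface and unnamed types are returned unchanged.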
+func EnsurePointer(T types.Type) types.Type {
+ if _, ok := T.(*types.Named); ok && !types.IsInterface(T) {
+ return types.NewPointer(T)
+ }
+
+ return T
+}
+
+// fingerprint returns an encoding of a method signature such that two
+// methods with equal encodings have identical types, except for a few
+// tricky types whose encodings may spuriously match and whose exact
+// identity computation requires the type checker to eliminate false
+// positives (which are rare). The boolean result indicates whether
+// the result was one of these tricky types.
+//
+// In the standard library, 99.8% of package-level types have a
+// non-tricky method-set. The most common exceptions are due to type
+// parameters.
+//
+// The fingerprint string starts with method.Id() + "(".
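+//
+// For example (illustrative): the method "func (T) Get(k string) (int, bool)"
+// has fingerprint "Get(string)(int,bool)".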
+func fingerprint(method *types.Func) (string, bool) {
+ var buf strings.Builder
+ tricky := false
+ var fprint func(t types.Type)
+ fprint = func(t types.Type) {
+ switch t := t.(type) {
+ case *types.Named:
+ tname := t.Obj()
+ if tname.Pkg() != nil {
+ buf.WriteString(strconv.Quote(tname.Pkg().Path()))
+ buf.WriteByte('.')
+ } else if tname.Name() != "error" {
+ panic(tname) // error is the only named type with no package
+ }
+ buf.WriteString(tname.Name())
+
+ case *types.Array:
+ fmt.Fprintf(&buf, "[%d]", t.Len())
+ fprint(t.Elem())
+
+ case *types.Slice:
+ buf.WriteString("[]")
+ fprint(t.Elem())
+
+ case *types.Pointer:
+ buf.WriteByte('*')
+ fprint(t.Elem())
+
+ case *types.Map:
+ buf.WriteString("map[")
+ fprint(t.Key())
+ buf.WriteByte(']')
+ fprint(t.Elem())
+
+ case *types.Chan:
+ switch t.Dir() {
+ case types.SendRecv:
+ buf.WriteString("chan ")
+ case types.SendOnly:
+ buf.WriteString("<-chan ")
+ case types.RecvOnly:
+ buf.WriteString("chan<- ")
+ }
+ fprint(t.Elem())
+
+ case *types.Tuple:
+ buf.WriteByte('(')
+ for i := 0; i < t.Len(); i++ {
+ if i > 0 {
+ buf.WriteByte(',')
+ }
+ fprint(t.At(i).Type())
+ }
+ buf.WriteByte(')')
+
+ case *types.Basic:
+ // Use canonical names for uint8 and int32 aliases.
+ switch t.Kind() {
+ case types.Byte:
+ buf.WriteString("byte")
+ case types.Rune:
+ buf.WriteString("rune")
+ default:
+ buf.WriteString(t.String())
+ }
+
+ case *types.Signature:
+ buf.WriteString("func")
+ fprint(t.Params())
+ if t.Variadic() {
+ buf.WriteString("...") // not quite Go syntax
+ }
+ fprint(t.Results())
+
+ case *types.Struct:
+ // Non-empty unnamed struct types in method
+ // signatures are vanishingly rare.
+ buf.WriteString("struct{")
+ for i := 0; i < t.NumFields(); i++ {
+ if i > 0 {
+ buf.WriteByte(';')
+ }
+ f := t.Field(i)
+ // This isn't quite right for embedded type aliases.
+ // (See types.TypeString(StructType) and #44410 for context.)
+ // But this is vanishingly rare.
+ if !f.Embedded() {
+ buf.WriteString(f.Id())
+ buf.WriteByte(' ')
+ }
+ fprint(f.Type())
+ if tag := t.Tag(i); tag != "" {
+ buf.WriteByte(' ')
+ buf.WriteString(strconv.Quote(tag))
+ }
+ }
+ buf.WriteString("}")
+
+ case *types.Interface:
+ if t.NumMethods() == 0 {
+ buf.WriteString("any") // common case
+ } else {
+ // Interface assignability is particularly
+ // tricky due to the possibility of recursion.
+ tricky = true
+ // We could still give more disambiguating precision
+ // than "..." if we wanted to.
+ buf.WriteString("interface{...}")
+ }
+
+ case *typeparams.TypeParam:
+ tricky = true
+ // TODO(adonovan): refine this by adding a numeric suffix
+ // indicating the index among the receiver type's parameters.
+ buf.WriteByte('?')
+
+ default: // incl. *types.Union
+ panic(t)
+ }
+ }
+
+ buf.WriteString(method.Id()) // e.g. "pkg.Type"
+ sig := method.Type().(*types.Signature)
+ fprint(sig.Params())
+ fprint(sig.Results())
+ return buf.String(), tricky
+}
+
+// -- serial format of index --
+
+// The cost of gob encoding and decoding for most packages in x/tools
+// is under 50us, with occasional peaks of around 1-3ms.
+// The encoded indexes are around 1KB-50KB.
+
+// A gobPackage records the method set of each package-level type for a single package.
+type gobPackage struct {
+ Strings []string // index of strings used by gobPosition.File, gobMethod.{Pkg,Object}Path
+ MethodSets []gobMethodSet
+}
+
+// A gobMethodSet records the method set of a single type.
+type gobMethodSet struct {
+ Posn gobPosition
+ IsInterface bool
+ Tricky bool // at least one method is tricky; assignability requires go/types
+ Mask uint64 // mask with 1 bit from each of methods[*].sum
+ Methods []gobMethod
+}
+
+// A gobMethod records the name, type, and position of a single method.
+type gobMethod struct {
+ Fingerprint string // string of form "methodID(params...)(results)"
+ Sum uint32 // checksum of fingerprint
+
+	// The following fields are set in index records only (zero in KeyOf;
+	// also zero for the index entry of error.Error).
+ Posn gobPosition // location of method declaration
+ PkgPath int // path of package containing method declaration
+ ObjectPath int // object path of method relative to PkgPath
+}
+
+// A gobPosition records the file, offset, and length of an identifier.
+type gobPosition struct {
+ File int // index into gobPackage.Strings
+ Offset, Len int // in bytes
+}
diff --git a/gopls/internal/lsp/source/options.go b/gopls/internal/lsp/source/options.go
new file mode 100644
index 000000000..a4ae51a47
--- /dev/null
+++ b/gopls/internal/lsp/source/options.go
@@ -0,0 +1,1631 @@
+// Copyright 2019 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package source
+
+import (
+ "context"
+ "fmt"
+ "io"
+ "path/filepath"
+ "regexp"
+ "runtime"
+ "strings"
+ "sync"
+ "time"
+
+ "golang.org/x/tools/go/analysis"
+ "golang.org/x/tools/go/analysis/passes/asmdecl"
+ "golang.org/x/tools/go/analysis/passes/assign"
+ "golang.org/x/tools/go/analysis/passes/atomic"
+ "golang.org/x/tools/go/analysis/passes/atomicalign"
+ "golang.org/x/tools/go/analysis/passes/bools"
+ "golang.org/x/tools/go/analysis/passes/buildtag"
+ "golang.org/x/tools/go/analysis/passes/cgocall"
+ "golang.org/x/tools/go/analysis/passes/composite"
+ "golang.org/x/tools/go/analysis/passes/copylock"
+ "golang.org/x/tools/go/analysis/passes/deepequalerrors"
+ "golang.org/x/tools/go/analysis/passes/directive"
+ "golang.org/x/tools/go/analysis/passes/errorsas"
+ "golang.org/x/tools/go/analysis/passes/fieldalignment"
+ "golang.org/x/tools/go/analysis/passes/httpresponse"
+ "golang.org/x/tools/go/analysis/passes/ifaceassert"
+ "golang.org/x/tools/go/analysis/passes/loopclosure"
+ "golang.org/x/tools/go/analysis/passes/lostcancel"
+ "golang.org/x/tools/go/analysis/passes/nilfunc"
+ "golang.org/x/tools/go/analysis/passes/nilness"
+ "golang.org/x/tools/go/analysis/passes/printf"
+ "golang.org/x/tools/go/analysis/passes/shadow"
+ "golang.org/x/tools/go/analysis/passes/shift"
+ "golang.org/x/tools/go/analysis/passes/sortslice"
+ "golang.org/x/tools/go/analysis/passes/stdmethods"
+ "golang.org/x/tools/go/analysis/passes/stringintconv"
+ "golang.org/x/tools/go/analysis/passes/structtag"
+ "golang.org/x/tools/go/analysis/passes/testinggoroutine"
+ "golang.org/x/tools/go/analysis/passes/tests"
+ "golang.org/x/tools/go/analysis/passes/timeformat"
+ "golang.org/x/tools/go/analysis/passes/unmarshal"
+ "golang.org/x/tools/go/analysis/passes/unreachable"
+ "golang.org/x/tools/go/analysis/passes/unsafeptr"
+ "golang.org/x/tools/go/analysis/passes/unusedresult"
+ "golang.org/x/tools/go/analysis/passes/unusedwrite"
+ "golang.org/x/tools/gopls/internal/lsp/analysis/embeddirective"
+ "golang.org/x/tools/gopls/internal/lsp/analysis/fillreturns"
+ "golang.org/x/tools/gopls/internal/lsp/analysis/fillstruct"
+ "golang.org/x/tools/gopls/internal/lsp/analysis/infertypeargs"
+ "golang.org/x/tools/gopls/internal/lsp/analysis/nonewvars"
+ "golang.org/x/tools/gopls/internal/lsp/analysis/noresultvalues"
+ "golang.org/x/tools/gopls/internal/lsp/analysis/simplifycompositelit"
+ "golang.org/x/tools/gopls/internal/lsp/analysis/simplifyrange"
+ "golang.org/x/tools/gopls/internal/lsp/analysis/simplifyslice"
+ "golang.org/x/tools/gopls/internal/lsp/analysis/stubmethods"
+ "golang.org/x/tools/gopls/internal/lsp/analysis/undeclaredname"
+ "golang.org/x/tools/gopls/internal/lsp/analysis/unusedparams"
+ "golang.org/x/tools/gopls/internal/lsp/analysis/unusedvariable"
+ "golang.org/x/tools/gopls/internal/lsp/analysis/useany"
+ "golang.org/x/tools/gopls/internal/lsp/command"
+ "golang.org/x/tools/gopls/internal/lsp/protocol"
+ "golang.org/x/tools/internal/diff"
+ "golang.org/x/tools/internal/diff/myers"
+)
+
+var (
+ optionsOnce sync.Once
+ defaultOptions *Options
+)
+
+// DefaultOptions returns the options used for Gopls execution independent
+// of any externally provided configuration (LSP initialization, command
+// invocation, etc.).
+func DefaultOptions() *Options {
+ optionsOnce.Do(func() {
+ var commands []string
+ for _, c := range command.Commands {
+ commands = append(commands, c.ID())
+ }
+ defaultOptions = &Options{
+ ClientOptions: ClientOptions{
+ InsertTextFormat: protocol.PlainTextTextFormat,
+ PreferredContentFormat: protocol.Markdown,
+ ConfigurationSupported: true,
+ DynamicConfigurationSupported: true,
+ DynamicRegistrationSemanticTokensSupported: true,
+ DynamicWatchedFilesSupported: true,
+ LineFoldingOnly: false,
+ HierarchicalDocumentSymbolSupport: true,
+ },
+ ServerOptions: ServerOptions{
+ SupportedCodeActions: map[FileKind]map[protocol.CodeActionKind]bool{
+ Go: {
+ protocol.SourceFixAll: true,
+ protocol.SourceOrganizeImports: true,
+ protocol.QuickFix: true,
+ protocol.RefactorRewrite: true,
+ protocol.RefactorExtract: true,
+ },
+ Mod: {
+ protocol.SourceOrganizeImports: true,
+ protocol.QuickFix: true,
+ },
+ Work: {},
+ Sum: {},
+ Tmpl: {},
+ },
+ SupportedCommands: commands,
+ },
+ UserOptions: UserOptions{
+ BuildOptions: BuildOptions{
+ ExpandWorkspaceToModule: true,
+ MemoryMode: ModeNormal,
+ DirectoryFilters: []string{"-**/node_modules"},
+ TemplateExtensions: []string{},
+ StandaloneTags: []string{"ignore"},
+ },
+ UIOptions: UIOptions{
+ DiagnosticOptions: DiagnosticOptions{
+ DiagnosticsDelay: 250 * time.Millisecond,
+ Annotations: map[Annotation]bool{
+ Bounds: true,
+ Escape: true,
+ Inline: true,
+ Nil: true,
+ },
+ Vulncheck: ModeVulncheckOff,
+ },
+ InlayHintOptions: InlayHintOptions{},
+ DocumentationOptions: DocumentationOptions{
+ HoverKind: FullDocumentation,
+ LinkTarget: "pkg.go.dev",
+ LinksInHover: true,
+ },
+ NavigationOptions: NavigationOptions{
+ ImportShortcut: BothShortcuts,
+ SymbolMatcher: SymbolFastFuzzy,
+ SymbolStyle: DynamicSymbols,
+ },
+ CompletionOptions: CompletionOptions{
+ Matcher: Fuzzy,
+ CompletionBudget: 100 * time.Millisecond,
+ ExperimentalPostfixCompletions: true,
+ },
+ Codelenses: map[string]bool{
+ string(command.Generate): true,
+ string(command.RegenerateCgo): true,
+ string(command.Tidy): true,
+ string(command.GCDetails): false,
+ string(command.UpgradeDependency): true,
+ string(command.Vendor): true,
+ // TODO(hyangah): enable command.RunGovulncheck.
+ },
+ },
+ },
+ InternalOptions: InternalOptions{
+ LiteralCompletions: true,
+ TempModfile: true,
+ CompleteUnimported: true,
+ CompletionDocumentation: true,
+ DeepCompletion: true,
+ ChattyDiagnostics: true,
+ NewDiff: "both",
+ },
+ Hooks: Hooks{
+ // TODO(adonovan): switch to new diff.Strings implementation.
+ ComputeEdits: myers.ComputeEdits,
+ URLRegexp: urlRegexp(),
+ DefaultAnalyzers: defaultAnalyzers(),
+ TypeErrorAnalyzers: typeErrorAnalyzers(),
+ ConvenienceAnalyzers: convenienceAnalyzers(),
+ StaticcheckAnalyzers: map[string]*Analyzer{},
+ GoDiff: true,
+ },
+ }
+ })
+ return defaultOptions
+}
+
+// Options holds various configuration that affects Gopls execution, organized
+// by the nature or origin of the settings.
+type Options struct {
+ ClientOptions
+ ServerOptions
+ UserOptions
+ InternalOptions
+ Hooks
+}
+
+// ClientOptions holds LSP-specific configuration that is provided by the
+// client.
+type ClientOptions struct {
+ InsertTextFormat protocol.InsertTextFormat
+ ConfigurationSupported bool
+ DynamicConfigurationSupported bool
+ DynamicRegistrationSemanticTokensSupported bool
+ DynamicWatchedFilesSupported bool
+ PreferredContentFormat protocol.MarkupKind
+ LineFoldingOnly bool
+ HierarchicalDocumentSymbolSupport bool
+ SemanticTypes []string
+ SemanticMods []string
+ RelatedInformationSupported bool
+ CompletionTags bool
+ CompletionDeprecated bool
+ SupportedResourceOperations []protocol.ResourceOperationKind
+}
+
+// ServerOptions holds LSP-specific configuration that is provided by the
+// server.
+type ServerOptions struct {
+ SupportedCodeActions map[FileKind]map[protocol.CodeActionKind]bool
+ SupportedCommands []string
+}
+
+type BuildOptions struct {
+ // BuildFlags is the set of flags passed on to the build system when invoked.
+ // It is applied to queries like `go list`, which is used when discovering files.
+ // The most common use is to set `-tags`.
+ BuildFlags []string
+
+ // Env adds environment variables to external commands run by `gopls`, most notably `go list`.
+ Env map[string]string
+
+ // DirectoryFilters can be used to exclude unwanted directories from the
+ // workspace. By default, all directories are included. Filters are an
+ // operator, `+` to include and `-` to exclude, followed by a path prefix
+ // relative to the workspace folder. They are evaluated in order, and
+ // the last filter that applies to a path controls whether it is included.
+ // The path prefix can be empty, so an initial `-` excludes everything.
+ //
+ // DirectoryFilters also supports the `**` operator to match 0 or more directories.
+ //
+ // Examples:
+ //
+ // Exclude node_modules at current depth: `-node_modules`
+ //
+ // Exclude node_modules at any depth: `-**/node_modules`
+ //
+ // Include only project_a: `-` (exclude everything), `+project_a`
+ //
+ // Include only project_a, but not node_modules inside it: `-`, `+project_a`, `-project_a/node_modules`
+ DirectoryFilters []string
+
+	// TemplateExtensions gives the extensions of file names that are treated
+	// as template files. (The extension is the part of the file name after
+	// the final dot.)
+ TemplateExtensions []string
+
+ // MemoryMode controls the tradeoff `gopls` makes between memory usage and
+ // correctness.
+ //
+ // Values other than `Normal` are untested and may break in surprising ways.
+ MemoryMode MemoryMode `status:"experimental"`
+
+ // ExpandWorkspaceToModule instructs `gopls` to adjust the scope of the
+ // workspace to find the best available module root. `gopls` first looks for
+ // a go.mod file in any parent directory of the workspace folder, expanding
+ // the scope to that directory if it exists. If no viable parent directory is
+ // found, gopls will check if there is exactly one child directory containing
+ // a go.mod file, narrowing the scope to that directory if it exists.
+ ExpandWorkspaceToModule bool `status:"experimental"`
+
+ // AllowModfileModifications disables -mod=readonly, allowing imports from
+ // out-of-scope modules. This option will eventually be removed.
+ AllowModfileModifications bool `status:"experimental"`
+
+ // AllowImplicitNetworkAccess disables GOPROXY=off, allowing implicit module
+ // downloads rather than requiring user action. This option will eventually
+ // be removed.
+ AllowImplicitNetworkAccess bool `status:"experimental"`
+
+ // StandaloneTags specifies a set of build constraints that identify
+ // individual Go source files that make up the entire main package of an
+ // executable.
+ //
+ // A common example of standalone main files is the convention of using the
+ // directive `//go:build ignore` to denote files that are not intended to be
+ // included in any package, for example because they are invoked directly by
+ // the developer using `go run`.
+ //
+ // Gopls considers a file to be a standalone main file if and only if it has
+ // package name "main" and has a build directive of the exact form
+ // "//go:build tag" or "// +build tag", where tag is among the list of tags
+ // configured by this setting. Notably, if the build constraint is more
+ // complicated than a simple tag (such as the composite constraint
+ // `//go:build tag && go1.18`), the file is not considered to be a standalone
+ // main file.
+ //
+ // This setting is only supported when gopls is built with Go 1.16 or later.
+ StandaloneTags []string
+}
+
+type UIOptions struct {
+ DocumentationOptions
+ CompletionOptions
+ NavigationOptions
+ DiagnosticOptions
+ InlayHintOptions
+
+ // Codelenses overrides the enabled/disabled state of code lenses. See the
+ // "Code Lenses" section of the
+ // [Settings page](https://github.com/golang/tools/blob/master/gopls/doc/settings.md#code-lenses)
+ // for the list of supported lenses.
+ //
+ // Example Usage:
+ //
+ // ```json5
+ // "gopls": {
+ // ...
+ // "codelenses": {
+ // "generate": false, // Don't show the `go generate` lens.
+ // "gc_details": true // Show a code lens toggling the display of gc's choices.
+ // }
+ // ...
+ // }
+ // ```
+ Codelenses map[string]bool
+
+ // SemanticTokens controls whether the LSP server will send
+ // semantic tokens to the client.
+ SemanticTokens bool `status:"experimental"`
+
+	// NoSemanticString turns off the sending of the semantic token 'string'.
+	NoSemanticString bool `status:"experimental"`
+
+	// NoSemanticNumber turns off the sending of the semantic token 'number'.
+	NoSemanticNumber bool `status:"experimental"`
+}
+
+type CompletionOptions struct {
+ // Placeholders enables placeholders for function parameters or struct
+ // fields in completion responses.
+ UsePlaceholders bool
+
+ // CompletionBudget is the soft latency goal for completion requests. Most
+ // requests finish in a couple milliseconds, but in some cases deep
+ // completions can take much longer. As we use up our budget we
+ // dynamically reduce the search scope to ensure we return timely
+ // results. Zero means unlimited.
+ CompletionBudget time.Duration `status:"debug"`
+
+ // Matcher sets the algorithm that is used when calculating completion
+ // candidates.
+ Matcher Matcher `status:"advanced"`
+
+ // ExperimentalPostfixCompletions enables artificial method snippets
+ // such as "someSlice.sort!".
+ ExperimentalPostfixCompletions bool `status:"experimental"`
+}
+
+type DocumentationOptions struct {
+ // HoverKind controls the information that appears in the hover text.
+ // SingleLine and Structured are intended for use only by authors of editor plugins.
+ HoverKind HoverKind
+
+ // LinkTarget controls where documentation links go.
+ // It might be one of:
+ //
+ // * `"godoc.org"`
+ // * `"pkg.go.dev"`
+ //
+	// If a company runs its own instance of `godoc.org`, its address can be used as well.
+ //
+ // Modules matching the GOPRIVATE environment variable will not have
+ // documentation links in hover.
+ LinkTarget string
+
+ // LinksInHover toggles the presence of links to documentation in hover.
+ LinksInHover bool
+}
+
+type FormattingOptions struct {
+ // Local is the equivalent of the `goimports -local` flag, which puts
+ // imports beginning with this string after third-party packages. It should
+ // be the prefix of the import path whose imports should be grouped
+ // separately.
+ Local string
+
+ // Gofumpt indicates if we should run gofumpt formatting.
+ Gofumpt bool
+}
+
+type DiagnosticOptions struct {
+ // Analyses specify analyses that the user would like to enable or disable.
+ // A map of the names of analysis passes that should be enabled/disabled.
+ // A full list of analyzers that gopls uses can be found in
+ // [analyzers.md](https://github.com/golang/tools/blob/master/gopls/doc/analyzers.md).
+ //
+ // Example Usage:
+ //
+ // ```json5
+ // ...
+ // "analyses": {
+ // "unreachable": false, // Disable the unreachable analyzer.
+ // "unusedparams": true // Enable the unusedparams analyzer.
+ // }
+ // ...
+ // ```
+ Analyses map[string]bool
+
+ // Staticcheck enables additional analyses from staticcheck.io.
+ // These analyses are documented on
+ // [Staticcheck's website](https://staticcheck.io/docs/checks/).
+ Staticcheck bool `status:"experimental"`
+
+ // Annotations specifies the various kinds of optimization diagnostics
+ // that should be reported by the gc_details command.
+ Annotations map[Annotation]bool `status:"experimental"`
+
+ // Vulncheck enables vulnerability scanning.
+ Vulncheck VulncheckMode `status:"experimental"`
+
+ // DiagnosticsDelay controls the amount of time that gopls waits
+ // after the most recent file modification before computing deep diagnostics.
+ // Simple diagnostics (parsing and type-checking) are always run immediately
+ // on recently modified packages.
+ //
+ // This option must be set to a valid duration string, for example `"250ms"`.
+ DiagnosticsDelay time.Duration `status:"advanced"`
+}
+
+type InlayHintOptions struct {
+ // Hints specify inlay hints that users want to see. A full list of hints
+ // that gopls uses can be found in
+ // [inlayHints.md](https://github.com/golang/tools/blob/master/gopls/doc/inlayHints.md).
+ Hints map[string]bool `status:"experimental"`
+}
+
+type NavigationOptions struct {
+ // ImportShortcut specifies whether import statements should link to
+ // documentation or go to definitions.
+ ImportShortcut ImportShortcut
+
+ // SymbolMatcher sets the algorithm that is used when finding workspace symbols.
+ SymbolMatcher SymbolMatcher `status:"advanced"`
+
+ // SymbolStyle controls how symbols are qualified in symbol responses.
+ //
+ // Example Usage:
+ //
+ // ```json5
+ // "gopls": {
+ // ...
+ // "symbolStyle": "Dynamic",
+ // ...
+ // }
+ // ```
+ SymbolStyle SymbolStyle `status:"advanced"`
+}
+
+// UserOptions holds custom Gopls configuration (not part of the LSP) that is
+// modified by the client.
+type UserOptions struct {
+ BuildOptions
+ UIOptions
+ FormattingOptions
+
+ // VerboseOutput enables additional debug logging.
+ VerboseOutput bool `status:"debug"`
+}
+
+// EnvSlice returns Env as a slice of k=v strings.
+func (u *UserOptions) EnvSlice() []string {
+ var result []string
+ for k, v := range u.Env {
+ result = append(result, fmt.Sprintf("%v=%v", k, v))
+ }
+ return result
+}
+
+// SetEnvSlice sets Env from a slice of k=v strings.
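+// For example, []string{"GOFLAGS=-mod=mod"} sets Env["GOFLAGS"] to
+// "-mod=mod"; entries without an '=' are ignored.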
+func (u *UserOptions) SetEnvSlice(env []string) {
+ u.Env = map[string]string{}
+ for _, kv := range env {
+ split := strings.SplitN(kv, "=", 2)
+ if len(split) != 2 {
+ continue
+ }
+ u.Env[split[0]] = split[1]
+ }
+}
+
+// DiffFunction is the type for a function that produces a set of edits that
+// convert from the before content to the after content.
+type DiffFunction func(before, after string) []diff.Edit
+
+// Hooks contains configuration that is provided to the Gopls command by the
+// main package.
+type Hooks struct {
+ // LicensesText holds third party licenses for software used by gopls.
+ LicensesText string
+
+ // GoDiff is used in gopls/hooks to get Myers' diff
+ GoDiff bool
+
+ // Whether staticcheck is supported.
+ StaticcheckSupported bool
+
+ // ComputeEdits is used to compute edits between file versions.
+ ComputeEdits DiffFunction
+
+ // URLRegexp is used to find potential URLs in comments/strings.
+ //
+ // Not all matches are shown to the user: if the matched URL is not detected
+ // as valid, it will be skipped.
+ URLRegexp *regexp.Regexp
+
+ // GofumptFormat allows the gopls module to wire-in a call to
+ // gofumpt/format.Source. langVersion and modulePath are used for some
+ // Gofumpt formatting rules -- see the Gofumpt documentation for details.
+ GofumptFormat func(ctx context.Context, langVersion, modulePath string, src []byte) ([]byte, error)
+
+ DefaultAnalyzers map[string]*Analyzer
+ TypeErrorAnalyzers map[string]*Analyzer
+ ConvenienceAnalyzers map[string]*Analyzer
+ StaticcheckAnalyzers map[string]*Analyzer
+}
+
+// InternalOptions contains settings that are not intended for use by the
+// average user. These may be settings used by tests or outdated settings that
+// will soon be deprecated. Some of these settings may not even be configurable
+// by the user.
+type InternalOptions struct {
+ // LiteralCompletions controls whether literal candidates such as
+ // "&someStruct{}" are offered. Tests disable this flag to simplify
+ // their expected values.
+ LiteralCompletions bool
+
+ // VerboseWorkDoneProgress controls whether the LSP server should send
+ // progress reports for all work done outside the scope of an RPC.
+ // Used by the regression tests.
+ VerboseWorkDoneProgress bool
+
+ // The following options were previously available to users, but they
+ // really shouldn't be configured by anyone other than "power users".
+
+ // CompletionDocumentation enables documentation with completion results.
+ CompletionDocumentation bool
+
+ // CompleteUnimported enables completion for packages that you do not
+ // currently import.
+ CompleteUnimported bool
+
+ // DeepCompletion enables the ability to return completions from deep
+ // inside relevant entities, rather than just the locally accessible ones.
+ //
+ // Consider this example:
+ //
+ // ```go
+ // package main
+ //
+ // import "fmt"
+ //
+ // type wrapString struct {
+ // str string
+ // }
+ //
+ // func main() {
+ // x := wrapString{"hello world"}
+ // fmt.Printf(<>)
+ // }
+ // ```
+ //
+ // At the location of the `<>` in this program, deep completion would suggest
+ // the result `x.str`.
+ DeepCompletion bool
+
+ // TempModfile controls the use of the -modfile flag in Go 1.14.
+ TempModfile bool
+
+ // ShowBugReports causes a message to be shown when the first bug is reported
+ // on the server.
+ // This option applies only during initialization.
+ ShowBugReports bool
+
+	// NewDiff controls the choice of diff implementation. It can be 'new',
+	// 'old', or 'both', which is the default. 'both' computes diffs with
+	// both algorithms, checks that the new algorithm has worked, and writes
+	// some summary statistics to a file in os.TempDir().
+	NewDiff string
+
+ // ChattyDiagnostics controls whether to report file diagnostics for each
+ // file change. If unset, gopls only reports diagnostics when they change, or
+ // when a file is opened or closed.
+ ChattyDiagnostics bool
+}
+
+type ImportShortcut string
+
+const (
+ BothShortcuts ImportShortcut = "Both"
+ LinkShortcut ImportShortcut = "Link"
+ DefinitionShortcut ImportShortcut = "Definition"
+)
+
+func (s ImportShortcut) ShowLinks() bool {
+ return s == BothShortcuts || s == LinkShortcut
+}
+
+func (s ImportShortcut) ShowDefinition() bool {
+ return s == BothShortcuts || s == DefinitionShortcut
+}
+
+type Matcher string
+
+const (
+ Fuzzy Matcher = "Fuzzy"
+ CaseInsensitive Matcher = "CaseInsensitive"
+ CaseSensitive Matcher = "CaseSensitive"
+)
+
+type SymbolMatcher string
+
+const (
+ SymbolFuzzy SymbolMatcher = "Fuzzy"
+ SymbolFastFuzzy SymbolMatcher = "FastFuzzy"
+ SymbolCaseInsensitive SymbolMatcher = "CaseInsensitive"
+ SymbolCaseSensitive SymbolMatcher = "CaseSensitive"
+)
+
+type SymbolStyle string
+
+const (
+	// PackageQualifiedSymbols is package qualified symbols, i.e.
+	// "pkg.Foo.Field".
+	PackageQualifiedSymbols SymbolStyle = "Package"
+	// FullyQualifiedSymbols is fully qualified symbols, i.e.
+	// "path/to/pkg.Foo.Field".
+	FullyQualifiedSymbols SymbolStyle = "Full"
+	// DynamicSymbols uses whichever qualifier results in the highest scoring
+	// match for the given symbol query. Here a "qualifier" is any "/" or "."
+	// delimited suffix of the fully qualified symbol, e.g. "to/pkg.Foo.Field"
+	// or just "Foo.Field".
+	DynamicSymbols SymbolStyle = "Dynamic"
+)
+
+type HoverKind string
+
+const (
+ SingleLine HoverKind = "SingleLine"
+ NoDocumentation HoverKind = "NoDocumentation"
+ SynopsisDocumentation HoverKind = "SynopsisDocumentation"
+ FullDocumentation HoverKind = "FullDocumentation"
+
+ // Structured is an experimental setting that returns a structured hover format.
+ // This format separates the signature from the documentation, so that the client
+ // can do more manipulation of these fields.
+ //
+ // This should only be used by clients that support this behavior.
+ Structured HoverKind = "Structured"
+)
+
+type MemoryMode string
+
+const (
+ ModeNormal MemoryMode = "Normal"
+ // In DegradeClosed mode, `gopls` will collect less information about
+ // packages without open files. As a result, features like Find
+ // References and Rename will miss results in such packages.
+ ModeDegradeClosed MemoryMode = "DegradeClosed"
+)
+
+type VulncheckMode string
+
+const (
+ // Disable vulnerability analysis.
+ ModeVulncheckOff VulncheckMode = "Off"
+ // In Imports mode, `gopls` will report vulnerabilities that affect packages
+ // directly and indirectly used by the analyzed main module.
+ ModeVulncheckImports VulncheckMode = "Imports"
+
+ // TODO: VulncheckRequire, VulncheckCallgraph
+)
+
+type OptionResults []OptionResult
+
+type OptionResult struct {
+ Name string
+ Value interface{}
+ Error error
+}
+
+func SetOptions(options *Options, opts interface{}) OptionResults {
+ var results OptionResults
+ switch opts := opts.(type) {
+ case nil:
+ case map[string]interface{}:
+ // If the user's settings contains "allExperiments", set that first,
+ // and then let them override individual settings independently.
+ var enableExperiments bool
+ for name, value := range opts {
+ if b, ok := value.(bool); name == "allExperiments" && ok && b {
+ enableExperiments = true
+ options.EnableAllExperiments()
+ }
+ }
+ seen := map[string]struct{}{}
+ for name, value := range opts {
+ results = append(results, options.set(name, value, seen))
+ }
+ // Finally, enable any experimental features that are specified in
+ // maps, which allows users to individually toggle them on or off.
+ if enableExperiments {
+ options.enableAllExperimentMaps()
+ }
+ default:
+ results = append(results, OptionResult{
+ Value: opts,
+ Error: fmt.Errorf("Invalid options type %T", opts),
+ })
+ }
+ return results
+}
+
+func (o *Options) ForClientCapabilities(caps protocol.ClientCapabilities) {
+	if caps.Workspace.WorkspaceEdit != nil {
+		o.SupportedResourceOperations = caps.Workspace.WorkspaceEdit.ResourceOperations
+	}
+	// Check if the client supports snippets in completion items.
+	if c := caps.TextDocument.Completion; c.CompletionItem.SnippetSupport {
+ o.InsertTextFormat = protocol.SnippetTextFormat
+ }
+ // Check if the client supports configuration messages.
+ o.ConfigurationSupported = caps.Workspace.Configuration
+ o.DynamicConfigurationSupported = caps.Workspace.DidChangeConfiguration.DynamicRegistration
+ o.DynamicRegistrationSemanticTokensSupported = caps.TextDocument.SemanticTokens.DynamicRegistration
+ o.DynamicWatchedFilesSupported = caps.Workspace.DidChangeWatchedFiles.DynamicRegistration
+
+ // Check which types of content format are supported by this client.
+ if hover := caps.TextDocument.Hover; hover != nil && len(hover.ContentFormat) > 0 {
+ o.PreferredContentFormat = hover.ContentFormat[0]
+ }
+	// Check if the client supports only line folding.
+	if fr := caps.TextDocument.FoldingRange; fr != nil {
+ o.LineFoldingOnly = fr.LineFoldingOnly
+ }
+ // Check if the client supports hierarchical document symbols.
+ o.HierarchicalDocumentSymbolSupport = caps.TextDocument.DocumentSymbol.HierarchicalDocumentSymbolSupport
+
+	// Record the client's semantic token types and modifiers.
+	o.SemanticTypes = caps.TextDocument.SemanticTokens.TokenTypes
+	o.SemanticMods = caps.TextDocument.SemanticTokens.TokenModifiers
+	// We don't need Requests, as we support full functionality.
+	// We don't need Formats, as there is only one, for now.
+
+ // Check if the client supports diagnostic related information.
+ o.RelatedInformationSupported = caps.TextDocument.PublishDiagnostics.RelatedInformation
+	// Check if the client's completion support includes tags (preferred) or deprecation.
+ if caps.TextDocument.Completion.CompletionItem.TagSupport.ValueSet != nil {
+ o.CompletionTags = true
+ } else if caps.TextDocument.Completion.CompletionItem.DeprecatedSupport {
+ o.CompletionDeprecated = true
+ }
+}
+
+func (o *Options) Clone() *Options {
+ // TODO(rfindley): has this function gone stale? It appears that there are
+ // settings that are incorrectly cloned here (such as TemplateExtensions).
+ result := &Options{
+ ClientOptions: o.ClientOptions,
+ InternalOptions: o.InternalOptions,
+ Hooks: Hooks{
+ GoDiff: o.GoDiff,
+ StaticcheckSupported: o.StaticcheckSupported,
+ ComputeEdits: o.ComputeEdits,
+ GofumptFormat: o.GofumptFormat,
+ URLRegexp: o.URLRegexp,
+ },
+ ServerOptions: o.ServerOptions,
+ UserOptions: o.UserOptions,
+ }
+	// Fully clone any slice or map fields. Only Hooks and UserOptions
+	// can be modified.
+ copyStringMap := func(src map[string]bool) map[string]bool {
+ dst := make(map[string]bool)
+ for k, v := range src {
+ dst[k] = v
+ }
+ return dst
+ }
+ result.Analyses = copyStringMap(o.Analyses)
+ result.Codelenses = copyStringMap(o.Codelenses)
+
+ copySlice := func(src []string) []string {
+ dst := make([]string, len(src))
+ copy(dst, src)
+ return dst
+ }
+ result.SetEnvSlice(o.EnvSlice())
+ result.BuildFlags = copySlice(o.BuildFlags)
+ result.DirectoryFilters = copySlice(o.DirectoryFilters)
+ result.StandaloneTags = copySlice(o.StandaloneTags)
+
+ copyAnalyzerMap := func(src map[string]*Analyzer) map[string]*Analyzer {
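+		// Note: this copies the map but shares the *Analyzer values.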
+ dst := make(map[string]*Analyzer)
+ for k, v := range src {
+ dst[k] = v
+ }
+ return dst
+ }
+ result.DefaultAnalyzers = copyAnalyzerMap(o.DefaultAnalyzers)
+ result.TypeErrorAnalyzers = copyAnalyzerMap(o.TypeErrorAnalyzers)
+ result.ConvenienceAnalyzers = copyAnalyzerMap(o.ConvenienceAnalyzers)
+ result.StaticcheckAnalyzers = copyAnalyzerMap(o.StaticcheckAnalyzers)
+ return result
+}
+
+func (o *Options) AddStaticcheckAnalyzer(a *analysis.Analyzer, enabled bool, severity protocol.DiagnosticSeverity) {
+ o.StaticcheckAnalyzers[a.Name] = &Analyzer{
+ Analyzer: a,
+ Enabled: enabled,
+ Severity: severity,
+ }
+}
+
+// EnableAllExperiments turns on all of the experimental "off-by-default"
+// features offered by gopls. Any experimental features specified in maps
+// should be enabled in enableAllExperimentMaps.
+func (o *Options) EnableAllExperiments() {
+ o.SemanticTokens = true
+}
+
+func (o *Options) enableAllExperimentMaps() {
+ if _, ok := o.Codelenses[string(command.GCDetails)]; !ok {
+ o.Codelenses[string(command.GCDetails)] = true
+ }
+ if _, ok := o.Codelenses[string(command.RunGovulncheck)]; !ok {
+ o.Codelenses[string(command.RunGovulncheck)] = true
+ }
+ if _, ok := o.Analyses[unusedparams.Analyzer.Name]; !ok {
+ o.Analyses[unusedparams.Analyzer.Name] = true
+ }
+ if _, ok := o.Analyses[unusedvariable.Analyzer.Name]; !ok {
+ o.Analyses[unusedvariable.Analyzer.Name] = true
+ }
+}
+
+// validateDirectoryFilter validates that the filter string
+// - is not empty
+// - starts with either + or -
+// - doesn't contain currently unsupported glob operators: *, ?
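+//
+// For example, "-**/node_modules" is accepted, while "-*_test" is
+// rejected because '*' is supported only as the whole "**" segment.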
+func validateDirectoryFilter(ifilter string) (string, error) {
+ filter := fmt.Sprint(ifilter)
+ if filter == "" || (filter[0] != '+' && filter[0] != '-') {
+ return "", fmt.Errorf("invalid filter %v, must start with + or -", filter)
+ }
+ segs := strings.Split(filter[1:], "/")
+ unsupportedOps := [...]string{"?", "*"}
+ for _, seg := range segs {
+ if seg != "**" {
+ for _, op := range unsupportedOps {
+ if strings.Contains(seg, op) {
+ return "", fmt.Errorf("invalid filter %v, operator %v not supported. If you want to have this operator supported, consider filing an issue.", filter, op)
+ }
+ }
+ }
+ }
+
+ return strings.TrimRight(filepath.FromSlash(filter), "/"), nil
+}
+
+func (o *Options) set(name string, value interface{}, seen map[string]struct{}) OptionResult {
+ // Flatten the name in case we get options with a hierarchy.
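+	// For example, "ui.documentation.hoverKind" becomes "hoverKind".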
+ split := strings.Split(name, ".")
+ name = split[len(split)-1]
+
+ result := OptionResult{Name: name, Value: value}
+ if _, ok := seen[name]; ok {
+ result.parseErrorf("duplicate configuration for %s", name)
+ }
+ seen[name] = struct{}{}
+
+ switch name {
+ case "env":
+ menv, ok := value.(map[string]interface{})
+ if !ok {
+ result.parseErrorf("invalid type %T, expect map", value)
+ break
+ }
+ if o.Env == nil {
+ o.Env = make(map[string]string)
+ }
+ for k, v := range menv {
+ o.Env[k] = fmt.Sprint(v)
+ }
+
+ case "buildFlags":
+ // TODO(rfindley): use asStringSlice.
+ iflags, ok := value.([]interface{})
+ if !ok {
+ result.parseErrorf("invalid type %T, expect list", value)
+ break
+ }
+ flags := make([]string, 0, len(iflags))
+ for _, flag := range iflags {
+ flags = append(flags, fmt.Sprintf("%s", flag))
+ }
+ o.BuildFlags = flags
+
+ case "directoryFilters":
+ // TODO(rfindley): use asStringSlice.
+ ifilters, ok := value.([]interface{})
+ if !ok {
+ result.parseErrorf("invalid type %T, expect list", value)
+ break
+ }
+ var filters []string
+ for _, ifilter := range ifilters {
+ filter, err := validateDirectoryFilter(fmt.Sprintf("%v", ifilter))
+ if err != nil {
+ result.parseErrorf("%v", err)
+ return result
+ }
+ filters = append(filters, strings.TrimRight(filepath.FromSlash(filter), "/"))
+ }
+ o.DirectoryFilters = filters
+
+ case "memoryMode":
+ if s, ok := result.asOneOf(
+ string(ModeNormal),
+ string(ModeDegradeClosed),
+ ); ok {
+ o.MemoryMode = MemoryMode(s)
+ }
+ case "completionDocumentation":
+ result.setBool(&o.CompletionDocumentation)
+ case "usePlaceholders":
+ result.setBool(&o.UsePlaceholders)
+ case "deepCompletion":
+ result.setBool(&o.DeepCompletion)
+ case "completeUnimported":
+ result.setBool(&o.CompleteUnimported)
+ case "completionBudget":
+ result.setDuration(&o.CompletionBudget)
+ case "matcher":
+ if s, ok := result.asOneOf(
+ string(Fuzzy),
+ string(CaseSensitive),
+ string(CaseInsensitive),
+ ); ok {
+ o.Matcher = Matcher(s)
+ }
+
+ case "symbolMatcher":
+ if s, ok := result.asOneOf(
+ string(SymbolFuzzy),
+ string(SymbolFastFuzzy),
+ string(SymbolCaseInsensitive),
+ string(SymbolCaseSensitive),
+ ); ok {
+ o.SymbolMatcher = SymbolMatcher(s)
+ }
+
+ case "symbolStyle":
+ if s, ok := result.asOneOf(
+ string(FullyQualifiedSymbols),
+ string(PackageQualifiedSymbols),
+ string(DynamicSymbols),
+ ); ok {
+ o.SymbolStyle = SymbolStyle(s)
+ }
+
+ case "hoverKind":
+ if s, ok := result.asOneOf(
+ string(NoDocumentation),
+ string(SingleLine),
+ string(SynopsisDocumentation),
+ string(FullDocumentation),
+ string(Structured),
+ ); ok {
+ o.HoverKind = HoverKind(s)
+ }
+
+ case "linkTarget":
+ result.setString(&o.LinkTarget)
+
+ case "linksInHover":
+ result.setBool(&o.LinksInHover)
+
+ case "importShortcut":
+ if s, ok := result.asOneOf(string(BothShortcuts), string(LinkShortcut), string(DefinitionShortcut)); ok {
+ o.ImportShortcut = ImportShortcut(s)
+ }
+
+ case "analyses":
+ result.setBoolMap(&o.Analyses)
+
+ case "hints":
+ result.setBoolMap(&o.Hints)
+
+ case "annotations":
+ result.setAnnotationMap(&o.Annotations)
+
+ case "vulncheck":
+ if s, ok := result.asOneOf(
+ string(ModeVulncheckOff),
+ string(ModeVulncheckImports),
+ ); ok {
+ o.Vulncheck = VulncheckMode(s)
+ }
+
+ case "codelenses", "codelens":
+ var lensOverrides map[string]bool
+ result.setBoolMap(&lensOverrides)
+ if result.Error == nil {
+ if o.Codelenses == nil {
+ o.Codelenses = make(map[string]bool)
+ }
+ for lens, enabled := range lensOverrides {
+ o.Codelenses[lens] = enabled
+ }
+ }
+
+ // codelens is deprecated, but still works for now.
+ // TODO(rstambler): Remove this for the gopls/v0.7.0 release.
+ if name == "codelens" {
+ result.deprecated("codelenses")
+ }
+
+ case "staticcheck":
+ if v, ok := result.asBool(); ok {
+ o.Staticcheck = v
+ if v && !o.StaticcheckSupported {
+ result.Error = fmt.Errorf("applying setting %q: staticcheck is not supported at %s;"+
+ " rebuild gopls with a more recent version of Go", result.Name, runtime.Version())
+ }
+ }
+
+ case "local":
+ result.setString(&o.Local)
+
+ case "verboseOutput":
+ result.setBool(&o.VerboseOutput)
+
+ case "verboseWorkDoneProgress":
+ result.setBool(&o.VerboseWorkDoneProgress)
+
+ case "tempModfile":
+ result.setBool(&o.TempModfile)
+
+ case "showBugReports":
+ result.setBool(&o.ShowBugReports)
+
+ case "gofumpt":
+ if v, ok := result.asBool(); ok {
+ o.Gofumpt = v
+ if v && o.GofumptFormat == nil {
+ result.Error = fmt.Errorf("applying setting %q: gofumpt is not supported at %s;"+
+ " rebuild gopls with a more recent version of Go", result.Name, runtime.Version())
+ }
+ }
+
+ case "semanticTokens":
+ result.setBool(&o.SemanticTokens)
+
+ case "noSemanticString":
+ result.setBool(&o.NoSemanticString)
+
+ case "noSemanticNumber":
+ result.setBool(&o.NoSemanticNumber)
+
+ case "expandWorkspaceToModule":
+ result.setBool(&o.ExpandWorkspaceToModule)
+
+ case "experimentalPostfixCompletions":
+ result.setBool(&o.ExperimentalPostfixCompletions)
+
+ case "experimentalWorkspaceModule":
+ result.deprecated("")
+
+ case "experimentalTemplateSupport": // TODO(pjw): remove after June 2022
+ result.deprecated("")
+
+ case "templateExtensions":
+ if iexts, ok := value.([]interface{}); ok {
+ ans := []string{}
+ for _, x := range iexts {
+ ans = append(ans, fmt.Sprint(x))
+ }
+ o.TemplateExtensions = ans
+ break
+ }
+ if value == nil {
+ o.TemplateExtensions = nil
+ break
+ }
+ result.parseErrorf("unexpected type %T not []string", value)
+
+ case "experimentalDiagnosticsDelay":
+ result.deprecated("diagnosticsDelay")
+
+ case "diagnosticsDelay":
+ result.setDuration(&o.DiagnosticsDelay)
+
+ case "experimentalWatchedFileDelay":
+ result.deprecated("")
+
+ case "experimentalPackageCacheKey":
+ result.deprecated("")
+
+ case "allowModfileModifications":
+ result.setBool(&o.AllowModfileModifications)
+
+ case "allowImplicitNetworkAccess":
+ result.setBool(&o.AllowImplicitNetworkAccess)
+
+ case "experimentalUseInvalidMetadata":
+ result.deprecated("")
+
+ case "standaloneTags":
+ result.setStringSlice(&o.StandaloneTags)
+
+ case "allExperiments":
+ // This setting should be handled before all of the other options are
+ // processed, so do nothing here.
+
+ case "newDiff":
+ result.setString(&o.NewDiff)
+
+ case "chattyDiagnostics":
+ result.setBool(&o.ChattyDiagnostics)
+
+ // Replaced settings.
+ case "experimentalDisabledAnalyses":
+ result.deprecated("analyses")
+
+ case "disableDeepCompletion":
+ result.deprecated("deepCompletion")
+
+ case "disableFuzzyMatching":
+ result.deprecated("fuzzyMatching")
+
+ case "wantCompletionDocumentation":
+ result.deprecated("completionDocumentation")
+
+ case "wantUnimportedCompletions":
+ result.deprecated("completeUnimported")
+
+ case "fuzzyMatching":
+ result.deprecated("matcher")
+
+ case "caseSensitiveCompletion":
+ result.deprecated("matcher")
+
+ // Deprecated settings.
+ case "wantSuggestedFixes":
+ result.deprecated("")
+
+ case "noIncrementalSync":
+ result.deprecated("")
+
+ case "watchFileChanges":
+ result.deprecated("")
+
+ case "go-diff":
+ result.deprecated("")
+
+ default:
+ result.unexpected()
+ }
+ return result
+}
+
+// parseErrorf reports an error parsing the current configuration value.
+func (r *OptionResult) parseErrorf(msg string, values ...interface{}) {
+ if false {
+		_ = fmt.Sprintf(msg, values...) // causes vet to check msg like a printf format string
+ }
+ prefix := fmt.Sprintf("parsing setting %q: ", r.Name)
+ r.Error = fmt.Errorf(prefix+msg, values...)
+}
+
+// A SoftError is an error that does not affect the functionality of gopls.
+type SoftError struct {
+ msg string
+}
+
+func (e *SoftError) Error() string {
+ return e.msg
+}
+
+// softErrorf reports an error that does not affect the functionality of gopls
+// (a warning in the UI).
+// The formatted message will be shown to the user unmodified.
+func (r *OptionResult) softErrorf(format string, values ...interface{}) {
+ msg := fmt.Sprintf(format, values...)
+ r.Error = &SoftError{msg}
+}
+
+// deprecated reports the current setting as deprecated. If 'replacement' is
+// non-empty, it is suggested to the user.
+func (r *OptionResult) deprecated(replacement string) {
+ msg := fmt.Sprintf("gopls setting %q is deprecated", r.Name)
+ if replacement != "" {
+ msg = fmt.Sprintf("%s, use %q instead", msg, replacement)
+ }
+ r.Error = &SoftError{msg}
+}
+
+// unexpected reports that the current setting is not known to gopls.
+func (r *OptionResult) unexpected() {
+ r.Error = fmt.Errorf("unexpected gopls setting %q", r.Name)
+}
+
+func (r *OptionResult) asBool() (bool, bool) {
+ b, ok := r.Value.(bool)
+ if !ok {
+ r.parseErrorf("invalid type %T, expect bool", r.Value)
+ return false, false
+ }
+ return b, true
+}
+
+func (r *OptionResult) setBool(b *bool) {
+ if v, ok := r.asBool(); ok {
+ *b = v
+ }
+}
+
+func (r *OptionResult) setDuration(d *time.Duration) {
+ if v, ok := r.asString(); ok {
+ parsed, err := time.ParseDuration(v)
+ if err != nil {
+ r.parseErrorf("failed to parse duration %q: %v", v, err)
+ return
+ }
+ *d = parsed
+ }
+}
+
+func (r *OptionResult) setBoolMap(bm *map[string]bool) {
+ m := r.asBoolMap()
+ *bm = m
+}
+
+func (r *OptionResult) setAnnotationMap(bm *map[Annotation]bool) {
+ all := r.asBoolMap()
+ if all == nil {
+ return
+ }
+	// Default to everything being enabled.
+ m := make(map[Annotation]bool)
+ for k, enabled := range all {
+ a, err := asOneOf(
+ k,
+ string(Nil),
+ string(Escape),
+ string(Inline),
+ string(Bounds),
+ )
+ if err != nil {
+ // In case of an error, process any legacy values.
+ switch k {
+ case "noEscape":
+ m[Escape] = false
+ r.parseErrorf(`"noEscape" is deprecated, set "Escape: false" instead`)
+ case "noNilcheck":
+ m[Nil] = false
+ r.parseErrorf(`"noNilcheck" is deprecated, set "Nil: false" instead`)
+ case "noInline":
+ m[Inline] = false
+ r.parseErrorf(`"noInline" is deprecated, set "Inline: false" instead`)
+ case "noBounds":
+ m[Bounds] = false
+ r.parseErrorf(`"noBounds" is deprecated, set "Bounds: false" instead`)
+ default:
+ r.parseErrorf("%v", err)
+ }
+ continue
+ }
+ m[Annotation(a)] = enabled
+ }
+ *bm = m
+}
+
+func (r *OptionResult) asBoolMap() map[string]bool {
+ all, ok := r.Value.(map[string]interface{})
+ if !ok {
+ r.parseErrorf("invalid type %T for map[string]bool option", r.Value)
+ return nil
+ }
+ m := make(map[string]bool)
+ for a, enabled := range all {
+ if e, ok := enabled.(bool); ok {
+ m[a] = e
+ } else {
+ r.parseErrorf("invalid type %T for map key %q", enabled, a)
+ return m
+ }
+ }
+ return m
+}
+
+func (r *OptionResult) asString() (string, bool) {
+ b, ok := r.Value.(string)
+ if !ok {
+ r.parseErrorf("invalid type %T, expect string", r.Value)
+ return "", false
+ }
+ return b, true
+}
+
+func (r *OptionResult) asStringSlice() ([]string, bool) {
+ iList, ok := r.Value.([]interface{})
+ if !ok {
+ r.parseErrorf("invalid type %T, expect list", r.Value)
+ return nil, false
+ }
+ var list []string
+ for _, elem := range iList {
+ s, ok := elem.(string)
+ if !ok {
+ r.parseErrorf("invalid element type %T, expect string", elem)
+ return nil, false
+ }
+ list = append(list, s)
+ }
+ return list, true
+}
+
+func (r *OptionResult) asOneOf(options ...string) (string, bool) {
+ s, ok := r.asString()
+ if !ok {
+ return "", false
+ }
+ s, err := asOneOf(s, options...)
+ if err != nil {
+ r.parseErrorf("%v", err)
+ }
+ return s, err == nil
+}
+
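+// asOneOf returns the canonical spelling of str if it matches one of the
+// options, comparing case-insensitively; for example,
+// asOneOf("imports", "Off", "Imports") returns ("Imports", nil).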
+func asOneOf(str string, options ...string) (string, error) {
+ lower := strings.ToLower(str)
+ for _, opt := range options {
+ if strings.ToLower(opt) == lower {
+ return opt, nil
+ }
+ }
+ return "", fmt.Errorf("invalid option %q for enum", str)
+}
+
+func (r *OptionResult) setString(s *string) {
+ if v, ok := r.asString(); ok {
+ *s = v
+ }
+}
+
+func (r *OptionResult) setStringSlice(s *[]string) {
+ if v, ok := r.asStringSlice(); ok {
+ *s = v
+ }
+}
+
+func typeErrorAnalyzers() map[string]*Analyzer {
+ return map[string]*Analyzer{
+ fillreturns.Analyzer.Name: {
+ Analyzer: fillreturns.Analyzer,
+ ActionKind: []protocol.CodeActionKind{protocol.SourceFixAll, protocol.QuickFix},
+ Enabled: true,
+ },
+ nonewvars.Analyzer.Name: {
+ Analyzer: nonewvars.Analyzer,
+ Enabled: true,
+ },
+ noresultvalues.Analyzer.Name: {
+ Analyzer: noresultvalues.Analyzer,
+ Enabled: true,
+ },
+ undeclaredname.Analyzer.Name: {
+ Analyzer: undeclaredname.Analyzer,
+ Fix: UndeclaredName,
+ Enabled: true,
+ },
+ unusedvariable.Analyzer.Name: {
+ Analyzer: unusedvariable.Analyzer,
+ Enabled: false,
+ },
+ }
+}
+
+func convenienceAnalyzers() map[string]*Analyzer {
+ return map[string]*Analyzer{
+ fillstruct.Analyzer.Name: {
+ Analyzer: fillstruct.Analyzer,
+ Fix: FillStruct,
+ Enabled: true,
+ ActionKind: []protocol.CodeActionKind{protocol.RefactorRewrite},
+ },
+ stubmethods.Analyzer.Name: {
+ Analyzer: stubmethods.Analyzer,
+ ActionKind: []protocol.CodeActionKind{protocol.RefactorRewrite},
+ Fix: StubMethods,
+ Enabled: true,
+ },
+ }
+}
+
+func defaultAnalyzers() map[string]*Analyzer {
+ return map[string]*Analyzer{
+ // The traditional vet suite:
+ asmdecl.Analyzer.Name: {Analyzer: asmdecl.Analyzer, Enabled: true},
+ assign.Analyzer.Name: {Analyzer: assign.Analyzer, Enabled: true},
+ atomic.Analyzer.Name: {Analyzer: atomic.Analyzer, Enabled: true},
+ bools.Analyzer.Name: {Analyzer: bools.Analyzer, Enabled: true},
+ buildtag.Analyzer.Name: {Analyzer: buildtag.Analyzer, Enabled: true},
+ cgocall.Analyzer.Name: {Analyzer: cgocall.Analyzer, Enabled: true},
+ composite.Analyzer.Name: {Analyzer: composite.Analyzer, Enabled: true},
+ copylock.Analyzer.Name: {Analyzer: copylock.Analyzer, Enabled: true},
+ directive.Analyzer.Name: {Analyzer: directive.Analyzer, Enabled: true},
+ errorsas.Analyzer.Name: {Analyzer: errorsas.Analyzer, Enabled: true},
+ httpresponse.Analyzer.Name: {Analyzer: httpresponse.Analyzer, Enabled: true},
+ ifaceassert.Analyzer.Name: {Analyzer: ifaceassert.Analyzer, Enabled: true},
+ loopclosure.Analyzer.Name: {Analyzer: loopclosure.Analyzer, Enabled: true},
+ lostcancel.Analyzer.Name: {Analyzer: lostcancel.Analyzer, Enabled: true},
+ nilfunc.Analyzer.Name: {Analyzer: nilfunc.Analyzer, Enabled: true},
+ printf.Analyzer.Name: {Analyzer: printf.Analyzer, Enabled: true},
+ shift.Analyzer.Name: {Analyzer: shift.Analyzer, Enabled: true},
+ stdmethods.Analyzer.Name: {Analyzer: stdmethods.Analyzer, Enabled: true},
+ stringintconv.Analyzer.Name: {Analyzer: stringintconv.Analyzer, Enabled: true},
+ structtag.Analyzer.Name: {Analyzer: structtag.Analyzer, Enabled: true},
+ tests.Analyzer.Name: {Analyzer: tests.Analyzer, Enabled: true},
+ unmarshal.Analyzer.Name: {Analyzer: unmarshal.Analyzer, Enabled: true},
+ unreachable.Analyzer.Name: {Analyzer: unreachable.Analyzer, Enabled: true},
+ unsafeptr.Analyzer.Name: {Analyzer: unsafeptr.Analyzer, Enabled: true},
+ unusedresult.Analyzer.Name: {Analyzer: unusedresult.Analyzer, Enabled: true},
+
+ // Non-vet analyzers:
+ atomicalign.Analyzer.Name: {Analyzer: atomicalign.Analyzer, Enabled: true},
+ deepequalerrors.Analyzer.Name: {Analyzer: deepequalerrors.Analyzer, Enabled: true},
+ fieldalignment.Analyzer.Name: {Analyzer: fieldalignment.Analyzer, Enabled: false},
+ nilness.Analyzer.Name: {Analyzer: nilness.Analyzer, Enabled: false},
+ shadow.Analyzer.Name: {Analyzer: shadow.Analyzer, Enabled: false},
+ sortslice.Analyzer.Name: {Analyzer: sortslice.Analyzer, Enabled: true},
+ testinggoroutine.Analyzer.Name: {Analyzer: testinggoroutine.Analyzer, Enabled: true},
+ unusedparams.Analyzer.Name: {Analyzer: unusedparams.Analyzer, Enabled: false},
+ unusedwrite.Analyzer.Name: {Analyzer: unusedwrite.Analyzer, Enabled: false},
+ useany.Analyzer.Name: {Analyzer: useany.Analyzer, Enabled: false},
+ infertypeargs.Analyzer.Name: {Analyzer: infertypeargs.Analyzer, Enabled: true},
+ embeddirective.Analyzer.Name: {Analyzer: embeddirective.Analyzer, Enabled: true},
+ timeformat.Analyzer.Name: {Analyzer: timeformat.Analyzer, Enabled: true},
+
+ // gofmt -s suite:
+ simplifycompositelit.Analyzer.Name: {
+ Analyzer: simplifycompositelit.Analyzer,
+ Enabled: true,
+ ActionKind: []protocol.CodeActionKind{protocol.SourceFixAll, protocol.QuickFix},
+ },
+ simplifyrange.Analyzer.Name: {
+ Analyzer: simplifyrange.Analyzer,
+ Enabled: true,
+ ActionKind: []protocol.CodeActionKind{protocol.SourceFixAll, protocol.QuickFix},
+ },
+ simplifyslice.Analyzer.Name: {
+ Analyzer: simplifyslice.Analyzer,
+ Enabled: true,
+ ActionKind: []protocol.CodeActionKind{protocol.SourceFixAll, protocol.QuickFix},
+ },
+ }
+}
+
+func urlRegexp() *regexp.Regexp {
+ // Ensure links are matched as full words, not anywhere.
+ re := regexp.MustCompile(`\b(http|ftp|https)://([\w_-]+(?:(?:\.[\w_-]+)+))([\w.,@?^=%&:/~+#-]*[\w@?^=%&/~+#-])?\b`)
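+	// Longest makes matching leftmost-longest, preferring the full URL over
+	// any shorter prefix match.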
+ re.Longest()
+ return re
+}
+
+type APIJSON struct {
+ Options map[string][]*OptionJSON
+ Commands []*CommandJSON
+ Lenses []*LensJSON
+ Analyzers []*AnalyzerJSON
+ Hints []*HintJSON
+}
+
+type OptionJSON struct {
+ Name string
+ Type string
+ Doc string
+ EnumKeys EnumKeys
+ EnumValues []EnumValue
+ Default string
+ Status string
+ Hierarchy string
+}
+
+func (o *OptionJSON) String() string {
+ return o.Name
+}
+
+func (o *OptionJSON) Write(w io.Writer) {
+ fmt.Fprintf(w, "**%v** *%v*\n\n", o.Name, o.Type)
+ writeStatus(w, o.Status)
+ enumValues := collectEnums(o)
+ fmt.Fprintf(w, "%v%v\nDefault: `%v`.\n\n", o.Doc, enumValues, o.Default)
+}
+
+func writeStatus(section io.Writer, status string) {
+ switch status {
+ case "":
+ case "advanced":
+ fmt.Fprint(section, "**This is an advanced setting and should not be configured by most `gopls` users.**\n\n")
+ case "debug":
+ fmt.Fprint(section, "**This setting is for debugging purposes only.**\n\n")
+ case "experimental":
+ fmt.Fprint(section, "**This setting is experimental and may be deleted.**\n\n")
+ default:
+ fmt.Fprintf(section, "**Status: %s.**\n\n", status)
+ }
+}
+
+var parBreakRE = regexp.MustCompile("\n{2,}")
+
+func collectEnums(opt *OptionJSON) string {
+ var b strings.Builder
+ write := func(name, doc string, index, len int) {
+ if doc != "" {
+ unbroken := parBreakRE.ReplaceAllString(doc, "\\\n")
+ fmt.Fprintf(&b, "* %s\n", strings.TrimSpace(unbroken))
+ } else {
+ fmt.Fprintf(&b, "* `%s`\n", name)
+ }
+ }
+ if len(opt.EnumValues) > 0 && opt.Type == "enum" {
+ b.WriteString("\nMust be one of:\n\n")
+ for i, val := range opt.EnumValues {
+ write(val.Value, val.Doc, i, len(opt.EnumValues))
+ }
+ } else if len(opt.EnumKeys.Keys) > 0 && shouldShowEnumKeysInSettings(opt.Name) {
+ b.WriteString("\nCan contain any of:\n\n")
+ for i, val := range opt.EnumKeys.Keys {
+ write(val.Name, val.Doc, i, len(opt.EnumKeys.Keys))
+ }
+ }
+ return b.String()
+}
+
+func shouldShowEnumKeysInSettings(name string) bool {
+ // These fields have too many possible options to print.
+ return !(name == "analyses" || name == "codelenses" || name == "hints")
+}
+
+type EnumKeys struct {
+ ValueType string
+ Keys []EnumKey
+}
+
+type EnumKey struct {
+ Name string
+ Doc string
+ Default string
+}
+
+type EnumValue struct {
+ Value string
+ Doc string
+}
+
+type CommandJSON struct {
+ Command string
+ Title string
+ Doc string
+ ArgDoc string
+ ResultDoc string
+}
+
+func (c *CommandJSON) String() string {
+ return c.Command
+}
+
+func (c *CommandJSON) Write(w io.Writer) {
+ fmt.Fprintf(w, "### **%v**\nIdentifier: `%v`\n\n%v\n\n", c.Title, c.Command, c.Doc)
+ if c.ArgDoc != "" {
+ fmt.Fprintf(w, "Args:\n\n```\n%s\n```\n\n", c.ArgDoc)
+ }
+ if c.ResultDoc != "" {
+ fmt.Fprintf(w, "Result:\n\n```\n%s\n```\n\n", c.ResultDoc)
+ }
+}
+
+type LensJSON struct {
+ Lens string
+ Title string
+ Doc string
+}
+
+func (l *LensJSON) String() string {
+ return l.Title
+}
+
+func (l *LensJSON) Write(w io.Writer) {
+ fmt.Fprintf(w, "%s (%s): %s", l.Title, l.Lens, l.Doc)
+}
+
+type AnalyzerJSON struct {
+ Name string
+ Doc string
+ Default bool
+}
+
+func (a *AnalyzerJSON) String() string {
+ return a.Name
+}
+
+func (a *AnalyzerJSON) Write(w io.Writer) {
+ fmt.Fprintf(w, "%s (%s): %v", a.Name, a.Doc, a.Default)
+}
+
+type HintJSON struct {
+ Name string
+ Doc string
+ Default bool
+}
+
+func (h *HintJSON) String() string {
+ return h.Name
+}
+
+func (h *HintJSON) Write(w io.Writer) {
+ fmt.Fprintf(w, "%s (%s): %v", h.Name, h.Doc, h.Default)
+}
diff --git a/gopls/internal/lsp/source/options_test.go b/gopls/internal/lsp/source/options_test.go
new file mode 100644
index 000000000..4fa6ecf15
--- /dev/null
+++ b/gopls/internal/lsp/source/options_test.go
@@ -0,0 +1,206 @@
+// Copyright 2020 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package source
+
+import (
+ "testing"
+ "time"
+)
+
+func TestSetOption(t *testing.T) {
+ tests := []struct {
+ name string
+ value interface{}
+ wantError bool
+ check func(Options) bool
+ }{
+ {
+ name: "symbolStyle",
+ value: "Dynamic",
+ check: func(o Options) bool { return o.SymbolStyle == DynamicSymbols },
+ },
+ {
+ name: "symbolStyle",
+ value: "",
+ wantError: true,
+ check: func(o Options) bool { return o.SymbolStyle == "" },
+ },
+ {
+ name: "symbolStyle",
+ value: false,
+ wantError: true,
+ check: func(o Options) bool { return o.SymbolStyle == "" },
+ },
+ {
+ name: "symbolMatcher",
+ value: "caseInsensitive",
+ check: func(o Options) bool { return o.SymbolMatcher == SymbolCaseInsensitive },
+ },
+ {
+ name: "completionBudget",
+ value: "2s",
+ check: func(o Options) bool { return o.CompletionBudget == 2*time.Second },
+ },
+ {
+ name: "staticcheck",
+ value: true,
+ check: func(o Options) bool { return o.Staticcheck == true },
+ wantError: true, // o.StaticcheckSupported is unset
+ },
+ {
+ name: "codelenses",
+ value: map[string]interface{}{"generate": true},
+ check: func(o Options) bool { return o.Codelenses["generate"] },
+ },
+ {
+ name: "allExperiments",
+ value: true,
+ check: func(o Options) bool {
+ return true // just confirm that we handle this setting
+ },
+ },
+ {
+ name: "hoverKind",
+ value: "FullDocumentation",
+ check: func(o Options) bool {
+ return o.HoverKind == FullDocumentation
+ },
+ },
+ {
+ name: "hoverKind",
+ value: "NoDocumentation",
+ check: func(o Options) bool {
+ return o.HoverKind == NoDocumentation
+ },
+ },
+ {
+ name: "hoverKind",
+ value: "SingleLine",
+ check: func(o Options) bool {
+ return o.HoverKind == SingleLine
+ },
+ },
+ {
+ name: "hoverKind",
+ value: "Structured",
+ check: func(o Options) bool {
+ return o.HoverKind == Structured
+ },
+ },
+ {
+ name: "ui.documentation.hoverKind",
+ value: "Structured",
+ check: func(o Options) bool {
+ return o.HoverKind == Structured
+ },
+ },
+ {
+ name: "matcher",
+ value: "Fuzzy",
+ check: func(o Options) bool {
+ return o.Matcher == Fuzzy
+ },
+ },
+ {
+ name: "matcher",
+ value: "CaseSensitive",
+ check: func(o Options) bool {
+ return o.Matcher == CaseSensitive
+ },
+ },
+ {
+ name: "matcher",
+ value: "CaseInsensitive",
+ check: func(o Options) bool {
+ return o.Matcher == CaseInsensitive
+ },
+ },
+ {
+ name: "env",
+ value: map[string]interface{}{"testing": "true"},
+ check: func(o Options) bool {
+ v, found := o.Env["testing"]
+ return found && v == "true"
+ },
+ },
+ {
+ name: "env",
+ value: []string{"invalid", "input"},
+ wantError: true,
+ check: func(o Options) bool {
+ return o.Env == nil
+ },
+ },
+ {
+ name: "directoryFilters",
+ value: []interface{}{"-node_modules", "+project_a"},
+ check: func(o Options) bool {
+ return len(o.DirectoryFilters) == 2
+ },
+ },
+ {
+ name: "directoryFilters",
+ value: []interface{}{"invalid"},
+ wantError: true,
+ check: func(o Options) bool {
+ return len(o.DirectoryFilters) == 0
+ },
+ },
+ {
+ name: "directoryFilters",
+ value: []string{"-invalid", "+type"},
+ wantError: true,
+ check: func(o Options) bool {
+ return len(o.DirectoryFilters) == 0
+ },
+ },
+ {
+ name: "annotations",
+ value: map[string]interface{}{
+ "Nil": false,
+ "noBounds": true,
+ },
+ wantError: true,
+ check: func(o Options) bool {
+ return !o.Annotations[Nil] && !o.Annotations[Bounds]
+ },
+ },
+ {
+ name: "vulncheck",
+ value: []interface{}{"invalid"},
+ wantError: true,
+ check: func(o Options) bool {
+ return o.Vulncheck == "" // For invalid value, default to 'off'.
+ },
+ },
+ {
+ name: "vulncheck",
+ value: "Imports",
+ check: func(o Options) bool {
+ return o.Vulncheck == ModeVulncheckImports
+ },
+ },
+ {
+ name: "vulncheck",
+ value: "imports",
+ check: func(o Options) bool {
+ return o.Vulncheck == ModeVulncheckImports
+ },
+ },
+ }
+
+ for _, test := range tests {
+ var opts Options
+ result := opts.set(test.name, test.value, map[string]struct{}{})
+ if (result.Error != nil) != test.wantError {
+ t.Fatalf("Options.set(%q, %v): result.Error = %v, want error: %t", test.name, test.value, result.Error, test.wantError)
+ }
+ // TODO: this could be made much better using cmp.Diff, if that becomes
+ // available in this module.
+ if !test.check(opts) {
+ t.Errorf("Options.set(%q, %v): unexpected result %+v", test.name, test.value, opts)
+ }
+ }
+}
diff --git a/gopls/internal/lsp/source/references.go b/gopls/internal/lsp/source/references.go
new file mode 100644
index 000000000..3f8960180
--- /dev/null
+++ b/gopls/internal/lsp/source/references.go
@@ -0,0 +1,582 @@
+// Copyright 2019 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package source
+
+// This file defines the 'references' query based on a serializable
+// index constructed during type checking, thus avoiding the need to
+// type-check packages at search time.
+//
+// See the ./xrefs/ subpackage for the index construction and lookup.
+//
+// This implementation does not intermingle objects from distinct
+// calls to TypeCheck.
+
+import (
+ "context"
+ "fmt"
+ "go/ast"
+ "go/token"
+ "go/types"
+ "sort"
+ "strings"
+ "sync"
+
+ "golang.org/x/sync/errgroup"
+ "golang.org/x/tools/go/types/objectpath"
+ "golang.org/x/tools/gopls/internal/lsp/protocol"
+ "golang.org/x/tools/gopls/internal/lsp/safetoken"
+ "golang.org/x/tools/gopls/internal/lsp/source/methodsets"
+ "golang.org/x/tools/gopls/internal/span"
+ "golang.org/x/tools/internal/bug"
+ "golang.org/x/tools/internal/event"
+)
+
+// References returns a list of all references (sorted with
+// definitions before uses) to the object denoted by the identifier at
+// the given file/position, searching the entire workspace.
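+//
+// A minimal sketch of a hypothetical caller (identifiers other than
+// References itself are assumed):
+//
+//	locs, err := References(ctx, snapshot, fh, pos, true)
+//	if err != nil {
+//		return err
+//	}
+//	for _, loc := range locs {
+//		fmt.Println(loc.URI, loc.Range)
+//	}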
+func References(ctx context.Context, snapshot Snapshot, fh FileHandle, pp protocol.Position, includeDeclaration bool) ([]protocol.Location, error) {
+ references, err := references(ctx, snapshot, fh, pp, includeDeclaration)
+ if err != nil {
+ return nil, err
+ }
+ locations := make([]protocol.Location, len(references))
+ for i, ref := range references {
+ locations[i] = ref.location
+ }
+ return locations, nil
+}
+
+// A reference describes an identifier that refers to the same
+// object as the subject of a References query.
+type reference struct {
+ isDeclaration bool
+ location protocol.Location
+ pkgPath PackagePath // of declaring package (same for all elements of the slice)
+}
+
+// references returns a list of all references (sorted with
+// definitions before uses) to the object denoted by the identifier at
+// the given file/position, searching the entire workspace.
+func references(ctx context.Context, snapshot Snapshot, f FileHandle, pp protocol.Position, includeDeclaration bool) ([]reference, error) {
+ ctx, done := event.Start(ctx, "source.References2")
+ defer done()
+
+ // Is the cursor within the package name declaration?
+ _, inPackageName, err := parsePackageNameDecl(ctx, snapshot, f, pp)
+ if err != nil {
+ return nil, err
+ }
+
+ var refs []reference
+ if inPackageName {
+ refs, err = packageReferences(ctx, snapshot, f.URI())
+ } else {
+ refs, err = ordinaryReferences(ctx, snapshot, f.URI(), pp)
+ }
+ if err != nil {
+ return nil, err
+ }
+
+ sort.Slice(refs, func(i, j int) bool {
+ x, y := refs[i], refs[j]
+ if x.isDeclaration != y.isDeclaration {
+ return x.isDeclaration // decls < refs
+ }
+ return protocol.CompareLocation(x.location, y.location) < 0
+ })
+
+ // De-duplicate by location, and optionally remove declarations.
+ out := refs[:0]
+ for _, ref := range refs {
+ if !includeDeclaration && ref.isDeclaration {
+ continue
+ }
+ if len(out) == 0 || out[len(out)-1].location != ref.location {
+ out = append(out, ref)
+ }
+ }
+ refs = out
+
+ return refs, nil
+}
+
+// packageReferences returns a list of references to the package
+// declaration of the specified name and uri by searching among the
+// import declarations of all packages that directly import the target
+// package.
+func packageReferences(ctx context.Context, snapshot Snapshot, uri span.URI) ([]reference, error) {
+ metas, err := snapshot.MetadataForFile(ctx, uri)
+ if err != nil {
+ return nil, err
+ }
+ if len(metas) == 0 {
+ return nil, fmt.Errorf("found no package containing %s", uri)
+ }
+
+ var refs []reference
+
+ // Find external references to the package declaration
+ // from each direct import of the package.
+ //
+ // The narrowest package is the most broadly imported,
+ // so we choose it for the external references.
+ //
+ // But if the file ends with _test.go then we need to
+ // find the package it is testing; there's no direct way
+ // to do that, so pick a file from the same package that
+ // doesn't end in _test.go and start over.
+ narrowest := metas[0]
+ if narrowest.ForTest != "" && strings.HasSuffix(string(uri), "_test.go") {
+ for _, f := range narrowest.CompiledGoFiles {
+ if !strings.HasSuffix(string(f), "_test.go") {
+ return packageReferences(ctx, snapshot, f)
+ }
+ }
+ // This package has no non-test files.
+ // Skip the search for external references.
+ // (Conceivably one could blank-import an empty package, but why?)
+ } else {
+ rdeps, err := snapshot.ReverseDependencies(ctx, narrowest.ID, false) // direct
+ if err != nil {
+ return nil, err
+ }
+ for _, rdep := range rdeps {
+ for _, uri := range rdep.CompiledGoFiles {
+ fh, err := snapshot.GetFile(ctx, uri)
+ if err != nil {
+ return nil, err
+ }
+ f, err := snapshot.ParseGo(ctx, fh, ParseHeader)
+ if err != nil {
+ return nil, err
+ }
+ for _, imp := range f.File.Imports {
+ if rdep.DepsByImpPath[UnquoteImportPath(imp)] == narrowest.ID {
+ refs = append(refs, reference{
+ isDeclaration: false,
+ location: mustLocation(f, imp),
+ pkgPath: narrowest.PkgPath,
+ })
+ }
+ }
+ }
+ }
+ }
+
+ // Find internal "references" to the package from
+ // each package declaration in the target package itself.
+ //
+ // The widest package (possibly a test variant) has the
+ // greatest number of files and thus we choose it for the
+ // "internal" references.
+ widest := metas[len(metas)-1]
+ for _, uri := range widest.CompiledGoFiles {
+ fh, err := snapshot.GetFile(ctx, uri)
+ if err != nil {
+ return nil, err
+ }
+ f, err := snapshot.ParseGo(ctx, fh, ParseHeader)
+ if err != nil {
+ return nil, err
+ }
+ refs = append(refs, reference{
+ isDeclaration: true, // (one of many)
+ location: mustLocation(f, f.File.Name),
+ pkgPath: widest.PkgPath,
+ })
+ }
+
+ return refs, nil
+}
+
+// ordinaryReferences computes references for all ordinary objects (not package declarations).
+func ordinaryReferences(ctx context.Context, snapshot Snapshot, uri span.URI, pp protocol.Position) ([]reference, error) {
+ // Strategy: use the reference information computed by the
+ // type checker to find the declaration. First type-check this
+ // package to find the declaration, then type check the
+ // declaring package (which may be different), plus variants,
+ // to find local (in-package) references.
+ // Global references are satisfied by the index.
+
+ // Strictly speaking, a wider package could provide a different
+ // declaration (e.g. because the _test.go files can change the
+ // meaning of a field or method selection), but the narrower
+ // package reports the more broadly referenced object.
+ pkg, pgf, err := PackageForFile(ctx, snapshot, uri, NarrowestPackage)
+ if err != nil {
+ return nil, err
+ }
+
+ // Find the selected object (declaration or reference).
+ pos, err := pgf.PositionPos(pp)
+ if err != nil {
+ return nil, err
+ }
+ candidates, _, err := objectsAt(pkg.GetTypesInfo(), pgf.File, pos)
+ if err != nil {
+ return nil, err
+ }
+
+ // Pick first object arbitrarily.
+ // The case variables of a type switch have different
+ // types but that difference is immaterial here.
+ var obj types.Object
+ for obj = range candidates {
+ break
+ }
+ if obj == nil {
+ return nil, ErrNoIdentFound // can't happen
+ }
+
+ // nil, error, error.Error, iota, or other built-in?
+ if obj.Pkg() == nil {
+ // For some reason, existing tests require that iota has no references
+ // and no error. TODO(adonovan): do something more principled.
+ if obj.Name() == "iota" {
+ return nil, nil
+ }
+
+ return nil, fmt.Errorf("references to builtin %q are not supported", obj.Name())
+ }
+
+ // Find metadata of all packages containing the object's defining file.
+ // This may include the query pkg, and possibly other variants.
+ declPosn := safetoken.StartPosition(pkg.FileSet(), obj.Pos())
+ declURI := span.URIFromPath(declPosn.Filename)
+ variants, err := snapshot.MetadataForFile(ctx, declURI)
+ if err != nil {
+ return nil, err
+ }
+ if len(variants) == 0 {
+ return nil, fmt.Errorf("no packages for file %q", declURI) // can't happen
+ }
+
+ // Is object exported?
+ // If so, compute scope and targets of the global search.
+ var (
+ globalScope = make(map[PackageID]*Metadata)
+ globalTargets map[PackagePath]map[objectpath.Path]unit
+ )
+ // TODO(adonovan): what about generic functions. Need to consider both
+ // uninstantiated and instantiated. The latter have no objectpath. Use Origin?
+ if path, err := objectpath.For(obj); err == nil && obj.Exported() {
+ pkgPath := variants[0].PkgPath // (all variants have same package path)
+ globalTargets = map[PackagePath]map[objectpath.Path]unit{
+ pkgPath: {path: {}}, // primary target
+ }
+
+ // How far need we search?
+ // For package-level objects, we need only search the direct importers.
+ // For fields and methods, we must search transitively.
+ transitive := obj.Pkg().Scope().Lookup(obj.Name()) != obj
+
+ // The scope is the union of rdeps of each variant.
+ // (Each set is disjoint so there's no benefit to
+ // combining the metadata graph traversals.)
+ for _, m := range variants {
+ rdeps, err := snapshot.ReverseDependencies(ctx, m.ID, transitive)
+ if err != nil {
+ return nil, err
+ }
+ for id, rdep := range rdeps {
+ globalScope[id] = rdep
+ }
+ }
+
+ // Is object a method?
+ //
+ // If so, expand the search so that the targets include
+ // all methods that correspond to it through interface
+ // satisfaction, and the scope includes the rdeps of
+ // the package that declares each corresponding type.
+ if recv := effectiveReceiver(obj); recv != nil {
+ if err := expandMethodSearch(ctx, snapshot, obj.(*types.Func), recv, globalScope, globalTargets); err != nil {
+ return nil, err
+ }
+ }
+ }
+
+ // The search functions will call report(loc) for each hit.
+ var (
+ refsMu sync.Mutex
+ refs []reference
+ )
+ report := func(loc protocol.Location, isDecl bool) {
+ ref := reference{
+ isDeclaration: isDecl,
+ location: loc,
+ pkgPath: pkg.Metadata().PkgPath,
+ }
+ refsMu.Lock()
+ refs = append(refs, ref)
+ refsMu.Unlock()
+ }
+
+ // Loop over the variants of the declaring package,
+ // and perform both the local (in-package) and global
+ // (cross-package) searches, in parallel.
+ //
+ // TODO(adonovan): opt: support LSP reference streaming. See:
+ // - https://github.com/microsoft/vscode-languageserver-node/pull/164
+ // - https://github.com/microsoft/language-server-protocol/pull/182
+ //
+ // Careful: this goroutine must not return before group.Wait.
+ var group errgroup.Group
+
+ // Compute local references for each variant.
+ for _, m := range variants {
+ // We want the ordinary importable package,
+ // plus any test-augmented variants, since
+ // declarations in _test.go files may change
+ // the reference of a selection, or even a
+ // field into a method or vice versa.
+ //
+ // But we don't need intermediate test variants,
+ // as their local references will be covered
+ // already by other variants.
+ if m.IsIntermediateTestVariant() {
+ continue
+ }
+ m := m
+ group.Go(func() error {
+ return localReferences(ctx, snapshot, declURI, declPosn.Offset, m, report)
+ })
+ }
+
+ // Compute global references for selected reverse dependencies.
+ group.Go(func() error {
+ var globalIDs []PackageID
+ for id := range globalScope {
+ globalIDs = append(globalIDs, id)
+ }
+ indexes, err := snapshot.References(ctx, globalIDs...)
+ if err != nil {
+ return err
+ }
+ for _, index := range indexes {
+ for _, loc := range index.Lookup(globalTargets) {
+ report(loc, false)
+ }
+ }
+ return nil
+ })
+
+ if err := group.Wait(); err != nil {
+ return nil, err
+ }
+ return refs, nil
+}
+
+// expandMethodSearch expands the scope and targets of a global search
+// for an exported method to include all methods that correspond to
+// it through interface satisfaction.
+//
+// recv is the method's effective receiver type, for method-set computations.
+func expandMethodSearch(ctx context.Context, snapshot Snapshot, method *types.Func, recv types.Type, scope map[PackageID]*Metadata, targets map[PackagePath]map[objectpath.Path]unit) error {
+ // Compute the method-set fingerprint used as a key to the global search.
+ key, hasMethods := methodsets.KeyOf(recv)
+ if !hasMethods {
+ return bug.Errorf("KeyOf(%s)={} yet %s is a method", recv, method)
+ }
+ metas, err := snapshot.AllMetadata(ctx)
+ if err != nil {
+ return err
+ }
+ allIDs := make([]PackageID, 0, len(metas))
+ for _, m := range metas {
+ allIDs = append(allIDs, m.ID)
+ }
+ // Search the methodset index of each package in the workspace.
+ indexes, err := snapshot.MethodSets(ctx, allIDs...)
+ if err != nil {
+ return err
+ }
+ var mu sync.Mutex // guards scope and targets
+ var group errgroup.Group
+ for i, index := range indexes {
+ i := i
+ index := index
+ group.Go(func() error {
+ // Consult index for matching methods.
+ results := index.Search(key, method.Name())
+ if len(results) == 0 {
+ return nil
+ }
+
+ // Expand global search scope to include rdeps of this pkg.
+ rdeps, err := snapshot.ReverseDependencies(ctx, allIDs[i], true)
+ if err != nil {
+ return err
+ }
+ mu.Lock()
+ defer mu.Unlock()
+ for _, rdep := range rdeps {
+ scope[rdep.ID] = rdep
+ }
+
+ // Add each corresponding method to the set of global search targets.
+ for _, res := range results {
+ methodPkg := PackagePath(res.PkgPath)
+ opaths, ok := targets[methodPkg]
+ if !ok {
+ opaths = make(map[objectpath.Path]unit)
+ targets[methodPkg] = opaths
+ }
+ opaths[res.ObjectPath] = unit{}
+ }
+ return nil
+ })
+ }
+ return group.Wait()
+}
+
+// localReferences reports each reference to the object
+// declared at the specified URI/offset within its enclosing package m.
+func localReferences(ctx context.Context, snapshot Snapshot, declURI span.URI, declOffset int, m *Metadata, report func(loc protocol.Location, isDecl bool)) error {
+ pkgs, err := snapshot.TypeCheck(ctx, m.ID)
+ if err != nil {
+ return err
+ }
+ pkg := pkgs[0] // narrowest
+
+ // Find declaration of corresponding object
+ // in this package based on (URI, offset).
+ pgf, err := pkg.File(declURI)
+ if err != nil {
+ return err
+ }
+ pos, err := safetoken.Pos(pgf.Tok, declOffset)
+ if err != nil {
+ return err
+ }
+ targets, _, err := objectsAt(pkg.GetTypesInfo(), pgf.File, pos)
+ if err != nil {
+ return err // unreachable? (probably caught earlier)
+ }
+
+ // Report the locations of the declaration(s).
+ // TODO(adonovan): what about for corresponding methods? Add tests.
+ for _, node := range targets {
+ report(mustLocation(pgf, node), true)
+ }
+
+ // If we're searching for references to a method, broaden the
+ // search to include references to corresponding methods of
+ // mutually assignable receiver types.
+ // (We use a slice, but objectsAt never returns >1 methods.)
+ var methodRecvs []types.Type
+ var methodName string // name of an arbitrary target, iff a method
+ for obj := range targets {
+ if t := effectiveReceiver(obj); t != nil {
+ methodRecvs = append(methodRecvs, t)
+ methodName = obj.Name()
+ }
+ }
+
+ // matches reports whether obj either is or corresponds to a target.
+ // (Correspondence is defined as usual for interface methods.)
+ matches := func(obj types.Object) bool {
+ if targets[obj] != nil {
+ return true
+ } else if methodRecvs != nil && obj.Name() == methodName {
+ if orecv := effectiveReceiver(obj); orecv != nil {
+ for _, mrecv := range methodRecvs {
+ if concreteImplementsIntf(orecv, mrecv) {
+ return true
+ }
+ }
+ }
+ }
+ return false
+ }
+
+ // Scan through syntax looking for uses of one of the target objects.
+ for _, pgf := range pkg.CompiledGoFiles() {
+ ast.Inspect(pgf.File, func(n ast.Node) bool {
+ if id, ok := n.(*ast.Ident); ok {
+ if obj, ok := pkg.GetTypesInfo().Uses[id]; ok && matches(obj) {
+ report(mustLocation(pgf, id), false)
+ }
+ }
+ return true
+ })
+ }
+ return nil
+}
+
+// effectiveReceiver returns the effective receiver type for method-set
+// comparisons for obj, if it is a method, or nil otherwise.
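+//
+// For example, for either declaration
+//
+//	func (t T) M()
+//	func (t *T) M()
+//
+// it returns *T, since EnsurePointer normalizes a non-pointer receiver
+// type T to *T.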
+func effectiveReceiver(obj types.Object) types.Type {
+ if fn, ok := obj.(*types.Func); ok {
+ if recv := fn.Type().(*types.Signature).Recv(); recv != nil {
+ return methodsets.EnsurePointer(recv.Type())
+ }
+ }
+ return nil
+}
+
+// objectsAt returns the non-empty set of objects denoted (def or use)
+// by the specified position within a file syntax tree, or an error if
+// none were found.
+//
+// The result may contain more than one element because all case
+// variables of a type switch appear to be declared at the same
+// position.
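+//
+// For example, in
+//
+//	switch x := v.(type) {
+//	case int, string:
+//	}
+//
+// each case clause implicitly declares its own variable x, located at
+// the position of x in the switch header.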
+//
+// Each object is mapped to the syntax node that was treated as an
+// identifier, which is not always an ast.Ident. The second component
+// of the result is the innermost node enclosing pos.
+//
+// TODO(adonovan): factor in common with referencedObject.
+func objectsAt(info *types.Info, file *ast.File, pos token.Pos) (map[types.Object]ast.Node, ast.Node, error) {
+ path := pathEnclosingObjNode(file, pos)
+ if path == nil {
+ return nil, nil, ErrNoIdentFound
+ }
+
+ targets := make(map[types.Object]ast.Node)
+
+ switch leaf := path[0].(type) {
+ case *ast.Ident:
+ // If leaf represents an implicit type switch object or the type
+ // switch "assign" variable, expand to all of the type switch's
+ // implicit objects.
+ if implicits, _ := typeSwitchImplicits(info, path); len(implicits) > 0 {
+ for _, obj := range implicits {
+ targets[obj] = leaf
+ }
+ } else {
+ obj := info.ObjectOf(leaf)
+ if obj == nil {
+ return nil, nil, fmt.Errorf("%w for %q", errNoObjectFound, leaf.Name)
+ }
+ targets[obj] = leaf
+ }
+ case *ast.ImportSpec:
+ // Look up the implicit *types.PkgName.
+ obj := info.Implicits[leaf]
+ if obj == nil {
+ return nil, nil, fmt.Errorf("%w for import %s", errNoObjectFound, UnquoteImportPath(leaf))
+ }
+ targets[obj] = leaf
+ }
+
+ if len(targets) == 0 {
+ return nil, nil, fmt.Errorf("objectAt: internal error: no targets") // can't happen
+ }
+ return targets, path[0], nil
+}
+
+// mustLocation returns the location interval of a syntax node,
+// which must belong to m.File.
+//
+// Safe for use only by references2 and implementations2.
+func mustLocation(pgf *ParsedGoFile, n ast.Node) protocol.Location {
+ loc, err := pgf.NodeLocation(n)
+ if err != nil {
+ panic(err) // can't happen in references2 or implementations2
+ }
+ return loc
+}
diff --git a/gopls/internal/lsp/source/rename.go b/gopls/internal/lsp/source/rename.go
new file mode 100644
index 000000000..c67f15ce5
--- /dev/null
+++ b/gopls/internal/lsp/source/rename.go
@@ -0,0 +1,1244 @@
+// Copyright 2019 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package source
+
+// TODO(adonovan):
+//
+// - method of generic concrete type -> arbitrary instances of same
+//
+// - make satisfy work across packages.
+//
+// - tests, tests, tests:
+// - play with renamings in the k8s tree.
+// - generics
+// - error cases (e.g. conflicts)
+// - renaming a symbol declared in the module cache
+// (currently proceeds with half of the renaming!)
+// - make sure all tests have both a local and a cross-package analogue.
+// - look at coverage
+// - special cases: embedded fields, interfaces, test variants,
+// function-local things with uppercase names;
+// packages with type errors (currently 'satisfy' rejects them),
+// packages with missing imports;
+//
+// - measure performance in k8s.
+//
+// - The original gorename tool assumed well-typedness, but the gopls feature
+// does no such check (which actually makes it much more useful).
+// Audit to ensure it is safe on ill-typed code.
+//
+// - Generics support was no doubt buggy before but incrementalization
+// may have exacerbated it. If the problem were just about objects,
+// defs and uses it would be fairly simple, but type assignability
+// comes into play in the 'satisfy' check for method renamings.
+// De-instantiating Vector[int] to Vector[T] changes its type.
+// We need to come up with a theory for the satisfy check that
+// works with generics, and across packages. We currently have no
+// simple way to pass types between packages (think: objectpath for
+// types), though presumably exportdata could be pressed into service.
+//
+// - FileID-based de-duplication of edits to different URIs for the same file.
+
+import (
+ "context"
+ "errors"
+ "fmt"
+ "go/ast"
+ "go/token"
+ "go/types"
+ "path"
+ "path/filepath"
+ "regexp"
+ "sort"
+ "strconv"
+ "strings"
+
+ "golang.org/x/mod/modfile"
+ "golang.org/x/tools/go/ast/astutil"
+ "golang.org/x/tools/go/types/objectpath"
+ "golang.org/x/tools/go/types/typeutil"
+ "golang.org/x/tools/gopls/internal/lsp/protocol"
+ "golang.org/x/tools/gopls/internal/lsp/safetoken"
+ "golang.org/x/tools/gopls/internal/span"
+ "golang.org/x/tools/internal/bug"
+ "golang.org/x/tools/internal/diff"
+ "golang.org/x/tools/internal/event"
+ "golang.org/x/tools/internal/typeparams"
+ "golang.org/x/tools/refactor/satisfy"
+)
+
+// A renamer holds the state of a single call to renameObjects, which renames
+// an object (or several coupled objects) within a single type-checked
+// syntax package.
+type renamer struct {
+ pkg Package // the syntax package in which the renaming is applied
+ objsToUpdate map[types.Object]bool // records progress of calls to check
+ hadConflicts bool
+ conflicts []string
+ from, to string
+ satisfyConstraints map[satisfy.Constraint]bool
+ msets typeutil.MethodSetCache
+ changeMethods bool
+}
+
+// A PrepareItem holds the result of a "prepare rename" operation:
+// the source range and value of a selected identifier.
+type PrepareItem struct {
+ Range protocol.Range
+ Text string
+}
+
+// PrepareRename searches for a valid renaming at position pp.
+//
+// The returned usererr is intended to be displayed to the user to explain why
+// the prepare fails. Probably we could eliminate the redundancy in returning
+// two errors, but for now this is done defensively.
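+//
+// A minimal sketch of a hypothetical caller (identifiers other than
+// PrepareRename itself are assumed):
+//
+//	item, userErr, err := PrepareRename(ctx, snapshot, fh, pos)
+//	if err != nil {
+//		return userErr // suitable for display to the user
+//	}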
+func PrepareRename(ctx context.Context, snapshot Snapshot, f FileHandle, pp protocol.Position) (_ *PrepareItem, usererr, err error) {
+ ctx, done := event.Start(ctx, "source.PrepareRename")
+ defer done()
+
+ // Is the cursor within the package name declaration?
+ if pgf, inPackageName, err := parsePackageNameDecl(ctx, snapshot, f, pp); err != nil {
+ return nil, err, err
+ } else if inPackageName {
+ item, err := prepareRenamePackageName(ctx, snapshot, pgf)
+ return item, err, err
+ }
+
+ // Ordinary (non-package) renaming.
+ //
+ // Type-check the current package, locate the reference at the position,
+ // validate the object, and report its name and range.
+ //
+ // TODO(adonovan): in all cases below, we return usererr=nil,
+ // which means we return (nil, nil) at the protocol
+ // layer. This seems like a bug, or at best an exploitation of
+ // knowledge of VSCode-specific behavior. Can we avoid that?
+ pkg, pgf, err := PackageForFile(ctx, snapshot, f.URI(), NarrowestPackage)
+ if err != nil {
+ return nil, nil, err
+ }
+ pos, err := pgf.PositionPos(pp)
+ if err != nil {
+ return nil, nil, err
+ }
+ targets, node, err := objectsAt(pkg.GetTypesInfo(), pgf.File, pos)
+ if err != nil {
+ return nil, nil, err
+ }
+ var obj types.Object
+ for obj = range targets {
+ break // pick one arbitrarily
+ }
+ if err := checkRenamable(obj); err != nil {
+ return nil, nil, err
+ }
+ rng, err := pgf.NodeRange(node)
+ if err != nil {
+ return nil, nil, err
+ }
+ if _, isImport := node.(*ast.ImportSpec); isImport {
+ // We're not really renaming the import path.
+ rng.End = rng.Start
+ }
+ return &PrepareItem{
+ Range: rng,
+ Text: obj.Name(),
+ }, nil, nil
+}
+
+func prepareRenamePackageName(ctx context.Context, snapshot Snapshot, pgf *ParsedGoFile) (*PrepareItem, error) {
+ // Does the client support file renaming?
+ fileRenameSupported := false
+ for _, op := range snapshot.View().Options().SupportedResourceOperations {
+ if op == protocol.Rename {
+ fileRenameSupported = true
+ break
+ }
+ }
+ if !fileRenameSupported {
+ return nil, errors.New("can't rename package: LSP client does not support file renaming")
+ }
+
+ // Check validity of the metadata for the file's containing package.
+ fileMeta, err := snapshot.MetadataForFile(ctx, pgf.URI)
+ if err != nil {
+ return nil, err
+ }
+ if len(fileMeta) == 0 {
+ return nil, fmt.Errorf("no packages found for file %q", pgf.URI)
+ }
+ meta := fileMeta[0]
+ if meta.Name == "main" {
+ return nil, fmt.Errorf("can't rename package \"main\"")
+ }
+ if strings.HasSuffix(string(meta.Name), "_test") {
+ return nil, fmt.Errorf("can't rename x_test packages")
+ }
+ if meta.Module == nil {
+ return nil, fmt.Errorf("can't rename package: missing module information for package %q", meta.PkgPath)
+ }
+ if meta.Module.Path == string(meta.PkgPath) {
+ return nil, fmt.Errorf("can't rename package: package path %q is the same as module path %q", meta.PkgPath, meta.Module.Path)
+ }
+
+ // Return the location of the package declaration.
+ rng, err := pgf.NodeRange(pgf.File.Name)
+ if err != nil {
+ return nil, err
+ }
+ return &PrepareItem{
+ Range: rng,
+ Text: string(meta.Name),
+ }, nil
+}
+
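+// checkRenamable returns an error if obj cannot be renamed: embedded
+// fields, built-in and unsafe objects, and the blank identifier "_"
+// are all rejected.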
+func checkRenamable(obj types.Object) error {
+ switch obj := obj.(type) {
+ case *types.Var:
+ if obj.Embedded() {
+ return fmt.Errorf("can't rename embedded fields: rename the type directly or name the field")
+ }
+ case *types.Builtin, *types.Nil:
+ return fmt.Errorf("%s is built in and cannot be renamed", obj.Name())
+ }
+ if obj.Pkg() == nil || obj.Pkg().Path() == "unsafe" {
+ // e.g. error.Error, unsafe.Pointer
+ return fmt.Errorf("%s is built in and cannot be renamed", obj.Name())
+ }
+ if obj.Name() == "_" {
+ return errors.New("can't rename \"_\"")
+ }
+ return nil
+}
+
+// Rename returns a map of TextEdits for each file modified when renaming
+// the identifier at the given position, and a boolean that reports whether
+// the renaming was a package rename (true) or an ordinary rename (false).
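+//
+// A minimal sketch of a hypothetical caller (identifiers other than
+// Rename itself are assumed):
+//
+//	edits, isPkgRenaming, err := Rename(ctx, snapshot, fh, pos, "NewName")
+//	if err != nil {
+//		return err
+//	}
+//	for uri, textEdits := range edits {
+//		applyEdits(uri, textEdits) // applyEdits is hypothetical
+//	}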
+func Rename(ctx context.Context, snapshot Snapshot, f FileHandle, pp protocol.Position, newName string) (map[span.URI][]protocol.TextEdit, bool, error) {
+ ctx, done := event.Start(ctx, "source.Rename")
+ defer done()
+
+ if !isValidIdentifier(newName) {
+ return nil, false, fmt.Errorf("invalid identifier to rename: %q", newName)
+ }
+
+ // Cursor within package name declaration?
+ _, inPackageName, err := parsePackageNameDecl(ctx, snapshot, f, pp)
+ if err != nil {
+ return nil, false, err
+ }
+
+ var editMap map[span.URI][]diff.Edit
+ if inPackageName {
+ editMap, err = renamePackageName(ctx, snapshot, f, PackageName(newName))
+ } else {
+ editMap, err = renameOrdinary(ctx, snapshot, f, pp, newName)
+ }
+ if err != nil {
+ return nil, false, err
+ }
+
+ // Convert edits to protocol form.
+ result := make(map[span.URI][]protocol.TextEdit)
+ for uri, edits := range editMap {
+ // Sort and de-duplicate edits.
+ //
+ // Overlapping edits may arise in local renamings (due
+ // to type switch implicits) and global ones (due to
+ // processing multiple package variants).
+ //
+ // We assume renaming produces diffs that are all
+ // replacements (no adjacent insertions that might
+ // become reordered) and that are either identical or
+ // non-overlapping.
+ diff.SortEdits(edits)
+ filtered := edits[:0]
+ for i, edit := range edits {
+ if i == 0 || edit != filtered[len(filtered)-1] {
+ filtered = append(filtered, edit)
+ }
+ }
+ edits = filtered
+
+ // TODO(adonovan): the logic above handles repeat edits to the
+ // same file URI (e.g. as a member of package p and p_test) but
+ // is not sufficient to handle file-system level aliasing arising
+ // from symbolic or hard links. For that, we should use a
+ // robustio-FileID-keyed map.
+ // See https://go.dev/cl/457615 for example.
+ // This really occurs in practice, e.g. kubernetes has
+ // vendor/k8s.io/kubectl -> ../../staging/src/k8s.io/kubectl.
+ fh, err := snapshot.GetFile(ctx, uri)
+ if err != nil {
+ return nil, false, err
+ }
+ data, err := fh.Read()
+ if err != nil {
+ return nil, false, err
+ }
+ m := protocol.NewMapper(uri, data)
+ protocolEdits, err := ToProtocolEdits(m, edits)
+ if err != nil {
+ return nil, false, err
+ }
+ result[uri] = protocolEdits
+ }
+
+ return result, inPackageName, nil
+}
+
+// renameOrdinary renames an ordinary (non-package) name throughout the workspace.
+func renameOrdinary(ctx context.Context, snapshot Snapshot, f FileHandle, pp protocol.Position, newName string) (map[span.URI][]diff.Edit, error) {
+ // Type-check the referring package and locate the object(s).
+ // We choose the widest variant as, for non-exported
+ // identifiers, it is the only package we need.
+ pkg, pgf, err := PackageForFile(ctx, snapshot, f.URI(), WidestPackage)
+ if err != nil {
+ return nil, err
+ }
+ pos, err := pgf.PositionPos(pp)
+ if err != nil {
+ return nil, err
+ }
+ targets, _, err := objectsAt(pkg.GetTypesInfo(), pgf.File, pos)
+ if err != nil {
+ return nil, err
+ }
+
+ // Pick a representative object arbitrarily.
+ // (All share the same name, pos, and kind.)
+ var obj types.Object
+ for obj = range targets {
+ break
+ }
+ if obj.Name() == newName {
+ return nil, fmt.Errorf("old and new names are the same: %s", newName)
+ }
+ if err := checkRenamable(obj); err != nil {
+ return nil, err
+ }
+
+ // Find objectpath, if object is exported ("" otherwise).
+ var declObjPath objectpath.Path
+ if obj.Exported() {
+ // objectpath.For requires the origin of a generic
+ // function or type, not an instantiation (a bug?).
+ // Unfortunately we can't call {Func,TypeName}.Origin
+ // as these are not available in go/types@go1.18.
+ // So we take a scenic route.
+ switch obj.(type) { // avoid "obj :=" since cases reassign the var
+ case *types.TypeName:
+ if named, ok := obj.Type().(*types.Named); ok {
+ obj = named.Obj()
+ }
+ case *types.Func:
+ obj = funcOrigin(obj.(*types.Func))
+ case *types.Var:
+ // TODO(adonovan): do vars need the origin treatment too? (issue #58462)
+ }
+ if path, err := objectpath.For(obj); err == nil {
+ declObjPath = path
+ }
+ }
+
+ // Nonexported? Search locally.
+ if declObjPath == "" {
+ var objects []types.Object
+ for obj := range targets {
+ objects = append(objects, obj)
+ }
+ editMap, _, err := renameObjects(ctx, snapshot, newName, pkg, objects...)
+ return editMap, err
+ }
+
+ // Exported: search globally.
+ //
+ // For exported package-level var/const/func/type objects, the
+ // search scope is just the direct importers.
+ //
+ // For exported fields and methods, the scope is the
+ // transitive rdeps. (The exportedness of the field's struct
+ // or method's receiver is irrelevant.)
+ transitive := false
+ switch obj.(type) {
+ case *types.TypeName:
+ // Renaming an exported package-level type
+ // requires us to inspect all transitive rdeps
+ // in the event that the type is embedded.
+ //
+ // TODO(adonovan): opt: this is conservative
+ // but inefficient. Instead, expand the scope
+ // of the search only if we actually encounter
+ // an embedding of the type, and only then to
+ // the rdeps of the embedding package.
+ if obj.Parent() == obj.Pkg().Scope() {
+ transitive = true
+ }
+
+ case *types.Var:
+ if obj.(*types.Var).IsField() {
+ transitive = true // field
+ }
+
+ // TODO(adonovan): opt: process only packages that
+ // contain a reference (xrefs) to the target field.
+
+ case *types.Func:
+ if obj.Type().(*types.Signature).Recv() != nil {
+ transitive = true // method
+ }
+
+ // It's tempting to optimize by skipping
+ // packages that don't contain a reference to
+ // the method in the xrefs index, but we still
+ // need to apply the satisfy check to those
+ // packages to find assignment statements that
+ // might expand the scope of the renaming.
+ }
+
+ // Type-check all the packages to inspect.
+ declURI := span.URIFromPath(pkg.FileSet().File(obj.Pos()).Name())
+ pkgs, err := typeCheckReverseDependencies(ctx, snapshot, declURI, transitive)
+ if err != nil {
+ return nil, err
+ }
+
+ // Apply the renaming to the (initial) object.
+ declPkgPath := PackagePath(obj.Pkg().Path())
+ return renameExported(ctx, snapshot, pkgs, declPkgPath, declObjPath, newName)
+}
+
+// funcOrigin is a go1.18-portable implementation of (*types.Func).Origin.
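+//
+// For example, given the generic type Vector[T] mentioned in the TODO
+// at the top of this file, the origin of a method of an instantiation
+// such as Vector[int] is the corresponding method of Vector[T].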
+func funcOrigin(fn *types.Func) *types.Func {
+ // Method?
+ if fn.Type().(*types.Signature).Recv() != nil {
+ return typeparams.OriginMethod(fn)
+ }
+
+ // Package-level function?
+ // (Assume the origin has the same position.)
+ gen := fn.Pkg().Scope().Lookup(fn.Name())
+ if gen != nil && gen.Pos() == fn.Pos() {
+ return gen.(*types.Func)
+ }
+
+ return fn
+}
+
+// typeCheckReverseDependencies returns the type-checked packages for
+// the reverse dependencies of all packages variants containing
+// file declURI. The packages are in some topological order.
+//
+// It includes all variants (even intermediate test variants) for the
+// purposes of computing reverse dependencies, but discards ITVs for
+// the actual renaming work.
+//
+// (This neglects obscure edge cases where a _test.go file changes the
+// selectors used only in an ITV, but life is short. Also sin must be
+// punished.)
+func typeCheckReverseDependencies(ctx context.Context, snapshot Snapshot, declURI span.URI, transitive bool) ([]Package, error) {
+ variants, err := snapshot.MetadataForFile(ctx, declURI)
+ if err != nil {
+ return nil, err
+ }
+ allRdeps := make(map[PackageID]*Metadata)
+ for _, variant := range variants {
+ rdeps, err := snapshot.ReverseDependencies(ctx, variant.ID, transitive)
+ if err != nil {
+ return nil, err
+ }
+ allRdeps[variant.ID] = variant // include self
+ for id, meta := range rdeps {
+ allRdeps[id] = meta
+ }
+ }
+ var ids []PackageID
+ for id, meta := range allRdeps {
+ if meta.IsIntermediateTestVariant() {
+ continue
+ }
+ ids = append(ids, id)
+ }
+
+ // Sort the packages into some topological order of the
+ // (unfiltered) metadata graph.
+ SortPostOrder(snapshot, ids)
+
+ // Dependencies must be visited first since they can expand
+ // the search set. Ideally we would process the (filtered) set
+ // of packages in the parallel postorder of the snapshot's
+ // (unfiltered) metadata graph, but this is quite tricky
+ // without a good graph abstraction.
+ //
+ // For now, we visit packages sequentially in order of
+ // ascending height, like an inverted breadth-first search.
+ //
+ // Type checking is by far the dominant cost, so
+ // overlapping it with renaming may not be worthwhile.
+ return snapshot.TypeCheck(ctx, ids...)
+}
+
+// SortPostOrder sorts the IDs so that if x depends on y, then y appears before x.
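+//
+// For example, if a depends on b and b depends on c, the resulting
+// order is [c b a]: each package's dependencies precede it.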
+func SortPostOrder(meta MetadataSource, ids []PackageID) {
+ postorder := make(map[PackageID]int)
+ order := 0
+ var visit func(PackageID)
+ visit = func(id PackageID) {
+ if _, ok := postorder[id]; !ok {
+ postorder[id] = -1 // break recursion
+ if m := meta.Metadata(id); m != nil {
+ for _, depID := range m.DepsByPkgPath {
+ visit(depID)
+ }
+ }
+ order++
+ postorder[id] = order
+ }
+ }
+ for _, id := range ids {
+ visit(id)
+ }
+ sort.Slice(ids, func(i, j int) bool {
+ return postorder[ids[i]] < postorder[ids[j]]
+ })
+}
+
+// renameExported renames the object denoted by (pkgPath, objPath)
+// within the specified packages, along with any other objects that
+// must be renamed as a consequence. The slice of packages must be
+// topologically ordered.
+func renameExported(ctx context.Context, snapshot Snapshot, pkgs []Package, declPkgPath PackagePath, declObjPath objectpath.Path, newName string) (map[span.URI][]diff.Edit, error) {
+
+ // A target is a name for an object that is stable across types.Packages.
+ type target struct {
+ pkg PackagePath
+ obj objectpath.Path
+ }
+
+ // Populate the initial set of target objects.
+ // This set may grow as we discover the consequences of each renaming.
+ //
+ // TODO(adonovan): strictly, each cone of reverse dependencies
+ // of a single variant should have its own target map that
+ // monotonically expands as we go up the import graph, because
+ // declarations in test files can alter the set of
+ // package-level names and change the meaning of field and
+ // method selectors. So if we parallelize the graph
+ // visitation (see above), we should also compute the targets
+ // as a union of dependencies.
+ //
+ // Or we could decide that the logic below is fast enough not
+ // to need parallelism. In small measurements so far the
+ // type-checking step is about 95% and the renaming only 5%.
+ targets := map[target]bool{{declPkgPath, declObjPath}: true}
+
+ // Apply the renaming operation to each package.
+ allEdits := make(map[span.URI][]diff.Edit)
+ for _, pkg := range pkgs {
+
+ // Resolved target objects within package pkg.
+ var objects []types.Object
+ for t := range targets {
+ p := pkg.DependencyTypes(t.pkg)
+ if p == nil {
+ continue // indirect dependency of no consequence
+ }
+ obj, err := objectpath.Object(p, t.obj)
+ if err != nil {
+ // Though this can happen with regular export data
+ // due to trimming of inconsequential objects,
+ // it can't happen if we load dependencies from full
+ // syntax (as today) or shallow export data (soon),
+ // as both are complete.
+ bug.Reportf("objectpath.Object(%v, %v) failed: %v", p, t.obj, err)
+ continue
+ }
+ objects = append(objects, obj)
+ }
+ if len(objects) == 0 {
+ continue // no targets of consequence to this package
+ }
+
+ // Apply the renaming.
+ editMap, moreObjects, err := renameObjects(ctx, snapshot, newName, pkg, objects...)
+ if err != nil {
+ return nil, err
+ }
+
+ // It is safe to concatenate the edits as they are non-overlapping
+ // (or identical, in which case they will be de-duped by Rename).
+ for uri, edits := range editMap {
+ allEdits[uri] = append(allEdits[uri], edits...)
+ }
+
+ // Expand the search set?
+ for obj := range moreObjects {
+ objpath, err := objectpath.For(obj)
+ if err != nil {
+ continue // not exported
+ }
+ target := target{PackagePath(obj.Pkg().Path()), objpath}
+ targets[target] = true
+
+ // TODO(adonovan): methods requires dynamic
+ // programming of the product targets x
+ // packages as any package might add a new
+ // target (from a forward dep) as a
+ // consequence, and any target might imply a
+ // new set of rdeps. See golang/go#58461.
+ }
+ }
+
+ return allEdits, nil
+}
+
+// renamePackageName renames package declarations, imports, and go.mod files.
+func renamePackageName(ctx context.Context, s Snapshot, f FileHandle, newName PackageName) (map[span.URI][]diff.Edit, error) {
+ // Rename the package decl and all imports.
+ renamingEdits, err := renamePackage(ctx, s, f, newName)
+ if err != nil {
+ return nil, err
+ }
+
+ // Update the last component of the file's enclosing directory.
+ oldBase := filepath.Dir(f.URI().Filename())
+ newPkgDir := filepath.Join(filepath.Dir(oldBase), string(newName))
+
+ // Update any affected replace directives in go.mod files.
+ // TODO(adonovan): extract into its own function.
+ //
+ // TODO: should this operate on all go.mod files, irrespective of whether they are included in the workspace?
+ // Get all active mod files in the workspace
+ modFiles := s.ModFiles()
+ for _, m := range modFiles {
+ fh, err := s.GetFile(ctx, m)
+ if err != nil {
+ return nil, err
+ }
+ pm, err := s.ParseMod(ctx, fh)
+ if err != nil {
+ return nil, err
+ }
+
+ modFileDir := filepath.Dir(pm.URI.Filename())
+ affectedReplaces := []*modfile.Replace{}
+
+ // Check if any replace directives need to be fixed
+ for _, r := range pm.File.Replace {
+ if !strings.HasPrefix(r.New.Path, "/") && !strings.HasPrefix(r.New.Path, "./") && !strings.HasPrefix(r.New.Path, "../") {
+ continue
+ }
+
+ replacedPath := r.New.Path
+ if strings.HasPrefix(r.New.Path, "./") || strings.HasPrefix(r.New.Path, "../") {
+ replacedPath = filepath.Join(modFileDir, r.New.Path)
+ }
+
+ // TODO: Is there a risk of converting a '\' delimited replacement to a '/' delimited replacement?
+ if !strings.HasPrefix(filepath.ToSlash(replacedPath)+"/", filepath.ToSlash(oldBase)+"/") {
+ continue // not affected by the package renaming
+ }
+
+ affectedReplaces = append(affectedReplaces, r)
+ }
+
+ if len(affectedReplaces) == 0 {
+ continue
+ }
+ copied, err := modfile.Parse("", pm.Mapper.Content, nil)
+ if err != nil {
+ return nil, err
+ }
+
+ for _, r := range affectedReplaces {
+ replacedPath := r.New.Path
+ if strings.HasPrefix(r.New.Path, "./") || strings.HasPrefix(r.New.Path, "../") {
+ replacedPath = filepath.Join(modFileDir, r.New.Path)
+ }
+
+ suffix := strings.TrimPrefix(replacedPath, string(oldBase))
+
+ newReplacedPath, err := filepath.Rel(modFileDir, newPkgDir+suffix)
+ if err != nil {
+ return nil, err
+ }
+
+ newReplacedPath = filepath.ToSlash(newReplacedPath)
+
+ if !strings.HasPrefix(newReplacedPath, "/") && !strings.HasPrefix(newReplacedPath, "../") {
+ newReplacedPath = "./" + newReplacedPath
+ }
+
+ if err := copied.AddReplace(r.Old.Path, "", newReplacedPath, ""); err != nil {
+ return nil, err
+ }
+ }
+
+ copied.Cleanup()
+ newContent, err := copied.Format()
+ if err != nil {
+ return nil, err
+ }
+
+ // Calculate the edits to be made due to the change.
+ edits := s.View().Options().ComputeEdits(string(pm.Mapper.Content), string(newContent))
+ renamingEdits[pm.URI] = append(renamingEdits[pm.URI], edits...)
+ }
+
+ return renamingEdits, nil
+}
+
+// renamePackage computes all workspace edits required to rename the package
+// described by the given metadata, to newName, by renaming its package
+// directory.
+//
+// It updates package clauses and import paths for the renamed package as well
+// as any other packages affected by the directory renaming among packages
+// described by allMetadata.
+func renamePackage(ctx context.Context, s Snapshot, f FileHandle, newName PackageName) (map[span.URI][]diff.Edit, error) {
+ if strings.HasSuffix(string(newName), "_test") {
+ return nil, fmt.Errorf("cannot rename to _test package")
+ }
+
+ // We need metadata for the relevant package and module paths.
+ // These should be the same for all packages containing the file.
+ metas, err := s.MetadataForFile(ctx, f.URI())
+ if err != nil {
+ return nil, err
+ }
+ if len(metas) == 0 {
+ return nil, fmt.Errorf("no packages found for file %q", f.URI())
+ }
+ meta := metas[0] // narrowest
+
+ oldPkgPath := meta.PkgPath
+ if meta.Module == nil {
+ return nil, fmt.Errorf("cannot rename package: missing module information for package %q", meta.PkgPath)
+ }
+ modulePath := PackagePath(meta.Module.Path)
+ if modulePath == oldPkgPath {
+ return nil, fmt.Errorf("cannot rename package: module path %q is the same as the package path, so renaming the package directory would have no effect", modulePath)
+ }
+
+ newPathPrefix := path.Join(path.Dir(string(oldPkgPath)), string(newName))
+
+ // We must inspect all packages, not just direct importers,
+ // because we also rename subpackages, which may be unrelated.
+ // (If the renamed package imports a subpackage it may require
+ // edits to both its package and import decls.)
+ allMetadata, err := s.AllMetadata(ctx)
+ if err != nil {
+ return nil, err
+ }
+
+ // Rename package and import declarations in all relevant packages.
+ edits := make(map[span.URI][]diff.Edit)
+ for _, m := range allMetadata {
+ // Special case: x_test packages for the renamed package will not have the
+ // package path as a dir prefix, but still need their package clauses
+ // renamed.
+ if m.PkgPath == oldPkgPath+"_test" {
+ if err := renamePackageClause(ctx, m, s, newName+"_test", edits); err != nil {
+ return nil, err
+ }
+ continue
+ }
+
+ // Subtle: check this condition before checking for valid module info
+ // below, because we should not fail this operation if unrelated packages
+ // lack module info.
+ if !strings.HasPrefix(string(m.PkgPath)+"/", string(oldPkgPath)+"/") {
+ continue // not affected by the package renaming
+ }
+
+ if m.Module == nil {
+ // This check will always fail under Bazel.
+ return nil, fmt.Errorf("cannot rename package: missing module information for package %q", m.PkgPath)
+ }
+
+ if modulePath != PackagePath(m.Module.Path) {
+ continue // don't edit imports if the nested and renamed packages have different module paths
+ }
+
+ // Renaming a package consists of changing its import path and package name.
+ suffix := strings.TrimPrefix(string(m.PkgPath), string(oldPkgPath))
+ newPath := newPathPrefix + suffix
+
+ pkgName := m.Name
+ if m.PkgPath == oldPkgPath {
+ pkgName = PackageName(newName)
+
+ if err := renamePackageClause(ctx, m, s, newName, edits); err != nil {
+ return nil, err
+ }
+ }
+
+ imp := ImportPath(newPath) // TODO(adonovan): what if newPath has vendor/ prefix?
+ if err := renameImports(ctx, s, m, imp, pkgName, edits); err != nil {
+ return nil, err
+ }
+ }
+
+ return edits, nil
+}
+
+// renamePackageClause computes edits renaming the package clause of files in
+// the package described by the given metadata, to newName.
+//
+// Edits are written into the edits map.
+func renamePackageClause(ctx context.Context, m *Metadata, snapshot Snapshot, newName PackageName, edits map[span.URI][]diff.Edit) error {
+ // Rename internal references to the package in the renaming package.
+ for _, uri := range m.CompiledGoFiles {
+ fh, err := snapshot.GetFile(ctx, uri)
+ if err != nil {
+ return err
+ }
+ f, err := snapshot.ParseGo(ctx, fh, ParseHeader)
+ if err != nil {
+ return err
+ }
+ if f.File.Name == nil {
+ continue // no package declaration
+ }
+
+ edit, err := posEdit(f.Tok, f.File.Name.Pos(), f.File.Name.End(), string(newName))
+ if err != nil {
+ return err
+ }
+ edits[f.URI] = append(edits[f.URI], edit)
+ }
+
+ return nil
+}
+
+// renameImports computes the set of edits to imports resulting from renaming
+// the package described by the given metadata, to a package with import path
+// newPath and name newName.
+//
+// Edits are written into the edits map.
+func renameImports(ctx context.Context, snapshot Snapshot, m *Metadata, newPath ImportPath, newName PackageName, allEdits map[span.URI][]diff.Edit) error {
+ rdeps, err := snapshot.ReverseDependencies(ctx, m.ID, false) // find direct importers
+ if err != nil {
+ return err
+ }
+
+ // Pass 1: rename import paths in import declarations.
+ needsTypeCheck := make(map[PackageID][]span.URI)
+ for _, rdep := range rdeps {
+ if rdep.IsIntermediateTestVariant() {
+ continue // for renaming, these variants are redundant
+ }
+
+ for _, uri := range rdep.CompiledGoFiles {
+ fh, err := snapshot.GetFile(ctx, uri)
+ if err != nil {
+ return err
+ }
+ f, err := snapshot.ParseGo(ctx, fh, ParseHeader)
+ if err != nil {
+ return err
+ }
+ if f.File.Name == nil {
+ continue // no package declaration
+ }
+ for _, imp := range f.File.Imports {
+ if rdep.DepsByImpPath[UnquoteImportPath(imp)] != m.ID {
+ continue // not the import we're looking for
+ }
+
+ // If the import does not explicitly specify
+ // a local name, then we need to invoke the
+ // type checker to locate references to update.
+ //
+ // TODO(adonovan): is this actually true?
+ // Renaming an import with a local name can still
+ // cause conflicts: shadowing of built-ins, or of
+ // package-level decls in the same or another file.
+ if imp.Name == nil {
+ needsTypeCheck[rdep.ID] = append(needsTypeCheck[rdep.ID], uri)
+ }
+
+ // Create text edit for the import path (string literal).
+ edit, err := posEdit(f.Tok, imp.Path.Pos(), imp.Path.End(), strconv.Quote(string(newPath)))
+ if err != nil {
+ return err
+ }
+ allEdits[uri] = append(allEdits[uri], edit)
+ }
+ }
+ }
+
+ // If the imported package's name hasn't changed,
+ // we don't need to rename references within each file.
+ if newName == m.Name {
+ return nil
+ }
+
+ // Pass 2: rename local name (types.PkgName) of imported
+ // package throughout one or more files of the package.
+ ids := make([]PackageID, 0, len(needsTypeCheck))
+ for id := range needsTypeCheck {
+ ids = append(ids, id)
+ }
+ pkgs, err := snapshot.TypeCheck(ctx, ids...)
+ if err != nil {
+ return err
+ }
+ for i, id := range ids {
+ pkg := pkgs[i]
+ for _, uri := range needsTypeCheck[id] {
+ f, err := pkg.File(uri)
+ if err != nil {
+ return err
+ }
+ for _, imp := range f.File.Imports {
+ if imp.Name != nil {
+ continue // has explicit local name
+ }
+ if rdeps[id].DepsByImpPath[UnquoteImportPath(imp)] != m.ID {
+ continue // not the import we're looking for
+ }
+
+ pkgname := pkg.GetTypesInfo().Implicits[imp].(*types.PkgName)
+
+ pkgScope := pkg.GetTypes().Scope()
+ fileScope := pkg.GetTypesInfo().Scopes[f.File]
+
+ localName := string(newName)
+ try := 0
+
+ // Keep trying with fresh names until one succeeds.
+ //
+ // TODO(adonovan): fix: this loop is not sufficient to choose a name
+ // that is guaranteed to be conflict-free; renameObjects may still fail.
+ // So the retry loop should be around renameObjects, and we shouldn't
+ // bother with scopes here.
+ for fileScope.Lookup(localName) != nil || pkgScope.Lookup(localName) != nil {
+ try++
+ localName = fmt.Sprintf("%s%d", newName, try)
+ }
+
+ // renameObjects detects various conflicts, including:
+ // - new name conflicts with a package-level decl in this file;
+ // - new name hides a package-level decl in another file that
+ // is actually referenced in this file;
+ // - new name hides a built-in that is actually referenced
+ // in this file;
+ // - a reference in this file to the old package name would
+ // become shadowed by an intervening declaration that
+ // uses the new name.
+ // It returns the edits if no conflict was detected.
+ editMap, _, err := renameObjects(ctx, snapshot, localName, pkg, pkgname)
+ if err != nil {
+ return err
+ }
+
+ // If the chosen local package name matches the package's
+ // new name, delete the change that would have inserted
+ // an explicit local name, which is always the lexically
+ // first change.
+ if localName == string(newName) {
+ edits, ok := editMap[uri]
+ if !ok {
+ return fmt.Errorf("internal error: no changes for %s", uri)
+ }
+ diff.SortEdits(edits)
+ editMap[uri] = edits[1:]
+ }
+ for uri, edits := range editMap {
+ allEdits[uri] = append(allEdits[uri], edits...)
+ }
+ }
+ }
+ }
+ return nil
+}
+
+// renameObjects computes the edits to the type-checked syntax package pkg
+// required to rename a set of target objects to newName.
+//
+// It also returns the set of objects that were found (due to
+// corresponding methods and embedded fields) to require renaming as a
+// consequence of the requested renamings.
+//
+// It returns an error if the renaming would cause a conflict.
+func renameObjects(ctx context.Context, snapshot Snapshot, newName string, pkg Package, targets ...types.Object) (map[span.URI][]diff.Edit, map[types.Object]bool, error) {
+ r := renamer{
+ pkg: pkg,
+ objsToUpdate: make(map[types.Object]bool),
+ from: targets[0].Name(),
+ to: newName,
+ }
+
+ // A renaming initiated at an interface method indicates the
+ // intention to rename abstract and concrete methods as needed
+ // to preserve assignability.
+ // TODO(adonovan): pull this into the caller.
+ for _, obj := range targets {
+ if obj, ok := obj.(*types.Func); ok {
+ recv := obj.Type().(*types.Signature).Recv()
+ if recv != nil && types.IsInterface(recv.Type().Underlying()) {
+ r.changeMethods = true
+ break
+ }
+ }
+ }
+
+ // Check that the renaming of the identifier is ok.
+ for _, obj := range targets {
+ r.check(obj)
+ if len(r.conflicts) > 0 {
+ // Stop at first error.
+ return nil, nil, fmt.Errorf("%s", strings.Join(r.conflicts, "\n"))
+ }
+ }
+
+ editMap, err := r.update()
+ if err != nil {
+ return nil, nil, err
+ }
+
+ // Remove initial targets so that only 'consequences' remain.
+ for _, obj := range targets {
+ delete(r.objsToUpdate, obj)
+ }
+ return editMap, r.objsToUpdate, nil
+}
+
+// Rename all references to the target objects.
+func (r *renamer) update() (map[span.URI][]diff.Edit, error) {
+ result := make(map[span.URI][]diff.Edit)
+
+ // shouldUpdate reports whether obj is one of (or an
+ // instantiation of one of) the target objects.
+ shouldUpdate := func(obj types.Object) bool {
+ if r.objsToUpdate[obj] {
+ return true
+ }
+ if fn, ok := obj.(*types.Func); ok && r.objsToUpdate[funcOrigin(fn)] {
+ return true
+ }
+ return false
+ }
+
+ // Find all identifiers in the package that define or use a
+ // renamed object. We iterate over info as it is more efficient
+ // than calling ast.Inspect for each of r.pkg.CompiledGoFiles().
+ type item struct {
+ node ast.Node // Ident, ImportSpec (obj=PkgName), or CaseClause (obj=Var)
+ obj types.Object
+ isDef bool
+ }
+ var items []item
+ info := r.pkg.GetTypesInfo()
+ for id, obj := range info.Uses {
+ if shouldUpdate(obj) {
+ items = append(items, item{id, obj, false})
+ }
+ }
+ for id, obj := range info.Defs {
+ if shouldUpdate(obj) {
+ items = append(items, item{id, obj, true})
+ }
+ }
+ for node, obj := range info.Implicits {
+ if shouldUpdate(obj) {
+ switch node.(type) {
+ case *ast.ImportSpec, *ast.CaseClause:
+ items = append(items, item{node, obj, true})
+ }
+ }
+ }
+ sort.Slice(items, func(i, j int) bool {
+ return items[i].node.Pos() < items[j].node.Pos()
+ })
+
+ // Update each identifier.
+ for _, item := range items {
+ pgf, ok := enclosingFile(r.pkg, item.node.Pos())
+ if !ok {
+ bug.Reportf("edit does not belong to syntax of package %q", r.pkg)
+ continue
+ }
+
+ // Renaming a types.PkgName may result in the addition or removal of an identifier,
+ // so we deal with this separately.
+ if pkgName, ok := item.obj.(*types.PkgName); ok && item.isDef {
+ edit, err := r.updatePkgName(pgf, pkgName)
+ if err != nil {
+ return nil, err
+ }
+ result[pgf.URI] = append(result[pgf.URI], edit)
+ continue
+ }
+
+ // Workaround the unfortunate lack of a Var object
+ // for x in "switch x := expr.(type) {}" by adjusting
+ // the case clause to the switch ident.
+ // This may result in duplicate edits, but we de-dup later.
+ if _, ok := item.node.(*ast.CaseClause); ok {
+ path, _ := astutil.PathEnclosingInterval(pgf.File, item.obj.Pos(), item.obj.Pos())
+ item.node = path[0].(*ast.Ident)
+ }
+
+ // Replace the identifier with r.to.
+ edit, err := posEdit(pgf.Tok, item.node.Pos(), item.node.End(), r.to)
+ if err != nil {
+ return nil, err
+ }
+
+ result[pgf.URI] = append(result[pgf.URI], edit)
+
+ if !item.isDef { // uses do not have doc comments to update.
+ continue
+ }
+
+ doc := docComment(pgf, item.node.(*ast.Ident))
+ if doc == nil {
+ continue
+ }
+
+ // Perform the rename in doc comments declared in the original package.
+ // go/parser strips out \r\n returns from the comment text, so go
+ // line-by-line through the comment text to get the correct positions.
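+		// For example, renaming From to To rewrites the word "From" in
+		// a comment such as "// From returns ..." but leaves "FromFile"
+		// intact, since \b requires a word boundary on both sides.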
+ docRegexp := regexp.MustCompile(`\b` + r.from + `\b`) // valid identifier => valid regexp
+ for _, comment := range doc.List {
+ if isDirective(comment.Text) {
+ continue
+ }
+ // TODO(adonovan): why are we looping over lines?
+ // Just run the loop body once over the entire multiline comment.
+ lines := strings.Split(comment.Text, "\n")
+ tokFile := pgf.Tok
+ commentLine := tokFile.Line(comment.Pos())
+ uri := span.URIFromPath(tokFile.Name())
+ for i, line := range lines {
+ lineStart := comment.Pos()
+ if i > 0 {
+ lineStart = tokFile.LineStart(commentLine + i)
+ }
+ for _, locs := range docRegexp.FindAllIndex([]byte(line), -1) {
+ edit, err := posEdit(tokFile, lineStart+token.Pos(locs[0]), lineStart+token.Pos(locs[1]), r.to)
+ if err != nil {
+ return nil, err // can't happen
+ }
+ result[uri] = append(result[uri], edit)
+ }
+ }
+ }
+ }
+
+ return result, nil
+}
+
+// docComment returns the doc for an identifier within the specified file.
+func docComment(pgf *ParsedGoFile, id *ast.Ident) *ast.CommentGroup {
+ nodes, _ := astutil.PathEnclosingInterval(pgf.File, id.Pos(), id.End())
+ for _, node := range nodes {
+ switch decl := node.(type) {
+ case *ast.FuncDecl:
+ return decl.Doc
+ case *ast.Field:
+ return decl.Doc
+ case *ast.GenDecl:
+ return decl.Doc
+ // For {Type,Value}Spec, if the doc on the spec is absent,
+ // search for the enclosing GenDecl
+ case *ast.TypeSpec:
+ if decl.Doc != nil {
+ return decl.Doc
+ }
+ case *ast.ValueSpec:
+ if decl.Doc != nil {
+ return decl.Doc
+ }
+ case *ast.Ident:
+ case *ast.AssignStmt:
+		// *ast.AssignStmt has no associated comment group, so look for
+		// a comment group immediately above the identifier. Do this
+		// only for short variable declarations (:=).
+ if decl.Tok != token.DEFINE {
+ return nil
+ }
+
+ identLine := pgf.Tok.Line(id.Pos())
+ for _, comment := range nodes[len(nodes)-1].(*ast.File).Comments {
+ if comment.Pos() > id.Pos() {
+ // Comment is after the identifier.
+ continue
+ }
+
+ lastCommentLine := pgf.Tok.Line(comment.End())
+ if lastCommentLine+1 == identLine {
+ return comment
+ }
+ }
+ default:
+ return nil
+ }
+ }
+ return nil
+}
+
+// updatePkgName returns the updates to rename a pkgName in the import spec by
+// only modifying the package name portion of the import declaration.
+func (r *renamer) updatePkgName(pgf *ParsedGoFile, pkgName *types.PkgName) (diff.Edit, error) {
+ // Modify ImportSpec syntax to add or remove the Name as needed.
+ path, _ := astutil.PathEnclosingInterval(pgf.File, pkgName.Pos(), pkgName.Pos())
+ if len(path) < 2 {
+ return diff.Edit{}, fmt.Errorf("no path enclosing interval for %s", pkgName.Name())
+ }
+ spec, ok := path[1].(*ast.ImportSpec)
+ if !ok {
+ return diff.Edit{}, fmt.Errorf("failed to update PkgName for %s", pkgName.Name())
+ }
+
+ newText := ""
+ if pkgName.Imported().Name() != r.to {
+ newText = r.to + " "
+ }
+
+ // Replace the portion (possibly empty) of the spec before the path:
+ // local "path" or "path"
+ // -> <- -><-
+ return posEdit(pgf.Tok, spec.Pos(), spec.Path.Pos(), newText)
+}
+
+// parsePackageNameDecl is a convenience function that parses and
+// returns the package name declaration of file fh, and reports
+// whether the position ppos lies within it.
+//
+// Note: also used by references2.
+func parsePackageNameDecl(ctx context.Context, snapshot Snapshot, fh FileHandle, ppos protocol.Position) (*ParsedGoFile, bool, error) {
+ pgf, err := snapshot.ParseGo(ctx, fh, ParseHeader)
+ if err != nil {
+ return nil, false, err
+ }
+ // Careful: because we used ParseHeader,
+	// pgf.PositionPos(ppos) may be beyond EOF => (0, err).
+ pos, _ := pgf.PositionPos(ppos)
+ return pgf, pgf.File.Name.Pos() <= pos && pos <= pgf.File.Name.End(), nil
+}
+
+// enclosingFile returns the CompiledGoFile of pkg that contains the specified position.
+func enclosingFile(pkg Package, pos token.Pos) (*ParsedGoFile, bool) {
+ for _, pgf := range pkg.CompiledGoFiles() {
+ if pgf.File.Pos() <= pos && pos <= pgf.File.End() {
+ return pgf, true
+ }
+ }
+ return nil, false
+}
+
+// posEdit returns an edit to replace the (start, end) range of tf with 'new'.
+func posEdit(tf *token.File, start, end token.Pos, new string) (diff.Edit, error) {
+ startOffset, endOffset, err := safetoken.Offsets(tf, start, end)
+ if err != nil {
+ return diff.Edit{}, err
+ }
+ return diff.Edit{Start: startOffset, End: endOffset, New: new}, nil
+}
diff --git a/gopls/internal/lsp/source/rename_check.go b/gopls/internal/lsp/source/rename_check.go
new file mode 100644
index 000000000..a858bb7fa
--- /dev/null
+++ b/gopls/internal/lsp/source/rename_check.go
@@ -0,0 +1,921 @@
+// Copyright 2019 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+//
+// Taken from golang.org/x/tools/refactor/rename.
+
+package source
+
+// This file defines the conflict-checking portion of the rename operation.
+//
+// The renamer works on a single package of type-checked syntax, and
+// is called in parallel for all necessary packages in the workspace,
+// possibly up to the transitive reverse dependencies of the
+// declaration. Finally, the union of all edits and errors is computed.
+//
+// Renaming one object may entail renaming of others. For example:
+//
+// - An embedded field couples a Var (field) and a TypeName.
+// So, renaming either one requires renaming the other.
+// If the initial object is an embedded field, we must add its
+// TypeName (and its enclosing package) to the renaming set;
+// this is easily discovered at the outset.
+//
+// Conversely, if the initial object is a TypeName, we must observe
+// whether any of its references (from directly importing packages)
+// is coincident with an embedded field Var and, if so, initiate a
+// renaming of it.
+//
+// - A method of an interface type is coupled to all corresponding
+// methods of types that are assigned to the interface (as
+// discovered by the 'satisfy' pass). As a matter of usability, we
+// require that such renamings be initiated from the interface
+// method, not the concrete method.
+
+import (
+ "fmt"
+ "go/ast"
+ "go/token"
+ "go/types"
+ "path/filepath"
+ "reflect"
+ "strings"
+ "unicode"
+
+ "golang.org/x/tools/go/ast/astutil"
+ "golang.org/x/tools/gopls/internal/lsp/safetoken"
+ "golang.org/x/tools/refactor/satisfy"
+)
+
+// errorf reports an error (e.g. conflict) and prevents file modification.
+func (r *renamer) errorf(pos token.Pos, format string, args ...interface{}) {
+ // Conflict error messages in the old gorename tool (whence this
+ // logic originated) contain rich information associated with
+ // multiple source lines, such as:
+ //
+ // p/a.go:1:2: renaming "x" to "y" here
+ // p/b.go:3:4: \t would cause this reference to "y"
+ // p/c.go:5:5: \t to become shadowed by this intervening declaration.
+ //
+ // Unfortunately LSP provides no means to transmit the
+ // structure of this error, so we format the positions briefly
+ // using dir/file.go where dir is the base name of the parent
+ // directory.
+
+ var conflict strings.Builder
+
+ // Add prefix of (truncated) position.
+ if pos != token.NoPos {
+ // TODO(adonovan): skip position of first error if it is
+ // on the same line as the renaming itself.
+ posn := safetoken.StartPosition(r.pkg.FileSet(), pos).String()
+ segments := strings.Split(filepath.ToSlash(posn), "/")
+ if n := len(segments); n > 2 {
+ segments = segments[n-2:]
+ }
+ posn = strings.Join(segments, "/")
+ fmt.Fprintf(&conflict, "%s:", posn)
+
+ if !strings.HasPrefix(format, "\t") {
+ conflict.WriteByte(' ')
+ }
+ }
+
+ fmt.Fprintf(&conflict, format, args...)
+ r.conflicts = append(r.conflicts, conflict.String())
+}
+
+// check performs safety checks of the renaming of the 'from' object to r.to.
+func (r *renamer) check(from types.Object) {
+ if r.objsToUpdate[from] {
+ return
+ }
+ r.objsToUpdate[from] = true
+
+ // NB: order of conditions is important.
+ if from_, ok := from.(*types.PkgName); ok {
+ r.checkInFileBlock(from_)
+ } else if from_, ok := from.(*types.Label); ok {
+ r.checkLabel(from_)
+ } else if isPackageLevel(from) {
+ r.checkInPackageBlock(from)
+ } else if v, ok := from.(*types.Var); ok && v.IsField() {
+ r.checkStructField(v)
+ } else if f, ok := from.(*types.Func); ok && recv(f) != nil {
+ r.checkMethod(f)
+ } else if isLocal(from) {
+ r.checkInLexicalScope(from)
+ } else {
+ r.errorf(from.Pos(), "unexpected %s object %q (please report a bug)\n",
+ objectKind(from), from)
+ }
+}
+
+// checkInFileBlock performs safety checks for renames of objects in the file block,
+// i.e. imported package names.
+func (r *renamer) checkInFileBlock(from *types.PkgName) {
+ // Check import name is not "init".
+ if r.to == "init" {
+ r.errorf(from.Pos(), "%q is not a valid imported package name", r.to)
+ }
+
+ // Check for conflicts between file and package block.
+ if prev := from.Pkg().Scope().Lookup(r.to); prev != nil {
+ r.errorf(from.Pos(), "renaming this %s %q to %q would conflict",
+ objectKind(from), from.Name(), r.to)
+ r.errorf(prev.Pos(), "\twith this package member %s",
+ objectKind(prev))
+		return // since checkInLexicalScope would report redundant errors
+ }
+
+ // Check for conflicts in lexical scope.
+ r.checkInLexicalScope(from)
+}
+
+// checkInPackageBlock performs safety checks for renames of
+// func/var/const/type objects in the package block.
+func (r *renamer) checkInPackageBlock(from types.Object) {
+ // Check that there are no references to the name from another
+ // package if the renaming would make it unexported.
+ if typ := r.pkg.GetTypes(); typ != from.Pkg() && ast.IsExported(r.from) && !ast.IsExported(r.to) {
+ if id := someUse(r.pkg.GetTypesInfo(), from); id != nil {
+ r.checkExport(id, typ, from)
+ }
+ }
+
+	// Check that, when renaming to "init", the object is a function
+	// that is never referenced.
+ if r.to == "init" {
+ kind := objectKind(from)
+ if kind == "func" {
+ // Reject if intra-package references to it exist.
+ for id, obj := range r.pkg.GetTypesInfo().Uses {
+ if obj == from {
+ r.errorf(from.Pos(),
+ "renaming this func %q to %q would make it a package initializer",
+ from.Name(), r.to)
+ r.errorf(id.Pos(), "\tbut references to it exist")
+ break
+ }
+ }
+ } else {
+ r.errorf(from.Pos(), "you cannot have a %s at package level named %q",
+ kind, r.to)
+ }
+ }
+
+ // Check for conflicts between package block and all file blocks.
+ for _, f := range r.pkg.GetSyntax() {
+ fileScope := r.pkg.GetTypesInfo().Scopes[f]
+ b, prev := fileScope.LookupParent(r.to, token.NoPos)
+ if b == fileScope {
+ r.errorf(from.Pos(), "renaming this %s %q to %q would conflict", objectKind(from), from.Name(), r.to)
+ var prevPos token.Pos
+ if prev != nil {
+ prevPos = prev.Pos()
+ }
+ r.errorf(prevPos, "\twith this %s", objectKind(prev))
+			return // since checkInLexicalScope would report redundant errors
+ }
+ }
+
+ // Check for conflicts in lexical scope.
+ r.checkInLexicalScope(from)
+}
+
+// checkInLexicalScope performs safety checks that a renaming does not
+// change the lexical reference structure of the specified package.
+//
+// For objects in lexical scope, there are three kinds of conflicts:
+// same-, sub-, and super-block conflicts. We will illustrate all three
+// using this example:
+//
+// var x int
+// var z int
+//
+// func f(y int) {
+// print(x)
+// print(y)
+// }
+//
+// Renaming x to z encounters a "same-block conflict", because an object
+// with the new name already exists, defined in the same lexical block
+// as the old object.
+//
+// Renaming x to y encounters a "sub-block conflict", because there exists
+// a reference to x from within (what would become) a hole in its scope.
+// The definition of y in an (inner) sub-block would cast a shadow in
+// the scope of the renamed variable.
+//
+// Renaming y to x encounters a "super-block conflict". This is the
+// converse situation: there is an existing definition of the new name
+// (x) in an (enclosing) super-block, and the renaming would create a
+// hole in its scope, within which there exist references to it. The
+// new name shadows the existing definition of x in the super-block.
+//
+// Removing the old name (and all references to it) is always safe, and
+// requires no checks.
+func (r *renamer) checkInLexicalScope(from types.Object) {
+ b := from.Parent() // the block defining the 'from' object
+ if b != nil {
+ toBlock, to := b.LookupParent(r.to, from.Parent().End())
+ if toBlock == b {
+ // same-block conflict
+ r.errorf(from.Pos(), "renaming this %s %q to %q",
+ objectKind(from), from.Name(), r.to)
+ r.errorf(to.Pos(), "\tconflicts with %s in same block",
+ objectKind(to))
+ return
+ } else if toBlock != nil {
+ // Check for super-block conflict.
+ // The name r.to is defined in a superblock.
+ // Is that name referenced from within this block?
+ forEachLexicalRef(r.pkg, to, func(id *ast.Ident, block *types.Scope) bool {
+ _, obj := block.LookupParent(from.Name(), id.Pos())
+ if obj == from {
+ // super-block conflict
+ r.errorf(from.Pos(), "renaming this %s %q to %q",
+ objectKind(from), from.Name(), r.to)
+ r.errorf(id.Pos(), "\twould shadow this reference")
+ r.errorf(to.Pos(), "\tto the %s declared here",
+ objectKind(to))
+ return false // stop
+ }
+ return true
+ })
+ }
+ }
+ // Check for sub-block conflict.
+ // Is there an intervening definition of r.to between
+ // the block defining 'from' and some reference to it?
+ forEachLexicalRef(r.pkg, from, func(id *ast.Ident, block *types.Scope) bool {
+ // Find the block that defines the found reference.
+ // It may be an ancestor.
+ fromBlock, _ := block.LookupParent(from.Name(), id.Pos())
+ // See what r.to would resolve to in the same scope.
+ toBlock, to := block.LookupParent(r.to, id.Pos())
+ if to != nil {
+ // sub-block conflict
+ if deeper(toBlock, fromBlock) {
+ r.errorf(from.Pos(), "renaming this %s %q to %q",
+ objectKind(from), from.Name(), r.to)
+ r.errorf(id.Pos(), "\twould cause this reference to become shadowed")
+ r.errorf(to.Pos(), "\tby this intervening %s definition",
+ objectKind(to))
+ return false // stop
+ }
+ }
+ return true
+ })
+
+ // Renaming a type that is used as an embedded field
+ // requires renaming the field too. e.g.
+ // type T int // if we rename this to U..
+ // var s struct {T}
+ // print(s.T) // ...this must change too
+ if _, ok := from.(*types.TypeName); ok {
+ for id, obj := range r.pkg.GetTypesInfo().Uses {
+ if obj == from {
+ if field := r.pkg.GetTypesInfo().Defs[id]; field != nil {
+ r.check(field)
+ }
+ }
+ }
+ }
+}
+
+// deeper reports whether block x is lexically deeper than y.
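+// For example, a statement block is deeper than the function scope that
+// encloses it, which is in turn deeper than the package scope.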
+func deeper(x, y *types.Scope) bool {
+ if x == y || x == nil {
+ return false
+ } else if y == nil {
+ return true
+ } else {
+ return deeper(x.Parent(), y.Parent())
+ }
+}
+
+// forEachLexicalRef calls fn(id, block) for each identifier id in package
+// pkg that is a reference to obj in lexical scope. block is the
+// lexical block enclosing the reference. If fn returns false the
+// iteration is terminated and forEachLexicalRef returns false.
+func forEachLexicalRef(pkg Package, obj types.Object, fn func(id *ast.Ident, block *types.Scope) bool) bool {
+ ok := true
+ var stack []ast.Node
+
+ var visit func(n ast.Node) bool
+ visit = func(n ast.Node) bool {
+ if n == nil {
+ stack = stack[:len(stack)-1] // pop
+ return false
+ }
+ if !ok {
+ return false // bail out
+ }
+
+ stack = append(stack, n) // push
+ switch n := n.(type) {
+ case *ast.Ident:
+ if pkg.GetTypesInfo().Uses[n] == obj {
+ block := enclosingBlock(pkg.GetTypesInfo(), stack)
+ if !fn(n, block) {
+ ok = false
+ }
+ }
+ return visit(nil) // pop stack
+
+ case *ast.SelectorExpr:
+ // don't visit n.Sel
+ ast.Inspect(n.X, visit)
+ return visit(nil) // pop stack, don't descend
+
+ case *ast.CompositeLit:
+ // Handle recursion ourselves for struct literals
+ // so we don't visit field identifiers.
+ tv, ok := pkg.GetTypesInfo().Types[n]
+ if !ok {
+ return visit(nil) // pop stack, don't descend
+ }
+ if _, ok := Deref(tv.Type).Underlying().(*types.Struct); ok {
+ if n.Type != nil {
+ ast.Inspect(n.Type, visit)
+ }
+ for _, elt := range n.Elts {
+ if kv, ok := elt.(*ast.KeyValueExpr); ok {
+ ast.Inspect(kv.Value, visit)
+ } else {
+ ast.Inspect(elt, visit)
+ }
+ }
+ return visit(nil) // pop stack, don't descend
+ }
+ }
+ return true
+ }
+
+ for _, f := range pkg.GetSyntax() {
+ ast.Inspect(f, visit)
+ if len(stack) != 0 {
+ panic(stack)
+ }
+ if !ok {
+ break
+ }
+ }
+ return ok
+}
+
+// enclosingBlock returns the innermost block enclosing the specified
+// AST node, which is given as a path from the root of the file,
+// [file...n].
+func enclosingBlock(info *types.Info, stack []ast.Node) *types.Scope {
+ for i := range stack {
+ n := stack[len(stack)-1-i]
+ // For some reason, go/types always associates a
+ // function's scope with its FuncType.
+ // TODO(adonovan): feature or a bug?
+ switch f := n.(type) {
+ case *ast.FuncDecl:
+ n = f.Type
+ case *ast.FuncLit:
+ n = f.Type
+ }
+ if b := info.Scopes[n]; b != nil {
+ return b
+ }
+ }
+ panic("no Scope for *ast.File")
+}
+
+func (r *renamer) checkLabel(label *types.Label) {
+ // Check there are no identical labels in the function's label block.
+ // (Label blocks don't nest, so this is easy.)
+ if prev := label.Parent().Lookup(r.to); prev != nil {
+ r.errorf(label.Pos(), "renaming this label %q to %q", label.Name(), prev.Name())
+ r.errorf(prev.Pos(), "\twould conflict with this one")
+ }
+}
+
+// checkStructField checks that the field renaming will not cause
+// conflicts at its declaration, or ambiguity or changes to any selection.
+func (r *renamer) checkStructField(from *types.Var) {
+ // Check that the struct declaration is free of field conflicts,
+ // and field/method conflicts.
+
+ // go/types offers no easy way to get from a field (or interface
+ // method) to its declaring struct (or interface), so we must
+ // ascend the AST.
+ pgf, ok := enclosingFile(r.pkg, from.Pos())
+ if !ok {
+ return // not declared by syntax of this package
+ }
+ path, _ := astutil.PathEnclosingInterval(pgf.File, from.Pos(), from.Pos())
+ // path matches this pattern:
+ // [Ident SelectorExpr? StarExpr? Field FieldList StructType ParenExpr* ... File]
+
+ // Ascend to FieldList.
+ var i int
+ for {
+ if _, ok := path[i].(*ast.FieldList); ok {
+ break
+ }
+ i++
+ }
+ i++
+ tStruct := path[i].(*ast.StructType)
+ i++
+ // Ascend past parens (unlikely).
+ for {
+ _, ok := path[i].(*ast.ParenExpr)
+ if !ok {
+ break
+ }
+ i++
+ }
+ if spec, ok := path[i].(*ast.TypeSpec); ok {
+ // This struct is also a named type.
+ // We must check for direct (non-promoted) field/field
+ // and method/field conflicts.
+ named := r.pkg.GetTypesInfo().Defs[spec.Name].Type()
+ prev, indices, _ := types.LookupFieldOrMethod(named, true, r.pkg.GetTypes(), r.to)
+ if len(indices) == 1 {
+ r.errorf(from.Pos(), "renaming this field %q to %q",
+ from.Name(), r.to)
+ r.errorf(prev.Pos(), "\twould conflict with this %s",
+ objectKind(prev))
+ return // skip checkSelections to avoid redundant errors
+ }
+ } else {
+ // This struct is not a named type.
+ // We need only check for direct (non-promoted) field/field conflicts.
+ T := r.pkg.GetTypesInfo().Types[tStruct].Type.Underlying().(*types.Struct)
+ for i := 0; i < T.NumFields(); i++ {
+ if prev := T.Field(i); prev.Name() == r.to {
+ r.errorf(from.Pos(), "renaming this field %q to %q",
+ from.Name(), r.to)
+ r.errorf(prev.Pos(), "\twould conflict with this field")
+ return // skip checkSelections to avoid redundant errors
+ }
+ }
+ }
+
+ // Renaming an anonymous field requires renaming the type too. e.g.
+ // print(s.T) // if we rename T to U,
+ // type T int // this and
+ // var s struct {T} // this must change too.
+ if from.Anonymous() {
+ if named, ok := from.Type().(*types.Named); ok {
+ r.check(named.Obj())
+ } else if named, ok := Deref(from.Type()).(*types.Named); ok {
+ r.check(named.Obj())
+ }
+ }
+
+ // Check integrity of existing (field and method) selections.
+ r.checkSelections(from)
+}
+
+// checkSelections checks that all uses and selections that resolve to
+// the specified object would continue to do so after the renaming.
+func (r *renamer) checkSelections(from types.Object) {
+ pkg := r.pkg
+ typ := pkg.GetTypes()
+ {
+ if id := someUse(pkg.GetTypesInfo(), from); id != nil {
+ if !r.checkExport(id, typ, from) {
+ return
+ }
+ }
+
+ for syntax, sel := range pkg.GetTypesInfo().Selections {
+ // There may be extant selections of only the old
+ // name or only the new name, so we must check both.
+ // (If neither, the renaming is sound.)
+ //
+ // In both cases, we wish to compare the lengths
+ // of the implicit field path (Selection.Index)
+ // to see if the renaming would change it.
+ //
+ // If a selection that resolves to 'from', when renamed,
+ // would yield a path of the same or shorter length,
+ // this indicates ambiguity or a changed referent,
+ // analogous to same- or sub-block lexical conflict.
+ //
+ // If a selection using the name 'to' would
+ // yield a path of the same or shorter length,
+ // this indicates ambiguity or shadowing,
+ // analogous to same- or super-block lexical conflict.
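+			// For example, given
+			//	type T struct{ X int }
+			//	type U struct{ T }
+			// the selection u.X resolves through the embedded field with
+			// an implicit path of length 2; a new direct member of U
+			// named "X" would resolve with a path of length 1, shadowing
+			// the promoted field.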
+
+ // TODO(adonovan): fix: derive from Types[syntax.X].Mode
+ // TODO(adonovan): test with pointer, value, addressable value.
+ isAddressable := true
+
+ if sel.Obj() == from {
+ if obj, indices, _ := types.LookupFieldOrMethod(sel.Recv(), isAddressable, from.Pkg(), r.to); obj != nil {
+ // Renaming this existing selection of
+ // 'from' may block access to an existing
+ // type member named 'to'.
+ delta := len(indices) - len(sel.Index())
+ if delta > 0 {
+ continue // no ambiguity
+ }
+ r.selectionConflict(from, delta, syntax, obj)
+ return
+ }
+ } else if sel.Obj().Name() == r.to {
+ if obj, indices, _ := types.LookupFieldOrMethod(sel.Recv(), isAddressable, from.Pkg(), from.Name()); obj == from {
+ // Renaming 'from' may cause this existing
+ // selection of the name 'to' to change
+ // its meaning.
+ delta := len(indices) - len(sel.Index())
+ if delta > 0 {
+ continue // no ambiguity
+ }
+ r.selectionConflict(from, -delta, syntax, sel.Obj())
+ return
+ }
+ }
+ }
+ }
+}
+
+func (r *renamer) selectionConflict(from types.Object, delta int, syntax *ast.SelectorExpr, obj types.Object) {
+ r.errorf(from.Pos(), "renaming this %s %q to %q",
+ objectKind(from), from.Name(), r.to)
+
+ switch {
+ case delta < 0:
+ // analogous to sub-block conflict
+ r.errorf(syntax.Sel.Pos(),
+ "\twould change the referent of this selection")
+ r.errorf(obj.Pos(), "\tof this %s", objectKind(obj))
+ case delta == 0:
+ // analogous to same-block conflict
+ r.errorf(syntax.Sel.Pos(),
+ "\twould make this reference ambiguous")
+ r.errorf(obj.Pos(), "\twith this %s", objectKind(obj))
+ case delta > 0:
+ // analogous to super-block conflict
+ r.errorf(syntax.Sel.Pos(),
+ "\twould shadow this selection")
+ r.errorf(obj.Pos(), "\tof the %s declared here",
+ objectKind(obj))
+ }
+}
+
+// checkMethod performs safety checks for renaming a method.
+// There are three hazards:
+// - declaration conflicts
+// - selection ambiguity/changes
+// - entailed renamings of assignable concrete/interface types.
+//
+// We reject renamings initiated at concrete methods if it would
+// change the assignability relation. For renamings of abstract
+// methods, we rename all methods transitively coupled to it via
+// assignability.
+func (r *renamer) checkMethod(from *types.Func) {
+ // e.g. error.Error
+ if from.Pkg() == nil {
+ r.errorf(from.Pos(), "you cannot rename built-in method %s", from)
+ return
+ }
+
+ // ASSIGNABILITY: We reject renamings of concrete methods that
+ // would break a 'satisfy' constraint; but renamings of abstract
+ // methods are allowed to proceed, and we rename affected
+ // concrete and abstract methods as necessary. It is the
+ // initial method that determines the policy.
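+	// For example, a rename initiated at an abstract method such as
+	// fmt.Stringer.String renames the String method of every type
+	// assigned to that interface, whereas a rename initiated at one of
+	// those concrete String methods is rejected.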
+
+ // Check for conflict at point of declaration.
+ // Check to ensure preservation of assignability requirements.
+ R := recv(from).Type()
+ if types.IsInterface(R) {
+ // Abstract method
+
+ // declaration
+ prev, _, _ := types.LookupFieldOrMethod(R, false, from.Pkg(), r.to)
+ if prev != nil {
+ r.errorf(from.Pos(), "renaming this interface method %q to %q",
+ from.Name(), r.to)
+ r.errorf(prev.Pos(), "\twould conflict with this method")
+ return
+ }
+
+ // Check all interfaces that embed this one for
+ // declaration conflicts too.
+ {
+ // Start with named interface types (better errors)
+ for _, obj := range r.pkg.GetTypesInfo().Defs {
+ if obj, ok := obj.(*types.TypeName); ok && types.IsInterface(obj.Type()) {
+ f, _, _ := types.LookupFieldOrMethod(
+ obj.Type(), false, from.Pkg(), from.Name())
+ if f == nil {
+ continue
+ }
+ t, _, _ := types.LookupFieldOrMethod(
+ obj.Type(), false, from.Pkg(), r.to)
+ if t == nil {
+ continue
+ }
+ r.errorf(from.Pos(), "renaming this interface method %q to %q",
+ from.Name(), r.to)
+ r.errorf(t.Pos(), "\twould conflict with this method")
+ r.errorf(obj.Pos(), "\tin named interface type %q", obj.Name())
+ }
+ }
+
+ // Now look at all literal interface types (includes named ones again).
+ for e, tv := range r.pkg.GetTypesInfo().Types {
+ if e, ok := e.(*ast.InterfaceType); ok {
+ _ = e
+ _ = tv.Type.(*types.Interface)
+ // TODO(adonovan): implement same check as above.
+ }
+ }
+ }
+
+ // assignability
+ //
+ // Find the set of concrete or abstract methods directly
+ // coupled to abstract method 'from' by some
+ // satisfy.Constraint, and rename them too.
+ for key := range r.satisfy() {
+ // key = (lhs, rhs) where lhs is always an interface.
+
+ lsel := r.msets.MethodSet(key.LHS).Lookup(from.Pkg(), from.Name())
+ if lsel == nil {
+ continue
+ }
+ rmethods := r.msets.MethodSet(key.RHS)
+ rsel := rmethods.Lookup(from.Pkg(), from.Name())
+ if rsel == nil {
+ continue
+ }
+
+ // If both sides have a method of this name,
+ // and one of them is m, the other must be coupled.
+ var coupled *types.Func
+ switch from {
+ case lsel.Obj():
+ coupled = rsel.Obj().(*types.Func)
+ case rsel.Obj():
+ coupled = lsel.Obj().(*types.Func)
+ default:
+ continue
+ }
+
+ // We must treat concrete-to-interface
+ // constraints like an implicit selection C.f of
+ // each interface method I.f, and check that the
+ // renaming leaves the selection unchanged and
+ // unambiguous.
+ //
+ // Fun fact: the implicit selection of C.f
+ // type I interface{f()}
+ // type C struct{I}
+ // func (C) g()
+ // var _ I = C{} // here
+ // yields abstract method I.f. This can make error
+ // messages less than obvious.
+ //
+ if !types.IsInterface(key.RHS) {
+ // The logic below was derived from checkSelections.
+
+ rtosel := rmethods.Lookup(from.Pkg(), r.to)
+ if rtosel != nil {
+ rto := rtosel.Obj().(*types.Func)
+ delta := len(rsel.Index()) - len(rtosel.Index())
+ if delta < 0 {
+ continue // no ambiguity
+ }
+
+ // TODO(adonovan): record the constraint's position.
+ keyPos := token.NoPos
+
+ r.errorf(from.Pos(), "renaming this method %q to %q",
+ from.Name(), r.to)
+ if delta == 0 {
+ // analogous to same-block conflict
+ r.errorf(keyPos, "\twould make the %s method of %s invoked via interface %s ambiguous",
+ r.to, key.RHS, key.LHS)
+ r.errorf(rto.Pos(), "\twith (%s).%s",
+ recv(rto).Type(), r.to)
+ } else {
+ // analogous to super-block conflict
+ r.errorf(keyPos, "\twould change the %s method of %s invoked via interface %s",
+ r.to, key.RHS, key.LHS)
+ r.errorf(coupled.Pos(), "\tfrom (%s).%s",
+ recv(coupled).Type(), r.to)
+ r.errorf(rto.Pos(), "\tto (%s).%s",
+ recv(rto).Type(), r.to)
+ }
+ return // one error is enough
+ }
+ }
+
+ if !r.changeMethods {
+ // This should be unreachable.
+ r.errorf(from.Pos(), "internal error: during renaming of abstract method %s", from)
+			r.errorf(coupled.Pos(), "\tchangeMethods=false, coupled method=%s", coupled)
+ r.errorf(from.Pos(), "\tPlease file a bug report")
+ return
+ }
+
+ // Rename the coupled method to preserve assignability.
+ r.check(coupled)
+ }
+ } else {
+ // Concrete method
+
+ // declaration
+ prev, indices, _ := types.LookupFieldOrMethod(R, true, from.Pkg(), r.to)
+ if prev != nil && len(indices) == 1 {
+ r.errorf(from.Pos(), "renaming this method %q to %q",
+ from.Name(), r.to)
+ r.errorf(prev.Pos(), "\twould conflict with this %s",
+ objectKind(prev))
+ return
+ }
+
+ // assignability
+ //
+ // Find the set of abstract methods coupled to concrete
+ // method 'from' by some satisfy.Constraint, and rename
+ // them too.
+ //
+ // Coupling may be indirect, e.g. I.f <-> C.f via type D.
+ //
+ // type I interface {f()}
+ // type C int
+		// func (C) f()
+ // type D struct{C}
+ // var _ I = D{}
+ //
+ for key := range r.satisfy() {
+ // key = (lhs, rhs) where lhs is always an interface.
+ if types.IsInterface(key.RHS) {
+ continue
+ }
+ rsel := r.msets.MethodSet(key.RHS).Lookup(from.Pkg(), from.Name())
+ if rsel == nil || rsel.Obj() != from {
+ continue // rhs does not have the method
+ }
+ lsel := r.msets.MethodSet(key.LHS).Lookup(from.Pkg(), from.Name())
+ if lsel == nil {
+ continue
+ }
+ imeth := lsel.Obj().(*types.Func)
+
+ // imeth is the abstract method (e.g. I.f)
+ // and key.RHS is the concrete coupling type (e.g. D).
+ if !r.changeMethods {
+ r.errorf(from.Pos(), "renaming this method %q to %q",
+ from.Name(), r.to)
+ var pos token.Pos
+ var iface string
+
+ I := recv(imeth).Type()
+ if named, ok := I.(*types.Named); ok {
+ pos = named.Obj().Pos()
+ iface = "interface " + named.Obj().Name()
+ } else {
+ pos = from.Pos()
+ iface = I.String()
+ }
+ r.errorf(pos, "\twould make %s no longer assignable to %s",
+ key.RHS, iface)
+ r.errorf(imeth.Pos(), "\t(rename %s.%s if you intend to change both types)",
+ I, from.Name())
+ return // one error is enough
+ }
+
+ // Rename the coupled interface method to preserve assignability.
+ r.check(imeth)
+ }
+ }
+
+ // Check integrity of existing (field and method) selections.
+ // We skip this if there were errors above, to avoid redundant errors.
+ r.checkSelections(from)
+}
+
+func (r *renamer) checkExport(id *ast.Ident, pkg *types.Package, from types.Object) bool {
+ // Reject cross-package references if r.to is unexported.
+ // (Such references may be qualified identifiers or field/method
+ // selections.)
+ if !ast.IsExported(r.to) && pkg != from.Pkg() {
+ r.errorf(from.Pos(),
+ "renaming %q to %q would make it unexported",
+ from.Name(), r.to)
+ r.errorf(id.Pos(), "\tbreaking references from packages such as %q",
+ pkg.Path())
+ return false
+ }
+ return true
+}
+
+// satisfy returns the set of interface satisfaction constraints.
+func (r *renamer) satisfy() map[satisfy.Constraint]bool {
+ if r.satisfyConstraints == nil {
+ // Compute on demand: it's expensive.
+ var f satisfy.Finder
+ pkg := r.pkg
+ {
+ // From satisfy.Finder documentation:
+ //
+ // The package must be free of type errors, and
+ // info.{Defs,Uses,Selections,Types} must have been populated by the
+ // type-checker.
+ //
+			// Only proceed if the package is free of such errors.
+ if pkg.HasParseErrors() || pkg.HasTypeErrors() {
+ r.errorf(token.NoPos, // we don't have a position for this error.
+ "renaming %q to %q not possible because %q has errors",
+ r.from, r.to, pkg.Metadata().PkgPath)
+ return nil
+ }
+ f.Find(pkg.GetTypesInfo(), pkg.GetSyntax())
+ }
+ r.satisfyConstraints = f.Result
+ }
+ return r.satisfyConstraints
+}
+
+// -- helpers ----------------------------------------------------------
+
+// recv returns the method's receiver.
+func recv(meth *types.Func) *types.Var {
+ return meth.Type().(*types.Signature).Recv()
+}
+
+// someUse returns an arbitrary use of obj within info.
+func someUse(info *types.Info, obj types.Object) *ast.Ident {
+ for id, o := range info.Uses {
+ if o == obj {
+ return id
+ }
+ }
+ return nil
+}
+
+func objectKind(obj types.Object) string {
+ if obj == nil {
+ return "nil object"
+ }
+ switch obj := obj.(type) {
+ case *types.PkgName:
+ return "imported package name"
+ case *types.TypeName:
+ return "type"
+ case *types.Var:
+ if obj.IsField() {
+ return "field"
+ }
+ case *types.Func:
+ if obj.Type().(*types.Signature).Recv() != nil {
+ return "method"
+ }
+ }
+ // label, func, var, const
+ return strings.ToLower(strings.TrimPrefix(reflect.TypeOf(obj).String(), "*types."))
+}
+
+// isValidIdentifier reports whether id is a valid Go identifier.
+// NB: for renamings, blank ("_") is not considered valid.
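+// For example, "x1" and "π" are valid, while "", "_", "1x", and
+// keywords such as "func" are not.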
+func isValidIdentifier(id string) bool {
+ if id == "" || id == "_" {
+ return false
+ }
+ for i, r := range id {
+ if !isLetter(r) && (i == 0 || !isDigit(r)) {
+ return false
+ }
+ }
+ return token.Lookup(id) == token.IDENT
+}
+
+// isLocal reports whether obj is local to some function.
+// Precondition: not a struct field or interface method.
+func isLocal(obj types.Object) bool {
+ // [... 5=stmt 4=func 3=file 2=pkg 1=universe]
+ var depth int
+ for scope := obj.Parent(); scope != nil; scope = scope.Parent() {
+ depth++
+ }
+ return depth >= 4
+}
+
+func isPackageLevel(obj types.Object) bool {
+ if obj == nil {
+ return false
+ }
+ return obj.Pkg().Scope().Lookup(obj.Name()) == obj
+}
+
+// -- Plundered from go/scanner: ---------------------------------------
+
+func isLetter(ch rune) bool {
+ return 'a' <= ch && ch <= 'z' || 'A' <= ch && ch <= 'Z' || ch == '_' || ch >= 0x80 && unicode.IsLetter(ch)
+}
+
+func isDigit(ch rune) bool {
+ return '0' <= ch && ch <= '9' || ch >= 0x80 && unicode.IsDigit(ch)
+}
diff --git a/gopls/internal/lsp/source/signature_help.go b/gopls/internal/lsp/source/signature_help.go
new file mode 100644
index 000000000..716de2dd9
--- /dev/null
+++ b/gopls/internal/lsp/source/signature_help.go
@@ -0,0 +1,185 @@
+// Copyright 2018 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package source
+
+import (
+ "context"
+ "fmt"
+ "go/ast"
+ "go/token"
+ "go/types"
+ "strings"
+
+ "golang.org/x/tools/go/ast/astutil"
+ "golang.org/x/tools/gopls/internal/lsp/protocol"
+ "golang.org/x/tools/internal/event"
+)
+
+func SignatureHelp(ctx context.Context, snapshot Snapshot, fh FileHandle, position protocol.Position) (*protocol.SignatureInformation, int, error) {
+ ctx, done := event.Start(ctx, "source.SignatureHelp")
+ defer done()
+
+ // We need full type-checking here, as we must type-check function bodies in
+ // order to provide signature help at the requested position.
+ pkg, pgf, err := PackageForFile(ctx, snapshot, fh.URI(), NarrowestPackage)
+ if err != nil {
+ return nil, 0, fmt.Errorf("getting file for SignatureHelp: %w", err)
+ }
+ pos, err := pgf.PositionPos(position)
+ if err != nil {
+ return nil, 0, err
+ }
+ // Find a call expression surrounding the query position.
+ var callExpr *ast.CallExpr
+ path, _ := astutil.PathEnclosingInterval(pgf.File, pos, pos)
+ if path == nil {
+ return nil, 0, fmt.Errorf("cannot find node enclosing position")
+ }
+FindCall:
+ for _, node := range path {
+ switch node := node.(type) {
+ case *ast.CallExpr:
+ if pos >= node.Lparen && pos <= node.Rparen {
+ callExpr = node
+ break FindCall
+ }
+ case *ast.FuncLit, *ast.FuncType:
+ // The user is within an anonymous function,
+ // which may be the parameter to the *ast.CallExpr.
+ // Don't show signature help in this case.
+ return nil, 0, fmt.Errorf("no signature help within a function declaration")
+ case *ast.BasicLit:
+ if node.Kind == token.STRING {
+ return nil, 0, fmt.Errorf("no signature help within a string literal")
+ }
+ }
+	}
+ if callExpr == nil || callExpr.Fun == nil {
+ return nil, 0, fmt.Errorf("cannot find an enclosing function")
+ }
+
+ qf := Qualifier(pgf.File, pkg.GetTypes(), pkg.GetTypesInfo())
+
+ // Get the object representing the function, if available.
+ // There is no object in certain cases such as calling a function returned by
+ // a function (e.g. "foo()()").
+ var obj types.Object
+ switch t := callExpr.Fun.(type) {
+ case *ast.Ident:
+ obj = pkg.GetTypesInfo().ObjectOf(t)
+ case *ast.SelectorExpr:
+ obj = pkg.GetTypesInfo().ObjectOf(t.Sel)
+ }
+
+ // Handle builtin functions separately.
+ if obj, ok := obj.(*types.Builtin); ok {
+ return builtinSignature(ctx, snapshot, callExpr, obj.Name(), pos)
+ }
+
+ // Get the type information for the function being called.
+ sigType := pkg.GetTypesInfo().TypeOf(callExpr.Fun)
+ if sigType == nil {
+ return nil, 0, fmt.Errorf("cannot get type for Fun %[1]T (%[1]v)", callExpr.Fun)
+ }
+
+ sig, _ := sigType.Underlying().(*types.Signature)
+ if sig == nil {
+ return nil, 0, fmt.Errorf("cannot find signature for Fun %[1]T (%[1]v)", callExpr.Fun)
+ }
+
+ activeParam := activeParameter(callExpr, sig.Params().Len(), sig.Variadic(), pos)
+
+ var (
+ name string
+ comment *ast.CommentGroup
+ )
+ if obj != nil {
+ d, err := HoverDocForObject(ctx, snapshot, pkg.FileSet(), obj)
+ if err != nil {
+ return nil, 0, err
+ }
+ name = obj.Name()
+ comment = d
+ } else {
+ name = "func"
+ }
+ mq := MetadataQualifierForFile(snapshot, pgf.File, pkg.Metadata())
+ s, err := NewSignature(ctx, snapshot, pkg, sig, comment, qf, mq)
+ if err != nil {
+ return nil, 0, err
+ }
+ paramInfo := make([]protocol.ParameterInformation, 0, len(s.params))
+ for _, p := range s.params {
+ paramInfo = append(paramInfo, protocol.ParameterInformation{Label: p})
+ }
+ return &protocol.SignatureInformation{
+ Label: name + s.Format(),
+ Documentation: stringToSigInfoDocumentation(s.doc, snapshot.View().Options()),
+ Parameters: paramInfo,
+ }, activeParam, nil
+}
+
+func builtinSignature(ctx context.Context, snapshot Snapshot, callExpr *ast.CallExpr, name string, pos token.Pos) (*protocol.SignatureInformation, int, error) {
+ sig, err := NewBuiltinSignature(ctx, snapshot, name)
+ if err != nil {
+ return nil, 0, err
+ }
+ paramInfo := make([]protocol.ParameterInformation, 0, len(sig.params))
+ for _, p := range sig.params {
+ paramInfo = append(paramInfo, protocol.ParameterInformation{Label: p})
+ }
+ activeParam := activeParameter(callExpr, len(sig.params), sig.variadic, pos)
+ return &protocol.SignatureInformation{
+ Label: sig.name + sig.Format(),
+ Documentation: stringToSigInfoDocumentation(sig.doc, snapshot.View().Options()),
+ Parameters: paramInfo,
+ }, activeParam, nil
+}
+
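+// activeParameter returns the index of the parameter containing pos.
+// For example, in the call f(a, b, c), a position within b yields 1.
+// For a variadic function, the result never exceeds the index of the
+// final ...parameter.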
+func activeParameter(callExpr *ast.CallExpr, numParams int, variadic bool, pos token.Pos) (activeParam int) {
+ if len(callExpr.Args) == 0 {
+ return 0
+ }
+ // First, check if the position is even in the range of the arguments.
+ start, end := callExpr.Lparen, callExpr.Rparen
+ if !(start <= pos && pos <= end) {
+ return 0
+ }
+ for _, expr := range callExpr.Args {
+ if start == token.NoPos {
+ start = expr.Pos()
+ }
+ end = expr.End()
+ if start <= pos && pos <= end {
+ break
+ }
+ // Don't advance the active parameter for the last parameter of a variadic function.
+ if !variadic || activeParam < numParams-1 {
+ activeParam++
+ }
+ start = expr.Pos() + 1 // to account for commas
+ }
+ return activeParam
+}
+
+func stringToSigInfoDocumentation(s string, options *Options) *protocol.Or_SignatureInformation_documentation {
+ v := s
+ k := protocol.PlainText
+ if options.PreferredContentFormat == protocol.Markdown {
+ v = CommentToMarkdown(s, options)
+ // whether or not content is newline terminated may not matter for LSP clients,
+ // but our tests expect trailing newlines to be stripped.
+ v = strings.TrimSuffix(v, "\n") // TODO(pjw): change the golden files
+ k = protocol.Markdown
+ }
+ return &protocol.Or_SignatureInformation_documentation{
+ Value: protocol.MarkupContent{
+ Kind: k,
+ Value: v,
+ },
+ }
+}
diff --git a/gopls/internal/lsp/source/stub.go b/gopls/internal/lsp/source/stub.go
new file mode 100644
index 000000000..6bbc1dba2
--- /dev/null
+++ b/gopls/internal/lsp/source/stub.go
@@ -0,0 +1,238 @@
+// Copyright 2022 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package source
+
+import (
+ "bytes"
+ "context"
+ "fmt"
+ "go/format"
+ "go/parser"
+ "go/token"
+ "go/types"
+ "io"
+ "path"
+ "strings"
+
+ "golang.org/x/tools/go/analysis"
+ "golang.org/x/tools/go/ast/astutil"
+ "golang.org/x/tools/gopls/internal/lsp/analysis/stubmethods"
+ "golang.org/x/tools/gopls/internal/lsp/protocol"
+ "golang.org/x/tools/gopls/internal/lsp/safetoken"
+ "golang.org/x/tools/internal/bug"
+ "golang.org/x/tools/internal/typeparams"
+)
+
+// stubSuggestedFixFunc returns a suggested fix to declare the missing
+// methods of the concrete type that is assigned to an interface type
+// at the cursor position.
+func stubSuggestedFixFunc(ctx context.Context, snapshot Snapshot, fh FileHandle, rng protocol.Range) (*token.FileSet, *analysis.SuggestedFix, error) {
+ pkg, pgf, err := PackageForFile(ctx, snapshot, fh.URI(), NarrowestPackage)
+ if err != nil {
+		return nil, nil, fmt.Errorf("PackageForFile: %w", err)
+ }
+ start, end, err := pgf.RangePos(rng)
+ if err != nil {
+ return nil, nil, err
+ }
+ nodes, _ := astutil.PathEnclosingInterval(pgf.File, start, end)
+ si := stubmethods.GetStubInfo(pkg.FileSet(), pkg.GetTypesInfo(), nodes, start)
+ if si == nil {
+ return nil, nil, fmt.Errorf("nil interface request")
+ }
+ return stub(ctx, snapshot, si)
+}
+
+// stub returns a suggested fix to declare the missing methods of si.Concrete.
+func stub(ctx context.Context, snapshot Snapshot, si *stubmethods.StubInfo) (*token.FileSet, *analysis.SuggestedFix, error) {
+ // A function-local type cannot be stubbed
+ // since there's nowhere to put the methods.
+ conc := si.Concrete.Obj()
+ if conc.Parent() != conc.Pkg().Scope() {
+ return nil, nil, fmt.Errorf("local type %q cannot be stubbed", conc.Name())
+ }
+
+ // Parse the file declaring the concrete type.
+ declPGF, _, err := parseFull(ctx, snapshot, si.Fset, conc.Pos())
+ if err != nil {
+ return nil, nil, fmt.Errorf("failed to parse file %q declaring implementation type: %w", declPGF.URI, err)
+ }
+ if declPGF.Fixed {
+ return nil, nil, fmt.Errorf("file contains parse errors: %s", declPGF.URI)
+ }
+
+ // Build import environment for the declaring file.
+ importEnv := make(map[ImportPath]string) // value is local name
+ for _, imp := range declPGF.File.Imports {
+ importPath := UnquoteImportPath(imp)
+ var name string
+ if imp.Name != nil {
+ name = imp.Name.Name
+ if name == "_" {
+ continue
+ } else if name == "." {
+ name = "" // see types.Qualifier
+ }
+ } else {
+ // TODO(adonovan): may omit a vendor/ prefix; consult the Metadata.
+ name = path.Base(string(importPath))
+ }
+ importEnv[importPath] = name // latest alias wins
+ }
+
+ // Find subset of interface methods that the concrete type lacks.
+ var missing []*types.Func
+ ifaceType := si.Interface.Type().Underlying().(*types.Interface)
+ for i := 0; i < ifaceType.NumMethods(); i++ {
+ imethod := ifaceType.Method(i)
+ cmethod, _, _ := types.LookupFieldOrMethod(si.Concrete, si.Pointer, imethod.Pkg(), imethod.Name())
+ if cmethod == nil {
+ missing = append(missing, imethod)
+ continue
+ }
+
+ if _, ok := cmethod.(*types.Var); ok {
+ // len(LookupFieldOrMethod.index) = 1 => conflict, >1 => shadow.
+ return nil, nil, fmt.Errorf("adding method %s.%s would conflict with (or shadow) existing field",
+ conc.Name(), imethod.Name())
+ }
+
+ if !types.Identical(cmethod.Type(), imethod.Type()) {
+ return nil, nil, fmt.Errorf("method %s.%s already exists but has the wrong type: got %s, want %s",
+ conc.Name(), imethod.Name(), cmethod.Type(), imethod.Type())
+ }
+ }
+ if len(missing) == 0 {
+ return nil, nil, fmt.Errorf("no missing methods found")
+ }
+
+ // Create a package name qualifier that uses the
+ // locally appropriate imported package name.
+ // It records any needed new imports.
+ // TODO(adonovan): factor with source.FormatVarType, stubmethods.RelativeToFiles?
+ //
+ // Prior to CL 469155 this logic preserved any renaming
+ // imports from the file that declares the interface
+ // method--ostensibly the preferred name for imports of
+ // frequently renamed packages such as protobufs.
+ // Now we use the package's declared name. If this turns out
+ // to be a mistake, then use parseHeader(si.iface.Pos()).
+ //
+ type newImport struct{ name, importPath string }
+ var newImports []newImport // for AddNamedImport
+ qual := func(pkg *types.Package) string {
+ // TODO(adonovan): don't ignore vendor prefix.
+ importPath := ImportPath(pkg.Path())
+ name, ok := importEnv[importPath]
+ if !ok {
+ // Insert new import using package's declared name.
+ //
+ // TODO(adonovan): resolve conflict between declared
+ // name and existing file-level (declPGF.File.Imports)
+ // or package-level (si.Concrete.Pkg.Scope) decls by
+ // generating a fresh name.
+ name = pkg.Name()
+ importEnv[importPath] = name
+ new := newImport{importPath: string(importPath)}
+ // For clarity, use a renaming import whenever the
+ // local name does not match the path's last segment.
+ if name != path.Base(new.importPath) {
+ new.name = name
+ }
+ newImports = append(newImports, new)
+ }
+ return name
+ }
+
+ // Format interface name (used only in a comment).
+ iface := si.Interface.Name()
+ if ipkg := si.Interface.Pkg(); ipkg != nil && ipkg != conc.Pkg() {
+ iface = ipkg.Name() + "." + iface
+ }
+
+ // Pointer receiver?
+ var star string
+ if si.Pointer {
+ star = "*"
+ }
+
+ // Format the new methods.
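+	// Each method is rendered, roughly, as:
+	//
+	//	// Name implements pkg.Iface
+	//	func (*Conc) Name(...) ... {
+	//		panic("unimplemented")
+	//	}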
+ var newMethods bytes.Buffer
+ for _, method := range missing {
+ fmt.Fprintf(&newMethods, `// %s implements %s
+func (%s%s%s) %s%s {
+ panic("unimplemented")
+}
+`,
+ method.Name(),
+ iface,
+ star,
+ si.Concrete.Obj().Name(),
+ FormatTypeParams(typeparams.ForNamed(si.Concrete)),
+ method.Name(),
+ strings.TrimPrefix(types.TypeString(method.Type(), qual), "func"))
+ }
+
+ // Compute insertion point for new methods:
+ // after the top-level declaration enclosing the (package-level) type.
+ insertOffset, err := safetoken.Offset(declPGF.Tok, declPGF.File.End())
+ if err != nil {
+ return nil, nil, bug.Errorf("internal error: end position outside file bounds: %v", err)
+ }
+ concOffset, err := safetoken.Offset(si.Fset.File(conc.Pos()), conc.Pos())
+ if err != nil {
+ return nil, nil, bug.Errorf("internal error: finding type decl offset: %v", err)
+ }
+ for _, decl := range declPGF.File.Decls {
+ declEndOffset, err := safetoken.Offset(declPGF.Tok, decl.End())
+ if err != nil {
+ return nil, nil, bug.Errorf("internal error: finding decl offset: %v", err)
+ }
+ if declEndOffset > concOffset {
+ insertOffset = declEndOffset
+ break
+ }
+ }
+
+ // Splice the new methods into the file content.
+ var buf bytes.Buffer
+ input := declPGF.Mapper.Content // unfixed content of file
+ buf.Write(input[:insertOffset])
+ buf.WriteByte('\n')
+ io.Copy(&buf, &newMethods)
+ buf.Write(input[insertOffset:])
+
+ // Re-parse the file.
+ fset := token.NewFileSet()
+ newF, err := parser.ParseFile(fset, declPGF.File.Name.Name, buf.Bytes(), parser.ParseComments)
+ if err != nil {
+ return nil, nil, fmt.Errorf("could not reparse file: %w", err)
+ }
+
+ // Splice the new imports into the syntax tree.
+ for _, imp := range newImports {
+ astutil.AddNamedImport(fset, newF, imp.name, imp.importPath)
+ }
+
+ // Pretty-print.
+ var output strings.Builder
+ if err := format.Node(&output, fset, newF); err != nil {
+ return nil, nil, fmt.Errorf("format.Node: %w", err)
+ }
+
+ // Report the diff.
+ diffs := snapshot.View().Options().ComputeEdits(string(input), output.String())
+ var edits []analysis.TextEdit
+ for _, edit := range diffs {
+ edits = append(edits, analysis.TextEdit{
+ Pos: declPGF.Tok.Pos(edit.Start),
+ End: declPGF.Tok.Pos(edit.End),
+ NewText: []byte(edit.New),
+ })
+ }
+ return FileSetFor(declPGF.Tok), // edits use declPGF.Tok
+ &analysis.SuggestedFix{TextEdits: edits},
+ nil
+}
diff --git a/gopls/internal/lsp/source/symbols.go b/gopls/internal/lsp/source/symbols.go
new file mode 100644
index 000000000..a5c015e0a
--- /dev/null
+++ b/gopls/internal/lsp/source/symbols.go
@@ -0,0 +1,227 @@
+// Copyright 2019 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package source
+
+import (
+ "context"
+ "fmt"
+ "go/ast"
+ "go/token"
+ "go/types"
+
+ "golang.org/x/tools/gopls/internal/lsp/protocol"
+ "golang.org/x/tools/internal/event"
+)
+
+func DocumentSymbols(ctx context.Context, snapshot Snapshot, fh FileHandle) ([]protocol.DocumentSymbol, error) {
+ ctx, done := event.Start(ctx, "source.DocumentSymbols")
+ defer done()
+
+ pgf, err := snapshot.ParseGo(ctx, fh, ParseFull)
+ if err != nil {
+ return nil, fmt.Errorf("getting file for DocumentSymbols: %w", err)
+ }
+
+ // Build symbols for file declarations. When encountering a declaration with
+ // errors (typically because positions are invalid), we skip the declaration
+ // entirely. VS Code fails to show any symbols if one of the top-level
+ // symbols is missing position information.
+ var symbols []protocol.DocumentSymbol
+ for _, decl := range pgf.File.Decls {
+ switch decl := decl.(type) {
+ case *ast.FuncDecl:
+ if decl.Name.Name == "_" {
+ continue
+ }
+ fs, err := funcSymbol(pgf.Mapper, pgf.Tok, decl)
+ if err == nil {
+ // If function is a method, prepend the type of the method.
+ if decl.Recv != nil && len(decl.Recv.List) > 0 {
+ fs.Name = fmt.Sprintf("(%s).%s", types.ExprString(decl.Recv.List[0].Type), fs.Name)
+ }
+ symbols = append(symbols, fs)
+ }
+ case *ast.GenDecl:
+ for _, spec := range decl.Specs {
+ switch spec := spec.(type) {
+ case *ast.TypeSpec:
+ if spec.Name.Name == "_" {
+ continue
+ }
+ ts, err := typeSymbol(pgf.Mapper, pgf.Tok, spec)
+ if err == nil {
+ symbols = append(symbols, ts)
+ }
+ case *ast.ValueSpec:
+ for _, name := range spec.Names {
+ if name.Name == "_" {
+ continue
+ }
+ vs, err := varSymbol(pgf.Mapper, pgf.Tok, spec, name, decl.Tok == token.CONST)
+ if err == nil {
+ symbols = append(symbols, vs)
+ }
+ }
+ }
+ }
+ }
+ }
+ return symbols, nil
+}
+
+func funcSymbol(m *protocol.Mapper, tf *token.File, decl *ast.FuncDecl) (protocol.DocumentSymbol, error) {
+ s := protocol.DocumentSymbol{
+ Name: decl.Name.Name,
+ Kind: protocol.Function,
+ }
+ if decl.Recv != nil {
+ s.Kind = protocol.Method
+ }
+ var err error
+ s.Range, err = m.NodeRange(tf, decl)
+ if err != nil {
+ return protocol.DocumentSymbol{}, err
+ }
+ s.SelectionRange, err = m.NodeRange(tf, decl.Name)
+ if err != nil {
+ return protocol.DocumentSymbol{}, err
+ }
+ s.Detail = types.ExprString(decl.Type)
+ return s, nil
+}
+
+func typeSymbol(m *protocol.Mapper, tf *token.File, spec *ast.TypeSpec) (protocol.DocumentSymbol, error) {
+ s := protocol.DocumentSymbol{
+ Name: spec.Name.Name,
+ }
+ var err error
+ s.Range, err = m.NodeRange(tf, spec)
+ if err != nil {
+ return protocol.DocumentSymbol{}, err
+ }
+ s.SelectionRange, err = m.NodeRange(tf, spec.Name)
+ if err != nil {
+ return protocol.DocumentSymbol{}, err
+ }
+ s.Kind, s.Detail, s.Children = typeDetails(m, tf, spec.Type)
+ return s, nil
+}
+
+func typeDetails(m *protocol.Mapper, tf *token.File, typExpr ast.Expr) (kind protocol.SymbolKind, detail string, children []protocol.DocumentSymbol) {
+ switch typExpr := typExpr.(type) {
+ case *ast.StructType:
+ kind = protocol.Struct
+ children = fieldListSymbols(m, tf, typExpr.Fields, protocol.Field)
+ if len(children) > 0 {
+ detail = "struct{...}"
+ } else {
+ detail = "struct{}"
+ }
+
+ // Find interface methods and embedded types.
+ case *ast.InterfaceType:
+ kind = protocol.Interface
+ children = fieldListSymbols(m, tf, typExpr.Methods, protocol.Method)
+ if len(children) > 0 {
+ detail = "interface{...}"
+ } else {
+ detail = "interface{}"
+ }
+
+ case *ast.FuncType:
+ kind = protocol.Function
+ detail = types.ExprString(typExpr)
+
+ default:
+ kind = protocol.Class // catch-all, for cases where we don't know the kind syntactically
+ detail = types.ExprString(typExpr)
+ }
+ return
+}
+
+func fieldListSymbols(m *protocol.Mapper, tf *token.File, fields *ast.FieldList, fieldKind protocol.SymbolKind) []protocol.DocumentSymbol {
+ if fields == nil {
+ return nil
+ }
+
+ var symbols []protocol.DocumentSymbol
+ for _, field := range fields.List {
+ detail, children := "", []protocol.DocumentSymbol(nil)
+ if field.Type != nil {
+ _, detail, children = typeDetails(m, tf, field.Type)
+ }
+ if len(field.Names) == 0 { // embedded interface or struct field
+ // By default, use the formatted type details as the name of this field.
+ // This handles potentially invalid syntax, as well as type embeddings in
+ // interfaces.
+ child := protocol.DocumentSymbol{
+ Name: detail,
+ Kind: protocol.Field, // consider all embeddings to be fields
+ Children: children,
+ }
+
+ // If the field is a valid embedding, promote the type name to field
+ // name.
+ selection := field.Type
+ if id := embeddedIdent(field.Type); id != nil {
+ child.Name = id.Name
+ child.Detail = detail
+ selection = id
+ }
+
+ if rng, err := m.NodeRange(tf, field.Type); err == nil {
+ child.Range = rng
+ }
+ if rng, err := m.NodeRange(tf, selection); err == nil {
+ child.SelectionRange = rng
+ }
+
+ symbols = append(symbols, child)
+ } else {
+ for _, name := range field.Names {
+ child := protocol.DocumentSymbol{
+ Name: name.Name,
+ Kind: fieldKind,
+ Detail: detail,
+ Children: children,
+ }
+
+ if rng, err := m.NodeRange(tf, field); err == nil {
+ child.Range = rng
+ }
+ if rng, err := m.NodeRange(tf, name); err == nil {
+ child.SelectionRange = rng
+ }
+
+ symbols = append(symbols, child)
+ }
+ }
+	}
+ return symbols
+}
+
+func varSymbol(m *protocol.Mapper, tf *token.File, spec *ast.ValueSpec, name *ast.Ident, isConst bool) (protocol.DocumentSymbol, error) {
+ s := protocol.DocumentSymbol{
+ Name: name.Name,
+ Kind: protocol.Variable,
+ }
+ if isConst {
+ s.Kind = protocol.Constant
+ }
+ var err error
+ s.Range, err = m.NodeRange(tf, spec)
+ if err != nil {
+ return protocol.DocumentSymbol{}, err
+ }
+ s.SelectionRange, err = m.NodeRange(tf, name)
+ if err != nil {
+ return protocol.DocumentSymbol{}, err
+ }
+ if spec.Type != nil { // type may be missing from the syntax
+ _, s.Detail, s.Children = typeDetails(m, tf, spec.Type)
+ }
+ return s, nil
+}
diff --git a/gopls/internal/lsp/source/type_definition.go b/gopls/internal/lsp/source/type_definition.go
new file mode 100644
index 000000000..104b7accf
--- /dev/null
+++ b/gopls/internal/lsp/source/type_definition.go
@@ -0,0 +1,55 @@
+// Copyright 2023 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package source
+
+import (
+ "context"
+ "fmt"
+ "go/token"
+
+ "golang.org/x/tools/gopls/internal/lsp/protocol"
+ "golang.org/x/tools/internal/event"
+)
+
+// TypeDefinition handles the textDocument/typeDefinition request for Go files.
+func TypeDefinition(ctx context.Context, snapshot Snapshot, fh FileHandle, position protocol.Position) ([]protocol.Location, error) {
+ ctx, done := event.Start(ctx, "source.TypeDefinition")
+ defer done()
+
+ pkg, pgf, err := PackageForFile(ctx, snapshot, fh.URI(), NarrowestPackage)
+ if err != nil {
+ return nil, err
+ }
+ pos, err := pgf.PositionPos(position)
+ if err != nil {
+ return nil, err
+ }
+
+ // TODO(rfindley): handle type switch implicits correctly here: if the user
+ // jumps to the type definition of x in x := y.(type), it makes sense to jump
+ // to the type of y.
+ _, obj, _ := referencedObject(pkg, pgf, pos)
+ if obj == nil {
+ return nil, nil
+ }
+
+ typObj := typeToObject(obj.Type())
+ if typObj == nil {
+ return nil, fmt.Errorf("no type definition for %s", obj.Name())
+ }
+
+ // Identifiers with the type "error" are a special case with no position.
+ if hasErrorType(typObj) {
+ // TODO(rfindley): we can do better here, returning a link to the builtin
+ // file.
+ return nil, nil
+ }
+
+ loc, err := mapPosition(ctx, pkg.FileSet(), snapshot, typObj.Pos(), typObj.Pos()+token.Pos(len(typObj.Name())))
+ if err != nil {
+ return nil, err
+ }
+ return []protocol.Location{loc}, nil
+}
diff --git a/gopls/internal/lsp/source/types_format.go b/gopls/internal/lsp/source/types_format.go
new file mode 100644
index 000000000..46e260212
--- /dev/null
+++ b/gopls/internal/lsp/source/types_format.go
@@ -0,0 +1,517 @@
+// Copyright 2020 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package source
+
+import (
+ "bytes"
+ "context"
+ "fmt"
+ "go/ast"
+ "go/doc"
+ "go/printer"
+ "go/token"
+ "go/types"
+ "strings"
+
+ "golang.org/x/tools/gopls/internal/lsp/protocol"
+ "golang.org/x/tools/internal/bug"
+ "golang.org/x/tools/internal/event"
+ "golang.org/x/tools/internal/event/tag"
+ "golang.org/x/tools/internal/typeparams"
+)
+
+// FormatType returns the detail and kind for a types.Type.
+func FormatType(typ types.Type, qf types.Qualifier) (detail string, kind protocol.CompletionItemKind) {
+ if types.IsInterface(typ) {
+ detail = "interface{...}"
+ kind = protocol.InterfaceCompletion
+ } else if _, ok := typ.(*types.Struct); ok {
+ detail = "struct{...}"
+ kind = protocol.StructCompletion
+ } else if typ != typ.Underlying() {
+ detail, kind = FormatType(typ.Underlying(), qf)
+ } else {
+ detail = types.TypeString(typ, qf)
+ kind = protocol.ClassCompletion
+ }
+ return detail, kind
+}
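+
+// Illustrative example (editor's sketch, not part of the original change):
+// a named type whose underlying type is a struct recurses into the
+// underlying type, while a basic type falls through to types.TypeString:
+//
+//	detail, kind := FormatType(namedStructType, qf)     // "struct{...}", protocol.StructCompletion
+//	detail, kind = FormatType(types.Typ[types.Int], qf) // "int", protocol.ClassCompletion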
+
+type signature struct {
+ name, doc string
+ typeParams, params, results []string
+ variadic bool
+ needResultParens bool
+}
+
+func (s *signature) Format() string {
+ var b strings.Builder
+ b.WriteByte('(')
+ for i, p := range s.params {
+ if i > 0 {
+ b.WriteString(", ")
+ }
+ b.WriteString(p)
+ }
+ b.WriteByte(')')
+
+ // Add space between parameters and results.
+ if len(s.results) > 0 {
+ b.WriteByte(' ')
+ }
+ if s.needResultParens {
+ b.WriteByte('(')
+ }
+ for i, r := range s.results {
+ if i > 0 {
+ b.WriteString(", ")
+ }
+ b.WriteString(r)
+ }
+ if s.needResultParens {
+ b.WriteByte(')')
+ }
+ return b.String()
+}
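+
+// For example (editor's sketch), a signature with params ["x int", "s string"]
+// and results ["int", "error"] (so needResultParens is true) formats as:
+//
+//	(x int, s string) (int, error)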
+
+func (s *signature) TypeParams() []string {
+ return s.typeParams
+}
+
+func (s *signature) Params() []string {
+ return s.params
+}
+
+// NewBuiltinSignature returns a signature for the builtin object with the
+// given name, if such a builtin object exists.
+func NewBuiltinSignature(ctx context.Context, s Snapshot, name string) (*signature, error) {
+ builtin, err := s.BuiltinFile(ctx)
+ if err != nil {
+ return nil, err
+ }
+ obj := builtin.File.Scope.Lookup(name)
+ if obj == nil {
+ return nil, fmt.Errorf("no builtin object for %s", name)
+ }
+ decl, ok := obj.Decl.(*ast.FuncDecl)
+ if !ok {
+ return nil, fmt.Errorf("no function declaration for builtin: %s", name)
+ }
+ if decl.Type == nil {
+ return nil, fmt.Errorf("no type for builtin decl %s", decl.Name)
+ }
+ var variadic bool
+ if decl.Type.Params.List != nil {
+ numParams := len(decl.Type.Params.List)
+ lastParam := decl.Type.Params.List[numParams-1]
+ if _, ok := lastParam.Type.(*ast.Ellipsis); ok {
+ variadic = true
+ }
+ }
+ fset := FileSetFor(builtin.Tok)
+ params, _ := formatFieldList(ctx, fset, decl.Type.Params, variadic)
+ results, needResultParens := formatFieldList(ctx, fset, decl.Type.Results, false)
+ d := decl.Doc.Text()
+ switch s.View().Options().HoverKind {
+ case SynopsisDocumentation:
+ d = doc.Synopsis(d)
+ case NoDocumentation:
+ d = ""
+ }
+ return &signature{
+ doc: d,
+ name: name,
+ needResultParens: needResultParens,
+ params: params,
+ results: results,
+ variadic: variadic,
+ }, nil
+}
+
+// replacer replaces some synthetic "type classes" used in the builtin file
+// with their most common constituent type.
+var replacer = strings.NewReplacer(
+ `ComplexType`, `complex128`,
+ `FloatType`, `float64`,
+ `IntegerType`, `int`,
+)
+
+func formatFieldList(ctx context.Context, fset *token.FileSet, list *ast.FieldList, variadic bool) ([]string, bool) {
+ if list == nil {
+ return nil, false
+ }
+ var writeResultParens bool
+ var result []string
+ for i := 0; i < len(list.List); i++ {
+ if i >= 1 {
+ writeResultParens = true
+ }
+ p := list.List[i]
+ cfg := printer.Config{Mode: printer.UseSpaces | printer.TabIndent, Tabwidth: 4}
+ b := &bytes.Buffer{}
+ if err := cfg.Fprint(b, fset, p.Type); err != nil {
+ event.Error(ctx, "unable to print type", nil, tag.Type.Of(p.Type))
+ continue
+ }
+ typ := replacer.Replace(b.String())
+ if len(p.Names) == 0 {
+ result = append(result, typ)
+ }
+ for _, name := range p.Names {
+ if name.Name != "" {
+ if i == 0 {
+ writeResultParens = true
+ }
+ result = append(result, fmt.Sprintf("%s %s", name.Name, typ))
+ } else {
+ result = append(result, typ)
+ }
+ }
+ }
+ if variadic {
+ result[len(result)-1] = strings.Replace(result[len(result)-1], "[]", "...", 1)
+ }
+ return result, writeResultParens
+}
+
+// FormatTypeParams turns TypeParamList into its Go representation, such as:
+// [T, Y]. Note that it does not print constraints as this is mainly used for
+// formatting type params in method receivers.
+func FormatTypeParams(tparams *typeparams.TypeParamList) string {
+ if tparams == nil || tparams.Len() == 0 {
+ return ""
+ }
+ var buf bytes.Buffer
+ buf.WriteByte('[')
+ for i := 0; i < tparams.Len(); i++ {
+ if i > 0 {
+ buf.WriteString(", ")
+ }
+ buf.WriteString(tparams.At(i).Obj().Name())
+ }
+ buf.WriteByte(']')
+ return buf.String()
+}
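+
+// For example (editor's sketch), given sig for func F[K comparable, V any](),
+//
+//	FormatTypeParams(typeparams.ForSignature(sig)) // "[K, V]"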
+
+// NewSignature returns formatted signature for a types.Signature struct.
+func NewSignature(ctx context.Context, s Snapshot, pkg Package, sig *types.Signature, comment *ast.CommentGroup, qf types.Qualifier, mq MetadataQualifier) (*signature, error) {
+ var tparams []string
+ tpList := typeparams.ForSignature(sig)
+ for i := 0; i < tpList.Len(); i++ {
+ tparam := tpList.At(i)
+ // TODO: is it possible to reuse the logic from FormatVarType here?
+ s := tparam.Obj().Name() + " " + tparam.Constraint().String()
+ tparams = append(tparams, s)
+ }
+
+ params := make([]string, 0, sig.Params().Len())
+ for i := 0; i < sig.Params().Len(); i++ {
+ el := sig.Params().At(i)
+ typ, err := FormatVarType(ctx, s, pkg, el, qf, mq)
+ if err != nil {
+ return nil, err
+ }
+ p := typ
+ if el.Name() != "" {
+ p = el.Name() + " " + typ
+ }
+ params = append(params, p)
+ }
+
+ var needResultParens bool
+ results := make([]string, 0, sig.Results().Len())
+ for i := 0; i < sig.Results().Len(); i++ {
+ if i >= 1 {
+ needResultParens = true
+ }
+ el := sig.Results().At(i)
+ typ, err := FormatVarType(ctx, s, pkg, el, qf, mq)
+ if err != nil {
+ return nil, err
+ }
+ if el.Name() == "" {
+ results = append(results, typ)
+ } else {
+ if i == 0 {
+ needResultParens = true
+ }
+ results = append(results, el.Name()+" "+typ)
+ }
+ }
+ var d string
+ if comment != nil {
+ d = comment.Text()
+ }
+ switch s.View().Options().HoverKind {
+ case SynopsisDocumentation:
+ d = doc.Synopsis(d)
+ case NoDocumentation:
+ d = ""
+ }
+ return &signature{
+ doc: d,
+ typeParams: tparams,
+ params: params,
+ results: results,
+ variadic: sig.Variadic(),
+ needResultParens: needResultParens,
+ }, nil
+}
+
+// FormatVarType formats a *types.Var, accounting for type aliases.
+// To do this, it looks in the AST of the file in which the object is declared.
+// On any errors, it always falls back to types.TypeString.
+//
+// TODO(rfindley): this function could return the actual name used in syntax,
+// for better parameter names.
+func FormatVarType(ctx context.Context, snapshot Snapshot, srcpkg Package, obj *types.Var, qf types.Qualifier, mq MetadataQualifier) (string, error) {
+ // TODO(rfindley): This looks wrong. The previous comment said:
+ // "If the given expr refers to a type parameter, then use the
+ // object's Type instead of the type parameter declaration. This helps
+ // format the instantiated type as opposed to the original undeclared
+ // generic type".
+ //
+ // But of course, if obj is a type param, we are formatting a generic type
+ // and not an instantiated type. Handling for instantiated types must be done
+ // at a higher level.
+ //
+ // Left this during refactoring in order to preserve pre-existing logic.
+ if typeparams.IsTypeParam(obj.Type()) {
+ return types.TypeString(obj.Type(), qf), nil
+ }
+
+ if obj.Pkg() == nil || !obj.Pos().IsValid() {
+ // This is defensive, though it is extremely unlikely we'll ever have a
+ // builtin var.
+ return types.TypeString(obj.Type(), qf), nil
+ }
+
+ targetpgf, pos, err := parseFull(ctx, snapshot, srcpkg.FileSet(), obj.Pos())
+ if err != nil {
+ return "", err // e.g. ctx cancelled
+ }
+
+ targetMeta := findFileInDeps(snapshot, srcpkg.Metadata(), targetpgf.URI)
+ if targetMeta == nil {
+ // If we have an object from type-checking, it should exist in a file in
+ // the forward transitive closure.
+ return "", bug.Errorf("failed to find file %q in deps of %q", targetpgf.URI, srcpkg.Metadata().ID)
+ }
+
+ decl, spec, field := findDeclInfo([]*ast.File{targetpgf.File}, pos)
+
+ // We can't handle type parameters correctly, so we fall back on TypeString
+ // for parameterized decls.
+ if decl, _ := decl.(*ast.FuncDecl); decl != nil {
+ if typeparams.ForFuncType(decl.Type).NumFields() > 0 {
+ return types.TypeString(obj.Type(), qf), nil // in generic function
+ }
+ if decl.Recv != nil && len(decl.Recv.List) > 0 {
+ if x, _, _, _ := typeparams.UnpackIndexExpr(decl.Recv.List[0].Type); x != nil {
+ return types.TypeString(obj.Type(), qf), nil // in method of generic type
+ }
+ }
+ }
+ if spec, _ := spec.(*ast.TypeSpec); spec != nil && typeparams.ForTypeSpec(spec).NumFields() > 0 {
+ return types.TypeString(obj.Type(), qf), nil // in generic type decl
+ }
+
+ if field == nil {
+ // TODO(rfindley): we should never reach here from an ordinary var, so
+ // should probably return an error here.
+ return types.TypeString(obj.Type(), qf), nil
+ }
+ expr := field.Type
+
+ rq := requalifier(snapshot, targetpgf.File, targetMeta, mq)
+
+ // The type names in the AST may not be correctly qualified.
+ // Determine the package name to use based on the package that originated
+ // the query and the package in which the type is declared.
+ // We then qualify the value by cloning the AST node and editing it.
+ expr = qualifyTypeExpr(expr, rq)
+
+ // If the request came from a different package than the one in which the
+ // types are defined, we may need to modify the qualifiers.
+ return FormatNodeFile(targetpgf.Tok, expr), nil
+}
+
+// qualifyTypeExpr clones the type expression expr after re-qualifying type
+// names using the given function, which accepts the current syntactic
+// qualifier (possibly "" for unqualified idents), and returns a new qualifier
+// (again, possibly "" if the identifier should be unqualified).
+//
+// The resulting expression may be inaccurate: without type-checking we don't
+// properly account for "." imported identifiers or builtins.
+//
+// TODO(rfindley): add many more tests for this function.
+func qualifyTypeExpr(expr ast.Expr, qf func(string) string) ast.Expr {
+ switch expr := expr.(type) {
+ case *ast.ArrayType:
+ return &ast.ArrayType{
+ Lbrack: expr.Lbrack,
+ Elt: qualifyTypeExpr(expr.Elt, qf),
+ Len: expr.Len,
+ }
+
+ case *ast.BinaryExpr:
+ if expr.Op != token.OR {
+ return expr
+ }
+ return &ast.BinaryExpr{
+ X: qualifyTypeExpr(expr.X, qf),
+ OpPos: expr.OpPos,
+ Op: expr.Op,
+ Y: qualifyTypeExpr(expr.Y, qf),
+ }
+
+ case *ast.ChanType:
+ return &ast.ChanType{
+ Arrow: expr.Arrow,
+ Begin: expr.Begin,
+ Dir: expr.Dir,
+ Value: qualifyTypeExpr(expr.Value, qf),
+ }
+
+ case *ast.Ellipsis:
+ return &ast.Ellipsis{
+ Ellipsis: expr.Ellipsis,
+ Elt: qualifyTypeExpr(expr.Elt, qf),
+ }
+
+ case *ast.FuncType:
+ return &ast.FuncType{
+ Func: expr.Func,
+ Params: qualifyFieldList(expr.Params, qf),
+ Results: qualifyFieldList(expr.Results, qf),
+ }
+
+ case *ast.Ident:
+ // Unqualified type (builtin, package local, or dot-imported).
+
+ // Don't qualify names that look like builtins.
+ //
+ // Without type-checking this may be inaccurate. It could be made accurate
+ // by doing syntactic object resolution for the entire package, but that
+ // does not seem worthwhile and we generally want to avoid using
+ // ast.Object, which may be inaccurate.
+ if obj := types.Universe.Lookup(expr.Name); obj != nil {
+ return expr
+ }
+
+ newName := qf("")
+ if newName != "" {
+ return &ast.SelectorExpr{
+ X: &ast.Ident{
+ NamePos: expr.Pos(),
+ Name: newName,
+ },
+ Sel: expr,
+ }
+ }
+ return expr
+
+ case *ast.IndexExpr:
+ return &ast.IndexExpr{
+ X: qualifyTypeExpr(expr.X, qf),
+ Lbrack: expr.Lbrack,
+ Index: qualifyTypeExpr(expr.Index, qf),
+ Rbrack: expr.Rbrack,
+ }
+
+ case *typeparams.IndexListExpr:
+ indices := make([]ast.Expr, len(expr.Indices))
+ for i, idx := range expr.Indices {
+ indices[i] = qualifyTypeExpr(idx, qf)
+ }
+ return &typeparams.IndexListExpr{
+ X: qualifyTypeExpr(expr.X, qf),
+ Lbrack: expr.Lbrack,
+ Indices: indices,
+ Rbrack: expr.Rbrack,
+ }
+
+ case *ast.InterfaceType:
+ return &ast.InterfaceType{
+ Interface: expr.Interface,
+ Methods: qualifyFieldList(expr.Methods, qf),
+ Incomplete: expr.Incomplete,
+ }
+
+ case *ast.MapType:
+ return &ast.MapType{
+ Map: expr.Map,
+ Key: qualifyTypeExpr(expr.Key, qf),
+ Value: qualifyTypeExpr(expr.Value, qf),
+ }
+
+ case *ast.ParenExpr:
+ return &ast.ParenExpr{
+ Lparen: expr.Lparen,
+ Rparen: expr.Rparen,
+ X: qualifyTypeExpr(expr.X, qf),
+ }
+
+ case *ast.SelectorExpr:
+ if id, ok := expr.X.(*ast.Ident); ok {
+ // qualified type
+ newName := qf(id.Name)
+ if newName == "" {
+ return expr.Sel
+ }
+ return &ast.SelectorExpr{
+ X: &ast.Ident{
+ NamePos: id.NamePos,
+ Name: newName,
+ },
+ Sel: expr.Sel,
+ }
+ }
+ return expr
+
+ case *ast.StarExpr:
+ return &ast.StarExpr{
+ Star: expr.Star,
+ X: qualifyTypeExpr(expr.X, qf),
+ }
+
+ case *ast.StructType:
+ return &ast.StructType{
+ Struct: expr.Struct,
+ Fields: qualifyFieldList(expr.Fields, qf),
+ Incomplete: expr.Incomplete,
+ }
+
+ default:
+ return expr
+ }
+}
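+
+// Illustrative example (editor's sketch): with a qualifier qf that maps the
+// empty (local) qualifier to "foo" and leaves other qualifiers unchanged,
+// the type expression map[string]Bar is rewritten to map[string]foo.Bar,
+// while builtins such as int are left alone because they resolve in
+// types.Universe.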
+
+func qualifyFieldList(fl *ast.FieldList, qf func(string) string) *ast.FieldList {
+ if fl == nil {
+ return nil
+ }
+ if fl.List == nil {
+ return &ast.FieldList{
+ Closing: fl.Closing,
+ Opening: fl.Opening,
+ }
+ }
+ list := make([]*ast.Field, 0, len(fl.List))
+ for _, f := range fl.List {
+ list = append(list, &ast.Field{
+ Comment: f.Comment,
+ Doc: f.Doc,
+ Names: f.Names,
+ Tag: f.Tag,
+ Type: qualifyTypeExpr(f.Type, qf),
+ })
+ }
+ return &ast.FieldList{
+ Closing: fl.Closing,
+ Opening: fl.Opening,
+ List: list,
+ }
+}
diff --git a/gopls/internal/lsp/source/util.go b/gopls/internal/lsp/source/util.go
new file mode 100644
index 000000000..82cb8d075
--- /dev/null
+++ b/gopls/internal/lsp/source/util.go
@@ -0,0 +1,555 @@
+// Copyright 2019 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package source
+
+import (
+ "context"
+ "go/ast"
+ "go/printer"
+ "go/token"
+ "go/types"
+ "path/filepath"
+ "regexp"
+ "sort"
+ "strconv"
+ "strings"
+
+ "golang.org/x/tools/gopls/internal/lsp/protocol"
+ "golang.org/x/tools/gopls/internal/lsp/safetoken"
+ "golang.org/x/tools/gopls/internal/span"
+ "golang.org/x/tools/internal/bug"
+ "golang.org/x/tools/internal/tokeninternal"
+ "golang.org/x/tools/internal/typeparams"
+)
+
+// IsGenerated gets and reads the file denoted by uri and reports
+// whether it contains a "generated file" comment as described at
+// https://golang.org/s/generatedcode.
+//
+// TODO(adonovan): opt: this function does too much.
+// Move snapshot.GetFile into the caller (most of which have already done it).
+func IsGenerated(ctx context.Context, snapshot Snapshot, uri span.URI) bool {
+ fh, err := snapshot.GetFile(ctx, uri)
+ if err != nil {
+ return false
+ }
+ pgf, err := snapshot.ParseGo(ctx, fh, ParseHeader)
+ if err != nil {
+ return false
+ }
+ for _, commentGroup := range pgf.File.Comments {
+ for _, comment := range commentGroup.List {
+ if matched := generatedRx.MatchString(comment.Text); matched {
+ // Check if comment is at the beginning of the line in source.
+ if safetoken.Position(pgf.Tok, comment.Slash).Column == 1 {
+ return true
+ }
+ }
+ }
+ }
+ return false
+}
+
+// adjustedObjEnd returns the end position of obj, possibly modified for
+// package names.
+//
+// TODO(rfindley): eliminate this function, by inlining it at callsites where
+// it makes sense.
+func adjustedObjEnd(obj types.Object) token.Pos {
+ nameLen := len(obj.Name())
+ if pkgName, ok := obj.(*types.PkgName); ok {
+ // An imported Go package has a package-local, unqualified name.
+ // When the name matches the imported package name, there is no
+ // identifier in the import spec with the local package name.
+ //
+ // For example:
+ // import "go/ast" // name "ast" matches package name
+ // import a "go/ast" // name "a" does not match package name
+ //
+ // When the identifier does not appear in the source, have the range
+ // of the object be the import path, including quotes.
+ if pkgName.Imported().Name() == pkgName.Name() {
+ nameLen = len(pkgName.Imported().Path()) + len(`""`)
+ }
+ }
+ return obj.Pos() + token.Pos(nameLen)
+}
+
+// Matches cgo generated comment as well as the proposed standard:
+//
+// https://golang.org/s/generatedcode
+var generatedRx = regexp.MustCompile(`// .*DO NOT EDIT\.?`)
+
+// FileKindForLang returns the file kind associated with the given language ID,
+// or UnknownKind if the language ID is not recognized.
+func FileKindForLang(langID string) FileKind {
+ switch langID {
+ case "go":
+ return Go
+ case "go.mod":
+ return Mod
+ case "go.sum":
+ return Sum
+ case "tmpl", "gotmpl":
+ return Tmpl
+ case "go.work":
+ return Work
+ default:
+ return UnknownKind
+ }
+}
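+
+// For example (editor's sketch):
+//
+//	FileKindForLang("go.mod") // Mod
+//	FileKindForLang("rust")   // UnknownKind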
+
+// nodeAtPos returns the node from the given list whose range contains the
+// position pos, along with its index, or (nil, -1) if no node contains it.
+func nodeAtPos(nodes []ast.Node, pos token.Pos) (ast.Node, int) {
+ if nodes == nil {
+ return nil, -1
+ }
+ for i, node := range nodes {
+ if node.Pos() <= pos && pos <= node.End() {
+ return node, i
+ }
+ }
+ return nil, -1
+}
+
+// FormatNode returns the "pretty-print" output for an ast node.
+func FormatNode(fset *token.FileSet, n ast.Node) string {
+ var buf strings.Builder
+ if err := printer.Fprint(&buf, fset, n); err != nil {
+ return ""
+ }
+ return buf.String()
+}
+
+// FormatNodeFile is like FormatNode, but requires only the token.File for the
+// syntax containing the given ast node.
+func FormatNodeFile(file *token.File, n ast.Node) string {
+ fset := FileSetFor(file)
+ return FormatNode(fset, n)
+}
+
+// FileSetFor returns a new FileSet containing a sequence of new Files with
+// the same base, size, and lines as the input files, for use in APIs that
+// require a FileSet.
+//
+// Precondition: the input files must be non-overlapping, and sorted in order
+// of their Base.
+func FileSetFor(files ...*token.File) *token.FileSet {
+ fset := token.NewFileSet()
+ for _, f := range files {
+ f2 := fset.AddFile(f.Name(), f.Base(), f.Size())
+ lines := tokeninternal.GetLines(f)
+ f2.SetLines(lines)
+ }
+ return fset
+}
+
+// Deref returns a pointer's element type, traversing as many levels as
+// needed; if typ is not a pointer, it returns typ unchanged.
+//
+// It can return a pointer type for cyclic types (see golang/go#45510).
+func Deref(typ types.Type) types.Type {
+ var seen map[types.Type]struct{}
+ for {
+ p, ok := typ.Underlying().(*types.Pointer)
+ if !ok {
+ return typ
+ }
+ if _, ok := seen[p.Elem()]; ok {
+ return typ
+ }
+
+ typ = p.Elem()
+
+ if seen == nil {
+ seen = make(map[types.Type]struct{})
+ }
+ seen[typ] = struct{}{}
+ }
+}
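+
+// For example (editor's sketch), Deref maps **int to int and leaves
+// non-pointer types such as string unchanged.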
+
+func SortDiagnostics(d []*Diagnostic) {
+ sort.Slice(d, func(i int, j int) bool {
+ return CompareDiagnostic(d[i], d[j]) < 0
+ })
+}
+
+func CompareDiagnostic(a, b *Diagnostic) int {
+ if r := protocol.CompareRange(a.Range, b.Range); r != 0 {
+ return r
+ }
+ if a.Source < b.Source {
+ return -1
+ }
+ if a.Source > b.Source {
+ return +1
+ }
+ if a.Message < b.Message {
+ return -1
+ }
+ if a.Message > b.Message {
+ return +1
+ }
+ return 0
+}
+
+// findFileInDeps finds package metadata containing URI in the transitive
+// dependencies of m. When using the Go command, the answer is unique.
+//
+// TODO(rfindley): refactor to share logic with findPackageInDeps?
+func findFileInDeps(s MetadataSource, m *Metadata, uri span.URI) *Metadata {
+ seen := make(map[PackageID]bool)
+ var search func(*Metadata) *Metadata
+ search = func(m *Metadata) *Metadata {
+ if seen[m.ID] {
+ return nil
+ }
+ seen[m.ID] = true
+ for _, cgf := range m.CompiledGoFiles {
+ if cgf == uri {
+ return m
+ }
+ }
+ for _, dep := range m.DepsByPkgPath {
+ m := s.Metadata(dep)
+ if m == nil {
+ bug.Reportf("nil metadata for %q", dep)
+ continue
+ }
+ if found := search(m); found != nil {
+ return found
+ }
+ }
+ return nil
+ }
+ return search(m)
+}
+
+// UnquoteImportPath returns the unquoted import path of s,
+// or "" if the path is not properly quoted.
+func UnquoteImportPath(s *ast.ImportSpec) ImportPath {
+ path, err := strconv.Unquote(s.Path.Value)
+ if err != nil {
+ return ""
+ }
+ return ImportPath(path)
+}
+
+// NodeContains reports whether the node n encloses the position pos.
+func NodeContains(n ast.Node, pos token.Pos) bool {
+ return n != nil && n.Pos() <= pos && pos <= n.End()
+}
+
+// CollectScopes returns all scopes in an ast path, ordered as innermost scope
+// first.
+func CollectScopes(info *types.Info, path []ast.Node, pos token.Pos) []*types.Scope {
+ // scopes[i], where i<len(path), is the possibly nil Scope of path[i].
+ var scopes []*types.Scope
+ for _, n := range path {
+ // Include *FuncType scope if pos is inside the function body.
+ switch node := n.(type) {
+ case *ast.FuncDecl:
+ if node.Body != nil && NodeContains(node.Body, pos) {
+ n = node.Type
+ }
+ case *ast.FuncLit:
+ if node.Body != nil && NodeContains(node.Body, pos) {
+ n = node.Type
+ }
+ }
+ scopes = append(scopes, info.Scopes[n])
+ }
+ return scopes
+}
+
+// Qualifier returns a function that appropriately formats a types.PkgName
+// appearing in a *ast.File.
+func Qualifier(f *ast.File, pkg *types.Package, info *types.Info) types.Qualifier {
+ // Construct mapping of import paths to their defined or implicit names.
+ imports := make(map[*types.Package]string)
+ for _, imp := range f.Imports {
+ var obj types.Object
+ if imp.Name != nil {
+ obj = info.Defs[imp.Name]
+ } else {
+ obj = info.Implicits[imp]
+ }
+ if pkgname, ok := obj.(*types.PkgName); ok {
+ imports[pkgname.Imported()] = pkgname.Name()
+ }
+ }
+ // Define qualifier to replace full package paths with names of the imports.
+ return func(p *types.Package) string {
+ if p == pkg {
+ return ""
+ }
+ if name, ok := imports[p]; ok {
+ if name == "." {
+ return ""
+ }
+ return name
+ }
+ return p.Name()
+ }
+}
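+
+// Illustrative example (editor's sketch): in a file containing
+//
+//	import foo "example.com/bar"
+//
+// the returned qualifier renders types from example.com/bar as "foo", types
+// from pkg itself unqualified, and dot-imported packages unqualified.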
+
+// requalifier returns a function that re-qualifies identifiers and qualified
+// identifiers contained in targetFile using the given metadata qualifier.
+func requalifier(s MetadataSource, targetFile *ast.File, targetMeta *Metadata, mq MetadataQualifier) func(string) string {
+ qm := map[string]string{
+ "": mq(targetMeta.Name, "", targetMeta.PkgPath),
+ }
+
+ // Construct mapping of import paths to their defined or implicit names.
+ for _, imp := range targetFile.Imports {
+ name, pkgName, impPath, pkgPath := importInfo(s, imp, targetMeta)
+
+ // Re-map the target name for the source file.
+ qm[name] = mq(pkgName, impPath, pkgPath)
+ }
+
+ return func(name string) string {
+ if newName, ok := qm[name]; ok {
+ return newName
+ }
+ return name
+ }
+}
+
+// A MetadataQualifier is a function that qualifies an identifier declared in a
+// package with the given package name, import path, and package path.
+//
+// In scenarios where metadata is missing, the provided PackageName and
+// PackagePath may be empty, but ImportPath must always be non-empty.
+type MetadataQualifier func(PackageName, ImportPath, PackagePath) string
+
+// MetadataQualifierForFile returns a metadata qualifier that chooses the best
+// qualification of an imported package relative to the file f in package with
+// metadata m.
+func MetadataQualifierForFile(s MetadataSource, f *ast.File, m *Metadata) MetadataQualifier {
+ // Record local names for import paths.
+ localNames := make(map[ImportPath]string) // local names for imports in f
+ for _, imp := range f.Imports {
+ name, _, impPath, _ := importInfo(s, imp, m)
+ localNames[impPath] = name
+ }
+
+ // Record a package path -> import path mapping.
+ inverseDeps := make(map[PackageID]PackagePath)
+ for path, id := range m.DepsByPkgPath {
+ inverseDeps[id] = path
+ }
+ importsByPkgPath := make(map[PackagePath]ImportPath) // best import paths by pkgPath
+ for impPath, id := range m.DepsByImpPath {
+ if id == "" {
+ continue
+ }
+ pkgPath := inverseDeps[id]
+ _, hasPath := importsByPkgPath[pkgPath]
+ _, hasImp := localNames[impPath]
+ // In rare cases, there may be multiple import paths with the same package
+ // path. In such scenarios, prefer an import path that already exists in
+ // the file.
+ if !hasPath || hasImp {
+ importsByPkgPath[pkgPath] = impPath
+ }
+ }
+
+ return func(pkgName PackageName, impPath ImportPath, pkgPath PackagePath) string {
+ // If supplied, translate the package path to an import path in the source
+ // package.
+ if pkgPath != "" {
+ if srcImp := importsByPkgPath[pkgPath]; srcImp != "" {
+ impPath = srcImp
+ }
+ if pkgPath == m.PkgPath {
+ return ""
+ }
+ }
+ if localName, ok := localNames[impPath]; ok && impPath != "" {
+ return string(localName)
+ }
+ if pkgName != "" {
+ return string(pkgName)
+ }
+ idx := strings.LastIndexByte(string(impPath), '/')
+ return string(impPath[idx+1:])
+ }
+}
+
+// importInfo collects information about the import specified by imp,
+// extracting its file-local name, package name, import path, and package path.
+//
+// If metadata is missing for the import, the resulting package name and
+// package path may be empty, and the file local name may be guessed based on
+// the import path.
+//
+// Note: previous versions of this helper used a PackageID->PackagePath map
+// extracted from m, for extracting package path even in the case where
+// metadata for a dep was missing. This should not be necessary, as we should
+// always have metadata for IDs contained in DepsByPkgPath.
+func importInfo(s MetadataSource, imp *ast.ImportSpec, m *Metadata) (string, PackageName, ImportPath, PackagePath) {
+ var (
+ name string // local name
+ pkgName PackageName
+ impPath = UnquoteImportPath(imp)
+ pkgPath PackagePath
+ )
+
+ // If the import has a local name, use it.
+ if imp.Name != nil {
+ name = imp.Name.Name
+ }
+
+ // Try to find metadata for the import. If successful and there is no local
+ // name, the package name is the local name.
+ if depID := m.DepsByImpPath[impPath]; depID != "" {
+ if depm := s.Metadata(depID); depm != nil {
+ if name == "" {
+ name = string(depm.Name)
+ }
+ pkgName = depm.Name
+ pkgPath = depm.PkgPath
+ }
+ }
+
+ // If the local name is still unknown, guess it based on the import path.
+ if name == "" {
+ idx := strings.LastIndexByte(string(impPath), '/')
+ name = string(impPath[idx+1:])
+ }
+ return name, pkgName, impPath, pkgPath
+}
+
+// isDirective reports whether c is a comment directive.
+//
+// Copied and adapted from go/src/go/ast/ast.go.
+func isDirective(c string) bool {
+ if len(c) < 3 {
+ return false
+ }
+ if c[1] != '/' {
+ return false
+ }
+ //-style comment (no newline at the end)
+ c = c[2:]
+ if len(c) == 0 {
+ // empty line
+ return false
+ }
+ // "//line " is a line directive.
+ // (The // has been removed.)
+ if strings.HasPrefix(c, "line ") {
+ return true
+ }
+
+ // "//[a-z0-9]+:[a-z0-9]"
+ // (The // has been removed.)
+ colon := strings.Index(c, ":")
+ if colon <= 0 || colon+1 >= len(c) {
+ return false
+ }
+ for i := 0; i <= colon+1; i++ {
+ if i == colon {
+ continue
+ }
+ b := c[i]
+ if !('a' <= b && b <= 'z' || '0' <= b && b <= '9') {
+ return false
+ }
+ }
+ return true
+}
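+
+// For example (editor's sketch):
+//
+//	isDirective("//go:generate stringer") // true
+//	isDirective("//line foo.go:10")       // true
+//	isDirective("// an ordinary comment") // false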
+
+// InDir checks whether path is in the file tree rooted at dir.
+// It checks only the lexical form of the file names.
+// It does not consider symbolic links.
+//
+// Copied from go/src/cmd/go/internal/search/search.go.
+func InDir(dir, path string) bool {
+ pv := strings.ToUpper(filepath.VolumeName(path))
+ dv := strings.ToUpper(filepath.VolumeName(dir))
+ path = path[len(pv):]
+ dir = dir[len(dv):]
+ switch {
+ default:
+ return false
+ case pv != dv:
+ return false
+ case len(path) == len(dir):
+ if path == dir {
+ return true
+ }
+ return false
+ case dir == "":
+ return path != ""
+ case len(path) > len(dir):
+ if dir[len(dir)-1] == filepath.Separator {
+ if path[:len(dir)] == dir {
+ return path[len(dir):] != ""
+ }
+ return false
+ }
+ if path[len(dir)] == filepath.Separator && path[:len(dir)] == dir {
+ if len(path) == len(dir)+1 {
+ return true
+ }
+ return path[len(dir)+1:] != ""
+ }
+ return false
+ }
+}
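+
+// For example (editor's sketch):
+//
+//	InDir("/home/user/src", "/home/user/src/foo/bar.go") // true
+//	InDir("/home/user/src", "/home/user/srcx/bar.go")    // false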
+
+// IsValidImport reports whether importPkgPath is importable
+// by pkgPath.
+func IsValidImport(pkgPath, importPkgPath PackagePath) bool {
+ i := strings.LastIndex(string(importPkgPath), "/internal/")
+ if i == -1 {
+ return true
+ }
+ // TODO(rfindley): this looks wrong: IsCommandLineArguments is meant to
+ // operate on package IDs, not package paths.
+ if IsCommandLineArguments(PackageID(pkgPath)) {
+ return true
+ }
+ // TODO(rfindley): this is wrong. mod.testx/p should not be able to
+ // import mod.test/internal: https://go.dev/play/p/-Ca6P-E4V4q
+ return strings.HasPrefix(string(pkgPath), string(importPkgPath[:i]))
+}
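+
+// For example (editor's sketch), "mod.test/a" may import
+// "mod.test/internal/b" because it shares the prefix "mod.test", whereas
+// "other.test/a" may not.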
+
+// IsCommandLineArguments reports whether a given value denotes the
+// "command-line-arguments" package, which is a package with an unknown ID
+// created by the go command. It can have a test variant, which is why callers
+// should not check that a value equals "command-line-arguments" directly.
+func IsCommandLineArguments(id PackageID) bool {
+ return strings.Contains(string(id), "command-line-arguments")
+}
+
+// embeddedIdent returns the type name identifier for an embedding x, if x is
+// a valid embedding. Otherwise, it returns nil.
+//
+// Spec: An embedded field must be specified as a type name T or as a pointer
+// to a non-interface type name *T
+func embeddedIdent(x ast.Expr) *ast.Ident {
+ if star, ok := x.(*ast.StarExpr); ok {
+ x = star.X
+ }
+ switch ix := x.(type) { // check for instantiated receivers
+ case *ast.IndexExpr:
+ x = ix.X
+ case *typeparams.IndexListExpr:
+ x = ix.X
+ }
+ switch x := x.(type) {
+ case *ast.Ident:
+ return x
+ case *ast.SelectorExpr:
+ if _, ok := x.X.(*ast.Ident); ok {
+ return x.Sel
+ }
+ }
+ return nil
+}
diff --git a/gopls/internal/lsp/source/view.go b/gopls/internal/lsp/source/view.go
new file mode 100644
index 000000000..41bcbac4b
--- /dev/null
+++ b/gopls/internal/lsp/source/view.go
@@ -0,0 +1,857 @@
+// Copyright 2018 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package source
+
+import (
+ "bytes"
+ "context"
+ "crypto/sha256"
+ "errors"
+ "fmt"
+ "go/ast"
+ "go/scanner"
+ "go/token"
+ "go/types"
+ "io"
+
+ "golang.org/x/mod/modfile"
+ "golang.org/x/tools/go/analysis"
+ "golang.org/x/tools/go/packages"
+ "golang.org/x/tools/go/types/objectpath"
+ "golang.org/x/tools/gopls/internal/govulncheck"
+ "golang.org/x/tools/gopls/internal/lsp/protocol"
+ "golang.org/x/tools/gopls/internal/lsp/safetoken"
+ "golang.org/x/tools/gopls/internal/lsp/source/methodsets"
+ "golang.org/x/tools/gopls/internal/span"
+ "golang.org/x/tools/internal/event/label"
+ "golang.org/x/tools/internal/event/tag"
+ "golang.org/x/tools/internal/gocommand"
+ "golang.org/x/tools/internal/imports"
+ "golang.org/x/tools/internal/packagesinternal"
+)
+
+// A GlobalSnapshotID uniquely identifies a snapshot within this process and
+// increases monotonically with snapshot creation time.
+//
+// We use a distinct integral type for global IDs to help enforce correct
+// usage.
+type GlobalSnapshotID uint64
+
+// Snapshot represents the current state for the given view.
+type Snapshot interface {
+ // SequenceID is the sequence id of this snapshot within its containing
+ // view.
+ //
+	// Relative to their view, sequence ids are monotonically increasing, but this
+ // does not hold globally: when new views are created their initial snapshot
+ // has sequence ID 0. For operations that span multiple views, use global
+ // IDs.
+ SequenceID() uint64
+
+ // GlobalID is a globally unique identifier for this snapshot. Global IDs are
+ // monotonic: subsequent snapshots will have higher global ID, though
+ // subsequent snapshots in a view may not have adjacent global IDs.
+ GlobalID() GlobalSnapshotID
+
+ // View returns the View associated with this snapshot.
+ View() View
+
+ // BackgroundContext returns a context used for all background processing
+ // on behalf of this snapshot.
+ BackgroundContext() context.Context
+
+	// ValidBuildConfiguration reports whether the user's workspace has a
+	// usable build configuration. In particular, it is false if the
+	// workspace is outside of both a module and GOPATH.
+ ValidBuildConfiguration() bool
+
+ // FindFile returns the FileHandle for the given URI, if it is already
+ // in the given snapshot.
+ FindFile(uri span.URI) FileHandle
+
+ // GetFile returns the FileHandle for a given URI, initializing it if it is
+ // not already part of the snapshot.
+ GetFile(ctx context.Context, uri span.URI) (FileHandle, error)
+
+ // AwaitInitialized waits until the snapshot's view is initialized.
+ AwaitInitialized(ctx context.Context)
+
+	// IsOpen reports whether the editor currently has the given file open.
+ IsOpen(uri span.URI) bool
+
+ // IgnoredFile reports if a file would be ignored by a `go list` of the whole
+ // workspace.
+ IgnoredFile(uri span.URI) bool
+
+	// Templates returns the .tmpl files.
+ Templates() map[span.URI]FileHandle
+
+ // ParseGo returns the parsed AST for the file.
+ // If the file is not available, returns nil and an error.
+ // Position information is added to FileSet().
+ ParseGo(ctx context.Context, fh FileHandle, mode ParseMode) (*ParsedGoFile, error)
+
+ // Analyze runs the specified analyzers on the given package at this snapshot.
+ Analyze(ctx context.Context, id PackageID, analyzers []*Analyzer) ([]*Diagnostic, error)
+
+ // RunGoCommandPiped runs the given `go` command, writing its output
+ // to stdout and stderr. Verb, Args, and WorkingDir must be specified.
+ //
+ // RunGoCommandPiped runs the command serially using gocommand.RunPiped,
+	// ensuring that it does not run concurrently with other commands on the
+	// server.
+ RunGoCommandPiped(ctx context.Context, mode InvocationFlags, inv *gocommand.Invocation, stdout, stderr io.Writer) error
+
+ // RunGoCommandDirect runs the given `go` command. Verb, Args, and
+ // WorkingDir must be specified.
+ RunGoCommandDirect(ctx context.Context, mode InvocationFlags, inv *gocommand.Invocation) (*bytes.Buffer, error)
+
+	// RunGoCommands runs a series of `go` commands that update the go.mod
+	// and go.sum files for wd, and returns their updated contents.
+ RunGoCommands(ctx context.Context, allowNetwork bool, wd string, run func(invoke func(...string) (*bytes.Buffer, error)) error) (bool, []byte, []byte, error)
+
+ // RunProcessEnvFunc runs fn with the process env for this snapshot's view.
+ // Note: the process env contains cached module and filesystem state.
+ RunProcessEnvFunc(ctx context.Context, fn func(*imports.Options) error) error
+
+ // ModFiles are the go.mod files enclosed in the snapshot's view and known
+ // to the snapshot.
+ ModFiles() []span.URI
+
+ // ParseMod is used to parse go.mod files.
+ ParseMod(ctx context.Context, fh FileHandle) (*ParsedModule, error)
+
+ // ModWhy returns the results of `go mod why` for the module specified by
+ // the given go.mod file.
+ ModWhy(ctx context.Context, fh FileHandle) (map[string]string, error)
+
+ // ModTidy returns the results of `go mod tidy` for the module specified by
+ // the given go.mod file.
+ ModTidy(ctx context.Context, pm *ParsedModule) (*TidiedModule, error)
+
+ // ModVuln returns import vulnerability analysis for the given go.mod URI.
+ // Concurrent requests are combined into a single command.
+ ModVuln(ctx context.Context, modURI span.URI) (*govulncheck.Result, error)
+
+ // GoModForFile returns the URI of the go.mod file for the given URI.
+ GoModForFile(uri span.URI) span.URI
+
+ // WorkFile, if non-empty, is the go.work file for the workspace.
+ WorkFile() span.URI
+
+ // ParseWork is used to parse go.work files.
+ ParseWork(ctx context.Context, fh FileHandle) (*ParsedWorkFile, error)
+
+ // BuiltinFile returns information about the special builtin package.
+ BuiltinFile(ctx context.Context) (*ParsedGoFile, error)
+
+ // IsBuiltin reports whether uri is part of the builtin package.
+ IsBuiltin(ctx context.Context, uri span.URI) bool
+
+ // ReverseDependencies returns a new mapping whose entries are
+ // the ID and Metadata of each package in the workspace that
+	// directly or transitively depends on the package denoted by id,
+ // excluding id itself.
+ ReverseDependencies(ctx context.Context, id PackageID, transitive bool) (map[PackageID]*Metadata, error)
+
+ // ActiveMetadata returns a new, unordered slice containing
+ // metadata for all packages considered 'active' in the workspace.
+ //
+ // In normal memory mode, this is all workspace packages. In degraded memory
+ // mode, this is just the reverse transitive closure of open packages.
+ ActiveMetadata(ctx context.Context) ([]*Metadata, error)
+
+ // AllMetadata returns a new unordered array of metadata for all packages in the workspace.
+ AllMetadata(ctx context.Context) ([]*Metadata, error)
+
+ // Symbols returns all symbols in the snapshot.
+ Symbols(ctx context.Context) (map[span.URI][]Symbol, error)
+
+ // Metadata returns the metadata for the specified package,
+ // or nil if it was not found.
+ Metadata(id PackageID) *Metadata
+
+ // MetadataForFile returns a new slice containing metadata for each
+ // package containing the Go file identified by uri, ordered by the
+ // number of CompiledGoFiles (i.e. "narrowest" to "widest" package).
+ // The result may include tests and intermediate test variants of
+ // importable packages.
+ // It returns an error if the context was cancelled.
+ MetadataForFile(ctx context.Context, uri span.URI) ([]*Metadata, error)
+
+ // TypeCheck parses and type-checks the specified packages,
+ // and returns them in the same order as the ids.
+ // The resulting packages' types may belong to different importers,
+ // so types from different packages are incommensurable.
+ TypeCheck(ctx context.Context, ids ...PackageID) ([]Package, error)
+
+ // PackageDiagnostics returns diagnostics for files contained in specified
+ // packages.
+ //
+ // If these diagnostics cannot be loaded from cache, the requested packages
+ // may be type-checked.
+ PackageDiagnostics(ctx context.Context, ids ...PackageID) (map[span.URI][]*Diagnostic, error)
+
+ // References returns cross-references indexes for the specified packages.
+ //
+ // If these indexes cannot be loaded from cache, the requested packages may
+ // be type-checked.
+ References(ctx context.Context, ids ...PackageID) ([]XrefIndex, error)
+
+ // MethodSets returns method-set indexes for the specified packages.
+ //
+ // If these indexes cannot be loaded from cache, the requested packages may
+ // be type-checked.
+ MethodSets(ctx context.Context, ids ...PackageID) ([]*methodsets.Index, error)
+
+ // GetCriticalError returns any critical errors in the workspace.
+ //
+ // A nil result may mean success, or context cancellation.
+ GetCriticalError(ctx context.Context) *CriticalError
+}
+
+type XrefIndex interface {
+ Lookup(targets map[PackagePath]map[objectpath.Path]struct{}) (locs []protocol.Location)
+}
+
+// SnapshotLabels returns a new slice of labels that should be used for events
+// related to a snapshot.
+func SnapshotLabels(snapshot Snapshot) []label.Label {
+ return []label.Label{tag.Snapshot.Of(snapshot.SequenceID()), tag.Directory.Of(snapshot.View().Folder())}
+}
+
+// PackageForFile is a convenience function that selects a package to
+// which this file belongs (narrowest or widest), type-checks it in
+// the requested mode (full or workspace), and returns it, along with
+// the parse tree of that file.
+//
+// Type-checking is expensive. Call snapshot.ParseGo if all you need
+// is a parse tree, or snapshot.MetadataForFile if you only need metadata.
+func PackageForFile(ctx context.Context, snapshot Snapshot, uri span.URI, pkgSel PackageSelector) (Package, *ParsedGoFile, error) {
+ metas, err := snapshot.MetadataForFile(ctx, uri)
+ if err != nil {
+ return nil, nil, err
+ }
+ if len(metas) == 0 {
+ return nil, nil, fmt.Errorf("no package metadata for file %s", uri)
+ }
+ switch pkgSel {
+ case NarrowestPackage:
+ metas = metas[:1]
+ case WidestPackage:
+ metas = metas[len(metas)-1:]
+ }
+ pkgs, err := snapshot.TypeCheck(ctx, metas[0].ID)
+ if err != nil {
+ return nil, nil, err
+ }
+ pkg := pkgs[0]
+ pgf, err := pkg.File(uri)
+ if err != nil {
+ return nil, nil, err // "can't happen"
+ }
+ return pkg, pgf, err
+}
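+
+// Illustrative usage (editor's sketch, assuming ctx, snapshot, and fh are in
+// scope):
+//
+//	pkg, pgf, err := PackageForFile(ctx, snapshot, fh.URI(), NarrowestPackage)
+//	if err != nil {
+//		return err
+//	}
+//	info := pkg.GetTypesInfo() // type information for the narrowest package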
+
+// PackageSelector sets how a package is selected out from a set of packages
+// containing a given file.
+type PackageSelector int
+
+const (
+ // NarrowestPackage picks the "narrowest" package for a given file.
+ // By "narrowest" package, we mean the package with the fewest number of
+ // files that includes the given file. This solves the problem of test
+ // variants, as the test will have more files than the non-test package.
+ NarrowestPackage PackageSelector = iota
+
+ // WidestPackage returns the Package containing the most files.
+ // This is useful for something like diagnostics, where we'd prefer to
+ // offer diagnostics for as many files as possible.
+ WidestPackage
+)
+
+// InvocationFlags represents the settings of a particular go command invocation.
+// It is a mode, plus a set of flag bits.
+type InvocationFlags int
+
+const (
+ // Normal is appropriate for commands that might be run by a user and don't
+ // deliberately modify go.mod files, e.g. `go test`.
+ Normal InvocationFlags = iota
+ // WriteTemporaryModFile is for commands that need information from a
+ // modified version of the user's go.mod file, e.g. `go mod tidy` used to
+ // generate diagnostics.
+ WriteTemporaryModFile
+ // LoadWorkspace is for packages.Load, and other operations that should
+ // consider the whole workspace at once.
+ LoadWorkspace
+
+ // AllowNetwork is a flag bit that indicates the invocation should be
+ // allowed to access the network.
+ AllowNetwork InvocationFlags = 1 << 10
+)
+
+func (m InvocationFlags) Mode() InvocationFlags {
+ return m & (AllowNetwork - 1)
+}
+
+func (m InvocationFlags) AllowNetwork() bool {
+ return m&AllowNetwork != 0
+}
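+
+// For example (editor's sketch), flags compose by bitwise OR:
+//
+//	flags := LoadWorkspace | AllowNetwork
+//	flags.Mode()         // LoadWorkspace
+//	flags.AllowNetwork() // true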
+
+// View represents a single workspace.
+// This is the level at which we maintain configuration like working directory
+// and build tags.
+type View interface {
+ // Name returns the name this view was constructed with.
+ Name() string
+
+ // Folder returns the folder with which this view was created.
+ Folder() span.URI
+
+ // Options returns a copy of the Options for this view.
+ Options() *Options
+
+ // Snapshot returns the current snapshot for the view, and a
+ // release function that must be called when the Snapshot is
+ // no longer needed.
+ //
+ // If the view is shut down, the resulting error will be non-nil, and the
+ // release function need not be called.
+ Snapshot() (Snapshot, func(), error)
+
+	// IsGoPrivatePath reports whether path is a private import path, as identified
+ // by the GOPRIVATE environment variable.
+ IsGoPrivatePath(path string) bool
+
+ // ModuleUpgrades returns known module upgrades for the dependencies of
+ // modfile.
+ ModuleUpgrades(modfile span.URI) map[string]string
+
+ // RegisterModuleUpgrades registers that upgrades exist for the given modules
+ // required by modfile.
+ RegisterModuleUpgrades(modfile span.URI, upgrades map[string]string)
+
+ // ClearModuleUpgrades clears all upgrades for the modules in modfile.
+ ClearModuleUpgrades(modfile span.URI)
+
+ // Vulnerabilities returns known vulnerabilities for the given modfile.
+ // TODO(suzmue): replace command.Vuln with a different type, maybe
+ // https://pkg.go.dev/golang.org/x/vuln/cmd/govulncheck/govulnchecklib#Summary?
+ Vulnerabilities(modfile ...span.URI) map[span.URI]*govulncheck.Result
+
+ // SetVulnerabilities resets the list of vulnerabilities that exists for the given modules
+ // required by modfile.
+ SetVulnerabilities(modfile span.URI, vulncheckResult *govulncheck.Result)
+
+ // FileKind returns the type of a file.
+ //
+ // We can't reliably deduce the kind from the file name alone,
+ // as some editors can be told to interpret a buffer as
+	// a language different from the one its file name suggests, e.g. that
+ // an .html file actually contains Go "html/template" syntax,
+ // or even that a .go file contains Python.
+ FileKind(FileHandle) FileKind
+
+ // GoVersion returns the configured Go version for this view.
+ GoVersion() int
+
+ // GoVersionString returns the go version string configured for this view.
+ // Unlike [GoVersion], this encodes the minor version and commit hash information.
+ GoVersionString() string
+}
+
+// A FileSource maps uris to FileHandles.
+type FileSource interface {
+ // GetFile returns the FileHandle for a given URI.
+ GetFile(ctx context.Context, uri span.URI) (FileHandle, error)
+}
+
+// A MetadataSource maps package IDs to metadata.
+//
+// TODO(rfindley): replace this with a concrete metadata graph, once it is
+// exposed from the snapshot.
+type MetadataSource interface {
+ // Metadata returns Metadata for the given package ID, or nil if it does not
+ // exist.
+ Metadata(PackageID) *Metadata
+}
+
+// A ParsedGoFile contains the results of parsing a Go file.
+type ParsedGoFile struct {
+ URI span.URI
+ Mode ParseMode
+ File *ast.File
+ Tok *token.File
+ // Source code used to build the AST. It may be different from the
+ // actual content of the file if we have fixed the AST.
+ Src []byte
+ Fixed bool
+ Mapper *protocol.Mapper // may map fixed Src, not file content
+ ParseErr scanner.ErrorList
+}
+
+// -- go/token domain convenience helpers --
+
+// PositionPos returns the token.Pos of protocol position p within the file.
+func (pgf *ParsedGoFile) PositionPos(p protocol.Position) (token.Pos, error) {
+ offset, err := pgf.Mapper.PositionOffset(p)
+ if err != nil {
+ return token.NoPos, err
+ }
+ return safetoken.Pos(pgf.Tok, offset)
+}
+
+// PosRange returns a protocol Range for the token.Pos interval in this file.
+func (pgf *ParsedGoFile) PosRange(start, end token.Pos) (protocol.Range, error) {
+ return pgf.Mapper.PosRange(pgf.Tok, start, end)
+}
+
+// PosMappedRange returns a MappedRange for the token.Pos interval in this file.
+// A MappedRange can be converted to any other form.
+func (pgf *ParsedGoFile) PosMappedRange(start, end token.Pos) (protocol.MappedRange, error) {
+ return pgf.Mapper.PosMappedRange(pgf.Tok, start, end)
+}
+
+// PosLocation returns a protocol Location for the token.Pos interval in this file.
+func (pgf *ParsedGoFile) PosLocation(start, end token.Pos) (protocol.Location, error) {
+ return pgf.Mapper.PosLocation(pgf.Tok, start, end)
+}
+
+// NodeRange returns a protocol Range for the ast.Node interval in this file.
+func (pgf *ParsedGoFile) NodeRange(node ast.Node) (protocol.Range, error) {
+ return pgf.Mapper.NodeRange(pgf.Tok, node)
+}
+
+// NodeMappedRange returns a MappedRange for the ast.Node interval in this file.
+// A MappedRange can be converted to any other form.
+func (pgf *ParsedGoFile) NodeMappedRange(node ast.Node) (protocol.MappedRange, error) {
+ return pgf.Mapper.NodeMappedRange(pgf.Tok, node)
+}
+
+// NodeLocation returns a protocol Location for the ast.Node interval in this file.
+func (pgf *ParsedGoFile) NodeLocation(node ast.Node) (protocol.Location, error) {
+ return pgf.Mapper.PosLocation(pgf.Tok, node.Pos(), node.End())
+}
+
+// RangePos parses a protocol Range back into the go/token domain.
+func (pgf *ParsedGoFile) RangePos(r protocol.Range) (token.Pos, token.Pos, error) {
+ start, end, err := pgf.Mapper.RangeOffsets(r)
+ if err != nil {
+ return token.NoPos, token.NoPos, err
+ }
+ return pgf.Tok.Pos(start), pgf.Tok.Pos(end), nil
+}
+
+// A ParsedModule contains the results of parsing a go.mod file.
+type ParsedModule struct {
+ URI span.URI
+ File *modfile.File
+ Mapper *protocol.Mapper
+ ParseErrors []*Diagnostic
+}
+
+// A ParsedWorkFile contains the results of parsing a go.work file.
+type ParsedWorkFile struct {
+ URI span.URI
+ File *modfile.WorkFile
+ Mapper *protocol.Mapper
+ ParseErrors []*Diagnostic
+}
+
+// A TidiedModule contains the results of running `go mod tidy` on a module.
+type TidiedModule struct {
+ // Diagnostics representing changes made by `go mod tidy`.
+ Diagnostics []*Diagnostic
+ // The bytes of the go.mod file after it was tidied.
+ TidiedContent []byte
+}
+
+// Metadata represents package metadata retrieved from go/packages.
+type Metadata struct {
+ ID PackageID
+ PkgPath PackagePath
+ Name PackageName
+ GoFiles []span.URI
+ CompiledGoFiles []span.URI
+ ForTest PackagePath // package path under test, or ""
+ TypesSizes types.Sizes
+ Errors []packages.Error
+ DepsByImpPath map[ImportPath]PackageID // may contain dups; empty ID => missing
+ DepsByPkgPath map[PackagePath]PackageID // values are unique and non-empty
+ Module *packages.Module
+ DepsErrors []*packagesinternal.PackageError
+ Diagnostics []*Diagnostic // processed diagnostics from 'go list'
+ LoadDir string // directory from which go/packages was run
+}
+
+func (m *Metadata) String() string { return string(m.ID) }
+
+// IsIntermediateTestVariant reports whether the given package is an
+// intermediate test variant, e.g. "net/http [net/url.test]".
+//
+// Such test variants arise when an x_test package (in this case net/url_test)
+// imports a package (in this case net/http) that itself imports the
+// non-x_test package (in this case net/url).
+//
+// This is done so that the forward transitive closure of net/url_test has
+// only one package for the "net/url" import.
+// The intermediate test variant exists to hold the test variant import:
+//
+// net/url_test [net/url.test]
+//
+// | "net/http" -> net/http [net/url.test]
+// | "net/url" -> net/url [net/url.test]
+// | ...
+//
+// net/http [net/url.test]
+//
+// | "net/url" -> net/url [net/url.test]
+// | ...
+//
+// This restriction propagates throughout the import graph of net/http: for
+// every package imported by net/http that imports net/url, there must be an
+// intermediate test variant that instead imports "net/url [net/url.test]".
+//
+// As one can see from the example of net/url and net/http, intermediate test
+// variants can result in many additional packages that are essentially (but
+// not quite) identical. For this reason, we filter these variants wherever
+// possible.
+func (m *Metadata) IsIntermediateTestVariant() bool {
+ return m.ForTest != "" && m.ForTest != m.PkgPath && m.ForTest+"_test" != m.PkgPath
+}
+
+// RemoveIntermediateTestVariants removes intermediate test variants, modifying the slice in place.
+func RemoveIntermediateTestVariants(metas []*Metadata) []*Metadata {
+ res := metas[:0]
+ for _, m := range metas {
+ if !m.IsIntermediateTestVariant() {
+ res = append(res, m)
+ }
+ }
+ return res
+}
+
+var ErrViewExists = errors.New("view already exists for session")
+
+// FileModification represents a modification to a file.
+type FileModification struct {
+ URI span.URI
+ Action FileAction
+
+ // OnDisk is true if a watched file is changed on disk.
+ // If true, Version will be -1 and Text will be nil.
+ OnDisk bool
+
+ // Version will be -1 and Text will be nil when they are not supplied,
+ // specifically on textDocument/didClose and for on-disk changes.
+ Version int32
+ Text []byte
+
+ // LanguageID is only sent from the language client on textDocument/didOpen.
+ LanguageID string
+}
+
+type FileAction int
+
+const (
+ UnknownFileAction = FileAction(iota)
+ Open
+ Change
+ Close
+ Save
+ Create
+ Delete
+ InvalidateMetadata
+)
+
+func (a FileAction) String() string {
+ switch a {
+ case Open:
+ return "Open"
+ case Change:
+ return "Change"
+ case Close:
+ return "Close"
+ case Save:
+ return "Save"
+ case Create:
+ return "Create"
+ case Delete:
+ return "Delete"
+ case InvalidateMetadata:
+ return "InvalidateMetadata"
+ default:
+ return "Unknown"
+ }
+}
+
+var ErrTmpModfileUnsupported = errors.New("-modfile is unsupported for this Go version")
+var ErrNoModOnDisk = errors.New("go.mod file is not on disk")
+
+func IsNonFatalGoModError(err error) bool {
+ return err == ErrTmpModfileUnsupported || err == ErrNoModOnDisk
+}
+
+// ParseMode controls the content of the AST produced when parsing a source file.
+type ParseMode int
+
+const (
+ // ParseHeader specifies that the main package declaration and imports are needed.
+ // This is the mode used when attempting to examine the package graph structure.
+ ParseHeader ParseMode = iota
+
+ // ParseFull specifies the full AST is needed.
+ // This is used for files of direct interest where the entire contents must
+ // be considered.
+ ParseFull
+)
+
+// A FileHandle is an interface to files tracked by the LSP session, which may
+// be either files read from disk, or open in the editor session (overlays).
+type FileHandle interface {
+ // URI is the URI for this file handle.
+ // TODO(rfindley): this is not actually well-defined. In some cases, there
+ // may be more than one URI that resolve to the same FileHandle. Which one is
+ // this?
+ URI() span.URI
+ // FileIdentity returns a FileIdentity for the file, even if there was an
+ // error reading it.
+ FileIdentity() FileIdentity
+ // Saved reports whether the file has the same content on disk.
+ // For on-disk files, this is trivially true.
+ Saved() bool
+ // Version returns the file version, as defined by the LSP client.
+ // For on-disk file handles, Version returns 0.
+ Version() int32
+ // Read reads the contents of a file.
+ // If the file is not available, returns a nil slice and an error.
+ Read() ([]byte, error)
+}
+
+// A Hash is a cryptographic digest of the contents of a file.
+// (Although at 32B it is larger than a 16B string header, it is smaller
+// and has better locality than the string header + 64B of hex digits.)
+type Hash [sha256.Size]byte
+
+// HashOf returns the hash of some data.
+func HashOf(data []byte) Hash {
+ return Hash(sha256.Sum256(data))
+}
+
+// Hashf returns the hash of a printf-formatted string.
+func Hashf(format string, args ...interface{}) Hash {
+ // Although this looks alloc-heavy, it is faster than using
+ // Fprintf on sha256.New() because the allocations don't escape.
+ return HashOf([]byte(fmt.Sprintf(format, args...)))
+}
+
+// String returns the digest as a string of hex digits.
+func (h Hash) String() string {
+ return fmt.Sprintf("%64x", [sha256.Size]byte(h))
+}
+
+// Less returns true if the given hash is less than the other.
+func (h Hash) Less(other Hash) bool {
+ return bytes.Compare(h[:], other[:]) < 0
+}
+
+// XORWith updates *h to *h XOR h2.
+func (h *Hash) XORWith(h2 Hash) {
+ // Small enough that we don't need crypto/subtle.XORBytes.
+ for i := range h {
+ h[i] ^= h2[i]
+ }
+}
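+
+// Illustrative usage (editor's sketch):
+//
+//	h := HashOf([]byte("hello"))
+//	s := h.String() // 64 hex digits of the SHA-256 digest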
+
+// FileIdentity uniquely identifies a file at a version from a FileSystem.
+type FileIdentity struct {
+ URI span.URI
+ Hash Hash // digest of file contents
+}
+
+func (id FileIdentity) String() string {
+ return fmt.Sprintf("%s%s", id.URI, id.Hash)
+}
+
+// FileKind describes the kind of the file in question.
+// It can be one of Go, Mod, Sum, Tmpl, or Work.
+type FileKind int
+
+const (
+ // UnknownKind is a file type we don't know about.
+ UnknownKind = FileKind(iota)
+
+ // Go is a normal go source file.
+ Go
+ // Mod is a go.mod file.
+ Mod
+ // Sum is a go.sum file.
+ Sum
+ // Tmpl is a template file.
+ Tmpl
+ // Work is a go.work file.
+ Work
+)
+
+func (k FileKind) String() string {
+ switch k {
+ case Go:
+ return "go"
+ case Mod:
+ return "go.mod"
+ case Sum:
+ return "go.sum"
+ case Tmpl:
+ return "tmpl"
+ case Work:
+ return "go.work"
+ default:
+ return fmt.Sprintf("internal error: unknown file kind %d", k)
+ }
+}
+
+// Analyzer represents a go/analysis analyzer with some boolean properties
+// that let the user know how to use the analyzer.
+type Analyzer struct {
+ Analyzer *analysis.Analyzer
+
+ // Enabled reports whether the analyzer is enabled. This value can be
+ // configured per-analysis in user settings. For staticcheck analyzers,
+ // the value of the Staticcheck setting overrides this field.
+ //
+ // Most clients should use the IsEnabled method.
+ Enabled bool
+
+	// Fix is the name used to invoke the suggested fixes for the
+	// analyzer. It is non-empty if we expect this analyzer to
+ // provide its fix separately from its diagnostics. That is, we should apply
+ // the analyzer's suggested fixes through a Command, not a TextEdit.
+ Fix string
+
+ // ActionKind is the kind of code action this analyzer produces. If
+ // unspecified the type defaults to quickfix.
+ ActionKind []protocol.CodeActionKind
+
+ // Severity is the severity set for diagnostics reported by this
+ // analyzer. If left unset it defaults to Warning.
+ Severity protocol.DiagnosticSeverity
+}
+
+func (a *Analyzer) String() string { return a.Analyzer.String() }
+
+// IsEnabled reports whether this analyzer is enabled by the given options.
+func (a Analyzer) IsEnabled(options *Options) bool {
+ // Staticcheck analyzers can only be enabled when staticcheck is on.
+ if _, ok := options.StaticcheckAnalyzers[a.Analyzer.Name]; ok {
+ if !options.Staticcheck {
+ return false
+ }
+ }
+ if enabled, ok := options.Analyses[a.Analyzer.Name]; ok {
+ return enabled
+ }
+ return a.Enabled
+}
+
+// Declare explicit types for package paths, names, and IDs to ensure that we
+// never use an ID where a path belongs, and vice versa. If we confused these,
+// it would result in confusing errors because package IDs often look like
+// package paths.
+type (
+ PackageID string // go list's unique identifier for a package (e.g. "vendor/example.com/foo [vendor/example.com/bar.test]")
+ PackagePath string // name used to prefix linker symbols (e.g. "vendor/example.com/foo")
+ PackageName string // identifier in 'package' declaration (e.g. "foo")
+ ImportPath string // path that appears in an import declaration (e.g. "example.com/foo")
+)
+
+// Package represents a Go package that has been parsed and type-checked.
+//
+// By design, there is no way to reach from a Package to the Package
+// representing one of its dependencies.
+//
+// Callers must not assume that two Packages share the same
+// token.FileSet or types.Importer and thus have commensurable
+// token.Pos values or types.Objects. Instead, use stable naming
+// schemes, such as (URI, byte offset) for positions, or (PackagePath,
+// objectpath.Path) for exported declarations.
+type Package interface {
+ Metadata() *Metadata
+
+ // Results of parsing:
+ FileSet() *token.FileSet
+ ParseMode() ParseMode
+ CompiledGoFiles() []*ParsedGoFile // (borrowed)
+ File(uri span.URI) (*ParsedGoFile, error)
+ GetSyntax() []*ast.File // (borrowed)
+ HasParseErrors() bool
+
+ // Results of type checking:
+ GetTypes() *types.Package
+ GetTypesInfo() *types.Info
+ DependencyTypes(PackagePath) *types.Package // nil for indirect dependency of no consequence
+ HasTypeErrors() bool
+ DiagnosticsForFile(ctx context.Context, s Snapshot, uri span.URI) ([]*Diagnostic, error)
+}
+
+type unit = struct{}
+
+// A CriticalError is a workspace-wide error that generally prevents gopls from
+// functioning correctly. In the presence of critical errors, other diagnostics
+// in the workspace may not make sense.
+type CriticalError struct {
+ // MainError is the primary error. Must be non-nil.
+ MainError error
+
+ // Diagnostics contains any supplemental (structured) diagnostics.
+ Diagnostics []*Diagnostic
+}
+
+// A Diagnostic corresponds to an LSP Diagnostic.
+// https://microsoft.github.io/language-server-protocol/specification#diagnostic
+type Diagnostic struct {
+ URI span.URI
+ Range protocol.Range
+ Severity protocol.DiagnosticSeverity
+ Code string
+ CodeHref string
+
+ // Source is a human-readable description of the source of the error.
+ // Diagnostics generated by an analysis.Analyzer set it to Analyzer.Name.
+ Source DiagnosticSource
+
+ Message string
+
+ Tags []protocol.DiagnosticTag
+ Related []protocol.DiagnosticRelatedInformation
+
+ // Fields below are used internally to generate quick fixes. They aren't
+ // part of the LSP spec and don't leave the server.
+ SuggestedFixes []SuggestedFix
+}
+
+func (d *Diagnostic) String() string {
+ return fmt.Sprintf("%v: %s", d.Range, d.Message)
+}
+
+type DiagnosticSource string
+
+const (
+ UnknownError DiagnosticSource = "<Unknown source>"
+ ListError DiagnosticSource = "go list"
+ ParseError DiagnosticSource = "syntax"
+ TypeError DiagnosticSource = "compiler"
+ ModTidyError DiagnosticSource = "go mod tidy"
+ OptimizationDetailsError DiagnosticSource = "optimizer details"
+ UpgradeNotification DiagnosticSource = "upgrade available"
+ Vulncheck DiagnosticSource = "vulncheck imports"
+ Govulncheck DiagnosticSource = "govulncheck"
+ TemplateError DiagnosticSource = "template"
+ WorkFileError DiagnosticSource = "go.work file"
+)
+
+func AnalyzerErrorKind(name string) DiagnosticSource {
+ return DiagnosticSource(name)
+}
diff --git a/gopls/internal/lsp/source/workspace_symbol.go b/gopls/internal/lsp/source/workspace_symbol.go
new file mode 100644
index 000000000..17c3a24fb
--- /dev/null
+++ b/gopls/internal/lsp/source/workspace_symbol.go
@@ -0,0 +1,632 @@
+// Copyright 2020 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package source
+
+import (
+ "context"
+ "fmt"
+ "go/types"
+ "path"
+ "path/filepath"
+ "regexp"
+ "runtime"
+ "sort"
+ "strings"
+ "unicode"
+
+ "golang.org/x/tools/gopls/internal/lsp/protocol"
+ "golang.org/x/tools/gopls/internal/span"
+ "golang.org/x/tools/internal/event"
+ "golang.org/x/tools/internal/fuzzy"
+)
+
+// Symbol holds a precomputed symbol value. Note: we avoid using the
+// protocol.SymbolInformation struct here in order to reduce the size of each
+// symbol.
+type Symbol struct {
+ Name string
+ Kind protocol.SymbolKind
+ Range protocol.Range
+}
+
+// maxSymbols defines the maximum number of symbol results that should ever be
+// sent in response to a client.
+const maxSymbols = 100
+
+// WorkspaceSymbols matches symbols across all views using the given query,
+// according to the match semantics parameterized by matcher and style.
+//
+// The workspace symbol method is defined in the spec as follows:
+//
+// The workspace symbol request is sent from the client to the server to
+// list project-wide symbols matching the query string.
+//
+// It is unclear what "project-wide" means here, but given that the
+// parameters of workspace/symbol do not include any workspace identifier,
+// it must be assumed that "project-wide" means "across all workspaces".
+// Hence WorkspaceSymbols receives the views []View.
+//
+// However, it then becomes unclear what it would mean to call WorkspaceSymbols
+// with a different configured SymbolMatcher per View. Therefore we assume that
+// Session level configuration will define the SymbolMatcher to be used for the
+// WorkspaceSymbols method.
+func WorkspaceSymbols(ctx context.Context, matcher SymbolMatcher, style SymbolStyle, views []View, query string) ([]protocol.SymbolInformation, error) {
+ ctx, done := event.Start(ctx, "source.WorkspaceSymbols")
+ defer done()
+ if query == "" {
+ return nil, nil
+ }
+
+ var s symbolizer
+ switch style {
+ case DynamicSymbols:
+ s = dynamicSymbolMatch
+ case FullyQualifiedSymbols:
+ s = fullyQualifiedSymbolMatch
+ case PackageQualifiedSymbols:
+ s = packageSymbolMatch
+ default:
+ panic(fmt.Errorf("unknown symbol style: %v", style))
+ }
+
+ return collectSymbols(ctx, views, matcher, s, query)
+}
+
+// A matcherFunc returns the index and score of a symbol match.
+//
+// See the comment for collectSymbols for more information.
+type matcherFunc func(chunks []string) (int, float64)
+
+// A symbolizer returns the best symbol match for a name within package
+// pkg, according to some heuristic. The symbol name is passed to the
+// matcher as a slice of logical name pieces. For example, for myType.field
+// the caller can pass either []string{"myType.field"} or
+// []string{"myType.", "field"}.
+//
+// See the comment for collectSymbols for more information.
+//
+// The space argument is an empty slice with spare capacity that may be used
+// to allocate the result.
+type symbolizer func(space []string, name string, pkg *Metadata, m matcherFunc) ([]string, float64)
+
+func fullyQualifiedSymbolMatch(space []string, name string, pkg *Metadata, matcher matcherFunc) ([]string, float64) {
+ if _, score := dynamicSymbolMatch(space, name, pkg, matcher); score > 0 {
+ return append(space, string(pkg.PkgPath), ".", name), score
+ }
+ return nil, 0
+}
+
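+// dynamicSymbolMatch tries the query against the symbol at several
+// levels of qualification (unqualified, package-qualified, fully
+// qualified) and returns the shortest form that still contains the
+// match, discounting the score when qualification is required. For
+// example (an illustrative case, not from the original comment): for a
+// symbol "Bar" in a package with path "example.com/pkg/util" and name
+// "gutil", the matcher is tried against "gutil.Bar" and then
+// "example.com/pkg/util.Bar"; a query that matches within "Bar" itself
+// yields just "Bar" at full score.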
+func dynamicSymbolMatch(space []string, name string, pkg *Metadata, matcher matcherFunc) ([]string, float64) {
+ if IsCommandLineArguments(pkg.ID) {
+		// command-line-arguments packages have a nonsensical package path, so
+		// just use their package name.
+ return packageSymbolMatch(space, name, pkg, matcher)
+ }
+
+ var score float64
+
+ endsInPkgName := strings.HasSuffix(string(pkg.PkgPath), string(pkg.Name))
+
+ // If the package path does not end in the package name, we need to check the
+ // package-qualified symbol as an extra pass first.
+ if !endsInPkgName {
+ pkgQualified := append(space, string(pkg.Name), ".", name)
+ idx, score := matcher(pkgQualified)
+ nameStart := len(pkg.Name) + 1
+ if score > 0 {
+ // If our match is contained entirely within the unqualified portion,
+ // just return that.
+ if idx >= nameStart {
+ return append(space, name), score
+ }
+ // Lower the score for matches that include the package name.
+ return pkgQualified, score * 0.8
+ }
+ }
+
+ // Now try matching the fully qualified symbol.
+ fullyQualified := append(space, string(pkg.PkgPath), ".", name)
+ idx, score := matcher(fullyQualified)
+
+ // As above, check if we matched just the unqualified symbol name.
+ nameStart := len(pkg.PkgPath) + 1
+ if idx >= nameStart {
+ return append(space, name), score
+ }
+
+ // If our package path ends in the package name, we'll have skipped the
+ // initial pass above, so check if we matched just the package-qualified
+ // name.
+ if endsInPkgName && idx >= 0 {
+ pkgStart := len(pkg.PkgPath) - len(pkg.Name)
+ if idx >= pkgStart {
+ return append(space, string(pkg.Name), ".", name), score
+ }
+ }
+
+ // Our match was not contained within the unqualified or package qualified
+ // symbol. Return the fully qualified symbol but discount the score.
+ return fullyQualified, score * 0.6
+}
+
+func packageSymbolMatch(space []string, name string, pkg *Metadata, matcher matcherFunc) ([]string, float64) {
+ qualified := append(space, string(pkg.Name), ".", name)
+ if _, s := matcher(qualified); s > 0 {
+ return qualified, s
+ }
+ return nil, 0
+}
+
+func buildMatcher(matcher SymbolMatcher, query string) matcherFunc {
+ switch matcher {
+ case SymbolFuzzy:
+ return parseQuery(query, newFuzzyMatcher)
+ case SymbolFastFuzzy:
+ return parseQuery(query, func(query string) matcherFunc {
+ return fuzzy.NewSymbolMatcher(query).Match
+ })
+ case SymbolCaseSensitive:
+ return matchExact(query)
+ case SymbolCaseInsensitive:
+ q := strings.ToLower(query)
+ exact := matchExact(q)
+ wrapper := []string{""}
+ return func(chunks []string) (int, float64) {
+ s := strings.Join(chunks, "")
+ wrapper[0] = strings.ToLower(s)
+ return exact(wrapper)
+ }
+ }
+ panic(fmt.Errorf("unknown symbol matcher: %v", matcher))
+}
+
+func newFuzzyMatcher(query string) matcherFunc {
+ fm := fuzzy.NewMatcher(query)
+ return func(chunks []string) (int, float64) {
+ score := float64(fm.ScoreChunks(chunks))
+ ranges := fm.MatchedRanges()
+ if len(ranges) > 0 {
+ return ranges[0], score
+ }
+ return -1, score
+ }
+}
+
+// parseQuery parses a field-separated symbol query, extracting the special
+// characters listed below, and returns a matcherFunc corresponding to the AND
+// of all field queries.
+//
+// Special characters:
+//
+// ^ match exact prefix
+// $ match exact suffix
+// ' match exact
+//
+// In all three of these special queries, matches are 'smart-cased', meaning
+// they are case sensitive if the symbol query contains any upper-case
+// characters, and case insensitive otherwise.
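+//
+// For example (an illustrative sketch, not from the original comment):
+//
+//	m := parseQuery("^foo 'bar", newFuzzyMatcher)
+//	_, score := m([]string{"foobarbaz"}) // score > 0: prefix and exact match
+//	_, score = m([]string{"bazbarqux"})  // score == 0: prefix match fails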
+func parseQuery(q string, newMatcher func(string) matcherFunc) matcherFunc {
+ fields := strings.Fields(q)
+ if len(fields) == 0 {
+ return func([]string) (int, float64) { return -1, 0 }
+ }
+ var funcs []matcherFunc
+ for _, field := range fields {
+ var f matcherFunc
+ switch {
+ case strings.HasPrefix(field, "^"):
+ prefix := field[1:]
+ f = smartCase(prefix, func(chunks []string) (int, float64) {
+ s := strings.Join(chunks, "")
+ if strings.HasPrefix(s, prefix) {
+ return 0, 1
+ }
+ return -1, 0
+ })
+ case strings.HasPrefix(field, "'"):
+ exact := field[1:]
+ f = smartCase(exact, matchExact(exact))
+ case strings.HasSuffix(field, "$"):
+ suffix := field[0 : len(field)-1]
+ f = smartCase(suffix, func(chunks []string) (int, float64) {
+ s := strings.Join(chunks, "")
+ if strings.HasSuffix(s, suffix) {
+ return len(s) - len(suffix), 1
+ }
+ return -1, 0
+ })
+ default:
+ f = newMatcher(field)
+ }
+ funcs = append(funcs, f)
+ }
+ if len(funcs) == 1 {
+ return funcs[0]
+ }
+ return comboMatcher(funcs).match
+}
+
+func matchExact(exact string) matcherFunc {
+ return func(chunks []string) (int, float64) {
+ s := strings.Join(chunks, "")
+ if idx := strings.LastIndex(s, exact); idx >= 0 {
+ return idx, 1
+ }
+ return -1, 0
+ }
+}
+
+// smartCase returns a matcherFunc that is case-sensitive if q contains any
+// upper-case characters, and case-insensitive otherwise.
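+// For example (illustrative): with q == "foo" the input chunks are
+// lower-cased before matching, whereas q == "Foo" leaves them unchanged.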
+func smartCase(q string, m matcherFunc) matcherFunc {
+ insensitive := strings.ToLower(q) == q
+ wrapper := []string{""}
+ return func(chunks []string) (int, float64) {
+ s := strings.Join(chunks, "")
+ if insensitive {
+ s = strings.ToLower(s)
+ }
+ wrapper[0] = s
+ return m(wrapper)
+ }
+}
+
+type comboMatcher []matcherFunc
+
+func (c comboMatcher) match(chunks []string) (int, float64) {
+ score := 1.0
+ first := 0
+ for _, f := range c {
+ idx, s := f(chunks)
+ if idx < first {
+ first = idx
+ }
+ score *= s
+ }
+ return first, score
+}
+
+// collectSymbols calls snapshot.Symbols to walk the syntax trees of
+// all files in the views' current snapshots, and returns a sorted,
+// scored list of symbols that best match the parameters.
+//
+// How it matches symbols is parameterized by two interfaces:
+// - A matcherFunc determines how well a string symbol matches a query. It
+// returns a non-negative score indicating the quality of the match. A score
+// of zero indicates no match.
+// - A symbolizer determines how we extract the symbol for an object. This
+// enables the 'symbolStyle' configuration option.
+func collectSymbols(ctx context.Context, views []View, matcherType SymbolMatcher, symbolizer symbolizer, query string) ([]protocol.SymbolInformation, error) {
+ // Extract symbols from all files.
+ var work []symbolFile
+ var roots []string
+ seen := make(map[span.URI]bool)
+ // TODO(adonovan): opt: parallelize this loop? How often is len > 1?
+ for _, v := range views {
+ snapshot, release, err := v.Snapshot()
+ if err != nil {
+ continue // view is shut down; continue with others
+ }
+ defer release()
+
+ // Use the root view URIs for determining (lexically)
+ // whether a URI is in any open workspace.
+ roots = append(roots, strings.TrimRight(string(v.Folder()), "/"))
+
+ filters := v.Options().DirectoryFilters
+ filterer := NewFilterer(filters)
+ folder := filepath.ToSlash(v.Folder().Filename())
+ symbols, err := snapshot.Symbols(ctx)
+ if err != nil {
+ return nil, err
+ }
+ for uri, syms := range symbols {
+ norm := filepath.ToSlash(uri.Filename())
+ nm := strings.TrimPrefix(norm, folder)
+ if filterer.Disallow(nm) {
+ continue
+ }
+ // Only scan each file once.
+ if seen[uri] {
+ continue
+ }
+ mds, err := snapshot.MetadataForFile(ctx, uri)
+ if err != nil {
+ event.Error(ctx, fmt.Sprintf("missing metadata for %q", uri), err)
+ continue
+ }
+ if len(mds) == 0 {
+ // TODO: should use the bug reporting API
+ continue
+ }
+ seen[uri] = true
+ work = append(work, symbolFile{uri, mds[0], syms})
+ }
+ }
+
+ // Match symbols in parallel.
+ // Each worker has its own symbolStore,
+ // which we merge at the end.
+ nmatchers := runtime.GOMAXPROCS(-1) // matching is CPU bound
+ results := make(chan *symbolStore)
+ for i := 0; i < nmatchers; i++ {
+ go func(i int) {
+ matcher := buildMatcher(matcherType, query)
+ store := new(symbolStore)
+ // Assign files to workers in round-robin fashion.
+ for j := i; j < len(work); j += nmatchers {
+ matchFile(store, symbolizer, matcher, roots, work[j])
+ }
+ results <- store
+ }(i)
+ }
+
+ // Gather and merge results as they arrive.
+ var unified symbolStore
+ for i := 0; i < nmatchers; i++ {
+ store := <-results
+ for _, syms := range store.res {
+ unified.store(syms)
+ }
+ }
+ return unified.results(), nil
+}
+
+type Filterer struct {
+	// Whether a filter excludes or includes a path depends on its operator
+	// (the first char of the raw filter). The filters and excluded slices
+	// must therefore have the same length.
+ filters []*regexp.Regexp
+ excluded []bool
+}
+
+// NewFilterer computes the regular expression form of all raw filters.
+func NewFilterer(rawFilters []string) *Filterer {
+ var f Filterer
+ for _, filter := range rawFilters {
+ filter = path.Clean(filepath.ToSlash(filter))
+ // TODO(dungtuanle): fix: validate [+-] prefix.
+ op, prefix := filter[0], filter[1:]
+		// convertFilterToRegexp adds "/" at the end of the prefix to handle cases
+		// where a filter is a prefix of another filter.
+		// For example, it prevents [+foobar, -foo] from excluding "foobar".
+ f.filters = append(f.filters, convertFilterToRegexp(filepath.ToSlash(prefix)))
+ f.excluded = append(f.excluded, op == '-')
+ }
+
+ return &f
+}
+
+// Disallow reports whether the given path is excluded by the Filterer's filters.
+func (f *Filterer) Disallow(path string) bool {
+ // Ensure trailing but not leading slash.
+ path = strings.TrimPrefix(path, "/")
+ if !strings.HasSuffix(path, "/") {
+ path += "/"
+ }
+
+ // TODO(adonovan): opt: iterate in reverse and break at first match.
+ excluded := false
+ for i, filter := range f.filters {
+ if filter.MatchString(path) {
+ excluded = f.excluded[i] // last match wins
+ }
+ }
+ return excluded
+}
+
+// convertFilterToRegexp replaces glob-like operator substrings in a string file path to their equivalent regex forms.
+// Supporting glob-like operators:
+// - **: match zero or more complete path segments
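+//
+// For example (derived by tracing the code below, not normative):
+// "a/**/c" compiles to the pattern "^a/.*/c/", while "**/node_modules"
+// compiles to "/node_modules/" once the leading "^.*" is trimmed.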
+func convertFilterToRegexp(filter string) *regexp.Regexp {
+ if filter == "" {
+ return regexp.MustCompile(".*")
+ }
+ var ret strings.Builder
+ ret.WriteString("^")
+ segs := strings.Split(filter, "/")
+ for _, seg := range segs {
+ // Inv: seg != "" since path is clean.
+ if seg == "**" {
+ ret.WriteString(".*")
+ } else {
+ ret.WriteString(regexp.QuoteMeta(seg))
+ }
+ ret.WriteString("/")
+ }
+ pattern := ret.String()
+
+	// Remove the unnecessary "^.*" prefix, which increased
+	// BenchmarkWorkspaceSymbols time by ~20% (even though
+	// filter CPU time increased by only ~2.5%) when the
+	// default filter was changed to "**/node_modules".
+ pattern = strings.TrimPrefix(pattern, "^.*")
+
+ return regexp.MustCompile(pattern)
+}
+
+// symbolFile holds symbol information for a single file.
+type symbolFile struct {
+ uri span.URI
+ md *Metadata
+ syms []Symbol
+}
+
+// matchFile scans a symbol file and adds matching symbols to the store.
+func matchFile(store *symbolStore, symbolizer symbolizer, matcher matcherFunc, roots []string, i symbolFile) {
+ space := make([]string, 0, 3)
+ for _, sym := range i.syms {
+ symbolParts, score := symbolizer(space, sym.Name, i.md, matcher)
+
+ // Check if the score is too low before applying any downranking.
+ if store.tooLow(score) {
+ continue
+ }
+
+ // Factors to apply to the match score for the purpose of downranking
+ // results.
+ //
+ // These numbers were crudely calibrated based on trial-and-error using a
+ // small number of sample queries. Adjust as necessary.
+ //
+ // All factors are multiplicative, meaning if more than one applies they are
+ // multiplied together.
+ const (
+ // nonWorkspaceFactor is applied to symbols outside of any active
+ // workspace. Developers are less likely to want to jump to code that they
+ // are not actively working on.
+ nonWorkspaceFactor = 0.5
+ // nonWorkspaceUnexportedFactor is applied to unexported symbols outside of
+ // any active workspace. Since one wouldn't usually jump to unexported
+ // symbols to understand a package API, they are particularly irrelevant.
+ nonWorkspaceUnexportedFactor = 0.5
+ // every field or method nesting level to access the field decreases
+ // the score by a factor of 1.0 - depth*depthFactor, up to a depth of
+ // 3.
+ depthFactor = 0.2
+ )
+
+ startWord := true
+ exported := true
+ depth := 0.0
+ for _, r := range sym.Name {
+ if startWord && !unicode.IsUpper(r) {
+ exported = false
+ }
+ if r == '.' {
+ startWord = true
+ depth++
+ } else {
+ startWord = false
+ }
+ }
+
+ inWorkspace := false
+ for _, root := range roots {
+ if strings.HasPrefix(string(i.uri), root) {
+ inWorkspace = true
+ break
+ }
+ }
+
+ // Apply downranking based on workspace position.
+ if !inWorkspace {
+ score *= nonWorkspaceFactor
+ if !exported {
+ score *= nonWorkspaceUnexportedFactor
+ }
+ }
+
+ // Apply downranking based on symbol depth.
+ if depth > 3 {
+ depth = 3
+ }
+ score *= 1.0 - depth*depthFactor
+
+ if store.tooLow(score) {
+ continue
+ }
+
+ si := symbolInformation{
+ score: score,
+ symbol: strings.Join(symbolParts, ""),
+ kind: sym.Kind,
+ uri: i.uri,
+ rng: sym.Range,
+ container: string(i.md.PkgPath),
+ }
+ store.store(si)
+ }
+}
+
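+// A symbolStore keeps the maxSymbols best results seen so far, ordered
+// best-first; res acts as a bounded, insertion-sorted buffer whose
+// lowest-scoring entry is dropped on each successful insertion.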
+type symbolStore struct {
+ res [maxSymbols]symbolInformation
+}
+
+// store inserts si into the sorted results, if si has a high enough score.
+func (sc *symbolStore) store(si symbolInformation) {
+ if sc.tooLow(si.score) {
+ return
+ }
+ insertAt := sort.Search(len(sc.res), func(i int) bool {
+ // Sort by score, then symbol length, and finally lexically.
+ if sc.res[i].score != si.score {
+ return sc.res[i].score < si.score
+ }
+ if len(sc.res[i].symbol) != len(si.symbol) {
+ return len(sc.res[i].symbol) > len(si.symbol)
+ }
+ return sc.res[i].symbol > si.symbol
+ })
+ if insertAt < len(sc.res)-1 {
+ copy(sc.res[insertAt+1:], sc.res[insertAt:len(sc.res)-1])
+ }
+ sc.res[insertAt] = si
+}
+
+func (sc *symbolStore) tooLow(score float64) bool {
+ return score <= sc.res[len(sc.res)-1].score
+}
+
+func (sc *symbolStore) results() []protocol.SymbolInformation {
+ var res []protocol.SymbolInformation
+ for _, si := range sc.res {
+ if si.score <= 0 {
+ return res
+ }
+ res = append(res, si.asProtocolSymbolInformation())
+ }
+ return res
+}
+
+func typeToKind(typ types.Type) protocol.SymbolKind {
+ switch typ := typ.Underlying().(type) {
+ case *types.Interface:
+ return protocol.Interface
+ case *types.Struct:
+ return protocol.Struct
+ case *types.Signature:
+ if typ.Recv() != nil {
+ return protocol.Method
+ }
+ return protocol.Function
+ case *types.Named:
+ return typeToKind(typ.Underlying())
+ case *types.Basic:
+ i := typ.Info()
+ switch {
+ case i&types.IsNumeric != 0:
+ return protocol.Number
+ case i&types.IsBoolean != 0:
+ return protocol.Boolean
+ case i&types.IsString != 0:
+ return protocol.String
+ }
+ }
+ return protocol.Variable
+}
+
+// symbolInformation is a cut-down version of protocol.SymbolInformation that
+// allows struct values of this type to be used as map keys.
+type symbolInformation struct {
+ score float64
+ symbol string
+ container string
+ kind protocol.SymbolKind
+ uri span.URI
+ rng protocol.Range
+}
+
+// asProtocolSymbolInformation converts s to a protocol.SymbolInformation value.
+//
+// TODO: work out how to handle tags if/when they are needed.
+func (s symbolInformation) asProtocolSymbolInformation() protocol.SymbolInformation {
+ return protocol.SymbolInformation{
+ Name: s.symbol,
+ Kind: s.kind,
+ Location: protocol.Location{
+ URI: protocol.URIFromSpanURI(s.uri),
+ Range: s.rng,
+ },
+ ContainerName: s.container,
+ }
+}
diff --git a/gopls/internal/lsp/source/workspace_symbol_test.go b/gopls/internal/lsp/source/workspace_symbol_test.go
new file mode 100644
index 000000000..24fb8b452
--- /dev/null
+++ b/gopls/internal/lsp/source/workspace_symbol_test.go
@@ -0,0 +1,136 @@
+// Copyright 2020 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package source
+
+import (
+ "testing"
+)
+
+func TestParseQuery(t *testing.T) {
+ tests := []struct {
+ query, s string
+ wantMatch bool
+ }{
+ {"", "anything", false},
+ {"any", "anything", true},
+ {"any$", "anything", false},
+ {"ing$", "anything", true},
+ {"ing$", "anythinG", true},
+ {"inG$", "anything", false},
+ {"^any", "anything", true},
+ {"^any", "Anything", true},
+ {"^Any", "anything", false},
+ {"at", "anything", true},
+ // TODO: this appears to be a bug in the fuzzy matching algorithm. 'At'
+ // should cause a case-sensitive match.
+ // {"At", "anything", false},
+ {"At", "Anything", true},
+ {"'yth", "Anything", true},
+ {"'yti", "Anything", false},
+ {"'any 'thing", "Anything", true},
+ {"anythn nythg", "Anything", true},
+ {"ntx", "Anything", false},
+ {"anythn", "anything", true},
+ {"ing", "anything", true},
+ {"anythn nythgx", "anything", false},
+ }
+
+ for _, test := range tests {
+ matcher := parseQuery(test.query, newFuzzyMatcher)
+ if _, score := matcher([]string{test.s}); score > 0 != test.wantMatch {
+ t.Errorf("parseQuery(%q) match for %q: %.2g, want match: %t", test.query, test.s, score, test.wantMatch)
+ }
+ }
+}
+
+func TestFiltererDisallow(t *testing.T) {
+ tests := []struct {
+ filters []string
+ included []string
+ excluded []string
+ }{
+ {
+ []string{"+**/c.go"},
+ []string{"a/c.go", "a/b/c.go"},
+ []string{},
+ },
+ {
+ []string{"+a/**/c.go"},
+ []string{"a/b/c.go", "a/b/d/c.go", "a/c.go"},
+ []string{},
+ },
+ {
+ []string{"-a/c.go", "+a/**"},
+ []string{"a/c.go"},
+ []string{},
+ },
+ {
+ []string{"+a/**/c.go", "-**/c.go"},
+ []string{},
+ []string{"a/b/c.go"},
+ },
+ {
+ []string{"+a/**/c.go", "-a/**"},
+ []string{},
+ []string{"a/b/c.go"},
+ },
+ {
+ []string{"+**/c.go", "-a/**/c.go"},
+ []string{},
+ []string{"a/b/c.go"},
+ },
+ {
+ []string{"+foobar", "-foo"},
+ []string{"foobar", "foobar/a"},
+ []string{"foo", "foo/a"},
+ },
+ {
+ []string{"+", "-"},
+ []string{},
+ []string{"foobar", "foobar/a", "foo", "foo/a"},
+ },
+ {
+ []string{"-", "+"},
+ []string{"foobar", "foobar/a", "foo", "foo/a"},
+ []string{},
+ },
+ {
+ []string{"-a/**/b/**/c.go"},
+ []string{},
+ []string{"a/x/y/z/b/f/g/h/c.go"},
+ },
+ // tests for unsupported glob operators
+ {
+ []string{"+**/c.go", "-a/*/c.go"},
+ []string{"a/b/c.go"},
+ []string{},
+ },
+ {
+ []string{"+**/c.go", "-a/?/c.go"},
+ []string{"a/b/c.go"},
+ []string{},
+ },
+ {
+ []string{"-b"}, // should only filter paths prefixed with the "b" directory
+ []string{"a/b/c.go", "bb"},
+ []string{"b/c/d.go", "b"},
+ },
+ }
+
+ for _, test := range tests {
+ filterer := NewFilterer(test.filters)
+ for _, inc := range test.included {
+ if filterer.Disallow(inc) {
+ t.Errorf("Filters %v excluded %v, wanted included", test.filters, inc)
+ }
+ }
+
+ for _, exc := range test.excluded {
+ if !filterer.Disallow(exc) {
+ t.Errorf("Filters %v included %v, wanted excluded", test.filters, exc)
+ }
+ }
+ }
+}
diff --git a/gopls/internal/lsp/source/xrefs/xrefs.go b/gopls/internal/lsp/source/xrefs/xrefs.go
new file mode 100644
index 000000000..23c758266
--- /dev/null
+++ b/gopls/internal/lsp/source/xrefs/xrefs.go
@@ -0,0 +1,216 @@
+// Copyright 2022 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Package xrefs defines the serializable index of cross-package
+// references that is computed during type checking.
+//
+// See ../references2.go for the 'references' query.
+package xrefs
+
+import (
+ "bytes"
+ "encoding/gob"
+ "go/ast"
+ "go/types"
+ "log"
+ "sort"
+
+ "golang.org/x/tools/go/types/objectpath"
+ "golang.org/x/tools/gopls/internal/lsp/protocol"
+ "golang.org/x/tools/gopls/internal/lsp/source"
+ "golang.org/x/tools/internal/typesinternal"
+)
+
+// Index constructs a serializable index of outbound cross-references
+// for the specified type-checked package.
+func Index(files []*source.ParsedGoFile, pkg *types.Package, info *types.Info) []byte {
+ // pkgObjects maps each referenced package Q to a mapping:
+ // from each referenced symbol in Q to the ordered list
+ // of references to that symbol from this package.
+ // A nil types.Object indicates a reference
+ // to the package as a whole: an import.
+ pkgObjects := make(map[*types.Package]map[types.Object]*gobObject)
+
+ // getObjects returns the object-to-references mapping for a package.
+ getObjects := func(pkg *types.Package) map[types.Object]*gobObject {
+ objects, ok := pkgObjects[pkg]
+ if !ok {
+ objects = make(map[types.Object]*gobObject)
+ pkgObjects[pkg] = objects
+ }
+ return objects
+ }
+
+ objectpathFor := typesinternal.NewObjectpathFunc()
+
+ for fileIndex, pgf := range files {
+
+ nodeRange := func(n ast.Node) protocol.Range {
+ rng, err := pgf.PosRange(n.Pos(), n.End())
+ if err != nil {
+ panic(err) // can't fail
+ }
+ return rng
+ }
+
+ ast.Inspect(pgf.File, func(n ast.Node) bool {
+ switch n := n.(type) {
+ case *ast.Ident:
+ // Report a reference for each identifier that
+ // uses a symbol exported from another package.
+ // (The built-in error.Error method has no package.)
+ if n.IsExported() {
+ if obj, ok := info.Uses[n]; ok &&
+ obj.Pkg() != nil &&
+ obj.Pkg() != pkg {
+
+ objects := getObjects(obj.Pkg())
+ gobObj, ok := objects[obj]
+ if !ok {
+ path, err := objectpathFor(obj)
+ if err != nil {
+ // Capitalized but not exported
+ // (e.g. local const/var/type).
+ return true
+ }
+ gobObj = &gobObject{Path: path}
+ objects[obj] = gobObj
+ }
+
+ gobObj.Refs = append(gobObj.Refs, gobRef{
+ FileIndex: fileIndex,
+ Range: nodeRange(n),
+ })
+ }
+ }
+
+ case *ast.ImportSpec:
+ // Report a reference from each import path
+ // string to the imported package.
+ var obj types.Object
+ if n.Name != nil {
+ obj = info.Defs[n.Name]
+ } else {
+ obj = info.Implicits[n]
+ }
+ if obj == nil {
+ return true // missing import
+ }
+ objects := getObjects(obj.(*types.PkgName).Imported())
+ gobObj, ok := objects[nil]
+ if !ok {
+ gobObj = &gobObject{Path: ""}
+ objects[nil] = gobObj
+ }
+ gobObj.Refs = append(gobObj.Refs, gobRef{
+ FileIndex: fileIndex,
+ Range: nodeRange(n.Path),
+ })
+ }
+ return true
+ })
+ }
+
+ // Flatten the maps into slices, and sort for determinism.
+ var packages []*gobPackage
+ for p := range pkgObjects {
+ objects := pkgObjects[p]
+ gp := &gobPackage{
+ PkgPath: source.PackagePath(p.Path()),
+ Objects: make([]*gobObject, 0, len(objects)),
+ }
+ for _, gobObj := range objects {
+ gp.Objects = append(gp.Objects, gobObj)
+ }
+ sort.Slice(gp.Objects, func(i, j int) bool {
+ return gp.Objects[i].Path < gp.Objects[j].Path
+ })
+ packages = append(packages, gp)
+ }
+ sort.Slice(packages, func(i, j int) bool {
+ return packages[i].PkgPath < packages[j].PkgPath
+ })
+
+ return mustEncode(packages)
+}
+
+// Lookup searches a serialized index produced by an Index operation
+// on m, and returns the locations of all references from m
+// to any object in the target set. Each object is denoted by a pair
+// of (package path, object path).
+func Lookup(m *source.Metadata, data []byte, targets map[source.PackagePath]map[objectpath.Path]struct{}) (locs []protocol.Location) {
+
+ // TODO(adonovan): opt: evaluate whether it would be faster to decode
+ // in two passes, first with struct { PkgPath string; Objects BLOB }
+ // to find the relevant record without decoding the Objects slice,
+ // then decode just the desired BLOB into a slice. BLOB would be a
+ // type whose Unmarshal method just retains (a copy of) the bytes.
+ var packages []gobPackage
+ mustDecode(data, &packages)
+
+ for _, gp := range packages {
+ if objectSet, ok := targets[gp.PkgPath]; ok {
+ for _, gobObj := range gp.Objects {
+ if _, ok := objectSet[gobObj.Path]; ok {
+ for _, ref := range gobObj.Refs {
+ uri := m.CompiledGoFiles[ref.FileIndex]
+ locs = append(locs, protocol.Location{
+ URI: protocol.URIFromSpanURI(uri),
+ Range: ref.Range,
+ })
+ }
+ }
+ }
+ }
+ }
+
+ return locs
+}
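+
+// A hypothetical sketch of how Index and Lookup pair up (the metadata m,
+// the inputs to Index, and the target path are placeholders, not taken
+// from this file):
+//
+//	data := Index(files, typesPkg, typesInfo)
+//	targets := map[source.PackagePath]map[objectpath.Path]struct{}{
+//		"example.com/q": {"Obj": {}}, // package-level object "Obj"
+//	}
+//	locs := Lookup(m, data, targets)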
+
+// -- serialized representation --
+
+// The cross-reference index records the location of all references
+// from one package to symbols defined in other packages
+// (dependencies). It does not record within-package references.
+// The index for package P consists of a list of gobPackage records,
+// each enumerating references to symbols defined in a single dependency, Q.
+
+// TODO(adonovan): opt: choose a more compact encoding. Gzip reduces
+// the gob output to about one third its size, so clearly there's room
+// to improve. The gobRef.Range field is the obvious place to begin.
+// Even a zero-length slice gob-encodes to ~285 bytes.
+
+// A gobPackage records the set of outgoing references from the index
+// package to symbols defined in a dependency package.
+type gobPackage struct {
+ PkgPath source.PackagePath // defining package (Q)
+ Objects []*gobObject // set of Q objects referenced by P
+}
+
+// A gobObject records all references to a particular symbol.
+type gobObject struct {
+ Path objectpath.Path // symbol name within package; "" => import of package itself
+ Refs []gobRef // locations of references within P, in lexical order
+}
+
+type gobRef struct {
+ FileIndex int // index of enclosing file within P's CompiledGoFiles
+ Range protocol.Range // source range of reference
+}
+
+// -- duplicated from ../../cache/analysis.go --
+
+func mustEncode(x interface{}) []byte {
+ var buf bytes.Buffer
+ if err := gob.NewEncoder(&buf).Encode(x); err != nil {
+ log.Fatalf("internal error encoding %T: %v", x, err)
+ }
+ return buf.Bytes()
+}
+
+func mustDecode(data []byte, ptr interface{}) {
+ if err := gob.NewDecoder(bytes.NewReader(data)).Decode(ptr); err != nil {
+ log.Fatalf("internal error decoding %T: %v", ptr, err)
+ }
+}