Diffstat (limited to 'internal/lsp/source')
-rw-r--r--  internal/lsp/source/add_import.go | 26
-rwxr-xr-x  internal/lsp/source/api_json.go | 972
-rw-r--r--  internal/lsp/source/call_hierarchy.go | 310
-rw-r--r--  internal/lsp/source/code_lens.go | 244
-rw-r--r--  internal/lsp/source/comment.go | 381
-rw-r--r--  internal/lsp/source/comment_test.go | 368
-rw-r--r--  internal/lsp/source/completion/builtin.go | 147
-rw-r--r--  internal/lsp/source/completion/completion.go | 2967
-rw-r--r--  internal/lsp/source/completion/deep_completion.go | 362
-rw-r--r--  internal/lsp/source/completion/deep_completion_test.go | 33
-rw-r--r--  internal/lsp/source/completion/definition.go | 127
-rw-r--r--  internal/lsp/source/completion/format.go | 340
-rw-r--r--  internal/lsp/source/completion/fuzz.go | 142
-rw-r--r--  internal/lsp/source/completion/keywords.go | 154
-rw-r--r--  internal/lsp/source/completion/labels.go | 112
-rw-r--r--  internal/lsp/source/completion/literal.go | 440
-rw-r--r--  internal/lsp/source/completion/package.go | 364
-rw-r--r--  internal/lsp/source/completion/package_test.go | 77
-rw-r--r--  internal/lsp/source/completion/postfix_snippets.go | 461
-rw-r--r--  internal/lsp/source/completion/printf.go | 172
-rw-r--r--  internal/lsp/source/completion/printf_test.go | 72
-rw-r--r--  internal/lsp/source/completion/snippet.go | 115
-rw-r--r--  internal/lsp/source/completion/statements.go | 360
-rw-r--r--  internal/lsp/source/completion/util.go | 326
-rw-r--r--  internal/lsp/source/completion/util_test.go | 28
-rw-r--r--  internal/lsp/source/diagnostics.go | 84
-rw-r--r--  internal/lsp/source/extract.go | 1307
-rw-r--r--  internal/lsp/source/fix.go | 140
-rw-r--r--  internal/lsp/source/folding_range.go | 185
-rw-r--r--  internal/lsp/source/format.go | 387
-rw-r--r--  internal/lsp/source/format_test.go | 91
-rw-r--r--  internal/lsp/source/gc_annotations.go | 214
-rw-r--r--  internal/lsp/source/highlight.go | 509
-rw-r--r--  internal/lsp/source/hover.go | 870
-rw-r--r--  internal/lsp/source/identifier.go | 576
-rw-r--r--  internal/lsp/source/identifier_test.go | 128
-rw-r--r--  internal/lsp/source/implementation.go | 446
-rw-r--r--  internal/lsp/source/known_packages.go | 118
-rw-r--r--  internal/lsp/source/offset_test.go | 71
-rw-r--r--  internal/lsp/source/options.go | 1449
-rw-r--r--  internal/lsp/source/options_test.go | 183
-rw-r--r--  internal/lsp/source/references.go | 200
-rw-r--r--  internal/lsp/source/rename.go | 371
-rw-r--r--  internal/lsp/source/rename_check.go | 936
-rw-r--r--  internal/lsp/source/signature_help.go | 181
-rw-r--r--  internal/lsp/source/source_test.go | 984
-rw-r--r--  internal/lsp/source/stub.go | 330
-rw-r--r--  internal/lsp/source/symbols.go | 266
-rw-r--r--  internal/lsp/source/types_format.go | 459
-rw-r--r--  internal/lsp/source/util.go | 586
-rw-r--r--  internal/lsp/source/view.go | 696
-rw-r--r--  internal/lsp/source/workspace_symbol.go | 593
-rw-r--r--  internal/lsp/source/workspace_symbol_test.go | 46
53 files changed, 0 insertions, 21506 deletions
diff --git a/internal/lsp/source/add_import.go b/internal/lsp/source/add_import.go
deleted file mode 100644
index 816acc2c2..000000000
--- a/internal/lsp/source/add_import.go
+++ /dev/null
@@ -1,26 +0,0 @@
-// Copyright 2020 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-package source
-
-import (
- "context"
-
- "golang.org/x/tools/internal/imports"
- "golang.org/x/tools/internal/lsp/protocol"
-)
-
-// AddImport adds a single import statement to the given file
-func AddImport(ctx context.Context, snapshot Snapshot, fh VersionedFileHandle, importPath string) ([]protocol.TextEdit, error) {
- _, pgf, err := GetParsedFile(ctx, snapshot, fh, NarrowestPackage)
- if err != nil {
- return nil, err
- }
- return ComputeOneImportFixEdits(snapshot, pgf, &imports.ImportFix{
- StmtInfo: imports.ImportInfo{
- ImportPath: importPath,
- },
- FixType: imports.AddImport,
- })
-}
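
Editorial aside, not part of the patch: the deleted AddImport helper above took a snapshot, a versioned file handle, and an import path, and returned protocol.TextEdits. A minimal sketch of how a caller inside the old internal/lsp packages might have wrapped it is shown below; the function name suggestImportEdits and the assumption that the caller already holds a Snapshot and VersionedFileHandle are illustrative, not taken from this commit.

package sketch // hypothetical package, for illustration only

import (
	"context"

	"golang.org/x/tools/internal/lsp/protocol"
	"golang.org/x/tools/internal/lsp/source"
)

// suggestImportEdits asks the (now-removed) source.AddImport for the edits
// that add importPath to the file identified by fh. Forwarding the edits to
// the client (e.g. via workspace/applyEdit) is left to the caller.
func suggestImportEdits(ctx context.Context, snapshot source.Snapshot, fh source.VersionedFileHandle, importPath string) ([]protocol.TextEdit, error) {
	return source.AddImport(ctx, snapshot, fh, importPath)
}
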
diff --git a/internal/lsp/source/api_json.go b/internal/lsp/source/api_json.go
deleted file mode 100755
index 14140bb63..000000000
--- a/internal/lsp/source/api_json.go
+++ /dev/null
@@ -1,972 +0,0 @@
-// Code generated by "golang.org/x/tools/gopls/doc/generate"; DO NOT EDIT.
-
-package source
-
-var GeneratedAPIJSON = &APIJSON{
- Options: map[string][]*OptionJSON{
- "User": {
- {
- Name: "buildFlags",
- Type: "[]string",
- Doc: "buildFlags is the set of flags passed on to the build system when invoked.\nIt is applied to queries like `go list`, which is used when discovering files.\nThe most common use is to set `-tags`.\n",
- Default: "[]",
- Hierarchy: "build",
- },
- {
- Name: "env",
- Type: "map[string]string",
- Doc: "env adds environment variables to external commands run by `gopls`, most notably `go list`.\n",
- Default: "{}",
- Hierarchy: "build",
- },
- {
- Name: "directoryFilters",
- Type: "[]string",
- Doc: "directoryFilters can be used to exclude unwanted directories from the\nworkspace. By default, all directories are included. Filters are an\noperator, `+` to include and `-` to exclude, followed by a path prefix\nrelative to the workspace folder. They are evaluated in order, and\nthe last filter that applies to a path controls whether it is included.\nThe path prefix can be empty, so an initial `-` excludes everything.\n\nExamples:\n\nExclude node_modules: `-node_modules`\n\nInclude only project_a: `-` (exclude everything), `+project_a`\n\nInclude only project_a, but not node_modules inside it: `-`, `+project_a`, `-project_a/node_modules`\n",
- Default: "[\"-node_modules\"]",
- Hierarchy: "build",
- },
- {
- Name: "templateExtensions",
- Type: "[]string",
- Doc: "templateExtensions gives the extensions of file names that are treateed\nas template files. (The extension\nis the part of the file name after the final dot.)\n",
- Default: "[]",
- Hierarchy: "build",
- },
- {
- Name: "memoryMode",
- Type: "enum",
- Doc: "memoryMode controls the tradeoff `gopls` makes between memory usage and\ncorrectness.\n\nValues other than `Normal` are untested and may break in surprising ways.\n",
- EnumValues: []EnumValue{
- {
- Value: "\"DegradeClosed\"",
- Doc: "`\"DegradeClosed\"`: In DegradeClosed mode, `gopls` will collect less information about\npackages without open files. As a result, features like Find\nReferences and Rename will miss results in such packages.\n",
- },
- {Value: "\"Normal\""},
- },
- Default: "\"Normal\"",
- Status: "experimental",
- Hierarchy: "build",
- },
- {
- Name: "expandWorkspaceToModule",
- Type: "bool",
- Doc: "expandWorkspaceToModule instructs `gopls` to adjust the scope of the\nworkspace to find the best available module root. `gopls` first looks for\na go.mod file in any parent directory of the workspace folder, expanding\nthe scope to that directory if it exists. If no viable parent directory is\nfound, gopls will check if there is exactly one child directory containing\na go.mod file, narrowing the scope to that directory if it exists.\n",
- Default: "true",
- Status: "experimental",
- Hierarchy: "build",
- },
- {
- Name: "experimentalWorkspaceModule",
- Type: "bool",
- Doc: "experimentalWorkspaceModule opts a user into the experimental support\nfor multi-module workspaces.\n",
- Default: "false",
- Status: "experimental",
- Hierarchy: "build",
- },
- {
- Name: "experimentalPackageCacheKey",
- Type: "bool",
- Doc: "experimentalPackageCacheKey controls whether to use a coarser cache key\nfor package type information to increase cache hits. This setting removes\nthe user's environment, build flags, and working directory from the cache\nkey, which should be a safe change as all relevant inputs into the type\nchecking pass are already hashed into the key. This is temporarily guarded\nby an experiment because caching behavior is subtle and difficult to\ncomprehensively test.\n",
- Default: "true",
- Status: "experimental",
- Hierarchy: "build",
- },
- {
- Name: "allowModfileModifications",
- Type: "bool",
- Doc: "allowModfileModifications disables -mod=readonly, allowing imports from\nout-of-scope modules. This option will eventually be removed.\n",
- Default: "false",
- Status: "experimental",
- Hierarchy: "build",
- },
- {
- Name: "allowImplicitNetworkAccess",
- Type: "bool",
- Doc: "allowImplicitNetworkAccess disables GOPROXY=off, allowing implicit module\ndownloads rather than requiring user action. This option will eventually\nbe removed.\n",
- Default: "false",
- Status: "experimental",
- Hierarchy: "build",
- },
- {
- Name: "experimentalUseInvalidMetadata",
- Type: "bool",
- Doc: "experimentalUseInvalidMetadata enables gopls to fall back on outdated\npackage metadata to provide editor features if the go command fails to\nload packages for some reason (like an invalid go.mod file). This will\neventually be the default behavior, and this setting will be removed.\n",
- Default: "false",
- Status: "experimental",
- Hierarchy: "build",
- },
- {
- Name: "hoverKind",
- Type: "enum",
- Doc: "hoverKind controls the information that appears in the hover text.\nSingleLine and Structured are intended for use only by authors of editor plugins.\n",
- EnumValues: []EnumValue{
- {Value: "\"FullDocumentation\""},
- {Value: "\"NoDocumentation\""},
- {Value: "\"SingleLine\""},
- {
- Value: "\"Structured\"",
- Doc: "`\"Structured\"` is an experimental setting that returns a structured hover format.\nThis format separates the signature from the documentation, so that the client\ncan do more manipulation of these fields.\n\nThis should only be used by clients that support this behavior.\n",
- },
- {Value: "\"SynopsisDocumentation\""},
- },
- Default: "\"FullDocumentation\"",
- Hierarchy: "ui.documentation",
- },
- {
- Name: "linkTarget",
- Type: "string",
- Doc: "linkTarget controls where documentation links go.\nIt might be one of:\n\n* `\"godoc.org\"`\n* `\"pkg.go.dev\"`\n\nIf company chooses to use its own `godoc.org`, its address can be used as well.\n",
- Default: "\"pkg.go.dev\"",
- Hierarchy: "ui.documentation",
- },
- {
- Name: "linksInHover",
- Type: "bool",
- Doc: "linksInHover toggles the presence of links to documentation in hover.\n",
- Default: "true",
- Hierarchy: "ui.documentation",
- },
- {
- Name: "usePlaceholders",
- Type: "bool",
- Doc: "placeholders enables placeholders for function parameters or struct\nfields in completion responses.\n",
- Default: "false",
- Hierarchy: "ui.completion",
- },
- {
- Name: "completionBudget",
- Type: "time.Duration",
- Doc: "completionBudget is the soft latency goal for completion requests. Most\nrequests finish in a couple milliseconds, but in some cases deep\ncompletions can take much longer. As we use up our budget we\ndynamically reduce the search scope to ensure we return timely\nresults. Zero means unlimited.\n",
- Default: "\"100ms\"",
- Status: "debug",
- Hierarchy: "ui.completion",
- },
- {
- Name: "matcher",
- Type: "enum",
- Doc: "matcher sets the algorithm that is used when calculating completion\ncandidates.\n",
- EnumValues: []EnumValue{
- {Value: "\"CaseInsensitive\""},
- {Value: "\"CaseSensitive\""},
- {Value: "\"Fuzzy\""},
- },
- Default: "\"Fuzzy\"",
- Status: "advanced",
- Hierarchy: "ui.completion",
- },
- {
- Name: "experimentalPostfixCompletions",
- Type: "bool",
- Doc: "experimentalPostfixCompletions enables artificial method snippets\nsuch as \"someSlice.sort!\".\n",
- Default: "true",
- Status: "experimental",
- Hierarchy: "ui.completion",
- },
- {
- Name: "importShortcut",
- Type: "enum",
- Doc: "importShortcut specifies whether import statements should link to\ndocumentation or go to definitions.\n",
- EnumValues: []EnumValue{
- {Value: "\"Both\""},
- {Value: "\"Definition\""},
- {Value: "\"Link\""},
- },
- Default: "\"Both\"",
- Hierarchy: "ui.navigation",
- },
- {
- Name: "symbolMatcher",
- Type: "enum",
- Doc: "symbolMatcher sets the algorithm that is used when finding workspace symbols.\n",
- EnumValues: []EnumValue{
- {Value: "\"CaseInsensitive\""},
- {Value: "\"CaseSensitive\""},
- {Value: "\"FastFuzzy\""},
- {Value: "\"Fuzzy\""},
- },
- Default: "\"FastFuzzy\"",
- Status: "advanced",
- Hierarchy: "ui.navigation",
- },
- {
- Name: "symbolStyle",
- Type: "enum",
- Doc: "symbolStyle controls how symbols are qualified in symbol responses.\n\nExample Usage:\n\n```json5\n\"gopls\": {\n...\n \"symbolStyle\": \"Dynamic\",\n...\n}\n```\n",
- EnumValues: []EnumValue{
- {
- Value: "\"Dynamic\"",
- Doc: "`\"Dynamic\"` uses whichever qualifier results in the highest scoring\nmatch for the given symbol query. Here a \"qualifier\" is any \"/\" or \".\"\ndelimited suffix of the fully qualified symbol. i.e. \"to/pkg.Foo.Field\" or\njust \"Foo.Field\".\n",
- },
- {
- Value: "\"Full\"",
- Doc: "`\"Full\"` is fully qualified symbols, i.e.\n\"path/to/pkg.Foo.Field\".\n",
- },
- {
- Value: "\"Package\"",
- Doc: "`\"Package\"` is package qualified symbols i.e.\n\"pkg.Foo.Field\".\n",
- },
- },
- Default: "\"Dynamic\"",
- Status: "advanced",
- Hierarchy: "ui.navigation",
- },
- {
- Name: "analyses",
- Type: "map[string]bool",
- Doc: "analyses specify analyses that the user would like to enable or disable.\nA map of the names of analysis passes that should be enabled/disabled.\nA full list of analyzers that gopls uses can be found\n[here](https://github.com/golang/tools/blob/master/gopls/doc/analyzers.md).\n\nExample Usage:\n\n```json5\n...\n\"analyses\": {\n \"unreachable\": false, // Disable the unreachable analyzer.\n \"unusedparams\": true // Enable the unusedparams analyzer.\n}\n...\n```\n",
- EnumKeys: EnumKeys{
- ValueType: "bool",
- Keys: []EnumKey{
- {
- Name: "\"asmdecl\"",
- Doc: "report mismatches between assembly files and Go declarations",
- Default: "true",
- },
- {
- Name: "\"assign\"",
- Doc: "check for useless assignments\n\nThis checker reports assignments of the form x = x or a[i] = a[i].\nThese are almost always useless, and even when they aren't they are\nusually a mistake.",
- Default: "true",
- },
- {
- Name: "\"atomic\"",
- Doc: "check for common mistakes using the sync/atomic package\n\nThe atomic checker looks for assignment statements of the form:\n\n\tx = atomic.AddUint64(&x, 1)\n\nwhich are not atomic.",
- Default: "true",
- },
- {
- Name: "\"atomicalign\"",
- Doc: "check for non-64-bits-aligned arguments to sync/atomic functions",
- Default: "true",
- },
- {
- Name: "\"bools\"",
- Doc: "check for common mistakes involving boolean operators",
- Default: "true",
- },
- {
- Name: "\"buildtag\"",
- Doc: "check that +build tags are well-formed and correctly located",
- Default: "true",
- },
- {
- Name: "\"cgocall\"",
- Doc: "detect some violations of the cgo pointer passing rules\n\nCheck for invalid cgo pointer passing.\nThis looks for code that uses cgo to call C code passing values\nwhose types are almost always invalid according to the cgo pointer\nsharing rules.\nSpecifically, it warns about attempts to pass a Go chan, map, func,\nor slice to C, either directly, or via a pointer, array, or struct.",
- Default: "true",
- },
- {
- Name: "\"composites\"",
- Doc: "check for unkeyed composite literals\n\nThis analyzer reports a diagnostic for composite literals of struct\ntypes imported from another package that do not use the field-keyed\nsyntax. Such literals are fragile because the addition of a new field\n(even if unexported) to the struct will cause compilation to fail.\n\nAs an example,\n\n\terr = &net.DNSConfigError{err}\n\nshould be replaced by:\n\n\terr = &net.DNSConfigError{Err: err}\n",
- Default: "true",
- },
- {
- Name: "\"copylocks\"",
- Doc: "check for locks erroneously passed by value\n\nInadvertently copying a value containing a lock, such as sync.Mutex or\nsync.WaitGroup, may cause both copies to malfunction. Generally such\nvalues should be referred to through a pointer.",
- Default: "true",
- },
- {
- Name: "\"deepequalerrors\"",
- Doc: "check for calls of reflect.DeepEqual on error values\n\nThe deepequalerrors checker looks for calls of the form:\n\n reflect.DeepEqual(err1, err2)\n\nwhere err1 and err2 are errors. Using reflect.DeepEqual to compare\nerrors is discouraged.",
- Default: "true",
- },
- {
- Name: "\"errorsas\"",
- Doc: "report passing non-pointer or non-error values to errors.As\n\nThe errorsas analysis reports calls to errors.As where the type\nof the second argument is not a pointer to a type implementing error.",
- Default: "true",
- },
- {
- Name: "\"fieldalignment\"",
- Doc: "find structs that would use less memory if their fields were sorted\n\nThis analyzer find structs that can be rearranged to use less memory, and provides\na suggested edit with the optimal order.\n\nNote that there are two different diagnostics reported. One checks struct size,\nand the other reports \"pointer bytes\" used. Pointer bytes is how many bytes of the\nobject that the garbage collector has to potentially scan for pointers, for example:\n\n\tstruct { uint32; string }\n\nhave 16 pointer bytes because the garbage collector has to scan up through the string's\ninner pointer.\n\n\tstruct { string; *uint32 }\n\nhas 24 pointer bytes because it has to scan further through the *uint32.\n\n\tstruct { string; uint32 }\n\nhas 8 because it can stop immediately after the string pointer.\n",
- Default: "false",
- },
- {
- Name: "\"httpresponse\"",
- Doc: "check for mistakes using HTTP responses\n\nA common mistake when using the net/http package is to defer a function\ncall to close the http.Response Body before checking the error that\ndetermines whether the response is valid:\n\n\tresp, err := http.Head(url)\n\tdefer resp.Body.Close()\n\tif err != nil {\n\t\tlog.Fatal(err)\n\t}\n\t// (defer statement belongs here)\n\nThis checker helps uncover latent nil dereference bugs by reporting a\ndiagnostic for such mistakes.",
- Default: "true",
- },
- {
- Name: "\"ifaceassert\"",
- Doc: "detect impossible interface-to-interface type assertions\n\nThis checker flags type assertions v.(T) and corresponding type-switch cases\nin which the static type V of v is an interface that cannot possibly implement\nthe target interface T. This occurs when V and T contain methods with the same\nname but different signatures. Example:\n\n\tvar v interface {\n\t\tRead()\n\t}\n\t_ = v.(io.Reader)\n\nThe Read method in v has a different signature than the Read method in\nio.Reader, so this assertion cannot succeed.\n",
- Default: "true",
- },
- {
- Name: "\"infertypeargs\"",
- Doc: "check for unnecessary type arguments in call expressions\n\nExplicit type arguments may be omitted from call expressions if they can be\ninferred from function arguments, or from other type arguments:\n\n\tfunc f[T any](T) {}\n\t\n\tfunc _() {\n\t\tf[string](\"foo\") // string could be inferred\n\t}\n",
- Default: "true",
- },
- {
- Name: "\"loopclosure\"",
- Doc: "check references to loop variables from within nested functions\n\nThis analyzer checks for references to loop variables from within a\nfunction literal inside the loop body. It checks only instances where\nthe function literal is called in a defer or go statement that is the\nlast statement in the loop body, as otherwise we would need whole\nprogram analysis.\n\nFor example:\n\n\tfor i, v := range s {\n\t\tgo func() {\n\t\t\tprintln(i, v) // not what you might expect\n\t\t}()\n\t}\n\nSee: https://golang.org/doc/go_faq.html#closures_and_goroutines",
- Default: "true",
- },
- {
- Name: "\"lostcancel\"",
- Doc: "check cancel func returned by context.WithCancel is called\n\nThe cancellation function returned by context.WithCancel, WithTimeout,\nand WithDeadline must be called or the new context will remain live\nuntil its parent context is cancelled.\n(The background context is never cancelled.)",
- Default: "true",
- },
- {
- Name: "\"nilfunc\"",
- Doc: "check for useless comparisons between functions and nil\n\nA useless comparison is one like f == nil as opposed to f() == nil.",
- Default: "true",
- },
- {
- Name: "\"nilness\"",
- Doc: "check for redundant or impossible nil comparisons\n\nThe nilness checker inspects the control-flow graph of each function in\na package and reports nil pointer dereferences, degenerate nil\npointers, and panics with nil values. A degenerate comparison is of the form\nx==nil or x!=nil where x is statically known to be nil or non-nil. These are\noften a mistake, especially in control flow related to errors. Panics with nil\nvalues are checked because they are not detectable by\n\n\tif r := recover(); r != nil {\n\nThis check reports conditions such as:\n\n\tif f == nil { // impossible condition (f is a function)\n\t}\n\nand:\n\n\tp := &v\n\t...\n\tif p != nil { // tautological condition\n\t}\n\nand:\n\n\tif p == nil {\n\t\tprint(*p) // nil dereference\n\t}\n\nand:\n\n\tif p == nil {\n\t\tpanic(p)\n\t}\n",
- Default: "false",
- },
- {
- Name: "\"printf\"",
- Doc: "check consistency of Printf format strings and arguments\n\nThe check applies to known functions (for example, those in package fmt)\nas well as any detected wrappers of known functions.\n\nA function that wants to avail itself of printf checking but is not\nfound by this analyzer's heuristics (for example, due to use of\ndynamic calls) can insert a bogus call:\n\n\tif false {\n\t\t_ = fmt.Sprintf(format, args...) // enable printf checking\n\t}\n\nThe -funcs flag specifies a comma-separated list of names of additional\nknown formatting functions or methods. If the name contains a period,\nit must denote a specific function using one of the following forms:\n\n\tdir/pkg.Function\n\tdir/pkg.Type.Method\n\t(*dir/pkg.Type).Method\n\nOtherwise the name is interpreted as a case-insensitive unqualified\nidentifier such as \"errorf\". Either way, if a listed name ends in f, the\nfunction is assumed to be Printf-like, taking a format string before the\nargument list. Otherwise it is assumed to be Print-like, taking a list\nof arguments with no format string.\n",
- Default: "true",
- },
- {
- Name: "\"shadow\"",
- Doc: "check for possible unintended shadowing of variables\n\nThis analyzer check for shadowed variables.\nA shadowed variable is a variable declared in an inner scope\nwith the same name and type as a variable in an outer scope,\nand where the outer variable is mentioned after the inner one\nis declared.\n\n(This definition can be refined; the module generates too many\nfalse positives and is not yet enabled by default.)\n\nFor example:\n\n\tfunc BadRead(f *os.File, buf []byte) error {\n\t\tvar err error\n\t\tfor {\n\t\t\tn, err := f.Read(buf) // shadows the function variable 'err'\n\t\t\tif err != nil {\n\t\t\t\tbreak // causes return of wrong value\n\t\t\t}\n\t\t\tfoo(buf)\n\t\t}\n\t\treturn err\n\t}\n",
- Default: "false",
- },
- {
- Name: "\"shift\"",
- Doc: "check for shifts that equal or exceed the width of the integer",
- Default: "true",
- },
- {
- Name: "\"simplifycompositelit\"",
- Doc: "check for composite literal simplifications\n\nAn array, slice, or map composite literal of the form:\n\t[]T{T{}, T{}}\nwill be simplified to:\n\t[]T{{}, {}}\n\nThis is one of the simplifications that \"gofmt -s\" applies.",
- Default: "true",
- },
- {
- Name: "\"simplifyrange\"",
- Doc: "check for range statement simplifications\n\nA range of the form:\n\tfor x, _ = range v {...}\nwill be simplified to:\n\tfor x = range v {...}\n\nA range of the form:\n\tfor _ = range v {...}\nwill be simplified to:\n\tfor range v {...}\n\nThis is one of the simplifications that \"gofmt -s\" applies.",
- Default: "true",
- },
- {
- Name: "\"simplifyslice\"",
- Doc: "check for slice simplifications\n\nA slice expression of the form:\n\ts[a:len(s)]\nwill be simplified to:\n\ts[a:]\n\nThis is one of the simplifications that \"gofmt -s\" applies.",
- Default: "true",
- },
- {
- Name: "\"sortslice\"",
- Doc: "check the argument type of sort.Slice\n\nsort.Slice requires an argument of a slice type. Check that\nthe interface{} value passed to sort.Slice is actually a slice.",
- Default: "true",
- },
- {
- Name: "\"stdmethods\"",
- Doc: "check signature of methods of well-known interfaces\n\nSometimes a type may be intended to satisfy an interface but may fail to\ndo so because of a mistake in its method signature.\nFor example, the result of this WriteTo method should be (int64, error),\nnot error, to satisfy io.WriterTo:\n\n\ttype myWriterTo struct{...}\n func (myWriterTo) WriteTo(w io.Writer) error { ... }\n\nThis check ensures that each method whose name matches one of several\nwell-known interface methods from the standard library has the correct\nsignature for that interface.\n\nChecked method names include:\n\tFormat GobEncode GobDecode MarshalJSON MarshalXML\n\tPeek ReadByte ReadFrom ReadRune Scan Seek\n\tUnmarshalJSON UnreadByte UnreadRune WriteByte\n\tWriteTo\n",
- Default: "true",
- },
- {
- Name: "\"stringintconv\"",
- Doc: "check for string(int) conversions\n\nThis checker flags conversions of the form string(x) where x is an integer\n(but not byte or rune) type. Such conversions are discouraged because they\nreturn the UTF-8 representation of the Unicode code point x, and not a decimal\nstring representation of x as one might expect. Furthermore, if x denotes an\ninvalid code point, the conversion cannot be statically rejected.\n\nFor conversions that intend on using the code point, consider replacing them\nwith string(rune(x)). Otherwise, strconv.Itoa and its equivalents return the\nstring representation of the value in the desired base.\n",
- Default: "true",
- },
- {
- Name: "\"structtag\"",
- Doc: "check that struct field tags conform to reflect.StructTag.Get\n\nAlso report certain struct tags (json, xml) used with unexported fields.",
- Default: "true",
- },
- {
- Name: "\"testinggoroutine\"",
- Doc: "report calls to (*testing.T).Fatal from goroutines started by a test.\n\nFunctions that abruptly terminate a test, such as the Fatal, Fatalf, FailNow, and\nSkip{,f,Now} methods of *testing.T, must be called from the test goroutine itself.\nThis checker detects calls to these functions that occur within a goroutine\nstarted by the test. For example:\n\nfunc TestFoo(t *testing.T) {\n go func() {\n t.Fatal(\"oops\") // error: (*T).Fatal called from non-test goroutine\n }()\n}\n",
- Default: "true",
- },
- {
- Name: "\"tests\"",
- Doc: "check for common mistaken usages of tests and examples\n\nThe tests checker walks Test, Benchmark and Example functions checking\nmalformed names, wrong signatures and examples documenting non-existent\nidentifiers.\n\nPlease see the documentation for package testing in golang.org/pkg/testing\nfor the conventions that are enforced for Tests, Benchmarks, and Examples.",
- Default: "true",
- },
- {
- Name: "\"unmarshal\"",
- Doc: "report passing non-pointer or non-interface values to unmarshal\n\nThe unmarshal analysis reports calls to functions such as json.Unmarshal\nin which the argument type is not a pointer or an interface.",
- Default: "true",
- },
- {
- Name: "\"unreachable\"",
- Doc: "check for unreachable code\n\nThe unreachable analyzer finds statements that execution can never reach\nbecause they are preceded by an return statement, a call to panic, an\ninfinite loop, or similar constructs.",
- Default: "true",
- },
- {
- Name: "\"unsafeptr\"",
- Doc: "check for invalid conversions of uintptr to unsafe.Pointer\n\nThe unsafeptr analyzer reports likely incorrect uses of unsafe.Pointer\nto convert integers to pointers. A conversion from uintptr to\nunsafe.Pointer is invalid if it implies that there is a uintptr-typed\nword in memory that holds a pointer value, because that word will be\ninvisible to stack copying and to the garbage collector.",
- Default: "true",
- },
- {
- Name: "\"unusedparams\"",
- Doc: "check for unused parameters of functions\n\nThe unusedparams analyzer checks functions to see if there are\nany parameters that are not being used.\n\nTo reduce false positives it ignores:\n- methods\n- parameters that do not have a name or are underscored\n- functions in test files\n- functions with empty bodies or those with just a return stmt",
- Default: "false",
- },
- {
- Name: "\"unusedresult\"",
- Doc: "check for unused results of calls to some functions\n\nSome functions like fmt.Errorf return a result and have no side effects,\nso it is always a mistake to discard the result. This analyzer reports\ncalls to certain functions in which the result of the call is ignored.\n\nThe set of functions may be controlled using flags.",
- Default: "true",
- },
- {
- Name: "\"unusedwrite\"",
- Doc: "checks for unused writes\n\nThe analyzer reports instances of writes to struct fields and\narrays that are never read. Specifically, when a struct object\nor an array is copied, its elements are copied implicitly by\nthe compiler, and any element write to this copy does nothing\nwith the original object.\n\nFor example:\n\n\ttype T struct { x int }\n\tfunc f(input []T) {\n\t\tfor i, v := range input { // v is a copy\n\t\t\tv.x = i // unused write to field x\n\t\t}\n\t}\n\nAnother example is about non-pointer receiver:\n\n\ttype T struct { x int }\n\tfunc (t T) f() { // t is a copy\n\t\tt.x = i // unused write to field x\n\t}\n",
- Default: "false",
- },
- {
- Name: "\"useany\"",
- Doc: "check for constraints that could be simplified to \"any\"",
- Default: "false",
- },
- {
- Name: "\"fillreturns\"",
- Doc: "suggest fixes for errors due to an incorrect number of return values\n\nThis checker provides suggested fixes for type errors of the\ntype \"wrong number of return values (want %d, got %d)\". For example:\n\tfunc m() (int, string, *bool, error) {\n\t\treturn\n\t}\nwill turn into\n\tfunc m() (int, string, *bool, error) {\n\t\treturn 0, \"\", nil, nil\n\t}\n\nThis functionality is similar to https://github.com/sqs/goreturns.\n",
- Default: "true",
- },
- {
- Name: "\"nonewvars\"",
- Doc: "suggested fixes for \"no new vars on left side of :=\"\n\nThis checker provides suggested fixes for type errors of the\ntype \"no new vars on left side of :=\". For example:\n\tz := 1\n\tz := 2\nwill turn into\n\tz := 1\n\tz = 2\n",
- Default: "true",
- },
- {
- Name: "\"noresultvalues\"",
- Doc: "suggested fixes for unexpected return values\n\nThis checker provides suggested fixes for type errors of the\ntype \"no result values expected\" or \"too many return values\".\nFor example:\n\tfunc z() { return nil }\nwill turn into\n\tfunc z() { return }\n",
- Default: "true",
- },
- {
- Name: "\"undeclaredname\"",
- Doc: "suggested fixes for \"undeclared name: <>\"\n\nThis checker provides suggested fixes for type errors of the\ntype \"undeclared name: <>\". It will either insert a new statement,\nsuch as:\n\n\"<> := \"\n\nor a new function declaration, such as:\n\nfunc <>(inferred parameters) {\n\tpanic(\"implement me!\")\n}\n",
- Default: "true",
- },
- {
- Name: "\"fillstruct\"",
- Doc: "note incomplete struct initializations\n\nThis analyzer provides diagnostics for any struct literals that do not have\nany fields initialized. Because the suggested fix for this analysis is\nexpensive to compute, callers should compute it separately, using the\nSuggestedFix function below.\n",
- Default: "true",
- },
- {
- Name: "\"stubmethods\"",
- Doc: "stub methods analyzer\n\nThis analyzer generates method stubs for concrete types\nin order to implement a target interface",
- Default: "true",
- },
- },
- },
- Default: "{}",
- Hierarchy: "ui.diagnostic",
- },
- {
- Name: "staticcheck",
- Type: "bool",
- Doc: "staticcheck enables additional analyses from staticcheck.io.\n",
- Default: "false",
- Status: "experimental",
- Hierarchy: "ui.diagnostic",
- },
- {
- Name: "annotations",
- Type: "map[string]bool",
- Doc: "annotations specifies the various kinds of optimization diagnostics\nthat should be reported by the gc_details command.\n",
- EnumKeys: EnumKeys{
- ValueType: "bool",
- Keys: []EnumKey{
- {
- Name: "\"bounds\"",
- Doc: "`\"bounds\"` controls bounds checking diagnostics.\n",
- Default: "true",
- },
- {
- Name: "\"escape\"",
- Doc: "`\"escape\"` controls diagnostics about escape choices.\n",
- Default: "true",
- },
- {
- Name: "\"inline\"",
- Doc: "`\"inline\"` controls diagnostics about inlining choices.\n",
- Default: "true",
- },
- {
- Name: "\"nil\"",
- Doc: "`\"nil\"` controls nil checks.\n",
- Default: "true",
- },
- },
- },
- Default: "{\"bounds\":true,\"escape\":true,\"inline\":true,\"nil\":true}",
- Status: "experimental",
- Hierarchy: "ui.diagnostic",
- },
- {
- Name: "diagnosticsDelay",
- Type: "time.Duration",
- Doc: "diagnosticsDelay controls the amount of time that gopls waits\nafter the most recent file modification before computing deep diagnostics.\nSimple diagnostics (parsing and type-checking) are always run immediately\non recently modified packages.\n\nThis option must be set to a valid duration string, for example `\"250ms\"`.\n",
- Default: "\"250ms\"",
- Status: "advanced",
- Hierarchy: "ui.diagnostic",
- },
- {
- Name: "experimentalWatchedFileDelay",
- Type: "time.Duration",
- Doc: "experimentalWatchedFileDelay controls the amount of time that gopls waits\nfor additional workspace/didChangeWatchedFiles notifications to arrive,\nbefore processing all such notifications in a single batch. This is\nintended for use by LSP clients that don't support their own batching of\nfile system notifications.\n\nThis option must be set to a valid duration string, for example `\"100ms\"`.\n",
- Default: "\"0s\"",
- Status: "experimental",
- Hierarchy: "ui.diagnostic",
- },
- {
- Name: "codelenses",
- Type: "map[string]bool",
- Doc: "codelenses overrides the enabled/disabled state of code lenses. See the\n\"Code Lenses\" section of the\n[Settings page](https://github.com/golang/tools/blob/master/gopls/doc/settings.md#code-lenses)\nfor the list of supported lenses.\n\nExample Usage:\n\n```json5\n\"gopls\": {\n...\n \"codelenses\": {\n \"generate\": false, // Don't show the `go generate` lens.\n \"gc_details\": true // Show a code lens toggling the display of gc's choices.\n }\n...\n}\n```\n",
- EnumKeys: EnumKeys{
- ValueType: "bool",
- Keys: []EnumKey{
- {
- Name: "\"gc_details\"",
- Doc: "Toggle the calculation of gc annotations.",
- Default: "false",
- },
- {
- Name: "\"generate\"",
- Doc: "Runs `go generate` for a given directory.",
- Default: "true",
- },
- {
- Name: "\"regenerate_cgo\"",
- Doc: "Regenerates cgo definitions.",
- Default: "true",
- },
- {
- Name: "\"test\"",
- Doc: "Runs `go test` for a specific set of test or benchmark functions.",
- Default: "false",
- },
- {
- Name: "\"tidy\"",
- Doc: "Runs `go mod tidy` for a module.",
- Default: "true",
- },
- {
- Name: "\"upgrade_dependency\"",
- Doc: "Upgrades a dependency in the go.mod file for a module.",
- Default: "true",
- },
- {
- Name: "\"vendor\"",
- Doc: "Runs `go mod vendor` for a module.",
- Default: "true",
- },
- },
- },
- Default: "{\"gc_details\":false,\"generate\":true,\"regenerate_cgo\":true,\"tidy\":true,\"upgrade_dependency\":true,\"vendor\":true}",
- Hierarchy: "ui",
- },
- {
- Name: "semanticTokens",
- Type: "bool",
- Doc: "semanticTokens controls whether the LSP server will send\nsemantic tokens to the client.\n",
- Default: "false",
- Status: "experimental",
- Hierarchy: "ui",
- },
- {
- Name: "local",
- Type: "string",
- Doc: "local is the equivalent of the `goimports -local` flag, which puts\nimports beginning with this string after third-party packages. It should\nbe the prefix of the import path whose imports should be grouped\nseparately.\n",
- Default: "\"\"",
- Hierarchy: "formatting",
- },
- {
- Name: "gofumpt",
- Type: "bool",
- Doc: "gofumpt indicates if we should run gofumpt formatting.\n",
- Default: "false",
- Hierarchy: "formatting",
- },
- {
- Name: "verboseOutput",
- Type: "bool",
- Doc: "verboseOutput enables additional debug logging.\n",
- Default: "false",
- Status: "debug",
- },
- },
- },
- Commands: []*CommandJSON{
- {
- Command: "gopls.add_dependency",
- Title: "Add a dependency",
- Doc: "Adds a dependency to the go.mod file for a module.",
- ArgDoc: "{\n\t// The go.mod file URI.\n\t\"URI\": string,\n\t// Additional args to pass to the go command.\n\t\"GoCmdArgs\": []string,\n\t// Whether to add a require directive.\n\t\"AddRequire\": bool,\n}",
- },
- {
- Command: "gopls.add_import",
- Title: "Add an import",
- Doc: "Ask the server to add an import path to a given Go file. The method will\ncall applyEdit on the client so that clients don't have to apply the edit\nthemselves.",
- ArgDoc: "{\n\t// ImportPath is the target import path that should\n\t// be added to the URI file\n\t\"ImportPath\": string,\n\t// URI is the file that the ImportPath should be\n\t// added to\n\t\"URI\": string,\n}",
- },
- {
- Command: "gopls.apply_fix",
- Title: "Apply a fix",
- Doc: "Applies a fix to a region of source code.",
- ArgDoc: "{\n\t// The fix to apply.\n\t\"Fix\": string,\n\t// The file URI for the document to fix.\n\t\"URI\": string,\n\t// The document range to scan for fixes.\n\t\"Range\": {\n\t\t\"start\": {\n\t\t\t\"line\": uint32,\n\t\t\t\"character\": uint32,\n\t\t},\n\t\t\"end\": {\n\t\t\t\"line\": uint32,\n\t\t\t\"character\": uint32,\n\t\t},\n\t},\n}",
- },
- {
- Command: "gopls.check_upgrades",
- Title: "Check for upgrades",
- Doc: "Checks for module upgrades.",
- ArgDoc: "{\n\t// The go.mod file URI.\n\t\"URI\": string,\n\t// The modules to check.\n\t\"Modules\": []string,\n}",
- },
- {
- Command: "gopls.edit_go_directive",
- Title: "Run go mod edit -go=version",
- Doc: "Runs `go mod edit -go=version` for a module.",
- ArgDoc: "{\n\t// Any document URI within the relevant module.\n\t\"URI\": string,\n\t// The version to pass to `go mod edit -go`.\n\t\"Version\": string,\n}",
- },
- {
- Command: "gopls.gc_details",
- Title: "Toggle gc_details",
- Doc: "Toggle the calculation of gc annotations.",
- ArgDoc: "string",
- },
- {
- Command: "gopls.generate",
- Title: "Run go generate",
- Doc: "Runs `go generate` for a given directory.",
- ArgDoc: "{\n\t// URI for the directory to generate.\n\t\"Dir\": string,\n\t// Whether to generate recursively (go generate ./...)\n\t\"Recursive\": bool,\n}",
- },
- {
- Command: "gopls.generate_gopls_mod",
- Title: "Generate gopls.mod",
- Doc: "(Re)generate the gopls.mod file for a workspace.",
- ArgDoc: "{\n\t// The file URI.\n\t\"URI\": string,\n}",
- },
- {
- Command: "gopls.go_get_package",
- Title: "go get a package",
- Doc: "Runs `go get` to fetch a package.",
- ArgDoc: "{\n\t// Any document URI within the relevant module.\n\t\"URI\": string,\n\t// The package to go get.\n\t\"Pkg\": string,\n\t\"AddRequire\": bool,\n}",
- },
- {
- Command: "gopls.list_imports",
- Title: "List imports of a file and its package",
- Doc: "Retrieve a list of imports in the given Go file, and the package it\nbelongs to.",
- ArgDoc: "{\n\t// The file URI.\n\t\"URI\": string,\n}",
- ResultDoc: "{\n\t// Imports is a list of imports in the requested file.\n\t\"Imports\": []{\n\t\t\"Path\": string,\n\t\t\"Name\": string,\n\t},\n\t// PackageImports is a list of all imports in the requested file's package.\n\t\"PackageImports\": []{\n\t\t\"Path\": string,\n\t},\n}",
- },
- {
- Command: "gopls.list_known_packages",
- Title: "List known packages",
- Doc: "Retrieve a list of packages that are importable from the given URI.",
- ArgDoc: "{\n\t// The file URI.\n\t\"URI\": string,\n}",
- ResultDoc: "{\n\t// Packages is a list of packages relative\n\t// to the URIArg passed by the command request.\n\t// In other words, it omits paths that are already\n\t// imported or cannot be imported due to compiler\n\t// restrictions.\n\t\"Packages\": []string,\n}",
- },
- {
- Command: "gopls.regenerate_cgo",
- Title: "Regenerate cgo",
- Doc: "Regenerates cgo definitions.",
- ArgDoc: "{\n\t// The file URI.\n\t\"URI\": string,\n}",
- },
- {
- Command: "gopls.remove_dependency",
- Title: "Remove a dependency",
- Doc: "Removes a dependency from the go.mod file of a module.",
- ArgDoc: "{\n\t// The go.mod file URI.\n\t\"URI\": string,\n\t// The module path to remove.\n\t\"ModulePath\": string,\n\t\"OnlyDiagnostic\": bool,\n}",
- },
- {
- Command: "gopls.run_tests",
- Title: "Run test(s)",
- Doc: "Runs `go test` for a specific set of test or benchmark functions.",
- ArgDoc: "{\n\t// The test file containing the tests to run.\n\t\"URI\": string,\n\t// Specific test names to run, e.g. TestFoo.\n\t\"Tests\": []string,\n\t// Specific benchmarks to run, e.g. BenchmarkFoo.\n\t\"Benchmarks\": []string,\n}",
- },
- {
- Command: "gopls.run_vulncheck_exp",
- Title: "Run vulncheck (experimental)",
- Doc: "Run vulnerability check (`govulncheck`).",
- ArgDoc: "{\n\t// Dir is the directory from which vulncheck will run from.\n\t\"Dir\": string,\n\t// Package pattern. E.g. \"\", \".\", \"./...\".\n\t\"Pattern\": string,\n}",
- ResultDoc: "{\n\t\"Vuln\": []{\n\t\t\"ID\": string,\n\t\t\"Details\": string,\n\t\t\"Aliases\": []string,\n\t\t\"Symbol\": string,\n\t\t\"PkgPath\": string,\n\t\t\"ModPath\": string,\n\t\t\"URL\": string,\n\t\t\"CurrentVersion\": string,\n\t\t\"FixedVersion\": string,\n\t\t\"CallStacks\": [][]golang.org/x/tools/internal/lsp/command.StackEntry,\n\t},\n}",
- },
- {
- Command: "gopls.start_debugging",
- Title: "Start the gopls debug server",
- Doc: "Start the gopls debug server if it isn't running, and return the debug\naddress.",
- ArgDoc: "{\n\t// Optional: the address (including port) for the debug server to listen on.\n\t// If not provided, the debug server will bind to \"localhost:0\", and the\n\t// full debug URL will be contained in the result.\n\t// \n\t// If there is more than one gopls instance along the serving path (i.e. you\n\t// are using a daemon), each gopls instance will attempt to start debugging.\n\t// If Addr specifies a port, only the daemon will be able to bind to that\n\t// port, and each intermediate gopls instance will fail to start debugging.\n\t// For this reason it is recommended not to specify a port (or equivalently,\n\t// to specify \":0\").\n\t// \n\t// If the server was already debugging this field has no effect, and the\n\t// result will contain the previously configured debug URL(s).\n\t\"Addr\": string,\n}",
- ResultDoc: "{\n\t// The URLs to use to access the debug servers, for all gopls instances in\n\t// the serving path. For the common case of a single gopls instance (i.e. no\n\t// daemon), this will be exactly one address.\n\t// \n\t// In the case of one or more gopls instances forwarding the LSP to a daemon,\n\t// URLs will contain debug addresses for each server in the serving path, in\n\t// serving order. The daemon debug address will be the last entry in the\n\t// slice. If any intermediate gopls instance fails to start debugging, no\n\t// error will be returned but the debug URL for that server in the URLs slice\n\t// will be empty.\n\t\"URLs\": []string,\n}",
- },
- {
- Command: "gopls.test",
- Title: "Run test(s) (legacy)",
- Doc: "Runs `go test` for a specific set of test or benchmark functions.",
- ArgDoc: "string,\n[]string,\n[]string",
- },
- {
- Command: "gopls.tidy",
- Title: "Run go mod tidy",
- Doc: "Runs `go mod tidy` for a module.",
- ArgDoc: "{\n\t// The file URIs.\n\t\"URIs\": []string,\n}",
- },
- {
- Command: "gopls.toggle_gc_details",
- Title: "Toggle gc_details",
- Doc: "Toggle the calculation of gc annotations.",
- ArgDoc: "{\n\t// The file URI.\n\t\"URI\": string,\n}",
- },
- {
- Command: "gopls.update_go_sum",
- Title: "Update go.sum",
- Doc: "Updates the go.sum file for a module.",
- ArgDoc: "{\n\t// The file URIs.\n\t\"URIs\": []string,\n}",
- },
- {
- Command: "gopls.upgrade_dependency",
- Title: "Upgrade a dependency",
- Doc: "Upgrades a dependency in the go.mod file for a module.",
- ArgDoc: "{\n\t// The go.mod file URI.\n\t\"URI\": string,\n\t// Additional args to pass to the go command.\n\t\"GoCmdArgs\": []string,\n\t// Whether to add a require directive.\n\t\"AddRequire\": bool,\n}",
- },
- {
- Command: "gopls.vendor",
- Title: "Run go mod vendor",
- Doc: "Runs `go mod vendor` for a module.",
- ArgDoc: "{\n\t// The file URI.\n\t\"URI\": string,\n}",
- },
- },
- Lenses: []*LensJSON{
- {
- Lens: "gc_details",
- Title: "Toggle gc_details",
- Doc: "Toggle the calculation of gc annotations.",
- },
- {
- Lens: "generate",
- Title: "Run go generate",
- Doc: "Runs `go generate` for a given directory.",
- },
- {
- Lens: "regenerate_cgo",
- Title: "Regenerate cgo",
- Doc: "Regenerates cgo definitions.",
- },
- {
- Lens: "test",
- Title: "Run test(s) (legacy)",
- Doc: "Runs `go test` for a specific set of test or benchmark functions.",
- },
- {
- Lens: "tidy",
- Title: "Run go mod tidy",
- Doc: "Runs `go mod tidy` for a module.",
- },
- {
- Lens: "upgrade_dependency",
- Title: "Upgrade a dependency",
- Doc: "Upgrades a dependency in the go.mod file for a module.",
- },
- {
- Lens: "vendor",
- Title: "Run go mod vendor",
- Doc: "Runs `go mod vendor` for a module.",
- },
- },
- Analyzers: []*AnalyzerJSON{
- {
- Name: "asmdecl",
- Doc: "report mismatches between assembly files and Go declarations",
- Default: true,
- },
- {
- Name: "assign",
- Doc: "check for useless assignments\n\nThis checker reports assignments of the form x = x or a[i] = a[i].\nThese are almost always useless, and even when they aren't they are\nusually a mistake.",
- Default: true,
- },
- {
- Name: "atomic",
- Doc: "check for common mistakes using the sync/atomic package\n\nThe atomic checker looks for assignment statements of the form:\n\n\tx = atomic.AddUint64(&x, 1)\n\nwhich are not atomic.",
- Default: true,
- },
- {
- Name: "atomicalign",
- Doc: "check for non-64-bits-aligned arguments to sync/atomic functions",
- Default: true,
- },
- {
- Name: "bools",
- Doc: "check for common mistakes involving boolean operators",
- Default: true,
- },
- {
- Name: "buildtag",
- Doc: "check that +build tags are well-formed and correctly located",
- Default: true,
- },
- {
- Name: "cgocall",
- Doc: "detect some violations of the cgo pointer passing rules\n\nCheck for invalid cgo pointer passing.\nThis looks for code that uses cgo to call C code passing values\nwhose types are almost always invalid according to the cgo pointer\nsharing rules.\nSpecifically, it warns about attempts to pass a Go chan, map, func,\nor slice to C, either directly, or via a pointer, array, or struct.",
- Default: true,
- },
- {
- Name: "composites",
- Doc: "check for unkeyed composite literals\n\nThis analyzer reports a diagnostic for composite literals of struct\ntypes imported from another package that do not use the field-keyed\nsyntax. Such literals are fragile because the addition of a new field\n(even if unexported) to the struct will cause compilation to fail.\n\nAs an example,\n\n\terr = &net.DNSConfigError{err}\n\nshould be replaced by:\n\n\terr = &net.DNSConfigError{Err: err}\n",
- Default: true,
- },
- {
- Name: "copylocks",
- Doc: "check for locks erroneously passed by value\n\nInadvertently copying a value containing a lock, such as sync.Mutex or\nsync.WaitGroup, may cause both copies to malfunction. Generally such\nvalues should be referred to through a pointer.",
- Default: true,
- },
- {
- Name: "deepequalerrors",
- Doc: "check for calls of reflect.DeepEqual on error values\n\nThe deepequalerrors checker looks for calls of the form:\n\n reflect.DeepEqual(err1, err2)\n\nwhere err1 and err2 are errors. Using reflect.DeepEqual to compare\nerrors is discouraged.",
- Default: true,
- },
- {
- Name: "errorsas",
- Doc: "report passing non-pointer or non-error values to errors.As\n\nThe errorsas analysis reports calls to errors.As where the type\nof the second argument is not a pointer to a type implementing error.",
- Default: true,
- },
- {
- Name: "fieldalignment",
- Doc: "find structs that would use less memory if their fields were sorted\n\nThis analyzer find structs that can be rearranged to use less memory, and provides\na suggested edit with the optimal order.\n\nNote that there are two different diagnostics reported. One checks struct size,\nand the other reports \"pointer bytes\" used. Pointer bytes is how many bytes of the\nobject that the garbage collector has to potentially scan for pointers, for example:\n\n\tstruct { uint32; string }\n\nhave 16 pointer bytes because the garbage collector has to scan up through the string's\ninner pointer.\n\n\tstruct { string; *uint32 }\n\nhas 24 pointer bytes because it has to scan further through the *uint32.\n\n\tstruct { string; uint32 }\n\nhas 8 because it can stop immediately after the string pointer.\n",
- },
- {
- Name: "httpresponse",
- Doc: "check for mistakes using HTTP responses\n\nA common mistake when using the net/http package is to defer a function\ncall to close the http.Response Body before checking the error that\ndetermines whether the response is valid:\n\n\tresp, err := http.Head(url)\n\tdefer resp.Body.Close()\n\tif err != nil {\n\t\tlog.Fatal(err)\n\t}\n\t// (defer statement belongs here)\n\nThis checker helps uncover latent nil dereference bugs by reporting a\ndiagnostic for such mistakes.",
- Default: true,
- },
- {
- Name: "ifaceassert",
- Doc: "detect impossible interface-to-interface type assertions\n\nThis checker flags type assertions v.(T) and corresponding type-switch cases\nin which the static type V of v is an interface that cannot possibly implement\nthe target interface T. This occurs when V and T contain methods with the same\nname but different signatures. Example:\n\n\tvar v interface {\n\t\tRead()\n\t}\n\t_ = v.(io.Reader)\n\nThe Read method in v has a different signature than the Read method in\nio.Reader, so this assertion cannot succeed.\n",
- Default: true,
- },
- {
- Name: "infertypeargs",
- Doc: "check for unnecessary type arguments in call expressions\n\nExplicit type arguments may be omitted from call expressions if they can be\ninferred from function arguments, or from other type arguments:\n\n\tfunc f[T any](T) {}\n\t\n\tfunc _() {\n\t\tf[string](\"foo\") // string could be inferred\n\t}\n",
- Default: true,
- },
- {
- Name: "loopclosure",
- Doc: "check references to loop variables from within nested functions\n\nThis analyzer checks for references to loop variables from within a\nfunction literal inside the loop body. It checks only instances where\nthe function literal is called in a defer or go statement that is the\nlast statement in the loop body, as otherwise we would need whole\nprogram analysis.\n\nFor example:\n\n\tfor i, v := range s {\n\t\tgo func() {\n\t\t\tprintln(i, v) // not what you might expect\n\t\t}()\n\t}\n\nSee: https://golang.org/doc/go_faq.html#closures_and_goroutines",
- Default: true,
- },
- {
- Name: "lostcancel",
- Doc: "check cancel func returned by context.WithCancel is called\n\nThe cancellation function returned by context.WithCancel, WithTimeout,\nand WithDeadline must be called or the new context will remain live\nuntil its parent context is cancelled.\n(The background context is never cancelled.)",
- Default: true,
- },
- {
- Name: "nilfunc",
- Doc: "check for useless comparisons between functions and nil\n\nA useless comparison is one like f == nil as opposed to f() == nil.",
- Default: true,
- },
- {
- Name: "nilness",
- Doc: "check for redundant or impossible nil comparisons\n\nThe nilness checker inspects the control-flow graph of each function in\na package and reports nil pointer dereferences, degenerate nil\npointers, and panics with nil values. A degenerate comparison is of the form\nx==nil or x!=nil where x is statically known to be nil or non-nil. These are\noften a mistake, especially in control flow related to errors. Panics with nil\nvalues are checked because they are not detectable by\n\n\tif r := recover(); r != nil {\n\nThis check reports conditions such as:\n\n\tif f == nil { // impossible condition (f is a function)\n\t}\n\nand:\n\n\tp := &v\n\t...\n\tif p != nil { // tautological condition\n\t}\n\nand:\n\n\tif p == nil {\n\t\tprint(*p) // nil dereference\n\t}\n\nand:\n\n\tif p == nil {\n\t\tpanic(p)\n\t}\n",
- },
- {
- Name: "printf",
- Doc: "check consistency of Printf format strings and arguments\n\nThe check applies to known functions (for example, those in package fmt)\nas well as any detected wrappers of known functions.\n\nA function that wants to avail itself of printf checking but is not\nfound by this analyzer's heuristics (for example, due to use of\ndynamic calls) can insert a bogus call:\n\n\tif false {\n\t\t_ = fmt.Sprintf(format, args...) // enable printf checking\n\t}\n\nThe -funcs flag specifies a comma-separated list of names of additional\nknown formatting functions or methods. If the name contains a period,\nit must denote a specific function using one of the following forms:\n\n\tdir/pkg.Function\n\tdir/pkg.Type.Method\n\t(*dir/pkg.Type).Method\n\nOtherwise the name is interpreted as a case-insensitive unqualified\nidentifier such as \"errorf\". Either way, if a listed name ends in f, the\nfunction is assumed to be Printf-like, taking a format string before the\nargument list. Otherwise it is assumed to be Print-like, taking a list\nof arguments with no format string.\n",
- Default: true,
- },
- {
- Name: "shadow",
- Doc: "check for possible unintended shadowing of variables\n\nThis analyzer check for shadowed variables.\nA shadowed variable is a variable declared in an inner scope\nwith the same name and type as a variable in an outer scope,\nand where the outer variable is mentioned after the inner one\nis declared.\n\n(This definition can be refined; the module generates too many\nfalse positives and is not yet enabled by default.)\n\nFor example:\n\n\tfunc BadRead(f *os.File, buf []byte) error {\n\t\tvar err error\n\t\tfor {\n\t\t\tn, err := f.Read(buf) // shadows the function variable 'err'\n\t\t\tif err != nil {\n\t\t\t\tbreak // causes return of wrong value\n\t\t\t}\n\t\t\tfoo(buf)\n\t\t}\n\t\treturn err\n\t}\n",
- },
- {
- Name: "shift",
- Doc: "check for shifts that equal or exceed the width of the integer",
- Default: true,
- },
- {
- Name: "simplifycompositelit",
- Doc: "check for composite literal simplifications\n\nAn array, slice, or map composite literal of the form:\n\t[]T{T{}, T{}}\nwill be simplified to:\n\t[]T{{}, {}}\n\nThis is one of the simplifications that \"gofmt -s\" applies.",
- Default: true,
- },
- {
- Name: "simplifyrange",
- Doc: "check for range statement simplifications\n\nA range of the form:\n\tfor x, _ = range v {...}\nwill be simplified to:\n\tfor x = range v {...}\n\nA range of the form:\n\tfor _ = range v {...}\nwill be simplified to:\n\tfor range v {...}\n\nThis is one of the simplifications that \"gofmt -s\" applies.",
- Default: true,
- },
- {
- Name: "simplifyslice",
- Doc: "check for slice simplifications\n\nA slice expression of the form:\n\ts[a:len(s)]\nwill be simplified to:\n\ts[a:]\n\nThis is one of the simplifications that \"gofmt -s\" applies.",
- Default: true,
- },
- {
- Name: "sortslice",
- Doc: "check the argument type of sort.Slice\n\nsort.Slice requires an argument of a slice type. Check that\nthe interface{} value passed to sort.Slice is actually a slice.",
- Default: true,
- },
- {
- Name: "stdmethods",
- Doc: "check signature of methods of well-known interfaces\n\nSometimes a type may be intended to satisfy an interface but may fail to\ndo so because of a mistake in its method signature.\nFor example, the result of this WriteTo method should be (int64, error),\nnot error, to satisfy io.WriterTo:\n\n\ttype myWriterTo struct{...}\n func (myWriterTo) WriteTo(w io.Writer) error { ... }\n\nThis check ensures that each method whose name matches one of several\nwell-known interface methods from the standard library has the correct\nsignature for that interface.\n\nChecked method names include:\n\tFormat GobEncode GobDecode MarshalJSON MarshalXML\n\tPeek ReadByte ReadFrom ReadRune Scan Seek\n\tUnmarshalJSON UnreadByte UnreadRune WriteByte\n\tWriteTo\n",
- Default: true,
- },
- {
- Name: "stringintconv",
- Doc: "check for string(int) conversions\n\nThis checker flags conversions of the form string(x) where x is an integer\n(but not byte or rune) type. Such conversions are discouraged because they\nreturn the UTF-8 representation of the Unicode code point x, and not a decimal\nstring representation of x as one might expect. Furthermore, if x denotes an\ninvalid code point, the conversion cannot be statically rejected.\n\nFor conversions that intend on using the code point, consider replacing them\nwith string(rune(x)). Otherwise, strconv.Itoa and its equivalents return the\nstring representation of the value in the desired base.\n",
- Default: true,
- },
- {
- Name: "structtag",
- Doc: "check that struct field tags conform to reflect.StructTag.Get\n\nAlso report certain struct tags (json, xml) used with unexported fields.",
- Default: true,
- },
- {
- Name: "testinggoroutine",
- Doc: "report calls to (*testing.T).Fatal from goroutines started by a test.\n\nFunctions that abruptly terminate a test, such as the Fatal, Fatalf, FailNow, and\nSkip{,f,Now} methods of *testing.T, must be called from the test goroutine itself.\nThis checker detects calls to these functions that occur within a goroutine\nstarted by the test. For example:\n\nfunc TestFoo(t *testing.T) {\n go func() {\n t.Fatal(\"oops\") // error: (*T).Fatal called from non-test goroutine\n }()\n}\n",
- Default: true,
- },
- {
- Name: "tests",
- Doc: "check for common mistaken usages of tests and examples\n\nThe tests checker walks Test, Benchmark and Example functions checking\nmalformed names, wrong signatures and examples documenting non-existent\nidentifiers.\n\nPlease see the documentation for package testing in golang.org/pkg/testing\nfor the conventions that are enforced for Tests, Benchmarks, and Examples.",
- Default: true,
- },
- {
- Name: "unmarshal",
- Doc: "report passing non-pointer or non-interface values to unmarshal\n\nThe unmarshal analysis reports calls to functions such as json.Unmarshal\nin which the argument type is not a pointer or an interface.",
- Default: true,
- },
- {
- Name: "unreachable",
- Doc: "check for unreachable code\n\nThe unreachable analyzer finds statements that execution can never reach\nbecause they are preceded by a return statement, a call to panic, an\ninfinite loop, or similar constructs.",
- Default: true,
- },
- {
- Name: "unsafeptr",
- Doc: "check for invalid conversions of uintptr to unsafe.Pointer\n\nThe unsafeptr analyzer reports likely incorrect uses of unsafe.Pointer\nto convert integers to pointers. A conversion from uintptr to\nunsafe.Pointer is invalid if it implies that there is a uintptr-typed\nword in memory that holds a pointer value, because that word will be\ninvisible to stack copying and to the garbage collector.",
- Default: true,
- },
- {
- Name: "unusedparams",
- Doc: "check for unused parameters of functions\n\nThe unusedparams analyzer checks functions to see if there are\nany parameters that are not being used.\n\nTo reduce false positives it ignores:\n- methods\n- parameters that do not have a name or are underscored\n- functions in test files\n- functions with empty bodies or those with just a return stmt",
- },
- {
- Name: "unusedresult",
- Doc: "check for unused results of calls to some functions\n\nSome functions like fmt.Errorf return a result and have no side effects,\nso it is always a mistake to discard the result. This analyzer reports\ncalls to certain functions in which the result of the call is ignored.\n\nThe set of functions may be controlled using flags.",
- Default: true,
- },
- {
- Name: "unusedwrite",
- Doc: "checks for unused writes\n\nThe analyzer reports instances of writes to struct fields and\narrays that are never read. Specifically, when a struct object\nor an array is copied, its elements are copied implicitly by\nthe compiler, and any element write to this copy does nothing\nwith the original object.\n\nFor example:\n\n\ttype T struct { x int }\n\tfunc f(input []T) {\n\t\tfor i, v := range input { // v is a copy\n\t\t\tv.x = i // unused write to field x\n\t\t}\n\t}\n\nAnother example involves a non-pointer receiver:\n\n\ttype T struct { x int }\n\tfunc (t T) f() { // t is a copy\n\t\tt.x = 1 // unused write to field x\n\t}\n",
- },
- {
- Name: "useany",
- Doc: "check for constraints that could be simplified to \"any\"",
- },
- {
- Name: "fillreturns",
- Doc: "suggest fixes for errors due to an incorrect number of return values\n\nThis checker provides suggested fixes for type errors of the\ntype \"wrong number of return values (want %d, got %d)\". For example:\n\tfunc m() (int, string, *bool, error) {\n\t\treturn\n\t}\nwill turn into\n\tfunc m() (int, string, *bool, error) {\n\t\treturn 0, \"\", nil, nil\n\t}\n\nThis functionality is similar to https://github.com/sqs/goreturns.\n",
- Default: true,
- },
- {
- Name: "nonewvars",
- Doc: "suggested fixes for \"no new vars on left side of :=\"\n\nThis checker provides suggested fixes for type errors of the\ntype \"no new vars on left side of :=\". For example:\n\tz := 1\n\tz := 2\nwill turn into\n\tz := 1\n\tz = 2\n",
- Default: true,
- },
- {
- Name: "noresultvalues",
- Doc: "suggested fixes for unexpected return values\n\nThis checker provides suggested fixes for type errors of the\ntype \"no result values expected\" or \"too many return values\".\nFor example:\n\tfunc z() { return nil }\nwill turn into\n\tfunc z() { return }\n",
- Default: true,
- },
- {
- Name: "undeclaredname",
- Doc: "suggested fixes for \"undeclared name: <>\"\n\nThis checker provides suggested fixes for type errors of the\ntype \"undeclared name: <>\". It will either insert a new statement,\nsuch as:\n\n\"<> := \"\n\nor a new function declaration, such as:\n\nfunc <>(inferred parameters) {\n\tpanic(\"implement me!\")\n}\n",
- Default: true,
- },
- {
- Name: "fillstruct",
- Doc: "note incomplete struct initializations\n\nThis analyzer provides diagnostics for any struct literals that do not have\nany fields initialized. Because the suggested fix for this analysis is\nexpensive to compute, callers should compute it separately, using the\nSuggestedFix function below.\n",
- Default: true,
- },
- {
- Name: "stubmethods",
- Doc: "stub methods analyzer\n\nThis analyzer generates method stubs for concrete types\nin order to implement a target interface",
- Default: true,
- },
- },
-}
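
The stringintconv entry above explains why string(x) is flagged when x is an integer: the conversion yields the UTF-8 encoding of code point x, not its decimal representation. A minimal, hypothetical sketch of the two replacements suggested by that doc (not part of the deleted file):

	package main

	import (
		"fmt"
		"strconv"
	)

	func main() {
		x := 65
		// string(x) would be flagged by stringintconv; these are the
		// documented alternatives.
		fmt.Println(string(rune(x))) // "A": keeps the code-point meaning
		fmt.Println(strconv.Itoa(x)) // "65": decimal string representation
	}
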
diff --git a/internal/lsp/source/call_hierarchy.go b/internal/lsp/source/call_hierarchy.go
deleted file mode 100644
index 991c30aeb..000000000
--- a/internal/lsp/source/call_hierarchy.go
+++ /dev/null
@@ -1,310 +0,0 @@
-// Copyright 2020 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-package source
-
-import (
- "context"
- "fmt"
- "go/ast"
- "go/token"
- "go/types"
- "path/filepath"
-
- "golang.org/x/tools/go/ast/astutil"
- "golang.org/x/tools/internal/event"
- "golang.org/x/tools/internal/lsp/debug/tag"
- "golang.org/x/tools/internal/lsp/protocol"
- "golang.org/x/tools/internal/span"
- errors "golang.org/x/xerrors"
-)
-
-// PrepareCallHierarchy returns an array of CallHierarchyItem for a file and the position within the file.
-func PrepareCallHierarchy(ctx context.Context, snapshot Snapshot, fh FileHandle, pos protocol.Position) ([]protocol.CallHierarchyItem, error) {
- ctx, done := event.Start(ctx, "source.PrepareCallHierarchy")
- defer done()
-
- identifier, err := Identifier(ctx, snapshot, fh, pos)
- if err != nil {
- if errors.Is(err, ErrNoIdentFound) || errors.Is(err, errNoObjectFound) {
- return nil, nil
- }
- return nil, err
- }
-
- // The identifier can be nil if it is an import spec.
- if identifier == nil || identifier.Declaration.obj == nil {
- return nil, nil
- }
-
- if _, ok := identifier.Declaration.obj.Type().Underlying().(*types.Signature); !ok {
- return nil, nil
- }
-
- if len(identifier.Declaration.MappedRange) == 0 {
- return nil, nil
- }
- declMappedRange := identifier.Declaration.MappedRange[0]
- rng, err := declMappedRange.Range()
- if err != nil {
- return nil, err
- }
-
- callHierarchyItem := protocol.CallHierarchyItem{
- Name: identifier.Name,
- Kind: protocol.Function,
- Tags: []protocol.SymbolTag{},
- Detail: fmt.Sprintf("%s • %s", identifier.Declaration.obj.Pkg().Path(), filepath.Base(declMappedRange.URI().Filename())),
- URI: protocol.DocumentURI(declMappedRange.URI()),
- Range: rng,
- SelectionRange: rng,
- }
- return []protocol.CallHierarchyItem{callHierarchyItem}, nil
-}
-
-// IncomingCalls returns an array of CallHierarchyIncomingCall for a file and the position within the file.
-func IncomingCalls(ctx context.Context, snapshot Snapshot, fh FileHandle, pos protocol.Position) ([]protocol.CallHierarchyIncomingCall, error) {
- ctx, done := event.Start(ctx, "source.IncomingCalls")
- defer done()
-
- refs, err := References(ctx, snapshot, fh, pos, false)
- if err != nil {
- if errors.Is(err, ErrNoIdentFound) || errors.Is(err, errNoObjectFound) {
- return nil, nil
- }
- return nil, err
- }
-
- return toProtocolIncomingCalls(ctx, snapshot, refs)
-}
-
-// toProtocolIncomingCalls returns an array of protocol.CallHierarchyIncomingCall for ReferenceInfo's.
-// References inside the same enclosure are assigned to the same enclosing function.
-func toProtocolIncomingCalls(ctx context.Context, snapshot Snapshot, refs []*ReferenceInfo) ([]protocol.CallHierarchyIncomingCall, error) {
- // An enclosing node could have multiple calls to a reference; we only show the enclosure
- // once in the result, but highlight all calls using FromRanges (the ranges at which the calls occur).
- var incomingCalls = map[protocol.Location]*protocol.CallHierarchyIncomingCall{}
- for _, ref := range refs {
- refRange, err := ref.Range()
- if err != nil {
- return nil, err
- }
-
- callItem, err := enclosingNodeCallItem(snapshot, ref.pkg, ref.URI(), ref.ident.NamePos)
- if err != nil {
- event.Error(ctx, "error getting enclosing node", err, tag.Method.Of(ref.Name))
- continue
- }
- loc := protocol.Location{
- URI: callItem.URI,
- Range: callItem.Range,
- }
-
- if incomingCall, ok := incomingCalls[loc]; ok {
- incomingCall.FromRanges = append(incomingCall.FromRanges, refRange)
- continue
- }
- incomingCalls[loc] = &protocol.CallHierarchyIncomingCall{
- From: callItem,
- FromRanges: []protocol.Range{refRange},
- }
- }
-
- incomingCallItems := make([]protocol.CallHierarchyIncomingCall, 0, len(incomingCalls))
- for _, callItem := range incomingCalls {
- incomingCallItems = append(incomingCallItems, *callItem)
- }
- return incomingCallItems, nil
-}
-
-// enclosingNodeCallItem creates a CallHierarchyItem representing the function call at pos
-func enclosingNodeCallItem(snapshot Snapshot, pkg Package, uri span.URI, pos token.Pos) (protocol.CallHierarchyItem, error) {
- pgf, err := pkg.File(uri)
- if err != nil {
- return protocol.CallHierarchyItem{}, err
- }
-
- var funcDecl *ast.FuncDecl
- var funcLit *ast.FuncLit // innermost function literal
- var litCount int
- // Find the enclosing function, if any, and the number of func literals in between.
- path, _ := astutil.PathEnclosingInterval(pgf.File, pos, pos)
-outer:
- for _, node := range path {
- switch n := node.(type) {
- case *ast.FuncDecl:
- funcDecl = n
- break outer
- case *ast.FuncLit:
- litCount++
- if litCount > 1 {
- continue
- }
- funcLit = n
- }
- }
-
- nameIdent := path[len(path)-1].(*ast.File).Name
- kind := protocol.Package
- if funcDecl != nil {
- nameIdent = funcDecl.Name
- kind = protocol.Function
- }
-
- nameStart, nameEnd := nameIdent.NamePos, nameIdent.NamePos+token.Pos(len(nameIdent.Name))
- if funcLit != nil {
- nameStart, nameEnd = funcLit.Type.Func, funcLit.Type.Params.Pos()
- kind = protocol.Function
- }
- rng, err := NewMappedRange(snapshot.FileSet(), pgf.Mapper, nameStart, nameEnd).Range()
- if err != nil {
- return protocol.CallHierarchyItem{}, err
- }
-
- name := nameIdent.Name
- for i := 0; i < litCount; i++ {
- name += ".func()"
- }
-
- return protocol.CallHierarchyItem{
- Name: name,
- Kind: kind,
- Tags: []protocol.SymbolTag{},
- Detail: fmt.Sprintf("%s • %s", pkg.PkgPath(), filepath.Base(uri.Filename())),
- URI: protocol.DocumentURI(uri),
- Range: rng,
- SelectionRange: rng,
- }, nil
-}
-
-// OutgoingCalls returns an array of CallHierarchyOutgoingCall for a file and the position within the file.
-func OutgoingCalls(ctx context.Context, snapshot Snapshot, fh FileHandle, pos protocol.Position) ([]protocol.CallHierarchyOutgoingCall, error) {
- ctx, done := event.Start(ctx, "source.OutgoingCalls")
- defer done()
-
- identifier, err := Identifier(ctx, snapshot, fh, pos)
- if err != nil {
- if errors.Is(err, ErrNoIdentFound) || errors.Is(err, errNoObjectFound) {
- return nil, nil
- }
- return nil, err
- }
-
- if _, ok := identifier.Declaration.obj.Type().Underlying().(*types.Signature); !ok {
- return nil, nil
- }
- if identifier.Declaration.node == nil {
- return nil, nil
- }
- if len(identifier.Declaration.MappedRange) == 0 {
- return nil, nil
- }
- declMappedRange := identifier.Declaration.MappedRange[0]
- callExprs, err := collectCallExpressions(snapshot.FileSet(), declMappedRange.m, identifier.Declaration.node)
- if err != nil {
- return nil, err
- }
-
- return toProtocolOutgoingCalls(ctx, snapshot, fh, callExprs)
-}
-
-// collectCallExpressions collects call expression ranges inside a function.
-func collectCallExpressions(fset *token.FileSet, mapper *protocol.ColumnMapper, node ast.Node) ([]protocol.Range, error) {
- type callPos struct {
- start, end token.Pos
- }
- callPositions := []callPos{}
-
- ast.Inspect(node, func(n ast.Node) bool {
- if call, ok := n.(*ast.CallExpr); ok {
- var start, end token.Pos
- switch n := call.Fun.(type) {
- case *ast.SelectorExpr:
- start, end = n.Sel.NamePos, call.Lparen
- case *ast.Ident:
- start, end = n.NamePos, call.Lparen
- case *ast.FuncLit:
- // while we don't add the function literal as an 'outgoing' call
- // we still want to traverse into it
- return true
- default:
- // Ignore any other kind of call expression,
- // e.g. direct function literal calls, since those are not 'outgoing' calls.
- return false
- }
- callPositions = append(callPositions, callPos{start: start, end: end})
- }
- return true
- })
-
- callRanges := []protocol.Range{}
- for _, call := range callPositions {
- callRange, err := NewMappedRange(fset, mapper, call.start, call.end).Range()
- if err != nil {
- return nil, err
- }
- callRanges = append(callRanges, callRange)
- }
- return callRanges, nil
-}
-
-// toProtocolOutgoingCalls returns an array of protocol.CallHierarchyOutgoingCall for ast call expressions.
-// Calls to the same function are assigned to the same declaration.
-func toProtocolOutgoingCalls(ctx context.Context, snapshot Snapshot, fh FileHandle, callRanges []protocol.Range) ([]protocol.CallHierarchyOutgoingCall, error) {
- // Multiple calls could be made to the same function, defined by "same declaration
- // AST node & same identifier name" to provide a unique identifier key even when
- // the func is declared in a struct or interface.
- type key struct {
- decl ast.Node
- name string
- }
- outgoingCalls := map[key]*protocol.CallHierarchyOutgoingCall{}
- for _, callRange := range callRanges {
- identifier, err := Identifier(ctx, snapshot, fh, callRange.Start)
- if err != nil {
- if errors.Is(err, ErrNoIdentFound) || errors.Is(err, errNoObjectFound) {
- continue
- }
- return nil, err
- }
-
- // ignore calls to builtin functions
- if identifier.Declaration.obj.Pkg() == nil {
- continue
- }
-
- if outgoingCall, ok := outgoingCalls[key{identifier.Declaration.node, identifier.Name}]; ok {
- outgoingCall.FromRanges = append(outgoingCall.FromRanges, callRange)
- continue
- }
-
- if len(identifier.Declaration.MappedRange) == 0 {
- continue
- }
- declMappedRange := identifier.Declaration.MappedRange[0]
- rng, err := declMappedRange.Range()
- if err != nil {
- return nil, err
- }
-
- outgoingCalls[key{identifier.Declaration.node, identifier.Name}] = &protocol.CallHierarchyOutgoingCall{
- To: protocol.CallHierarchyItem{
- Name: identifier.Name,
- Kind: protocol.Function,
- Tags: []protocol.SymbolTag{},
- Detail: fmt.Sprintf("%s • %s", identifier.Declaration.obj.Pkg().Path(), filepath.Base(declMappedRange.URI().Filename())),
- URI: protocol.DocumentURI(declMappedRange.URI()),
- Range: rng,
- SelectionRange: rng,
- },
- FromRanges: []protocol.Range{callRange},
- }
- }
-
- outgoingCallItems := make([]protocol.CallHierarchyOutgoingCall, 0, len(outgoingCalls))
- for _, callItem := range outgoingCalls {
- outgoingCallItems = append(outgoingCallItems, *callItem)
- }
- return outgoingCallItems, nil
-}
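
collectCallExpressions above walks a function body with ast.Inspect and records the position of every call it finds, descending into function literals as well. A simplified, standalone sketch of that traversal (illustrative only, not code from the deleted file):

	package main

	import (
		"fmt"
		"go/ast"
		"go/parser"
		"go/token"
	)

	func main() {
		src := "package p\nfunc f() {\n\tg()\n\tfunc() { g() }()\n}\nfunc g() {}\n"
		fset := token.NewFileSet()
		file, err := parser.ParseFile(fset, "p.go", src, 0)
		if err != nil {
			panic(err)
		}
		// Report each call expression; returning true keeps descending,
		// so calls inside function literals are reported too.
		ast.Inspect(file, func(n ast.Node) bool {
			if call, ok := n.(*ast.CallExpr); ok {
				fmt.Println("call at", fset.Position(call.Lparen))
			}
			return true
		})
	}
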
diff --git a/internal/lsp/source/code_lens.go b/internal/lsp/source/code_lens.go
deleted file mode 100644
index 0ab857ac6..000000000
--- a/internal/lsp/source/code_lens.go
+++ /dev/null
@@ -1,244 +0,0 @@
-// Copyright 2020 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-package source
-
-import (
- "context"
- "go/ast"
- "go/token"
- "go/types"
- "path/filepath"
- "regexp"
- "strings"
-
- "golang.org/x/tools/internal/lsp/command"
- "golang.org/x/tools/internal/lsp/protocol"
- "golang.org/x/tools/internal/span"
-)
-
-type LensFunc func(context.Context, Snapshot, FileHandle) ([]protocol.CodeLens, error)
-
-// LensFuncs returns the supported lensFuncs for Go files.
-func LensFuncs() map[command.Command]LensFunc {
- return map[command.Command]LensFunc{
- command.Generate: goGenerateCodeLens,
- command.Test: runTestCodeLens,
- command.RegenerateCgo: regenerateCgoLens,
- command.GCDetails: toggleDetailsCodeLens,
- }
-}
-
-var (
- testRe = regexp.MustCompile("^Test[^a-z]")
- benchmarkRe = regexp.MustCompile("^Benchmark[^a-z]")
-)
-
-func runTestCodeLens(ctx context.Context, snapshot Snapshot, fh FileHandle) ([]protocol.CodeLens, error) {
- codeLens := make([]protocol.CodeLens, 0)
-
- fns, err := TestsAndBenchmarks(ctx, snapshot, fh)
- if err != nil {
- return nil, err
- }
- puri := protocol.URIFromSpanURI(fh.URI())
- for _, fn := range fns.Tests {
- cmd, err := command.NewTestCommand("run test", puri, []string{fn.Name}, nil)
- if err != nil {
- return nil, err
- }
- rng := protocol.Range{Start: fn.Rng.Start, End: fn.Rng.Start}
- codeLens = append(codeLens, protocol.CodeLens{Range: rng, Command: cmd})
- }
-
- for _, fn := range fns.Benchmarks {
- cmd, err := command.NewTestCommand("run benchmark", puri, nil, []string{fn.Name})
- if err != nil {
- return nil, err
- }
- rng := protocol.Range{Start: fn.Rng.Start, End: fn.Rng.Start}
- codeLens = append(codeLens, protocol.CodeLens{Range: rng, Command: cmd})
- }
-
- if len(fns.Benchmarks) > 0 {
- _, pgf, err := GetParsedFile(ctx, snapshot, fh, WidestPackage)
- if err != nil {
- return nil, err
- }
- // add a code lens to the top of the file which runs all benchmarks in the file
- rng, err := NewMappedRange(snapshot.FileSet(), pgf.Mapper, pgf.File.Package, pgf.File.Package).Range()
- if err != nil {
- return nil, err
- }
- var benches []string
- for _, fn := range fns.Benchmarks {
- benches = append(benches, fn.Name)
- }
- cmd, err := command.NewTestCommand("run file benchmarks", puri, nil, benches)
- if err != nil {
- return nil, err
- }
- codeLens = append(codeLens, protocol.CodeLens{Range: rng, Command: cmd})
- }
- return codeLens, nil
-}
-
-type testFn struct {
- Name string
- Rng protocol.Range
-}
-
-type testFns struct {
- Tests []testFn
- Benchmarks []testFn
-}
-
-func TestsAndBenchmarks(ctx context.Context, snapshot Snapshot, fh FileHandle) (testFns, error) {
- var out testFns
-
- if !strings.HasSuffix(fh.URI().Filename(), "_test.go") {
- return out, nil
- }
- pkg, pgf, err := GetParsedFile(ctx, snapshot, fh, WidestPackage)
- if err != nil {
- return out, err
- }
-
- for _, d := range pgf.File.Decls {
- fn, ok := d.(*ast.FuncDecl)
- if !ok {
- continue
- }
-
- rng, err := NewMappedRange(snapshot.FileSet(), pgf.Mapper, d.Pos(), fn.End()).Range()
- if err != nil {
- return out, err
- }
-
- if matchTestFunc(fn, pkg, testRe, "T") {
- out.Tests = append(out.Tests, testFn{fn.Name.Name, rng})
- }
-
- if matchTestFunc(fn, pkg, benchmarkRe, "B") {
- out.Benchmarks = append(out.Benchmarks, testFn{fn.Name.Name, rng})
- }
- }
-
- return out, nil
-}
-
-func matchTestFunc(fn *ast.FuncDecl, pkg Package, nameRe *regexp.Regexp, paramID string) bool {
- // Make sure that the function name matches a test function.
- if !nameRe.MatchString(fn.Name.Name) {
- return false
- }
- info := pkg.GetTypesInfo()
- if info == nil {
- return false
- }
- obj := info.ObjectOf(fn.Name)
- if obj == nil {
- return false
- }
- sig, ok := obj.Type().(*types.Signature)
- if !ok {
- return false
- }
- // Test functions should have only one parameter.
- if sig.Params().Len() != 1 {
- return false
- }
-
- // Check the type of the only parameter
- paramTyp, ok := sig.Params().At(0).Type().(*types.Pointer)
- if !ok {
- return false
- }
- named, ok := paramTyp.Elem().(*types.Named)
- if !ok {
- return false
- }
- namedObj := named.Obj()
- if namedObj.Pkg().Path() != "testing" {
- return false
- }
- return namedObj.Id() == paramID
-}
-
-func goGenerateCodeLens(ctx context.Context, snapshot Snapshot, fh FileHandle) ([]protocol.CodeLens, error) {
- pgf, err := snapshot.ParseGo(ctx, fh, ParseFull)
- if err != nil {
- return nil, err
- }
- const ggDirective = "//go:generate"
- for _, c := range pgf.File.Comments {
- for _, l := range c.List {
- if !strings.HasPrefix(l.Text, ggDirective) {
- continue
- }
- rng, err := NewMappedRange(snapshot.FileSet(), pgf.Mapper, l.Pos(), l.Pos()+token.Pos(len(ggDirective))).Range()
- if err != nil {
- return nil, err
- }
- dir := protocol.URIFromSpanURI(span.URIFromPath(filepath.Dir(fh.URI().Filename())))
- nonRecursiveCmd, err := command.NewGenerateCommand("run go generate", command.GenerateArgs{Dir: dir, Recursive: false})
- if err != nil {
- return nil, err
- }
- recursiveCmd, err := command.NewGenerateCommand("run go generate ./...", command.GenerateArgs{Dir: dir, Recursive: true})
- if err != nil {
- return nil, err
- }
- return []protocol.CodeLens{
- {Range: rng, Command: recursiveCmd},
- {Range: rng, Command: nonRecursiveCmd},
- }, nil
-
- }
- }
- return nil, nil
-}
-
-func regenerateCgoLens(ctx context.Context, snapshot Snapshot, fh FileHandle) ([]protocol.CodeLens, error) {
- pgf, err := snapshot.ParseGo(ctx, fh, ParseFull)
- if err != nil {
- return nil, err
- }
- var c *ast.ImportSpec
- for _, imp := range pgf.File.Imports {
- if imp.Path.Value == `"C"` {
- c = imp
- }
- }
- if c == nil {
- return nil, nil
- }
- rng, err := NewMappedRange(snapshot.FileSet(), pgf.Mapper, c.Pos(), c.EndPos).Range()
- if err != nil {
- return nil, err
- }
- puri := protocol.URIFromSpanURI(fh.URI())
- cmd, err := command.NewRegenerateCgoCommand("regenerate cgo definitions", command.URIArg{URI: puri})
- if err != nil {
- return nil, err
- }
- return []protocol.CodeLens{{Range: rng, Command: cmd}}, nil
-}
-
-func toggleDetailsCodeLens(ctx context.Context, snapshot Snapshot, fh FileHandle) ([]protocol.CodeLens, error) {
- _, pgf, err := GetParsedFile(ctx, snapshot, fh, WidestPackage)
- if err != nil {
- return nil, err
- }
- rng, err := NewMappedRange(snapshot.FileSet(), pgf.Mapper, pgf.File.Package, pgf.File.Package).Range()
- if err != nil {
- return nil, err
- }
- puri := protocol.URIFromSpanURI(fh.URI())
- cmd, err := command.NewGCDetailsCommand("Toggle gc annotation details", puri)
- if err != nil {
- return nil, err
- }
- return []protocol.CodeLens{{Range: rng, Command: cmd}}, nil
-}
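
The test and benchmark code lenses above first filter function declarations by name using testRe and benchmarkRe, and only then check for the (*testing.T) or (*testing.B) parameter. A small sketch of how those name patterns behave (illustrative only):

	package main

	import (
		"fmt"
		"regexp"
	)

	var (
		testRe      = regexp.MustCompile("^Test[^a-z]")
		benchmarkRe = regexp.MustCompile("^Benchmark[^a-z]")
	)

	func main() {
		for _, name := range []string{"TestFoo", "Testfoo", "TestX", "BenchmarkParse", "Benchmarks"} {
			fmt.Printf("%-15s test=%-5v benchmark=%v\n",
				name, testRe.MatchString(name), benchmarkRe.MatchString(name))
		}
	}
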
diff --git a/internal/lsp/source/comment.go b/internal/lsp/source/comment.go
deleted file mode 100644
index d88471e42..000000000
--- a/internal/lsp/source/comment.go
+++ /dev/null
@@ -1,381 +0,0 @@
-// Copyright 2019 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-package source
-
-import (
- "bytes"
- "io"
- "regexp"
- "strings"
- "unicode"
- "unicode/utf8"
-)
-
-// CommentToMarkdown converts comment text to formatted markdown.
-// The comment was prepared by DocReader,
-// so it is known not to have leading, trailing blank lines
-// nor to have trailing spaces at the end of lines.
-// The comment markers have already been removed.
-//
-// Each line is converted into a markdown line and empty lines are just converted to
-// newlines. Headings are prefixed with `### ` to make them markdown headings.
-//
-// A span of indented lines retains a 4 space prefix block, with the common indent
-// prefix removed unless empty, in which case it will be converted to a newline.
-//
-// URLs in the comment text are converted into links.
-func CommentToMarkdown(text string) string {
- buf := &bytes.Buffer{}
- commentToMarkdown(buf, text)
- return buf.String()
-}
-
-var (
- mdNewline = []byte("\n")
- mdHeader = []byte("### ")
- mdIndent = []byte(" ")
- mdLinkStart = []byte("[")
- mdLinkDiv = []byte("](")
- mdLinkEnd = []byte(")")
-)
-
-func commentToMarkdown(w io.Writer, text string) {
- blocks := blocks(text)
- for i, b := range blocks {
- switch b.op {
- case opPara:
- for _, line := range b.lines {
- emphasize(w, line, true)
- }
- case opHead:
- // The header block can consist of only one line.
- // However, check the number of lines, just in case.
- if len(b.lines) == 0 {
- // Skip this block.
- continue
- }
- header := b.lines[0]
-
- w.Write(mdHeader)
- commentEscape(w, header, true)
- // Header doesn't end with \n unlike the lines of other blocks.
- w.Write(mdNewline)
- case opPre:
- for _, line := range b.lines {
- if isBlank(line) {
- w.Write(mdNewline)
- continue
- }
- w.Write(mdIndent)
- w.Write([]byte(line))
- }
- }
-
- if i < len(blocks)-1 {
- w.Write(mdNewline)
- }
- }
-}
-
-const (
- ulquo = "“"
- urquo = "”"
-)
-
-var (
- markdownEscape = regexp.MustCompile(`([\\\x60*{}[\]()#+\-.!_>~|"$%&'\/:;<=?@^])`)
-
- unicodeQuoteReplacer = strings.NewReplacer("``", ulquo, "''", urquo)
-)
-
-// commentEscape escapes comment text for markdown. If nice is set,
-// also turns `` into “ and '' into ”.
-func commentEscape(w io.Writer, text string, nice bool) {
- if nice {
- text = convertQuotes(text)
- }
- text = escapeRegex(text)
- w.Write([]byte(text))
-}
-
-func convertQuotes(text string) string {
- return unicodeQuoteReplacer.Replace(text)
-}
-
-func escapeRegex(text string) string {
- return markdownEscape.ReplaceAllString(text, `\$1`)
-}
-
-func emphasize(w io.Writer, line string, nice bool) {
- for {
- m := matchRx.FindStringSubmatchIndex(line)
- if m == nil {
- break
- }
- // m >= 6 (two parenthesized sub-regexps in matchRx, 1st one is urlRx)
-
- // write text before match
- commentEscape(w, line[0:m[0]], nice)
-
- // adjust match for URLs
- match := line[m[0]:m[1]]
- if strings.Contains(match, "://") {
- m0, m1 := m[0], m[1]
- for _, s := range []string{"()", "{}", "[]"} {
- open, close := s[:1], s[1:] // E.g., "(" and ")"
- // require opening parentheses before closing parentheses (#22285)
- if i := strings.Index(match, close); i >= 0 && i < strings.Index(match, open) {
- m1 = m0 + i
- match = line[m0:m1]
- }
- // require balanced pairs of parentheses (#5043)
- for i := 0; strings.Count(match, open) != strings.Count(match, close) && i < 10; i++ {
- m1 = strings.LastIndexAny(line[:m1], s)
- match = line[m0:m1]
- }
- }
- if m1 != m[1] {
- // redo matching with shortened line for correct indices
- m = matchRx.FindStringSubmatchIndex(line[:m[0]+len(match)])
- }
- }
-
- // Following code has been modified from go/doc since words is always
- // nil. All html formatting has also been transformed into markdown formatting
-
- // analyze match
- url := ""
- if m[2] >= 0 {
- url = match
- }
-
- // write match
- if len(url) > 0 {
- w.Write(mdLinkStart)
- }
-
- commentEscape(w, match, nice)
-
- if len(url) > 0 {
- w.Write(mdLinkDiv)
- w.Write([]byte(urlReplacer.Replace(url)))
- w.Write(mdLinkEnd)
- }
-
- // advance
- line = line[m[1]:]
- }
- commentEscape(w, line, nice)
-}
-
-// Everything from here on is a copy of go/doc/comment.go
-
-const (
- // Regexp for Go identifiers
- identRx = `[\pL_][\pL_0-9]*`
-
- // Regexp for URLs
- // Match parens, and check later for balance - see #5043, #22285
- // Match .,:;?! within path, but not at end - see #18139, #16565
- // This excludes some rare yet valid urls ending in common punctuation
- // in order to allow sentences ending in URLs.
-
- // protocol (required) e.g. http
- protoPart = `(https?|ftp|file|gopher|mailto|nntp)`
- // host (required) e.g. www.example.com or [::1]:8080
- hostPart = `([a-zA-Z0-9_@\-.\[\]:]+)`
- // path+query+fragment (optional) e.g. /path/index.html?q=foo#bar
- pathPart = `([.,:;?!]*[a-zA-Z0-9$'()*+&#=@~_/\-\[\]%])*`
-
- urlRx = protoPart + `://` + hostPart + pathPart
-)
-
-var (
- matchRx = regexp.MustCompile(`(` + urlRx + `)|(` + identRx + `)`)
- urlReplacer = strings.NewReplacer(`(`, `\(`, `)`, `\)`)
-)
-
-func indentLen(s string) int {
- i := 0
- for i < len(s) && (s[i] == ' ' || s[i] == '\t') {
- i++
- }
- return i
-}
-
-func isBlank(s string) bool {
- return len(s) == 0 || (len(s) == 1 && s[0] == '\n')
-}
-
-func commonPrefix(a, b string) string {
- i := 0
- for i < len(a) && i < len(b) && a[i] == b[i] {
- i++
- }
- return a[0:i]
-}
-
-func unindent(block []string) {
- if len(block) == 0 {
- return
- }
-
- // compute maximum common white prefix
- prefix := block[0][0:indentLen(block[0])]
- for _, line := range block {
- if !isBlank(line) {
- prefix = commonPrefix(prefix, line[0:indentLen(line)])
- }
- }
- n := len(prefix)
-
- // remove
- for i, line := range block {
- if !isBlank(line) {
- block[i] = line[n:]
- }
- }
-}
-
-// heading returns the trimmed line if it passes as a section heading;
-// otherwise it returns the empty string.
-func heading(line string) string {
- line = strings.TrimSpace(line)
- if len(line) == 0 {
- return ""
- }
-
- // a heading must start with an uppercase letter
- r, _ := utf8.DecodeRuneInString(line)
- if !unicode.IsLetter(r) || !unicode.IsUpper(r) {
- return ""
- }
-
- // it must end in a letter or digit:
- r, _ = utf8.DecodeLastRuneInString(line)
- if !unicode.IsLetter(r) && !unicode.IsDigit(r) {
- return ""
- }
-
- // exclude lines with illegal characters. we allow "(),"
- if strings.ContainsAny(line, ";:!?+*/=[]{}_^°&§~%#@<\">\\") {
- return ""
- }
-
- // allow "'" for possessive "'s" only
- for b := line; ; {
- i := strings.IndexRune(b, '\'')
- if i < 0 {
- break
- }
- if i+1 >= len(b) || b[i+1] != 's' || (i+2 < len(b) && b[i+2] != ' ') {
- return "" // not followed by "s "
- }
- b = b[i+2:]
- }
-
- // allow "." when followed by non-space
- for b := line; ; {
- i := strings.IndexRune(b, '.')
- if i < 0 {
- break
- }
- if i+1 >= len(b) || b[i+1] == ' ' {
- return "" // not followed by non-space
- }
- b = b[i+1:]
- }
-
- return line
-}
-
-type op int
-
-const (
- opPara op = iota
- opHead
- opPre
-)
-
-type block struct {
- op op
- lines []string
-}
-
-func blocks(text string) []block {
- var (
- out []block
- para []string
-
- lastWasBlank = false
- lastWasHeading = false
- )
-
- close := func() {
- if para != nil {
- out = append(out, block{opPara, para})
- para = nil
- }
- }
-
- lines := strings.SplitAfter(text, "\n")
- unindent(lines)
- for i := 0; i < len(lines); {
- line := lines[i]
- if isBlank(line) {
- // close paragraph
- close()
- i++
- lastWasBlank = true
- continue
- }
- if indentLen(line) > 0 {
- // close paragraph
- close()
-
- // count indented or blank lines
- j := i + 1
- for j < len(lines) && (isBlank(lines[j]) || indentLen(lines[j]) > 0) {
- j++
- }
- // but not trailing blank lines
- for j > i && isBlank(lines[j-1]) {
- j--
- }
- pre := lines[i:j]
- i = j
-
- unindent(pre)
-
- // put those lines in a pre block
- out = append(out, block{opPre, pre})
- lastWasHeading = false
- continue
- }
-
- if lastWasBlank && !lastWasHeading && i+2 < len(lines) &&
- isBlank(lines[i+1]) && !isBlank(lines[i+2]) && indentLen(lines[i+2]) == 0 {
- // current line is non-blank, surrounded by blank lines
- // and the next non-blank line is not indented: this
- // might be a heading.
- if head := heading(line); head != "" {
- close()
- out = append(out, block{opHead, []string{head}})
- i += 2
- lastWasHeading = true
- continue
- }
- }
-
- // open paragraph
- lastWasBlank = false
- lastWasHeading = false
- para = append(para, lines[i])
- i++
- }
- close()
-
- return out
-}
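
CommentToMarkdown escapes markdown metacharacters with the markdownEscape regexp above and wraps URLs recognized by matchRx in [text](target) links. A rough sketch of just the escaping step, reusing the same pattern (not the deleted functions themselves):

	package main

	import (
		"fmt"
		"regexp"
	)

	// The same character class as markdownEscape in the deleted file.
	var markdownEscape = regexp.MustCompile(`([\\\x60*{}[\]()#+\-.!_>~|"$%&'\/:;<=?@^])`)

	func main() {
		line := "See https://golang.org/ for details."
		// Prefix every metacharacter with a backslash; emphasize would
		// additionally turn the URL into a markdown link.
		fmt.Println(markdownEscape.ReplaceAllString(line, `\$1`))
	}
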
diff --git a/internal/lsp/source/comment_test.go b/internal/lsp/source/comment_test.go
deleted file mode 100644
index 9efde16ef..000000000
--- a/internal/lsp/source/comment_test.go
+++ /dev/null
@@ -1,368 +0,0 @@
-// Copyright 2019 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-package source
-
-import (
- "bytes"
- "reflect"
- "strings"
- "testing"
-)
-
-// This file is a copy of go/doc/comment_test.go, with the exception of
-// the test cases for TestEmphasize and TestCommentEscape
-
-var headingTests = []struct {
- line string
- ok bool
-}{
- {"Section", true},
- {"A typical usage", true},
- {"ΔΛΞ is Greek", true},
- {"Foo 42", true},
- {"", false},
- {"section", false},
- {"A typical usage:", false},
- {"This code:", false},
- {"δ is Greek", false},
- {"Foo §", false},
- {"Fermat's Last Sentence", true},
- {"Fermat's", true},
- {"'sX", false},
- {"Ted 'Too' Bar", false},
- {"Use n+m", false},
- {"Scanning:", false},
- {"N:M", false},
-}
-
-func TestIsHeading(t *testing.T) {
- for _, tt := range headingTests {
- if h := heading(tt.line); (len(h) > 0) != tt.ok {
- t.Errorf("isHeading(%q) = %v, want %v", tt.line, h, tt.ok)
- }
- }
-}
-
-var blocksTests = []struct {
- in string
- out []block
- text string
-}{
- {
- in: `Para 1.
-Para 1 line 2.
-
-Para 2.
-
-Section
-
-Para 3.
-
- pre
- pre1
-
-Para 4.
-
- pre
- pre1
-
- pre2
-
-Para 5.
-
-
- pre
-
-
- pre1
- pre2
-
-Para 6.
- pre
- pre2
-`,
- out: []block{
- {opPara, []string{"Para 1.\n", "Para 1 line 2.\n"}},
- {opPara, []string{"Para 2.\n"}},
- {opHead, []string{"Section"}},
- {opPara, []string{"Para 3.\n"}},
- {opPre, []string{"pre\n", "pre1\n"}},
- {opPara, []string{"Para 4.\n"}},
- {opPre, []string{"pre\n", "pre1\n", "\n", "pre2\n"}},
- {opPara, []string{"Para 5.\n"}},
- {opPre, []string{"pre\n", "\n", "\n", "pre1\n", "pre2\n"}},
- {opPara, []string{"Para 6.\n"}},
- {opPre, []string{"pre\n", "pre2\n"}},
- },
- text: `. Para 1. Para 1 line 2.
-
-. Para 2.
-
-
-. Section
-
-. Para 3.
-
-$ pre
-$ pre1
-
-. Para 4.
-
-$ pre
-$ pre1
-
-$ pre2
-
-. Para 5.
-
-$ pre
-
-
-$ pre1
-$ pre2
-
-. Para 6.
-
-$ pre
-$ pre2
-`,
- },
- {
- in: "Para.\n\tshould not be ``escaped''",
- out: []block{
- {opPara, []string{"Para.\n"}},
- {opPre, []string{"should not be ``escaped''"}},
- },
- text: ". Para.\n\n$ should not be ``escaped''",
- },
- {
- in: "// A very long line of 46 char for line wrapping.",
- out: []block{
- {opPara, []string{"// A very long line of 46 char for line wrapping."}},
- },
- text: `. // A very long line of 46 char for line
-. // wrapping.
-`,
- },
- {
- in: `/* A very long line of 46 char for line wrapping.
-A very long line of 46 char for line wrapping. */`,
- out: []block{
- {opPara, []string{"/* A very long line of 46 char for line wrapping.\n", "A very long line of 46 char for line wrapping. */"}},
- },
- text: `. /* A very long line of 46 char for line
-. wrapping. A very long line of 46 char
-. for line wrapping. */
-`,
- },
-}
-
-func TestBlocks(t *testing.T) {
- for i, tt := range blocksTests {
- b := blocks(tt.in)
- if !reflect.DeepEqual(b, tt.out) {
- t.Errorf("#%d: mismatch\nhave: %v\nwant: %v", i, b, tt.out)
- }
- }
-}
-
-// This has been modified from go/doc to use markdown links instead of html ones
-// and use markdown escaping instead of html
-var emphasizeTests = []struct {
- in, out string
-}{
- {"", ""},
- {"http://[::1]:8080/foo.txt", `[http\:\/\/\[\:\:1\]\:8080\/foo\.txt](http://[::1]:8080/foo.txt)`},
- {"before (https://www.google.com) after", `before \([https\:\/\/www\.google\.com](https://www.google.com)\) after`},
- {"before https://www.google.com:30/x/y/z:b::c. After", `before [https\:\/\/www\.google\.com\:30\/x\/y\/z\:b\:\:c](https://www.google.com:30/x/y/z:b::c)\. After`},
- {"http://www.google.com/path/:;!-/?query=%34b#093124", `[http\:\/\/www\.google\.com\/path\/\:\;\!\-\/\?query\=\%34b\#093124](http://www.google.com/path/:;!-/?query=%34b#093124)`},
- {"http://www.google.com/path/:;!-/?query=%34bar#093124", `[http\:\/\/www\.google\.com\/path\/\:\;\!\-\/\?query\=\%34bar\#093124](http://www.google.com/path/:;!-/?query=%34bar#093124)`},
- {"http://www.google.com/index.html! After", `[http\:\/\/www\.google\.com\/index\.html](http://www.google.com/index.html)\! After`},
- {"http://www.google.com/", `[http\:\/\/www\.google\.com\/](http://www.google.com/)`},
- {"https://www.google.com/", `[https\:\/\/www\.google\.com\/](https://www.google.com/)`},
- {"http://www.google.com/path.", `[http\:\/\/www\.google\.com\/path](http://www.google.com/path)\.`},
- {"http://en.wikipedia.org/wiki/Camellia_(cipher)", `[http\:\/\/en\.wikipedia\.org\/wiki\/Camellia\_\(cipher\)](http://en.wikipedia.org/wiki/Camellia_\(cipher\))`},
- {"(http://www.google.com/)", `\([http\:\/\/www\.google\.com\/](http://www.google.com/)\)`},
- {"http://gmail.com)", `[http\:\/\/gmail\.com](http://gmail.com)\)`},
- {"((http://gmail.com))", `\(\([http\:\/\/gmail\.com](http://gmail.com)\)\)`},
- {"http://gmail.com ((http://gmail.com)) ()", `[http\:\/\/gmail\.com](http://gmail.com) \(\([http\:\/\/gmail\.com](http://gmail.com)\)\) \(\)`},
- {"Foo bar http://example.com/ quux!", `Foo bar [http\:\/\/example\.com\/](http://example.com/) quux\!`},
- {"Hello http://example.com/%2f/ /world.", `Hello [http\:\/\/example\.com\/\%2f\/](http://example.com/%2f/) \/world\.`},
- {"Lorem http: ipsum //host/path", `Lorem http\: ipsum \/\/host\/path`},
- {"javascript://is/not/linked", `javascript\:\/\/is\/not\/linked`},
- {"http://foo", `[http\:\/\/foo](http://foo)`},
- {"art by [[https://www.example.com/person/][Person Name]]", `art by \[\[[https\:\/\/www\.example\.com\/person\/](https://www.example.com/person/)\]\[Person Name\]\]`},
- {"please visit (http://golang.org/)", `please visit \([http\:\/\/golang\.org\/](http://golang.org/)\)`},
- {"please visit http://golang.org/hello())", `please visit [http\:\/\/golang\.org\/hello\(\)](http://golang.org/hello\(\))\)`},
- {"http://git.qemu.org/?p=qemu.git;a=blob;f=qapi-schema.json;hb=HEAD", `[http\:\/\/git\.qemu\.org\/\?p\=qemu\.git\;a\=blob\;f\=qapi\-schema\.json\;hb\=HEAD](http://git.qemu.org/?p=qemu.git;a=blob;f=qapi-schema.json;hb=HEAD)`},
- {"https://foo.bar/bal/x(])", `[https\:\/\/foo\.bar\/bal\/x\(](https://foo.bar/bal/x\()\]\)`},
- {"foo [ http://bar(])", `foo \[ [http\:\/\/bar\(](http://bar\()\]\)`},
-}
-
-func TestEmphasize(t *testing.T) {
- for i, tt := range emphasizeTests {
- var buf bytes.Buffer
- emphasize(&buf, tt.in, true)
- out := buf.String()
- if out != tt.out {
- t.Errorf("#%d: mismatch\nhave: %v\nwant: %v", i, out, tt.out)
- }
- }
-}
-
-func TestCommentEscape(t *testing.T) {
- //ldquo -> ulquo and rdquo -> urquo
- commentTests := []struct {
- in, out string
- }{
- {"typically invoked as ``go tool asm'',", "typically invoked as " + ulquo + "go tool asm" + urquo + ","},
- {"For more detail, run ``go help test'' and ``go help testflag''", "For more detail, run " + ulquo + "go help test" + urquo + " and " + ulquo + "go help testflag" + urquo}}
- for i, tt := range commentTests {
- var buf strings.Builder
- commentEscape(&buf, tt.in, true)
- out := buf.String()
- if out != tt.out {
- t.Errorf("#%d: mismatch\nhave: %q\nwant: %q", i, out, tt.out)
- }
- }
-}
-
-func TestCommentToMarkdown(t *testing.T) {
- tests := []struct {
- in, out string
- }{
- {
- in: "F declaration.\n",
- out: "F declaration\\.\n",
- },
- {
- in: `
-F declaration. Lorem ipsum dolor sit amet.
-Etiam mattis eros at orci mollis molestie.
-`,
- out: `
-F declaration\. Lorem ipsum dolor sit amet\.
-Etiam mattis eros at orci mollis molestie\.
-`,
- },
- {
- in: `
-F declaration.
-
-Lorem ipsum dolor sit amet.
-Sed id dui turpis.
-
-
-
-
-Aenean tempus velit non auctor eleifend.
-Aenean efficitur a sem id ultricies.
-
-
-Phasellus efficitur mauris et viverra bibendum.
-`,
- out: `
-F declaration\.
-
-Lorem ipsum dolor sit amet\.
-Sed id dui turpis\.
-
-Aenean tempus velit non auctor eleifend\.
-Aenean efficitur a sem id ultricies\.
-
-Phasellus efficitur mauris et viverra bibendum\.
-`,
- },
- {
- in: `
-F declaration.
-
-Aenean tempus velit non auctor eleifend.
-
-Section
-
-Lorem ipsum dolor sit amet, consectetur adipiscing elit.
-
- func foo() {}
-
-
- func bar() {}
-
-Fusce lorem lacus.
-
- func foo() {}
-
- func bar() {}
-
-Maecenas in lobortis lectus.
-
- func foo() {}
-
- func bar() {}
-
-Phasellus efficitur mauris et viverra bibendum.
-`,
- out: `
-F declaration\.
-
-Aenean tempus velit non auctor eleifend\.
-
-### Section
-
-Lorem ipsum dolor sit amet, consectetur adipiscing elit\.
-
- func foo() {}
-
-
- func bar() {}
-
-Fusce lorem lacus\.
-
- func foo() {}
-
- func bar() {}
-
-Maecenas in lobortis lectus\.
-
- func foo() {}
-
- func bar() {}
-
-Phasellus efficitur mauris et viverra bibendum\.
-`,
- },
- {
- in: `
-F declaration.
-
- func foo() {
- fmt.Println("foo")
- }
- func bar() {
- fmt.Println("bar")
- }
-`,
- out: `
-F declaration\.
-
- func foo() {
- fmt.Println("foo")
- }
- func bar() {
- fmt.Println("bar")
- }
-`,
- },
- }
- for i, tt := range tests {
- // Comments start with new lines for better readability. So, we should trim them.
- tt.in = strings.TrimPrefix(tt.in, "\n")
- tt.out = strings.TrimPrefix(tt.out, "\n")
-
- if out := CommentToMarkdown(tt.in); out != tt.out {
- t.Errorf("#%d: mismatch\nhave: %q\nwant: %q", i, out, tt.out)
- }
- }
-}
diff --git a/internal/lsp/source/completion/builtin.go b/internal/lsp/source/completion/builtin.go
deleted file mode 100644
index 39732d864..000000000
--- a/internal/lsp/source/completion/builtin.go
+++ /dev/null
@@ -1,147 +0,0 @@
-// Copyright 2020 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-package completion
-
-import (
- "context"
- "go/ast"
- "go/types"
-)
-
-// builtinArgKind determines the expected object kind for a builtin
-// argument. It attempts to use the AST hints from builtin.go where
-// possible.
-func (c *completer) builtinArgKind(ctx context.Context, obj types.Object, call *ast.CallExpr) objKind {
- builtin, err := c.snapshot.BuiltinFile(ctx)
- if err != nil {
- return 0
- }
- exprIdx := exprAtPos(c.pos, call.Args)
-
- builtinObj := builtin.File.Scope.Lookup(obj.Name())
- if builtinObj == nil {
- return 0
- }
- decl, ok := builtinObj.Decl.(*ast.FuncDecl)
- if !ok || exprIdx >= len(decl.Type.Params.List) {
- return 0
- }
-
- switch ptyp := decl.Type.Params.List[exprIdx].Type.(type) {
- case *ast.ChanType:
- return kindChan
- case *ast.ArrayType:
- return kindSlice
- case *ast.MapType:
- return kindMap
- case *ast.Ident:
- switch ptyp.Name {
- case "Type":
- switch obj.Name() {
- case "make":
- return kindChan | kindSlice | kindMap
- case "len":
- return kindSlice | kindMap | kindArray | kindString | kindChan
- case "cap":
- return kindSlice | kindArray | kindChan
- }
- }
- }
-
- return 0
-}
-
-// builtinArgType infers the type of an argument to a builtin
-// function. parentInf is the inferred type info for the builtin
-// call's parent node.
-func (c *completer) builtinArgType(obj types.Object, call *ast.CallExpr, parentInf candidateInference) candidateInference {
- var (
- exprIdx = exprAtPos(c.pos, call.Args)
-
- // Propagate certain properties from our parent's inference.
- inf = candidateInference{
- typeName: parentInf.typeName,
- modifiers: parentInf.modifiers,
- }
- )
-
- switch obj.Name() {
- case "append":
- if exprIdx <= 0 {
- // Infer first append() arg type as apparent return type of
- // append().
- inf.objType = parentInf.objType
- if parentInf.variadic {
- inf.objType = types.NewSlice(inf.objType)
- }
- break
- }
-
- // For non-initial append() args, infer slice type from the first
- // append() arg, or from parent context.
- if len(call.Args) > 0 {
- inf.objType = c.pkg.GetTypesInfo().TypeOf(call.Args[0])
- }
- if inf.objType == nil {
- inf.objType = parentInf.objType
- }
- if inf.objType == nil {
- break
- }
-
- inf.objType = deslice(inf.objType)
-
- // Check if we are completing the variadic append() param.
- inf.variadic = exprIdx == 1 && len(call.Args) <= 2
-
- // Penalize the first append() argument as a candidate. You
- // don't normally append a slice to itself.
- if sliceChain := objChain(c.pkg.GetTypesInfo(), call.Args[0]); len(sliceChain) > 0 {
- inf.penalized = append(inf.penalized, penalizedObj{objChain: sliceChain, penalty: 0.9})
- }
- case "delete":
- if exprIdx > 0 && len(call.Args) > 0 {
- // Try to fill in expected type of map key.
- firstArgType := c.pkg.GetTypesInfo().TypeOf(call.Args[0])
- if firstArgType != nil {
- if mt, ok := firstArgType.Underlying().(*types.Map); ok {
- inf.objType = mt.Key()
- }
- }
- }
- case "copy":
- var t1, t2 types.Type
- if len(call.Args) > 0 {
- t1 = c.pkg.GetTypesInfo().TypeOf(call.Args[0])
- if len(call.Args) > 1 {
- t2 = c.pkg.GetTypesInfo().TypeOf(call.Args[1])
- }
- }
-
- // Fill in expected type of either arg if the other is already present.
- if exprIdx == 1 && t1 != nil {
- inf.objType = t1
- } else if exprIdx == 0 && t2 != nil {
- inf.objType = t2
- }
- case "new":
- inf.typeName.wantTypeName = true
- if parentInf.objType != nil {
- // Expected type for "new" is the de-pointered parent type.
- if ptr, ok := parentInf.objType.Underlying().(*types.Pointer); ok {
- inf.objType = ptr.Elem()
- }
- }
- case "make":
- if exprIdx == 0 {
- inf.typeName.wantTypeName = true
- inf.objType = parentInf.objType
- } else {
- inf.objType = types.Typ[types.UntypedInt]
- }
- }
-
- return inf
-}
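
builtinArgType above encodes what the completion engine should expect at each argument position of builtins such as append, delete, copy, new, and make. The situations it describes look like this in ordinary Go code (a hypothetical illustration, with the inferred expectation noted in comments):

	package main

	func main() {
		names := []string{"a"}
		ages := map[string]int{"a": 1}

		// append: arguments after the first are expected to have the
		// slice's element type (string here).
		names = append(names, "b", "c")

		// delete: the second argument is expected to have the map's key type.
		delete(ages, "a")

		// copy: the expected type of either argument is inferred from the
		// other one when it is already present.
		dst := make([]string, len(names))
		copy(dst, names)
		_ = dst
	}
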
diff --git a/internal/lsp/source/completion/completion.go b/internal/lsp/source/completion/completion.go
deleted file mode 100644
index 60c404dc5..000000000
--- a/internal/lsp/source/completion/completion.go
+++ /dev/null
@@ -1,2967 +0,0 @@
-// Copyright 2018 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-// Package completion provides core functionality for code completion in Go
-// editors and tools.
-package completion
-
-import (
- "context"
- "fmt"
- "go/ast"
- "go/constant"
- "go/scanner"
- "go/token"
- "go/types"
- "math"
- "sort"
- "strconv"
- "strings"
- "sync"
- "time"
- "unicode"
-
- "golang.org/x/tools/go/ast/astutil"
- "golang.org/x/tools/internal/event"
- "golang.org/x/tools/internal/imports"
- "golang.org/x/tools/internal/lsp/fuzzy"
- "golang.org/x/tools/internal/lsp/protocol"
- "golang.org/x/tools/internal/lsp/snippet"
- "golang.org/x/tools/internal/lsp/source"
- "golang.org/x/tools/internal/typeparams"
- errors "golang.org/x/xerrors"
-)
-
-type CompletionItem struct {
- // Label is the primary text the user sees for this completion item.
- Label string
-
- // Detail is supplemental information to present to the user.
- // This often contains the type or return type of the completion item.
- Detail string
-
- // InsertText is the text to insert if this item is selected.
- // Any of the prefix that has already been typed is not trimmed.
- // The insert text does not contain snippets.
- InsertText string
-
- Kind protocol.CompletionItemKind
- Tags []protocol.CompletionItemTag
- Deprecated bool // Deprecated, prefer Tags if available
-
- // An optional array of additional TextEdits that are applied when
- // selecting this completion.
- //
- // Additional text edits should be used to change text unrelated to the current cursor position
- // (for example adding an import statement at the top of the file if the completion item will
- // insert an unqualified type).
- AdditionalTextEdits []protocol.TextEdit
-
- // Depth is how many levels were searched to find this completion.
- // For example when completing "foo<>", "fooBar" is depth 0, and
- // "fooBar.Baz" is depth 1.
- Depth int
-
- // Score is the internal relevance score.
- // A higher score indicates that this completion item is more relevant.
- Score float64
-
- // snippet is the LSP snippet for the completion item. The LSP
- // specification contains details about LSP snippets. For example, a
- // snippet for a function with the following signature:
- //
- // func foo(a, b, c int)
- //
- // would be:
- //
- // foo(${1:a int}, ${2: b int}, ${3: c int})
- //
- // If Placeholders is false in the CompletionOptions, the above
- // snippet would instead be:
- //
- // foo(${1:})
- snippet *snippet.Builder
-
- // Documentation is the documentation for the completion item.
- Documentation string
-
- // obj is the object from which this candidate was derived, if any.
- // obj is for internal use only.
- obj types.Object
-}
-
-// completionOptions holds completion specific configuration.
-type completionOptions struct {
- unimported bool
- documentation bool
- fullDocumentation bool
- placeholders bool
- literal bool
- snippets bool
- postfix bool
- matcher source.Matcher
- budget time.Duration
-}
-
-// Snippet is a convenience method that returns the snippet if available,
-// otherwise the InsertText. Which one is used for an item depends on
-// whether the callee wants placeholders or not.
-func (i *CompletionItem) Snippet() string {
- if i.snippet != nil {
- return i.snippet.String()
- }
- return i.InsertText
-}
-
-// Scoring constants are used for weighting the relevance of different candidates.
-const (
- // stdScore is the base score for all completion items.
- stdScore float64 = 1.0
-
- // highScore indicates a very relevant completion item.
- highScore float64 = 10.0
-
- // lowScore indicates an irrelevant or not useful completion item.
- lowScore float64 = 0.01
-)
-
-// matcher matches a candidate's label against the user input. The
-// returned score reflects the quality of the match. A score of zero
-// indicates no match, and a score of one means a perfect match.
-type matcher interface {
- Score(candidateLabel string) (score float32)
-}
-
-// prefixMatcher implements case sensitive prefix matching.
-type prefixMatcher string
-
-func (pm prefixMatcher) Score(candidateLabel string) float32 {
- if strings.HasPrefix(candidateLabel, string(pm)) {
- return 1
- }
- return -1
-}
-
-// insensitivePrefixMatcher implements case insensitive prefix matching.
-type insensitivePrefixMatcher string
-
-func (ipm insensitivePrefixMatcher) Score(candidateLabel string) float32 {
- if strings.HasPrefix(strings.ToLower(candidateLabel), string(ipm)) {
- return 1
- }
- return -1
-}
-
-// completer contains the necessary information for a single completion request.
-type completer struct {
- snapshot source.Snapshot
- pkg source.Package
- qf types.Qualifier
- opts *completionOptions
-
- // completionContext contains information about the trigger for this
- // completion request.
- completionContext completionContext
-
- // fh is a handle to the file associated with this completion request.
- fh source.FileHandle
-
- // filename is the name of the file associated with this completion request.
- filename string
-
- // file is the AST of the file associated with this completion request.
- file *ast.File
-
- // pos is the position at which the request was triggered.
- pos token.Pos
-
- // path is the path of AST nodes enclosing the position.
- path []ast.Node
-
- // seen is the map that ensures we do not return duplicate results.
- seen map[types.Object]bool
-
- // items is the list of completion items returned.
- items []CompletionItem
-
- // completionCallbacks is a list of callbacks to collect completions that
- // require expensive operations. This includes operations where we search
- // through the entire module cache.
- completionCallbacks []func(opts *imports.Options) error
-
- // surrounding describes the identifier surrounding the position.
- surrounding *Selection
-
- // inference contains information we've inferred about ideal
- // candidates such as the candidate's type.
- inference candidateInference
-
- // enclosingFunc contains information about the function enclosing
- // the position.
- enclosingFunc *funcInfo
-
- // enclosingCompositeLiteral contains information about the composite literal
- // enclosing the position.
- enclosingCompositeLiteral *compLitInfo
-
- // deepState contains the current state of our deep completion search.
- deepState deepCompletionState
-
- // matcher matches the candidates against the surrounding prefix.
- matcher matcher
-
- // methodSetCache caches the types.NewMethodSet call, which is relatively
- // expensive and can be called many times for the same type while searching
- // for deep completions.
- methodSetCache map[methodSetKey]*types.MethodSet
-
- // mapper converts the positions in the file from which the completion originated.
- mapper *protocol.ColumnMapper
-
- // startTime is when we started processing this completion request. It does
- // not include any time the request spent in the queue.
- startTime time.Time
-}
-
-// funcInfo holds info about a function object.
-type funcInfo struct {
- // sig is the function declaration enclosing the position.
- sig *types.Signature
-
- // body is the function's body.
- body *ast.BlockStmt
-}
-
-type compLitInfo struct {
- // cl is the *ast.CompositeLit enclosing the position.
- cl *ast.CompositeLit
-
- // clType is the type of cl.
- clType types.Type
-
- // kv is the *ast.KeyValueExpr enclosing the position, if any.
- kv *ast.KeyValueExpr
-
- // inKey is true if we are certain the position is in the key side
- // of a key-value pair.
- inKey bool
-
- // maybeInFieldName is true if inKey is false and it is possible
- // we are completing a struct field name. For example,
- // "SomeStruct{<>}" will be inKey=false, but maybeInFieldName=true
- // because we _could_ be completing a field name.
- maybeInFieldName bool
-}
-
-type importInfo struct {
- importPath string
- name string
- pkg source.Package
-}
-
-type methodSetKey struct {
- typ types.Type
- addressable bool
-}
-
-type completionContext struct {
- // triggerCharacter is the character used to trigger completion at current
- // position, if any.
- triggerCharacter string
-
- // triggerKind is information about how a completion was triggered.
- triggerKind protocol.CompletionTriggerKind
-
- // commentCompletion is true if we are completing a comment.
- commentCompletion bool
-
- // packageCompletion is true if we are completing a package name.
- packageCompletion bool
-}
-
-// A Selection represents the cursor position and surrounding identifier.
-type Selection struct {
- content string
- cursor token.Pos
- source.MappedRange
-}
-
-func (p Selection) Content() string {
- return p.content
-}
-
-func (p Selection) Start() token.Pos {
- return p.MappedRange.SpanRange().Start
-}
-
-func (p Selection) End() token.Pos {
- return p.MappedRange.SpanRange().End
-}
-
-func (p Selection) Prefix() string {
- return p.content[:p.cursor-p.SpanRange().Start]
-}
-
-func (p Selection) Suffix() string {
- return p.content[p.cursor-p.SpanRange().Start:]
-}
-
-func (c *completer) setSurrounding(ident *ast.Ident) {
- if c.surrounding != nil {
- return
- }
- if !(ident.Pos() <= c.pos && c.pos <= ident.End()) {
- return
- }
-
- c.surrounding = &Selection{
- content: ident.Name,
- cursor: c.pos,
- // Overwrite the prefix only.
- MappedRange: source.NewMappedRange(c.snapshot.FileSet(), c.mapper, ident.Pos(), ident.End()),
- }
-
- c.setMatcherFromPrefix(c.surrounding.Prefix())
-}
-
-func (c *completer) setMatcherFromPrefix(prefix string) {
- switch c.opts.matcher {
- case source.Fuzzy:
- c.matcher = fuzzy.NewMatcher(prefix)
- case source.CaseSensitive:
- c.matcher = prefixMatcher(prefix)
- default:
- c.matcher = insensitivePrefixMatcher(strings.ToLower(prefix))
- }
-}
-
-func (c *completer) getSurrounding() *Selection {
- if c.surrounding == nil {
- c.surrounding = &Selection{
- content: "",
- cursor: c.pos,
- MappedRange: source.NewMappedRange(c.snapshot.FileSet(), c.mapper, c.pos, c.pos),
- }
- }
- return c.surrounding
-}
-
-// candidate represents a completion candidate.
-type candidate struct {
- // obj is the types.Object to complete to.
- obj types.Object
-
- // score is used to rank candidates.
- score float64
-
- // name is the deep object name path, e.g. "foo.bar"
- name string
-
- // detail is additional information about this item. If not specified,
- // defaults to type string for the object.
- detail string
-
- // path holds the path from the search root (excluding the candidate
- // itself) for a deep candidate.
- path []types.Object
-
- // pathInvokeMask is a bit mask tracking whether each entry in path
- // should be formatted with "()" (i.e. whether it is a function
- // invocation).
- pathInvokeMask uint16
-
- // mods contains modifications that should be applied to the
- // candidate when inserted. For example, "foo" may be inserted as
- // "*foo" or "foo()".
- mods []typeModKind
-
- // addressable is true if a pointer can be taken to the candidate.
- addressable bool
-
- // convertTo is a type that this candidate should be cast to. For
- // example, if convertTo is float64, "foo" should be formatted as
- // "float64(foo)".
- convertTo types.Type
-
- // imp is the import that needs to be added to this package in order
- // for this candidate to be valid. nil if no import needed.
- imp *importInfo
-}
-
-func (c candidate) hasMod(mod typeModKind) bool {
- for _, m := range c.mods {
- if m == mod {
- return true
- }
- }
- return false
-}
-
-// ErrIsDefinition is an error that informs the user they got no
-// completions because they tried to complete the name of a new object
-// being defined.
-type ErrIsDefinition struct {
- objStr string
-}
-
-func (e ErrIsDefinition) Error() string {
- msg := "this is a definition"
- if e.objStr != "" {
- msg += " of " + e.objStr
- }
- return msg
-}
-
-// Completion returns a list of possible candidates for completion, given a
-// file and a position.
-//
-// The selection is computed based on the preceding identifier and can be used by
-// the client to score the quality of the completion. For instance, some clients
-// may tolerate imperfect matches as valid completion results, since users may make typos.
-func Completion(ctx context.Context, snapshot source.Snapshot, fh source.FileHandle, protoPos protocol.Position, protoContext protocol.CompletionContext) ([]CompletionItem, *Selection, error) {
- ctx, done := event.Start(ctx, "completion.Completion")
- defer done()
-
- startTime := time.Now()
-
- pkg, pgf, err := source.GetParsedFile(ctx, snapshot, fh, source.NarrowestPackage)
- if err != nil || pgf.File.Package == token.NoPos {
- // If we can't parse this file or find position for the package
- // keyword, it may be missing a package declaration. Try offering
- // suggestions for the package declaration.
- // Note that this would be the case even if the keyword 'package' is
- // present but no package name exists.
- items, surrounding, innerErr := packageClauseCompletions(ctx, snapshot, fh, protoPos)
- if innerErr != nil {
- // return the error for GetParsedFile since it's more relevant in this situation.
- return nil, nil, errors.Errorf("getting file for Completion: %w (package completions: %v)", err, innerErr)
- }
- return items, surrounding, nil
- }
- spn, err := pgf.Mapper.PointSpan(protoPos)
- if err != nil {
- return nil, nil, err
- }
- rng, err := spn.Range(pgf.Mapper.Converter)
- if err != nil {
- return nil, nil, err
- }
- // Completion is based on what precedes the cursor.
- // Find the path to the position before pos.
- path, _ := astutil.PathEnclosingInterval(pgf.File, rng.Start-1, rng.Start-1)
- if path == nil {
- return nil, nil, errors.Errorf("cannot find node enclosing position")
- }
-
- pos := rng.Start
-
- // Check if completion at this position is valid. If not, return early.
- switch n := path[0].(type) {
- case *ast.BasicLit:
- // Skip completion inside literals except for ImportSpec
- if len(path) > 1 {
- if _, ok := path[1].(*ast.ImportSpec); ok {
- break
- }
- }
- return nil, nil, nil
- case *ast.CallExpr:
- if n.Ellipsis.IsValid() && pos > n.Ellipsis && pos <= n.Ellipsis+token.Pos(len("...")) {
- // Don't offer completions inside or directly after "...". For
- // example, don't offer completions at "<>" in "foo(bar...<>").
- return nil, nil, nil
- }
- case *ast.Ident:
- // reject defining identifiers
- if obj, ok := pkg.GetTypesInfo().Defs[n]; ok {
- if v, ok := obj.(*types.Var); ok && v.IsField() && v.Embedded() {
- // An anonymous field is also a reference to a type.
- } else if pgf.File.Name == n {
- // Don't skip completions if Ident is for package name.
- break
- } else {
- objStr := ""
- if obj != nil {
- qual := types.RelativeTo(pkg.GetTypes())
- objStr = types.ObjectString(obj, qual)
- }
- ans, sel := definition(path, obj, snapshot.FileSet(), pgf.Mapper, fh)
- if ans != nil {
- sort.Slice(ans, func(i, j int) bool {
- return ans[i].Score > ans[j].Score
- })
- return ans, sel, nil
- }
- return nil, nil, ErrIsDefinition{objStr: objStr}
- }
- }
- }
-
- opts := snapshot.View().Options()
- c := &completer{
- pkg: pkg,
- snapshot: snapshot,
- qf: source.Qualifier(pgf.File, pkg.GetTypes(), pkg.GetTypesInfo()),
- completionContext: completionContext{
- triggerCharacter: protoContext.TriggerCharacter,
- triggerKind: protoContext.TriggerKind,
- },
- fh: fh,
- filename: fh.URI().Filename(),
- file: pgf.File,
- path: path,
- pos: pos,
- seen: make(map[types.Object]bool),
- enclosingFunc: enclosingFunction(path, pkg.GetTypesInfo()),
- enclosingCompositeLiteral: enclosingCompositeLiteral(path, rng.Start, pkg.GetTypesInfo()),
- deepState: deepCompletionState{
- enabled: opts.DeepCompletion,
- },
- opts: &completionOptions{
- matcher: opts.Matcher,
- unimported: opts.CompleteUnimported,
- documentation: opts.CompletionDocumentation && opts.HoverKind != source.NoDocumentation,
- fullDocumentation: opts.HoverKind == source.FullDocumentation,
- placeholders: opts.UsePlaceholders,
- literal: opts.LiteralCompletions && opts.InsertTextFormat == protocol.SnippetTextFormat,
- budget: opts.CompletionBudget,
- snippets: opts.InsertTextFormat == protocol.SnippetTextFormat,
- postfix: opts.ExperimentalPostfixCompletions,
- },
- // default to a matcher that always matches
- matcher: prefixMatcher(""),
- methodSetCache: make(map[methodSetKey]*types.MethodSet),
- mapper: pgf.Mapper,
- startTime: startTime,
- }
-
- var cancel context.CancelFunc
- if c.opts.budget == 0 {
- ctx, cancel = context.WithCancel(ctx)
- } else {
-		// timeoutDuration is the completion budget remaining. If less than
-		// 10ms remain, use 10ms.
- timeoutDuration := time.Until(c.startTime.Add(c.opts.budget))
- if timeoutDuration < 10*time.Millisecond {
- timeoutDuration = 10 * time.Millisecond
- }
- ctx, cancel = context.WithTimeout(ctx, timeoutDuration)
- }
- defer cancel()
-
- if surrounding := c.containingIdent(pgf.Src); surrounding != nil {
- c.setSurrounding(surrounding)
- }
-
- c.inference = expectedCandidate(ctx, c)
-
- err = c.collectCompletions(ctx)
- if err != nil {
- return nil, nil, err
- }
-
- // Deep search collected candidates and their members for more candidates.
- c.deepSearch(ctx)
-
- for _, callback := range c.completionCallbacks {
- if err := c.snapshot.RunProcessEnvFunc(ctx, callback); err != nil {
- return nil, nil, err
- }
- }
-
- // Search candidates populated by expensive operations like
- // unimportedMembers etc. for more completion items.
- c.deepSearch(ctx)
-
- // Statement candidates offer an entire statement in certain contexts, as
- // opposed to a single object. Add statement candidates last because they
- // depend on other candidates having already been collected.
- c.addStatementCandidates()
-
- c.sortItems()
- return c.items, c.getSurrounding(), nil
-}
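-
-// withBudget is a minimal sketch of the budget/timeout pattern used above,
-// assuming only the standard library: a zero budget means no deadline;
-// otherwise work is bounded by whatever remains of the budget, with a 10ms
-// floor. The helper name is illustrative.
-func withBudget(ctx context.Context, start time.Time, budget time.Duration) (context.Context, context.CancelFunc) {
-	if budget == 0 {
-		return context.WithCancel(ctx)
-	}
-	remaining := time.Until(start.Add(budget))
-	if remaining < 10*time.Millisecond {
-		remaining = 10 * time.Millisecond
-	}
-	return context.WithTimeout(ctx, remaining)
-}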
-
-// collectCompletions adds possible completion candidates to either the deep
-// search queue or completion items directly for different completion contexts.
-func (c *completer) collectCompletions(ctx context.Context) error {
- // Inside import blocks, return completions for unimported packages.
- for _, importSpec := range c.file.Imports {
- if !(importSpec.Path.Pos() <= c.pos && c.pos <= importSpec.Path.End()) {
- continue
- }
- return c.populateImportCompletions(ctx, importSpec)
- }
-
- // Inside comments, offer completions for the name of the relevant symbol.
- for _, comment := range c.file.Comments {
- if comment.Pos() < c.pos && c.pos <= comment.End() {
- c.populateCommentCompletions(ctx, comment)
- return nil
- }
- }
-
- // Struct literals are handled entirely separately.
- if c.wantStructFieldCompletions() {
- // If we are definitely completing a struct field name, deep completions
- // don't make sense.
- if c.enclosingCompositeLiteral.inKey {
- c.deepState.enabled = false
- }
- return c.structLiteralFieldName(ctx)
- }
-
- if lt := c.wantLabelCompletion(); lt != labelNone {
- c.labels(lt)
- return nil
- }
-
- if c.emptySwitchStmt() {
- // Empty switch statements only admit "default" and "case" keywords.
- c.addKeywordItems(map[string]bool{}, highScore, CASE, DEFAULT)
- return nil
- }
-
- switch n := c.path[0].(type) {
- case *ast.Ident:
- if c.file.Name == n {
- return c.packageNameCompletions(ctx, c.fh.URI(), n)
- } else if sel, ok := c.path[1].(*ast.SelectorExpr); ok && sel.Sel == n {
- // Is this the Sel part of a selector?
- return c.selector(ctx, sel)
- }
- return c.lexical(ctx)
- // The function name hasn't been typed yet, but the parens are there:
- // recv.‸(arg)
- case *ast.TypeAssertExpr:
- // Create a fake selector expression.
- return c.selector(ctx, &ast.SelectorExpr{X: n.X})
- case *ast.SelectorExpr:
- return c.selector(ctx, n)
- // At the file scope, only keywords are allowed.
- case *ast.BadDecl, *ast.File:
- c.addKeywordCompletions()
- default:
- // fallback to lexical completions
- return c.lexical(ctx)
- }
-
- return nil
-}
-
-// containingIdent returns the *ast.Ident containing pos, if any. It
-// synthesizes an *ast.Ident to allow completion in the face of
-// certain syntax errors.
-func (c *completer) containingIdent(src []byte) *ast.Ident {
-	// In the normal case, our leaf AST node is the identifier being completed.
- if ident, ok := c.path[0].(*ast.Ident); ok {
- return ident
- }
-
- pos, tkn, lit := c.scanToken(src)
- if !pos.IsValid() {
- return nil
- }
-
- fakeIdent := &ast.Ident{Name: lit, NamePos: pos}
-
- if _, isBadDecl := c.path[0].(*ast.BadDecl); isBadDecl {
- // You don't get *ast.Idents at the file level, so look for bad
- // decls and use the manually extracted token.
- return fakeIdent
- } else if c.emptySwitchStmt() {
- // Only keywords are allowed in empty switch statements.
- // *ast.Idents are not parsed, so we must use the manually
- // extracted token.
- return fakeIdent
- } else if tkn.IsKeyword() {
- // Otherwise, manually extract the prefix if our containing token
- // is a keyword. This improves completion after an "accidental
- // keyword", e.g. completing to "variance" in "someFunc(var<>)".
- return fakeIdent
- }
-
- return nil
-}
-
-// scanToken scans the parsed file's contents for the token containing pos.
-func (c *completer) scanToken(contents []byte) (token.Pos, token.Token, string) {
- tok := c.snapshot.FileSet().File(c.pos)
-
- var s scanner.Scanner
- s.Init(tok, contents, nil, 0)
- for {
- tknPos, tkn, lit := s.Scan()
- if tkn == token.EOF || tknPos >= c.pos {
- return token.NoPos, token.ILLEGAL, ""
- }
-
- if len(lit) > 0 && tknPos <= c.pos && c.pos <= tknPos+token.Pos(len(lit)) {
- return tknPos, tkn, lit
- }
- }
-}
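-
-// tokenAt is a standalone sketch of the same scanning technique using only
-// go/scanner and go/token: it returns the token whose literal contains byte
-// offset "at" in src. The name and signature are illustrative only.
-func tokenAt(src []byte, at int) (token.Token, string) {
-	fset := token.NewFileSet()
-	file := fset.AddFile("src.go", -1, len(src))
-	var s scanner.Scanner
-	s.Init(file, src, nil, 0)
-	for {
-		pos, tok, lit := s.Scan()
-		if tok == token.EOF {
-			return token.ILLEGAL, ""
-		}
-		off := fset.Position(pos).Offset
-		if len(lit) > 0 && off <= at && at <= off+len(lit) {
-			return tok, lit
-		}
-	}
-}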
-
-func (c *completer) sortItems() {
- sort.SliceStable(c.items, func(i, j int) bool {
- // Sort by score first.
- if c.items[i].Score != c.items[j].Score {
- return c.items[i].Score > c.items[j].Score
- }
-
- // Then sort by label so order stays consistent. This also has the
- // effect of preferring shorter candidates.
- return c.items[i].Label < c.items[j].Label
- })
-}
-
-// emptySwitchStmt reports whether pos is in an empty switch or select
-// statement.
-func (c *completer) emptySwitchStmt() bool {
- block, ok := c.path[0].(*ast.BlockStmt)
- if !ok || len(block.List) > 0 || len(c.path) == 1 {
- return false
- }
-
- switch c.path[1].(type) {
- case *ast.SwitchStmt, *ast.TypeSwitchStmt, *ast.SelectStmt:
- return true
- default:
- return false
- }
-}
-
-// populateImportCompletions yields completions for an import path around the cursor.
-//
-// Completions are suggested at the directory depth of the given import path so
-// that we don't overwhelm the user with a large list of possibilities. As an
-// example, a completion for the prefix "golang" results in "golang.org/".
-// Completions for "golang.org/" yield its subdirectories
-// (i.e. "golang.org/x/"). The user is meant to accept completion suggestions
-// until they reach a complete import path.
-func (c *completer) populateImportCompletions(ctx context.Context, searchImport *ast.ImportSpec) error {
- if !strings.HasPrefix(searchImport.Path.Value, `"`) {
- return nil
- }
-
- // deepSearch is not valuable for import completions.
- c.deepState.enabled = false
-
- importPath := searchImport.Path.Value
-
- // Extract the text between the quotes (if any) in an import spec.
- // prefix is the part of import path before the cursor.
- prefixEnd := c.pos - searchImport.Path.Pos()
- prefix := strings.Trim(importPath[:prefixEnd], `"`)
-
- // The number of directories in the import path gives us the depth at
- // which to search.
- depth := len(strings.Split(prefix, "/")) - 1
-
- content := importPath
- start, end := searchImport.Path.Pos(), searchImport.Path.End()
- namePrefix, nameSuffix := `"`, `"`
- // If a starting quote is present, adjust surrounding to either after the
- // cursor or after the first slash (/), except if cursor is at the starting
- // quote. Otherwise we provide a completion including the starting quote.
- if strings.HasPrefix(importPath, `"`) && c.pos > searchImport.Path.Pos() {
- content = content[1:]
- start++
- if depth > 0 {
- // Adjust textEdit start to replacement range. For ex: if current
-			// path was "golang.org/x/to<>ols/internal/", where <> is the cursor
- // position, start of the replacement range would be after
- // "golang.org/x/".
- path := strings.SplitAfter(prefix, "/")
- numChars := len(strings.Join(path[:len(path)-1], ""))
- content = content[numChars:]
- start += token.Pos(numChars)
- }
- namePrefix = ""
- }
-
- // We won't provide an ending quote if one is already present, except if
- // cursor is after the ending quote but still in import spec. This is
- // because cursor has to be in our textEdit range.
- if strings.HasSuffix(importPath, `"`) && c.pos < searchImport.Path.End() {
- end--
- content = content[:len(content)-1]
- nameSuffix = ""
- }
-
- c.surrounding = &Selection{
- content: content,
- cursor: c.pos,
- MappedRange: source.NewMappedRange(c.snapshot.FileSet(), c.mapper, start, end),
- }
-
- seenImports := make(map[string]struct{})
- for _, importSpec := range c.file.Imports {
- if importSpec.Path.Value == importPath {
- continue
- }
- seenImportPath, err := strconv.Unquote(importSpec.Path.Value)
- if err != nil {
- return err
- }
- seenImports[seenImportPath] = struct{}{}
- }
-
- var mu sync.Mutex // guard c.items locally, since searchImports is called in parallel
- seen := make(map[string]struct{})
- searchImports := func(pkg imports.ImportFix) {
- path := pkg.StmtInfo.ImportPath
- if _, ok := seenImports[path]; ok {
- return
- }
-
- // Any package path containing fewer directories than the search
- // prefix is not a match.
- pkgDirList := strings.Split(path, "/")
- if len(pkgDirList) < depth+1 {
- return
- }
- pkgToConsider := strings.Join(pkgDirList[:depth+1], "/")
-
- name := pkgDirList[depth]
- // if we're adding an opening quote to completion too, set name to full
- // package path since we'll need to overwrite that range.
- if namePrefix == `"` {
- name = pkgToConsider
- }
-
- score := pkg.Relevance
- if len(pkgDirList)-1 == depth {
- score *= highScore
- } else {
- // For incomplete package paths, add a terminal slash to indicate that the
- // user should keep triggering completions.
- name += "/"
- pkgToConsider += "/"
- }
-
- if _, ok := seen[pkgToConsider]; ok {
- return
- }
- seen[pkgToConsider] = struct{}{}
-
- mu.Lock()
- defer mu.Unlock()
-
- name = namePrefix + name + nameSuffix
- obj := types.NewPkgName(0, nil, name, types.NewPackage(pkgToConsider, name))
- c.deepState.enqueue(candidate{
- obj: obj,
- detail: fmt.Sprintf("%q", pkgToConsider),
- score: score,
- })
- }
-
- c.completionCallbacks = append(c.completionCallbacks, func(opts *imports.Options) error {
- return imports.GetImportPaths(ctx, searchImports, prefix, c.filename, c.pkg.GetTypes().Name(), opts.Env)
- })
- return nil
-}
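-
-// truncateToDepth sketches the directory-depth truncation described above,
-// using only the standard library: for prefix "golang.org/x/to" (depth 2) and
-// candidate "golang.org/x/tools/internal/lsp", it suggests "golang.org/x/tools/".
-// The helper name is illustrative.
-func truncateToDepth(prefix, candidate string) string {
-	depth := len(strings.Split(prefix, "/")) - 1
-	parts := strings.Split(candidate, "/")
-	if len(parts) < depth+1 {
-		return "" // fewer directories than the search prefix: not a match
-	}
-	suggestion := strings.Join(parts[:depth+1], "/")
-	if len(parts)-1 > depth {
-		suggestion += "/" // incomplete path: user should keep triggering completions
-	}
-	return suggestion
-}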
-
-// populateCommentCompletions yields completions for comments preceding or in declarations.
-func (c *completer) populateCommentCompletions(ctx context.Context, comment *ast.CommentGroup) {
- // If the completion was triggered by a period, ignore it. These types of
- // completions will not be useful in comments.
- if c.completionContext.triggerCharacter == "." {
- return
- }
-
-	// Using the comment's end position, find the file that contains it.
- file := c.snapshot.FileSet().File(comment.End())
- if file == nil {
- return
- }
-
- // Deep completion doesn't work properly in comments since we don't
- // have a type object to complete further.
- c.deepState.enabled = false
- c.completionContext.commentCompletion = true
-
- // Documentation isn't useful in comments, since it might end up being the
- // comment itself.
- c.opts.documentation = false
-
- commentLine := file.Line(comment.End())
-
- // comment is valid, set surrounding as word boundaries around cursor
- c.setSurroundingForComment(comment)
-
- // Using the next line pos, grab and parse the exported symbol on that line
- for _, n := range c.file.Decls {
- declLine := file.Line(n.Pos())
-		// skip unless the comment is inside, directly above, or on the same line as the declaration
- if declLine != commentLine && declLine != commentLine+1 &&
- !(n.Pos() <= comment.Pos() && comment.End() <= n.End()) {
- continue
- }
- switch node := n.(type) {
- // handle const, vars, and types
- case *ast.GenDecl:
- for _, spec := range node.Specs {
- switch spec := spec.(type) {
- case *ast.ValueSpec:
- for _, name := range spec.Names {
- if name.String() == "_" {
- continue
- }
- obj := c.pkg.GetTypesInfo().ObjectOf(name)
- c.deepState.enqueue(candidate{obj: obj, score: stdScore})
- }
- case *ast.TypeSpec:
- // add TypeSpec fields to completion
- switch typeNode := spec.Type.(type) {
- case *ast.StructType:
- c.addFieldItems(ctx, typeNode.Fields)
- case *ast.FuncType:
- c.addFieldItems(ctx, typeNode.Params)
- c.addFieldItems(ctx, typeNode.Results)
- case *ast.InterfaceType:
- c.addFieldItems(ctx, typeNode.Methods)
- }
-
- if spec.Name.String() == "_" {
- continue
- }
-
- obj := c.pkg.GetTypesInfo().ObjectOf(spec.Name)
-					// A type name should get a higher score than fields, but not highScore by
-					// default, since a field near the comment cursor gets highScore.
- score := stdScore * 1.1
- // If type declaration is on the line after comment, give it a highScore.
- if declLine == commentLine+1 {
- score = highScore
- }
-
- c.deepState.enqueue(candidate{obj: obj, score: score})
- }
- }
- // handle functions
- case *ast.FuncDecl:
- c.addFieldItems(ctx, node.Recv)
- c.addFieldItems(ctx, node.Type.Params)
- c.addFieldItems(ctx, node.Type.Results)
-
- // collect receiver struct fields
- if node.Recv != nil {
- for _, fields := range node.Recv.List {
- for _, name := range fields.Names {
- obj := c.pkg.GetTypesInfo().ObjectOf(name)
- if obj == nil {
- continue
- }
-
- recvType := obj.Type().Underlying()
- if ptr, ok := recvType.(*types.Pointer); ok {
- recvType = ptr.Elem()
- }
- recvStruct, ok := recvType.Underlying().(*types.Struct)
- if !ok {
- continue
- }
- for i := 0; i < recvStruct.NumFields(); i++ {
- field := recvStruct.Field(i)
- c.deepState.enqueue(candidate{obj: field, score: lowScore})
- }
- }
- }
- }
-
- if node.Name.String() == "_" {
- continue
- }
-
- obj := c.pkg.GetTypesInfo().ObjectOf(node.Name)
- if obj == nil || obj.Pkg() != nil && obj.Pkg() != c.pkg.GetTypes() {
- continue
- }
-
- c.deepState.enqueue(candidate{obj: obj, score: highScore})
- }
- }
-}
-
-// setSurroundingForComment sets the word boundaries surrounding the cursor within a comment.
-func (c *completer) setSurroundingForComment(comments *ast.CommentGroup) {
- var cursorComment *ast.Comment
- for _, comment := range comments.List {
- if c.pos >= comment.Pos() && c.pos <= comment.End() {
- cursorComment = comment
- break
- }
- }
- // if cursor isn't in the comment
- if cursorComment == nil {
- return
- }
-
- // index of cursor in comment text
- cursorOffset := int(c.pos - cursorComment.Pos())
- start, end := cursorOffset, cursorOffset
- for start > 0 && isValidIdentifierChar(cursorComment.Text[start-1]) {
- start--
- }
- for end < len(cursorComment.Text) && isValidIdentifierChar(cursorComment.Text[end]) {
- end++
- }
-
- c.surrounding = &Selection{
- content: cursorComment.Text[start:end],
- cursor: c.pos,
- MappedRange: source.NewMappedRange(c.snapshot.FileSet(), c.mapper,
- token.Pos(int(cursorComment.Slash)+start), token.Pos(int(cursorComment.Slash)+end)),
- }
- c.setMatcherFromPrefix(c.surrounding.Prefix())
-}
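-
-// wordAt is a standalone sketch of the word-boundary scan above: it expands
-// left and right from byte offset "at" over identifier characters, reusing
-// the isValidIdentifierChar helper defined below. The name is illustrative.
-func wordAt(text string, at int) string {
-	start, end := at, at
-	for start > 0 && isValidIdentifierChar(text[start-1]) {
-		start--
-	}
-	for end < len(text) && isValidIdentifierChar(text[end]) {
-		end++
-	}
-	return text[start:end]
-}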
-
-// isValidIdentifierChar returns true if a byte is a valid Go identifier
-// character, i.e. a Unicode letter, digit, or underscore.
-func isValidIdentifierChar(char byte) bool {
- charRune := rune(char)
- return unicode.In(charRune, unicode.Letter, unicode.Digit) || char == '_'
-}
-
-// addFieldItems adds struct fields, interface methods, and function declaration fields to the completion candidates.
-func (c *completer) addFieldItems(ctx context.Context, fields *ast.FieldList) {
- if fields == nil {
- return
- }
-
- cursor := c.surrounding.cursor
- for _, field := range fields.List {
- for _, name := range field.Names {
- if name.String() == "_" {
- continue
- }
- obj := c.pkg.GetTypesInfo().ObjectOf(name)
- if obj == nil {
- continue
- }
-
- // if we're in a field comment/doc, score that field as more relevant
- score := stdScore
- if field.Comment != nil && field.Comment.Pos() <= cursor && cursor <= field.Comment.End() {
- score = highScore
- } else if field.Doc != nil && field.Doc.Pos() <= cursor && cursor <= field.Doc.End() {
- score = highScore
- }
-
- c.deepState.enqueue(candidate{obj: obj, score: score})
- }
- }
-}
-
-func (c *completer) wantStructFieldCompletions() bool {
- clInfo := c.enclosingCompositeLiteral
- if clInfo == nil {
- return false
- }
-
- return clInfo.isStruct() && (clInfo.inKey || clInfo.maybeInFieldName)
-}
-
-func (c *completer) wantTypeName() bool {
- return !c.completionContext.commentCompletion && c.inference.typeName.wantTypeName
-}
-
-// See https://golang.org/issue/36001. Unimported completions are expensive.
-const (
- maxUnimportedPackageNames = 5
- unimportedMemberTarget = 100
-)
-
-// selector finds completions for the specified selector expression.
-func (c *completer) selector(ctx context.Context, sel *ast.SelectorExpr) error {
- c.inference.objChain = objChain(c.pkg.GetTypesInfo(), sel.X)
-
- // Is sel a qualified identifier?
- if id, ok := sel.X.(*ast.Ident); ok {
- if pkgName, ok := c.pkg.GetTypesInfo().Uses[id].(*types.PkgName); ok {
- var pkg source.Package
- for _, imp := range c.pkg.Imports() {
- if imp.PkgPath() == pkgName.Imported().Path() {
- pkg = imp
- }
- }
- // If the package is not imported, try searching for unimported
- // completions.
- if pkg == nil && c.opts.unimported {
- if err := c.unimportedMembers(ctx, id); err != nil {
- return err
- }
- }
- c.packageMembers(pkgName.Imported(), stdScore, nil, func(cand candidate) {
- c.deepState.enqueue(cand)
- })
- return nil
- }
- }
-
- // Invariant: sel is a true selector.
- tv, ok := c.pkg.GetTypesInfo().Types[sel.X]
- if ok {
- c.methodsAndFields(tv.Type, tv.Addressable(), nil, func(cand candidate) {
- c.deepState.enqueue(cand)
- })
-
- c.addPostfixSnippetCandidates(ctx, sel)
-
- return nil
- }
-
- // Try unimported packages.
- if id, ok := sel.X.(*ast.Ident); ok && c.opts.unimported {
- if err := c.unimportedMembers(ctx, id); err != nil {
- return err
- }
- }
- return nil
-}
-
-func (c *completer) unimportedMembers(ctx context.Context, id *ast.Ident) error {
- // Try loaded packages first. They're relevant, fast, and fully typed.
- known, err := c.snapshot.CachedImportPaths(ctx)
- if err != nil {
- return err
- }
-
- var paths []string
- for path, pkg := range known {
- if pkg.GetTypes().Name() != id.Name {
- continue
- }
- paths = append(paths, path)
- }
-
- var relevances map[string]float64
- if len(paths) != 0 {
- if err := c.snapshot.RunProcessEnvFunc(ctx, func(opts *imports.Options) error {
- var err error
- relevances, err = imports.ScoreImportPaths(ctx, opts.Env, paths)
- return err
- }); err != nil {
- return err
- }
- }
- sort.Slice(paths, func(i, j int) bool {
- return relevances[paths[i]] > relevances[paths[j]]
- })
-
- for _, path := range paths {
- pkg := known[path]
- if pkg.GetTypes().Name() != id.Name {
- continue
- }
- imp := &importInfo{
- importPath: path,
- pkg: pkg,
- }
- if imports.ImportPathToAssumedName(path) != pkg.GetTypes().Name() {
- imp.name = pkg.GetTypes().Name()
- }
- c.packageMembers(pkg.GetTypes(), unimportedScore(relevances[path]), imp, func(cand candidate) {
- c.deepState.enqueue(cand)
- })
- if len(c.items) >= unimportedMemberTarget {
- return nil
- }
- }
-
- ctx, cancel := context.WithCancel(ctx)
-
- var mu sync.Mutex
- add := func(pkgExport imports.PackageExport) {
- mu.Lock()
- defer mu.Unlock()
- if _, ok := known[pkgExport.Fix.StmtInfo.ImportPath]; ok {
- return // We got this one above.
- }
-
- // Continue with untyped proposals.
- pkg := types.NewPackage(pkgExport.Fix.StmtInfo.ImportPath, pkgExport.Fix.IdentName)
- for _, export := range pkgExport.Exports {
- score := unimportedScore(pkgExport.Fix.Relevance)
- c.deepState.enqueue(candidate{
- obj: types.NewVar(0, pkg, export, nil),
- score: score,
- imp: &importInfo{
- importPath: pkgExport.Fix.StmtInfo.ImportPath,
- name: pkgExport.Fix.StmtInfo.Name,
- },
- })
- }
- if len(c.items) >= unimportedMemberTarget {
- cancel()
- }
- }
-
- c.completionCallbacks = append(c.completionCallbacks, func(opts *imports.Options) error {
- defer cancel()
- return imports.GetPackageExports(ctx, add, id.Name, c.filename, c.pkg.GetTypes().Name(), opts.Env)
- })
- return nil
-}
-
-// unimportedScore returns a score for an unimported package that is generally
-// lower than other candidates.
-func unimportedScore(relevance float64) float64 {
- return (stdScore + .1*relevance) / 2
-}
-
-func (c *completer) packageMembers(pkg *types.Package, score float64, imp *importInfo, cb func(candidate)) {
- scope := pkg.Scope()
- for _, name := range scope.Names() {
- obj := scope.Lookup(name)
- cb(candidate{
- obj: obj,
- score: score,
- imp: imp,
- addressable: isVar(obj),
- })
- }
-}
-
-func (c *completer) methodsAndFields(typ types.Type, addressable bool, imp *importInfo, cb func(candidate)) {
- mset := c.methodSetCache[methodSetKey{typ, addressable}]
- if mset == nil {
- if addressable && !types.IsInterface(typ) && !isPointer(typ) {
- // Add methods of *T, which includes methods with receiver T.
- mset = types.NewMethodSet(types.NewPointer(typ))
- } else {
- // Add methods of T.
- mset = types.NewMethodSet(typ)
- }
- c.methodSetCache[methodSetKey{typ, addressable}] = mset
- }
-
- if typ.String() == "*testing.F" && addressable {
- // is that a sufficient test? (or is more care needed?)
- if c.fuzz(typ, mset, imp, cb, c.snapshot.FileSet()) {
- return
- }
- }
-
- for i := 0; i < mset.Len(); i++ {
- cb(candidate{
- obj: mset.At(i).Obj(),
- score: stdScore,
- imp: imp,
- addressable: addressable || isPointer(typ),
- })
- }
-
- // Add fields of T.
- eachField(typ, func(v *types.Var) {
- cb(candidate{
- obj: v,
- score: stdScore - 0.01,
- imp: imp,
- addressable: addressable || isPointer(typ),
- })
- })
-}
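-
-// exampleMethodSet is a self-contained sketch of the *T vs T method-set
-// distinction above, assuming go/parser is available in addition to go/types:
-// a method with a pointer receiver appears only in the method set of *T.
-// The source snippet and names are illustrative; errors are elided for brevity.
-func exampleMethodSet() []string {
-	const src = `package p
-type T struct{}
-func (T) Value()    {}
-func (*T) Pointer() {}`
-	fset := token.NewFileSet()
-	f, _ := parser.ParseFile(fset, "p.go", src, 0)
-	pkg, _ := new(types.Config).Check("p", fset, []*ast.File{f}, nil)
-	T := pkg.Scope().Lookup("T").Type()
-	mset := types.NewMethodSet(types.NewPointer(T)) // contains both Value and Pointer
-	var names []string
-	for i := 0; i < mset.Len(); i++ {
-		names = append(names, mset.At(i).Obj().Name())
-	}
-	return names
-}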
-
-// lexical finds completions in the lexical environment.
-func (c *completer) lexical(ctx context.Context) error {
- scopes := source.CollectScopes(c.pkg.GetTypesInfo(), c.path, c.pos)
- scopes = append(scopes, c.pkg.GetTypes().Scope(), types.Universe)
-
- var (
- builtinIota = types.Universe.Lookup("iota")
- builtinNil = types.Universe.Lookup("nil")
- // comparable is an interface that exists on the dev.typeparams Go branch.
- // Filter it out from completion results to stabilize tests.
- // TODO(rFindley) update (or remove) our handling for comparable once the
- // type parameter API has stabilized.
- builtinAny = types.Universe.Lookup("any")
- builtinComparable = types.Universe.Lookup("comparable")
- )
-
- // Track seen variables to avoid showing completions for shadowed variables.
- // This works since we look at scopes from innermost to outermost.
- seen := make(map[string]struct{})
-
- // Process scopes innermost first.
- for i, scope := range scopes {
- if scope == nil {
- continue
- }
-
- Names:
- for _, name := range scope.Names() {
- declScope, obj := scope.LookupParent(name, c.pos)
- if declScope != scope {
- continue // Name was declared in some enclosing scope, or not at all.
- }
- if obj == builtinComparable || obj == builtinAny {
- continue
- }
-
- // If obj's type is invalid, find the AST node that defines the lexical block
- // containing the declaration of obj. Don't resolve types for packages.
- if !isPkgName(obj) && !typeIsValid(obj.Type()) {
- // Match the scope to its ast.Node. If the scope is the package scope,
- // use the *ast.File as the starting node.
- var node ast.Node
- if i < len(c.path) {
- node = c.path[i]
- } else if i == len(c.path) { // use the *ast.File for package scope
- node = c.path[i-1]
- }
- if node != nil {
- if resolved := resolveInvalid(c.snapshot.FileSet(), obj, node, c.pkg.GetTypesInfo()); resolved != nil {
- obj = resolved
- }
- }
- }
-
- // Don't use LHS of decl in RHS.
- for _, ident := range enclosingDeclLHS(c.path) {
- if obj.Pos() == ident.Pos() {
- continue Names
- }
- }
-
- // Don't suggest "iota" outside of const decls.
- if obj == builtinIota && !c.inConstDecl() {
- continue
- }
-
- // Rank outer scopes lower than inner.
- score := stdScore * math.Pow(.99, float64(i))
-
-			// Downrank "nil" a bit so it is ranked below more interesting candidates.
- if obj == builtinNil {
- score /= 2
- }
-
- // If we haven't already added a candidate for an object with this name.
- if _, ok := seen[obj.Name()]; !ok {
- seen[obj.Name()] = struct{}{}
- c.deepState.enqueue(candidate{
- obj: obj,
- score: score,
- addressable: isVar(obj),
- })
- }
- }
- }
-
- if c.inference.objType != nil {
- if named, _ := source.Deref(c.inference.objType).(*types.Named); named != nil {
- // If we expected a named type, check the type's package for
- // completion items. This is useful when the current file hasn't
- // imported the type's package yet.
-
- if named.Obj() != nil && named.Obj().Pkg() != nil {
- pkg := named.Obj().Pkg()
-
- // Make sure the package name isn't already in use by another
- // object, and that this file doesn't import the package yet.
- if _, ok := seen[pkg.Name()]; !ok && pkg != c.pkg.GetTypes() && !alreadyImports(c.file, pkg.Path()) {
- seen[pkg.Name()] = struct{}{}
- obj := types.NewPkgName(0, nil, pkg.Name(), pkg)
- imp := &importInfo{
- importPath: pkg.Path(),
- }
- if imports.ImportPathToAssumedName(pkg.Path()) != pkg.Name() {
- imp.name = pkg.Name()
- }
- c.deepState.enqueue(candidate{
- obj: obj,
- score: stdScore,
- imp: imp,
- })
- }
- }
- }
- }
-
- if c.opts.unimported {
- if err := c.unimportedPackages(ctx, seen); err != nil {
- return err
- }
- }
-
- if c.inference.typeName.isTypeParam {
- // If we are completing a type param, offer each structural type.
- // This ensures we suggest "[]int" and "[]float64" for a constraint
- // with type union "[]int | []float64".
- if t, _ := c.inference.objType.(*types.Interface); t != nil {
- terms, _ := typeparams.InterfaceTermSet(t)
- for _, term := range terms {
- c.injectType(ctx, term.Type())
- }
- }
- } else {
- c.injectType(ctx, c.inference.objType)
- }
-
- // Add keyword completion items appropriate in the current context.
- c.addKeywordCompletions()
-
- return nil
-}
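-
-// lexicalScore is a tiny numeric sketch of the scope ranking above: outer
-// scopes decay geometrically, so with a base score of 1.0 an object three
-// scopes out scores 1.0 * 0.99^3 ≈ 0.970. The helper name is illustrative.
-func lexicalScore(base float64, scopeDepth int) float64 {
-	return base * math.Pow(.99, float64(scopeDepth))
-}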
-
-// injectType manufactures candidates based on the given type.
-// For example, if the type is "[]int", this method makes sure you get
-// candidates "[]int{}" and "[]int" (the latter applies when
-// completing a type name).
-func (c *completer) injectType(ctx context.Context, t types.Type) {
- if t == nil {
- return
- }
-
- t = source.Deref(t)
-
- // If we have an expected type and it is _not_ a named type,
- // handle it specially. Non-named types like "[]int" will never be
- // considered via a lexical search, so we need to directly inject
- // them.
- if _, named := t.(*types.Named); !named {
- // If our expected type is "[]int", this will add a literal
- // candidate of "[]int{}".
- c.literal(ctx, t, nil)
-
- if _, isBasic := t.(*types.Basic); !isBasic {
- // If we expect a non-basic type name (e.g. "[]int"), hack up
- // a named type whose name is literally "[]int". This allows
- // us to reuse our object based completion machinery.
- fakeNamedType := candidate{
- obj: types.NewTypeName(token.NoPos, nil, types.TypeString(t, c.qf), t),
- score: stdScore,
- }
- // Make sure the type name matches before considering
- // candidate. This cuts down on useless candidates.
- if c.matchingTypeName(&fakeNamedType) {
- c.deepState.enqueue(fakeNamedType)
- }
- }
- }
-}
-
-func (c *completer) unimportedPackages(ctx context.Context, seen map[string]struct{}) error {
- var prefix string
- if c.surrounding != nil {
- prefix = c.surrounding.Prefix()
- }
-
- // Don't suggest unimported packages if we have absolutely nothing
- // to go on.
- if prefix == "" {
- return nil
- }
-
- count := 0
-
- known, err := c.snapshot.CachedImportPaths(ctx)
- if err != nil {
- return err
- }
- var paths []string
- for path, pkg := range known {
- if !strings.HasPrefix(pkg.GetTypes().Name(), prefix) {
- continue
- }
- paths = append(paths, path)
- }
-
- var relevances map[string]float64
- if len(paths) != 0 {
- if err := c.snapshot.RunProcessEnvFunc(ctx, func(opts *imports.Options) error {
- var err error
- relevances, err = imports.ScoreImportPaths(ctx, opts.Env, paths)
- return err
- }); err != nil {
- return err
- }
- }
-
- sort.Slice(paths, func(i, j int) bool {
- if relevances[paths[i]] != relevances[paths[j]] {
- return relevances[paths[i]] > relevances[paths[j]]
- }
-
- // Fall back to lexical sort to keep truncated set of candidates
- // in a consistent order.
- return paths[i] < paths[j]
- })
-
- for _, path := range paths {
- pkg := known[path]
- if _, ok := seen[pkg.GetTypes().Name()]; ok {
- continue
- }
- imp := &importInfo{
- importPath: path,
- pkg: pkg,
- }
- if imports.ImportPathToAssumedName(path) != pkg.GetTypes().Name() {
- imp.name = pkg.GetTypes().Name()
- }
- if count >= maxUnimportedPackageNames {
- return nil
- }
- c.deepState.enqueue(candidate{
- // Pass an empty *types.Package to disable deep completions.
- obj: types.NewPkgName(0, nil, pkg.GetTypes().Name(), types.NewPackage(path, pkg.Name())),
- score: unimportedScore(relevances[path]),
- imp: imp,
- })
- count++
- }
-
- ctx, cancel := context.WithCancel(ctx)
-
- var mu sync.Mutex
- add := func(pkg imports.ImportFix) {
- mu.Lock()
- defer mu.Unlock()
- if _, ok := seen[pkg.IdentName]; ok {
- return
- }
- if _, ok := relevances[pkg.StmtInfo.ImportPath]; ok {
- return
- }
-
- if count >= maxUnimportedPackageNames {
- cancel()
- return
- }
-
- // Do not add the unimported packages to seen, since we can have
- // multiple packages of the same name as completion suggestions, since
- // only one will be chosen.
- obj := types.NewPkgName(0, nil, pkg.IdentName, types.NewPackage(pkg.StmtInfo.ImportPath, pkg.IdentName))
- c.deepState.enqueue(candidate{
- obj: obj,
- score: unimportedScore(pkg.Relevance),
- imp: &importInfo{
- importPath: pkg.StmtInfo.ImportPath,
- name: pkg.StmtInfo.Name,
- },
- })
- count++
- }
- c.completionCallbacks = append(c.completionCallbacks, func(opts *imports.Options) error {
- defer cancel()
- return imports.GetAllCandidates(ctx, add, prefix, c.filename, c.pkg.GetTypes().Name(), opts.Env)
- })
- return nil
-}
-
-// alreadyImports reports whether f has an import with the specified path.
-func alreadyImports(f *ast.File, path string) bool {
- for _, s := range f.Imports {
- if source.ImportPath(s) == path {
- return true
- }
- }
- return false
-}
-
-func (c *completer) inConstDecl() bool {
- for _, n := range c.path {
- if decl, ok := n.(*ast.GenDecl); ok && decl.Tok == token.CONST {
- return true
- }
- }
- return false
-}
-
-// structLiteralFieldName finds completions for struct field names inside a struct literal.
-func (c *completer) structLiteralFieldName(ctx context.Context) error {
- clInfo := c.enclosingCompositeLiteral
-
- // Mark fields of the composite literal that have already been set,
- // except for the current field.
- addedFields := make(map[*types.Var]bool)
- for _, el := range clInfo.cl.Elts {
- if kvExpr, ok := el.(*ast.KeyValueExpr); ok {
- if clInfo.kv == kvExpr {
- continue
- }
-
- if key, ok := kvExpr.Key.(*ast.Ident); ok {
- if used, ok := c.pkg.GetTypesInfo().Uses[key]; ok {
- if usedVar, ok := used.(*types.Var); ok {
- addedFields[usedVar] = true
- }
- }
- }
- }
- }
-
- deltaScore := 0.0001
- switch t := clInfo.clType.(type) {
- case *types.Struct:
- for i := 0; i < t.NumFields(); i++ {
- field := t.Field(i)
- if !addedFields[field] {
- c.deepState.enqueue(candidate{
- obj: field,
- score: highScore - float64(i)*deltaScore,
- })
- }
- }
-
- // Add lexical completions if we aren't certain we are in the key part of a
- // key-value pair.
- if clInfo.maybeInFieldName {
- return c.lexical(ctx)
- }
- default:
- return c.lexical(ctx)
- }
-
- return nil
-}
-
-func (cl *compLitInfo) isStruct() bool {
- _, ok := cl.clType.(*types.Struct)
- return ok
-}
-
-// enclosingCompositeLiteral returns information about the composite literal enclosing the
-// position.
-func enclosingCompositeLiteral(path []ast.Node, pos token.Pos, info *types.Info) *compLitInfo {
- for _, n := range path {
- switch n := n.(type) {
- case *ast.CompositeLit:
- // The enclosing node will be a composite literal if the user has just
- // opened the curly brace (e.g. &x{<>) or the completion request is triggered
- // from an already completed composite literal expression (e.g. &x{foo: 1, <>})
- //
- // The position is not part of the composite literal unless it falls within the
- // curly braces (e.g. "foo.Foo<>Struct{}").
- if !(n.Lbrace < pos && pos <= n.Rbrace) {
- // Keep searching since we may yet be inside a composite literal.
- // For example "Foo{B: Ba<>{}}".
- break
- }
-
- tv, ok := info.Types[n]
- if !ok {
- return nil
- }
-
- clInfo := compLitInfo{
- cl: n,
- clType: source.Deref(tv.Type).Underlying(),
- }
-
- var (
- expr ast.Expr
- hasKeys bool
- )
- for _, el := range n.Elts {
- // Remember the expression that the position falls in, if any.
- if el.Pos() <= pos && pos <= el.End() {
- expr = el
- }
-
- if kv, ok := el.(*ast.KeyValueExpr); ok {
- hasKeys = true
- // If expr == el then we know the position falls in this expression,
- // so also record kv as the enclosing *ast.KeyValueExpr.
- if expr == el {
- clInfo.kv = kv
- break
- }
- }
- }
-
- if clInfo.kv != nil {
- // If in a *ast.KeyValueExpr, we know we are in the key if the position
-				// is to the left of the colon (e.g. "Foo{F<>: V}").
- clInfo.inKey = pos <= clInfo.kv.Colon
- } else if hasKeys {
- // If we aren't in a *ast.KeyValueExpr but the composite literal has
- // other *ast.KeyValueExprs, we must be on the key side of a new
- // *ast.KeyValueExpr (e.g. "Foo{F: V, <>}").
- clInfo.inKey = true
- } else {
- switch clInfo.clType.(type) {
- case *types.Struct:
- if len(n.Elts) == 0 {
- // If the struct literal is empty, next could be a struct field
- // name or an expression (e.g. "Foo{<>}" could become "Foo{F:}"
- // or "Foo{someVar}").
- clInfo.maybeInFieldName = true
- } else if len(n.Elts) == 1 {
- // If there is one expression and the position is in that expression
- // and the expression is an identifier, we may be writing a field
- // name or an expression (e.g. "Foo{F<>}").
- _, clInfo.maybeInFieldName = expr.(*ast.Ident)
- }
- case *types.Map:
- // If we aren't in a *ast.KeyValueExpr we must be adding a new key
- // to the map.
- clInfo.inKey = true
- }
- }
-
- return &clInfo
- default:
- if breaksExpectedTypeInference(n, pos) {
- return nil
- }
- }
- }
-
- return nil
-}
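-
-// enclosingCompositeLitSketch is a minimal sketch of locating the enclosing
-// *ast.CompositeLit for a position using astutil.PathEnclosingInterval, much
-// as the Completion entry point builds its path; the brace check mirrors the
-// logic above and the helper name is illustrative.
-func enclosingCompositeLitSketch(file *ast.File, pos token.Pos) *ast.CompositeLit {
-	path, _ := astutil.PathEnclosingInterval(file, pos, pos)
-	for _, n := range path {
-		if cl, ok := n.(*ast.CompositeLit); ok && cl.Lbrace < pos && pos <= cl.Rbrace {
-			return cl
-		}
-	}
-	return nil
-}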
-
-// enclosingFunction returns the signature and body of the function
-// enclosing the given position.
-func enclosingFunction(path []ast.Node, info *types.Info) *funcInfo {
- for _, node := range path {
- switch t := node.(type) {
- case *ast.FuncDecl:
- if obj, ok := info.Defs[t.Name]; ok {
- return &funcInfo{
- sig: obj.Type().(*types.Signature),
- body: t.Body,
- }
- }
- case *ast.FuncLit:
- if typ, ok := info.Types[t]; ok {
- if sig, _ := typ.Type.(*types.Signature); sig == nil {
- // golang/go#49397: it should not be possible, but we somehow arrived
- // here with a non-signature type, most likely due to AST mangling
- // such that node.Type is not a FuncType.
- return nil
- }
- return &funcInfo{
- sig: typ.Type.(*types.Signature),
- body: t.Body,
- }
- }
- }
- }
- return nil
-}
-
-func (c *completer) expectedCompositeLiteralType() types.Type {
- clInfo := c.enclosingCompositeLiteral
- switch t := clInfo.clType.(type) {
- case *types.Slice:
- if clInfo.inKey {
- return types.Typ[types.UntypedInt]
- }
- return t.Elem()
- case *types.Array:
- if clInfo.inKey {
- return types.Typ[types.UntypedInt]
- }
- return t.Elem()
- case *types.Map:
- if clInfo.inKey {
- return t.Key()
- }
- return t.Elem()
- case *types.Struct:
- // If we are completing a key (i.e. field name), there is no expected type.
- if clInfo.inKey {
- return nil
- }
-
- // If we are in a key-value pair, but not in the key, then we must be on the
- // value side. The expected type of the value will be determined from the key.
- if clInfo.kv != nil {
- if key, ok := clInfo.kv.Key.(*ast.Ident); ok {
- for i := 0; i < t.NumFields(); i++ {
- if field := t.Field(i); field.Name() == key.Name {
- return field.Type()
- }
- }
- }
- } else {
- // If we aren't in a key-value pair and aren't in the key, we must be using
- // implicit field names.
-
- // The order of the literal fields must match the order in the struct definition.
- // Find the element that the position belongs to and suggest that field's type.
- if i := exprAtPos(c.pos, clInfo.cl.Elts); i < t.NumFields() {
- return t.Field(i).Type()
- }
- }
- }
- return nil
-}
-
-// typeMod represents an operator that changes the expected type.
-type typeMod struct {
- mod typeModKind
- arrayLen int64
-}
-
-type typeModKind int
-
-const (
- dereference typeModKind = iota // pointer indirection: "*"
- reference // adds level of pointer: "&" for values, "*" for type names
- chanRead // channel read operator: "<-"
- sliceType // make a slice type: "[]" in "[]int"
- arrayType // make an array type: "[2]" in "[2]int"
- invoke // make a function call: "()" in "foo()"
- takeSlice // take slice of array: "[:]" in "foo[:]"
- takeDotDotDot // turn slice into variadic args: "..." in "foo..."
- index // index into slice/array: "[0]" in "foo[0]"
-)
-
-type objKind int
-
-const (
- kindAny objKind = 0
- kindArray objKind = 1 << iota
- kindSlice
- kindChan
- kindMap
- kindStruct
- kindString
- kindInt
- kindBool
- kindBytes
- kindPtr
- kindFloat
- kindComplex
- kindError
- kindStringer
- kindFunc
-)
-
-// penalizedObj represents an object that should be disfavored as a
-// completion candidate.
-type penalizedObj struct {
- // objChain is the full "chain", e.g. "foo.bar().baz" becomes
- // []types.Object{foo, bar, baz}.
- objChain []types.Object
- // penalty is score penalty in the range (0, 1).
- penalty float64
-}
-
-// candidateInference holds information we have inferred about a type that can be
-// used at the current position.
-type candidateInference struct {
- // objType is the desired type of an object used at the query position.
- objType types.Type
-
- // objKind is a mask of expected kinds of types such as "map", "slice", etc.
- objKind objKind
-
- // variadic is true if we are completing the initial variadic
- // parameter. For example:
- // append([]T{}, <>) // objType=T variadic=true
- // append([]T{}, T{}, <>) // objType=T variadic=false
- variadic bool
-
- // modifiers are prefixes such as "*", "&" or "<-" that influence how
- // a candidate type relates to the expected type.
- modifiers []typeMod
-
- // convertibleTo is a type our candidate type must be convertible to.
- convertibleTo types.Type
-
- // typeName holds information about the expected type name at
- // position, if any.
- typeName typeNameInference
-
- // assignees are the types that would receive a function call's
- // results at the position. For example:
- //
- // foo := 123
- // foo, bar := <>
- //
- // at "<>", the assignees are [int, <invalid>].
- assignees []types.Type
-
- // variadicAssignees is true if we could be completing an inner
- // function call that fills out an outer function call's variadic
- // params. For example:
- //
- // func foo(int, ...string) {}
- //
- // foo(<>) // variadicAssignees=true
- // foo(bar<>) // variadicAssignees=true
- // foo(bar, baz<>) // variadicAssignees=false
- variadicAssignees bool
-
- // penalized holds expressions that should be disfavored as
- // candidates. For example, it tracks expressions already used in a
- // switch statement's other cases. Each expression is tracked using
- // its entire object "chain" allowing differentiation between
- // "a.foo" and "b.foo" when "a" and "b" are the same type.
- penalized []penalizedObj
-
- // objChain contains the chain of objects representing the
- // surrounding *ast.SelectorExpr. For example, if we are completing
- // "foo.bar.ba<>", objChain will contain []types.Object{foo, bar}.
- objChain []types.Object
-}
-
-// typeNameInference holds information about the expected type name at
-// position.
-type typeNameInference struct {
- // wantTypeName is true if we expect the name of a type.
- wantTypeName bool
-
- // modifiers are prefixes such as "*", "&" or "<-" that influence how
- // a candidate type relates to the expected type.
- modifiers []typeMod
-
- // assertableFrom is a type that must be assertable to our candidate type.
- assertableFrom types.Type
-
- // wantComparable is true if we want a comparable type.
- wantComparable bool
-
- // seenTypeSwitchCases tracks types that have already been used by
- // the containing type switch.
- seenTypeSwitchCases []types.Type
-
- // compLitType is true if we are completing a composite literal type
- // name, e.g "foo<>{}".
- compLitType bool
-
- // isTypeParam is true if we are completing a type instantiation parameter
- isTypeParam bool
-}
-
-// expectedCandidate returns information about the expected candidate
-// for an expression at the query position.
-func expectedCandidate(ctx context.Context, c *completer) (inf candidateInference) {
- inf.typeName = expectTypeName(c)
-
- if c.enclosingCompositeLiteral != nil {
- inf.objType = c.expectedCompositeLiteralType()
- }
-
-Nodes:
- for i, node := range c.path {
- switch node := node.(type) {
- case *ast.BinaryExpr:
- // Determine if query position comes from left or right of op.
- e := node.X
- if c.pos < node.OpPos {
- e = node.Y
- }
- if tv, ok := c.pkg.GetTypesInfo().Types[e]; ok {
- switch node.Op {
- case token.LAND, token.LOR:
- // Don't infer "bool" type for "&&" or "||". Often you want
- // to compose a boolean expression from non-boolean
- // candidates.
- default:
- inf.objType = tv.Type
- }
- break Nodes
- }
- case *ast.AssignStmt:
- // Only rank completions if you are on the right side of the token.
- if c.pos > node.TokPos {
- i := exprAtPos(c.pos, node.Rhs)
- if i >= len(node.Lhs) {
- i = len(node.Lhs) - 1
- }
- if tv, ok := c.pkg.GetTypesInfo().Types[node.Lhs[i]]; ok {
- inf.objType = tv.Type
- }
-
- // If we have a single expression on the RHS, record the LHS
- // assignees so we can favor multi-return function calls with
- // matching result values.
- if len(node.Rhs) <= 1 {
- for _, lhs := range node.Lhs {
- inf.assignees = append(inf.assignees, c.pkg.GetTypesInfo().TypeOf(lhs))
- }
- } else {
-					// Otherwise, record our single assignee, even if its type is
- // not available. We use this info to downrank functions
- // with the wrong number of result values.
- inf.assignees = append(inf.assignees, c.pkg.GetTypesInfo().TypeOf(node.Lhs[i]))
- }
- }
- return inf
- case *ast.ValueSpec:
- if node.Type != nil && c.pos > node.Type.End() {
- inf.objType = c.pkg.GetTypesInfo().TypeOf(node.Type)
- }
- return inf
- case *ast.CallExpr:
- // Only consider CallExpr args if position falls between parens.
- if node.Lparen < c.pos && c.pos <= node.Rparen {
- // For type conversions like "int64(foo)" we can only infer our
- // desired type is convertible to int64.
- if typ := typeConversion(node, c.pkg.GetTypesInfo()); typ != nil {
- inf.convertibleTo = typ
- break Nodes
- }
-
- if tv, ok := c.pkg.GetTypesInfo().Types[node.Fun]; ok {
- if sig, ok := tv.Type.(*types.Signature); ok {
- numParams := sig.Params().Len()
- if numParams == 0 {
- return inf
- }
-
- exprIdx := exprAtPos(c.pos, node.Args)
-
- // If we have one or zero arg expressions, we may be
- // completing to a function call that returns multiple
- // values, in turn getting passed in to the surrounding
- // call. Record the assignees so we can favor function
- // calls that return matching values.
- if len(node.Args) <= 1 && exprIdx == 0 {
- for i := 0; i < sig.Params().Len(); i++ {
- inf.assignees = append(inf.assignees, sig.Params().At(i).Type())
- }
-
- // Record that we may be completing into variadic parameters.
- inf.variadicAssignees = sig.Variadic()
- }
-
- // Make sure not to run past the end of expected parameters.
- if exprIdx >= numParams {
- inf.objType = sig.Params().At(numParams - 1).Type()
- } else {
- inf.objType = sig.Params().At(exprIdx).Type()
- }
-
- if sig.Variadic() && exprIdx >= (numParams-1) {
- // If we are completing a variadic param, deslice the variadic type.
- inf.objType = deslice(inf.objType)
- // Record whether we are completing the initial variadic param.
- inf.variadic = exprIdx == numParams-1 && len(node.Args) <= numParams
-
- // Check if we can infer object kind from printf verb.
- inf.objKind |= printfArgKind(c.pkg.GetTypesInfo(), node, exprIdx)
- }
- }
- }
-
- if funIdent, ok := node.Fun.(*ast.Ident); ok {
- obj := c.pkg.GetTypesInfo().ObjectOf(funIdent)
-
- if obj != nil && obj.Parent() == types.Universe {
- // Defer call to builtinArgType so we can provide it the
- // inferred type from its parent node.
- defer func() {
- inf = c.builtinArgType(obj, node, inf)
- inf.objKind = c.builtinArgKind(ctx, obj, node)
- }()
-
- // The expected type of builtin arguments like append() is
- // the expected type of the builtin call itself. For
- // example:
- //
- // var foo []int = append(<>)
- //
- // To find the expected type at <> we "skip" the append()
- // node and get the expected type one level up, which is
- // []int.
- continue Nodes
- }
- }
-
- return inf
- }
- case *ast.ReturnStmt:
- if c.enclosingFunc != nil {
- sig := c.enclosingFunc.sig
- // Find signature result that corresponds to our return statement.
- if resultIdx := exprAtPos(c.pos, node.Results); resultIdx < len(node.Results) {
- if resultIdx < sig.Results().Len() {
- inf.objType = sig.Results().At(resultIdx).Type()
- }
- }
- }
- return inf
- case *ast.CaseClause:
- if swtch, ok := findSwitchStmt(c.path[i+1:], c.pos, node).(*ast.SwitchStmt); ok {
- if tv, ok := c.pkg.GetTypesInfo().Types[swtch.Tag]; ok {
- inf.objType = tv.Type
-
- // Record which objects have already been used in the case
- // statements so we don't suggest them again.
- for _, cc := range swtch.Body.List {
- for _, caseExpr := range cc.(*ast.CaseClause).List {
- // Don't record the expression we are currently completing.
- if caseExpr.Pos() < c.pos && c.pos <= caseExpr.End() {
- continue
- }
-
- if objs := objChain(c.pkg.GetTypesInfo(), caseExpr); len(objs) > 0 {
- inf.penalized = append(inf.penalized, penalizedObj{objChain: objs, penalty: 0.1})
- }
- }
- }
- }
- }
- return inf
- case *ast.SliceExpr:
- // Make sure position falls within the brackets (e.g. "foo[a:<>]").
- if node.Lbrack < c.pos && c.pos <= node.Rbrack {
- inf.objType = types.Typ[types.UntypedInt]
- }
- return inf
- case *ast.IndexExpr:
- // Make sure position falls within the brackets (e.g. "foo[<>]").
- if node.Lbrack < c.pos && c.pos <= node.Rbrack {
- if tv, ok := c.pkg.GetTypesInfo().Types[node.X]; ok {
- switch t := tv.Type.Underlying().(type) {
- case *types.Map:
- inf.objType = t.Key()
- case *types.Slice, *types.Array:
- inf.objType = types.Typ[types.UntypedInt]
- }
-
- if ct := expectedConstraint(tv.Type, 0); ct != nil {
- inf.objType = ct
- inf.typeName.wantTypeName = true
- inf.typeName.isTypeParam = true
- }
- }
- }
- return inf
- case *typeparams.IndexListExpr:
- if node.Lbrack < c.pos && c.pos <= node.Rbrack {
- if tv, ok := c.pkg.GetTypesInfo().Types[node.X]; ok {
- if ct := expectedConstraint(tv.Type, exprAtPos(c.pos, node.Indices)); ct != nil {
- inf.objType = ct
- inf.typeName.wantTypeName = true
- inf.typeName.isTypeParam = true
- }
- }
- }
- return inf
- case *ast.SendStmt:
- // Make sure we are on right side of arrow (e.g. "foo <- <>").
- if c.pos > node.Arrow+1 {
- if tv, ok := c.pkg.GetTypesInfo().Types[node.Chan]; ok {
- if ch, ok := tv.Type.Underlying().(*types.Chan); ok {
- inf.objType = ch.Elem()
- }
- }
- }
- return inf
- case *ast.RangeStmt:
- if source.NodeContains(node.X, c.pos) {
- inf.objKind |= kindSlice | kindArray | kindMap | kindString
- if node.Value == nil {
- inf.objKind |= kindChan
- }
- }
- return inf
- case *ast.StarExpr:
- inf.modifiers = append(inf.modifiers, typeMod{mod: dereference})
- case *ast.UnaryExpr:
- switch node.Op {
- case token.AND:
- inf.modifiers = append(inf.modifiers, typeMod{mod: reference})
- case token.ARROW:
- inf.modifiers = append(inf.modifiers, typeMod{mod: chanRead})
- }
- case *ast.DeferStmt, *ast.GoStmt:
- inf.objKind |= kindFunc
- return inf
- default:
- if breaksExpectedTypeInference(node, c.pos) {
- return inf
- }
- }
- }
-
- return inf
-}
-
-func expectedConstraint(t types.Type, idx int) types.Type {
- var tp *typeparams.TypeParamList
- if named, _ := t.(*types.Named); named != nil {
- tp = typeparams.ForNamed(named)
- } else if sig, _ := t.Underlying().(*types.Signature); sig != nil {
- tp = typeparams.ForSignature(sig)
- }
- if tp == nil || idx >= tp.Len() {
- return nil
- }
- return tp.At(idx).Constraint()
-}
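-
-// constraintAt sketches the same lookup against the Go 1.18+ go/types API
-// (the internal typeparams shim above exists to support older toolchains):
-// it returns the constraint of the idx-th type parameter of a named or
-// function type. The helper name is illustrative.
-func constraintAt(t types.Type, idx int) types.Type {
-	var tps *types.TypeParamList
-	if named, ok := t.(*types.Named); ok {
-		tps = named.TypeParams()
-	} else if sig, ok := t.Underlying().(*types.Signature); ok {
-		tps = sig.TypeParams()
-	}
-	if tps == nil || idx >= tps.Len() {
-		return nil
-	}
-	return tps.At(idx).Constraint()
-}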
-
-// objChain decomposes e into a chain of objects if possible. For
-// example, "foo.bar().baz" will yield []types.Object{foo, bar, baz}.
-// If any part can't be turned into an object, return nil.
-func objChain(info *types.Info, e ast.Expr) []types.Object {
- var objs []types.Object
-
- for e != nil {
- switch n := e.(type) {
- case *ast.Ident:
- obj := info.ObjectOf(n)
- if obj == nil {
- return nil
- }
- objs = append(objs, obj)
- e = nil
- case *ast.SelectorExpr:
- obj := info.ObjectOf(n.Sel)
- if obj == nil {
- return nil
- }
- objs = append(objs, obj)
- e = n.X
- case *ast.CallExpr:
- if len(n.Args) > 0 {
- return nil
- }
- e = n.Fun
- default:
- return nil
- }
- }
-
- // Reverse order so the layout matches the syntactic order.
- for i := 0; i < len(objs)/2; i++ {
- objs[i], objs[len(objs)-1-i] = objs[len(objs)-1-i], objs[i]
- }
-
- return objs
-}
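-
-// nameChain is a simplified, types-free sketch of the same decomposition:
-// it turns "foo.bar().baz" into []string{"foo", "bar", "baz"}, or nil if any
-// part is not an identifier, selector, or no-argument call. The helper name
-// is illustrative.
-func nameChain(e ast.Expr) []string {
-	var names []string
-	for e != nil {
-		switch n := e.(type) {
-		case *ast.Ident:
-			names = append(names, n.Name)
-			e = nil
-		case *ast.SelectorExpr:
-			names = append(names, n.Sel.Name)
-			e = n.X
-		case *ast.CallExpr:
-			if len(n.Args) > 0 {
-				return nil
-			}
-			e = n.Fun
-		default:
-			return nil
-		}
-	}
-	// Reverse so the result matches the syntactic order.
-	for i, j := 0, len(names)-1; i < j; i, j = i+1, j-1 {
-		names[i], names[j] = names[j], names[i]
-	}
-	return names
-}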
-
-// applyTypeModifiers applies the list of type modifiers to a type.
-// It returns nil if the modifiers could not be applied.
-func (ci candidateInference) applyTypeModifiers(typ types.Type, addressable bool) types.Type {
- for _, mod := range ci.modifiers {
- switch mod.mod {
- case dereference:
- // For every "*" indirection operator, remove a pointer layer
- // from candidate type.
- if ptr, ok := typ.Underlying().(*types.Pointer); ok {
- typ = ptr.Elem()
- } else {
- return nil
- }
- case reference:
- // For every "&" address operator, add another pointer layer to
- // candidate type, if the candidate is addressable.
- if addressable {
- typ = types.NewPointer(typ)
- } else {
- return nil
- }
- case chanRead:
- // For every "<-" operator, remove a layer of channelness.
- if ch, ok := typ.(*types.Chan); ok {
- typ = ch.Elem()
- } else {
- return nil
- }
- }
- }
-
- return typ
-}
-
-// applyTypeNameModifiers applies the list of type modifiers to a type name.
-func (ci candidateInference) applyTypeNameModifiers(typ types.Type) types.Type {
- for _, mod := range ci.typeName.modifiers {
- switch mod.mod {
- case reference:
- typ = types.NewPointer(typ)
- case arrayType:
- typ = types.NewArray(typ, mod.arrayLen)
- case sliceType:
- typ = types.NewSlice(typ)
- }
- }
- return typ
-}
-
-// matchesVariadic returns true if we are completing a variadic
-// parameter and candType is a compatible slice type.
-func (ci candidateInference) matchesVariadic(candType types.Type) bool {
- return ci.variadic && ci.objType != nil && types.AssignableTo(candType, types.NewSlice(ci.objType))
-}
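-
-// variadicExample is a worked example of the check above, using only
-// go/types: when completing "append([]string{}, <>)", objType is string, and
-// a candidate of type []string matches because it is assignable to []string.
-func variadicExample() bool {
-	objType := types.Typ[types.String]
-	cand := types.NewSlice(types.Typ[types.String])
-	return types.AssignableTo(cand, types.NewSlice(objType)) // true
-}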
-
-// findSwitchStmt returns an *ast.CaseClause's corresponding *ast.SwitchStmt or
-// *ast.TypeSwitchStmt. path should start from the case clause's first ancestor.
-func findSwitchStmt(path []ast.Node, pos token.Pos, c *ast.CaseClause) ast.Stmt {
- // Make sure position falls within a "case <>:" clause.
- if exprAtPos(pos, c.List) >= len(c.List) {
- return nil
- }
- // A case clause is always nested within a block statement in a switch statement.
- if len(path) < 2 {
- return nil
- }
- if _, ok := path[0].(*ast.BlockStmt); !ok {
- return nil
- }
- switch s := path[1].(type) {
- case *ast.SwitchStmt:
- return s
- case *ast.TypeSwitchStmt:
- return s
- default:
- return nil
- }
-}
-
-// breaksExpectedTypeInference reports if an expression node's type is unrelated
-// to its child expression node types. For example, "Foo{Bar: x.Baz(<>)}" should
-// expect a function argument, not a composite literal value.
-func breaksExpectedTypeInference(n ast.Node, pos token.Pos) bool {
- switch n := n.(type) {
- case *ast.CompositeLit:
- // Doesn't break inference if pos is in type name.
- // For example: "Foo<>{Bar: 123}"
- return !source.NodeContains(n.Type, pos)
- case *ast.CallExpr:
- // Doesn't break inference if pos is in func name.
- // For example: "Foo<>(123)"
- return !source.NodeContains(n.Fun, pos)
- case *ast.FuncLit, *ast.IndexExpr, *ast.SliceExpr:
- return true
- default:
- return false
- }
-}
-
-// expectTypeName returns information about the expected type name at position.
-func expectTypeName(c *completer) typeNameInference {
- var inf typeNameInference
-
-Nodes:
- for i, p := range c.path {
- switch n := p.(type) {
- case *ast.FieldList:
- // Expect a type name if pos is in a FieldList. This applies to
- // FuncType params/results, FuncDecl receiver, StructType, and
- // InterfaceType. We don't need to worry about the field name
- // because completion bails out early if pos is in an *ast.Ident
- // that defines an object.
- inf.wantTypeName = true
- break Nodes
- case *ast.CaseClause:
- // Expect type names in type switch case clauses.
- if swtch, ok := findSwitchStmt(c.path[i+1:], c.pos, n).(*ast.TypeSwitchStmt); ok {
- // The case clause types must be assertable from the type switch parameter.
- ast.Inspect(swtch.Assign, func(n ast.Node) bool {
- if ta, ok := n.(*ast.TypeAssertExpr); ok {
- inf.assertableFrom = c.pkg.GetTypesInfo().TypeOf(ta.X)
- return false
- }
- return true
- })
- inf.wantTypeName = true
-
- // Track the types that have already been used in this
- // switch's case statements so we don't recommend them.
- for _, e := range swtch.Body.List {
- for _, typeExpr := range e.(*ast.CaseClause).List {
- // Skip if type expression contains pos. We don't want to
- // count it as already used if the user is completing it.
- if typeExpr.Pos() < c.pos && c.pos <= typeExpr.End() {
- continue
- }
-
- if t := c.pkg.GetTypesInfo().TypeOf(typeExpr); t != nil {
- inf.seenTypeSwitchCases = append(inf.seenTypeSwitchCases, t)
- }
- }
- }
-
- break Nodes
- }
- return typeNameInference{}
- case *ast.TypeAssertExpr:
- // Expect type names in type assert expressions.
- if n.Lparen < c.pos && c.pos <= n.Rparen {
- // The type in parens must be assertable from the expression type.
- inf.assertableFrom = c.pkg.GetTypesInfo().TypeOf(n.X)
- inf.wantTypeName = true
- break Nodes
- }
- return typeNameInference{}
- case *ast.StarExpr:
- inf.modifiers = append(inf.modifiers, typeMod{mod: reference})
- case *ast.CompositeLit:
- // We want a type name if position is in the "Type" part of a
- // composite literal (e.g. "Foo<>{}").
- if n.Type != nil && n.Type.Pos() <= c.pos && c.pos <= n.Type.End() {
- inf.wantTypeName = true
- inf.compLitType = true
-
- if i < len(c.path)-1 {
- // Track preceding "&" operator. Technically it applies to
-     // the composite literal and not the type name, but it
- // affects our type completion nonetheless.
- if u, ok := c.path[i+1].(*ast.UnaryExpr); ok && u.Op == token.AND {
- inf.modifiers = append(inf.modifiers, typeMod{mod: reference})
- }
- }
- }
- break Nodes
- case *ast.ArrayType:
- // If we are inside the "Elt" part of an array type, we want a type name.
- if n.Elt.Pos() <= c.pos && c.pos <= n.Elt.End() {
- inf.wantTypeName = true
- if n.Len == nil {
- // No "Len" expression means a slice type.
- inf.modifiers = append(inf.modifiers, typeMod{mod: sliceType})
- } else {
- // Try to get the array type using the constant value of "Len".
- tv, ok := c.pkg.GetTypesInfo().Types[n.Len]
- if ok && tv.Value != nil && tv.Value.Kind() == constant.Int {
- if arrayLen, ok := constant.Int64Val(tv.Value); ok {
- inf.modifiers = append(inf.modifiers, typeMod{mod: arrayType, arrayLen: arrayLen})
- }
- }
- }
-
- // ArrayTypes can be nested, so keep going if our parent is an
- // ArrayType.
- if i < len(c.path)-1 {
- if _, ok := c.path[i+1].(*ast.ArrayType); ok {
- continue Nodes
- }
- }
-
- break Nodes
- }
- case *ast.MapType:
- inf.wantTypeName = true
- if n.Key != nil {
- inf.wantComparable = source.NodeContains(n.Key, c.pos)
- } else {
- // If the key is empty, assume we are completing the key if
- // pos is directly after the "map[".
- inf.wantComparable = c.pos == n.Pos()+token.Pos(len("map["))
- }
- break Nodes
- case *ast.ValueSpec:
- inf.wantTypeName = source.NodeContains(n.Type, c.pos)
- break Nodes
- case *ast.TypeSpec:
- inf.wantTypeName = source.NodeContains(n.Type, c.pos)
- default:
- if breaksExpectedTypeInference(p, c.pos) {
- return typeNameInference{}
- }
- }
- }
-
- return inf
-}
-
-func (c *completer) fakeObj(T types.Type) *types.Var {
- return types.NewVar(token.NoPos, c.pkg.GetTypes(), "", T)
-}
-
-// derivableTypes iterates over the types that can be derived from t. For
-// example, from "foo" we might derive "&foo" and "foo()".
-func derivableTypes(t types.Type, addressable bool, f func(t types.Type, addressable bool, mod typeModKind) bool) bool {
- switch t := t.Underlying().(type) {
- case *types.Signature:
- // If t is a func type with a single result, offer the result type.
- if t.Results().Len() == 1 && f(t.Results().At(0).Type(), false, invoke) {
- return true
- }
- case *types.Array:
- if f(t.Elem(), true, index) {
- return true
- }
- // Try converting array to slice.
- if f(types.NewSlice(t.Elem()), false, takeSlice) {
- return true
- }
- case *types.Pointer:
- if f(t.Elem(), false, dereference) {
- return true
- }
- case *types.Slice:
- if f(t.Elem(), true, index) {
- return true
- }
- case *types.Map:
- if f(t.Elem(), false, index) {
- return true
- }
- case *types.Chan:
- if f(t.Elem(), false, chanRead) {
- return true
- }
- }
-
- // Check if t is addressable and a pointer to t matches our type inference.
- if addressable && f(types.NewPointer(t), false, reference) {
- return true
- }
-
- return false
-}
-
-// anyCandType reports whether f returns true for any candidate type
-// derivable from c. It searches up to three levels of type
-// modification. For example, given "foo" we could discover "***foo"
-// or "*foo()".
-func (c *candidate) anyCandType(f func(t types.Type, addressable bool) bool) bool {
- if c.obj == nil || c.obj.Type() == nil {
- return false
- }
-
- const maxDepth = 3
-
- var searchTypes func(t types.Type, addressable bool, mods []typeModKind) bool
- searchTypes = func(t types.Type, addressable bool, mods []typeModKind) bool {
- if f(t, addressable) {
- if len(mods) > 0 {
- newMods := make([]typeModKind, len(mods)+len(c.mods))
- copy(newMods, mods)
- copy(newMods[len(mods):], c.mods)
- c.mods = newMods
- }
- return true
- }
-
- if len(mods) == maxDepth {
- return false
- }
-
- return derivableTypes(t, addressable, func(t types.Type, addressable bool, mod typeModKind) bool {
- return searchTypes(t, addressable, append(mods, mod))
- })
- }
-
- return searchTypes(c.obj.Type(), c.addressable, make([]typeModKind, 0, maxDepth))
-}
-
-// matchingCandidate reports whether cand matches our type inferences.
-// It mutates cand's score in certain cases.
-func (c *completer) matchingCandidate(cand *candidate) bool {
- if c.completionContext.commentCompletion {
- return false
- }
-
- // Bail out early if we are completing a field name in a composite literal.
- if v, ok := cand.obj.(*types.Var); ok && v.IsField() && c.wantStructFieldCompletions() {
- return true
- }
-
- if isTypeName(cand.obj) {
- return c.matchingTypeName(cand)
- } else if c.wantTypeName() {
- // If we want a type, a non-type object never matches.
- return false
- }
-
- if c.inference.candTypeMatches(cand) {
- return true
- }
-
- candType := cand.obj.Type()
- if candType == nil {
- return false
- }
-
- if sig, ok := candType.Underlying().(*types.Signature); ok {
- if c.inference.assigneesMatch(cand, sig) {
- // Invoke the candidate if its results are multi-assignable.
- cand.mods = append(cand.mods, invoke)
- return true
- }
- }
-
- // Default to invoking *types.Func candidates. This is so function
- // completions in an empty statement (or other cases with no expected type)
- // are invoked by default.
- if isFunc(cand.obj) {
- cand.mods = append(cand.mods, invoke)
- }
-
- return false
-}
-
-// candTypeMatches reports whether cand makes a good completion
-// candidate given the candidate inference. cand's score may be
-// mutated to downrank the candidate in certain situations.
-func (ci *candidateInference) candTypeMatches(cand *candidate) bool {
- var (
- expTypes = make([]types.Type, 0, 2)
- variadicType types.Type
- )
- if ci.objType != nil {
- expTypes = append(expTypes, ci.objType)
-
- if ci.variadic {
- variadicType = types.NewSlice(ci.objType)
- expTypes = append(expTypes, variadicType)
- }
- }
-
- return cand.anyCandType(func(candType types.Type, addressable bool) bool {
- // Take into account any type modifiers on the expected type.
- candType = ci.applyTypeModifiers(candType, addressable)
- if candType == nil {
- return false
- }
-
- if ci.convertibleTo != nil && types.ConvertibleTo(candType, ci.convertibleTo) {
- return true
- }
-
- for _, expType := range expTypes {
- if isEmptyInterface(expType) {
- continue
- }
-
- matches := ci.typeMatches(expType, candType)
- if !matches {
- // If candType doesn't otherwise match, consider if we can
- // convert candType directly to expType.
- if considerTypeConversion(candType, expType, cand.path) {
- cand.convertTo = expType
- // Give a major score penalty so we always prefer directly
- // assignable candidates, all else equal.
- cand.score *= 0.5
- return true
- }
-
- continue
- }
-
- if expType == variadicType {
- cand.mods = append(cand.mods, takeDotDotDot)
- }
-
- // Lower candidate score for untyped conversions. This avoids
- // ranking untyped constants above candidates with an exact type
- // match. Don't lower score of builtin constants, e.g. "true".
- if isUntyped(candType) && !types.Identical(candType, expType) && cand.obj.Parent() != types.Universe {
- // Bigger penalty for deep completions into other packages to
- // avoid random constants from other packages popping up all
- // the time.
- if len(cand.path) > 0 && isPkgName(cand.path[0]) {
- cand.score *= 0.5
- } else {
- cand.score *= 0.75
- }
- }
-
- return true
- }
-
- // If we don't have a specific expected type, fall back to coarser
- // object kind checks.
- if ci.objType == nil || isEmptyInterface(ci.objType) {
- // If we were able to apply type modifiers to our candidate type,
- // count that as a match. For example:
- //
- // var foo chan int
- // <-fo<>
- //
- // We were able to apply the "<-" type modifier to "foo", so "foo"
- // matches.
- if len(ci.modifiers) > 0 {
- return true
- }
-
- // If we didn't have an exact type match, check if our object kind
- // matches.
- if ci.kindMatches(candType) {
- if ci.objKind == kindFunc {
- cand.mods = append(cand.mods, invoke)
- }
- return true
- }
- }
-
- return false
- })
-}
-
-// considerTypeConversion returns true if we should offer a completion
-// automatically converting "from" to "to".
-func considerTypeConversion(from, to types.Type, path []types.Object) bool {
- // Don't offer to convert deep completions from other packages.
- // Otherwise there are many random package level consts/vars that
- // pop up as candidates all the time.
- if len(path) > 0 && isPkgName(path[0]) {
- return false
- }
-
- if _, ok := from.(*typeparams.TypeParam); ok {
- return false
- }
-
- if !types.ConvertibleTo(from, to) {
- return false
- }
-
- // Don't offer to convert ints to strings since that probably
- // doesn't do what the user wants.
- if isBasicKind(from, types.IsInteger) && isBasicKind(to, types.IsString) {
- return false
- }
-
- return true
-}
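// [Illustrative aside, not part of the original file.] A minimal sketch of
// why the integer-to-string guard above exists: go/types reports the
// conversion as legal, but string(rune(65)) yields "A", not "65", which is
// rarely what a completion user wants.
package main

import (
    "fmt"
    "go/types"
)

func main() {
    from := types.Typ[types.Int]
    to := types.Typ[types.String]
    fmt.Println(types.ConvertibleTo(from, to)) // true: the conversion is legal...
    fmt.Println(string(rune(65)))              // "A": ...but it is a rune conversion, not formatting.
}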
-
-// typeMatches reports whether an object of candType makes a good
-// completion candidate given the expected type expType.
-func (ci *candidateInference) typeMatches(expType, candType types.Type) bool {
- // Handle untyped values specially since AssignableTo gives false negatives
- // for them (see https://golang.org/issue/32146).
- if candBasic, ok := candType.Underlying().(*types.Basic); ok {
- if expBasic, ok := expType.Underlying().(*types.Basic); ok {
- // Note that the candidate and/or the expected can be untyped.
- // In "fo<> == 100" the expected type is untyped, and the
- // candidate could also be an untyped constant.
-
- // Sort by is_untyped and then by is_int to simplify below logic.
- a, b := candBasic.Info(), expBasic.Info()
- if a&types.IsUntyped == 0 || (b&types.IsInteger > 0 && b&types.IsUntyped > 0) {
- a, b = b, a
- }
-
- // If at least one is untyped...
- if a&types.IsUntyped > 0 {
- switch {
- // Untyped integers are compatible with floats.
- case a&types.IsInteger > 0 && b&types.IsFloat > 0:
- return true
-
- // Check if their constant kind (bool|int|float|complex|string) matches.
- // This doesn't take into account the constant value, so there will be some
- // false positives due to integer sign and overflow.
- case a&types.IsConstType == b&types.IsConstType:
- return true
- }
- }
- }
- }
-
- // AssignableTo covers the case where the types are equal, but also handles
- // cases like assigning a concrete type to an interface type.
- return types.AssignableTo(candType, expType)
-}
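// [Illustrative aside, not part of the original file.] A standalone sketch of
// the BasicInfo bitmask checks used above: untyped constants carry IsUntyped
// alongside their kind bits, and plain AssignableTo on the typed kinds does
// not capture the int-constant-to-float compatibility handled explicitly.
package main

import (
    "fmt"
    "go/types"
)

func main() {
    untypedInt := types.Typ[types.UntypedInt].Info()
    float64Info := types.Typ[types.Float64].Info()

    fmt.Println(untypedInt&types.IsUntyped != 0)   // true
    fmt.Println(untypedInt&types.IsInteger != 0)   // true
    fmt.Println(float64Info&types.IsFloat != 0)    // true
    fmt.Println(untypedInt&types.IsConstType != 0) // true: int is a constant kind

    // Typed int is not assignable to float64, so the special case matters.
    fmt.Println(types.AssignableTo(types.Typ[types.Int], types.Typ[types.Float64])) // false
}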
-
-// kindMatches reports whether candType's kind matches our expected
-// kind (e.g. slice, map, etc.).
-func (ci *candidateInference) kindMatches(candType types.Type) bool {
- return ci.objKind > 0 && ci.objKind&candKind(candType) > 0
-}
-
-// assigneesMatch reports whether an invocation of sig matches the
-// number and type of any assignees.
-func (ci *candidateInference) assigneesMatch(cand *candidate, sig *types.Signature) bool {
- if len(ci.assignees) == 0 {
- return false
- }
-
- // Uniresult functions are always usable and are handled by the
- // normal, non-assignees type matching logic.
- if sig.Results().Len() == 1 {
- return false
- }
-
- // Don't prefer completing into func(...interface{}) calls since all
- // functions would match.
- if ci.variadicAssignees && len(ci.assignees) == 1 && isEmptyInterface(deslice(ci.assignees[0])) {
- return false
- }
-
- var numberOfResultsCouldMatch bool
- if ci.variadicAssignees {
- numberOfResultsCouldMatch = sig.Results().Len() >= len(ci.assignees)-1
- } else {
- numberOfResultsCouldMatch = sig.Results().Len() == len(ci.assignees)
- }
-
- // If our signature doesn't return the right number of values, it's
- // not a match, so downrank it. For example:
- //
- // var foo func() (int, int)
- // a, b, c := <> // downrank "foo()" since it only returns two values
- if !numberOfResultsCouldMatch {
- cand.score /= 2
- return false
- }
-
- // If at least one assignee has a valid type, and all valid
- // assignees match the corresponding sig result value, the signature
- // is a match.
- allMatch := false
- for i := 0; i < sig.Results().Len(); i++ {
- var assignee types.Type
-
- // If we are completing into variadic parameters, deslice the
- // expected variadic type.
- if ci.variadicAssignees && i >= len(ci.assignees)-1 {
- assignee = ci.assignees[len(ci.assignees)-1]
- if elem := deslice(assignee); elem != nil {
- assignee = elem
- }
- } else {
- assignee = ci.assignees[i]
- }
-
- if assignee == nil {
- continue
- }
-
- allMatch = ci.typeMatches(assignee, sig.Results().At(i).Type())
- if !allMatch {
- break
- }
- }
- return allMatch
-}
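// [Illustrative aside, not part of the original file.] A standalone sketch of
// the result-count comparison above, building a two-result signature like the
// "foo" in the comment with go/types:
package main

import (
    "fmt"
    "go/token"
    "go/types"
)

func main() {
    intVar := func() *types.Var {
        return types.NewVar(token.NoPos, nil, "", types.Typ[types.Int])
    }
    results := types.NewTuple(intVar(), intVar())
    sig := types.NewSignature(nil, nil, results, false) // func() (int, int)

    fmt.Println(sig.Results().Len() == 2) // true: matches "a, b := foo()"
    fmt.Println(sig.Results().Len() == 3) // false: "a, b, c := foo()" would downrank foo
}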
-
-func (c *completer) matchingTypeName(cand *candidate) bool {
- if !c.wantTypeName() {
- return false
- }
-
- typeMatches := func(candType types.Type) bool {
- // Take into account any type name modifier prefixes.
- candType = c.inference.applyTypeNameModifiers(candType)
-
- if from := c.inference.typeName.assertableFrom; from != nil {
- // Don't suggest the starting type in type assertions. For example,
- // if "foo" is an io.Writer, don't suggest "foo.(io.Writer)".
- if types.Identical(from, candType) {
- return false
- }
-
- if intf, ok := from.Underlying().(*types.Interface); ok {
- if !types.AssertableTo(intf, candType) {
- return false
- }
- }
- }
-
- if c.inference.typeName.wantComparable && !types.Comparable(candType) {
- return false
- }
-
- // Skip this type if it has already been used in another type
- // switch case.
- for _, seen := range c.inference.typeName.seenTypeSwitchCases {
- if types.Identical(candType, seen) {
- return false
- }
- }
-
- // We can expect a type name and have an expected type in cases like:
- //
- // var foo []int
- // foo = []i<>
- //
- // Where our expected type is "[]int", and we expect a type name.
- if c.inference.objType != nil {
- return types.AssignableTo(candType, c.inference.objType)
- }
-
- // Default to saying any type name is a match.
- return true
- }
-
- t := cand.obj.Type()
-
- if typeMatches(t) {
- return true
- }
-
- if !source.IsInterface(t) && typeMatches(types.NewPointer(t)) {
- if c.inference.typeName.compLitType {
- // If we are completing a composite literal type as in
- // "foo<>{}", to make a pointer we must prepend "&".
- cand.mods = append(cand.mods, reference)
- } else {
- // If we are completing a normal type name such as "foo<>", to
- // make a pointer we must prepend "*".
- cand.mods = append(cand.mods, dereference)
- }
- return true
- }
-
- return false
-}
-
-var (
- // "interface { Error() string }" (i.e. error)
- errorIntf = types.Universe.Lookup("error").Type().Underlying().(*types.Interface)
-
- // "interface { String() string }" (i.e. fmt.Stringer)
- stringerIntf = types.NewInterfaceType([]*types.Func{
- types.NewFunc(token.NoPos, nil, "String", types.NewSignature(
- nil,
- nil,
- types.NewTuple(types.NewParam(token.NoPos, nil, "", types.Typ[types.String])),
- false,
- )),
- }, nil).Complete()
-
- byteType = types.Universe.Lookup("byte").Type()
-)
-
-// candKind returns the objKind of candType, if any.
-func candKind(candType types.Type) objKind {
- var kind objKind
-
- switch t := candType.Underlying().(type) {
- case *types.Array:
- kind |= kindArray
- if t.Elem() == byteType {
- kind |= kindBytes
- }
- case *types.Slice:
- kind |= kindSlice
- if t.Elem() == byteType {
- kind |= kindBytes
- }
- case *types.Chan:
- kind |= kindChan
- case *types.Map:
- kind |= kindMap
- case *types.Pointer:
- kind |= kindPtr
-
- // Some builtins handle array pointers as arrays, so just report a pointer
- // to an array as an array.
- if _, isArray := t.Elem().Underlying().(*types.Array); isArray {
- kind |= kindArray
- }
- case *types.Basic:
- switch info := t.Info(); {
- case info&types.IsString > 0:
- kind |= kindString
- case info&types.IsInteger > 0:
- kind |= kindInt
- case info&types.IsFloat > 0:
- kind |= kindFloat
- case info&types.IsComplex > 0:
- kind |= kindComplex
- case info&types.IsBoolean > 0:
- kind |= kindBool
- }
- case *types.Signature:
- return kindFunc
- }
-
- if types.Implements(candType, errorIntf) {
- kind |= kindError
- }
-
- if types.Implements(candType, stringerIntf) {
- kind |= kindStringer
- }
-
- return kind
-}
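// [Illustrative aside, not part of the original file.] A standalone sketch of
// the Implements checks above, building the same Stringer-shaped interface and
// testing it against a type with a String method from a type-checked snippet.
// The source string and package name here are made up for the example.
package main

import (
    "fmt"
    "go/ast"
    "go/importer"
    "go/parser"
    "go/token"
    "go/types"
)

const src = `package p
type T struct{}
func (T) String() string { return "t" }
`

func main() {
    fset := token.NewFileSet()
    file, err := parser.ParseFile(fset, "p.go", src, 0)
    if err != nil {
        panic(err)
    }
    conf := types.Config{Importer: importer.Default()}
    pkg, err := conf.Check("p", fset, []*ast.File{file}, nil)
    if err != nil {
        panic(err)
    }

    // Same construction as stringerIntf above.
    stringer := types.NewInterfaceType([]*types.Func{
        types.NewFunc(token.NoPos, nil, "String", types.NewSignature(
            nil, nil,
            types.NewTuple(types.NewParam(token.NoPos, nil, "", types.Typ[types.String])),
            false,
        )),
    }, nil).Complete()

    T := pkg.Scope().Lookup("T").Type()
    fmt.Println(types.Implements(T, stringer))                    // true
    fmt.Println(types.Implements(types.Typ[types.Int], stringer)) // false
}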
diff --git a/internal/lsp/source/completion/deep_completion.go b/internal/lsp/source/completion/deep_completion.go
deleted file mode 100644
index a13d807d4..000000000
--- a/internal/lsp/source/completion/deep_completion.go
+++ /dev/null
@@ -1,362 +0,0 @@
-// Copyright 2019 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-package completion
-
-import (
- "context"
- "go/types"
- "strings"
- "time"
-)
-
-// MaxDeepCompletions limits deep completion results because in most cases
-// there are too many to be useful.
-const MaxDeepCompletions = 3
-
-// deepCompletionState stores our state as we search for deep completions.
-// "deep completion" refers to searching into objects' fields and methods to
-// find more completion candidates.
-type deepCompletionState struct {
- // enabled indicates whether deep completion is permitted.
- enabled bool
-
- // queueClosed is used to disable adding new sub-fields to the search
- // queue once we are running out of our time budget.
- queueClosed bool
-
- // thisQueue holds the current breadth first search queue.
- thisQueue []candidate
-
- // nextQueue holds the next breadth first search iteration's queue.
- nextQueue []candidate
-
- // highScores tracks the highest deep candidate scores we have found
- // so far. This is used to avoid work for low scoring deep candidates.
- highScores [MaxDeepCompletions]float64
-
- // candidateCount is the count of unique deep candidates encountered
- // so far.
- candidateCount int
-}
-
-// enqueue adds a candidate to the search queue.
-func (s *deepCompletionState) enqueue(cand candidate) {
- s.nextQueue = append(s.nextQueue, cand)
-}
-
-// dequeue removes and returns the last element of the search queue.
-func (s *deepCompletionState) dequeue() *candidate {
- var cand *candidate
- cand, s.thisQueue = &s.thisQueue[len(s.thisQueue)-1], s.thisQueue[:len(s.thisQueue)-1]
- return cand
-}
-
-// scorePenalty computes a deep candidate score penalty. A candidate is
-// penalized based on depth to favor shallower candidates. We also give a
-// slight bonus to unexported objects and a slight additional penalty to
-// function objects.
-func (s *deepCompletionState) scorePenalty(cand *candidate) float64 {
- var deepPenalty float64
- for _, dc := range cand.path {
- deepPenalty++
-
- if !dc.Exported() {
- deepPenalty -= 0.1
- }
-
- if _, isSig := dc.Type().Underlying().(*types.Signature); isSig {
- deepPenalty += 0.1
- }
- }
-
- // Normalize penalty to a max depth of 10.
- return deepPenalty / 10
-}
-
-// isHighScore returns whether score is among the top MaxDeepCompletions deep
-// candidate scores encountered so far. If so, it adds score to highScores,
-// possibly displacing an existing high score.
-func (s *deepCompletionState) isHighScore(score float64) bool {
- // Invariant: s.highScores is sorted with highest score first. Unclaimed
- // positions are trailing zeros.
-
- // If we beat an existing score then take its spot.
- for i, deepScore := range s.highScores {
- if score <= deepScore {
- continue
- }
-
- if deepScore != 0 && i != len(s.highScores)-1 {
- // If this wasn't an empty slot then we need to scooch everyone
- // down one spot.
- copy(s.highScores[i+1:], s.highScores[i:])
- }
- s.highScores[i] = score
- return true
- }
-
- return false
-}
-
-// newPath returns the path from the search root to obj, extending the
-// given candidate's path by obj.
-func (s *deepCompletionState) newPath(cand candidate, obj types.Object) []types.Object {
- path := make([]types.Object, len(cand.path)+1)
- copy(path, cand.path)
- path[len(path)-1] = obj
-
- return path
-}
-
-// deepSearch searches a candidate and its subordinate objects for completion
-// items, if deep completion is enabled, and adds the valid candidates to the
-// list of completion items.
-func (c *completer) deepSearch(ctx context.Context) {
- defer func() {
- // We can return early before completing the search, so be sure to
- // clear out our queues to not impact any further invocations.
- c.deepState.thisQueue = c.deepState.thisQueue[:0]
- c.deepState.nextQueue = c.deepState.nextQueue[:0]
- }()
-
- for len(c.deepState.nextQueue) > 0 {
- c.deepState.thisQueue, c.deepState.nextQueue = c.deepState.nextQueue, c.deepState.thisQueue[:0]
-
- outer:
- for _, cand := range c.deepState.thisQueue {
- obj := cand.obj
-
- if obj == nil {
- continue
- }
-
- // At the top level, dedupe by object.
- if len(cand.path) == 0 {
- if c.seen[obj] {
- continue
- }
- c.seen[obj] = true
- }
-
- // If obj is not accessible because it lives in another package and is
- // not exported, don't treat it as a completion candidate unless it's
- // a package completion candidate.
- if !c.completionContext.packageCompletion &&
- obj.Pkg() != nil && obj.Pkg() != c.pkg.GetTypes() && !obj.Exported() {
- continue
- }
-
- // If we want a type name, don't offer non-type name candidates.
- // However, do offer package names since they can contain type names,
- // and do offer any candidate without a type since we aren't sure if it
- // is a type name or not (i.e. unimported candidate).
- if c.wantTypeName() && obj.Type() != nil && !isTypeName(obj) && !isPkgName(obj) {
- continue
- }
-
- // When searching deep, make sure we don't have a cycle in our chain.
- // We don't dedupe by object because we want to allow both "foo.Baz"
- // and "bar.Baz" even though "Baz" is represented the same types.Object
- // in both.
- for _, seenObj := range cand.path {
- if seenObj == obj {
- continue outer
- }
- }
-
- c.addCandidate(ctx, &cand)
-
- c.deepState.candidateCount++
- if c.opts.budget > 0 && c.deepState.candidateCount%100 == 0 {
- spent := float64(time.Since(c.startTime)) / float64(c.opts.budget)
- select {
- case <-ctx.Done():
- return
- default:
- // If we are almost out of budgeted time, no further elements
- // should be added to the queue. This ensures remaining time is
- // used for processing current queue.
- if !c.deepState.queueClosed && spent >= 0.85 {
- c.deepState.queueClosed = true
- }
- }
- }
-
- // if deep search is disabled, don't add any more candidates.
- if !c.deepState.enabled || c.deepState.queueClosed {
- continue
- }
-
- // Searching members for a type name doesn't make sense.
- if isTypeName(obj) {
- continue
- }
- if obj.Type() == nil {
- continue
- }
-
- // Don't search embedded fields because they were already included in their
- // parent's fields.
- if v, ok := obj.(*types.Var); ok && v.Embedded() {
- continue
- }
-
- if sig, ok := obj.Type().Underlying().(*types.Signature); ok {
- // If obj is a function that takes no arguments and returns one
- // value, keep searching across the function call.
- if sig.Params().Len() == 0 && sig.Results().Len() == 1 {
- path := c.deepState.newPath(cand, obj)
- // The result of a function call is not addressable.
- c.methodsAndFields(sig.Results().At(0).Type(), false, cand.imp, func(newCand candidate) {
- newCand.pathInvokeMask = cand.pathInvokeMask | (1 << uint64(len(cand.path)))
- newCand.path = path
- c.deepState.enqueue(newCand)
- })
- }
- }
-
- path := c.deepState.newPath(cand, obj)
- switch obj := obj.(type) {
- case *types.PkgName:
- c.packageMembers(obj.Imported(), stdScore, cand.imp, func(newCand candidate) {
- newCand.pathInvokeMask = cand.pathInvokeMask
- newCand.path = path
- c.deepState.enqueue(newCand)
- })
- default:
- c.methodsAndFields(obj.Type(), cand.addressable, cand.imp, func(newCand candidate) {
- newCand.pathInvokeMask = cand.pathInvokeMask
- newCand.path = path
- c.deepState.enqueue(newCand)
- })
- }
- }
- }
-}
-
-// addCandidate adds a completion candidate to suggestions, without searching
-// its members for more candidates.
-func (c *completer) addCandidate(ctx context.Context, cand *candidate) {
- obj := cand.obj
- if c.matchingCandidate(cand) {
- cand.score *= highScore
-
- if p := c.penalty(cand); p > 0 {
- cand.score *= (1 - p)
- }
- } else if isTypeName(obj) {
- // If obj is a *types.TypeName that didn't otherwise match, check
- // if a literal object of this type makes a good candidate.
-
- // We only care about named types (i.e. don't want builtin types).
- if _, isNamed := obj.Type().(*types.Named); isNamed {
- c.literal(ctx, obj.Type(), cand.imp)
- }
- }
-
- // Lower score of method calls so we prefer fields and vars over calls.
- if cand.hasMod(invoke) {
- if sig, ok := obj.Type().Underlying().(*types.Signature); ok && sig.Recv() != nil {
- cand.score *= 0.9
- }
- }
-
- // Prefer private objects over public ones.
- if !obj.Exported() && obj.Parent() != types.Universe {
- cand.score *= 1.1
- }
-
- // Slight penalty for index modifier (e.g. changing "foo" to
- // "foo[]") to curb false positives.
- if cand.hasMod(index) {
- cand.score *= 0.9
- }
-
- // Favor shallow matches by lowering score according to depth.
- cand.score -= cand.score * c.deepState.scorePenalty(cand)
-
- if cand.score < 0 {
- cand.score = 0
- }
-
- cand.name = deepCandName(cand)
- if item, err := c.item(ctx, *cand); err == nil {
- c.items = append(c.items, item)
- }
-}
-
-// deepCandName produces the full candidate name including any
-// ancestor objects. For example, "foo.bar().baz" for candidate "baz".
-func deepCandName(cand *candidate) string {
- totalLen := len(cand.obj.Name())
- for i, obj := range cand.path {
- totalLen += len(obj.Name()) + 1
- if cand.pathInvokeMask&(1<<uint16(i)) > 0 {
- totalLen += 2
- }
- }
-
- var buf strings.Builder
- buf.Grow(totalLen)
-
- for i, obj := range cand.path {
- buf.WriteString(obj.Name())
- if cand.pathInvokeMask&(1<<uint16(i)) > 0 {
- buf.WriteByte('(')
- buf.WriteByte(')')
- }
- buf.WriteByte('.')
- }
-
- buf.WriteString(cand.obj.Name())
-
- return buf.String()
-}
-
-// penalty reports a score penalty for cand in the range (0, 1).
-// For example, a candidate is penalized if it has already been used
-// in another switch case statement.
-func (c *completer) penalty(cand *candidate) float64 {
- for _, p := range c.inference.penalized {
- if c.objChainMatches(cand, p.objChain) {
- return p.penalty
- }
- }
-
- return 0
-}
-
-// objChainMatches reports whether cand combined with the surrounding
-// object prefix matches chain.
-func (c *completer) objChainMatches(cand *candidate, chain []types.Object) bool {
- // For example, when completing:
- //
- // foo.ba<>
- //
- // If we are considering the deep candidate "bar.baz", cand is baz,
- // objChain is [foo] and cand.path is [bar]. We would match the
- // chain [foo, bar, baz].
- if len(chain) != len(c.inference.objChain)+len(cand.path)+1 {
- return false
- }
-
- if chain[len(chain)-1] != cand.obj {
- return false
- }
-
- for i, o := range c.inference.objChain {
- if chain[i] != o {
- return false
- }
- }
-
- for i, o := range cand.path {
- if chain[i+len(c.inference.objChain)] != o {
- return false
- }
- }
-
- return true
-}
diff --git a/internal/lsp/source/completion/deep_completion_test.go b/internal/lsp/source/completion/deep_completion_test.go
deleted file mode 100644
index 27009af1b..000000000
--- a/internal/lsp/source/completion/deep_completion_test.go
+++ /dev/null
@@ -1,33 +0,0 @@
-// Copyright 2020 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-package completion
-
-import (
- "testing"
-)
-
-func TestDeepCompletionIsHighScore(t *testing.T) {
- // Test that deepCompletionState.isHighScore properly tracks the top
- // N=MaxDeepCompletions scores.
-
- var s deepCompletionState
-
- if !s.isHighScore(1) {
- // No other scores yet, anything is a winner.
- t.Error("1 should be high score")
- }
-
- // Fill up with higher scores.
- for i := 0; i < MaxDeepCompletions; i++ {
- if !s.isHighScore(10) {
- t.Error("10 should be high score")
- }
- }
-
- // High scores should be filled with 10s so 2 is not a high score.
- if s.isHighScore(2) {
- t.Error("2 shouldn't be high score")
- }
-}
diff --git a/internal/lsp/source/completion/definition.go b/internal/lsp/source/completion/definition.go
deleted file mode 100644
index 17b251cb0..000000000
--- a/internal/lsp/source/completion/definition.go
+++ /dev/null
@@ -1,127 +0,0 @@
-// Copyright 2022 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-package completion
-
-import (
- "go/ast"
- "go/token"
- "go/types"
- "strings"
- "unicode"
- "unicode/utf8"
-
- "golang.org/x/tools/internal/lsp/protocol"
- "golang.org/x/tools/internal/lsp/snippet"
- "golang.org/x/tools/internal/lsp/source"
-)
-
-// Some definitions can be completed.
-// So far: TestFoo(t *testing.T), TestMain(m *testing.M),
-// BenchmarkFoo(b *testing.B), and FuzzFoo(f *testing.F).
-
-// path[0] is known to be *ast.Ident
-func definition(path []ast.Node, obj types.Object, fset *token.FileSet, mapper *protocol.ColumnMapper, fh source.FileHandle) ([]CompletionItem, *Selection) {
- if _, ok := obj.(*types.Func); !ok {
- return nil, nil // not a function at all
- }
- if !strings.HasSuffix(fh.URI().Filename(), "_test.go") {
- return nil, nil
- }
-
- name := path[0].(*ast.Ident).Name
- if len(name) == 0 {
- // can't happen
- return nil, nil
- }
- pos := path[0].Pos()
- sel := &Selection{
- content: "",
- cursor: pos,
- MappedRange: source.NewMappedRange(fset, mapper, pos, pos),
- }
- var ans []CompletionItem
-
- // Always suggest TestMain, if possible
- if strings.HasPrefix("TestMain", name) {
- ans = []CompletionItem{defItem("TestMain(m *testing.M)", obj)}
- }
-
- // If a snippet is possible, suggest it
- if strings.HasPrefix("Test", name) {
- ans = append(ans, defSnippet("Test", "Xxx", "(t *testing.T)", obj))
- return ans, sel
- } else if strings.HasPrefix("Benchmark", name) {
- ans = append(ans, defSnippet("Benchmark", "Xxx", "(b *testing.B)", obj))
- return ans, sel
- } else if strings.HasPrefix("Fuzz", name) {
- ans = append(ans, defSnippet("Fuzz", "Xxx", "(f *testing.F)", obj))
- return ans, sel
- }
-
- // Fill in the argument for what the user has already typed
- if got := defMatches(name, "Test", path, "(t *testing.T)"); got != "" {
- ans = append(ans, defItem(got, obj))
- } else if got := defMatches(name, "Benchmark", path, "(b *testing.B)"); got != "" {
- ans = append(ans, defItem(got, obj))
- } else if got := defMatches(name, "Fuzz", path, "(f *testing.F)"); got != "" {
- ans = append(ans, defItem(got, obj))
- }
- return ans, sel
-}
-
-func defMatches(name, pat string, path []ast.Node, arg string) string {
- idx := strings.Index(name, pat)
- if idx < 0 {
- return ""
- }
- c, _ := utf8.DecodeRuneInString(name[len(pat):])
- if unicode.IsLower(c) {
- return ""
- }
- fd, ok := path[1].(*ast.FuncDecl)
- if !ok {
- // we don't know what's going on
- return ""
- }
- fp := fd.Type.Params
- if fp != nil && len(fp.List) > 0 {
- // signature already there, minimal suggestion
- return name
- }
- // suggesting signature too
- return name + arg
-}
-
-func defSnippet(prefix, placeholder, suffix string, obj types.Object) CompletionItem {
- var sn snippet.Builder
- sn.WriteText(prefix)
- if placeholder != "" {
- sn.WritePlaceholder(func(b *snippet.Builder) { b.WriteText(placeholder) })
- }
- sn.WriteText(suffix + " {\n")
- sn.WriteFinalTabstop()
- sn.WriteText("\n}")
- return CompletionItem{
- Label: prefix + placeholder + suffix,
- Detail: "tab, type the rest of the name, then tab",
- Kind: protocol.FunctionCompletion,
- Depth: 0,
- Score: 10,
- snippet: &sn,
- Documentation: prefix + " test function",
- obj: obj,
- }
-}
-func defItem(val string, obj types.Object) CompletionItem {
- return CompletionItem{
- Label: val,
- InsertText: val,
- Kind: protocol.FunctionCompletion,
- Depth: 0,
- Score: 9, // prefer the snippets when available
- Documentation: "complete the parameter",
- obj: obj,
- }
-}
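// [Illustrative aside, not part of the original file.] The completions above
// target the standard testing signatures. A hypothetical _test.go file showing
// the shapes being completed ("Xxx" names and the package name are placeholders):
package p

import (
    "os"
    "testing"
)

func TestMain(m *testing.M)     { os.Exit(m.Run()) }
func TestXxx(t *testing.T)      {}
func BenchmarkXxx(b *testing.B) {}
func FuzzXxx(f *testing.F)      { f.Fuzz(func(t *testing.T, _ []byte) {}) }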
diff --git a/internal/lsp/source/completion/format.go b/internal/lsp/source/completion/format.go
deleted file mode 100644
index e67456911..000000000
--- a/internal/lsp/source/completion/format.go
+++ /dev/null
@@ -1,340 +0,0 @@
-// Copyright 2019 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-package completion
-
-import (
- "context"
- "fmt"
- "go/ast"
- "go/doc"
- "go/types"
- "strings"
-
- "golang.org/x/tools/internal/event"
- "golang.org/x/tools/internal/imports"
- "golang.org/x/tools/internal/lsp/debug/tag"
- "golang.org/x/tools/internal/lsp/protocol"
- "golang.org/x/tools/internal/lsp/snippet"
- "golang.org/x/tools/internal/lsp/source"
- "golang.org/x/tools/internal/span"
- "golang.org/x/tools/internal/typeparams"
- errors "golang.org/x/xerrors"
-)
-
-var (
- errNoMatch = errors.New("not a surrounding match")
- errLowScore = errors.New("not a high scoring candidate")
-)
-
-// item formats a candidate to a CompletionItem.
-func (c *completer) item(ctx context.Context, cand candidate) (CompletionItem, error) {
- obj := cand.obj
-
- // if the object isn't a valid match against the surrounding, return early.
- matchScore := c.matcher.Score(cand.name)
- if matchScore <= 0 {
- return CompletionItem{}, errNoMatch
- }
- cand.score *= float64(matchScore)
-
- // Ignore deep candidates that won't be in the MaxDeepCompletions anyway.
- if len(cand.path) != 0 && !c.deepState.isHighScore(cand.score) {
- return CompletionItem{}, errLowScore
- }
-
- // Handle builtin types separately.
- if obj.Parent() == types.Universe {
- return c.formatBuiltin(ctx, cand)
- }
-
- var (
- label = cand.name
- detail = types.TypeString(obj.Type(), c.qf)
- insert = label
- kind = protocol.TextCompletion
- snip snippet.Builder
- protocolEdits []protocol.TextEdit
- )
- if obj.Type() == nil {
- detail = ""
- }
- if isTypeName(obj) && c.wantTypeParams() {
- x := cand.obj.(*types.TypeName)
- if named, ok := x.Type().(*types.Named); ok {
- tp := typeparams.ForNamed(named)
- label += source.FormatTypeParams(tp)
- insert = label // maintain invariant above (label == insert)
- }
- }
-
- snip.WriteText(insert)
-
- switch obj := obj.(type) {
- case *types.TypeName:
- detail, kind = source.FormatType(obj.Type(), c.qf)
- case *types.Const:
- kind = protocol.ConstantCompletion
- case *types.Var:
- if _, ok := obj.Type().(*types.Struct); ok {
- detail = "struct{...}" // for anonymous structs
- } else if obj.IsField() {
- detail = source.FormatVarType(ctx, c.snapshot, c.pkg, obj, c.qf)
- }
- if obj.IsField() {
- kind = protocol.FieldCompletion
- c.structFieldSnippet(cand, detail, &snip)
- } else {
- kind = protocol.VariableCompletion
- }
- if obj.Type() == nil {
- break
- }
- case *types.Func:
- sig, ok := obj.Type().Underlying().(*types.Signature)
- if !ok {
- break
- }
- kind = protocol.FunctionCompletion
- if sig != nil && sig.Recv() != nil {
- kind = protocol.MethodCompletion
- }
- case *types.PkgName:
- kind = protocol.ModuleCompletion
- detail = fmt.Sprintf("%q", obj.Imported().Path())
- case *types.Label:
- kind = protocol.ConstantCompletion
- detail = "label"
- }
-
- var prefix string
- for _, mod := range cand.mods {
- switch mod {
- case reference:
- prefix = "&" + prefix
- case dereference:
- prefix = "*" + prefix
- case chanRead:
- prefix = "<-" + prefix
- }
- }
-
- var (
- suffix string
- funcType = obj.Type()
- )
-Suffixes:
- for _, mod := range cand.mods {
- switch mod {
- case invoke:
- if sig, ok := funcType.Underlying().(*types.Signature); ok {
- s := source.NewSignature(ctx, c.snapshot, c.pkg, sig, nil, c.qf)
- c.functionCallSnippet("", s.TypeParams(), s.Params(), &snip)
- if sig.Results().Len() == 1 {
- funcType = sig.Results().At(0).Type()
- }
- detail = "func" + s.Format()
- }
-
- if !c.opts.snippets {
-    // Without snippets the candidate will not include "()". Don't
-    // add further suffixes since they would be invalid. For
-    // example, the snippet candidate "foo()..." would become the
-    // invalid "foo..." without snippets if we added the dotDotDot.
- break Suffixes
- }
- case takeSlice:
- suffix += "[:]"
- case takeDotDotDot:
- suffix += "..."
- case index:
- snip.WriteText("[")
- snip.WritePlaceholder(nil)
- snip.WriteText("]")
- }
- }
-
- // If this candidate needs an additional import statement,
- // add the additional text edits needed.
- if cand.imp != nil {
- addlEdits, err := c.importEdits(cand.imp)
-
- if err != nil {
- return CompletionItem{}, err
- }
-
- protocolEdits = append(protocolEdits, addlEdits...)
- if kind != protocol.ModuleCompletion {
- if detail != "" {
- detail += " "
- }
- detail += fmt.Sprintf("(from %q)", cand.imp.importPath)
- }
- }
-
- if cand.convertTo != nil {
- typeName := types.TypeString(cand.convertTo, c.qf)
-
- switch cand.convertTo.(type) {
- // We need extra parens when casting to these types. For example,
- // we need "(*int)(foo)", not "*int(foo)".
- case *types.Pointer, *types.Signature:
- typeName = "(" + typeName + ")"
- }
-
- prefix = typeName + "(" + prefix
- suffix = ")"
- }
-
- if prefix != "" {
- // If we are in a selector, add an edit to place prefix before selector.
- if sel := enclosingSelector(c.path, c.pos); sel != nil {
- edits, err := c.editText(sel.Pos(), sel.Pos(), prefix)
- if err != nil {
- return CompletionItem{}, err
- }
- protocolEdits = append(protocolEdits, edits...)
- } else {
- // If there is no selector, just stick the prefix at the start.
- insert = prefix + insert
- snip.PrependText(prefix)
- }
- }
-
- if suffix != "" {
- insert += suffix
- snip.WriteText(suffix)
- }
-
- detail = strings.TrimPrefix(detail, "untyped ")
- // override computed detail with provided detail, if something is provided.
- if cand.detail != "" {
- detail = cand.detail
- }
- item := CompletionItem{
- Label: label,
- InsertText: insert,
- AdditionalTextEdits: protocolEdits,
- Detail: detail,
- Kind: kind,
- Score: cand.score,
- Depth: len(cand.path),
- snippet: &snip,
- obj: obj,
- }
- // If the user doesn't want documentation for completion items.
- if !c.opts.documentation {
- return item, nil
- }
- pos := c.snapshot.FileSet().Position(obj.Pos())
-
- // We ignore errors here, because some types, like "unsafe" or "error",
- // may not have valid positions that we can use to get documentation.
- if !pos.IsValid() {
- return item, nil
- }
- uri := span.URIFromPath(pos.Filename)
-
- // Find the source file of the candidate.
- pkg, err := source.FindPackageFromPos(ctx, c.snapshot, obj.Pos())
- if err != nil {
- return item, nil
- }
-
- decl, err := c.snapshot.PosToDecl(ctx, pkg, obj.Pos())
- if err != nil {
- return CompletionItem{}, err
- }
- hover, err := source.FindHoverContext(ctx, c.snapshot, pkg, obj, decl, nil)
- if err != nil {
- event.Error(ctx, "failed to find Hover", err, tag.URI.Of(uri))
- return item, nil
- }
- if c.opts.fullDocumentation {
- item.Documentation = hover.Comment.Text()
- } else {
- item.Documentation = doc.Synopsis(hover.Comment.Text())
- }
- // The desired pattern is `^// Deprecated`, but the prefix has been removed
- if strings.HasPrefix(hover.Comment.Text(), "Deprecated") {
- if c.snapshot.View().Options().CompletionTags {
- item.Tags = []protocol.CompletionItemTag{protocol.ComplDeprecated}
- } else if c.snapshot.View().Options().CompletionDeprecated {
- item.Deprecated = true
- }
- }
-
- return item, nil
-}
-
-// importEdits produces the text edits necessary to add the given import to the current file.
-func (c *completer) importEdits(imp *importInfo) ([]protocol.TextEdit, error) {
- if imp == nil {
- return nil, nil
- }
-
- pgf, err := c.pkg.File(span.URIFromPath(c.filename))
- if err != nil {
- return nil, err
- }
-
- return source.ComputeOneImportFixEdits(c.snapshot, pgf, &imports.ImportFix{
- StmtInfo: imports.ImportInfo{
- ImportPath: imp.importPath,
- Name: imp.name,
- },
- // IdentName is unused on this path and is difficult to get.
- FixType: imports.AddImport,
- })
-}
-
-func (c *completer) formatBuiltin(ctx context.Context, cand candidate) (CompletionItem, error) {
- obj := cand.obj
- item := CompletionItem{
- Label: obj.Name(),
- InsertText: obj.Name(),
- Score: cand.score,
- }
- switch obj.(type) {
- case *types.Const:
- item.Kind = protocol.ConstantCompletion
- case *types.Builtin:
- item.Kind = protocol.FunctionCompletion
- sig, err := source.NewBuiltinSignature(ctx, c.snapshot, obj.Name())
- if err != nil {
- return CompletionItem{}, err
- }
- item.Detail = "func" + sig.Format()
- item.snippet = &snippet.Builder{}
- c.functionCallSnippet(obj.Name(), sig.TypeParams(), sig.Params(), item.snippet)
- case *types.TypeName:
- if types.IsInterface(obj.Type()) {
- item.Kind = protocol.InterfaceCompletion
- } else {
- item.Kind = protocol.ClassCompletion
- }
- case *types.Nil:
- item.Kind = protocol.VariableCompletion
- }
- return item, nil
-}
-
-// decide if the type params (if any) should be part of the completion,
-// which is only possible for types.Named and types.Signature
-// (so far, only in receivers, e.g. func (s *GENERIC[K, V])..., which is a types.Named)
-func (c *completer) wantTypeParams() bool {
- // Need to be lexically in a receiver, and a child of an IndexListExpr
- // (but IndexListExpr only exists with go1.18)
- start := c.path[0].Pos()
- for i, nd := range c.path {
- if fd, ok := nd.(*ast.FuncDecl); ok {
- if i > 0 && fd.Recv != nil && start < fd.Recv.End() {
- return true
- } else {
- return false
- }
- }
- }
- return false
-}
diff --git a/internal/lsp/source/completion/fuzz.go b/internal/lsp/source/completion/fuzz.go
deleted file mode 100644
index 92349ab93..000000000
--- a/internal/lsp/source/completion/fuzz.go
+++ /dev/null
@@ -1,142 +0,0 @@
-// Copyright 2022 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-package completion
-
-import (
- "fmt"
- "go/ast"
- "go/token"
- "go/types"
- "strings"
-
- "golang.org/x/tools/internal/lsp/protocol"
-)
-
-// golang/go#51089
-// *testing.F deserves special treatment as member use is constrained:
-// The arguments to f.Fuzz are determined by the arguments to a previous f.Add
-// Inside f.Fuzz only f.Failed and f.Name are allowed.
-// PJW: are there other packages where we can deduce usage constraints?
-
-// if we find fuzz completions, then return true, as those are the only completions to offer
-func (c *completer) fuzz(typ types.Type, mset *types.MethodSet, imp *importInfo, cb func(candidate), fset *token.FileSet) bool {
- // 1. inside f.Fuzz? (only f.Failed and f.Name)
- // 2. possibly completing f.Fuzz?
- //    [Ident,SelectorExpr,CallExpr,ExprStmt,BlockStmt,FuncDecl(Fuzz...)]
- // 3. before f.Fuzz, same (for 2., offer choice when looking at an F)
-
- // does the path contain FuncLit as arg to f.Fuzz CallExpr?
- inside := false
-Loop:
- for i, n := range c.path {
- switch v := n.(type) {
- case *ast.CallExpr:
- if len(v.Args) != 1 {
- continue Loop
- }
- if _, ok := v.Args[0].(*ast.FuncLit); !ok {
- continue
- }
- if s, ok := v.Fun.(*ast.SelectorExpr); !ok || s.Sel.Name != "Fuzz" {
- continue
- }
- if i > 2 { // avoid t.Fuzz itself in tests
- inside = true
- break Loop
- }
- }
- }
- if inside {
- for i := 0; i < mset.Len(); i++ {
- o := mset.At(i).Obj()
- if o.Name() == "Failed" || o.Name() == "Name" {
- cb(candidate{
- obj: o,
- score: stdScore,
- imp: imp,
- addressable: true,
- })
- }
- }
- return true
- }
- // if it could be f.Fuzz, look for the preceding f.Add
- id, ok := c.path[0].(*ast.Ident)
- if ok && strings.HasPrefix("Fuzz", id.Name) {
- var add *ast.CallExpr
- f := func(n ast.Node) bool {
- if n == nil {
- return true
- }
- call, ok := n.(*ast.CallExpr)
- if !ok {
- return true
- }
- s, ok := call.Fun.(*ast.SelectorExpr)
- if !ok {
- return true
- }
- if s.Sel.Name != "Add" {
- return true
- }
- // Sel.X should be of type *testing.F
- got := c.pkg.GetTypesInfo().Types[s.X]
- if got.Type.String() == "*testing.F" {
- add = call
- }
- return false // because we're done...
- }
- // look at the enclosing FuzzFoo functions
- if len(c.path) < 2 {
- return false
- }
- n := c.path[len(c.path)-2]
- if _, ok := n.(*ast.FuncDecl); !ok {
- // the path should start with ast.File, ast.FuncDecl, ...
- // but it didn't, so give up
- return false
- }
- ast.Inspect(n, f)
- if add == nil {
- // looks like f.Fuzz without a preceding f.Add.
- // let the regular completion handle it.
- return false
- }
-
- lbl := "Fuzz(func(t *testing.T"
- for i, a := range add.Args {
- info := c.pkg.GetTypesInfo().TypeOf(a)
- if info == nil {
-    return false // This shouldn't happen, but better safe than a panic.
- }
- lbl += fmt.Sprintf(", %c %s", 'a'+i, info)
- }
- lbl += ")"
- xx := CompletionItem{
- Label: lbl,
- InsertText: lbl,
- Kind: protocol.FunctionCompletion,
- Depth: 0,
- Score: 10, // pretty confident the user should see this
- Documentation: "argument types from f.Add",
- obj: nil,
- }
- c.items = append(c.items, xx)
- for i := 0; i < mset.Len(); i++ {
- o := mset.At(i).Obj()
- if o.Name() != "Fuzz" {
- cb(candidate{
- obj: o,
- score: stdScore,
- imp: imp,
- addressable: true,
- })
- }
- }
- return true // done
- }
- // let the standard processing take care of it instead
- return false
-}
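// [Illustrative aside, not part of the original file.] A hypothetical fuzz
// target of the shape this code completes: the arguments passed to f.Add
// (here a string and an int) determine the parameter list suggested for
// f.Fuzz, and inside the function literal only f.Failed and f.Name are
// offered. FuzzParse and the package name are made up for the example.
package p

import "testing"

func FuzzParse(f *testing.F) {
    f.Add("seed input", 42)
    f.Fuzz(func(t *testing.T, a string, b int) {
        // Only f.Failed and f.Name may be called on f in here.
        if f.Failed() {
            t.Log(f.Name())
        }
    })
}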
diff --git a/internal/lsp/source/completion/keywords.go b/internal/lsp/source/completion/keywords.go
deleted file mode 100644
index bbf59b022..000000000
--- a/internal/lsp/source/completion/keywords.go
+++ /dev/null
@@ -1,154 +0,0 @@
-// Copyright 2020 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-package completion
-
-import (
- "go/ast"
-
- "golang.org/x/tools/internal/lsp/protocol"
- "golang.org/x/tools/internal/lsp/source"
-)
-
-const (
- BREAK = "break"
- CASE = "case"
- CHAN = "chan"
- CONST = "const"
- CONTINUE = "continue"
- DEFAULT = "default"
- DEFER = "defer"
- ELSE = "else"
- FALLTHROUGH = "fallthrough"
- FOR = "for"
- FUNC = "func"
- GO = "go"
- GOTO = "goto"
- IF = "if"
- IMPORT = "import"
- INTERFACE = "interface"
- MAP = "map"
- PACKAGE = "package"
- RANGE = "range"
- RETURN = "return"
- SELECT = "select"
- STRUCT = "struct"
- SWITCH = "switch"
- TYPE = "type"
- VAR = "var"
-)
-
-// addKeywordCompletions offers keyword candidates appropriate at the position.
-func (c *completer) addKeywordCompletions() {
- seen := make(map[string]bool)
-
- if c.wantTypeName() && c.inference.objType == nil {
- // If we want a type name but don't have an expected obj type,
- // include "interface", "struct", "func", "chan", and "map".
-
-  // "interface" and "struct" are more common when declaring named types.
- // Give them a higher score if we are in a type declaration.
- structIntf, funcChanMap := stdScore, highScore
- if len(c.path) > 1 {
- if _, namedDecl := c.path[1].(*ast.TypeSpec); namedDecl {
- structIntf, funcChanMap = highScore, stdScore
- }
- }
-
- c.addKeywordItems(seen, structIntf, STRUCT, INTERFACE)
- c.addKeywordItems(seen, funcChanMap, FUNC, CHAN, MAP)
- }
-
- // If we are at the file scope, only offer decl keywords. We don't
- // get *ast.Idents at the file scope because non-keyword identifiers
- // turn into *ast.BadDecl, not *ast.Ident.
- if len(c.path) == 1 || isASTFile(c.path[1]) {
- c.addKeywordItems(seen, stdScore, TYPE, CONST, VAR, FUNC, IMPORT)
- return
- } else if _, ok := c.path[0].(*ast.Ident); !ok {
- // Otherwise only offer keywords if the client is completing an identifier.
- return
- }
-
- if len(c.path) > 2 {
- // Offer "range" if we are in ast.ForStmt.Init. This is what the
- // AST looks like before "range" is typed, e.g. "for i := r<>".
- if loop, ok := c.path[2].(*ast.ForStmt); ok && source.NodeContains(loop.Init, c.pos) {
- c.addKeywordItems(seen, stdScore, RANGE)
- }
- }
-
- // Only suggest keywords if we are beginning a statement.
- switch n := c.path[1].(type) {
- case *ast.BlockStmt, *ast.ExprStmt:
- // OK - our ident must be at beginning of statement.
- case *ast.CommClause:
- // Make sure we aren't in the Comm statement.
- if !n.Colon.IsValid() || c.pos <= n.Colon {
- return
- }
- case *ast.CaseClause:
- // Make sure we aren't in the case List.
- if !n.Colon.IsValid() || c.pos <= n.Colon {
- return
- }
- default:
- return
- }
-
- // Filter out keywords depending on scope
- // Skip the first one because we want to look at the enclosing scopes
- path := c.path[1:]
- for i, n := range path {
- switch node := n.(type) {
- case *ast.CaseClause:
- // only recommend "fallthrough" and "break" within the bodies of a case clause
- if c.pos > node.Colon {
- c.addKeywordItems(seen, stdScore, BREAK)
- // "fallthrough" is only valid in switch statements.
- // A case clause is always nested within a block statement in a switch statement,
-    // and that block statement is nested within either a TypeSwitchStmt or a SwitchStmt.
- if i+2 >= len(path) {
- continue
- }
- if _, ok := path[i+2].(*ast.SwitchStmt); ok {
- c.addKeywordItems(seen, stdScore, FALLTHROUGH)
- }
- }
- case *ast.CommClause:
- if c.pos > node.Colon {
- c.addKeywordItems(seen, stdScore, BREAK)
- }
- case *ast.TypeSwitchStmt, *ast.SelectStmt, *ast.SwitchStmt:
- c.addKeywordItems(seen, stdScore, CASE, DEFAULT)
- case *ast.ForStmt, *ast.RangeStmt:
- c.addKeywordItems(seen, stdScore, BREAK, CONTINUE)
-  // This is a bit weak; functions allow many keywords.
- case *ast.FuncDecl:
- if node.Body != nil && c.pos > node.Body.Lbrace {
- c.addKeywordItems(seen, stdScore, DEFER, RETURN, FOR, GO, SWITCH, SELECT, IF, ELSE, VAR, CONST, GOTO, TYPE)
- }
- }
- }
-}
-
-// addKeywordItems dedupes and adds completion items for the specified
-// keywords with the specified score.
-func (c *completer) addKeywordItems(seen map[string]bool, score float64, kws ...string) {
- for _, kw := range kws {
- if seen[kw] {
- continue
- }
- seen[kw] = true
-
- if matchScore := c.matcher.Score(kw); matchScore > 0 {
- c.items = append(c.items, CompletionItem{
- Label: kw,
- Kind: protocol.KeywordCompletion,
- InsertText: kw,
- Score: score * float64(matchScore),
- })
- }
- }
-}
diff --git a/internal/lsp/source/completion/labels.go b/internal/lsp/source/completion/labels.go
deleted file mode 100644
index e4fd961e3..000000000
--- a/internal/lsp/source/completion/labels.go
+++ /dev/null
@@ -1,112 +0,0 @@
-// Copyright 2019 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-package completion
-
-import (
- "go/ast"
- "go/token"
- "math"
-)
-
-type labelType int
-
-const (
- labelNone labelType = iota
- labelBreak
- labelContinue
- labelGoto
-)
-
-// wantLabelCompletion returns the label type if we want (only) label
-// completions at the position, or labelNone otherwise.
-func (c *completer) wantLabelCompletion() labelType {
- if _, ok := c.path[0].(*ast.Ident); ok && len(c.path) > 1 {
- // We want a label if we are an *ast.Ident child of a statement
- // that accepts a label, e.g. "break Lo<>".
- return takesLabel(c.path[1])
- }
-
- return labelNone
-}
-
-// takesLabel returns the corresponding labelType if n is a statement
-// that accepts a label, otherwise labelNone.
-func takesLabel(n ast.Node) labelType {
- if bs, ok := n.(*ast.BranchStmt); ok {
- switch bs.Tok {
- case token.BREAK:
- return labelBreak
- case token.CONTINUE:
- return labelContinue
- case token.GOTO:
- return labelGoto
- }
- }
- return labelNone
-}
-
-// labels adds completion items for labels defined in the enclosing
-// function.
-func (c *completer) labels(lt labelType) {
- if c.enclosingFunc == nil {
- return
- }
-
- addLabel := func(score float64, l *ast.LabeledStmt) {
- labelObj := c.pkg.GetTypesInfo().ObjectOf(l.Label)
- if labelObj != nil {
- c.deepState.enqueue(candidate{obj: labelObj, score: score})
- }
- }
-
- switch lt {
- case labelBreak, labelContinue:
- // "break" and "continue" only accept labels from enclosing statements.
-
- for i, p := range c.path {
- switch p := p.(type) {
- case *ast.FuncLit:
- // Labels are function scoped, so don't continue out of functions.
- return
- case *ast.LabeledStmt:
- switch p.Stmt.(type) {
- case *ast.ForStmt, *ast.RangeStmt:
- // Loop labels can be used for "break" or "continue".
- addLabel(highScore*math.Pow(.99, float64(i)), p)
- case *ast.SwitchStmt, *ast.SelectStmt, *ast.TypeSwitchStmt:
- // Switch and select labels can be used only for "break".
- if lt == labelBreak {
- addLabel(highScore*math.Pow(.99, float64(i)), p)
- }
- }
- }
- }
- case labelGoto:
- // Goto accepts any label in the same function not in a nested
- // block. It also doesn't take labels that would jump across
- // variable definitions, but ignore that case for now.
- ast.Inspect(c.enclosingFunc.body, func(n ast.Node) bool {
- if n == nil {
- return false
- }
-
- switch n := n.(type) {
- // Only search into block-like nodes enclosing our "goto".
- // This prevents us from finding labels in nested blocks.
- case *ast.BlockStmt, *ast.CommClause, *ast.CaseClause:
- for _, p := range c.path {
- if n == p {
- return true
- }
- }
- return false
- case *ast.LabeledStmt:
- addLabel(highScore, n)
- }
-
- return true
- })
- }
-}
diff --git a/internal/lsp/source/completion/literal.go b/internal/lsp/source/completion/literal.go
deleted file mode 100644
index 5025f1f74..000000000
--- a/internal/lsp/source/completion/literal.go
+++ /dev/null
@@ -1,440 +0,0 @@
-// Copyright 2019 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-package completion
-
-import (
- "context"
- "fmt"
- "go/types"
- "strings"
- "unicode"
-
- "golang.org/x/tools/internal/event"
- "golang.org/x/tools/internal/lsp/protocol"
- "golang.org/x/tools/internal/lsp/snippet"
- "golang.org/x/tools/internal/lsp/source"
-)
-
-// literal generates composite literal, function literal, and make()
-// completion items.
-func (c *completer) literal(ctx context.Context, literalType types.Type, imp *importInfo) {
- if !c.opts.literal {
- return
- }
-
- expType := c.inference.objType
-
- if c.inference.matchesVariadic(literalType) {
- // Don't offer literal slice candidates for variadic arguments.
- // For example, don't offer "[]interface{}{}" in "fmt.Print(<>)".
- return
- }
-
- // Avoid literal candidates if the expected type is an empty
- // interface. It isn't very useful to suggest a literal candidate of
- // every possible type.
- if expType != nil && isEmptyInterface(expType) {
- return
- }
-
- // We handle unnamed literal completions explicitly before searching
- // for candidates. Avoid named-type literal completions for
- // unnamed-type expected type since that results in duplicate
- // candidates. For example, in
- //
- // type mySlice []int
- // var []int = <>
- //
- // don't offer "mySlice{}" since we have already added a candidate
- // of "[]int{}".
- if _, named := literalType.(*types.Named); named && expType != nil {
- if _, named := source.Deref(expType).(*types.Named); !named {
- return
- }
- }
-
- // Check if an object of type literalType would match our expected type.
- cand := candidate{
- obj: c.fakeObj(literalType),
- }
-
- switch literalType.Underlying().(type) {
- // These literal types are addressable (e.g. "&[]int{}"), others are
- // not (e.g. can't do "&(func(){})").
- case *types.Struct, *types.Array, *types.Slice, *types.Map:
- cand.addressable = true
- }
-
- if !c.matchingCandidate(&cand) || cand.convertTo != nil {
- return
- }
-
- var (
- qf = c.qf
- sel = enclosingSelector(c.path, c.pos)
- )
-
- // Don't qualify the type name if we are in a selector expression
- // since the package name is already present.
- if sel != nil {
- qf = func(_ *types.Package) string { return "" }
- }
-
- typeName := types.TypeString(literalType, qf)
-
-	// A type name of "[]int" doesn't work very well with the matcher
- // since "[" isn't a valid identifier prefix. Here we strip off the
- // slice (and array) prefix yielding just "int".
- matchName := typeName
- switch t := literalType.(type) {
- case *types.Slice:
- matchName = types.TypeString(t.Elem(), qf)
- case *types.Array:
- matchName = types.TypeString(t.Elem(), qf)
- }
-
- addlEdits, err := c.importEdits(imp)
- if err != nil {
- event.Error(ctx, "error adding import for literal candidate", err)
- return
- }
-
- // If prefix matches the type name, client may want a composite literal.
- if score := c.matcher.Score(matchName); score > 0 {
- if cand.hasMod(reference) {
- if sel != nil {
- // If we are in a selector we must place the "&" before the selector.
- // For example, "foo.B<>" must complete to "&foo.Bar{}", not
- // "foo.&Bar{}".
- edits, err := c.editText(sel.Pos(), sel.Pos(), "&")
- if err != nil {
- event.Error(ctx, "error making edit for literal pointer completion", err)
- return
- }
- addlEdits = append(addlEdits, edits...)
- } else {
- // Otherwise we can stick the "&" directly before the type name.
- typeName = "&" + typeName
- }
- }
-
- switch t := literalType.Underlying().(type) {
- case *types.Struct, *types.Array, *types.Slice, *types.Map:
- c.compositeLiteral(t, typeName, float64(score), addlEdits)
- case *types.Signature:
- // Add a literal completion for a signature type that implements
- // an interface. For example, offer "http.HandlerFunc()" when
- // expected type is "http.Handler".
- if source.IsInterface(expType) {
- c.basicLiteral(t, typeName, float64(score), addlEdits)
- }
- case *types.Basic:
- // Add a literal completion for basic types that implement our
- // expected interface (e.g. named string type http.Dir
- // implements http.FileSystem), or are identical to our expected
- // type (i.e. yielding a type conversion such as "float64()").
- if source.IsInterface(expType) || types.Identical(expType, literalType) {
- c.basicLiteral(t, typeName, float64(score), addlEdits)
- }
- }
- }
-
- // If prefix matches "make", client may want a "make()"
- // invocation. We also include the type name to allow for more
- // flexible fuzzy matching.
- if score := c.matcher.Score("make." + matchName); !cand.hasMod(reference) && score > 0 {
- switch literalType.Underlying().(type) {
- case *types.Slice:
- // The second argument to "make()" for slices is required, so default to "0".
- c.makeCall(typeName, "0", float64(score), addlEdits)
- case *types.Map, *types.Chan:
- // Maps and channels don't require the second argument, so omit
- // to keep things simple for now.
- c.makeCall(typeName, "", float64(score), addlEdits)
- }
- }
-
- // If prefix matches "func", client may want a function literal.
- if score := c.matcher.Score("func"); !cand.hasMod(reference) && score > 0 && !source.IsInterface(expType) {
- switch t := literalType.Underlying().(type) {
- case *types.Signature:
- c.functionLiteral(ctx, t, float64(score))
- }
- }
-}
-
-// literalCandidateScore is the base score for literal candidates.
-// Literal candidates match the expected type so they should be high
-// scoring, but we want them ranked below lexical objects of the
-// correct type, so scale down highScore.
-const literalCandidateScore = highScore / 2
-
-// functionLiteral adds a function literal completion item for the
-// given signature.
-func (c *completer) functionLiteral(ctx context.Context, sig *types.Signature, matchScore float64) {
- snip := &snippet.Builder{}
- snip.WriteText("func(")
-
- // First we generate names for each param and keep a seen count so
- // we know if we need to uniquify param names. For example,
- // "func(int)" will become "func(i int)", but "func(int, int64)"
- // will become "func(i1 int, i2 int64)".
- var (
- paramNames = make([]string, sig.Params().Len())
- paramNameCount = make(map[string]int)
- )
- for i := 0; i < sig.Params().Len(); i++ {
- var (
- p = sig.Params().At(i)
- name = p.Name()
- )
- if name == "" {
- // If the param has no name in the signature, guess a name based
- // on the type. Use an empty qualifier to ignore the package.
- // For example, we want to name "http.Request" "r", not "hr".
- name = source.FormatVarType(ctx, c.snapshot, c.pkg, p, func(p *types.Package) string {
- return ""
- })
- name = abbreviateTypeName(name)
- }
- paramNames[i] = name
- if name != "_" {
- paramNameCount[name]++
- }
- }
-
- for n, c := range paramNameCount {
- // Any names we saw more than once will need a unique suffix added
- // on. Reset the count to 1 to act as the suffix for the first
- // name.
- if c >= 2 {
- paramNameCount[n] = 1
- } else {
- delete(paramNameCount, n)
- }
- }
-
- for i := 0; i < sig.Params().Len(); i++ {
- if i > 0 {
- snip.WriteText(", ")
- }
-
- var (
- p = sig.Params().At(i)
- name = paramNames[i]
- )
-
- // Uniquify names by adding on an incrementing numeric suffix.
- if idx, found := paramNameCount[name]; found {
- paramNameCount[name]++
- name = fmt.Sprintf("%s%d", name, idx)
- }
-
- if name != p.Name() && c.opts.placeholders {
- // If we didn't use the signature's param name verbatim then we
- // may have chosen a poor name. Give the user a placeholder so
- // they can easily fix the name.
- snip.WritePlaceholder(func(b *snippet.Builder) {
- b.WriteText(name)
- })
- } else {
- snip.WriteText(name)
- }
-
- // If the following param's type is identical to this one, omit
- // this param's type string. For example, emit "i, j int" instead
- // of "i int, j int".
- if i == sig.Params().Len()-1 || !types.Identical(p.Type(), sig.Params().At(i+1).Type()) {
- snip.WriteText(" ")
- typeStr := source.FormatVarType(ctx, c.snapshot, c.pkg, p, c.qf)
- if sig.Variadic() && i == sig.Params().Len()-1 {
- typeStr = strings.Replace(typeStr, "[]", "...", 1)
- }
- snip.WriteText(typeStr)
- }
- }
- snip.WriteText(")")
-
- results := sig.Results()
- if results.Len() > 0 {
- snip.WriteText(" ")
- }
-
- resultsNeedParens := results.Len() > 1 ||
- results.Len() == 1 && results.At(0).Name() != ""
-
- if resultsNeedParens {
- snip.WriteText("(")
- }
- for i := 0; i < results.Len(); i++ {
- if i > 0 {
- snip.WriteText(", ")
- }
- r := results.At(i)
- if name := r.Name(); name != "" {
- snip.WriteText(name + " ")
- }
- snip.WriteText(source.FormatVarType(ctx, c.snapshot, c.pkg, r, c.qf))
- }
- if resultsNeedParens {
- snip.WriteText(")")
- }
-
- snip.WriteText(" {")
- snip.WriteFinalTabstop()
- snip.WriteText("}")
-
- c.items = append(c.items, CompletionItem{
- Label: "func(...) {}",
- Score: matchScore * literalCandidateScore,
- Kind: protocol.VariableCompletion,
- snippet: snip,
- })
-}
-
-// conventionalAcronyms contains conventional acronyms for type names
-// in lower case. For example, "ctx" for "context" and "err" for "error".
-var conventionalAcronyms = map[string]string{
- "context": "ctx",
- "error": "err",
- "tx": "tx",
- "responsewriter": "w",
-}
-
-// abbreviateTypeName abbreviates type names into acronyms. For
-// example, "fooBar" is abbreviated "fb". Care is taken to ignore
-// non-identifier runes. For example, "[]int" becomes "i", and
-// "struct { i int }" becomes "s".
-func abbreviateTypeName(s string) string {
- var (
- b strings.Builder
- useNextUpper bool
- )
-
- // Trim off leading non-letters. We trim everything between "[" and
- // "]" to handle array types like "[someConst]int".
- var inBracket bool
- s = strings.TrimFunc(s, func(r rune) bool {
- if inBracket {
- inBracket = r != ']'
- return true
- }
-
- if r == '[' {
- inBracket = true
- }
-
- return !unicode.IsLetter(r)
- })
-
- if acr, ok := conventionalAcronyms[strings.ToLower(s)]; ok {
- return acr
- }
-
- for i, r := range s {
- // Stop if we encounter a non-identifier rune.
- if !unicode.IsLetter(r) && !unicode.IsNumber(r) {
- break
- }
-
- if i == 0 {
- b.WriteRune(unicode.ToLower(r))
- }
-
- if unicode.IsUpper(r) {
- if useNextUpper {
- b.WriteRune(unicode.ToLower(r))
- useNextUpper = false
- }
- } else {
- useNextUpper = true
- }
- }
-
- return b.String()
-}
-
-// compositeLiteral adds a composite literal completion item for the given typeName.
-func (c *completer) compositeLiteral(T types.Type, typeName string, matchScore float64, edits []protocol.TextEdit) {
- snip := &snippet.Builder{}
- snip.WriteText(typeName + "{")
- // Don't put the tab stop inside the composite literal curlies "{}"
- // for structs that have no accessible fields.
- if strct, ok := T.(*types.Struct); !ok || fieldsAccessible(strct, c.pkg.GetTypes()) {
- snip.WriteFinalTabstop()
- }
- snip.WriteText("}")
-
- nonSnippet := typeName + "{}"
-
- c.items = append(c.items, CompletionItem{
- Label: nonSnippet,
- InsertText: nonSnippet,
- Score: matchScore * literalCandidateScore,
- Kind: protocol.VariableCompletion,
- AdditionalTextEdits: edits,
- snippet: snip,
- })
-}
-
-// basicLiteral adds a literal completion item for the given basic
-// type name typeName.
-func (c *completer) basicLiteral(T types.Type, typeName string, matchScore float64, edits []protocol.TextEdit) {
- // Never give type conversions like "untyped int()".
- if isUntyped(T) {
- return
- }
-
- snip := &snippet.Builder{}
- snip.WriteText(typeName + "(")
- snip.WriteFinalTabstop()
- snip.WriteText(")")
-
- nonSnippet := typeName + "()"
-
- c.items = append(c.items, CompletionItem{
- Label: nonSnippet,
- InsertText: nonSnippet,
- Detail: T.String(),
- Score: matchScore * literalCandidateScore,
- Kind: protocol.VariableCompletion,
- AdditionalTextEdits: edits,
- snippet: snip,
- })
-}
-
-// makeCall adds a completion item for a "make()" call given a specific type.
-func (c *completer) makeCall(typeName string, secondArg string, matchScore float64, edits []protocol.TextEdit) {
- // Keep it simple and don't add any placeholders for optional "make()" arguments.
-
- snip := &snippet.Builder{}
- snip.WriteText("make(" + typeName)
- if secondArg != "" {
- snip.WriteText(", ")
- snip.WritePlaceholder(func(b *snippet.Builder) {
- if c.opts.placeholders {
- b.WriteText(secondArg)
- }
- })
- }
- snip.WriteText(")")
-
- var nonSnippet strings.Builder
- nonSnippet.WriteString("make(" + typeName)
- if secondArg != "" {
- nonSnippet.WriteString(", ")
- nonSnippet.WriteString(secondArg)
- }
- nonSnippet.WriteByte(')')
-
- c.items = append(c.items, CompletionItem{
- Label: nonSnippet.String(),
- InsertText: nonSnippet.String(),
- Score: matchScore * literalCandidateScore,
- Kind: protocol.FunctionCompletion,
- AdditionalTextEdits: edits,
- snippet: snip,
- })
-}
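For reference, here is a minimal, self-contained sketch of the name-abbreviation idea used by functionLiteral above. It is a deliberate simplification of abbreviateTypeName (it keeps the lower-cased first rune plus the lower-cased form of every later upper-case rune, and omits the bracket trimming and the acronym table), shown only to illustrate the behavior:

package main

import (
	"fmt"
	"strings"
	"unicode"
)

// abbreviate is a simplified stand-in for abbreviateTypeName: "fooBar"
// becomes "fb" and "ResponseWriter" becomes "rw".
func abbreviate(s string) string {
	var b strings.Builder
	for i, r := range s {
		if i == 0 || unicode.IsUpper(r) {
			b.WriteRune(unicode.ToLower(r))
		}
	}
	return b.String()
}

func main() {
	for _, s := range []string{"fooBar", "ResponseWriter", "Buffer"} {
		fmt.Printf("%s -> %s\n", s, abbreviate(s))
	}
}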
diff --git a/internal/lsp/source/completion/package.go b/internal/lsp/source/completion/package.go
deleted file mode 100644
index c7e52d718..000000000
--- a/internal/lsp/source/completion/package.go
+++ /dev/null
@@ -1,364 +0,0 @@
-// Copyright 2020 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-package completion
-
-import (
- "bytes"
- "context"
- "fmt"
- "go/ast"
- "go/parser"
- "go/scanner"
- "go/token"
- "go/types"
- "path/filepath"
- "strings"
- "unicode"
-
- "golang.org/x/tools/internal/lsp/debug"
- "golang.org/x/tools/internal/lsp/fuzzy"
- "golang.org/x/tools/internal/lsp/protocol"
- "golang.org/x/tools/internal/lsp/source"
- "golang.org/x/tools/internal/span"
- errors "golang.org/x/xerrors"
-)
-
-// packageClauseCompletions offers completions for a package declaration when
-// one is not present in the given file.
-func packageClauseCompletions(ctx context.Context, snapshot source.Snapshot, fh source.FileHandle, pos protocol.Position) ([]CompletionItem, *Selection, error) {
- // We know that the AST for this file will be empty due to the missing
- // package declaration, but parse it anyway to get a mapper.
- pgf, err := snapshot.ParseGo(ctx, fh, source.ParseFull)
- if err != nil {
- return nil, nil, err
- }
-
- cursorSpan, err := pgf.Mapper.PointSpan(pos)
- if err != nil {
- return nil, nil, err
- }
- rng, err := cursorSpan.Range(pgf.Mapper.Converter)
- if err != nil {
- return nil, nil, err
- }
-
- surrounding, err := packageCompletionSurrounding(ctx, snapshot.FileSet(), pgf, rng.Start)
- if err != nil {
- return nil, nil, errors.Errorf("invalid position for package completion: %w", err)
- }
-
- packageSuggestions, err := packageSuggestions(ctx, snapshot, fh.URI(), "")
- if err != nil {
- return nil, nil, err
- }
-
- var items []CompletionItem
- for _, pkg := range packageSuggestions {
- insertText := fmt.Sprintf("package %s", pkg.name)
- items = append(items, CompletionItem{
- Label: insertText,
- Kind: protocol.ModuleCompletion,
- InsertText: insertText,
- Score: pkg.score,
- })
- }
-
- return items, surrounding, nil
-}
-
-// packageCompletionSurrounding returns the Selection for package completion
-// if package completions can be suggested at a given position. A valid location
-// for package completion is above any declarations or import statements.
-func packageCompletionSurrounding(ctx context.Context, fset *token.FileSet, pgf *source.ParsedGoFile, pos token.Pos) (*Selection, error) {
- // If the file lacks a package declaration, the parser will return an empty
- // AST. As a work-around, try to parse an expression from the file contents.
- filename := pgf.URI.Filename()
- expr, _ := parser.ParseExprFrom(fset, filename, pgf.Src, parser.Mode(0))
- if expr == nil {
- return nil, fmt.Errorf("unparseable file (%s)", pgf.URI)
- }
- tok := fset.File(expr.Pos())
- offset, err := source.Offset(pgf.Tok, pos)
- if err != nil {
- return nil, err
- }
- if offset > tok.Size() {
- debug.Bug(ctx, "out of bounds cursor", "cursor offset (%d) out of bounds for %s (size: %d)", offset, pgf.URI, tok.Size())
- return nil, fmt.Errorf("cursor out of bounds")
- }
- cursor := tok.Pos(offset)
- m := &protocol.ColumnMapper{
- URI: pgf.URI,
- Content: pgf.Src,
- Converter: span.NewContentConverter(filename, pgf.Src),
- }
-
- // If we were able to parse out an identifier as the first expression from
- // the file, it may be the beginning of a package declaration ("pack ").
- // We can offer package completions if the cursor is in the identifier.
- if name, ok := expr.(*ast.Ident); ok {
- if cursor >= name.Pos() && cursor <= name.End() {
- if !strings.HasPrefix(PACKAGE, name.Name) {
- return nil, fmt.Errorf("cursor in non-matching ident")
- }
- return &Selection{
- content: name.Name,
- cursor: cursor,
- MappedRange: source.NewMappedRange(fset, m, name.Pos(), name.End()),
- }, nil
- }
- }
-
- // The file is invalid, but it contains an expression that we were able to
- // parse. We will use this expression to construct the cursor's
- // "surrounding".
-
- // First, consider the possibility that we have a valid "package" keyword
- // with an empty package name ("package "). "package" is parsed as an
- // *ast.BadDecl since it is a keyword. This logic would allow "package" to
- // appear on any line of the file as long as it's the first code expression
- // in the file.
- lines := strings.Split(string(pgf.Src), "\n")
- cursorLine := tok.Line(cursor)
- if cursorLine <= 0 || cursorLine > len(lines) {
- return nil, fmt.Errorf("invalid line number")
- }
- if fset.Position(expr.Pos()).Line == cursorLine {
- words := strings.Fields(lines[cursorLine-1])
- if len(words) > 0 && words[0] == PACKAGE {
- content := PACKAGE
- // Account for spaces if there are any.
- if len(words) > 1 {
- content += " "
- }
-
- start := expr.Pos()
- end := token.Pos(int(expr.Pos()) + len(content) + 1)
- // We have verified that we have a valid 'package' keyword as our
- // first expression. Ensure that cursor is in this keyword or
- // otherwise fallback to the general case.
- if cursor >= start && cursor <= end {
- return &Selection{
- content: content,
- cursor: cursor,
- MappedRange: source.NewMappedRange(fset, m, start, end),
- }, nil
- }
- }
- }
-
- // If the cursor is after the start of the expression, no package
- // declaration will be valid.
- if cursor > expr.Pos() {
- return nil, fmt.Errorf("cursor after expression")
- }
-
- // If the cursor is in a comment, don't offer any completions.
- if cursorInComment(fset, cursor, pgf.Src) {
- return nil, fmt.Errorf("cursor in comment")
- }
-
-	// The surrounding range in this case is the cursor itself, except for an
-	// empty file, in which case it's end of file - 1.
- start, end := cursor, cursor
- if tok.Size() == 0 {
- start, end = tok.Pos(0)-1, tok.Pos(0)-1
- }
-
- return &Selection{
- content: "",
- cursor: cursor,
- MappedRange: source.NewMappedRange(fset, m, start, end),
- }, nil
-}
-
-func cursorInComment(fset *token.FileSet, cursor token.Pos, src []byte) bool {
- var s scanner.Scanner
- s.Init(fset.File(cursor), src, func(_ token.Position, _ string) {}, scanner.ScanComments)
- for {
- pos, tok, lit := s.Scan()
- if pos <= cursor && cursor <= token.Pos(int(pos)+len(lit)) {
- return tok == token.COMMENT
- }
- if tok == token.EOF {
- break
- }
- }
- return false
-}
-
-// packageNameCompletions returns name completions for a package clause using
-// the current name as prefix.
-func (c *completer) packageNameCompletions(ctx context.Context, fileURI span.URI, name *ast.Ident) error {
- cursor := int(c.pos - name.NamePos)
- if cursor < 0 || cursor > len(name.Name) {
- return errors.New("cursor is not in package name identifier")
- }
-
- c.completionContext.packageCompletion = true
-
- prefix := name.Name[:cursor]
- packageSuggestions, err := packageSuggestions(ctx, c.snapshot, fileURI, prefix)
- if err != nil {
- return err
- }
-
- for _, pkg := range packageSuggestions {
- c.deepState.enqueue(pkg)
- }
- return nil
-}
-
-// packageSuggestions returns a list of packages from workspace packages that
-// have the given prefix and are used in the same directory as the given
-// file. This also includes test packages for these packages (<pkg>_test) and
-// the directory name itself.
-func packageSuggestions(ctx context.Context, snapshot source.Snapshot, fileURI span.URI, prefix string) (packages []candidate, err error) {
- workspacePackages, err := snapshot.ActivePackages(ctx)
- if err != nil {
- return nil, err
- }
-
- toCandidate := func(name string, score float64) candidate {
- obj := types.NewPkgName(0, nil, name, types.NewPackage("", name))
- return candidate{obj: obj, name: name, detail: name, score: score}
- }
-
- matcher := fuzzy.NewMatcher(prefix)
-
- // Always try to suggest a main package
- defer func() {
- if score := float64(matcher.Score("main")); score > 0 {
- packages = append(packages, toCandidate("main", score*lowScore))
- }
- }()
-
- dirPath := filepath.Dir(fileURI.Filename())
- dirName := filepath.Base(dirPath)
- if !isValidDirName(dirName) {
- return packages, nil
- }
- pkgName := convertDirNameToPkgName(dirName)
-
- seenPkgs := make(map[string]struct{})
-
- // The `go` command by default only allows one package per directory but we
- // support multiple package suggestions since gopls is build system agnostic.
- for _, pkg := range workspacePackages {
- if pkg.Name() == "main" || pkg.Name() == "" {
- continue
- }
- if _, ok := seenPkgs[pkg.Name()]; ok {
- continue
- }
-
- // Only add packages that are previously used in the current directory.
- var relevantPkg bool
- for _, pgf := range pkg.CompiledGoFiles() {
- if filepath.Dir(pgf.URI.Filename()) == dirPath {
- relevantPkg = true
- break
- }
- }
- if !relevantPkg {
- continue
- }
-
- // Add a found package used in current directory as a high relevance
- // suggestion and the test package for it as a medium relevance
- // suggestion.
- if score := float64(matcher.Score(pkg.Name())); score > 0 {
- packages = append(packages, toCandidate(pkg.Name(), score*highScore))
- }
- seenPkgs[pkg.Name()] = struct{}{}
-
- testPkgName := pkg.Name() + "_test"
- if _, ok := seenPkgs[testPkgName]; ok || strings.HasSuffix(pkg.Name(), "_test") {
- continue
- }
- if score := float64(matcher.Score(testPkgName)); score > 0 {
- packages = append(packages, toCandidate(testPkgName, score*stdScore))
- }
- seenPkgs[testPkgName] = struct{}{}
- }
-
- // Add current directory name as a low relevance suggestion.
- if _, ok := seenPkgs[pkgName]; !ok {
- if score := float64(matcher.Score(pkgName)); score > 0 {
- packages = append(packages, toCandidate(pkgName, score*lowScore))
- }
-
- testPkgName := pkgName + "_test"
- if score := float64(matcher.Score(testPkgName)); score > 0 {
- packages = append(packages, toCandidate(testPkgName, score*lowScore))
- }
- }
-
- return packages, nil
-}
-
-// isValidDirName checks whether the passed directory name can be used in
-// a package path. Requirements for a package path can be found here:
-// https://golang.org/ref/mod#go-mod-file-ident.
-func isValidDirName(dirName string) bool {
- if dirName == "" {
- return false
- }
-
- for i, ch := range dirName {
- if isLetter(ch) || isDigit(ch) {
- continue
- }
- if i == 0 {
- // Directory name can start only with '_'. '.' is not allowed in module paths.
- // '-' and '~' are not allowed because elements of package paths must be
- // safe command-line arguments.
- if ch == '_' {
- continue
- }
- } else {
-			// Module path elements can't end with '.'
- if isAllowedPunctuation(ch) && (i != len(dirName)-1 || ch != '.') {
- continue
- }
- }
-
- return false
- }
- return true
-}
-
-// convertDirNameToPkgName converts a valid directory name to a valid package name.
-// It leaves only letters and digits. All letters are mapped to lower case.
-func convertDirNameToPkgName(dirName string) string {
- var buf bytes.Buffer
- for _, ch := range dirName {
- switch {
- case isLetter(ch):
- buf.WriteRune(unicode.ToLower(ch))
-
- case buf.Len() != 0 && isDigit(ch):
- buf.WriteRune(ch)
- }
- }
- return buf.String()
-}
-
-// isLetter and isDigit allow only ASCII characters because
-// "Each path element is a non-empty string made of up ASCII letters,
-// ASCII digits, and limited ASCII punctuation"
-// (see https://golang.org/ref/mod#go-mod-file-ident).
-
-func isLetter(ch rune) bool {
- return 'a' <= ch && ch <= 'z' || 'A' <= ch && ch <= 'Z'
-}
-
-func isDigit(ch rune) bool {
- return '0' <= ch && ch <= '9'
-}
-
-func isAllowedPunctuation(ch rune) bool {
- return ch == '_' || ch == '-' || ch == '~' || ch == '.'
-}
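As a rough illustration of the directory-name fallback above, here is a small standalone sketch (not the deleted code) that derives a candidate package name from a file path in the same general way: take the base name of the file's directory, keep ASCII letters and digits, drop leading digits, and lower-case the letters:

package main

import (
	"fmt"
	"path/filepath"
	"strings"
	"unicode"
)

// pkgNameFromDir is a simplified stand-in for the dirName -> pkgName step.
func pkgNameFromDir(path string) string {
	dir := filepath.Base(filepath.Dir(path))
	var b strings.Builder
	for _, r := range dir {
		switch {
		case r >= 'a' && r <= 'z' || r >= 'A' && r <= 'Z':
			b.WriteRune(unicode.ToLower(r))
		case b.Len() > 0 && r >= '0' && r <= '9':
			b.WriteRune(r)
		}
	}
	return b.String()
}

func main() {
	// Prints "fastcache": the leading digit and punctuation are dropped.
	fmt.Println(pkgNameFromDir("/src/My-Project2/2fast-cache/file.go"))
}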
diff --git a/internal/lsp/source/completion/package_test.go b/internal/lsp/source/completion/package_test.go
deleted file mode 100644
index 6436984fd..000000000
--- a/internal/lsp/source/completion/package_test.go
+++ /dev/null
@@ -1,77 +0,0 @@
-// Copyright 2021 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-package completion
-
-import "testing"
-
-func TestIsValidDirName(t *testing.T) {
- tests := []struct {
- dirName string
- valid bool
- }{
- {dirName: "", valid: false},
- //
- {dirName: "a", valid: true},
- {dirName: "abcdef", valid: true},
- {dirName: "AbCdEf", valid: true},
- //
- {dirName: "1a35", valid: true},
- {dirName: "a16", valid: true},
- //
- {dirName: "_a", valid: true},
- {dirName: "a_", valid: true},
- //
- {dirName: "~a", valid: false},
- {dirName: "a~", valid: true},
- //
- {dirName: "-a", valid: false},
- {dirName: "a-", valid: true},
- //
- {dirName: ".a", valid: false},
- {dirName: "a.", valid: false},
- //
- {dirName: "a~_b--c.-e", valid: true},
- {dirName: "~a~_b--c.-e", valid: false},
- {dirName: "a~_b--c.-e--~", valid: true},
- {dirName: "a~_b--2134dc42.-e6--~", valid: true},
- {dirName: "abc`def", valid: false},
- {dirName: "тест", valid: false},
- {dirName: "你好", valid: false},
- }
- for _, tt := range tests {
- valid := isValidDirName(tt.dirName)
- if tt.valid != valid {
- t.Errorf("%s: expected %v, got %v", tt.dirName, tt.valid, valid)
- }
- }
-}
-
-func TestConvertDirNameToPkgName(t *testing.T) {
- tests := []struct {
- dirName string
- pkgName string
- }{
- {dirName: "a", pkgName: "a"},
- {dirName: "abcdef", pkgName: "abcdef"},
- {dirName: "AbCdEf", pkgName: "abcdef"},
- {dirName: "1a35", pkgName: "a35"},
- {dirName: "14a35", pkgName: "a35"},
- {dirName: "a16", pkgName: "a16"},
- {dirName: "_a", pkgName: "a"},
- {dirName: "a_", pkgName: "a"},
- {dirName: "a~", pkgName: "a"},
- {dirName: "a-", pkgName: "a"},
- {dirName: "a~_b--c.-e", pkgName: "abce"},
- {dirName: "a~_b--c.-e--~", pkgName: "abce"},
- {dirName: "a~_b--2134dc42.-e6--~", pkgName: "ab2134dc42e6"},
- }
- for _, tt := range tests {
- pkgName := convertDirNameToPkgName(tt.dirName)
- if tt.pkgName != pkgName {
- t.Errorf("%s: expected %v, got %v", tt.dirName, tt.pkgName, pkgName)
- continue
- }
- }
-}
diff --git a/internal/lsp/source/completion/postfix_snippets.go b/internal/lsp/source/completion/postfix_snippets.go
deleted file mode 100644
index 7ea962118..000000000
--- a/internal/lsp/source/completion/postfix_snippets.go
+++ /dev/null
@@ -1,461 +0,0 @@
-// Copyright 2020 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-package completion
-
-import (
- "context"
- "fmt"
- "go/ast"
- "go/token"
- "go/types"
- "log"
- "reflect"
- "strings"
- "sync"
- "text/template"
-
- "golang.org/x/tools/internal/event"
- "golang.org/x/tools/internal/imports"
- "golang.org/x/tools/internal/lsp/protocol"
- "golang.org/x/tools/internal/lsp/snippet"
- "golang.org/x/tools/internal/lsp/source"
- errors "golang.org/x/xerrors"
-)
-
-// Postfix snippets are artificial methods that allow the user to
-// compose common operations in an "argument oriented" fashion. For
-// example, instead of "sort.Slice(someSlice, ...)" a user can expand
-// "someSlice.sort!".
-
-// postfixTmpl represents a postfix snippet completion candidate.
-type postfixTmpl struct {
- // label is the completion candidate's label presented to the user.
- label string
-
- // details is passed along to the client as the candidate's details.
- details string
-
- // body is the template text. See postfixTmplArgs for details on the
- // facilities available to the template.
- body string
-
- tmpl *template.Template
-}
-
-// postfixTmplArgs are the template execution arguments available to
-// the postfix snippet templates.
-type postfixTmplArgs struct {
- // StmtOK is true if it is valid to replace the selector with a
- // statement. For example:
- //
- // func foo() {
- // bar.sort! // statement okay
- //
- // someMethod(bar.sort!) // statement not okay
- // }
- StmtOK bool
-
- // X is the textual SelectorExpr.X. For example, when completing
- // "foo.bar.print!", "X" is "foo.bar".
- X string
-
- // Obj is the types.Object of SelectorExpr.X, if any.
- Obj types.Object
-
- // Type is the type of "foo.bar" in "foo.bar.print!".
- Type types.Type
-
- scope *types.Scope
- snip snippet.Builder
- importIfNeeded func(pkgPath string, scope *types.Scope) (name string, edits []protocol.TextEdit, err error)
- edits []protocol.TextEdit
- qf types.Qualifier
- varNames map[string]bool
-}
-
-var postfixTmpls = []postfixTmpl{{
- label: "sort",
- details: "sort.Slice()",
- body: `{{if and (eq .Kind "slice") .StmtOK -}}
-{{.Import "sort"}}.Slice({{.X}}, func({{.VarName nil "i"}}, {{.VarName nil "j"}} int) bool {
- {{.Cursor}}
-})
-{{- end}}`,
-}, {
- label: "last",
- details: "s[len(s)-1]",
- body: `{{if and (eq .Kind "slice") .Obj -}}
-{{.X}}[len({{.X}})-1]
-{{- end}}`,
-}, {
- label: "reverse",
- details: "reverse slice",
- body: `{{if and (eq .Kind "slice") .StmtOK -}}
-{{$i := .VarName nil "i"}}{{$j := .VarName nil "j" -}}
-for {{$i}}, {{$j}} := 0, len({{.X}})-1; {{$i}} < {{$j}}; {{$i}}, {{$j}} = {{$i}}+1, {{$j}}-1 {
- {{.X}}[{{$i}}], {{.X}}[{{$j}}] = {{.X}}[{{$j}}], {{.X}}[{{$i}}]
-}
-{{end}}`,
-}, {
- label: "range",
- details: "range over slice",
- body: `{{if and (eq .Kind "slice") .StmtOK -}}
-for {{.VarName nil "i"}}, {{.VarName .ElemType "v"}} := range {{.X}} {
- {{.Cursor}}
-}
-{{- end}}`,
-}, {
- label: "append",
- details: "append and re-assign slice",
- body: `{{if and (eq .Kind "slice") .StmtOK .Obj -}}
-{{.X}} = append({{.X}}, {{.Cursor}})
-{{- end}}`,
-}, {
- label: "append",
- details: "append to slice",
- body: `{{if and (eq .Kind "slice") (not .StmtOK) -}}
-append({{.X}}, {{.Cursor}})
-{{- end}}`,
-}, {
- label: "copy",
- details: "duplicate slice",
- body: `{{if and (eq .Kind "slice") .StmtOK .Obj -}}
-{{$v := (.VarName nil (printf "%sCopy" .X))}}{{$v}} := make([]{{.TypeName .ElemType}}, len({{.X}}))
-copy({{$v}}, {{.X}})
-{{end}}`,
-}, {
- label: "range",
- details: "range over map",
- body: `{{if and (eq .Kind "map") .StmtOK -}}
-for {{.VarName .KeyType "k"}}, {{.VarName .ElemType "v"}} := range {{.X}} {
- {{.Cursor}}
-}
-{{- end}}`,
-}, {
- label: "clear",
- details: "clear map contents",
- body: `{{if and (eq .Kind "map") .StmtOK -}}
-{{$k := (.VarName .KeyType "k")}}for {{$k}} := range {{.X}} {
- delete({{.X}}, {{$k}})
-}
-{{end}}`,
-}, {
- label: "keys",
- details: "create slice of keys",
- body: `{{if and (eq .Kind "map") .StmtOK -}}
-{{$keysVar := (.VarName nil "keys")}}{{$keysVar}} := make([]{{.TypeName .KeyType}}, 0, len({{.X}}))
-{{$k := (.VarName .KeyType "k")}}for {{$k}} := range {{.X}} {
- {{$keysVar}} = append({{$keysVar}}, {{$k}})
-}
-{{end}}`,
-}, {
- label: "var",
- details: "assign to variables",
- body: `{{if and (eq .Kind "tuple") .StmtOK -}}
-{{$a := .}}{{range $i, $v := .Tuple}}{{if $i}}, {{end}}{{$a.VarName $v.Type $v.Name}}{{end}} := {{.X}}
-{{- end}}`,
-}, {
- label: "var",
- details: "assign to variable",
- body: `{{if and (ne .Kind "tuple") .StmtOK -}}
-{{.VarName .Type ""}} := {{.X}}
-{{- end}}`,
-}, {
- label: "print",
- details: "print to stdout",
- body: `{{if and (ne .Kind "tuple") .StmtOK -}}
-{{.Import "fmt"}}.Printf("{{.EscapeQuotes .X}}: %v\n", {{.X}})
-{{- end}}`,
-}, {
- label: "print",
- details: "print to stdout",
- body: `{{if and (eq .Kind "tuple") .StmtOK -}}
-{{.Import "fmt"}}.Println({{.X}})
-{{- end}}`,
-}, {
- label: "split",
- details: "split string",
- body: `{{if (eq (.TypeName .Type) "string") -}}
-{{.Import "strings"}}.Split({{.X}}, "{{.Cursor}}")
-{{- end}}`,
-}, {
- label: "join",
- details: "join string slice",
- body: `{{if and (eq .Kind "slice") (eq (.TypeName .ElemType) "string") -}}
-{{.Import "strings"}}.Join({{.X}}, "{{.Cursor}}")
-{{- end}}`,
-}}
-
-// Cursor indicates where the client's cursor should end up after the
-// snippet is done.
-func (a *postfixTmplArgs) Cursor() string {
- a.snip.WriteFinalTabstop()
- return ""
-}
-
-// Import makes sure the package corresponding to path is imported,
-// returning the identifier to use to refer to the package.
-func (a *postfixTmplArgs) Import(path string) (string, error) {
- name, edits, err := a.importIfNeeded(path, a.scope)
- if err != nil {
- return "", errors.Errorf("couldn't import %q: %w", path, err)
- }
- a.edits = append(a.edits, edits...)
- return name, nil
-}
-
-func (a *postfixTmplArgs) EscapeQuotes(v string) string {
- return strings.ReplaceAll(v, `"`, `\\"`)
-}
-
-// ElemType returns the Elem() type of X's type, if applicable.
-func (a *postfixTmplArgs) ElemType() types.Type {
- if e, _ := a.Type.(interface{ Elem() types.Type }); e != nil {
- return e.Elem()
- }
- return nil
-}
-
-// Kind returns the underlying kind of type, e.g. "slice", "struct",
-// etc.
-func (a *postfixTmplArgs) Kind() string {
- t := reflect.TypeOf(a.Type.Underlying())
- return strings.ToLower(strings.TrimPrefix(t.String(), "*types."))
-}
-
-// KeyType returns the type of X's key. KeyType panics if X is not a
-// map.
-func (a *postfixTmplArgs) KeyType() types.Type {
- return a.Type.Underlying().(*types.Map).Key()
-}
-
-// Tuple returns the tuple result vars if X is a call expression.
-func (a *postfixTmplArgs) Tuple() []*types.Var {
- tuple, _ := a.Type.(*types.Tuple)
- if tuple == nil {
- return nil
- }
-
- typs := make([]*types.Var, 0, tuple.Len())
- for i := 0; i < tuple.Len(); i++ {
- typs = append(typs, tuple.At(i))
- }
- return typs
-}
-
-// TypeName returns the textual representation of type t.
-func (a *postfixTmplArgs) TypeName(t types.Type) (string, error) {
- if t == nil || t == types.Typ[types.Invalid] {
- return "", fmt.Errorf("invalid type: %v", t)
- }
- return types.TypeString(t, a.qf), nil
-}
-
-// VarName returns a suitable variable name for the type t. If t
-// implements the error interface, "err" is used. If t is not a named
-// type then nonNamedDefault is used. Otherwise a name is made by
-// abbreviating the type name. If the resultant name is already in
-// scope, an integer is appended to make a unique name.
-func (a *postfixTmplArgs) VarName(t types.Type, nonNamedDefault string) string {
- if t == nil {
- t = types.Typ[types.Invalid]
- }
-
- var name string
- if types.Implements(t, errorIntf) {
- name = "err"
- } else if _, isNamed := source.Deref(t).(*types.Named); !isNamed {
- name = nonNamedDefault
- }
-
- if name == "" {
- name = types.TypeString(t, func(p *types.Package) string {
- return ""
- })
- name = abbreviateTypeName(name)
- }
-
- if dot := strings.LastIndex(name, "."); dot > -1 {
- name = name[dot+1:]
- }
-
- uniqueName := name
- for i := 2; ; i++ {
- if s, _ := a.scope.LookupParent(uniqueName, token.NoPos); s == nil && !a.varNames[uniqueName] {
- break
- }
- uniqueName = fmt.Sprintf("%s%d", name, i)
- }
-
- a.varNames[uniqueName] = true
-
- return uniqueName
-}
-
-func (c *completer) addPostfixSnippetCandidates(ctx context.Context, sel *ast.SelectorExpr) {
- if !c.opts.postfix {
- return
- }
-
- initPostfixRules()
-
- if sel == nil || sel.Sel == nil {
- return
- }
-
- selType := c.pkg.GetTypesInfo().TypeOf(sel.X)
- if selType == nil {
- return
- }
-
- // Skip empty tuples since there is no value to operate on.
- if tuple, ok := selType.Underlying().(*types.Tuple); ok && tuple == nil {
- return
- }
-
- tokFile := c.snapshot.FileSet().File(c.pos)
-
- // Only replace sel with a statement if sel is already a statement.
- var stmtOK bool
- for i, n := range c.path {
- if n == sel && i < len(c.path)-1 {
- switch p := c.path[i+1].(type) {
- case *ast.ExprStmt:
- stmtOK = true
- case *ast.AssignStmt:
- // In cases like:
- //
- // foo.<>
- // bar = 123
- //
- // detect that "foo." makes up the entire statement since the
- // apparent selector spans lines.
- stmtOK = tokFile.Line(c.pos) < tokFile.Line(p.TokPos)
- }
- break
- }
- }
-
- scope := c.pkg.GetTypes().Scope().Innermost(c.pos)
- if scope == nil {
- return
- }
-
- // afterDot is the position after selector dot, e.g. "|" in
- // "foo.|print".
- afterDot := sel.Sel.Pos()
-
- // We must detect dangling selectors such as:
- //
- // foo.<>
- // bar
- //
- // and adjust afterDot so that we don't mistakenly delete the
- // newline thinking "bar" is part of our selector.
- if startLine := tokFile.Line(sel.Pos()); startLine != tokFile.Line(afterDot) {
- if tokFile.Line(c.pos) != startLine {
- return
- }
- afterDot = c.pos
- }
-
- for _, rule := range postfixTmpls {
- // When completing foo.print<>, "print" is naturally overwritten,
- // but we need to also remove "foo." so the snippet has a clean
- // slate.
- edits, err := c.editText(sel.Pos(), afterDot, "")
- if err != nil {
- event.Error(ctx, "error calculating postfix edits", err)
- return
- }
-
- tmplArgs := postfixTmplArgs{
- X: source.FormatNode(c.snapshot.FileSet(), sel.X),
- StmtOK: stmtOK,
- Obj: exprObj(c.pkg.GetTypesInfo(), sel.X),
- Type: selType,
- qf: c.qf,
- importIfNeeded: c.importIfNeeded,
- scope: scope,
- varNames: make(map[string]bool),
- }
-
- // Feed the template straight into the snippet builder. This
- // allows templates to build snippets as they are executed.
- err = rule.tmpl.Execute(&tmplArgs.snip, &tmplArgs)
- if err != nil {
- event.Error(ctx, "error executing postfix template", err)
- continue
- }
-
- if strings.TrimSpace(tmplArgs.snip.String()) == "" {
- continue
- }
-
- score := c.matcher.Score(rule.label)
- if score <= 0 {
- continue
- }
-
- c.items = append(c.items, CompletionItem{
- Label: rule.label + "!",
- Detail: rule.details,
- Score: float64(score) * 0.01,
- Kind: protocol.SnippetCompletion,
- snippet: &tmplArgs.snip,
- AdditionalTextEdits: append(edits, tmplArgs.edits...),
- })
- }
-}
-
-var postfixRulesOnce sync.Once
-
-func initPostfixRules() {
- postfixRulesOnce.Do(func() {
- var idx int
- for _, rule := range postfixTmpls {
- var err error
- rule.tmpl, err = template.New("postfix_snippet").Parse(rule.body)
- if err != nil {
- log.Panicf("error parsing postfix snippet template: %v", err)
- }
- postfixTmpls[idx] = rule
- idx++
- }
- postfixTmpls = postfixTmpls[:idx]
- })
-}
-
-// importIfNeeded returns the package identifier and any necessary
-// edits to import package pkgPath.
-func (c *completer) importIfNeeded(pkgPath string, scope *types.Scope) (string, []protocol.TextEdit, error) {
- defaultName := imports.ImportPathToAssumedName(pkgPath)
-
- // Check if file already imports pkgPath.
- for _, s := range c.file.Imports {
- if source.ImportPath(s) == pkgPath {
- if s.Name == nil {
- return defaultName, nil, nil
- }
- if s.Name.Name != "_" {
- return s.Name.Name, nil, nil
- }
- }
- }
-
- // Give up if the package's name is already in use by another object.
- if _, obj := scope.LookupParent(defaultName, token.NoPos); obj != nil {
- return "", nil, fmt.Errorf("import name %q of %q already in use", defaultName, pkgPath)
- }
-
- edits, err := c.importEdits(&importInfo{
- importPath: pkgPath,
- })
- if err != nil {
- return "", nil, err
- }
-
- return defaultName, edits, nil
-}
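The expansion mechanism above is ordinary text/template execution against an argument value whose methods the template body calls. A minimal standalone sketch of that idea (the args struct and template here are illustrative stand-ins, not the real postfixTmplArgs or the real "sort" snippet):

package main

import (
	"fmt"
	"os"
	"text/template"
)

type args struct {
	X string // the receiver text, e.g. "mySlice" in "mySlice.sort!"
}

// VarName mimics the idea of choosing a loop-variable name; here it just
// returns the suggested default.
func (a args) VarName(def string) string { return def }

func main() {
	body := `sort.Slice({{.X}}, func({{.VarName "i"}}, {{.VarName "j"}} int) bool {
	// cursor ends up here
})`
	tmpl := template.Must(template.New("postfix").Parse(body))
	if err := tmpl.Execute(os.Stdout, args{X: "mySlice"}); err != nil {
		fmt.Fprintln(os.Stderr, err)
	}
	fmt.Println()
}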
diff --git a/internal/lsp/source/completion/printf.go b/internal/lsp/source/completion/printf.go
deleted file mode 100644
index ce74af53b..000000000
--- a/internal/lsp/source/completion/printf.go
+++ /dev/null
@@ -1,172 +0,0 @@
-// Copyright 2020 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-package completion
-
-import (
- "go/ast"
- "go/constant"
- "go/types"
- "strconv"
- "strings"
- "unicode/utf8"
-)
-
-// printfArgKind returns the expected objKind when completing a
-// printf-like operand. call is the printf-like function call, and
-// argIdx is the index of call.Args being completed.
-func printfArgKind(info *types.Info, call *ast.CallExpr, argIdx int) objKind {
- // Printf-like function name must end in "f".
- fn := exprObj(info, call.Fun)
- if fn == nil || !strings.HasSuffix(fn.Name(), "f") {
- return kindAny
- }
-
- sig, _ := fn.Type().(*types.Signature)
- if sig == nil {
- return kindAny
- }
-
- // Must be variadic and take at least two params.
- numParams := sig.Params().Len()
- if !sig.Variadic() || numParams < 2 || argIdx < numParams-1 {
- return kindAny
- }
-
- // Param preceding variadic args must be a (format) string.
- if !types.Identical(sig.Params().At(numParams-2).Type(), types.Typ[types.String]) {
- return kindAny
- }
-
- // Format string must be a constant.
- strArg := info.Types[call.Args[numParams-2]].Value
- if strArg == nil || strArg.Kind() != constant.String {
- return kindAny
- }
-
- return formatOperandKind(constant.StringVal(strArg), argIdx-(numParams-1)+1)
-}
-
-// formatOperandKind returns the objKind corresponding to format's
-// operandIdx'th operand.
-func formatOperandKind(format string, operandIdx int) objKind {
- var (
- prevOperandIdx int
- kind = kindAny
- )
- for {
- i := strings.Index(format, "%")
- if i == -1 {
- break
- }
-
- var operands []formatOperand
- format, operands = parsePrintfVerb(format[i+1:], prevOperandIdx)
-
-		// Check if any of this verb's operands correspond to our target
- // operandIdx.
- for _, v := range operands {
- if v.idx == operandIdx {
- if kind == kindAny {
- kind = v.kind
- } else if v.kind != kindAny {
- // If multiple verbs refer to the same operand, take the
- // intersection of their kinds.
- kind &= v.kind
- }
- }
-
- prevOperandIdx = v.idx
- }
- }
- return kind
-}
-
-type formatOperand struct {
- // idx is the one-based printf operand index.
- idx int
- // kind is a mask of expected kinds of objects for this operand.
- kind objKind
-}
-
-// parsePrintfVerb parses the leading printf verb in f. The opening
-// "%" must already be trimmed from f. prevIdx is the previous
-// operand's index, or zero if this is the first verb. The format
-// string is returned with the leading verb removed. Multiple operands
-// can be returned in the case of dynamic widths such as "%*.*f".
-func parsePrintfVerb(f string, prevIdx int) (string, []formatOperand) {
- var verbs []formatOperand
-
- addVerb := func(k objKind) {
- verbs = append(verbs, formatOperand{
- idx: prevIdx + 1,
- kind: k,
- })
- prevIdx++
- }
-
- for len(f) > 0 {
- // Trim first rune off of f so we are guaranteed to make progress.
- r, l := utf8.DecodeRuneInString(f)
- f = f[l:]
-
- // We care about three things:
- // 1. The verb, which maps directly to object kind.
- // 2. Explicit operand indices like "%[2]s".
- // 3. Dynamic widths using "*".
- switch r {
- case '%':
- return f, nil
- case '*':
- addVerb(kindInt)
- continue
- case '[':
- // Parse operand index as in "%[2]s".
- i := strings.Index(f, "]")
- if i == -1 {
- return f, nil
- }
-
- idx, err := strconv.Atoi(f[:i])
- f = f[i+1:]
- if err != nil {
- return f, nil
- }
-
- prevIdx = idx - 1
- continue
- case 'v', 'T':
- addVerb(kindAny)
- case 't':
- addVerb(kindBool)
- case 'c', 'd', 'o', 'O', 'U':
- addVerb(kindInt)
- case 'e', 'E', 'f', 'F', 'g', 'G':
- addVerb(kindFloat | kindComplex)
- case 'b':
- addVerb(kindInt | kindFloat | kindComplex | kindBytes)
- case 'q', 's':
- addVerb(kindString | kindBytes | kindStringer | kindError)
- case 'x', 'X':
- // Omit kindStringer and kindError though technically allowed.
- addVerb(kindString | kindBytes | kindInt | kindFloat | kindComplex)
- case 'p':
- addVerb(kindPtr | kindSlice)
- case 'w':
- addVerb(kindError)
- case '+', '-', '#', ' ', '.', '0', '1', '2', '3', '4', '5', '6', '7', '8', '9':
-			// Flag or numeric width/precision value.
- continue
- default:
- // Assume unrecognized rune is a custom fmt.Formatter verb.
- addVerb(kindAny)
- }
-
- if len(verbs) > 0 {
- break
- }
- }
-
- return f, verbs
-}
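When several verbs refer to the same operand (e.g. "%s %[1]X"), formatOperandKind above intersects their kind masks. A tiny standalone sketch of that bitmask intersection, using illustrative kind constants rather than the real objKind values:

package main

import "fmt"

type objKind uint

const (
	kindInt objKind = 1 << iota
	kindFloat
	kindString
	kindBytes
)

func main() {
	sVerb := kindString | kindBytes           // rough mask for "%s"
	xVerb := kindString | kindBytes | kindInt // rough mask for "%X"
	// Only the kinds accepted by both verbs survive: string|bytes.
	fmt.Printf("%04b\n", sVerb&xVerb)
}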
diff --git a/internal/lsp/source/completion/printf_test.go b/internal/lsp/source/completion/printf_test.go
deleted file mode 100644
index 19d295b8d..000000000
--- a/internal/lsp/source/completion/printf_test.go
+++ /dev/null
@@ -1,72 +0,0 @@
-// Copyright 2020 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-package completion
-
-import (
- "fmt"
- "testing"
-)
-
-func TestFormatOperandKind(t *testing.T) {
- cases := []struct {
- f string
- idx int
- kind objKind
- }{
- {"", 1, kindAny},
- {"%", 1, kindAny},
- {"%%%", 1, kindAny},
- {"%[1", 1, kindAny},
- {"%[?%s", 2, kindAny},
- {"%[abc]v", 1, kindAny},
-
- {"%v", 1, kindAny},
- {"%T", 1, kindAny},
- {"%t", 1, kindBool},
- {"%d", 1, kindInt},
- {"%c", 1, kindInt},
- {"%o", 1, kindInt},
- {"%O", 1, kindInt},
- {"%U", 1, kindInt},
- {"%e", 1, kindFloat | kindComplex},
- {"%E", 1, kindFloat | kindComplex},
- {"%f", 1, kindFloat | kindComplex},
- {"%F", 1, kindFloat | kindComplex},
- {"%g", 1, kindFloat | kindComplex},
- {"%G", 1, kindFloat | kindComplex},
- {"%b", 1, kindInt | kindFloat | kindComplex | kindBytes},
- {"%q", 1, kindString | kindBytes | kindStringer | kindError},
- {"%s", 1, kindString | kindBytes | kindStringer | kindError},
- {"%x", 1, kindString | kindBytes | kindInt | kindFloat | kindComplex},
- {"%X", 1, kindString | kindBytes | kindInt | kindFloat | kindComplex},
- {"%p", 1, kindPtr | kindSlice},
- {"%w", 1, kindError},
-
- {"%1.2f", 1, kindFloat | kindComplex},
- {"%*f", 1, kindInt},
- {"%*f", 2, kindFloat | kindComplex},
- {"%*.*f", 1, kindInt},
- {"%*.*f", 2, kindInt},
- {"%*.*f", 3, kindFloat | kindComplex},
- {"%[3]*.[2]*[1]f", 1, kindFloat | kindComplex},
- {"%[3]*.[2]*[1]f", 2, kindInt},
- {"%[3]*.[2]*[1]f", 3, kindInt},
-
- {"foo %% %d", 1, kindInt},
- {"%#-12.34f", 1, kindFloat | kindComplex},
- {"% d", 1, kindInt},
-
- {"%s %[1]X %d", 1, kindString | kindBytes},
- {"%s %[1]X %d", 2, kindInt},
- }
-
- for _, c := range cases {
- t.Run(fmt.Sprintf("%q#%d", c.f, c.idx), func(t *testing.T) {
- if got := formatOperandKind(c.f, c.idx); got != c.kind {
- t.Errorf("expected %d (%[1]b), got %d (%[2]b)", c.kind, got)
- }
- })
- }
-}
diff --git a/internal/lsp/source/completion/snippet.go b/internal/lsp/source/completion/snippet.go
deleted file mode 100644
index 72c351f94..000000000
--- a/internal/lsp/source/completion/snippet.go
+++ /dev/null
@@ -1,115 +0,0 @@
-// Copyright 2019 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-package completion
-
-import (
- "go/ast"
-
- "golang.org/x/tools/internal/lsp/snippet"
-)
-
-// structFieldSnippet calculates the snippet for struct literal field names.
-func (c *completer) structFieldSnippet(cand candidate, detail string, snip *snippet.Builder) {
- if !c.wantStructFieldCompletions() {
- return
- }
-
- // If we are in a deep completion then we can't be completing a field
- // name (e.g. "Foo{f<>}" completing to "Foo{f.Bar}" should not generate
- // a snippet).
- if len(cand.path) > 0 {
- return
- }
-
- clInfo := c.enclosingCompositeLiteral
-
- // If we are already in a key-value expression, we don't want a snippet.
- if clInfo.kv != nil {
- return
- }
-
- // A plain snippet turns "Foo{Ba<>" into "Foo{Bar: <>".
- snip.WriteText(": ")
- snip.WritePlaceholder(func(b *snippet.Builder) {
- // A placeholder snippet turns "Foo{Ba<>" into "Foo{Bar: <*int*>".
- if c.opts.placeholders {
- b.WriteText(detail)
- }
- })
-
- fset := c.snapshot.FileSet()
-
- // If the cursor position is on a different line from the literal's opening brace,
- // we are in a multiline literal.
- if fset.Position(c.pos).Line != fset.Position(clInfo.cl.Lbrace).Line {
- snip.WriteText(",")
- }
-}
-
-// functionCallSnippet calculates the snippet for function calls.
-func (c *completer) functionCallSnippet(name string, tparams, params []string, snip *snippet.Builder) {
- // If there is no suffix then we need to reuse existing call parens
- // "()" if present. If there is an identifier suffix then we always
- // need to include "()" since we don't overwrite the suffix.
- if c.surrounding != nil && c.surrounding.Suffix() == "" && len(c.path) > 1 {
- // If we are the left side (i.e. "Fun") part of a call expression,
- // we don't want a snippet since there are already parens present.
- switch n := c.path[1].(type) {
- case *ast.CallExpr:
- // The Lparen != Rparen check detects fudged CallExprs we
- // inserted when fixing the AST. In this case, we do still need
- // to insert the calling "()" parens.
- if n.Fun == c.path[0] && n.Lparen != n.Rparen {
- return
- }
- case *ast.SelectorExpr:
- if len(c.path) > 2 {
- if call, ok := c.path[2].(*ast.CallExpr); ok && call.Fun == c.path[1] && call.Lparen != call.Rparen {
- return
- }
- }
- }
- }
-
- snip.WriteText(name)
-
- if len(tparams) > 0 {
- snip.WriteText("[")
- if c.opts.placeholders {
- for i, tp := range tparams {
- if i > 0 {
- snip.WriteText(", ")
- }
- snip.WritePlaceholder(func(b *snippet.Builder) {
- b.WriteText(tp)
- })
- }
- } else {
- snip.WritePlaceholder(nil)
- }
- snip.WriteText("]")
- }
-
- snip.WriteText("(")
-
- if c.opts.placeholders {
- // A placeholder snippet turns "someFun<>" into "someFunc(<*i int*>, *s string*)".
- for i, p := range params {
- if i > 0 {
- snip.WriteText(", ")
- }
- snip.WritePlaceholder(func(b *snippet.Builder) {
- b.WriteText(p)
- })
- }
- } else {
- // A plain snippet turns "someFun<>" into "someFunc(<>)".
- if len(params) > 0 {
- snip.WritePlaceholder(nil)
- }
- }
-
- snip.WriteText(")")
-}
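For context, the builders above ultimately produce LSP snippet text. A small, self-contained sketch of what a function-call snippet looks like in standard LSP snippet syntax (assuming "${n:placeholder}" placeholders; this uses plain strings, not the internal snippet.Builder):

package main

import (
	"fmt"
	"strings"
)

// callSnippet builds "name(${1:p1}, ${2:p2})" with placeholders, or
// "name(${1})" without them.
func callSnippet(name string, params []string, placeholders bool) string {
	var b strings.Builder
	b.WriteString(name + "(")
	if placeholders {
		for i, p := range params {
			if i > 0 {
				b.WriteString(", ")
			}
			fmt.Fprintf(&b, "${%d:%s}", i+1, p)
		}
	} else if len(params) > 0 {
		b.WriteString("${1}")
	}
	b.WriteString(")")
	return b.String()
}

func main() {
	fmt.Println(callSnippet("someFunc", []string{"i int", "s string"}, true))
	fmt.Println(callSnippet("someFunc", []string{"i int", "s string"}, false))
}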
diff --git a/internal/lsp/source/completion/statements.go b/internal/lsp/source/completion/statements.go
deleted file mode 100644
index 3280bb52c..000000000
--- a/internal/lsp/source/completion/statements.go
+++ /dev/null
@@ -1,360 +0,0 @@
-// Copyright 2020 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-package completion
-
-import (
- "fmt"
- "go/ast"
- "go/token"
- "go/types"
-
- "golang.org/x/tools/internal/lsp/protocol"
- "golang.org/x/tools/internal/lsp/snippet"
- "golang.org/x/tools/internal/lsp/source"
-)
-
-// addStatementCandidates adds full statement completion candidates
-// appropriate for the current context.
-func (c *completer) addStatementCandidates() {
- c.addErrCheck()
- c.addAssignAppend()
-}
-
-// addAssignAppend offers a completion candidate of the form:
-//
-// someSlice = append(someSlice, )
-//
-// It will offer the "append" completion in two situations:
-//
-// 1. Position is in RHS of assign, prefix matches "append", and
-// corresponding LHS object is a slice. For example,
-// "foo = ap<>" completes to "foo = append(foo, )".
-//
-// Or
-//
-// 2. Prefix is an ident or selector in an *ast.ExprStmt (i.e.
-// beginning of statement), and our best matching candidate is a
-// slice. For example: "foo.ba" completes to "foo.bar = append(foo.bar, )".
-func (c *completer) addAssignAppend() {
- if len(c.path) < 3 {
- return
- }
-
- ident, _ := c.path[0].(*ast.Ident)
- if ident == nil {
- return
- }
-
- var (
- // sliceText is the full name of our slice object, e.g. "s.abc" in
- // "s.abc = app<>".
- sliceText string
- // needsLHS is true if we need to prepend the LHS slice name and
- // "=" to our candidate.
- needsLHS = false
- fset = c.snapshot.FileSet()
- )
-
- switch n := c.path[1].(type) {
- case *ast.AssignStmt:
- // We are already in an assignment. Make sure our prefix matches "append".
- if c.matcher.Score("append") <= 0 {
- return
- }
-
- exprIdx := exprAtPos(c.pos, n.Rhs)
- if exprIdx == len(n.Rhs) || exprIdx > len(n.Lhs)-1 {
- return
- }
-
- lhsType := c.pkg.GetTypesInfo().TypeOf(n.Lhs[exprIdx])
- if lhsType == nil {
- return
- }
-
- // Make sure our corresponding LHS object is a slice.
- if _, isSlice := lhsType.Underlying().(*types.Slice); !isSlice {
- return
- }
-
-		// The name of our slice is whatever's in the LHS expression.
- sliceText = source.FormatNode(fset, n.Lhs[exprIdx])
- case *ast.SelectorExpr:
- // Make sure we are a selector at the beginning of a statement.
- if _, parentIsExprtStmt := c.path[2].(*ast.ExprStmt); !parentIsExprtStmt {
- return
- }
-
- // So far we only know the first part of our slice name. For
- // example in "s.a<>" we only know our slice begins with "s."
- // since the user could still be typing.
- sliceText = source.FormatNode(fset, n.X) + "."
- needsLHS = true
- case *ast.ExprStmt:
- needsLHS = true
- default:
- return
- }
-
- var (
- label string
- snip snippet.Builder
- score = highScore
- )
-
- if needsLHS {
- // Offer the long form assign + append candidate if our best
- // candidate is a slice.
- bestItem := c.topCandidate()
- if bestItem == nil || bestItem.obj == nil || bestItem.obj.Type() == nil {
- return
- }
-
- if _, isSlice := bestItem.obj.Type().Underlying().(*types.Slice); !isSlice {
- return
- }
-
- // Don't rank the full form assign + append candidate above the
- // slice itself.
- score = bestItem.Score - 0.01
-
- // Fill in rest of sliceText now that we have the object name.
- sliceText += bestItem.Label
-
- // Fill in the candidate's LHS bits.
- label = fmt.Sprintf("%s = ", bestItem.Label)
- snip.WriteText(label)
- }
-
- snip.WriteText(fmt.Sprintf("append(%s, ", sliceText))
- snip.WritePlaceholder(nil)
- snip.WriteText(")")
-
- c.items = append(c.items, CompletionItem{
- Label: label + fmt.Sprintf("append(%s, )", sliceText),
- Kind: protocol.FunctionCompletion,
- Score: score,
- snippet: &snip,
- })
-}
-
-// topCandidate returns the strictly highest scoring candidate
-// collected so far. If the top two candidates have the same score,
-// nil is returned.
-func (c *completer) topCandidate() *CompletionItem {
- var bestItem, secondBestItem *CompletionItem
- for i := range c.items {
- if bestItem == nil || c.items[i].Score > bestItem.Score {
- bestItem = &c.items[i]
- } else if secondBestItem == nil || c.items[i].Score > secondBestItem.Score {
- secondBestItem = &c.items[i]
- }
- }
-
- // If secondBestItem has the same score, bestItem isn't
- // the strict best.
- if secondBestItem != nil && secondBestItem.Score == bestItem.Score {
- return nil
- }
-
- return bestItem
-}
-
-// addErrCheck offers a completion candidate of the form:
-//
-// if err != nil {
-// return nil, err
-// }
-//
-// In the case of test functions, it offers a completion candidate of the form:
-//
-// if err != nil {
-// t.Fatal(err)
-// }
-//
-// The position must be in a function that returns an error, and the
-// statement preceding the position must be an assignment where the
-// final LHS object is an error. addErrCheck will synthesize
-// zero values as necessary to make the return statement valid.
-func (c *completer) addErrCheck() {
- if len(c.path) < 2 || c.enclosingFunc == nil || !c.opts.placeholders {
- return
- }
-
- var (
- errorType = types.Universe.Lookup("error").Type()
- result = c.enclosingFunc.sig.Results()
- testVar = getTestVar(c.enclosingFunc, c.pkg)
- isTest = testVar != ""
- doesNotReturnErr = result.Len() == 0 || !types.Identical(result.At(result.Len()-1).Type(), errorType)
- )
- // Make sure our enclosing function is a Test func or returns an error.
- if !isTest && doesNotReturnErr {
- return
- }
-
- prevLine := prevStmt(c.pos, c.path)
- if prevLine == nil {
- return
- }
-
-	// Make sure our preceding statement was an assignment.
- assign, _ := prevLine.(*ast.AssignStmt)
- if assign == nil || len(assign.Lhs) == 0 {
- return
- }
-
- lastAssignee := assign.Lhs[len(assign.Lhs)-1]
-
- // Make sure the final assignee is an error.
- if !types.Identical(c.pkg.GetTypesInfo().TypeOf(lastAssignee), errorType) {
- return
- }
-
- var (
- // errVar is e.g. "err" in "foo, err := bar()".
- errVar = source.FormatNode(c.snapshot.FileSet(), lastAssignee)
-
- // Whether we need to include the "if" keyword in our candidate.
- needsIf = true
- )
-
- // If the returned error from the previous statement is "_", it is not a real object.
- // If we don't have an error, and the function signature takes a testing.TB that is either ignored
- // or an "_", then we also can't call t.Fatal(err).
- if errVar == "_" {
- return
- }
-
- // Below we try to detect if the user has already started typing "if
- // err" so we can replace what they've typed with our complete
- // statement.
- switch n := c.path[0].(type) {
- case *ast.Ident:
- switch c.path[1].(type) {
- case *ast.ExprStmt:
- // This handles:
- //
- // f, err := os.Open("foo")
- // i<>
-
- // Make sure they are typing "if".
- if c.matcher.Score("if") <= 0 {
- return
- }
- case *ast.IfStmt:
- // This handles:
- //
- // f, err := os.Open("foo")
- // if er<>
-
- // Make sure they are typing the error's name.
- if c.matcher.Score(errVar) <= 0 {
- return
- }
-
- needsIf = false
- default:
- return
- }
- case *ast.IfStmt:
- // This handles:
- //
- // f, err := os.Open("foo")
- // if <>
-
- // Avoid false positives by ensuring the if's cond is a bad
- // expression. For example, don't offer the completion in cases
- // like "if <> somethingElse".
- if _, bad := n.Cond.(*ast.BadExpr); !bad {
- return
- }
-
- // If "if" is our direct prefix, we need to include it in our
- // candidate since the existing "if" will be overwritten.
- needsIf = c.pos == n.Pos()+token.Pos(len("if"))
- }
-
- // Build up a snippet that looks like:
- //
- // if err != nil {
- // return <zero value>, ..., ${1:err}
- // }
- //
- // We make the error a placeholder so it is easy to alter the error.
- var snip snippet.Builder
- if needsIf {
- snip.WriteText("if ")
- }
- snip.WriteText(fmt.Sprintf("%s != nil {\n\t", errVar))
-
- var label string
- if isTest {
- snip.WriteText(fmt.Sprintf("%s.Fatal(%s)", testVar, errVar))
- label = fmt.Sprintf("%[1]s != nil { %[2]s.Fatal(%[1]s) }", errVar, testVar)
- } else {
- snip.WriteText("return ")
- for i := 0; i < result.Len()-1; i++ {
- snip.WriteText(formatZeroValue(result.At(i).Type(), c.qf))
- snip.WriteText(", ")
- }
- snip.WritePlaceholder(func(b *snippet.Builder) {
- b.WriteText(errVar)
- })
- label = fmt.Sprintf("%[1]s != nil { return %[1]s }", errVar)
- }
-
- snip.WriteText("\n}")
-
- if needsIf {
- label = "if " + label
- }
-
- c.items = append(c.items, CompletionItem{
- Label: label,
- // There doesn't seem to be a more appropriate kind.
- Kind: protocol.KeywordCompletion,
- Score: highScore,
- snippet: &snip,
- })
-}
-
-// getTestVar checks the function signature's input parameters and returns
-// the name of the first parameter that implements "testing.TB". For example,
-// func someFunc(t *testing.T) returns the string "t", and func someFunc(b *testing.B)
-// returns "b". An empty string indicates that the function signature does not
-// take a testing.TB parameter, or that the parameter is ignored, as in
-// func someFunc(*testing.T).
-func getTestVar(enclosingFunc *funcInfo, pkg source.Package) string {
- if enclosingFunc == nil || enclosingFunc.sig == nil {
- return ""
- }
-
- sig := enclosingFunc.sig
- for i := 0; i < sig.Params().Len(); i++ {
- param := sig.Params().At(i)
- if param.Name() == "_" {
- continue
- }
- testingPkg, err := pkg.GetImport("testing")
- if err != nil {
- continue
- }
- tbObj := testingPkg.GetTypes().Scope().Lookup("TB")
- if tbObj == nil {
- continue
- }
- iface, ok := tbObj.Type().Underlying().(*types.Interface)
- if !ok {
- continue
- }
- if !types.Implements(param.Type(), iface) {
- continue
- }
- return param.Name()
- }
-
- return ""
-}
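
The snippet-building code above turns a dangling "if"/"err" typed after an error-returning assignment into a complete error check, calling t.Fatal(err) in tests and otherwise returning zero values plus the error. The following before/after sketch is not part of this diff; the file, function, and variable names are invented, and it only illustrates what the expanded completion looks like for a function returning ([]byte, error):

package demo

import (
	"io"
	"os"
)

// Before accepting the completion, the user has typed only a prefix on the
// line after the assignment:
//
//	f, err := os.Open("foo")
//	if er<>
//
// After accepting it, the check below is inserted; "nil" comes from the
// zero value of the []byte result, and the trailing "err" is a snippet
// placeholder so it is easy to wrap or replace.
func readFoo() ([]byte, error) {
	f, err := os.Open("foo")
	if err != nil {
		return nil, err
	}
	defer f.Close()
	return io.ReadAll(f)
}
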
diff --git a/internal/lsp/source/completion/util.go b/internal/lsp/source/completion/util.go
deleted file mode 100644
index 505c7e256..000000000
--- a/internal/lsp/source/completion/util.go
+++ /dev/null
@@ -1,326 +0,0 @@
-// Copyright 2020 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-package completion
-
-import (
- "go/ast"
- "go/token"
- "go/types"
-
- "golang.org/x/tools/internal/lsp/diff"
- "golang.org/x/tools/internal/lsp/protocol"
- "golang.org/x/tools/internal/lsp/source"
-)
-
-// exprAtPos returns the index of the expression containing pos.
-func exprAtPos(pos token.Pos, args []ast.Expr) int {
- for i, expr := range args {
- if expr.Pos() <= pos && pos <= expr.End() {
- return i
- }
- }
- return len(args)
-}
-
-// eachField invokes fn for each field that can be selected from a
-// value of type T.
-func eachField(T types.Type, fn func(*types.Var)) {
- // TODO(adonovan): this algorithm doesn't exclude ambiguous
- // selections that match more than one field/method.
- // types.NewSelectionSet should do that for us.
-
- // for termination on recursive types
- var seen map[*types.Struct]bool
-
- var visit func(T types.Type)
- visit = func(T types.Type) {
- if T, ok := source.Deref(T).Underlying().(*types.Struct); ok {
- if seen[T] {
- return
- }
-
- for i := 0; i < T.NumFields(); i++ {
- f := T.Field(i)
- fn(f)
- if f.Anonymous() {
- if seen == nil {
- // Lazily create "seen" since it is only needed for
- // embedded structs.
- seen = make(map[*types.Struct]bool)
- }
- seen[T] = true
- visit(f.Type())
- }
- }
- }
- }
- visit(T)
-}
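
eachField flattens promoted fields from embedded structs while guarding against recursive embedding. The program below is a minimal, self-contained sketch of that traversal: deref and eachField are simplified copies of the helpers in this file (not the originals), and the example package source is invented.

package main

import (
	"fmt"
	"go/ast"
	"go/parser"
	"go/token"
	"go/types"
)

// deref and eachField are simplified standalone copies of the helpers above,
// so the sketch compiles on its own.
func deref(T types.Type) types.Type {
	if p, ok := T.Underlying().(*types.Pointer); ok {
		return deref(p.Elem())
	}
	return T
}

func eachField(T types.Type, fn func(*types.Var)) {
	seen := map[*types.Struct]bool{} // guards against recursive embedding
	var visit func(T types.Type)
	visit = func(T types.Type) {
		s, ok := deref(T).Underlying().(*types.Struct)
		if !ok || seen[s] {
			return
		}
		seen[s] = true
		for i := 0; i < s.NumFields(); i++ {
			f := s.Field(i)
			fn(f)
			if f.Anonymous() {
				visit(f.Type())
			}
		}
	}
	visit(T)
}

func main() {
	const src = `package p
type Base struct{ ID int }
type User struct {
	Base
	Name string
}`
	fset := token.NewFileSet()
	file, err := parser.ParseFile(fset, "p.go", src, 0)
	if err != nil {
		panic(err)
	}
	pkg, err := (&types.Config{}).Check("p", fset, []*ast.File{file}, nil)
	if err != nil {
		panic(err)
	}
	user := pkg.Scope().Lookup("User").Type()
	eachField(user, func(f *types.Var) {
		fmt.Println(f.Name()) // Base, ID, Name: embedded fields are flattened
	})
}
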
-
-// typeIsValid reports whether typ doesn't contain any Invalid types.
-func typeIsValid(typ types.Type) bool {
- // Check named types separately, because we don't want
- // to call Underlying() on them to avoid problems with recursive types.
- if _, ok := typ.(*types.Named); ok {
- return true
- }
-
- switch typ := typ.Underlying().(type) {
- case *types.Basic:
- return typ.Kind() != types.Invalid
- case *types.Array:
- return typeIsValid(typ.Elem())
- case *types.Slice:
- return typeIsValid(typ.Elem())
- case *types.Pointer:
- return typeIsValid(typ.Elem())
- case *types.Map:
- return typeIsValid(typ.Key()) && typeIsValid(typ.Elem())
- case *types.Chan:
- return typeIsValid(typ.Elem())
- case *types.Signature:
- return typeIsValid(typ.Params()) && typeIsValid(typ.Results())
- case *types.Tuple:
- for i := 0; i < typ.Len(); i++ {
- if !typeIsValid(typ.At(i).Type()) {
- return false
- }
- }
- return true
- case *types.Struct, *types.Interface:
- // Don't bother checking structs, interfaces for validity.
- return true
- default:
- return false
- }
-}
-
-// resolveInvalid traverses the node of the AST that defines the scope
-// containing the declaration of obj, and attempts to find a user-friendly
-// name for its invalid type. The resulting Object and its Type are fake.
-func resolveInvalid(fset *token.FileSet, obj types.Object, node ast.Node, info *types.Info) types.Object {
- var resultExpr ast.Expr
- ast.Inspect(node, func(node ast.Node) bool {
- switch n := node.(type) {
- case *ast.ValueSpec:
- for _, name := range n.Names {
- if info.Defs[name] == obj {
- resultExpr = n.Type
- }
- }
- return false
- case *ast.Field: // This case handles parameters and results of a FuncDecl or FuncLit.
- for _, name := range n.Names {
- if info.Defs[name] == obj {
- resultExpr = n.Type
- }
- }
- return false
- default:
- return true
- }
- })
- // Construct a fake type for the object and return a fake object with this type.
- typename := source.FormatNode(fset, resultExpr)
- typ := types.NewNamed(types.NewTypeName(token.NoPos, obj.Pkg(), typename, nil), types.Typ[types.Invalid], nil)
- return types.NewVar(obj.Pos(), obj.Pkg(), obj.Name(), typ)
-}
-
-func isPointer(T types.Type) bool {
- _, ok := T.(*types.Pointer)
- return ok
-}
-
-func isVar(obj types.Object) bool {
- _, ok := obj.(*types.Var)
- return ok
-}
-
-func isTypeName(obj types.Object) bool {
- _, ok := obj.(*types.TypeName)
- return ok
-}
-
-func isFunc(obj types.Object) bool {
- _, ok := obj.(*types.Func)
- return ok
-}
-
-func isEmptyInterface(T types.Type) bool {
- intf, _ := T.(*types.Interface)
- return intf != nil && intf.NumMethods() == 0
-}
-
-func isUntyped(T types.Type) bool {
- if basic, ok := T.(*types.Basic); ok {
- return basic.Info()&types.IsUntyped > 0
- }
- return false
-}
-
-func isPkgName(obj types.Object) bool {
- _, ok := obj.(*types.PkgName)
- return ok
-}
-
-func isASTFile(n ast.Node) bool {
- _, ok := n.(*ast.File)
- return ok
-}
-
-func deslice(T types.Type) types.Type {
- if slice, ok := T.Underlying().(*types.Slice); ok {
- return slice.Elem()
- }
- return nil
-}
-
-// enclosingSelector returns the enclosing *ast.SelectorExpr when pos is in the
-// selector.
-func enclosingSelector(path []ast.Node, pos token.Pos) *ast.SelectorExpr {
- if len(path) == 0 {
- return nil
- }
-
- if sel, ok := path[0].(*ast.SelectorExpr); ok {
- return sel
- }
-
- if _, ok := path[0].(*ast.Ident); ok && len(path) > 1 {
- if sel, ok := path[1].(*ast.SelectorExpr); ok && pos >= sel.Sel.Pos() {
- return sel
- }
- }
-
- return nil
-}
-
-// enclosingDeclLHS returns LHS idents from containing value spec or
-// assign statement.
-func enclosingDeclLHS(path []ast.Node) []*ast.Ident {
- for _, n := range path {
- switch n := n.(type) {
- case *ast.ValueSpec:
- return n.Names
- case *ast.AssignStmt:
- ids := make([]*ast.Ident, 0, len(n.Lhs))
- for _, e := range n.Lhs {
- if id, ok := e.(*ast.Ident); ok {
- ids = append(ids, id)
- }
- }
- return ids
- }
- }
-
- return nil
-}
-
-// exprObj returns the types.Object associated with the *ast.Ident or
-// *ast.SelectorExpr e.
-func exprObj(info *types.Info, e ast.Expr) types.Object {
- var ident *ast.Ident
- switch expr := e.(type) {
- case *ast.Ident:
- ident = expr
- case *ast.SelectorExpr:
- ident = expr.Sel
- default:
- return nil
- }
-
- return info.ObjectOf(ident)
-}
-
-// typeConversion returns the type being converted to if call is a type
-// conversion expression.
-func typeConversion(call *ast.CallExpr, info *types.Info) types.Type {
- // Type conversion (e.g. "float64(foo)").
- if fun, _ := exprObj(info, call.Fun).(*types.TypeName); fun != nil {
- return fun.Type()
- }
-
- return nil
-}
-
-// fieldsAccessible returns whether s has at least one field accessible by p.
-func fieldsAccessible(s *types.Struct, p *types.Package) bool {
- for i := 0; i < s.NumFields(); i++ {
- f := s.Field(i)
- if f.Exported() || f.Pkg() == p {
- return true
- }
- }
- return false
-}
-
-// prevStmt returns the statement that precedes the statement containing pos.
-// For example:
-//
-// foo := 1
-// bar(1 + 2<>)
-//
-// If "<>" is pos, prevStmt returns "foo := 1"
-func prevStmt(pos token.Pos, path []ast.Node) ast.Stmt {
- var blockLines []ast.Stmt
- for i := 0; i < len(path) && blockLines == nil; i++ {
- switch n := path[i].(type) {
- case *ast.BlockStmt:
- blockLines = n.List
- case *ast.CommClause:
- blockLines = n.Body
- case *ast.CaseClause:
- blockLines = n.Body
- }
- }
-
- for i := len(blockLines) - 1; i >= 0; i-- {
- if blockLines[i].End() < pos {
- return blockLines[i]
- }
- }
-
- return nil
-}
-
-// formatZeroValue produces Go code representing the zero value of T. It
-// returns the empty string if T is invalid.
-func formatZeroValue(T types.Type, qf types.Qualifier) string {
- switch u := T.Underlying().(type) {
- case *types.Basic:
- switch {
- case u.Info()&types.IsNumeric > 0:
- return "0"
- case u.Info()&types.IsString > 0:
- return `""`
- case u.Info()&types.IsBoolean > 0:
- return "false"
- default:
- return ""
- }
- case *types.Pointer, *types.Interface, *types.Chan, *types.Map, *types.Slice, *types.Signature:
- return "nil"
- default:
- return types.TypeString(T, qf) + "{}"
- }
-}
-
-// isBasicKind returns whether t is a basic type of kind k.
-func isBasicKind(t types.Type, k types.BasicInfo) bool {
- b, _ := t.Underlying().(*types.Basic)
- return b != nil && b.Info()&k > 0
-}
-
-func (c *completer) editText(from, to token.Pos, newText string) ([]protocol.TextEdit, error) {
- rng := source.NewMappedRange(c.snapshot.FileSet(), c.mapper, from, to)
- spn, err := rng.Span()
- if err != nil {
- return nil, err
- }
- return source.ToProtocolEdits(c.mapper, []diff.TextEdit{{
- Span: spn,
- NewText: newText,
- }})
-}
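
The exprObj/typeConversion helpers above distinguish type conversions such as float64(foo) from ordinary calls by checking whether the call's function resolves to a *types.TypeName. The program below is a self-contained usage sketch, not part of the original package: it uses standalone copies of the two helpers and an invented example source.

package main

import (
	"fmt"
	"go/ast"
	"go/parser"
	"go/token"
	"go/types"
)

// Standalone copies of exprObj and typeConversion from above, so the sketch
// compiles on its own.
func exprObj(info *types.Info, e ast.Expr) types.Object {
	switch expr := e.(type) {
	case *ast.Ident:
		return info.ObjectOf(expr)
	case *ast.SelectorExpr:
		return info.ObjectOf(expr.Sel)
	}
	return nil
}

func typeConversion(call *ast.CallExpr, info *types.Info) types.Type {
	if fun, _ := exprObj(info, call.Fun).(*types.TypeName); fun != nil {
		return fun.Type()
	}
	return nil
}

func main() {
	const src = `package p
type celsius float64
var c = celsius(20)   // type conversion
var n = len("twenty") // ordinary call
`
	fset := token.NewFileSet()
	file, err := parser.ParseFile(fset, "p.go", src, 0)
	if err != nil {
		panic(err)
	}
	info := &types.Info{
		Uses: map[*ast.Ident]types.Object{},
		Defs: map[*ast.Ident]types.Object{},
	}
	if _, err := (&types.Config{}).Check("p", fset, []*ast.File{file}, info); err != nil {
		panic(err)
	}
	ast.Inspect(file, func(n ast.Node) bool {
		if call, ok := n.(*ast.CallExpr); ok {
			// Prints "p.celsius" for the conversion and "<nil>" for len(...).
			fmt.Printf("%-14s -> %v\n", types.ExprString(call), typeConversion(call, info))
		}
		return true
	})
}
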
diff --git a/internal/lsp/source/completion/util_test.go b/internal/lsp/source/completion/util_test.go
deleted file mode 100644
index c94d279fb..000000000
--- a/internal/lsp/source/completion/util_test.go
+++ /dev/null
@@ -1,28 +0,0 @@
-// Copyright 2020 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-package completion
-
-import (
- "go/types"
- "testing"
-)
-
-func TestFormatZeroValue(t *testing.T) {
- tests := []struct {
- typ types.Type
- want string
- }{
- {types.Typ[types.String], `""`},
- {types.Typ[types.Byte], "0"},
- {types.Typ[types.Invalid], ""},
- {types.Universe.Lookup("error").Type(), "nil"},
- }
-
- for _, test := range tests {
- if got := formatZeroValue(test.typ, nil); got != test.want {
- t.Errorf("formatZeroValue(%v) = %q, want %q", test.typ, got, test.want)
- }
- }
-}
diff --git a/internal/lsp/source/diagnostics.go b/internal/lsp/source/diagnostics.go
deleted file mode 100644
index e393c2f94..000000000
--- a/internal/lsp/source/diagnostics.go
+++ /dev/null
@@ -1,84 +0,0 @@
-// Copyright 2018 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-package source
-
-import (
- "context"
-
- "golang.org/x/tools/internal/lsp/protocol"
- "golang.org/x/tools/internal/span"
-)
-
-type SuggestedFix struct {
- Title string
- Edits map[span.URI][]protocol.TextEdit
- Command *protocol.Command
- ActionKind protocol.CodeActionKind
-}
-
-type RelatedInformation struct {
- URI span.URI
- Range protocol.Range
- Message string
-}
-
-func Analyze(ctx context.Context, snapshot Snapshot, pkg Package, includeConvenience bool) (map[span.URI][]*Diagnostic, error) {
- // Exit early if the context has been canceled. This also protects us
- // from a race on Options, see golang/go#36699.
- if ctx.Err() != nil {
- return nil, ctx.Err()
- }
-
- categories := []map[string]*Analyzer{}
- if includeConvenience {
- categories = append(categories, snapshot.View().Options().ConvenienceAnalyzers)
- }
- // If we had type errors, don't run any other analyzers.
- if !pkg.HasTypeErrors() {
- categories = append(categories, snapshot.View().Options().DefaultAnalyzers, snapshot.View().Options().StaticcheckAnalyzers)
- }
- var analyzers []*Analyzer
- for _, cat := range categories {
- for _, a := range cat {
- analyzers = append(analyzers, a)
- }
- }
-
- analysisDiagnostics, err := snapshot.Analyze(ctx, pkg.ID(), analyzers)
- if err != nil {
- return nil, err
- }
-
- reports := map[span.URI][]*Diagnostic{}
- // Report diagnostics and errors from root analyzers.
- for _, diag := range analysisDiagnostics {
- reports[diag.URI] = append(reports[diag.URI], diag)
- }
- return reports, nil
-}
-
-func FileDiagnostics(ctx context.Context, snapshot Snapshot, uri span.URI) (VersionedFileIdentity, []*Diagnostic, error) {
- fh, err := snapshot.GetVersionedFile(ctx, uri)
- if err != nil {
- return VersionedFileIdentity{}, nil, err
- }
- pkg, _, err := GetParsedFile(ctx, snapshot, fh, NarrowestPackage)
- if err != nil {
- return VersionedFileIdentity{}, nil, err
- }
- diagnostics, err := snapshot.DiagnosePackage(ctx, pkg)
- if err != nil {
- return VersionedFileIdentity{}, nil, err
- }
- fileDiags := diagnostics[fh.URI()]
- if !pkg.HasListOrParseErrors() {
- analysisDiags, err := Analyze(ctx, snapshot, pkg, false)
- if err != nil {
- return VersionedFileIdentity{}, nil, err
- }
- fileDiags = append(fileDiags, analysisDiags[fh.URI()]...)
- }
- return fh.VersionedFileIdentity(), fileDiags, nil
-}
diff --git a/internal/lsp/source/extract.go b/internal/lsp/source/extract.go
deleted file mode 100644
index 43b414add..000000000
--- a/internal/lsp/source/extract.go
+++ /dev/null
@@ -1,1307 +0,0 @@
-// Copyright 2020 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-package source
-
-import (
- "bytes"
- "fmt"
- "go/ast"
- "go/format"
- "go/parser"
- "go/token"
- "go/types"
- "strings"
- "unicode"
-
- "golang.org/x/tools/go/analysis"
- "golang.org/x/tools/go/ast/astutil"
- "golang.org/x/tools/internal/analysisinternal"
- "golang.org/x/tools/internal/span"
-)
-
-func extractVariable(fset *token.FileSet, rng span.Range, src []byte, file *ast.File, _ *types.Package, info *types.Info) (*analysis.SuggestedFix, error) {
- expr, path, ok, err := CanExtractVariable(rng, file)
- if !ok {
- return nil, fmt.Errorf("extractVariable: cannot extract %s: %v", fset.Position(rng.Start), err)
- }
-
- // Create new AST node for extracted code.
- var lhsNames []string
- switch expr := expr.(type) {
- // TODO: stricter rules for selectorExpr.
- case *ast.BasicLit, *ast.CompositeLit, *ast.IndexExpr, *ast.SliceExpr,
- *ast.UnaryExpr, *ast.BinaryExpr, *ast.SelectorExpr:
- lhsName, _ := generateAvailableIdentifier(expr.Pos(), file, path, info, "x", 0)
- lhsNames = append(lhsNames, lhsName)
- case *ast.CallExpr:
- tup, ok := info.TypeOf(expr).(*types.Tuple)
- if !ok {
- // If the call expression only has one return value, we can treat it the
- // same as our standard extract variable case.
- lhsName, _ := generateAvailableIdentifier(expr.Pos(), file, path, info, "x", 0)
- lhsNames = append(lhsNames, lhsName)
- break
- }
- idx := 0
- for i := 0; i < tup.Len(); i++ {
- // Generate a unique variable for each return value.
- var lhsName string
- lhsName, idx = generateAvailableIdentifier(expr.Pos(), file, path, info, "x", idx)
- lhsNames = append(lhsNames, lhsName)
- }
- default:
- return nil, fmt.Errorf("cannot extract %T", expr)
- }
-
- insertBeforeStmt := analysisinternal.StmtToInsertVarBefore(path)
- if insertBeforeStmt == nil {
- return nil, fmt.Errorf("cannot find location to insert extraction")
- }
- tok := fset.File(expr.Pos())
- if tok == nil {
- return nil, fmt.Errorf("no file for pos %v", fset.Position(file.Pos()))
- }
- indent, err := calculateIndentation(src, tok, insertBeforeStmt)
- if err != nil {
- return nil, err
- }
- newLineIndent := "\n" + indent
-
- lhs := strings.Join(lhsNames, ", ")
- assignStmt := &ast.AssignStmt{
- Lhs: []ast.Expr{ast.NewIdent(lhs)},
- Tok: token.DEFINE,
- Rhs: []ast.Expr{expr},
- }
- var buf bytes.Buffer
- if err := format.Node(&buf, fset, assignStmt); err != nil {
- return nil, err
- }
- assignment := strings.ReplaceAll(buf.String(), "\n", newLineIndent) + newLineIndent
-
- return &analysis.SuggestedFix{
- TextEdits: []analysis.TextEdit{
- {
- Pos: insertBeforeStmt.Pos(),
- End: insertBeforeStmt.Pos(),
- NewText: []byte(assignment),
- },
- {
- Pos: rng.Start,
- End: rng.End,
- NewText: []byte(lhs),
- },
- },
- }, nil
-}
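
The edits above amount to inserting an "x := <expr>" assignment before the enclosing statement and replacing the selected expression with the new identifier. As a hypothetical before/after illustration (names invented, not part of this diff), extracting the selected call expression would produce:

package demo

import "strings"

// Before: the user selects the expression strings.Repeat("-", 80) and asks
// for "extract variable".
func header() string {
	return "title\n" + strings.Repeat("-", 80)
}

// After: the assignment is inserted before the enclosing return statement and
// the selection is replaced by the generated identifier (prefix "x").
func headerExtracted() string {
	x := strings.Repeat("-", 80)
	return "title\n" + x
}
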
-
-// CanExtractVariable reports whether the code in the given range can be
-// extracted to a variable.
-func CanExtractVariable(rng span.Range, file *ast.File) (ast.Expr, []ast.Node, bool, error) {
- if rng.Start == rng.End {
- return nil, nil, false, fmt.Errorf("start and end are equal")
- }
- path, _ := astutil.PathEnclosingInterval(file, rng.Start, rng.End)
- if len(path) == 0 {
- return nil, nil, false, fmt.Errorf("no path enclosing interval")
- }
- for _, n := range path {
- if _, ok := n.(*ast.ImportSpec); ok {
- return nil, nil, false, fmt.Errorf("cannot extract variable in an import block")
- }
- }
- node := path[0]
- if rng.Start != node.Pos() || rng.End != node.End() {
- return nil, nil, false, fmt.Errorf("range does not map to an AST node")
- }
- expr, ok := node.(ast.Expr)
- if !ok {
- return nil, nil, false, fmt.Errorf("node is not an expression")
- }
- switch expr.(type) {
- case *ast.BasicLit, *ast.CompositeLit, *ast.IndexExpr, *ast.CallExpr,
- *ast.SliceExpr, *ast.UnaryExpr, *ast.BinaryExpr, *ast.SelectorExpr:
- return expr, path, true, nil
- }
- return nil, nil, false, fmt.Errorf("cannot extract an %T to a variable", expr)
-}
-
-// calculateIndentation calculates the indentation for an insertion.
-// When inserting lines of code, we must ensure that the lines have consistent
-// formatting (i.e. the proper indentation). To do so, we observe the indentation on the
-// line of code on which the insertion occurs.
-func calculateIndentation(content []byte, tok *token.File, insertBeforeStmt ast.Node) (string, error) {
- line := tok.Line(insertBeforeStmt.Pos())
- lineOffset, err := Offset(tok, tok.LineStart(line))
- if err != nil {
- return "", err
- }
- stmtOffset, err := Offset(tok, insertBeforeStmt.Pos())
- if err != nil {
- return "", err
- }
- return string(content[lineOffset:stmtOffset]), nil
-}
-
-// generateAvailableIdentifier adjusts the new identifier name until there are no collisions in scope.
-// Possible collisions include other function and variable names. Returns the next index to check for prefix.
-func generateAvailableIdentifier(pos token.Pos, file *ast.File, path []ast.Node, info *types.Info, prefix string, idx int) (string, int) {
- scopes := CollectScopes(info, path, pos)
- return generateIdentifier(idx, prefix, func(name string) bool {
- return file.Scope.Lookup(name) != nil || !isValidName(name, scopes)
- })
-}
-
-func generateIdentifier(idx int, prefix string, hasCollision func(string) bool) (string, int) {
- name := prefix
- if idx != 0 {
- name += fmt.Sprintf("%d", idx)
- }
- for hasCollision(name) {
- idx++
- name = fmt.Sprintf("%v%d", prefix, idx)
- }
- return name, idx + 1
-}
-
-// isValidName checks for variable collision in scope.
-func isValidName(name string, scopes []*types.Scope) bool {
- for _, scope := range scopes {
- if scope == nil {
- continue
- }
- if scope.Lookup(name) != nil {
- return false
- }
- }
- return true
-}
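
generateIdentifier simply appends an increasing numeric suffix until the collision callback reports the name as free, and generateAvailableIdentifier plugs in a scope-based collision check. A small standalone demo follows: the function body is copied from above so the example runs on its own, while the set of taken names is invented.

package main

import "fmt"

// Copy of generateIdentifier from the extract.go above, so the demo is
// self-contained.
func generateIdentifier(idx int, prefix string, hasCollision func(string) bool) (string, int) {
	name := prefix
	if idx != 0 {
		name += fmt.Sprintf("%d", idx)
	}
	for hasCollision(name) {
		idx++
		name = fmt.Sprintf("%v%d", prefix, idx)
	}
	return name, idx + 1
}

func main() {
	// Pretend "x" and "x1" are already in scope.
	taken := map[string]bool{"x": true, "x1": true}
	name, next := generateIdentifier(0, "x", func(n string) bool { return taken[n] })
	fmt.Println(name, next) // prints "x2 3"; 3 is the next index to try
}
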
-
-// returnVariable keeps track of the information we need to properly introduce a new variable
-// that we will return in the extracted function.
-type returnVariable struct {
- // name is the identifier that is used on the left-hand side of the call to
- // the extracted function.
- name ast.Expr
- // decl is the declaration of the variable. It is used in the type signature of the
- // extracted function and for variable declarations.
- decl *ast.Field
- // zeroVal is the "zero value" of the type of the variable. It is used in a return
- // statement in the extracted function.
- zeroVal ast.Expr
-}
-
-// extractMethod refactors the selected block of code into a new method.
-func extractMethod(fset *token.FileSet, rng span.Range, src []byte, file *ast.File, pkg *types.Package, info *types.Info) (*analysis.SuggestedFix, error) {
- return extractFunctionMethod(fset, rng, src, file, pkg, info, true)
-}
-
-// extractFunction refactors the selected block of code into a new function.
-func extractFunction(fset *token.FileSet, rng span.Range, src []byte, file *ast.File, pkg *types.Package, info *types.Info) (*analysis.SuggestedFix, error) {
- return extractFunctionMethod(fset, rng, src, file, pkg, info, false)
-}
-
-// extractFunctionMethod refactors the selected block of code into a new function/method.
-// It also replaces the selected block of code with a call to the extracted
-// function. First, we manually adjust the selection range. We remove trailing
-// and leading whitespace characters to ensure the range is precisely bounded
-// by AST nodes. Next, we determine the variables that will be the parameters
-// and return values of the extracted function/method. Lastly, we construct the call
-// of the function/method and insert this call as well as the extracted function/method into
-// their proper locations.
-func extractFunctionMethod(fset *token.FileSet, rng span.Range, src []byte, file *ast.File, pkg *types.Package, info *types.Info, isMethod bool) (*analysis.SuggestedFix, error) {
- errorPrefix := "extractFunction"
- if isMethod {
- errorPrefix = "extractMethod"
- }
- p, ok, methodOk, err := CanExtractFunction(fset, rng, src, file)
- if (!ok && !isMethod) || (!methodOk && isMethod) {
- return nil, fmt.Errorf("%s: cannot extract %s: %v", errorPrefix,
- fset.Position(rng.Start), err)
- }
- tok, path, rng, outer, start := p.tok, p.path, p.rng, p.outer, p.start
- fileScope := info.Scopes[file]
- if fileScope == nil {
- return nil, fmt.Errorf("%s: file scope is empty", errorPrefix)
- }
- pkgScope := fileScope.Parent()
- if pkgScope == nil {
- return nil, fmt.Errorf("%s: package scope is empty", errorPrefix)
- }
-
- // A return statement is non-nested if its parent node is equal to the parent node
- // of the first node in the selection. These cases must be handled separately because
- // non-nested return statements are guaranteed to execute.
- var retStmts []*ast.ReturnStmt
- var hasNonNestedReturn bool
- startParent := findParent(outer, start)
- ast.Inspect(outer, func(n ast.Node) bool {
- if n == nil {
- return false
- }
- if n.Pos() < rng.Start || n.End() > rng.End {
- return n.Pos() <= rng.End
- }
- ret, ok := n.(*ast.ReturnStmt)
- if !ok {
- return true
- }
- if findParent(outer, n) == startParent {
- hasNonNestedReturn = true
- }
- retStmts = append(retStmts, ret)
- return false
- })
- containsReturnStatement := len(retStmts) > 0
-
- // Now that we have determined the correct range for the selection block,
- // we must determine the signature of the extracted function. We will then replace
- // the block with an assignment statement that calls the extracted function with
- // the appropriate parameters and return values.
- variables, err := collectFreeVars(info, file, fileScope, pkgScope, rng, path[0])
- if err != nil {
- return nil, err
- }
-
- var (
- receiverUsed bool
- receiver *ast.Field
- receiverName string
- receiverObj types.Object
- )
- if isMethod {
- if outer == nil || outer.Recv == nil || len(outer.Recv.List) == 0 {
-		return nil, fmt.Errorf("%s: cannot extract: need method receiver", errorPrefix)
- }
- receiver = outer.Recv.List[0]
- if len(receiver.Names) == 0 || receiver.Names[0] == nil {
-		return nil, fmt.Errorf("%s: cannot extract: need method receiver name", errorPrefix)
- }
- recvName := receiver.Names[0]
- receiverName = recvName.Name
- receiverObj = info.ObjectOf(recvName)
- }
-
- var (
- params, returns []ast.Expr // used when calling the extracted function
- paramTypes, returnTypes []*ast.Field // used in the signature of the extracted function
- uninitialized []types.Object // vars we will need to initialize before the call
- )
-
-	// Avoid duplicates while traversing vars and uninitialized.
- seenVars := make(map[types.Object]ast.Expr)
- seenUninitialized := make(map[types.Object]struct{})
-
- // Some variables on the left-hand side of our assignment statement may be free. If our
- // selection begins in the same scope in which the free variable is defined, we can
- // redefine it in our assignment statement. See the following example, where 'b' and
- // 'err' (both free variables) can be redefined in the second funcCall() while maintaining
- // correctness.
- //
- //
- // Not Redefined:
- //
- // a, err := funcCall()
- // var b int
- // b, err = funcCall()
- //
- // Redefined:
- //
- // a, err := funcCall()
- // b, err := funcCall()
- //
- // We track the number of free variables that can be redefined to maintain our preference
- // of using "x, y, z := fn()" style assignment statements.
- var canRedefineCount int
-
- // Each identifier in the selected block must become (1) a parameter to the
- // extracted function, (2) a return value of the extracted function, or (3) a local
- // variable in the extracted function. Determine the outcome(s) for each variable
- // based on whether it is free, altered within the selected block, and used outside
- // of the selected block.
- for _, v := range variables {
- if _, ok := seenVars[v.obj]; ok {
- continue
- }
- if v.obj.Name() == "_" {
- // The blank identifier is always a local variable
- continue
- }
- typ := analysisinternal.TypeExpr(fset, file, pkg, v.obj.Type())
- if typ == nil {
- return nil, fmt.Errorf("nil AST expression for type: %v", v.obj.Name())
- }
- seenVars[v.obj] = typ
- identifier := ast.NewIdent(v.obj.Name())
- // An identifier must meet three conditions to become a return value of the
- // extracted function. (1) its value must be defined or reassigned within
- // the selection (isAssigned), (2) it must be used at least once after the
- // selection (isUsed), and (3) its first use after the selection
-	// cannot be its own reassignment or redefinition (varOverridden).
- if v.obj.Parent() == nil {
- return nil, fmt.Errorf("parent nil")
- }
- isUsed, firstUseAfter := objUsed(info, span.NewRange(fset, rng.End, v.obj.Parent().End()), v.obj)
- if v.assigned && isUsed && !varOverridden(info, firstUseAfter, v.obj, v.free, outer) {
- returnTypes = append(returnTypes, &ast.Field{Type: typ})
- returns = append(returns, identifier)
- if !v.free {
- uninitialized = append(uninitialized, v.obj)
- } else if v.obj.Parent().Pos() == startParent.Pos() {
- canRedefineCount++
- }
- }
- // An identifier must meet two conditions to become a parameter of the
- // extracted function. (1) it must be free (isFree), and (2) its first
- // use within the selection cannot be its own definition (isDefined).
- if v.free && !v.defined {
- // Skip the selector for a method.
- if isMethod && v.obj == receiverObj {
- receiverUsed = true
- continue
- }
- params = append(params, identifier)
- paramTypes = append(paramTypes, &ast.Field{
- Names: []*ast.Ident{identifier},
- Type: typ,
- })
- }
- }
-
- // Find the function literal that encloses the selection. The enclosing function literal
- // may not be the enclosing function declaration (i.e. 'outer'). For example, in the
- // following block:
- //
- // func main() {
- // ast.Inspect(node, func(n ast.Node) bool {
- // v := 1 // this line extracted
- // return true
- // })
- // }
- //
- // 'outer' is main(). However, the extracted selection most directly belongs to
- // the anonymous function literal, the second argument of ast.Inspect(). We use the
- // enclosing function literal to determine the proper return types for return statements
- // within the selection. We still need the enclosing function declaration because this is
- // the top-level declaration. We inspect the top-level declaration to look for variables
- // as well as for code replacement.
- enclosing := outer.Type
- for _, p := range path {
- if p == enclosing {
- break
- }
- if fl, ok := p.(*ast.FuncLit); ok {
- enclosing = fl.Type
- break
- }
- }
-
- // We put the selection in a constructed file. We can then traverse and edit
- // the extracted selection without modifying the original AST.
- startOffset, err := Offset(tok, rng.Start)
- if err != nil {
- return nil, err
- }
- endOffset, err := Offset(tok, rng.End)
- if err != nil {
- return nil, err
- }
- selection := src[startOffset:endOffset]
- extractedBlock, err := parseBlockStmt(fset, selection)
- if err != nil {
- return nil, err
- }
-
- // We need to account for return statements in the selected block, as they will complicate
- // the logical flow of the extracted function. See the following example, where ** denotes
- // the range to be extracted.
- //
- // Before:
- //
- // func _() int {
- // a := 1
- // b := 2
- // **if a == b {
- // return a
- // }**
- // ...
- // }
- //
- // After:
- //
- // func _() int {
- // a := 1
- // b := 2
- // cond0, ret0 := x0(a, b)
- // if cond0 {
- // return ret0
- // }
- // ...
- // }
- //
- // func x0(a int, b int) (bool, int) {
- // if a == b {
- // return true, a
- // }
- // return false, 0
- // }
- //
- // We handle returns by adding an additional boolean return value to the extracted function.
- // This bool reports whether the original function would have returned. Because the
- // extracted selection contains a return statement, we must also add the types in the
- // return signature of the enclosing function to the return signature of the
- // extracted function. We then add an extra if statement checking this boolean value
- // in the original function. If the condition is met, the original function should
- // return a value, mimicking the functionality of the original return statement(s)
- // in the selection.
- //
-	// If there is a return that is guaranteed to execute (hasNonNestedReturn=true), then
- // we don't need to include this additional condition check and can simply return.
- //
- // Before:
- //
- // func _() int {
- // a := 1
- // b := 2
- // **if a == b {
- // return a
- // }
- // return b**
- // }
- //
- // After:
- //
- // func _() int {
- // a := 1
- // b := 2
- // return x0(a, b)
- // }
- //
- // func x0(a int, b int) int {
- // if a == b {
- // return a
- // }
- // return b
- // }
-
- var retVars []*returnVariable
- var ifReturn *ast.IfStmt
- if containsReturnStatement {
- if !hasNonNestedReturn {
- // The selected block contained return statements, so we have to modify the
- // signature of the extracted function as described above. Adjust all of
- // the return statements in the extracted function to reflect this change in
- // signature.
- if err := adjustReturnStatements(returnTypes, seenVars, fset, file,
- pkg, extractedBlock); err != nil {
- return nil, err
- }
- }
- // Collect the additional return values and types needed to accommodate return
- // statements in the selection. Update the type signature of the extracted
- // function and construct the if statement that will be inserted in the enclosing
- // function.
- retVars, ifReturn, err = generateReturnInfo(enclosing, pkg, path, file, info, fset, rng.Start, hasNonNestedReturn)
- if err != nil {
- return nil, err
- }
- }
-
- // Add a return statement to the end of the new function. This return statement must include
- // the values for the types of the original extracted function signature and (if a return
- // statement is present in the selection) enclosing function signature.
-	// This only needs to be done if the selection does not have a non-nested return, otherwise
- // it already terminates with a return statement.
- hasReturnValues := len(returns)+len(retVars) > 0
- if hasReturnValues && !hasNonNestedReturn {
- extractedBlock.List = append(extractedBlock.List, &ast.ReturnStmt{
- Results: append(returns, getZeroVals(retVars)...),
- })
- }
-
- // Construct the appropriate call to the extracted function.
- // We must meet two conditions to use ":=" instead of '='. (1) there must be at least
-	// one variable on the lhs that is uninitialized (non-free) prior to the assignment.
- // (2) all of the initialized (free) variables on the lhs must be able to be redefined.
- sym := token.ASSIGN
- canDefineCount := len(uninitialized) + canRedefineCount
- canDefine := len(uninitialized)+len(retVars) > 0 && canDefineCount == len(returns)
- if canDefine {
- sym = token.DEFINE
- }
- var name, funName string
- if isMethod {
- name = "newMethod"
- // TODO(suzmue): generate a name that does not conflict for "newMethod".
- funName = name
- } else {
- name = "newFunction"
- funName, _ = generateAvailableIdentifier(rng.Start, file, path, info, name, 0)
- }
- extractedFunCall := generateFuncCall(hasNonNestedReturn, hasReturnValues, params,
- append(returns, getNames(retVars)...), funName, sym, receiverName)
-
- // Build the extracted function.
- newFunc := &ast.FuncDecl{
- Name: ast.NewIdent(funName),
- Type: &ast.FuncType{
- Params: &ast.FieldList{List: paramTypes},
- Results: &ast.FieldList{List: append(returnTypes, getDecls(retVars)...)},
- },
- Body: extractedBlock,
- }
- if isMethod {
- var names []*ast.Ident
- if receiverUsed {
- names = append(names, ast.NewIdent(receiverName))
- }
- newFunc.Recv = &ast.FieldList{
- List: []*ast.Field{{
- Names: names,
- Type: receiver.Type,
- }},
- }
- }
-
- // Create variable declarations for any identifiers that need to be initialized prior to
- // calling the extracted function. We do not manually initialize variables if every return
-	// value is uninitialized. We can use := to initialize the variables in this situation.
- var declarations []ast.Stmt
- if canDefineCount != len(returns) {
- declarations = initializeVars(uninitialized, retVars, seenUninitialized, seenVars)
- }
-
- var declBuf, replaceBuf, newFuncBuf, ifBuf, commentBuf bytes.Buffer
- if err := format.Node(&declBuf, fset, declarations); err != nil {
- return nil, err
- }
- if err := format.Node(&replaceBuf, fset, extractedFunCall); err != nil {
- return nil, err
- }
- if ifReturn != nil {
- if err := format.Node(&ifBuf, fset, ifReturn); err != nil {
- return nil, err
- }
- }
- if err := format.Node(&newFuncBuf, fset, newFunc); err != nil {
- return nil, err
- }
- // Find all the comments within the range and print them to be put somewhere.
- // TODO(suzmue): print these in the extracted function at the correct place.
- for _, cg := range file.Comments {
- if cg.Pos().IsValid() && cg.Pos() < rng.End && cg.Pos() >= rng.Start {
- for _, c := range cg.List {
- fmt.Fprintln(&commentBuf, c.Text)
- }
- }
- }
-
- // We're going to replace the whole enclosing function,
- // so preserve the text before and after the selected block.
- outerStart, err := Offset(tok, outer.Pos())
- if err != nil {
- return nil, err
- }
- outerEnd, err := Offset(tok, outer.End())
- if err != nil {
- return nil, err
- }
- before := src[outerStart:startOffset]
- after := src[endOffset:outerEnd]
- indent, err := calculateIndentation(src, tok, start)
- if err != nil {
- return nil, err
- }
- newLineIndent := "\n" + indent
-
- var fullReplacement strings.Builder
- fullReplacement.Write(before)
- if commentBuf.Len() > 0 {
- comments := strings.ReplaceAll(commentBuf.String(), "\n", newLineIndent)
- fullReplacement.WriteString(comments)
- }
- if declBuf.Len() > 0 { // add any initializations, if needed
- initializations := strings.ReplaceAll(declBuf.String(), "\n", newLineIndent) +
- newLineIndent
- fullReplacement.WriteString(initializations)
- }
- fullReplacement.Write(replaceBuf.Bytes()) // call the extracted function
- if ifBuf.Len() > 0 { // add the if statement below the function call, if needed
- ifstatement := newLineIndent +
- strings.ReplaceAll(ifBuf.String(), "\n", newLineIndent)
- fullReplacement.WriteString(ifstatement)
- }
- fullReplacement.Write(after)
- fullReplacement.WriteString("\n\n") // add newlines after the enclosing function
- fullReplacement.Write(newFuncBuf.Bytes()) // insert the extracted function
-
- return &analysis.SuggestedFix{
- TextEdits: []analysis.TextEdit{{
- Pos: outer.Pos(),
- End: outer.End(),
- NewText: []byte(fullReplacement.String()),
- }},
- }, nil
-}
-
-// adjustRangeForWhitespace adjusts the given range to exclude unnecessary leading or
-// trailing whitespace characters from selection. In the following example, each line
-// of the if statement is indented once. There are also two extra spaces after the
-// closing bracket before the line break.
-//
-// \tif (true) {
-// \t _ = 1
-// \t} \n
-//
-// By default, a valid range begins at 'if' and ends at the first whitespace character
-// after the '}'. But, users are likely to highlight full lines rather than adjusting
-// their cursors for whitespace. To support this use case, we must manually adjust the
-// ranges to match the correct AST node. In this particular example, we would adjust
-// rng.Start forward by one byte, and rng.End backwards by two bytes.
-func adjustRangeForWhitespace(rng span.Range, tok *token.File, content []byte) (span.Range, error) {
- offset, err := Offset(tok, rng.Start)
- if err != nil {
- return span.Range{}, err
- }
- for offset < len(content) {
- if !unicode.IsSpace(rune(content[offset])) {
- break
- }
- // Move forwards one byte to find a non-whitespace character.
- offset += 1
- }
- rng.Start = tok.Pos(offset)
-
- // Move backwards to find a non-whitespace character.
- offset, err = Offset(tok, rng.End)
- if err != nil {
- return span.Range{}, err
- }
- for o := offset - 1; 0 <= o && o < len(content); o-- {
- if !unicode.IsSpace(rune(content[o])) {
- break
- }
- offset = o
- }
- rng.End = tok.Pos(offset)
- return rng, nil
-}
-
-// findParent finds the parent AST node of the given target node, if the target is a
-// descendant of the starting node.
-func findParent(start ast.Node, target ast.Node) ast.Node {
- var parent ast.Node
- analysisinternal.WalkASTWithParent(start, func(n, p ast.Node) bool {
- if n == target {
- parent = p
- return false
- }
- return true
- })
- return parent
-}
-
-// variable describes the status of a variable within a selection.
-type variable struct {
- obj types.Object
-
- // free reports whether the variable is a free variable, meaning it should
- // be a parameter to the extracted function.
- free bool
-
- // assigned reports whether the variable is assigned to in the selection.
- assigned bool
-
- // defined reports whether the variable is defined in the selection.
- defined bool
-}
-
-// collectFreeVars maps each identifier in the given range to whether it is "free."
-// Given a range, a variable in that range is defined as "free" if it is declared
-// outside of the range and neither at the file scope nor package scope. These free
-// variables will be used as arguments in the extracted function. It also returns a
-// list of identifiers that may need to be returned by the extracted function.
-// Some of the code in this function has been adapted from tools/cmd/guru/freevars.go.
-func collectFreeVars(info *types.Info, file *ast.File, fileScope, pkgScope *types.Scope, rng span.Range, node ast.Node) ([]*variable, error) {
- // id returns non-nil if n denotes an object that is referenced by the span
- // and defined either within the span or in the lexical environment. The bool
- // return value acts as an indicator for where it was defined.
- id := func(n *ast.Ident) (types.Object, bool) {
- obj := info.Uses[n]
- if obj == nil {
- return info.Defs[n], false
- }
- if obj.Name() == "_" {
- return nil, false // exclude objects denoting '_'
- }
- if _, ok := obj.(*types.PkgName); ok {
- return nil, false // imported package
- }
- if !(file.Pos() <= obj.Pos() && obj.Pos() <= file.End()) {
- return nil, false // not defined in this file
- }
- scope := obj.Parent()
- if scope == nil {
- return nil, false // e.g. interface method, struct field
- }
- if scope == fileScope || scope == pkgScope {
- return nil, false // defined at file or package scope
- }
- if rng.Start <= obj.Pos() && obj.Pos() <= rng.End {
- return obj, false // defined within selection => not free
- }
- return obj, true
- }
- // sel returns non-nil if n denotes a selection o.x.y that is referenced by the
- // span and defined either within the span or in the lexical environment. The bool
- // return value acts as an indicator for where it was defined.
- var sel func(n *ast.SelectorExpr) (types.Object, bool)
- sel = func(n *ast.SelectorExpr) (types.Object, bool) {
- switch x := astutil.Unparen(n.X).(type) {
- case *ast.SelectorExpr:
- return sel(x)
- case *ast.Ident:
- return id(x)
- }
- return nil, false
- }
- seen := make(map[types.Object]*variable)
- firstUseIn := make(map[types.Object]token.Pos)
- var vars []types.Object
- ast.Inspect(node, func(n ast.Node) bool {
- if n == nil {
- return false
- }
- if rng.Start <= n.Pos() && n.End() <= rng.End {
- var obj types.Object
- var isFree, prune bool
- switch n := n.(type) {
- case *ast.Ident:
- obj, isFree = id(n)
- case *ast.SelectorExpr:
- obj, isFree = sel(n)
- prune = true
- }
- if obj != nil {
- seen[obj] = &variable{
- obj: obj,
- free: isFree,
- }
- vars = append(vars, obj)
- // Find the first time that the object is used in the selection.
- first, ok := firstUseIn[obj]
- if !ok || n.Pos() < first {
- firstUseIn[obj] = n.Pos()
- }
- if prune {
- return false
- }
- }
- }
- return n.Pos() <= rng.End
- })
-
- // Find identifiers that are initialized or whose values are altered at some
- // point in the selected block. For example, in a selected block from lines 2-4,
- // variables x, y, and z are included in assigned. However, in a selected block
- // from lines 3-4, only variables y and z are included in assigned.
- //
- // 1: var a int
- // 2: var x int
- // 3: y := 3
- // 4: z := x + a
- //
- ast.Inspect(node, func(n ast.Node) bool {
- if n == nil {
- return false
- }
- if n.Pos() < rng.Start || n.End() > rng.End {
- return n.Pos() <= rng.End
- }
- switch n := n.(type) {
- case *ast.AssignStmt:
- for _, assignment := range n.Lhs {
- lhs, ok := assignment.(*ast.Ident)
- if !ok {
- continue
- }
- obj, _ := id(lhs)
- if obj == nil {
- continue
- }
- if _, ok := seen[obj]; !ok {
- continue
- }
- seen[obj].assigned = true
- if n.Tok != token.DEFINE {
- continue
- }
- // Find identifiers that are defined prior to being used
- // elsewhere in the selection.
- // TODO: Include identifiers that are assigned prior to being
- // used elsewhere in the selection. Then, change the assignment
- // to a definition in the extracted function.
- if firstUseIn[obj] != lhs.Pos() {
- continue
- }
- // Ensure that the object is not used in its own re-definition.
- // For example:
- // var f float64
- // f, e := math.Frexp(f)
- for _, expr := range n.Rhs {
- if referencesObj(info, expr, obj) {
- continue
- }
- if _, ok := seen[obj]; !ok {
- continue
- }
- seen[obj].defined = true
- break
- }
- }
- return false
- case *ast.DeclStmt:
- gen, ok := n.Decl.(*ast.GenDecl)
- if !ok {
- return false
- }
- for _, spec := range gen.Specs {
- vSpecs, ok := spec.(*ast.ValueSpec)
- if !ok {
- continue
- }
- for _, vSpec := range vSpecs.Names {
- obj, _ := id(vSpec)
- if obj == nil {
- continue
- }
- if _, ok := seen[obj]; !ok {
- continue
- }
- seen[obj].assigned = true
- }
- }
- return false
- case *ast.IncDecStmt:
- if ident, ok := n.X.(*ast.Ident); !ok {
- return false
- } else if obj, _ := id(ident); obj == nil {
- return false
- } else {
- if _, ok := seen[obj]; !ok {
- return false
- }
- seen[obj].assigned = true
- }
- }
- return true
- })
- var variables []*variable
- for _, obj := range vars {
- v, ok := seen[obj]
- if !ok {
- return nil, fmt.Errorf("no seen types.Object for %v", obj)
- }
- variables = append(variables, v)
- }
- return variables, nil
-}
-
-// referencesObj checks whether the given object appears in the given expression.
-func referencesObj(info *types.Info, expr ast.Expr, obj types.Object) bool {
- var hasObj bool
- ast.Inspect(expr, func(n ast.Node) bool {
- if n == nil {
- return false
- }
- ident, ok := n.(*ast.Ident)
- if !ok {
- return true
- }
- objUse := info.Uses[ident]
- if obj == objUse {
- hasObj = true
- return false
- }
- return false
- })
- return hasObj
-}
-
-type fnExtractParams struct {
- tok *token.File
- path []ast.Node
- rng span.Range
- outer *ast.FuncDecl
- start ast.Node
-}
-
-// CanExtractFunction reports whether the code in the given range can be
-// extracted to a function.
-func CanExtractFunction(fset *token.FileSet, rng span.Range, src []byte, file *ast.File) (*fnExtractParams, bool, bool, error) {
- if rng.Start == rng.End {
- return nil, false, false, fmt.Errorf("start and end are equal")
- }
- tok := fset.File(file.Pos())
- if tok == nil {
- return nil, false, false, fmt.Errorf("no file for pos %v", fset.Position(file.Pos()))
- }
- var err error
- rng, err = adjustRangeForWhitespace(rng, tok, src)
- if err != nil {
- return nil, false, false, err
- }
- path, _ := astutil.PathEnclosingInterval(file, rng.Start, rng.End)
- if len(path) == 0 {
- return nil, false, false, fmt.Errorf("no path enclosing interval")
- }
-	// The node that encloses the selection must be a statement.
- // TODO: Support function extraction for an expression.
- _, ok := path[0].(ast.Stmt)
- if !ok {
- return nil, false, false, fmt.Errorf("node is not a statement")
- }
-
- // Find the function declaration that encloses the selection.
- var outer *ast.FuncDecl
- for _, p := range path {
- if p, ok := p.(*ast.FuncDecl); ok {
- outer = p
- break
- }
- }
- if outer == nil {
- return nil, false, false, fmt.Errorf("no enclosing function")
- }
-
- // Find the nodes at the start and end of the selection.
- var start, end ast.Node
- ast.Inspect(outer, func(n ast.Node) bool {
- if n == nil {
- return false
- }
- // Do not override 'start' with a node that begins at the same location
- // but is nested further from 'outer'.
- if start == nil && n.Pos() == rng.Start && n.End() <= rng.End {
- start = n
- }
- if end == nil && n.End() == rng.End && n.Pos() >= rng.Start {
- end = n
- }
- return n.Pos() <= rng.End
- })
- if start == nil || end == nil {
- return nil, false, false, fmt.Errorf("range does not map to AST nodes")
- }
- // If the region is a blockStmt, use the first and last nodes in the block
- // statement.
- // <rng.start>{ ... }<rng.end> => { <rng.start>...<rng.end> }
- if blockStmt, ok := start.(*ast.BlockStmt); ok {
- if len(blockStmt.List) == 0 {
- return nil, false, false, fmt.Errorf("range maps to empty block statement")
- }
- start, end = blockStmt.List[0], blockStmt.List[len(blockStmt.List)-1]
- rng.Start, rng.End = start.Pos(), end.End()
- }
- return &fnExtractParams{
- tok: tok,
- path: path,
- rng: rng,
- outer: outer,
- start: start,
- }, true, outer.Recv != nil, nil
-}
-
-// objUsed checks if the object is used within the range. It returns the first
-// occurrence of the object in the range, if it exists.
-func objUsed(info *types.Info, rng span.Range, obj types.Object) (bool, *ast.Ident) {
- var firstUse *ast.Ident
- for id, objUse := range info.Uses {
- if obj != objUse {
- continue
- }
- if id.Pos() < rng.Start || id.End() > rng.End {
- continue
- }
- if firstUse == nil || id.Pos() < firstUse.Pos() {
- firstUse = id
- }
- }
- return firstUse != nil, firstUse
-}
-
-// varOverridden traverses the given AST node until we find the given identifier. Then, we
-// examine the occurrence of the given identifier and check for (1) whether the identifier
-// is being redefined. If the identifier is free, we also check for (2) whether the identifier
-// is being reassigned. We will not include an identifier in the return statement of the
-// extracted function if it meets one of the above conditions.
-func varOverridden(info *types.Info, firstUse *ast.Ident, obj types.Object, isFree bool, node ast.Node) bool {
- var isOverriden bool
- ast.Inspect(node, func(n ast.Node) bool {
- if n == nil {
- return false
- }
- assignment, ok := n.(*ast.AssignStmt)
- if !ok {
- return true
- }
- // A free variable is initialized prior to the selection. We can always reassign
- // this variable after the selection because it has already been defined.
- // Conversely, a non-free variable is initialized within the selection. Thus, we
- // cannot reassign this variable after the selection unless it is initialized and
- // returned by the extracted function.
- if !isFree && assignment.Tok == token.ASSIGN {
- return false
- }
- for _, assigned := range assignment.Lhs {
- ident, ok := assigned.(*ast.Ident)
- // Check if we found the first use of the identifier.
- if !ok || ident != firstUse {
- continue
- }
- objUse := info.Uses[ident]
- if objUse == nil || objUse != obj {
- continue
- }
- // Ensure that the object is not used in its own definition.
- // For example:
- // var f float64
- // f, e := math.Frexp(f)
- for _, expr := range assignment.Rhs {
- if referencesObj(info, expr, obj) {
- return false
- }
- }
- isOverriden = true
- return false
- }
- return false
- })
- return isOverriden
-}
-
-// parseExtraction generates an AST file from the given text. We then return the portion of the
-// file that represents the text.
-func parseBlockStmt(fset *token.FileSet, src []byte) (*ast.BlockStmt, error) {
- text := "package main\nfunc _() { " + string(src) + " }"
- extract, err := parser.ParseFile(fset, "", text, 0)
- if err != nil {
- return nil, err
- }
- if len(extract.Decls) == 0 {
- return nil, fmt.Errorf("parsed file does not contain any declarations")
- }
- decl, ok := extract.Decls[0].(*ast.FuncDecl)
- if !ok {
- return nil, fmt.Errorf("parsed file does not contain expected function declaration")
- }
- if decl.Body == nil {
- return nil, fmt.Errorf("extracted function has no body")
- }
- return decl.Body, nil
-}
-
-// generateReturnInfo generates the information we need to adjust the return statements and
-// signature of the extracted function. We prepare names, signatures, and "zero values" that
-// represent the new variables. We also use this information to construct the if statement that
-// is inserted below the call to the extracted function.
-func generateReturnInfo(enclosing *ast.FuncType, pkg *types.Package, path []ast.Node, file *ast.File, info *types.Info, fset *token.FileSet, pos token.Pos, hasNonNestedReturns bool) ([]*returnVariable, *ast.IfStmt, error) {
- var retVars []*returnVariable
- var cond *ast.Ident
- if !hasNonNestedReturns {
- // Generate information for the added bool value.
- name, _ := generateAvailableIdentifier(pos, file, path, info, "shouldReturn", 0)
- cond = &ast.Ident{Name: name}
- retVars = append(retVars, &returnVariable{
- name: cond,
- decl: &ast.Field{Type: ast.NewIdent("bool")},
- zeroVal: ast.NewIdent("false"),
- })
- }
- // Generate information for the values in the return signature of the enclosing function.
- if enclosing.Results != nil {
- idx := 0
- for _, field := range enclosing.Results.List {
- typ := info.TypeOf(field.Type)
- if typ == nil {
- return nil, nil, fmt.Errorf(
- "failed type conversion, AST expression: %T", field.Type)
- }
- expr := analysisinternal.TypeExpr(fset, file, pkg, typ)
- if expr == nil {
- return nil, nil, fmt.Errorf("nil AST expression")
- }
- var name string
- name, idx = generateAvailableIdentifier(pos, file,
- path, info, "returnValue", idx)
- retVars = append(retVars, &returnVariable{
- name: ast.NewIdent(name),
- decl: &ast.Field{Type: expr},
- zeroVal: analysisinternal.ZeroValue(
- fset, file, pkg, typ),
- })
- }
- }
- var ifReturn *ast.IfStmt
- if !hasNonNestedReturns {
- // Create the return statement for the enclosing function. We must exclude the variable
- // for the condition of the if statement (cond) from the return statement.
- ifReturn = &ast.IfStmt{
- Cond: cond,
- Body: &ast.BlockStmt{
- List: []ast.Stmt{&ast.ReturnStmt{Results: getNames(retVars)[1:]}},
- },
- }
- }
- return retVars, ifReturn, nil
-}
-
-// adjustReturnStatements adds "zero values" of the given types to each return statement
-// in the given AST node.
-func adjustReturnStatements(returnTypes []*ast.Field, seenVars map[types.Object]ast.Expr, fset *token.FileSet, file *ast.File, pkg *types.Package, extractedBlock *ast.BlockStmt) error {
- var zeroVals []ast.Expr
- // Create "zero values" for each type.
- for _, returnType := range returnTypes {
- var val ast.Expr
- for obj, typ := range seenVars {
- if typ != returnType.Type {
- continue
- }
- val = analysisinternal.ZeroValue(fset, file, pkg, obj.Type())
- break
- }
- if val == nil {
- return fmt.Errorf(
- "could not find matching AST expression for %T", returnType.Type)
- }
- zeroVals = append(zeroVals, val)
- }
- // Add "zero values" to each return statement.
- // The bool reports whether the enclosing function should return after calling the
- // extracted function. We set the bool to 'true' because, if these return statements
- // execute, the extracted function terminates early, and the enclosing function must
- // return as well.
- zeroVals = append(zeroVals, ast.NewIdent("true"))
- ast.Inspect(extractedBlock, func(n ast.Node) bool {
- if n == nil {
- return false
- }
- if n, ok := n.(*ast.ReturnStmt); ok {
- n.Results = append(zeroVals, n.Results...)
- return false
- }
- return true
- })
- return nil
-}
-
-// generateFuncCall constructs a call expression for the extracted function, described by the
-// given parameters and return variables.
-func generateFuncCall(hasNonNestedReturn, hasReturnVals bool, params, returns []ast.Expr, name string, token token.Token, selector string) ast.Node {
- var replace ast.Node
- callExpr := &ast.CallExpr{
- Fun: ast.NewIdent(name),
- Args: params,
- }
- if selector != "" {
- callExpr = &ast.CallExpr{
- Fun: &ast.SelectorExpr{
- X: ast.NewIdent(selector),
- Sel: ast.NewIdent(name),
- },
- Args: params,
- }
- }
- if hasReturnVals {
- if hasNonNestedReturn {
- // Create a return statement that returns the result of the function call.
- replace = &ast.ReturnStmt{
- Return: 0,
- Results: []ast.Expr{callExpr},
- }
- } else {
- // Assign the result of the function call.
- replace = &ast.AssignStmt{
- Lhs: returns,
- Tok: token,
- Rhs: []ast.Expr{callExpr},
- }
- }
- } else {
- replace = callExpr
- }
- return replace
-}
-
-// initializeVars creates variable declarations, if needed.
-// Our preference is to replace the selected block with an "x, y, z := fn()" style
-// assignment statement. We can use this style when all of the variables in the
-// extracted function's return statement are either not defined prior to the extracted block
-// or can be safely redefined. However, for example, if z is already defined
-// in a different scope, we replace the selected block with:
-//
-// var x int
-// var y string
-// x, y, z = fn()
-func initializeVars(uninitialized []types.Object, retVars []*returnVariable, seenUninitialized map[types.Object]struct{}, seenVars map[types.Object]ast.Expr) []ast.Stmt {
- var declarations []ast.Stmt
- for _, obj := range uninitialized {
- if _, ok := seenUninitialized[obj]; ok {
- continue
- }
- seenUninitialized[obj] = struct{}{}
- valSpec := &ast.ValueSpec{
- Names: []*ast.Ident{ast.NewIdent(obj.Name())},
- Type: seenVars[obj],
- }
- genDecl := &ast.GenDecl{
- Tok: token.VAR,
- Specs: []ast.Spec{valSpec},
- }
- declarations = append(declarations, &ast.DeclStmt{Decl: genDecl})
- }
- // Each variable added from a return statement in the selection
- // must be initialized.
- for i, retVar := range retVars {
- n := retVar.name.(*ast.Ident)
- valSpec := &ast.ValueSpec{
- Names: []*ast.Ident{n},
- Type: retVars[i].decl.Type,
- }
- genDecl := &ast.GenDecl{
- Tok: token.VAR,
- Specs: []ast.Spec{valSpec},
- }
- declarations = append(declarations, &ast.DeclStmt{Decl: genDecl})
- }
- return declarations
-}
-
-// getNames returns the names from the given list of returnVariable.
-func getNames(retVars []*returnVariable) []ast.Expr {
- var names []ast.Expr
- for _, retVar := range retVars {
- names = append(names, retVar.name)
- }
- return names
-}
-
-// getZeroVals returns the "zero values" from the given list of returnVariable.
-func getZeroVals(retVars []*returnVariable) []ast.Expr {
- var zvs []ast.Expr
- for _, retVar := range retVars {
- zvs = append(zvs, retVar.zeroVal)
- }
- return zvs
-}
-
-// getDecls returns the declarations from the given list of returnVariable.
-func getDecls(retVars []*returnVariable) []*ast.Field {
- var decls []*ast.Field
- for _, retVar := range retVars {
- decls = append(decls, retVar.decl)
- }
- return decls
-}
diff --git a/internal/lsp/source/fix.go b/internal/lsp/source/fix.go
deleted file mode 100644
index 2f921ad0c..000000000
--- a/internal/lsp/source/fix.go
+++ /dev/null
@@ -1,140 +0,0 @@
-// Copyright 2020 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-package source
-
-import (
- "context"
- "fmt"
- "go/ast"
- "go/token"
- "go/types"
-
- "golang.org/x/tools/go/analysis"
- "golang.org/x/tools/internal/lsp/analysis/fillstruct"
- "golang.org/x/tools/internal/lsp/analysis/undeclaredname"
- "golang.org/x/tools/internal/lsp/protocol"
- "golang.org/x/tools/internal/span"
- errors "golang.org/x/xerrors"
-)
-
-type (
- // SuggestedFixFunc is a function used to get the suggested fixes for a given
- // gopls command, some of which are provided by go/analysis.Analyzers. Some of
- // the analyzers in internal/lsp/analysis are not efficient enough to include
- // suggested fixes with their diagnostics, so we have to compute them
- // separately. Such analyzers should provide a function with a signature of
- // SuggestedFixFunc.
- SuggestedFixFunc func(ctx context.Context, snapshot Snapshot, fh VersionedFileHandle, pRng protocol.Range) (*analysis.SuggestedFix, error)
- singleFileFixFunc func(fset *token.FileSet, rng span.Range, src []byte, file *ast.File, pkg *types.Package, info *types.Info) (*analysis.SuggestedFix, error)
-)
-
-const (
- FillStruct = "fill_struct"
- StubMethods = "stub_methods"
- UndeclaredName = "undeclared_name"
- ExtractVariable = "extract_variable"
- ExtractFunction = "extract_function"
- ExtractMethod = "extract_method"
-)
-
-// suggestedFixes maps a suggested fix command id to its handler.
-var suggestedFixes = map[string]SuggestedFixFunc{
- FillStruct: singleFile(fillstruct.SuggestedFix),
- UndeclaredName: singleFile(undeclaredname.SuggestedFix),
- ExtractVariable: singleFile(extractVariable),
- ExtractFunction: singleFile(extractFunction),
- ExtractMethod: singleFile(extractMethod),
- StubMethods: stubSuggestedFixFunc,
-}
-
-// singleFile adapts a fixer that expects inputs for a single file into a SuggestedFixFunc.
-func singleFile(sf singleFileFixFunc) SuggestedFixFunc {
- return func(ctx context.Context, snapshot Snapshot, fh VersionedFileHandle, pRng protocol.Range) (*analysis.SuggestedFix, error) {
- fset, rng, src, file, pkg, info, err := getAllSuggestedFixInputs(ctx, snapshot, fh, pRng)
- if err != nil {
- return nil, err
- }
- return sf(fset, rng, src, file, pkg, info)
- }
-}
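For illustration only (not part of the original file): a new single-file fix would be registered by pairing a command id with a fixer of type singleFileFixFunc and wrapping it with singleFile. The id RemoveUnusedParam and the fixer below are hypothetical; the sketch assumes the declarations and imports already present in this file.

// Hypothetical sketch: wiring a new single-file fix into suggestedFixes.
const RemoveUnusedParam = "remove_unused_param" // assumed command id, not in the original

// removeUnusedParamFix is a placeholder fixer with the singleFileFixFunc signature.
func removeUnusedParamFix(fset *token.FileSet, rng span.Range, src []byte, file *ast.File, pkg *types.Package, info *types.Info) (*analysis.SuggestedFix, error) {
	// A real fixer would inspect file and info within rng and return TextEdits.
	return &analysis.SuggestedFix{Message: "remove unused parameter"}, nil
}

func init() {
	suggestedFixes[RemoveUnusedParam] = singleFile(removeUnusedParamFix)
}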
-
-func SuggestedFixFromCommand(cmd protocol.Command, kind protocol.CodeActionKind) SuggestedFix {
- return SuggestedFix{
- Title: cmd.Title,
- Command: &cmd,
- ActionKind: kind,
- }
-}
-
-// ApplyFix applies the command's suggested fix to the given file and
-// range, returning the resulting edits.
-func ApplyFix(ctx context.Context, fix string, snapshot Snapshot, fh VersionedFileHandle, pRng protocol.Range) ([]protocol.TextDocumentEdit, error) {
- handler, ok := suggestedFixes[fix]
- if !ok {
- return nil, fmt.Errorf("no suggested fix function for %s", fix)
- }
- suggestion, err := handler(ctx, snapshot, fh, pRng)
- if err != nil {
- return nil, err
- }
- if suggestion == nil {
- return nil, nil
- }
- fset := snapshot.FileSet()
- editsPerFile := map[span.URI]*protocol.TextDocumentEdit{}
- for _, edit := range suggestion.TextEdits {
- spn, err := span.NewRange(fset, edit.Pos, edit.End).Span()
- if err != nil {
- return nil, err
- }
- fh, err := snapshot.GetVersionedFile(ctx, spn.URI())
- if err != nil {
- return nil, err
- }
- te, ok := editsPerFile[spn.URI()]
- if !ok {
- te = &protocol.TextDocumentEdit{
- TextDocument: protocol.OptionalVersionedTextDocumentIdentifier{
- Version: fh.Version(),
- TextDocumentIdentifier: protocol.TextDocumentIdentifier{
- URI: protocol.URIFromSpanURI(fh.URI()),
- },
- },
- }
- editsPerFile[spn.URI()] = te
- }
- _, pgf, err := GetParsedFile(ctx, snapshot, fh, NarrowestPackage)
- if err != nil {
- return nil, err
- }
- rng, err := pgf.Mapper.Range(spn)
- if err != nil {
- return nil, err
- }
- te.Edits = append(te.Edits, protocol.TextEdit{
- Range: rng,
- NewText: string(edit.NewText),
- })
- }
- var edits []protocol.TextDocumentEdit
- for _, edit := range editsPerFile {
- edits = append(edits, *edit)
- }
- return edits, nil
-}
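For illustration only (not from the diff): a caller with a context, snapshot, file handle, and range already in hand (all assumed here) invokes ApplyFix with one of the command ids above and receives per-file document edits.

docEdits, err := ApplyFix(ctx, FillStruct, snapshot, fh, pRng)
if err != nil {
	// handle the error
}
// docEdits contains one protocol.TextDocumentEdit per affected file, each
// carrying that file's version and its accumulated text edits.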
-
-// getAllSuggestedFixInputs is a helper function that collects all of the
-// inputs needed by an AppliesFunc or SuggestedFixFunc.
-func getAllSuggestedFixInputs(ctx context.Context, snapshot Snapshot, fh FileHandle, pRng protocol.Range) (*token.FileSet, span.Range, []byte, *ast.File, *types.Package, *types.Info, error) {
- pkg, pgf, err := GetParsedFile(ctx, snapshot, fh, NarrowestPackage)
- if err != nil {
- return nil, span.Range{}, nil, nil, nil, nil, errors.Errorf("getting file for Identifier: %w", err)
- }
- rng, err := pgf.Mapper.RangeToSpanRange(pRng)
- if err != nil {
- return nil, span.Range{}, nil, nil, nil, nil, err
- }
- return snapshot.FileSet(), rng, pgf.Src, pgf.File, pkg.GetTypes(), pkg.GetTypesInfo(), nil
-}
diff --git a/internal/lsp/source/folding_range.go b/internal/lsp/source/folding_range.go
deleted file mode 100644
index 576308f99..000000000
--- a/internal/lsp/source/folding_range.go
+++ /dev/null
@@ -1,185 +0,0 @@
-// Copyright 2019 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-package source
-
-import (
- "context"
- "go/ast"
- "go/token"
- "sort"
- "strings"
-
- "golang.org/x/tools/internal/lsp/protocol"
-)
-
-// FoldingRangeInfo holds range and kind info of folding for an ast.Node
-type FoldingRangeInfo struct {
- MappedRange
- Kind protocol.FoldingRangeKind
-}
-
-// FoldingRange gets all of the folding ranges for f.
-func FoldingRange(ctx context.Context, snapshot Snapshot, fh FileHandle, lineFoldingOnly bool) (ranges []*FoldingRangeInfo, err error) {
- // TODO(suzmue): consider limiting the number of folding ranges returned, and
- // implement a way to prioritize folding ranges in that case.
- pgf, err := snapshot.ParseGo(ctx, fh, ParseFull)
- if err != nil {
- return nil, err
- }
-
- // With parse errors, we wouldn't be able to produce accurate folding info.
- // LSP protocol (3.16) currently does not have a way to handle this case
- // (https://github.com/microsoft/language-server-protocol/issues/1200).
- // We cannot return an error either because we are afraid some editors
- // may not handle errors nicely. As a workaround, we now return an empty
- // result and let the client handle this case by double check the file
- // contents (i.e. if the file is not empty and the folding range result
- // is empty, raise an internal error).
- if pgf.ParseErr != nil {
- return nil, nil
- }
-
- fset := snapshot.FileSet()
-
- // Get folding ranges for comments separately as they are not walked by ast.Inspect.
- ranges = append(ranges, commentsFoldingRange(fset, pgf.Mapper, pgf.File)...)
-
- visit := func(n ast.Node) bool {
- rng := foldingRangeFunc(fset, pgf.Mapper, n, lineFoldingOnly)
- if rng != nil {
- ranges = append(ranges, rng)
- }
- return true
- }
- // Walk the ast and collect folding ranges.
- ast.Inspect(pgf.File, visit)
-
- sort.Slice(ranges, func(i, j int) bool {
- irng, _ := ranges[i].Range()
- jrng, _ := ranges[j].Range()
- return protocol.CompareRange(irng, jrng) < 0
- })
-
- return ranges, nil
-}
-
-// foldingRangeFunc calculates the line folding range for ast.Node n
-func foldingRangeFunc(fset *token.FileSet, m *protocol.ColumnMapper, n ast.Node, lineFoldingOnly bool) *FoldingRangeInfo {
- // TODO(suzmue): include trailing empty lines before the closing
- // parenthesis/brace.
- var kind protocol.FoldingRangeKind
- var start, end token.Pos
- switch n := n.(type) {
- case *ast.BlockStmt:
- // Fold between positions of or lines between "{" and "}".
- var startList, endList token.Pos
- if num := len(n.List); num != 0 {
- startList, endList = n.List[0].Pos(), n.List[num-1].End()
- }
- start, end = validLineFoldingRange(fset, n.Lbrace, n.Rbrace, startList, endList, lineFoldingOnly)
- case *ast.CaseClause:
- // Fold from position of ":" to end.
- start, end = n.Colon+1, n.End()
- case *ast.CommClause:
- // Fold from position of ":" to end.
- start, end = n.Colon+1, n.End()
- case *ast.CallExpr:
- // Fold from position of "(" to position of ")".
- start, end = n.Lparen+1, n.Rparen
- case *ast.FieldList:
- // Fold between positions of or lines between opening parenthesis/brace and closing parenthesis/brace.
- var startList, endList token.Pos
- if num := len(n.List); num != 0 {
- startList, endList = n.List[0].Pos(), n.List[num-1].End()
- }
- start, end = validLineFoldingRange(fset, n.Opening, n.Closing, startList, endList, lineFoldingOnly)
- case *ast.GenDecl:
- // If this is an import declaration, set the kind to be protocol.Imports.
- if n.Tok == token.IMPORT {
- kind = protocol.Imports
- }
- // Fold between positions of or lines between "(" and ")".
- var startSpecs, endSpecs token.Pos
- if num := len(n.Specs); num != 0 {
- startSpecs, endSpecs = n.Specs[0].Pos(), n.Specs[num-1].End()
- }
- start, end = validLineFoldingRange(fset, n.Lparen, n.Rparen, startSpecs, endSpecs, lineFoldingOnly)
- case *ast.BasicLit:
- // Fold raw string literals from position of "`" to position of "`".
- if n.Kind == token.STRING && len(n.Value) >= 2 && n.Value[0] == '`' && n.Value[len(n.Value)-1] == '`' {
- start, end = n.Pos(), n.End()
- }
- case *ast.CompositeLit:
- // Fold between positions of or lines between "{" and "}".
- var startElts, endElts token.Pos
- if num := len(n.Elts); num != 0 {
- startElts, endElts = n.Elts[0].Pos(), n.Elts[num-1].End()
- }
- start, end = validLineFoldingRange(fset, n.Lbrace, n.Rbrace, startElts, endElts, lineFoldingOnly)
- }
-
- // Check that folding positions are valid.
- if !start.IsValid() || !end.IsValid() {
- return nil
- }
- // in line folding mode, do not fold if the start and end lines are the same.
- if lineFoldingOnly && fset.Position(start).Line == fset.Position(end).Line {
- return nil
- }
- return &FoldingRangeInfo{
- MappedRange: NewMappedRange(fset, m, start, end),
- Kind: kind,
- }
-}
-
-// validLineFoldingRange returns the start and end token.Pos for a folding range if the range is valid;
-// otherwise it returns token.NoPos, which fails the token.Pos.IsValid check.
-func validLineFoldingRange(fset *token.FileSet, open, close, start, end token.Pos, lineFoldingOnly bool) (token.Pos, token.Pos) {
- if lineFoldingOnly {
- if !open.IsValid() || !close.IsValid() {
- return token.NoPos, token.NoPos
- }
-
- // Don't fold if the start/end is on the same line as the open/close.
- // For example, the composite literal below should *not* fold:
- // var x = [2]string{"d",
- // "e" }
- if fset.Position(open).Line == fset.Position(start).Line ||
- fset.Position(close).Line == fset.Position(end).Line {
- return token.NoPos, token.NoPos
- }
-
- return open + 1, end
- }
- return open + 1, close
-}
-
-// commentsFoldingRange returns the folding ranges for all comment blocks in file.
-// The folding range starts at the end of the first line of the comment block,
-// ends at the end of the comment block, and has kind protocol.Comment.
-func commentsFoldingRange(fset *token.FileSet, m *protocol.ColumnMapper, file *ast.File) (comments []*FoldingRangeInfo) {
- for _, commentGrp := range file.Comments {
- startGrp, endGrp := fset.Position(commentGrp.Pos()), fset.Position(commentGrp.End())
- if startGrp.Line == endGrp.Line {
- // Don't fold single line comments.
- continue
- }
-
- firstComment := commentGrp.List[0]
- startPos, endLinePos := firstComment.Pos(), firstComment.End()
- startCmmnt, endCmmnt := fset.Position(startPos), fset.Position(endLinePos)
- if startCmmnt.Line != endCmmnt.Line {
- // If the first comment spans multiple lines, then we want to have the
- // folding range start at the end of the first line.
- endLinePos = token.Pos(int(startPos) + len(strings.Split(firstComment.Text, "\n")[0]))
- }
- comments = append(comments, &FoldingRangeInfo{
- // Fold from the end of the first line comment to the end of the comment block.
- MappedRange: NewMappedRange(fset, m, endLinePos, commentGrp.End()),
- Kind: protocol.Comment,
- })
- }
- return comments
-}
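For illustration only (not part of the original file): for a source file like the sketch below, FoldingRange would report, among others, an Imports-kind range over the import block, a Comment-kind range over the multi-line comment (starting at the end of its first line), and plain ranges over the function body and composite literal.

package demo

import ( // folds with kind protocol.Imports (GenDecl with token.IMPORT)
	"fmt"
	"strings"
)

/*
A multi-line comment block: folds with kind protocol.Comment,
starting at the end of its first line.
*/
func greet(names []string) { // the BlockStmt body folds
	m := map[string]int{ // the CompositeLit folds between "{" and "}"
		"a": 1,
		"b": 2,
	}
	fmt.Println(strings.Join(names, ", "), m)
}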
diff --git a/internal/lsp/source/format.go b/internal/lsp/source/format.go
deleted file mode 100644
index 79da0b3ad..000000000
--- a/internal/lsp/source/format.go
+++ /dev/null
@@ -1,387 +0,0 @@
-// Copyright 2018 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-// Package source provides core features for use by Go editors and tools.
-package source
-
-import (
- "bytes"
- "context"
- "fmt"
- "go/ast"
- "go/format"
- "go/parser"
- "go/token"
- "strings"
- "text/scanner"
-
- "golang.org/x/tools/internal/event"
- "golang.org/x/tools/internal/imports"
- "golang.org/x/tools/internal/lsp/diff"
- "golang.org/x/tools/internal/lsp/lsppos"
- "golang.org/x/tools/internal/lsp/protocol"
- "golang.org/x/tools/internal/span"
-)
-
-// Format formats a file with a given range.
-func Format(ctx context.Context, snapshot Snapshot, fh FileHandle) ([]protocol.TextEdit, error) {
- ctx, done := event.Start(ctx, "source.Format")
- defer done()
-
- // Generated files shouldn't be edited, so don't format them.
- if IsGenerated(ctx, snapshot, fh.URI()) {
- return nil, fmt.Errorf("can't format %q: file is generated", fh.URI().Filename())
- }
-
- pgf, err := snapshot.ParseGo(ctx, fh, ParseFull)
- if err != nil {
- return nil, err
- }
- // Even if this file has parse errors, it might still be possible to format it.
- // Using format.Node on an AST with errors may result in code being modified.
- // Attempt to format the source of this file instead.
- if pgf.ParseErr != nil {
- formatted, err := formatSource(ctx, fh)
- if err != nil {
- return nil, err
- }
- return computeTextEdits(ctx, snapshot, pgf, string(formatted))
- }
-
- fset := snapshot.FileSet()
-
- // format.Node changes slightly from one release to another, so the version
- // of Go used to build the LSP server will determine how it formats code.
- // This should be acceptable for all users, who will likely be prompted to rebuild
- // the LSP server on each Go release.
- buf := &bytes.Buffer{}
- if err := format.Node(buf, fset, pgf.File); err != nil {
- return nil, err
- }
- formatted := buf.String()
-
- // Apply additional formatting, if any is supported. Currently, the only
- // supported additional formatter is gofumpt.
- if format := snapshot.View().Options().GofumptFormat; snapshot.View().Options().Gofumpt && format != nil {
- // gofumpt can customize formatting based on language version and module
- // path, if available.
- //
- // Try to derive this information, but fall-back on the default behavior.
- //
- // TODO: under which circumstances can we fail to find module information?
- // Can this, for example, result in inconsistent formatting across saves,
- // due to pending calls to packages.Load?
- var langVersion, modulePath string
- mds, err := snapshot.MetadataForFile(ctx, fh.URI())
- if err == nil && len(mds) > 0 {
- if mi := mds[0].ModuleInfo(); mi != nil {
- langVersion = mi.GoVersion
- modulePath = mi.Path
- }
- }
- b, err := format(ctx, langVersion, modulePath, buf.Bytes())
- if err != nil {
- return nil, err
- }
- formatted = string(b)
- }
- return computeTextEdits(ctx, snapshot, pgf, formatted)
-}
-
-func formatSource(ctx context.Context, fh FileHandle) ([]byte, error) {
- _, done := event.Start(ctx, "source.formatSource")
- defer done()
-
- data, err := fh.Read()
- if err != nil {
- return nil, err
- }
- return format.Source(data)
-}
-
-type ImportFix struct {
- Fix *imports.ImportFix
- Edits []protocol.TextEdit
-}
-
-// AllImportsFixes formats f for each possible fix to the imports.
-// In addition to returning the result of applying all edits,
-// it returns a list of fixes that could be applied to the file, with the
-// corresponding TextEdits that would be needed to apply that fix.
-func AllImportsFixes(ctx context.Context, snapshot Snapshot, fh FileHandle) (allFixEdits []protocol.TextEdit, editsPerFix []*ImportFix, err error) {
- ctx, done := event.Start(ctx, "source.AllImportsFixes")
- defer done()
-
- pgf, err := snapshot.ParseGo(ctx, fh, ParseFull)
- if err != nil {
- return nil, nil, err
- }
- if err := snapshot.RunProcessEnvFunc(ctx, func(opts *imports.Options) error {
- allFixEdits, editsPerFix, err = computeImportEdits(snapshot, pgf, opts)
- return err
- }); err != nil {
- return nil, nil, fmt.Errorf("AllImportsFixes: %v", err)
- }
- return allFixEdits, editsPerFix, nil
-}
-
-// computeImportEdits computes a set of edits that perform one or all of the
-// necessary import fixes.
-func computeImportEdits(snapshot Snapshot, pgf *ParsedGoFile, options *imports.Options) (allFixEdits []protocol.TextEdit, editsPerFix []*ImportFix, err error) {
- filename := pgf.URI.Filename()
-
- // Build up basic information about the original file.
- allFixes, err := imports.FixImports(filename, pgf.Src, options)
- if err != nil {
- return nil, nil, err
- }
-
- allFixEdits, err = computeFixEdits(snapshot, pgf, options, allFixes)
- if err != nil {
- return nil, nil, err
- }
-
- // Apply all of the import fixes to the file.
- // Add the edits for each fix to the result.
- for _, fix := range allFixes {
- edits, err := computeFixEdits(snapshot, pgf, options, []*imports.ImportFix{fix})
- if err != nil {
- return nil, nil, err
- }
- editsPerFix = append(editsPerFix, &ImportFix{
- Fix: fix,
- Edits: edits,
- })
- }
- return allFixEdits, editsPerFix, nil
-}
-
-// ComputeOneImportFixEdits returns text edits for a single import fix.
-func ComputeOneImportFixEdits(snapshot Snapshot, pgf *ParsedGoFile, fix *imports.ImportFix) ([]protocol.TextEdit, error) {
- options := &imports.Options{
- LocalPrefix: snapshot.View().Options().Local,
- // Defaults.
- AllErrors: true,
- Comments: true,
- Fragment: true,
- FormatOnly: false,
- TabIndent: true,
- TabWidth: 8,
- }
- return computeFixEdits(snapshot, pgf, options, []*imports.ImportFix{fix})
-}
-
-func computeFixEdits(snapshot Snapshot, pgf *ParsedGoFile, options *imports.Options, fixes []*imports.ImportFix) ([]protocol.TextEdit, error) {
- // trim the original data to match fixedData
- left, err := importPrefix(pgf.Src)
- if err != nil {
- return nil, err
- }
- extra := !strings.Contains(left, "\n") // a single line may contain more than just imports
- if extra {
- left = string(pgf.Src)
- }
- if len(left) > 0 && left[len(left)-1] != '\n' {
- left += "\n"
- }
- // Apply the fixes and re-parse the file so that we can locate the
- // new imports.
- flags := parser.ImportsOnly
- if extra {
- // we used all of the original source above, so use all of it here too
- flags = 0
- }
- fixedData, err := imports.ApplyFixes(fixes, "", pgf.Src, options, flags)
- if err != nil {
- return nil, err
- }
- if fixedData == nil || fixedData[len(fixedData)-1] != '\n' {
- fixedData = append(fixedData, '\n') // ApplyFixes may miss the newline, go figure.
- }
- edits, err := snapshot.View().Options().ComputeEdits(pgf.URI, left, string(fixedData))
- if err != nil {
- return nil, err
- }
- return ProtocolEditsFromSource([]byte(left), edits, pgf.Mapper.Converter)
-}
-
-// importPrefix returns the prefix of the given file content through the final
-// import statement. If there are no imports, the prefix is the package
-// statement and any comment groups below it.
-func importPrefix(src []byte) (string, error) {
- fset := token.NewFileSet()
- // do as little parsing as possible
- f, err := parser.ParseFile(fset, "", src, parser.ImportsOnly|parser.ParseComments)
- if err != nil { // This can happen if 'package' is misspelled
- return "", fmt.Errorf("importPrefix: failed to parse: %s", err)
- }
- tok := fset.File(f.Pos())
- var importEnd int
- for _, d := range f.Decls {
- if x, ok := d.(*ast.GenDecl); ok && x.Tok == token.IMPORT {
- if e, err := Offset(tok, d.End()); err != nil {
- return "", fmt.Errorf("importPrefix: %s", err)
- } else if e > importEnd {
- importEnd = e
- }
- }
- }
-
- maybeAdjustToLineEnd := func(pos token.Pos, isCommentNode bool) int {
- offset, err := Offset(tok, pos)
- if err != nil {
- return -1
- }
-
- // Don't go past the end of the file.
- if offset > len(src) {
- offset = len(src)
- }
- // The go/ast package does not account for different line endings, and
- // specifically, in the text of a comment, it will strip out \r\n line
- // endings in favor of \n. To account for these differences, we try to
- // return a position on the next line whenever possible.
- switch line := tok.Line(tok.Pos(offset)); {
- case line < tok.LineCount():
- nextLineOffset, err := Offset(tok, tok.LineStart(line+1))
- if err != nil {
- return -1
- }
- // If we found a position that is at the end of a line, move the
- // offset to the start of the next line.
- if offset+1 == nextLineOffset {
- offset = nextLineOffset
- }
- case isCommentNode, offset+1 == tok.Size():
- // If the last line of the file is a comment, or we are at the end
- // of the file, the prefix is the entire file.
- offset = len(src)
- }
- return offset
- }
- if importEnd == 0 {
- pkgEnd := f.Name.End()
- importEnd = maybeAdjustToLineEnd(pkgEnd, false)
- }
- for _, cgroup := range f.Comments {
- for _, c := range cgroup.List {
- if end, err := Offset(tok, c.End()); err != nil {
- return "", err
- } else if end > importEnd {
- startLine := tok.Position(c.Pos()).Line
- endLine := tok.Position(c.End()).Line
-
- // Work around golang/go#41197 by checking if the comment might
- // contain "\r", and if so, find the actual end position of the
- // comment by scanning the content of the file.
- startOffset, err := Offset(tok, c.Pos())
- if err != nil {
- return "", err
- }
- if startLine != endLine && bytes.Contains(src[startOffset:], []byte("\r")) {
- if commentEnd := scanForCommentEnd(src[startOffset:]); commentEnd > 0 {
- end = startOffset + commentEnd
- }
- }
- importEnd = maybeAdjustToLineEnd(tok.Pos(end), true)
- }
- }
- }
- if importEnd > len(src) {
- importEnd = len(src)
- }
- return string(src[:importEnd]), nil
-}
-
-// scanForCommentEnd returns the offset of the end of the multi-line comment
-// at the start of the given byte slice.
-func scanForCommentEnd(src []byte) int {
- var s scanner.Scanner
- s.Init(bytes.NewReader(src))
- s.Mode ^= scanner.SkipComments
-
- t := s.Scan()
- if t == scanner.Comment {
- return s.Pos().Offset
- }
- return 0
-}
-
-func computeTextEdits(ctx context.Context, snapshot Snapshot, pgf *ParsedGoFile, formatted string) ([]protocol.TextEdit, error) {
- _, done := event.Start(ctx, "source.computeTextEdits")
- defer done()
-
- edits, err := snapshot.View().Options().ComputeEdits(pgf.URI, string(pgf.Src), formatted)
- if err != nil {
- return nil, err
- }
- return ToProtocolEdits(pgf.Mapper, edits)
-}
-
-// ProtocolEditsFromSource converts text edits to LSP edits using the original
-// source.
-func ProtocolEditsFromSource(src []byte, edits []diff.TextEdit, converter span.Converter) ([]protocol.TextEdit, error) {
- m := lsppos.NewMapper(src)
- var result []protocol.TextEdit
- for _, edit := range edits {
- spn, err := edit.Span.WithOffset(converter)
- if err != nil {
- return nil, fmt.Errorf("computing offsets: %v", err)
- }
- startLine, startChar := m.Position(spn.Start().Offset())
- endLine, endChar := m.Position(spn.End().Offset())
- if startLine < 0 || endLine < 0 {
- return nil, fmt.Errorf("out of bound span: %v", spn)
- }
-
- pstart := protocol.Position{Line: uint32(startLine), Character: uint32(startChar)}
- pend := protocol.Position{Line: uint32(endLine), Character: uint32(endChar)}
- if pstart == pend && edit.NewText == "" {
- // Degenerate case, which may result from a diff tool wanting to delete
- // '\r' in line endings. Filter it out.
- continue
- }
- result = append(result, protocol.TextEdit{
- Range: protocol.Range{Start: pstart, End: pend},
- NewText: edit.NewText,
- })
- }
- return result, nil
-}
-
-func ToProtocolEdits(m *protocol.ColumnMapper, edits []diff.TextEdit) ([]protocol.TextEdit, error) {
- if edits == nil {
- return nil, nil
- }
- result := make([]protocol.TextEdit, len(edits))
- for i, edit := range edits {
- rng, err := m.Range(edit.Span)
- if err != nil {
- return nil, err
- }
- result[i] = protocol.TextEdit{
- Range: rng,
- NewText: edit.NewText,
- }
- }
- return result, nil
-}
-
-func FromProtocolEdits(m *protocol.ColumnMapper, edits []protocol.TextEdit) ([]diff.TextEdit, error) {
- if edits == nil {
- return nil, nil
- }
- result := make([]diff.TextEdit, len(edits))
- for i, edit := range edits {
- spn, err := m.RangeSpan(edit.Range)
- if err != nil {
- return nil, err
- }
- result[i] = diff.TextEdit{
- Span: spn,
- NewText: edit.NewText,
- }
- }
- return result, nil
-}
diff --git a/internal/lsp/source/format_test.go b/internal/lsp/source/format_test.go
deleted file mode 100644
index eac78d979..000000000
--- a/internal/lsp/source/format_test.go
+++ /dev/null
@@ -1,91 +0,0 @@
-// Copyright 2020 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-package source
-
-import (
- "fmt"
- "strings"
- "testing"
-
- "golang.org/x/tools/internal/lsp/diff"
- "golang.org/x/tools/internal/lsp/diff/myers"
-)
-
-func TestImportPrefix(t *testing.T) {
- for i, tt := range []struct {
- input, want string
- }{
- {"package foo", "package foo"},
- {"package foo\n", "package foo\n"},
- {"package foo\n\nfunc f(){}\n", "package foo\n"},
- {"package foo\n\nimport \"fmt\"\n", "package foo\n\nimport \"fmt\""},
- {"package foo\nimport (\n\"fmt\"\n)\n", "package foo\nimport (\n\"fmt\"\n)"},
- {"\n\n\npackage foo\n", "\n\n\npackage foo\n"},
- {"// hi \n\npackage foo //xx\nfunc _(){}\n", "// hi \n\npackage foo //xx\n"},
- {"package foo //hi\n", "package foo //hi\n"},
- {"//hi\npackage foo\n//a\n\n//b\n", "//hi\npackage foo\n//a\n\n//b\n"},
- {
- "package a\n\nimport (\n \"fmt\"\n)\n//hi\n",
- "package a\n\nimport (\n \"fmt\"\n)\n//hi\n",
- },
- {`package a /*hi*/`, `package a /*hi*/`},
- {"package main\r\n\r\nimport \"go/types\"\r\n\r\n/*\r\n\r\n */\r\n", "package main\r\n\r\nimport \"go/types\"\r\n\r\n/*\r\n\r\n */\r\n"},
- {"package x; import \"os\"; func f() {}\n\n", "package x; import \"os\""},
- {"package x; func f() {fmt.Println()}\n\n", "package x"},
- } {
- got, err := importPrefix([]byte(tt.input))
- if err != nil {
- t.Fatal(err)
- }
- if got != tt.want {
- t.Errorf("%d: failed for %q:\n%s", i, tt.input, diffStr(t, tt.want, got))
- }
- }
-}
-
-func TestCRLFFile(t *testing.T) {
- for i, tt := range []struct {
- input, want string
- }{
- {
- input: `package main
-
-/*
-Hi description
-*/
-func Hi() {
-}
-`,
- want: `package main
-
-/*
-Hi description
-*/`,
- },
- } {
- got, err := importPrefix([]byte(strings.ReplaceAll(tt.input, "\n", "\r\n")))
- if err != nil {
- t.Fatal(err)
- }
- want := strings.ReplaceAll(tt.want, "\n", "\r\n")
- if got != want {
- t.Errorf("%d: failed for %q:\n%s", i, tt.input, diffStr(t, want, got))
- }
- }
-}
-
-func diffStr(t *testing.T, want, got string) string {
- if want == got {
- return ""
- }
- // Add newlines to avoid newline messages in diff.
- want += "\n"
- got += "\n"
- d, err := myers.ComputeEdits("", want, got)
- if err != nil {
- t.Fatal(err)
- }
- return fmt.Sprintf("%q", diff.ToUnified("want", "got", want, d))
-}
diff --git a/internal/lsp/source/gc_annotations.go b/internal/lsp/source/gc_annotations.go
deleted file mode 100644
index 3616bbfb1..000000000
--- a/internal/lsp/source/gc_annotations.go
+++ /dev/null
@@ -1,214 +0,0 @@
-// Copyright 2020 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-package source
-
-import (
- "bytes"
- "context"
- "encoding/json"
- "fmt"
- "io/ioutil"
- "os"
- "path/filepath"
- "strings"
-
- "golang.org/x/tools/internal/gocommand"
- "golang.org/x/tools/internal/lsp/protocol"
- "golang.org/x/tools/internal/span"
-)
-
-type Annotation string
-
-const (
- // Nil controls nil checks.
- Nil Annotation = "nil"
-
- // Escape controls diagnostics about escape choices.
- Escape Annotation = "escape"
-
- // Inline controls diagnostics about inlining choices.
- Inline Annotation = "inline"
-
- // Bounds controls bounds checking diagnostics.
- Bounds Annotation = "bounds"
-)
-
-func GCOptimizationDetails(ctx context.Context, snapshot Snapshot, pkg Package) (map[VersionedFileIdentity][]*Diagnostic, error) {
- if len(pkg.CompiledGoFiles()) == 0 {
- return nil, nil
- }
- pkgDir := filepath.Dir(pkg.CompiledGoFiles()[0].URI.Filename())
- outDir := filepath.Join(os.TempDir(), fmt.Sprintf("gopls-%d.details", os.Getpid()))
-
- if err := os.MkdirAll(outDir, 0700); err != nil {
- return nil, err
- }
- tmpFile, err := ioutil.TempFile(os.TempDir(), "gopls-x")
- if err != nil {
- return nil, err
- }
- defer os.Remove(tmpFile.Name())
-
- outDirURI := span.URIFromPath(outDir)
- // GC details doesn't handle Windows URIs in the form of "file:///C:/...",
- // so rewrite them to "file://C:/...". See golang/go#41614.
- if !strings.HasPrefix(outDir, "/") {
- outDirURI = span.URI(strings.Replace(string(outDirURI), "file:///", "file://", 1))
- }
- inv := &gocommand.Invocation{
- Verb: "build",
- Args: []string{
- fmt.Sprintf("-gcflags=-json=0,%s", outDirURI),
- fmt.Sprintf("-o=%s", tmpFile.Name()),
- ".",
- },
- WorkingDir: pkgDir,
- }
- _, err = snapshot.RunGoCommandDirect(ctx, Normal, inv)
- if err != nil {
- return nil, err
- }
- files, err := findJSONFiles(outDir)
- if err != nil {
- return nil, err
- }
- reports := make(map[VersionedFileIdentity][]*Diagnostic)
- opts := snapshot.View().Options()
- var parseError error
- for _, fn := range files {
- uri, diagnostics, err := parseDetailsFile(fn, opts)
- if err != nil {
- // errors are expected for all of the files except one
- parseError = err
- }
- fh := snapshot.FindFile(uri)
- if fh == nil {
- continue
- }
- if pkgDir != filepath.Dir(fh.URI().Filename()) {
- // https://github.com/golang/go/issues/42198
- // sometimes the detail diagnostics generated for files
- // outside the package can never be taken back.
- continue
- }
- reports[fh.VersionedFileIdentity()] = diagnostics
- }
- return reports, parseError
-}
-
-func parseDetailsFile(filename string, options *Options) (span.URI, []*Diagnostic, error) {
- buf, err := ioutil.ReadFile(filename)
- if err != nil {
- return "", nil, err
- }
- var (
- uri span.URI
- i int
- diagnostics []*Diagnostic
- )
- type metadata struct {
- File string `json:"file,omitempty"`
- }
- for dec := json.NewDecoder(bytes.NewReader(buf)); dec.More(); {
- // The first element always contains metadata.
- if i == 0 {
- i++
- m := new(metadata)
- if err := dec.Decode(m); err != nil {
- return "", nil, err
- }
- if !strings.HasSuffix(m.File, ".go") {
- continue // <autogenerated>
- }
- uri = span.URIFromPath(m.File)
- continue
- }
- d := new(protocol.Diagnostic)
- if err := dec.Decode(d); err != nil {
- return "", nil, err
- }
- msg := d.Code.(string)
- if msg != "" {
- msg = fmt.Sprintf("%s(%s)", msg, d.Message)
- }
- if !showDiagnostic(msg, d.Source, options) {
- continue
- }
- var related []RelatedInformation
- for _, ri := range d.RelatedInformation {
- related = append(related, RelatedInformation{
- URI: ri.Location.URI.SpanURI(),
- Range: zeroIndexedRange(ri.Location.Range),
- Message: ri.Message,
- })
- }
- diagnostic := &Diagnostic{
- URI: uri,
- Range: zeroIndexedRange(d.Range),
- Message: msg,
- Severity: d.Severity,
- Source: OptimizationDetailsError, // d.Source is always "go compiler" as of 1.16, use our own
- Tags: d.Tags,
- Related: related,
- }
- diagnostics = append(diagnostics, diagnostic)
- i++
- }
- return uri, diagnostics, nil
-}
-
-// showDiagnostic reports whether a given diagnostic should be shown to the end
-// user, given the current options.
-func showDiagnostic(msg, source string, o *Options) bool {
- if source != "go compiler" {
- return false
- }
- if o.Annotations == nil {
- return true
- }
- switch {
- case strings.HasPrefix(msg, "canInline") ||
- strings.HasPrefix(msg, "cannotInline") ||
- strings.HasPrefix(msg, "inlineCall"):
- return o.Annotations[Inline]
- case strings.HasPrefix(msg, "escape") || msg == "leak":
- return o.Annotations[Escape]
- case strings.HasPrefix(msg, "nilcheck"):
- return o.Annotations[Nil]
- case strings.HasPrefix(msg, "isInBounds") ||
- strings.HasPrefix(msg, "isSliceInBounds"):
- return o.Annotations[Bounds]
- }
- return false
-}
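For illustration only (not from the diff): with only the escape annotation enabled, showDiagnostic keeps "escape"/"leak" messages from the compiler and drops everything else. The sketch assumes Options exposes the Annotations map consulted above.

var opts Options
opts.Annotations = map[Annotation]bool{Escape: true}
showDiagnostic("escapes to heap: x", "go compiler", &opts) // true: matches the "escape" prefix
showDiagnostic("canInlineFunction", "go compiler", &opts)  // false: Inline is not enabled
showDiagnostic("escapes to heap: x", "other tool", &opts)  // false: source must be "go compiler"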
-
-// The range produced by the compiler is 1-indexed, so subtract 1 from each position.
-func zeroIndexedRange(rng protocol.Range) protocol.Range {
- return protocol.Range{
- Start: protocol.Position{
- Line: rng.Start.Line - 1,
- Character: rng.Start.Character - 1,
- },
- End: protocol.Position{
- Line: rng.End.Line - 1,
- Character: rng.End.Character - 1,
- },
- }
-}
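For illustration only (not from the diff): a compiler range pointing at line 3, columns 7–12 (1-indexed) becomes line 2, characters 6–11 in the 0-indexed LSP range.

in := protocol.Range{
	Start: protocol.Position{Line: 3, Character: 7},
	End:   protocol.Position{Line: 3, Character: 12},
}
out := zeroIndexedRange(in)
// out.Start is {Line: 2, Character: 6}; out.End is {Line: 2, Character: 11}.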
-
-func findJSONFiles(dir string) ([]string, error) {
- ans := []string{}
- f := func(path string, fi os.FileInfo, _ error) error {
- if fi.IsDir() {
- return nil
- }
- if strings.HasSuffix(path, ".json") {
- ans = append(ans, path)
- }
- return nil
- }
- err := filepath.Walk(dir, f)
- return ans, err
-}
diff --git a/internal/lsp/source/highlight.go b/internal/lsp/source/highlight.go
deleted file mode 100644
index 7cdb484a8..000000000
--- a/internal/lsp/source/highlight.go
+++ /dev/null
@@ -1,509 +0,0 @@
-// Copyright 2019 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-package source
-
-import (
- "context"
- "fmt"
- "go/ast"
- "go/token"
- "go/types"
- "strings"
-
- "golang.org/x/tools/go/ast/astutil"
- "golang.org/x/tools/internal/event"
- "golang.org/x/tools/internal/lsp/protocol"
- errors "golang.org/x/xerrors"
-)
-
-func Highlight(ctx context.Context, snapshot Snapshot, fh FileHandle, pos protocol.Position) ([]protocol.Range, error) {
- ctx, done := event.Start(ctx, "source.Highlight")
- defer done()
-
- // Don't use GetParsedFile because it uses TypecheckWorkspace, and we
- // always want fully parsed files for highlight, regardless of whether
- // the file belongs to a workspace package.
- pkg, err := snapshot.PackageForFile(ctx, fh.URI(), TypecheckFull, WidestPackage)
- if err != nil {
- return nil, errors.Errorf("getting package for Highlight: %w", err)
- }
- pgf, err := pkg.File(fh.URI())
- if err != nil {
- return nil, errors.Errorf("getting file for Highlight: %w", err)
- }
-
- spn, err := pgf.Mapper.PointSpan(pos)
- if err != nil {
- return nil, err
- }
- rng, err := spn.Range(pgf.Mapper.Converter)
- if err != nil {
- return nil, err
- }
- path, _ := astutil.PathEnclosingInterval(pgf.File, rng.Start, rng.Start)
- if len(path) == 0 {
- return nil, fmt.Errorf("no enclosing position found for %v:%v", int(pos.Line), int(pos.Character))
- }
- // If start == end for astutil.PathEnclosingInterval, the 1-char interval
- // following start is used instead. As a result, we might not get an exact
- // match so we should check the 1-char interval to the left of the passed
- // in position to see if that is an exact match.
- if _, ok := path[0].(*ast.Ident); !ok {
- if p, _ := astutil.PathEnclosingInterval(pgf.File, rng.Start-1, rng.Start-1); p != nil {
- switch p[0].(type) {
- case *ast.Ident, *ast.SelectorExpr:
- path = p // use preceding ident/selector
- }
- }
- }
- result, err := highlightPath(pkg, path)
- if err != nil {
- return nil, err
- }
- var ranges []protocol.Range
- for rng := range result {
- mRng, err := posToMappedRange(snapshot, pkg, rng.start, rng.end)
- if err != nil {
- return nil, err
- }
- pRng, err := mRng.Range()
- if err != nil {
- return nil, err
- }
- ranges = append(ranges, pRng)
- }
- return ranges, nil
-}
-
-func highlightPath(pkg Package, path []ast.Node) (map[posRange]struct{}, error) {
- result := make(map[posRange]struct{})
- switch node := path[0].(type) {
- case *ast.BasicLit:
- if len(path) > 1 {
- if _, ok := path[1].(*ast.ImportSpec); ok {
- err := highlightImportUses(pkg, path, result)
- return result, err
- }
- }
- highlightFuncControlFlow(path, result)
- case *ast.ReturnStmt, *ast.FuncDecl, *ast.FuncType:
- highlightFuncControlFlow(path, result)
- case *ast.Ident:
- highlightIdentifiers(pkg, path, result)
- case *ast.ForStmt, *ast.RangeStmt:
- highlightLoopControlFlow(path, result)
- case *ast.SwitchStmt:
- highlightSwitchFlow(path, result)
- case *ast.BranchStmt:
- // BREAK can exit a loop, switch, or select, while CONTINUE can only exit a loop, so
- // these need to be handled separately. They can also be embedded in any
- // other loop/switch/select if they have a label. TODO: add support for
- // GOTO and FALLTHROUGH as well.
- if node.Label != nil {
- highlightLabeledFlow(node, result)
- } else {
- switch node.Tok {
- case token.BREAK:
- highlightUnlabeledBreakFlow(path, result)
- case token.CONTINUE:
- highlightLoopControlFlow(path, result)
- }
- }
- default:
- // If the cursor is in an unidentified area, return empty results.
- return nil, nil
- }
- return result, nil
-}
-
-type posRange struct {
- start, end token.Pos
-}
-
-func highlightFuncControlFlow(path []ast.Node, result map[posRange]struct{}) {
- var enclosingFunc ast.Node
- var returnStmt *ast.ReturnStmt
- var resultsList *ast.FieldList
- inReturnList := false
-
-Outer:
- // Reverse walk the path till we get to the func block.
- for i, n := range path {
- switch node := n.(type) {
- case *ast.KeyValueExpr:
- // If the cursor is in a key:value expr, we don't want control flow highlighting.
- return
- case *ast.CallExpr:
- // If the cursor is on an arg in a CallExpr, we don't want control flow highlighting.
- if i > 0 {
- for _, arg := range node.Args {
- if arg == path[i-1] {
- return
- }
- }
- }
- case *ast.Field:
- inReturnList = true
- case *ast.FuncLit:
- enclosingFunc = n
- resultsList = node.Type.Results
- break Outer
- case *ast.FuncDecl:
- enclosingFunc = n
- resultsList = node.Type.Results
- break Outer
- case *ast.ReturnStmt:
- returnStmt = node
- // If the cursor is not directly in an *ast.ReturnStmt, then
- // we need to know if it is within one of the values being returned.
- inReturnList = inReturnList || path[0] != returnStmt
- }
- }
- // Cursor is not in a function.
- if enclosingFunc == nil {
- return
- }
- // If the cursor is on a "return" or "func" keyword, we should highlight all of the exit
- // points of the function, including the "return" and "func" keywords.
- highlightAllReturnsAndFunc := path[0] == returnStmt || path[0] == enclosingFunc
- switch path[0].(type) {
- case *ast.Ident, *ast.BasicLit:
- // Cursor is in an identifier and not in a return statement or in the results list.
- if returnStmt == nil && !inReturnList {
- return
- }
- case *ast.FuncType:
- highlightAllReturnsAndFunc = true
- }
- // The user's cursor may be within the return statement of a function,
- // or within the result section of a function's signature.
- // index := -1
- var nodes []ast.Node
- if returnStmt != nil {
- for _, n := range returnStmt.Results {
- nodes = append(nodes, n)
- }
- } else if resultsList != nil {
- for _, n := range resultsList.List {
- nodes = append(nodes, n)
- }
- }
- _, index := nodeAtPos(nodes, path[0].Pos())
-
- // Highlight the correct argument in the function declaration return types.
- if resultsList != nil && -1 < index && index < len(resultsList.List) {
- rng := posRange{
- start: resultsList.List[index].Pos(),
- end: resultsList.List[index].End(),
- }
- result[rng] = struct{}{}
- }
- // Add the "func" part of the func declaration.
- if highlightAllReturnsAndFunc {
- r := posRange{
- start: enclosingFunc.Pos(),
- end: enclosingFunc.Pos() + token.Pos(len("func")),
- }
- result[r] = struct{}{}
- }
- ast.Inspect(enclosingFunc, func(n ast.Node) bool {
- // Don't traverse any other functions.
- switch n.(type) {
- case *ast.FuncDecl, *ast.FuncLit:
- return enclosingFunc == n
- }
- ret, ok := n.(*ast.ReturnStmt)
- if !ok {
- return true
- }
- var toAdd ast.Node
- // Add the entire return statement; this applies when highlighting the word "return" or "func".
- if highlightAllReturnsAndFunc {
- toAdd = n
- }
- // Add the relevant field within the entire return statement.
- if -1 < index && index < len(ret.Results) {
- toAdd = ret.Results[index]
- }
- if toAdd != nil {
- result[posRange{start: toAdd.Pos(), end: toAdd.End()}] = struct{}{}
- }
- return false
- })
-}
-
-func highlightUnlabeledBreakFlow(path []ast.Node, result map[posRange]struct{}) {
- // Reverse walk the path until we find closest loop, select, or switch.
- for _, n := range path {
- switch n.(type) {
- case *ast.ForStmt, *ast.RangeStmt:
- highlightLoopControlFlow(path, result)
- return // only highlight the innermost statement
- case *ast.SwitchStmt:
- highlightSwitchFlow(path, result)
- return
- case *ast.SelectStmt:
- // TODO: add highlight when breaking a select.
- return
- }
- }
-}
-
-func highlightLabeledFlow(node *ast.BranchStmt, result map[posRange]struct{}) {
- obj := node.Label.Obj
- if obj == nil || obj.Decl == nil {
- return
- }
- label, ok := obj.Decl.(*ast.LabeledStmt)
- if !ok {
- return
- }
- switch label.Stmt.(type) {
- case *ast.ForStmt, *ast.RangeStmt:
- highlightLoopControlFlow([]ast.Node{label.Stmt, label}, result)
- case *ast.SwitchStmt:
- highlightSwitchFlow([]ast.Node{label.Stmt, label}, result)
- }
-}
-
-func labelFor(path []ast.Node) *ast.Ident {
- if len(path) > 1 {
- if n, ok := path[1].(*ast.LabeledStmt); ok {
- return n.Label
- }
- }
- return nil
-}
-
-func highlightLoopControlFlow(path []ast.Node, result map[posRange]struct{}) {
- var loop ast.Node
- var loopLabel *ast.Ident
- stmtLabel := labelFor(path)
-Outer:
- // Reverse walk the path till we get to the for loop.
- for i := range path {
- switch n := path[i].(type) {
- case *ast.ForStmt, *ast.RangeStmt:
- loopLabel = labelFor(path[i:])
-
- if stmtLabel == nil || loopLabel == stmtLabel {
- loop = n
- break Outer
- }
- }
- }
- if loop == nil {
- return
- }
-
- // Add the for statement.
- rng := posRange{
- start: loop.Pos(),
- end: loop.Pos() + token.Pos(len("for")),
- }
- result[rng] = struct{}{}
-
- // Traverse AST to find branch statements within the same for-loop.
- ast.Inspect(loop, func(n ast.Node) bool {
- switch n.(type) {
- case *ast.ForStmt, *ast.RangeStmt:
- return loop == n
- case *ast.SwitchStmt, *ast.SelectStmt:
- return false
- }
- b, ok := n.(*ast.BranchStmt)
- if !ok {
- return true
- }
- if b.Label == nil || labelDecl(b.Label) == loopLabel {
- result[posRange{start: b.Pos(), end: b.End()}] = struct{}{}
- }
- return true
- })
-
- // Find continue statements in the same loop or switches/selects.
- ast.Inspect(loop, func(n ast.Node) bool {
- switch n.(type) {
- case *ast.ForStmt, *ast.RangeStmt:
- return loop == n
- }
-
- if n, ok := n.(*ast.BranchStmt); ok && n.Tok == token.CONTINUE {
- result[posRange{start: n.Pos(), end: n.End()}] = struct{}{}
- }
- return true
- })
-
- // We don't need to check other for loops if we aren't looking for labeled statements.
- if loopLabel == nil {
- return
- }
-
- // Find labeled branch statements in any loop.
- ast.Inspect(loop, func(n ast.Node) bool {
- b, ok := n.(*ast.BranchStmt)
- if !ok {
- return true
- }
- // add branch statements whose label matches the loop's label
- if b.Label != nil && labelDecl(b.Label) == loopLabel {
- result[posRange{start: b.Pos(), end: b.End()}] = struct{}{}
- }
- return true
- })
-}
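For illustration only (not part of the original file): with the cursor on the labeled break below, the path resolves to the outer loop, so its "for" keyword and the branch statements targeting its label are highlighted, while the inner loop's own keywords and its unlabeled continue are not.

func find(limit int) {
outer:
	for i := 0; i < limit; i++ { // "for" is highlighted
		for j := 0; j < limit; j++ { // the inner loop itself is not
			if i*j > 20 {
				break outer // highlighted: its label resolves to the outer loop
			}
			if j == 5 {
				continue // not highlighted: it belongs to the inner loop
			}
		}
	}
}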
-
-func highlightSwitchFlow(path []ast.Node, result map[posRange]struct{}) {
- var switchNode ast.Node
- var switchNodeLabel *ast.Ident
- stmtLabel := labelFor(path)
-Outer:
- // Reverse walk the path till we get to the switch statement.
- for i := range path {
- switch n := path[i].(type) {
- case *ast.SwitchStmt:
- switchNodeLabel = labelFor(path[i:])
- if stmtLabel == nil || switchNodeLabel == stmtLabel {
- switchNode = n
- break Outer
- }
- }
- }
- // Cursor is not in a switch statement
- if switchNode == nil {
- return
- }
-
- // Add the switch statement.
- rng := posRange{
- start: switchNode.Pos(),
- end: switchNode.Pos() + token.Pos(len("switch")),
- }
- result[rng] = struct{}{}
-
- // Traverse AST to find break statements within the same switch.
- ast.Inspect(switchNode, func(n ast.Node) bool {
- switch n.(type) {
- case *ast.SwitchStmt:
- return switchNode == n
- case *ast.ForStmt, *ast.RangeStmt, *ast.SelectStmt:
- return false
- }
-
- b, ok := n.(*ast.BranchStmt)
- if !ok || b.Tok != token.BREAK {
- return true
- }
-
- if b.Label == nil || labelDecl(b.Label) == switchNodeLabel {
- result[posRange{start: b.Pos(), end: b.End()}] = struct{}{}
- }
- return true
- })
-
- // We don't need to check other switches if we aren't looking for labeled statements.
- if switchNodeLabel == nil {
- return
- }
-
- // Find labeled break statements in any switch
- ast.Inspect(switchNode, func(n ast.Node) bool {
- b, ok := n.(*ast.BranchStmt)
- if !ok || b.Tok != token.BREAK {
- return true
- }
-
- if b.Label != nil && labelDecl(b.Label) == switchNodeLabel {
- result[posRange{start: b.Pos(), end: b.End()}] = struct{}{}
- }
-
- return true
- })
-}
-
-func labelDecl(n *ast.Ident) *ast.Ident {
- if n == nil {
- return nil
- }
- if n.Obj == nil {
- return nil
- }
- if n.Obj.Decl == nil {
- return nil
- }
- stmt, ok := n.Obj.Decl.(*ast.LabeledStmt)
- if !ok {
- return nil
- }
- return stmt.Label
-}
-
-func highlightImportUses(pkg Package, path []ast.Node, result map[posRange]struct{}) error {
- basicLit, ok := path[0].(*ast.BasicLit)
- if !ok {
- return errors.Errorf("highlightImportUses called with an ast.Node of type %T", basicLit)
- }
- ast.Inspect(path[len(path)-1], func(node ast.Node) bool {
- if imp, ok := node.(*ast.ImportSpec); ok && imp.Path == basicLit {
- result[posRange{start: node.Pos(), end: node.End()}] = struct{}{}
- return false
- }
- n, ok := node.(*ast.Ident)
- if !ok {
- return true
- }
- obj, ok := pkg.GetTypesInfo().ObjectOf(n).(*types.PkgName)
- if !ok {
- return true
- }
- if !strings.Contains(basicLit.Value, obj.Name()) {
- return true
- }
- result[posRange{start: n.Pos(), end: n.End()}] = struct{}{}
- return false
- })
- return nil
-}
-
-func highlightIdentifiers(pkg Package, path []ast.Node, result map[posRange]struct{}) error {
- id, ok := path[0].(*ast.Ident)
- if !ok {
- return errors.Errorf("highlightIdentifiers called with an ast.Node of type %T", id)
- }
- // Check if ident is inside return or func decl.
- highlightFuncControlFlow(path, result)
-
- // TODO: maybe check if the ident is a reserved word; if so, return the results without continuing.
-
- idObj := pkg.GetTypesInfo().ObjectOf(id)
- pkgObj, isImported := idObj.(*types.PkgName)
- ast.Inspect(path[len(path)-1], func(node ast.Node) bool {
- if imp, ok := node.(*ast.ImportSpec); ok && isImported {
- highlightImport(pkgObj, imp, result)
- }
- n, ok := node.(*ast.Ident)
- if !ok {
- return true
- }
- if n.Name != id.Name {
- return false
- }
- if nObj := pkg.GetTypesInfo().ObjectOf(n); nObj == idObj {
- result[posRange{start: n.Pos(), end: n.End()}] = struct{}{}
- }
- return false
- })
- return nil
-}
-
-func highlightImport(obj *types.PkgName, imp *ast.ImportSpec, result map[posRange]struct{}) {
- if imp.Name != nil || imp.Path == nil {
- return
- }
- if !strings.Contains(imp.Path.Value, obj.Name()) {
- return
- }
- result[posRange{start: imp.Path.Pos(), end: imp.Path.End()}] = struct{}{}
-}
diff --git a/internal/lsp/source/hover.go b/internal/lsp/source/hover.go
deleted file mode 100644
index b6fd9acf9..000000000
--- a/internal/lsp/source/hover.go
+++ /dev/null
@@ -1,870 +0,0 @@
-// Copyright 2019 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-package source
-
-import (
- "context"
- "encoding/json"
- "fmt"
- "go/ast"
- "go/constant"
- "go/doc"
- "go/format"
- "go/token"
- "go/types"
- "strconv"
- "strings"
- "time"
- "unicode/utf8"
-
- "golang.org/x/text/unicode/runenames"
- "golang.org/x/tools/internal/event"
- "golang.org/x/tools/internal/lsp/protocol"
- "golang.org/x/tools/internal/typeparams"
- errors "golang.org/x/xerrors"
-)
-
-// HoverContext contains context extracted from the syntax and type information
-// of a given node, for use in various summaries (hover, autocomplete,
-// signature help).
-type HoverContext struct {
- // signatureSource is the object or node used to derive the hover signature.
- //
- // It may also hold a precomputed string.
- // TODO(rfindley): pre-compute all signatures to avoid this indirection.
- signatureSource interface{}
-
- // comment is the most relevant comment group associated with the hovered object.
- Comment *ast.CommentGroup
-}
-
-// HoverJSON contains information used by hover. It is also the JSON returned
-// for the "structured" hover format
-type HoverJSON struct {
- // Synopsis is a single sentence synopsis of the symbol's documentation.
- Synopsis string `json:"synopsis"`
-
- // FullDocumentation is the symbol's full documentation.
- FullDocumentation string `json:"fullDocumentation"`
-
- // Signature is the symbol's signature.
- Signature string `json:"signature"`
-
- // SingleLine is a single line describing the symbol.
- // This is recommended only for use in clients that show a single line for hover.
- SingleLine string `json:"singleLine"`
-
- // SymbolName is the types.Object.Name for the given symbol.
- SymbolName string `json:"symbolName"`
-
- // LinkPath is the pkg.go.dev link for the given symbol.
- // For example, the "go/ast" part of "pkg.go.dev/go/ast#Node".
- LinkPath string `json:"linkPath"`
-
- // LinkAnchor is the pkg.go.dev link anchor for the given symbol.
- // For example, the "Node" part of "pkg.go.dev/go/ast#Node".
- LinkAnchor string `json:"linkAnchor"`
-}
-
-func Hover(ctx context.Context, snapshot Snapshot, fh FileHandle, position protocol.Position) (*protocol.Hover, error) {
- ident, err := Identifier(ctx, snapshot, fh, position)
- if err != nil {
- if hover, innerErr := hoverRune(ctx, snapshot, fh, position); innerErr == nil {
- return hover, nil
- }
- return nil, nil
- }
- h, err := HoverIdentifier(ctx, ident)
- if err != nil {
- return nil, err
- }
- rng, err := ident.Range()
- if err != nil {
- return nil, err
- }
- hover, err := FormatHover(h, snapshot.View().Options())
- if err != nil {
- return nil, err
- }
- return &protocol.Hover{
- Contents: protocol.MarkupContent{
- Kind: snapshot.View().Options().PreferredContentFormat,
- Value: hover,
- },
- Range: rng,
- }, nil
-}
-
-func hoverRune(ctx context.Context, snapshot Snapshot, fh FileHandle, position protocol.Position) (*protocol.Hover, error) {
- ctx, done := event.Start(ctx, "source.hoverRune")
- defer done()
-
- r, mrng, err := findRune(ctx, snapshot, fh, position)
- if err != nil {
- return nil, err
- }
- rng, err := mrng.Range()
- if err != nil {
- return nil, err
- }
-
- var desc string
- runeName := runenames.Name(r)
- if len(runeName) > 0 && runeName[0] == '<' {
- // Check if the rune looks like an HTML tag. If so, trim the surrounding <>
- // characters to work around https://github.com/microsoft/vscode/issues/124042.
- runeName = strings.TrimRight(runeName[1:], ">")
- }
- if strconv.IsPrint(r) {
- desc = fmt.Sprintf("'%s', U+%04X, %s", string(r), uint32(r), runeName)
- } else {
- desc = fmt.Sprintf("U+%04X, %s", uint32(r), runeName)
- }
- return &protocol.Hover{
- Contents: protocol.MarkupContent{
- Kind: snapshot.View().Options().PreferredContentFormat,
- Value: desc,
- },
- Range: rng,
- }, nil
-}
-
-// ErrNoRuneFound is the error returned when no rune is found at a particular position.
-var ErrNoRuneFound = errors.New("no rune found")
-
-// findRune returns rune information for a position in a file.
-func findRune(ctx context.Context, snapshot Snapshot, fh FileHandle, position protocol.Position) (rune, MappedRange, error) {
- pkg, pgf, err := GetParsedFile(ctx, snapshot, fh, NarrowestPackage)
- if err != nil {
- return 0, MappedRange{}, err
- }
- spn, err := pgf.Mapper.PointSpan(position)
- if err != nil {
- return 0, MappedRange{}, err
- }
- rng, err := spn.Range(pgf.Mapper.Converter)
- if err != nil {
- return 0, MappedRange{}, err
- }
- pos := rng.Start
-
- // Find the basic literal enclosing the given position, if there is one.
- var lit *ast.BasicLit
- var found bool
- ast.Inspect(pgf.File, func(n ast.Node) bool {
- if found {
- return false
- }
- if n, ok := n.(*ast.BasicLit); ok && pos >= n.Pos() && pos <= n.End() {
- lit = n
- found = true
- }
- return !found
- })
- if !found {
- return 0, MappedRange{}, ErrNoRuneFound
- }
-
- var r rune
- var start, end token.Pos
- switch lit.Kind {
- case token.CHAR:
- s, err := strconv.Unquote(lit.Value)
- if err != nil {
- // If the conversion fails, it's because of invalid syntax, so there
- // is no rune to be found.
- return 0, MappedRange{}, ErrNoRuneFound
- }
- r, _ = utf8.DecodeRuneInString(s)
- if r == utf8.RuneError {
- return 0, MappedRange{}, fmt.Errorf("rune error")
- }
- start, end = lit.Pos(), lit.End()
- case token.INT:
- // It's an integer; scan only if it is a hex literal whose bit size is
- // in the range 8 to 32.
- if !(strings.HasPrefix(lit.Value, "0x") && len(lit.Value[2:]) >= 2 && len(lit.Value[2:]) <= 8) {
- return 0, MappedRange{}, ErrNoRuneFound
- }
- v, err := strconv.ParseUint(lit.Value[2:], 16, 32)
- if err != nil {
- return 0, MappedRange{}, err
- }
- r = rune(v)
- if r == utf8.RuneError {
- return 0, MappedRange{}, fmt.Errorf("rune error")
- }
- start, end = lit.Pos(), lit.End()
- case token.STRING:
- // It's a string; scan only if it contains a unicode escape sequence at or before the
- // current cursor position.
- var found bool
- litOffset, err := Offset(pgf.Tok, lit.Pos())
- if err != nil {
- return 0, MappedRange{}, err
- }
- offset, err := Offset(pgf.Tok, pos)
- if err != nil {
- return 0, MappedRange{}, err
- }
- for i := offset - litOffset; i > 0; i-- {
- // Start at the cursor position and search backward for the beginning of a rune escape sequence.
- rr, _ := utf8.DecodeRuneInString(lit.Value[i:])
- if rr == utf8.RuneError {
- return 0, MappedRange{}, fmt.Errorf("rune error")
- }
- if rr == '\\' {
- // Got the beginning, decode it.
- var tail string
- r, _, tail, err = strconv.UnquoteChar(lit.Value[i:], '"')
- if err != nil {
- // If the conversion fails, it's because of invalid syntax, so there is no rune to be found.
- return 0, MappedRange{}, ErrNoRuneFound
- }
- // Only the rune escape sequence part of the string has to be highlighted, recompute the range.
- runeLen := len(lit.Value) - (int(i) + len(tail))
- start = token.Pos(int(lit.Pos()) + int(i))
- end = token.Pos(int(start) + runeLen)
- found = true
- break
- }
- }
- if !found {
- // No escape sequence found
- return 0, MappedRange{}, ErrNoRuneFound
- }
- default:
- return 0, MappedRange{}, ErrNoRuneFound
- }
-
- mappedRange, err := posToMappedRange(snapshot, pkg, start, end)
- if err != nil {
- return 0, MappedRange{}, err
- }
- return r, mappedRange, nil
-}
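For illustration only (not from the diff): each literal below is one findRune can decode. The exact hover text depends on runenames, but it follows the "'%s', U+%04X, name" shape built in hoverRune.

s := "sum: \u2211" // STRING: hovering the \u2211 escape reports rune U+2211 over just that escape
c := '\u2318'      // CHAR: reports rune U+2318
n := 0x1F600       // INT: a hex literal with 2–8 hex digits, reports rune U+1F600
_, _, _ = s, c, n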
-
-func HoverIdentifier(ctx context.Context, i *IdentifierInfo) (*HoverJSON, error) {
- ctx, done := event.Start(ctx, "source.Hover")
- defer done()
-
- hoverCtx, err := FindHoverContext(ctx, i.Snapshot, i.pkg, i.Declaration.obj, i.Declaration.node, i.Declaration.fullDecl)
- if err != nil {
- return nil, err
- }
-
- h := &HoverJSON{
- FullDocumentation: hoverCtx.Comment.Text(),
- Synopsis: doc.Synopsis(hoverCtx.Comment.Text()),
- }
-
- fset := i.Snapshot.FileSet()
- // Determine the symbol's signature.
- switch x := hoverCtx.signatureSource.(type) {
- case string:
- h.Signature = x // a pre-computed signature
-
- case *ast.TypeSpec:
- x2 := *x
- // Don't duplicate comments when formatting type specs.
- x2.Doc = nil
- x2.Comment = nil
- var b strings.Builder
- b.WriteString("type ")
- if err := format.Node(&b, fset, &x2); err != nil {
- return nil, err
- }
- h.Signature = b.String()
-
- case ast.Node:
- var b strings.Builder
- if err := format.Node(&b, fset, x); err != nil {
- return nil, err
- }
- h.Signature = b.String()
-
- // Check if the variable is an integer whose value we can present in a more
- // user-friendly way, e.g. `var hex = 0xe34e` becomes `var hex = 58190`
- if spec, ok := x.(*ast.ValueSpec); ok && len(spec.Values) > 0 {
- if lit, ok := spec.Values[0].(*ast.BasicLit); ok && len(spec.Names) > 0 {
- val := constant.MakeFromLiteral(types.ExprString(lit), lit.Kind, 0)
- h.Signature = fmt.Sprintf("var %s = %s", spec.Names[0], val)
- }
- }
-
- case types.Object:
- // If the variable is implicitly declared in a type switch, we need to
- // manually generate its object string.
- if typ := i.Declaration.typeSwitchImplicit; typ != nil {
- if v, ok := x.(*types.Var); ok {
- h.Signature = fmt.Sprintf("var %s %s", v.Name(), types.TypeString(typ, i.qf))
- break
- }
- }
- h.Signature = objectString(x, i.qf, i.Inferred)
- }
- if obj := i.Declaration.obj; obj != nil {
- h.SingleLine = objectString(obj, i.qf, nil)
- }
- obj := i.Declaration.obj
- if obj == nil {
- return h, nil
- }
-
- // Check if the identifier is test-only (and is therefore not part of a
- // package's API). This is true if the request originated in a test package,
- // and if the declaration is also found in the same test package.
- if i.pkg != nil && obj.Pkg() != nil && i.pkg.ForTest() != "" {
- if _, err := i.pkg.File(i.Declaration.MappedRange[0].URI()); err == nil {
- return h, nil
- }
- }
-
- h.SymbolName, h.LinkPath, h.LinkAnchor = linkData(obj, i.enclosing)
-
- // See golang/go#36998: don't link to modules matching GOPRIVATE.
- //
- // The path returned by linkData is an import path.
- if i.Snapshot.View().IsGoPrivatePath(h.LinkPath) {
- h.LinkPath = ""
- } else if mod, version, ok := moduleAtVersion(h.LinkPath, i); ok {
- h.LinkPath = strings.Replace(h.LinkPath, mod, mod+"@"+version, 1)
- }
-
- return h, nil
-}
-
-// linkData returns the name, import path, and anchor to use in building links
-// to obj.
-//
-// If obj is not visible in documentation, the returned name will be empty.
-func linkData(obj types.Object, enclosing *types.TypeName) (name, importPath, anchor string) {
- // Package names simply link to the package.
- if obj, ok := obj.(*types.PkgName); ok {
- return obj.Name(), obj.Imported().Path(), ""
- }
-
- // Builtins link to the special builtin package.
- if obj.Parent() == types.Universe {
- return obj.Name(), "builtin", obj.Name()
- }
-
- // In all other cases, the object must be exported.
- if !obj.Exported() {
- return "", "", ""
- }
-
- var recv types.Object // If non-nil, the field or method receiver base.
-
- switch obj := obj.(type) {
- case *types.Var:
- // If the object is a field and we have an associated selector,
- // composite literal, or struct, we can determine the link.
- if obj.IsField() && enclosing != nil {
- recv = enclosing
- }
- case *types.Func:
- typ, ok := obj.Type().(*types.Signature)
- if !ok {
- // Note: this should never happen. go/types guarantees that the type of
- // a *Func is a *Signature.
- //
- // TODO(rfindley): given a 'debug' mode, we should panic here.
- return "", "", ""
- }
- if r := typ.Recv(); r != nil {
- if rtyp, _ := Deref(r.Type()).(*types.Named); rtyp != nil {
- // If we have an unexported type, see if the enclosing type is
- // exported (we may have an interface or struct we can link
- // to). If not, don't show any link.
- if !rtyp.Obj().Exported() {
- if enclosing != nil {
- recv = enclosing
- } else {
- return "", "", ""
- }
- } else {
- recv = rtyp.Obj()
- }
- }
- }
- }
-
- if recv != nil && !recv.Exported() {
- return "", "", ""
- }
-
- // Either the object or its receiver must be in the package scope.
- scopeObj := obj
- if recv != nil {
- scopeObj = recv
- }
- if scopeObj.Pkg() == nil || scopeObj.Pkg().Scope().Lookup(scopeObj.Name()) != scopeObj {
- return "", "", ""
- }
-
- importPath = obj.Pkg().Path()
- if recv != nil {
- anchor = fmt.Sprintf("%s.%s", recv.Name(), obj.Name())
- name = fmt.Sprintf("(%s.%s).%s", obj.Pkg().Name(), recv.Name(), obj.Name())
- } else {
- // For most cases, the link is "package/path#symbol".
- anchor = obj.Name()
- name = fmt.Sprintf("%s.%s", obj.Pkg().Name(), obj.Name())
- }
- return name, importPath, anchor
-}
-
-func moduleAtVersion(path string, i *IdentifierInfo) (string, string, bool) {
- // TODO(rfindley): moduleAtVersion should not be responsible for deciding
- // whether or not the link target supports module version links.
- if strings.ToLower(i.Snapshot.View().Options().LinkTarget) != "pkg.go.dev" {
- return "", "", false
- }
- impPkg, err := i.pkg.GetImport(path)
- if err != nil {
- return "", "", false
- }
- if impPkg.Version() == nil {
- return "", "", false
- }
- version, modpath := impPkg.Version().Version, impPkg.Version().Path
- if modpath == "" || version == "" {
- return "", "", false
- }
- return modpath, version, true
-}
-
-// objectString is a wrapper around the types.ObjectString function.
-// It handles adding more information to the object string.
-func objectString(obj types.Object, qf types.Qualifier, inferred *types.Signature) string {
- // If the signature type was inferred, prefer the inferred signature, with a
- // comment showing the generic signature.
- if sig, _ := obj.Type().(*types.Signature); sig != nil && typeparams.ForSignature(sig).Len() > 0 && inferred != nil {
- obj2 := types.NewFunc(obj.Pos(), obj.Pkg(), obj.Name(), inferred)
- str := types.ObjectString(obj2, qf)
- // Try to avoid overly long lines.
- if len(str) > 60 {
- str += "\n"
- } else {
- str += " "
- }
- str += "// " + types.TypeString(sig, qf)
- return str
- }
- str := types.ObjectString(obj, qf)
- switch obj := obj.(type) {
- case *types.Const:
- str = fmt.Sprintf("%s = %s", str, obj.Val())
-
- // Try to add a formatted duration as an inline comment
- typ, ok := obj.Type().(*types.Named)
- if !ok {
- break
- }
- pkg := typ.Obj().Pkg()
- if pkg.Path() == "time" && typ.Obj().Name() == "Duration" {
- if d, ok := constant.Int64Val(obj.Val()); ok {
- str += " // " + time.Duration(d).String()
- }
- }
- }
- return str
-}
-
-// FindHoverContext returns a HoverContext struct for an AST node and its
-// declaration object. pkgNode should be the actual node used in type
-// checking, while fullDecl may be a separate declaration with more complete
-// syntactic information.
-func FindHoverContext(ctx context.Context, s Snapshot, pkg Package, obj types.Object, pkgNode ast.Node, fullDecl ast.Decl) (*HoverContext, error) {
- var info *HoverContext
-
- // Type parameters get their signature from their declaration object.
- if _, isTypeName := obj.(*types.TypeName); isTypeName {
- if _, isTypeParam := obj.Type().(*typeparams.TypeParam); isTypeParam {
- return &HoverContext{signatureSource: obj}, nil
- }
- }
-
- // This is problematic for a number of reasons. We really need to have a more
- // general mechanism to validate the coherency of AST with type information,
- // but absent that we must do our best to ensure that we don't use fullNode
- // when we actually need the node that was type checked.
- //
- // pkgNode may be nil, if it was eliminated from the type-checked syntax. In
- // that case, use fullDecl if available.
- node := pkgNode
- if node == nil && fullDecl != nil {
- node = fullDecl
- }
-
- switch node := node.(type) {
- case *ast.Ident:
- // The package declaration.
- for _, f := range pkg.GetSyntax() {
- if f.Name == pkgNode {
- info = &HoverContext{Comment: f.Doc}
- }
- }
- case *ast.ImportSpec:
- // Try to find the package documentation for an imported package.
- if pkgName, ok := obj.(*types.PkgName); ok {
- imp, err := pkg.GetImport(pkgName.Imported().Path())
- if err != nil {
- return nil, err
- }
- // Assume that only one file will contain package documentation,
- // so pick the first file that has a doc comment.
- for _, file := range imp.GetSyntax() {
- if file.Doc != nil {
- info = &HoverContext{signatureSource: obj, Comment: file.Doc}
- break
- }
- }
- }
- if info == nil {
- info = &HoverContext{signatureSource: node}
- }
- case *ast.GenDecl:
- switch obj := obj.(type) {
- case *types.TypeName, *types.Var, *types.Const, *types.Func:
- // Always use the full declaration here if we have it, because the
- // dependent code doesn't rely on pointer identity. This is fragile.
- if d, _ := fullDecl.(*ast.GenDecl); d != nil {
- node = d
- }
- // obj may not have been produced by type checking the AST containing
- // node, so we need to be careful about using token.Pos.
- tok := s.FileSet().File(obj.Pos())
- offset, err := Offset(tok, obj.Pos())
- if err != nil {
- return nil, err
- }
-
- // fullTok and fullPos are the *token.File and object position in the
- // full AST.
- fullTok := s.FileSet().File(node.Pos())
- fullPos, err := Pos(fullTok, offset)
- if err != nil {
- return nil, err
- }
-
- var spec ast.Spec
- for _, s := range node.Specs {
- // Avoid panics by guarding the calls to token.Offset (golang/go#48249).
- start, err := Offset(fullTok, s.Pos())
- if err != nil {
- return nil, err
- }
- end, err := Offset(fullTok, s.End())
- if err != nil {
- return nil, err
- }
- if start <= offset && offset <= end {
- spec = s
- break
- }
- }
-
- info, err = hoverGenDecl(node, spec, fullPos, obj)
- if err != nil {
- return nil, err
- }
- }
- case *ast.TypeSpec:
- if obj.Parent() == types.Universe {
- if genDecl, ok := fullDecl.(*ast.GenDecl); ok {
- info = hoverTypeSpec(node, genDecl)
- }
- }
- case *ast.FuncDecl:
- switch obj.(type) {
- case *types.Func:
- info = &HoverContext{signatureSource: obj, Comment: node.Doc}
- case *types.Builtin:
- info = &HoverContext{Comment: node.Doc}
- if sig, err := NewBuiltinSignature(ctx, s, obj.Name()); err == nil {
- info.signatureSource = "func " + sig.name + sig.Format()
- } else {
- // Fall back on the object as a signature source.
-
- // TODO(rfindley): refactor so that we can report bugs from the source
- // package.
-
- // debug.Bug(ctx, "invalid builtin hover", "did not find builtin signature: %v", err)
- info.signatureSource = obj
- }
- case *types.Var:
- // Object is a function param or the field of an anonymous struct
- // declared with ':='. Skip the former, because only fields
- // can have docs.
- if isFunctionParam(obj, node) {
- break
- }
-
- field, err := s.PosToField(ctx, pkg, obj.Pos())
- if err != nil {
- return nil, err
- }
-
- if field != nil {
- comment := field.Doc
- if comment.Text() == "" {
- comment = field.Comment
- }
- info = &HoverContext{signatureSource: obj, Comment: comment}
- }
- }
- }
-
- if info == nil {
- info = &HoverContext{signatureSource: obj}
- }
-
- return info, nil
-}
-
-// isFunctionParam reports whether the passed object is an incoming
-// or an outgoing parameter of the given function declaration.
-func isFunctionParam(obj types.Object, node *ast.FuncDecl) bool {
- for _, f := range node.Type.Params.List {
- if f.Pos() == obj.Pos() {
- return true
- }
- }
- if node.Type.Results != nil {
- for _, f := range node.Type.Results.List {
- if f.Pos() == obj.Pos() {
- return true
- }
- }
- }
- return false
-}
-
-// hoverGenDecl returns hover information for an object declared via spec
-// inside the GenDecl node. obj is the type-checked object corresponding to the
-// declaration, but may have been type-checked using a different AST than the
-// given nodes; fullPos is the position of obj in node's AST.
-func hoverGenDecl(node *ast.GenDecl, spec ast.Spec, fullPos token.Pos, obj types.Object) (*HoverContext, error) {
- if spec == nil {
- return nil, errors.Errorf("no spec for node %v at position %v", node, fullPos)
- }
-
- // If we have a field or method.
- switch obj.(type) {
- case *types.Var, *types.Const, *types.Func:
- return hoverVar(spec, fullPos, obj, node), nil
- }
- // Handle types.
- switch spec := spec.(type) {
- case *ast.TypeSpec:
- return hoverTypeSpec(spec, node), nil
- case *ast.ValueSpec:
- return &HoverContext{signatureSource: spec, Comment: spec.Doc}, nil
- case *ast.ImportSpec:
- return &HoverContext{signatureSource: spec, Comment: spec.Doc}, nil
- }
- return nil, errors.Errorf("unable to format spec %v (%T)", spec, spec)
-}
-
-// TODO(rfindley): rename this function.
-func hoverTypeSpec(spec *ast.TypeSpec, decl *ast.GenDecl) *HoverContext {
- comment := spec.Doc
- if comment == nil && decl != nil {
- comment = decl.Doc
- }
- if comment == nil {
- comment = spec.Comment
- }
- return &HoverContext{
- signatureSource: spec,
- Comment: comment,
- }
-}
-
-func hoverVar(node ast.Spec, fullPos token.Pos, obj types.Object, decl *ast.GenDecl) *HoverContext {
- var fieldList *ast.FieldList
- switch spec := node.(type) {
- case *ast.TypeSpec:
- switch t := spec.Type.(type) {
- case *ast.StructType:
- fieldList = t.Fields
- case *ast.InterfaceType:
- fieldList = t.Methods
- }
- case *ast.ValueSpec:
- // Try to extract the field list of an anonymous struct
- if fieldList = extractFieldList(spec.Type); fieldList != nil {
- break
- }
-
- comment := spec.Doc
- if comment == nil {
- comment = decl.Doc
- }
- if comment == nil {
- comment = spec.Comment
- }
-
- // We need the AST nodes for variable declarations of basic literals with
- // associated values so that we can augment their hover with more information.
- if _, ok := obj.(*types.Var); ok && spec.Type == nil && len(spec.Values) > 0 {
- if _, ok := spec.Values[0].(*ast.BasicLit); ok {
- return &HoverContext{signatureSource: spec, Comment: comment}
- }
- }
-
- return &HoverContext{signatureSource: obj, Comment: comment}
- }
-
- if fieldList != nil {
- comment := findFieldComment(fullPos, fieldList)
- return &HoverContext{signatureSource: obj, Comment: comment}
- }
- return &HoverContext{signatureSource: obj, Comment: decl.Doc}
-}
-
-// extractFieldList recursively tries to extract a field list.
-// If it is not found, nil is returned.
-func extractFieldList(specType ast.Expr) *ast.FieldList {
- switch t := specType.(type) {
- case *ast.StructType:
- return t.Fields
- case *ast.InterfaceType:
- return t.Methods
- case *ast.ArrayType:
- return extractFieldList(t.Elt)
- case *ast.MapType:
- // The map value is more likely to be a struct than the key.
- if fields := extractFieldList(t.Value); fields != nil {
- return fields
- }
- return extractFieldList(t.Key)
- case *ast.ChanType:
- return extractFieldList(t.Value)
- }
- return nil
-}
-
-// findFieldComment visits all fields in depth-first order and returns
-// the comment of the field at the given position. If no comment is found,
-// nil is returned.
-func findFieldComment(pos token.Pos, fieldList *ast.FieldList) *ast.CommentGroup {
- for _, field := range fieldList.List {
- if field.Pos() == pos {
- if field.Doc.Text() != "" {
- return field.Doc
- }
- return field.Comment
- }
-
- if nestedFieldList := extractFieldList(field.Type); nestedFieldList != nil {
- if c := findFieldComment(pos, nestedFieldList); c != nil {
- return c
- }
- }
- }
- return nil
-}
-
-func FormatHover(h *HoverJSON, options *Options) (string, error) {
- signature := formatSignature(h, options)
-
- switch options.HoverKind {
- case SingleLine:
- return h.SingleLine, nil
- case NoDocumentation:
- return signature, nil
- case Structured:
- b, err := json.Marshal(h)
- if err != nil {
- return "", err
- }
- return string(b), nil
- }
-
- link := formatLink(h, options)
- doc := formatDoc(h, options)
-
- var b strings.Builder
- parts := []string{signature, doc, link}
- for i, el := range parts {
- if el != "" {
- b.WriteString(el)
-
- // Don't write out a final newline after the last element.
- if i == len(parts)-1 {
- continue
- }
- // If any elements of the remainder of the list are non-empty,
- // write a newline.
- if anyNonEmpty(parts[i+1:]) {
- if options.PreferredContentFormat == protocol.Markdown {
- b.WriteString("\n\n")
- } else {
- b.WriteRune('\n')
- }
- }
- }
- }
- return b.String(), nil
-}
-
-func formatSignature(h *HoverJSON, options *Options) string {
- signature := h.Signature
- if signature != "" && options.PreferredContentFormat == protocol.Markdown {
- signature = fmt.Sprintf("```go\n%s\n```", signature)
- }
- return signature
-}
-
-func formatLink(h *HoverJSON, options *Options) string {
- if !options.LinksInHover || options.LinkTarget == "" || h.LinkPath == "" {
- return ""
- }
- plainLink := BuildLink(options.LinkTarget, h.LinkPath, h.LinkAnchor)
- switch options.PreferredContentFormat {
- case protocol.Markdown:
- return fmt.Sprintf("[`%s` on %s](%s)", h.SymbolName, options.LinkTarget, plainLink)
- case protocol.PlainText:
- return ""
- default:
- return plainLink
- }
-}
-
-// BuildLink constructs a link with the given target, path, and anchor.
-func BuildLink(target, path, anchor string) string {
- link := fmt.Sprintf("https://%s/%s", target, path)
- if target == "pkg.go.dev" {
- link += "?utm_source=gopls"
- }
- if anchor == "" {
- return link
- }
- return link + "#" + anchor
-}
-
-func formatDoc(h *HoverJSON, options *Options) string {
- var doc string
- switch options.HoverKind {
- case SynopsisDocumentation:
- doc = h.Synopsis
- case FullDocumentation:
- doc = h.FullDocumentation
- }
- if options.PreferredContentFormat == protocol.Markdown {
- return CommentToMarkdown(doc)
- }
- return doc
-}
-
-func anyNonEmpty(x []string) bool {
- for _, el := range x {
- if el != "" {
- return true
- }
- }
- return false
-}
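
Note: the BuildLink helper deleted in the hover.go hunk above composes documentation URLs from a link target, an import path, and an optional anchor. The following is a minimal standalone sketch of the same URL-building rule; the helper name buildLink and the example inputs are invented for illustration, not part of the deleted code.

package main

import "fmt"

// buildLink mirrors the URL rule from the deleted BuildLink above:
// https://<target>/<path>, plus ?utm_source=gopls for pkg.go.dev,
// plus an optional #<anchor>.
func buildLink(target, path, anchor string) string {
	link := fmt.Sprintf("https://%s/%s", target, path)
	if target == "pkg.go.dev" {
		link += "?utm_source=gopls"
	}
	if anchor != "" {
		link += "#" + anchor
	}
	return link
}

func main() {
	// Hypothetical inputs chosen for illustration.
	fmt.Println(buildLink("pkg.go.dev", "golang.org/x/tools/internal/lsp/source", "BuildLink"))
	// https://pkg.go.dev/golang.org/x/tools/internal/lsp/source?utm_source=gopls#BuildLink
}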
diff --git a/internal/lsp/source/identifier.go b/internal/lsp/source/identifier.go
deleted file mode 100644
index bf4941f18..000000000
--- a/internal/lsp/source/identifier.go
+++ /dev/null
@@ -1,576 +0,0 @@
-// Copyright 2018 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-package source
-
-import (
- "context"
- "fmt"
- "go/ast"
- "go/parser"
- "go/token"
- "go/types"
- "sort"
- "strconv"
-
- "golang.org/x/tools/go/ast/astutil"
- "golang.org/x/tools/internal/event"
- "golang.org/x/tools/internal/lsp/protocol"
- "golang.org/x/tools/internal/span"
- "golang.org/x/tools/internal/typeparams"
- errors "golang.org/x/xerrors"
-)
-
-// IdentifierInfo holds information about an identifier in Go source.
-type IdentifierInfo struct {
- Name string
- Snapshot Snapshot
- MappedRange
-
- Type struct {
- MappedRange
- Object types.Object
- }
-
- Inferred *types.Signature
-
- Declaration Declaration
-
- ident *ast.Ident
-
- // For struct fields or embedded interfaces, enclosing is the object
- // corresponding to the outer type declaration, if it is exported, for use in
- // documentation links.
- enclosing *types.TypeName
-
- pkg Package
- qf types.Qualifier
-}
-
-func (i *IdentifierInfo) IsImport() bool {
- _, ok := i.Declaration.node.(*ast.ImportSpec)
- return ok
-}
-
-type Declaration struct {
- MappedRange []MappedRange
-
- // The typechecked node.
- node ast.Node
-
- // Optional: the fully parsed node, to be used for formatting in cases where
- // node has missing information. This could be the case when node was parsed
- // in ParseExported mode.
- fullDecl ast.Decl
-
- // The typechecked object.
- obj types.Object
-
- // typeSwitchImplicit indicates that the declaration is in an implicit
- // type switch. Its type is the type of the variable on the right-hand
- // side of the type switch.
- typeSwitchImplicit types.Type
-}
-
-// Identifier returns identifier information for a position
-// in a file, accounting for a potentially incomplete selector.
-func Identifier(ctx context.Context, snapshot Snapshot, fh FileHandle, pos protocol.Position) (*IdentifierInfo, error) {
- ctx, done := event.Start(ctx, "source.Identifier")
- defer done()
-
- pkgs, err := snapshot.PackagesForFile(ctx, fh.URI(), TypecheckAll, false)
- if err != nil {
- return nil, err
- }
- if len(pkgs) == 0 {
- return nil, fmt.Errorf("no packages for file %v", fh.URI())
- }
- sort.Slice(pkgs, func(i, j int) bool {
- // Prefer packages with a more complete parse mode.
- if pkgs[i].ParseMode() != pkgs[j].ParseMode() {
- return pkgs[i].ParseMode() > pkgs[j].ParseMode()
- }
- return len(pkgs[i].CompiledGoFiles()) < len(pkgs[j].CompiledGoFiles())
- })
- var findErr error
- for _, pkg := range pkgs {
- pgf, err := pkg.File(fh.URI())
- if err != nil {
- return nil, err
- }
- spn, err := pgf.Mapper.PointSpan(pos)
- if err != nil {
- return nil, err
- }
- rng, err := spn.Range(pgf.Mapper.Converter)
- if err != nil {
- return nil, err
- }
- var ident *IdentifierInfo
- ident, findErr = findIdentifier(ctx, snapshot, pkg, pgf, rng.Start)
- if findErr == nil {
- return ident, nil
- }
- }
- return nil, findErr
-}
-
-// ErrNoIdentFound is the error returned when no identifier is found at a particular position.
-var ErrNoIdentFound = errors.New("no identifier found")
-
-func findIdentifier(ctx context.Context, snapshot Snapshot, pkg Package, pgf *ParsedGoFile, pos token.Pos) (*IdentifierInfo, error) {
- file := pgf.File
- // Handle import specs separately, as there is no formal position for a
- // package declaration.
- if result, err := importSpec(snapshot, pkg, file, pos); result != nil || err != nil {
- return result, err
- }
- path := pathEnclosingObjNode(file, pos)
- if path == nil {
- return nil, ErrNoIdentFound
- }
-
- qf := Qualifier(file, pkg.GetTypes(), pkg.GetTypesInfo())
-
- ident, _ := path[0].(*ast.Ident)
- if ident == nil {
- return nil, ErrNoIdentFound
- }
- // Special case for package declarations, since they have no
- // corresponding types.Object.
- if ident == file.Name {
- rng, err := posToMappedRange(snapshot, pkg, file.Name.Pos(), file.Name.End())
- if err != nil {
- return nil, err
- }
- var declAST *ast.File
- for _, pgf := range pkg.CompiledGoFiles() {
- if pgf.File.Doc != nil {
- declAST = pgf.File
- }
- }
- // If there's no package documentation, just use the current file.
- if declAST == nil {
- declAST = file
- }
- declRng, err := posToMappedRange(snapshot, pkg, declAST.Name.Pos(), declAST.Name.End())
- if err != nil {
- return nil, err
- }
- return &IdentifierInfo{
- Name: file.Name.Name,
- ident: file.Name,
- MappedRange: rng,
- pkg: pkg,
- qf: qf,
- Snapshot: snapshot,
- Declaration: Declaration{
- node: declAST.Name,
- MappedRange: []MappedRange{declRng},
- },
- }, nil
- }
-
- result := &IdentifierInfo{
- Snapshot: snapshot,
- qf: qf,
- pkg: pkg,
- ident: ident,
- enclosing: searchForEnclosing(pkg.GetTypesInfo(), path),
- }
-
- result.Name = result.ident.Name
- var err error
- if result.MappedRange, err = posToMappedRange(snapshot, pkg, result.ident.Pos(), result.ident.End()); err != nil {
- return nil, err
- }
-
- result.Declaration.obj = pkg.GetTypesInfo().ObjectOf(result.ident)
- if result.Declaration.obj == nil {
- // If there was no types.Object for the declaration, there might be an
- // implicit local variable declaration in a type switch.
- if objs, typ := typeSwitchImplicits(pkg, path); len(objs) > 0 {
- // There is no types.Object for the declaration of an implicit local variable,
- // but all of the types.Objects associated with the usages of this variable can be
- // used to connect it back to the declaration.
- // Preserve the first of these objects and treat it as if it were the declaring object.
- result.Declaration.obj = objs[0]
- result.Declaration.typeSwitchImplicit = typ
- } else {
- // Probably a type error.
- return nil, errors.Errorf("%w for ident %v", errNoObjectFound, result.Name)
- }
- }
-
- // Handle builtins separately.
- if result.Declaration.obj.Parent() == types.Universe {
- builtin, err := snapshot.BuiltinFile(ctx)
- if err != nil {
- return nil, err
- }
- builtinObj := builtin.File.Scope.Lookup(result.Name)
- if builtinObj == nil {
- return nil, fmt.Errorf("no builtin object for %s", result.Name)
- }
- decl, ok := builtinObj.Decl.(ast.Node)
- if !ok {
- return nil, errors.Errorf("no declaration for %s", result.Name)
- }
- result.Declaration.node = decl
- if typeSpec, ok := decl.(*ast.TypeSpec); ok {
- // Find the GenDecl (which has the doc comments) for the TypeSpec.
- result.Declaration.fullDecl = findGenDecl(builtin.File, typeSpec)
- }
-
- // The builtin package isn't in the dependency graph, so the usual
- // utilities won't work here.
- rng := NewMappedRange(snapshot.FileSet(), builtin.Mapper, decl.Pos(), decl.Pos()+token.Pos(len(result.Name)))
- result.Declaration.MappedRange = append(result.Declaration.MappedRange, rng)
- return result, nil
- }
-
- // (error).Error is a special case of builtin. Lots of checks to confirm
- // that this is the builtin Error.
- if obj := result.Declaration.obj; obj.Parent() == nil && obj.Pkg() == nil && obj.Name() == "Error" {
- if _, ok := obj.Type().(*types.Signature); ok {
- builtin, err := snapshot.BuiltinFile(ctx)
- if err != nil {
- return nil, err
- }
- // Look up "error" and then navigate to its only method.
- // The Error method does not appear in the builtin package's scope.
- const errorName = "error"
- builtinObj := builtin.File.Scope.Lookup(errorName)
- if builtinObj == nil {
- return nil, fmt.Errorf("no builtin object for %s", errorName)
- }
- decl, ok := builtinObj.Decl.(ast.Node)
- if !ok {
- return nil, errors.Errorf("no declaration for %s", errorName)
- }
- spec, ok := decl.(*ast.TypeSpec)
- if !ok {
- return nil, fmt.Errorf("no type spec for %s", errorName)
- }
- iface, ok := spec.Type.(*ast.InterfaceType)
- if !ok {
- return nil, fmt.Errorf("%s is not an interface", errorName)
- }
- if iface.Methods.NumFields() != 1 {
- return nil, fmt.Errorf("expected 1 method for %s, got %v", errorName, iface.Methods.NumFields())
- }
- method := iface.Methods.List[0]
- if len(method.Names) != 1 {
- return nil, fmt.Errorf("expected 1 name for %v, got %v", method, len(method.Names))
- }
- name := method.Names[0].Name
- result.Declaration.node = method
- rng := NewMappedRange(snapshot.FileSet(), builtin.Mapper, method.Pos(), method.Pos()+token.Pos(len(name)))
- result.Declaration.MappedRange = append(result.Declaration.MappedRange, rng)
- return result, nil
- }
- }
-
- // If the original position was an embedded field, we want to jump
- // to the field's type definition, not the field's definition.
- if v, ok := result.Declaration.obj.(*types.Var); ok && v.Embedded() {
- // types.Info.Uses contains the embedded field's *types.TypeName.
- if typeName := pkg.GetTypesInfo().Uses[ident]; typeName != nil {
- result.Declaration.obj = typeName
- }
- }
-
- rng, err := objToMappedRange(snapshot, pkg, result.Declaration.obj)
- if err != nil {
- return nil, err
- }
- result.Declaration.MappedRange = append(result.Declaration.MappedRange, rng)
-
- declPkg, err := FindPackageFromPos(ctx, snapshot, result.Declaration.obj.Pos())
- if err != nil {
- return nil, err
- }
- if result.Declaration.node, err = snapshot.PosToDecl(ctx, declPkg, result.Declaration.obj.Pos()); err != nil {
- return nil, err
- }
- // Ensure that we have the full declaration, in case the declaration was
- // parsed in ParseExported and therefore could be missing information.
- if result.Declaration.fullDecl, err = fullNode(snapshot, result.Declaration.obj, declPkg); err != nil {
- return nil, err
- }
- typ := pkg.GetTypesInfo().TypeOf(result.ident)
- if typ == nil {
- return result, nil
- }
-
- result.Inferred = inferredSignature(pkg.GetTypesInfo(), ident)
-
- result.Type.Object = typeToObject(typ)
- if result.Type.Object != nil {
- // Identifiers with the type "error" are a special case with no position.
- if hasErrorType(result.Type.Object) {
- return result, nil
- }
- if result.Type.MappedRange, err = objToMappedRange(snapshot, pkg, result.Type.Object); err != nil {
- return nil, err
- }
- }
- return result, nil
-}
-
-// findGenDecl determines the parent ast.GenDecl for a given ast.Spec.
-func findGenDecl(f *ast.File, spec ast.Spec) *ast.GenDecl {
- for _, decl := range f.Decls {
- if genDecl, ok := decl.(*ast.GenDecl); ok {
- if genDecl.Pos() <= spec.Pos() && genDecl.End() >= spec.End() {
- return genDecl
- }
- }
- }
- return nil
-}
-
-// fullNode tries to extract the full declaration corresponding to obj.
-// If the package was not parsed in full, the declaring file will be
-// re-parsed to ensure it has complete syntax.
-func fullNode(snapshot Snapshot, obj types.Object, pkg Package) (ast.Decl, error) {
- // The declaration may be in a different package, so make sure we have full AST information.
- tok := snapshot.FileSet().File(obj.Pos())
- uri := span.URIFromPath(tok.Name())
- pgf, err := pkg.File(uri)
- if err != nil {
- return nil, err
- }
- file := pgf.File
- pos := obj.Pos()
- if pgf.Mode != ParseFull {
- fset := snapshot.FileSet()
- file2, _ := parser.ParseFile(fset, tok.Name(), pgf.Src, parser.AllErrors|parser.ParseComments)
- if file2 != nil {
- offset, err := Offset(tok, obj.Pos())
- if err != nil {
- return nil, err
- }
- file = file2
- tok2 := fset.File(file2.Pos())
- pos = tok2.Pos(offset)
- }
- }
- path, _ := astutil.PathEnclosingInterval(file, pos, pos)
- for _, n := range path {
- if decl, ok := n.(ast.Decl); ok {
- return decl, nil
- }
- }
- return nil, nil
-}
-
-// inferredSignature determines the resolved non-generic signature for an
-// identifier in an instantiation expression.
-//
-// If no such signature exists, it returns nil.
-func inferredSignature(info *types.Info, id *ast.Ident) *types.Signature {
- inst := typeparams.GetInstances(info)[id]
- sig, _ := inst.Type.(*types.Signature)
- return sig
-}
-
-func searchForEnclosing(info *types.Info, path []ast.Node) *types.TypeName {
- for _, n := range path {
- switch n := n.(type) {
- case *ast.SelectorExpr:
- if sel, ok := info.Selections[n]; ok {
- recv := Deref(sel.Recv())
-
- // Keep track of the last exported type seen.
- var exported *types.TypeName
- if named, ok := recv.(*types.Named); ok && named.Obj().Exported() {
- exported = named.Obj()
- }
- // We don't want the last element, as that's the field or
- // method itself.
- for _, index := range sel.Index()[:len(sel.Index())-1] {
- if r, ok := recv.Underlying().(*types.Struct); ok {
- recv = Deref(r.Field(index).Type())
- if named, ok := recv.(*types.Named); ok && named.Obj().Exported() {
- exported = named.Obj()
- }
- }
- }
- return exported
- }
- case *ast.CompositeLit:
- if t, ok := info.Types[n]; ok {
- if named, _ := t.Type.(*types.Named); named != nil {
- return named.Obj()
- }
- }
- case *ast.TypeSpec:
- if _, ok := n.Type.(*ast.StructType); ok {
- if t, ok := info.Defs[n.Name]; ok {
- if tname, _ := t.(*types.TypeName); tname != nil {
- return tname
- }
- }
- }
- }
- }
- return nil
-}
-
-func typeToObject(typ types.Type) types.Object {
- switch typ := typ.(type) {
- case *types.Named:
- return typ.Obj()
- case *types.Pointer:
- return typeToObject(typ.Elem())
- case *types.Array:
- return typeToObject(typ.Elem())
- case *types.Slice:
- return typeToObject(typ.Elem())
- case *types.Chan:
- return typeToObject(typ.Elem())
- case *types.Signature:
- // Try to find a return value of a named type. If there's only one
- // such value, jump to its type definition.
- var res types.Object
-
- results := typ.Results()
- for i := 0; i < results.Len(); i++ {
- obj := typeToObject(results.At(i).Type())
- if obj == nil || hasErrorType(obj) {
- // Skip builtins.
- continue
- }
- if res != nil {
- // The function/method must have only one return value of a named type.
- return nil
- }
-
- res = obj
- }
- return res
- default:
- return nil
- }
-}
-
-func hasErrorType(obj types.Object) bool {
- return types.IsInterface(obj.Type()) && obj.Pkg() == nil && obj.Name() == "error"
-}
-
-// importSpec handles positions inside of an *ast.ImportSpec.
-func importSpec(snapshot Snapshot, pkg Package, file *ast.File, pos token.Pos) (*IdentifierInfo, error) {
- var imp *ast.ImportSpec
- for _, spec := range file.Imports {
- if spec.Path.Pos() <= pos && pos < spec.Path.End() {
- imp = spec
- }
- }
- if imp == nil {
- return nil, nil
- }
- importPath, err := strconv.Unquote(imp.Path.Value)
- if err != nil {
- return nil, errors.Errorf("import path not quoted: %s (%v)", imp.Path.Value, err)
- }
- result := &IdentifierInfo{
- Snapshot: snapshot,
- Name: importPath,
- pkg: pkg,
- }
- if result.MappedRange, err = posToMappedRange(snapshot, pkg, imp.Path.Pos(), imp.Path.End()); err != nil {
- return nil, err
- }
- // Consider the "declaration" of an import spec to be the imported package.
- importedPkg, err := pkg.GetImport(importPath)
- if err != nil {
- return nil, err
- }
- // Return all of the files in the package as the definition of the import spec.
- for _, dst := range importedPkg.GetSyntax() {
- rng, err := posToMappedRange(snapshot, pkg, dst.Pos(), dst.End())
- if err != nil {
- return nil, err
- }
- result.Declaration.MappedRange = append(result.Declaration.MappedRange, rng)
- }
-
- result.Declaration.node = imp
- return result, nil
-}
-
-// typeSwitchImplicits returns all the implicit type switch objects that
-// correspond to the leaf *ast.Ident. It also returns the original type
-// associated with the identifier (outside of a case clause).
-func typeSwitchImplicits(pkg Package, path []ast.Node) ([]types.Object, types.Type) {
- ident, _ := path[0].(*ast.Ident)
- if ident == nil {
- return nil, nil
- }
-
- var (
- ts *ast.TypeSwitchStmt
- assign *ast.AssignStmt
- cc *ast.CaseClause
- obj = pkg.GetTypesInfo().ObjectOf(ident)
- )
-
- // Walk our ancestors to determine if our leaf ident refers to a
- // type switch variable, e.g. the "a" from "switch a := b.(type)".
-Outer:
- for i := 1; i < len(path); i++ {
- switch n := path[i].(type) {
- case *ast.AssignStmt:
- // Check if ident is the "a" in "a := foo.(type)". The "a" in
- // this case has no types.Object, so check for ident equality.
- if len(n.Lhs) == 1 && n.Lhs[0] == ident {
- assign = n
- }
- case *ast.CaseClause:
- // Check if ident is a use of "a" within a case clause. Each
- // case clause implicitly maps "a" to a different types.Object,
- // so check if ident's object is the case clause's implicit
- // object.
- if obj != nil && pkg.GetTypesInfo().Implicits[n] == obj {
- cc = n
- }
- case *ast.TypeSwitchStmt:
- // Look for the type switch that owns our previously found
- // *ast.AssignStmt or *ast.CaseClause.
- if n.Assign == assign {
- ts = n
- break Outer
- }
-
- for _, stmt := range n.Body.List {
- if stmt == cc {
- ts = n
- break Outer
- }
- }
- }
- }
- if ts == nil {
- return nil, nil
- }
- // Our leaf ident refers to a type switch variable. Fan out to the
- // type switch's implicit case clause objects.
- var objs []types.Object
- for _, cc := range ts.Body.List {
- if ccObj := pkg.GetTypesInfo().Implicits[cc]; ccObj != nil {
- objs = append(objs, ccObj)
- }
- }
- // The right-hand side of a type switch should only have one
- // element, and we need to track its type in order to generate
- // hover information for implicit type switch variables.
- var typ types.Type
- if assign, ok := ts.Assign.(*ast.AssignStmt); ok && len(assign.Rhs) == 1 {
- if rhs, ok := assign.Rhs[0].(*ast.TypeAssertExpr); ok {
- typ = pkg.GetTypesInfo().TypeOf(rhs.X)
- }
- }
- return objs, typ
-}
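
Note: typeToObject in the identifier.go hunk above peels off pointers, slices, arrays, and channels until it reaches a named type (or gives up). Below is a small self-contained sketch of that unwrapping idea using only go/types; the helper name namedObject and the toy type T are invented for illustration and the signature/error special cases are omitted.

package main

import (
	"fmt"
	"go/token"
	"go/types"
)

// namedObject follows pointer, slice, array, and channel element types until
// it reaches a named type, mirroring the unwrapping done by typeToObject.
func namedObject(t types.Type) types.Object {
	switch t := t.(type) {
	case *types.Named:
		return t.Obj()
	case *types.Pointer:
		return namedObject(t.Elem())
	case *types.Slice:
		return namedObject(t.Elem())
	case *types.Array:
		return namedObject(t.Elem())
	case *types.Chan:
		return namedObject(t.Elem())
	default:
		return nil
	}
}

func main() {
	// Build a toy named type T (defined over int) and wrap it as []*T.
	obj := types.NewTypeName(token.NoPos, nil, "T", nil)
	named := types.NewNamed(obj, types.Typ[types.Int], nil)
	wrapped := types.NewSlice(types.NewPointer(named))
	fmt.Println(namedObject(wrapped).Name()) // T
}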
diff --git a/internal/lsp/source/identifier_test.go b/internal/lsp/source/identifier_test.go
deleted file mode 100644
index 9bbdf58de..000000000
--- a/internal/lsp/source/identifier_test.go
+++ /dev/null
@@ -1,128 +0,0 @@
-// Copyright 2020 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-package source
-
-import (
- "bytes"
- "go/ast"
- "go/parser"
- "go/token"
- "go/types"
- "testing"
-)
-
-func TestSearchForEnclosing(t *testing.T) {
- tests := []struct {
- desc string
- // For convenience, consider the first occurrence of the identifier "X" in
- // src.
- src string
- // By convention, "" means no type found.
- wantTypeName string
- }{
- {
- desc: "self enclosing",
- src: `package a; type X struct {}`,
- wantTypeName: "X",
- },
- {
- // TODO(rFindley): is this correct, or do we want to resolve I2 here?
- desc: "embedded interface in interface",
- src: `package a; var y = i1.X; type i1 interface {I2}; type I2 interface{X()}`,
- wantTypeName: "",
- },
- {
- desc: "embedded interface in struct",
- src: `package a; var y = t.X; type t struct {I}; type I interface{X()}`,
- wantTypeName: "I",
- },
- {
- desc: "double embedding",
- src: `package a; var y = t1.X; type t1 struct {t2}; type t2 struct {I}; type I interface{X()}`,
- wantTypeName: "I",
- },
- {
- desc: "struct field",
- src: `package a; type T struct { X int }`,
- wantTypeName: "T",
- },
- {
- desc: "nested struct field",
- src: `package a; type T struct { E struct { X int } }`,
- wantTypeName: "T",
- },
- {
- desc: "slice entry",
- src: `package a; type T []int; var S = T{X}; var X int = 2`,
- wantTypeName: "T",
- },
- {
- desc: "struct pointer literal",
- src: `package a; type T struct {i int}; var L = &T{X}; const X = 2`,
- wantTypeName: "T",
- },
- }
-
- for _, test := range tests {
- test := test
- t.Run(test.desc, func(t *testing.T) {
- fset := token.NewFileSet()
- file, err := parser.ParseFile(fset, "a.go", test.src, parser.AllErrors)
- if err != nil {
- t.Fatal(err)
- }
- column := 1 + bytes.IndexRune([]byte(test.src), 'X')
- pos := posAt(1, column, fset, "a.go")
- path := pathEnclosingObjNode(file, pos)
- if path == nil {
- t.Fatalf("no ident found at (1, %d)", column)
- }
- info := newInfo()
- if _, err = (*types.Config)(nil).Check("p", fset, []*ast.File{file}, info); err != nil {
- t.Fatal(err)
- }
- obj := searchForEnclosing(info, path)
- if obj == nil {
- if test.wantTypeName != "" {
- t.Errorf("searchForEnclosing(...) = <nil>, want %q", test.wantTypeName)
- }
- return
- }
- if got := obj.Name(); got != test.wantTypeName {
- t.Errorf("searchForEnclosing(...) = %q, want %q", got, test.wantTypeName)
- }
- })
- }
-}
-
-// posAt returns the token.Pos corresponding to the 1-based (line, column)
-// coordinates in the file fname of fset.
-func posAt(line, column int, fset *token.FileSet, fname string) token.Pos {
- var tok *token.File
- fset.Iterate(func(f *token.File) bool {
- if f.Name() == fname {
- tok = f
- return false
- }
- return true
- })
- if tok == nil {
- return token.NoPos
- }
- start := tok.LineStart(line)
- return start + token.Pos(column-1)
-}
-
-// newInfo returns a types.Info with all maps populated.
-func newInfo() *types.Info {
- return &types.Info{
- Types: make(map[ast.Expr]types.TypeAndValue),
- Defs: make(map[*ast.Ident]types.Object),
- Uses: make(map[*ast.Ident]types.Object),
- Implicits: make(map[ast.Node]types.Object),
- Selections: make(map[*ast.SelectorExpr]*types.Selection),
- Scopes: make(map[ast.Node]*types.Scope),
- }
-}
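
Note: the posAt helper in the test above converts 1-based (line, column) coordinates into a token.Pos by adding the zero-based column offset to token.File.LineStart. A minimal demonstration of that arithmetic on a throwaway file; the source text and file name are made up.

package main

import (
	"fmt"
	"go/token"
)

func main() {
	// Throwaway source text; only the line structure matters.
	src := "package a\nvar X = 1\n"
	fset := token.NewFileSet()
	f := fset.AddFile("a.go", -1, len(src))
	f.SetLinesForContent([]byte(src))

	// 1-based (line, column) -> token.Pos, as posAt does:
	// start of the line plus the zero-based column offset.
	line, column := 2, 5 // the 'X' in "var X = 1"
	pos := f.LineStart(line) + token.Pos(column-1)
	fmt.Println(fset.Position(pos)) // a.go:2:5
}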
diff --git a/internal/lsp/source/implementation.go b/internal/lsp/source/implementation.go
deleted file mode 100644
index b53d7c994..000000000
--- a/internal/lsp/source/implementation.go
+++ /dev/null
@@ -1,446 +0,0 @@
-// Copyright 2019 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-package source
-
-import (
- "context"
- "errors"
- "fmt"
- "go/ast"
- "go/token"
- "go/types"
- "sort"
-
- "golang.org/x/tools/internal/event"
- "golang.org/x/tools/internal/lsp/protocol"
- "golang.org/x/tools/internal/span"
- "golang.org/x/xerrors"
-)
-
-func Implementation(ctx context.Context, snapshot Snapshot, f FileHandle, pp protocol.Position) ([]protocol.Location, error) {
- ctx, done := event.Start(ctx, "source.Implementation")
- defer done()
-
- impls, err := implementations(ctx, snapshot, f, pp)
- if err != nil {
- return nil, err
- }
- var locations []protocol.Location
- for _, impl := range impls {
- if impl.pkg == nil || len(impl.pkg.CompiledGoFiles()) == 0 {
- continue
- }
- rng, err := objToMappedRange(snapshot, impl.pkg, impl.obj)
- if err != nil {
- return nil, err
- }
- pr, err := rng.Range()
- if err != nil {
- return nil, err
- }
- locations = append(locations, protocol.Location{
- URI: protocol.URIFromSpanURI(rng.URI()),
- Range: pr,
- })
- }
- sort.Slice(locations, func(i, j int) bool {
- li, lj := locations[i], locations[j]
- if li.URI == lj.URI {
- return protocol.CompareRange(li.Range, lj.Range) < 0
- }
- return li.URI < lj.URI
- })
- return locations, nil
-}
-
-var ErrNotAType = errors.New("not a type name or method")
-
-// implementations returns the concrete implementations of the specified
-// interface, or the interfaces implemented by the specified concrete type.
-func implementations(ctx context.Context, s Snapshot, f FileHandle, pp protocol.Position) ([]qualifiedObject, error) {
- var (
- impls []qualifiedObject
- seen = make(map[token.Position]bool)
- fset = s.FileSet()
- )
-
- qos, err := qualifiedObjsAtProtocolPos(ctx, s, f.URI(), pp)
- if err != nil {
- return nil, err
- }
- for _, qo := range qos {
- var (
- queryType types.Type
- queryMethod *types.Func
- )
-
- switch obj := qo.obj.(type) {
- case *types.Func:
- queryMethod = obj
- if recv := obj.Type().(*types.Signature).Recv(); recv != nil {
- queryType = ensurePointer(recv.Type())
- }
- case *types.TypeName:
- queryType = ensurePointer(obj.Type())
- }
-
- if queryType == nil {
- return nil, ErrNotAType
- }
-
- if types.NewMethodSet(queryType).Len() == 0 {
- return nil, nil
- }
-
- // Find all named types, even local types (which can have methods
- // due to promotion).
- var (
- allNamed []*types.Named
- pkgs = make(map[*types.Package]Package)
- )
- knownPkgs, err := s.KnownPackages(ctx)
- if err != nil {
- return nil, err
- }
- for _, pkg := range knownPkgs {
- pkgs[pkg.GetTypes()] = pkg
- info := pkg.GetTypesInfo()
- for _, obj := range info.Defs {
- obj, ok := obj.(*types.TypeName)
- // We ignore aliases 'type M = N' to avoid duplicate reporting
- // of the Named type N.
- if !ok || obj.IsAlias() {
- continue
- }
- if named, ok := obj.Type().(*types.Named); ok {
- allNamed = append(allNamed, named)
- }
- }
- }
-
- // Find all the named types that match our query.
- for _, named := range allNamed {
- var (
- candObj types.Object = named.Obj()
- candType = ensurePointer(named)
- )
-
- if !concreteImplementsIntf(candType, queryType) {
- continue
- }
-
- ms := types.NewMethodSet(candType)
- if ms.Len() == 0 {
- // Skip empty interfaces.
- continue
- }
-
- // If client queried a method, look up corresponding candType method.
- if queryMethod != nil {
- sel := ms.Lookup(queryMethod.Pkg(), queryMethod.Name())
- if sel == nil {
- continue
- }
- candObj = sel.Obj()
- }
-
- pos := fset.Position(candObj.Pos())
- if candObj == queryMethod || seen[pos] {
- continue
- }
-
- seen[pos] = true
-
- impls = append(impls, qualifiedObject{
- obj: candObj,
- pkg: pkgs[candObj.Pkg()],
- })
- }
- }
-
- return impls, nil
-}
-
-// concreteImplementsIntf returns true if a is an interface type implemented by
-// concrete type b, or vice versa.
-func concreteImplementsIntf(a, b types.Type) bool {
- aIsIntf, bIsIntf := IsInterface(a), IsInterface(b)
-
- // Make sure exactly one is an interface type.
- if aIsIntf == bIsIntf {
- return false
- }
-
- // Rearrange if needed so "a" is the concrete type.
- if aIsIntf {
- a, b = b, a
- }
-
- return types.AssignableTo(a, b)
-}
-
-// ensurePointer wraps T in a *types.Pointer if T is a named, non-interface
-// type. This is useful to make sure you consider a named type's full method
-// set.
-func ensurePointer(T types.Type) types.Type {
- if _, ok := T.(*types.Named); ok && !IsInterface(T) {
- return types.NewPointer(T)
- }
-
- return T
-}
-
-type qualifiedObject struct {
- obj types.Object
-
- // pkg is the Package that contains obj's definition.
- pkg Package
-
- // node is the *ast.Ident or *ast.ImportSpec we followed to find obj, if any.
- node ast.Node
-
- // sourcePkg is the Package that contains node, if any.
- sourcePkg Package
-}
-
-var (
- errBuiltin = errors.New("builtin object")
- errNoObjectFound = errors.New("no object found")
-)
-
-// qualifiedObjsAtProtocolPos returns info for all the types.Objects
-// referenced at the given position. An object will be returned for
-// every package that the file belongs to, in every typechecking mode
-// applicable.
-func qualifiedObjsAtProtocolPos(ctx context.Context, s Snapshot, uri span.URI, pp protocol.Position) ([]qualifiedObject, error) {
- pkgs, err := s.PackagesForFile(ctx, uri, TypecheckAll, false)
- if err != nil {
- return nil, err
- }
- if len(pkgs) == 0 {
- return nil, errNoObjectFound
- }
- pkg := pkgs[0]
- pgf, err := pkg.File(uri)
- if err != nil {
- return nil, err
- }
- spn, err := pgf.Mapper.PointSpan(pp)
- if err != nil {
- return nil, err
- }
- rng, err := spn.Range(pgf.Mapper.Converter)
- if err != nil {
- return nil, err
- }
- offset, err := Offset(pgf.Tok, rng.Start)
- if err != nil {
- return nil, err
- }
- return qualifiedObjsAtLocation(ctx, s, objSearchKey{uri, offset}, map[objSearchKey]bool{})
-}
-
-type objSearchKey struct {
- uri span.URI
- offset int
-}
-
-// qualifiedObjsAtLocation finds all objects referenced at offset in uri, across
-// all packages in the snapshot.
-func qualifiedObjsAtLocation(ctx context.Context, s Snapshot, key objSearchKey, seen map[objSearchKey]bool) ([]qualifiedObject, error) {
- if seen[key] {
- return nil, nil
- }
- seen[key] = true
-
- // We search for referenced objects starting with all packages containing the
- // current location, and then repeating the search for every distinct object
- // location discovered.
- //
- // In the common case, there should be at most one additional location to
- // consider: the definition of the object referenced by the location. But we
- // try to be comprehensive in case we ever support variations on build
- // constraints.
-
- pkgs, err := s.PackagesForFile(ctx, key.uri, TypecheckAll, false)
- if err != nil {
- return nil, err
- }
-
- // report objects in the order we encounter them. This ensures that the first
- // result is at the cursor...
- var qualifiedObjs []qualifiedObject
- // ...but avoid duplicates.
- seenObjs := map[types.Object]bool{}
-
- for _, searchpkg := range pkgs {
- pgf, err := searchpkg.File(key.uri)
- if err != nil {
- return nil, err
- }
- pos := pgf.Tok.Pos(key.offset)
- path := pathEnclosingObjNode(pgf.File, pos)
- if path == nil {
- continue
- }
- var objs []types.Object
- switch leaf := path[0].(type) {
- case *ast.Ident:
- // If leaf represents an implicit type switch object or the type
- // switch "assign" variable, expand to all of the type switch's
- // implicit objects.
- if implicits, _ := typeSwitchImplicits(searchpkg, path); len(implicits) > 0 {
- objs = append(objs, implicits...)
- } else {
- obj := searchpkg.GetTypesInfo().ObjectOf(leaf)
- if obj == nil {
- return nil, xerrors.Errorf("%w for %q", errNoObjectFound, leaf.Name)
- }
- objs = append(objs, obj)
- }
- case *ast.ImportSpec:
- // Look up the implicit *types.PkgName.
- obj := searchpkg.GetTypesInfo().Implicits[leaf]
- if obj == nil {
- return nil, xerrors.Errorf("%w for import %q", errNoObjectFound, ImportPath(leaf))
- }
- objs = append(objs, obj)
- }
- // Get all of the transitive dependencies of the search package.
- pkgs := make(map[*types.Package]Package)
- var addPkg func(pkg Package)
- addPkg = func(pkg Package) {
- pkgs[pkg.GetTypes()] = pkg
- for _, imp := range pkg.Imports() {
- if _, ok := pkgs[imp.GetTypes()]; !ok {
- addPkg(imp)
- }
- }
- }
- addPkg(searchpkg)
- for _, obj := range objs {
- if obj.Parent() == types.Universe {
- return nil, xerrors.Errorf("%q: %w", obj.Name(), errBuiltin)
- }
- pkg, ok := pkgs[obj.Pkg()]
- if !ok {
- event.Error(ctx, fmt.Sprintf("no package for obj %s: %v", obj, obj.Pkg()), err)
- continue
- }
- qualifiedObjs = append(qualifiedObjs, qualifiedObject{
- obj: obj,
- pkg: pkg,
- sourcePkg: searchpkg,
- node: path[0],
- })
- seenObjs[obj] = true
-
- // If the qualified object is in another file (or more likely, another
- // package), it's possible that there is another copy of it in a package
- // that we haven't searched, e.g. a test variant. See golang/go#47564.
- //
- // In order to be sure we've considered all packages, call
- // qualifiedObjsAtLocation recursively for all locations we encounter. We
- // could probably be more precise here, only continuing the search if obj
- // is in another package, but this should be good enough to find all
- // uses.
-
- pos := obj.Pos()
- var uri span.URI
- offset := -1
- for _, pgf := range pkg.CompiledGoFiles() {
- if pgf.Tok.Base() <= int(pos) && int(pos) <= pgf.Tok.Base()+pgf.Tok.Size() {
- var err error
- offset, err = Offset(pgf.Tok, pos)
- if err != nil {
- return nil, err
- }
- uri = pgf.URI
- }
- }
- if offset >= 0 {
- otherObjs, err := qualifiedObjsAtLocation(ctx, s, objSearchKey{uri, offset}, seen)
- if err != nil {
- return nil, err
- }
- for _, other := range otherObjs {
- if !seenObjs[other.obj] {
- qualifiedObjs = append(qualifiedObjs, other)
- seenObjs[other.obj] = true
- }
- }
- } else {
- return nil, fmt.Errorf("missing file for position of %q in %q", obj.Name(), obj.Pkg().Name())
- }
- }
- }
- // Return an error if no objects were found since callers will assume that
- // the slice has at least 1 element.
- if len(qualifiedObjs) == 0 {
- return nil, errNoObjectFound
- }
- return qualifiedObjs, nil
-}
-
-// pathEnclosingObjNode returns the AST path to the object-defining
-// node associated with pos. "Object-defining" means either an
-// *ast.Ident mapped directly to a types.Object or an ast.Node mapped
-// implicitly to a types.Object.
-func pathEnclosingObjNode(f *ast.File, pos token.Pos) []ast.Node {
- var (
- path []ast.Node
- found bool
- )
-
- ast.Inspect(f, func(n ast.Node) bool {
- if found {
- return false
- }
-
- if n == nil {
- path = path[:len(path)-1]
- return false
- }
-
- path = append(path, n)
-
- switch n := n.(type) {
- case *ast.Ident:
- // Include the position directly after identifier. This handles
- // the common case where the cursor is right after the
- // identifier the user is currently typing. Previously we
- // handled this by calling astutil.PathEnclosingInterval twice,
- // once for "pos" and once for "pos-1".
- found = n.Pos() <= pos && pos <= n.End()
- case *ast.ImportSpec:
- if n.Path.Pos() <= pos && pos < n.Path.End() {
- found = true
- // If import spec has a name, add name to path even though
- // position isn't in the name.
- if n.Name != nil {
- path = append(path, n.Name)
- }
- }
- case *ast.StarExpr:
- // Follow star expressions to the inner identifier.
- if pos == n.Star {
- pos = n.X.Pos()
- }
- }
-
- return !found
- })
-
- if len(path) == 0 {
- return nil
- }
-
- // Reverse path so leaf is first element.
- for i := 0; i < len(path)/2; i++ {
- path[i], path[len(path)-1-i] = path[len(path)-1-i], path[i]
- }
-
- return path
-}
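
Note: concreteImplementsIntf and ensurePointer in the implementation.go hunk above capture the core "implements" test: wrap the concrete named type in a pointer so its full method set is considered, then ask the type checker. Below is a compact sketch of that check using types.Implements on a toy package type-checked on the fly; the source string and names I, T, M are invented for illustration.

package main

import (
	"fmt"
	"go/ast"
	"go/importer"
	"go/parser"
	"go/token"
	"go/types"
)

// Toy source: M has a pointer receiver, so only *T (not T) satisfies I.
const src = `package p
type I interface{ M() }
type T struct{}
func (*T) M() {}
`

func main() {
	fset := token.NewFileSet()
	f, err := parser.ParseFile(fset, "p.go", src, 0)
	if err != nil {
		panic(err)
	}
	conf := types.Config{Importer: importer.Default()}
	pkg, err := conf.Check("p", fset, []*ast.File{f}, nil)
	if err != nil {
		panic(err)
	}
	iface := pkg.Scope().Lookup("I").Type().Underlying().(*types.Interface)
	T := pkg.Scope().Lookup("T").Type()

	// As with ensurePointer: the pointer type carries the full method set.
	fmt.Println(types.Implements(T, iface))                   // false
	fmt.Println(types.Implements(types.NewPointer(T), iface)) // true
}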
diff --git a/internal/lsp/source/known_packages.go b/internal/lsp/source/known_packages.go
deleted file mode 100644
index 49ede162b..000000000
--- a/internal/lsp/source/known_packages.go
+++ /dev/null
@@ -1,118 +0,0 @@
-// Copyright 2020 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-package source
-
-import (
- "context"
- "sort"
- "strings"
- "sync"
- "time"
-
- "golang.org/x/tools/internal/event"
- "golang.org/x/tools/internal/imports"
- errors "golang.org/x/xerrors"
-)
-
-// KnownPackages returns a list of all known packages
-// in the package graph that could potentially be imported
-// by the given file.
-func KnownPackages(ctx context.Context, snapshot Snapshot, fh VersionedFileHandle) ([]string, error) {
- pkg, pgf, err := GetParsedFile(ctx, snapshot, fh, NarrowestPackage)
- if err != nil {
- return nil, errors.Errorf("GetParsedFile: %w", err)
- }
- alreadyImported := map[string]struct{}{}
- for _, imp := range pgf.File.Imports {
- alreadyImported[imp.Path.Value] = struct{}{}
- }
- pkgs, err := snapshot.CachedImportPaths(ctx)
- if err != nil {
- return nil, err
- }
- var (
- seen = make(map[string]struct{})
- paths []string
- )
- for path, knownPkg := range pkgs {
- gofiles := knownPkg.CompiledGoFiles()
- if len(gofiles) == 0 || gofiles[0].File.Name == nil {
- continue
- }
- pkgName := gofiles[0].File.Name.Name
- // package main cannot be imported
- if pkgName == "main" {
- continue
- }
- // test packages cannot be imported
- if knownPkg.ForTest() != "" {
- continue
- }
- // no need to import what the file already imports
- if _, ok := alreadyImported[path]; ok {
- continue
- }
- // snapshot.KnownPackages could have multiple versions of a pkg
- if _, ok := seen[path]; ok {
- continue
- }
- seen[path] = struct{}{}
- // make sure internal packages are importable by the file
- if !IsValidImport(pkg.PkgPath(), path) {
- continue
- }
- // naive check on cyclical imports
- if isDirectlyCyclical(pkg, knownPkg) {
- continue
- }
- paths = append(paths, path)
- seen[path] = struct{}{}
- }
- err = snapshot.RunProcessEnvFunc(ctx, func(o *imports.Options) error {
- var mu sync.Mutex
- ctx, cancel := context.WithTimeout(ctx, time.Millisecond*80)
- defer cancel()
- return imports.GetAllCandidates(ctx, func(ifix imports.ImportFix) {
- mu.Lock()
- defer mu.Unlock()
- if _, ok := seen[ifix.StmtInfo.ImportPath]; ok {
- return
- }
- paths = append(paths, ifix.StmtInfo.ImportPath)
- }, "", pgf.URI.Filename(), pkg.GetTypes().Name(), o.Env)
- })
- if err != nil {
- // If an error occurred, we still have a decent list we can
- // show to the user through snapshot.CachedImportPaths.
- event.Error(ctx, "imports.GetAllCandidates", err)
- }
- sort.Slice(paths, func(i, j int) bool {
- importI, importJ := paths[i], paths[j]
- iHasDot := strings.Contains(importI, ".")
- jHasDot := strings.Contains(importJ, ".")
- if iHasDot && !jHasDot {
- return false
- }
- if jHasDot && !iHasDot {
- return true
- }
- return importI < importJ
- })
- return paths, nil
-}
-
-// isDirectlyCyclical reports whether imported directly imports pkg.
-// It does not (yet) perform a full cycle check: the list of importable
-// packages shown to the user is already very large, and tolerating a few
-// false positives is worth keeping this check fast.
-func isDirectlyCyclical(pkg, imported Package) bool {
- for _, imp := range imported.Imports() {
- if imp.PkgPath() == pkg.PkgPath() {
- return true
- }
- }
- return false
-}
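
Note: the sort at the end of KnownPackages above prefers dot-free (standard-library-style) import paths over domain-qualified ones, and orders alphabetically within each group. Here is a tiny standalone sketch of the same ordering rule; the example import paths, including github.com/example/pkg, are invented for illustration.

package main

import (
	"fmt"
	"sort"
	"strings"
)

func main() {
	paths := []string{
		"golang.org/x/tools/internal/lsp",
		"fmt",
		"github.com/example/pkg", // hypothetical path
		"strings",
	}
	sort.Slice(paths, func(i, j int) bool {
		iHasDot := strings.Contains(paths[i], ".")
		jHasDot := strings.Contains(paths[j], ".")
		if iHasDot != jHasDot {
			return !iHasDot // dot-free (stdlib-style) paths sort first
		}
		return paths[i] < paths[j]
	})
	fmt.Println(paths)
	// [fmt strings github.com/example/pkg golang.org/x/tools/internal/lsp]
}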
diff --git a/internal/lsp/source/offset_test.go b/internal/lsp/source/offset_test.go
deleted file mode 100644
index 10076773a..000000000
--- a/internal/lsp/source/offset_test.go
+++ /dev/null
@@ -1,71 +0,0 @@
-// Copyright 2021 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-package source_test
-
-import (
- "go/token"
- "go/types"
- "testing"
-
- "golang.org/x/tools/go/packages"
-)
-
-// This test reports any unexpected uses of (*go/token.File).Offset within
-// the gopls codebase to ensure that we don't check in more code that is prone
-// to panicking. All calls to (*go/token.File).Offset should be replaced with
-// calls to source.Offset.
-func TestTokenOffset(t *testing.T) {
- fset := token.NewFileSet()
- pkgs, err := packages.Load(&packages.Config{
- Fset: fset,
- Mode: packages.NeedName | packages.NeedModule | packages.NeedCompiledGoFiles | packages.NeedTypes | packages.NeedTypesInfo | packages.NeedSyntax | packages.NeedImports | packages.NeedDeps,
- }, "go/token", "golang.org/x/tools/internal/lsp/...", "golang.org/x/tools/gopls/...")
- if err != nil {
- t.Fatal(err)
- }
- var tokPkg *packages.Package
- for _, pkg := range pkgs {
- if pkg.PkgPath == "go/token" {
- tokPkg = pkg
- break
- }
- }
- typname, ok := tokPkg.Types.Scope().Lookup("File").(*types.TypeName)
- if !ok {
- t.Fatal("expected go/token.File typename, got none")
- }
- named, ok := typname.Type().(*types.Named)
- if !ok {
- t.Fatalf("expected named type, got %T", typname.Type)
- }
- var offset *types.Func
- for i := 0; i < named.NumMethods(); i++ {
- meth := named.Method(i)
- if meth.Name() == "Offset" {
- offset = meth
- break
- }
- }
- for _, pkg := range pkgs {
- for ident, obj := range pkg.TypesInfo.Uses {
- if ident.Name != "Offset" {
- continue
- }
- if pkg.PkgPath == "go/token" {
- continue
- }
- if !types.Identical(offset.Type(), obj.Type()) {
- continue
- }
- // The only permitted use is in golang.org/x/tools/internal/lsp/source.Offset,
- // so check the enclosing function.
- sourceOffset := pkg.Types.Scope().Lookup("Offset").(*types.Func)
- if sourceOffset.Pos() <= ident.Pos() && ident.Pos() <= sourceOffset.Scope().End() {
- continue // accepted usage
- }
- t.Errorf(`%s: Unexpected use of (*go/token.File).Offset. Please use golang.org/x/tools/internal/lsp/source.Offset instead.`, fset.Position(ident.Pos()))
- }
- }
-}
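
Note: the test above exists because (*go/token.File).Offset panics when given a position outside the file, whereas the deleted source.Offset wrapper reported an error instead. The sketch below shows such a guarded offset helper; the name safeOffset and the exact bounds check are inferred from the test's stated purpose, not copied from the deleted wrapper.

package main

import (
	"fmt"
	"go/token"
)

// safeOffset converts pos to a byte offset in tok, returning an error
// instead of panicking when pos does not lie within the file.
func safeOffset(tok *token.File, pos token.Pos) (int, error) {
	if !pos.IsValid() {
		return -1, fmt.Errorf("invalid position")
	}
	if int(pos) < tok.Base() || int(pos) > tok.Base()+tok.Size() {
		return -1, fmt.Errorf("position %d is outside file [%d, %d]", pos, tok.Base(), tok.Base()+tok.Size())
	}
	return tok.Offset(pos), nil
}

func main() {
	fset := token.NewFileSet()
	tok := fset.AddFile("a.go", -1, 10)
	if off, err := safeOffset(tok, tok.Pos(3)); err == nil {
		fmt.Println(off) // 3
	}
	if _, err := safeOffset(tok, token.Pos(10_000)); err != nil {
		fmt.Println("error:", err)
	}
}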
diff --git a/internal/lsp/source/options.go b/internal/lsp/source/options.go
deleted file mode 100644
index 8e262c63b..000000000
--- a/internal/lsp/source/options.go
+++ /dev/null
@@ -1,1449 +0,0 @@
-// Copyright 2019 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-package source
-
-import (
- "context"
- "fmt"
- "io"
- "path/filepath"
- "regexp"
- "strings"
- "sync"
- "time"
-
- "golang.org/x/tools/go/analysis"
- "golang.org/x/tools/go/analysis/passes/asmdecl"
- "golang.org/x/tools/go/analysis/passes/assign"
- "golang.org/x/tools/go/analysis/passes/atomic"
- "golang.org/x/tools/go/analysis/passes/atomicalign"
- "golang.org/x/tools/go/analysis/passes/bools"
- "golang.org/x/tools/go/analysis/passes/buildtag"
- "golang.org/x/tools/go/analysis/passes/cgocall"
- "golang.org/x/tools/go/analysis/passes/composite"
- "golang.org/x/tools/go/analysis/passes/copylock"
- "golang.org/x/tools/go/analysis/passes/deepequalerrors"
- "golang.org/x/tools/go/analysis/passes/errorsas"
- "golang.org/x/tools/go/analysis/passes/fieldalignment"
- "golang.org/x/tools/go/analysis/passes/httpresponse"
- "golang.org/x/tools/go/analysis/passes/ifaceassert"
- "golang.org/x/tools/go/analysis/passes/loopclosure"
- "golang.org/x/tools/go/analysis/passes/lostcancel"
- "golang.org/x/tools/go/analysis/passes/nilfunc"
- "golang.org/x/tools/go/analysis/passes/nilness"
- "golang.org/x/tools/go/analysis/passes/printf"
- "golang.org/x/tools/go/analysis/passes/shadow"
- "golang.org/x/tools/go/analysis/passes/shift"
- "golang.org/x/tools/go/analysis/passes/sortslice"
- "golang.org/x/tools/go/analysis/passes/stdmethods"
- "golang.org/x/tools/go/analysis/passes/stringintconv"
- "golang.org/x/tools/go/analysis/passes/structtag"
- "golang.org/x/tools/go/analysis/passes/testinggoroutine"
- "golang.org/x/tools/go/analysis/passes/tests"
- "golang.org/x/tools/go/analysis/passes/unmarshal"
- "golang.org/x/tools/go/analysis/passes/unreachable"
- "golang.org/x/tools/go/analysis/passes/unsafeptr"
- "golang.org/x/tools/go/analysis/passes/unusedresult"
- "golang.org/x/tools/go/analysis/passes/unusedwrite"
- "golang.org/x/tools/go/packages"
- "golang.org/x/tools/internal/lsp/analysis/fillreturns"
- "golang.org/x/tools/internal/lsp/analysis/fillstruct"
- "golang.org/x/tools/internal/lsp/analysis/infertypeargs"
- "golang.org/x/tools/internal/lsp/analysis/nonewvars"
- "golang.org/x/tools/internal/lsp/analysis/noresultvalues"
- "golang.org/x/tools/internal/lsp/analysis/simplifycompositelit"
- "golang.org/x/tools/internal/lsp/analysis/simplifyrange"
- "golang.org/x/tools/internal/lsp/analysis/simplifyslice"
- "golang.org/x/tools/internal/lsp/analysis/stubmethods"
- "golang.org/x/tools/internal/lsp/analysis/undeclaredname"
- "golang.org/x/tools/internal/lsp/analysis/unusedparams"
- "golang.org/x/tools/internal/lsp/analysis/useany"
- "golang.org/x/tools/internal/lsp/command"
- "golang.org/x/tools/internal/lsp/diff"
- "golang.org/x/tools/internal/lsp/diff/myers"
- "golang.org/x/tools/internal/lsp/protocol"
- errors "golang.org/x/xerrors"
-)
-
-var (
- optionsOnce sync.Once
- defaultOptions *Options
-)
-
-// DefaultOptions returns the options used for Gopls execution, independent
-// of any externally provided configuration (LSP initialization, command
-// invocation, etc.).
-func DefaultOptions() *Options {
- optionsOnce.Do(func() {
- var commands []string
- for _, c := range command.Commands {
- commands = append(commands, c.ID())
- }
- defaultOptions = &Options{
- ClientOptions: ClientOptions{
- InsertTextFormat: protocol.PlainTextTextFormat,
- PreferredContentFormat: protocol.Markdown,
- ConfigurationSupported: true,
- DynamicConfigurationSupported: true,
- DynamicRegistrationSemanticTokensSupported: true,
- DynamicWatchedFilesSupported: true,
- LineFoldingOnly: false,
- HierarchicalDocumentSymbolSupport: true,
- },
- ServerOptions: ServerOptions{
- SupportedCodeActions: map[FileKind]map[protocol.CodeActionKind]bool{
- Go: {
- protocol.SourceFixAll: true,
- protocol.SourceOrganizeImports: true,
- protocol.QuickFix: true,
- protocol.RefactorRewrite: true,
- protocol.RefactorExtract: true,
- },
- Mod: {
- protocol.SourceOrganizeImports: true,
- protocol.QuickFix: true,
- },
- Work: {},
- Sum: {},
- Tmpl: {},
- },
- SupportedCommands: commands,
- },
- UserOptions: UserOptions{
- BuildOptions: BuildOptions{
- ExpandWorkspaceToModule: true,
- ExperimentalPackageCacheKey: true,
- MemoryMode: ModeNormal,
- DirectoryFilters: []string{"-node_modules"},
- TemplateExtensions: []string{},
- },
- UIOptions: UIOptions{
- DiagnosticOptions: DiagnosticOptions{
- DiagnosticsDelay: 250 * time.Millisecond,
- Annotations: map[Annotation]bool{
- Bounds: true,
- Escape: true,
- Inline: true,
- Nil: true,
- },
- },
- DocumentationOptions: DocumentationOptions{
- HoverKind: FullDocumentation,
- LinkTarget: "pkg.go.dev",
- LinksInHover: true,
- },
- NavigationOptions: NavigationOptions{
- ImportShortcut: Both,
- SymbolMatcher: SymbolFastFuzzy,
- SymbolStyle: DynamicSymbols,
- },
- CompletionOptions: CompletionOptions{
- Matcher: Fuzzy,
- CompletionBudget: 100 * time.Millisecond,
- ExperimentalPostfixCompletions: true,
- },
- Codelenses: map[string]bool{
- string(command.Generate): true,
- string(command.RegenerateCgo): true,
- string(command.Tidy): true,
- string(command.GCDetails): false,
- string(command.UpgradeDependency): true,
- string(command.Vendor): true,
- },
- },
- },
- InternalOptions: InternalOptions{
- LiteralCompletions: true,
- TempModfile: true,
- CompleteUnimported: true,
- CompletionDocumentation: true,
- DeepCompletion: true,
- },
- Hooks: Hooks{
- ComputeEdits: myers.ComputeEdits,
- URLRegexp: urlRegexp(),
- DefaultAnalyzers: defaultAnalyzers(),
- TypeErrorAnalyzers: typeErrorAnalyzers(),
- ConvenienceAnalyzers: convenienceAnalyzers(),
- StaticcheckAnalyzers: map[string]*Analyzer{},
- GoDiff: true,
- },
- }
- })
- return defaultOptions
-}
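As a hedged usage sketch (not definitive, and only compilable from inside golang.org/x/tools, since internal packages cannot be imported by other modules): DefaultOptions returns a process-wide singleton, so callers clone it before applying per-session changes.

```go
package main

import (
	"fmt"

	"golang.org/x/tools/internal/lsp/source"
)

func main() {
	// Clone the shared defaults so per-session tweaks don't leak into the
	// singleton returned by DefaultOptions.
	opts := source.DefaultOptions().Clone()
	opts.UsePlaceholders = true

	fmt.Println(opts.HoverKind)        // FullDocumentation
	fmt.Println(opts.CompletionBudget) // 100ms
}
```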
-
-// Options holds various configuration that affects Gopls execution, organized
-// by the nature or origin of the settings.
-type Options struct {
- ClientOptions
- ServerOptions
- UserOptions
- InternalOptions
- Hooks
-}
-
-// ClientOptions holds LSP-specific configuration that is provided by the
-// client.
-type ClientOptions struct {
- InsertTextFormat protocol.InsertTextFormat
- ConfigurationSupported bool
- DynamicConfigurationSupported bool
- DynamicRegistrationSemanticTokensSupported bool
- DynamicWatchedFilesSupported bool
- PreferredContentFormat protocol.MarkupKind
- LineFoldingOnly bool
- HierarchicalDocumentSymbolSupport bool
- SemanticTypes []string
- SemanticMods []string
- RelatedInformationSupported bool
- CompletionTags bool
- CompletionDeprecated bool
-}
-
-// ServerOptions holds LSP-specific configuration that is provided by the
-// server.
-type ServerOptions struct {
- SupportedCodeActions map[FileKind]map[protocol.CodeActionKind]bool
- SupportedCommands []string
-}
-
-type BuildOptions struct {
- // BuildFlags is the set of flags passed on to the build system when invoked.
- // It is applied to queries like `go list`, which is used when discovering files.
- // The most common use is to set `-tags`.
- BuildFlags []string
-
- // Env adds environment variables to external commands run by `gopls`, most notably `go list`.
- Env map[string]string
-
- // DirectoryFilters can be used to exclude unwanted directories from the
- // workspace. By default, all directories are included. Each filter is an
- // operator, `+` to include or `-` to exclude, followed by a path prefix
- // relative to the workspace folder. They are evaluated in order, and
- // the last filter that applies to a path controls whether it is included.
- // The path prefix can be empty, so an initial `-` excludes everything.
- //
- // Examples:
- //
- // Exclude node_modules: `-node_modules`
- //
- // Include only project_a: `-` (exclude everything), `+project_a`
- //
- // Include only project_a, but not node_modules inside it: `-`, `+project_a`, `-project_a/node_modules`
- DirectoryFilters []string
-
- // TemplateExtensions gives the extensions of file names that are treated
- // as template files. (The extension is the part of the file name after
- // the final dot.)
- TemplateExtensions []string
-
- // MemoryMode controls the tradeoff `gopls` makes between memory usage and
- // correctness.
- //
- // Values other than `Normal` are untested and may break in surprising ways.
- MemoryMode MemoryMode `status:"experimental"`
-
- // ExpandWorkspaceToModule instructs `gopls` to adjust the scope of the
- // workspace to find the best available module root. `gopls` first looks for
- // a go.mod file in any parent directory of the workspace folder, expanding
- // the scope to that directory if it exists. If no viable parent directory is
- // found, gopls will check if there is exactly one child directory containing
- // a go.mod file, narrowing the scope to that directory if it exists.
- ExpandWorkspaceToModule bool `status:"experimental"`
-
- // ExperimentalWorkspaceModule opts a user into the experimental support
- // for multi-module workspaces.
- ExperimentalWorkspaceModule bool `status:"experimental"`
-
- // ExperimentalPackageCacheKey controls whether to use a coarser cache key
- // for package type information to increase cache hits. This setting removes
- // the user's environment, build flags, and working directory from the cache
- // key, which should be a safe change as all relevant inputs into the type
- // checking pass are already hashed into the key. This is temporarily guarded
- // by an experiment because caching behavior is subtle and difficult to
- // comprehensively test.
- ExperimentalPackageCacheKey bool `status:"experimental"`
-
- // AllowModfileModifications disables -mod=readonly, allowing imports from
- // out-of-scope modules. This option will eventually be removed.
- AllowModfileModifications bool `status:"experimental"`
-
- // AllowImplicitNetworkAccess disables GOPROXY=off, allowing implicit module
- // downloads rather than requiring user action. This option will eventually
- // be removed.
- AllowImplicitNetworkAccess bool `status:"experimental"`
-
- // ExperimentalUseInvalidMetadata enables gopls to fall back on outdated
- // package metadata to provide editor features if the go command fails to
- // load packages for some reason (like an invalid go.mod file). This will
- // eventually be the default behavior, and this setting will be removed.
- ExperimentalUseInvalidMetadata bool `status:"experimental"`
-}
-
-type UIOptions struct {
- DocumentationOptions
- CompletionOptions
- NavigationOptions
- DiagnosticOptions
-
- // Codelenses overrides the enabled/disabled state of code lenses. See the
- // "Code Lenses" section of the
- // [Settings page](https://github.com/golang/tools/blob/master/gopls/doc/settings.md#code-lenses)
- // for the list of supported lenses.
- //
- // Example Usage:
- //
- // ```json5
- // "gopls": {
- // ...
- // "codelenses": {
- // "generate": false, // Don't show the `go generate` lens.
- // "gc_details": true // Show a code lens toggling the display of gc's choices.
- // }
- // ...
- // }
- // ```
- Codelenses map[string]bool
-
- // SemanticTokens controls whether the LSP server will send
- // semantic tokens to the client.
- SemanticTokens bool `status:"experimental"`
-}
-
-type CompletionOptions struct {
- // Placeholders enables placeholders for function parameters or struct
- // fields in completion responses.
- UsePlaceholders bool
-
- // CompletionBudget is the soft latency goal for completion requests. Most
- // requests finish in a couple milliseconds, but in some cases deep
- // completions can take much longer. As we use up our budget we
- // dynamically reduce the search scope to ensure we return timely
- // results. Zero means unlimited.
- CompletionBudget time.Duration `status:"debug"`
-
- // Matcher sets the algorithm that is used when calculating completion
- // candidates.
- Matcher Matcher `status:"advanced"`
-
- // ExperimentalPostfixCompletions enables artificial method snippets
- // such as "someSlice.sort!".
- ExperimentalPostfixCompletions bool `status:"experimental"`
-}
-
-type DocumentationOptions struct {
- // HoverKind controls the information that appears in the hover text.
- // SingleLine and Structured are intended for use only by authors of editor plugins.
- HoverKind HoverKind
-
- // LinkTarget controls where documentation links go.
- // It might be one of:
- //
- // * `"godoc.org"`
- // * `"pkg.go.dev"`
- //
- // If a company runs its own instance of `godoc.org`, its address can be used as well.
- LinkTarget string
-
- // LinksInHover toggles the presence of links to documentation in hover.
- LinksInHover bool
-}
-
-type FormattingOptions struct {
- // Local is the equivalent of the `goimports -local` flag, which puts
- // imports beginning with this string after third-party packages. It should
- // be the prefix of the import path whose imports should be grouped
- // separately.
- Local string
-
- // Gofumpt indicates if we should run gofumpt formatting.
- Gofumpt bool
-}
-
-type DiagnosticOptions struct {
- // Analyses specify analyses that the user would like to enable or disable.
- // A map of the names of analysis passes that should be enabled/disabled.
- // A full list of analyzers that gopls uses can be found
- // [here](https://github.com/golang/tools/blob/master/gopls/doc/analyzers.md).
- //
- // Example Usage:
- //
- // ```json5
- // ...
- // "analyses": {
- // "unreachable": false, // Disable the unreachable analyzer.
- // "unusedparams": true // Enable the unusedparams analyzer.
- // }
- // ...
- // ```
- Analyses map[string]bool
-
- // Staticcheck enables additional analyses from staticcheck.io.
- Staticcheck bool `status:"experimental"`
-
- // Annotations specifies the various kinds of optimization diagnostics
- // that should be reported by the gc_details command.
- Annotations map[Annotation]bool `status:"experimental"`
-
- // DiagnosticsDelay controls the amount of time that gopls waits
- // after the most recent file modification before computing deep diagnostics.
- // Simple diagnostics (parsing and type-checking) are always run immediately
- // on recently modified packages.
- //
- // This option must be set to a valid duration string, for example `"250ms"`.
- DiagnosticsDelay time.Duration `status:"advanced"`
-
- // ExperimentalWatchedFileDelay controls the amount of time that gopls waits
- // for additional workspace/didChangeWatchedFiles notifications to arrive,
- // before processing all such notifications in a single batch. This is
- // intended for use by LSP clients that don't support their own batching of
- // file system notifications.
- //
- // This option must be set to a valid duration string, for example `"100ms"`.
- ExperimentalWatchedFileDelay time.Duration `status:"experimental"`
-}
-
-type NavigationOptions struct {
- // ImportShortcut specifies whether import statements should link to
- // documentation or go to definitions.
- ImportShortcut ImportShortcut
-
- // SymbolMatcher sets the algorithm that is used when finding workspace symbols.
- SymbolMatcher SymbolMatcher `status:"advanced"`
-
- // SymbolStyle controls how symbols are qualified in symbol responses.
- //
- // Example Usage:
- //
- // ```json5
- // "gopls": {
- // ...
- // "symbolStyle": "Dynamic",
- // ...
- // }
- // ```
- SymbolStyle SymbolStyle `status:"advanced"`
-}
-
-// UserOptions holds custom Gopls configuration (not part of the LSP) that is
-// modified by the client.
-type UserOptions struct {
- BuildOptions
- UIOptions
- FormattingOptions
-
- // VerboseOutput enables additional debug logging.
- VerboseOutput bool `status:"debug"`
-}
-
-// EnvSlice returns Env as a slice of k=v strings.
-func (u *UserOptions) EnvSlice() []string {
- var result []string
- for k, v := range u.Env {
- result = append(result, fmt.Sprintf("%v=%v", k, v))
- }
- return result
-}
-
-// SetEnvSlice sets Env from a slice of k=v strings.
-func (u *UserOptions) SetEnvSlice(env []string) {
- u.Env = map[string]string{}
- for _, kv := range env {
- split := strings.SplitN(kv, "=", 2)
- if len(split) != 2 {
- continue
- }
- u.Env[split[0]] = split[1]
- }
-}
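A small hedged sketch of the Env round trip, under the same in-module assumption; note that SetEnvSlice silently drops entries without an "=":

```go
package main

import (
	"fmt"

	"golang.org/x/tools/internal/lsp/source"
)

func main() {
	var u source.UserOptions
	// Malformed entries (no "=") are skipped; the rest populate the Env map.
	u.SetEnvSlice([]string{"GOFLAGS=-mod=mod", "GOWORK=off", "malformed"})

	fmt.Println(u.Env["GOFLAGS"])  // -mod=mod
	fmt.Println(len(u.EnvSlice())) // 2 (map iteration order is not stable)
}
```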
-
-// Hooks contains configuration that is provided to the Gopls command by the
-// main package.
-type Hooks struct {
- LicensesText string
- GoDiff bool
- ComputeEdits diff.ComputeEdits
- URLRegexp *regexp.Regexp
-
- // GofumptFormat allows the gopls module to wire-in a call to
- // gofumpt/format.Source. langVersion and modulePath are used for some
- // Gofumpt formatting rules -- see the Gofumpt documentation for details.
- GofumptFormat func(ctx context.Context, langVersion, modulePath string, src []byte) ([]byte, error)
-
- DefaultAnalyzers map[string]*Analyzer
- TypeErrorAnalyzers map[string]*Analyzer
- ConvenienceAnalyzers map[string]*Analyzer
- StaticcheckAnalyzers map[string]*Analyzer
-
- // Govulncheck is the implementation of the Govulncheck gopls command.
- Govulncheck func(context.Context, *packages.Config, command.VulncheckArgs) (command.VulncheckResult, error)
-}
-
-// InternalOptions contains settings that are not intended for use by the
-// average user. These may be settings used by tests or outdated settings that
-// will soon be deprecated. Some of these settings may not even be configurable
-// by the user.
-type InternalOptions struct {
- // LiteralCompletions controls whether literal candidates such as
- // "&someStruct{}" are offered. Tests disable this flag to simplify
- // their expected values.
- LiteralCompletions bool
-
- // VerboseWorkDoneProgress controls whether the LSP server should send
- // progress reports for all work done outside the scope of an RPC.
- // Used by the regression tests.
- VerboseWorkDoneProgress bool
-
- // The following options were previously available to users, but they
- // really shouldn't be configured by anyone other than "power users".
-
- // CompletionDocumentation enables documentation with completion results.
- CompletionDocumentation bool
-
- // CompleteUnimported enables completion for packages that you do not
- // currently import.
- CompleteUnimported bool
-
- // DeepCompletion enables the ability to return completions from deep
- // inside relevant entities, rather than just the locally accessible ones.
- //
- // Consider this example:
- //
- // ```go
- // package main
- //
- // import "fmt"
- //
- // type wrapString struct {
- // str string
- // }
- //
- // func main() {
- // x := wrapString{"hello world"}
- // fmt.Printf(<>)
- // }
- // ```
- //
- // At the location of the `<>` in this program, deep completion would suggest the result `x.str`.
- DeepCompletion bool
-
- // TempModfile controls the use of the -modfile flag in Go 1.14.
- TempModfile bool
-}
-
-type ImportShortcut string
-
-const (
- Both ImportShortcut = "Both"
- Link ImportShortcut = "Link"
- Definition ImportShortcut = "Definition"
-)
-
-func (s ImportShortcut) ShowLinks() bool {
- return s == Both || s == Link
-}
-
-func (s ImportShortcut) ShowDefinition() bool {
- return s == Both || s == Definition
-}
-
-type Matcher string
-
-const (
- Fuzzy Matcher = "Fuzzy"
- CaseInsensitive Matcher = "CaseInsensitive"
- CaseSensitive Matcher = "CaseSensitive"
-)
-
-type SymbolMatcher string
-
-const (
- SymbolFuzzy SymbolMatcher = "Fuzzy"
- SymbolFastFuzzy SymbolMatcher = "FastFuzzy"
- SymbolCaseInsensitive SymbolMatcher = "CaseInsensitive"
- SymbolCaseSensitive SymbolMatcher = "CaseSensitive"
-)
-
-type SymbolStyle string
-
-const (
- // PackageQualifiedSymbols means package-qualified symbols, i.e.
- // "pkg.Foo.Field".
- PackageQualifiedSymbols SymbolStyle = "Package"
- // FullyQualifiedSymbols means fully qualified symbols, i.e.
- // "path/to/pkg.Foo.Field".
- FullyQualifiedSymbols SymbolStyle = "Full"
- // DynamicSymbols uses whichever qualifier results in the highest scoring
- // match for the given symbol query. Here a "qualifier" is any "/" or "."
- // delimited suffix of the fully qualified symbol, e.g. "to/pkg.Foo.Field" or
- // just "Foo.Field".
- DynamicSymbols SymbolStyle = "Dynamic"
-)
-
-type HoverKind string
-
-const (
- SingleLine HoverKind = "SingleLine"
- NoDocumentation HoverKind = "NoDocumentation"
- SynopsisDocumentation HoverKind = "SynopsisDocumentation"
- FullDocumentation HoverKind = "FullDocumentation"
-
- // Structured is an experimental setting that returns a structured hover format.
- // This format separates the signature from the documentation, so that the client
- // can do more manipulation of these fields.
- //
- // This should only be used by clients that support this behavior.
- Structured HoverKind = "Structured"
-)
-
-type MemoryMode string
-
-const (
- ModeNormal MemoryMode = "Normal"
- // In DegradeClosed mode, `gopls` will collect less information about
- // packages without open files. As a result, features like Find
- // References and Rename will miss results in such packages.
- ModeDegradeClosed MemoryMode = "DegradeClosed"
-)
-
-type OptionResults []OptionResult
-
-type OptionResult struct {
- Name string
- Value interface{}
- Error error
-
- State OptionState
- Replacement string
-}
-
-type OptionState int
-
-const (
- OptionHandled = OptionState(iota)
- OptionDeprecated
- OptionUnexpected
-)
-
-type LinkTarget string
-
-func SetOptions(options *Options, opts interface{}) OptionResults {
- var results OptionResults
- switch opts := opts.(type) {
- case nil:
- case map[string]interface{}:
- // If the user's settings contains "allExperiments", set that first,
- // and then let them override individual settings independently.
- var enableExperiments bool
- for name, value := range opts {
- if b, ok := value.(bool); name == "allExperiments" && ok && b {
- enableExperiments = true
- options.EnableAllExperiments()
- }
- }
- seen := map[string]struct{}{}
- for name, value := range opts {
- results = append(results, options.set(name, value, seen))
- }
- // Finally, enable any experimental features that are specified in
- // maps, which allows users to individually toggle them on or off.
- if enableExperiments {
- options.enableAllExperimentMaps()
- }
- default:
- results = append(results, OptionResult{
- Value: opts,
- Error: errors.Errorf("Invalid options type %T", opts),
- })
- }
- return results
-}
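SetOptions reports per-setting outcomes rather than failing fast. A hedged sketch (in-module assumption again; the settings map and logging are illustrative) of how a caller might surface errors, deprecations, and unknown keys:

```go
package main

import (
	"log"

	"golang.org/x/tools/internal/lsp/source"
)

func main() {
	opts := source.DefaultOptions().Clone()
	results := source.SetOptions(opts, map[string]interface{}{
		"usePlaceholders":  true,
		"completionBudget": "250ms",
		"codelens":         map[string]interface{}{"generate": false}, // deprecated spelling
		"notASetting":      42,
	})
	for _, r := range results {
		switch {
		case r.Error != nil:
			log.Printf("setting %q: %v", r.Name, r.Error)
		case r.State == source.OptionDeprecated:
			log.Printf("setting %q is deprecated; use %q", r.Name, r.Replacement)
		case r.State == source.OptionUnexpected:
			log.Printf("unknown setting %q", r.Name)
		}
	}
}
```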
-
-func (o *Options) ForClientCapabilities(caps protocol.ClientCapabilities) {
- // Check if the client supports snippets in completion items.
- if c := caps.TextDocument.Completion; c.CompletionItem.SnippetSupport {
- o.InsertTextFormat = protocol.SnippetTextFormat
- }
- // Check if the client supports configuration messages.
- o.ConfigurationSupported = caps.Workspace.Configuration
- o.DynamicConfigurationSupported = caps.Workspace.DidChangeConfiguration.DynamicRegistration
- o.DynamicRegistrationSemanticTokensSupported = caps.TextDocument.SemanticTokens.DynamicRegistration
- o.DynamicWatchedFilesSupported = caps.Workspace.DidChangeWatchedFiles.DynamicRegistration
-
- // Check which types of content format are supported by this client.
- if hover := caps.TextDocument.Hover; len(hover.ContentFormat) > 0 {
- o.PreferredContentFormat = hover.ContentFormat[0]
- }
- // Check if the client supports only line folding.
- fr := caps.TextDocument.FoldingRange
- o.LineFoldingOnly = fr.LineFoldingOnly
- // Check if the client supports hierarchical document symbols.
- o.HierarchicalDocumentSymbolSupport = caps.TextDocument.DocumentSymbol.HierarchicalDocumentSymbolSupport
- // Check if the client supports semantic tokens
- o.SemanticTypes = caps.TextDocument.SemanticTokens.TokenTypes
- o.SemanticMods = caps.TextDocument.SemanticTokens.TokenModifiers
- // We don't need Requests, as we support full functionality.
- // We don't need Formats, as there is only one, for now.
-
- // Check if the client supports diagnostic related information.
- o.RelatedInformationSupported = caps.TextDocument.PublishDiagnostics.RelatedInformation
- // Check if the client's completion support includes tags (preferred) or deprecation.
- if caps.TextDocument.Completion.CompletionItem.TagSupport.ValueSet != nil {
- o.CompletionTags = true
- } else if caps.TextDocument.Completion.CompletionItem.DeprecatedSupport {
- o.CompletionDeprecated = true
- }
-}
-
-func (o *Options) Clone() *Options {
- result := &Options{
- ClientOptions: o.ClientOptions,
- InternalOptions: o.InternalOptions,
- Hooks: Hooks{
- GoDiff: o.GoDiff,
- ComputeEdits: o.ComputeEdits,
- GofumptFormat: o.GofumptFormat,
- URLRegexp: o.URLRegexp,
- Govulncheck: o.Govulncheck,
- },
- ServerOptions: o.ServerOptions,
- UserOptions: o.UserOptions,
- }
- // Fully clone any slice or map fields. Only Hooks and UserOptions can be
- // modified.
- copyStringMap := func(src map[string]bool) map[string]bool {
- dst := make(map[string]bool)
- for k, v := range src {
- dst[k] = v
- }
- return dst
- }
- result.Analyses = copyStringMap(o.Analyses)
- result.Codelenses = copyStringMap(o.Codelenses)
-
- copySlice := func(src []string) []string {
- dst := make([]string, len(src))
- copy(dst, src)
- return dst
- }
- result.SetEnvSlice(o.EnvSlice())
- result.BuildFlags = copySlice(o.BuildFlags)
- result.DirectoryFilters = copySlice(o.DirectoryFilters)
-
- copyAnalyzerMap := func(src map[string]*Analyzer) map[string]*Analyzer {
- dst := make(map[string]*Analyzer)
- for k, v := range src {
- dst[k] = v
- }
- return dst
- }
- result.DefaultAnalyzers = copyAnalyzerMap(o.DefaultAnalyzers)
- result.TypeErrorAnalyzers = copyAnalyzerMap(o.TypeErrorAnalyzers)
- result.ConvenienceAnalyzers = copyAnalyzerMap(o.ConvenienceAnalyzers)
- result.StaticcheckAnalyzers = copyAnalyzerMap(o.StaticcheckAnalyzers)
- return result
-}
-
-func (o *Options) AddStaticcheckAnalyzer(a *analysis.Analyzer, enabled bool, severity protocol.DiagnosticSeverity) {
- o.StaticcheckAnalyzers[a.Name] = &Analyzer{
- Analyzer: a,
- Enabled: enabled,
- Severity: severity,
- }
-}
-
-// EnableAllExperiments turns on all of the experimental "off-by-default"
-// features offered by gopls. Any experimental features specified in maps
-// should be enabled in enableAllExperimentMaps.
-func (o *Options) EnableAllExperiments() {
- o.SemanticTokens = true
- o.ExperimentalPostfixCompletions = true
- o.ExperimentalUseInvalidMetadata = true
- o.ExperimentalWatchedFileDelay = 50 * time.Millisecond
- o.SymbolMatcher = SymbolFastFuzzy
-}
-
-func (o *Options) enableAllExperimentMaps() {
- if _, ok := o.Codelenses[string(command.GCDetails)]; !ok {
- o.Codelenses[string(command.GCDetails)] = true
- }
- if _, ok := o.Analyses[unusedparams.Analyzer.Name]; !ok {
- o.Analyses[unusedparams.Analyzer.Name] = true
- }
-}
-
-func (o *Options) set(name string, value interface{}, seen map[string]struct{}) OptionResult {
- // Flatten the name in case we get options with a hierarchy.
- split := strings.Split(name, ".")
- name = split[len(split)-1]
-
- result := OptionResult{Name: name, Value: value}
- if _, ok := seen[name]; ok {
- result.errorf("duplicate configuration for %s", name)
- }
- seen[name] = struct{}{}
-
- switch name {
- case "env":
- menv, ok := value.(map[string]interface{})
- if !ok {
- result.errorf("invalid type %T, expect map", value)
- break
- }
- if o.Env == nil {
- o.Env = make(map[string]string)
- }
- for k, v := range menv {
- o.Env[k] = fmt.Sprint(v)
- }
-
- case "buildFlags":
- iflags, ok := value.([]interface{})
- if !ok {
- result.errorf("invalid type %T, expect list", value)
- break
- }
- flags := make([]string, 0, len(iflags))
- for _, flag := range iflags {
- flags = append(flags, fmt.Sprintf("%s", flag))
- }
- o.BuildFlags = flags
- case "directoryFilters":
- ifilters, ok := value.([]interface{})
- if !ok {
- result.errorf("invalid type %T, expect list", value)
- break
- }
- var filters []string
- for _, ifilter := range ifilters {
- filter := fmt.Sprint(ifilter)
- if filter == "" || (filter[0] != '+' && filter[0] != '-') {
- result.errorf("invalid filter %q, must start with + or -", filter)
- return result
- }
- filters = append(filters, strings.TrimRight(filepath.FromSlash(filter), "/"))
- }
- o.DirectoryFilters = filters
- case "memoryMode":
- if s, ok := result.asOneOf(
- string(ModeNormal),
- string(ModeDegradeClosed),
- ); ok {
- o.MemoryMode = MemoryMode(s)
- }
- case "completionDocumentation":
- result.setBool(&o.CompletionDocumentation)
- case "usePlaceholders":
- result.setBool(&o.UsePlaceholders)
- case "deepCompletion":
- result.setBool(&o.DeepCompletion)
- case "completeUnimported":
- result.setBool(&o.CompleteUnimported)
- case "completionBudget":
- result.setDuration(&o.CompletionBudget)
- case "matcher":
- if s, ok := result.asOneOf(
- string(Fuzzy),
- string(CaseSensitive),
- string(CaseInsensitive),
- ); ok {
- o.Matcher = Matcher(s)
- }
-
- case "symbolMatcher":
- if s, ok := result.asOneOf(
- string(SymbolFuzzy),
- string(SymbolFastFuzzy),
- string(SymbolCaseInsensitive),
- string(SymbolCaseSensitive),
- ); ok {
- o.SymbolMatcher = SymbolMatcher(s)
- }
-
- case "symbolStyle":
- if s, ok := result.asOneOf(
- string(FullyQualifiedSymbols),
- string(PackageQualifiedSymbols),
- string(DynamicSymbols),
- ); ok {
- o.SymbolStyle = SymbolStyle(s)
- }
-
- case "hoverKind":
- if s, ok := result.asOneOf(
- string(NoDocumentation),
- string(SingleLine),
- string(SynopsisDocumentation),
- string(FullDocumentation),
- string(Structured),
- ); ok {
- o.HoverKind = HoverKind(s)
- }
-
- case "linkTarget":
- result.setString(&o.LinkTarget)
-
- case "linksInHover":
- result.setBool(&o.LinksInHover)
-
- case "importShortcut":
- if s, ok := result.asOneOf(string(Both), string(Link), string(Definition)); ok {
- o.ImportShortcut = ImportShortcut(s)
- }
-
- case "analyses":
- result.setBoolMap(&o.Analyses)
-
- case "annotations":
- result.setAnnotationMap(&o.Annotations)
-
- case "codelenses", "codelens":
- var lensOverrides map[string]bool
- result.setBoolMap(&lensOverrides)
- if result.Error == nil {
- if o.Codelenses == nil {
- o.Codelenses = make(map[string]bool)
- }
- for lens, enabled := range lensOverrides {
- o.Codelenses[lens] = enabled
- }
- }
-
- // codelens is deprecated, but still works for now.
- // TODO(rstambler): Remove this for the gopls/v0.7.0 release.
- if name == "codelens" {
- result.State = OptionDeprecated
- result.Replacement = "codelenses"
- }
-
- case "staticcheck":
- result.setBool(&o.Staticcheck)
-
- case "local":
- result.setString(&o.Local)
-
- case "verboseOutput":
- result.setBool(&o.VerboseOutput)
-
- case "verboseWorkDoneProgress":
- result.setBool(&o.VerboseWorkDoneProgress)
-
- case "tempModfile":
- result.setBool(&o.TempModfile)
-
- case "gofumpt":
- result.setBool(&o.Gofumpt)
-
- case "semanticTokens":
- result.setBool(&o.SemanticTokens)
-
- case "expandWorkspaceToModule":
- result.setBool(&o.ExpandWorkspaceToModule)
-
- case "experimentalPostfixCompletions":
- result.setBool(&o.ExperimentalPostfixCompletions)
-
- case "experimentalWorkspaceModule":
- result.setBool(&o.ExperimentalWorkspaceModule)
-
- case "experimentalTemplateSupport": // remove after June 2022
- result.State = OptionDeprecated
-
- case "templateExtensions":
- if iexts, ok := value.([]interface{}); ok {
- ans := []string{}
- for _, x := range iexts {
- ans = append(ans, fmt.Sprint(x))
- }
- o.TemplateExtensions = ans
- break
- }
- if value == nil {
- o.TemplateExtensions = nil
- break
- }
- result.errorf("unexpected type %T, expected []string", value)
- case "experimentalDiagnosticsDelay", "diagnosticsDelay":
- if name == "experimentalDiagnosticsDelay" {
- result.State = OptionDeprecated
- result.Replacement = "diagnosticsDelay"
- }
- result.setDuration(&o.DiagnosticsDelay)
-
- case "experimentalWatchedFileDelay":
- result.setDuration(&o.ExperimentalWatchedFileDelay)
-
- case "experimentalPackageCacheKey":
- result.setBool(&o.ExperimentalPackageCacheKey)
-
- case "allowModfileModifications":
- result.setBool(&o.AllowModfileModifications)
-
- case "allowImplicitNetworkAccess":
- result.setBool(&o.AllowImplicitNetworkAccess)
-
- case "experimentalUseInvalidMetadata":
- result.setBool(&o.ExperimentalUseInvalidMetadata)
-
- case "allExperiments":
- // This setting should be handled before all of the other options are
- // processed, so do nothing here.
-
- // Replaced settings.
- case "experimentalDisabledAnalyses":
- result.State = OptionDeprecated
- result.Replacement = "analyses"
-
- case "disableDeepCompletion":
- result.State = OptionDeprecated
- result.Replacement = "deepCompletion"
-
- case "disableFuzzyMatching":
- result.State = OptionDeprecated
- result.Replacement = "fuzzyMatching"
-
- case "wantCompletionDocumentation":
- result.State = OptionDeprecated
- result.Replacement = "completionDocumentation"
-
- case "wantUnimportedCompletions":
- result.State = OptionDeprecated
- result.Replacement = "completeUnimported"
-
- case "fuzzyMatching":
- result.State = OptionDeprecated
- result.Replacement = "matcher"
-
- case "caseSensitiveCompletion":
- result.State = OptionDeprecated
- result.Replacement = "matcher"
-
- // Deprecated settings.
- case "wantSuggestedFixes":
- result.State = OptionDeprecated
-
- case "noIncrementalSync":
- result.State = OptionDeprecated
-
- case "watchFileChanges":
- result.State = OptionDeprecated
-
- case "go-diff":
- result.State = OptionDeprecated
-
- default:
- result.State = OptionUnexpected
- }
- return result
-}
-
-func (r *OptionResult) errorf(msg string, values ...interface{}) {
- prefix := fmt.Sprintf("parsing setting %q: ", r.Name)
- r.Error = errors.Errorf(prefix+msg, values...)
-}
-
-func (r *OptionResult) asBool() (bool, bool) {
- b, ok := r.Value.(bool)
- if !ok {
- r.errorf("invalid type %T, expect bool", r.Value)
- return false, false
- }
- return b, true
-}
-
-func (r *OptionResult) setBool(b *bool) {
- if v, ok := r.asBool(); ok {
- *b = v
- }
-}
-
-func (r *OptionResult) setDuration(d *time.Duration) {
- if v, ok := r.asString(); ok {
- parsed, err := time.ParseDuration(v)
- if err != nil {
- r.errorf("failed to parse duration %q: %v", v, err)
- return
- }
- *d = parsed
- }
-}
-
-func (r *OptionResult) setBoolMap(bm *map[string]bool) {
- m := r.asBoolMap()
- *bm = m
-}
-
-func (r *OptionResult) setAnnotationMap(bm *map[Annotation]bool) {
- all := r.asBoolMap()
- if all == nil {
- return
- }
- // Default to everything enabled.
- m := make(map[Annotation]bool)
- for k, enabled := range all {
- a, err := asOneOf(
- k,
- string(Nil),
- string(Escape),
- string(Inline),
- string(Bounds),
- )
- if err != nil {
- // In case of an error, process any legacy values.
- switch k {
- case "noEscape":
- m[Escape] = false
- r.errorf(`"noEscape" is deprecated, set "Escape: false" instead`)
- case "noNilcheck":
- m[Nil] = false
- r.errorf(`"noNilcheck" is deprecated, set "Nil: false" instead`)
- case "noInline":
- m[Inline] = false
- r.errorf(`"noInline" is deprecated, set "Inline: false" instead`)
- case "noBounds":
- m[Bounds] = false
- r.errorf(`"noBounds" is deprecated, set "Bounds: false" instead`)
- default:
- r.errorf("%v", err)
- }
- continue
- }
- m[Annotation(a)] = enabled
- }
- *bm = m
-}
-
-func (r *OptionResult) asBoolMap() map[string]bool {
- all, ok := r.Value.(map[string]interface{})
- if !ok {
- r.errorf("invalid type %T for map[string]bool option", r.Value)
- return nil
- }
- m := make(map[string]bool)
- for a, enabled := range all {
- if enabled, ok := enabled.(bool); ok {
- m[a] = enabled
- } else {
- r.errorf("invalid type %T for map key %q", enabled, a)
- return m
- }
- }
- return m
-}
-
-func (r *OptionResult) asString() (string, bool) {
- b, ok := r.Value.(string)
- if !ok {
- r.errorf("invalid type %T, expect string", r.Value)
- return "", false
- }
- return b, true
-}
-
-func (r *OptionResult) asOneOf(options ...string) (string, bool) {
- s, ok := r.asString()
- if !ok {
- return "", false
- }
- s, err := asOneOf(s, options...)
- if err != nil {
- r.errorf("%v", err)
- }
- return s, err == nil
-}
-
-func asOneOf(str string, options ...string) (string, error) {
- lower := strings.ToLower(str)
- for _, opt := range options {
- if strings.ToLower(opt) == lower {
- return opt, nil
- }
- }
- return "", fmt.Errorf("invalid option %q for enum", str)
-}
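asOneOf is what makes enum-valued settings case-insensitive while storing the canonical spelling (the symbolMatcher test below passes "caseInsensitive" and expects SymbolCaseInsensitive). A self-contained copy of that behavior, for illustration:

```go
package main

import (
	"fmt"
	"strings"
)

// asOneOf mirrors the unexported helper above: match case-insensitively,
// but return the option's canonical spelling.
func asOneOf(str string, options ...string) (string, error) {
	lower := strings.ToLower(str)
	for _, opt := range options {
		if strings.ToLower(opt) == lower {
			return opt, nil
		}
	}
	return "", fmt.Errorf("invalid option %q for enum", str)
}

func main() {
	got, _ := asOneOf("caseInsensitive", "Fuzzy", "CaseInsensitive", "CaseSensitive")
	fmt.Println(got) // CaseInsensitive
}
```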
-
-func (r *OptionResult) setString(s *string) {
- if v, ok := r.asString(); ok {
- *s = v
- }
-}
-
-// EnabledAnalyzers returns all of the analyzers enabled for the given
-// snapshot.
-func EnabledAnalyzers(snapshot Snapshot) (analyzers []*Analyzer) {
- for _, a := range snapshot.View().Options().DefaultAnalyzers {
- if a.IsEnabled(snapshot.View()) {
- analyzers = append(analyzers, a)
- }
- }
- for _, a := range snapshot.View().Options().TypeErrorAnalyzers {
- if a.IsEnabled(snapshot.View()) {
- analyzers = append(analyzers, a)
- }
- }
- for _, a := range snapshot.View().Options().ConvenienceAnalyzers {
- if a.IsEnabled(snapshot.View()) {
- analyzers = append(analyzers, a)
- }
- }
- for _, a := range snapshot.View().Options().StaticcheckAnalyzers {
- if a.IsEnabled(snapshot.View()) {
- analyzers = append(analyzers, a)
- }
- }
- return analyzers
-}
-
-func typeErrorAnalyzers() map[string]*Analyzer {
- return map[string]*Analyzer{
- fillreturns.Analyzer.Name: {
- Analyzer: fillreturns.Analyzer,
- ActionKind: []protocol.CodeActionKind{protocol.SourceFixAll, protocol.QuickFix},
- Enabled: true,
- },
- nonewvars.Analyzer.Name: {
- Analyzer: nonewvars.Analyzer,
- Enabled: true,
- },
- noresultvalues.Analyzer.Name: {
- Analyzer: noresultvalues.Analyzer,
- Enabled: true,
- },
- undeclaredname.Analyzer.Name: {
- Analyzer: undeclaredname.Analyzer,
- Fix: UndeclaredName,
- Enabled: true,
- },
- }
-}
-
-func convenienceAnalyzers() map[string]*Analyzer {
- return map[string]*Analyzer{
- fillstruct.Analyzer.Name: {
- Analyzer: fillstruct.Analyzer,
- Fix: FillStruct,
- Enabled: true,
- ActionKind: []protocol.CodeActionKind{protocol.RefactorRewrite},
- },
- stubmethods.Analyzer.Name: {
- Analyzer: stubmethods.Analyzer,
- ActionKind: []protocol.CodeActionKind{protocol.RefactorRewrite},
- Fix: StubMethods,
- Enabled: true,
- },
- }
-}
-
-func defaultAnalyzers() map[string]*Analyzer {
- return map[string]*Analyzer{
- // The traditional vet suite:
- asmdecl.Analyzer.Name: {Analyzer: asmdecl.Analyzer, Enabled: true},
- assign.Analyzer.Name: {Analyzer: assign.Analyzer, Enabled: true},
- atomic.Analyzer.Name: {Analyzer: atomic.Analyzer, Enabled: true},
- bools.Analyzer.Name: {Analyzer: bools.Analyzer, Enabled: true},
- buildtag.Analyzer.Name: {Analyzer: buildtag.Analyzer, Enabled: true},
- cgocall.Analyzer.Name: {Analyzer: cgocall.Analyzer, Enabled: true},
- composite.Analyzer.Name: {Analyzer: composite.Analyzer, Enabled: true},
- copylock.Analyzer.Name: {Analyzer: copylock.Analyzer, Enabled: true},
- errorsas.Analyzer.Name: {Analyzer: errorsas.Analyzer, Enabled: true},
- httpresponse.Analyzer.Name: {Analyzer: httpresponse.Analyzer, Enabled: true},
- ifaceassert.Analyzer.Name: {Analyzer: ifaceassert.Analyzer, Enabled: true},
- loopclosure.Analyzer.Name: {Analyzer: loopclosure.Analyzer, Enabled: true},
- lostcancel.Analyzer.Name: {Analyzer: lostcancel.Analyzer, Enabled: true},
- nilfunc.Analyzer.Name: {Analyzer: nilfunc.Analyzer, Enabled: true},
- printf.Analyzer.Name: {Analyzer: printf.Analyzer, Enabled: true},
- shift.Analyzer.Name: {Analyzer: shift.Analyzer, Enabled: true},
- stdmethods.Analyzer.Name: {Analyzer: stdmethods.Analyzer, Enabled: true},
- stringintconv.Analyzer.Name: {Analyzer: stringintconv.Analyzer, Enabled: true},
- structtag.Analyzer.Name: {Analyzer: structtag.Analyzer, Enabled: true},
- tests.Analyzer.Name: {Analyzer: tests.Analyzer, Enabled: true},
- unmarshal.Analyzer.Name: {Analyzer: unmarshal.Analyzer, Enabled: true},
- unreachable.Analyzer.Name: {Analyzer: unreachable.Analyzer, Enabled: true},
- unsafeptr.Analyzer.Name: {Analyzer: unsafeptr.Analyzer, Enabled: true},
- unusedresult.Analyzer.Name: {Analyzer: unusedresult.Analyzer, Enabled: true},
-
- // Non-vet analyzers:
- atomicalign.Analyzer.Name: {Analyzer: atomicalign.Analyzer, Enabled: true},
- deepequalerrors.Analyzer.Name: {Analyzer: deepequalerrors.Analyzer, Enabled: true},
- fieldalignment.Analyzer.Name: {Analyzer: fieldalignment.Analyzer, Enabled: false},
- nilness.Analyzer.Name: {Analyzer: nilness.Analyzer, Enabled: false},
- shadow.Analyzer.Name: {Analyzer: shadow.Analyzer, Enabled: false},
- sortslice.Analyzer.Name: {Analyzer: sortslice.Analyzer, Enabled: true},
- testinggoroutine.Analyzer.Name: {Analyzer: testinggoroutine.Analyzer, Enabled: true},
- unusedparams.Analyzer.Name: {Analyzer: unusedparams.Analyzer, Enabled: false},
- unusedwrite.Analyzer.Name: {Analyzer: unusedwrite.Analyzer, Enabled: false},
- useany.Analyzer.Name: {Analyzer: useany.Analyzer, Enabled: false},
- infertypeargs.Analyzer.Name: {Analyzer: infertypeargs.Analyzer, Enabled: true},
-
- // gofmt -s suite:
- simplifycompositelit.Analyzer.Name: {
- Analyzer: simplifycompositelit.Analyzer,
- Enabled: true,
- ActionKind: []protocol.CodeActionKind{protocol.SourceFixAll, protocol.QuickFix},
- },
- simplifyrange.Analyzer.Name: {
- Analyzer: simplifyrange.Analyzer,
- Enabled: true,
- ActionKind: []protocol.CodeActionKind{protocol.SourceFixAll, protocol.QuickFix},
- },
- simplifyslice.Analyzer.Name: {
- Analyzer: simplifyslice.Analyzer,
- Enabled: true,
- ActionKind: []protocol.CodeActionKind{protocol.SourceFixAll, protocol.QuickFix},
- },
- }
-}
-
-func urlRegexp() *regexp.Regexp {
- // Ensure links are matched as full words, not anywhere.
- re := regexp.MustCompile(`\b(http|ftp|https)://([\w_-]+(?:(?:\.[\w_-]+)+))([\w.,@?^=%&:/~+#-]*[\w@?^=%&/~+#-])?\b`)
- re.Longest()
- return re
-}
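The URL pattern is easy to exercise on its own; Longest switches the regexp to leftmost-longest matching, preferring the longest URL that starts at a given position. A runnable sketch using the same pattern:

```go
package main

import (
	"fmt"
	"regexp"
)

func main() {
	// Same pattern as urlRegexp above, applied to a snippet of doc text.
	re := regexp.MustCompile(`\b(http|ftp|https)://([\w_-]+(?:(?:\.[\w_-]+)+))([\w.,@?^=%&:/~+#-]*[\w@?^=%&/~+#-])?\b`)
	re.Longest()

	doc := "See https://pkg.go.dev/golang.org/x/tools/gopls and http://example.com/a_b for details."
	fmt.Println(re.FindAllString(doc, -1))
	// [https://pkg.go.dev/golang.org/x/tools/gopls http://example.com/a_b]
}
```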
-
-type APIJSON struct {
- Options map[string][]*OptionJSON
- Commands []*CommandJSON
- Lenses []*LensJSON
- Analyzers []*AnalyzerJSON
-}
-
-type OptionJSON struct {
- Name string
- Type string
- Doc string
- EnumKeys EnumKeys
- EnumValues []EnumValue
- Default string
- Status string
- Hierarchy string
-}
-
-func (o *OptionJSON) String() string {
- return o.Name
-}
-
-func (o *OptionJSON) Write(w io.Writer) {
- fmt.Fprintf(w, "**%v** *%v*\n\n", o.Name, o.Type)
- writeStatus(w, o.Status)
- enumValues := collectEnums(o)
- fmt.Fprintf(w, "%v%v\nDefault: `%v`.\n\n", o.Doc, enumValues, o.Default)
-}
-
-func writeStatus(section io.Writer, status string) {
- switch status {
- case "":
- case "advanced":
- fmt.Fprint(section, "**This is an advanced setting and should not be configured by most `gopls` users.**\n\n")
- case "debug":
- fmt.Fprint(section, "**This setting is for debugging purposes only.**\n\n")
- case "experimental":
- fmt.Fprint(section, "**This setting is experimental and may be deleted.**\n\n")
- default:
- fmt.Fprintf(section, "**Status: %s.**\n\n", status)
- }
-}
-
-var parBreakRE = regexp.MustCompile("\n{2,}")
-
-func collectEnums(opt *OptionJSON) string {
- var b strings.Builder
- write := func(name, doc string, index, len int) {
- if doc != "" {
- unbroken := parBreakRE.ReplaceAllString(doc, "\\\n")
- fmt.Fprintf(&b, "* %s\n", strings.TrimSpace(unbroken))
- } else {
- fmt.Fprintf(&b, "* `%s`\n", name)
- }
- }
- if len(opt.EnumValues) > 0 && opt.Type == "enum" {
- b.WriteString("\nMust be one of:\n\n")
- for i, val := range opt.EnumValues {
- write(val.Value, val.Doc, i, len(opt.EnumValues))
- }
- } else if len(opt.EnumKeys.Keys) > 0 && shouldShowEnumKeysInSettings(opt.Name) {
- b.WriteString("\nCan contain any of:\n\n")
- for i, val := range opt.EnumKeys.Keys {
- write(val.Name, val.Doc, i, len(opt.EnumKeys.Keys))
- }
- }
- return b.String()
-}
-
-func shouldShowEnumKeysInSettings(name string) bool {
- // Both of these fields have too many possible options to print.
- return !hardcodedEnumKeys(name)
-}
-
-func hardcodedEnumKeys(name string) bool {
- return name == "analyses" || name == "codelenses"
-}
-
-type EnumKeys struct {
- ValueType string
- Keys []EnumKey
-}
-
-type EnumKey struct {
- Name string
- Doc string
- Default string
-}
-
-type EnumValue struct {
- Value string
- Doc string
-}
-
-type CommandJSON struct {
- Command string
- Title string
- Doc string
- ArgDoc string
- ResultDoc string
-}
-
-func (c *CommandJSON) String() string {
- return c.Command
-}
-
-func (c *CommandJSON) Write(w io.Writer) {
- fmt.Fprintf(w, "### **%v**\nIdentifier: `%v`\n\n%v\n\n", c.Title, c.Command, c.Doc)
- if c.ArgDoc != "" {
- fmt.Fprintf(w, "Args:\n\n```\n%s\n```\n\n", c.ArgDoc)
- }
- if c.ResultDoc != "" {
- fmt.Fprintf(w, "Result:\n\n```\n%s\n```\n\n", c.ResultDoc)
- }
-}
-
-type LensJSON struct {
- Lens string
- Title string
- Doc string
-}
-
-func (l *LensJSON) String() string {
- return l.Title
-}
-
-func (l *LensJSON) Write(w io.Writer) {
- fmt.Fprintf(w, "%s (%s): %s", l.Title, l.Lens, l.Doc)
-}
-
-type AnalyzerJSON struct {
- Name string
- Doc string
- Default bool
-}
-
-func (a *AnalyzerJSON) String() string {
- return a.Name
-}
-
-func (a *AnalyzerJSON) Write(w io.Writer) {
- fmt.Fprintf(w, "%s (%s): %v", a.Name, a.Doc, a.Default)
-}
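These *JSON types and their Write methods are consumed when generating the Markdown documentation for settings, commands, lenses, and analyzers. A hedged sketch (in-module assumption; the field values are illustrative) of rendering a single option:

```go
package main

import (
	"os"

	"golang.org/x/tools/internal/lsp/source"
)

func main() {
	opt := &source.OptionJSON{
		Name:    "usePlaceholders",
		Type:    "bool",
		Doc:     "usePlaceholders enables placeholders for function parameters or struct fields in completion responses.\n",
		Default: "false",
	}
	// Writes a Markdown fragment:
	//   **usePlaceholders** *bool*
	//   ...
	//   Default: `false`.
	opt.Write(os.Stdout)
}
```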
diff --git a/internal/lsp/source/options_test.go b/internal/lsp/source/options_test.go
deleted file mode 100644
index f8260c1dd..000000000
--- a/internal/lsp/source/options_test.go
+++ /dev/null
@@ -1,183 +0,0 @@
-// Copyright 2020 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-package source
-
-import (
- "testing"
- "time"
-)
-
-func TestSetOption(t *testing.T) {
- tests := []struct {
- name string
- value interface{}
- wantError bool
- check func(Options) bool
- }{
- {
- name: "symbolStyle",
- value: "Dynamic",
- check: func(o Options) bool { return o.SymbolStyle == DynamicSymbols },
- },
- {
- name: "symbolStyle",
- value: "",
- wantError: true,
- check: func(o Options) bool { return o.SymbolStyle == "" },
- },
- {
- name: "symbolStyle",
- value: false,
- wantError: true,
- check: func(o Options) bool { return o.SymbolStyle == "" },
- },
- {
- name: "symbolMatcher",
- value: "caseInsensitive",
- check: func(o Options) bool { return o.SymbolMatcher == SymbolCaseInsensitive },
- },
- {
- name: "completionBudget",
- value: "2s",
- check: func(o Options) bool { return o.CompletionBudget == 2*time.Second },
- },
- {
- name: "staticcheck",
- value: true,
- check: func(o Options) bool { return o.Staticcheck == true },
- },
- {
- name: "codelenses",
- value: map[string]interface{}{"generate": true},
- check: func(o Options) bool { return o.Codelenses["generate"] },
- },
- {
- name: "allExperiments",
- value: true,
- check: func(o Options) bool {
- return true // just confirm that we handle this setting
- },
- },
- {
- name: "hoverKind",
- value: "FullDocumentation",
- check: func(o Options) bool {
- return o.HoverKind == FullDocumentation
- },
- },
- {
- name: "hoverKind",
- value: "NoDocumentation",
- check: func(o Options) bool {
- return o.HoverKind == NoDocumentation
- },
- },
- {
- name: "hoverKind",
- value: "SingleLine",
- check: func(o Options) bool {
- return o.HoverKind == SingleLine
- },
- },
- {
- name: "hoverKind",
- value: "Structured",
- check: func(o Options) bool {
- return o.HoverKind == Structured
- },
- },
- {
- name: "ui.documentation.hoverKind",
- value: "Structured",
- check: func(o Options) bool {
- return o.HoverKind == Structured
- },
- },
- {
- name: "matcher",
- value: "Fuzzy",
- check: func(o Options) bool {
- return o.Matcher == Fuzzy
- },
- },
- {
- name: "matcher",
- value: "CaseSensitive",
- check: func(o Options) bool {
- return o.Matcher == CaseSensitive
- },
- },
- {
- name: "matcher",
- value: "CaseInsensitive",
- check: func(o Options) bool {
- return o.Matcher == CaseInsensitive
- },
- },
- {
- name: "env",
- value: map[string]interface{}{"testing": "true"},
- check: func(o Options) bool {
- v, found := o.Env["testing"]
- return found && v == "true"
- },
- },
- {
- name: "env",
- value: []string{"invalid", "input"},
- wantError: true,
- check: func(o Options) bool {
- return o.Env == nil
- },
- },
- {
- name: "directoryFilters",
- value: []interface{}{"-node_modules", "+project_a"},
- check: func(o Options) bool {
- return len(o.DirectoryFilters) == 2
- },
- },
- {
- name: "directoryFilters",
- value: []interface{}{"invalid"},
- wantError: true,
- check: func(o Options) bool {
- return len(o.DirectoryFilters) == 0
- },
- },
- {
- name: "directoryFilters",
- value: []string{"-invalid", "+type"},
- wantError: true,
- check: func(o Options) bool {
- return len(o.DirectoryFilters) == 0
- },
- },
- {
- name: "annotations",
- value: map[string]interface{}{
- "Nil": false,
- "noBounds": true,
- },
- wantError: true,
- check: func(o Options) bool {
- return !o.Annotations[Nil] && !o.Annotations[Bounds]
- },
- },
- }
-
- for _, test := range tests {
- var opts Options
- result := opts.set(test.name, test.value, map[string]struct{}{})
- if (result.Error != nil) != test.wantError {
- t.Fatalf("Options.set(%q, %v): result.Error = %v, want error: %t", test.name, test.value, result.Error, test.wantError)
- }
- // TODO: this could be made much better using cmp.Diff, if that becomes
- // available in this module.
- if !test.check(opts) {
- t.Errorf("Options.set(%q, %v): unexpected result %+v", test.name, test.value, opts)
- }
- }
-}
diff --git a/internal/lsp/source/references.go b/internal/lsp/source/references.go
deleted file mode 100644
index 5d3eac337..000000000
--- a/internal/lsp/source/references.go
+++ /dev/null
@@ -1,200 +0,0 @@
-// Copyright 2019 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-package source
-
-import (
- "context"
- "fmt"
- "go/ast"
- "go/token"
- "go/types"
- "sort"
-
- "golang.org/x/tools/internal/event"
- "golang.org/x/tools/internal/lsp/protocol"
- "golang.org/x/tools/internal/span"
- errors "golang.org/x/xerrors"
-)
-
-// ReferenceInfo holds information about a reference to an identifier in Go source.
-type ReferenceInfo struct {
- Name string
- MappedRange
- ident *ast.Ident
- obj types.Object
- pkg Package
- isDeclaration bool
-}
-
-// References returns a list of references for a given identifier within the
-// packages containing the file f. Declarations appear first in the result.
-func References(ctx context.Context, s Snapshot, f FileHandle, pp protocol.Position, includeDeclaration bool) ([]*ReferenceInfo, error) {
- ctx, done := event.Start(ctx, "source.References")
- defer done()
-
- qualifiedObjs, err := qualifiedObjsAtProtocolPos(ctx, s, f.URI(), pp)
- // Don't return references for builtin types.
- if errors.Is(err, errBuiltin) {
- return nil, nil
- }
- if err != nil {
- return nil, err
- }
-
- refs, err := references(ctx, s, qualifiedObjs, includeDeclaration, true, false)
- if err != nil {
- return nil, err
- }
-
- toSort := refs
- if includeDeclaration {
- toSort = refs[1:]
- }
- sort.Slice(toSort, func(i, j int) bool {
- x := CompareURI(toSort[i].URI(), toSort[j].URI())
- if x == 0 {
- return toSort[i].ident.Pos() < toSort[j].ident.Pos()
- }
- return x < 0
- })
- return refs, nil
-}
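A hedged sketch of how a textDocument/references handler might consume this API (the package and function names here are illustrative, and the in-module assumption applies): each ReferenceInfo carries a MappedRange that converts to an LSP location.

```go
package handler

import (
	"context"

	"golang.org/x/tools/internal/lsp/protocol"
	"golang.org/x/tools/internal/lsp/source"
)

// refsToLocations is a sketch of turning ReferenceInfo values into LSP
// locations; the function and package names are hypothetical.
func refsToLocations(ctx context.Context, snapshot source.Snapshot, fh source.FileHandle, pos protocol.Position) ([]protocol.Location, error) {
	refs, err := source.References(ctx, snapshot, fh, pos, true /* includeDeclaration */)
	if err != nil {
		return nil, err
	}
	locations := make([]protocol.Location, 0, len(refs))
	for _, ref := range refs {
		rng, err := ref.Range() // from the embedded MappedRange
		if err != nil {
			return nil, err
		}
		locations = append(locations, protocol.Location{
			URI:   protocol.URIFromSpanURI(ref.URI()),
			Range: rng,
		})
	}
	return locations, nil
}
```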
-
-// references is a helper function to avoid recomputing qualifiedObjsAtProtocolPos.
-func references(ctx context.Context, snapshot Snapshot, qos []qualifiedObject, includeDeclaration, includeInterfaceRefs, includeEmbeddedRefs bool) ([]*ReferenceInfo, error) {
- var (
- references []*ReferenceInfo
- seen = make(map[token.Pos]bool)
- )
-
- pos := qos[0].obj.Pos()
- if pos == token.NoPos {
- return nil, fmt.Errorf("no position for %s", qos[0].obj)
- }
- filename := snapshot.FileSet().Position(pos).Filename
- pgf, err := qos[0].pkg.File(span.URIFromPath(filename))
- if err != nil {
- return nil, err
- }
- declIdent, err := findIdentifier(ctx, snapshot, qos[0].pkg, pgf, qos[0].obj.Pos())
- if err != nil {
- return nil, err
- }
- // Make sure declaration is the first item in the response.
- if includeDeclaration {
- references = append(references, &ReferenceInfo{
- MappedRange: declIdent.MappedRange,
- Name: qos[0].obj.Name(),
- ident: declIdent.ident,
- obj: qos[0].obj,
- pkg: declIdent.pkg,
- isDeclaration: true,
- })
- }
-
- for _, qo := range qos {
- var searchPkgs []Package
-
- // Only search dependents if the object is exported.
- if qo.obj.Exported() {
- reverseDeps, err := snapshot.GetReverseDependencies(ctx, qo.pkg.ID())
- if err != nil {
- return nil, err
- }
- searchPkgs = append(searchPkgs, reverseDeps...)
- }
- // Add the package in which the identifier is declared.
- searchPkgs = append(searchPkgs, qo.pkg)
- for _, pkg := range searchPkgs {
- for ident, obj := range pkg.GetTypesInfo().Uses {
- // For instantiated objects (as in methods or fields on instantiated
- // types), we may not have pointer-identical objects but still want to
- // consider them references.
- if !equalOrigin(obj, qo.obj) {
- // If ident is not a use of qo.obj, skip it, with one exception:
- // uses of an embedded field can be considered references of the
- // embedded type name
- if !includeEmbeddedRefs {
- continue
- }
- v, ok := obj.(*types.Var)
- if !ok || !v.Embedded() {
- continue
- }
- named, ok := v.Type().(*types.Named)
- if !ok || named.Obj() != qo.obj {
- continue
- }
- }
- if seen[ident.Pos()] {
- continue
- }
- seen[ident.Pos()] = true
- rng, err := posToMappedRange(snapshot, pkg, ident.Pos(), ident.End())
- if err != nil {
- return nil, err
- }
- references = append(references, &ReferenceInfo{
- Name: ident.Name,
- ident: ident,
- pkg: pkg,
- obj: obj,
- MappedRange: rng,
- })
- }
- }
- }
-
- // When searching on type name, don't include interface references -- they
- // would be things like all references to Stringer for any type that
- // happened to have a String method.
- _, isType := declIdent.Declaration.obj.(*types.TypeName)
- if includeInterfaceRefs && !isType {
- declRange, err := declIdent.Range()
- if err != nil {
- return nil, err
- }
- fh, err := snapshot.GetFile(ctx, declIdent.URI())
- if err != nil {
- return nil, err
- }
- interfaceRefs, err := interfaceReferences(ctx, snapshot, fh, declRange.Start)
- if err != nil {
- return nil, err
- }
- references = append(references, interfaceRefs...)
- }
-
- return references, nil
-}
-
-// equalOrigin reports whether obj1 and obj2 have equivalent origin object.
-// This may be the case even if obj1 != obj2, if one or both of them is
-// instantiated.
-func equalOrigin(obj1, obj2 types.Object) bool {
- return obj1.Pkg() == obj2.Pkg() && obj1.Pos() == obj2.Pos() && obj1.Name() == obj2.Name()
-}
-
-// interfaceReferences returns the references to the interfaces implemented by
-// the type or method at the given position.
-func interfaceReferences(ctx context.Context, s Snapshot, f FileHandle, pp protocol.Position) ([]*ReferenceInfo, error) {
- implementations, err := implementations(ctx, s, f, pp)
- if err != nil {
- if errors.Is(err, ErrNotAType) {
- return nil, nil
- }
- return nil, err
- }
-
- var refs []*ReferenceInfo
- for _, impl := range implementations {
- implRefs, err := references(ctx, s, []qualifiedObject{impl}, false, false, false)
- if err != nil {
- return nil, err
- }
- refs = append(refs, implRefs...)
- }
- return refs, nil
-}
diff --git a/internal/lsp/source/rename.go b/internal/lsp/source/rename.go
deleted file mode 100644
index 2ad5d265f..000000000
--- a/internal/lsp/source/rename.go
+++ /dev/null
@@ -1,371 +0,0 @@
-// Copyright 2019 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-package source
-
-import (
- "bytes"
- "context"
- "go/ast"
- "go/format"
- "go/token"
- "go/types"
- "regexp"
- "strings"
-
- "golang.org/x/tools/go/types/typeutil"
- "golang.org/x/tools/internal/event"
- "golang.org/x/tools/internal/lsp/diff"
- "golang.org/x/tools/internal/lsp/protocol"
- "golang.org/x/tools/internal/span"
- "golang.org/x/tools/refactor/satisfy"
- errors "golang.org/x/xerrors"
-)
-
-type renamer struct {
- ctx context.Context
- fset *token.FileSet
- refs []*ReferenceInfo
- objsToUpdate map[types.Object]bool
- hadConflicts bool
- errors string
- from, to string
- satisfyConstraints map[satisfy.Constraint]bool
- packages map[*types.Package]Package // may include additional packages that are a rdep of pkg
- msets typeutil.MethodSetCache
- changeMethods bool
-}
-
-type PrepareItem struct {
- Range protocol.Range
- Text string
-}
-
-// PrepareRename searches for a valid renaming at position pp.
-//
-// The returned usererr is intended to be displayed to the user to explain why
-// the prepare fails. Probably we could eliminate the redundancy in returning
-// two errors, but for now this is done defensively.
-func PrepareRename(ctx context.Context, snapshot Snapshot, f FileHandle, pp protocol.Position) (_ *PrepareItem, usererr, err error) {
- ctx, done := event.Start(ctx, "source.PrepareRename")
- defer done()
-
- qos, err := qualifiedObjsAtProtocolPos(ctx, snapshot, f.URI(), pp)
- if err != nil {
- return nil, nil, err
- }
- node, obj, pkg := qos[0].node, qos[0].obj, qos[0].sourcePkg
- if err := checkRenamable(obj); err != nil {
- return nil, err, err
- }
- mr, err := posToMappedRange(snapshot, pkg, node.Pos(), node.End())
- if err != nil {
- return nil, nil, err
- }
- rng, err := mr.Range()
- if err != nil {
- return nil, nil, err
- }
- if _, isImport := node.(*ast.ImportSpec); isImport {
- // We're not really renaming the import path.
- rng.End = rng.Start
- }
- return &PrepareItem{
- Range: rng,
- Text: obj.Name(),
- }, nil, nil
-}
-
-// checkRenamable verifies if an obj may be renamed.
-func checkRenamable(obj types.Object) error {
- if v, ok := obj.(*types.Var); ok && v.Embedded() {
- return errors.New("can't rename embedded fields: rename the type directly or name the field")
- }
- if obj.Name() == "_" {
- return errors.New("can't rename \"_\"")
- }
- return nil
-}
-
-// Rename returns a map of TextEdits for each file modified when renaming a
-// given identifier within a package.
-func Rename(ctx context.Context, s Snapshot, f FileHandle, pp protocol.Position, newName string) (map[span.URI][]protocol.TextEdit, error) {
- ctx, done := event.Start(ctx, "source.Rename")
- defer done()
-
- qos, err := qualifiedObjsAtProtocolPos(ctx, s, f.URI(), pp)
- if err != nil {
- return nil, err
- }
-
- obj, pkg := qos[0].obj, qos[0].pkg
-
- if err := checkRenamable(obj); err != nil {
- return nil, err
- }
- if obj.Name() == newName {
- return nil, errors.Errorf("old and new names are the same: %s", newName)
- }
- if !isValidIdentifier(newName) {
- return nil, errors.Errorf("invalid identifier to rename: %q", newName)
- }
- if pkg == nil || pkg.IsIllTyped() {
- return nil, errors.Errorf("package for %s is ill typed", f.URI())
- }
- refs, err := references(ctx, s, qos, true, false, true)
- if err != nil {
- return nil, err
- }
- r := renamer{
- ctx: ctx,
- fset: s.FileSet(),
- refs: refs,
- objsToUpdate: make(map[types.Object]bool),
- from: obj.Name(),
- to: newName,
- packages: make(map[*types.Package]Package),
- }
-
- // A renaming initiated at an interface method indicates the
- // intention to rename abstract and concrete methods as needed
- // to preserve assignability.
- for _, ref := range refs {
- if obj, ok := ref.obj.(*types.Func); ok {
- recv := obj.Type().(*types.Signature).Recv()
- if recv != nil && IsInterface(recv.Type().Underlying()) {
- r.changeMethods = true
- break
- }
- }
- }
- for _, from := range refs {
- r.packages[from.pkg.GetTypes()] = from.pkg
- }
-
- // Check that the renaming of the identifier is ok.
- for _, ref := range refs {
- r.check(ref.obj)
- if r.hadConflicts { // one error is enough.
- break
- }
- }
- if r.hadConflicts {
- return nil, errors.New(r.errors) // not Errorf: r.errors is accumulated text, not a format string
- }
-
- changes, err := r.update()
- if err != nil {
- return nil, err
- }
- result := make(map[span.URI][]protocol.TextEdit)
- for uri, edits := range changes {
- // These edits should really be associated with FileHandles for maximal correctness.
- // For now, this is good enough.
- fh, err := s.GetFile(ctx, uri)
- if err != nil {
- return nil, err
- }
- data, err := fh.Read()
- if err != nil {
- return nil, err
- }
- converter := span.NewContentConverter(uri.Filename(), data)
- m := &protocol.ColumnMapper{
- URI: uri,
- Converter: converter,
- Content: data,
- }
- // Sort the edits first.
- diff.SortTextEdits(edits)
- protocolEdits, err := ToProtocolEdits(m, edits)
- if err != nil {
- return nil, err
- }
- result[uri] = protocolEdits
- }
- return result, nil
-}
-
-// update renames all references to the identifier and returns the resulting edits, grouped by file URI.
-func (r *renamer) update() (map[span.URI][]diff.TextEdit, error) {
- result := make(map[span.URI][]diff.TextEdit)
- seen := make(map[span.Span]bool)
-
- docRegexp, err := regexp.Compile(`\b` + r.from + `\b`)
- if err != nil {
- return nil, err
- }
- for _, ref := range r.refs {
- refSpan, err := ref.spanRange.Span()
- if err != nil {
- return nil, err
- }
- if seen[refSpan] {
- continue
- }
- seen[refSpan] = true
-
- // Renaming a types.PkgName may result in the addition or removal of an identifier,
- // so we deal with this separately.
- if pkgName, ok := ref.obj.(*types.PkgName); ok && ref.isDeclaration {
- edit, err := r.updatePkgName(pkgName)
- if err != nil {
- return nil, err
- }
- result[refSpan.URI()] = append(result[refSpan.URI()], *edit)
- continue
- }
-
- // Replace the identifier with r.to.
- edit := diff.TextEdit{
- Span: refSpan,
- NewText: r.to,
- }
-
- result[refSpan.URI()] = append(result[refSpan.URI()], edit)
-
- if !ref.isDeclaration || ref.ident == nil { // uses do not have doc comments to update.
- continue
- }
-
- doc := r.docComment(ref.pkg, ref.ident)
- if doc == nil {
- continue
- }
-
- // Perform the rename in doc comments declared in the original package.
- // go/parser strips carriage returns (\r) from the comment text, so walk
- // the comment text line by line to compute the correct positions.
- for _, comment := range doc.List {
- if isDirective(comment.Text) {
- continue
- }
- lines := strings.Split(comment.Text, "\n")
- tok := r.fset.File(comment.Pos())
- commentLine := tok.Position(comment.Pos()).Line
- for i, line := range lines {
- lineStart := comment.Pos()
- if i > 0 {
- lineStart = tok.LineStart(commentLine + i)
- }
- for _, locs := range docRegexp.FindAllIndex([]byte(line), -1) {
- rng := span.NewRange(r.fset, lineStart+token.Pos(locs[0]), lineStart+token.Pos(locs[1]))
- spn, err := rng.Span()
- if err != nil {
- return nil, err
- }
- result[spn.URI()] = append(result[spn.URI()], diff.TextEdit{
- Span: spn,
- NewText: r.to,
- })
- }
- }
- }
- }
-
- return result, nil
-}
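The doc-comment pass above rewrites only whole-identifier occurrences, which is what the \b-anchored regexp guarantees. Below is a self-contained sketch of that matching behavior with invented names, simplified to a string replacement rather than accumulating diff.TextEdits:

    package main

    import (
        "fmt"
        "regexp"
    )

    func main() {
        from, to := "Foo", "Bar"
        re := regexp.MustCompile(`\b` + regexp.QuoteMeta(from) + `\b`)

        line := "// Foo wraps FooBar and returns a Foo."
        fmt.Println(re.ReplaceAllString(line, to))
        // Prints: // Bar wraps FooBar and returns a Bar.
        // "FooBar" is untouched because \b requires a word boundary.
    }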
-
-// docComment returns the doc for an identifier.
-func (r *renamer) docComment(pkg Package, id *ast.Ident) *ast.CommentGroup {
- _, nodes, _ := pathEnclosingInterval(r.fset, pkg, id.Pos(), id.End())
- for _, node := range nodes {
- switch decl := node.(type) {
- case *ast.FuncDecl:
- return decl.Doc
- case *ast.Field:
- return decl.Doc
- case *ast.GenDecl:
- return decl.Doc
- // For {Type,Value}Spec, if the doc on the spec is absent,
- // search for the enclosing GenDecl
- case *ast.TypeSpec:
- if decl.Doc != nil {
- return decl.Doc
- }
- case *ast.ValueSpec:
- if decl.Doc != nil {
- return decl.Doc
- }
- case *ast.Ident:
- case *ast.AssignStmt:
- // *ast.AssignStmt doesn't have an associated comment group.
- // So, we try to find a comment just before the identifier.
-
- // Try to find a comment group only for short variable declarations (:=).
- if decl.Tok != token.DEFINE {
- return nil
- }
-
- var file *ast.File
- for _, f := range pkg.GetSyntax() {
- if f.Pos() <= id.Pos() && id.Pos() <= f.End() {
- file = f
- break
- }
- }
- if file == nil {
- return nil
- }
-
- identLine := r.fset.Position(id.Pos()).Line
- for _, comment := range file.Comments {
- if comment.Pos() > id.Pos() {
- // Comment is after the identifier.
- continue
- }
-
- lastCommentLine := r.fset.Position(comment.End()).Line
- if lastCommentLine+1 == identLine {
- return comment
- }
- }
- default:
- return nil
- }
- }
- return nil
-}
-
-// updatePkgName returns the edit to rename a pkgName within its import spec.
-func (r *renamer) updatePkgName(pkgName *types.PkgName) (*diff.TextEdit, error) {
- // Modify ImportSpec syntax to add or remove the Name as needed.
- pkg := r.packages[pkgName.Pkg()]
- _, path, _ := pathEnclosingInterval(r.fset, pkg, pkgName.Pos(), pkgName.Pos())
- if len(path) < 2 {
- return nil, errors.Errorf("no path enclosing interval for %s", pkgName.Name())
- }
- spec, ok := path[1].(*ast.ImportSpec)
- if !ok {
- return nil, errors.Errorf("failed to update PkgName for %s", pkgName.Name())
- }
-
- var astIdent *ast.Ident // will be nil if ident is removed
- if pkgName.Imported().Name() != r.to {
- // ImportSpec.Name needed
- astIdent = &ast.Ident{NamePos: spec.Path.Pos(), Name: r.to}
- }
-
- // Make a copy of the import spec that has only the name and path.
- updated := &ast.ImportSpec{
- Name: astIdent,
- Path: spec.Path,
- EndPos: spec.EndPos,
- }
-
- rng := span.NewRange(r.fset, spec.Pos(), spec.End())
- spn, err := rng.Span()
- if err != nil {
- return nil, err
- }
-
- var buf bytes.Buffer
- format.Node(&buf, r.fset, updated)
- newText := buf.String()
-
- return &diff.TextEdit{
- Span: spn,
- NewText: newText,
- }, nil
-}
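The visible effect of updatePkgName is on the printed import spec: an explicit Name identifier is kept only when the desired local name differs from the package's own name. A self-contained sketch, assuming a package whose default name is "bar":

    package main

    import (
        "bytes"
        "fmt"
        "go/ast"
        "go/format"
        "go/token"
    )

    func main() {
        fset := token.NewFileSet()
        path := &ast.BasicLit{Kind: token.STRING, Value: `"example.com/bar"`}

        // Rename the local package name to "bar" (the default) and then to "b".
        for _, to := range []string{"bar", "b"} {
            var name *ast.Ident
            if to != "bar" {
                name = ast.NewIdent(to) // an alias is only needed for a non-default name
            }
            var buf bytes.Buffer
            if err := format.Node(&buf, fset, &ast.ImportSpec{Name: name, Path: path}); err != nil {
                panic(err)
            }
            fmt.Println("import", buf.String())
        }
        // import "example.com/bar"
        // import b "example.com/bar"
    }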
diff --git a/internal/lsp/source/rename_check.go b/internal/lsp/source/rename_check.go
deleted file mode 100644
index 3aafc391e..000000000
--- a/internal/lsp/source/rename_check.go
+++ /dev/null
@@ -1,936 +0,0 @@
-// Copyright 2019 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-//
-// Taken from golang.org/x/tools/refactor/rename.
-
-package source
-
-import (
- "fmt"
- "go/ast"
- "go/token"
- "go/types"
- "reflect"
- "strconv"
- "strings"
- "unicode"
-
- "golang.org/x/tools/go/ast/astutil"
- "golang.org/x/tools/refactor/satisfy"
-)
-
-// errorf reports an error (e.g. conflict) and prevents file modification.
-func (r *renamer) errorf(pos token.Pos, format string, args ...interface{}) {
- r.hadConflicts = true
- r.errors += fmt.Sprintf(format, args...)
-}
-
-// check performs safety checks of the renaming of the 'from' object to r.to.
-func (r *renamer) check(from types.Object) {
- if r.objsToUpdate[from] {
- return
- }
- r.objsToUpdate[from] = true
-
- // NB: order of conditions is important.
- if from_, ok := from.(*types.PkgName); ok {
- r.checkInFileBlock(from_)
- } else if from_, ok := from.(*types.Label); ok {
- r.checkLabel(from_)
- } else if isPackageLevel(from) {
- r.checkInPackageBlock(from)
- } else if v, ok := from.(*types.Var); ok && v.IsField() {
- r.checkStructField(v)
- } else if f, ok := from.(*types.Func); ok && recv(f) != nil {
- r.checkMethod(f)
- } else if isLocal(from) {
- r.checkInLocalScope(from)
- } else {
- r.errorf(from.Pos(), "unexpected %s object %q (please report a bug)\n",
- objectKind(from), from)
- }
-}
-
-// checkInFileBlock performs safety checks for renames of objects in the file block,
-// i.e. imported package names.
-func (r *renamer) checkInFileBlock(from *types.PkgName) {
- // Check import name is not "init".
- if r.to == "init" {
- r.errorf(from.Pos(), "%q is not a valid imported package name", r.to)
- }
-
- // Check for conflicts between file and package block.
- if prev := from.Pkg().Scope().Lookup(r.to); prev != nil {
- r.errorf(from.Pos(), "renaming this %s %q to %q would conflict",
- objectKind(from), from.Name(), r.to)
- r.errorf(prev.Pos(), "\twith this package member %s",
- objectKind(prev))
- return // since checkInPackageBlock would report redundant errors
- }
-
- // Check for conflicts in lexical scope.
- r.checkInLexicalScope(from, r.packages[from.Pkg()])
-}
-
-// checkInPackageBlock performs safety checks for renames of
-// func/var/const/type objects in the package block.
-func (r *renamer) checkInPackageBlock(from types.Object) {
- // Check that there are no references to the name from another
- // package if the renaming would make it unexported.
- if ast.IsExported(from.Name()) && !ast.IsExported(r.to) {
- for typ, pkg := range r.packages {
- if typ == from.Pkg() {
- continue
- }
- if id := someUse(pkg.GetTypesInfo(), from); id != nil &&
- !r.checkExport(id, typ, from) {
- break
- }
- }
- }
-
- pkg := r.packages[from.Pkg()]
- if pkg == nil {
- return
- }
-
- // Check that in the package block, "init" is a function, and never referenced.
- if r.to == "init" {
- kind := objectKind(from)
- if kind == "func" {
- // Reject if intra-package references to it exist.
- for id, obj := range pkg.GetTypesInfo().Uses {
- if obj == from {
- r.errorf(from.Pos(),
- "renaming this func %q to %q would make it a package initializer",
- from.Name(), r.to)
- r.errorf(id.Pos(), "\tbut references to it exist")
- break
- }
- }
- } else {
- r.errorf(from.Pos(), "you cannot have a %s at package level named %q",
- kind, r.to)
- }
- }
-
- // Check for conflicts between package block and all file blocks.
- for _, f := range pkg.GetSyntax() {
- fileScope := pkg.GetTypesInfo().Scopes[f]
- b, prev := fileScope.LookupParent(r.to, token.NoPos)
- if b == fileScope {
- r.errorf(from.Pos(), "renaming this %s %q to %q would conflict", objectKind(from), from.Name(), r.to)
- var prevPos token.Pos
- if prev != nil {
- prevPos = prev.Pos()
- }
- r.errorf(prevPos, "\twith this %s", objectKind(prev))
- return // since checkInPackageBlock would report redundant errors
- }
- }
-
- // Check for conflicts in lexical scope.
- if from.Exported() {
- for _, pkg := range r.packages {
- r.checkInLexicalScope(from, pkg)
- }
- } else {
- r.checkInLexicalScope(from, pkg)
- }
-}
-
-func (r *renamer) checkInLocalScope(from types.Object) {
- pkg := r.packages[from.Pkg()]
- r.checkInLexicalScope(from, pkg)
-}
-
-// checkInLexicalScope performs safety checks that a renaming does not
-// change the lexical reference structure of the specified package.
-//
-// For objects in lexical scope, there are three kinds of conflicts:
-// same-, sub-, and super-block conflicts. We will illustrate all three
-// using this example:
-//
-// var x int
-// var z int
-//
-// func f(y int) {
-// print(x)
-// print(y)
-// }
-//
-// Renaming x to z encounters a SAME-BLOCK CONFLICT, because an object
-// with the new name already exists, defined in the same lexical block
-// as the old object.
-//
-// Renaming x to y encounters a SUB-BLOCK CONFLICT, because there exists
-// a reference to x from within (what would become) a hole in its scope.
-// The definition of y in an (inner) sub-block would cast a shadow in
-// the scope of the renamed variable.
-//
-// Renaming y to x encounters a SUPER-BLOCK CONFLICT. This is the
-// converse situation: there is an existing definition of the new name
-// (x) in an (enclosing) super-block, and the renaming would create a
-// hole in its scope, within which there exist references to it. The
-// new name casts a shadow in scope of the existing definition of x in
-// the super-block.
-//
-// Removing the old name (and all references to it) is always safe, and
-// requires no checks.
-//
-func (r *renamer) checkInLexicalScope(from types.Object, pkg Package) {
- b := from.Parent() // the block defining the 'from' object
- if b != nil {
- toBlock, to := b.LookupParent(r.to, from.Parent().End())
- if toBlock == b {
- // same-block conflict
- r.errorf(from.Pos(), "renaming this %s %q to %q",
- objectKind(from), from.Name(), r.to)
- r.errorf(to.Pos(), "\tconflicts with %s in same block",
- objectKind(to))
- return
- } else if toBlock != nil {
- // Check for super-block conflict.
- // The name r.to is defined in a superblock.
- // Is that name referenced from within this block?
- forEachLexicalRef(pkg, to, func(id *ast.Ident, block *types.Scope) bool {
- _, obj := block.LookupParent(from.Name(), id.Pos())
- if obj == from {
- // super-block conflict
- r.errorf(from.Pos(), "renaming this %s %q to %q",
- objectKind(from), from.Name(), r.to)
- r.errorf(id.Pos(), "\twould shadow this reference")
- r.errorf(to.Pos(), "\tto the %s declared here",
- objectKind(to))
- return false // stop
- }
- return true
- })
- }
- }
- // Check for sub-block conflict.
- // Is there an intervening definition of r.to between
- // the block defining 'from' and some reference to it?
- forEachLexicalRef(pkg, from, func(id *ast.Ident, block *types.Scope) bool {
- // Find the block that defines the found reference.
- // It may be an ancestor.
- fromBlock, _ := block.LookupParent(from.Name(), id.Pos())
- // See what r.to would resolve to in the same scope.
- toBlock, to := block.LookupParent(r.to, id.Pos())
- if to != nil {
- // sub-block conflict
- if deeper(toBlock, fromBlock) {
- r.errorf(from.Pos(), "renaming this %s %q to %q",
- objectKind(from), from.Name(), r.to)
- r.errorf(id.Pos(), "\twould cause this reference to become shadowed")
- r.errorf(to.Pos(), "\tby this intervening %s definition",
- objectKind(to))
- return false // stop
- }
- }
- return true
- })
-
- // Renaming a type that is used as an embedded field
- // requires renaming the field too. e.g.
- // type T int // if we rename this to U..
- // var s struct {T}
- // print(s.T) // ...this must change too
- if _, ok := from.(*types.TypeName); ok {
- for id, obj := range pkg.GetTypesInfo().Uses {
- if obj == from {
- if field := pkg.GetTypesInfo().Defs[id]; field != nil {
- r.check(field)
- }
- }
- }
- }
-}
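A concrete, hypothetical instance of the sub-block conflict described above: renaming x to y in the package below would silently rebind the reference inside f, which is exactly what the callback passed to forEachLexicalRef detects and reports.

    package demo

    var x int // renaming x to "y" must be rejected here...

    func f(y int) {
        print(x) // ...because this reference would become shadowed by the parameter y
        print(y)
    }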
-
-// deeper reports whether block x is lexically deeper than y.
-func deeper(x, y *types.Scope) bool {
- if x == y || x == nil {
- return false
- } else if y == nil {
- return true
- } else {
- return deeper(x.Parent(), y.Parent())
- }
-}
-
-// forEachLexicalRef calls fn(id, block) for each identifier id in package
-// pkg that is a reference to obj in lexical scope. block is the
-// lexical block enclosing the reference. If fn returns false the
-// iteration is terminated and forEachLexicalRef returns false.
-func forEachLexicalRef(pkg Package, obj types.Object, fn func(id *ast.Ident, block *types.Scope) bool) bool {
- ok := true
- var stack []ast.Node
-
- var visit func(n ast.Node) bool
- visit = func(n ast.Node) bool {
- if n == nil {
- stack = stack[:len(stack)-1] // pop
- return false
- }
- if !ok {
- return false // bail out
- }
-
- stack = append(stack, n) // push
- switch n := n.(type) {
- case *ast.Ident:
- if pkg.GetTypesInfo().Uses[n] == obj {
- block := enclosingBlock(pkg.GetTypesInfo(), stack)
- if !fn(n, block) {
- ok = false
- }
- }
- return visit(nil) // pop stack
-
- case *ast.SelectorExpr:
- // don't visit n.Sel
- ast.Inspect(n.X, visit)
- return visit(nil) // pop stack, don't descend
-
- case *ast.CompositeLit:
- // Handle recursion ourselves for struct literals
- // so we don't visit field identifiers.
- tv, ok := pkg.GetTypesInfo().Types[n]
- if !ok {
- return visit(nil) // pop stack, don't descend
- }
- if _, ok := Deref(tv.Type).Underlying().(*types.Struct); ok {
- if n.Type != nil {
- ast.Inspect(n.Type, visit)
- }
- for _, elt := range n.Elts {
- if kv, ok := elt.(*ast.KeyValueExpr); ok {
- ast.Inspect(kv.Value, visit)
- } else {
- ast.Inspect(elt, visit)
- }
- }
- return visit(nil) // pop stack, don't descend
- }
- }
- return true
- }
-
- for _, f := range pkg.GetSyntax() {
- ast.Inspect(f, visit)
- if len(stack) != 0 {
- panic(stack)
- }
- if !ok {
- break
- }
- }
- return ok
-}
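The push/pop bookkeeping above works because ast.Inspect reports a nil node when it leaves a subtree, so pushing on entry and popping on nil keeps the stack equal to the ancestor path of the node currently being visited. A standalone sketch of the same pattern over an invented file, printing the depth of each call expression:

    package main

    import (
        "fmt"
        "go/ast"
        "go/parser"
        "go/token"
    )

    func main() {
        fset := token.NewFileSet()
        f, err := parser.ParseFile(fset, "x.go", "package p; func g() { print(1) }", 0)
        if err != nil {
            panic(err)
        }

        var stack []ast.Node
        ast.Inspect(f, func(n ast.Node) bool {
            if n == nil {
                stack = stack[:len(stack)-1] // leaving a node: pop
                return false
            }
            stack = append(stack, n) // entering a node: push
            if _, ok := n.(*ast.CallExpr); ok {
                fmt.Printf("call expression at ancestor depth %d\n", len(stack))
            }
            return true
        })
    }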
-
-// enclosingBlock returns the innermost block enclosing the given AST
-// node, supplied in the form of a path from the root of the file,
-// [file...n].
-func enclosingBlock(info *types.Info, stack []ast.Node) *types.Scope {
- for i := range stack {
- n := stack[len(stack)-1-i]
- // For some reason, go/types always associates a
- // function's scope with its FuncType.
- // TODO(adonovan): feature or a bug?
- switch f := n.(type) {
- case *ast.FuncDecl:
- n = f.Type
- case *ast.FuncLit:
- n = f.Type
- }
- if b := info.Scopes[n]; b != nil {
- return b
- }
- }
- panic("no Scope for *ast.File")
-}
-
-func (r *renamer) checkLabel(label *types.Label) {
- // Check there are no identical labels in the function's label block.
- // (Label blocks don't nest, so this is easy.)
- if prev := label.Parent().Lookup(r.to); prev != nil {
- r.errorf(label.Pos(), "renaming this label %q to %q", label.Name(), prev.Name())
- r.errorf(prev.Pos(), "\twould conflict with this one")
- }
-}
-
-// checkStructField checks that the field renaming will not cause
-// conflicts at its declaration, nor ambiguity in, or changes to, any
-// existing selection.
-func (r *renamer) checkStructField(from *types.Var) {
- // Check that the struct declaration is free of field conflicts,
- // and field/method conflicts.
-
- // go/types offers no easy way to get from a field (or interface
- // method) to its declaring struct (or interface), so we must
- // ascend the AST.
- fromPkg, ok := r.packages[from.Pkg()]
- if !ok {
- return
- }
- pkg, path, _ := pathEnclosingInterval(r.fset, fromPkg, from.Pos(), from.Pos())
- if pkg == nil || path == nil {
- return
- }
- // path matches this pattern:
- // [Ident SelectorExpr? StarExpr? Field FieldList StructType ParenExpr* ... File]
-
- // Ascend to FieldList.
- var i int
- for {
- if _, ok := path[i].(*ast.FieldList); ok {
- break
- }
- i++
- }
- i++
- tStruct := path[i].(*ast.StructType)
- i++
- // Ascend past parens (unlikely).
- for {
- _, ok := path[i].(*ast.ParenExpr)
- if !ok {
- break
- }
- i++
- }
- if spec, ok := path[i].(*ast.TypeSpec); ok {
- // This struct is also a named type.
- // We must check for direct (non-promoted) field/field
- // and method/field conflicts.
- named := pkg.GetTypesInfo().Defs[spec.Name].Type()
- prev, indices, _ := types.LookupFieldOrMethod(named, true, pkg.GetTypes(), r.to)
- if len(indices) == 1 {
- r.errorf(from.Pos(), "renaming this field %q to %q",
- from.Name(), r.to)
- r.errorf(prev.Pos(), "\twould conflict with this %s",
- objectKind(prev))
- return // skip checkSelections to avoid redundant errors
- }
- } else {
- // This struct is not a named type.
- // We need only check for direct (non-promoted) field/field conflicts.
- T := pkg.GetTypesInfo().Types[tStruct].Type.Underlying().(*types.Struct)
- for i := 0; i < T.NumFields(); i++ {
- if prev := T.Field(i); prev.Name() == r.to {
- r.errorf(from.Pos(), "renaming this field %q to %q",
- from.Name(), r.to)
- r.errorf(prev.Pos(), "\twould conflict with this field")
- return // skip checkSelections to avoid redundant errors
- }
- }
- }
-
- // Renaming an anonymous field requires renaming the type too. e.g.
- // print(s.T) // if we rename T to U,
- // type T int // this and
- // var s struct {T} // this must change too.
- if from.Anonymous() {
- if named, ok := from.Type().(*types.Named); ok {
- r.check(named.Obj())
- } else if named, ok := Deref(from.Type()).(*types.Named); ok {
- r.check(named.Obj())
- }
- }
-
- // Check integrity of existing (field and method) selections.
- r.checkSelections(from)
-}
-
-// checkSelections checks that all uses and selections that resolve to
-// the specified object would continue to do so after the renaming.
-func (r *renamer) checkSelections(from types.Object) {
- for typ, pkg := range r.packages {
- if id := someUse(pkg.GetTypesInfo(), from); id != nil {
- if !r.checkExport(id, typ, from) {
- return
- }
- }
-
- for syntax, sel := range pkg.GetTypesInfo().Selections {
- // There may be extant selections of only the old
- // name or only the new name, so we must check both.
- // (If neither, the renaming is sound.)
- //
- // In both cases, we wish to compare the lengths
- // of the implicit field path (Selection.Index)
- // to see if the renaming would change it.
- //
- // If a selection that resolves to 'from', when renamed,
- // would yield a path of the same or shorter length,
- // this indicates ambiguity or a changed referent,
- // analogous to same- or sub-block lexical conflict.
- //
- // If a selection using the name 'to' would
- // yield a path of the same or shorter length,
- // this indicates ambiguity or shadowing,
- // analogous to same- or super-block lexical conflict.
-
- // TODO(adonovan): fix: derive from Types[syntax.X].Mode
- // TODO(adonovan): test with pointer, value, addressable value.
- isAddressable := true
-
- if sel.Obj() == from {
- if obj, indices, _ := types.LookupFieldOrMethod(sel.Recv(), isAddressable, from.Pkg(), r.to); obj != nil {
- // Renaming this existing selection of
- // 'from' may block access to an existing
- // type member named 'to'.
- delta := len(indices) - len(sel.Index())
- if delta > 0 {
- continue // no ambiguity
- }
- r.selectionConflict(from, delta, syntax, obj)
- return
- }
- } else if sel.Obj().Name() == r.to {
- if obj, indices, _ := types.LookupFieldOrMethod(sel.Recv(), isAddressable, from.Pkg(), from.Name()); obj == from {
- // Renaming 'from' may cause this existing
- // selection of the name 'to' to change
- // its meaning.
- delta := len(indices) - len(sel.Index())
- if delta > 0 {
- continue // no ambiguity
- }
- r.selectionConflict(from, -delta, syntax, sel.Obj())
- return
- }
- }
- }
- }
-}
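The delta computed above compares promotion depths: types.LookupFieldOrMethod returns an index path whose length says how deeply an embedded member sits. Below is a self-contained sketch of that measurement over invented types; a selection of the promoted Count, if renamed to Total, would start resolving to the shallower direct field, which is the kind of conflict checkSelections reports.

    package main

    import (
        "fmt"
        "go/ast"
        "go/parser"
        "go/token"
        "go/types"
    )

    const src = `package p

    type Inner struct{ Count int }

    type Outer struct {
        Inner     // Count is promoted through here
        Total int // a direct field
    }
    `

    func main() {
        fset := token.NewFileSet()
        f, err := parser.ParseFile(fset, "p.go", src, 0)
        if err != nil {
            panic(err)
        }
        pkg, err := new(types.Config).Check("p", fset, []*ast.File{f}, nil)
        if err != nil {
            panic(err)
        }
        outer := pkg.Scope().Lookup("Outer").Type()

        _, promoted, _ := types.LookupFieldOrMethod(outer, true, pkg, "Count")
        _, direct, _ := types.LookupFieldOrMethod(outer, true, pkg, "Total")
        fmt.Println(len(promoted), len(direct)) // 2 1: an equal or shorter path signals ambiguity
    }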
-
-func (r *renamer) selectionConflict(from types.Object, delta int, syntax *ast.SelectorExpr, obj types.Object) {
- r.errorf(from.Pos(), "renaming this %s %q to %q",
- objectKind(from), from.Name(), r.to)
-
- switch {
- case delta < 0:
- // analogous to sub-block conflict
- r.errorf(syntax.Sel.Pos(),
- "\twould change the referent of this selection")
- r.errorf(obj.Pos(), "\tof this %s", objectKind(obj))
- case delta == 0:
- // analogous to same-block conflict
- r.errorf(syntax.Sel.Pos(),
- "\twould make this reference ambiguous")
- r.errorf(obj.Pos(), "\twith this %s", objectKind(obj))
- case delta > 0:
- // analogous to super-block conflict
- r.errorf(syntax.Sel.Pos(),
- "\twould shadow this selection")
- r.errorf(obj.Pos(), "\tof the %s declared here",
- objectKind(obj))
- }
-}
-
-// checkMethod performs safety checks for renaming a method.
-// There are three hazards:
-// - declaration conflicts
-// - selection ambiguity/changes
-// - entailed renamings of assignable concrete/interface types.
-// We reject renamings initiated at concrete methods if it would
-// change the assignability relation. For renamings of abstract
-// methods, we rename all methods transitively coupled to it via
-// assignability.
-func (r *renamer) checkMethod(from *types.Func) {
- // e.g. error.Error
- if from.Pkg() == nil {
- r.errorf(from.Pos(), "you cannot rename built-in method %s", from)
- return
- }
-
- // ASSIGNABILITY: We reject renamings of concrete methods that
- // would break a 'satisfy' constraint; but renamings of abstract
- // methods are allowed to proceed, and we rename affected
- // concrete and abstract methods as necessary. It is the
- // initial method that determines the policy.
-
- // Check for conflict at point of declaration.
- // Check to ensure preservation of assignability requirements.
- R := recv(from).Type()
- if IsInterface(R) {
- // Abstract method
-
- // declaration
- prev, _, _ := types.LookupFieldOrMethod(R, false, from.Pkg(), r.to)
- if prev != nil {
- r.errorf(from.Pos(), "renaming this interface method %q to %q",
- from.Name(), r.to)
- r.errorf(prev.Pos(), "\twould conflict with this method")
- return
- }
-
- // Check all interfaces that embed this one for
- // declaration conflicts too.
- for _, pkg := range r.packages {
- // Start with named interface types (better errors)
- for _, obj := range pkg.GetTypesInfo().Defs {
- if obj, ok := obj.(*types.TypeName); ok && IsInterface(obj.Type()) {
- f, _, _ := types.LookupFieldOrMethod(
- obj.Type(), false, from.Pkg(), from.Name())
- if f == nil {
- continue
- }
- t, _, _ := types.LookupFieldOrMethod(
- obj.Type(), false, from.Pkg(), r.to)
- if t == nil {
- continue
- }
- r.errorf(from.Pos(), "renaming this interface method %q to %q",
- from.Name(), r.to)
- r.errorf(t.Pos(), "\twould conflict with this method")
- r.errorf(obj.Pos(), "\tin named interface type %q", obj.Name())
- }
- }
-
- // Now look at all literal interface types (includes named ones again).
- for e, tv := range pkg.GetTypesInfo().Types {
- if e, ok := e.(*ast.InterfaceType); ok {
- _ = e
- _ = tv.Type.(*types.Interface)
- // TODO(adonovan): implement same check as above.
- }
- }
- }
-
- // assignability
- //
- // Find the set of concrete or abstract methods directly
- // coupled to abstract method 'from' by some
- // satisfy.Constraint, and rename them too.
- for key := range r.satisfy() {
- // key = (lhs, rhs) where lhs is always an interface.
-
- lsel := r.msets.MethodSet(key.LHS).Lookup(from.Pkg(), from.Name())
- if lsel == nil {
- continue
- }
- rmethods := r.msets.MethodSet(key.RHS)
- rsel := rmethods.Lookup(from.Pkg(), from.Name())
- if rsel == nil {
- continue
- }
-
- // If both sides have a method of this name,
- // and one of them is m, the other must be coupled.
- var coupled *types.Func
- switch from {
- case lsel.Obj():
- coupled = rsel.Obj().(*types.Func)
- case rsel.Obj():
- coupled = lsel.Obj().(*types.Func)
- default:
- continue
- }
-
- // We must treat concrete-to-interface
- // constraints like an implicit selection C.f of
- // each interface method I.f, and check that the
- // renaming leaves the selection unchanged and
- // unambiguous.
- //
- // Fun fact: the implicit selection of C.f
- // type I interface{f()}
- // type C struct{I}
- // func (C) g()
- // var _ I = C{} // here
- // yields abstract method I.f. This can make error
- // messages less than obvious.
- //
- if !IsInterface(key.RHS) {
- // The logic below was derived from checkSelections.
-
- rtosel := rmethods.Lookup(from.Pkg(), r.to)
- if rtosel != nil {
- rto := rtosel.Obj().(*types.Func)
- delta := len(rsel.Index()) - len(rtosel.Index())
- if delta < 0 {
- continue // no ambiguity
- }
-
- // TODO(adonovan): record the constraint's position.
- keyPos := token.NoPos
-
- r.errorf(from.Pos(), "renaming this method %q to %q",
- from.Name(), r.to)
- if delta == 0 {
- // analogous to same-block conflict
- r.errorf(keyPos, "\twould make the %s method of %s invoked via interface %s ambiguous",
- r.to, key.RHS, key.LHS)
- r.errorf(rto.Pos(), "\twith (%s).%s",
- recv(rto).Type(), r.to)
- } else {
- // analogous to super-block conflict
- r.errorf(keyPos, "\twould change the %s method of %s invoked via interface %s",
- r.to, key.RHS, key.LHS)
- r.errorf(coupled.Pos(), "\tfrom (%s).%s",
- recv(coupled).Type(), r.to)
- r.errorf(rto.Pos(), "\tto (%s).%s",
- recv(rto).Type(), r.to)
- }
- return // one error is enough
- }
- }
-
- if !r.changeMethods {
- // This should be unreachable.
- r.errorf(from.Pos(), "internal error: during renaming of abstract method %s", from)
- r.errorf(coupled.Pos(), "\tchangeMethods=false, coupled method=%s", coupled)
- r.errorf(from.Pos(), "\tPlease file a bug report")
- return
- }
-
- // Rename the coupled method to preserve assignability.
- r.check(coupled)
- }
- } else {
- // Concrete method
-
- // declaration
- prev, indices, _ := types.LookupFieldOrMethod(R, true, from.Pkg(), r.to)
- if prev != nil && len(indices) == 1 {
- r.errorf(from.Pos(), "renaming this method %q to %q",
- from.Name(), r.to)
- r.errorf(prev.Pos(), "\twould conflict with this %s",
- objectKind(prev))
- return
- }
-
- // assignability
- //
- // Find the set of abstract methods coupled to concrete
- // method 'from' by some satisfy.Constraint, and rename
- // them too.
- //
- // Coupling may be indirect, e.g. I.f <-> C.f via type D.
- //
- // type I interface {f()}
- // type C int
- // func (C) f()
- // type D struct{C}
- // var _ I = D{}
- //
- for key := range r.satisfy() {
- // key = (lhs, rhs) where lhs is always an interface.
- if IsInterface(key.RHS) {
- continue
- }
- rsel := r.msets.MethodSet(key.RHS).Lookup(from.Pkg(), from.Name())
- if rsel == nil || rsel.Obj() != from {
- continue // rhs does not have the method
- }
- lsel := r.msets.MethodSet(key.LHS).Lookup(from.Pkg(), from.Name())
- if lsel == nil {
- continue
- }
- imeth := lsel.Obj().(*types.Func)
-
- // imeth is the abstract method (e.g. I.f)
- // and key.RHS is the concrete coupling type (e.g. D).
- if !r.changeMethods {
- r.errorf(from.Pos(), "renaming this method %q to %q",
- from.Name(), r.to)
- var pos token.Pos
- var iface string
-
- I := recv(imeth).Type()
- if named, ok := I.(*types.Named); ok {
- pos = named.Obj().Pos()
- iface = "interface " + named.Obj().Name()
- } else {
- pos = from.Pos()
- iface = I.String()
- }
- r.errorf(pos, "\twould make %s no longer assignable to %s",
- key.RHS, iface)
- r.errorf(imeth.Pos(), "\t(rename %s.%s if you intend to change both types)",
- I, from.Name())
- return // one error is enough
- }
-
- // Rename the coupled interface method to preserve assignability.
- r.check(imeth)
- }
- }
-
- // Check integrity of existing (field and method) selections.
- // We skip this if there were errors above, to avoid redundant errors.
- r.checkSelections(from)
-}
-
-func (r *renamer) checkExport(id *ast.Ident, pkg *types.Package, from types.Object) bool {
- // Reject cross-package references if r.to is unexported.
- // (Such references may be qualified identifiers or field/method
- // selections.)
- if !ast.IsExported(r.to) && pkg != from.Pkg() {
- r.errorf(from.Pos(),
- "renaming %q to %q would make it unexported",
- from.Name(), r.to)
- r.errorf(id.Pos(), "\tbreaking references from packages such as %q",
- pkg.Path())
- return false
- }
- return true
-}
-
-// satisfy returns the set of interface satisfaction constraints.
-func (r *renamer) satisfy() map[satisfy.Constraint]bool {
- if r.satisfyConstraints == nil {
- // Compute on demand: it's expensive.
- var f satisfy.Finder
- for _, pkg := range r.packages {
- // From satisfy.Finder documentation:
- //
- // The package must be free of type errors, and
- // info.{Defs,Uses,Selections,Types} must have been populated by the
- // type-checker.
- //
- // Only proceed if all packages have no errors.
- if pkg.HasListOrParseErrors() || pkg.HasTypeErrors() {
- r.errorf(token.NoPos, // we don't have a position for this error.
- "renaming %q to %q not possible because %q has errors",
- r.from, r.to, pkg.PkgPath())
- return nil
- }
- f.Find(pkg.GetTypesInfo(), pkg.GetSyntax())
- }
- r.satisfyConstraints = f.Result
- }
- return r.satisfyConstraints
-}
-
-// -- helpers ----------------------------------------------------------
-
-// recv returns the method's receiver.
-func recv(meth *types.Func) *types.Var {
- return meth.Type().(*types.Signature).Recv()
-}
-
-// someUse returns an arbitrary use of obj within info.
-func someUse(info *types.Info, obj types.Object) *ast.Ident {
- for id, o := range info.Uses {
- if o == obj {
- return id
- }
- }
- return nil
-}
-
-// pathEnclosingInterval returns the Package and ast.Node that
-// contain source interval [start, end), and all the node's ancestors
-// up to the AST root. It searches all ast.Files of all packages.
-// exact is defined as for astutil.PathEnclosingInterval.
-//
-// The zero value is returned if not found.
-//
-func pathEnclosingInterval(fset *token.FileSet, pkg Package, start, end token.Pos) (resPkg Package, path []ast.Node, exact bool) {
- pkgs := []Package{pkg}
- for _, f := range pkg.GetSyntax() {
- for _, imp := range f.Imports {
- if imp == nil {
- continue
- }
- importPath, err := strconv.Unquote(imp.Path.Value)
- if err != nil {
- continue
- }
- importPkg, err := pkg.GetImport(importPath)
- if err != nil {
- return nil, nil, false
- }
- pkgs = append(pkgs, importPkg)
- }
- }
- for _, p := range pkgs {
- for _, f := range p.GetSyntax() {
- if f.Pos() == token.NoPos {
- // This can happen if the parser saw
- // too many errors and bailed out.
- // (Use parser.AllErrors to prevent that.)
- continue
- }
- if !tokenFileContainsPos(fset.File(f.Pos()), start) {
- continue
- }
- if path, exact := astutil.PathEnclosingInterval(f, start, end); path != nil {
- return pkg, path, exact
- }
- }
- }
- return nil, nil, false
-}
-
-// TODO(adonovan): make this a method: func (*token.File) Contains(token.Pos)
-func tokenFileContainsPos(f *token.File, pos token.Pos) bool {
- p := int(pos)
- base := f.Base()
- return base <= p && p < base+f.Size()
-}
-
-func objectKind(obj types.Object) string {
- if obj == nil {
- return "nil object"
- }
- switch obj := obj.(type) {
- case *types.PkgName:
- return "imported package name"
- case *types.TypeName:
- return "type"
- case *types.Var:
- if obj.IsField() {
- return "field"
- }
- case *types.Func:
- if obj.Type().(*types.Signature).Recv() != nil {
- return "method"
- }
- }
- // label, func, var, const
- return strings.ToLower(strings.TrimPrefix(reflect.TypeOf(obj).String(), "*types."))
-}
-
-// NB: for renamings, blank is not considered valid.
-func isValidIdentifier(id string) bool {
- if id == "" || id == "_" {
- return false
- }
- for i, r := range id {
- if !isLetter(r) && (i == 0 || !isDigit(r)) {
- return false
- }
- }
- return token.Lookup(id) == token.IDENT
-}
-
-// isLocal reports whether obj is local to some function.
-// Precondition: not a struct field or interface method.
-func isLocal(obj types.Object) bool {
- // [... 5=stmt 4=func 3=file 2=pkg 1=universe]
- var depth int
- for scope := obj.Parent(); scope != nil; scope = scope.Parent() {
- depth++
- }
- return depth >= 4
-}
-
-func isPackageLevel(obj types.Object) bool {
- if obj == nil {
- return false
- }
- return obj.Pkg().Scope().Lookup(obj.Name()) == obj
-}
-
-// -- Plundered from go/scanner: ---------------------------------------
-
-func isLetter(ch rune) bool {
- return 'a' <= ch && ch <= 'z' || 'A' <= ch && ch <= 'Z' || ch == '_' || ch >= 0x80 && unicode.IsLetter(ch)
-}
-
-func isDigit(ch rune) bool {
- return '0' <= ch && ch <= '9' || ch >= 0x80 && unicode.IsDigit(ch)
-}
diff --git a/internal/lsp/source/signature_help.go b/internal/lsp/source/signature_help.go
deleted file mode 100644
index e7ed9cc8b..000000000
--- a/internal/lsp/source/signature_help.go
+++ /dev/null
@@ -1,181 +0,0 @@
-// Copyright 2018 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-package source
-
-import (
- "context"
- "go/ast"
- "go/token"
- "go/types"
-
- "golang.org/x/tools/go/ast/astutil"
- "golang.org/x/tools/internal/event"
- "golang.org/x/tools/internal/lsp/protocol"
- errors "golang.org/x/xerrors"
-)
-
-func SignatureHelp(ctx context.Context, snapshot Snapshot, fh FileHandle, pos protocol.Position) (*protocol.SignatureInformation, int, error) {
- ctx, done := event.Start(ctx, "source.SignatureHelp")
- defer done()
-
- pkg, pgf, err := GetParsedFile(ctx, snapshot, fh, NarrowestPackage)
- if err != nil {
- return nil, 0, errors.Errorf("getting file for SignatureHelp: %w", err)
- }
- spn, err := pgf.Mapper.PointSpan(pos)
- if err != nil {
- return nil, 0, err
- }
- rng, err := spn.Range(pgf.Mapper.Converter)
- if err != nil {
- return nil, 0, err
- }
- // Find a call expression surrounding the query position.
- var callExpr *ast.CallExpr
- path, _ := astutil.PathEnclosingInterval(pgf.File, rng.Start, rng.Start)
- if path == nil {
- return nil, 0, errors.Errorf("cannot find node enclosing position")
- }
-FindCall:
- for _, node := range path {
- switch node := node.(type) {
- case *ast.CallExpr:
- if rng.Start >= node.Lparen && rng.Start <= node.Rparen {
- callExpr = node
- break FindCall
- }
- case *ast.FuncLit, *ast.FuncType:
- // The user is within an anonymous function,
- // which may be the parameter to the *ast.CallExpr.
- // Don't show signature help in this case.
- return nil, 0, errors.Errorf("no signature help within a function declaration")
- case *ast.BasicLit:
- if node.Kind == token.STRING {
- return nil, 0, errors.Errorf("no signature help within a string literal")
- }
- }
-
- }
- if callExpr == nil || callExpr.Fun == nil {
- return nil, 0, errors.Errorf("cannot find an enclosing function")
- }
-
- qf := Qualifier(pgf.File, pkg.GetTypes(), pkg.GetTypesInfo())
-
- // Get the object representing the function, if available.
- // There is no object in certain cases such as calling a function returned by
- // a function (e.g. "foo()()").
- var obj types.Object
- switch t := callExpr.Fun.(type) {
- case *ast.Ident:
- obj = pkg.GetTypesInfo().ObjectOf(t)
- case *ast.SelectorExpr:
- obj = pkg.GetTypesInfo().ObjectOf(t.Sel)
- }
-
- // Handle builtin functions separately.
- if obj, ok := obj.(*types.Builtin); ok {
- return builtinSignature(ctx, snapshot, callExpr, obj.Name(), rng.Start)
- }
-
- // Get the type information for the function being called.
- sigType := pkg.GetTypesInfo().TypeOf(callExpr.Fun)
- if sigType == nil {
- return nil, 0, errors.Errorf("cannot get type for Fun %[1]T (%[1]v)", callExpr.Fun)
- }
-
- sig, _ := sigType.Underlying().(*types.Signature)
- if sig == nil {
- return nil, 0, errors.Errorf("cannot find signature for Fun %[1]T (%[1]v)", callExpr.Fun)
- }
-
- activeParam := activeParameter(callExpr, sig.Params().Len(), sig.Variadic(), rng.Start)
-
- var (
- name string
- comment *ast.CommentGroup
- )
- if obj != nil {
- declPkg, err := FindPackageFromPos(ctx, snapshot, obj.Pos())
- if err != nil {
- return nil, 0, err
- }
- node, err := snapshot.PosToDecl(ctx, declPkg, obj.Pos())
- if err != nil {
- return nil, 0, err
- }
- rng, err := objToMappedRange(snapshot, pkg, obj)
- if err != nil {
- return nil, 0, err
- }
- decl := Declaration{
- obj: obj,
- node: node,
- }
- decl.MappedRange = append(decl.MappedRange, rng)
- d, err := FindHoverContext(ctx, snapshot, pkg, decl.obj, decl.node, nil)
- if err != nil {
- return nil, 0, err
- }
- name = obj.Name()
- comment = d.Comment
- } else {
- name = "func"
- }
- s := NewSignature(ctx, snapshot, pkg, sig, comment, qf)
- paramInfo := make([]protocol.ParameterInformation, 0, len(s.params))
- for _, p := range s.params {
- paramInfo = append(paramInfo, protocol.ParameterInformation{Label: p})
- }
- return &protocol.SignatureInformation{
- Label: name + s.Format(),
- Documentation: s.doc,
- Parameters: paramInfo,
- }, activeParam, nil
-}
-
-func builtinSignature(ctx context.Context, snapshot Snapshot, callExpr *ast.CallExpr, name string, pos token.Pos) (*protocol.SignatureInformation, int, error) {
- sig, err := NewBuiltinSignature(ctx, snapshot, name)
- if err != nil {
- return nil, 0, err
- }
- paramInfo := make([]protocol.ParameterInformation, 0, len(sig.params))
- for _, p := range sig.params {
- paramInfo = append(paramInfo, protocol.ParameterInformation{Label: p})
- }
- activeParam := activeParameter(callExpr, len(sig.params), sig.variadic, pos)
- return &protocol.SignatureInformation{
- Label: sig.name + sig.Format(),
- Documentation: sig.doc,
- Parameters: paramInfo,
- }, activeParam, nil
-
-}
-
-func activeParameter(callExpr *ast.CallExpr, numParams int, variadic bool, pos token.Pos) (activeParam int) {
- if len(callExpr.Args) == 0 {
- return 0
- }
- // First, check if the position is even in the range of the arguments.
- start, end := callExpr.Lparen, callExpr.Rparen
- if !(start <= pos && pos <= end) {
- return 0
- }
- for _, expr := range callExpr.Args {
- if start == token.NoPos {
- start = expr.Pos()
- }
- end = expr.End()
- if start <= pos && pos <= end {
- break
- }
- // Don't advance the active parameter for the last parameter of a variadic function.
- if !variadic || activeParam < numParams-1 {
- activeParam++
- }
- start = expr.Pos() + 1 // to account for commas
- }
- return activeParam
-}
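A condensed walk-through of the comma-walking logic above, with the variadic handling and the out-of-range check omitted, applied to a parsed call at two cursor offsets:

    package main

    import (
        "fmt"
        "go/ast"
        "go/parser"
        "go/token"
    )

    func main() {
        const src = `f(aa, bb)` // byte offsets: "aa" at 2-3, "bb" at 6-7
        fset := token.NewFileSet()
        expr, err := parser.ParseExprFrom(fset, "x.go", src, 0)
        if err != nil {
            panic(err)
        }
        call := expr.(*ast.CallExpr)
        base := fset.File(call.Pos()).Base()

        active := func(pos token.Pos) int {
            n := 0
            start := call.Lparen
            for _, arg := range call.Args {
                if start <= pos && pos <= arg.End() {
                    break
                }
                n++
                start = arg.Pos() + 1 // same bookkeeping as above, to account for commas
            }
            return n
        }

        for _, offset := range []int{3, 7} { // inside "aa", then inside "bb"
            fmt.Printf("offset %d -> active parameter %d\n", offset, active(token.Pos(base+offset)))
        }
        // offset 3 -> active parameter 0
        // offset 7 -> active parameter 1
    }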
diff --git a/internal/lsp/source/source_test.go b/internal/lsp/source/source_test.go
deleted file mode 100644
index dc5fe53b5..000000000
--- a/internal/lsp/source/source_test.go
+++ /dev/null
@@ -1,984 +0,0 @@
-// Copyright 2018 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-package source_test
-
-import (
- "context"
- "fmt"
- "os"
- "os/exec"
- "path/filepath"
- "sort"
- "strings"
- "testing"
-
- "golang.org/x/tools/internal/lsp/cache"
- "golang.org/x/tools/internal/lsp/diff"
- "golang.org/x/tools/internal/lsp/diff/myers"
- "golang.org/x/tools/internal/lsp/fuzzy"
- "golang.org/x/tools/internal/lsp/protocol"
- "golang.org/x/tools/internal/lsp/source"
- "golang.org/x/tools/internal/lsp/source/completion"
- "golang.org/x/tools/internal/lsp/tests"
- "golang.org/x/tools/internal/span"
- "golang.org/x/tools/internal/testenv"
- errors "golang.org/x/xerrors"
-)
-
-func TestMain(m *testing.M) {
- testenv.ExitIfSmallMachine()
- os.Exit(m.Run())
-}
-
-func TestSource(t *testing.T) {
- tests.RunTests(t, "../testdata", true, testSource)
-}
-
-type runner struct {
- snapshot source.Snapshot
- view source.View
- data *tests.Data
- ctx context.Context
- normalizers []tests.Normalizer
-}
-
-func testSource(t *testing.T, datum *tests.Data) {
- ctx := tests.Context(t)
-
- cache := cache.New(nil)
- session := cache.NewSession(ctx)
- options := source.DefaultOptions().Clone()
- tests.DefaultOptions(options)
- options.SetEnvSlice(datum.Config.Env)
- view, _, release, err := session.NewView(ctx, "source_test", span.URIFromPath(datum.Config.Dir), options)
- release()
- if err != nil {
- t.Fatal(err)
- }
- defer view.Shutdown(ctx)
-
- // Enable type error analyses for tests.
- // TODO(golang/go#38212): Delete this once they are enabled by default.
- tests.EnableAllAnalyzers(view, options)
- view.SetOptions(ctx, options)
-
- var modifications []source.FileModification
- for filename, content := range datum.Config.Overlay {
- if filepath.Ext(filename) != ".go" {
- continue
- }
- modifications = append(modifications, source.FileModification{
- URI: span.URIFromPath(filename),
- Action: source.Open,
- Version: -1,
- Text: content,
- LanguageID: "go",
- })
- }
- if err := session.ModifyFiles(ctx, modifications); err != nil {
- t.Fatal(err)
- }
- snapshot, release := view.Snapshot(ctx)
- defer release()
- r := &runner{
- view: view,
- snapshot: snapshot,
- data: datum,
- ctx: ctx,
- normalizers: tests.CollectNormalizers(datum.Exported),
- }
- tests.Run(t, r, datum)
-}
-
-func (r *runner) CallHierarchy(t *testing.T, spn span.Span, expectedCalls *tests.CallHierarchyResult) {
- mapper, err := r.data.Mapper(spn.URI())
- if err != nil {
- t.Fatal(err)
- }
- loc, err := mapper.Location(spn)
- if err != nil {
- t.Fatalf("failed for %v: %v", spn, err)
- }
- fh, err := r.snapshot.GetFile(r.ctx, spn.URI())
- if err != nil {
- t.Fatal(err)
- }
-
- items, err := source.PrepareCallHierarchy(r.ctx, r.snapshot, fh, loc.Range.Start)
- if err != nil {
- t.Fatal(err)
- }
- if len(items) == 0 {
- t.Fatalf("expected call hierarchy item to be returned for identifier at %v\n", loc.Range)
- }
-
- callLocation := protocol.Location{
- URI: items[0].URI,
- Range: items[0].Range,
- }
- if callLocation != loc {
- t.Fatalf("expected source.PrepareCallHierarchy to return identifier at %v but got %v\n", loc, callLocation)
- }
-
- incomingCalls, err := source.IncomingCalls(r.ctx, r.snapshot, fh, loc.Range.Start)
- if err != nil {
- t.Error(err)
- }
- var incomingCallItems []protocol.CallHierarchyItem
- for _, item := range incomingCalls {
- incomingCallItems = append(incomingCallItems, item.From)
- }
- msg := tests.DiffCallHierarchyItems(incomingCallItems, expectedCalls.IncomingCalls)
- if msg != "" {
- t.Error(fmt.Sprintf("incoming calls differ: %s", msg))
- }
-
- outgoingCalls, err := source.OutgoingCalls(r.ctx, r.snapshot, fh, loc.Range.Start)
- if err != nil {
- t.Error(err)
- }
- var outgoingCallItems []protocol.CallHierarchyItem
- for _, item := range outgoingCalls {
- outgoingCallItems = append(outgoingCallItems, item.To)
- }
- msg = tests.DiffCallHierarchyItems(outgoingCallItems, expectedCalls.OutgoingCalls)
- if msg != "" {
- t.Error(fmt.Sprintf("outgoing calls differ: %s", msg))
- }
-}
-
-func (r *runner) Diagnostics(t *testing.T, uri span.URI, want []*source.Diagnostic) {
- fileID, got, err := source.FileDiagnostics(r.ctx, r.snapshot, uri)
- if err != nil {
- t.Fatal(err)
- }
- // A special case to test that there are no diagnostics for a file.
- if len(want) == 1 && want[0].Source == "no_diagnostics" {
- if len(got) != 0 {
- t.Errorf("expected no diagnostics for %s, got %v", uri, got)
- }
- return
- }
- if diff := tests.DiffDiagnostics(fileID.URI, want, got); diff != "" {
- t.Error(diff)
- }
-}
-
-func (r *runner) Completion(t *testing.T, src span.Span, test tests.Completion, items tests.CompletionItems) {
- var want []protocol.CompletionItem
- for _, pos := range test.CompletionItems {
- want = append(want, tests.ToProtocolCompletionItem(*items[pos]))
- }
- _, got := r.callCompletion(t, src, func(opts *source.Options) {
- opts.Matcher = source.CaseInsensitive
- opts.DeepCompletion = false
- opts.CompleteUnimported = false
- opts.InsertTextFormat = protocol.SnippetTextFormat
- opts.LiteralCompletions = strings.Contains(string(src.URI()), "literal")
- opts.ExperimentalPostfixCompletions = strings.Contains(string(src.URI()), "postfix")
- })
- got = tests.FilterBuiltins(src, got)
- if diff := tests.DiffCompletionItems(want, got); diff != "" {
- t.Errorf("%s: %s", src, diff)
- }
-}
-
-func (r *runner) CompletionSnippet(t *testing.T, src span.Span, expected tests.CompletionSnippet, placeholders bool, items tests.CompletionItems) {
- _, list := r.callCompletion(t, src, func(opts *source.Options) {
- opts.UsePlaceholders = placeholders
- opts.DeepCompletion = true
- opts.CompleteUnimported = false
- })
- got := tests.FindItem(list, *items[expected.CompletionItem])
- want := expected.PlainSnippet
- if placeholders {
- want = expected.PlaceholderSnippet
- }
- if diff := tests.DiffSnippets(want, got); diff != "" {
- t.Errorf("%s: %s", src, diff)
- }
-}
-
-func (r *runner) UnimportedCompletion(t *testing.T, src span.Span, test tests.Completion, items tests.CompletionItems) {
- var want []protocol.CompletionItem
- for _, pos := range test.CompletionItems {
- want = append(want, tests.ToProtocolCompletionItem(*items[pos]))
- }
- _, got := r.callCompletion(t, src, func(opts *source.Options) {})
- got = tests.FilterBuiltins(src, got)
- if diff := tests.CheckCompletionOrder(want, got, false); diff != "" {
- t.Errorf("%s: %s", src, diff)
- }
-}
-
-func (r *runner) DeepCompletion(t *testing.T, src span.Span, test tests.Completion, items tests.CompletionItems) {
- var want []protocol.CompletionItem
- for _, pos := range test.CompletionItems {
- want = append(want, tests.ToProtocolCompletionItem(*items[pos]))
- }
- prefix, list := r.callCompletion(t, src, func(opts *source.Options) {
- opts.DeepCompletion = true
- opts.Matcher = source.CaseInsensitive
- opts.CompleteUnimported = false
- })
- list = tests.FilterBuiltins(src, list)
- fuzzyMatcher := fuzzy.NewMatcher(prefix)
- var got []protocol.CompletionItem
- for _, item := range list {
- if fuzzyMatcher.Score(item.Label) <= 0 {
- continue
- }
- got = append(got, item)
- }
- if msg := tests.DiffCompletionItems(want, got); msg != "" {
- t.Errorf("%s: %s", src, msg)
- }
-}
-
-func (r *runner) FuzzyCompletion(t *testing.T, src span.Span, test tests.Completion, items tests.CompletionItems) {
- var want []protocol.CompletionItem
- for _, pos := range test.CompletionItems {
- want = append(want, tests.ToProtocolCompletionItem(*items[pos]))
- }
- _, got := r.callCompletion(t, src, func(opts *source.Options) {
- opts.DeepCompletion = true
- opts.Matcher = source.Fuzzy
- opts.CompleteUnimported = false
- })
- got = tests.FilterBuiltins(src, got)
- if msg := tests.DiffCompletionItems(want, got); msg != "" {
- t.Errorf("%s: %s", src, msg)
- }
-}
-
-func (r *runner) CaseSensitiveCompletion(t *testing.T, src span.Span, test tests.Completion, items tests.CompletionItems) {
- var want []protocol.CompletionItem
- for _, pos := range test.CompletionItems {
- want = append(want, tests.ToProtocolCompletionItem(*items[pos]))
- }
- _, list := r.callCompletion(t, src, func(opts *source.Options) {
- opts.Matcher = source.CaseSensitive
- opts.CompleteUnimported = false
- })
- list = tests.FilterBuiltins(src, list)
- if diff := tests.DiffCompletionItems(want, list); diff != "" {
- t.Errorf("%s: %s", src, diff)
- }
-}
-
-func (r *runner) RankCompletion(t *testing.T, src span.Span, test tests.Completion, items tests.CompletionItems) {
- var want []protocol.CompletionItem
- for _, pos := range test.CompletionItems {
- want = append(want, tests.ToProtocolCompletionItem(*items[pos]))
- }
- _, got := r.callCompletion(t, src, func(opts *source.Options) {
- opts.DeepCompletion = true
- opts.Matcher = source.Fuzzy
- opts.ExperimentalPostfixCompletions = true
- })
- if msg := tests.CheckCompletionOrder(want, got, true); msg != "" {
- t.Errorf("%s: %s", src, msg)
- }
-}
-
-func (r *runner) callCompletion(t *testing.T, src span.Span, options func(*source.Options)) (string, []protocol.CompletionItem) {
- fh, err := r.snapshot.GetFile(r.ctx, src.URI())
- if err != nil {
- t.Fatal(err)
- }
- original := r.view.Options()
- modified := original.Clone()
- options(modified)
- newView, err := r.view.SetOptions(r.ctx, modified)
- if newView != r.view {
- t.Fatalf("options change unexpectedly created new view")
- }
- if err != nil {
- t.Fatal(err)
- }
- defer r.view.SetOptions(r.ctx, original)
-
- list, surrounding, err := completion.Completion(r.ctx, r.snapshot, fh, protocol.Position{
- Line: uint32(src.Start().Line() - 1),
- Character: uint32(src.Start().Column() - 1),
- }, protocol.CompletionContext{})
- if err != nil && !errors.As(err, &completion.ErrIsDefinition{}) {
- t.Fatalf("failed for %v: %v", src, err)
- }
- var prefix string
- if surrounding != nil {
- prefix = strings.ToLower(surrounding.Prefix())
- }
-
- var numDeepCompletionsSeen int
- var items []completion.CompletionItem
- // Apply deep completion filtering.
- for _, item := range list {
- if item.Depth > 0 {
- if !modified.DeepCompletion {
- continue
- }
- if numDeepCompletionsSeen >= completion.MaxDeepCompletions {
- continue
- }
- numDeepCompletionsSeen++
- }
- items = append(items, item)
- }
- return prefix, tests.ToProtocolCompletionItems(items)
-}
-
-func (r *runner) FoldingRanges(t *testing.T, spn span.Span) {
- uri := spn.URI()
-
- fh, err := r.snapshot.GetFile(r.ctx, spn.URI())
- if err != nil {
- t.Fatal(err)
- }
- data, err := fh.Read()
- if err != nil {
- t.Error(err)
- return
- }
-
- // Test all folding ranges.
- ranges, err := source.FoldingRange(r.ctx, r.snapshot, fh, false)
- if err != nil {
- t.Error(err)
- return
- }
- r.foldingRanges(t, "foldingRange", uri, string(data), ranges)
-
- // Test folding ranges with lineFoldingOnly
- ranges, err = source.FoldingRange(r.ctx, r.snapshot, fh, true)
- if err != nil {
- t.Error(err)
- return
- }
- r.foldingRanges(t, "foldingRange-lineFolding", uri, string(data), ranges)
-}
-
-func (r *runner) foldingRanges(t *testing.T, prefix string, uri span.URI, data string, ranges []*source.FoldingRangeInfo) {
- t.Helper()
- // Fold all ranges.
- nonOverlapping := nonOverlappingRanges(t, ranges)
- for i, rngs := range nonOverlapping {
- got, err := foldRanges(string(data), rngs)
- if err != nil {
- t.Error(err)
- continue
- }
- tag := fmt.Sprintf("%s-%d", prefix, i)
- want := string(r.data.Golden(tag, uri.Filename(), func() ([]byte, error) {
- return []byte(got), nil
- }))
-
- if diff := tests.Diff(t, want, got); diff != "" {
- t.Errorf("%s: foldingRanges failed for %s, diff:\n%v", tag, uri.Filename(), diff)
- }
- }
-
- // Filter by kind.
- kinds := []protocol.FoldingRangeKind{protocol.Imports, protocol.Comment}
- for _, kind := range kinds {
- var kindOnly []*source.FoldingRangeInfo
- for _, fRng := range ranges {
- if fRng.Kind == kind {
- kindOnly = append(kindOnly, fRng)
- }
- }
-
- nonOverlapping := nonOverlappingRanges(t, kindOnly)
- for i, rngs := range nonOverlapping {
- got, err := foldRanges(string(data), rngs)
- if err != nil {
- t.Error(err)
- continue
- }
- tag := fmt.Sprintf("%s-%s-%d", prefix, kind, i)
- want := string(r.data.Golden(tag, uri.Filename(), func() ([]byte, error) {
- return []byte(got), nil
- }))
-
- if diff := tests.Diff(t, want, got); diff != "" {
- t.Errorf("%s: failed for %s, diff:\n%v", tag, uri.Filename(), diff)
- }
- }
-
- }
-}
-
-func nonOverlappingRanges(t *testing.T, ranges []*source.FoldingRangeInfo) (res [][]*source.FoldingRangeInfo) {
- for _, fRng := range ranges {
- setNum := len(res)
- for i := 0; i < len(res); i++ {
- canInsert := true
- for _, rng := range res[i] {
- if conflict(t, rng, fRng) {
- canInsert = false
- break
- }
- }
- if canInsert {
- setNum = i
- break
- }
- }
- if setNum == len(res) {
- res = append(res, []*source.FoldingRangeInfo{})
- }
- res[setNum] = append(res[setNum], fRng)
- }
- return res
-}
-
-func conflict(t *testing.T, a, b *source.FoldingRangeInfo) bool {
- arng, err := a.Range()
- if err != nil {
- t.Fatal(err)
- }
- brng, err := b.Range()
- if err != nil {
- t.Fatal(err)
- }
- // a starts at or before b, and ends after b starts: the ranges overlap.
- return protocol.ComparePosition(arng.Start, brng.Start) <= 0 && protocol.ComparePosition(arng.End, brng.Start) > 0
-}
-
-func foldRanges(contents string, ranges []*source.FoldingRangeInfo) (string, error) {
- foldedText := "<>"
- res := contents
- // Apply the folds from the end of the file forward
- // to preserve the offsets.
- for i := len(ranges) - 1; i >= 0; i-- {
- fRange := ranges[i]
- spn, err := fRange.Span()
- if err != nil {
- return "", err
- }
- start := spn.Start().Offset()
- end := spn.End().Offset()
-
- tmp := res[0:start] + foldedText
- res = tmp + res[end:]
- }
- return res, nil
-}
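Applying the folds back to front is what keeps the byte offsets valid: replacing a later span first cannot move an earlier one. A tiny standalone sketch of the same idea on a plain string:

    package main

    import "fmt"

    func main() {
        text := "aaaBBBcccDDDeee"
        type rng struct{ start, end int }
        folds := []rng{{3, 6}, {9, 12}} // the BBB and DDD spans

        // Replace the last range first so earlier offsets stay correct.
        for i := len(folds) - 1; i >= 0; i-- {
            f := folds[i]
            text = text[:f.start] + "<>" + text[f.end:]
        }
        fmt.Println(text) // aaa<>ccc<>eee
    }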
-
-func (r *runner) Format(t *testing.T, spn span.Span) {
- gofmted := string(r.data.Golden("gofmt", spn.URI().Filename(), func() ([]byte, error) {
- cmd := exec.Command("gofmt", spn.URI().Filename())
- out, _ := cmd.Output() // ignore error, sometimes we have intentionally ungofmt-able files
- return out, nil
- }))
- fh, err := r.snapshot.GetFile(r.ctx, spn.URI())
- if err != nil {
- t.Fatal(err)
- }
- edits, err := source.Format(r.ctx, r.snapshot, fh)
- if err != nil {
- if gofmted != "" {
- t.Error(err)
- }
- return
- }
- data, err := fh.Read()
- if err != nil {
- t.Fatal(err)
- }
- m, err := r.data.Mapper(spn.URI())
- if err != nil {
- t.Fatal(err)
- }
- diffEdits, err := source.FromProtocolEdits(m, edits)
- if err != nil {
- t.Error(err)
- }
- got := diff.ApplyEdits(string(data), diffEdits)
- if gofmted != got {
- t.Errorf("format failed for %s, expected:\n%v\ngot:\n%v", spn.URI().Filename(), gofmted, got)
- }
-}
-
-func (r *runner) SemanticTokens(t *testing.T, spn span.Span) {
- t.Skip("nothing to test in source")
-}
-
-func (r *runner) Import(t *testing.T, spn span.Span) {
- fh, err := r.snapshot.GetFile(r.ctx, spn.URI())
- if err != nil {
- t.Fatal(err)
- }
- edits, _, err := source.AllImportsFixes(r.ctx, r.snapshot, fh)
- if err != nil {
- t.Error(err)
- }
- data, err := fh.Read()
- if err != nil {
- t.Fatal(err)
- }
- m, err := r.data.Mapper(fh.URI())
- if err != nil {
- t.Fatal(err)
- }
- diffEdits, err := source.FromProtocolEdits(m, edits)
- if err != nil {
- t.Error(err)
- }
- got := diff.ApplyEdits(string(data), diffEdits)
- want := string(r.data.Golden("goimports", spn.URI().Filename(), func() ([]byte, error) {
- return []byte(got), nil
- }))
- if want != got {
- d, err := myers.ComputeEdits(spn.URI(), want, got)
- if err != nil {
- t.Fatal(err)
- }
- t.Errorf("import failed for %s: %s", spn.URI().Filename(), diff.ToUnified("want", "got", want, d))
- }
-}
-
-func (r *runner) Definition(t *testing.T, spn span.Span, d tests.Definition) {
- _, srcRng, err := spanToRange(r.data, d.Src)
- if err != nil {
- t.Fatal(err)
- }
- fh, err := r.snapshot.GetFile(r.ctx, spn.URI())
- if err != nil {
- t.Fatal(err)
- }
- ident, err := source.Identifier(r.ctx, r.snapshot, fh, srcRng.Start)
- if err != nil {
- t.Fatalf("failed for %v: %v", d.Src, err)
- }
- h, err := source.HoverIdentifier(r.ctx, ident)
- if err != nil {
- t.Fatalf("failed for %v: %v", d.Src, err)
- }
- hover, err := source.FormatHover(h, r.view.Options())
- if err != nil {
- t.Fatal(err)
- }
- rng, err := ident.Declaration.MappedRange[0].Range()
- if err != nil {
- t.Fatal(err)
- }
- if d.IsType {
- rng, err = ident.Type.Range()
- if err != nil {
- t.Fatal(err)
- }
- hover = ""
- }
- didSomething := false
- if hover != "" {
- didSomething = true
- tag := fmt.Sprintf("%s-hoverdef", d.Name)
- expectHover := string(r.data.Golden(tag, d.Src.URI().Filename(), func() ([]byte, error) {
- return []byte(hover), nil
- }))
- hover = tests.StripSubscripts(hover)
- expectHover = tests.StripSubscripts(expectHover)
- if hover != expectHover {
- t.Errorf("hoverdef for %s failed:\n%s", d.Src, tests.Diff(t, expectHover, hover))
- }
- }
- if !d.OnlyHover {
- didSomething = true
- if _, defRng, err := spanToRange(r.data, d.Def); err != nil {
- t.Fatal(err)
- } else if rng != defRng {
- t.Errorf("for %v got %v want %v", d.Src, rng, defRng)
- }
- }
- if !didSomething {
- t.Errorf("no tests ran for %s", d.Src.URI())
- }
-}
-
-func (r *runner) Implementation(t *testing.T, spn span.Span, impls []span.Span) {
- sm, err := r.data.Mapper(spn.URI())
- if err != nil {
- t.Fatal(err)
- }
- loc, err := sm.Location(spn)
- if err != nil {
- t.Fatalf("failed for %v: %v", spn, err)
- }
- fh, err := r.snapshot.GetFile(r.ctx, spn.URI())
- if err != nil {
- t.Fatal(err)
- }
- locs, err := source.Implementation(r.ctx, r.snapshot, fh, loc.Range.Start)
- if err != nil {
- t.Fatalf("failed for %v: %v", spn, err)
- }
- if len(locs) != len(impls) {
- t.Fatalf("got %d locations for implementation, expected %d", len(locs), len(impls))
- }
- var results []span.Span
- for i := range locs {
- locURI := locs[i].URI.SpanURI()
- lm, err := r.data.Mapper(locURI)
- if err != nil {
- t.Fatal(err)
- }
- imp, err := lm.Span(locs[i])
- if err != nil {
- t.Fatalf("failed for %v: %v", locs[i], err)
- }
- results = append(results, imp)
- }
- // Sort results and expected to make tests deterministic.
- sort.SliceStable(results, func(i, j int) bool {
- return span.Compare(results[i], results[j]) == -1
- })
- sort.SliceStable(impls, func(i, j int) bool {
- return span.Compare(impls[i], impls[j]) == -1
- })
- for i := range results {
- if results[i] != impls[i] {
- t.Errorf("for %dth implementation of %v got %v want %v", i, spn, results[i], impls[i])
- }
- }
-}
-
-func (r *runner) Highlight(t *testing.T, src span.Span, locations []span.Span) {
- ctx := r.ctx
- m, srcRng, err := spanToRange(r.data, src)
- if err != nil {
- t.Fatal(err)
- }
- fh, err := r.snapshot.GetFile(r.ctx, src.URI())
- if err != nil {
- t.Fatal(err)
- }
- highlights, err := source.Highlight(ctx, r.snapshot, fh, srcRng.Start)
- if err != nil {
- t.Errorf("highlight failed for %s: %v", src.URI(), err)
- }
- if len(highlights) != len(locations) {
- t.Fatalf("got %d highlights for highlight at %v:%v:%v, expected %d", len(highlights), src.URI().Filename(), src.Start().Line(), src.Start().Column(), len(locations))
- }
- // Check to make sure highlights have a valid range.
- var results []span.Span
- for i := range highlights {
- h, err := m.RangeSpan(highlights[i])
- if err != nil {
- t.Fatalf("failed for %v: %v", highlights[i], err)
- }
- results = append(results, h)
- }
- // Sort results to make tests deterministic since DocumentHighlight uses a map.
- sort.SliceStable(results, func(i, j int) bool {
- return span.Compare(results[i], results[j]) == -1
- })
- // Check to make sure all the expected highlights are found.
- for i := range results {
- if results[i] != locations[i] {
- t.Errorf("want %v, got %v\n", locations[i], results[i])
- }
- }
-}
-
-func (r *runner) Hover(t *testing.T, src span.Span, text string) {
- ctx := r.ctx
- _, srcRng, err := spanToRange(r.data, src)
- if err != nil {
- t.Fatal(err)
- }
- fh, err := r.snapshot.GetFile(r.ctx, src.URI())
- if err != nil {
- t.Fatal(err)
- }
- hover, err := source.Hover(ctx, r.snapshot, fh, srcRng.Start)
- if err != nil {
- t.Errorf("hover failed for %s: %v", src.URI(), err)
- }
- if text == "" {
- if hover != nil {
- t.Errorf("want nil, got %v\n", hover)
- }
- } else {
- if hover == nil {
- t.Fatalf("want hover result to not be nil")
- }
- if got := hover.Contents.Value; got != text {
- t.Errorf("want %v, got %v\n", got, text)
- }
- if want, got := srcRng, hover.Range; want != got {
- t.Errorf("want range %v, got %v instead", want, got)
- }
- }
-}
-
-func (r *runner) References(t *testing.T, src span.Span, itemList []span.Span) {
- ctx := r.ctx
- _, srcRng, err := spanToRange(r.data, src)
- if err != nil {
- t.Fatal(err)
- }
- snapshot := r.snapshot
- fh, err := snapshot.GetFile(r.ctx, src.URI())
- if err != nil {
- t.Fatal(err)
- }
- for _, includeDeclaration := range []bool{true, false} {
- t.Run(fmt.Sprintf("refs-declaration-%v", includeDeclaration), func(t *testing.T) {
- want := make(map[span.Span]bool)
- for i, pos := range itemList {
- // We don't want the first result if we aren't including the declaration.
- if i == 0 && !includeDeclaration {
- continue
- }
- want[pos] = true
- }
- refs, err := source.References(ctx, snapshot, fh, srcRng.Start, includeDeclaration)
- if err != nil {
- t.Fatalf("failed for %s: %v", src, err)
- }
- got := make(map[span.Span]bool)
- for _, refInfo := range refs {
- refSpan, err := refInfo.Span()
- if err != nil {
- t.Fatal(err)
- }
- got[refSpan] = true
- }
- if len(got) != len(want) {
- t.Errorf("references failed: different lengths got %v want %v", len(got), len(want))
- }
- for spn := range got {
- if !want[spn] {
- t.Errorf("references failed: incorrect references got %v want locations %v", got, want)
- }
- }
- })
- }
-}
-
-func (r *runner) Rename(t *testing.T, spn span.Span, newText string) {
- tag := fmt.Sprintf("%s-rename", newText)
-
- _, srcRng, err := spanToRange(r.data, spn)
- if err != nil {
- t.Fatal(err)
- }
- fh, err := r.snapshot.GetFile(r.ctx, spn.URI())
- if err != nil {
- t.Fatal(err)
- }
- changes, err := source.Rename(r.ctx, r.snapshot, fh, srcRng.Start, newText)
- if err != nil {
- renamed := string(r.data.Golden(tag, spn.URI().Filename(), func() ([]byte, error) {
- return []byte(err.Error()), nil
- }))
- if err.Error() != renamed {
- t.Errorf("rename failed for %s, expected:\n%v\ngot:\n%v\n", newText, renamed, err)
- }
- return
- }
-
- var res []string
- for editURI, edits := range changes {
- fh, err := r.snapshot.GetFile(r.ctx, editURI)
- if err != nil {
- t.Fatal(err)
- }
- data, err := fh.Read()
- if err != nil {
- t.Fatal(err)
- }
- m, err := r.data.Mapper(fh.URI())
- if err != nil {
- t.Fatal(err)
- }
- diffEdits, err := source.FromProtocolEdits(m, edits)
- if err != nil {
- t.Fatal(err)
- }
- contents := applyEdits(string(data), diffEdits)
- if len(changes) > 1 {
- filename := filepath.Base(editURI.Filename())
- contents = fmt.Sprintf("%s:\n%s", filename, contents)
- }
- res = append(res, contents)
- }
-
- // Sort on filename
- sort.Strings(res)
-
- var got string
- for i, val := range res {
- if i != 0 {
- got += "\n"
- }
- got += val
- }
-
- renamed := string(r.data.Golden(tag, spn.URI().Filename(), func() ([]byte, error) {
- return []byte(got), nil
- }))
-
- if renamed != got {
- t.Errorf("rename failed for %s, expected:\n%v\ngot:\n%v", newText, renamed, got)
- }
-}
-
-func applyEdits(contents string, edits []diff.TextEdit) string {
- res := contents
-
- // Apply the edits from the end of the file forward
- // to preserve the offsets
- for i := len(edits) - 1; i >= 0; i-- {
- edit := edits[i]
- start := edit.Span.Start().Offset()
- end := edit.Span.End().Offset()
- tmp := res[0:start] + edit.NewText
- res = tmp + res[end:]
- }
- return res
-}
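The back-to-front loop above is what keeps the byte offsets valid: each edit is applied after all edits to its right, so the offsets of the remaining (earlier) edits are untouched. A minimal standalone sketch of the same idea, using a simplified edit type rather than the diff package's:

package main

import "fmt"

// edit is a simplified stand-in for a text edit: the half-open byte range
// [start, end) is replaced by newText.
type edit struct {
	start, end int
	newText    string
}

// applyReverse applies position-sorted edits from last to first so that
// applying one edit never shifts the offsets of the edits before it.
func applyReverse(contents string, edits []edit) string {
	res := contents
	for i := len(edits) - 1; i >= 0; i-- {
		e := edits[i]
		res = res[:e.start] + e.newText + res[e.end:]
	}
	return res
}

func main() {
	// Two non-overlapping edits, sorted by offset: "hello world" -> "hi there".
	fmt.Println(applyReverse("hello world", []edit{
		{start: 0, end: 5, newText: "hi"},
		{start: 6, end: 11, newText: "there"},
	}))
}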
-
-func (r *runner) PrepareRename(t *testing.T, src span.Span, want *source.PrepareItem) {
- _, srcRng, err := spanToRange(r.data, src)
- if err != nil {
- t.Fatal(err)
- }
- // Find the identifier at the position.
- fh, err := r.snapshot.GetFile(r.ctx, src.URI())
- if err != nil {
- t.Fatal(err)
- }
- item, _, err := source.PrepareRename(r.ctx, r.snapshot, fh, srcRng.Start)
- if err != nil {
- if want.Text != "" { // expected an ident.
- t.Errorf("prepare rename failed for %v: got error: %v", src, err)
- }
- return
- }
- if item == nil {
- if want.Text != "" {
- t.Errorf("prepare rename failed for %v: got nil", src)
- }
- return
- }
- if want.Text == "" {
- t.Errorf("prepare rename failed for %v: expected nil, got %v", src, item)
- return
- }
- if item.Range.Start == item.Range.End {
- // Special case for 0-length ranges. Marks can't specify a 0-length range,
- // so just compare the start.
- if item.Range.Start != want.Range.Start {
- t.Errorf("prepare rename failed: incorrect point, got %v want %v", item.Range.Start, want.Range.Start)
- }
- } else {
- if protocol.CompareRange(item.Range, want.Range) != 0 {
- t.Errorf("prepare rename failed: incorrect range got %v want %v", item.Range, want.Range)
- }
- }
-}
-
-func (r *runner) Symbols(t *testing.T, uri span.URI, expectedSymbols []protocol.DocumentSymbol) {
- fh, err := r.snapshot.GetFile(r.ctx, uri)
- if err != nil {
- t.Fatal(err)
- }
- symbols, err := source.DocumentSymbols(r.ctx, r.snapshot, fh)
- if err != nil {
- t.Errorf("symbols failed for %s: %v", uri, err)
- }
- if len(symbols) != len(expectedSymbols) {
- t.Errorf("want %d top-level symbols in %v, got %d", len(expectedSymbols), uri, len(symbols))
- return
- }
- if diff := tests.DiffSymbols(t, uri, expectedSymbols, symbols); diff != "" {
- t.Error(diff)
- }
-}
-
-func (r *runner) WorkspaceSymbols(t *testing.T, uri span.URI, query string, typ tests.WorkspaceSymbolsTestType) {
- r.callWorkspaceSymbols(t, uri, query, typ)
-}
-
-func (r *runner) callWorkspaceSymbols(t *testing.T, uri span.URI, query string, typ tests.WorkspaceSymbolsTestType) {
- t.Helper()
-
- matcher := tests.WorkspaceSymbolsTestTypeToMatcher(typ)
- gotSymbols, err := source.WorkspaceSymbols(r.ctx, matcher, r.view.Options().SymbolStyle, []source.View{r.view}, query)
- if err != nil {
- t.Fatal(err)
- }
- got, err := tests.WorkspaceSymbolsString(r.ctx, r.data, uri, gotSymbols)
- if err != nil {
- t.Fatal(err)
- }
- got = filepath.ToSlash(tests.Normalize(got, r.normalizers))
- want := string(r.data.Golden(fmt.Sprintf("workspace_symbol-%s-%s", strings.ToLower(string(matcher)), query), uri.Filename(), func() ([]byte, error) {
- return []byte(got), nil
- }))
- if diff := tests.Diff(t, want, got); diff != "" {
- t.Error(diff)
- }
-}
-
-func (r *runner) SignatureHelp(t *testing.T, spn span.Span, want *protocol.SignatureHelp) {
- _, rng, err := spanToRange(r.data, spn)
- if err != nil {
- t.Fatal(err)
- }
- fh, err := r.snapshot.GetFile(r.ctx, spn.URI())
- if err != nil {
- t.Fatal(err)
- }
- gotSignature, gotActiveParameter, err := source.SignatureHelp(r.ctx, r.snapshot, fh, rng.Start)
- if err != nil {
- // Only fail if we got an error we did not expect.
- if want != nil {
- t.Fatalf("failed for %v: %v", spn, err)
- }
- return
- }
- if gotSignature == nil {
- if want != nil {
- t.Fatalf("got nil signature, but expected %v", want)
- }
- return
- }
- got := &protocol.SignatureHelp{
- Signatures: []protocol.SignatureInformation{*gotSignature},
- ActiveParameter: uint32(gotActiveParameter),
- }
- diff, err := tests.DiffSignatures(spn, want, got)
- if err != nil {
- t.Fatal(err)
- }
- if diff != "" {
- t.Error(diff)
- }
-}
-
-// These are pure LSP features; there is no source-level functionality to test.
-func (r *runner) Link(t *testing.T, uri span.URI, wantLinks []tests.Link) {}
-
-func (r *runner) SuggestedFix(t *testing.T, spn span.Span, actionKinds []string, expectedActions int) {
-}
-func (r *runner) FunctionExtraction(t *testing.T, start span.Span, end span.Span) {}
-func (r *runner) MethodExtraction(t *testing.T, start span.Span, end span.Span) {}
-func (r *runner) CodeLens(t *testing.T, uri span.URI, want []protocol.CodeLens) {}
-func (r *runner) AddImport(t *testing.T, uri span.URI, expectedImport string) {}
-
-func spanToRange(data *tests.Data, spn span.Span) (*protocol.ColumnMapper, protocol.Range, error) {
- m, err := data.Mapper(spn.URI())
- if err != nil {
- return nil, protocol.Range{}, err
- }
- srcRng, err := m.Range(spn)
- if err != nil {
- return nil, protocol.Range{}, err
- }
- return m, srcRng, nil
-}
diff --git a/internal/lsp/source/stub.go b/internal/lsp/source/stub.go
deleted file mode 100644
index 6810f1d20..000000000
--- a/internal/lsp/source/stub.go
+++ /dev/null
@@ -1,330 +0,0 @@
-// Copyright 2022 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-package source
-
-import (
- "bytes"
- "context"
- "fmt"
- "go/ast"
- "go/format"
- "go/parser"
- "go/token"
- "go/types"
- "strings"
-
- "golang.org/x/tools/go/analysis"
- "golang.org/x/tools/go/ast/astutil"
- "golang.org/x/tools/internal/lsp/analysis/stubmethods"
- "golang.org/x/tools/internal/lsp/protocol"
- "golang.org/x/tools/internal/span"
- "golang.org/x/tools/internal/typeparams"
-)
-
-func stubSuggestedFixFunc(ctx context.Context, snapshot Snapshot, fh VersionedFileHandle, rng protocol.Range) (*analysis.SuggestedFix, error) {
- pkg, pgf, err := GetParsedFile(ctx, snapshot, fh, NarrowestPackage)
- if err != nil {
- return nil, fmt.Errorf("GetParsedFile: %w", err)
- }
- nodes, pos, err := getStubNodes(pgf, rng)
- if err != nil {
- return nil, fmt.Errorf("getNodes: %w", err)
- }
- si := stubmethods.GetStubInfo(pkg.GetTypesInfo(), nodes, pos)
- if si == nil {
- return nil, fmt.Errorf("nil interface request")
- }
- parsedConcreteFile, concreteFH, err := getStubFile(ctx, si.Concrete.Obj(), snapshot)
- if err != nil {
- return nil, fmt.Errorf("getFile(concrete): %w", err)
- }
- var (
- methodsSrc []byte
- stubImports []*stubImport // additional imports needed for method stubs
- )
- if si.Interface.Pkg() == nil && si.Interface.Name() == "error" && si.Interface.Parent() == types.Universe {
- methodsSrc = stubErr(ctx, parsedConcreteFile.File, si, snapshot)
- } else {
- methodsSrc, stubImports, err = stubMethods(ctx, parsedConcreteFile.File, si, snapshot)
- }
- if err != nil {
- return nil, fmt.Errorf("stubMethods: %w", err)
- }
- nodes, _ = astutil.PathEnclosingInterval(parsedConcreteFile.File, si.Concrete.Obj().Pos(), si.Concrete.Obj().Pos())
- concreteSrc, err := concreteFH.Read()
- if err != nil {
- return nil, fmt.Errorf("error reading concrete file source: %w", err)
- }
- insertPos := snapshot.FileSet().Position(nodes[1].End()).Offset
- if insertPos >= len(concreteSrc) {
- return nil, fmt.Errorf("insertion position is past the end of the file")
- }
- var buf bytes.Buffer
- buf.Write(concreteSrc[:insertPos])
- buf.WriteByte('\n')
- buf.Write(methodsSrc)
- buf.Write(concreteSrc[insertPos:])
- fset := token.NewFileSet()
- newF, err := parser.ParseFile(fset, parsedConcreteFile.File.Name.Name, buf.Bytes(), parser.ParseComments)
- if err != nil {
- return nil, fmt.Errorf("could not reparse file: %w", err)
- }
- for _, imp := range stubImports {
- astutil.AddNamedImport(fset, newF, imp.Name, imp.Path)
- }
- var source bytes.Buffer
- err = format.Node(&source, fset, newF)
- if err != nil {
- return nil, fmt.Errorf("format.Node: %w", err)
- }
- diffEdits, err := snapshot.View().Options().ComputeEdits(parsedConcreteFile.URI, string(parsedConcreteFile.Src), source.String())
- if err != nil {
- return nil, err
- }
- var edits []analysis.TextEdit
- for _, edit := range diffEdits {
- rng, err := edit.Span.Range(parsedConcreteFile.Mapper.Converter)
- if err != nil {
- return nil, err
- }
- edits = append(edits, analysis.TextEdit{
- Pos: rng.Start,
- End: rng.End,
- NewText: []byte(edit.NewText),
- })
- }
- return &analysis.SuggestedFix{
- TextEdits: edits,
- }, nil
-}
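The splice-then-reformat step above, in isolation: the stub source is inserted just after the concrete type's declaration and the whole file is run through the formatter before computing edits. A self-contained sketch with hard-coded inputs (the real code derives the insertion offset from the declaration's End position):

package main

import (
	"bytes"
	"fmt"
	"go/format"
)

func main() {
	src := []byte("package p\n\ntype buffer struct{}\n")
	stub := []byte("// Close implements io.Closer\nfunc (*buffer) Close() error {\n\tpanic(\"unimplemented\")\n}\n")

	// Insert the stub immediately after the type declaration.
	insertAt := bytes.Index(src, []byte("struct{}")) + len("struct{}")

	var buf bytes.Buffer
	buf.Write(src[:insertAt])
	buf.WriteByte('\n')
	buf.Write(stub)
	buf.Write(src[insertAt:])

	// Let go/format fix spacing and indentation of the spliced file.
	formatted, err := format.Source(buf.Bytes())
	if err != nil {
		panic(err)
	}
	fmt.Print(string(formatted))
}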
-
-// stubMethods returns the Go source for the method stubs that the
-// concrete type still needs in order to implement the given interface.
-func stubMethods(ctx context.Context, concreteFile *ast.File, si *stubmethods.StubInfo, snapshot Snapshot) ([]byte, []*stubImport, error) {
- ifacePkg, err := deducePkgFromTypes(ctx, snapshot, si.Interface)
- if err != nil {
- return nil, nil, err
- }
- si.Concrete.Obj().Type()
- concMS := types.NewMethodSet(types.NewPointer(si.Concrete.Obj().Type()))
- missing, err := missingMethods(ctx, snapshot, concMS, si.Concrete.Obj().Pkg(), si.Interface, ifacePkg, map[string]struct{}{})
- if err != nil {
- return nil, nil, fmt.Errorf("missingMethods: %w", err)
- }
- if len(missing) == 0 {
- return nil, nil, fmt.Errorf("no missing methods found")
- }
- var (
- stubImports []*stubImport
- methodsBuffer bytes.Buffer
- )
- for _, mi := range missing {
- for _, m := range mi.missing {
- // TODO(marwan-at-work): this should share the same logic with source.FormatVarType
- // as it also accounts for type aliases.
- sig := types.TypeString(m.Type(), stubmethods.RelativeToFiles(si.Concrete.Obj().Pkg(), concreteFile, mi.file, func(name, path string) {
- for _, imp := range stubImports {
- if imp.Name == name && imp.Path == path {
- return
- }
- }
- stubImports = append(stubImports, &stubImport{name, path})
- }))
- _, err = methodsBuffer.Write(printStubMethod(methodData{
- Method: m.Name(),
- Concrete: getStubReceiver(si),
- Interface: deduceIfaceName(si.Concrete.Obj().Pkg(), si.Interface.Pkg(), si.Interface),
- Signature: strings.TrimPrefix(sig, "func"),
- }))
- if err != nil {
- return nil, nil, fmt.Errorf("error printing method: %w", err)
- }
- methodsBuffer.WriteRune('\n')
- }
- }
- return methodsBuffer.Bytes(), stubImports, nil
-}
-
-// stubErr returns the Go code implementing the error
-// interface for the concrete type.
-func stubErr(ctx context.Context, concreteFile *ast.File, si *stubmethods.StubInfo, snapshot Snapshot) []byte {
- return printStubMethod(methodData{
- Method: "Error",
- Interface: "error",
- Concrete: getStubReceiver(si),
- Signature: "() string",
- })
-}
-
-// getStubReceiver returns the concrete type's name as a method receiver.
-// It accounts for type parameters if they exist.
-func getStubReceiver(si *stubmethods.StubInfo) string {
- var concrete string
- if si.Pointer {
- concrete += "*"
- }
- concrete += si.Concrete.Obj().Name()
- concrete += FormatTypeParams(typeparams.ForNamed(si.Concrete))
- return concrete
-}
-
-type methodData struct {
- Method string
- Interface string
- Concrete string
- Signature string
-}
-
-// printStubMethod takes methodData and returns Go code that represents the given method such as:
-// // {{ .Method }} implements {{ .Interface }}
-// func ({{ .Concrete }}) {{ .Method }}{{ .Signature }} {
-// panic("unimplemented")
-// }
-func printStubMethod(md methodData) []byte {
- var b bytes.Buffer
- fmt.Fprintf(&b, "// %s implements %s\n", md.Method, md.Interface)
- fmt.Fprintf(&b, "func (%s) %s%s {\n\t", md.Concrete, md.Method, md.Signature)
- fmt.Fprintln(&b, `panic("unimplemented")`)
- fmt.Fprintln(&b, "}")
- return b.Bytes()
-}
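For reference, a stub in the shape produced by printStubMethod, embedded in a minimal package so it compiles (the buffer type and io.Closer pairing are illustrative, not taken from this change):

package p

import "io"

type buffer struct{}

// Compile-time check that the generated stub satisfies the interface.
var _ io.Closer = (*buffer)(nil)

// Close implements io.Closer
func (*buffer) Close() error {
	panic("unimplemented")
}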
-
-func deducePkgFromTypes(ctx context.Context, snapshot Snapshot, ifaceObj types.Object) (Package, error) {
- pkgs, err := snapshot.KnownPackages(ctx)
- if err != nil {
- return nil, err
- }
- for _, p := range pkgs {
- if p.PkgPath() == ifaceObj.Pkg().Path() {
- return p, nil
- }
- }
- return nil, fmt.Errorf("pkg %q not found", ifaceObj.Pkg().Path())
-}
-
-func deduceIfaceName(concretePkg, ifacePkg *types.Package, ifaceObj types.Object) string {
- if concretePkg.Path() == ifacePkg.Path() {
- return ifaceObj.Name()
- }
- return fmt.Sprintf("%s.%s", ifacePkg.Name(), ifaceObj.Name())
-}
-
-func getStubNodes(pgf *ParsedGoFile, pRng protocol.Range) ([]ast.Node, token.Pos, error) {
- spn, err := pgf.Mapper.RangeSpan(pRng)
- if err != nil {
- return nil, 0, err
- }
- rng, err := spn.Range(pgf.Mapper.Converter)
- if err != nil {
- return nil, 0, err
- }
- nodes, _ := astutil.PathEnclosingInterval(pgf.File, rng.Start, rng.End)
- return nodes, rng.Start, nil
-}
-
-/*
-missingMethods takes a concrete type and returns any methods of the given interface that the type is missing,
-as well as missing methods of any interfaces embedded within it. For example:
-
-type I interface {
- io.Writer
- Hello()
-}
-returns []*missingInterface{
- {
- iface: *types.Interface (io.Writer),
- file: *ast.File: io.go,
- missing []*types.Func{Write},
- },
- {
- iface: *types.Interface (I),
- file: *ast.File: myfile.go,
- missing: []*types.Func{Hello}
- },
-}
-*/
-func missingMethods(ctx context.Context, snapshot Snapshot, concMS *types.MethodSet, concPkg *types.Package, ifaceObj types.Object, ifacePkg Package, visited map[string]struct{}) ([]*missingInterface, error) {
- iface, ok := ifaceObj.Type().Underlying().(*types.Interface)
- if !ok {
- return nil, fmt.Errorf("expected %v to be an interface but got %T", iface, ifaceObj.Type().Underlying())
- }
- missing := []*missingInterface{}
- for i := 0; i < iface.NumEmbeddeds(); i++ {
- eiface := iface.Embedded(i).Obj()
- depPkg := ifacePkg
- if eiface.Pkg().Path() != ifacePkg.PkgPath() {
- var err error
- depPkg, err = ifacePkg.GetImport(eiface.Pkg().Path())
- if err != nil {
- return nil, err
- }
- }
- em, err := missingMethods(ctx, snapshot, concMS, concPkg, eiface, depPkg, visited)
- if err != nil {
- return nil, err
- }
- missing = append(missing, em...)
- }
- parsedFile, _, err := getStubFile(ctx, ifaceObj, snapshot)
- if err != nil {
- return nil, fmt.Errorf("error getting iface file: %w", err)
- }
- mi := &missingInterface{
- pkg: ifacePkg,
- iface: iface,
- file: parsedFile.File,
- }
- if mi.file == nil {
- return nil, fmt.Errorf("could not find ast.File for %v", ifaceObj.Name())
- }
- for i := 0; i < iface.NumExplicitMethods(); i++ {
- method := iface.ExplicitMethod(i)
- // if the concrete type does not have the interface method
- if concMS.Lookup(concPkg, method.Name()) == nil {
- if _, ok := visited[method.Name()]; !ok {
- mi.missing = append(mi.missing, method)
- visited[method.Name()] = struct{}{}
- }
- }
- if sel := concMS.Lookup(concPkg, method.Name()); sel != nil {
- implSig := sel.Type().(*types.Signature)
- ifaceSig := method.Type().(*types.Signature)
- if !types.Identical(ifaceSig, implSig) {
- return nil, fmt.Errorf("mimsatched %q function signatures:\nhave: %s\nwant: %s", method.Name(), implSig, ifaceSig)
- }
- }
- }
- if len(mi.missing) > 0 {
- missing = append(missing, mi)
- }
- return missing, nil
-}
-
-func getStubFile(ctx context.Context, obj types.Object, snapshot Snapshot) (*ParsedGoFile, VersionedFileHandle, error) {
- objPos := snapshot.FileSet().Position(obj.Pos())
- objFile := span.URIFromPath(objPos.Filename)
- objectFH := snapshot.FindFile(objFile)
- _, goFile, err := GetParsedFile(ctx, snapshot, objectFH, WidestPackage)
- if err != nil {
- return nil, nil, fmt.Errorf("GetParsedFile: %w", err)
- }
- return goFile, objectFH, nil
-}
-
-// missingInterface represents an interface
-// that has all or some of its methods missing
-// from the destination concrete type
-type missingInterface struct {
- iface *types.Interface
- file *ast.File
- pkg Package
- missing []*types.Func
-}
-
-// stubImport represents an import statement that must be added to the file
-// containing the concrete type. If Name is not empty, the import is required
-// to have that name.
-type stubImport struct{ Name, Path string }
diff --git a/internal/lsp/source/symbols.go b/internal/lsp/source/symbols.go
deleted file mode 100644
index 16fb2223d..000000000
--- a/internal/lsp/source/symbols.go
+++ /dev/null
@@ -1,266 +0,0 @@
-// Copyright 2019 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-package source
-
-import (
- "context"
- "fmt"
- "go/ast"
- "go/types"
-
- "golang.org/x/tools/internal/event"
- "golang.org/x/tools/internal/lsp/protocol"
- errors "golang.org/x/xerrors"
-)
-
-func DocumentSymbols(ctx context.Context, snapshot Snapshot, fh FileHandle) ([]protocol.DocumentSymbol, error) {
- ctx, done := event.Start(ctx, "source.DocumentSymbols")
- defer done()
-
- pkg, pgf, err := GetParsedFile(ctx, snapshot, fh, NarrowestPackage)
- if err != nil {
- return nil, errors.Errorf("getting file for DocumentSymbols: %w", err)
- }
-
- info := pkg.GetTypesInfo()
- q := Qualifier(pgf.File, pkg.GetTypes(), info)
-
- symbolsToReceiver := make(map[types.Type]int)
- var symbols []protocol.DocumentSymbol
- for _, decl := range pgf.File.Decls {
- switch decl := decl.(type) {
- case *ast.FuncDecl:
- if decl.Name.Name == "_" {
- continue
- }
- if obj := info.ObjectOf(decl.Name); obj != nil {
- fs, err := funcSymbol(snapshot, pkg, decl, obj, q)
- if err != nil {
- return nil, err
- }
- // If function is a method, prepend the type of the method.
- if fs.Kind == protocol.Method {
- rtype := obj.Type().(*types.Signature).Recv().Type()
- fs.Name = fmt.Sprintf("(%s).%s", types.TypeString(rtype, q), fs.Name)
- }
- symbols = append(symbols, fs)
- }
- case *ast.GenDecl:
- for _, spec := range decl.Specs {
- switch spec := spec.(type) {
- case *ast.TypeSpec:
- if spec.Name.Name == "_" {
- continue
- }
- if obj := info.ObjectOf(spec.Name); obj != nil {
- ts, err := typeSymbol(snapshot, pkg, info, spec, obj, q)
- if err != nil {
- return nil, err
- }
- symbols = append(symbols, ts)
- symbolsToReceiver[obj.Type()] = len(symbols) - 1
- }
- case *ast.ValueSpec:
- for _, name := range spec.Names {
- if name.Name == "_" {
- continue
- }
- if obj := info.ObjectOf(name); obj != nil {
- vs, err := varSymbol(snapshot, pkg, decl, name, obj, q)
- if err != nil {
- return nil, err
- }
- symbols = append(symbols, vs)
- }
- }
- }
- }
- }
- }
- return symbols, nil
-}
-
-func funcSymbol(snapshot Snapshot, pkg Package, decl *ast.FuncDecl, obj types.Object, q types.Qualifier) (protocol.DocumentSymbol, error) {
- s := protocol.DocumentSymbol{
- Name: obj.Name(),
- Kind: protocol.Function,
- }
- var err error
- s.Range, err = nodeToProtocolRange(snapshot, pkg, decl)
- if err != nil {
- return protocol.DocumentSymbol{}, err
- }
- s.SelectionRange, err = nodeToProtocolRange(snapshot, pkg, decl.Name)
- if err != nil {
- return protocol.DocumentSymbol{}, err
- }
- sig, _ := obj.Type().(*types.Signature)
- if sig != nil {
- if sig.Recv() != nil {
- s.Kind = protocol.Method
- }
- s.Detail += "("
- for i := 0; i < sig.Params().Len(); i++ {
- if i > 0 {
- s.Detail += ", "
- }
- param := sig.Params().At(i)
- label := types.TypeString(param.Type(), q)
- if param.Name() != "" {
- label = fmt.Sprintf("%s %s", param.Name(), label)
- }
- s.Detail += label
- }
- s.Detail += ")"
- }
- return s, nil
-}
-
-func typeSymbol(snapshot Snapshot, pkg Package, info *types.Info, spec *ast.TypeSpec, obj types.Object, qf types.Qualifier) (protocol.DocumentSymbol, error) {
- s := protocol.DocumentSymbol{
- Name: obj.Name(),
- }
- s.Detail, _ = FormatType(obj.Type(), qf)
- s.Kind = typeToKind(obj.Type())
-
- var err error
- s.Range, err = nodeToProtocolRange(snapshot, pkg, spec)
- if err != nil {
- return protocol.DocumentSymbol{}, err
- }
- s.SelectionRange, err = nodeToProtocolRange(snapshot, pkg, spec.Name)
- if err != nil {
- return protocol.DocumentSymbol{}, err
- }
- t, objIsStruct := obj.Type().Underlying().(*types.Struct)
- st, specIsStruct := spec.Type.(*ast.StructType)
- if objIsStruct && specIsStruct {
- for i := 0; i < t.NumFields(); i++ {
- f := t.Field(i)
- child := protocol.DocumentSymbol{
- Name: f.Name(),
- Kind: protocol.Field,
- }
- child.Detail, _ = FormatType(f.Type(), qf)
-
- spanNode, selectionNode := nodesForStructField(i, st)
- if span, err := nodeToProtocolRange(snapshot, pkg, spanNode); err == nil {
- child.Range = span
- }
- if span, err := nodeToProtocolRange(snapshot, pkg, selectionNode); err == nil {
- child.SelectionRange = span
- }
- s.Children = append(s.Children, child)
- }
- }
-
- ti, objIsInterface := obj.Type().Underlying().(*types.Interface)
- ai, specIsInterface := spec.Type.(*ast.InterfaceType)
- if objIsInterface && specIsInterface {
- for i := 0; i < ti.NumExplicitMethods(); i++ {
- method := ti.ExplicitMethod(i)
- child := protocol.DocumentSymbol{
- Name: method.Name(),
- Kind: protocol.Method,
- }
-
- var spanNode, selectionNode ast.Node
- Methods:
- for _, f := range ai.Methods.List {
- for _, id := range f.Names {
- if id.Name == method.Name() {
- spanNode, selectionNode = f, id
- break Methods
- }
- }
- }
- child.Range, err = nodeToProtocolRange(snapshot, pkg, spanNode)
- if err != nil {
- return protocol.DocumentSymbol{}, err
- }
- child.SelectionRange, err = nodeToProtocolRange(snapshot, pkg, selectionNode)
- if err != nil {
- return protocol.DocumentSymbol{}, err
- }
- s.Children = append(s.Children, child)
- }
-
- for i := 0; i < ti.NumEmbeddeds(); i++ {
- embedded := ti.EmbeddedType(i)
- nt, isNamed := embedded.(*types.Named)
- if !isNamed {
- continue
- }
-
- child := protocol.DocumentSymbol{
- Name: types.TypeString(embedded, qf),
- }
- child.Kind = typeToKind(embedded)
- var spanNode, selectionNode ast.Node
- Embeddeds:
- for _, f := range ai.Methods.List {
- if len(f.Names) > 0 {
- continue
- }
-
- if t := info.TypeOf(f.Type); types.Identical(nt, t) {
- spanNode, selectionNode = f, f.Type
- break Embeddeds
- }
- }
- child.Range, err = nodeToProtocolRange(snapshot, pkg, spanNode)
- if err != nil {
- return protocol.DocumentSymbol{}, err
- }
- child.SelectionRange, err = nodeToProtocolRange(snapshot, pkg, selectionNode)
- if err != nil {
- return protocol.DocumentSymbol{}, err
- }
- s.Children = append(s.Children, child)
- }
- }
- return s, nil
-}
-
-func nodesForStructField(i int, st *ast.StructType) (span, selection ast.Node) {
- j := 0
- for _, field := range st.Fields.List {
- if len(field.Names) == 0 {
- if i == j {
- return field, field.Type
- }
- j++
- continue
- }
- for _, name := range field.Names {
- if i == j {
- return field, name
- }
- j++
- }
- }
- return nil, nil
-}
-
-func varSymbol(snapshot Snapshot, pkg Package, decl ast.Node, name *ast.Ident, obj types.Object, q types.Qualifier) (protocol.DocumentSymbol, error) {
- s := protocol.DocumentSymbol{
- Name: obj.Name(),
- Kind: protocol.Variable,
- }
- if _, ok := obj.(*types.Const); ok {
- s.Kind = protocol.Constant
- }
- var err error
- s.Range, err = nodeToProtocolRange(snapshot, pkg, decl)
- if err != nil {
- return protocol.DocumentSymbol{}, err
- }
- s.SelectionRange, err = nodeToProtocolRange(snapshot, pkg, name)
- if err != nil {
- return protocol.DocumentSymbol{}, err
- }
- s.Detail = types.TypeString(obj.Type(), q)
- return s, nil
-}
diff --git a/internal/lsp/source/types_format.go b/internal/lsp/source/types_format.go
deleted file mode 100644
index fcbf228ec..000000000
--- a/internal/lsp/source/types_format.go
+++ /dev/null
@@ -1,459 +0,0 @@
-// Copyright 2020 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-package source
-
-import (
- "bytes"
- "context"
- "fmt"
- "go/ast"
- "go/doc"
- "go/printer"
- "go/token"
- "go/types"
- "strings"
-
- "golang.org/x/tools/internal/event"
- "golang.org/x/tools/internal/lsp/debug/tag"
- "golang.org/x/tools/internal/lsp/protocol"
- "golang.org/x/tools/internal/typeparams"
-)
-
-// FormatType returns the detail and kind for a types.Type.
-func FormatType(typ types.Type, qf types.Qualifier) (detail string, kind protocol.CompletionItemKind) {
- if types.IsInterface(typ) {
- detail = "interface{...}"
- kind = protocol.InterfaceCompletion
- } else if _, ok := typ.(*types.Struct); ok {
- detail = "struct{...}"
- kind = protocol.StructCompletion
- } else if typ != typ.Underlying() {
- detail, kind = FormatType(typ.Underlying(), qf)
- } else {
- detail = types.TypeString(typ, qf)
- kind = protocol.ClassCompletion
- }
- return detail, kind
-}
-
-type signature struct {
- name, doc string
- typeParams, params, results []string
- variadic bool
- needResultParens bool
-}
-
-func (s *signature) Format() string {
- var b strings.Builder
- b.WriteByte('(')
- for i, p := range s.params {
- if i > 0 {
- b.WriteString(", ")
- }
- b.WriteString(p)
- }
- b.WriteByte(')')
-
- // Add space between parameters and results.
- if len(s.results) > 0 {
- b.WriteByte(' ')
- }
- if s.needResultParens {
- b.WriteByte('(')
- }
- for i, r := range s.results {
- if i > 0 {
- b.WriteString(", ")
- }
- b.WriteString(r)
- }
- if s.needResultParens {
- b.WriteByte(')')
- }
- return b.String()
-}
-
-func (s *signature) TypeParams() []string {
- return s.typeParams
-}
-
-func (s *signature) Params() []string {
- return s.params
-}
-
-// NewBuiltinSignature returns the signature of the builtin object with the
-// given name, if such a builtin exists.
-func NewBuiltinSignature(ctx context.Context, s Snapshot, name string) (*signature, error) {
- builtin, err := s.BuiltinFile(ctx)
- if err != nil {
- return nil, err
- }
- obj := builtin.File.Scope.Lookup(name)
- if obj == nil {
- return nil, fmt.Errorf("no builtin object for %s", name)
- }
- decl, ok := obj.Decl.(*ast.FuncDecl)
- if !ok {
- return nil, fmt.Errorf("no function declaration for builtin: %s", name)
- }
- if decl.Type == nil {
- return nil, fmt.Errorf("no type for builtin decl %s", decl.Name)
- }
- var variadic bool
- if decl.Type.Params.List != nil {
- numParams := len(decl.Type.Params.List)
- lastParam := decl.Type.Params.List[numParams-1]
- if _, ok := lastParam.Type.(*ast.Ellipsis); ok {
- variadic = true
- }
- }
- params, _ := formatFieldList(ctx, s, decl.Type.Params, variadic)
- results, needResultParens := formatFieldList(ctx, s, decl.Type.Results, false)
- d := decl.Doc.Text()
- switch s.View().Options().HoverKind {
- case SynopsisDocumentation:
- d = doc.Synopsis(d)
- case NoDocumentation:
- d = ""
- }
- return &signature{
- doc: d,
- name: name,
- needResultParens: needResultParens,
- params: params,
- results: results,
- variadic: variadic,
- }, nil
-}
-
-var replacer = strings.NewReplacer(
- `ComplexType`, `complex128`,
- `FloatType`, `float64`,
- `IntegerType`, `int`,
-)
-
-func formatFieldList(ctx context.Context, snapshot Snapshot, list *ast.FieldList, variadic bool) ([]string, bool) {
- if list == nil {
- return nil, false
- }
- var writeResultParens bool
- var result []string
- for i := 0; i < len(list.List); i++ {
- if i >= 1 {
- writeResultParens = true
- }
- p := list.List[i]
- cfg := printer.Config{Mode: printer.UseSpaces | printer.TabIndent, Tabwidth: 4}
- b := &bytes.Buffer{}
- if err := cfg.Fprint(b, snapshot.FileSet(), p.Type); err != nil {
- event.Error(ctx, "unable to print type", nil, tag.Type.Of(p.Type))
- continue
- }
- typ := replacer.Replace(b.String())
- if len(p.Names) == 0 {
- result = append(result, typ)
- }
- for _, name := range p.Names {
- if name.Name != "" {
- if i == 0 {
- writeResultParens = true
- }
- result = append(result, fmt.Sprintf("%s %s", name.Name, typ))
- } else {
- result = append(result, typ)
- }
- }
- }
- if variadic {
- result[len(result)-1] = strings.Replace(result[len(result)-1], "[]", "...", 1)
- }
- return result, writeResultParens
-}
-
-// FormatTypeParams turns TypeParamList into its Go representation, such as:
-// [T, Y]. Note that it does not print constraints as this is mainly used for
-// formatting type params in method receivers.
-func FormatTypeParams(tparams *typeparams.TypeParamList) string {
- if tparams == nil || tparams.Len() == 0 {
- return ""
- }
- var buf bytes.Buffer
- buf.WriteByte('[')
- for i := 0; i < tparams.Len(); i++ {
- if i > 0 {
- buf.WriteString(", ")
- }
- buf.WriteString(tparams.At(i).Obj().Name())
- }
- buf.WriteByte(']')
- return buf.String()
-}
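A small go/types sketch of the rendering described above: only the type-parameter names are printed, giving "[T, Y]" for a signature with two parameters. The list is built here directly with the go/types API, not with the internal typeparams helpers used by the deleted code.

package main

import (
	"fmt"
	"go/token"
	"go/types"
	"strings"
)

func main() {
	// Construct a generic signature func[T any, Y any]() by hand.
	anyConstraint := types.NewInterfaceType(nil, nil)
	anyConstraint.Complete()
	tparams := []*types.TypeParam{
		types.NewTypeParam(types.NewTypeName(token.NoPos, nil, "T", nil), anyConstraint),
		types.NewTypeParam(types.NewTypeName(token.NoPos, nil, "Y", nil), anyConstraint),
	}
	sig := types.NewSignatureType(nil, nil, tparams, nil, nil, false)

	// Render the type-parameter list the way FormatTypeParams does:
	// names only, no constraints.
	names := make([]string, 0, sig.TypeParams().Len())
	for i := 0; i < sig.TypeParams().Len(); i++ {
		names = append(names, sig.TypeParams().At(i).Obj().Name())
	}
	fmt.Printf("[%s]\n", strings.Join(names, ", ")) // [T, Y]
}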
-
-// NewSignature returns a formatted signature for the given types.Signature.
-func NewSignature(ctx context.Context, s Snapshot, pkg Package, sig *types.Signature, comment *ast.CommentGroup, qf types.Qualifier) *signature {
- var tparams []string
- tpList := typeparams.ForSignature(sig)
- for i := 0; i < tpList.Len(); i++ {
- tparam := tpList.At(i)
- // TODO: is it possible to reuse the logic from FormatVarType here?
- s := tparam.Obj().Name() + " " + tparam.Constraint().String()
- tparams = append(tparams, s)
- }
-
- params := make([]string, 0, sig.Params().Len())
- for i := 0; i < sig.Params().Len(); i++ {
- el := sig.Params().At(i)
- typ := FormatVarType(ctx, s, pkg, el, qf)
- p := typ
- if el.Name() != "" {
- p = el.Name() + " " + typ
- }
- params = append(params, p)
- }
-
- var needResultParens bool
- results := make([]string, 0, sig.Results().Len())
- for i := 0; i < sig.Results().Len(); i++ {
- if i >= 1 {
- needResultParens = true
- }
- el := sig.Results().At(i)
- typ := FormatVarType(ctx, s, pkg, el, qf)
- if el.Name() == "" {
- results = append(results, typ)
- } else {
- if i == 0 {
- needResultParens = true
- }
- results = append(results, el.Name()+" "+typ)
- }
- }
- var d string
- if comment != nil {
- d = comment.Text()
- }
- switch s.View().Options().HoverKind {
- case SynopsisDocumentation:
- d = doc.Synopsis(d)
- case NoDocumentation:
- d = ""
- }
- return &signature{
- doc: d,
- typeParams: tparams,
- params: params,
- results: results,
- variadic: sig.Variadic(),
- needResultParens: needResultParens,
- }
-}
-
-// FormatVarType formats a *types.Var, accounting for type aliases.
-// To do this, it looks in the AST of the file in which the object is declared.
-// On any errors, it always falls back to types.TypeString.
-func FormatVarType(ctx context.Context, snapshot Snapshot, srcpkg Package, obj *types.Var, qf types.Qualifier) string {
- pkg, err := FindPackageFromPos(ctx, snapshot, obj.Pos())
- if err != nil {
- return types.TypeString(obj.Type(), qf)
- }
-
- expr, err := varType(ctx, snapshot, pkg, obj)
- if err != nil {
- return types.TypeString(obj.Type(), qf)
- }
-
- // If the given expr refers to a type parameter, then use the
- // object's Type instead of the type parameter declaration. This helps
- // format the instantiated type as opposed to the original undeclared
- // generic type.
- if typeparams.IsTypeParam(pkg.GetTypesInfo().Types[expr].Type) {
- return types.TypeString(obj.Type(), qf)
- }
-
- // The type names in the AST may not be correctly qualified.
- // Determine the package name to use based on the package that originated
- // the query and the package in which the type is declared.
- // We then qualify the value by cloning the AST node and editing it.
- clonedInfo := make(map[token.Pos]*types.PkgName)
- qualified := cloneExpr(expr, pkg.GetTypesInfo(), clonedInfo)
-
- // If the request came from a different package than the one in which the
- // types are defined, we may need to modify the qualifiers.
- qualified = qualifyExpr(qualified, srcpkg, pkg, clonedInfo, qf)
- fmted := FormatNode(snapshot.FileSet(), qualified)
- return fmted
-}
-
-// varType returns the type expression for a *types.Var.
-func varType(ctx context.Context, snapshot Snapshot, pkg Package, obj *types.Var) (ast.Expr, error) {
- field, err := snapshot.PosToField(ctx, pkg, obj.Pos())
- if err != nil {
- return nil, err
- }
- if field == nil {
- return nil, fmt.Errorf("no declaration for object %s", obj.Name())
- }
- return field.Type, nil
-}
-
-// qualifyExpr applies the "pkgName." prefix to any *ast.Ident in the expr.
-func qualifyExpr(expr ast.Expr, srcpkg, pkg Package, clonedInfo map[token.Pos]*types.PkgName, qf types.Qualifier) ast.Expr {
- ast.Inspect(expr, func(n ast.Node) bool {
- switch n := n.(type) {
- case *ast.ArrayType, *ast.ChanType, *ast.Ellipsis,
- *ast.FuncType, *ast.MapType, *ast.ParenExpr,
- *ast.StarExpr, *ast.StructType, *ast.FieldList, *ast.Field:
- // These are the only types that are cloned by cloneExpr below,
- // so these are the only types that we can traverse and potentially
- // modify. This is not an ideal approach, but it works for now.
-
- // TODO(rFindley): can we eliminate this filtering entirely? This caused
- // bugs in the past (golang/go#50539)
- return true
- case *ast.SelectorExpr:
- // We may need to change any selectors in which the X is a package
- // name and the Sel is exported.
- x, ok := n.X.(*ast.Ident)
- if !ok {
- return false
- }
- obj, ok := clonedInfo[x.Pos()]
- if !ok {
- return false
- }
- x.Name = qf(obj.Imported())
- return false
- case *ast.Ident:
- if srcpkg == pkg {
- return false
- }
- // Only add the qualifier if the identifier is exported.
- if ast.IsExported(n.Name) {
- pkgName := qf(pkg.GetTypes())
- n.Name = pkgName + "." + n.Name
- }
- }
- return false
- })
- return expr
-}
-
-// cloneExpr only clones expressions that appear in the parameters or return
-// values of a function declaration. The original expression may be returned
-// to the caller in 2 cases:
-// (1) The expression has no pointer fields.
-// (2) The expression cannot appear in an *ast.FuncType, making it
-// unnecessary to clone.
-// This function also keeps track of selector expressions in which the X is a
-// package name and marks them in a map along with their type information, so
-// that this information can be used when rewriting the expression.
-//
-// NOTE: This function is tailored to the use case of qualifyExpr, and should
-// be used with caution.
-func cloneExpr(expr ast.Expr, info *types.Info, clonedInfo map[token.Pos]*types.PkgName) ast.Expr {
- switch expr := expr.(type) {
- case *ast.ArrayType:
- return &ast.ArrayType{
- Lbrack: expr.Lbrack,
- Elt: cloneExpr(expr.Elt, info, clonedInfo),
- Len: expr.Len,
- }
- case *ast.ChanType:
- return &ast.ChanType{
- Arrow: expr.Arrow,
- Begin: expr.Begin,
- Dir: expr.Dir,
- Value: cloneExpr(expr.Value, info, clonedInfo),
- }
- case *ast.Ellipsis:
- return &ast.Ellipsis{
- Ellipsis: expr.Ellipsis,
- Elt: cloneExpr(expr.Elt, info, clonedInfo),
- }
- case *ast.FuncType:
- return &ast.FuncType{
- Func: expr.Func,
- Params: cloneFieldList(expr.Params, info, clonedInfo),
- Results: cloneFieldList(expr.Results, info, clonedInfo),
- }
- case *ast.Ident:
- return cloneIdent(expr)
- case *ast.MapType:
- return &ast.MapType{
- Map: expr.Map,
- Key: cloneExpr(expr.Key, info, clonedInfo),
- Value: cloneExpr(expr.Value, info, clonedInfo),
- }
- case *ast.ParenExpr:
- return &ast.ParenExpr{
- Lparen: expr.Lparen,
- Rparen: expr.Rparen,
- X: cloneExpr(expr.X, info, clonedInfo),
- }
- case *ast.SelectorExpr:
- s := &ast.SelectorExpr{
- Sel: cloneIdent(expr.Sel),
- X: cloneExpr(expr.X, info, clonedInfo),
- }
- if x, ok := expr.X.(*ast.Ident); ok && ast.IsExported(expr.Sel.Name) {
- if obj, ok := info.ObjectOf(x).(*types.PkgName); ok {
- clonedInfo[s.X.Pos()] = obj
- }
- }
- return s
- case *ast.StarExpr:
- return &ast.StarExpr{
- Star: expr.Star,
- X: cloneExpr(expr.X, info, clonedInfo),
- }
- case *ast.StructType:
- return &ast.StructType{
- Struct: expr.Struct,
- Fields: cloneFieldList(expr.Fields, info, clonedInfo),
- Incomplete: expr.Incomplete,
- }
- default:
- return expr
- }
-}
-
-func cloneFieldList(fl *ast.FieldList, info *types.Info, clonedInfo map[token.Pos]*types.PkgName) *ast.FieldList {
- if fl == nil {
- return nil
- }
- if fl.List == nil {
- return &ast.FieldList{
- Closing: fl.Closing,
- Opening: fl.Opening,
- }
- }
- list := make([]*ast.Field, 0, len(fl.List))
- for _, f := range fl.List {
- var names []*ast.Ident
- for _, n := range f.Names {
- names = append(names, cloneIdent(n))
- }
- list = append(list, &ast.Field{
- Comment: f.Comment,
- Doc: f.Doc,
- Names: names,
- Tag: f.Tag,
- Type: cloneExpr(f.Type, info, clonedInfo),
- })
- }
- return &ast.FieldList{
- Closing: fl.Closing,
- Opening: fl.Opening,
- List: list,
- }
-}
-
-func cloneIdent(ident *ast.Ident) *ast.Ident {
- return &ast.Ident{
- NamePos: ident.NamePos,
- Name: ident.Name,
- Obj: ident.Obj,
- }
-}
diff --git a/internal/lsp/source/util.go b/internal/lsp/source/util.go
deleted file mode 100644
index 71892eaa1..000000000
--- a/internal/lsp/source/util.go
+++ /dev/null
@@ -1,586 +0,0 @@
-// Copyright 2019 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-package source
-
-import (
- "context"
- "fmt"
- "go/ast"
- "go/printer"
- "go/token"
- "go/types"
- "path/filepath"
- "regexp"
- "sort"
- "strconv"
- "strings"
-
- "golang.org/x/mod/modfile"
- "golang.org/x/tools/internal/lsp/protocol"
- "golang.org/x/tools/internal/span"
- errors "golang.org/x/xerrors"
-)
-
-// MappedRange provides mapped protocol.Range for a span.Range, accounting for
-// UTF-16 code points.
-type MappedRange struct {
- spanRange span.Range
- m *protocol.ColumnMapper
-
- // protocolRange is the result of converting the spanRange using the mapper.
- // It is computed on-demand.
- protocolRange *protocol.Range
-}
-
-// NewMappedRange returns a MappedRange for the given start and end token.Pos.
-func NewMappedRange(fset *token.FileSet, m *protocol.ColumnMapper, start, end token.Pos) MappedRange {
- return MappedRange{
- spanRange: span.Range{
- FileSet: fset,
- Start: start,
- End: end,
- Converter: m.Converter,
- },
- m: m,
- }
-}
-
-func (s MappedRange) Range() (protocol.Range, error) {
- if s.protocolRange == nil {
- spn, err := s.spanRange.Span()
- if err != nil {
- return protocol.Range{}, err
- }
- prng, err := s.m.Range(spn)
- if err != nil {
- return protocol.Range{}, err
- }
- s.protocolRange = &prng
- }
- return *s.protocolRange, nil
-}
-
-func (s MappedRange) Span() (span.Span, error) {
- return s.spanRange.Span()
-}
-
-func (s MappedRange) SpanRange() span.Range {
- return s.spanRange
-}
-
-func (s MappedRange) URI() span.URI {
- return s.m.URI
-}
-
-// GetParsedFile is a convenience function that extracts the Package and
-// ParsedGoFile for a file in a Snapshot. pkgPolicy is one of NarrowestPackage/
-// WidestPackage.
-func GetParsedFile(ctx context.Context, snapshot Snapshot, fh FileHandle, pkgPolicy PackageFilter) (Package, *ParsedGoFile, error) {
- pkg, err := snapshot.PackageForFile(ctx, fh.URI(), TypecheckWorkspace, pkgPolicy)
- if err != nil {
- return nil, nil, err
- }
- pgh, err := pkg.File(fh.URI())
- return pkg, pgh, err
-}
-
-func IsGenerated(ctx context.Context, snapshot Snapshot, uri span.URI) bool {
- fh, err := snapshot.GetFile(ctx, uri)
- if err != nil {
- return false
- }
- pgf, err := snapshot.ParseGo(ctx, fh, ParseHeader)
- if err != nil {
- return false
- }
- tok := snapshot.FileSet().File(pgf.File.Pos())
- if tok == nil {
- return false
- }
- for _, commentGroup := range pgf.File.Comments {
- for _, comment := range commentGroup.List {
- if matched := generatedRx.MatchString(comment.Text); matched {
- // Check if comment is at the beginning of the line in source.
- if pos := tok.Position(comment.Slash); pos.Column == 1 {
- return true
- }
- }
- }
- }
- return false
-}
-
-func nodeToProtocolRange(snapshot Snapshot, pkg Package, n ast.Node) (protocol.Range, error) {
- mrng, err := posToMappedRange(snapshot, pkg, n.Pos(), n.End())
- if err != nil {
- return protocol.Range{}, err
- }
- return mrng.Range()
-}
-
-func objToMappedRange(snapshot Snapshot, pkg Package, obj types.Object) (MappedRange, error) {
- if pkgName, ok := obj.(*types.PkgName); ok {
- // An imported Go package has a package-local, unqualified name.
- // When the name matches the imported package name, there is no
- // identifier in the import spec with the local package name.
- //
- // For example:
- // import "go/ast" // name "ast" matches package name
- // import a "go/ast" // name "a" does not match package name
- //
- // When the identifier does not appear in the source, have the range
- // of the object be the import path, including quotes.
- if pkgName.Imported().Name() == pkgName.Name() {
- return posToMappedRange(snapshot, pkg, obj.Pos(), obj.Pos()+token.Pos(len(pkgName.Imported().Path())+2))
- }
- }
- return nameToMappedRange(snapshot, pkg, obj.Pos(), obj.Name())
-}
-
-func nameToMappedRange(snapshot Snapshot, pkg Package, pos token.Pos, name string) (MappedRange, error) {
- return posToMappedRange(snapshot, pkg, pos, pos+token.Pos(len(name)))
-}
-
-func posToMappedRange(snapshot Snapshot, pkg Package, pos, end token.Pos) (MappedRange, error) {
- logicalFilename := snapshot.FileSet().File(pos).Position(pos).Filename
- pgf, _, err := findFileInDeps(pkg, span.URIFromPath(logicalFilename))
- if err != nil {
- return MappedRange{}, err
- }
- if !pos.IsValid() {
- return MappedRange{}, errors.Errorf("invalid position for %v", pos)
- }
- if !end.IsValid() {
- return MappedRange{}, errors.Errorf("invalid position for %v", end)
- }
- return NewMappedRange(snapshot.FileSet(), pgf.Mapper, pos, end), nil
-}
-
-// Matches cgo generated comment as well as the proposed standard:
-// https://golang.org/s/generatedcode
-var generatedRx = regexp.MustCompile(`// .*DO NOT EDIT\.?`)
-
-// FileKindForLang returns the file kind associated with the given language ID,
-// or UnknownKind if the language ID is not recognized.
-func FileKindForLang(langID string) FileKind {
- switch langID {
- case "go":
- return Go
- case "go.mod":
- return Mod
- case "go.sum":
- return Sum
- case "tmpl", "gotmpl":
- return Tmpl
- case "go.work":
- return Work
- default:
- return UnknownKind
- }
-}
-
-func (k FileKind) String() string {
- switch k {
- case Go:
- return "go"
- case Mod:
- return "go.mod"
- case Sum:
- return "go.sum"
- case Tmpl:
- return "tmpl"
- case Work:
- return "go.work"
- default:
- return fmt.Sprintf("unk%d", k)
- }
-}
-
-// nodeAtPos returns the node in the list whose range contains pos, along with
-// its index, or (nil, -1) if no such node exists.
-func nodeAtPos(nodes []ast.Node, pos token.Pos) (ast.Node, int) {
- if nodes == nil {
- return nil, -1
- }
- for i, node := range nodes {
- if node.Pos() <= pos && pos <= node.End() {
- return node, i
- }
- }
- return nil, -1
-}
-
-// IsInterface reports whether T is an interface type.
-func IsInterface(T types.Type) bool {
- return T != nil && types.IsInterface(T)
-}
-
-// FormatNode returns the "pretty-print" output for an ast node.
-func FormatNode(fset *token.FileSet, n ast.Node) string {
- var buf strings.Builder
- if err := printer.Fprint(&buf, fset, n); err != nil {
- return ""
- }
- return buf.String()
-}
-
-// Deref returns a pointer's element type, traversing as many pointer levels as
-// needed; if typ is not a pointer, it is returned unchanged.
-//
-// It can return a pointer type for cyclic types (see golang/go#45510).
-func Deref(typ types.Type) types.Type {
- var seen map[types.Type]struct{}
- for {
- p, ok := typ.Underlying().(*types.Pointer)
- if !ok {
- return typ
- }
- if _, ok := seen[p.Elem()]; ok {
- return typ
- }
-
- typ = p.Elem()
-
- if seen == nil {
- seen = make(map[types.Type]struct{})
- }
- seen[typ] = struct{}{}
- }
-}
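The pointer-peeling behaviour in isolation (a sketch that re-implements the loop with go/types; the cycle guard is omitted since it only matters for pathological recursive types):

package main

import (
	"fmt"
	"go/types"
)

// deref peels away pointer levels until a non-pointer type is reached.
func deref(typ types.Type) types.Type {
	for {
		p, ok := typ.Underlying().(*types.Pointer)
		if !ok {
			return typ
		}
		typ = p.Elem()
	}
}

func main() {
	pp := types.NewPointer(types.NewPointer(types.Typ[types.Int])) // **int
	fmt.Println(pp)        // **int
	fmt.Println(deref(pp)) // int
}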
-
-func SortDiagnostics(d []*Diagnostic) {
- sort.Slice(d, func(i int, j int) bool {
- return CompareDiagnostic(d[i], d[j]) < 0
- })
-}
-
-func CompareDiagnostic(a, b *Diagnostic) int {
- if r := protocol.CompareRange(a.Range, b.Range); r != 0 {
- return r
- }
- if a.Source < b.Source {
- return -1
- }
- if a.Message < b.Message {
- return -1
- }
- if a.Message == b.Message {
- return 0
- }
- return 1
-}
-
-// FindPackageFromPos finds the first package containing pos in its
-// type-checked AST.
-func FindPackageFromPos(ctx context.Context, snapshot Snapshot, pos token.Pos) (Package, error) {
- tok := snapshot.FileSet().File(pos)
- if tok == nil {
- return nil, errors.Errorf("no file for pos %v", pos)
- }
- uri := span.URIFromPath(tok.Name())
- pkgs, err := snapshot.PackagesForFile(ctx, uri, TypecheckAll, true)
- if err != nil {
- return nil, err
- }
- // Only return the package if it actually type-checked the given position.
- for _, pkg := range pkgs {
- parsed, err := pkg.File(uri)
- if err != nil {
- return nil, err
- }
- if parsed == nil {
- continue
- }
- if parsed.Tok.Base() != tok.Base() {
- continue
- }
- return pkg, nil
- }
- return nil, errors.Errorf("no package for given file position")
-}
-
-// findFileInDeps finds uri in pkg or its dependencies.
-func findFileInDeps(pkg Package, uri span.URI) (*ParsedGoFile, Package, error) {
- queue := []Package{pkg}
- seen := make(map[string]bool)
-
- for len(queue) > 0 {
- pkg := queue[0]
- queue = queue[1:]
- seen[pkg.ID()] = true
-
- if pgf, err := pkg.File(uri); err == nil {
- return pgf, pkg, nil
- }
- for _, dep := range pkg.Imports() {
- if !seen[dep.ID()] {
- queue = append(queue, dep)
- }
- }
- }
- return nil, nil, errors.Errorf("no file for %s in package %s", uri, pkg.ID())
-}
-
-// ImportPath returns the unquoted import path of s,
-// or "" if the path is not properly quoted.
-func ImportPath(s *ast.ImportSpec) string {
- t, err := strconv.Unquote(s.Path.Value)
- if err != nil {
- return ""
- }
- return t
-}
-
-// NodeContains returns true if a node encloses a given position pos.
-func NodeContains(n ast.Node, pos token.Pos) bool {
- return n != nil && n.Pos() <= pos && pos <= n.End()
-}
-
-// CollectScopes returns all scopes in an ast path, ordered as innermost scope
-// first.
-func CollectScopes(info *types.Info, path []ast.Node, pos token.Pos) []*types.Scope {
- // scopes[i], where i<len(path), is the possibly nil Scope of path[i].
- var scopes []*types.Scope
- for _, n := range path {
- // Include *FuncType scope if pos is inside the function body.
- switch node := n.(type) {
- case *ast.FuncDecl:
- if node.Body != nil && NodeContains(node.Body, pos) {
- n = node.Type
- }
- case *ast.FuncLit:
- if node.Body != nil && NodeContains(node.Body, pos) {
- n = node.Type
- }
- }
- scopes = append(scopes, info.Scopes[n])
- }
- return scopes
-}
-
-// Qualifier returns a function that appropriately formats a types.PkgName
-// appearing in a *ast.File.
-func Qualifier(f *ast.File, pkg *types.Package, info *types.Info) types.Qualifier {
- // Construct mapping of import paths to their defined or implicit names.
- imports := make(map[*types.Package]string)
- for _, imp := range f.Imports {
- var obj types.Object
- if imp.Name != nil {
- obj = info.Defs[imp.Name]
- } else {
- obj = info.Implicits[imp]
- }
- if pkgname, ok := obj.(*types.PkgName); ok {
- imports[pkgname.Imported()] = pkgname.Name()
- }
- }
- // Define qualifier to replace full package paths with names of the imports.
- return func(p *types.Package) string {
- if p == pkg {
- return ""
- }
- if name, ok := imports[p]; ok {
- if name == "." {
- return ""
- }
- return name
- }
- return p.Name()
- }
-}
-
-// isDirective reports whether c is a comment directive.
-//
-// Copied and adapted from go/src/go/ast/ast.go.
-func isDirective(c string) bool {
- if len(c) < 3 {
- return false
- }
- if c[1] != '/' {
- return false
- }
- //-style comment (no newline at the end)
- c = c[2:]
- if len(c) == 0 {
- // empty line
- return false
- }
- // "//line " is a line directive.
- // (The // has been removed.)
- if strings.HasPrefix(c, "line ") {
- return true
- }
-
- // "//[a-z0-9]+:[a-z0-9]"
- // (The // has been removed.)
- colon := strings.Index(c, ":")
- if colon <= 0 || colon+1 >= len(c) {
- return false
- }
- for i := 0; i <= colon+1; i++ {
- if i == colon {
- continue
- }
- b := c[i]
- if !('a' <= b && b <= 'z' || '0' <= b && b <= '9') {
- return false
- }
- }
- return true
-}
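Some concrete inputs for the directive rule above (a sketch re-using a local copy of the check, since the deleted helper is unexported):

package main

import (
	"fmt"
	"strings"
)

// isDirective mirrors the rule above: "//line " comments and comments of the
// form "//word:arg" (lowercase letters or digits, no space after //) count.
func isDirective(c string) bool {
	if len(c) < 3 || c[1] != '/' {
		return false
	}
	c = c[2:]
	if strings.HasPrefix(c, "line ") {
		return true
	}
	colon := strings.Index(c, ":")
	if colon <= 0 || colon+1 >= len(c) {
		return false
	}
	for i := 0; i <= colon+1; i++ {
		if i == colon {
			continue
		}
		b := c[i]
		if !('a' <= b && b <= 'z' || '0' <= b && b <= '9') {
			return false
		}
	}
	return true
}

func main() {
	for _, c := range []string{
		"//go:generate stringer -type=Kind", // directive
		"//line foo.go:10",                  // directive
		"// an ordinary comment",            // not a directive
	} {
		fmt.Printf("%-37q %v\n", c, isDirective(c))
	}
}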
-
-// honorSymlinks toggles whether or not we consider symlinks when comparing
-// file or directory URIs.
-const honorSymlinks = false
-
-func CompareURI(left, right span.URI) int {
- if honorSymlinks {
- return span.CompareURI(left, right)
- }
- if left == right {
- return 0
- }
- if left < right {
- return -1
- }
- return 1
-}
-
-// InDir checks whether path is in the file tree rooted at dir.
-// InDir makes some effort to succeed even in the presence of symbolic links.
-//
-// Copied and slightly adjusted from go/src/cmd/go/internal/search/search.go.
-func InDir(dir, path string) bool {
- if inDirLex(dir, path) {
- return true
- }
- if !honorSymlinks {
- return false
- }
- xpath, err := filepath.EvalSymlinks(path)
- if err != nil || xpath == path {
- xpath = ""
- } else {
- if inDirLex(dir, xpath) {
- return true
- }
- }
-
- xdir, err := filepath.EvalSymlinks(dir)
- if err == nil && xdir != dir {
- if inDirLex(xdir, path) {
- return true
- }
- if xpath != "" {
- if inDirLex(xdir, xpath) {
- return true
- }
- }
- }
- return false
-}
-
-// inDirLex is like inDir but only checks the lexical form of the file names.
-// It does not consider symbolic links.
-//
-// Copied from go/src/cmd/go/internal/search/search.go.
-func inDirLex(dir, path string) bool {
- pv := strings.ToUpper(filepath.VolumeName(path))
- dv := strings.ToUpper(filepath.VolumeName(dir))
- path = path[len(pv):]
- dir = dir[len(dv):]
- switch {
- default:
- return false
- case pv != dv:
- return false
- case len(path) == len(dir):
- if path == dir {
- return true
- }
- return false
- case dir == "":
- return path != ""
- case len(path) > len(dir):
- if dir[len(dir)-1] == filepath.Separator {
- if path[:len(dir)] == dir {
- return path[len(dir):] != ""
- }
- return false
- }
- if path[len(dir)] == filepath.Separator && path[:len(dir)] == dir {
- if len(path) == len(dir)+1 {
- return true
- }
- return path[len(dir)+1:] != ""
- }
- return false
- }
-}
-
-// IsValidImport reports whether importPkgPath is importable by pkgPath.
-func IsValidImport(pkgPath, importPkgPath string) bool {
- i := strings.LastIndex(string(importPkgPath), "/internal/")
- if i == -1 {
- return true
- }
- if IsCommandLineArguments(string(pkgPath)) {
- return true
- }
- return strings.HasPrefix(string(pkgPath), string(importPkgPath[:i]))
-}
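The internal-package rule above in concrete terms (a sketch with a local copy of the check and made-up import paths):

package main

import (
	"fmt"
	"strings"
)

// isValidImport mirrors the rule above: a package under ".../internal/" is
// importable only from packages rooted at the same prefix (with an escape
// hatch for synthetic command-line-arguments packages).
func isValidImport(pkgPath, importPkgPath string) bool {
	i := strings.LastIndex(importPkgPath, "/internal/")
	if i == -1 {
		return true
	}
	if strings.Contains(pkgPath, "command-line-arguments") {
		return true
	}
	return strings.HasPrefix(pkgPath, importPkgPath[:i])
}

func main() {
	fmt.Println(isValidImport("example.com/a/b", "example.com/a/internal/x")) // true
	fmt.Println(isValidImport("example.com/c", "example.com/a/internal/x"))   // false
}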
-
-// IsCommandLineArguments reports whether a given value denotes
-// "command-line-arguments" package, which is a package with an unknown ID
-// created by the go command. It can have a test variant, which is why callers
-// should not check that a value equals "command-line-arguments" directly.
-func IsCommandLineArguments(s string) bool {
- return strings.Contains(s, "command-line-arguments")
-}
-
-// Offset returns tok.Offset(pos), but first checks that the pos is in range
-// for the given file.
-func Offset(tok *token.File, pos token.Pos) (int, error) {
- if !InRange(tok, pos) {
- return -1, fmt.Errorf("pos %v is not in range for file [%v:%v)", pos, tok.Base(), tok.Base()+tok.Size())
- }
- return tok.Offset(pos), nil
-}
-
-// Pos returns tok.Pos(offset), but first checks that the offset is valid for
-// the given file.
-func Pos(tok *token.File, offset int) (token.Pos, error) {
- if offset < 0 || offset > tok.Size() {
- return token.NoPos, fmt.Errorf("offset %v is not in range for file of size %v", offset, tok.Size())
- }
- return tok.Pos(offset), nil
-}
-
-// InRange reports whether the given position is in the given token.File.
-func InRange(tok *token.File, pos token.Pos) bool {
- size := tok.Pos(tok.Size())
- return int(pos) >= tok.Base() && pos <= size
-}
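A minimal sketch of the bounds-checked conversions, assuming fset and f come from parsing a Go file elsewhere:

	tok := fset.File(f.Pos())
	if off, err := Offset(tok, f.End()); err == nil {
		// off is a valid byte offset; converting back yields the same position.
		if pos, err := Pos(tok, off); err == nil {
			_ = pos == f.End() // true
		}
	}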
-
-// LineToRange creates a Range spanning start and end.
-func LineToRange(m *protocol.ColumnMapper, uri span.URI, start, end modfile.Position) (protocol.Range, error) {
- return ByteOffsetsToRange(m, uri, start.Byte, end.Byte)
-}
-
-// ByteOffsetsToRange creates a range spanning start and end.
-func ByteOffsetsToRange(m *protocol.ColumnMapper, uri span.URI, start, end int) (protocol.Range, error) {
- line, col, err := m.Converter.ToPosition(start)
- if err != nil {
- return protocol.Range{}, err
- }
- s := span.NewPoint(line, col, start)
- line, col, err = m.Converter.ToPosition(end)
- if err != nil {
- return protocol.Range{}, err
- }
- e := span.NewPoint(line, col, end)
- return m.Range(span.New(uri, s, e))
-}
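As a hedged usage sketch, converting the extent of a require statement in a parsed go.mod into an LSP range (pm is an assumed *ParsedModule, req an assumed *modfile.Require):

	rng, err := LineToRange(pm.Mapper, pm.URI, req.Syntax.Start, req.Syntax.End)
	if err != nil {
		return err
	}
	// rng can now be attached to a protocol.Diagnostic or a protocol.TextEdit.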
diff --git a/internal/lsp/source/view.go b/internal/lsp/source/view.go
deleted file mode 100644
index 4d7d411e0..000000000
--- a/internal/lsp/source/view.go
+++ /dev/null
@@ -1,696 +0,0 @@
-// Copyright 2018 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-package source
-
-import (
- "bytes"
- "context"
- "fmt"
- "go/ast"
- "go/scanner"
- "go/token"
- "go/types"
- "io"
- "strings"
-
- "golang.org/x/mod/modfile"
- "golang.org/x/mod/module"
- "golang.org/x/tools/go/analysis"
- "golang.org/x/tools/go/packages"
- "golang.org/x/tools/internal/gocommand"
- "golang.org/x/tools/internal/imports"
- "golang.org/x/tools/internal/lsp/progress"
- "golang.org/x/tools/internal/lsp/protocol"
- "golang.org/x/tools/internal/span"
- errors "golang.org/x/xerrors"
-)
-
-// Snapshot represents the current state for the given view.
-type Snapshot interface {
- ID() uint64
-
- // View returns the View associated with this snapshot.
- View() View
-
- // BackgroundContext returns a context used for all background processing
- // on behalf of this snapshot.
- BackgroundContext() context.Context
-
-	// FileSet returns the FileSet used to parse all the Go files in this snapshot.
- FileSet() *token.FileSet
-
-	// ValidBuildConfiguration reports whether the user's workspace has a
-	// valid build configuration. In particular, it is false if the workspace
-	// is both outside of a module and outside of GOPATH.
- ValidBuildConfiguration() bool
-
- // WriteEnv writes the view-specific environment to the io.Writer.
- WriteEnv(ctx context.Context, w io.Writer) error
-
- // FindFile returns the FileHandle for the given URI, if it is already
- // in the given snapshot.
- FindFile(uri span.URI) VersionedFileHandle
-
- // GetVersionedFile returns the VersionedFileHandle for a given URI,
- // initializing it if it is not already part of the snapshot.
- GetVersionedFile(ctx context.Context, uri span.URI) (VersionedFileHandle, error)
-
- // GetFile returns the FileHandle for a given URI, initializing it if it is
- // not already part of the snapshot.
- GetFile(ctx context.Context, uri span.URI) (FileHandle, error)
-
- // AwaitInitialized waits until the snapshot's view is initialized.
- AwaitInitialized(ctx context.Context)
-
-	// IsOpen reports whether the editor currently has the given file open.
- IsOpen(uri span.URI) bool
-
- // IgnoredFile reports if a file would be ignored by a `go list` of the whole
- // workspace.
- IgnoredFile(uri span.URI) bool
-
-	// Templates returns the .tmpl files known to the snapshot.
- Templates() map[span.URI]VersionedFileHandle
-
- // ParseGo returns the parsed AST for the file.
- // If the file is not available, returns nil and an error.
- ParseGo(ctx context.Context, fh FileHandle, mode ParseMode) (*ParsedGoFile, error)
-
-	// PosToField returns the *ast.Field at the given position, using a cache
-	// of *ast.Fields keyed by token.Pos. This allows us to quickly find the
-	// *ast.Field corresponding to a given *types.Var.
-	// We must refer to the AST to render type aliases properly when
-	// formatting signatures and other types.
- PosToField(ctx context.Context, pkg Package, pos token.Pos) (*ast.Field, error)
-
- // PosToDecl maps certain objects' positions to their surrounding
- // ast.Decl. This mapping is used when building the documentation
- // string for the objects.
- PosToDecl(ctx context.Context, pkg Package, pos token.Pos) (ast.Decl, error)
-
- // DiagnosePackage returns basic diagnostics, including list, parse, and type errors
- // for pkg, grouped by file.
- DiagnosePackage(ctx context.Context, pkg Package) (map[span.URI][]*Diagnostic, error)
-
- // Analyze runs the analyses for the given package at this snapshot.
- Analyze(ctx context.Context, pkgID string, analyzers []*Analyzer) ([]*Diagnostic, error)
-
- // RunGoCommandPiped runs the given `go` command, writing its output
- // to stdout and stderr. Verb, Args, and WorkingDir must be specified.
- RunGoCommandPiped(ctx context.Context, mode InvocationFlags, inv *gocommand.Invocation, stdout, stderr io.Writer) error
-
- // RunGoCommandDirect runs the given `go` command. Verb, Args, and
- // WorkingDir must be specified.
- RunGoCommandDirect(ctx context.Context, mode InvocationFlags, inv *gocommand.Invocation) (*bytes.Buffer, error)
-
-	// RunGoCommands runs a series of `go` commands that update the go.mod
-	// and go.sum files for wd, and returns their updated contents.
- RunGoCommands(ctx context.Context, allowNetwork bool, wd string, run func(invoke func(...string) (*bytes.Buffer, error)) error) (bool, []byte, []byte, error)
-
- // RunProcessEnvFunc runs fn with the process env for this snapshot's view.
- // Note: the process env contains cached module and filesystem state.
- RunProcessEnvFunc(ctx context.Context, fn func(*imports.Options) error) error
-
- // ModFiles are the go.mod files enclosed in the snapshot's view and known
- // to the snapshot.
- ModFiles() []span.URI
-
- // ParseMod is used to parse go.mod files.
- ParseMod(ctx context.Context, fh FileHandle) (*ParsedModule, error)
-
- // ModWhy returns the results of `go mod why` for the module specified by
- // the given go.mod file.
- ModWhy(ctx context.Context, fh FileHandle) (map[string]string, error)
-
- // ModTidy returns the results of `go mod tidy` for the module specified by
- // the given go.mod file.
- ModTidy(ctx context.Context, pm *ParsedModule) (*TidiedModule, error)
-
- // GoModForFile returns the URI of the go.mod file for the given URI.
- GoModForFile(uri span.URI) span.URI
-
- // WorkFile, if non-empty, is the go.work file for the workspace.
- WorkFile() span.URI
-
- // ParseWork is used to parse go.work files.
- ParseWork(ctx context.Context, fh FileHandle) (*ParsedWorkFile, error)
-
- // BuiltinFile returns information about the special builtin package.
- BuiltinFile(ctx context.Context) (*ParsedGoFile, error)
-
- // IsBuiltin reports whether uri is part of the builtin package.
- IsBuiltin(ctx context.Context, uri span.URI) bool
-
- // PackagesForFile returns the packages that this file belongs to, checked
- // in mode.
- PackagesForFile(ctx context.Context, uri span.URI, mode TypecheckMode, includeTestVariants bool) ([]Package, error)
-
- // PackageForFile returns a single package that this file belongs to,
- // checked in mode and filtered by the package policy.
- PackageForFile(ctx context.Context, uri span.URI, mode TypecheckMode, selectPackage PackageFilter) (Package, error)
-
-	// GetReverseDependencies returns the reverse dependencies of the package
-	// with the given ID, checked in TypecheckWorkspace mode.
- GetReverseDependencies(ctx context.Context, id string) ([]Package, error)
-
- // CachedImportPaths returns all the imported packages loaded in this
- // snapshot, indexed by their import path and checked in TypecheckWorkspace
- // mode.
- CachedImportPaths(ctx context.Context) (map[string]Package, error)
-
- // KnownPackages returns all the packages loaded in this snapshot, checked
- // in TypecheckWorkspace mode.
- KnownPackages(ctx context.Context) ([]Package, error)
-
- // ActivePackages returns the packages considered 'active' in the workspace.
- //
- // In normal memory mode, this is all workspace packages. In degraded memory
- // mode, this is just the reverse transitive closure of open packages.
- ActivePackages(ctx context.Context) ([]Package, error)
-
- // Symbols returns all symbols in the snapshot.
- Symbols(ctx context.Context) (map[span.URI][]Symbol, error)
-
- // Metadata returns package metadata associated with the given file URI.
- MetadataForFile(ctx context.Context, uri span.URI) ([]Metadata, error)
-
- // GetCriticalError returns any critical errors in the workspace.
- GetCriticalError(ctx context.Context) *CriticalError
-
- // BuildGoplsMod generates a go.mod file for all modules in the workspace.
- // It bypasses any existing gopls.mod.
- BuildGoplsMod(ctx context.Context) (*modfile.File, error)
-}
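A hedged sketch of a typical call sequence against this interface; ctx, snapshot, and uri are assumed to be supplied by the caller:

	fh, err := snapshot.GetFile(ctx, uri)
	if err != nil {
		return err
	}
	pgf, err := snapshot.ParseGo(ctx, fh, ParseFull)
	if err != nil {
		return err
	}
	// pgf.File is the *ast.File, pgf.Tok its *token.File, and pgf.Mapper the
	// column mapper used to convert token positions into LSP ranges.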
-
-// PackageFilter selects which package to use from a set of packages
-// containing a given file.
-type PackageFilter int
-
-const (
- // NarrowestPackage picks the "narrowest" package for a given file.
- // By "narrowest" package, we mean the package with the fewest number of
- // files that includes the given file. This solves the problem of test
- // variants, as the test will have more files than the non-test package.
- NarrowestPackage PackageFilter = iota
-
- // WidestPackage returns the Package containing the most files.
- // This is useful for something like diagnostics, where we'd prefer to
- // offer diagnostics for as many files as possible.
- WidestPackage
-)
-
-// InvocationFlags represents the settings of a particular go command invocation.
-// It is a mode, plus a set of flag bits.
-type InvocationFlags int
-
-const (
- // Normal is appropriate for commands that might be run by a user and don't
- // deliberately modify go.mod files, e.g. `go test`.
- Normal InvocationFlags = iota
- // WriteTemporaryModFile is for commands that need information from a
- // modified version of the user's go.mod file, e.g. `go mod tidy` used to
- // generate diagnostics.
- WriteTemporaryModFile
- // LoadWorkspace is for packages.Load, and other operations that should
- // consider the whole workspace at once.
- LoadWorkspace
-
- // AllowNetwork is a flag bit that indicates the invocation should be
- // allowed to access the network.
- AllowNetwork InvocationFlags = 1 << 10
-)
-
-func (m InvocationFlags) Mode() InvocationFlags {
- return m & (AllowNetwork - 1)
-}
-
-func (m InvocationFlags) AllowNetwork() bool {
- return m&AllowNetwork != 0
-}
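Because AllowNetwork is a high flag bit, a mode and the network flag can be OR'd together and separated again; a small sketch:

	flags := LoadWorkspace | AllowNetwork
	_ = flags.Mode() == LoadWorkspace // true: the flag bits are masked off
	_ = flags.AllowNetwork()          // true
	_ = Normal.AllowNetwork()         // false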
-
-// View represents a single workspace.
-// This is the level at which we maintain configuration like working directory
-// and build tags.
-type View interface {
- // Name returns the name this view was constructed with.
- Name() string
-
- // Folder returns the folder with which this view was created.
- Folder() span.URI
-
- // Shutdown closes this view, and detaches it from its session.
- Shutdown(ctx context.Context)
-
- // Options returns a copy of the Options for this view.
- Options() *Options
-
- // SetOptions sets the options of this view to new values.
- // Calling this may cause the view to be invalidated and a replacement view
- // added to the session. If so the new view will be returned, otherwise the
- // original one will be.
- SetOptions(context.Context, *Options) (View, error)
-
- // Snapshot returns the current snapshot for the view.
- Snapshot(ctx context.Context) (Snapshot, func())
-
- // Rebuild rebuilds the current view, replacing the original view in its session.
- Rebuild(ctx context.Context) (Snapshot, func(), error)
-
- // IsGoPrivatePath reports whether target is a private import path, as identified
- // by the GOPRIVATE environment variable.
- IsGoPrivatePath(path string) bool
-
- // ModuleUpgrades returns known module upgrades.
- ModuleUpgrades() map[string]string
-
- // RegisterModuleUpgrades registers that upgrades exist for the given modules.
- RegisterModuleUpgrades(upgrades map[string]string)
-
-	// FileKind returns the kind of the given file.
- FileKind(FileHandle) FileKind
-}
-
-// A FileSource maps uris to FileHandles. This abstraction exists both for
-// testability, and so that algorithms can be run equally on session and
-// snapshot files.
-type FileSource interface {
- // GetFile returns the FileHandle for a given URI.
- GetFile(ctx context.Context, uri span.URI) (FileHandle, error)
-}
-
-// A ParsedGoFile contains the results of parsing a Go file.
-type ParsedGoFile struct {
- URI span.URI
- Mode ParseMode
- File *ast.File
- Tok *token.File
- // Source code used to build the AST. It may be different from the
- // actual content of the file if we have fixed the AST.
- Src []byte
- Mapper *protocol.ColumnMapper
- ParseErr scanner.ErrorList
-}
-
-// A ParsedModule contains the results of parsing a go.mod file.
-type ParsedModule struct {
- URI span.URI
- File *modfile.File
- Mapper *protocol.ColumnMapper
- ParseErrors []*Diagnostic
-}
-
-// A ParsedWorkFile contains the results of parsing a go.work file.
-type ParsedWorkFile struct {
- URI span.URI
- File *modfile.WorkFile
- Mapper *protocol.ColumnMapper
- ParseErrors []*Diagnostic
-}
-
-// A TidiedModule contains the results of running `go mod tidy` on a module.
-type TidiedModule struct {
- // Diagnostics representing changes made by `go mod tidy`.
- Diagnostics []*Diagnostic
- // The bytes of the go.mod file after it was tidied.
- TidiedContent []byte
-}
-
-// Metadata represents package metadata retrieved from go/packages.
-type Metadata interface {
- // PackageName is the package name.
- PackageName() string
-
- // PackagePath is the package path.
- PackagePath() string
-
- // ModuleInfo returns the go/packages module information for the given package.
- ModuleInfo() *packages.Module
-}
-
-// Session represents a single connection from a client.
-// This is the level at which things like open files are maintained on behalf
-// of the client.
-// A session may have many active views at any given time.
-type Session interface {
- // ID returns the unique identifier for this session on this server.
- ID() string
-	// NewView creates a new View with the given name, rooted at the given
-	// folder, returning it and its first snapshot.
- NewView(ctx context.Context, name string, folder span.URI, options *Options) (View, Snapshot, func(), error)
-
- // Cache returns the cache that created this session, for debugging only.
- Cache() interface{}
-
- // View returns a view with a matching name, if the session has one.
- View(name string) View
-
- // ViewOf returns a view corresponding to the given URI.
- ViewOf(uri span.URI) (View, error)
-
- // Views returns the set of active views built by this session.
- Views() []View
-
- // Shutdown the session and all views it has created.
- Shutdown(ctx context.Context)
-
- // GetFile returns a handle for the specified file.
- GetFile(ctx context.Context, uri span.URI) (FileHandle, error)
-
- // DidModifyFile reports a file modification to the session. It returns
- // the new snapshots after the modifications have been applied, paired with
- // the affected file URIs for those snapshots.
- DidModifyFiles(ctx context.Context, changes []FileModification) (map[Snapshot][]span.URI, []func(), error)
-
- // ExpandModificationsToDirectories returns the set of changes with the
- // directory changes removed and expanded to include all of the files in
- // the directory.
- ExpandModificationsToDirectories(ctx context.Context, changes []FileModification) []FileModification
-
- // Overlays returns a slice of file overlays for the session.
- Overlays() []Overlay
-
- // Options returns a copy of the SessionOptions for this session.
- Options() *Options
-
- // SetOptions sets the options of this session to new values.
- SetOptions(*Options)
-
- // FileWatchingGlobPatterns returns glob patterns to watch every directory
- // known by the view. For views within a module, this is the module root,
- // any directory in the module root, and any replace targets.
- FileWatchingGlobPatterns(ctx context.Context) map[string]struct{}
-
- // SetProgressTracker sets the progress tracker for the session.
- SetProgressTracker(tracker *progress.Tracker)
-}
-
-var ErrViewExists = errors.New("view already exists for session")
-
-// Overlay is the type for a file held in memory on a session.
-type Overlay interface {
- Kind() FileKind
- VersionedFileHandle
-}
-
-// FileModification represents a modification to a file.
-type FileModification struct {
- URI span.URI
- Action FileAction
-
- // OnDisk is true if a watched file is changed on disk.
- // If true, Version will be -1 and Text will be nil.
- OnDisk bool
-
- // Version will be -1 and Text will be nil when they are not supplied,
- // specifically on textDocument/didClose and for on-disk changes.
- Version int32
- Text []byte
-
- // LanguageID is only sent from the language client on textDocument/didOpen.
- LanguageID string
-}
-
-type FileAction int
-
-const (
- UnknownFileAction = FileAction(iota)
- Open
- Change
- Close
- Save
- Create
- Delete
- InvalidateMetadata
-)
-
-func (a FileAction) String() string {
- switch a {
- case Open:
- return "Open"
- case Change:
- return "Change"
- case Close:
- return "Close"
- case Save:
- return "Save"
- case Create:
- return "Create"
- case Delete:
- return "Delete"
- case InvalidateMetadata:
- return "InvalidateMetadata"
- default:
- return "Unknown"
- }
-}
-
-var ErrTmpModfileUnsupported = errors.New("-modfile is unsupported for this Go version")
-var ErrNoModOnDisk = errors.New("go.mod file is not on disk")
-
-func IsNonFatalGoModError(err error) bool {
- return err == ErrTmpModfileUnsupported || err == ErrNoModOnDisk
-}
-
-// ParseMode controls the content of the AST produced when parsing a source file.
-type ParseMode int
-
-const (
- // ParseHeader specifies that the main package declaration and imports are needed.
- // This is the mode used when attempting to examine the package graph structure.
- ParseHeader ParseMode = iota
-
- // ParseExported specifies that the package is used only as a dependency,
- // and only its exported declarations are needed. More may be included if
- // necessary to avoid type errors.
- ParseExported
-
- // ParseFull specifies the full AST is needed.
- // This is used for files of direct interest where the entire contents must
- // be considered.
- ParseFull
-)
-
-// TypecheckMode controls what kind of parsing should be done (see ParseMode)
-// while type checking a package.
-type TypecheckMode int
-
-const (
- // Invalid default value.
- TypecheckUnknown TypecheckMode = iota
- // TypecheckFull means to use ParseFull.
- TypecheckFull
- // TypecheckWorkspace means to use ParseFull for workspace packages, and
- // ParseExported for others.
- TypecheckWorkspace
- // TypecheckAll means ParseFull for workspace packages, and both Full and
- // Exported for others. Only valid for some functions.
- TypecheckAll
-)
-
-type VersionedFileHandle interface {
- FileHandle
- Version() int32
- Session() string
-
-	// VersionedFileIdentity returns the version identity of the file.
- VersionedFileIdentity() VersionedFileIdentity
-}
-
-type VersionedFileIdentity struct {
- URI span.URI
-
- // SessionID is the ID of the LSP session.
- SessionID string
-
- // Version is the version of the file, as specified by the client. It should
- // only be set in combination with SessionID.
- Version int32
-}
-
-// FileHandle represents a handle to a specific version of a single file.
-type FileHandle interface {
- URI() span.URI
-
- // FileIdentity returns a FileIdentity for the file, even if there was an
- // error reading it.
- FileIdentity() FileIdentity
- // Read reads the contents of a file.
- // If the file is not available, returns a nil slice and an error.
- Read() ([]byte, error)
- // Saved reports whether the file has the same content on disk.
- Saved() bool
-}
-
-// FileIdentity uniquely identifies a file at a version from a FileSystem.
-type FileIdentity struct {
- URI span.URI
-
-	// Hash is a unique identifier for the file's content.
- Hash string
-}
-
-func (id FileIdentity) String() string {
- return fmt.Sprintf("%s%s", id.URI, id.Hash)
-}
-
-// FileKind describes the kind of the file in question.
-// It can be one of Go, Mod, Sum, Tmpl, or Work.
-type FileKind int
-
-const (
- // UnknownKind is a file type we don't know about.
- UnknownKind = FileKind(iota)
-
- // Go is a normal go source file.
- Go
- // Mod is a go.mod file.
- Mod
- // Sum is a go.sum file.
- Sum
- // Tmpl is a template file.
- Tmpl
- // Work is a go.work file.
- Work
-)
-
-// Analyzer represents a go/analysis analyzer with some boolean properties
-// that let the user know how to use the analyzer.
-type Analyzer struct {
- Analyzer *analysis.Analyzer
-
- // Enabled reports whether the analyzer is enabled. This value can be
- // configured per-analysis in user settings. For staticcheck analyzers,
- // the value of the Staticcheck setting overrides this field.
- Enabled bool
-
-	// Fix is the name used to invoke the suggested fixes for the analyzer.
-	// It is non-empty if we expect this analyzer to
- // provide its fix separately from its diagnostics. That is, we should apply
- // the analyzer's suggested fixes through a Command, not a TextEdit.
- Fix string
-
- // ActionKind is the kind of code action this analyzer produces. If
- // unspecified the type defaults to quickfix.
- ActionKind []protocol.CodeActionKind
-
- // Severity is the severity set for diagnostics reported by this
- // analyzer. If left unset it defaults to Warning.
- Severity protocol.DiagnosticSeverity
-}
-
-func (a Analyzer) IsEnabled(view View) bool {
- // Staticcheck analyzers can only be enabled when staticcheck is on.
- if _, ok := view.Options().StaticcheckAnalyzers[a.Analyzer.Name]; ok {
- if !view.Options().Staticcheck {
- return false
- }
- }
- if enabled, ok := view.Options().Analyses[a.Analyzer.Name]; ok {
- return enabled
- }
- return a.Enabled
-}
-
-// Package represents a Go package that has been type-checked. It maintains
-// only the relevant fields of a *go/packages.Package.
-type Package interface {
- ID() string
- Name() string
- PkgPath() string
- CompiledGoFiles() []*ParsedGoFile
- File(uri span.URI) (*ParsedGoFile, error)
- GetSyntax() []*ast.File
- GetTypes() *types.Package
- GetTypesInfo() *types.Info
- GetTypesSizes() types.Sizes
- IsIllTyped() bool
- ForTest() string
- GetImport(pkgPath string) (Package, error)
- MissingDependencies() []string
- Imports() []Package
- Version() *module.Version
- HasListOrParseErrors() bool
- HasTypeErrors() bool
- ParseMode() ParseMode
-}
-
-type CriticalError struct {
- // MainError is the primary error. Must be non-nil.
- MainError error
- // DiagList contains any supplemental (structured) diagnostics.
- DiagList []*Diagnostic
-}
-
-// A Diagnostic corresponds to an LSP Diagnostic.
-// https://microsoft.github.io/language-server-protocol/specification#diagnostic
-type Diagnostic struct {
- URI span.URI
- Range protocol.Range
- Severity protocol.DiagnosticSeverity
- Code string
- CodeHref string
-
- // Source is a human-readable description of the source of the error.
- // Diagnostics generated by an analysis.Analyzer set it to Analyzer.Name.
- Source DiagnosticSource
-
- Message string
-
- Tags []protocol.DiagnosticTag
- Related []RelatedInformation
-
- // Fields below are used internally to generate quick fixes. They aren't
- // part of the LSP spec and don't leave the server.
- SuggestedFixes []SuggestedFix
- Analyzer *Analyzer
-}
-
-type DiagnosticSource string
-
-const (
- UnknownError DiagnosticSource = "<Unknown source>"
- ListError DiagnosticSource = "go list"
- ParseError DiagnosticSource = "syntax"
- TypeError DiagnosticSource = "compiler"
- ModTidyError DiagnosticSource = "go mod tidy"
- OptimizationDetailsError DiagnosticSource = "optimizer details"
- UpgradeNotification DiagnosticSource = "upgrade available"
- TemplateError DiagnosticSource = "template"
- WorkFileError DiagnosticSource = "go.work file"
-)
-
-func AnalyzerErrorKind(name string) DiagnosticSource {
- return DiagnosticSource(name)
-}
-
-var (
- PackagesLoadError = errors.New("packages.Load error")
-)
-
-// workspaceModuleVersion is the nonexistent pseudoversion suffix used in the
-// construction of the workspace module. The exported helpers below let us
-// make sure not to show this version to end users in error messages, to
-// avoid confusion.
-// The major version is not included, as that depends on the module path.
-//
-// If workspace module A depends on workspace module B, we need our
-// nonexistent version to be greater than the version A mentions.
-// Otherwise, the go command will try to update to that version. Use a very
-// high minor version to make that more likely.
-const workspaceModuleVersion = ".9999999.0-goplsworkspace"
-
-func IsWorkspaceModuleVersion(version string) bool {
- return strings.HasSuffix(version, workspaceModuleVersion)
-}
-
-func WorkspaceModuleVersion(majorVersion string) string {
- // Use the highest compatible major version to avoid unwanted upgrades.
- // See the comment on workspaceModuleVersion.
- if majorVersion == "v0" {
- majorVersion = "v1"
- }
- return majorVersion + workspaceModuleVersion
-}
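A few concrete values these helpers produce, derived from the code above:

	WorkspaceModuleVersion("v0")                            // "v1.9999999.0-goplsworkspace"
	WorkspaceModuleVersion("v2")                            // "v2.9999999.0-goplsworkspace"
	IsWorkspaceModuleVersion("v1.9999999.0-goplsworkspace") // true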
diff --git a/internal/lsp/source/workspace_symbol.go b/internal/lsp/source/workspace_symbol.go
deleted file mode 100644
index d9257c983..000000000
--- a/internal/lsp/source/workspace_symbol.go
+++ /dev/null
@@ -1,593 +0,0 @@
-// Copyright 2020 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-package source
-
-import (
- "context"
- "fmt"
- "go/types"
- "path/filepath"
- "runtime"
- "sort"
- "strings"
- "unicode"
-
- "golang.org/x/tools/internal/event"
- "golang.org/x/tools/internal/lsp/fuzzy"
- "golang.org/x/tools/internal/lsp/protocol"
- "golang.org/x/tools/internal/span"
-)
-
-// Symbol holds a precomputed symbol value. Note: we avoid using the
-// protocol.SymbolInformation struct here in order to reduce the size of each
-// symbol.
-type Symbol struct {
- Name string
- Kind protocol.SymbolKind
- Range protocol.Range
-}
-
-// maxSymbols defines the maximum number of symbol results that should ever be
-// sent in response to a client.
-const maxSymbols = 100
-
-// WorkspaceSymbols matches symbols across all views using the given query,
-// according to the match semantics parameterized by matcherType and style.
-//
-// The workspace symbol method is defined in the spec as follows:
-//
-// The workspace symbol request is sent from the client to the server to
-// list project-wide symbols matching the query string.
-//
-// It is unclear what "project-wide" means here, but since the parameters of
-// workspace/symbol do not include any workspace identifier, we assume that
-// "project-wide" means "across all workspaces". Hence WorkspaceSymbols
-// receives the views []View.
-//
-// However, it then becomes unclear what it would mean to call WorkspaceSymbols
-// with a different configured SymbolMatcher per View. Therefore we assume that
-// Session level configuration will define the SymbolMatcher to be used for the
-// WorkspaceSymbols method.
-func WorkspaceSymbols(ctx context.Context, matcherType SymbolMatcher, style SymbolStyle, views []View, query string) ([]protocol.SymbolInformation, error) {
- ctx, done := event.Start(ctx, "source.WorkspaceSymbols")
- defer done()
- if query == "" {
- return nil, nil
- }
- sc := newSymbolCollector(matcherType, style, query)
- return sc.walk(ctx, views)
-}
-
-// A matcherFunc returns the index and score of a symbol match.
-//
-// See the comment for symbolCollector for more information.
-type matcherFunc func(chunks []string) (int, float64)
-
-// A symbolizer returns the best symbol match for a name with pkg, according to
-// some heuristic. The match is returned as a slice of logical name pieces
-// together with its score; for example, for myType.field the pieces may be
-// either []string{"myType.field"} or []string{"myType.", "field"}.
-//
-// See the comment for symbolCollector for more information.
-type symbolizer func(name string, pkg Metadata, m matcherFunc) ([]string, float64)
-
-func fullyQualifiedSymbolMatch(name string, pkg Metadata, matcher matcherFunc) ([]string, float64) {
- _, score := dynamicSymbolMatch(name, pkg, matcher)
- if score > 0 {
- return []string{pkg.PackagePath(), ".", name}, score
- }
- return nil, 0
-}
-
-func dynamicSymbolMatch(name string, pkg Metadata, matcher matcherFunc) ([]string, float64) {
- var score float64
-
- endsInPkgName := strings.HasSuffix(pkg.PackagePath(), pkg.PackageName())
-
- // If the package path does not end in the package name, we need to check the
- // package-qualified symbol as an extra pass first.
- if !endsInPkgName {
- pkgQualified := []string{pkg.PackageName(), ".", name}
- idx, score := matcher(pkgQualified)
- nameStart := len(pkg.PackageName()) + 1
- if score > 0 {
- // If our match is contained entirely within the unqualified portion,
- // just return that.
- if idx >= nameStart {
- return []string{name}, score
- }
- // Lower the score for matches that include the package name.
- return pkgQualified, score * 0.8
- }
- }
-
- // Now try matching the fully qualified symbol.
- fullyQualified := []string{pkg.PackagePath(), ".", name}
- idx, score := matcher(fullyQualified)
-
- // As above, check if we matched just the unqualified symbol name.
- nameStart := len(pkg.PackagePath()) + 1
- if idx >= nameStart {
- return []string{name}, score
- }
-
- // If our package path ends in the package name, we'll have skipped the
- // initial pass above, so check if we matched just the package-qualified
- // name.
- if endsInPkgName && idx >= 0 {
- pkgStart := len(pkg.PackagePath()) - len(pkg.PackageName())
- if idx >= pkgStart {
- return []string{pkg.PackageName(), ".", name}, score
- }
- }
-
- // Our match was not contained within the unqualified or package qualified
- // symbol. Return the fully qualified symbol but discount the score.
- return fullyQualified, score * 0.6
-}
-
-func packageSymbolMatch(name string, pkg Metadata, matcher matcherFunc) ([]string, float64) {
- qualified := []string{pkg.PackageName(), ".", name}
- if _, s := matcher(qualified); s > 0 {
- return qualified, s
- }
- return nil, 0
-}
-
-// symbolCollector holds context as we walk Packages, gathering symbols that
-// match a given query.
-//
-// How we match symbols is parameterized by two function types:
-// * A matcherFunc determines how well a string symbol matches a query. It
-// returns a non-negative score indicating the quality of the match. A score
-// of zero indicates no match.
-// * A symbolizer determines how we extract the symbol for an object. This
-// enables the 'symbolStyle' configuration option.
-type symbolCollector struct {
- // These types parameterize the symbol-matching pass.
- matchers []matcherFunc
- symbolizer symbolizer
-
- symbolStore
-}
-
-func newSymbolCollector(matcher SymbolMatcher, style SymbolStyle, query string) *symbolCollector {
- var s symbolizer
- switch style {
- case DynamicSymbols:
- s = dynamicSymbolMatch
- case FullyQualifiedSymbols:
- s = fullyQualifiedSymbolMatch
- case PackageQualifiedSymbols:
- s = packageSymbolMatch
- default:
- panic(fmt.Errorf("unknown symbol style: %v", style))
- }
- sc := &symbolCollector{symbolizer: s}
- sc.matchers = make([]matcherFunc, runtime.GOMAXPROCS(-1))
- for i := range sc.matchers {
- sc.matchers[i] = buildMatcher(matcher, query)
- }
- return sc
-}
-
-func buildMatcher(matcher SymbolMatcher, query string) matcherFunc {
- switch matcher {
- case SymbolFuzzy:
- return parseQuery(query, newFuzzyMatcher)
- case SymbolFastFuzzy:
- return parseQuery(query, func(query string) matcherFunc {
- return fuzzy.NewSymbolMatcher(query).Match
- })
- case SymbolCaseSensitive:
- return matchExact(query)
- case SymbolCaseInsensitive:
- q := strings.ToLower(query)
- exact := matchExact(q)
- wrapper := []string{""}
- return func(chunks []string) (int, float64) {
- s := strings.Join(chunks, "")
- wrapper[0] = strings.ToLower(s)
- return exact(wrapper)
- }
- }
- panic(fmt.Errorf("unknown symbol matcher: %v", matcher))
-}
-
-func newFuzzyMatcher(query string) matcherFunc {
- fm := fuzzy.NewMatcher(query)
- return func(chunks []string) (int, float64) {
- score := float64(fm.ScoreChunks(chunks))
- ranges := fm.MatchedRanges()
- if len(ranges) > 0 {
- return ranges[0], score
- }
- return -1, score
- }
-}
-
-// parseQuery parses a field-separated symbol query, extracting the special
-// characters listed below, and returns a matcherFunc corresponding to the AND
-// of all field queries.
-//
-// Special characters:
-// ^ match exact prefix
-// $ match exact suffix
-// ' match exact
-//
-// In all three of these special queries, matches are 'smart-cased', meaning
-// they are case sensitive if the symbol query contains any upper-case
-// characters, and case insensitive otherwise.
-func parseQuery(q string, newMatcher func(string) matcherFunc) matcherFunc {
- fields := strings.Fields(q)
- if len(fields) == 0 {
- return func([]string) (int, float64) { return -1, 0 }
- }
- var funcs []matcherFunc
- for _, field := range fields {
- var f matcherFunc
- switch {
- case strings.HasPrefix(field, "^"):
- prefix := field[1:]
- f = smartCase(prefix, func(chunks []string) (int, float64) {
- s := strings.Join(chunks, "")
- if strings.HasPrefix(s, prefix) {
- return 0, 1
- }
- return -1, 0
- })
- case strings.HasPrefix(field, "'"):
- exact := field[1:]
- f = smartCase(exact, matchExact(exact))
- case strings.HasSuffix(field, "$"):
- suffix := field[0 : len(field)-1]
- f = smartCase(suffix, func(chunks []string) (int, float64) {
- s := strings.Join(chunks, "")
- if strings.HasSuffix(s, suffix) {
- return len(s) - len(suffix), 1
- }
- return -1, 0
- })
- default:
- f = newMatcher(field)
- }
- funcs = append(funcs, f)
- }
- if len(funcs) == 1 {
- return funcs[0]
- }
- return comboMatcher(funcs).match
-}
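A hedged sketch of how a multi-field query combines; the symbol strings are invented:

	match := parseQuery("^Foo bar$", newFuzzyMatcher)
	if _, score := match([]string{"FooHelperBar"}); score > 0 {
		// Starts with "Foo" (case-sensitive, since the field has an upper-case
		// rune) AND ends with "bar" (case-insensitive, all lower-case field).
	}
	if _, score := match([]string{"fooHelperBar"}); score > 0 {
		// Not reached: "^Foo" is case-sensitive and "fooHelperBar" fails it.
	}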
-
-func matchExact(exact string) matcherFunc {
- return func(chunks []string) (int, float64) {
- s := strings.Join(chunks, "")
- if idx := strings.LastIndex(s, exact); idx >= 0 {
- return idx, 1
- }
- return -1, 0
- }
-}
-
-// smartCase returns a matcherFunc that is case-sensitive if q contains any
-// upper-case characters, and case-insensitive otherwise.
-func smartCase(q string, m matcherFunc) matcherFunc {
- insensitive := strings.ToLower(q) == q
- wrapper := []string{""}
- return func(chunks []string) (int, float64) {
- s := strings.Join(chunks, "")
- if insensitive {
- s = strings.ToLower(s)
- }
- wrapper[0] = s
- return m(wrapper)
- }
-}
-
-type comboMatcher []matcherFunc
-
-func (c comboMatcher) match(chunks []string) (int, float64) {
- score := 1.0
- first := 0
- for _, f := range c {
- idx, s := f(chunks)
- if idx < first {
- first = idx
- }
- score *= s
- }
- return first, score
-}
-
-func (sc *symbolCollector) walk(ctx context.Context, views []View) ([]protocol.SymbolInformation, error) {
- // Use the root view URIs for determining (lexically) whether a uri is in any
- // open workspace.
- var roots []string
- for _, v := range views {
- roots = append(roots, strings.TrimRight(string(v.Folder()), "/"))
- }
-
- results := make(chan *symbolStore)
- matcherlen := len(sc.matchers)
- files := make(map[span.URI]symbolFile)
-
- for _, v := range views {
- snapshot, release := v.Snapshot(ctx)
- defer release()
- psyms, err := snapshot.Symbols(ctx)
- if err != nil {
- return nil, err
- }
-
- filters := v.Options().DirectoryFilters
- folder := filepath.ToSlash(v.Folder().Filename())
- for uri, syms := range psyms {
- norm := filepath.ToSlash(uri.Filename())
- nm := strings.TrimPrefix(norm, folder)
- if FiltersDisallow(nm, filters) {
- continue
- }
- // Only scan each file once.
- if _, ok := files[uri]; ok {
- continue
- }
- mds, err := snapshot.MetadataForFile(ctx, uri)
- if err != nil {
- event.Error(ctx, fmt.Sprintf("missing metadata for %q", uri), err)
- continue
- }
- if len(mds) == 0 {
- // TODO: should use the bug reporting API
- continue
- }
- files[uri] = symbolFile{uri, mds[0], syms}
- }
- }
-
- var work []symbolFile
- for _, f := range files {
- work = append(work, f)
- }
-
- // Compute matches concurrently. Each symbolWorker has its own symbolStore,
- // which we merge at the end.
- for i, matcher := range sc.matchers {
- go func(i int, matcher matcherFunc) {
- w := &symbolWorker{
- symbolizer: sc.symbolizer,
- matcher: matcher,
- ss: &symbolStore{},
- roots: roots,
- }
- for j := i; j < len(work); j += matcherlen {
- w.matchFile(work[j])
- }
- results <- w.ss
- }(i, matcher)
- }
-
- for i := 0; i < matcherlen; i++ {
- ss := <-results
- for _, si := range ss.res {
- sc.store(si)
- }
- }
- return sc.results(), nil
-}
-
-// FiltersDisallow reports whether path is excluded by the given directory
-// filters. Its body duplicates cache.pathExcludedByFilter in cache/view.go:
-// exporting and using that function here would cause an import cycle, and
-// moving it here entirely would leave view_test.go behind.
-// (This code is exported and used in the body of cache.pathExcludedByFilter.)
-func FiltersDisallow(path string, filters []string) bool {
- path = strings.TrimPrefix(path, "/")
- var excluded bool
- for _, filter := range filters {
- op, prefix := filter[0], filter[1:]
- // Non-empty prefixes have to be precise directory matches.
- if prefix != "" {
- prefix = prefix + "/"
- path = path + "/"
- }
- if !strings.HasPrefix(path, prefix) {
- continue
- }
- excluded = op == '-'
- }
- return excluded
-}
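An illustrative, made-up filter list; the last matching filter wins:

	filters := []string{"-internal", "+internal/keep"}
	FiltersDisallow("internal/foo/x.go", filters)  // true: excluded by "-internal"
	FiltersDisallow("internal/keep/y.go", filters) // false: re-included by "+internal/keep"
	FiltersDisallow("cmd/tool/z.go", filters)      // false: no filter matches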
-
-// symbolFile holds symbol information for a single file.
-type symbolFile struct {
- uri span.URI
- md Metadata
- syms []Symbol
-}
-
-// symbolWorker matches symbols and captures the highest scoring results.
-type symbolWorker struct {
- symbolizer symbolizer
- matcher matcherFunc
- ss *symbolStore
- roots []string
-}
-
-func (w *symbolWorker) matchFile(i symbolFile) {
- for _, sym := range i.syms {
- symbolParts, score := w.symbolizer(sym.Name, i.md, w.matcher)
-
- // Check if the score is too low before applying any downranking.
- if w.ss.tooLow(score) {
- continue
- }
-
- // Factors to apply to the match score for the purpose of downranking
- // results.
- //
- // These numbers were crudely calibrated based on trial-and-error using a
- // small number of sample queries. Adjust as necessary.
- //
- // All factors are multiplicative, meaning if more than one applies they are
- // multiplied together.
- const (
- // nonWorkspaceFactor is applied to symbols outside of any active
- // workspace. Developers are less likely to want to jump to code that they
- // are not actively working on.
- nonWorkspaceFactor = 0.5
- // nonWorkspaceUnexportedFactor is applied to unexported symbols outside of
- // any active workspace. Since one wouldn't usually jump to unexported
- // symbols to understand a package API, they are particularly irrelevant.
- nonWorkspaceUnexportedFactor = 0.5
-			// depthFactor penalizes nested symbols: the score is multiplied by
-			// 1.0 - depth*depthFactor, where depth is the number of field or
-			// method nesting levels needed to reach the symbol, capped at 3.
- depthFactor = 0.2
- )
-
- startWord := true
- exported := true
- depth := 0.0
- for _, r := range sym.Name {
- if startWord && !unicode.IsUpper(r) {
- exported = false
- }
- if r == '.' {
- startWord = true
- depth++
- } else {
- startWord = false
- }
- }
-
- inWorkspace := false
- for _, root := range w.roots {
- if strings.HasPrefix(string(i.uri), root) {
- inWorkspace = true
- break
- }
- }
-
- // Apply downranking based on workspace position.
- if !inWorkspace {
- score *= nonWorkspaceFactor
- if !exported {
- score *= nonWorkspaceUnexportedFactor
- }
- }
-
- // Apply downranking based on symbol depth.
- if depth > 3 {
- depth = 3
- }
- score *= 1.0 - depth*depthFactor
-
- if w.ss.tooLow(score) {
- continue
- }
-
- si := symbolInformation{
- score: score,
- symbol: strings.Join(symbolParts, ""),
- kind: sym.Kind,
- uri: i.uri,
- rng: sym.Range,
- container: i.md.PackagePath(),
- }
- w.ss.store(si)
- }
-}
-
-type symbolStore struct {
- res [maxSymbols]symbolInformation
-}
-
-// store inserts si into the sorted results, if si has a high enough score.
-func (sc *symbolStore) store(si symbolInformation) {
- if sc.tooLow(si.score) {
- return
- }
- insertAt := sort.Search(len(sc.res), func(i int) bool {
- // Sort by score, then symbol length, and finally lexically.
- if sc.res[i].score != si.score {
- return sc.res[i].score < si.score
- }
- if len(sc.res[i].symbol) != len(si.symbol) {
- return len(sc.res[i].symbol) > len(si.symbol)
- }
- return sc.res[i].symbol > si.symbol
- })
- if insertAt < len(sc.res)-1 {
- copy(sc.res[insertAt+1:], sc.res[insertAt:len(sc.res)-1])
- }
- sc.res[insertAt] = si
-}
-
-func (sc *symbolStore) tooLow(score float64) bool {
- return score <= sc.res[len(sc.res)-1].score
-}
-
-func (sc *symbolStore) results() []protocol.SymbolInformation {
- var res []protocol.SymbolInformation
- for _, si := range sc.res {
- if si.score <= 0 {
- return res
- }
- res = append(res, si.asProtocolSymbolInformation())
- }
- return res
-}
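A small sketch of the fixed-size, score-ordered store; the symbols are invented:

	var ss symbolStore
	ss.store(symbolInformation{score: 0.4, symbol: "foobar"})
	ss.store(symbolInformation{score: 0.9, symbol: "Foo"})
	ss.store(symbolInformation{score: 0, symbol: "ignored"}) // dropped by tooLow
	// results() yields "Foo" then "foobar": entries are kept in descending
	// score order, anything with score <= 0 is never returned, and at most
	// maxSymbols results are retained overall.
	_ = ss.results()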
-
-func typeToKind(typ types.Type) protocol.SymbolKind {
- switch typ := typ.Underlying().(type) {
- case *types.Interface:
- return protocol.Interface
- case *types.Struct:
- return protocol.Struct
- case *types.Signature:
- if typ.Recv() != nil {
- return protocol.Method
- }
- return protocol.Function
- case *types.Named:
- return typeToKind(typ.Underlying())
- case *types.Basic:
- i := typ.Info()
- switch {
- case i&types.IsNumeric != 0:
- return protocol.Number
- case i&types.IsBoolean != 0:
- return protocol.Boolean
- case i&types.IsString != 0:
- return protocol.String
- }
- }
- return protocol.Variable
-}
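A hedged sketch of the mapping on a few synthetic go/types values:

	typeToKind(types.Typ[types.Int])             // protocol.Number
	typeToKind(types.Typ[types.Bool])            // protocol.Boolean
	typeToKind(types.NewInterfaceType(nil, nil)) // protocol.Interface
	typeToKind(types.NewStruct(nil, nil))        // protocol.Struct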
-
-// symbolInformation is a cut-down version of protocol.SymbolInformation that
-// allows struct values of this type to be used as map keys.
-type symbolInformation struct {
- score float64
- symbol string
- container string
- kind protocol.SymbolKind
- uri span.URI
- rng protocol.Range
-}
-
-// asProtocolSymbolInformation converts s to a protocol.SymbolInformation value.
-//
-// TODO: work out how to handle tags if/when they are needed.
-func (s symbolInformation) asProtocolSymbolInformation() protocol.SymbolInformation {
- return protocol.SymbolInformation{
- Name: s.symbol,
- Kind: s.kind,
- Location: protocol.Location{
- URI: protocol.URIFromSpanURI(s.uri),
- Range: s.rng,
- },
- ContainerName: s.container,
- }
-}
diff --git a/internal/lsp/source/workspace_symbol_test.go b/internal/lsp/source/workspace_symbol_test.go
deleted file mode 100644
index 314ef785d..000000000
--- a/internal/lsp/source/workspace_symbol_test.go
+++ /dev/null
@@ -1,46 +0,0 @@
-// Copyright 2020 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-package source
-
-import (
- "testing"
-)
-
-func TestParseQuery(t *testing.T) {
- tests := []struct {
- query, s string
- wantMatch bool
- }{
- {"", "anything", false},
- {"any", "anything", true},
- {"any$", "anything", false},
- {"ing$", "anything", true},
- {"ing$", "anythinG", true},
- {"inG$", "anything", false},
- {"^any", "anything", true},
- {"^any", "Anything", true},
- {"^Any", "anything", false},
- {"at", "anything", true},
- // TODO: this appears to be a bug in the fuzzy matching algorithm. 'At'
- // should cause a case-sensitive match.
- // {"At", "anything", false},
- {"At", "Anything", true},
- {"'yth", "Anything", true},
- {"'yti", "Anything", false},
- {"'any 'thing", "Anything", true},
- {"anythn nythg", "Anything", true},
- {"ntx", "Anything", false},
- {"anythn", "anything", true},
- {"ing", "anything", true},
- {"anythn nythgx", "anything", false},
- }
-
- for _, test := range tests {
- matcher := parseQuery(test.query, newFuzzyMatcher)
- if _, score := matcher([]string{test.s}); score > 0 != test.wantMatch {
- t.Errorf("parseQuery(%q) match for %q: %.2g, want match: %t", test.query, test.s, score, test.wantMatch)
- }
- }
-}