Diffstat (limited to 'go/analysis/internal')
-rw-r--r--  go/analysis/internal/analysisflags/flags.go         75
-rw-r--r--  go/analysis/internal/analysisflags/flags_test.go     7
-rw-r--r--  go/analysis/internal/checker/checker.go             296
-rw-r--r--  go/analysis/internal/checker/checker_test.go         90
-rw-r--r--  go/analysis/internal/checker/fix_test.go            309
-rw-r--r--  go/analysis/internal/checker/start_test.go           85
-rw-r--r--  go/analysis/internal/facts/facts.go                 323
-rw-r--r--  go/analysis/internal/facts/facts_test.go            384
-rw-r--r--  go/analysis/internal/facts/imports.go               119
9 files changed, 694 insertions, 994 deletions
diff --git a/go/analysis/internal/analysisflags/flags.go b/go/analysis/internal/analysisflags/flags.go
index 4b7be2d1f..e127a42b9 100644
--- a/go/analysis/internal/analysisflags/flags.go
+++ b/go/analysis/internal/analysisflags/flags.go
@@ -206,7 +206,7 @@ func (versionFlag) Get() interface{} { return nil }
func (versionFlag) String() string { return "" }
func (versionFlag) Set(s string) error {
if s != "full" {
- log.Fatalf("unsupported flag value: -V=%s", s)
+ log.Fatalf("unsupported flag value: -V=%s (use -V=full)", s)
}
// This replicates the minimal subset of
@@ -218,7 +218,10 @@ func (versionFlag) Set(s string) error {
// Formats:
// $progname version devel ... buildID=...
// $progname version go1.9.1
- progname := os.Args[0]
+ progname, err := os.Executable()
+ if err != nil {
+ return err
+ }
f, err := os.Open(progname)
if err != nil {
log.Fatal(err)
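
[Editorial note, not part of the patch: the -V=full handler opens the running executable to read its embedded build information, and the hunk above switches from os.Args[0] to os.Executable because argv[0] is often a bare command name resolved via $PATH, or a relative path, which os.Open may fail to locate. A minimal standalone sketch of the difference:

package main

import (
	"fmt"
	"log"
	"os"
)

func main() {
	fmt.Println("os.Args[0]:", os.Args[0]) // e.g. "vet": may not be an openable path
	exe, err := os.Executable()            // absolute path of the running binary
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println("os.Executable():", exe)
	f, err := os.Open(exe) // this is the open that -V=full needs to succeed
	if err != nil {
		log.Fatal(err)
	}
	defer f.Close()
}
]
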
@@ -339,9 +342,38 @@ func PrintPlain(fset *token.FileSet, diag analysis.Diagnostic) {
}
// A JSONTree is a mapping from package ID to analysis name to result.
-// Each result is either a jsonError or a list of jsonDiagnostic.
+// Each result is either a jsonError or a list of JSONDiagnostic.
type JSONTree map[string]map[string]interface{}
+// A TextEdit describes the replacement of a portion of a file.
+// Start and End are zero-based half-open indices into the original byte
+// sequence of the file, and New is the new text.
+type JSONTextEdit struct {
+ Filename string `json:"filename"`
+ Start int `json:"start"`
+ End int `json:"end"`
+ New string `json:"new"`
+}
+
+// A JSONSuggestedFix describes an edit that should be applied as a whole or not
+// at all. It might contain multiple TextEdits/text_edits if the SuggestedFix
+// consists of multiple non-contiguous edits.
+type JSONSuggestedFix struct {
+ Message string `json:"message"`
+ Edits []JSONTextEdit `json:"edits"`
+}
+
+// A JSONDiagnostic can be used to encode and decode analysis.Diagnostics to and
+// from JSON.
+// TODO(matloob): Should the JSON diagnostics contain ranges?
+// If so, how should they be formatted?
+type JSONDiagnostic struct {
+ Category string `json:"category,omitempty"`
+ Posn string `json:"posn"`
+ Message string `json:"message"`
+ SuggestedFixes []JSONSuggestedFix `json:"suggested_fixes,omitempty"`
+}
+
// Add adds the result of analysis 'name' on package 'id'.
// The result is either a list of diagnostics or an error.
func (tree JSONTree) Add(fset *token.FileSet, id, name string, diags []analysis.Diagnostic, err error) {
@@ -352,20 +384,31 @@ func (tree JSONTree) Add(fset *token.FileSet, id, name string, diags []analysis.
}
v = jsonError{err.Error()}
} else if len(diags) > 0 {
- type jsonDiagnostic struct {
- Category string `json:"category,omitempty"`
- Posn string `json:"posn"`
- Message string `json:"message"`
- }
- var diagnostics []jsonDiagnostic
- // TODO(matloob): Should the JSON diagnostics contain ranges?
- // If so, how should they be formatted?
+ diagnostics := make([]JSONDiagnostic, 0, len(diags))
for _, f := range diags {
- diagnostics = append(diagnostics, jsonDiagnostic{
- Category: f.Category,
- Posn: fset.Position(f.Pos).String(),
- Message: f.Message,
- })
+ var fixes []JSONSuggestedFix
+ for _, fix := range f.SuggestedFixes {
+ var edits []JSONTextEdit
+ for _, edit := range fix.TextEdits {
+ edits = append(edits, JSONTextEdit{
+ Filename: fset.Position(edit.Pos).Filename,
+ Start: fset.Position(edit.Pos).Offset,
+ End: fset.Position(edit.End).Offset,
+ New: string(edit.NewText),
+ })
+ }
+ fixes = append(fixes, JSONSuggestedFix{
+ Message: fix.Message,
+ Edits: edits,
+ })
+ }
+ jdiag := JSONDiagnostic{
+ Category: f.Category,
+ Posn: fset.Position(f.Pos).String(),
+ Message: f.Message,
+ SuggestedFixes: fixes,
+ }
+ diagnostics = append(diagnostics, jdiag)
}
v = diagnostics
}
diff --git a/go/analysis/internal/analysisflags/flags_test.go b/go/analysis/internal/analysisflags/flags_test.go
index 1f055dde7..b5cfb3d44 100644
--- a/go/analysis/internal/analysisflags/flags_test.go
+++ b/go/analysis/internal/analysisflags/flags_test.go
@@ -42,7 +42,7 @@ func TestExec(t *testing.T) {
for _, test := range []struct {
flags string
- want string
+ want string // output should contain want
}{
{"", "[a1 a2 a3]"},
{"-a1=0", "[a2 a3]"},
@@ -50,6 +50,7 @@ func TestExec(t *testing.T) {
{"-a1", "[a1]"},
{"-a1=1 -a3=1", "[a1 a3]"},
{"-a1=1 -a3=0", "[a1]"},
+ {"-V=full", "analysisflags.test version devel"},
} {
cmd := exec.Command(progname, "-test.run=TestExec")
cmd.Env = append(os.Environ(), "ANALYSISFLAGS_CHILD=1", "FLAGS="+test.flags)
@@ -60,8 +61,8 @@ func TestExec(t *testing.T) {
}
got := strings.TrimSpace(string(output))
- if got != test.want {
- t.Errorf("got %s, want %s", got, test.want)
+ if !strings.Contains(got, test.want) {
+ t.Errorf("got %q, does not contain %q", got, test.want)
}
}
}
diff --git a/go/analysis/internal/checker/checker.go b/go/analysis/internal/checker/checker.go
index e405a2ae1..5346acd76 100644
--- a/go/analysis/internal/checker/checker.go
+++ b/go/analysis/internal/checker/checker.go
@@ -15,7 +15,6 @@ import (
"flag"
"fmt"
"go/format"
- "go/parser"
"go/token"
"go/types"
"io/ioutil"
@@ -33,8 +32,8 @@ import (
"golang.org/x/tools/go/analysis"
"golang.org/x/tools/go/analysis/internal/analysisflags"
"golang.org/x/tools/go/packages"
- "golang.org/x/tools/internal/analysisinternal"
- "golang.org/x/tools/internal/span"
+ "golang.org/x/tools/internal/diff"
+ "golang.org/x/tools/internal/robustio"
)
var (
@@ -51,6 +50,9 @@ var (
// Log files for optional performance tracing.
CPUProfile, MemProfile, Trace string
+ // IncludeTests indicates whether test files should be analyzed too.
+ IncludeTests = true
+
// Fix determines whether to apply all suggested fixes.
Fix bool
)
@@ -65,6 +67,7 @@ func RegisterFlags() {
flag.StringVar(&CPUProfile, "cpuprofile", "", "write CPU profile to this file")
flag.StringVar(&MemProfile, "memprofile", "", "write memory profile to this file")
flag.StringVar(&Trace, "trace", "", "write trace log to this file")
+ flag.BoolVar(&IncludeTests, "test", IncludeTests, "indicates whether test files should be analyzed, too")
flag.BoolVar(&Fix, "fix", false, "apply all suggested fixes")
}
@@ -143,7 +146,11 @@ func Run(args []string, analyzers []*analysis.Analyzer) (exitcode int) {
roots := analyze(initial, analyzers)
if Fix {
- applyFixes(roots)
+ if err := applyFixes(roots); err != nil {
+ // Fail when applying fixes failed.
+ log.Print(err)
+ return 1
+ }
}
return printDiagnostics(roots)
}
@@ -163,7 +170,7 @@ func load(patterns []string, allSyntax bool) ([]*packages.Package, error) {
}
conf := packages.Config{
Mode: mode,
- Tests: true,
+ Tests: IncludeTests,
}
initial, err := packages.Load(&conf, patterns...)
if err == nil {
@@ -301,7 +308,10 @@ func analyze(pkgs []*packages.Package, analyzers []*analysis.Analyzer) []*action
return roots
}
-func applyFixes(roots []*action) {
+func applyFixes(roots []*action) error {
+ // visit all of the actions and accumulate the suggested edits.
+ paths := make(map[robustio.FileID]string)
+ editsByAction := make(map[robustio.FileID]map[*action][]diff.Edit)
visited := make(map[*action]bool)
var apply func(*action) error
var visitAll func(actions []*action) error
@@ -309,7 +319,9 @@ func applyFixes(roots []*action) {
for _, act := range actions {
if !visited[act] {
visited[act] = true
- visitAll(act.deps)
+ if err := visitAll(act.deps); err != nil {
+ return err
+ }
if err := apply(act); err != nil {
return err
}
@@ -318,116 +330,167 @@ func applyFixes(roots []*action) {
return nil
}
- // TODO(matloob): Is this tree business too complicated? (After all this is Go!)
- // Just create a set (map) of edits, sort by pos and call it a day?
- type offsetedit struct {
- start, end int
- newText []byte
- } // TextEdit using byteOffsets instead of pos
- type node struct {
- edit offsetedit
- left, right *node
- }
-
- var insert func(tree **node, edit offsetedit) error
- insert = func(treeptr **node, edit offsetedit) error {
- if *treeptr == nil {
- *treeptr = &node{edit, nil, nil}
- return nil
- }
- tree := *treeptr
- if edit.end <= tree.edit.start {
- return insert(&tree.left, edit)
- } else if edit.start >= tree.edit.end {
- return insert(&tree.right, edit)
- }
-
- // Overlapping text edit.
- return fmt.Errorf("analyses applying overlapping text edits affecting pos range (%v, %v) and (%v, %v)",
- edit.start, edit.end, tree.edit.start, tree.edit.end)
-
- }
-
- editsForFile := make(map[*token.File]*node)
-
apply = func(act *action) error {
+ editsForTokenFile := make(map[*token.File][]diff.Edit)
for _, diag := range act.diagnostics {
for _, sf := range diag.SuggestedFixes {
for _, edit := range sf.TextEdits {
// Validate the edit.
+ // Any error here indicates a bug in the analyzer.
+ file := act.pkg.Fset.File(edit.Pos)
+ if file == nil {
+ return fmt.Errorf("analysis %q suggests invalid fix: missing file info for pos (%v)",
+ act.a.Name, edit.Pos)
+ }
if edit.Pos > edit.End {
- return fmt.Errorf(
- "diagnostic for analysis %v contains Suggested Fix with malformed edit: pos (%v) > end (%v)",
+ return fmt.Errorf("analysis %q suggests invalid fix: pos (%v) > end (%v)",
act.a.Name, edit.Pos, edit.End)
}
- file, endfile := act.pkg.Fset.File(edit.Pos), act.pkg.Fset.File(edit.End)
- if file == nil || endfile == nil || file != endfile {
- return (fmt.Errorf(
- "diagnostic for analysis %v contains Suggested Fix with malformed spanning files %v and %v",
- act.a.Name, file.Name(), endfile.Name()))
+ if eof := token.Pos(file.Base() + file.Size()); edit.End > eof {
+ return fmt.Errorf("analysis %q suggests invalid fix: end (%v) past end of file (%v)",
+ act.a.Name, edit.End, eof)
}
- start, end := file.Offset(edit.Pos), file.Offset(edit.End)
-
- // TODO(matloob): Validate that edits do not affect other packages.
- root := editsForFile[file]
- if err := insert(&root, offsetedit{start, end, edit.NewText}); err != nil {
- return err
- }
- editsForFile[file] = root // In case the root changed
+ edit := diff.Edit{Start: file.Offset(edit.Pos), End: file.Offset(edit.End), New: string(edit.NewText)}
+ editsForTokenFile[file] = append(editsForTokenFile[file], edit)
}
}
}
+
+ for f, edits := range editsForTokenFile {
+ id, _, err := robustio.GetFileID(f.Name())
+ if err != nil {
+ return err
+ }
+ if _, hasId := paths[id]; !hasId {
+ paths[id] = f.Name()
+ editsByAction[id] = make(map[*action][]diff.Edit)
+ }
+ editsByAction[id][act] = edits
+ }
return nil
}
- visitAll(roots)
+ if err := visitAll(roots); err != nil {
+ return err
+ }
- fset := token.NewFileSet() // Shared by parse calls below
- // Now we've got a set of valid edits for each file. Get the new file contents.
- for f, tree := range editsForFile {
- contents, err := ioutil.ReadFile(f.Name())
- if err != nil {
- log.Fatal(err)
+ // Validate and group the edits to each actual file.
+ editsByPath := make(map[string][]diff.Edit)
+ for id, actToEdits := range editsByAction {
+ path := paths[id]
+ actions := make([]*action, 0, len(actToEdits))
+ for act := range actToEdits {
+ actions = append(actions, act)
}
- cur := 0 // current position in the file
-
- var out bytes.Buffer
-
- var recurse func(*node)
- recurse = func(node *node) {
- if node.left != nil {
- recurse(node.left)
+ // Does any action create conflicting edits?
+ for _, act := range actions {
+ edits := actToEdits[act]
+ if _, invalid := validateEdits(edits); invalid > 0 {
+ name, x, y := act.a.Name, edits[invalid-1], edits[invalid]
+ return diff3Conflict(path, name, name, []diff.Edit{x}, []diff.Edit{y})
}
+ }
- edit := node.edit
- if edit.start > cur {
- out.Write(contents[cur:edit.start])
- out.Write(edit.newText)
+ // Does any pair of different actions create edits that conflict?
+ for j := range actions {
+ for k := range actions[:j] {
+ x, y := actions[j], actions[k]
+ if x.a.Name > y.a.Name {
+ x, y = y, x
+ }
+ xedits, yedits := actToEdits[x], actToEdits[y]
+ combined := append(xedits, yedits...)
+ if _, invalid := validateEdits(combined); invalid > 0 {
+ // TODO: consider applying each action's consistent list of edits entirely,
+ // and then using a three-way merge (such as GNU diff3) on the resulting
+ // files to report more precisely the parts that actually conflict.
+ return diff3Conflict(path, x.a.Name, y.a.Name, xedits, yedits)
+ }
}
- cur = edit.end
+ }
- if node.right != nil {
- recurse(node.right)
- }
+ var edits []diff.Edit
+ for act := range actToEdits {
+ edits = append(edits, actToEdits[act]...)
}
- recurse(tree)
- // Write out the rest of the file.
- if cur < len(contents) {
- out.Write(contents[cur:])
+ editsByPath[path], _ = validateEdits(edits) // remove duplicates. already validated.
+ }
+
+ // Now we've got a set of valid edits for each file. Apply them.
+ for path, edits := range editsByPath {
+ contents, err := ioutil.ReadFile(path)
+ if err != nil {
+ return err
+ }
+
+ out, err := diff.ApplyBytes(contents, edits)
+ if err != nil {
+ return err
}
// Try to format the file.
- ff, err := parser.ParseFile(fset, f.Name(), out.Bytes(), parser.ParseComments)
- if err == nil {
- var buf bytes.Buffer
- if err = format.Node(&buf, fset, ff); err == nil {
- out = buf
+ if formatted, err := format.Source(out); err == nil {
+ out = formatted
+ }
+
+ if err := ioutil.WriteFile(path, out, 0644); err != nil {
+ return err
+ }
+ }
+ return nil
+}
+
+// validateEdits returns a list of edits that is sorted and
+// contains no duplicate edits. Returns the index of some
+// overlapping adjacent edits if there is one and <0 if the
+// edits are valid.
+func validateEdits(edits []diff.Edit) ([]diff.Edit, int) {
+ if len(edits) == 0 {
+ return nil, -1
+ }
+ equivalent := func(x, y diff.Edit) bool {
+ return x.Start == y.Start && x.End == y.End && x.New == y.New
+ }
+ diff.SortEdits(edits)
+ unique := []diff.Edit{edits[0]}
+ invalid := -1
+ for i := 1; i < len(edits); i++ {
+ prev, cur := edits[i-1], edits[i]
+ // We skip over equivalent edits without considering them
+ // an error. This handles identical edits coming from the
+ // multiple ways of loading a package into a
+ // *go/packages.Packages for testing, e.g. packages "p" and "p [p.test]".
+ if !equivalent(prev, cur) {
+ unique = append(unique, cur)
+ if prev.End > cur.Start {
+ invalid = i
}
}
+ }
+ return unique, invalid
+}
- ioutil.WriteFile(f.Name(), out.Bytes(), 0644)
+// diff3Conflict returns an error describing two conflicting sets of
+// edits on a file at path.
+func diff3Conflict(path string, xlabel, ylabel string, xedits, yedits []diff.Edit) error {
+ contents, err := ioutil.ReadFile(path)
+ if err != nil {
+ return err
}
+ oldlabel, old := "base", string(contents)
+
+ xdiff, err := diff.ToUnified(oldlabel, xlabel, old, xedits)
+ if err != nil {
+ return err
+ }
+ ydiff, err := diff.ToUnified(oldlabel, ylabel, old, yedits)
+ if err != nil {
+ return err
+ }
+
+ return fmt.Errorf("conflicting edits from %s and %s on %s\nfirst edits:\n%s\nsecond edits:\n%s",
+ xlabel, ylabel, path, xdiff, ydiff)
}
// printDiagnostics prints the diagnostics for the root packages in either
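
[Editorial note, not part of the patch: the heart of the new conflict detection is a sort-and-scan over byte-offset edits. The sketch below uses a local Edit type in place of the internal diff.Edit, so the details differ from validateEdits, but it shows the same idea: sort, silently drop exact duplicates (which arise when the same file is loaded as both "p" and "p [p.test]"), and flag overlapping neighbours as conflicts.

package main

import (
	"fmt"
	"sort"
)

// Edit is a simplified stand-in for the internal diff.Edit type.
type Edit struct {
	Start, End int
	New        string
}

// validate sorts edits, removes exact duplicates, and returns the index
// of an overlapping adjacent pair, or -1 if the edits are consistent.
func validate(edits []Edit) (unique []Edit, invalid int) {
	invalid = -1
	if len(edits) == 0 {
		return nil, invalid
	}
	sort.Slice(edits, func(i, j int) bool {
		if edits[i].Start != edits[j].Start {
			return edits[i].Start < edits[j].Start
		}
		return edits[i].End < edits[j].End
	})
	unique = append(unique, edits[0])
	for i := 1; i < len(edits); i++ {
		prev, cur := edits[i-1], edits[i]
		if prev == cur {
			continue // identical edit: not an error, just skipped
		}
		unique = append(unique, cur)
		if prev.End > cur.Start {
			invalid = i // overlapping, non-identical edits conflict
		}
	}
	return unique, invalid
}

func main() {
	edits := []Edit{
		{Start: 10, End: 13, New: "baz"},
		{Start: 10, End: 13, New: "baz"},  // exact duplicate: dropped
		{Start: 12, End: 15, New: "quux"}, // overlaps the first edit: conflict
	}
	u, bad := validate(edits)
	fmt.Println(len(u), bad) // prints "2 2": two unique edits, overlap found at index 2
}
]
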
@@ -574,7 +637,6 @@ type action struct {
deps []*action
objectFacts map[objectFactKey]analysis.Fact
packageFacts map[packageFactKey]analysis.Fact
- inputs map[*analysis.Analyzer]interface{}
result interface{}
diagnostics []analysis.Diagnostic
err error
@@ -672,14 +734,16 @@ func (act *action) execOnce() {
// Run the analysis.
pass := &analysis.Pass{
- Analyzer: act.a,
- Fset: act.pkg.Fset,
- Files: act.pkg.Syntax,
- OtherFiles: act.pkg.OtherFiles,
- IgnoredFiles: act.pkg.IgnoredFiles,
- Pkg: act.pkg.Types,
- TypesInfo: act.pkg.TypesInfo,
- TypesSizes: act.pkg.TypesSizes,
+ Analyzer: act.a,
+ Fset: act.pkg.Fset,
+ Files: act.pkg.Syntax,
+ OtherFiles: act.pkg.OtherFiles,
+ IgnoredFiles: act.pkg.IgnoredFiles,
+ Pkg: act.pkg.Types,
+ TypesInfo: act.pkg.TypesInfo,
+ TypesSizes: act.pkg.TypesSizes,
+ TypeErrors: act.pkg.TypeErrors,
+
ResultOf: inputs,
Report: func(d analysis.Diagnostic) { act.diagnostics = append(act.diagnostics, d) },
ImportObjectFact: act.importObjectFact,
@@ -691,36 +755,6 @@ func (act *action) execOnce() {
}
act.pass = pass
- var errors []types.Error
- // Get any type errors that are attributed to the pkg.
- // This is necessary to test analyzers that provide
- // suggested fixes for compiler/type errors.
- for _, err := range act.pkg.Errors {
- if err.Kind != packages.TypeError {
- continue
- }
- // err.Pos is a string of form: "file:line:col" or "file:line" or "" or "-"
- spn := span.Parse(err.Pos)
- // Extract the token positions from the error string.
- line, col, offset := spn.Start().Line(), spn.Start().Column(), -1
- act.pkg.Fset.Iterate(func(f *token.File) bool {
- if f.Name() != spn.URI().Filename() {
- return true
- }
- offset = int(f.LineStart(line)) + col - 1
- return false
- })
- if offset == -1 {
- continue
- }
- errors = append(errors, types.Error{
- Fset: act.pkg.Fset,
- Msg: err.Msg,
- Pos: token.Pos(offset),
- })
- }
- analysisinternal.SetTypeErrors(pass, errors)
-
var err error
if act.pkg.IllTyped && !pass.Analyzer.RunDespiteErrors {
err = fmt.Errorf("analysis skipped due to errors in package")
@@ -762,7 +796,7 @@ func inheritFacts(act, dep *action) {
if serialize {
encodedFact, err := codeFact(fact)
if err != nil {
- log.Panicf("internal error: encoding of %T fact failed in %v", fact, act)
+ log.Panicf("internal error: encoding of %T fact failed in %v: %v", fact, act, err)
}
fact = encodedFact
}
@@ -826,7 +860,7 @@ func codeFact(fact analysis.Fact) (analysis.Fact, error) {
// exportedFrom reports whether obj may be visible to a package that imports pkg.
// This includes not just the exported members of pkg, but also unexported
-// constants, types, fields, and methods, perhaps belonging to oether packages,
+// constants, types, fields, and methods, perhaps belonging to other packages,
// that find there way into the API.
// This is an overapproximation of the more accurate approach used by
// gc export data, which walks the type graph, but it's much simpler.
@@ -890,7 +924,7 @@ func (act *action) exportObjectFact(obj types.Object, fact analysis.Fact) {
func (act *action) allObjectFacts() []analysis.ObjectFact {
facts := make([]analysis.ObjectFact, 0, len(act.objectFacts))
for k := range act.objectFacts {
- facts = append(facts, analysis.ObjectFact{k.obj, act.objectFacts[k]})
+ facts = append(facts, analysis.ObjectFact{Object: k.obj, Fact: act.objectFacts[k]})
}
return facts
}
@@ -932,11 +966,11 @@ func factType(fact analysis.Fact) reflect.Type {
return t
}
-// allObjectFacts implements Pass.AllObjectFacts.
+// allPackageFacts implements Pass.AllPackageFacts.
func (act *action) allPackageFacts() []analysis.PackageFact {
facts := make([]analysis.PackageFact, 0, len(act.packageFacts))
for k := range act.packageFacts {
- facts = append(facts, analysis.PackageFact{k.pkg, act.packageFacts[k]})
+ facts = append(facts, analysis.PackageFact{Package: k.pkg, Fact: act.packageFacts[k]})
}
return facts
}
diff --git a/go/analysis/internal/checker/checker_test.go b/go/analysis/internal/checker/checker_test.go
index eee211c21..34acae81e 100644
--- a/go/analysis/internal/checker/checker_test.go
+++ b/go/analysis/internal/checker/checker_test.go
@@ -19,14 +19,9 @@ import (
"golang.org/x/tools/internal/testenv"
)
-var from, to string
-
func TestApplyFixes(t *testing.T) {
testenv.NeedsGoPackages(t)
- from = "bar"
- to = "baz"
-
files := map[string]string{
"rename/test.go": `package rename
@@ -74,26 +69,55 @@ var analyzer = &analysis.Analyzer{
Run: run,
}
+var other = &analysis.Analyzer{ // like analyzer but with a different Name.
+ Name: "other",
+ Requires: []*analysis.Analyzer{inspect.Analyzer},
+ Run: run,
+}
+
func run(pass *analysis.Pass) (interface{}, error) {
+ const (
+ from = "bar"
+ to = "baz"
+ conflict = "conflict" // add conflicting edits to package conflict.
+ duplicate = "duplicate" // add duplicate edits to package conflict.
+ other = "other" // add conflicting edits to package other from different analyzers.
+ )
+
+ if pass.Analyzer.Name == other {
+ if pass.Pkg.Name() != other {
+ return nil, nil // only apply Analyzer other to packages named other
+ }
+ }
+
inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector)
nodeFilter := []ast.Node{(*ast.Ident)(nil)}
inspect.Preorder(nodeFilter, func(n ast.Node) {
ident := n.(*ast.Ident)
if ident.Name == from {
msg := fmt.Sprintf("renaming %q to %q", from, to)
+ edits := []analysis.TextEdit{
+ {Pos: ident.Pos(), End: ident.End(), NewText: []byte(to)},
+ }
+ switch pass.Pkg.Name() {
+ case conflict:
+ edits = append(edits, []analysis.TextEdit{
+ {Pos: ident.Pos() - 1, End: ident.End(), NewText: []byte(to)},
+ {Pos: ident.Pos(), End: ident.End() - 1, NewText: []byte(to)},
+ {Pos: ident.Pos(), End: ident.End(), NewText: []byte("lorem ipsum")},
+ }...)
+ case duplicate:
+ edits = append(edits, edits...)
+ case other:
+ if pass.Analyzer.Name == other {
+ edits[0].Pos = edits[0].Pos + 1 // shift by one to mismatch analyzer and other
+ }
+ }
pass.Report(analysis.Diagnostic{
- Pos: ident.Pos(),
- End: ident.End(),
- Message: msg,
- SuggestedFixes: []analysis.SuggestedFix{{
- Message: msg,
- TextEdits: []analysis.TextEdit{{
- Pos: ident.Pos(),
- End: ident.End(),
- NewText: []byte(to),
- }},
- }},
- })
+ Pos: ident.Pos(),
+ End: ident.End(),
+ Message: msg,
+ SuggestedFixes: []analysis.SuggestedFix{{Message: msg, TextEdits: edits}}})
}
})
@@ -129,6 +153,18 @@ func Foo(s string) int {
RunDespiteErrors: true,
}
+ // A no-op analyzer that should finish regardless of
+ // parse or type errors in the code.
+ noopWithFact := &analysis.Analyzer{
+ Name: "noopfact",
+ Requires: []*analysis.Analyzer{inspect.Analyzer},
+ Run: func(pass *analysis.Pass) (interface{}, error) {
+ return nil, nil
+ },
+ RunDespiteErrors: true,
+ FactTypes: []analysis.Fact{&EmptyFact{}},
+ }
+
for _, test := range []struct {
name string
pattern []string
@@ -137,7 +173,17 @@ func Foo(s string) int {
}{
// parse/type errors
{name: "skip-error", pattern: []string{"file=" + path}, analyzers: []*analysis.Analyzer{analyzer}, code: 1},
- {name: "despite-error", pattern: []string{"file=" + path}, analyzers: []*analysis.Analyzer{noop}, code: 0},
+ // RunDespiteErrors allows a driver to run an Analyzer even after parse/type errors.
+ //
+ // The noop analyzer doesn't use facts, so the driver loads only the root
+ // package from source. For the rest, it asks 'go list' for export data,
+ // which fails because the compiler encounters the type error. Since the
+ // errors come from 'go list', the driver doesn't run the analyzer.
+ {name: "despite-error", pattern: []string{"file=" + path}, analyzers: []*analysis.Analyzer{noop}, code: 1},
+ // The noopfact analyzer does use facts, so the driver loads source for
+ // all dependencies, does type checking itself, recognizes the error as a
+ // type error, and runs the analyzer.
+ {name: "despite-error-fact", pattern: []string{"file=" + path}, analyzers: []*analysis.Analyzer{noopWithFact}, code: 0},
// combination of parse/type errors and no errors
{name: "despite-error-and-no-error", pattern: []string{"file=" + path, "sort"}, analyzers: []*analysis.Analyzer{analyzer, noop}, code: 1},
// non-existing package error
@@ -151,6 +197,10 @@ func Foo(s string) int {
// no errors
{name: "no-errors", pattern: []string{"sort"}, analyzers: []*analysis.Analyzer{analyzer, noop}, code: 0},
} {
+ if test.name == "despite-error" && testenv.Go1Point() < 20 {
+ // The behavior in the comment on the despite-error test only occurs for Go 1.20+.
+ continue
+ }
if got := checker.Run(test.pattern, test.analyzers); got != test.code {
t.Errorf("got incorrect exit code %d for test %s; want %d", got, test.name, test.code)
}
@@ -158,3 +208,7 @@ func Foo(s string) int {
defer cleanup()
}
+
+type EmptyFact struct{}
+
+func (f *EmptyFact) AFact() {}
diff --git a/go/analysis/internal/checker/fix_test.go b/go/analysis/internal/checker/fix_test.go
new file mode 100644
index 000000000..3ea92b38c
--- /dev/null
+++ b/go/analysis/internal/checker/fix_test.go
@@ -0,0 +1,309 @@
+// Copyright 2022 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package checker_test
+
+import (
+ "flag"
+ "io/ioutil"
+ "os"
+ "os/exec"
+ "path"
+ "regexp"
+ "runtime"
+ "testing"
+
+ "golang.org/x/tools/go/analysis"
+ "golang.org/x/tools/go/analysis/analysistest"
+ "golang.org/x/tools/go/analysis/internal/checker"
+ "golang.org/x/tools/internal/testenv"
+)
+
+func main() {
+ checker.Fix = true
+ patterns := flag.Args()
+
+ code := checker.Run(patterns, []*analysis.Analyzer{analyzer, other})
+ os.Exit(code)
+}
+
+// TestFixes ensures that checker.Run applies fixes correctly.
+// This test fork/execs the main function above.
+func TestFixes(t *testing.T) {
+ oses := map[string]bool{"darwin": true, "linux": true}
+ if !oses[runtime.GOOS] {
+ t.Skipf("skipping fork/exec test on this platform")
+ }
+
+ if os.Getenv("TESTFIXES_CHILD") == "1" {
+ // child process
+
+ // replace [progname -test.run=TestFixes -- ...]
+ // by [progname ...]
+ os.Args = os.Args[2:]
+ os.Args[0] = "vet"
+ main()
+ panic("unreachable")
+ }
+
+ testenv.NeedsTool(t, "go")
+
+ files := map[string]string{
+ "rename/foo.go": `package rename
+
+func Foo() {
+ bar := 12
+ _ = bar
+}
+
+// the end
+`,
+ "rename/intestfile_test.go": `package rename
+
+func InTestFile() {
+ bar := 13
+ _ = bar
+}
+
+// the end
+`,
+ "rename/foo_test.go": `package rename_test
+
+func Foo() {
+ bar := 14
+ _ = bar
+}
+
+// the end
+`,
+ "duplicate/dup.go": `package duplicate
+
+func Foo() {
+ bar := 14
+ _ = bar
+}
+
+// the end
+`,
+ }
+ fixed := map[string]string{
+ "rename/foo.go": `package rename
+
+func Foo() {
+ baz := 12
+ _ = baz
+}
+
+// the end
+`,
+ "rename/intestfile_test.go": `package rename
+
+func InTestFile() {
+ baz := 13
+ _ = baz
+}
+
+// the end
+`,
+ "rename/foo_test.go": `package rename_test
+
+func Foo() {
+ baz := 14
+ _ = baz
+}
+
+// the end
+`,
+ "duplicate/dup.go": `package duplicate
+
+func Foo() {
+ baz := 14
+ _ = baz
+}
+
+// the end
+`,
+ }
+ dir, cleanup, err := analysistest.WriteFiles(files)
+ if err != nil {
+ t.Fatalf("Creating test files failed with %s", err)
+ }
+ defer cleanup()
+
+ args := []string{"-test.run=TestFixes", "--", "rename", "duplicate"}
+ cmd := exec.Command(os.Args[0], args...)
+ cmd.Env = append(os.Environ(), "TESTFIXES_CHILD=1", "GOPATH="+dir, "GO111MODULE=off", "GOPROXY=off")
+
+ out, err := cmd.CombinedOutput()
+ if len(out) > 0 {
+ t.Logf("%s: out=<<%s>>", args, out)
+ }
+ var exitcode int
+ if err, ok := err.(*exec.ExitError); ok {
+ exitcode = err.ExitCode() // requires go1.12
+ }
+
+ const diagnosticsExitCode = 3
+ if exitcode != diagnosticsExitCode {
+ t.Errorf("%s: exited %d, want %d", args, exitcode, diagnosticsExitCode)
+ }
+
+ for name, want := range fixed {
+ path := path.Join(dir, "src", name)
+ contents, err := ioutil.ReadFile(path)
+ if err != nil {
+ t.Errorf("error reading %s: %v", path, err)
+ }
+ if got := string(contents); got != want {
+ t.Errorf("contents of %s file did not match expectations. got=%s, want=%s", path, got, want)
+ }
+ }
+}
+
+// TestConflict ensures that checker.Run detects conflicts correctly.
+// This test fork/execs the main function above.
+func TestConflict(t *testing.T) {
+ oses := map[string]bool{"darwin": true, "linux": true}
+ if !oses[runtime.GOOS] {
+ t.Skipf("skipping fork/exec test on this platform")
+ }
+
+ if os.Getenv("TESTCONFLICT_CHILD") == "1" {
+ // child process
+
+ // replace [progname -test.run=TestConflict -- ...]
+ // by [progname ...]
+ os.Args = os.Args[2:]
+ os.Args[0] = "vet"
+ main()
+ panic("unreachable")
+ }
+
+ testenv.NeedsTool(t, "go")
+
+ files := map[string]string{
+ "conflict/foo.go": `package conflict
+
+func Foo() {
+ bar := 12
+ _ = bar
+}
+
+// the end
+`,
+ }
+ dir, cleanup, err := analysistest.WriteFiles(files)
+ if err != nil {
+ t.Fatalf("Creating test files failed with %s", err)
+ }
+ defer cleanup()
+
+ args := []string{"-test.run=TestConflict", "--", "conflict"}
+ cmd := exec.Command(os.Args[0], args...)
+ cmd.Env = append(os.Environ(), "TESTCONFLICT_CHILD=1", "GOPATH="+dir, "GO111MODULE=off", "GOPROXY=off")
+
+ out, err := cmd.CombinedOutput()
+ var exitcode int
+ if err, ok := err.(*exec.ExitError); ok {
+ exitcode = err.ExitCode() // requires go1.12
+ }
+ const errExitCode = 1
+ if exitcode != errExitCode {
+ t.Errorf("%s: exited %d, want %d", args, exitcode, errExitCode)
+ }
+
+ pattern := `conflicting edits from rename and rename on /.*/conflict/foo.go`
+ matched, err := regexp.Match(pattern, out)
+ if err != nil {
+ t.Errorf("error matching pattern %s: %v", pattern, err)
+ } else if !matched {
+ t.Errorf("%s: output was=<<%s>>. Expected it to match <<%s>>", args, out, pattern)
+ }
+
+ // No files updated
+ for name, want := range files {
+ path := path.Join(dir, "src", name)
+ contents, err := ioutil.ReadFile(path)
+ if err != nil {
+ t.Errorf("error reading %s: %v", path, err)
+ }
+ if got := string(contents); got != want {
+ t.Errorf("contents of %s file updated. got=%s, want=%s", path, got, want)
+ }
+ }
+}
+
+// TestOther ensures that checker.Run reports conflicts from
+// distinct actions correctly.
+// This test fork/execs the main function above.
+func TestOther(t *testing.T) {
+ oses := map[string]bool{"darwin": true, "linux": true}
+ if !oses[runtime.GOOS] {
+ t.Skipf("skipping fork/exec test on this platform")
+ }
+
+ if os.Getenv("TESTOTHER_CHILD") == "1" {
+ // child process
+
+ // replace [progname -test.run=TestOther -- ...]
+ // by [progname ...]
+ os.Args = os.Args[2:]
+ os.Args[0] = "vet"
+ main()
+ panic("unreachable")
+ }
+
+ testenv.NeedsTool(t, "go")
+
+ files := map[string]string{
+ "other/foo.go": `package other
+
+func Foo() {
+ bar := 12
+ _ = bar
+}
+
+// the end
+`,
+ }
+ dir, cleanup, err := analysistest.WriteFiles(files)
+ if err != nil {
+ t.Fatalf("Creating test files failed with %s", err)
+ }
+ defer cleanup()
+
+ args := []string{"-test.run=TestOther", "--", "other"}
+ cmd := exec.Command(os.Args[0], args...)
+ cmd.Env = append(os.Environ(), "TESTOTHER_CHILD=1", "GOPATH="+dir, "GO111MODULE=off", "GOPROXY=off")
+
+ out, err := cmd.CombinedOutput()
+ var exitcode int
+ if err, ok := err.(*exec.ExitError); ok {
+ exitcode = err.ExitCode() // requires go1.12
+ }
+ const errExitCode = 1
+ if exitcode != errExitCode {
+ t.Errorf("%s: exited %d, want %d", args, exitcode, errExitCode)
+ }
+
+ pattern := `conflicting edits from other and rename on /.*/other/foo.go`
+ matched, err := regexp.Match(pattern, out)
+ if err != nil {
+ t.Errorf("error matching pattern %s: %v", pattern, err)
+ } else if !matched {
+ t.Errorf("%s: output was=<<%s>>. Expected it to match <<%s>>", args, out, pattern)
+ }
+
+ // No files updated
+ for name, want := range files {
+ path := path.Join(dir, "src", name)
+ contents, err := ioutil.ReadFile(path)
+ if err != nil {
+ t.Errorf("error reading %s: %v", path, err)
+ }
+ if got := string(contents); got != want {
+ t.Errorf("contents of %s file updated. got=%s, want=%s", path, got, want)
+ }
+ }
+}
diff --git a/go/analysis/internal/checker/start_test.go b/go/analysis/internal/checker/start_test.go
new file mode 100644
index 000000000..ede21159b
--- /dev/null
+++ b/go/analysis/internal/checker/start_test.go
@@ -0,0 +1,85 @@
+// Copyright 2022 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package checker_test
+
+import (
+ "go/ast"
+ "io/ioutil"
+ "path/filepath"
+ "testing"
+
+ "golang.org/x/tools/go/analysis"
+ "golang.org/x/tools/go/analysis/analysistest"
+ "golang.org/x/tools/go/analysis/internal/checker"
+ "golang.org/x/tools/go/analysis/passes/inspect"
+ "golang.org/x/tools/go/ast/inspector"
+ "golang.org/x/tools/internal/testenv"
+)
+
+// TestStartFixes makes sure modifying the first character
+// of the file takes effect.
+func TestStartFixes(t *testing.T) {
+ testenv.NeedsGoPackages(t)
+
+ files := map[string]string{
+ "comment/doc.go": `/* Package comment */
+package comment
+`}
+
+ want := `// Package comment
+package comment
+`
+
+ testdata, cleanup, err := analysistest.WriteFiles(files)
+ if err != nil {
+ t.Fatal(err)
+ }
+ path := filepath.Join(testdata, "src/comment/doc.go")
+ checker.Fix = true
+ checker.Run([]string{"file=" + path}, []*analysis.Analyzer{commentAnalyzer})
+
+ contents, err := ioutil.ReadFile(path)
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ got := string(contents)
+ if got != want {
+ t.Errorf("contents of rewritten file\ngot: %s\nwant: %s", got, want)
+ }
+
+ defer cleanup()
+}
+
+var commentAnalyzer = &analysis.Analyzer{
+ Name: "comment",
+ Requires: []*analysis.Analyzer{inspect.Analyzer},
+ Run: commentRun,
+}
+
+func commentRun(pass *analysis.Pass) (interface{}, error) {
+ const (
+ from = "/* Package comment */"
+ to = "// Package comment"
+ )
+ inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector)
+ inspect.Preorder(nil, func(n ast.Node) {
+ if n, ok := n.(*ast.Comment); ok && n.Text == from {
+ pass.Report(analysis.Diagnostic{
+ Pos: n.Pos(),
+ End: n.End(),
+ SuggestedFixes: []analysis.SuggestedFix{{
+ TextEdits: []analysis.TextEdit{{
+ Pos: n.Pos(),
+ End: n.End(),
+ NewText: []byte(to),
+ }},
+ }},
+ })
+ }
+ })
+
+ return nil, nil
+}
diff --git a/go/analysis/internal/facts/facts.go b/go/analysis/internal/facts/facts.go
deleted file mode 100644
index 1fb69c615..000000000
--- a/go/analysis/internal/facts/facts.go
+++ /dev/null
@@ -1,323 +0,0 @@
-// Copyright 2018 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-// Package facts defines a serializable set of analysis.Fact.
-//
-// It provides a partial implementation of the Fact-related parts of the
-// analysis.Pass interface for use in analysis drivers such as "go vet"
-// and other build systems.
-//
-// The serial format is unspecified and may change, so the same version
-// of this package must be used for reading and writing serialized facts.
-//
-// The handling of facts in the analysis system parallels the handling
-// of type information in the compiler: during compilation of package P,
-// the compiler emits an export data file that describes the type of
-// every object (named thing) defined in package P, plus every object
-// indirectly reachable from one of those objects. Thus the downstream
-// compiler of package Q need only load one export data file per direct
-// import of Q, and it will learn everything about the API of package P
-// and everything it needs to know about the API of P's dependencies.
-//
-// Similarly, analysis of package P emits a fact set containing facts
-// about all objects exported from P, plus additional facts about only
-// those objects of P's dependencies that are reachable from the API of
-// package P; the downstream analysis of Q need only load one fact set
-// per direct import of Q.
-//
-// The notion of "exportedness" that matters here is that of the
-// compiler. According to the language spec, a method pkg.T.f is
-// unexported simply because its name starts with lowercase. But the
-// compiler must nonetheless export f so that downstream compilations can
-// accurately ascertain whether pkg.T implements an interface pkg.I
-// defined as interface{f()}. Exported thus means "described in export
-// data".
-//
-package facts
-
-import (
- "bytes"
- "encoding/gob"
- "fmt"
- "go/types"
- "io/ioutil"
- "log"
- "reflect"
- "sort"
- "sync"
-
- "golang.org/x/tools/go/analysis"
- "golang.org/x/tools/go/types/objectpath"
-)
-
-const debug = false
-
-// A Set is a set of analysis.Facts.
-//
-// Decode creates a Set of facts by reading from the imports of a given
-// package, and Encode writes out the set. Between these operation,
-// the Import and Export methods will query and update the set.
-//
-// All of Set's methods except String are safe to call concurrently.
-type Set struct {
- pkg *types.Package
- mu sync.Mutex
- m map[key]analysis.Fact
-}
-
-type key struct {
- pkg *types.Package
- obj types.Object // (object facts only)
- t reflect.Type
-}
-
-// ImportObjectFact implements analysis.Pass.ImportObjectFact.
-func (s *Set) ImportObjectFact(obj types.Object, ptr analysis.Fact) bool {
- if obj == nil {
- panic("nil object")
- }
- key := key{pkg: obj.Pkg(), obj: obj, t: reflect.TypeOf(ptr)}
- s.mu.Lock()
- defer s.mu.Unlock()
- if v, ok := s.m[key]; ok {
- reflect.ValueOf(ptr).Elem().Set(reflect.ValueOf(v).Elem())
- return true
- }
- return false
-}
-
-// ExportObjectFact implements analysis.Pass.ExportObjectFact.
-func (s *Set) ExportObjectFact(obj types.Object, fact analysis.Fact) {
- if obj.Pkg() != s.pkg {
- log.Panicf("in package %s: ExportObjectFact(%s, %T): can't set fact on object belonging another package",
- s.pkg, obj, fact)
- }
- key := key{pkg: obj.Pkg(), obj: obj, t: reflect.TypeOf(fact)}
- s.mu.Lock()
- s.m[key] = fact // clobber any existing entry
- s.mu.Unlock()
-}
-
-func (s *Set) AllObjectFacts(filter map[reflect.Type]bool) []analysis.ObjectFact {
- var facts []analysis.ObjectFact
- s.mu.Lock()
- for k, v := range s.m {
- if k.obj != nil && filter[k.t] {
- facts = append(facts, analysis.ObjectFact{Object: k.obj, Fact: v})
- }
- }
- s.mu.Unlock()
- return facts
-}
-
-// ImportPackageFact implements analysis.Pass.ImportPackageFact.
-func (s *Set) ImportPackageFact(pkg *types.Package, ptr analysis.Fact) bool {
- if pkg == nil {
- panic("nil package")
- }
- key := key{pkg: pkg, t: reflect.TypeOf(ptr)}
- s.mu.Lock()
- defer s.mu.Unlock()
- if v, ok := s.m[key]; ok {
- reflect.ValueOf(ptr).Elem().Set(reflect.ValueOf(v).Elem())
- return true
- }
- return false
-}
-
-// ExportPackageFact implements analysis.Pass.ExportPackageFact.
-func (s *Set) ExportPackageFact(fact analysis.Fact) {
- key := key{pkg: s.pkg, t: reflect.TypeOf(fact)}
- s.mu.Lock()
- s.m[key] = fact // clobber any existing entry
- s.mu.Unlock()
-}
-
-func (s *Set) AllPackageFacts(filter map[reflect.Type]bool) []analysis.PackageFact {
- var facts []analysis.PackageFact
- s.mu.Lock()
- for k, v := range s.m {
- if k.obj == nil && filter[k.t] {
- facts = append(facts, analysis.PackageFact{Package: k.pkg, Fact: v})
- }
- }
- s.mu.Unlock()
- return facts
-}
-
-// gobFact is the Gob declaration of a serialized fact.
-type gobFact struct {
- PkgPath string // path of package
- Object objectpath.Path // optional path of object relative to package itself
- Fact analysis.Fact // type and value of user-defined Fact
-}
-
-// Decode decodes all the facts relevant to the analysis of package pkg.
-// The read function reads serialized fact data from an external source
-// for one of of pkg's direct imports. The empty file is a valid
-// encoding of an empty fact set.
-//
-// It is the caller's responsibility to call gob.Register on all
-// necessary fact types.
-func Decode(pkg *types.Package, read func(packagePath string) ([]byte, error)) (*Set, error) {
- // Compute the import map for this package.
- // See the package doc comment.
- packages := importMap(pkg.Imports())
-
- // Read facts from imported packages.
- // Facts may describe indirectly imported packages, or their objects.
- m := make(map[key]analysis.Fact) // one big bucket
- for _, imp := range pkg.Imports() {
- logf := func(format string, args ...interface{}) {
- if debug {
- prefix := fmt.Sprintf("in %s, importing %s: ",
- pkg.Path(), imp.Path())
- log.Print(prefix, fmt.Sprintf(format, args...))
- }
- }
-
- // Read the gob-encoded facts.
- data, err := read(imp.Path())
- if err != nil {
- return nil, fmt.Errorf("in %s, can't import facts for package %q: %v",
- pkg.Path(), imp.Path(), err)
- }
- if len(data) == 0 {
- continue // no facts
- }
- var gobFacts []gobFact
- if err := gob.NewDecoder(bytes.NewReader(data)).Decode(&gobFacts); err != nil {
- return nil, fmt.Errorf("decoding facts for %q: %v", imp.Path(), err)
- }
- if debug {
- logf("decoded %d facts: %v", len(gobFacts), gobFacts)
- }
-
- // Parse each one into a key and a Fact.
- for _, f := range gobFacts {
- factPkg := packages[f.PkgPath]
- if factPkg == nil {
- // Fact relates to a dependency that was
- // unused in this translation unit. Skip.
- logf("no package %q; discarding %v", f.PkgPath, f.Fact)
- continue
- }
- key := key{pkg: factPkg, t: reflect.TypeOf(f.Fact)}
- if f.Object != "" {
- // object fact
- obj, err := objectpath.Object(factPkg, f.Object)
- if err != nil {
- // (most likely due to unexported object)
- // TODO(adonovan): audit for other possibilities.
- logf("no object for path: %v; discarding %s", err, f.Fact)
- continue
- }
- key.obj = obj
- logf("read %T fact %s for %v", f.Fact, f.Fact, key.obj)
- } else {
- // package fact
- logf("read %T fact %s for %v", f.Fact, f.Fact, factPkg)
- }
- m[key] = f.Fact
- }
- }
-
- return &Set{pkg: pkg, m: m}, nil
-}
-
-// Encode encodes a set of facts to a memory buffer.
-//
-// It may fail if one of the Facts could not be gob-encoded, but this is
-// a sign of a bug in an Analyzer.
-func (s *Set) Encode() []byte {
-
- // TODO(adonovan): opt: use a more efficient encoding
- // that avoids repeating PkgPath for each fact.
-
- // Gather all facts, including those from imported packages.
- var gobFacts []gobFact
-
- s.mu.Lock()
- for k, fact := range s.m {
- if debug {
- log.Printf("%v => %s\n", k, fact)
- }
- var object objectpath.Path
- if k.obj != nil {
- path, err := objectpath.For(k.obj)
- if err != nil {
- if debug {
- log.Printf("discarding fact %s about %s\n", fact, k.obj)
- }
- continue // object not accessible from package API; discard fact
- }
- object = path
- }
- gobFacts = append(gobFacts, gobFact{
- PkgPath: k.pkg.Path(),
- Object: object,
- Fact: fact,
- })
- }
- s.mu.Unlock()
-
- // Sort facts by (package, object, type) for determinism.
- sort.Slice(gobFacts, func(i, j int) bool {
- x, y := gobFacts[i], gobFacts[j]
- if x.PkgPath != y.PkgPath {
- return x.PkgPath < y.PkgPath
- }
- if x.Object != y.Object {
- return x.Object < y.Object
- }
- tx := reflect.TypeOf(x.Fact)
- ty := reflect.TypeOf(y.Fact)
- if tx != ty {
- return tx.String() < ty.String()
- }
- return false // equal
- })
-
- var buf bytes.Buffer
- if len(gobFacts) > 0 {
- if err := gob.NewEncoder(&buf).Encode(gobFacts); err != nil {
- // Fact encoding should never fail. Identify the culprit.
- for _, gf := range gobFacts {
- if err := gob.NewEncoder(ioutil.Discard).Encode(gf); err != nil {
- fact := gf.Fact
- pkgpath := reflect.TypeOf(fact).Elem().PkgPath()
- log.Panicf("internal error: gob encoding of analysis fact %s failed: %v; please report a bug against fact %T in package %q",
- fact, err, fact, pkgpath)
- }
- }
- }
- }
-
- if debug {
- log.Printf("package %q: encode %d facts, %d bytes\n",
- s.pkg.Path(), len(gobFacts), buf.Len())
- }
-
- return buf.Bytes()
-}
-
-// String is provided only for debugging, and must not be called
-// concurrent with any Import/Export method.
-func (s *Set) String() string {
- var buf bytes.Buffer
- buf.WriteString("{")
- for k, f := range s.m {
- if buf.Len() > 1 {
- buf.WriteString(", ")
- }
- if k.obj != nil {
- buf.WriteString(k.obj.String())
- } else {
- buf.WriteString(k.pkg.Path())
- }
- fmt.Fprintf(&buf, ": %v", f)
- }
- buf.WriteString("}")
- return buf.String()
-}
diff --git a/go/analysis/internal/facts/facts_test.go b/go/analysis/internal/facts/facts_test.go
deleted file mode 100644
index 13c358230..000000000
--- a/go/analysis/internal/facts/facts_test.go
+++ /dev/null
@@ -1,384 +0,0 @@
-// Copyright 2018 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-package facts_test
-
-import (
- "encoding/gob"
- "fmt"
- "go/token"
- "go/types"
- "os"
- "reflect"
- "testing"
-
- "golang.org/x/tools/go/analysis/analysistest"
- "golang.org/x/tools/go/analysis/internal/facts"
- "golang.org/x/tools/go/packages"
- "golang.org/x/tools/internal/testenv"
- "golang.org/x/tools/internal/typeparams"
-)
-
-type myFact struct {
- S string
-}
-
-func (f *myFact) String() string { return fmt.Sprintf("myFact(%s)", f.S) }
-func (f *myFact) AFact() {}
-
-func init() {
- gob.Register(new(myFact))
-}
-
-func TestEncodeDecode(t *testing.T) {
- tests := []struct {
- name string
- typeparams bool // requires typeparams to be enabled
- files map[string]string
- plookups []pkgLookups // see testEncodeDecode for details
- }{
- {
- name: "loading-order",
- // c -> b -> a, a2
- // c does not directly depend on a, but it indirectly uses a.T.
- //
- // Package a2 is never loaded directly so it is incomplete.
- //
- // We use only types in this example because we rely on
- // types.Eval to resolve the lookup expressions, and it only
- // works for types. This is a definite gap in the typechecker API.
- files: map[string]string{
- "a/a.go": `package a; type A int; type T int`,
- "a2/a.go": `package a2; type A2 int; type Unneeded int`,
- "b/b.go": `package b; import ("a"; "a2"); type B chan a2.A2; type F func() a.T`,
- "c/c.go": `package c; import "b"; type C []b.B`,
- },
- // In the following table, we analyze packages (a, b, c) in order,
- // look up various objects accessible within each package,
- // and see if they have a fact. The "analysis" exports a fact
- // for every object at package level.
- //
- // Note: Loop iterations are not independent test cases;
- // order matters, as we populate factmap.
- plookups: []pkgLookups{
- {"a", []lookup{
- {"A", "myFact(a.A)"},
- }},
- {"b", []lookup{
- {"a.A", "myFact(a.A)"},
- {"a.T", "myFact(a.T)"},
- {"B", "myFact(b.B)"},
- {"F", "myFact(b.F)"},
- {"F(nil)()", "myFact(a.T)"}, // (result type of b.F)
- }},
- {"c", []lookup{
- {"b.B", "myFact(b.B)"},
- {"b.F", "myFact(b.F)"},
- //{"b.F(nil)()", "myFact(a.T)"}, // no fact; TODO(adonovan): investigate
- {"C", "myFact(c.C)"},
- {"C{}[0]", "myFact(b.B)"},
- {"<-(C{}[0])", "no fact"}, // object but no fact (we never "analyze" a2)
- }},
- },
- },
- {
- name: "globals",
- files: map[string]string{
- "a/a.go": `package a;
- type T1 int
- type T2 int
- type T3 int
- type T4 int
- type T5 int
- type K int; type V string
- `,
- "b/b.go": `package b
- import "a"
- var (
- G1 []a.T1
- G2 [7]a.T2
- G3 chan a.T3
- G4 *a.T4
- G5 struct{ F a.T5 }
- G6 map[a.K]a.V
- )
- `,
- "c/c.go": `package c; import "b";
- var (
- v1 = b.G1
- v2 = b.G2
- v3 = b.G3
- v4 = b.G4
- v5 = b.G5
- v6 = b.G6
- )
- `,
- },
- plookups: []pkgLookups{
- {"a", []lookup{}},
- {"b", []lookup{}},
- {"c", []lookup{
- {"v1[0]", "myFact(a.T1)"},
- {"v2[0]", "myFact(a.T2)"},
- {"<-v3", "myFact(a.T3)"},
- {"*v4", "myFact(a.T4)"},
- {"v5.F", "myFact(a.T5)"},
- {"v6[0]", "myFact(a.V)"},
- }},
- },
- },
- {
- name: "typeparams",
- typeparams: true,
- files: map[string]string{
- "a/a.go": `package a
- type T1 int
- type T2 int
- type T3 interface{Foo()}
- type T4 int
- type T5 int
- type T6 interface{Foo()}
- `,
- "b/b.go": `package b
- import "a"
- type N1[T a.T1|int8] func() T
- type N2[T any] struct{ F T }
- type N3[T a.T3] func() T
- type N4[T a.T4|int8] func() T
- type N5[T interface{Bar() a.T5} ] func() T
-
- type t5 struct{}; func (t5) Bar() a.T5
-
- var G1 N1[a.T1]
- var G2 func() N2[a.T2]
- var G3 N3[a.T3]
- var G4 N4[a.T4]
- var G5 N5[t5]
-
- func F6[T a.T6]() T { var x T; return x }
- `,
- "c/c.go": `package c; import "b";
- var (
- v1 = b.G1
- v2 = b.G2
- v3 = b.G3
- v4 = b.G4
- v5 = b.G5
- v6 = b.F6[t6]
- )
-
- type t6 struct{}; func (t6) Foo() {}
- `,
- },
- plookups: []pkgLookups{
- {"a", []lookup{}},
- {"b", []lookup{}},
- {"c", []lookup{
- {"v1", "myFact(b.N1)"},
- {"v1()", "myFact(a.T1)"},
- {"v2()", "myFact(b.N2)"},
- {"v2().F", "myFact(a.T2)"},
- {"v3", "myFact(b.N3)"},
- {"v4", "myFact(b.N4)"},
- {"v4()", "myFact(a.T4)"},
- {"v5", "myFact(b.N5)"},
- {"v5()", "myFact(b.t5)"},
- {"v6()", "myFact(c.t6)"},
- }},
- },
- },
- }
-
- for i := range tests {
- test := tests[i]
- t.Run(test.name, func(t *testing.T) {
- t.Parallel()
- if test.typeparams && !typeparams.Enabled {
- t.Skip("type parameters are not enabled")
- }
- testEncodeDecode(t, test.files, test.plookups)
- })
- }
-}
-
-type lookup struct {
- objexpr string
- want string
-}
-
-type pkgLookups struct {
- path string
- lookups []lookup
-}
-
-// testEncodeDecode tests fact encoding and decoding and simulates how package facts
-// are passed during analysis. It operates on a group of Go file contents. Then
-// for each <package, []lookup> in tests it does the following:
-// 1) loads and type checks the package,
-// 2) calls facts.Decode to loads the facts exported by its imports,
-// 3) exports a myFact Fact for all of package level objects,
-// 4) For each lookup for the current package:
-// 4.a) lookup the types.Object for an Go source expression in the curent package
-// (or confirms one is not expected want=="no object"),
-// 4.b) finds a Fact for the object (or confirms one is not expected want=="no fact"),
-// 4.c) compares the content of the Fact to want.
-// 5) encodes the Facts of the package.
-//
-// Note: tests are not independent test cases; order matters (as does a package being
-// skipped). It changes what Facts can be imported.
-//
-// Failures are reported on t.
-func testEncodeDecode(t *testing.T, files map[string]string, tests []pkgLookups) {
- dir, cleanup, err := analysistest.WriteFiles(files)
- if err != nil {
- t.Fatal(err)
- }
- defer cleanup()
-
- // factmap represents the passing of encoded facts from one
- // package to another. In practice one would use the file system.
- factmap := make(map[string][]byte)
- read := func(path string) ([]byte, error) { return factmap[path], nil }
-
- // Analyze packages in order, look up various objects accessible within
- // each package, and see if they have a fact. The "analysis" exports a
- // fact for every object at package level.
- //
- // Note: Loop iterations are not independent test cases;
- // order matters, as we populate factmap.
- for _, test := range tests {
- // load package
- pkg, err := load(t, dir, test.path)
- if err != nil {
- t.Fatal(err)
- }
-
- // decode
- facts, err := facts.Decode(pkg, read)
- if err != nil {
- t.Fatalf("Decode failed: %v", err)
- }
- t.Logf("decode %s facts = %v", pkg.Path(), facts) // show all facts
-
- // export
- // (one fact for each package-level object)
- for _, name := range pkg.Scope().Names() {
- obj := pkg.Scope().Lookup(name)
- fact := &myFact{obj.Pkg().Name() + "." + obj.Name()}
- facts.ExportObjectFact(obj, fact)
- }
- t.Logf("exported %s facts = %v", pkg.Path(), facts) // show all facts
-
- // import
- // (after export, because an analyzer may import its own facts)
- for _, lookup := range test.lookups {
- fact := new(myFact)
- var got string
- if obj := find(pkg, lookup.objexpr); obj == nil {
- got = "no object"
- } else if facts.ImportObjectFact(obj, fact) {
- got = fact.String()
- } else {
- got = "no fact"
- }
- if got != lookup.want {
- t.Errorf("in %s, ImportObjectFact(%s, %T) = %s, want %s",
- pkg.Path(), lookup.objexpr, fact, got, lookup.want)
- }
- }
-
- // encode
- factmap[pkg.Path()] = facts.Encode()
- }
-}
-
-func find(p *types.Package, expr string) types.Object {
- // types.Eval only allows us to compute a TypeName object for an expression.
- // TODO(adonovan): support other expressions that denote an object:
- // - an identifier (or qualified ident) for a func, const, or var
- // - new(T).f for a field or method
- // I've added CheckExpr in https://go-review.googlesource.com/c/go/+/144677.
- // If that becomes available, use it.
-
- // Choose an arbitrary position within the (single-file) package
- // so that we are within the scope of its import declarations.
- somepos := p.Scope().Lookup(p.Scope().Names()[0]).Pos()
- tv, err := types.Eval(token.NewFileSet(), p, somepos, expr)
- if err != nil {
- return nil
- }
- if n, ok := tv.Type.(*types.Named); ok {
- return n.Obj()
- }
- return nil
-}
-
-func load(t *testing.T, dir string, path string) (*types.Package, error) {
- cfg := &packages.Config{
- Mode: packages.LoadSyntax,
- Dir: dir,
- Env: append(os.Environ(), "GOPATH="+dir, "GO111MODULE=off", "GOPROXY=off"),
- }
- testenv.NeedsGoPackagesEnv(t, cfg.Env)
- pkgs, err := packages.Load(cfg, path)
- if err != nil {
- return nil, err
- }
- if packages.PrintErrors(pkgs) > 0 {
- return nil, fmt.Errorf("packages had errors")
- }
- if len(pkgs) == 0 {
- return nil, fmt.Errorf("no package matched %s", path)
- }
- return pkgs[0].Types, nil
-}
-
-type otherFact struct {
- S string
-}
-
-func (f *otherFact) String() string { return fmt.Sprintf("otherFact(%s)", f.S) }
-func (f *otherFact) AFact() {}
-
-func TestFactFilter(t *testing.T) {
- files := map[string]string{
- "a/a.go": `package a; type A int`,
- }
- dir, cleanup, err := analysistest.WriteFiles(files)
- if err != nil {
- t.Fatal(err)
- }
- defer cleanup()
-
- pkg, err := load(t, dir, "a")
- if err != nil {
- t.Fatal(err)
- }
-
- obj := pkg.Scope().Lookup("A")
- s, err := facts.Decode(pkg, func(string) ([]byte, error) { return nil, nil })
- if err != nil {
- t.Fatal(err)
- }
- s.ExportObjectFact(obj, &myFact{"good object fact"})
- s.ExportPackageFact(&myFact{"good package fact"})
- s.ExportObjectFact(obj, &otherFact{"bad object fact"})
- s.ExportPackageFact(&otherFact{"bad package fact"})
-
- filter := map[reflect.Type]bool{
- reflect.TypeOf(&myFact{}): true,
- }
-
- pkgFacts := s.AllPackageFacts(filter)
- wantPkgFacts := `[{package a ("a") myFact(good package fact)}]`
- if got := fmt.Sprintf("%v", pkgFacts); got != wantPkgFacts {
- t.Errorf("AllPackageFacts: got %v, want %v", got, wantPkgFacts)
- }
-
- objFacts := s.AllObjectFacts(filter)
- wantObjFacts := "[{type a.A int myFact(good object fact)}]"
- if got := fmt.Sprintf("%v", objFacts); got != wantObjFacts {
- t.Errorf("AllObjectFacts: got %v, want %v", got, wantObjFacts)
- }
-}
diff --git a/go/analysis/internal/facts/imports.go b/go/analysis/internal/facts/imports.go
deleted file mode 100644
index ade0cc6fa..000000000
--- a/go/analysis/internal/facts/imports.go
+++ /dev/null
@@ -1,119 +0,0 @@
-// Copyright 2018 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-package facts
-
-import (
- "go/types"
-
- "golang.org/x/tools/internal/typeparams"
-)
-
-// importMap computes the import map for a package by traversing the
-// entire exported API each of its imports.
-//
-// This is a workaround for the fact that we cannot access the map used
-// internally by the types.Importer returned by go/importer. The entries
-// in this map are the packages and objects that may be relevant to the
-// current analysis unit.
-//
-// Packages in the map that are only indirectly imported may be
-// incomplete (!pkg.Complete()).
-//
-func importMap(imports []*types.Package) map[string]*types.Package {
- objects := make(map[types.Object]bool)
- packages := make(map[string]*types.Package)
-
- var addObj func(obj types.Object) bool
- var addType func(T types.Type)
-
- addObj = func(obj types.Object) bool {
- if !objects[obj] {
- objects[obj] = true
- addType(obj.Type())
- if pkg := obj.Pkg(); pkg != nil {
- packages[pkg.Path()] = pkg
- }
- return true
- }
- return false
- }
-
- addType = func(T types.Type) {
- switch T := T.(type) {
- case *types.Basic:
- // nop
- case *types.Named:
- if addObj(T.Obj()) {
- // TODO(taking): Investigate why the Underlying type is not added here.
- for i := 0; i < T.NumMethods(); i++ {
- addObj(T.Method(i))
- }
- if tparams := typeparams.ForNamed(T); tparams != nil {
- for i := 0; i < tparams.Len(); i++ {
- addType(tparams.At(i))
- }
- }
- if targs := typeparams.NamedTypeArgs(T); targs != nil {
- for i := 0; i < targs.Len(); i++ {
- addType(targs.At(i))
- }
- }
- }
- case *types.Pointer:
- addType(T.Elem())
- case *types.Slice:
- addType(T.Elem())
- case *types.Array:
- addType(T.Elem())
- case *types.Chan:
- addType(T.Elem())
- case *types.Map:
- addType(T.Key())
- addType(T.Elem())
- case *types.Signature:
- addType(T.Params())
- addType(T.Results())
- if tparams := typeparams.ForSignature(T); tparams != nil {
- for i := 0; i < tparams.Len(); i++ {
- addType(tparams.At(i))
- }
- }
- case *types.Struct:
- for i := 0; i < T.NumFields(); i++ {
- addObj(T.Field(i))
- }
- case *types.Tuple:
- for i := 0; i < T.Len(); i++ {
- addObj(T.At(i))
- }
- case *types.Interface:
- for i := 0; i < T.NumMethods(); i++ {
- addObj(T.Method(i))
- }
- for i := 0; i < T.NumEmbeddeds(); i++ {
- addType(T.EmbeddedType(i)) // walk Embedded for implicits
- }
- case *typeparams.Union:
- for i := 0; i < T.Len(); i++ {
- addType(T.Term(i).Type())
- }
- case *typeparams.TypeParam:
- if addObj(T.Obj()) {
- addType(T.Constraint())
- }
- }
- }
-
- for _, imp := range imports {
- packages[imp.Path()] = imp
-
- scope := imp.Scope()
- for _, name := range scope.Names() {
- addObj(scope.Lookup(name))
- }
- }
-
- return packages
-}