path: root/go
author Dan Willemsen <dwillemsen@google.com> 2023-03-15 13:19:36 -0400
committer Dan Willemsen <dwillemsen@google.com> 2023-03-15 14:18:08 -0400
commit 09c5a32afc5b66f28f166a68afe1fc71afbf9b73 (patch)
tree 194d7b0e539d014393564a256bec571e18d6533a /go
parent f10932f763d058b0dcb3acfb795c869996fef47b (diff)
parent 031fc75960d487b0b15db12fb328676236a3a39c (diff)
Upgrade golang-x-tools to v0.7.0 (HEAD, master, main)

Not using external_updater this time to switch to the new upstream tags.

Test: treehugger
Change-Id: I31488b4958a366ed7f183bb387d3e1446acc13ae
Diffstat (limited to 'go')
-rw-r--r-- go/analysis/analysis.go | 15
-rw-r--r-- go/analysis/analysistest/analysistest.go | 96
-rw-r--r-- go/analysis/diagnostic.go | 2
-rw-r--r-- go/analysis/doc.go | 44
-rw-r--r-- go/analysis/internal/analysisflags/flags.go | 75
-rw-r--r-- go/analysis/internal/analysisflags/flags_test.go | 7
-rw-r--r-- go/analysis/internal/checker/checker.go | 296
-rw-r--r-- go/analysis/internal/checker/checker_test.go | 90
-rw-r--r-- go/analysis/internal/checker/fix_test.go | 309
-rw-r--r-- go/analysis/internal/checker/start_test.go | 85
-rw-r--r-- go/analysis/internal/facts/facts.go | 323
-rw-r--r-- go/analysis/internal/facts/facts_test.go | 384
-rw-r--r-- go/analysis/internal/facts/imports.go | 119
-rw-r--r-- go/analysis/passes/asmdecl/arches_go118.go | 12
-rw-r--r-- go/analysis/passes/asmdecl/arches_go119.go | 14
-rw-r--r-- go/analysis/passes/asmdecl/asmdecl.go | 5
-rw-r--r-- go/analysis/passes/asmdecl/asmdecl_test.go | 19
-rw-r--r-- go/analysis/passes/asmdecl/testdata/src/a/asm10.s | 192
-rw-r--r-- go/analysis/passes/asmdecl/testdata/src/a/asm11.s | 13
-rw-r--r-- go/analysis/passes/assign/assign.go | 15
-rw-r--r-- go/analysis/passes/assign/testdata/src/a/a.go | 28
-rw-r--r-- go/analysis/passes/assign/testdata/src/a/a.go.golden | 28
-rw-r--r-- go/analysis/passes/bools/bools.go | 12
-rw-r--r-- go/analysis/passes/buildssa/buildssa_test.go | 37
-rw-r--r-- go/analysis/passes/buildssa/testdata/src/b/b.go | 20
-rw-r--r-- go/analysis/passes/buildssa/testdata/src/c/c.go | 24
-rw-r--r-- go/analysis/passes/buildtag/buildtag.go | 2
-rw-r--r-- go/analysis/passes/buildtag/buildtag_old.go | 2
-rw-r--r-- go/analysis/passes/cgocall/cgocall.go | 16
-rw-r--r-- go/analysis/passes/composite/composite.go | 41
-rw-r--r-- go/analysis/passes/composite/composite_test.go | 2
-rw-r--r-- go/analysis/passes/composite/testdata/src/a/a.go | 17
-rw-r--r-- go/analysis/passes/composite/testdata/src/a/a.go.golden | 144
-rw-r--r-- go/analysis/passes/composite/testdata/src/a/a_fuzz_test.go.golden | 16
-rw-r--r-- go/analysis/passes/composite/testdata/src/typeparams/typeparams.go | 10
-rw-r--r-- go/analysis/passes/composite/testdata/src/typeparams/typeparams.go.golden | 27
-rw-r--r-- go/analysis/passes/copylock/copylock.go | 2
-rw-r--r-- go/analysis/passes/copylock/testdata/src/a/copylock.go | 42
-rw-r--r-- go/analysis/passes/copylock/testdata/src/a/copylock_func.go | 2
-rw-r--r-- go/analysis/passes/directive/directive.go | 216
-rw-r--r-- go/analysis/passes/directive/directive_test.go | 39
-rw-r--r-- go/analysis/passes/directive/testdata/src/a/badspace.go | 11
-rw-r--r-- go/analysis/passes/directive/testdata/src/a/misplaced.go | 10
-rw-r--r-- go/analysis/passes/directive/testdata/src/a/misplaced.s | 19
-rw-r--r-- go/analysis/passes/directive/testdata/src/a/misplaced_test.go | 10
-rw-r--r-- go/analysis/passes/directive/testdata/src/a/p.go | 11
-rw-r--r-- go/analysis/passes/errorsas/errorsas.go | 28
-rw-r--r-- go/analysis/passes/errorsas/testdata/src/a/a.go | 4
-rw-r--r-- go/analysis/passes/errorsas/testdata/src/typeparams/typeparams.go | 2
-rw-r--r-- go/analysis/passes/fieldalignment/fieldalignment.go | 7
-rw-r--r-- go/analysis/passes/httpresponse/httpresponse.go | 27
-rw-r--r-- go/analysis/passes/httpresponse/httpresponse_test.go | 3
-rw-r--r-- go/analysis/passes/httpresponse/testdata/src/a/a.go | 27
-rw-r--r-- go/analysis/passes/ifaceassert/parameterized.go | 1
-rw-r--r-- go/analysis/passes/inspect/inspect.go | 15
-rw-r--r-- go/analysis/passes/loopclosure/loopclosure.go | 408
-rw-r--r-- go/analysis/passes/loopclosure/loopclosure_test.go | 4
-rw-r--r-- go/analysis/passes/loopclosure/testdata/src/a/a.go | 131
-rw-r--r-- go/analysis/passes/loopclosure/testdata/src/a/b.go | 9
-rw-r--r-- go/analysis/passes/loopclosure/testdata/src/subtests/subtest.go | 202
-rw-r--r-- go/analysis/passes/nilness/nilness.go | 18
-rw-r--r-- go/analysis/passes/nilness/nilness_test.go | 17
-rw-r--r-- go/analysis/passes/nilness/testdata/src/a/a.go | 41
-rw-r--r-- go/analysis/passes/nilness/testdata/src/c/c.go | 14
-rw-r--r-- go/analysis/passes/nilness/testdata/src/d/d.go | 55
-rw-r--r-- go/analysis/passes/pkgfact/pkgfact.go | 8
-rw-r--r-- go/analysis/passes/printf/printf.go | 27
-rw-r--r-- go/analysis/passes/printf/testdata/src/a/a.go | 3
-rw-r--r-- go/analysis/passes/printf/testdata/src/typeparams/diagnostics.go | 22
-rw-r--r-- go/analysis/passes/printf/types.go | 10
-rw-r--r-- go/analysis/passes/shadow/shadow.go | 1
-rw-r--r-- go/analysis/passes/sigchanyzer/sigchanyzer.go | 2
-rw-r--r-- go/analysis/passes/sortslice/analyzer.go | 9
-rw-r--r-- go/analysis/passes/sortslice/testdata/src/a/a.go | 24
-rw-r--r-- go/analysis/passes/stdmethods/stdmethods.go | 13
-rw-r--r-- go/analysis/passes/stdmethods/testdata/src/a/a.go | 14
-rw-r--r-- go/analysis/passes/stdmethods/testdata/src/typeparams/typeparams.go | 4
-rw-r--r-- go/analysis/passes/tests/testdata/src/a/go118_test.go | 5
-rw-r--r-- go/analysis/passes/tests/tests.go | 30
-rw-r--r-- go/analysis/passes/timeformat/testdata/src/a/a.go | 50
-rw-r--r-- go/analysis/passes/timeformat/testdata/src/a/a.go.golden | 50
-rw-r--r-- go/analysis/passes/timeformat/testdata/src/b/b.go | 11
-rw-r--r-- go/analysis/passes/timeformat/timeformat.go | 129
-rw-r--r-- go/analysis/passes/timeformat/timeformat_test.go | 17
-rw-r--r-- go/analysis/passes/unusedwrite/unusedwrite.go | 53
-rw-r--r-- go/analysis/singlechecker/singlechecker.go | 15
-rw-r--r-- go/analysis/unitchecker/main.go | 4
-rw-r--r-- go/analysis/unitchecker/unitchecker.go | 30
-rw-r--r-- go/analysis/unitchecker/unitchecker_test.go | 81
-rw-r--r-- go/analysis/validate.go | 5
-rw-r--r-- go/analysis/validate_test.go | 34
-rw-r--r-- go/ast/astutil/enclosing.go | 29
-rw-r--r-- go/ast/astutil/enclosing_test.go | 1
-rw-r--r-- go/ast/astutil/imports.go | 7
-rw-r--r-- go/ast/astutil/imports_test.go | 28
-rw-r--r-- go/ast/astutil/rewrite.go | 11
-rw-r--r-- go/ast/astutil/rewrite_test.go | 18
-rw-r--r-- go/ast/inspector/inspector.go | 68
-rw-r--r-- go/ast/inspector/inspector_test.go | 44
-rw-r--r-- go/ast/inspector/typeof.go | 18
-rw-r--r-- go/buildutil/allpackages.go | 11
-rw-r--r-- go/buildutil/fakecontext.go | 1
-rw-r--r-- go/buildutil/overlay.go | 3
-rw-r--r-- go/buildutil/tags.go | 3
-rw-r--r-- go/buildutil/util.go | 5
-rw-r--r-- go/callgraph/callgraph.go | 5
-rw-r--r-- go/callgraph/callgraph_test.go | 253
-rw-r--r-- go/callgraph/cha/cha.go | 104
-rw-r--r-- go/callgraph/cha/cha_test.go | 89
-rw-r--r-- go/callgraph/cha/testdata/generics.go | 49
-rw-r--r-- go/callgraph/rta/rta.go | 8
-rw-r--r-- go/callgraph/rta/rta_test.go | 92
-rw-r--r-- go/callgraph/rta/testdata/generics.go | 79
-rw-r--r-- go/callgraph/static/static.go | 3
-rw-r--r-- go/callgraph/static/static_test.go | 116
-rw-r--r-- go/callgraph/util.go | 4
-rw-r--r-- go/callgraph/vta/graph.go | 126
-rw-r--r-- go/callgraph/vta/graph_test.go | 7
-rw-r--r-- go/callgraph/vta/helpers_test.go | 8
-rw-r--r-- go/callgraph/vta/internal/trie/bits.go | 25
-rw-r--r-- go/callgraph/vta/internal/trie/builder.go | 24
-rw-r--r-- go/callgraph/vta/internal/trie/trie.go | 12
-rw-r--r-- go/callgraph/vta/propagation.go | 57
-rw-r--r-- go/callgraph/vta/propagation_test.go | 61
-rw-r--r-- go/callgraph/vta/testdata/src/callgraph_generics.go | 71
-rw-r--r-- go/callgraph/vta/testdata/src/callgraph_issue_57756.go | 67
-rw-r--r-- go/callgraph/vta/testdata/src/callgraph_recursive_types.go | 56
-rw-r--r-- go/callgraph/vta/testdata/src/function_alias.go | 44
-rw-r--r-- go/callgraph/vta/testdata/src/panic.go | 3
-rw-r--r-- go/callgraph/vta/utils.go | 110
-rw-r--r-- go/callgraph/vta/vta.go | 33
-rw-r--r-- go/callgraph/vta/vta_go117_test.go | 3
-rw-r--r-- go/callgraph/vta/vta_test.go | 28
-rw-r--r-- go/cfg/builder.go | 3
-rw-r--r-- go/cfg/cfg.go | 17
-rw-r--r-- go/expect/expect.go | 13
-rw-r--r-- go/expect/expect_test.go | 2
-rw-r--r-- go/expect/testdata/go.fake.mod | 9
-rw-r--r-- go/expect/testdata/go.mod | 5
-rw-r--r-- go/gccgoexportdata/gccgoexportdata_test.go | 12
-rw-r--r-- go/gcexportdata/example_test.go | 36
-rw-r--r-- go/gcexportdata/gcexportdata.go | 81
-rw-r--r-- go/gcexportdata/gcexportdata_test.go | 45
-rw-r--r-- go/gcexportdata/importer.go | 2
-rw-r--r-- go/gcexportdata/testdata/errors-ae16.a | bin 5494 -> 0 bytes
-rw-r--r-- go/internal/cgo/cgo.go | 1
-rw-r--r-- go/internal/gccgoimporter/parser.go | 145
-rw-r--r-- go/internal/gccgoimporter/testenv_test.go | 4
-rw-r--r-- go/internal/gcimporter/bexport.go | 851
-rw-r--r-- go/internal/gcimporter/bexport_test.go | 551
-rw-r--r-- go/internal/gcimporter/bimport.go | 1053
-rw-r--r-- go/internal/gcimporter/exportdata.go | 99
-rw-r--r-- go/internal/gcimporter/gcimporter.go | 1084
-rw-r--r-- go/internal/gcimporter/gcimporter_test.go | 611
-rw-r--r-- go/internal/gcimporter/iexport.go | 1010
-rw-r--r-- go/internal/gcimporter/iexport_common_test.go | 16
-rw-r--r-- go/internal/gcimporter/iexport_go118_test.go | 254
-rw-r--r-- go/internal/gcimporter/iexport_test.go | 405
-rw-r--r-- go/internal/gcimporter/iimport.go | 898
-rw-r--r-- go/internal/gcimporter/newInterface10.go | 22
-rw-r--r-- go/internal/gcimporter/newInterface11.go | 14
-rw-r--r-- go/internal/gcimporter/support_go117.go | 16
-rw-r--r-- go/internal/gcimporter/support_go118.go | 23
-rw-r--r-- go/internal/gcimporter/testdata/a.go | 14
-rw-r--r-- go/internal/gcimporter/testdata/b.go | 11
-rw-r--r-- go/internal/gcimporter/testdata/exports.go | 89
-rw-r--r-- go/internal/gcimporter/testdata/issue15920.go | 11
-rw-r--r-- go/internal/gcimporter/testdata/issue20046.go | 9
-rw-r--r-- go/internal/gcimporter/testdata/issue25301.go | 17
-rw-r--r-- go/internal/gcimporter/testdata/p.go | 13
-rw-r--r-- go/internal/gcimporter/testdata/versions/test.go | 30
-rw-r--r-- go/internal/gcimporter/testdata/versions/test_go1.11_0i.a | bin 2420 -> 0 bytes
-rw-r--r-- go/internal/gcimporter/testdata/versions/test_go1.11_6b.a | bin 2426 -> 0 bytes
-rw-r--r-- go/internal/gcimporter/testdata/versions/test_go1.11_999b.a | bin 2600 -> 0 bytes
-rw-r--r-- go/internal/gcimporter/testdata/versions/test_go1.11_999i.a | bin 2420 -> 0 bytes
-rw-r--r-- go/internal/gcimporter/testdata/versions/test_go1.7_0.a | bin 1862 -> 0 bytes
-rw-r--r-- go/internal/gcimporter/testdata/versions/test_go1.7_1.a | bin 2316 -> 0 bytes
-rw-r--r-- go/internal/gcimporter/testdata/versions/test_go1.8_4.a | bin 1658 -> 0 bytes
-rw-r--r-- go/internal/gcimporter/testdata/versions/test_go1.8_5.a | bin 1658 -> 0 bytes
-rw-r--r-- go/loader/doc.go | 42
-rw-r--r-- go/loader/loader.go | 21
-rw-r--r-- go/loader/stdlib_test.go | 2
-rw-r--r-- go/loader/util.go | 1
-rw-r--r-- go/packages/doc.go | 1
-rw-r--r-- go/packages/golist.go | 108
-rw-r--r-- go/packages/overlay_test.go | 7
-rw-r--r-- go/packages/packages.go | 103
-rw-r--r-- go/packages/packages_test.go | 75
-rw-r--r-- go/packages/packagestest/expect.go | 140
-rw-r--r-- go/packages/packagestest/expect_test.go | 3
-rw-r--r-- go/packages/packagestest/export.go | 35
-rw-r--r-- go/packages/packagestest/gopath.go | 41
-rw-r--r-- go/packages/packagestest/modules.go | 29
-rw-r--r-- go/packages/packagestest/modules_test.go | 2
-rw-r--r-- go/pointer/analysis.go | 30
-rw-r--r-- go/pointer/api.go | 16
-rw-r--r-- go/pointer/callgraph.go | 1
-rw-r--r-- go/pointer/doc.go | 637
-rw-r--r-- go/pointer/example_test.go | 3
-rw-r--r-- go/pointer/gen.go | 57
-rw-r--r-- go/pointer/hvn.go | 30
-rw-r--r-- go/pointer/intrinsics.go | 2
-rw-r--r-- go/pointer/labels.go | 48
-rw-r--r-- go/pointer/opt.go | 1
-rw-r--r-- go/pointer/pointer_race_test.go (renamed from go/internal/gcimporter/israce_test.go) | 6
-rw-r--r-- go/pointer/pointer_test.go | 293
-rw-r--r-- go/pointer/reflect.go | 12
-rw-r--r-- go/pointer/solve.go | 4
-rw-r--r-- go/pointer/stdlib_test.go | 2
-rw-r--r-- go/pointer/testdata/typeparams.go | 68
-rw-r--r-- go/pointer/util.go | 28
-rw-r--r-- go/ssa/TODO | 16
-rw-r--r-- go/ssa/block.go | 5
-rw-r--r-- go/ssa/blockopt.go | 4
-rw-r--r-- go/ssa/builder.go | 616
-rw-r--r-- go/ssa/builder_generic_test.go | 679
-rw-r--r-- go/ssa/builder_go117_test.go | 1
-rw-r--r-- go/ssa/builder_go120_test.go | 102
-rw-r--r-- go/ssa/builder_test.go | 517
-rw-r--r-- go/ssa/const.go | 154
-rw-r--r-- go/ssa/const_test.go | 104
-rw-r--r-- go/ssa/coretype.go | 159
-rw-r--r-- go/ssa/coretype_test.go | 105
-rw-r--r-- go/ssa/create.go | 68
-rw-r--r-- go/ssa/doc.go | 100
-rw-r--r-- go/ssa/dom.go | 8
-rw-r--r-- go/ssa/emit.go | 211
-rw-r--r-- go/ssa/example_test.go | 6
-rw-r--r-- go/ssa/func.go | 169
-rw-r--r-- go/ssa/instantiate.go | 177
-rw-r--r-- go/ssa/instantiate_test.go | 361
-rw-r--r-- go/ssa/interp/interp.go | 65
-rw-r--r-- go/ssa/interp/interp_go120_test.go | 12
-rw-r--r-- go/ssa/interp/interp_test.go | 97
-rw-r--r-- go/ssa/interp/map.go | 2
-rw-r--r-- go/ssa/interp/ops.go | 87
-rw-r--r-- go/ssa/interp/reflect.go | 6
-rw-r--r-- go/ssa/interp/testdata/boundmeth.go | 3
-rw-r--r-- go/ssa/interp/testdata/convert.go | 9
-rw-r--r-- go/ssa/interp/testdata/deepequal.go | 93
-rw-r--r-- go/ssa/interp/testdata/fixedbugs/issue52342.go | 17
-rw-r--r-- go/ssa/interp/testdata/fixedbugs/issue52835.go | 27
-rw-r--r-- go/ssa/interp/testdata/fixedbugs/issue55086.go | 132
-rw-r--r-- go/ssa/interp/testdata/slice2array.go | 92
-rw-r--r-- go/ssa/interp/testdata/slice2arrayptr.go | 2
-rw-r--r-- go/ssa/interp/testdata/src/encoding/encoding.go | 15
-rw-r--r-- go/ssa/interp/testdata/src/log/log.go | 8
-rw-r--r-- go/ssa/interp/testdata/src/reflect/deepequal.go | 109
-rw-r--r-- go/ssa/interp/testdata/src/reflect/reflect.go | 13
-rw-r--r-- go/ssa/interp/testdata/typeassert.go | 32
-rw-r--r-- go/ssa/interp/testdata/width32.go | 42
-rw-r--r-- go/ssa/interp/testdata/zeros.go | 45
-rw-r--r-- go/ssa/lift.go | 13
-rw-r--r-- go/ssa/lvalue.go | 49
-rw-r--r-- go/ssa/methods.go | 111
-rw-r--r-- go/ssa/methods_test.go | 96
-rw-r--r-- go/ssa/mode.go | 12
-rw-r--r-- go/ssa/parameterized.go | 12
-rw-r--r-- go/ssa/print.go | 32
-rw-r--r-- go/ssa/sanity.go | 31
-rw-r--r-- go/ssa/source.go | 50
-rw-r--r-- go/ssa/source_test.go | 7
-rw-r--r-- go/ssa/ssa.go | 397
-rw-r--r-- go/ssa/ssautil/load.go | 8
-rw-r--r-- go/ssa/ssautil/load_test.go | 120
-rw-r--r-- go/ssa/ssautil/switch.go | 4
-rw-r--r-- go/ssa/ssautil/switch_test.go | 2
-rw-r--r-- go/ssa/ssautil/visit.go | 1
-rw-r--r-- go/ssa/stdlib_test.go | 19
-rw-r--r-- go/ssa/subst.go | 113
-rw-r--r-- go/ssa/subst_test.go | 6
-rw-r--r-- go/ssa/testdata/src/README.txt | 5
-rw-r--r-- go/ssa/testdata/src/bytes/bytes.go | 3
-rw-r--r-- go/ssa/testdata/src/context/context.go | 7
-rw-r--r-- go/ssa/testdata/src/encoding/encoding.go | 9
-rw-r--r-- go/ssa/testdata/src/encoding/json/json.go | 4
-rw-r--r-- go/ssa/testdata/src/encoding/xml/xml.go | 4
-rw-r--r-- go/ssa/testdata/src/errors/errors.go | 3
-rw-r--r-- go/ssa/testdata/src/fmt/fmt.go | 11
-rw-r--r-- go/ssa/testdata/src/io/io.go | 5
-rw-r--r-- go/ssa/testdata/src/log/log.go | 5
-rw-r--r-- go/ssa/testdata/src/math/math.go | 15
-rw-r--r-- go/ssa/testdata/src/os/os.go | 5
-rw-r--r-- go/ssa/testdata/src/reflect/reflect.go | 40
-rw-r--r-- go/ssa/testdata/src/runtime/runtime.go | 5
-rw-r--r-- go/ssa/testdata/src/sort/sort.go | 13
-rw-r--r-- go/ssa/testdata/src/strconv/strconv.go | 6
-rw-r--r-- go/ssa/testdata/src/strings/strings.go | 13
-rw-r--r-- go/ssa/testdata/src/sync/atomic/atomic.go | 5
-rw-r--r-- go/ssa/testdata/src/sync/sync.go | 12
-rw-r--r-- go/ssa/testdata/src/time/time.go | 24
-rw-r--r-- go/ssa/testdata/src/unsafe/unsafe.go | 4
-rw-r--r-- go/ssa/testdata/valueforexpr.go | 1
-rw-r--r-- go/ssa/util.go | 258
-rw-r--r-- go/ssa/wrappers.go | 196
-rw-r--r-- go/types/objectpath/objectpath.go | 261
-rw-r--r-- go/types/objectpath/objectpath_test.go | 23
-rw-r--r-- go/types/typeutil/imports.go | 1
-rw-r--r-- go/types/typeutil/map.go | 84
-rw-r--r-- go/types/typeutil/map_test.go | 25
-rw-r--r-- go/types/typeutil/methodsetcache.go | 1
-rw-r--r-- go/types/typeutil/ui.go | 1
-rw-r--r-- go/vcs/vcs.go | 1
303 files changed, 11554 insertions, 10724 deletions
diff --git a/go/analysis/analysis.go b/go/analysis/analysis.go
index d11505a16..44ada22a0 100644
--- a/go/analysis/analysis.go
+++ b/go/analysis/analysis.go
@@ -11,8 +11,6 @@ import (
"go/token"
"go/types"
"reflect"
-
- "golang.org/x/tools/internal/analysisinternal"
)
// An Analyzer describes an analysis function and its options.
@@ -48,6 +46,7 @@ type Analyzer struct {
// RunDespiteErrors allows the driver to invoke
// the Run method of this analyzer even on a
// package that contains parse or type errors.
+ // The Pass.TypeErrors field may consequently be non-empty.
RunDespiteErrors bool
// Requires is a set of analyzers that must run successfully
@@ -75,17 +74,6 @@ type Analyzer struct {
func (a *Analyzer) String() string { return a.Name }
-func init() {
- // Set the analysisinternal functions to be able to pass type errors
- // to the Pass type without modifying the go/analysis API.
- analysisinternal.SetTypeErrors = func(p interface{}, errors []types.Error) {
- p.(*Pass).typeErrors = errors
- }
- analysisinternal.GetTypeErrors = func(p interface{}) []types.Error {
- return p.(*Pass).typeErrors
- }
-}
-
// A Pass provides information to the Run function that
// applies a specific analyzer to a single Go package.
//
@@ -106,6 +94,7 @@ type Pass struct {
Pkg *types.Package // type information about the package
TypesInfo *types.Info // type information about the syntax trees
TypesSizes types.Sizes // function for computing sizes of types
+ TypeErrors []types.Error // type errors (only if Analyzer.RunDespiteErrors)
// Report reports a Diagnostic, a finding about a specific location
// in the analyzed source code such as a potential mistake.
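For illustration only (this sketch is not part of the patch; the analyzer name, doc string, and messages are invented), an analyzer that opts into RunDespiteErrors could consume the new Pass.TypeErrors field like this:

    package errcount

    import "golang.org/x/tools/go/analysis"

    // Analyzer is a hypothetical analyzer that runs even on ill-typed
    // packages and surfaces the type errors recorded by the driver.
    var Analyzer = &analysis.Analyzer{
        Name:             "errcount",
        Doc:              "report type errors recorded for the package",
        RunDespiteErrors: true, // so Pass.TypeErrors may be non-empty
        Run: func(pass *analysis.Pass) (interface{}, error) {
            for _, err := range pass.TypeErrors {
                pass.Reportf(err.Pos, "type error: %s", err.Msg)
            }
            return nil, nil
        },
    }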
diff --git a/go/analysis/analysistest/analysistest.go b/go/analysis/analysistest/analysistest.go
index df79a4419..be016e7e9 100644
--- a/go/analysis/analysistest/analysistest.go
+++ b/go/analysis/analysistest/analysistest.go
@@ -19,14 +19,13 @@ import (
"sort"
"strconv"
"strings"
+ "testing"
"text/scanner"
"golang.org/x/tools/go/analysis"
"golang.org/x/tools/go/analysis/internal/checker"
"golang.org/x/tools/go/packages"
- "golang.org/x/tools/internal/lsp/diff"
- "golang.org/x/tools/internal/lsp/diff/myers"
- "golang.org/x/tools/internal/span"
+ "golang.org/x/tools/internal/diff"
"golang.org/x/tools/internal/testenv"
"golang.org/x/tools/txtar"
)
@@ -81,23 +80,24 @@ type Testing interface {
// Each section in the archive corresponds to a single message.
//
// A golden file using txtar may look like this:
-// -- turn into single negation --
-// package pkg
//
-// func fn(b1, b2 bool) {
-// if !b1 { // want `negating a boolean twice`
-// println()
-// }
-// }
+// -- turn into single negation --
+// package pkg
//
-// -- remove double negation --
-// package pkg
+// func fn(b1, b2 bool) {
+// if !b1 { // want `negating a boolean twice`
+// println()
+// }
+// }
//
-// func fn(b1, b2 bool) {
-// if b1 { // want `negating a boolean twice`
-// println()
-// }
-// }
+// -- remove double negation --
+// package pkg
+//
+// func fn(b1, b2 bool) {
+// if b1 { // want `negating a boolean twice`
+// println()
+// }
+// }
func RunWithSuggestedFixes(t Testing, dir string, a *analysis.Analyzer, patterns ...string) []*Result {
r := Run(t, dir, a, patterns...)
@@ -113,7 +113,7 @@ func RunWithSuggestedFixes(t Testing, dir string, a *analysis.Analyzer, patterns
// should match up.
for _, act := range r {
// file -> message -> edits
- fileEdits := make(map[*token.File]map[string][]diff.TextEdit)
+ fileEdits := make(map[*token.File]map[string][]diff.Edit)
fileContents := make(map[*token.File][]byte)
// Validate edits, prepare the fileEdits map and read the file contents.
@@ -141,17 +141,13 @@ func RunWithSuggestedFixes(t Testing, dir string, a *analysis.Analyzer, patterns
}
fileContents[file] = contents
}
- spn, err := span.NewRange(act.Pass.Fset, edit.Pos, edit.End).Span()
- if err != nil {
- t.Errorf("error converting edit to span %s: %v", file.Name(), err)
- }
-
if _, ok := fileEdits[file]; !ok {
- fileEdits[file] = make(map[string][]diff.TextEdit)
+ fileEdits[file] = make(map[string][]diff.Edit)
}
- fileEdits[file][sf.Message] = append(fileEdits[file][sf.Message], diff.TextEdit{
- Span: spn,
- NewText: string(edit.NewText),
+ fileEdits[file][sf.Message] = append(fileEdits[file][sf.Message], diff.Edit{
+ Start: file.Offset(edit.Pos),
+ End: file.Offset(edit.End),
+ New: string(edit.NewText),
})
}
}
@@ -188,23 +184,24 @@ func RunWithSuggestedFixes(t Testing, dir string, a *analysis.Analyzer, patterns
for _, vf := range ar.Files {
if vf.Name == sf {
found = true
- out := diff.ApplyEdits(string(orig), edits)
+ out, err := diff.ApplyBytes(orig, edits)
+ if err != nil {
+ t.Errorf("%s: error applying fixes: %v", file.Name(), err)
+ continue
+ }
// the file may contain multiple trailing
// newlines if the user places empty lines
// between files in the archive. normalize
// this to a single newline.
want := string(bytes.TrimRight(vf.Data, "\n")) + "\n"
- formatted, err := format.Source([]byte(out))
+ formatted, err := format.Source(out)
if err != nil {
t.Errorf("%s: error formatting edited source: %v\n%s", file.Name(), err, out)
continue
}
- if want != string(formatted) {
- d, err := myers.ComputeEdits("", want, string(formatted))
- if err != nil {
- t.Errorf("failed to compute suggested fix diff: %v", err)
- }
- t.Errorf("suggested fixes failed for %s:\n%s", file.Name(), diff.ToUnified(fmt.Sprintf("%s.golden [%s]", file.Name(), sf), "actual", want, d))
+ if got := string(formatted); got != want {
+ unified := diff.Unified(fmt.Sprintf("%s.golden [%s]", file.Name(), sf), "actual", want, got)
+ t.Errorf("suggested fixes failed for %s:\n%s", file.Name(), unified)
}
break
}
@@ -216,25 +213,26 @@ func RunWithSuggestedFixes(t Testing, dir string, a *analysis.Analyzer, patterns
} else {
// all suggested fixes are represented by a single file
- var catchallEdits []diff.TextEdit
+ var catchallEdits []diff.Edit
for _, edits := range fixes {
catchallEdits = append(catchallEdits, edits...)
}
- out := diff.ApplyEdits(string(orig), catchallEdits)
+ out, err := diff.ApplyBytes(orig, catchallEdits)
+ if err != nil {
+ t.Errorf("%s: error applying fixes: %v", file.Name(), err)
+ continue
+ }
want := string(ar.Comment)
- formatted, err := format.Source([]byte(out))
+ formatted, err := format.Source(out)
if err != nil {
t.Errorf("%s: error formatting resulting source: %v\n%s", file.Name(), err, out)
continue
}
- if want != string(formatted) {
- d, err := myers.ComputeEdits("", want, string(formatted))
- if err != nil {
- t.Errorf("%s: failed to compute suggested fix diff: %s", file.Name(), err)
- }
- t.Errorf("suggested fixes failed for %s:\n%s", file.Name(), diff.ToUnified(file.Name()+".golden", "actual", want, d))
+ if got := string(formatted); got != want {
+ unified := diff.Unified(file.Name()+".golden", "actual", want, got)
+ t.Errorf("suggested fixes failed for %s:\n%s", file.Name(), unified)
}
}
}
@@ -248,7 +246,8 @@ func RunWithSuggestedFixes(t Testing, dir string, a *analysis.Analyzer, patterns
// directory using golang.org/x/tools/go/packages, runs the analysis on
// them, and checks that each analysis emits the expected diagnostics
// and facts specified by the contents of '// want ...' comments in the
-// package's source files.
+// package's source files. It treats a comment of the form
+// "//...// want..." or "/*...// want... */" as if it starts at 'want'.
//
// An expectation of a Diagnostic is specified by a string literal
// containing a regular expression that must match the diagnostic
@@ -280,7 +279,7 @@ func RunWithSuggestedFixes(t Testing, dir string, a *analysis.Analyzer, patterns
// attempted, even if unsuccessful. It is safe for a test to ignore all
// the results, but a test may use it to perform additional checks.
func Run(t Testing, dir string, a *analysis.Analyzer, patterns ...string) []*Result {
- if t, ok := t.(testenv.Testing); ok {
+ if t, ok := t.(testing.TB); ok {
testenv.NeedsGoPackages(t)
}
@@ -316,8 +315,11 @@ func loadPackages(a *analysis.Analyzer, dir string, patterns ...string) ([]*pack
// a list of packages we generate and then do the parsing and
// typechecking, though this feature seems to be a recurring need.
+ mode := packages.NeedName | packages.NeedFiles | packages.NeedCompiledGoFiles | packages.NeedImports |
+ packages.NeedTypes | packages.NeedTypesSizes | packages.NeedSyntax | packages.NeedTypesInfo |
+ packages.NeedDeps
cfg := &packages.Config{
- Mode: packages.LoadAllSyntax,
+ Mode: mode,
Dir: dir,
Tests: true,
Env: append(os.Environ(), "GOPATH="+dir, "GO111MODULE=off", "GOPROXY=off"),
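As a usage sketch (not part of the patch; the module path and analyzer are hypothetical, the analysistest API is as documented above), a test built on RunWithSuggestedFixes typically looks like:

    package noop_test

    import (
        "testing"

        "golang.org/x/tools/go/analysis/analysistest"

        "example.com/analyzers/noop" // hypothetical analyzer under test
    )

    func TestSuggestedFixes(t *testing.T) {
        // testdata/src/a/*.go carries `// want "..."` expectations, and
        // a.go.golden (or a txtar archive) holds the expected fixed source.
        testdata := analysistest.TestData()
        analysistest.RunWithSuggestedFixes(t, testdata, noop.Analyzer, "a")
    }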
diff --git a/go/analysis/diagnostic.go b/go/analysis/diagnostic.go
index cd462a0cb..5cdcf46d2 100644
--- a/go/analysis/diagnostic.go
+++ b/go/analysis/diagnostic.go
@@ -37,7 +37,7 @@ type Diagnostic struct {
// declaration.
type RelatedInformation struct {
Pos token.Pos
- End token.Pos
+ End token.Pos // optional
Message string
}
diff --git a/go/analysis/doc.go b/go/analysis/doc.go
index 94a3bd5d0..c5429c9e2 100644
--- a/go/analysis/doc.go
+++ b/go/analysis/doc.go
@@ -3,12 +3,10 @@
// license that can be found in the LICENSE file.
/*
-
Package analysis defines the interface between a modular static
analysis and an analysis driver program.
-
-Background
+# Background
A static analysis is a function that inspects a package of Go code and
reports a set of diagnostics (typically mistakes in the code), and
@@ -32,8 +30,7 @@ frameworks, code review tools, code-base indexers (such as SourceGraph),
documentation viewers (such as godoc), batch pipelines for large code
bases, and so on.
-
-Analyzer
+# Analyzer
The primary type in the API is Analyzer. An Analyzer statically
describes an analysis function: its name, documentation, flags,
@@ -115,8 +112,7 @@ Finally, the Run field contains a function to be called by the driver to
execute the analysis on a single package. The driver passes it an
instance of the Pass type.
-
-Pass
+# Pass
A Pass describes a single unit of work: the application of a particular
Analyzer to a particular package of Go code.
@@ -181,14 +177,14 @@ Diagnostic is defined as:
The optional Category field is a short identifier that classifies the
kind of message when an analysis produces several kinds of diagnostic.
-Many analyses want to associate diagnostics with a severity level.
-Because Diagnostic does not have a severity level field, an Analyzer's
-diagnostics effectively all have the same severity level. To separate which
-diagnostics are high severity and which are low severity, expose multiple
-Analyzers instead. Analyzers should also be separated when their
-diagnostics belong in different groups, or could be tagged differently
-before being shown to the end user. Analyzers should document their severity
-level to help downstream tools surface diagnostics properly.
+The Diagnostic struct does not have a field to indicate its severity
+because opinions about the relative importance of Analyzers and their
+diagnostics vary widely among users. The design of this framework does
+not hold each Analyzer responsible for identifying the severity of its
+diagnostics. Instead, we expect that drivers will allow the user to
+customize the filtering and prioritization of diagnostics based on the
+producing Analyzer and optional Category, according to the user's
+preferences.
Most Analyzers inspect typed Go syntax trees, but a few, such as asmdecl
and buildtag, inspect the raw text of Go source files or even non-Go
@@ -202,8 +198,7 @@ raw text file, use the following sequence:
...
pass.Reportf(tf.LineStart(line), "oops")
-
-Modular analysis with Facts
+# Modular analysis with Facts
To improve efficiency and scalability, large programs are routinely
built using separate compilation: units of the program are compiled
@@ -246,6 +241,12 @@ Consequently, Facts must be serializable. The API requires that drivers
use the gob encoding, an efficient, robust, self-describing binary
protocol. A fact type may implement the GobEncoder/GobDecoder interfaces
if the default encoding is unsuitable. Facts should be stateless.
+Because serialized facts may appear within build outputs, the gob encoding
+of a fact must be deterministic, to avoid spurious cache misses in
+build systems that use content-addressable caches.
+The driver makes a single call to the gob encoder for all facts
+exported by a given analysis pass, so that the topology of
+shared data structures referenced by multiple facts is preserved.
The Pass type has functions to import and export facts,
associated either with an object or with a package:
@@ -280,8 +281,7 @@ this fact is built in to the analyzer so that it correctly checks
calls to log.Printf even when run in a driver that does not apply
it to standard packages. We would like to remove this limitation in future.
-
-Testing an Analyzer
+# Testing an Analyzer
The analysistest subpackage provides utilities for testing an Analyzer.
In a few lines of code, it is possible to run an analyzer on a package
@@ -289,8 +289,7 @@ of testdata files and check that it reported all the expected
diagnostics and facts (and no more). Expectations are expressed using
"// want ..." comments in the input code.
-
-Standalone commands
+# Standalone commands
Analyzers are provided in the form of packages that a driver program is
expected to import. The vet command imports a set of several analyzers,
@@ -301,7 +300,7 @@ singlechecker and multichecker subpackages.
The singlechecker package provides the main function for a command that
runs one analyzer. By convention, each analyzer such as
-go/passes/findcall should be accompanied by a singlechecker-based
+go/analysis/passes/findcall should be accompanied by a singlechecker-based
command such as go/analysis/passes/findcall/cmd/findcall, defined in its
entirety as:
@@ -316,6 +315,5 @@ entirety as:
A tool that provides multiple analyzers can use multichecker in a
similar way, giving it the list of Analyzers.
-
*/
package analysis
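The findcall command mentioned just above is elided by the diff context; following the documented convention, a singlechecker-based command is a sketch of this form:

    package main

    import (
        "golang.org/x/tools/go/analysis/passes/findcall"
        "golang.org/x/tools/go/analysis/singlechecker"
    )

    func main() { singlechecker.Main(findcall.Analyzer) }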
diff --git a/go/analysis/internal/analysisflags/flags.go b/go/analysis/internal/analysisflags/flags.go
index 4b7be2d1f..e127a42b9 100644
--- a/go/analysis/internal/analysisflags/flags.go
+++ b/go/analysis/internal/analysisflags/flags.go
@@ -206,7 +206,7 @@ func (versionFlag) Get() interface{} { return nil }
func (versionFlag) String() string { return "" }
func (versionFlag) Set(s string) error {
if s != "full" {
- log.Fatalf("unsupported flag value: -V=%s", s)
+ log.Fatalf("unsupported flag value: -V=%s (use -V=full)", s)
}
// This replicates the minimal subset of
@@ -218,7 +218,10 @@ func (versionFlag) Set(s string) error {
// Formats:
// $progname version devel ... buildID=...
// $progname version go1.9.1
- progname := os.Args[0]
+ progname, err := os.Executable()
+ if err != nil {
+ return err
+ }
f, err := os.Open(progname)
if err != nil {
log.Fatal(err)
@@ -339,9 +342,38 @@ func PrintPlain(fset *token.FileSet, diag analysis.Diagnostic) {
}
// A JSONTree is a mapping from package ID to analysis name to result.
-// Each result is either a jsonError or a list of jsonDiagnostic.
+// Each result is either a jsonError or a list of JSONDiagnostic.
type JSONTree map[string]map[string]interface{}
+// A JSONTextEdit describes the replacement of a portion of a file.
+// Start and End are zero-based half-open indices into the original byte
+// sequence of the file, and New is the new text.
+type JSONTextEdit struct {
+ Filename string `json:"filename"`
+ Start int `json:"start"`
+ End int `json:"end"`
+ New string `json:"new"`
+}
+
+// A JSONSuggestedFix describes an edit that should be applied as a whole or not
+// at all. It might contain multiple TextEdits/text_edits if the SuggestedFix
+// consists of multiple non-contiguous edits.
+type JSONSuggestedFix struct {
+ Message string `json:"message"`
+ Edits []JSONTextEdit `json:"edits"`
+}
+
+// A JSONDiagnostic can be used to encode and decode analysis.Diagnostics to and
+// from JSON.
+// TODO(matloob): Should the JSON diagnostics contain ranges?
+// If so, how should they be formatted?
+type JSONDiagnostic struct {
+ Category string `json:"category,omitempty"`
+ Posn string `json:"posn"`
+ Message string `json:"message"`
+ SuggestedFixes []JSONSuggestedFix `json:"suggested_fixes,omitempty"`
+}
+
// Add adds the result of analysis 'name' on package 'id'.
// The result is either a list of diagnostics or an error.
func (tree JSONTree) Add(fset *token.FileSet, id, name string, diags []analysis.Diagnostic, err error) {
@@ -352,20 +384,31 @@ func (tree JSONTree) Add(fset *token.FileSet, id, name string, diags []analysis.
}
v = jsonError{err.Error()}
} else if len(diags) > 0 {
- type jsonDiagnostic struct {
- Category string `json:"category,omitempty"`
- Posn string `json:"posn"`
- Message string `json:"message"`
- }
- var diagnostics []jsonDiagnostic
- // TODO(matloob): Should the JSON diagnostics contain ranges?
- // If so, how should they be formatted?
+ diagnostics := make([]JSONDiagnostic, 0, len(diags))
for _, f := range diags {
- diagnostics = append(diagnostics, jsonDiagnostic{
- Category: f.Category,
- Posn: fset.Position(f.Pos).String(),
- Message: f.Message,
- })
+ var fixes []JSONSuggestedFix
+ for _, fix := range f.SuggestedFixes {
+ var edits []JSONTextEdit
+ for _, edit := range fix.TextEdits {
+ edits = append(edits, JSONTextEdit{
+ Filename: fset.Position(edit.Pos).Filename,
+ Start: fset.Position(edit.Pos).Offset,
+ End: fset.Position(edit.End).Offset,
+ New: string(edit.NewText),
+ })
+ }
+ fixes = append(fixes, JSONSuggestedFix{
+ Message: fix.Message,
+ Edits: edits,
+ })
+ }
+ jdiag := JSONDiagnostic{
+ Category: f.Category,
+ Posn: fset.Position(f.Pos).String(),
+ Message: f.Message,
+ SuggestedFixes: fixes,
+ }
+ diagnostics = append(diagnostics, jdiag)
}
v = diagnostics
}
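To illustrate the wire format these newly exported types define, a driver run with -json emits a tree keyed by package ID and analyzer name; the package path, file name, offsets, and messages below are invented:

    {
        "example.com/p": {
            "rename": [
                {
                    "posn": "/tmp/p/p.go:5:2",
                    "message": "renaming \"bar\" to \"baz\"",
                    "suggested_fixes": [
                        {
                            "message": "renaming \"bar\" to \"baz\"",
                            "edits": [
                                {"filename": "/tmp/p/p.go", "start": 42, "end": 45, "new": "baz"}
                            ]
                        }
                    ]
                }
            ]
        }
    }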
diff --git a/go/analysis/internal/analysisflags/flags_test.go b/go/analysis/internal/analysisflags/flags_test.go
index 1f055dde7..b5cfb3d44 100644
--- a/go/analysis/internal/analysisflags/flags_test.go
+++ b/go/analysis/internal/analysisflags/flags_test.go
@@ -42,7 +42,7 @@ func TestExec(t *testing.T) {
for _, test := range []struct {
flags string
- want string
+ want string // output should contain want
}{
{"", "[a1 a2 a3]"},
{"-a1=0", "[a2 a3]"},
@@ -50,6 +50,7 @@ func TestExec(t *testing.T) {
{"-a1", "[a1]"},
{"-a1=1 -a3=1", "[a1 a3]"},
{"-a1=1 -a3=0", "[a1]"},
+ {"-V=full", "analysisflags.test version devel"},
} {
cmd := exec.Command(progname, "-test.run=TestExec")
cmd.Env = append(os.Environ(), "ANALYSISFLAGS_CHILD=1", "FLAGS="+test.flags)
@@ -60,8 +61,8 @@ func TestExec(t *testing.T) {
}
got := strings.TrimSpace(string(output))
- if got != test.want {
- t.Errorf("got %s, want %s", got, test.want)
+ if !strings.Contains(got, test.want) {
+ t.Errorf("got %q, does not contain %q", got, test.want)
}
}
}
diff --git a/go/analysis/internal/checker/checker.go b/go/analysis/internal/checker/checker.go
index e405a2ae1..5346acd76 100644
--- a/go/analysis/internal/checker/checker.go
+++ b/go/analysis/internal/checker/checker.go
@@ -15,7 +15,6 @@ import (
"flag"
"fmt"
"go/format"
- "go/parser"
"go/token"
"go/types"
"io/ioutil"
@@ -33,8 +32,8 @@ import (
"golang.org/x/tools/go/analysis"
"golang.org/x/tools/go/analysis/internal/analysisflags"
"golang.org/x/tools/go/packages"
- "golang.org/x/tools/internal/analysisinternal"
- "golang.org/x/tools/internal/span"
+ "golang.org/x/tools/internal/diff"
+ "golang.org/x/tools/internal/robustio"
)
var (
@@ -51,6 +50,9 @@ var (
// Log files for optional performance tracing.
CPUProfile, MemProfile, Trace string
+ // IncludeTests indicates whether test files should be analyzed too.
+ IncludeTests = true
+
// Fix determines whether to apply all suggested fixes.
Fix bool
)
@@ -65,6 +67,7 @@ func RegisterFlags() {
flag.StringVar(&CPUProfile, "cpuprofile", "", "write CPU profile to this file")
flag.StringVar(&MemProfile, "memprofile", "", "write memory profile to this file")
flag.StringVar(&Trace, "trace", "", "write trace log to this file")
+ flag.BoolVar(&IncludeTests, "test", IncludeTests, "indicates whether test files should be analyzed, too")
flag.BoolVar(&Fix, "fix", false, "apply all suggested fixes")
}
@@ -143,7 +146,11 @@ func Run(args []string, analyzers []*analysis.Analyzer) (exitcode int) {
roots := analyze(initial, analyzers)
if Fix {
- applyFixes(roots)
+ if err := applyFixes(roots); err != nil {
+ // Fail if applying fixes failed.
+ log.Print(err)
+ return 1
+ }
}
return printDiagnostics(roots)
}
@@ -163,7 +170,7 @@ func load(patterns []string, allSyntax bool) ([]*packages.Package, error) {
}
conf := packages.Config{
Mode: mode,
- Tests: true,
+ Tests: IncludeTests,
}
initial, err := packages.Load(&conf, patterns...)
if err == nil {
@@ -301,7 +308,10 @@ func analyze(pkgs []*packages.Package, analyzers []*analysis.Analyzer) []*action
return roots
}
-func applyFixes(roots []*action) {
+func applyFixes(roots []*action) error {
+ // visit all of the actions and accumulate the suggested edits.
+ paths := make(map[robustio.FileID]string)
+ editsByAction := make(map[robustio.FileID]map[*action][]diff.Edit)
visited := make(map[*action]bool)
var apply func(*action) error
var visitAll func(actions []*action) error
@@ -309,7 +319,9 @@ func applyFixes(roots []*action) {
for _, act := range actions {
if !visited[act] {
visited[act] = true
- visitAll(act.deps)
+ if err := visitAll(act.deps); err != nil {
+ return err
+ }
if err := apply(act); err != nil {
return err
}
@@ -318,116 +330,167 @@ func applyFixes(roots []*action) {
return nil
}
- // TODO(matloob): Is this tree business too complicated? (After all this is Go!)
- // Just create a set (map) of edits, sort by pos and call it a day?
- type offsetedit struct {
- start, end int
- newText []byte
- } // TextEdit using byteOffsets instead of pos
- type node struct {
- edit offsetedit
- left, right *node
- }
-
- var insert func(tree **node, edit offsetedit) error
- insert = func(treeptr **node, edit offsetedit) error {
- if *treeptr == nil {
- *treeptr = &node{edit, nil, nil}
- return nil
- }
- tree := *treeptr
- if edit.end <= tree.edit.start {
- return insert(&tree.left, edit)
- } else if edit.start >= tree.edit.end {
- return insert(&tree.right, edit)
- }
-
- // Overlapping text edit.
- return fmt.Errorf("analyses applying overlapping text edits affecting pos range (%v, %v) and (%v, %v)",
- edit.start, edit.end, tree.edit.start, tree.edit.end)
-
- }
-
- editsForFile := make(map[*token.File]*node)
-
apply = func(act *action) error {
+ editsForTokenFile := make(map[*token.File][]diff.Edit)
for _, diag := range act.diagnostics {
for _, sf := range diag.SuggestedFixes {
for _, edit := range sf.TextEdits {
// Validate the edit.
+ // Any error here indicates a bug in the analyzer.
+ file := act.pkg.Fset.File(edit.Pos)
+ if file == nil {
+ return fmt.Errorf("analysis %q suggests invalid fix: missing file info for pos (%v)",
+ act.a.Name, edit.Pos)
+ }
if edit.Pos > edit.End {
- return fmt.Errorf(
- "diagnostic for analysis %v contains Suggested Fix with malformed edit: pos (%v) > end (%v)",
+ return fmt.Errorf("analysis %q suggests invalid fix: pos (%v) > end (%v)",
act.a.Name, edit.Pos, edit.End)
}
- file, endfile := act.pkg.Fset.File(edit.Pos), act.pkg.Fset.File(edit.End)
- if file == nil || endfile == nil || file != endfile {
- return (fmt.Errorf(
- "diagnostic for analysis %v contains Suggested Fix with malformed spanning files %v and %v",
- act.a.Name, file.Name(), endfile.Name()))
+ if eof := token.Pos(file.Base() + file.Size()); edit.End > eof {
+ return fmt.Errorf("analysis %q suggests invalid fix: end (%v) past end of file (%v)",
+ act.a.Name, edit.End, eof)
}
- start, end := file.Offset(edit.Pos), file.Offset(edit.End)
-
- // TODO(matloob): Validate that edits do not affect other packages.
- root := editsForFile[file]
- if err := insert(&root, offsetedit{start, end, edit.NewText}); err != nil {
- return err
- }
- editsForFile[file] = root // In case the root changed
+ edit := diff.Edit{Start: file.Offset(edit.Pos), End: file.Offset(edit.End), New: string(edit.NewText)}
+ editsForTokenFile[file] = append(editsForTokenFile[file], edit)
}
}
}
+
+ for f, edits := range editsForTokenFile {
+ id, _, err := robustio.GetFileID(f.Name())
+ if err != nil {
+ return err
+ }
+ if _, hasId := paths[id]; !hasId {
+ paths[id] = f.Name()
+ editsByAction[id] = make(map[*action][]diff.Edit)
+ }
+ editsByAction[id][act] = edits
+ }
return nil
}
- visitAll(roots)
+ if err := visitAll(roots); err != nil {
+ return err
+ }
- fset := token.NewFileSet() // Shared by parse calls below
- // Now we've got a set of valid edits for each file. Get the new file contents.
- for f, tree := range editsForFile {
- contents, err := ioutil.ReadFile(f.Name())
- if err != nil {
- log.Fatal(err)
+ // Validate and group the edits to each actual file.
+ editsByPath := make(map[string][]diff.Edit)
+ for id, actToEdits := range editsByAction {
+ path := paths[id]
+ actions := make([]*action, 0, len(actToEdits))
+ for act := range actToEdits {
+ actions = append(actions, act)
}
- cur := 0 // current position in the file
-
- var out bytes.Buffer
-
- var recurse func(*node)
- recurse = func(node *node) {
- if node.left != nil {
- recurse(node.left)
+ // Does any action create conflicting edits?
+ for _, act := range actions {
+ edits := actToEdits[act]
+ if _, invalid := validateEdits(edits); invalid > 0 {
+ name, x, y := act.a.Name, edits[invalid-1], edits[invalid]
+ return diff3Conflict(path, name, name, []diff.Edit{x}, []diff.Edit{y})
}
+ }
- edit := node.edit
- if edit.start > cur {
- out.Write(contents[cur:edit.start])
- out.Write(edit.newText)
+ // Does any pair of different actions create edits that conflict?
+ for j := range actions {
+ for k := range actions[:j] {
+ x, y := actions[j], actions[k]
+ if x.a.Name > y.a.Name {
+ x, y = y, x
+ }
+ xedits, yedits := actToEdits[x], actToEdits[y]
+ combined := append(xedits, yedits...)
+ if _, invalid := validateEdits(combined); invalid > 0 {
+ // TODO: consider applying each action's consistent list of edits entirely,
+ // and then using a three-way merge (such as GNU diff3) on the resulting
+ // files to report more precisely the parts that actually conflict.
+ return diff3Conflict(path, x.a.Name, y.a.Name, xedits, yedits)
+ }
}
- cur = edit.end
+ }
- if node.right != nil {
- recurse(node.right)
- }
+ var edits []diff.Edit
+ for act := range actToEdits {
+ edits = append(edits, actToEdits[act]...)
}
- recurse(tree)
- // Write out the rest of the file.
- if cur < len(contents) {
- out.Write(contents[cur:])
+ editsByPath[path], _ = validateEdits(edits) // remove duplicates. already validated.
+ }
+
+ // Now we've got a set of valid edits for each file. Apply them.
+ for path, edits := range editsByPath {
+ contents, err := ioutil.ReadFile(path)
+ if err != nil {
+ return err
+ }
+
+ out, err := diff.ApplyBytes(contents, edits)
+ if err != nil {
+ return err
}
// Try to format the file.
- ff, err := parser.ParseFile(fset, f.Name(), out.Bytes(), parser.ParseComments)
- if err == nil {
- var buf bytes.Buffer
- if err = format.Node(&buf, fset, ff); err == nil {
- out = buf
+ if formatted, err := format.Source(out); err == nil {
+ out = formatted
+ }
+
+ if err := ioutil.WriteFile(path, out, 0644); err != nil {
+ return err
+ }
+ }
+ return nil
+}
+
+// validateEdits returns a list of edits that is sorted and
+// contains no duplicate edits. Returns the index of some
+// overlapping adjacent edits if there is one and <0 if the
+// edits are valid.
+func validateEdits(edits []diff.Edit) ([]diff.Edit, int) {
+ if len(edits) == 0 {
+ return nil, -1
+ }
+ equivalent := func(x, y diff.Edit) bool {
+ return x.Start == y.Start && x.End == y.End && x.New == y.New
+ }
+ diff.SortEdits(edits)
+ unique := []diff.Edit{edits[0]}
+ invalid := -1
+ for i := 1; i < len(edits); i++ {
+ prev, cur := edits[i-1], edits[i]
+ // We skip over equivalent edits without considering them
+ // an error. This handles identical edits coming from the
+ // multiple ways of loading a package into a
+ // *go/packages.Package for testing, e.g. packages "p" and "p [p.test]".
+ if !equivalent(prev, cur) {
+ unique = append(unique, cur)
+ if prev.End > cur.Start {
+ invalid = i
}
}
+ }
+ return unique, invalid
+}
- ioutil.WriteFile(f.Name(), out.Bytes(), 0644)
+// diff3Conflict returns an error describing two conflicting sets of
+// edits on a file at path.
+func diff3Conflict(path string, xlabel, ylabel string, xedits, yedits []diff.Edit) error {
+ contents, err := ioutil.ReadFile(path)
+ if err != nil {
+ return err
}
+ oldlabel, old := "base", string(contents)
+
+ xdiff, err := diff.ToUnified(oldlabel, xlabel, old, xedits)
+ if err != nil {
+ return err
+ }
+ ydiff, err := diff.ToUnified(oldlabel, ylabel, old, yedits)
+ if err != nil {
+ return err
+ }
+
+ return fmt.Errorf("conflicting edits from %s and %s on %s\nfirst edits:\n%s\nsecond edits:\n%s",
+ xlabel, ylabel, path, xdiff, ydiff)
}
// printDiagnostics prints the diagnostics for the root packages in either
@@ -574,7 +637,6 @@ type action struct {
deps []*action
objectFacts map[objectFactKey]analysis.Fact
packageFacts map[packageFactKey]analysis.Fact
- inputs map[*analysis.Analyzer]interface{}
result interface{}
diagnostics []analysis.Diagnostic
err error
@@ -672,14 +734,16 @@ func (act *action) execOnce() {
// Run the analysis.
pass := &analysis.Pass{
- Analyzer: act.a,
- Fset: act.pkg.Fset,
- Files: act.pkg.Syntax,
- OtherFiles: act.pkg.OtherFiles,
- IgnoredFiles: act.pkg.IgnoredFiles,
- Pkg: act.pkg.Types,
- TypesInfo: act.pkg.TypesInfo,
- TypesSizes: act.pkg.TypesSizes,
+ Analyzer: act.a,
+ Fset: act.pkg.Fset,
+ Files: act.pkg.Syntax,
+ OtherFiles: act.pkg.OtherFiles,
+ IgnoredFiles: act.pkg.IgnoredFiles,
+ Pkg: act.pkg.Types,
+ TypesInfo: act.pkg.TypesInfo,
+ TypesSizes: act.pkg.TypesSizes,
+ TypeErrors: act.pkg.TypeErrors,
+
ResultOf: inputs,
Report: func(d analysis.Diagnostic) { act.diagnostics = append(act.diagnostics, d) },
ImportObjectFact: act.importObjectFact,
@@ -691,36 +755,6 @@ func (act *action) execOnce() {
}
act.pass = pass
- var errors []types.Error
- // Get any type errors that are attributed to the pkg.
- // This is necessary to test analyzers that provide
- // suggested fixes for compiler/type errors.
- for _, err := range act.pkg.Errors {
- if err.Kind != packages.TypeError {
- continue
- }
- // err.Pos is a string of form: "file:line:col" or "file:line" or "" or "-"
- spn := span.Parse(err.Pos)
- // Extract the token positions from the error string.
- line, col, offset := spn.Start().Line(), spn.Start().Column(), -1
- act.pkg.Fset.Iterate(func(f *token.File) bool {
- if f.Name() != spn.URI().Filename() {
- return true
- }
- offset = int(f.LineStart(line)) + col - 1
- return false
- })
- if offset == -1 {
- continue
- }
- errors = append(errors, types.Error{
- Fset: act.pkg.Fset,
- Msg: err.Msg,
- Pos: token.Pos(offset),
- })
- }
- analysisinternal.SetTypeErrors(pass, errors)
-
var err error
if act.pkg.IllTyped && !pass.Analyzer.RunDespiteErrors {
err = fmt.Errorf("analysis skipped due to errors in package")
@@ -762,7 +796,7 @@ func inheritFacts(act, dep *action) {
if serialize {
encodedFact, err := codeFact(fact)
if err != nil {
- log.Panicf("internal error: encoding of %T fact failed in %v", fact, act)
+ log.Panicf("internal error: encoding of %T fact failed in %v: %v", fact, act, err)
}
fact = encodedFact
}
@@ -826,7 +860,7 @@ func codeFact(fact analysis.Fact) (analysis.Fact, error) {
// exportedFrom reports whether obj may be visible to a package that imports pkg.
// This includes not just the exported members of pkg, but also unexported
-// constants, types, fields, and methods, perhaps belonging to oether packages,
+// constants, types, fields, and methods, perhaps belonging to other packages,
// that find there way into the API.
// This is an overapproximation of the more accurate approach used by
// gc export data, which walks the type graph, but it's much simpler.
@@ -890,7 +924,7 @@ func (act *action) exportObjectFact(obj types.Object, fact analysis.Fact) {
func (act *action) allObjectFacts() []analysis.ObjectFact {
facts := make([]analysis.ObjectFact, 0, len(act.objectFacts))
for k := range act.objectFacts {
- facts = append(facts, analysis.ObjectFact{k.obj, act.objectFacts[k]})
+ facts = append(facts, analysis.ObjectFact{Object: k.obj, Fact: act.objectFacts[k]})
}
return facts
}
@@ -932,11 +966,11 @@ func factType(fact analysis.Fact) reflect.Type {
return t
}
-// allObjectFacts implements Pass.AllObjectFacts.
+// allPackageFacts implements Pass.AllPackageFacts.
func (act *action) allPackageFacts() []analysis.PackageFact {
facts := make([]analysis.PackageFact, 0, len(act.packageFacts))
for k := range act.packageFacts {
- facts = append(facts, analysis.PackageFact{k.pkg, act.packageFacts[k]})
+ facts = append(facts, analysis.PackageFact{Package: k.pkg, Fact: act.packageFacts[k]})
}
return facts
}
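For orientation, a sketch of the core of the new applyFixes flow (illustrative only; golang.org/x/tools/internal/diff is an internal package, so this compiles only inside the x/tools module): each analysis.TextEdit is converted to a byte-offset diff.Edit, overlapping edits are rejected, and the remaining edits are applied with diff.ApplyBytes:

    package main

    import (
        "fmt"
        "log"

        "golang.org/x/tools/internal/diff" // internal to x/tools
    )

    func main() {
        src := []byte("bar := 12\n")
        // A single edit replacing bytes [0,3) ("bar") with "baz".
        edits := []diff.Edit{{Start: 0, End: 3, New: "baz"}}
        out, err := diff.ApplyBytes(src, edits) // errors on overlapping or out-of-bounds edits
        if err != nil {
            log.Fatal(err)
        }
        fmt.Printf("%s", out) // baz := 12
    }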
diff --git a/go/analysis/internal/checker/checker_test.go b/go/analysis/internal/checker/checker_test.go
index eee211c21..34acae81e 100644
--- a/go/analysis/internal/checker/checker_test.go
+++ b/go/analysis/internal/checker/checker_test.go
@@ -19,14 +19,9 @@ import (
"golang.org/x/tools/internal/testenv"
)
-var from, to string
-
func TestApplyFixes(t *testing.T) {
testenv.NeedsGoPackages(t)
- from = "bar"
- to = "baz"
-
files := map[string]string{
"rename/test.go": `package rename
@@ -74,26 +69,55 @@ var analyzer = &analysis.Analyzer{
Run: run,
}
+var other = &analysis.Analyzer{ // like analyzer but with a different Name.
+ Name: "other",
+ Requires: []*analysis.Analyzer{inspect.Analyzer},
+ Run: run,
+}
+
func run(pass *analysis.Pass) (interface{}, error) {
+ const (
+ from = "bar"
+ to = "baz"
+ conflict = "conflict" // add conflicting edits to package conflict.
+ duplicate = "duplicate" // add duplicate edits to package conflict.
+ other = "other" // add conflicting edits to package other from different analyzers.
+ )
+
+ if pass.Analyzer.Name == other {
+ if pass.Pkg.Name() != other {
+ return nil, nil // only apply Analyzer other to packages named other
+ }
+ }
+
inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector)
nodeFilter := []ast.Node{(*ast.Ident)(nil)}
inspect.Preorder(nodeFilter, func(n ast.Node) {
ident := n.(*ast.Ident)
if ident.Name == from {
msg := fmt.Sprintf("renaming %q to %q", from, to)
+ edits := []analysis.TextEdit{
+ {Pos: ident.Pos(), End: ident.End(), NewText: []byte(to)},
+ }
+ switch pass.Pkg.Name() {
+ case conflict:
+ edits = append(edits, []analysis.TextEdit{
+ {Pos: ident.Pos() - 1, End: ident.End(), NewText: []byte(to)},
+ {Pos: ident.Pos(), End: ident.End() - 1, NewText: []byte(to)},
+ {Pos: ident.Pos(), End: ident.End(), NewText: []byte("lorem ipsum")},
+ }...)
+ case duplicate:
+ edits = append(edits, edits...)
+ case other:
+ if pass.Analyzer.Name == other {
+ edits[0].Pos = edits[0].Pos + 1 // shift by one to mismatch analyzer and other
+ }
+ }
pass.Report(analysis.Diagnostic{
- Pos: ident.Pos(),
- End: ident.End(),
- Message: msg,
- SuggestedFixes: []analysis.SuggestedFix{{
- Message: msg,
- TextEdits: []analysis.TextEdit{{
- Pos: ident.Pos(),
- End: ident.End(),
- NewText: []byte(to),
- }},
- }},
- })
+ Pos: ident.Pos(),
+ End: ident.End(),
+ Message: msg,
+ SuggestedFixes: []analysis.SuggestedFix{{Message: msg, TextEdits: edits}}})
}
})
@@ -129,6 +153,18 @@ func Foo(s string) int {
RunDespiteErrors: true,
}
+ // A no-op analyzer that should finish regardless of
+ // parse or type errors in the code.
+ noopWithFact := &analysis.Analyzer{
+ Name: "noopfact",
+ Requires: []*analysis.Analyzer{inspect.Analyzer},
+ Run: func(pass *analysis.Pass) (interface{}, error) {
+ return nil, nil
+ },
+ RunDespiteErrors: true,
+ FactTypes: []analysis.Fact{&EmptyFact{}},
+ }
+
for _, test := range []struct {
name string
pattern []string
@@ -137,7 +173,17 @@ func Foo(s string) int {
}{
// parse/type errors
{name: "skip-error", pattern: []string{"file=" + path}, analyzers: []*analysis.Analyzer{analyzer}, code: 1},
- {name: "despite-error", pattern: []string{"file=" + path}, analyzers: []*analysis.Analyzer{noop}, code: 0},
+ // RunDespiteErrors allows a driver to run an Analyzer even after parse/type errors.
+ //
+ // The noop analyzer doesn't use facts, so the driver loads only the root
+ // package from source. For the rest, it asks 'go list' for export data,
+ // which fails because the compiler encounters the type error. Since the
+ // errors come from 'go list', the driver doesn't run the analyzer.
+ {name: "despite-error", pattern: []string{"file=" + path}, analyzers: []*analysis.Analyzer{noop}, code: 1},
+ // The noopfact analyzer does use facts, so the driver loads source for
+ // all dependencies, does type checking itself, recognizes the error as a
+ // type error, and runs the analyzer.
+ {name: "despite-error-fact", pattern: []string{"file=" + path}, analyzers: []*analysis.Analyzer{noopWithFact}, code: 0},
// combination of parse/type errors and no errors
{name: "despite-error-and-no-error", pattern: []string{"file=" + path, "sort"}, analyzers: []*analysis.Analyzer{analyzer, noop}, code: 1},
// non-existing package error
@@ -151,6 +197,10 @@ func Foo(s string) int {
// no errors
{name: "no-errors", pattern: []string{"sort"}, analyzers: []*analysis.Analyzer{analyzer, noop}, code: 0},
} {
+ if test.name == "despite-error" && testenv.Go1Point() < 20 {
+ // The behavior in the comment on the despite-error test only occurs for Go 1.20+.
+ continue
+ }
if got := checker.Run(test.pattern, test.analyzers); got != test.code {
t.Errorf("got incorrect exit code %d for test %s; want %d", got, test.name, test.code)
}
@@ -158,3 +208,7 @@ func Foo(s string) int {
defer cleanup()
}
+
+type EmptyFact struct{}
+
+func (f *EmptyFact) AFact() {}
diff --git a/go/analysis/internal/checker/fix_test.go b/go/analysis/internal/checker/fix_test.go
new file mode 100644
index 000000000..3ea92b38c
--- /dev/null
+++ b/go/analysis/internal/checker/fix_test.go
@@ -0,0 +1,309 @@
+// Copyright 2022 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package checker_test
+
+import (
+ "flag"
+ "io/ioutil"
+ "os"
+ "os/exec"
+ "path"
+ "regexp"
+ "runtime"
+ "testing"
+
+ "golang.org/x/tools/go/analysis"
+ "golang.org/x/tools/go/analysis/analysistest"
+ "golang.org/x/tools/go/analysis/internal/checker"
+ "golang.org/x/tools/internal/testenv"
+)
+
+func main() {
+ checker.Fix = true
+ patterns := flag.Args()
+
+ code := checker.Run(patterns, []*analysis.Analyzer{analyzer, other})
+ os.Exit(code)
+}
+
+// TestFixes ensures that checker.Run applies fixes correctly.
+// This test fork/execs the main function above.
+func TestFixes(t *testing.T) {
+ oses := map[string]bool{"darwin": true, "linux": true}
+ if !oses[runtime.GOOS] {
+ t.Skipf("skipping fork/exec test on this platform")
+ }
+
+ if os.Getenv("TESTFIXES_CHILD") == "1" {
+ // child process
+
+ // replace [progname -test.run=TestFixes -- ...]
+ // by [progname ...]
+ os.Args = os.Args[2:]
+ os.Args[0] = "vet"
+ main()
+ panic("unreachable")
+ }
+
+ testenv.NeedsTool(t, "go")
+
+ files := map[string]string{
+ "rename/foo.go": `package rename
+
+func Foo() {
+ bar := 12
+ _ = bar
+}
+
+// the end
+`,
+ "rename/intestfile_test.go": `package rename
+
+func InTestFile() {
+ bar := 13
+ _ = bar
+}
+
+// the end
+`,
+ "rename/foo_test.go": `package rename_test
+
+func Foo() {
+ bar := 14
+ _ = bar
+}
+
+// the end
+`,
+ "duplicate/dup.go": `package duplicate
+
+func Foo() {
+ bar := 14
+ _ = bar
+}
+
+// the end
+`,
+ }
+ fixed := map[string]string{
+ "rename/foo.go": `package rename
+
+func Foo() {
+ baz := 12
+ _ = baz
+}
+
+// the end
+`,
+ "rename/intestfile_test.go": `package rename
+
+func InTestFile() {
+ baz := 13
+ _ = baz
+}
+
+// the end
+`,
+ "rename/foo_test.go": `package rename_test
+
+func Foo() {
+ baz := 14
+ _ = baz
+}
+
+// the end
+`,
+ "duplicate/dup.go": `package duplicate
+
+func Foo() {
+ baz := 14
+ _ = baz
+}
+
+// the end
+`,
+ }
+ dir, cleanup, err := analysistest.WriteFiles(files)
+ if err != nil {
+ t.Fatalf("Creating test files failed with %s", err)
+ }
+ defer cleanup()
+
+ args := []string{"-test.run=TestFixes", "--", "rename", "duplicate"}
+ cmd := exec.Command(os.Args[0], args...)
+ cmd.Env = append(os.Environ(), "TESTFIXES_CHILD=1", "GOPATH="+dir, "GO111MODULE=off", "GOPROXY=off")
+
+ out, err := cmd.CombinedOutput()
+ if len(out) > 0 {
+ t.Logf("%s: out=<<%s>>", args, out)
+ }
+ var exitcode int
+ if err, ok := err.(*exec.ExitError); ok {
+ exitcode = err.ExitCode() // requires go1.12
+ }
+
+ const diagnosticsExitCode = 3
+ if exitcode != diagnosticsExitCode {
+ t.Errorf("%s: exited %d, want %d", args, exitcode, diagnosticsExitCode)
+ }
+
+ for name, want := range fixed {
+ path := path.Join(dir, "src", name)
+ contents, err := ioutil.ReadFile(path)
+ if err != nil {
+ t.Errorf("error reading %s: %v", path, err)
+ }
+ if got := string(contents); got != want {
+ t.Errorf("contents of %s file did not match expectations. got=%s, want=%s", path, got, want)
+ }
+ }
+}
+
+// TestConflict ensures that checker.Run detects conflicts correctly.
+// This test fork/execs the main function above.
+func TestConflict(t *testing.T) {
+ oses := map[string]bool{"darwin": true, "linux": true}
+ if !oses[runtime.GOOS] {
+ t.Skipf("skipping fork/exec test on this platform")
+ }
+
+ if os.Getenv("TESTCONFLICT_CHILD") == "1" {
+ // child process
+
+ // replace [progname -test.run=TestConflict -- ...]
+ // by [progname ...]
+ os.Args = os.Args[2:]
+ os.Args[0] = "vet"
+ main()
+ panic("unreachable")
+ }
+
+ testenv.NeedsTool(t, "go")
+
+ files := map[string]string{
+ "conflict/foo.go": `package conflict
+
+func Foo() {
+ bar := 12
+ _ = bar
+}
+
+// the end
+`,
+ }
+ dir, cleanup, err := analysistest.WriteFiles(files)
+ if err != nil {
+ t.Fatalf("Creating test files failed with %s", err)
+ }
+ defer cleanup()
+
+ args := []string{"-test.run=TestConflict", "--", "conflict"}
+ cmd := exec.Command(os.Args[0], args...)
+ cmd.Env = append(os.Environ(), "TESTCONFLICT_CHILD=1", "GOPATH="+dir, "GO111MODULE=off", "GOPROXY=off")
+
+ out, err := cmd.CombinedOutput()
+ var exitcode int
+ if err, ok := err.(*exec.ExitError); ok {
+ exitcode = err.ExitCode() // requires go1.12
+ }
+ const errExitCode = 1
+ if exitcode != errExitCode {
+ t.Errorf("%s: exited %d, want %d", args, exitcode, errExitCode)
+ }
+
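+ // The conflicting suggested fixes must not be applied; the driver
+ // reports the conflict and leaves the files untouched.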
+ pattern := `conflicting edits from rename and rename on /.*/conflict/foo.go`
+ matched, err := regexp.Match(pattern, out)
+ if err != nil {
+ t.Errorf("error matching pattern %s: %v", pattern, err)
+ } else if !matched {
+ t.Errorf("%s: output was=<<%s>>. Expected it to match <<%s>>", args, out, pattern)
+ }
+
+ // No files updated
+ for name, want := range files {
+ path := path.Join(dir, "src", name)
+ contents, err := ioutil.ReadFile(path)
+ if err != nil {
+ t.Errorf("error reading %s: %v", path, err)
+ }
+ if got := string(contents); got != want {
+ t.Errorf("contents of %s file updated. got=%s, want=%s", path, got, want)
+ }
+ }
+}
+
+// TestOther ensures that checker.Run reports conflicts from
+// distinct actions correctly.
+// This test fork/execs the main function above.
+func TestOther(t *testing.T) {
+ oses := map[string]bool{"darwin": true, "linux": true}
+ if !oses[runtime.GOOS] {
+ t.Skipf("skipping fork/exec test on this platform")
+ }
+
+ if os.Getenv("TESTOTHER_CHILD") == "1" {
+ // child process
+
+ // replace [progname -test.run=TestOther -- ...]
+ // by [progname ...]
+ os.Args = os.Args[2:]
+ os.Args[0] = "vet"
+ main()
+ panic("unreachable")
+ }
+
+ testenv.NeedsTool(t, "go")
+
+ files := map[string]string{
+ "other/foo.go": `package other
+
+func Foo() {
+ bar := 12
+ _ = bar
+}
+
+// the end
+`,
+ }
+ dir, cleanup, err := analysistest.WriteFiles(files)
+ if err != nil {
+ t.Fatalf("Creating test files failed with %s", err)
+ }
+ defer cleanup()
+
+ args := []string{"-test.run=TestOther", "--", "other"}
+ cmd := exec.Command(os.Args[0], args...)
+ cmd.Env = append(os.Environ(), "TESTOTHER_CHILD=1", "GOPATH="+dir, "GO111MODULE=off", "GOPROXY=off")
+
+ out, err := cmd.CombinedOutput()
+ var exitcode int
+ if err, ok := err.(*exec.ExitError); ok {
+ exitcode = err.ExitCode() // requires go1.12
+ }
+ const errExitCode = 1
+ if exitcode != errExitCode {
+ t.Errorf("%s: exited %d, want %d", args, exitcode, errExitCode)
+ }
+
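+ // Here the conflicting edits come from two distinct analyzers
+ // ("other" and "rename") rather than from a single analyzer.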
+ pattern := `conflicting edits from other and rename on /.*/other/foo.go`
+ matched, err := regexp.Match(pattern, out)
+ if err != nil {
+ t.Errorf("error matching pattern %s: %v", pattern, err)
+ } else if !matched {
+ t.Errorf("%s: output was=<<%s>>. Expected it to match <<%s>>", args, out, pattern)
+ }
+
+ // No files updated
+ for name, want := range files {
+ path := path.Join(dir, "src", name)
+ contents, err := ioutil.ReadFile(path)
+ if err != nil {
+ t.Errorf("error reading %s: %v", path, err)
+ }
+ if got := string(contents); got != want {
+ t.Errorf("contents of %s file updated. got=%s, want=%s", path, got, want)
+ }
+ }
+}
diff --git a/go/analysis/internal/checker/start_test.go b/go/analysis/internal/checker/start_test.go
new file mode 100644
index 000000000..ede21159b
--- /dev/null
+++ b/go/analysis/internal/checker/start_test.go
@@ -0,0 +1,85 @@
+// Copyright 2022 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package checker_test
+
+import (
+ "go/ast"
+ "io/ioutil"
+ "path/filepath"
+ "testing"
+
+ "golang.org/x/tools/go/analysis"
+ "golang.org/x/tools/go/analysis/analysistest"
+ "golang.org/x/tools/go/analysis/internal/checker"
+ "golang.org/x/tools/go/analysis/passes/inspect"
+ "golang.org/x/tools/go/ast/inspector"
+ "golang.org/x/tools/internal/testenv"
+)
+
+// TestStartFixes makes sure that modifying the first character
+// of the file takes effect.
+func TestStartFixes(t *testing.T) {
+ testenv.NeedsGoPackages(t)
+
+ files := map[string]string{
+ "comment/doc.go": `/* Package comment */
+package comment
+`}
+
+ want := `// Package comment
+package comment
+`
+
+ testdata, cleanup, err := analysistest.WriteFiles(files)
+ if err != nil {
+ t.Fatal(err)
+ }
+ defer cleanup()
+ path := filepath.Join(testdata, "src/comment/doc.go")
+ checker.Fix = true
+ checker.Run([]string{"file=" + path}, []*analysis.Analyzer{commentAnalyzer})
+
+ contents, err := ioutil.ReadFile(path)
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ got := string(contents)
+ if got != want {
+ t.Errorf("contents of rewritten file\ngot: %s\nwant: %s", got, want)
+ }
+}
+
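+// commentAnalyzer is a test-only analyzer that offers a suggested fix
+// for the file's leading block comment.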
+var commentAnalyzer = &analysis.Analyzer{
+ Name: "comment",
+ Requires: []*analysis.Analyzer{inspect.Analyzer},
+ Run: commentRun,
+}
+
+func commentRun(pass *analysis.Pass) (interface{}, error) {
+ const (
+ from = "/* Package comment */"
+ to = "// Package comment"
+ )
+ inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector)
+ inspect.Preorder(nil, func(n ast.Node) {
+ if n, ok := n.(*ast.Comment); ok && n.Text == from {
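+ // The suggested fix replaces the comment over its exact source
+ // range, so the edit begins at the very first byte of the file.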
+ pass.Report(analysis.Diagnostic{
+ Pos: n.Pos(),
+ End: n.End(),
+ SuggestedFixes: []analysis.SuggestedFix{{
+ TextEdits: []analysis.TextEdit{{
+ Pos: n.Pos(),
+ End: n.End(),
+ NewText: []byte(to),
+ }},
+ }},
+ })
+ }
+ })
+
+ return nil, nil
+}
diff --git a/go/analysis/internal/facts/facts.go b/go/analysis/internal/facts/facts.go
deleted file mode 100644
index 1fb69c615..000000000
--- a/go/analysis/internal/facts/facts.go
+++ /dev/null
@@ -1,323 +0,0 @@
-// Copyright 2018 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-// Package facts defines a serializable set of analysis.Fact.
-//
-// It provides a partial implementation of the Fact-related parts of the
-// analysis.Pass interface for use in analysis drivers such as "go vet"
-// and other build systems.
-//
-// The serial format is unspecified and may change, so the same version
-// of this package must be used for reading and writing serialized facts.
-//
-// The handling of facts in the analysis system parallels the handling
-// of type information in the compiler: during compilation of package P,
-// the compiler emits an export data file that describes the type of
-// every object (named thing) defined in package P, plus every object
-// indirectly reachable from one of those objects. Thus the downstream
-// compiler of package Q need only load one export data file per direct
-// import of Q, and it will learn everything about the API of package P
-// and everything it needs to know about the API of P's dependencies.
-//
-// Similarly, analysis of package P emits a fact set containing facts
-// about all objects exported from P, plus additional facts about only
-// those objects of P's dependencies that are reachable from the API of
-// package P; the downstream analysis of Q need only load one fact set
-// per direct import of Q.
-//
-// The notion of "exportedness" that matters here is that of the
-// compiler. According to the language spec, a method pkg.T.f is
-// unexported simply because its name starts with lowercase. But the
-// compiler must nonetheless export f so that downstream compilations can
-// accurately ascertain whether pkg.T implements an interface pkg.I
-// defined as interface{f()}. Exported thus means "described in export
-// data".
-//
-package facts
-
-import (
- "bytes"
- "encoding/gob"
- "fmt"
- "go/types"
- "io/ioutil"
- "log"
- "reflect"
- "sort"
- "sync"
-
- "golang.org/x/tools/go/analysis"
- "golang.org/x/tools/go/types/objectpath"
-)
-
-const debug = false
-
-// A Set is a set of analysis.Facts.
-//
-// Decode creates a Set of facts by reading from the imports of a given
-// package, and Encode writes out the set. Between these operation,
-// the Import and Export methods will query and update the set.
-//
-// All of Set's methods except String are safe to call concurrently.
-type Set struct {
- pkg *types.Package
- mu sync.Mutex
- m map[key]analysis.Fact
-}
-
-type key struct {
- pkg *types.Package
- obj types.Object // (object facts only)
- t reflect.Type
-}
-
-// ImportObjectFact implements analysis.Pass.ImportObjectFact.
-func (s *Set) ImportObjectFact(obj types.Object, ptr analysis.Fact) bool {
- if obj == nil {
- panic("nil object")
- }
- key := key{pkg: obj.Pkg(), obj: obj, t: reflect.TypeOf(ptr)}
- s.mu.Lock()
- defer s.mu.Unlock()
- if v, ok := s.m[key]; ok {
- reflect.ValueOf(ptr).Elem().Set(reflect.ValueOf(v).Elem())
- return true
- }
- return false
-}
-
-// ExportObjectFact implements analysis.Pass.ExportObjectFact.
-func (s *Set) ExportObjectFact(obj types.Object, fact analysis.Fact) {
- if obj.Pkg() != s.pkg {
- log.Panicf("in package %s: ExportObjectFact(%s, %T): can't set fact on object belonging another package",
- s.pkg, obj, fact)
- }
- key := key{pkg: obj.Pkg(), obj: obj, t: reflect.TypeOf(fact)}
- s.mu.Lock()
- s.m[key] = fact // clobber any existing entry
- s.mu.Unlock()
-}
-
-func (s *Set) AllObjectFacts(filter map[reflect.Type]bool) []analysis.ObjectFact {
- var facts []analysis.ObjectFact
- s.mu.Lock()
- for k, v := range s.m {
- if k.obj != nil && filter[k.t] {
- facts = append(facts, analysis.ObjectFact{Object: k.obj, Fact: v})
- }
- }
- s.mu.Unlock()
- return facts
-}
-
-// ImportPackageFact implements analysis.Pass.ImportPackageFact.
-func (s *Set) ImportPackageFact(pkg *types.Package, ptr analysis.Fact) bool {
- if pkg == nil {
- panic("nil package")
- }
- key := key{pkg: pkg, t: reflect.TypeOf(ptr)}
- s.mu.Lock()
- defer s.mu.Unlock()
- if v, ok := s.m[key]; ok {
- reflect.ValueOf(ptr).Elem().Set(reflect.ValueOf(v).Elem())
- return true
- }
- return false
-}
-
-// ExportPackageFact implements analysis.Pass.ExportPackageFact.
-func (s *Set) ExportPackageFact(fact analysis.Fact) {
- key := key{pkg: s.pkg, t: reflect.TypeOf(fact)}
- s.mu.Lock()
- s.m[key] = fact // clobber any existing entry
- s.mu.Unlock()
-}
-
-func (s *Set) AllPackageFacts(filter map[reflect.Type]bool) []analysis.PackageFact {
- var facts []analysis.PackageFact
- s.mu.Lock()
- for k, v := range s.m {
- if k.obj == nil && filter[k.t] {
- facts = append(facts, analysis.PackageFact{Package: k.pkg, Fact: v})
- }
- }
- s.mu.Unlock()
- return facts
-}
-
-// gobFact is the Gob declaration of a serialized fact.
-type gobFact struct {
- PkgPath string // path of package
- Object objectpath.Path // optional path of object relative to package itself
- Fact analysis.Fact // type and value of user-defined Fact
-}
-
-// Decode decodes all the facts relevant to the analysis of package pkg.
-// The read function reads serialized fact data from an external source
-// for one of pkg's direct imports. The empty file is a valid
-// encoding of an empty fact set.
-//
-// It is the caller's responsibility to call gob.Register on all
-// necessary fact types.
-func Decode(pkg *types.Package, read func(packagePath string) ([]byte, error)) (*Set, error) {
- // Compute the import map for this package.
- // See the package doc comment.
- packages := importMap(pkg.Imports())
-
- // Read facts from imported packages.
- // Facts may describe indirectly imported packages, or their objects.
- m := make(map[key]analysis.Fact) // one big bucket
- for _, imp := range pkg.Imports() {
- logf := func(format string, args ...interface{}) {
- if debug {
- prefix := fmt.Sprintf("in %s, importing %s: ",
- pkg.Path(), imp.Path())
- log.Print(prefix, fmt.Sprintf(format, args...))
- }
- }
-
- // Read the gob-encoded facts.
- data, err := read(imp.Path())
- if err != nil {
- return nil, fmt.Errorf("in %s, can't import facts for package %q: %v",
- pkg.Path(), imp.Path(), err)
- }
- if len(data) == 0 {
- continue // no facts
- }
- var gobFacts []gobFact
- if err := gob.NewDecoder(bytes.NewReader(data)).Decode(&gobFacts); err != nil {
- return nil, fmt.Errorf("decoding facts for %q: %v", imp.Path(), err)
- }
- if debug {
- logf("decoded %d facts: %v", len(gobFacts), gobFacts)
- }
-
- // Parse each one into a key and a Fact.
- for _, f := range gobFacts {
- factPkg := packages[f.PkgPath]
- if factPkg == nil {
- // Fact relates to a dependency that was
- // unused in this translation unit. Skip.
- logf("no package %q; discarding %v", f.PkgPath, f.Fact)
- continue
- }
- key := key{pkg: factPkg, t: reflect.TypeOf(f.Fact)}
- if f.Object != "" {
- // object fact
- obj, err := objectpath.Object(factPkg, f.Object)
- if err != nil {
- // (most likely due to unexported object)
- // TODO(adonovan): audit for other possibilities.
- logf("no object for path: %v; discarding %s", err, f.Fact)
- continue
- }
- key.obj = obj
- logf("read %T fact %s for %v", f.Fact, f.Fact, key.obj)
- } else {
- // package fact
- logf("read %T fact %s for %v", f.Fact, f.Fact, factPkg)
- }
- m[key] = f.Fact
- }
- }
-
- return &Set{pkg: pkg, m: m}, nil
-}
-
-// Encode encodes a set of facts to a memory buffer.
-//
-// It may fail if one of the Facts could not be gob-encoded, but this is
-// a sign of a bug in an Analyzer.
-func (s *Set) Encode() []byte {
-
- // TODO(adonovan): opt: use a more efficient encoding
- // that avoids repeating PkgPath for each fact.
-
- // Gather all facts, including those from imported packages.
- var gobFacts []gobFact
-
- s.mu.Lock()
- for k, fact := range s.m {
- if debug {
- log.Printf("%v => %s\n", k, fact)
- }
- var object objectpath.Path
- if k.obj != nil {
- path, err := objectpath.For(k.obj)
- if err != nil {
- if debug {
- log.Printf("discarding fact %s about %s\n", fact, k.obj)
- }
- continue // object not accessible from package API; discard fact
- }
- object = path
- }
- gobFacts = append(gobFacts, gobFact{
- PkgPath: k.pkg.Path(),
- Object: object,
- Fact: fact,
- })
- }
- s.mu.Unlock()
-
- // Sort facts by (package, object, type) for determinism.
- sort.Slice(gobFacts, func(i, j int) bool {
- x, y := gobFacts[i], gobFacts[j]
- if x.PkgPath != y.PkgPath {
- return x.PkgPath < y.PkgPath
- }
- if x.Object != y.Object {
- return x.Object < y.Object
- }
- tx := reflect.TypeOf(x.Fact)
- ty := reflect.TypeOf(y.Fact)
- if tx != ty {
- return tx.String() < ty.String()
- }
- return false // equal
- })
-
- var buf bytes.Buffer
- if len(gobFacts) > 0 {
- if err := gob.NewEncoder(&buf).Encode(gobFacts); err != nil {
- // Fact encoding should never fail. Identify the culprit.
- for _, gf := range gobFacts {
- if err := gob.NewEncoder(ioutil.Discard).Encode(gf); err != nil {
- fact := gf.Fact
- pkgpath := reflect.TypeOf(fact).Elem().PkgPath()
- log.Panicf("internal error: gob encoding of analysis fact %s failed: %v; please report a bug against fact %T in package %q",
- fact, err, fact, pkgpath)
- }
- }
- }
- }
-
- if debug {
- log.Printf("package %q: encode %d facts, %d bytes\n",
- s.pkg.Path(), len(gobFacts), buf.Len())
- }
-
- return buf.Bytes()
-}
-
-// String is provided only for debugging, and must not be called
-// concurrent with any Import/Export method.
-func (s *Set) String() string {
- var buf bytes.Buffer
- buf.WriteString("{")
- for k, f := range s.m {
- if buf.Len() > 1 {
- buf.WriteString(", ")
- }
- if k.obj != nil {
- buf.WriteString(k.obj.String())
- } else {
- buf.WriteString(k.pkg.Path())
- }
- fmt.Fprintf(&buf, ": %v", f)
- }
- buf.WriteString("}")
- return buf.String()
-}
diff --git a/go/analysis/internal/facts/facts_test.go b/go/analysis/internal/facts/facts_test.go
deleted file mode 100644
index 13c358230..000000000
--- a/go/analysis/internal/facts/facts_test.go
+++ /dev/null
@@ -1,384 +0,0 @@
-// Copyright 2018 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-package facts_test
-
-import (
- "encoding/gob"
- "fmt"
- "go/token"
- "go/types"
- "os"
- "reflect"
- "testing"
-
- "golang.org/x/tools/go/analysis/analysistest"
- "golang.org/x/tools/go/analysis/internal/facts"
- "golang.org/x/tools/go/packages"
- "golang.org/x/tools/internal/testenv"
- "golang.org/x/tools/internal/typeparams"
-)
-
-type myFact struct {
- S string
-}
-
-func (f *myFact) String() string { return fmt.Sprintf("myFact(%s)", f.S) }
-func (f *myFact) AFact() {}
-
-func init() {
- gob.Register(new(myFact))
-}
-
-func TestEncodeDecode(t *testing.T) {
- tests := []struct {
- name string
- typeparams bool // requires typeparams to be enabled
- files map[string]string
- plookups []pkgLookups // see testEncodeDecode for details
- }{
- {
- name: "loading-order",
- // c -> b -> a, a2
- // c does not directly depend on a, but it indirectly uses a.T.
- //
- // Package a2 is never loaded directly so it is incomplete.
- //
- // We use only types in this example because we rely on
- // types.Eval to resolve the lookup expressions, and it only
- // works for types. This is a definite gap in the typechecker API.
- files: map[string]string{
- "a/a.go": `package a; type A int; type T int`,
- "a2/a.go": `package a2; type A2 int; type Unneeded int`,
- "b/b.go": `package b; import ("a"; "a2"); type B chan a2.A2; type F func() a.T`,
- "c/c.go": `package c; import "b"; type C []b.B`,
- },
- // In the following table, we analyze packages (a, b, c) in order,
- // look up various objects accessible within each package,
- // and see if they have a fact. The "analysis" exports a fact
- // for every object at package level.
- //
- // Note: Loop iterations are not independent test cases;
- // order matters, as we populate factmap.
- plookups: []pkgLookups{
- {"a", []lookup{
- {"A", "myFact(a.A)"},
- }},
- {"b", []lookup{
- {"a.A", "myFact(a.A)"},
- {"a.T", "myFact(a.T)"},
- {"B", "myFact(b.B)"},
- {"F", "myFact(b.F)"},
- {"F(nil)()", "myFact(a.T)"}, // (result type of b.F)
- }},
- {"c", []lookup{
- {"b.B", "myFact(b.B)"},
- {"b.F", "myFact(b.F)"},
- //{"b.F(nil)()", "myFact(a.T)"}, // no fact; TODO(adonovan): investigate
- {"C", "myFact(c.C)"},
- {"C{}[0]", "myFact(b.B)"},
- {"<-(C{}[0])", "no fact"}, // object but no fact (we never "analyze" a2)
- }},
- },
- },
- {
- name: "globals",
- files: map[string]string{
- "a/a.go": `package a;
- type T1 int
- type T2 int
- type T3 int
- type T4 int
- type T5 int
- type K int; type V string
- `,
- "b/b.go": `package b
- import "a"
- var (
- G1 []a.T1
- G2 [7]a.T2
- G3 chan a.T3
- G4 *a.T4
- G5 struct{ F a.T5 }
- G6 map[a.K]a.V
- )
- `,
- "c/c.go": `package c; import "b";
- var (
- v1 = b.G1
- v2 = b.G2
- v3 = b.G3
- v4 = b.G4
- v5 = b.G5
- v6 = b.G6
- )
- `,
- },
- plookups: []pkgLookups{
- {"a", []lookup{}},
- {"b", []lookup{}},
- {"c", []lookup{
- {"v1[0]", "myFact(a.T1)"},
- {"v2[0]", "myFact(a.T2)"},
- {"<-v3", "myFact(a.T3)"},
- {"*v4", "myFact(a.T4)"},
- {"v5.F", "myFact(a.T5)"},
- {"v6[0]", "myFact(a.V)"},
- }},
- },
- },
- {
- name: "typeparams",
- typeparams: true,
- files: map[string]string{
- "a/a.go": `package a
- type T1 int
- type T2 int
- type T3 interface{Foo()}
- type T4 int
- type T5 int
- type T6 interface{Foo()}
- `,
- "b/b.go": `package b
- import "a"
- type N1[T a.T1|int8] func() T
- type N2[T any] struct{ F T }
- type N3[T a.T3] func() T
- type N4[T a.T4|int8] func() T
- type N5[T interface{Bar() a.T5} ] func() T
-
- type t5 struct{}; func (t5) Bar() a.T5
-
- var G1 N1[a.T1]
- var G2 func() N2[a.T2]
- var G3 N3[a.T3]
- var G4 N4[a.T4]
- var G5 N5[t5]
-
- func F6[T a.T6]() T { var x T; return x }
- `,
- "c/c.go": `package c; import "b";
- var (
- v1 = b.G1
- v2 = b.G2
- v3 = b.G3
- v4 = b.G4
- v5 = b.G5
- v6 = b.F6[t6]
- )
-
- type t6 struct{}; func (t6) Foo() {}
- `,
- },
- plookups: []pkgLookups{
- {"a", []lookup{}},
- {"b", []lookup{}},
- {"c", []lookup{
- {"v1", "myFact(b.N1)"},
- {"v1()", "myFact(a.T1)"},
- {"v2()", "myFact(b.N2)"},
- {"v2().F", "myFact(a.T2)"},
- {"v3", "myFact(b.N3)"},
- {"v4", "myFact(b.N4)"},
- {"v4()", "myFact(a.T4)"},
- {"v5", "myFact(b.N5)"},
- {"v5()", "myFact(b.t5)"},
- {"v6()", "myFact(c.t6)"},
- }},
- },
- },
- }
-
- for i := range tests {
- test := tests[i]
- t.Run(test.name, func(t *testing.T) {
- t.Parallel()
- if test.typeparams && !typeparams.Enabled {
- t.Skip("type parameters are not enabled")
- }
- testEncodeDecode(t, test.files, test.plookups)
- })
- }
-}
-
-type lookup struct {
- objexpr string
- want string
-}
-
-type pkgLookups struct {
- path string
- lookups []lookup
-}
-
-// testEncodeDecode tests fact encoding and decoding and simulates how package facts
-// are passed during analysis. It operates on a group of Go file contents. Then
-// for each <package, []lookup> in tests it does the following:
-// 1) loads and type checks the package,
-// 2) calls facts.Decode to load the facts exported by its imports,
-// 3) exports a myFact Fact for all package-level objects,
-// 4) For each lookup for the current package:
-// 4.a) looks up the types.Object for a Go source expression in the current package
-// (or confirms one is not expected want=="no object"),
-// 4.b) finds a Fact for the object (or confirms one is not expected want=="no fact"),
-// 4.c) compares the content of the Fact to want.
-// 5) encodes the Facts of the package.
-//
-// Note: tests are not independent test cases; order matters (as does a package being
-// skipped). It changes what Facts can be imported.
-//
-// Failures are reported on t.
-func testEncodeDecode(t *testing.T, files map[string]string, tests []pkgLookups) {
- dir, cleanup, err := analysistest.WriteFiles(files)
- if err != nil {
- t.Fatal(err)
- }
- defer cleanup()
-
- // factmap represents the passing of encoded facts from one
- // package to another. In practice one would use the file system.
- factmap := make(map[string][]byte)
- read := func(path string) ([]byte, error) { return factmap[path], nil }
-
- // Analyze packages in order, look up various objects accessible within
- // each package, and see if they have a fact. The "analysis" exports a
- // fact for every object at package level.
- //
- // Note: Loop iterations are not independent test cases;
- // order matters, as we populate factmap.
- for _, test := range tests {
- // load package
- pkg, err := load(t, dir, test.path)
- if err != nil {
- t.Fatal(err)
- }
-
- // decode
- facts, err := facts.Decode(pkg, read)
- if err != nil {
- t.Fatalf("Decode failed: %v", err)
- }
- t.Logf("decode %s facts = %v", pkg.Path(), facts) // show all facts
-
- // export
- // (one fact for each package-level object)
- for _, name := range pkg.Scope().Names() {
- obj := pkg.Scope().Lookup(name)
- fact := &myFact{obj.Pkg().Name() + "." + obj.Name()}
- facts.ExportObjectFact(obj, fact)
- }
- t.Logf("exported %s facts = %v", pkg.Path(), facts) // show all facts
-
- // import
- // (after export, because an analyzer may import its own facts)
- for _, lookup := range test.lookups {
- fact := new(myFact)
- var got string
- if obj := find(pkg, lookup.objexpr); obj == nil {
- got = "no object"
- } else if facts.ImportObjectFact(obj, fact) {
- got = fact.String()
- } else {
- got = "no fact"
- }
- if got != lookup.want {
- t.Errorf("in %s, ImportObjectFact(%s, %T) = %s, want %s",
- pkg.Path(), lookup.objexpr, fact, got, lookup.want)
- }
- }
-
- // encode
- factmap[pkg.Path()] = facts.Encode()
- }
-}
-
-func find(p *types.Package, expr string) types.Object {
- // types.Eval only allows us to compute a TypeName object for an expression.
- // TODO(adonovan): support other expressions that denote an object:
- // - an identifier (or qualified ident) for a func, const, or var
- // - new(T).f for a field or method
- // I've added CheckExpr in https://go-review.googlesource.com/c/go/+/144677.
- // If that becomes available, use it.
-
- // Choose an arbitrary position within the (single-file) package
- // so that we are within the scope of its import declarations.
- somepos := p.Scope().Lookup(p.Scope().Names()[0]).Pos()
- tv, err := types.Eval(token.NewFileSet(), p, somepos, expr)
- if err != nil {
- return nil
- }
- if n, ok := tv.Type.(*types.Named); ok {
- return n.Obj()
- }
- return nil
-}
-
-func load(t *testing.T, dir string, path string) (*types.Package, error) {
- cfg := &packages.Config{
- Mode: packages.LoadSyntax,
- Dir: dir,
- Env: append(os.Environ(), "GOPATH="+dir, "GO111MODULE=off", "GOPROXY=off"),
- }
- testenv.NeedsGoPackagesEnv(t, cfg.Env)
- pkgs, err := packages.Load(cfg, path)
- if err != nil {
- return nil, err
- }
- if packages.PrintErrors(pkgs) > 0 {
- return nil, fmt.Errorf("packages had errors")
- }
- if len(pkgs) == 0 {
- return nil, fmt.Errorf("no package matched %s", path)
- }
- return pkgs[0].Types, nil
-}
-
-type otherFact struct {
- S string
-}
-
-func (f *otherFact) String() string { return fmt.Sprintf("otherFact(%s)", f.S) }
-func (f *otherFact) AFact() {}
-
-func TestFactFilter(t *testing.T) {
- files := map[string]string{
- "a/a.go": `package a; type A int`,
- }
- dir, cleanup, err := analysistest.WriteFiles(files)
- if err != nil {
- t.Fatal(err)
- }
- defer cleanup()
-
- pkg, err := load(t, dir, "a")
- if err != nil {
- t.Fatal(err)
- }
-
- obj := pkg.Scope().Lookup("A")
- s, err := facts.Decode(pkg, func(string) ([]byte, error) { return nil, nil })
- if err != nil {
- t.Fatal(err)
- }
- s.ExportObjectFact(obj, &myFact{"good object fact"})
- s.ExportPackageFact(&myFact{"good package fact"})
- s.ExportObjectFact(obj, &otherFact{"bad object fact"})
- s.ExportPackageFact(&otherFact{"bad package fact"})
-
- filter := map[reflect.Type]bool{
- reflect.TypeOf(&myFact{}): true,
- }
-
- pkgFacts := s.AllPackageFacts(filter)
- wantPkgFacts := `[{package a ("a") myFact(good package fact)}]`
- if got := fmt.Sprintf("%v", pkgFacts); got != wantPkgFacts {
- t.Errorf("AllPackageFacts: got %v, want %v", got, wantPkgFacts)
- }
-
- objFacts := s.AllObjectFacts(filter)
- wantObjFacts := "[{type a.A int myFact(good object fact)}]"
- if got := fmt.Sprintf("%v", objFacts); got != wantObjFacts {
- t.Errorf("AllObjectFacts: got %v, want %v", got, wantObjFacts)
- }
-}
diff --git a/go/analysis/internal/facts/imports.go b/go/analysis/internal/facts/imports.go
deleted file mode 100644
index ade0cc6fa..000000000
--- a/go/analysis/internal/facts/imports.go
+++ /dev/null
@@ -1,119 +0,0 @@
-// Copyright 2018 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-package facts
-
-import (
- "go/types"
-
- "golang.org/x/tools/internal/typeparams"
-)
-
-// importMap computes the import map for a package by traversing the
-// entire exported API of each of its imports.
-//
-// This is a workaround for the fact that we cannot access the map used
-// internally by the types.Importer returned by go/importer. The entries
-// in this map are the packages and objects that may be relevant to the
-// current analysis unit.
-//
-// Packages in the map that are only indirectly imported may be
-// incomplete (!pkg.Complete()).
-//
-func importMap(imports []*types.Package) map[string]*types.Package {
- objects := make(map[types.Object]bool)
- packages := make(map[string]*types.Package)
-
- var addObj func(obj types.Object) bool
- var addType func(T types.Type)
-
- addObj = func(obj types.Object) bool {
- if !objects[obj] {
- objects[obj] = true
- addType(obj.Type())
- if pkg := obj.Pkg(); pkg != nil {
- packages[pkg.Path()] = pkg
- }
- return true
- }
- return false
- }
-
- addType = func(T types.Type) {
- switch T := T.(type) {
- case *types.Basic:
- // nop
- case *types.Named:
- if addObj(T.Obj()) {
- // TODO(taking): Investigate why the Underlying type is not added here.
- for i := 0; i < T.NumMethods(); i++ {
- addObj(T.Method(i))
- }
- if tparams := typeparams.ForNamed(T); tparams != nil {
- for i := 0; i < tparams.Len(); i++ {
- addType(tparams.At(i))
- }
- }
- if targs := typeparams.NamedTypeArgs(T); targs != nil {
- for i := 0; i < targs.Len(); i++ {
- addType(targs.At(i))
- }
- }
- }
- case *types.Pointer:
- addType(T.Elem())
- case *types.Slice:
- addType(T.Elem())
- case *types.Array:
- addType(T.Elem())
- case *types.Chan:
- addType(T.Elem())
- case *types.Map:
- addType(T.Key())
- addType(T.Elem())
- case *types.Signature:
- addType(T.Params())
- addType(T.Results())
- if tparams := typeparams.ForSignature(T); tparams != nil {
- for i := 0; i < tparams.Len(); i++ {
- addType(tparams.At(i))
- }
- }
- case *types.Struct:
- for i := 0; i < T.NumFields(); i++ {
- addObj(T.Field(i))
- }
- case *types.Tuple:
- for i := 0; i < T.Len(); i++ {
- addObj(T.At(i))
- }
- case *types.Interface:
- for i := 0; i < T.NumMethods(); i++ {
- addObj(T.Method(i))
- }
- for i := 0; i < T.NumEmbeddeds(); i++ {
- addType(T.EmbeddedType(i)) // walk Embedded for implicits
- }
- case *typeparams.Union:
- for i := 0; i < T.Len(); i++ {
- addType(T.Term(i).Type())
- }
- case *typeparams.TypeParam:
- if addObj(T.Obj()) {
- addType(T.Constraint())
- }
- }
- }
-
- for _, imp := range imports {
- packages[imp.Path()] = imp
-
- scope := imp.Scope()
- for _, name := range scope.Names() {
- addObj(scope.Lookup(name))
- }
- }
-
- return packages
-}
diff --git a/go/analysis/passes/asmdecl/arches_go118.go b/go/analysis/passes/asmdecl/arches_go118.go
new file mode 100644
index 000000000..d8211afdc
--- /dev/null
+++ b/go/analysis/passes/asmdecl/arches_go118.go
@@ -0,0 +1,12 @@
+// Copyright 2022 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+//go:build !go1.19
+// +build !go1.19
+
+package asmdecl
+
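+// additionalArches returns assembly architectures that only newer Go
+// toolchains recognize; there are none before go1.19.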
+func additionalArches() []*asmArch {
+ return nil
+}
diff --git a/go/analysis/passes/asmdecl/arches_go119.go b/go/analysis/passes/asmdecl/arches_go119.go
new file mode 100644
index 000000000..3018383e7
--- /dev/null
+++ b/go/analysis/passes/asmdecl/arches_go119.go
@@ -0,0 +1,14 @@
+// Copyright 2022 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+//go:build go1.19
+// +build go1.19
+
+package asmdecl
+
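+// The loong64 port was added in Go 1.19, so the architecture is only
+// registered when building with go1.19 or later.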
+var asmArchLoong64 = asmArch{name: "loong64", bigEndian: false, stack: "R3", lr: true}
+
+func additionalArches() []*asmArch {
+ return []*asmArch{&asmArchLoong64}
+}
diff --git a/go/analysis/passes/asmdecl/asmdecl.go b/go/analysis/passes/asmdecl/asmdecl.go
index b05ed5c15..7288559fc 100644
--- a/go/analysis/passes/asmdecl/asmdecl.go
+++ b/go/analysis/passes/asmdecl/asmdecl.go
@@ -92,7 +92,7 @@ var (
asmArchMips64LE = asmArch{name: "mips64le", bigEndian: false, stack: "R29", lr: true}
asmArchPpc64 = asmArch{name: "ppc64", bigEndian: true, stack: "R1", lr: true, retRegs: []string{"R3", "F1"}}
asmArchPpc64LE = asmArch{name: "ppc64le", bigEndian: false, stack: "R1", lr: true, retRegs: []string{"R3", "F1"}}
- asmArchRISCV64 = asmArch{name: "riscv64", bigEndian: false, stack: "SP", lr: true}
+ asmArchRISCV64 = asmArch{name: "riscv64", bigEndian: false, stack: "SP", lr: true, retRegs: []string{"X10", "F10"}}
asmArchS390X = asmArch{name: "s390x", bigEndian: true, stack: "R15", lr: true}
asmArchWasm = asmArch{name: "wasm", bigEndian: false, stack: "SP", lr: false}
@@ -114,6 +114,7 @@ var (
)
func init() {
+ arches = append(arches, additionalArches()...)
for _, arch := range arches {
arch.sizes = types.SizesFor("gc", arch.name)
if arch.sizes == nil {
@@ -731,7 +732,7 @@ func asmCheckVar(badf func(string, ...interface{}), fn *asmFunc, line, expr stri
src = 8
}
}
- case "mips", "mipsle", "mips64", "mips64le":
+ case "loong64", "mips", "mipsle", "mips64", "mips64le":
switch op {
case "MOVB", "MOVBU":
src = 1
diff --git a/go/analysis/passes/asmdecl/asmdecl_test.go b/go/analysis/passes/asmdecl/asmdecl_test.go
index f88c188b2..50938a075 100644
--- a/go/analysis/passes/asmdecl/asmdecl_test.go
+++ b/go/analysis/passes/asmdecl/asmdecl_test.go
@@ -14,14 +14,17 @@ import (
)
var goosarches = []string{
- "linux/amd64", // asm1.s, asm4.s
- "linux/386", // asm2.s
- "linux/arm", // asm3.s
- "linux/mips64", // asm5.s
- "linux/s390x", // asm6.s
- "linux/ppc64", // asm7.s
- "linux/mips", // asm8.s,
- "js/wasm", // asm9.s
+ "linux/amd64", // asm1.s, asm4.s
+ "linux/386", // asm2.s
+ "linux/arm", // asm3.s
+ // TODO: skip test on loong64 until the go toolchain supports loong64.
+ // "linux/loong64", // asm10.s
+ "linux/mips64", // asm5.s
+ "linux/s390x", // asm6.s
+ "linux/ppc64", // asm7.s
+ "linux/mips", // asm8.s,
+ "js/wasm", // asm9.s
+ "linux/riscv64", // asm11.s
}
func Test(t *testing.T) {
diff --git a/go/analysis/passes/asmdecl/testdata/src/a/asm10.s b/go/analysis/passes/asmdecl/testdata/src/a/asm10.s
new file mode 100644
index 000000000..f0045882a
--- /dev/null
+++ b/go/analysis/passes/asmdecl/testdata/src/a/asm10.s
@@ -0,0 +1,192 @@
+// Copyright 2022 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+//go:build loong64
+
+TEXT ·arg1(SB),0,$0-2
+ MOVB x+0(FP), R19
+ MOVBU y+1(FP), R18
+ MOVH x+0(FP), R19 // want `\[loong64\] arg1: invalid MOVH of x\+0\(FP\); int8 is 1-byte value`
+ MOVHU y+1(FP), R19 // want `invalid MOVHU of y\+1\(FP\); uint8 is 1-byte value`
+ MOVW x+0(FP), R19 // want `invalid MOVW of x\+0\(FP\); int8 is 1-byte value`
+ MOVWU y+1(FP), R19 // want `invalid MOVWU of y\+1\(FP\); uint8 is 1-byte value`
+ MOVV x+0(FP), R19 // want `invalid MOVV of x\+0\(FP\); int8 is 1-byte value`
+ MOVV y+1(FP), R19 // want `invalid MOVV of y\+1\(FP\); uint8 is 1-byte value`
+ MOVB x+1(FP), R19 // want `invalid offset x\+1\(FP\); expected x\+0\(FP\)`
+ MOVBU y+2(FP), R19 // want `invalid offset y\+2\(FP\); expected y\+1\(FP\)`
+ MOVB 16(R3), R19 // want `16\(R3\) should be x\+0\(FP\)`
+ MOVB 17(R3), R19 // want `17\(R3\) should be y\+1\(FP\)`
+ MOVB 18(R3), R19 // want `use of 18\(R3\) points beyond argument frame`
+ RET
+
+TEXT ·arg2(SB),0,$0-4
+ MOVBU x+0(FP), R19 // want `arg2: invalid MOVBU of x\+0\(FP\); int16 is 2-byte value`
+ MOVB y+2(FP), R19 // want `invalid MOVB of y\+2\(FP\); uint16 is 2-byte value`
+ MOVHU x+0(FP), R19
+ MOVH y+2(FP), R18
+ MOVWU x+0(FP), R19 // want `invalid MOVWU of x\+0\(FP\); int16 is 2-byte value`
+ MOVW y+2(FP), R19 // want `invalid MOVW of y\+2\(FP\); uint16 is 2-byte value`
+ MOVV x+0(FP), R19 // want `invalid MOVV of x\+0\(FP\); int16 is 2-byte value`
+ MOVV y+2(FP), R19 // want `invalid MOVV of y\+2\(FP\); uint16 is 2-byte value`
+ MOVHU x+2(FP), R19 // want `invalid offset x\+2\(FP\); expected x\+0\(FP\)`
+ MOVH y+0(FP), R19 // want `invalid offset y\+0\(FP\); expected y\+2\(FP\)`
+ RET
+
+TEXT ·arg4(SB),0,$0-2 // want `arg4: wrong argument size 2; expected \$\.\.\.-8`
+ MOVB x+0(FP), R19 // want `invalid MOVB of x\+0\(FP\); int32 is 4-byte value`
+ MOVB y+4(FP), R18 // want `invalid MOVB of y\+4\(FP\); uint32 is 4-byte value`
+ MOVH x+0(FP), R19 // want `invalid MOVH of x\+0\(FP\); int32 is 4-byte value`
+ MOVH y+4(FP), R19 // want `invalid MOVH of y\+4\(FP\); uint32 is 4-byte value`
+ MOVW x+0(FP), R19
+ MOVW y+4(FP), R19
+ MOVV x+0(FP), R19 // want `invalid MOVV of x\+0\(FP\); int32 is 4-byte value`
+ MOVV y+4(FP), R19 // want `invalid MOVV of y\+4\(FP\); uint32 is 4-byte value`
+ MOVW x+4(FP), R19 // want `invalid offset x\+4\(FP\); expected x\+0\(FP\)`
+ MOVW y+2(FP), R19 // want `invalid offset y\+2\(FP\); expected y\+4\(FP\)`
+ RET
+
+TEXT ·arg8(SB),7,$0-2 // want `wrong argument size 2; expected \$\.\.\.-16`
+ MOVB x+0(FP), R19 // want `invalid MOVB of x\+0\(FP\); int64 is 8-byte value`
+ MOVB y+8(FP), R18 // want `invalid MOVB of y\+8\(FP\); uint64 is 8-byte value`
+ MOVH x+0(FP), R19 // want `invalid MOVH of x\+0\(FP\); int64 is 8-byte value`
+ MOVH y+8(FP), R19 // want `invalid MOVH of y\+8\(FP\); uint64 is 8-byte value`
+ MOVW x+0(FP), R19 // want `invalid MOVW of x\+0\(FP\); int64 is 8-byte value`
+ MOVW y+8(FP), R19 // want `invalid MOVW of y\+8\(FP\); uint64 is 8-byte value`
+ MOVV x+0(FP), R19
+ MOVV y+8(FP), R19
+ MOVV x+8(FP), R19 // want `invalid offset x\+8\(FP\); expected x\+0\(FP\)`
+ MOVV y+2(FP), R19 // want `invalid offset y\+2\(FP\); expected y\+8\(FP\)`
+ RET
+
+TEXT ·argint(SB),0,$0-2 // want `wrong argument size 2; expected \$\.\.\.-16`
+ MOVB x+0(FP), R19 // want `invalid MOVB of x\+0\(FP\); int is 8-byte value`
+ MOVB y+8(FP), R18 // want `invalid MOVB of y\+8\(FP\); uint is 8-byte value`
+ MOVH x+0(FP), R19 // want `invalid MOVH of x\+0\(FP\); int is 8-byte value`
+ MOVH y+8(FP), R19 // want `invalid MOVH of y\+8\(FP\); uint is 8-byte value`
+ MOVW x+0(FP), R19 // want `invalid MOVW of x\+0\(FP\); int is 8-byte value`
+ MOVW y+8(FP), R19 // want `invalid MOVW of y\+8\(FP\); uint is 8-byte value`
+ MOVV x+0(FP), R19
+ MOVV y+8(FP), R19
+ MOVV x+8(FP), R19 // want `invalid offset x\+8\(FP\); expected x\+0\(FP\)`
+ MOVV y+2(FP), R19 // want `invalid offset y\+2\(FP\); expected y\+8\(FP\)`
+ RET
+
+TEXT ·argptr(SB),7,$0-2 // want `wrong argument size 2; expected \$\.\.\.-40`
+ MOVB x+0(FP), R19 // want `invalid MOVB of x\+0\(FP\); \*byte is 8-byte value`
+ MOVB y+8(FP), R18 // want `invalid MOVB of y\+8\(FP\); \*byte is 8-byte value`
+ MOVH x+0(FP), R19 // want `invalid MOVH of x\+0\(FP\); \*byte is 8-byte value`
+ MOVH y+8(FP), R19 // want `invalid MOVH of y\+8\(FP\); \*byte is 8-byte value`
+ MOVW x+0(FP), R19 // want `invalid MOVW of x\+0\(FP\); \*byte is 8-byte value`
+ MOVW y+8(FP), R19 // want `invalid MOVW of y\+8\(FP\); \*byte is 8-byte value`
+ MOVV x+0(FP), R19
+ MOVV y+8(FP), R19
+ MOVV x+8(FP), R19 // want `invalid offset x\+8\(FP\); expected x\+0\(FP\)`
+ MOVV y+2(FP), R19 // want `invalid offset y\+2\(FP\); expected y\+8\(FP\)`
+ MOVW c+16(FP), R19 // want `invalid MOVW of c\+16\(FP\); chan int is 8-byte value`
+ MOVW m+24(FP), R19 // want `invalid MOVW of m\+24\(FP\); map\[int\]int is 8-byte value`
+ MOVW f+32(FP), R19 // want `invalid MOVW of f\+32\(FP\); func\(\) is 8-byte value`
+ RET
+
+TEXT ·argstring(SB),0,$32 // want `wrong argument size 0; expected \$\.\.\.-32`
+ MOVH x+0(FP), R19 // want `invalid MOVH of x\+0\(FP\); string base is 8-byte value`
+ MOVW x+0(FP), R19 // want `invalid MOVW of x\+0\(FP\); string base is 8-byte value`
+ MOVV x+0(FP), R19
+ MOVH x_base+0(FP), R19 // want `invalid MOVH of x_base\+0\(FP\); string base is 8-byte value`
+ MOVW x_base+0(FP), R19 // want `invalid MOVW of x_base\+0\(FP\); string base is 8-byte value`
+ MOVV x_base+0(FP), R19
+ MOVH x_len+0(FP), R19 // want `invalid offset x_len\+0\(FP\); expected x_len\+8\(FP\)`
+ MOVW x_len+0(FP), R19 // want `invalid offset x_len\+0\(FP\); expected x_len\+8\(FP\)`
+ MOVV x_len+0(FP), R19 // want `invalid offset x_len\+0\(FP\); expected x_len\+8\(FP\)`
+ MOVH x_len+8(FP), R19 // want `invalid MOVH of x_len\+8\(FP\); string len is 8-byte value`
+ MOVW x_len+8(FP), R19 // want `invalid MOVW of x_len\+8\(FP\); string len is 8-byte value`
+ MOVV x_len+8(FP), R19
+ MOVV y+0(FP), R19 // want `invalid offset y\+0\(FP\); expected y\+16\(FP\)`
+ MOVV y_len+8(FP), R19 // want `invalid offset y_len\+8\(FP\); expected y_len\+24\(FP\)`
+ RET
+
+TEXT ·argslice(SB),0,$48 // want `wrong argument size 0; expected \$\.\.\.-48`
+ MOVH x+0(FP), R19 // want `invalid MOVH of x\+0\(FP\); slice base is 8-byte value`
+ MOVW x+0(FP), R19 // want `invalid MOVW of x\+0\(FP\); slice base is 8-byte value`
+ MOVV x+0(FP), R19
+ MOVH x_base+0(FP), R19 // want `invalid MOVH of x_base\+0\(FP\); slice base is 8-byte value`
+ MOVW x_base+0(FP), R19 // want `invalid MOVW of x_base\+0\(FP\); slice base is 8-byte value`
+ MOVV x_base+0(FP), R19
+ MOVH x_len+0(FP), R19 // want `invalid offset x_len\+0\(FP\); expected x_len\+8\(FP\)`
+ MOVW x_len+0(FP), R19 // want `invalid offset x_len\+0\(FP\); expected x_len\+8\(FP\)`
+ MOVV x_len+0(FP), R19 // want `invalid offset x_len\+0\(FP\); expected x_len\+8\(FP\)`
+ MOVH x_len+8(FP), R19 // want `invalid MOVH of x_len\+8\(FP\); slice len is 8-byte value`
+ MOVW x_len+8(FP), R19 // want `invalid MOVW of x_len\+8\(FP\); slice len is 8-byte value`
+ MOVV x_len+8(FP), R19
+ MOVH x_cap+0(FP), R19 // want `invalid offset x_cap\+0\(FP\); expected x_cap\+16\(FP\)`
+ MOVW x_cap+0(FP), R19 // want `invalid offset x_cap\+0\(FP\); expected x_cap\+16\(FP\)`
+ MOVV x_cap+0(FP), R19 // want `invalid offset x_cap\+0\(FP\); expected x_cap\+16\(FP\)`
+ MOVH x_cap+16(FP), R19 // want `invalid MOVH of x_cap\+16\(FP\); slice cap is 8-byte value`
+ MOVW x_cap+16(FP), R19 // want `invalid MOVW of x_cap\+16\(FP\); slice cap is 8-byte value`
+ MOVV x_cap+16(FP), R19
+ MOVV y+0(FP), R19 // want `invalid offset y\+0\(FP\); expected y\+24\(FP\)`
+ MOVV y_len+8(FP), R19 // want `invalid offset y_len\+8\(FP\); expected y_len\+32\(FP\)`
+ MOVV y_cap+16(FP), R19 // want `invalid offset y_cap\+16\(FP\); expected y_cap\+40\(FP\)`
+ RET
+
+TEXT ·argiface(SB),0,$0-32
+ MOVH x+0(FP), R19 // want `invalid MOVH of x\+0\(FP\); interface type is 8-byte value`
+ MOVW x+0(FP), R19 // want `invalid MOVW of x\+0\(FP\); interface type is 8-byte value`
+ MOVV x+0(FP), R19
+ MOVH x_type+0(FP), R19 // want `invalid MOVH of x_type\+0\(FP\); interface type is 8-byte value`
+ MOVW x_type+0(FP), R19 // want `invalid MOVW of x_type\+0\(FP\); interface type is 8-byte value`
+ MOVV x_type+0(FP), R19
+ MOVV x_itable+0(FP), R19 // want `unknown variable x_itable; offset 0 is x_type\+0\(FP\)`
+ MOVV x_itable+1(FP), R19 // want `unknown variable x_itable; offset 1 is x_type\+0\(FP\)`
+ MOVH x_data+0(FP), R19 // want `invalid offset x_data\+0\(FP\); expected x_data\+8\(FP\)`
+ MOVW x_data+0(FP), R19 // want `invalid offset x_data\+0\(FP\); expected x_data\+8\(FP\)`
+ MOVV x_data+0(FP), R19 // want `invalid offset x_data\+0\(FP\); expected x_data\+8\(FP\)`
+ MOVH x_data+8(FP), R19 // want `invalid MOVH of x_data\+8\(FP\); interface data is 8-byte value`
+ MOVW x_data+8(FP), R19 // want `invalid MOVW of x_data\+8\(FP\); interface data is 8-byte value`
+ MOVV x_data+8(FP), R19
+ MOVH y+16(FP), R19 // want `invalid MOVH of y\+16\(FP\); interface itable is 8-byte value`
+ MOVW y+16(FP), R19 // want `invalid MOVW of y\+16\(FP\); interface itable is 8-byte value`
+ MOVV y+16(FP), R19
+ MOVH y_itable+16(FP), R19 // want `invalid MOVH of y_itable\+16\(FP\); interface itable is 8-byte value`
+ MOVW y_itable+16(FP), R19 // want `invalid MOVW of y_itable\+16\(FP\); interface itable is 8-byte value`
+ MOVV y_itable+16(FP), R19
+ MOVV y_type+16(FP), R19 // want `unknown variable y_type; offset 16 is y_itable\+16\(FP\)`
+ MOVH y_data+16(FP), R19 // want `invalid offset y_data\+16\(FP\); expected y_data\+24\(FP\)`
+ MOVW y_data+16(FP), R19 // want `invalid offset y_data\+16\(FP\); expected y_data\+24\(FP\)`
+ MOVV y_data+16(FP), R19 // want `invalid offset y_data\+16\(FP\); expected y_data\+24\(FP\)`
+ MOVH y_data+24(FP), R19 // want `invalid MOVH of y_data\+24\(FP\); interface data is 8-byte value`
+ MOVW y_data+24(FP), R19 // want `invalid MOVW of y_data\+24\(FP\); interface data is 8-byte value`
+ MOVV y_data+24(FP), R19
+ RET
+
+TEXT ·returnint(SB),0,$0-8
+ MOVB R19, ret+0(FP) // want `invalid MOVB of ret\+0\(FP\); int is 8-byte value`
+ MOVH R19, ret+0(FP) // want `invalid MOVH of ret\+0\(FP\); int is 8-byte value`
+ MOVW R19, ret+0(FP) // want `invalid MOVW of ret\+0\(FP\); int is 8-byte value`
+ MOVV R19, ret+0(FP)
+ MOVV R19, ret+1(FP) // want `invalid offset ret\+1\(FP\); expected ret\+0\(FP\)`
+ MOVV R19, r+0(FP) // want `unknown variable r; offset 0 is ret\+0\(FP\)`
+ RET
+
+TEXT ·returnbyte(SB),0,$0-9
+ MOVV x+0(FP), R19
+ MOVB R19, ret+8(FP)
+ MOVH R19, ret+8(FP) // want `invalid MOVH of ret\+8\(FP\); byte is 1-byte value`
+ MOVW R19, ret+8(FP) // want `invalid MOVW of ret\+8\(FP\); byte is 1-byte value`
+ MOVV R19, ret+8(FP) // want `invalid MOVV of ret\+8\(FP\); byte is 1-byte value`
+ MOVB R19, ret+7(FP) // want `invalid offset ret\+7\(FP\); expected ret\+8\(FP\)`
+ RET
+
+TEXT ·returnnamed(SB),0,$0-41
+ MOVB x+0(FP), R19
+ MOVV R19, r1+8(FP)
+ MOVH R19, r2+16(FP)
+ MOVV R19, r3+24(FP)
+ MOVV R19, r3_base+24(FP)
+ MOVV R19, r3_len+32(FP)
+ MOVB R19, r4+40(FP)
+ MOVW R19, r1+8(FP) // want `invalid MOVW of r1\+8\(FP\); int is 8-byte value`
+ RET
+
+TEXT ·returnintmissing(SB),0,$0-8
+ RET // want `RET without writing to 8-byte ret\+0\(FP\)`
diff --git a/go/analysis/passes/asmdecl/testdata/src/a/asm11.s b/go/analysis/passes/asmdecl/testdata/src/a/asm11.s
new file mode 100644
index 000000000..e81e8ee17
--- /dev/null
+++ b/go/analysis/passes/asmdecl/testdata/src/a/asm11.s
@@ -0,0 +1,13 @@
+// Copyright 2022 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// +build riscv64
+
+// writing to result in ABIInternal function
+TEXT ·returnABIInternal<ABIInternal>(SB), NOSPLIT, $8
+ MOV $123, X10
+ RET
+TEXT ·returnmissingABIInternal<ABIInternal>(SB), NOSPLIT, $8
+ MOV $123, X20
+ RET // want `RET without writing to result register`
diff --git a/go/analysis/passes/assign/assign.go b/go/analysis/passes/assign/assign.go
index 3586638ef..89146b733 100644
--- a/go/analysis/passes/assign/assign.go
+++ b/go/analysis/passes/assign/assign.go
@@ -12,6 +12,7 @@ import (
"fmt"
"go/ast"
"go/token"
+ "go/types"
"reflect"
"golang.org/x/tools/go/analysis"
@@ -51,7 +52,8 @@ func run(pass *analysis.Pass) (interface{}, error) {
for i, lhs := range stmt.Lhs {
rhs := stmt.Rhs[i]
if analysisutil.HasSideEffects(pass.TypesInfo, lhs) ||
- analysisutil.HasSideEffects(pass.TypesInfo, rhs) {
+ analysisutil.HasSideEffects(pass.TypesInfo, rhs) ||
+ isMapIndex(pass.TypesInfo, lhs) {
continue // expressions may not be equal
}
if reflect.TypeOf(lhs) != reflect.TypeOf(rhs) {
@@ -74,3 +76,14 @@ func run(pass *analysis.Pass) (interface{}, error) {
return nil, nil
}
+
+// isMapIndex returns true if e is a map index expression.
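+// Map index expressions are excluded from the self-assignment check
+// because m[k] = m[k] is not a no-op: it inserts the key k if it is
+// missing from the map.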
+func isMapIndex(info *types.Info, e ast.Expr) bool {
+ if idx, ok := analysisutil.Unparen(e).(*ast.IndexExpr); ok {
+ if typ := info.Types[idx.X].Type; typ != nil {
+ _, ok := typ.Underlying().(*types.Map)
+ return ok
+ }
+ }
+ return false
+}
diff --git a/go/analysis/passes/assign/testdata/src/a/a.go b/go/analysis/passes/assign/testdata/src/a/a.go
index eaec634d1..f9663120b 100644
--- a/go/analysis/passes/assign/testdata/src/a/a.go
+++ b/go/analysis/passes/assign/testdata/src/a/a.go
@@ -29,3 +29,31 @@ func (s *ST) SetX(x int, ch chan int) {
}
func num() int { return 2 }
+
+func Index() {
+ s := []int{1}
+ s[0] = s[0] // want "self-assignment"
+
+ var a [5]int
+ a[0] = a[0] // want "self-assignment"
+
+ pa := &[2]int{1, 2}
+ pa[1] = pa[1] // want "self-assignment"
+
+ var pss *struct { // report self assignment despite nil dereference
+ s []int
+ }
+ pss.s[0] = pss.s[0] // want "self-assignment"
+
+ m := map[int]string{1: "a"}
+ m[0] = m[0] // bail on map self-assignments due to side effects
+ m[1] = m[1] // not modeling what elements must be in the map
+ (m[2]) = (m[2]) // even with parens
+ type Map map[string]bool
+ named := make(Map)
+ named["s"] = named["s"] // even on named maps.
+ var psm *struct {
+ m map[string]int
+ }
+ psm.m["key"] = psm.m["key"] // handles dereferences
+}
diff --git a/go/analysis/passes/assign/testdata/src/a/a.go.golden b/go/analysis/passes/assign/testdata/src/a/a.go.golden
index 6c91d3666..f45b7f208 100644
--- a/go/analysis/passes/assign/testdata/src/a/a.go.golden
+++ b/go/analysis/passes/assign/testdata/src/a/a.go.golden
@@ -29,3 +29,31 @@ func (s *ST) SetX(x int, ch chan int) {
}
func num() int { return 2 }
+
+func Index() {
+ s := []int{1}
+ // want "self-assignment"
+
+ var a [5]int
+ // want "self-assignment"
+
+ pa := &[2]int{1, 2}
+ // want "self-assignment"
+
+ var pss *struct { // report self assignment despite nil dereference
+ s []int
+ }
+ // want "self-assignment"
+
+ m := map[int]string{1: "a"}
+ m[0] = m[0] // bail on map self-assignments due to side effects
+ m[1] = m[1] // not modeling what elements must be in the map
+ (m[2]) = (m[2]) // even with parens
+ type Map map[string]bool
+ named := make(Map)
+ named["s"] = named["s"] // even on named maps.
+ var psm *struct {
+ m map[string]int
+ }
+ psm.m["key"] = psm.m["key"] // handles dereferences
+}
diff --git a/go/analysis/passes/bools/bools.go b/go/analysis/passes/bools/bools.go
index 5ae47d894..0d8b0bf4f 100644
--- a/go/analysis/passes/bools/bools.go
+++ b/go/analysis/passes/bools/bools.go
@@ -94,8 +94,10 @@ func (op boolOp) commutativeSets(info *types.Info, e *ast.BinaryExpr, seen map[*
}
// checkRedundant checks for expressions of the form
-// e && e
-// e || e
+//
+// e && e
+// e || e
+//
// Exprs must contain only side effect free expressions.
func (op boolOp) checkRedundant(pass *analysis.Pass, exprs []ast.Expr) {
seen := make(map[string]bool)
@@ -110,8 +112,10 @@ func (op boolOp) checkRedundant(pass *analysis.Pass, exprs []ast.Expr) {
}
// checkSuspect checks for expressions of the form
-// x != c1 || x != c2
-// x == c1 && x == c2
+//
+// x != c1 || x != c2
+// x == c1 && x == c2
+//
// where c1 and c2 are constant expressions.
// If c1 and c2 are the same then it's redundant;
// if c1 and c2 are different then it's always true or always false.
diff --git a/go/analysis/passes/buildssa/buildssa_test.go b/go/analysis/passes/buildssa/buildssa_test.go
index 0b381500b..52f7e7aa6 100644
--- a/go/analysis/passes/buildssa/buildssa_test.go
+++ b/go/analysis/passes/buildssa/buildssa_test.go
@@ -11,6 +11,7 @@ import (
"golang.org/x/tools/go/analysis/analysistest"
"golang.org/x/tools/go/analysis/passes/buildssa"
+ "golang.org/x/tools/internal/typeparams"
)
func Test(t *testing.T) {
@@ -27,3 +28,39 @@ func Test(t *testing.T) {
}
}
}
+
+func TestGenericDecls(t *testing.T) {
+ if !typeparams.Enabled {
+ t.Skip("TestGenericDecls requires type parameters.")
+ }
+ testdata := analysistest.TestData()
+ result := analysistest.Run(t, testdata, buildssa.Analyzer, "b")[0].Result
+
+ ssainfo := result.(*buildssa.SSA)
+ got := fmt.Sprint(ssainfo.SrcFuncs)
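+ // Generic functions and methods appear in SrcFuncs in their
+ // uninstantiated form.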
+ want := `[(*b.Pointer[T]).Load b.Load b.LoadPointer]`
+ if got != want {
+ t.Errorf("SSA.SrcFuncs = %s, want %s", got, want)
+ for _, f := range ssainfo.SrcFuncs {
+ f.WriteTo(os.Stderr)
+ }
+ }
+}
+
+func TestImporting(t *testing.T) {
+ if !typeparams.Enabled {
+ t.Skip("TestImporting depends on testdata/b/b/go which uses type parameters.")
+ }
+ testdata := analysistest.TestData()
+ result := analysistest.Run(t, testdata, buildssa.Analyzer, "c")[0].Result
+
+ ssainfo := result.(*buildssa.SSA)
+ got := fmt.Sprint(ssainfo.SrcFuncs)
+ want := `[c.A c.B]`
+ if got != want {
+ t.Errorf("SSA.SrcFuncs = %s, want %s", got, want)
+ for _, f := range ssainfo.SrcFuncs {
+ f.WriteTo(os.Stderr)
+ }
+ }
+}
diff --git a/go/analysis/passes/buildssa/testdata/src/b/b.go b/go/analysis/passes/buildssa/testdata/src/b/b.go
new file mode 100644
index 000000000..dd029cf60
--- /dev/null
+++ b/go/analysis/passes/buildssa/testdata/src/b/b.go
@@ -0,0 +1,20 @@
+// Package b contains declarations of generic functions.
+package b
+
+import "unsafe"
+
+type Pointer[T any] struct {
+ v unsafe.Pointer
+}
+
+func (x *Pointer[T]) Load() *T {
+ return (*T)(LoadPointer(&x.v))
+}
+
+func Load[T any](x *Pointer[T]) *T {
+ return x.Load()
+}
+
+func LoadPointer(addr *unsafe.Pointer) (val unsafe.Pointer)
+
+var G Pointer[int]
diff --git a/go/analysis/passes/buildssa/testdata/src/c/c.go b/go/analysis/passes/buildssa/testdata/src/c/c.go
new file mode 100644
index 000000000..387a3b0ed
--- /dev/null
+++ b/go/analysis/passes/buildssa/testdata/src/c/c.go
@@ -0,0 +1,24 @@
+// Package c tests that buildssa handles imported packages.
+package c
+
+import (
+ "a"
+ "b"
+ "unsafe"
+)
+
+func A() {
+ _ = a.Fib(10)
+}
+
+func B() {
+ var x int
+ ptr := unsafe.Pointer(&x)
+ _ = b.LoadPointer(&ptr)
+
+ m := b.G.Load()
+ f := b.Load(&b.G)
+ if f != m {
+ panic("loads of b.G are expected to be indentical")
+ }
+}
diff --git a/go/analysis/passes/buildtag/buildtag.go b/go/analysis/passes/buildtag/buildtag.go
index c4407ad91..775e507a3 100644
--- a/go/analysis/passes/buildtag/buildtag.go
+++ b/go/analysis/passes/buildtag/buildtag.go
@@ -20,7 +20,7 @@ import (
"golang.org/x/tools/go/analysis/passes/internal/analysisutil"
)
-const Doc = "check that +build tags are well-formed and correctly located"
+const Doc = "check //go:build and // +build directives"
var Analyzer = &analysis.Analyzer{
Name: "buildtag",
diff --git a/go/analysis/passes/buildtag/buildtag_old.go b/go/analysis/passes/buildtag/buildtag_old.go
index e9234925f..0001ba536 100644
--- a/go/analysis/passes/buildtag/buildtag_old.go
+++ b/go/analysis/passes/buildtag/buildtag_old.go
@@ -22,7 +22,7 @@ import (
"golang.org/x/tools/go/analysis/passes/internal/analysisutil"
)
-const Doc = "check that +build tags are well-formed and correctly located"
+const Doc = "check // +build directives"
var Analyzer = &analysis.Analyzer{
Name: "buildtag",
diff --git a/go/analysis/passes/cgocall/cgocall.go b/go/analysis/passes/cgocall/cgocall.go
index 5768d0b9b..b61ee5c3d 100644
--- a/go/analysis/passes/cgocall/cgocall.go
+++ b/go/analysis/passes/cgocall/cgocall.go
@@ -122,8 +122,8 @@ func checkCgo(fset *token.FileSet, f *ast.File, info *types.Info, reportf func(t
// For example, for each raw cgo source file in the original package,
// such as this one:
//
-// package p
-// import "C"
+// package p
+// import "C"
// import "fmt"
// type T int
// const k = 3
@@ -147,9 +147,9 @@ func checkCgo(fset *token.FileSet, f *ast.File, info *types.Info, reportf func(t
// the receiver into the first parameter;
// and all functions are renamed to "_".
//
-// package p
-// import . "·this·" // declares T, k, x, y, f, g, T.f
-// import "C"
+// package p
+// import . "·this·" // declares T, k, x, y, f, g, T.f
+// import "C"
// import "fmt"
// const _ = 3
// var _, _ = fmt.Println()
@@ -169,7 +169,6 @@ func checkCgo(fset *token.FileSet, f *ast.File, info *types.Info, reportf func(t
// C.f would resolve to "·this·"._C_func_f, for example. But we have
// limited ourselves here to preserving function bodies and initializer
// expressions since that is all that the cgocall analyzer needs.
-//
func typeCheckCgoSourceFiles(fset *token.FileSet, pkg *types.Package, files []*ast.File, info *types.Info, sizes types.Sizes) ([]*ast.File, *types.Info, error) {
const thispkg = "·this·"
@@ -284,8 +283,9 @@ func typeCheckCgoSourceFiles(fset *token.FileSet, pkg *types.Package, files []*a
// cgoBaseType tries to look through type conversions involving
// unsafe.Pointer to find the real type. It converts:
-// unsafe.Pointer(x) => x
-// *(*unsafe.Pointer)(unsafe.Pointer(&x)) => x
+//
+// unsafe.Pointer(x) => x
+// *(*unsafe.Pointer)(unsafe.Pointer(&x)) => x
func cgoBaseType(info *types.Info, arg ast.Expr) types.Type {
switch arg := arg.(type) {
case *ast.CallExpr:
diff --git a/go/analysis/passes/composite/composite.go b/go/analysis/passes/composite/composite.go
index d3670aca9..64e184d34 100644
--- a/go/analysis/passes/composite/composite.go
+++ b/go/analysis/passes/composite/composite.go
@@ -7,6 +7,7 @@
package composite
import (
+ "fmt"
"go/ast"
"go/types"
"strings"
@@ -83,7 +84,8 @@ func run(pass *analysis.Pass) (interface{}, error) {
}
for _, typ := range structuralTypes {
under := deref(typ.Underlying())
- if _, ok := under.(*types.Struct); !ok {
+ strct, ok := under.(*types.Struct)
+ if !ok {
// skip non-struct composite literals
continue
}
@@ -92,20 +94,47 @@ func run(pass *analysis.Pass) (interface{}, error) {
continue
}
- // check if the CompositeLit contains an unkeyed field
+ // check if the struct contains an unkeyed field
allKeyValue := true
- for _, e := range cl.Elts {
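+ // A fix can be offered only if the literal has exactly one element
+ // per struct field and every field that needs a name is exported.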
+ var suggestedFixAvailable = len(cl.Elts) == strct.NumFields()
+ var missingKeys []analysis.TextEdit
+ for i, e := range cl.Elts {
if _, ok := e.(*ast.KeyValueExpr); !ok {
allKeyValue = false
- break
+ if i >= strct.NumFields() {
+ break
+ }
+ field := strct.Field(i)
+ if !field.Exported() {
+ // Adding unexported field names for structs not defined
+ // locally will not work.
+ suggestedFixAvailable = false
+ break
+ }
+ missingKeys = append(missingKeys, analysis.TextEdit{
+ Pos: e.Pos(),
+ End: e.Pos(),
+ NewText: []byte(fmt.Sprintf("%s: ", field.Name())),
+ })
}
}
if allKeyValue {
- // all the composite literal fields are keyed
+ // all the struct fields are keyed
continue
}
- pass.ReportRangef(cl, "%s composite literal uses unkeyed fields", typeName)
+ diag := analysis.Diagnostic{
+ Pos: cl.Pos(),
+ End: cl.End(),
+ Message: fmt.Sprintf("%s struct literal uses unkeyed fields", typeName),
+ }
+ if suggestedFixAvailable {
+ diag.SuggestedFixes = []analysis.SuggestedFix{{
+ Message: "Add field names to struct literal",
+ TextEdits: missingKeys,
+ }}
+ }
+ pass.Report(diag)
return
}
})
diff --git a/go/analysis/passes/composite/composite_test.go b/go/analysis/passes/composite/composite_test.go
index 952de8bfd..7afaaa7ff 100644
--- a/go/analysis/passes/composite/composite_test.go
+++ b/go/analysis/passes/composite/composite_test.go
@@ -18,5 +18,5 @@ func Test(t *testing.T) {
if typeparams.Enabled {
pkgs = append(pkgs, "typeparams")
}
- analysistest.Run(t, testdata, composite.Analyzer, pkgs...)
+ analysistest.RunWithSuggestedFixes(t, testdata, composite.Analyzer, pkgs...)
}
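
RunWithSuggestedFixes does everything Run does and additionally applies each diagnostic's SuggestedFixes, comparing the result against a sibling file with a .golden suffix — which is why new golden files appear in the hunks below. Sketch of the convention:

	// testdata/src/a/a.go        <- analyzed source with // want comments
	// testdata/src/a/a.go.golden <- expected source after the fixes are applied
	analysistest.RunWithSuggestedFixes(t, analysistest.TestData(), composite.Analyzer, "a")
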
diff --git a/go/analysis/passes/composite/testdata/src/a/a.go b/go/analysis/passes/composite/testdata/src/a/a.go
index 3a5bc203b..cd69d3951 100644
--- a/go/analysis/passes/composite/testdata/src/a/a.go
+++ b/go/analysis/passes/composite/testdata/src/a/a.go
@@ -11,6 +11,7 @@ import (
"go/scanner"
"go/token"
"image"
+ "sync"
"unicode"
)
@@ -79,6 +80,18 @@ var badStructLiteral = flag.Flag{ // want "unkeyed fields"
nil, // Value
"DefValue",
}
+var tooManyFieldsStructLiteral = flag.Flag{ // want "unkeyed fields"
+ "Name",
+ "Usage",
+ nil, // Value
+ "DefValue",
+ "Extra Field",
+}
+var tooFewFieldsStructLiteral = flag.Flag{ // want "unkeyed fields"
+ "Name",
+ "Usage",
+ nil, // Value
+}
var delta [3]rune
@@ -100,6 +113,10 @@ var badScannerErrorList = scanner.ErrorList{
&scanner.Error{token.Position{}, "foobar"}, // want "unkeyed fields"
}
+// sync.Mutex has unexported fields. We expect a diagnostic but no
+// suggested fix.
+var mu = sync.Mutex{0, 0} // want "unkeyed fields"
+
// Check whitelisted structs: if vet is run with --compositewhitelist=false,
// this line triggers an error.
var whitelistedPoint = image.Point{1, 2}
diff --git a/go/analysis/passes/composite/testdata/src/a/a.go.golden b/go/analysis/passes/composite/testdata/src/a/a.go.golden
new file mode 100644
index 000000000..fe73a2e0a
--- /dev/null
+++ b/go/analysis/passes/composite/testdata/src/a/a.go.golden
@@ -0,0 +1,144 @@
+// Copyright 2012 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// This file contains the test for untagged struct literals.
+
+package a
+
+import (
+ "flag"
+ "go/scanner"
+ "go/token"
+ "image"
+ "sync"
+ "unicode"
+)
+
+var Okay1 = []string{
+ "Name",
+ "Usage",
+ "DefValue",
+}
+
+var Okay2 = map[string]bool{
+ "Name": true,
+ "Usage": true,
+ "DefValue": true,
+}
+
+var Okay3 = struct {
+ X string
+ Y string
+ Z string
+}{
+ "Name",
+ "Usage",
+ "DefValue",
+}
+
+var Okay4 = []struct {
+ A int
+ B int
+}{
+ {1, 2},
+ {3, 4},
+}
+
+type MyStruct struct {
+ X string
+ Y string
+ Z string
+}
+
+var Okay5 = &MyStruct{
+ "Name",
+ "Usage",
+ "DefValue",
+}
+
+var Okay6 = []MyStruct{
+ {"foo", "bar", "baz"},
+ {"aa", "bb", "cc"},
+}
+
+var Okay7 = []*MyStruct{
+ {"foo", "bar", "baz"},
+ {"aa", "bb", "cc"},
+}
+
+// Testing is awkward because we need to reference things from a separate package
+// to trigger the warnings.
+
+var goodStructLiteral = flag.Flag{
+ Name: "Name",
+ Usage: "Usage",
+}
+var badStructLiteral = flag.Flag{ // want "unkeyed fields"
+ Name: "Name",
+ Usage: "Usage",
+ Value: nil, // Value
+ DefValue: "DefValue",
+}
+var tooManyFieldsStructLiteral = flag.Flag{ // want "unkeyed fields"
+ "Name",
+ "Usage",
+ nil, // Value
+ "DefValue",
+ "Extra Field",
+}
+var tooFewFieldsStructLiteral = flag.Flag{ // want "unkeyed fields"
+ "Name",
+ "Usage",
+ nil, // Value
+}
+
+var delta [3]rune
+
+// SpecialCase is a named slice of CaseRange to test issue 9171.
+var goodNamedSliceLiteral = unicode.SpecialCase{
+ {Lo: 1, Hi: 2, Delta: delta},
+ unicode.CaseRange{Lo: 1, Hi: 2, Delta: delta},
+}
+var badNamedSliceLiteral = unicode.SpecialCase{
+ {Lo: 1, Hi: 2, Delta: delta}, // want "unkeyed fields"
+ unicode.CaseRange{Lo: 1, Hi: 2, Delta: delta}, // want "unkeyed fields"
+}
+
+// ErrorList is a named slice, so no warnings should be emitted.
+var goodScannerErrorList = scanner.ErrorList{
+ &scanner.Error{Msg: "foobar"},
+}
+var badScannerErrorList = scanner.ErrorList{
+ &scanner.Error{Pos: token.Position{}, Msg: "foobar"}, // want "unkeyed fields"
+}
+
+// sync.Mutex has unexported fields. We expect a diagnostic but no
+// suggested fix.
+var mu = sync.Mutex{0, 0} // want "unkeyed fields"
+
+// Check whitelisted structs: if vet is run with --compositewhitelist=false,
+// this line triggers an error.
+var whitelistedPoint = image.Point{1, 2}
+
+// Do not check type from unknown package.
+// See issue 15408.
+var unknownPkgVar = unicode.NoSuchType{"foo", "bar"}
+
+// A named pointer slice of CaseRange to test issue 23539. In
+// particular, we're interested in how some slice elements omit their
+// type.
+var goodNamedPointerSliceLiteral = []*unicode.CaseRange{
+ {Lo: 1, Hi: 2},
+ &unicode.CaseRange{Lo: 1, Hi: 2},
+}
+var badNamedPointerSliceLiteral = []*unicode.CaseRange{
+ {Lo: 1, Hi: 2, Delta: delta}, // want "unkeyed fields"
+ &unicode.CaseRange{Lo: 1, Hi: 2, Delta: delta}, // want "unkeyed fields"
+}
+
+// unicode.Range16 is whitelisted, so there'll be no vet error
+var range16 = unicode.Range16{0xfdd0, 0xfdef, 1}
+
+// unicode.Range32 is whitelisted, so there'll be no vet error
+var range32 = unicode.Range32{0x1fffe, 0x1ffff, 1}
diff --git a/go/analysis/passes/composite/testdata/src/a/a_fuzz_test.go.golden b/go/analysis/passes/composite/testdata/src/a/a_fuzz_test.go.golden
new file mode 100644
index 000000000..20b652e88
--- /dev/null
+++ b/go/analysis/passes/composite/testdata/src/a/a_fuzz_test.go.golden
@@ -0,0 +1,16 @@
+// Copyright 2022 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+//go:build go1.18
+// +build go1.18
+
+package a
+
+import "testing"
+
+var fuzzTargets = []testing.InternalFuzzTarget{
+ {"Fuzz", Fuzz},
+}
+
+func Fuzz(f *testing.F) {}
diff --git a/go/analysis/passes/composite/testdata/src/typeparams/typeparams.go b/go/analysis/passes/composite/testdata/src/typeparams/typeparams.go
index dd5d57efe..f9a5e1fb1 100644
--- a/go/analysis/passes/composite/testdata/src/typeparams/typeparams.go
+++ b/go/analysis/passes/composite/testdata/src/typeparams/typeparams.go
@@ -6,7 +6,7 @@ package typeparams
import "typeparams/lib"
-type localStruct struct { F int }
+type localStruct struct{ F int }
func F[
T1 ~struct{ f int },
@@ -20,8 +20,8 @@ func F[
_ = T1{2}
_ = T2a{2}
_ = T2b{2} // want "unkeyed fields"
- _ = T3{1,2}
- _ = T4{1,2}
- _ = T5{1:2}
- _ = T6{1:2}
+ _ = T3{1, 2}
+ _ = T4{1, 2}
+ _ = T5{1: 2}
+ _ = T6{1: 2}
}
diff --git a/go/analysis/passes/composite/testdata/src/typeparams/typeparams.go.golden b/go/analysis/passes/composite/testdata/src/typeparams/typeparams.go.golden
new file mode 100644
index 000000000..66cd9158c
--- /dev/null
+++ b/go/analysis/passes/composite/testdata/src/typeparams/typeparams.go.golden
@@ -0,0 +1,27 @@
+// Copyright 2021 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package typeparams
+
+import "typeparams/lib"
+
+type localStruct struct{ F int }
+
+func F[
+ T1 ~struct{ f int },
+ T2a localStruct,
+ T2b lib.Struct,
+ T3 ~[]int,
+ T4 lib.Slice,
+ T5 ~map[int]int,
+ T6 lib.Map,
+]() {
+ _ = T1{2}
+ _ = T2a{2}
+ _ = T2b{F: 2} // want "unkeyed fields"
+ _ = T3{1, 2}
+ _ = T4{1, 2}
+ _ = T5{1: 2}
+ _ = T6{1: 2}
+}
diff --git a/go/analysis/passes/copylock/copylock.go b/go/analysis/passes/copylock/copylock.go
index 350dc4e0f..8cc93e94d 100644
--- a/go/analysis/passes/copylock/copylock.go
+++ b/go/analysis/passes/copylock/copylock.go
@@ -128,7 +128,7 @@ func checkCopyLocksCallExpr(pass *analysis.Pass, ce *ast.CallExpr) {
}
if fun, ok := pass.TypesInfo.Uses[id].(*types.Builtin); ok {
switch fun.Name() {
- case "new", "len", "cap", "Sizeof":
+ case "new", "len", "cap", "Sizeof", "Offsetof", "Alignof":
return
}
}
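
The rationale: like Sizeof, the Offsetof and Alignof builtins only examine their operand's type and never copy a value, so applying them to a mutex-typed field cannot leak a lock copy. A small sketch of code the analyzer now leaves alone (guarded is an illustrative type):

	package p

	import (
		"sync"
		"unsafe"
	)

	type guarded struct {
		n  int
		mu sync.Mutex
	}

	var (
		// These operate on the type only; no lock value is copied,
		// so copylock stays silent after this change.
		offset = unsafe.Offsetof(guarded{}.mu)
		align  = unsafe.Alignof(guarded{}.mu)
		size   = unsafe.Sizeof(guarded{}.mu)
	)
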
diff --git a/go/analysis/passes/copylock/testdata/src/a/copylock.go b/go/analysis/passes/copylock/testdata/src/a/copylock.go
index e528280ab..4ab66dca1 100644
--- a/go/analysis/passes/copylock/testdata/src/a/copylock.go
+++ b/go/analysis/passes/copylock/testdata/src/a/copylock.go
@@ -50,27 +50,27 @@ func BadFunc() {
var t Tlock
var tp *Tlock
tp = &t
- *tp = t // want `assignment copies lock value to \*tp: a.Tlock contains sync.Once contains sync.Mutex`
- t = *tp // want "assignment copies lock value to t: a.Tlock contains sync.Once contains sync.Mutex"
+ *tp = t // want `assignment copies lock value to \*tp: a.Tlock contains sync.Once contains sync\b.*`
+ t = *tp // want `assignment copies lock value to t: a.Tlock contains sync.Once contains sync\b.*`
y := *x // want "assignment copies lock value to y: sync.Mutex"
- var z = t // want "variable declaration copies lock value to z: a.Tlock contains sync.Once contains sync.Mutex"
+ var z = t // want `variable declaration copies lock value to z: a.Tlock contains sync.Once contains sync\b.*`
w := struct{ L sync.Mutex }{
L: *x, // want `literal copies lock value from \*x: sync.Mutex`
}
var q = map[int]Tlock{
- 1: t, // want "literal copies lock value from t: a.Tlock contains sync.Once contains sync.Mutex"
- 2: *tp, // want `literal copies lock value from \*tp: a.Tlock contains sync.Once contains sync.Mutex`
+ 1: t, // want `literal copies lock value from t: a.Tlock contains sync.Once contains sync\b.*`
+ 2: *tp, // want `literal copies lock value from \*tp: a.Tlock contains sync.Once contains sync\b.*`
}
yy := []Tlock{
- t, // want "literal copies lock value from t: a.Tlock contains sync.Once contains sync.Mutex"
- *tp, // want `literal copies lock value from \*tp: a.Tlock contains sync.Once contains sync.Mutex`
+ t, // want `literal copies lock value from t: a.Tlock contains sync.Once contains sync\b.*`
+ *tp, // want `literal copies lock value from \*tp: a.Tlock contains sync.Once contains sync\b.*`
}
// override 'new' keyword
new := func(interface{}) {}
- new(t) // want "call of new copies lock value: a.Tlock contains sync.Once contains sync.Mutex"
+ new(t) // want `call of new copies lock value: a.Tlock contains sync.Once contains sync\b.*`
// copy of array of locks
var muA [5]sync.Mutex
@@ -124,6 +124,26 @@ func SizeofMutex() {
Sizeof(mu) // want "call of Sizeof copies lock value: sync.Mutex"
}
+func OffsetofMutex() {
+ type T struct {
+ f int
+ mu sync.Mutex
+ }
+ unsafe.Offsetof(T{}.mu) // OK
+ unsafe := struct{ Offsetof func(interface{}) }{}
+ unsafe.Offsetof(T{}.mu) // want "call of unsafe.Offsetof copies lock value: sync.Mutex"
+}
+
+func AlignofMutex() {
+ type T struct {
+ f int
+ mu sync.Mutex
+ }
+ unsafe.Alignof(T{}.mu) // OK
+ unsafe := struct{ Alignof func(interface{}) }{}
+ unsafe.Alignof(T{}.mu) // want "call of unsafe.Alignof copies lock value: sync.Mutex"
+}
+
// SyncTypesCheck checks copying of sync.* types except sync.Mutex
func SyncTypesCheck() {
// sync.RWMutex copying
@@ -173,9 +193,9 @@ func SyncTypesCheck() {
var onceX sync.Once
var onceXX = sync.Once{}
onceX1 := new(sync.Once)
- onceY := onceX // want "assignment copies lock value to onceY: sync.Once contains sync.Mutex"
- onceY = onceX // want "assignment copies lock value to onceY: sync.Once contains sync.Mutex"
- var onceYY = onceX // want "variable declaration copies lock value to onceYY: sync.Once contains sync.Mutex"
+ onceY := onceX // want `assignment copies lock value to onceY: sync.Once contains sync\b.*`
+ onceY = onceX // want `assignment copies lock value to onceY: sync.Once contains sync\b.*`
+ var onceYY = onceX // want `variable declaration copies lock value to onceYY: sync.Once contains sync\b.*`
onceP := &onceX
onceZ := &sync.Once{}
}
diff --git a/go/analysis/passes/copylock/testdata/src/a/copylock_func.go b/go/analysis/passes/copylock/testdata/src/a/copylock_func.go
index 801bc6f24..0d3168f1e 100644
--- a/go/analysis/passes/copylock/testdata/src/a/copylock_func.go
+++ b/go/analysis/passes/copylock/testdata/src/a/copylock_func.go
@@ -126,7 +126,7 @@ func AcceptedCases() {
// sync.Mutex gets called out, but without any reference to the sync.Once.
type LocalOnce sync.Once
-func (LocalOnce) Bad() {} // want "Bad passes lock by value: a.LocalOnce contains sync.Mutex"
+func (LocalOnce) Bad() {} // want `Bad passes lock by value: a.LocalOnce contains sync.\b.*`
// False negative:
// LocalMutex doesn't have a Lock method.
diff --git a/go/analysis/passes/directive/directive.go b/go/analysis/passes/directive/directive.go
new file mode 100644
index 000000000..76d852cd0
--- /dev/null
+++ b/go/analysis/passes/directive/directive.go
@@ -0,0 +1,216 @@
+// Copyright 2023 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Package directive defines an Analyzer that checks known Go toolchain directives.
+package directive
+
+import (
+ "go/ast"
+ "go/parser"
+ "go/token"
+ "strings"
+ "unicode"
+ "unicode/utf8"
+
+ "golang.org/x/tools/go/analysis"
+ "golang.org/x/tools/go/analysis/passes/internal/analysisutil"
+)
+
+const Doc = `check Go toolchain directives such as //go:debug
+
+This analyzer checks for problems with known Go toolchain directives
+in all Go source files in a package directory, even those excluded by
+//go:build constraints, and all non-Go source files too.
+
+For //go:debug (see https://go.dev/doc/godebug), the analyzer checks
+that the directives are placed only in Go source files, only above the
+package comment, and only in package main or *_test.go files.
+
+Support for other known directives may be added in the future.
+
+This analyzer does not check //go:build, which is handled by the
+buildtag analyzer.
+`
+
+var Analyzer = &analysis.Analyzer{
+ Name: "directive",
+ Doc: Doc,
+ Run: runDirective,
+}
+
+func runDirective(pass *analysis.Pass) (interface{}, error) {
+ for _, f := range pass.Files {
+ checkGoFile(pass, f)
+ }
+ for _, name := range pass.OtherFiles {
+ if err := checkOtherFile(pass, name); err != nil {
+ return nil, err
+ }
+ }
+ for _, name := range pass.IgnoredFiles {
+ if strings.HasSuffix(name, ".go") {
+ f, err := parser.ParseFile(pass.Fset, name, nil, parser.ParseComments)
+ if err != nil {
+ // Not valid Go source code - not our job to diagnose, so ignore.
+ continue
+ }
+ checkGoFile(pass, f)
+ } else {
+ if err := checkOtherFile(pass, name); err != nil {
+ return nil, err
+ }
+ }
+ }
+ return nil, nil
+}
+
+func checkGoFile(pass *analysis.Pass, f *ast.File) {
+ check := newChecker(pass, pass.Fset.File(f.Package).Name(), f)
+
+ for _, group := range f.Comments {
+ // A +build comment is ignored after or adjoining the package declaration.
+ if group.End()+1 >= f.Package {
+ check.inHeader = false
+ }
+ // A //go:build comment is ignored after the package declaration
+ // (but adjoining it is OK, in contrast to +build comments).
+ if group.Pos() >= f.Package {
+ check.inHeader = false
+ }
+
+ // Check each line of a //-comment.
+ for _, c := range group.List {
+ check.comment(c.Slash, c.Text)
+ }
+ }
+}
+
+func checkOtherFile(pass *analysis.Pass, filename string) error {
+	// We cannot use the Go parser, since it is not a Go source file.
+ // Read the raw bytes instead.
+ content, tf, err := analysisutil.ReadFile(pass.Fset, filename)
+ if err != nil {
+ return err
+ }
+
+ check := newChecker(pass, filename, nil)
+ check.nonGoFile(token.Pos(tf.Base()), string(content))
+ return nil
+}
+
+type checker struct {
+ pass *analysis.Pass
+ filename string
+ file *ast.File // nil for non-Go file
+ inHeader bool // in file header (before package declaration)
+ inStar bool // currently in a /* */ comment
+}
+
+func newChecker(pass *analysis.Pass, filename string, file *ast.File) *checker {
+ return &checker{
+ pass: pass,
+ filename: filename,
+ file: file,
+ inHeader: true,
+ }
+}
+
+func (check *checker) nonGoFile(pos token.Pos, fullText string) {
+ // Process each line.
+ text := fullText
+ inStar := false
+ for text != "" {
+ offset := len(fullText) - len(text)
+ var line string
+ line, text, _ = stringsCut(text, "\n")
+
+ if !inStar && strings.HasPrefix(line, "//") {
+ check.comment(pos+token.Pos(offset), line)
+ continue
+ }
+
+ // Skip over, cut out any /* */ comments,
+ // to avoid being confused by a commented-out // comment.
+ for {
+ line = strings.TrimSpace(line)
+ if inStar {
+ var ok bool
+ _, line, ok = stringsCut(line, "*/")
+ if !ok {
+ break
+ }
+ inStar = false
+ continue
+ }
+ line, inStar = stringsCutPrefix(line, "/*")
+ if !inStar {
+ break
+ }
+ }
+ if line != "" {
+ // Found non-comment non-blank line.
+ // Ends space for valid //go:build comments,
+ // but also ends the fraction of the file we can
+ // reliably parse. From this point on we might
+ // incorrectly flag "comments" inside multiline
+ // string constants or anything else (this might
+ // not even be a Go program). So stop.
+ break
+ }
+ }
+}
+
+func (check *checker) comment(pos token.Pos, line string) {
+ if !strings.HasPrefix(line, "//go:") {
+ return
+ }
+ // testing hack: stop at // ERROR
+ if i := strings.Index(line, " // ERROR "); i >= 0 {
+ line = line[:i]
+ }
+
+ verb := line
+ if i := strings.IndexFunc(verb, unicode.IsSpace); i >= 0 {
+ verb = verb[:i]
+ if line[i] != ' ' && line[i] != '\t' && line[i] != '\n' {
+ r, _ := utf8.DecodeRuneInString(line[i:])
+ check.pass.Reportf(pos, "invalid space %#q in %s directive", r, verb)
+ }
+ }
+
+ switch verb {
+ default:
+ // TODO: Use the go language version for the file.
+ // If that version is not newer than us, then we can
+ // report unknown directives.
+
+ case "//go:build":
+ // Ignore. The buildtag analyzer reports misplaced comments.
+
+ case "//go:debug":
+ if check.file == nil {
+ check.pass.Reportf(pos, "//go:debug directive only valid in Go source files")
+ } else if check.file.Name.Name != "main" && !strings.HasSuffix(check.filename, "_test.go") {
+ check.pass.Reportf(pos, "//go:debug directive only valid in package main or test")
+ } else if !check.inHeader {
+ check.pass.Reportf(pos, "//go:debug directive only valid before package declaration")
+ }
+ }
+}
+
+// Go 1.18 strings.Cut.
+func stringsCut(s, sep string) (before, after string, found bool) {
+ if i := strings.Index(s, sep); i >= 0 {
+ return s[:i], s[i+len(sep):], true
+ }
+ return s, "", false
+}
+
+// Go 1.20 strings.CutPrefix.
+func stringsCutPrefix(s, prefix string) (after string, found bool) {
+ if !strings.HasPrefix(s, prefix) {
+ return s, false
+ }
+ return s[len(prefix):], true
+}
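
To make the Doc rules concrete, here is a sketch of a placement the analyzer accepts: a //go:debug line in a main package, in the file header before the package clause and separated from it by a blank line (panicnil=1 is simply the setting used in the testdata below):

	// Accepted: //go:debug appears in the file header of package main,
	// before the package clause.
	//go:debug panicnil=1

	package main

	func main() {}
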
diff --git a/go/analysis/passes/directive/directive_test.go b/go/analysis/passes/directive/directive_test.go
new file mode 100644
index 000000000..a526c0d74
--- /dev/null
+++ b/go/analysis/passes/directive/directive_test.go
@@ -0,0 +1,39 @@
+// Copyright 2023 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package directive_test
+
+import (
+ "runtime"
+ "strings"
+ "testing"
+
+ "golang.org/x/tools/go/analysis"
+ "golang.org/x/tools/go/analysis/analysistest"
+ "golang.org/x/tools/go/analysis/passes/directive"
+)
+
+func Test(t *testing.T) {
+ if strings.HasPrefix(runtime.Version(), "go1.") && runtime.Version() < "go1.16" {
+ t.Skipf("skipping on %v", runtime.Version())
+ }
+ analyzer := *directive.Analyzer
+ analyzer.Run = func(pass *analysis.Pass) (interface{}, error) {
+ defer func() {
+ // The directive pass is unusual in that it checks the IgnoredFiles.
+ // After analysis, add IgnoredFiles to OtherFiles so that
+ // the test harness checks for expected diagnostics in those.
+ // (The test harness shouldn't do this by default because most
+ // passes can't do anything with the IgnoredFiles without type
+ // information, which is unavailable because they are ignored.)
+ var files []string
+ files = append(files, pass.OtherFiles...)
+ files = append(files, pass.IgnoredFiles...)
+ pass.OtherFiles = files
+ }()
+
+ return directive.Analyzer.Run(pass)
+ }
+ analysistest.Run(t, analysistest.TestData(), &analyzer, "a")
+}
diff --git a/go/analysis/passes/directive/testdata/src/a/badspace.go b/go/analysis/passes/directive/testdata/src/a/badspace.go
new file mode 100644
index 000000000..113139960
--- /dev/null
+++ b/go/analysis/passes/directive/testdata/src/a/badspace.go
@@ -0,0 +1,11 @@
+// Copyright 2023 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+//go:build ignore
+
+// want +1 `invalid space '\\u00a0' in //go:debug directive`
+//go:debug 00a0
+
+package main
+
diff --git a/go/analysis/passes/directive/testdata/src/a/misplaced.go b/go/analysis/passes/directive/testdata/src/a/misplaced.go
new file mode 100644
index 000000000..db30ceb47
--- /dev/null
+++ b/go/analysis/passes/directive/testdata/src/a/misplaced.go
@@ -0,0 +1,10 @@
+// Copyright 2023 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+//go:build ignore
+
+package main
+
+// want +1 `//go:debug directive only valid before package declaration`
+//go:debug panicnil=1
diff --git a/go/analysis/passes/directive/testdata/src/a/misplaced.s b/go/analysis/passes/directive/testdata/src/a/misplaced.s
new file mode 100644
index 000000000..9e26dbc52
--- /dev/null
+++ b/go/analysis/passes/directive/testdata/src/a/misplaced.s
@@ -0,0 +1,19 @@
+// Copyright 2023 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// want +1 `//go:debug directive only valid in Go source files`
+//go:debug panicnil=1
+
+/*
+can skip over comments
+//go:debug doesn't matter here
+*/
+
+// want +1 `//go:debug directive only valid in Go source files`
+//go:debug panicnil=1
+
+package a
+
+// no error here because we can't parse this far
+//go:debug panicnil=1
diff --git a/go/analysis/passes/directive/testdata/src/a/misplaced_test.go b/go/analysis/passes/directive/testdata/src/a/misplaced_test.go
new file mode 100644
index 000000000..6b4527a35
--- /dev/null
+++ b/go/analysis/passes/directive/testdata/src/a/misplaced_test.go
@@ -0,0 +1,10 @@
+// Copyright 2023 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+//go:debug panicnil=1
+
+package p_test
+
+// want +1 `//go:debug directive only valid before package declaration`
+//go:debug panicnil=1
diff --git a/go/analysis/passes/directive/testdata/src/a/p.go b/go/analysis/passes/directive/testdata/src/a/p.go
new file mode 100644
index 000000000..e1e3e6552
--- /dev/null
+++ b/go/analysis/passes/directive/testdata/src/a/p.go
@@ -0,0 +1,11 @@
+// Copyright 2023 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// want +1 `//go:debug directive only valid in package main or test`
+//go:debug panicnil=1
+
+package p
+
+// want +1 `//go:debug directive only valid in package main or test`
+//go:debug panicnil=1
diff --git a/go/analysis/passes/errorsas/errorsas.go b/go/analysis/passes/errorsas/errorsas.go
index 384f02557..96adad3ee 100644
--- a/go/analysis/passes/errorsas/errorsas.go
+++ b/go/analysis/passes/errorsas/errorsas.go
@@ -7,6 +7,7 @@
package errorsas
import (
+ "errors"
"go/ast"
"go/types"
@@ -50,26 +51,39 @@ func run(pass *analysis.Pass) (interface{}, error) {
if len(call.Args) < 2 {
return // not enough arguments, e.g. called with return values of another function
}
- if fn.FullName() == "errors.As" && !pointerToInterfaceOrError(pass, call.Args[1]) {
- pass.ReportRangef(call, "second argument to errors.As must be a non-nil pointer to either a type that implements error, or to any interface type")
+ if fn.FullName() != "errors.As" {
+ return
+ }
+ if err := checkAsTarget(pass, call.Args[1]); err != nil {
+ pass.ReportRangef(call, "%v", err)
}
})
return nil, nil
}
-var errorType = types.Universe.Lookup("error").Type().Underlying().(*types.Interface)
+var errorType = types.Universe.Lookup("error").Type()
// pointerToInterfaceOrError reports whether the type of e is a pointer to an interface or a type implementing error,
// or is the empty interface.
-func pointerToInterfaceOrError(pass *analysis.Pass, e ast.Expr) bool {
+
+// checkAsTarget reports an error if the second argument to errors.As is invalid.
+func checkAsTarget(pass *analysis.Pass, e ast.Expr) error {
t := pass.TypesInfo.Types[e].Type
if it, ok := t.Underlying().(*types.Interface); ok && it.NumMethods() == 0 {
- return true
+ // A target of interface{} is always allowed, since it often indicates
+ // a value forwarded from another source.
+ return nil
}
pt, ok := t.Underlying().(*types.Pointer)
if !ok {
- return false
+ return errors.New("second argument to errors.As must be a non-nil pointer to either a type that implements error, or to any interface type")
+ }
+ if pt.Elem() == errorType {
+ return errors.New("second argument to errors.As should not be *error")
}
_, ok = pt.Elem().Underlying().(*types.Interface)
- return ok || types.Implements(pt.Elem(), errorType)
+ if ok || types.Implements(pt.Elem(), errorType.Underlying().(*types.Interface)) {
+ return nil
+ }
+ return errors.New("second argument to errors.As must be a non-nil pointer to either a type that implements error, or to any interface type")
}
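
In user terms, the change splits the old message in two: a *error target now gets its own, more specific report. A sketch (MyErr and classify are illustrative names):

	package p

	import "errors"

	// MyErr is an illustrative error type.
	type MyErr struct{ msg string }

	func (e *MyErr) Error() string { return e.msg }

	func classify(err error) {
		var target *MyErr
		if errors.As(err, &target) { // OK: pointer to a type implementing error
			_ = target.msg
		}

		var e error
		_ = errors.As(err, &e) // now reported: second argument should not be *error
	}
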
diff --git a/go/analysis/passes/errorsas/testdata/src/a/a.go b/go/analysis/passes/errorsas/testdata/src/a/a.go
index c987a8a65..7a9ae8976 100644
--- a/go/analysis/passes/errorsas/testdata/src/a/a.go
+++ b/go/analysis/passes/errorsas/testdata/src/a/a.go
@@ -28,10 +28,10 @@ func _() {
f iface
ei interface{}
)
- errors.As(nil, &e) // *error
+ errors.As(nil, &e) // want `second argument to errors.As should not be \*error`
	errors.As(nil, &m)     // *T where T implements error
errors.As(nil, &f) // *interface
- errors.As(nil, perr()) // *error, via a call
+ errors.As(nil, perr()) // want `second argument to errors.As should not be \*error`
errors.As(nil, ei) // empty interface
errors.As(nil, nil) // want `second argument to errors.As must be a non-nil pointer to either a type that implements error, or to any interface type`
diff --git a/go/analysis/passes/errorsas/testdata/src/typeparams/typeparams.go b/go/analysis/passes/errorsas/testdata/src/typeparams/typeparams.go
index 5b9ec457c..4f7ae8491 100644
--- a/go/analysis/passes/errorsas/testdata/src/typeparams/typeparams.go
+++ b/go/analysis/passes/errorsas/testdata/src/typeparams/typeparams.go
@@ -28,7 +28,7 @@ func _[E error](e E) {
errors.As(nil, &e)
	errors.As(nil, &m)            // *T where T implements error
errors.As(nil, &tw.t) // *T where T implements error
- errors.As(nil, perr[error]()) // *error, via a call
+ errors.As(nil, perr[error]()) // want `second argument to errors.As should not be \*error`
errors.As(nil, e) // want `second argument to errors.As must be a non-nil pointer to either a type that implements error, or to any interface type`
errors.As(nil, m) // want `second argument to errors.As must be a non-nil pointer to either a type that implements error, or to any interface type`
diff --git a/go/analysis/passes/fieldalignment/fieldalignment.go b/go/analysis/passes/fieldalignment/fieldalignment.go
index 78afe94ab..aff663046 100644
--- a/go/analysis/passes/fieldalignment/fieldalignment.go
+++ b/go/analysis/passes/fieldalignment/fieldalignment.go
@@ -23,7 +23,7 @@ import (
const Doc = `find structs that would use less memory if their fields were sorted
This analyzer finds structs that can be rearranged to use less memory, and provides
-a suggested edit with the optimal order.
+a suggested edit with the most compact order.
Note that there are two different diagnostics reported. One checks struct size,
and the other reports "pointer bytes" used. Pointer bytes is how many bytes of the
@@ -41,6 +41,11 @@ has 24 pointer bytes because it has to scan further through the *uint32.
struct { string; uint32 }
has 8 because it can stop immediately after the string pointer.
+
+Be aware that the most compact order is not always the most efficient.
+In rare cases it may cause two variables each updated by its own goroutine
+to occupy the same CPU cache line, inducing a form of memory contention
+known as "false sharing" that slows down both goroutines.
`
var Analyzer = &analysis.Analyzer{
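
The new caveat in Doc is about cache behavior rather than correctness. A sketch of the trade-off it describes (the padding size is an assumption based on a typical 64-byte cache line, which the analyzer does not compute):

	package p

	// Most compact, but the two hot counters share a cache line, so writes
	// from different goroutines can contend (false sharing).
	type counters struct {
		a uint64 // updated only by goroutine 1
		b uint64 // updated only by goroutine 2
	}

	// A common mitigation is manual padding between the hot fields.
	type paddedCounters struct {
		a uint64
		_ [56]byte // assumes a 64-byte cache line; platform dependent
		b uint64
	}
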
diff --git a/go/analysis/passes/httpresponse/httpresponse.go b/go/analysis/passes/httpresponse/httpresponse.go
index fd9e2af2b..3b9168c6c 100644
--- a/go/analysis/passes/httpresponse/httpresponse.go
+++ b/go/analysis/passes/httpresponse/httpresponse.go
@@ -62,15 +62,23 @@ func run(pass *analysis.Pass) (interface{}, error) {
// Find the innermost containing block, and get the list
// of statements starting with the one containing call.
- stmts := restOfBlock(stack)
+ stmts, ncalls := restOfBlock(stack)
if len(stmts) < 2 {
- return true // the call to the http function is the last statement of the block.
+ // The call to the http function is the last statement of the block.
+ return true
+ }
+
+ // Skip cases in which the call is wrapped by another (#52661).
+ // Example: resp, err := checkError(http.Get(url))
+ if ncalls > 1 {
+ return true
}
asg, ok := stmts[0].(*ast.AssignStmt)
if !ok {
return true // the first statement is not assignment.
}
+
resp := rootIdent(asg.Lhs[0])
if resp == nil {
return true // could not find the http.Response in the assignment.
@@ -130,20 +138,25 @@ func isHTTPFuncOrMethodOnClient(info *types.Info, expr *ast.CallExpr) bool {
}
// restOfBlock, given a traversal stack, finds the innermost containing
-// block and returns the suffix of its statements starting with the
-// current node (the last element of stack).
-func restOfBlock(stack []ast.Node) []ast.Stmt {
+// block and returns the suffix of its statements starting with the current
+// node, along with the number of call expressions encountered.
+func restOfBlock(stack []ast.Node) ([]ast.Stmt, int) {
+ var ncalls int
for i := len(stack) - 1; i >= 0; i-- {
if b, ok := stack[i].(*ast.BlockStmt); ok {
for j, v := range b.List {
if v == stack[i+1] {
- return b.List[j:]
+ return b.List[j:], ncalls
}
}
break
}
+
+ if _, ok := stack[i].(*ast.CallExpr); ok {
+ ncalls++
+ }
}
- return nil
+ return nil, 0
}
// rootIdent finds the root identifier x in a chain of selections x.y.z, or nil if not found.
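
For contrast with the wrapped-call case now skipped, the shape the analyzer is steering code toward remains unflagged (fetch is an illustrative helper):

	package p

	import (
		"io"
		"net/http"
	)

	func fetch(url string) ([]byte, error) {
		resp, err := http.Get(url)
		if err != nil {
			return nil, err // check the error before touching resp
		}
		defer resp.Body.Close() // safe: resp is known to be non-nil here
		return io.ReadAll(resp.Body)
	}
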
diff --git a/go/analysis/passes/httpresponse/httpresponse_test.go b/go/analysis/passes/httpresponse/httpresponse_test.go
index 14e166789..34dc78ce2 100644
--- a/go/analysis/passes/httpresponse/httpresponse_test.go
+++ b/go/analysis/passes/httpresponse/httpresponse_test.go
@@ -5,10 +5,11 @@
package httpresponse_test
import (
+ "testing"
+
"golang.org/x/tools/go/analysis/analysistest"
"golang.org/x/tools/go/analysis/passes/httpresponse"
"golang.org/x/tools/internal/typeparams"
- "testing"
)
func Test(t *testing.T) {
diff --git a/go/analysis/passes/httpresponse/testdata/src/a/a.go b/go/analysis/passes/httpresponse/testdata/src/a/a.go
index df7703f41..de4121270 100644
--- a/go/analysis/passes/httpresponse/testdata/src/a/a.go
+++ b/go/analysis/passes/httpresponse/testdata/src/a/a.go
@@ -83,3 +83,30 @@ func badClientDo() {
log.Fatal(err)
}
}
+
+func goodUnwrapResp() {
+ unwrapResp := func(resp *http.Response, err error) *http.Response {
+ if err != nil {
+ panic(err)
+ }
+ return resp
+ }
+ resp := unwrapResp(http.Get("https://golang.org"))
+ // It is ok to call defer here immediately as err has
+ // been checked in unwrapResp (see #52661).
+ defer resp.Body.Close()
+}
+
+func badUnwrapResp() {
+ unwrapResp := func(resp *http.Response, err error) string {
+ if err != nil {
+ panic(err)
+ }
+ return "https://golang.org/" + resp.Status
+ }
+ resp, err := http.Get(unwrapResp(http.Get("https://golang.org")))
+ defer resp.Body.Close() // want "using resp before checking for errors"
+ if err != nil {
+ log.Fatal(err)
+ }
+}
diff --git a/go/analysis/passes/ifaceassert/parameterized.go b/go/analysis/passes/ifaceassert/parameterized.go
index 1285ecf13..b35f62dc7 100644
--- a/go/analysis/passes/ifaceassert/parameterized.go
+++ b/go/analysis/passes/ifaceassert/parameterized.go
@@ -1,6 +1,7 @@
// Copyright 2022 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
+
package ifaceassert
import (
diff --git a/go/analysis/passes/inspect/inspect.go b/go/analysis/passes/inspect/inspect.go
index 4bb652a72..165c70cbd 100644
--- a/go/analysis/passes/inspect/inspect.go
+++ b/go/analysis/passes/inspect/inspect.go
@@ -19,14 +19,13 @@
// Requires: []*analysis.Analyzer{inspect.Analyzer},
// }
//
-// func run(pass *analysis.Pass) (interface{}, error) {
-// inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector)
-// inspect.Preorder(nil, func(n ast.Node) {
-// ...
-// })
-// return nil
-// }
-//
+// func run(pass *analysis.Pass) (interface{}, error) {
+// inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector)
+// inspect.Preorder(nil, func(n ast.Node) {
+// ...
+// })
+// return nil, nil
+// }
package inspect
import (
diff --git a/go/analysis/passes/loopclosure/loopclosure.go b/go/analysis/passes/loopclosure/loopclosure.go
index 3ea91574d..ae5b4151d 100644
--- a/go/analysis/passes/loopclosure/loopclosure.go
+++ b/go/analysis/passes/loopclosure/loopclosure.go
@@ -18,19 +18,60 @@ import (
const Doc = `check references to loop variables from within nested functions
-This analyzer checks for references to loop variables from within a
-function literal inside the loop body. It checks only instances where
-the function literal is called in a defer or go statement that is the
-last statement in the loop body, as otherwise we would need whole
-program analysis.
+This analyzer reports places where a function literal references the
+iteration variable of an enclosing loop, and the loop calls the function
+in such a way (e.g. with go or defer) that it may outlive the loop
+iteration and possibly observe the wrong value of the variable.
-For example:
+In this example, all the deferred functions run after the loop has
+completed, so all observe the final value of v.
- for i, v := range s {
- go func() {
- println(i, v) // not what you might expect
- }()
- }
+ for _, v := range list {
+ defer func() {
+ use(v) // incorrect
+ }()
+ }
+
+One fix is to create a new variable for each iteration of the loop:
+
+ for _, v := range list {
+ v := v // new var per iteration
+ defer func() {
+ use(v) // ok
+ }()
+ }
+
+The next example uses a go statement and has a similar problem.
+In addition, it has a data race because the loop updates v
+concurrent with the goroutines accessing it.
+
+ for _, v := range elem {
+ go func() {
+ use(v) // incorrect, and a data race
+ }()
+ }
+
+A fix is the same as before. The checker also reports problems
+in goroutines started by golang.org/x/sync/errgroup.Group.
+A hard-to-spot variant of this form is common in parallel tests:
+
+ func Test(t *testing.T) {
+ for _, test := range tests {
+ t.Run(test.name, func(t *testing.T) {
+ t.Parallel()
+ use(test) // incorrect, and a data race
+ })
+ }
+ }
+
+The t.Parallel() call causes the rest of the function to execute
+concurrent with the loop.
+
+The analyzer reports references only in the last statement,
+as it is not deep enough to understand the effects of subsequent
+statements that might render the reference benign.
+("Last statement" is defined recursively in compound
+statements such as if, switch, and select.)
See: https://golang.org/doc/go_faq.html#closures_and_goroutines`
@@ -50,10 +91,12 @@ func run(pass *analysis.Pass) (interface{}, error) {
}
inspect.Preorder(nodeFilter, func(n ast.Node) {
// Find the variables updated by the loop statement.
- var vars []*ast.Ident
+ var vars []types.Object
addVar := func(expr ast.Expr) {
- if id, ok := expr.(*ast.Ident); ok {
- vars = append(vars, id)
+ if id, _ := expr.(*ast.Ident); id != nil {
+ if obj := pass.TypesInfo.ObjectOf(id); obj != nil {
+ vars = append(vars, obj)
+ }
}
}
var body *ast.BlockStmt
@@ -79,87 +122,312 @@ func run(pass *analysis.Pass) (interface{}, error) {
return
}
- // Inspect a go or defer statement
- // if it's the last one in the loop body.
- // (We give up if there are following statements,
- // because it's hard to prove go isn't followed by wait,
- // or defer by return.)
- if len(body.List) == 0 {
- return
- }
- // The function invoked in the last return statement.
- var fun ast.Expr
- switch s := body.List[len(body.List)-1].(type) {
- case *ast.GoStmt:
- fun = s.Call.Fun
- case *ast.DeferStmt:
- fun = s.Call.Fun
- case *ast.ExprStmt: // check for errgroup.Group.Go()
- if call, ok := s.X.(*ast.CallExpr); ok {
- fun = goInvokes(pass.TypesInfo, call)
- }
- }
- lit, ok := fun.(*ast.FuncLit)
- if !ok {
- return
- }
- ast.Inspect(lit.Body, func(n ast.Node) bool {
- id, ok := n.(*ast.Ident)
- if !ok || id.Obj == nil {
- return true
+ // Inspect statements to find function literals that may be run outside of
+ // the current loop iteration.
+ //
+ // For go, defer, and errgroup.Group.Go, we ignore all but the last
+ // statement, because it's hard to prove go isn't followed by wait, or
+ // defer by return. "Last" is defined recursively.
+ //
+ // TODO: consider allowing the "last" go/defer/Go statement to be followed by
+ // N "trivial" statements, possibly under a recursive definition of "trivial"
+		// so that the checker could, for example, conclude that a go statement is
+ // followed by an if statement made of only trivial statements and trivial expressions,
+ // and hence the go statement could still be checked.
+ forEachLastStmt(body.List, func(last ast.Stmt) {
+ var stmts []ast.Stmt
+ switch s := last.(type) {
+ case *ast.GoStmt:
+ stmts = litStmts(s.Call.Fun)
+ case *ast.DeferStmt:
+ stmts = litStmts(s.Call.Fun)
+ case *ast.ExprStmt: // check for errgroup.Group.Go
+ if call, ok := s.X.(*ast.CallExpr); ok {
+ stmts = litStmts(goInvoke(pass.TypesInfo, call))
+ }
}
- if pass.TypesInfo.Types[id].Type == nil {
- // Not referring to a variable (e.g. struct field name)
- return true
+ for _, stmt := range stmts {
+ reportCaptured(pass, vars, stmt)
}
- for _, v := range vars {
- if v.Obj == id.Obj {
- pass.ReportRangef(id, "loop variable %s captured by func literal",
- id.Name)
+ })
+
+ // Also check for testing.T.Run (with T.Parallel).
+ // We consider every t.Run statement in the loop body, because there is
+ // no commonly used mechanism for synchronizing parallel subtests.
+ // It is of course theoretically possible to synchronize parallel subtests,
+ // though such a pattern is likely to be exceedingly rare as it would be
+ // fighting against the test runner.
+ for _, s := range body.List {
+ switch s := s.(type) {
+ case *ast.ExprStmt:
+ if call, ok := s.X.(*ast.CallExpr); ok {
+ for _, stmt := range parallelSubtest(pass.TypesInfo, call) {
+ reportCaptured(pass, vars, stmt)
+ }
+
}
}
- return true
- })
+ }
})
return nil, nil
}
-// goInvokes returns a function expression that would be called asynchronously
+// reportCaptured reports a diagnostic stating a loop variable
+// has been captured by a func literal if checkStmt has escaping
+// references to vars. vars is expected to be variables updated by a loop statement,
+// and checkStmt is expected to be a statement from the body of a func literal in the loop.
+func reportCaptured(pass *analysis.Pass, vars []types.Object, checkStmt ast.Stmt) {
+ ast.Inspect(checkStmt, func(n ast.Node) bool {
+ id, ok := n.(*ast.Ident)
+ if !ok {
+ return true
+ }
+ obj := pass.TypesInfo.Uses[id]
+ if obj == nil {
+ return true
+ }
+ for _, v := range vars {
+ if v == obj {
+ pass.ReportRangef(id, "loop variable %s captured by func literal", id.Name)
+ }
+ }
+ return true
+ })
+}
+
+// forEachLastStmt calls onLast on each "last" statement in a list of statements.
+// "Last" is defined recursively so, for example, if the last statement is
+// a switch statement, then each switch case is also visited to examine
+// its last statements.
+func forEachLastStmt(stmts []ast.Stmt, onLast func(last ast.Stmt)) {
+ if len(stmts) == 0 {
+ return
+ }
+
+ s := stmts[len(stmts)-1]
+ switch s := s.(type) {
+ case *ast.IfStmt:
+ loop:
+ for {
+ forEachLastStmt(s.Body.List, onLast)
+ switch e := s.Else.(type) {
+ case *ast.BlockStmt:
+ forEachLastStmt(e.List, onLast)
+ break loop
+ case *ast.IfStmt:
+ s = e
+ case nil:
+ break loop
+ }
+ }
+ case *ast.ForStmt:
+ forEachLastStmt(s.Body.List, onLast)
+ case *ast.RangeStmt:
+ forEachLastStmt(s.Body.List, onLast)
+ case *ast.SwitchStmt:
+ for _, c := range s.Body.List {
+ cc := c.(*ast.CaseClause)
+ forEachLastStmt(cc.Body, onLast)
+ }
+ case *ast.TypeSwitchStmt:
+ for _, c := range s.Body.List {
+ cc := c.(*ast.CaseClause)
+ forEachLastStmt(cc.Body, onLast)
+ }
+ case *ast.SelectStmt:
+ for _, c := range s.Body.List {
+ cc := c.(*ast.CommClause)
+ forEachLastStmt(cc.Body, onLast)
+ }
+ default:
+ onLast(s)
+ }
+}
+
+// litStmts returns all statements from the function body of a function
+// literal.
+//
+// If fun is not a function literal, it returns nil.
+func litStmts(fun ast.Expr) []ast.Stmt {
+ lit, _ := fun.(*ast.FuncLit)
+ if lit == nil {
+ return nil
+ }
+ return lit.Body.List
+}
+
+// goInvoke returns a function expression that would be called asynchronously
// (but not awaited) in another goroutine as a consequence of the call.
// For example, given the g.Go call below, it returns the function literal expression.
//
-// import "sync/errgroup"
-// var g errgroup.Group
-// g.Go(func() error { ... })
+// import "sync/errgroup"
+// var g errgroup.Group
+// g.Go(func() error { ... })
//
// Currently only "golang.org/x/sync/errgroup.Group()" is considered.
-func goInvokes(info *types.Info, call *ast.CallExpr) ast.Expr {
- f := typeutil.StaticCallee(info, call)
- // Note: Currently only supports: golang.org/x/sync/errgroup.Go.
- if f == nil || f.Name() != "Go" {
+func goInvoke(info *types.Info, call *ast.CallExpr) ast.Expr {
+ if !isMethodCall(info, call, "golang.org/x/sync/errgroup", "Group", "Go") {
return nil
}
- recv := f.Type().(*types.Signature).Recv()
- if recv == nil {
+ return call.Args[0]
+}
+
+// parallelSubtest returns statements that can be easily proven to execute
+// concurrently via the go test runner, as t.Run has been invoked with a
+// function literal that calls t.Parallel.
+//
+// In practice, users rely on the fact that statements before the call to
+// t.Parallel are synchronous. For example by declaring test := test inside the
+// function literal, but before the call to t.Parallel.
+//
+// Therefore, we only flag references in statements that are obviously
+// dominated by a call to t.Parallel. As a simple heuristic, we only consider
+// statements following the final labeled statement in the function body, to
+// avoid scenarios where a jump would cause either the call to t.Parallel or
+// the problematic reference to be skipped.
+//
+// import "testing"
+//
+// func TestFoo(t *testing.T) {
+// tests := []int{0, 1, 2}
+// for i, test := range tests {
+// t.Run("subtest", func(t *testing.T) {
+// println(i, test) // OK
+// t.Parallel()
+// println(i, test) // Not OK
+// })
+// }
+// }
+func parallelSubtest(info *types.Info, call *ast.CallExpr) []ast.Stmt {
+ if !isMethodCall(info, call, "testing", "T", "Run") {
return nil
}
- rtype, ok := recv.Type().(*types.Pointer)
- if !ok {
+
+ if len(call.Args) != 2 {
+ // Ignore calls such as t.Run(fn()).
return nil
}
- named, ok := rtype.Elem().(*types.Named)
- if !ok {
+
+ lit, _ := call.Args[1].(*ast.FuncLit)
+ if lit == nil {
return nil
}
- if named.Obj().Name() != "Group" {
+
+ // Capture the *testing.T object for the first argument to the function
+ // literal.
+ if len(lit.Type.Params.List[0].Names) == 0 {
+ return nil
+ }
+
+ tObj := info.Defs[lit.Type.Params.List[0].Names[0]]
+ if tObj == nil {
return nil
}
+
+ // Match statements that occur after a call to t.Parallel following the final
+ // labeled statement in the function body.
+ //
+ // We iterate over lit.Body.List to have a simple, fast and "frequent enough"
+ // dominance relationship for t.Parallel(): lit.Body.List[i] dominates
+ // lit.Body.List[j] for i < j unless there is a jump.
+ var stmts []ast.Stmt
+ afterParallel := false
+ for _, stmt := range lit.Body.List {
+ stmt, labeled := unlabel(stmt)
+ if labeled {
+ // Reset: naively we don't know if a jump could have caused the
+ // previously considered statements to be skipped.
+ stmts = nil
+ afterParallel = false
+ }
+
+ if afterParallel {
+ stmts = append(stmts, stmt)
+ continue
+ }
+
+ // Check if stmt is a call to t.Parallel(), for the correct t.
+ exprStmt, ok := stmt.(*ast.ExprStmt)
+ if !ok {
+ continue
+ }
+ expr := exprStmt.X
+ if isMethodCall(info, expr, "testing", "T", "Parallel") {
+ call, _ := expr.(*ast.CallExpr)
+ if call == nil {
+ continue
+ }
+ x, _ := call.Fun.(*ast.SelectorExpr)
+ if x == nil {
+ continue
+ }
+ id, _ := x.X.(*ast.Ident)
+ if id == nil {
+ continue
+ }
+ if info.Uses[id] == tObj {
+ afterParallel = true
+ }
+ }
+ }
+
+ return stmts
+}
+
+// unlabel returns the inner statement for the possibly labeled statement stmt,
+// stripping any (possibly nested) *ast.LabeledStmt wrapper.
+//
+// The second result reports whether stmt was an *ast.LabeledStmt.
+func unlabel(stmt ast.Stmt) (ast.Stmt, bool) {
+ labeled := false
+ for {
+ labelStmt, ok := stmt.(*ast.LabeledStmt)
+ if !ok {
+ return stmt, labeled
+ }
+ labeled = true
+ stmt = labelStmt.Stmt
+ }
+}
+
+// isMethodCall reports whether expr is a method call of
+// <pkgPath>.<typeName>.<method>.
+func isMethodCall(info *types.Info, expr ast.Expr, pkgPath, typeName, method string) bool {
+ call, ok := expr.(*ast.CallExpr)
+ if !ok {
+ return false
+ }
+
+ // Check that we are calling a method <method>
+ f := typeutil.StaticCallee(info, call)
+ if f == nil || f.Name() != method {
+ return false
+ }
+ recv := f.Type().(*types.Signature).Recv()
+ if recv == nil {
+ return false
+ }
+
+ // Check that the receiver is a <pkgPath>.<typeName> or
+ // *<pkgPath>.<typeName>.
+ rtype := recv.Type()
+ if ptr, ok := recv.Type().(*types.Pointer); ok {
+ rtype = ptr.Elem()
+ }
+ named, ok := rtype.(*types.Named)
+ if !ok {
+ return false
+ }
+ if named.Obj().Name() != typeName {
+ return false
+ }
pkg := f.Pkg()
if pkg == nil {
- return nil
+ return false
}
- if pkg.Path() != "golang.org/x/sync/errgroup" {
- return nil
+ if pkg.Path() != pkgPath {
+ return false
}
- return call.Args[0]
+
+ return true
}
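
The conventional fix for the parallel-subtest case documented above is to shadow the loop variable before the call to t.Parallel, a pattern the subtests testdata below also exercises (TestAll and tc are illustrative names):

	package p

	import "testing"

	func TestAll(t *testing.T) {
		tests := []string{"a", "b"}
		for _, tc := range tests {
			tc := tc // re-declare before t.Parallel so each subtest keeps its own value
			t.Run(tc, func(t *testing.T) {
				t.Parallel()
				_ = tc // not reported: tc refers to the per-iteration copy
			})
		}
	}
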
diff --git a/go/analysis/passes/loopclosure/loopclosure_test.go b/go/analysis/passes/loopclosure/loopclosure_test.go
index 1498838d7..55fb2a4a3 100644
--- a/go/analysis/passes/loopclosure/loopclosure_test.go
+++ b/go/analysis/passes/loopclosure/loopclosure_test.go
@@ -5,16 +5,16 @@
package loopclosure_test
import (
- "golang.org/x/tools/internal/typeparams"
"testing"
"golang.org/x/tools/go/analysis/analysistest"
"golang.org/x/tools/go/analysis/passes/loopclosure"
+ "golang.org/x/tools/internal/typeparams"
)
func Test(t *testing.T) {
testdata := analysistest.TestData()
- tests := []string{"a", "golang.org/..."}
+ tests := []string{"a", "golang.org/...", "subtests"}
if typeparams.Enabled {
tests = append(tests, "typeparams")
}
diff --git a/go/analysis/passes/loopclosure/testdata/src/a/a.go b/go/analysis/passes/loopclosure/testdata/src/a/a.go
index 2c8e2e6c4..7a7f05f66 100644
--- a/go/analysis/passes/loopclosure/testdata/src/a/a.go
+++ b/go/analysis/passes/loopclosure/testdata/src/a/a.go
@@ -6,7 +6,13 @@
package testdata
-import "golang.org/x/sync/errgroup"
+import (
+ "sync"
+
+ "golang.org/x/sync/errgroup"
+)
+
+var A int
func _() {
var s []int
@@ -49,6 +55,19 @@ func _() {
println(i, v)
}()
}
+
+ // iteration variable declared outside the loop
+ for A = range s {
+ go func() {
+ println(A) // want "loop variable A captured by func literal"
+ }()
+ }
+ // iteration variable declared in a different file
+ for B = range s {
+ go func() {
+ println(B) // want "loop variable B captured by func literal"
+ }()
+ }
// If the key of the range statement is not an identifier
// the code should not panic (it used to).
var x [2]int
@@ -91,9 +110,73 @@ func _() {
}
}
-// Group is used to test that loopclosure does not match on any type named "Group".
-// The checker only matches on methods "(*...errgroup.Group).Go".
-type Group struct{};
+// Cases that rely on recursively checking for last statements.
+func _() {
+
+ for i := range "outer" {
+ for j := range "inner" {
+ if j < 1 {
+ defer func() {
+ print(i) // want "loop variable i captured by func literal"
+ }()
+ } else if j < 2 {
+ go func() {
+ print(i) // want "loop variable i captured by func literal"
+ }()
+ } else {
+ go func() {
+ print(i)
+ }()
+ println("we don't catch the error above because of this statement")
+ }
+ }
+ }
+
+ for i := 0; i < 10; i++ {
+ for j := 0; j < 10; j++ {
+ if j < 1 {
+ switch j {
+ case 0:
+ defer func() {
+ print(i) // want "loop variable i captured by func literal"
+ }()
+ default:
+ go func() {
+ print(i) // want "loop variable i captured by func literal"
+ }()
+ }
+ } else if j < 2 {
+ var a interface{} = j
+ switch a.(type) {
+ case int:
+ defer func() {
+ print(i) // want "loop variable i captured by func literal"
+ }()
+ default:
+ go func() {
+ print(i) // want "loop variable i captured by func literal"
+ }()
+ }
+ } else {
+ ch := make(chan string)
+ select {
+ case <-ch:
+ defer func() {
+ print(i) // want "loop variable i captured by func literal"
+ }()
+ default:
+ go func() {
+ print(i) // want "loop variable i captured by func literal"
+ }()
+ }
+ }
+ }
+ }
+}
+
+// Group is used to test that loopclosure only matches Group.Go when Group is
+// from the golang.org/x/sync/errgroup package.
+type Group struct{}
func (g *Group) Go(func() error) {}
@@ -108,6 +191,21 @@ func _() {
return nil
})
}
+
+ for i, v := range s {
+ if i > 0 {
+ g.Go(func() error {
+ print(i) // want "loop variable i captured by func literal"
+ return nil
+ })
+ } else {
+ g.Go(func() error {
+ print(v) // want "loop variable v captured by func literal"
+ return nil
+ })
+ }
+ }
+
// Do not match other Group.Go cases
g1 := new(Group)
for i, v := range s {
@@ -118,3 +216,28 @@ func _() {
})
}
}
+
+// Real-world example from #16520, slightly simplified
+func _() {
+ var nodes []interface{}
+
+ critical := new(errgroup.Group)
+ others := sync.WaitGroup{}
+
+ isCritical := func(node interface{}) bool { return false }
+ run := func(node interface{}) error { return nil }
+
+ for _, node := range nodes {
+ if isCritical(node) {
+ critical.Go(func() error {
+ return run(node) // want "loop variable node captured by func literal"
+ })
+ } else {
+ others.Add(1)
+ go func() {
+ _ = run(node) // want "loop variable node captured by func literal"
+ others.Done()
+ }()
+ }
+ }
+}
diff --git a/go/analysis/passes/loopclosure/testdata/src/a/b.go b/go/analysis/passes/loopclosure/testdata/src/a/b.go
new file mode 100644
index 000000000..d4e5da418
--- /dev/null
+++ b/go/analysis/passes/loopclosure/testdata/src/a/b.go
@@ -0,0 +1,9 @@
+// Copyright 2022 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package testdata
+
+// B is declared in a separate file to test that object resolution spans the
+// entire package.
+var B int
diff --git a/go/analysis/passes/loopclosure/testdata/src/subtests/subtest.go b/go/analysis/passes/loopclosure/testdata/src/subtests/subtest.go
new file mode 100644
index 000000000..50283ec61
--- /dev/null
+++ b/go/analysis/passes/loopclosure/testdata/src/subtests/subtest.go
@@ -0,0 +1,202 @@
+// Copyright 2022 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// This file contains tests that the loopclosure analyzer detects leaked
+// references via parallel subtests.
+
+package subtests
+
+import (
+ "testing"
+)
+
+// T is used to test that loopclosure only matches T.Run when T is from the
+// testing package.
+type T struct{}
+
+// Run should not match testing.T.Run. Note that the second argument is
+// intentionally a *testing.T, not a *T, so that we can check both
+// testing.T.Parallel inside a T.Run, and a T.Parallel inside a testing.T.Run.
+func (t *T) Run(string, func(*testing.T)) {
+}
+
+func (t *T) Parallel() {}
+
+func _(t *testing.T) {
+ for i, test := range []int{1, 2, 3} {
+ // Check that parallel subtests are identified.
+ t.Run("", func(t *testing.T) {
+ t.Parallel()
+ println(i) // want "loop variable i captured by func literal"
+ println(test) // want "loop variable test captured by func literal"
+ })
+
+ // Check that serial tests are OK.
+ t.Run("", func(t *testing.T) {
+ println(i)
+ println(test)
+ })
+
+ // Check that the location of t.Parallel matters.
+ t.Run("", func(t *testing.T) {
+ println(i)
+ println(test)
+ t.Parallel()
+ println(i) // want "loop variable i captured by func literal"
+ println(test) // want "loop variable test captured by func literal"
+ })
+
+ // Check that *testing.T value matters.
+ t.Run("", func(t *testing.T) {
+ var x testing.T
+ x.Parallel()
+ println(i)
+ println(test)
+ })
+
+ // Check that shadowing the loop variables within the test literal is OK if
+ // it occurs before t.Parallel().
+ t.Run("", func(t *testing.T) {
+ i := i
+ test := test
+ t.Parallel()
+ println(i)
+ println(test)
+ })
+
+ // Check that shadowing the loop variables within the test literal is Not
+ // OK if it occurs after t.Parallel().
+ t.Run("", func(t *testing.T) {
+ t.Parallel()
+ i := i // want "loop variable i captured by func literal"
+ test := test // want "loop variable test captured by func literal"
+ println(i) // OK
+ println(test) // OK
+ })
+
+ // Check uses in nested blocks.
+ t.Run("", func(t *testing.T) {
+ t.Parallel()
+ {
+ println(i) // want "loop variable i captured by func literal"
+ println(test) // want "loop variable test captured by func literal"
+ }
+ })
+
+ // Check that we catch uses in nested subtests.
+ t.Run("", func(t *testing.T) {
+ t.Parallel()
+ t.Run("", func(t *testing.T) {
+ println(i) // want "loop variable i captured by func literal"
+ println(test) // want "loop variable test captured by func literal"
+ })
+ })
+
+ // Check that there is no diagnostic if t is not a *testing.T.
+ t.Run("", func(_ *testing.T) {
+ t := &T{}
+ t.Parallel()
+ println(i)
+ println(test)
+ })
+
+ // Check that there is no diagnostic when a jump to a label may have caused
+ // the call to t.Parallel to have been skipped.
+ t.Run("", func(t *testing.T) {
+ if true {
+ goto Test
+ }
+ t.Parallel()
+ Test:
+ println(i)
+ println(test)
+ })
+
+ // Check that there is no diagnostic when a jump to a label may have caused
+ // the loop variable reference to be skipped, but there is a diagnostic
+ // when both the call to t.Parallel and the loop variable reference occur
+ // after the final label in the block.
+ t.Run("", func(t *testing.T) {
+ if true {
+ goto Test
+ }
+ t.Parallel()
+ println(i) // maybe OK
+ Test:
+ t.Parallel()
+ println(test) // want "loop variable test captured by func literal"
+ })
+
+ // Check that multiple labels are handled.
+ t.Run("", func(t *testing.T) {
+ if true {
+ goto Test1
+ } else {
+ goto Test2
+ }
+ Test1:
+ Test2:
+ t.Parallel()
+ println(test) // want "loop variable test captured by func literal"
+ })
+
+ // Check that we do not have problems when t.Run has a single argument.
+ fn := func() (string, func(t *testing.T)) { return "", nil }
+ t.Run(fn())
+ }
+}
+
+// Check that there is no diagnostic when loop variables are shadowed within
+// the loop body.
+func _(t *testing.T) {
+ for i, test := range []int{1, 2, 3} {
+ i := i
+ test := test
+ t.Run("", func(t *testing.T) {
+ t.Parallel()
+ println(i)
+ println(test)
+ })
+ }
+}
+
+// Check that t.Run must be *testing.T.Run.
+func _(t *T) {
+ for i, test := range []int{1, 2, 3} {
+ t.Run("", func(t *testing.T) {
+ t.Parallel()
+ println(i)
+ println(test)
+ })
+ }
+}
+
+// Check that the top-level must be parallel in order to cause a diagnostic.
+//
+// From https://pkg.go.dev/testing:
+//
+// "Run does not return until parallel subtests have completed, providing a
+// way to clean up after a group of parallel tests"
+func _(t *testing.T) {
+ for _, test := range []int{1, 2, 3} {
+ // In this subtest, a/b must complete before the synchronous subtest "a"
+ // completes, so the reference to test does not escape the current loop
+ // iteration.
+ t.Run("a", func(s *testing.T) {
+ s.Run("b", func(u *testing.T) {
+ u.Parallel()
+ println(test)
+ })
+ })
+
+ // In this subtest, c executes concurrently, so the reference to test may
+ // escape the current loop iteration.
+ t.Run("c", func(s *testing.T) {
+ s.Parallel()
+ s.Run("d", func(u *testing.T) {
+ println(test) // want "loop variable test captured by func literal"
+ })
+ })
+ }
+}
diff --git a/go/analysis/passes/nilness/nilness.go b/go/analysis/passes/nilness/nilness.go
index 8fd8cd000..6849c33cc 100644
--- a/go/analysis/passes/nilness/nilness.go
+++ b/go/analysis/passes/nilness/nilness.go
@@ -15,6 +15,7 @@ import (
"golang.org/x/tools/go/analysis"
"golang.org/x/tools/go/analysis/passes/buildssa"
"golang.org/x/tools/go/ssa"
+ "golang.org/x/tools/internal/typeparams"
)
const Doc = `check for redundant or impossible nil comparisons
@@ -102,8 +103,11 @@ func runFunc(pass *analysis.Pass, fn *ssa.Function) {
for _, instr := range b.Instrs {
switch instr := instr.(type) {
case ssa.CallInstruction:
- notNil(stack, instr, instr.Common().Value,
- instr.Common().Description())
+ // A nil receiver may be okay for type params.
+ cc := instr.Common()
+ if !(cc.IsInvoke() && typeparams.IsTypeParam(cc.Value.Type())) {
+ notNil(stack, instr, cc.Value, cc.Description())
+ }
case *ssa.FieldAddr:
notNil(stack, instr, instr.X, "field selection")
case *ssa.IndexAddr:
@@ -250,7 +254,7 @@ func (n nilness) String() string { return nilnessStrings[n+1] }
// or unknown given the dominating stack of facts.
func nilnessOf(stack []fact, v ssa.Value) nilness {
switch v := v.(type) {
- // unwrap ChangeInterface values recursively, to detect if underlying
+ // unwrap ChangeInterface and Slice values recursively, to detect if underlying
// values have any facts recorded or are otherwise known with regard to nilness.
//
// This work must be in addition to expanding facts about
@@ -264,6 +268,10 @@ func nilnessOf(stack []fact, v ssa.Value) nilness {
if underlying := nilnessOf(stack, v.X); underlying != unknown {
return underlying
}
+ case *ssa.Slice:
+ if underlying := nilnessOf(stack, v.X); underlying != unknown {
+ return underlying
+ }
case *ssa.SliceToArrayPointer:
nn := nilnessOf(stack, v.X)
if slice2ArrayPtrLen(v) > 0 {
@@ -302,9 +310,9 @@ func nilnessOf(stack []fact, v ssa.Value) nilness {
return isnonnil
case *ssa.Const:
if v.IsNil() {
- return isnil
+ return isnil // nil or zero value of a pointer-like type
} else {
- return isnonnil
+ return unknown // non-pointer
}
}
diff --git a/go/analysis/passes/nilness/nilness_test.go b/go/analysis/passes/nilness/nilness_test.go
index b258c1efb..99c4dfbac 100644
--- a/go/analysis/passes/nilness/nilness_test.go
+++ b/go/analysis/passes/nilness/nilness_test.go
@@ -9,9 +9,26 @@ import (
"golang.org/x/tools/go/analysis/analysistest"
"golang.org/x/tools/go/analysis/passes/nilness"
+ "golang.org/x/tools/internal/typeparams"
)
func Test(t *testing.T) {
testdata := analysistest.TestData()
analysistest.Run(t, testdata, nilness.Analyzer, "a")
}
+
+func TestInstantiated(t *testing.T) {
+ if !typeparams.Enabled {
+ t.Skip("TestInstantiated requires type parameters")
+ }
+ testdata := analysistest.TestData()
+ analysistest.Run(t, testdata, nilness.Analyzer, "c")
+}
+
+func TestTypeSet(t *testing.T) {
+ if !typeparams.Enabled {
+ t.Skip("TestTypeSet requires type parameters")
+ }
+ testdata := analysistest.TestData()
+ analysistest.Run(t, testdata, nilness.Analyzer, "d")
+}
diff --git a/go/analysis/passes/nilness/testdata/src/a/a.go b/go/analysis/passes/nilness/testdata/src/a/a.go
index f4d8f455e..0629e08d8 100644
--- a/go/analysis/passes/nilness/testdata/src/a/a.go
+++ b/go/analysis/passes/nilness/testdata/src/a/a.go
@@ -130,7 +130,6 @@ func f9(x interface {
b()
c()
}) {
-
x.b() // we don't catch this panic because we don't have any facts yet
xx := interface {
a()
@@ -155,11 +154,27 @@ func f9(x interface {
}
}
+func f10() {
+ s0 := make([]string, 0)
+ if s0 == nil { // want "impossible condition: non-nil == nil"
+ print(0)
+ }
+
+ var s1 []string
+ if s1 == nil { // want "tautological condition: nil == nil"
+ print(0)
+ }
+ s2 := s1[:][:]
+ if s2 == nil { // want "tautological condition: nil == nil"
+ print(0)
+ }
+}
+
func unknown() bool {
return false
}
-func f10(a interface{}) {
+func f11(a interface{}) {
switch a.(type) {
case nil:
return
@@ -170,7 +185,7 @@ func f10(a interface{}) {
}
}
-func f11(a interface{}) {
+func f12(a interface{}) {
switch a {
case nil:
return
@@ -181,3 +196,23 @@ func f11(a interface{}) {
return
}
}
+
+type Y struct {
+ innerY
+}
+
+type innerY struct {
+ value int
+}
+
+func f13() {
+ var d *Y
+ print(d.value) // want "nil dereference in field selection"
+}
+
+func f14() {
+ var x struct{ f string }
+ if x == struct{ f string }{} { // we don't catch this tautology as we restrict to reference types
+ print(x)
+ }
+}
diff --git a/go/analysis/passes/nilness/testdata/src/c/c.go b/go/analysis/passes/nilness/testdata/src/c/c.go
new file mode 100644
index 000000000..c9a05a714
--- /dev/null
+++ b/go/analysis/passes/nilness/testdata/src/c/c.go
@@ -0,0 +1,14 @@
+package c
+
+func instantiated[X any](x *X) int {
+ if x == nil {
+ print(*x) // want "nil dereference in load"
+ }
+ return 1
+}
+
+var g int
+
+func init() {
+ g = instantiated[int](&g)
+}
diff --git a/go/analysis/passes/nilness/testdata/src/d/d.go b/go/analysis/passes/nilness/testdata/src/d/d.go
new file mode 100644
index 000000000..72bd1c872
--- /dev/null
+++ b/go/analysis/passes/nilness/testdata/src/d/d.go
@@ -0,0 +1,55 @@
+package d
+
+type message interface{ PR() }
+
+func noparam() {
+ var messageT message
+ messageT.PR() // want "nil dereference in dynamic method call"
+}
+
+func paramNonnil[T message]() {
+ var messageT T
+ messageT.PR() // cannot conclude messageT is nil.
+}
+
+func instance() {
+ // buildssa.BuilderMode does not include InstantiateGenerics.
+ paramNonnil[message]() // no warning is expected as param[message] is not built.
+}
+
+func param[T interface {
+ message
+ ~*int | ~chan int
+}]() {
+ var messageT T // messageT is nil.
+ messageT.PR() // nil receiver may be okay. See param[nilMsg].
+}
+
+type nilMsg chan int
+
+func (m nilMsg) PR() {
+ if m == nil {
+ print("not an error")
+ }
+}
+
+var G func() = param[nilMsg] // no warning
+
+func allNillable[T ~*int | ~chan int]() {
+ var x, y T // both are nillable and are nil.
+ if x != y { // want "impossible condition: nil != nil"
+ print("unreachable")
+ }
+}
+
+func notAll[T ~*int | ~chan int | ~int]() {
+ var x, y T // neither are nillable due to ~int
+ if x != y { // no warning
+ print("unreachable")
+ }
+}
+
+func noninvoke[T ~func()]() {
+ var x T
+ x() // want "nil dereference in dynamic function call"
+}
diff --git a/go/analysis/passes/pkgfact/pkgfact.go b/go/analysis/passes/pkgfact/pkgfact.go
index 2262fc4f1..f4f5616e5 100644
--- a/go/analysis/passes/pkgfact/pkgfact.go
+++ b/go/analysis/passes/pkgfact/pkgfact.go
@@ -10,14 +10,14 @@
// Each key/value pair comes from a top-level constant declaration
// whose name starts and ends with "_". For example:
//
-// package p
+// package p
//
-// const _greeting_ = "hello"
-// const _audience_ = "world"
+// const _greeting_ = "hello"
+// const _audience_ = "world"
//
// the pkgfact analysis output for package p would be:
//
-// {"greeting": "hello", "audience": "world"}.
+// {"greeting": "hello", "audience": "world"}.
//
// In addition, the analysis reports a diagnostic at each import
// showing which key/value pairs it contributes.
diff --git a/go/analysis/passes/printf/printf.go b/go/analysis/passes/printf/printf.go
index dee37d78a..daaf709a4 100644
--- a/go/analysis/passes/printf/printf.go
+++ b/go/analysis/passes/printf/printf.go
@@ -342,7 +342,6 @@ func checkPrintfFwd(pass *analysis.Pass, w *printfWrapper, call *ast.CallExpr, k
// not do so with gccgo, and nor do some other build systems.
// TODO(adonovan): eliminate the redundant facts once this restriction
// is lifted.
-//
var isPrint = stringSet{
"fmt.Errorf": true,
"fmt.Fprint": true,
@@ -584,7 +583,6 @@ func checkPrintf(pass *analysis.Pass, kind Kind, call *ast.CallExpr, fn *types.F
argNum := firstArg
maxArgNum := firstArg
anyIndex := false
- anyW := false
for i, w := 0, 0; i < len(format); i += w {
w = 1
if format[i] != '%' {
@@ -607,11 +605,6 @@ func checkPrintf(pass *analysis.Pass, kind Kind, call *ast.CallExpr, fn *types.F
pass.Reportf(call.Pos(), "%s does not support error-wrapping directive %%w", state.name)
return
}
- if anyW {
- pass.Reportf(call.Pos(), "%s call has more than one error-wrapping directive %%w", state.name)
- return
- }
- anyW = true
}
if len(state.argNums) > 0 {
// Continue with the next sequential argument.
@@ -673,12 +666,13 @@ func (s *formatState) parseIndex() bool {
s.scanNum()
ok := true
if s.nbytes == len(s.format) || s.nbytes == start || s.format[s.nbytes] != ']' {
- ok = false
- s.nbytes = strings.Index(s.format, "]")
+ ok = false // syntax error is either missing "]" or invalid index.
+ s.nbytes = strings.Index(s.format[start:], "]")
if s.nbytes < 0 {
s.pass.ReportRangef(s.call, "%s format %s is missing closing ]", s.name, s.format)
return false
}
+ s.nbytes = s.nbytes + start
}
arg32, err := strconv.ParseInt(s.format[start:s.nbytes], 10, 32)
if err != nil || !ok || arg32 <= 0 || arg32 > int64(len(s.call.Args)-s.firstArg) {
@@ -931,9 +925,9 @@ func okPrintfArg(pass *analysis.Pass, call *ast.CallExpr, state *formatState) (o
// recursiveStringer reports whether the argument e is a potential
// recursive call to stringer or is an error, such as t and &t in these examples:
//
-// func (t *T) String() string { printf("%s", t) }
-// func (t T) Error() string { printf("%s", t) }
-// func (t T) String() string { printf("%s", &t) }
+// func (t *T) String() string { printf("%s", t) }
+// func (t T) Error() string { printf("%s", t) }
+// func (t T) String() string { printf("%s", &t) }
func recursiveStringer(pass *analysis.Pass, e ast.Expr) (string, bool) {
typ := pass.TypesInfo.Types[e].Type
@@ -951,11 +945,16 @@ func recursiveStringer(pass *analysis.Pass, e ast.Expr) (string, bool) {
return "", false
}
+ // inScope returns true if e is in the scope of f.
+ inScope := func(e ast.Expr, f *types.Func) bool {
+ return f.Scope() != nil && f.Scope().Contains(e.Pos())
+ }
+
// Is the expression e within the body of that String or Error method?
var method *types.Func
- if strOk && strMethod.Pkg() == pass.Pkg && strMethod.Scope().Contains(e.Pos()) {
+ if strOk && strMethod.Pkg() == pass.Pkg && inScope(e, strMethod) {
method = strMethod
- } else if errOk && errMethod.Pkg() == pass.Pkg && errMethod.Scope().Contains(e.Pos()) {
+ } else if errOk && errMethod.Pkg() == pass.Pkg && inScope(e, errMethod) {
method = errMethod
} else {
return "", false
diff --git a/go/analysis/passes/printf/testdata/src/a/a.go b/go/analysis/passes/printf/testdata/src/a/a.go
index 5eca3172d..0c4d11bf0 100644
--- a/go/analysis/passes/printf/testdata/src/a/a.go
+++ b/go/analysis/passes/printf/testdata/src/a/a.go
@@ -217,6 +217,7 @@ func PrintfTests() {
Printf("%[2]*.[1]*[3]d x", 2, "hi", 4) // want `a.Printf format %\[2]\*\.\[1\]\*\[3\]d uses non-int \x22hi\x22 as argument of \*`
Printf("%[0]s x", "arg1") // want `a.Printf format has invalid argument index \[0\]`
Printf("%[0]d x", 1) // want `a.Printf format has invalid argument index \[0\]`
+ Printf("%[3]*.[2*[1]f", 1, 2, 3) // want `a.Printf format has invalid argument index \[2\*\[1\]`
// Something that satisfies the error interface.
var e error
fmt.Println(e.Error()) // ok
@@ -341,7 +342,7 @@ func PrintfTests() {
_ = fmt.Errorf("%[2]w %[1]s", "x", err) // OK
_ = fmt.Errorf("%[2]w %[1]s", e, "x") // want `fmt.Errorf format %\[2\]w has arg "x" of wrong type string`
_ = fmt.Errorf("%w", "x") // want `fmt.Errorf format %w has arg "x" of wrong type string`
- _ = fmt.Errorf("%w %w", err, err) // want `fmt.Errorf call has more than one error-wrapping directive %w`
+ _ = fmt.Errorf("%w %w", err, err) // OK
_ = fmt.Errorf("%w", interface{}(nil)) // want `fmt.Errorf format %w has arg interface{}\(nil\) of wrong type interface{}`
_ = fmt.Errorf("%w", errorTestOK(0)) // concrete value implements error
_ = fmt.Errorf("%w", errSubset) // interface value implements error
diff --git a/go/analysis/passes/printf/testdata/src/typeparams/diagnostics.go b/go/analysis/passes/printf/testdata/src/typeparams/diagnostics.go
index 76a9a205a..c4d7e530d 100644
--- a/go/analysis/passes/printf/testdata/src/typeparams/diagnostics.go
+++ b/go/analysis/passes/printf/testdata/src/typeparams/diagnostics.go
@@ -121,3 +121,25 @@ func TestTermReduction[T1 interface{ ~int | string }, T2 interface {
fmt.Printf("%d", t2)
fmt.Printf("%s", t2) // want "wrong type.*contains typeparams.myInt"
}
+
+type U[T any] struct{}
+
+func (u U[T]) String() string {
+ fmt.Println(u) // want `fmt.Println arg u causes recursive call to \(typeparams.U\[T\]\).String method`
+ return ""
+}
+
+type S[T comparable] struct {
+ t T
+}
+
+func (s S[T]) String() T {
+ fmt.Println(s) // Not flagged. We currently do not consider String() T to implement fmt.Stringer (see #55928).
+ return s.t
+}
+
+func TestInstanceStringer() {
+ // Tests String method with nil Scope (#55350)
+ fmt.Println(&S[string]{})
+ fmt.Println(&U[string]{})
+}
diff --git a/go/analysis/passes/printf/types.go b/go/analysis/passes/printf/types.go
index 270e917c8..7cbb0bdbf 100644
--- a/go/analysis/passes/printf/types.go
+++ b/go/analysis/passes/printf/types.go
@@ -299,13 +299,3 @@ func isConvertibleToString(typ types.Type) bool {
return false
}
-
-// hasBasicType reports whether x's type is a types.Basic with the given kind.
-func hasBasicType(pass *analysis.Pass, x ast.Expr, kind types.BasicKind) bool {
- t := pass.TypesInfo.Types[x].Type
- if t != nil {
- t = t.Underlying()
- }
- b, ok := t.(*types.Basic)
- return ok && b.Kind() == kind
-}
diff --git a/go/analysis/passes/shadow/shadow.go b/go/analysis/passes/shadow/shadow.go
index b160dcf5b..a19cecd14 100644
--- a/go/analysis/passes/shadow/shadow.go
+++ b/go/analysis/passes/shadow/shadow.go
@@ -120,7 +120,6 @@ func run(pass *analysis.Pass) (interface{}, error) {
// the block, we should complain about it but don't.
// - A variable declared inside a function literal can falsely be identified
// as shadowing a variable in the outer function.
-//
type span struct {
min token.Pos
max token.Pos
diff --git a/go/analysis/passes/sigchanyzer/sigchanyzer.go b/go/analysis/passes/sigchanyzer/sigchanyzer.go
index 0d6c8ebf1..c490a84ea 100644
--- a/go/analysis/passes/sigchanyzer/sigchanyzer.go
+++ b/go/analysis/passes/sigchanyzer/sigchanyzer.go
@@ -50,7 +50,7 @@ func run(pass *analysis.Pass) (interface{}, error) {
}
case *ast.CallExpr:
// Only signal.Notify(make(chan os.Signal), os.Interrupt) is safe,
- // conservatively treate others as not safe, see golang/go#45043
+ // conservatively treat others as not safe, see golang/go#45043
if isBuiltinMake(pass.TypesInfo, arg) {
return
}
diff --git a/go/analysis/passes/sortslice/analyzer.go b/go/analysis/passes/sortslice/analyzer.go
index 5eb957a18..f85837d66 100644
--- a/go/analysis/passes/sortslice/analyzer.go
+++ b/go/analysis/passes/sortslice/analyzer.go
@@ -52,11 +52,20 @@ func run(pass *analysis.Pass) (interface{}, error) {
arg := call.Args[0]
typ := pass.TypesInfo.Types[arg].Type
+
+ if tuple, ok := typ.(*types.Tuple); ok {
+ typ = tuple.At(0).Type() // special case for Slice(f(...))
+ }
+
switch typ.Underlying().(type) {
case *types.Slice, *types.Interface:
return
}
+ // Restore typ to the original type; we may have unwrapped a tuple above,
+ // so typ might not be the type of arg.
+ typ = pass.TypesInfo.Types[arg].Type
+
var fixes []analysis.SuggestedFix
switch v := typ.Underlying().(type) {
case *types.Array:
diff --git a/go/analysis/passes/sortslice/testdata/src/a/a.go b/go/analysis/passes/sortslice/testdata/src/a/a.go
index bc6cc16e9..c6aca8df1 100644
--- a/go/analysis/passes/sortslice/testdata/src/a/a.go
+++ b/go/analysis/passes/sortslice/testdata/src/a/a.go
@@ -6,8 +6,8 @@ import "sort"
func IncorrectSort() {
i := 5
sortFn := func(i, j int) bool { return false }
- sort.Slice(i, sortFn) // want "sort.Slice's argument must be a slice; is called with int"
- sort.SliceStable(i, sortFn) // want "sort.SliceStable's argument must be a slice; is called with int"
+ sort.Slice(i, sortFn) // want "sort.Slice's argument must be a slice; is called with int"
+ sort.SliceStable(i, sortFn) // want "sort.SliceStable's argument must be a slice; is called with int"
sort.SliceIsSorted(i, sortFn) // want "sort.SliceIsSorted's argument must be a slice; is called with int"
}
@@ -62,3 +62,23 @@ func UnderlyingSlice() {
sort.SliceStable(s, sortFn)
sort.SliceIsSorted(s, sortFn)
}
+
+// FunctionResultsAsArguments passes a function which returns two values
+// that satisfy the sort.Slice signature. It should not produce a diagnostic.
+func FunctionResultsAsArguments() {
+ s := []string{"a", "z", "ooo"}
+ sort.Slice(less(s))
+ sort.Slice(lessPtr(s)) // want `sort.Slice's argument must be a slice; is called with \(\*\[\]string,.*`
+}
+
+func less(s []string) ([]string, func(i, j int) bool) {
+ return s, func(i, j int) bool {
+ return s[i] < s[j]
+ }
+}
+
+func lessPtr(s []string) (*[]string, func(i, j int) bool) {
+ return &s, func(i, j int) bool {
+ return s[i] < s[j]
+ }
+}
diff --git a/go/analysis/passes/stdmethods/stdmethods.go b/go/analysis/passes/stdmethods/stdmethods.go
index cc9497179..41f455d10 100644
--- a/go/analysis/passes/stdmethods/stdmethods.go
+++ b/go/analysis/passes/stdmethods/stdmethods.go
@@ -134,6 +134,19 @@ func canonicalMethod(pass *analysis.Pass, id *ast.Ident) {
}
}
+ // Special case: Unwrap has two possible signatures.
+ // Check for Unwrap() []error here.
+ if id.Name == "Unwrap" {
+ if args.Len() == 0 && results.Len() == 1 {
+ t := typeString(results.At(0).Type())
+ if t == "error" || t == "[]error" {
+ return
+ }
+ }
+ pass.ReportRangef(id, "method Unwrap() should have signature Unwrap() error or Unwrap() []error")
+ return
+ }
+
// Do the =s (if any) all match?
if !matchParams(pass, expect.args, args, "=") || !matchParams(pass, expect.results, results, "=") {
return
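
The Unwrap() []error form accepted above is the one produced by errors.Join in Go 1.20. A minimal sketch, not part of the patch, showing that signature at work:

package main

import (
	"errors"
	"fmt"
)

func main() {
	joined := errors.Join(errors.New("a"), errors.New("b"))
	u, ok := joined.(interface{ Unwrap() []error })
	fmt.Println(ok, len(u.Unwrap())) // true 2
}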
diff --git a/go/analysis/passes/stdmethods/testdata/src/a/a.go b/go/analysis/passes/stdmethods/testdata/src/a/a.go
index c95cf5d2b..2b01f4693 100644
--- a/go/analysis/passes/stdmethods/testdata/src/a/a.go
+++ b/go/analysis/passes/stdmethods/testdata/src/a/a.go
@@ -49,7 +49,7 @@ func (E) Error() string { return "" } // E implements error.
func (E) As() {} // want `method As\(\) should have signature As\((any|interface\{\})\) bool`
func (E) Is() {} // want `method Is\(\) should have signature Is\(error\) bool`
-func (E) Unwrap() {} // want `method Unwrap\(\) should have signature Unwrap\(\) error`
+func (E) Unwrap() {} // want `method Unwrap\(\) should have signature Unwrap\(\) error or Unwrap\(\) \[\]error`
type F int
@@ -57,8 +57,18 @@ func (F) Error() string { return "" } // Both F and *F implement error.
func (*F) As() {} // want `method As\(\) should have signature As\((any|interface\{\})\) bool`
func (*F) Is() {} // want `method Is\(\) should have signature Is\(error\) bool`
-func (*F) Unwrap() {} // want `method Unwrap\(\) should have signature Unwrap\(\) error`
+func (*F) Unwrap() {} // want `method Unwrap\(\) should have signature Unwrap\(\) error or Unwrap\(\) \[\]error`
type G int
func (G) As(interface{}) bool // ok
+
+type W int
+
+func (W) Error() string { return "" }
+func (W) Unwrap() error { return nil } // ok
+
+type M int
+
+func (M) Error() string { return "" }
+func (M) Unwrap() []error { return nil } // ok
diff --git a/go/analysis/passes/stdmethods/testdata/src/typeparams/typeparams.go b/go/analysis/passes/stdmethods/testdata/src/typeparams/typeparams.go
index 72df30d49..3d4146e9b 100644
--- a/go/analysis/passes/stdmethods/testdata/src/typeparams/typeparams.go
+++ b/go/analysis/passes/stdmethods/testdata/src/typeparams/typeparams.go
@@ -30,7 +30,7 @@ func (E[_]) Error() string { return "" } // E implements error.
func (E[P]) As() {} // want `method As\(\) should have signature As\((any|interface\{\})\) bool`
func (E[_]) Is() {} // want `method Is\(\) should have signature Is\(error\) bool`
-func (E[_]) Unwrap() {} // want `method Unwrap\(\) should have signature Unwrap\(\) error`
+func (E[_]) Unwrap() {} // want `method Unwrap\(\) should have signature Unwrap\(\) error or Unwrap\(\) \[\]error`
type F[P any] int
@@ -38,4 +38,4 @@ func (F[_]) Error() string { return "" } // Both F and *F implement error.
func (*F[_]) As() {} // want `method As\(\) should have signature As\((any|interface\{\})\) bool`
func (*F[_]) Is() {} // want `method Is\(\) should have signature Is\(error\) bool`
-func (*F[_]) Unwrap() {} // want `method Unwrap\(\) should have signature Unwrap\(\) error`
+func (*F[_]) Unwrap() {} // want `method Unwrap\(\) should have signature Unwrap\(\) error or Unwrap\(\) \[\]error`
diff --git a/go/analysis/passes/tests/testdata/src/a/go118_test.go b/go/analysis/passes/tests/testdata/src/a/go118_test.go
index dc898daca..e2bc3f3a0 100644
--- a/go/analysis/passes/tests/testdata/src/a/go118_test.go
+++ b/go/analysis/passes/tests/testdata/src/a/go118_test.go
@@ -94,3 +94,8 @@ func FuzzObjectMethod(f *testing.F) {
}
f.Fuzz(obj.myVar) // ok
}
+
+// Test for golang/go#56505: checking fuzz arguments should not panic on *error.
+func FuzzIssue56505(f *testing.F) {
+ f.Fuzz(func(e *error) {}) // want "the first parameter of a fuzz target must be \\*testing.T"
+}
diff --git a/go/analysis/passes/tests/tests.go b/go/analysis/passes/tests/tests.go
index ffa5205dd..935aad00c 100644
--- a/go/analysis/passes/tests/tests.go
+++ b/go/analysis/passes/tests/tests.go
@@ -84,7 +84,7 @@ func run(pass *analysis.Pass) (interface{}, error) {
return nil, nil
}
-// Checks the contents of a fuzz function.
+// checkFuzz checks the contents of a fuzz function.
func checkFuzz(pass *analysis.Pass, fn *ast.FuncDecl) {
params := checkFuzzCall(pass, fn)
if params != nil {
@@ -92,15 +92,17 @@ func checkFuzz(pass *analysis.Pass, fn *ast.FuncDecl) {
}
}
-// Check the arguments of f.Fuzz() calls :
-// 1. f.Fuzz() should call a function and it should be of type (*testing.F).Fuzz().
-// 2. The called function in f.Fuzz(func(){}) should not return result.
-// 3. First argument of func() should be of type *testing.T
-// 4. Second argument onwards should be of type []byte, string, bool, byte,
-// rune, float32, float64, int, int8, int16, int32, int64, uint, uint8, uint16,
-// uint32, uint64
-// 5. func() must not call any *F methods, e.g. (*F).Log, (*F).Error, (*F).Skip
-// The only *F methods that are allowed in the (*F).Fuzz function are (*F).Failed and (*F).Name.
+// checkFuzzCall checks the arguments of f.Fuzz() calls:
+//
+// 1. f.Fuzz() should call a function and it should be of type (*testing.F).Fuzz().
+// 2. The called function in f.Fuzz(func(){}) should not return result.
+// 3. First argument of func() should be of type *testing.T
+// 4. Second argument onwards should be of type []byte, string, bool, byte,
+// rune, float32, float64, int, int8, int16, int32, int64, uint, uint8, uint16,
+// uint32, uint64
+// 5. func() must not call any *F methods, e.g. (*F).Log, (*F).Error, (*F).Skip
+// The only *F methods that are allowed in the (*F).Fuzz function are (*F).Failed and (*F).Name.
+//
// Returns the list of parameters to the fuzz function, if they are valid fuzz parameters.
func checkFuzzCall(pass *analysis.Pass, fn *ast.FuncDecl) (params *types.Tuple) {
ast.Inspect(fn, func(n ast.Node) bool {
@@ -160,7 +162,7 @@ func checkFuzzCall(pass *analysis.Pass, fn *ast.FuncDecl) (params *types.Tuple)
return params
}
-// Check that the arguments of f.Add() calls have the same number and type of arguments as
+// checkAddCalls checks that the arguments of f.Add calls have the same number and type of arguments as
// the signature of the function passed to (*testing.F).Fuzz
func checkAddCalls(pass *analysis.Pass, fn *ast.FuncDecl, params *types.Tuple) {
ast.Inspect(fn, func(n ast.Node) bool {
@@ -267,7 +269,9 @@ func isTestingType(typ types.Type, testingType string) bool {
if !ok {
return false
}
- return named.Obj().Pkg().Path() == "testing" && named.Obj().Name() == testingType
+ obj := named.Obj()
+ // obj.Pkg is nil for the error type.
+ return obj != nil && obj.Pkg() != nil && obj.Pkg().Path() == "testing" && obj.Name() == testingType
}
// Validate that fuzz target function's arguments are of accepted types.
@@ -473,10 +477,12 @@ func checkTest(pass *analysis.Pass, fn *ast.FuncDecl, prefix string) {
if tparams := typeparams.ForFuncType(fn.Type); tparams != nil && len(tparams.List) > 0 {
// Note: cmd/go/internal/load also errors about TestXXX and BenchmarkXXX functions with type parameters.
// We have currently decided to also warn before compilation/package loading. This can help users in IDEs.
+ // TODO(adonovan): use ReportRangef(tparams).
pass.Reportf(fn.Pos(), "%s has type parameters: it will not be run by go test as a %sXXX function", fn.Name.Name, prefix)
}
if !isTestSuffix(fn.Name.Name[len(prefix):]) {
+ // TODO(adonovan): use ReportRangef(fn.Name).
pass.Reportf(fn.Pos(), "%s has malformed name: first letter after '%s' must not be lowercase", fn.Name.Name, prefix)
}
}
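
The rules listed in the checkFuzzCall comment above translate into a fairly rigid shape for fuzz targets. A minimal sketch, not part of the patch, of a target (in a _test.go file) that the tests checker accepts:

package example

import "testing"

func FuzzParse(f *testing.F) {
	f.Add("seed", int64(42)) // seed arguments must match the fuzz function's parameters
	f.Fuzz(func(t *testing.T, s string, n int64) {
		// First parameter is *testing.T; the rest use allowed types.
		if len(s) > 0 && n > 0 {
			t.Log(s) // *testing.T methods are fine here; *testing.F methods are not
		}
	})
}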
diff --git a/go/analysis/passes/timeformat/testdata/src/a/a.go b/go/analysis/passes/timeformat/testdata/src/a/a.go
new file mode 100644
index 000000000..98481446e
--- /dev/null
+++ b/go/analysis/passes/timeformat/testdata/src/a/a.go
@@ -0,0 +1,50 @@
+// Copyright 2022 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// This file contains tests for the timeformat checker.
+
+package a
+
+import (
+ "time"
+
+ "b"
+)
+
+func hasError() {
+ a, _ := time.Parse("2006-02-01 15:04:05", "2021-01-01 00:00:00") // want `2006-02-01 should be 2006-01-02`
+ a.Format(`2006-02-01`) // want `2006-02-01 should be 2006-01-02`
+ a.Format("2006-02-01 15:04:05") // want `2006-02-01 should be 2006-01-02`
+
+ const c = "2006-02-01"
+ a.Format(c) // want `2006-02-01 should be 2006-01-02`
+}
+
+func notHasError() {
+ a, _ := time.Parse("2006-01-02 15:04:05", "2021-01-01 00:00:00")
+ a.Format("2006-01-02")
+
+ const c = "2006-01-02"
+ a.Format(c)
+
+ v := "2006-02-01"
+ a.Format(v) // Allowed through variables.
+
+ m := map[string]string{
+ "y": "2006-02-01",
+ }
+ a.Format(m["y"])
+
+ s := []string{"2006-02-01"}
+ a.Format(s[0])
+
+ a.Format(badFormat())
+
+ o := b.Parse("2006-02-01 15:04:05", "2021-01-01 00:00:00")
+ o.Format("2006-02-01")
+}
+
+func badFormat() string {
+ return "2006-02-01"
+}
diff --git a/go/analysis/passes/timeformat/testdata/src/a/a.go.golden b/go/analysis/passes/timeformat/testdata/src/a/a.go.golden
new file mode 100644
index 000000000..9eccded63
--- /dev/null
+++ b/go/analysis/passes/timeformat/testdata/src/a/a.go.golden
@@ -0,0 +1,50 @@
+// Copyright 2022 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// This file contains tests for the timeformat checker.
+
+package a
+
+import (
+ "time"
+
+ "b"
+)
+
+func hasError() {
+ a, _ := time.Parse("2006-01-02 15:04:05", "2021-01-01 00:00:00") // want `2006-02-01 should be 2006-01-02`
+ a.Format(`2006-01-02`) // want `2006-02-01 should be 2006-01-02`
+ a.Format("2006-01-02 15:04:05") // want `2006-02-01 should be 2006-01-02`
+
+ const c = "2006-02-01"
+ a.Format(c) // want `2006-02-01 should be 2006-01-02`
+}
+
+func notHasError() {
+ a, _ := time.Parse("2006-01-02 15:04:05", "2021-01-01 00:00:00")
+ a.Format("2006-01-02")
+
+ const c = "2006-01-02"
+ a.Format(c)
+
+ v := "2006-02-01"
+ a.Format(v) // Allowed through variables.
+
+ m := map[string]string{
+ "y": "2006-02-01",
+ }
+ a.Format(m["y"])
+
+ s := []string{"2006-02-01"}
+ a.Format(s[0])
+
+ a.Format(badFormat())
+
+ o := b.Parse("2006-02-01 15:04:05", "2021-01-01 00:00:00")
+ o.Format("2006-02-01")
+}
+
+func badFormat() string {
+ return "2006-02-01"
+}
diff --git a/go/analysis/passes/timeformat/testdata/src/b/b.go b/go/analysis/passes/timeformat/testdata/src/b/b.go
new file mode 100644
index 000000000..de5690863
--- /dev/null
+++ b/go/analysis/passes/timeformat/testdata/src/b/b.go
@@ -0,0 +1,11 @@
+package b
+
+type B struct {
+}
+
+func Parse(string, string) B {
+ return B{}
+}
+
+func (b B) Format(string) {
+}
diff --git a/go/analysis/passes/timeformat/timeformat.go b/go/analysis/passes/timeformat/timeformat.go
new file mode 100644
index 000000000..acb198f95
--- /dev/null
+++ b/go/analysis/passes/timeformat/timeformat.go
@@ -0,0 +1,129 @@
+// Copyright 2022 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Package timeformat defines an Analyzer that checks for the use
+// of time.Format or time.Parse calls with a bad format.
+package timeformat
+
+import (
+ "go/ast"
+ "go/constant"
+ "go/token"
+ "go/types"
+ "strings"
+
+ "golang.org/x/tools/go/analysis"
+ "golang.org/x/tools/go/analysis/passes/inspect"
+ "golang.org/x/tools/go/ast/inspector"
+ "golang.org/x/tools/go/types/typeutil"
+)
+
+const badFormat = "2006-02-01"
+const goodFormat = "2006-01-02"
+
+const Doc = `check for calls of (time.Time).Format or time.Parse with 2006-02-01
+
+The timeformat checker looks for time formats with the 2006-02-01 (yyyy-dd-mm)
+format. Internationally, "yyyy-dd-mm" does not occur in common calendar date
+standards, and so it is more likely that 2006-01-02 (yyyy-mm-dd) was intended.
+`
+
+var Analyzer = &analysis.Analyzer{
+ Name: "timeformat",
+ Doc: Doc,
+ Requires: []*analysis.Analyzer{inspect.Analyzer},
+ Run: run,
+}
+
+func run(pass *analysis.Pass) (interface{}, error) {
+ inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector)
+
+ nodeFilter := []ast.Node{
+ (*ast.CallExpr)(nil),
+ }
+ inspect.Preorder(nodeFilter, func(n ast.Node) {
+ call := n.(*ast.CallExpr)
+ fn, ok := typeutil.Callee(pass.TypesInfo, call).(*types.Func)
+ if !ok {
+ return
+ }
+ if !isTimeDotFormat(fn) && !isTimeDotParse(fn) {
+ return
+ }
+ if len(call.Args) > 0 {
+ arg := call.Args[0]
+ badAt := badFormatAt(pass.TypesInfo, arg)
+
+ if badAt > -1 {
+ // Check if it's a literal string, otherwise we can't suggest a fix.
+ if _, ok := arg.(*ast.BasicLit); ok {
+ pos := int(arg.Pos()) + badAt + 1 // +1 to skip the " or `
+ end := pos + len(badFormat)
+
+ pass.Report(analysis.Diagnostic{
+ Pos: token.Pos(pos),
+ End: token.Pos(end),
+ Message: badFormat + " should be " + goodFormat,
+ SuggestedFixes: []analysis.SuggestedFix{{
+ Message: "Replace " + badFormat + " with " + goodFormat,
+ TextEdits: []analysis.TextEdit{{
+ Pos: token.Pos(pos),
+ End: token.Pos(end),
+ NewText: []byte(goodFormat),
+ }},
+ }},
+ })
+ } else {
+ pass.Reportf(arg.Pos(), badFormat+" should be "+goodFormat)
+ }
+ }
+ }
+ })
+ return nil, nil
+}
+
+func isTimeDotFormat(f *types.Func) bool {
+ if f.Name() != "Format" || f.Pkg().Path() != "time" {
+ return false
+ }
+ sig, ok := f.Type().(*types.Signature)
+ if !ok {
+ return false
+ }
+ // Verify that the receiver is time.Time.
+ recv := sig.Recv()
+ if recv == nil {
+ return false
+ }
+ named, ok := recv.Type().(*types.Named)
+ return ok && named.Obj().Name() == "Time"
+}
+
+func isTimeDotParse(f *types.Func) bool {
+ if f.Name() != "Parse" || f.Pkg().Path() != "time" {
+ return false
+ }
+ // Verify that there is no receiver.
+ sig, ok := f.Type().(*types.Signature)
+ return ok && sig.Recv() == nil
+}
+
+// badFormatAt returns the start of a bad format in e, or -1 if no bad format is found.
+func badFormatAt(info *types.Info, e ast.Expr) int {
+ tv, ok := info.Types[e]
+ if !ok { // no type info, assume good
+ return -1
+ }
+
+ t, ok := tv.Type.(*types.Basic)
+ if !ok || t.Info()&types.IsString == 0 {
+ return -1
+ }
+
+ if tv.Value == nil {
+ return -1
+ }
+
+ return strings.Index(constant.StringVal(tv.Value), badFormat)
+}
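
The new analyzer plugs into the usual drivers. A minimal sketch, not part of the patch, of exposing it as a standalone vet-style tool with singlechecker (the main package path is up to the user):

package main

import (
	"golang.org/x/tools/go/analysis/passes/timeformat"
	"golang.org/x/tools/go/analysis/singlechecker"
)

func main() { singlechecker.Main(timeformat.Analyzer) }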
diff --git a/go/analysis/passes/timeformat/timeformat_test.go b/go/analysis/passes/timeformat/timeformat_test.go
new file mode 100644
index 000000000..86bbe1bb3
--- /dev/null
+++ b/go/analysis/passes/timeformat/timeformat_test.go
@@ -0,0 +1,17 @@
+// Copyright 2022 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package timeformat_test
+
+import (
+ "testing"
+
+ "golang.org/x/tools/go/analysis/analysistest"
+ "golang.org/x/tools/go/analysis/passes/timeformat"
+)
+
+func Test(t *testing.T) {
+ testdata := analysistest.TestData()
+ analysistest.RunWithSuggestedFixes(t, testdata, timeformat.Analyzer, "a")
+}
diff --git a/go/analysis/passes/unusedwrite/unusedwrite.go b/go/analysis/passes/unusedwrite/unusedwrite.go
index 37a0e784b..9cc45e0a3 100644
--- a/go/analysis/passes/unusedwrite/unusedwrite.go
+++ b/go/analysis/passes/unusedwrite/unusedwrite.go
@@ -41,7 +41,7 @@ Another example is about non-pointer receiver:
`
// Analyzer reports instances of writes to struct fields and arrays
-//that are never read.
+// that are never read.
var Analyzer = &analysis.Analyzer{
Name: "unusedwrite",
Doc: Doc,
@@ -50,40 +50,49 @@ var Analyzer = &analysis.Analyzer{
}
func run(pass *analysis.Pass) (interface{}, error) {
- // Check the writes to struct and array objects.
- checkStore := func(store *ssa.Store) {
- // Consider field/index writes to an object whose elements are copied and not shared.
- // MapUpdate is excluded since only the reference of the map is copied.
- switch addr := store.Addr.(type) {
- case *ssa.FieldAddr:
- if isDeadStore(store, addr.X, addr) {
- // Report the bug.
+ ssainput := pass.ResultOf[buildssa.Analyzer].(*buildssa.SSA)
+ for _, fn := range ssainput.SrcFuncs {
+ // TODO(taking): Iterate over fn._Instantiations() once exported. If so, have 1 report per Pos().
+ reports := checkStores(fn)
+ for _, store := range reports {
+ switch addr := store.Addr.(type) {
+ case *ssa.FieldAddr:
pass.Reportf(store.Pos(),
"unused write to field %s",
getFieldName(addr.X.Type(), addr.Field))
- }
- case *ssa.IndexAddr:
- if isDeadStore(store, addr.X, addr) {
- // Report the bug.
+ case *ssa.IndexAddr:
pass.Reportf(store.Pos(),
"unused write to array index %s", addr.Index)
}
}
}
+ return nil, nil
+}
- ssainput := pass.ResultOf[buildssa.Analyzer].(*buildssa.SSA)
- for _, fn := range ssainput.SrcFuncs {
- // Visit each block. No need to visit fn.Recover.
- for _, blk := range fn.Blocks {
- for _, instr := range blk.Instrs {
- // Identify writes.
- if store, ok := instr.(*ssa.Store); ok {
- checkStore(store)
+// checkStores returns *Stores in fn whose address is written to but never used.
+func checkStores(fn *ssa.Function) []*ssa.Store {
+ var reports []*ssa.Store
+ // Visit each block. No need to visit fn.Recover.
+ for _, blk := range fn.Blocks {
+ for _, instr := range blk.Instrs {
+ // Identify writes.
+ if store, ok := instr.(*ssa.Store); ok {
+ // Consider field/index writes to an object whose elements are copied and not shared.
+ // MapUpdate is excluded since only the reference of the map is copied.
+ switch addr := store.Addr.(type) {
+ case *ssa.FieldAddr:
+ if isDeadStore(store, addr.X, addr) {
+ reports = append(reports, store)
+ }
+ case *ssa.IndexAddr:
+ if isDeadStore(store, addr.X, addr) {
+ reports = append(reports, store)
+ }
}
}
}
}
- return nil, nil
+ return reports
}
// isDeadStore determines whether a field/index write to an object is dead.
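
The refactoring above only restructures the pass into checkStores; the reported diagnostics are unchanged. A hypothetical example, not from the patch, in the same shape as the analyzer's documented non-pointer-receiver case:

package example

type point struct{ x, y int }

// reset has a value receiver, so the write below modifies a copy.
func (p point) reset() {
	p.x = 0 // reported: unused write to field x
}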
diff --git a/go/analysis/singlechecker/singlechecker.go b/go/analysis/singlechecker/singlechecker.go
index 28530777b..91044ca08 100644
--- a/go/analysis/singlechecker/singlechecker.go
+++ b/go/analysis/singlechecker/singlechecker.go
@@ -11,16 +11,15 @@
// all that is needed to define a standalone tool is a file,
// example.org/findbadness/cmd/findbadness/main.go, containing:
//
-// // The findbadness command runs an analysis.
-// package main
+// // The findbadness command runs an analysis.
+// package main
//
-// import (
-// "example.org/findbadness"
-// "golang.org/x/tools/go/analysis/singlechecker"
-// )
-//
-// func main() { singlechecker.Main(findbadness.Analyzer) }
+// import (
+// "example.org/findbadness"
+// "golang.org/x/tools/go/analysis/singlechecker"
+// )
//
+// func main() { singlechecker.Main(findbadness.Analyzer) }
package singlechecker
import (
diff --git a/go/analysis/unitchecker/main.go b/go/analysis/unitchecker/main.go
index 23acb7ed0..a054a2dce 100644
--- a/go/analysis/unitchecker/main.go
+++ b/go/analysis/unitchecker/main.go
@@ -10,8 +10,8 @@
// It serves as a model for the behavior of the cmd/vet tool in $GOROOT.
// Being based on the unitchecker driver, it must be run by go vet:
//
-// $ go build -o unitchecker main.go
-// $ go vet -vettool=unitchecker my/project/...
+// $ go build -o unitchecker main.go
+// $ go vet -vettool=unitchecker my/project/...
//
// For a checker also capable of running standalone, use multichecker.
package main
diff --git a/go/analysis/unitchecker/unitchecker.go b/go/analysis/unitchecker/unitchecker.go
index b539866dd..37693564e 100644
--- a/go/analysis/unitchecker/unitchecker.go
+++ b/go/analysis/unitchecker/unitchecker.go
@@ -6,13 +6,13 @@
// driver that analyzes a single compilation unit during a build.
// It is invoked by a build system such as "go vet":
//
-// $ go vet -vettool=$(which vet)
+// $ go vet -vettool=$(which vet)
//
// It supports the following command-line protocol:
//
-// -V=full describe executable (to the build tool)
-// -flags describe flags (to the build tool)
-// foo.cfg description of compilation unit (from the build tool)
+// -V=full describe executable (to the build tool)
+// -flags describe flags (to the build tool)
+// foo.cfg description of compilation unit (from the build tool)
//
// This package does not depend on go/packages.
// If you need a standalone tool, use multichecker,
@@ -50,7 +50,7 @@ import (
"golang.org/x/tools/go/analysis"
"golang.org/x/tools/go/analysis/internal/analysisflags"
- "golang.org/x/tools/go/analysis/internal/facts"
+ "golang.org/x/tools/internal/facts"
"golang.org/x/tools/internal/typeparams"
)
@@ -79,11 +79,10 @@ type Config struct {
//
// The protocol required by 'go vet -vettool=...' is that the tool must support:
//
-// -flags describe flags in JSON
-// -V=full describe executable for build caching
-// foo.cfg perform separate modular analyze on the single
-// unit described by a JSON config file foo.cfg.
-//
+// -flags describe flags in JSON
+// -V=full describe executable for build caching
+// foo.cfg perform separate modular analyze on the single
+// unit described by a JSON config file foo.cfg.
func Main(analyzers ...*analysis.Analyzer) {
progname := filepath.Base(os.Args[0])
log.SetFlags(0)
@@ -250,6 +249,10 @@ func run(fset *token.FileSet, cfg *Config, analyzers []*analysis.Analyzer) ([]re
// In VetxOnly mode, analyzers are only for their facts,
// so we can skip any analysis that neither produces facts
// nor depends on any analysis that produces facts.
+ //
+ // TODO(adonovan): fix: the comment (and logic!) here are backwards.
+ // It should say "...nor is required by any...". (Issue 443099)
+ //
// Also build a map to hold working state and result.
type action struct {
once sync.Once
@@ -288,13 +291,13 @@ func run(fset *token.FileSet, cfg *Config, analyzers []*analysis.Analyzer) ([]re
analyzers = filtered
// Read facts from imported packages.
- read := func(path string) ([]byte, error) {
- if vetx, ok := cfg.PackageVetx[path]; ok {
+ read := func(imp *types.Package) ([]byte, error) {
+ if vetx, ok := cfg.PackageVetx[imp.Path()]; ok {
return ioutil.ReadFile(vetx)
}
return nil, nil // no .vetx file, no facts
}
- facts, err := facts.Decode(pkg, read)
+ facts, err := facts.NewDecoder(pkg).Decode(read)
if err != nil {
return nil, err
}
@@ -341,6 +344,7 @@ func run(fset *token.FileSet, cfg *Config, analyzers []*analysis.Analyzer) ([]re
Pkg: pkg,
TypesInfo: info,
TypesSizes: tc.Sizes,
+ TypeErrors: nil, // unitchecker doesn't RunDespiteErrors
ResultOf: inputs,
Report: func(d analysis.Diagnostic) { act.diagnostics = append(act.diagnostics, d) },
ImportObjectFact: facts.ImportObjectFact,
diff --git a/go/analysis/unitchecker/unitchecker_test.go b/go/analysis/unitchecker/unitchecker_test.go
index 7e5b848de..197abd9a1 100644
--- a/go/analysis/unitchecker/unitchecker_test.go
+++ b/go/analysis/unitchecker/unitchecker_test.go
@@ -20,6 +20,7 @@ import (
"strings"
"testing"
+ "golang.org/x/tools/go/analysis/passes/assign"
"golang.org/x/tools/go/analysis/passes/findcall"
"golang.org/x/tools/go/analysis/passes/printf"
"golang.org/x/tools/go/analysis/unitchecker"
@@ -41,6 +42,7 @@ func main() {
unitchecker.Main(
findcall.Analyzer,
printf.Analyzer,
+ assign.Analyzer,
)
}
@@ -75,6 +77,13 @@ func _() {
func MyFunc123() {}
`,
+ "c/c.go": `package c
+
+func _() {
+ i := 5
+ i = i
+}
+`,
}}})
defer exported.Cleanup()
@@ -85,29 +94,71 @@ func MyFunc123() {}
([/._\-a-zA-Z0-9]+[\\/]fake[\\/])?b/b.go:6:13: call of MyFunc123\(...\)
([/._\-a-zA-Z0-9]+[\\/]fake[\\/])?b/b.go:7:11: call of MyFunc123\(...\)
`
+ const wantC = `# golang.org/fake/c
+([/._\-a-zA-Z0-9]+[\\/]fake[\\/])?c/c.go:5:5: self-assignment of i to i
+`
const wantAJSON = `# golang.org/fake/a
\{
"golang.org/fake/a": \{
"findcall": \[
\{
"posn": "([/._\-a-zA-Z0-9]+[\\/]fake[\\/])?a/a.go:4:11",
- "message": "call of MyFunc123\(...\)"
+ "message": "call of MyFunc123\(...\)",
+ "suggested_fixes": \[
+ \{
+ "message": "Add '_TEST_'",
+ "edits": \[
+ \{
+ "filename": "([/._\-a-zA-Z0-9]+[\\/]fake[\\/])?a/a.go",
+ "start": 32,
+ "end": 32,
+ "new": "_TEST_"
+ \}
+ \]
+ \}
+ \]
+ \}
+ \]
+ \}
+\}
+`
+ const wantCJSON = `# golang.org/fake/c
+\{
+ "golang.org/fake/c": \{
+ "assign": \[
+ \{
+ "posn": "([/._\-a-zA-Z0-9]+[\\/]fake[\\/])?c/c.go:5:5",
+ "message": "self-assignment of i to i",
+ "suggested_fixes": \[
+ \{
+ "message": "Remove",
+ "edits": \[
+ \{
+ "filename": "([/._\-a-zA-Z0-9]+[\\/]fake[\\/])?c/c.go",
+ "start": 37,
+ "end": 42,
+ "new": ""
+ \}
+ \]
+ \}
+ \]
\}
\]
\}
\}
`
-
for _, test := range []struct {
- args string
- wantOut string
- wantExit int
+ args string
+ wantOut string
+ wantExitError bool
}{
- {args: "golang.org/fake/a", wantOut: wantA, wantExit: 2},
- {args: "golang.org/fake/b", wantOut: wantB, wantExit: 2},
- {args: "golang.org/fake/a golang.org/fake/b", wantOut: wantA + wantB, wantExit: 2},
- {args: "-json golang.org/fake/a", wantOut: wantAJSON, wantExit: 0},
- {args: "-c=0 golang.org/fake/a", wantOut: wantA + "4 MyFunc123\\(\\)\n", wantExit: 2},
+ {args: "golang.org/fake/a", wantOut: wantA, wantExitError: true},
+ {args: "golang.org/fake/b", wantOut: wantB, wantExitError: true},
+ {args: "golang.org/fake/c", wantOut: wantC, wantExitError: true},
+ {args: "golang.org/fake/a golang.org/fake/b", wantOut: wantA + wantB, wantExitError: true},
+ {args: "-json golang.org/fake/a", wantOut: wantAJSON, wantExitError: false},
+ {args: "-json golang.org/fake/c", wantOut: wantCJSON, wantExitError: false},
+ {args: "-c=0 golang.org/fake/a", wantOut: wantA + "4 MyFunc123\\(\\)\n", wantExitError: true},
} {
cmd := exec.Command("go", "vet", "-vettool="+os.Args[0], "-findcall.name=MyFunc123")
cmd.Args = append(cmd.Args, strings.Fields(test.args)...)
@@ -119,13 +170,17 @@ func MyFunc123() {}
if exitErr, ok := err.(*exec.ExitError); ok {
exitcode = exitErr.ExitCode()
}
- if exitcode != test.wantExit {
- t.Errorf("%s: got exit code %d, want %d", test.args, exitcode, test.wantExit)
+ if (exitcode != 0) != test.wantExitError {
+ want := "zero"
+ if test.wantExitError {
+ want = "nonzero"
+ }
+ t.Errorf("%s: got exit code %d, want %s", test.args, exitcode, want)
}
matched, err := regexp.Match(test.wantOut, out)
if err != nil {
- t.Fatal(err)
+ t.Fatalf("regexp.Match(<<%s>>): %v", test.wantOut, err)
}
if !matched {
t.Errorf("%s: got <<%s>>, want match of regexp <<%s>>", test.args, out, test.wantOut)
diff --git a/go/analysis/validate.go b/go/analysis/validate.go
index 23e57bf02..9da5692af 100644
--- a/go/analysis/validate.go
+++ b/go/analysis/validate.go
@@ -14,6 +14,8 @@ import (
// Validate reports an error if any of the analyzers are misconfigured.
// Checks include:
// that the name is a valid identifier;
+// that the Doc is not empty;
+// that the Run is non-nil;
// that the Requires graph is acyclic;
// that analyzer fact types are unique;
// that each fact type is a pointer.
@@ -46,6 +48,9 @@ func Validate(analyzers []*Analyzer) error {
return fmt.Errorf("analyzer %q is undocumented", a)
}
+ if a.Run == nil {
+ return fmt.Errorf("analyzer %q has nil Run", a)
+ }
// fact types
for _, f := range a.FactTypes {
if f == nil {
diff --git a/go/analysis/validate_test.go b/go/analysis/validate_test.go
index 1116034f7..7f4ee2c05 100644
--- a/go/analysis/validate_test.go
+++ b/go/analysis/validate_test.go
@@ -11,33 +11,43 @@ import (
func TestValidate(t *testing.T) {
var (
+ run = func(p *Pass) (interface{}, error) {
+ return nil, nil
+ }
dependsOnSelf = &Analyzer{
Name: "dependsOnSelf",
Doc: "this analyzer depends on itself",
+ Run: run,
}
inCycleA = &Analyzer{
Name: "inCycleA",
Doc: "this analyzer depends on inCycleB",
+ Run: run,
}
inCycleB = &Analyzer{
Name: "inCycleB",
Doc: "this analyzer depends on inCycleA and notInCycleA",
+ Run: run,
}
pointsToCycle = &Analyzer{
Name: "pointsToCycle",
Doc: "this analyzer depends on inCycleA",
+ Run: run,
}
notInCycleA = &Analyzer{
Name: "notInCycleA",
Doc: "this analyzer depends on notInCycleB and notInCycleC",
+ Run: run,
}
notInCycleB = &Analyzer{
Name: "notInCycleB",
Doc: "this analyzer depends on notInCycleC",
+ Run: run,
}
notInCycleC = &Analyzer{
Name: "notInCycleC",
Doc: "this analyzer has no dependencies",
+ Run: run,
}
)
@@ -116,3 +126,27 @@ func TestCycleInRequiresGraphErrorMessage(t *testing.T) {
t.Errorf("error string %s does not contain expected substring %q", errMsg, wantSubstring)
}
}
+
+func TestValidateEmptyDoc(t *testing.T) {
+ withoutDoc := &Analyzer{
+ Name: "withoutDoc",
+ Run: func(p *Pass) (interface{}, error) {
+ return nil, nil
+ },
+ }
+ err := Validate([]*Analyzer{withoutDoc})
+ if err == nil || !strings.Contains(err.Error(), "is undocumented") {
+ t.Errorf("got unexpected error while validating analyzers withoutDoc: %v", err)
+ }
+}
+
+func TestValidateNoRun(t *testing.T) {
+ withoutRun := &Analyzer{
+ Name: "withoutRun",
+ Doc: "this analyzer has no Run",
+ }
+ err := Validate([]*Analyzer{withoutRun})
+ if err == nil || !strings.Contains(err.Error(), "has nil Run") {
+ t.Errorf("got unexpected error while validating analyzers withoutRun: %v", err)
+ }
+}
diff --git a/go/ast/astutil/enclosing.go b/go/ast/astutil/enclosing.go
index a5c6d6d4f..9fa5aa192 100644
--- a/go/ast/astutil/enclosing.go
+++ b/go/ast/astutil/enclosing.go
@@ -22,9 +22,9 @@ import (
// additional whitespace abutting a node to be enclosed by it.
// In this example:
//
-// z := x + y // add them
-// <-A->
-// <----B----->
+// z := x + y // add them
+// <-A->
+// <----B----->
//
// the ast.BinaryExpr(+) node is considered to enclose interval B
// even though its [Pos()..End()) is actually only interval A.
@@ -43,10 +43,10 @@ import (
// interior whitespace of path[0].
// In this example:
//
-// z := x + y // add them
-// <--C--> <---E-->
-// ^
-// D
+// z := x + y // add them
+// <--C--> <---E-->
+// ^
+// D
//
// intervals C, D and E are inexact. C is contained by the
// z-assignment statement, because it spans three of its children (:=,
@@ -54,12 +54,11 @@ import (
// interior whitespace of the assignment. E is considered interior
// whitespace of the BlockStmt containing the assignment.
//
-// Precondition: [start, end) both lie within the same file as root.
-// TODO(adonovan): return (nil, false) in this case and remove precond.
-// Requires FileSet; see loader.tokenFileContainsPos.
-//
-// Postcondition: path is never nil; it always contains at least 'root'.
-//
+// The resulting path is never empty; it always contains at least the
+// 'root' *ast.File. Ideally PathEnclosingInterval would reject
+// intervals that lie wholly or partially outside the range of the
+// file, but unfortunately ast.File records only the token.Pos of
+// the 'package' keyword, not of the start of the file itself.
func PathEnclosingInterval(root *ast.File, start, end token.Pos) (path []ast.Node, exact bool) {
// fmt.Printf("EnclosingInterval %d %d\n", start, end) // debugging
@@ -135,6 +134,7 @@ func PathEnclosingInterval(root *ast.File, start, end token.Pos) (path []ast.Nod
return false // inexact: overlaps multiple children
}
+ // Ensure [start,end) is nondecreasing.
if start > end {
start, end = end, start
}
@@ -162,7 +162,6 @@ func PathEnclosingInterval(root *ast.File, start, end token.Pos) (path []ast.Nod
// tokenNode is a dummy implementation of ast.Node for a single token.
// They are used transiently by PathEnclosingInterval but never escape
// this package.
-//
type tokenNode struct {
pos token.Pos
end token.Pos
@@ -183,7 +182,6 @@ func tok(pos token.Pos, len int) ast.Node {
// childrenOf returns the direct non-nil children of ast.Node n.
// It may include fake ast.Node implementations for bare tokens.
// it is not safe to call (e.g.) ast.Walk on such nodes.
-//
func childrenOf(n ast.Node) []ast.Node {
var children []ast.Node
@@ -488,7 +486,6 @@ func (sl byPos) Swap(i, j int) {
// TODO(adonovan): in some cases (e.g. Field, FieldList, Ident,
// StarExpr) we could be much more specific given the path to the AST
// root. Perhaps we should do that.
-//
func NodeDescription(n ast.Node) string {
switch n := n.(type) {
case *ast.ArrayType:
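
The rewritten doc comment above describes the path and exact results; a minimal sketch, not part of the patch, of a typical call, with the positions derived from the file's token.File:

package main

import (
	"fmt"
	"go/parser"
	"go/token"
	"strings"

	"golang.org/x/tools/go/ast/astutil"
)

func main() {
	src := "package p; func f() { x := 1; _ = x }"
	fset := token.NewFileSet()
	f, err := parser.ParseFile(fset, "p.go", src, 0)
	if err != nil {
		panic(err)
	}
	off := strings.Index(src, "x :=")
	start := fset.File(f.Pos()).Pos(off) // position of the identifier "x"
	path, exact := astutil.PathEnclosingInterval(f, start, start+1)
	fmt.Printf("%T %v\n", path[0], exact) // innermost node first: *ast.Ident true
}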
diff --git a/go/ast/astutil/enclosing_test.go b/go/ast/astutil/enclosing_test.go
index 5e86ff93c..de96d4496 100644
--- a/go/ast/astutil/enclosing_test.go
+++ b/go/ast/astutil/enclosing_test.go
@@ -40,7 +40,6 @@ func pathToString(path []ast.Node) string {
// findInterval parses input and returns the [start, end) positions of
// the first occurrence of substr in input. f==nil indicates failure;
// an error has already been reported in that case.
-//
func findInterval(t *testing.T, fset *token.FileSet, input, substr string) (f *ast.File, start, end token.Pos) {
f, err := parser.ParseFile(fset, "<input>", input, 0)
if err != nil {
diff --git a/go/ast/astutil/imports.go b/go/ast/astutil/imports.go
index 2087ceec9..18d1adb05 100644
--- a/go/ast/astutil/imports.go
+++ b/go/ast/astutil/imports.go
@@ -22,8 +22,11 @@ func AddImport(fset *token.FileSet, f *ast.File, path string) (added bool) {
// If name is not empty, it is used to rename the import.
//
// For example, calling
+//
// AddNamedImport(fset, f, "pathpkg", "path")
+//
// adds
+//
// import pathpkg "path"
func AddNamedImport(fset *token.FileSet, f *ast.File, name, path string) (added bool) {
if imports(f, name, path) {
@@ -270,8 +273,8 @@ func DeleteNamedImport(fset *token.FileSet, f *ast.File, name, path string) (del
}
if j > 0 {
lastImpspec := gen.Specs[j-1].(*ast.ImportSpec)
- lastLine := fset.Position(lastImpspec.Path.ValuePos).Line
- line := fset.Position(impspec.Path.ValuePos).Line
+ lastLine := fset.PositionFor(lastImpspec.Path.ValuePos, false).Line
+ line := fset.PositionFor(impspec.Path.ValuePos, false).Line
// We deleted an entry but now there may be
// a blank line-sized hole where the import was.
diff --git a/go/ast/astutil/imports_test.go b/go/ast/astutil/imports_test.go
index 68f05ab6d..2a383e467 100644
--- a/go/ast/astutil/imports_test.go
+++ b/go/ast/astutil/imports_test.go
@@ -1654,6 +1654,34 @@ import f "fmt"
`,
unchanged: true,
},
+ // this test panics without PositionFor in DeleteNamedImport
+ {
+ name: "import.44",
+ pkg: "foo.com/other/v3",
+ renamedPkg: "",
+ in: `package main
+//line mah.go:600
+
+import (
+"foo.com/a.thing"
+"foo.com/surprise"
+"foo.com/v1"
+"foo.com/other/v2"
+"foo.com/other/v3"
+)
+`,
+ out: `package main
+
+//line mah.go:600
+
+import (
+ "foo.com/a.thing"
+ "foo.com/other/v2"
+ "foo.com/surprise"
+ "foo.com/v1"
+)
+`,
+ },
}
func TestDeleteImport(t *testing.T) {
diff --git a/go/ast/astutil/rewrite.go b/go/ast/astutil/rewrite.go
index 6d9ca23e2..f430b21b9 100644
--- a/go/ast/astutil/rewrite.go
+++ b/go/ast/astutil/rewrite.go
@@ -41,7 +41,6 @@ type ApplyFunc func(*Cursor) bool
// Children are traversed in the order in which they appear in the
// respective node's struct definition. A package's files are
// traversed in the filenames' alphabetical order.
-//
func Apply(root ast.Node, pre, post ApplyFunc) (result ast.Node) {
parent := &struct{ ast.Node }{root}
defer func() {
@@ -65,8 +64,8 @@ var abort = new(int) // singleton, to signal termination of Apply
// c.Parent(), and f is the field identifier with name c.Name(),
// the following invariants hold:
//
-// p.f == c.Node() if c.Index() < 0
-// p.f[c.Index()] == c.Node() if c.Index() >= 0
+// p.f == c.Node() if c.Index() < 0
+// p.f[c.Index()] == c.Node() if c.Index() >= 0
//
// The methods Replace, Delete, InsertBefore, and InsertAfter
// can be used to change the AST without disrupting Apply.
@@ -294,6 +293,9 @@ func (a *application) apply(parent ast.Node, name string, iter *iterator, n ast.
a.apply(n, "Fields", nil, n.Fields)
case *ast.FuncType:
+ if tparams := typeparams.ForFuncType(n); tparams != nil {
+ a.apply(n, "TypeParams", nil, tparams)
+ }
a.apply(n, "Params", nil, n.Params)
a.apply(n, "Results", nil, n.Results)
@@ -406,6 +408,9 @@ func (a *application) apply(parent ast.Node, name string, iter *iterator, n ast.
case *ast.TypeSpec:
a.apply(n, "Doc", nil, n.Doc)
a.apply(n, "Name", nil, n.Name)
+ if tparams := typeparams.ForTypeSpec(n); tparams != nil {
+ a.apply(n, "TypeParams", nil, tparams)
+ }
a.apply(n, "Type", nil, n.Type)
a.apply(n, "Comment", nil, n.Comment)
diff --git a/go/ast/astutil/rewrite_test.go b/go/ast/astutil/rewrite_test.go
index 9d23170a5..4ef6fe99d 100644
--- a/go/ast/astutil/rewrite_test.go
+++ b/go/ast/astutil/rewrite_test.go
@@ -202,20 +202,30 @@ func init() {
type T[P1, P2 any] int
type R T[int, string]
+
+func F[Q1 any](q Q1) {}
`,
+ // TODO: note how the rewrite adds a trailing comma in "func F".
+ // Is that a bug in the test, or in astutil.Apply?
want: `package p
-type S[P1, P2 any] int32
+type S[R1, P2 any] int32
type R S[int32, string]
+
+func F[X1 any](q X1,) {}
`,
post: func(c *astutil.Cursor) bool {
if ident, ok := c.Node().(*ast.Ident); ok {
- if ident.Name == "int" {
+ switch ident.Name {
+ case "int":
c.Replace(ast.NewIdent("int32"))
- }
- if ident.Name == "T" {
+ case "T":
c.Replace(ast.NewIdent("S"))
+ case "P1":
+ c.Replace(ast.NewIdent("R1"))
+ case "Q1":
+ c.Replace(ast.NewIdent("X1"))
}
}
return true
diff --git a/go/ast/inspector/inspector.go b/go/ast/inspector/inspector.go
index af5e17fee..3fbfebf36 100644
--- a/go/ast/inspector/inspector.go
+++ b/go/ast/inspector/inspector.go
@@ -53,10 +53,13 @@ func New(files []*ast.File) *Inspector {
// of an ast.Node during a traversal.
type event struct {
node ast.Node
- typ uint64 // typeOf(node)
- index int // 1 + index of corresponding pop event, or 0 if this is a pop
+ typ uint64 // typeOf(node) on push events; on pop events, the union of typ over the events strictly between the push and its pop
+ index int // index of corresponding push or pop event
}
+// TODO: Experiment with storing only the second word of event.node (unsafe.Pointer).
+// Type can be recovered from the sole bit in typ.
+
// Preorder visits all the nodes of the files supplied to New in
// depth-first order. It calls f(n) for each node n before it visits
// n's children.
@@ -72,10 +75,17 @@ func (in *Inspector) Preorder(types []ast.Node, f func(ast.Node)) {
mask := maskOf(types)
for i := 0; i < len(in.events); {
ev := in.events[i]
- if ev.typ&mask != 0 {
- if ev.index > 0 {
+ if ev.index > i {
+ // push
+ if ev.typ&mask != 0 {
f(ev.node)
}
+ pop := ev.index
+ if in.events[pop].typ&mask == 0 {
+ // Subtrees do not contain types: skip them and pop.
+ i = pop + 1
+ continue
+ }
}
i++
}
@@ -94,15 +104,24 @@ func (in *Inspector) Nodes(types []ast.Node, f func(n ast.Node, push bool) (proc
mask := maskOf(types)
for i := 0; i < len(in.events); {
ev := in.events[i]
- if ev.typ&mask != 0 {
- if ev.index > 0 {
- // push
+ if ev.index > i {
+ // push
+ pop := ev.index
+ if ev.typ&mask != 0 {
if !f(ev.node, true) {
- i = ev.index // jump to corresponding pop + 1
+ i = pop + 1 // jump to corresponding pop + 1
continue
}
- } else {
- // pop
+ }
+ if in.events[pop].typ&mask == 0 {
+ // Subtrees do not contain types: skip them.
+ i = pop
+ continue
+ }
+ } else {
+ // pop
+ push := ev.index
+ if in.events[push].typ&mask != 0 {
f(ev.node, false)
}
}
@@ -119,19 +138,26 @@ func (in *Inspector) WithStack(types []ast.Node, f func(n ast.Node, push bool, s
var stack []ast.Node
for i := 0; i < len(in.events); {
ev := in.events[i]
- if ev.index > 0 {
+ if ev.index > i {
// push
+ pop := ev.index
stack = append(stack, ev.node)
if ev.typ&mask != 0 {
if !f(ev.node, true, stack) {
- i = ev.index
+ i = pop + 1
stack = stack[:len(stack)-1]
continue
}
}
+ if in.events[pop].typ&mask == 0 {
+ // Subtrees do not contain types: skip them.
+ i = pop
+ continue
+ }
} else {
// pop
- if ev.typ&mask != 0 {
+ push := ev.index
+ if in.events[push].typ&mask != 0 {
f(ev.node, false, stack)
}
stack = stack[:len(stack)-1]
@@ -157,25 +183,31 @@ func traverse(files []*ast.File) []event {
events := make([]event, 0, capacity)
var stack []event
+ stack = append(stack, event{}) // include an extra event so file nodes have a parent
for _, f := range files {
ast.Inspect(f, func(n ast.Node) bool {
if n != nil {
// push
ev := event{
node: n,
- typ: typeOf(n),
+ typ: 0, // temporarily used to accumulate type bits of subtree
index: len(events), // push event temporarily holds own index
}
stack = append(stack, ev)
events = append(events, ev)
} else {
// pop
- ev := stack[len(stack)-1]
- stack = stack[:len(stack)-1]
+ top := len(stack) - 1
+ ev := stack[top]
+ typ := typeOf(ev.node)
+ push := ev.index
+ parent := top - 1
- events[ev.index].index = len(events) + 1 // make push refer to pop
+ events[push].typ = typ // set type of push
+ stack[parent].typ |= typ | ev.typ // parent's typ contains push and pop's typs.
+ events[push].index = len(events) // make push refer to pop
- ev.index = 0 // turn ev into a pop event
+ stack = stack[:top]
events = append(events, ev)
}
return true
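
The reworked event encoding above stores typeOf(node) on the push event and, on the matching pop event, the union of the type bits of everything strictly inside the subtree, which is what lets Preorder, Nodes, and WithStack skip whole subtrees whose pop mask does not intersect the filter. A minimal usage sketch of the exported API (outside this patch; the tiny source string is an illustrative assumption):

package main

import (
	"fmt"
	"go/ast"
	"go/parser"
	"go/token"

	"golang.org/x/tools/go/ast/inspector"
)

func main() {
	const src = `package p
func f() { g(); h() }
func g() {}
func h() {}
`
	fset := token.NewFileSet()
	file, err := parser.ParseFile(fset, "p.go", src, 0)
	if err != nil {
		panic(err)
	}
	in := inspector.New([]*ast.File{file})
	// Visit only call expressions; subtrees that cannot contain a
	// *ast.CallExpr are skipped via the pop-event type mask.
	in.Preorder([]ast.Node{(*ast.CallExpr)(nil)}, func(n ast.Node) {
		call := n.(*ast.CallExpr)
		fmt.Println(fset.Position(call.Pos()), "call expression")
	})
}
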
diff --git a/go/ast/inspector/inspector_test.go b/go/ast/inspector/inspector_test.go
index 9e5391896..e88d584b5 100644
--- a/go/ast/inspector/inspector_test.go
+++ b/go/ast/inspector/inspector_test.go
@@ -244,9 +244,11 @@ func typeOf(n ast.Node) string {
// but a break-even point (NewInspector/(ASTInspect-Inspect)) of about 5
// traversals.
//
-// BenchmarkNewInspector 4.5 ms
-// BenchmarkNewInspect 0.33ms
-// BenchmarkASTInspect 1.2 ms
+// BenchmarkASTInspect 1.0 ms
+// BenchmarkNewInspector 2.2 ms
+// BenchmarkInspect 0.39ms
+// BenchmarkInspectFilter 0.01ms
+// BenchmarkInspectCalls 0.14ms
func BenchmarkNewInspector(b *testing.B) {
// Measure one-time construction overhead.
@@ -274,6 +276,42 @@ func BenchmarkInspect(b *testing.B) {
}
}
+func BenchmarkInspectFilter(b *testing.B) {
+ b.StopTimer()
+ inspect := inspector.New(netFiles)
+ b.StartTimer()
+
+ // Measure marginal cost of traversal.
+ nodeFilter := []ast.Node{(*ast.FuncDecl)(nil), (*ast.FuncLit)(nil)}
+ var ndecls, nlits int
+ for i := 0; i < b.N; i++ {
+ inspect.Preorder(nodeFilter, func(n ast.Node) {
+ switch n.(type) {
+ case *ast.FuncDecl:
+ ndecls++
+ case *ast.FuncLit:
+ nlits++
+ }
+ })
+ }
+}
+
+func BenchmarkInspectCalls(b *testing.B) {
+ b.StopTimer()
+ inspect := inspector.New(netFiles)
+ b.StartTimer()
+
+ // Measure marginal cost of traversal.
+ nodeFilter := []ast.Node{(*ast.CallExpr)(nil)}
+ var ncalls int
+ for i := 0; i < b.N; i++ {
+ inspect.Preorder(nodeFilter, func(n ast.Node) {
+ _ = n.(*ast.CallExpr)
+ ncalls++
+ })
+ }
+}
+
func BenchmarkASTInspect(b *testing.B) {
var ndecls, nlits int
for i := 0; i < b.N; i++ {
diff --git a/go/ast/inspector/typeof.go b/go/ast/inspector/typeof.go
index 11f4fc369..703c81395 100644
--- a/go/ast/inspector/typeof.go
+++ b/go/ast/inspector/typeof.go
@@ -11,6 +11,7 @@ package inspector
import (
"go/ast"
+ "math"
"golang.org/x/tools/internal/typeparams"
)
@@ -77,12 +78,14 @@ const (
// typeOf returns a distinct single-bit value that represents the type of n.
//
// Various implementations were benchmarked with BenchmarkNewInspector:
-// GOGC=off
-// - type switch 4.9-5.5ms 2.1ms
-// - binary search over a sorted list of types 5.5-5.9ms 2.5ms
-// - linear scan, frequency-ordered list 5.9-6.1ms 2.7ms
-// - linear scan, unordered list 6.4ms 2.7ms
-// - hash table 6.5ms 3.1ms
+//
+// GOGC=off
+// - type switch 4.9-5.5ms 2.1ms
+// - binary search over a sorted list of types 5.5-5.9ms 2.5ms
+// - linear scan, frequency-ordered list 5.9-6.1ms 2.7ms
+// - linear scan, unordered list 6.4ms 2.7ms
+// - hash table 6.5ms 3.1ms
+//
// A perfect hash seemed like overkill.
//
// The compiler's switch statement is the clear winner
@@ -90,7 +93,6 @@ const (
// with constant conditions and good branch prediction.
// (Sadly it is the most verbose in source code.)
// Binary search suffered from poor branch prediction.
-//
func typeOf(n ast.Node) uint64 {
// Fast path: nearly half of all nodes are identifiers.
if _, ok := n.(*ast.Ident); ok {
@@ -217,7 +219,7 @@ func typeOf(n ast.Node) uint64 {
func maskOf(nodes []ast.Node) uint64 {
if nodes == nil {
- return 1<<64 - 1 // match all node types
+ return math.MaxUint64 // match all node types
}
var mask uint64
for _, n := range nodes {
diff --git a/go/buildutil/allpackages.go b/go/buildutil/allpackages.go
index c0cb03e7b..dfb8cd6c7 100644
--- a/go/buildutil/allpackages.go
+++ b/go/buildutil/allpackages.go
@@ -28,7 +28,6 @@ import (
//
// All I/O is done via the build.Context file system interface,
// which must be concurrency-safe.
-//
func AllPackages(ctxt *build.Context) []string {
var list []string
ForEachPackage(ctxt, func(pkg string, _ error) {
@@ -48,7 +47,6 @@ func AllPackages(ctxt *build.Context) []string {
//
// All I/O is done via the build.Context file system interface,
// which must be concurrency-safe.
-//
func ForEachPackage(ctxt *build.Context, found func(importPath string, err error)) {
ch := make(chan item)
@@ -127,19 +125,18 @@ func allPackages(ctxt *build.Context, root string, ch chan<- item) {
// ExpandPatterns returns the set of packages matched by patterns,
// which may have the following forms:
//
-// golang.org/x/tools/cmd/guru # a single package
-// golang.org/x/tools/... # all packages beneath dir
-// ... # the entire workspace.
+// golang.org/x/tools/cmd/guru # a single package
+// golang.org/x/tools/... # all packages beneath dir
+// ... # the entire workspace.
//
// Order is significant: a pattern preceded by '-' removes matching
// packages from the set. For example, these patterns match all encoding
// packages except encoding/xml:
//
-// encoding/... -encoding/xml
+// encoding/... -encoding/xml
//
// A trailing slash in a pattern is ignored. (Path components of Go
// package names are separated by slash, not the platform's path separator.)
-//
func ExpandPatterns(ctxt *build.Context, patterns []string) map[string]bool {
// TODO(adonovan): support other features of 'go list':
// - "std"/"cmd"/"all" meta-packages
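
A minimal sketch of ExpandPatterns in use, assuming the standard build.Default context and a populated GOPATH/GOROOT; the patterns mirror the example in the doc comment above:

package main

import (
	"fmt"
	"go/build"
	"sort"

	"golang.org/x/tools/go/buildutil"
)

func main() {
	// All encoding packages except encoding/xml, per the pattern
	// semantics documented for ExpandPatterns.
	set := buildutil.ExpandPatterns(&build.Default, []string{"encoding/...", "-encoding/xml"})
	var pkgs []string
	for p := range set {
		pkgs = append(pkgs, p)
	}
	sort.Strings(pkgs)
	for _, p := range pkgs {
		fmt.Println(p)
	}
}
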
diff --git a/go/buildutil/fakecontext.go b/go/buildutil/fakecontext.go
index 5fc672fd5..15025f645 100644
--- a/go/buildutil/fakecontext.go
+++ b/go/buildutil/fakecontext.go
@@ -30,7 +30,6 @@ import (
// /go/src/ including, for instance, "math" and "math/big".
// ReadDir("/go/src/math/big") would return all the files in the
// "math/big" package.
-//
func FakeContext(pkgs map[string]map[string]string) *build.Context {
clean := func(filename string) string {
f := path.Clean(filepath.ToSlash(filename))
diff --git a/go/buildutil/overlay.go b/go/buildutil/overlay.go
index 8e239086b..bdbfd9314 100644
--- a/go/buildutil/overlay.go
+++ b/go/buildutil/overlay.go
@@ -60,8 +60,7 @@ func OverlayContext(orig *build.Context, overlay map[string][]byte) *build.Conte
// ParseOverlayArchive parses an archive containing Go files and their
// contents. The result is intended to be used with OverlayContext.
//
-//
-// Archive format
+// # Archive format
//
// The archive consists of a series of files. Each file consists of a
// name, a decimal file size and the file contents, separated by
diff --git a/go/buildutil/tags.go b/go/buildutil/tags.go
index 6da0ce484..7cf523bca 100644
--- a/go/buildutil/tags.go
+++ b/go/buildutil/tags.go
@@ -20,7 +20,8 @@ const TagsFlagDoc = "a list of `build tags` to consider satisfied during the bui
// See $GOROOT/src/cmd/go/doc.go for description of 'go build -tags' flag.
//
// Example:
-// flag.Var((*buildutil.TagsFlag)(&build.Default.BuildTags), "tags", buildutil.TagsFlagDoc)
+//
+// flag.Var((*buildutil.TagsFlag)(&build.Default.BuildTags), "tags", buildutil.TagsFlagDoc)
type TagsFlag []string
func (v *TagsFlag) Set(s string) error {
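
The one-line example in the doc comment, expanded into a runnable sketch; the flag name "tags" and the printed message are assumptions:

package main

import (
	"flag"
	"fmt"
	"go/build"

	"golang.org/x/tools/go/buildutil"
)

func main() {
	// Let users extend the default build context, e.g. -tags "integration debug".
	flag.Var((*buildutil.TagsFlag)(&build.Default.BuildTags), "tags", buildutil.TagsFlagDoc)
	flag.Parse()
	fmt.Println("build tags:", build.Default.BuildTags)
}
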
diff --git a/go/buildutil/util.go b/go/buildutil/util.go
index fc923d7a7..bee6390de 100644
--- a/go/buildutil/util.go
+++ b/go/buildutil/util.go
@@ -28,7 +28,6 @@ import (
// filename that will be attached to the ASTs.
//
// TODO(adonovan): call this from go/loader.parseFiles when the tree thaws.
-//
func ParseFile(fset *token.FileSet, ctxt *build.Context, displayPath func(string) string, dir string, file string, mode parser.Mode) (*ast.File, error) {
if !IsAbsPath(ctxt, file) {
file = JoinPath(ctxt, dir, file)
@@ -51,7 +50,6 @@ func ParseFile(fset *token.FileSet, ctxt *build.Context, displayPath func(string
//
// The '...Files []string' fields of the resulting build.Package are not
// populated (build.FindOnly mode).
-//
func ContainingPackage(ctxt *build.Context, dir, filename string) (*build.Package, error) {
if !IsAbsPath(ctxt, filename) {
filename = JoinPath(ctxt, dir, filename)
@@ -82,7 +80,7 @@ func ContainingPackage(ctxt *build.Context, dir, filename string) (*build.Packag
// (go/build.Context defines these as methods, but does not export them.)
-// hasSubdir calls ctxt.HasSubdir (if not nil) or else uses
+// HasSubdir calls ctxt.HasSubdir (if not nil) or else uses
// the local file system to answer the question.
func HasSubdir(ctxt *build.Context, root, dir string) (rel string, ok bool) {
if f := ctxt.HasSubdir; f != nil {
@@ -196,7 +194,6 @@ func SplitPathList(ctxt *build.Context, s string) []string {
// sameFile returns true if x and y have the same basename and denote
// the same file.
-//
func sameFile(x, y string) bool {
if path.Clean(x) == path.Clean(y) {
return true
diff --git a/go/callgraph/callgraph.go b/go/callgraph/callgraph.go
index 2bcc3dcc8..905623753 100644
--- a/go/callgraph/callgraph.go
+++ b/go/callgraph/callgraph.go
@@ -3,7 +3,6 @@
// license that can be found in the LICENSE file.
/*
-
Package callgraph defines the call graph and various algorithms
and utilities to operate on it.
@@ -30,7 +29,6 @@ calling main() and init().
Calls to built-in functions (e.g. panic, println) are not represented
in the call graph; they are treated like built-in operators of the
language.
-
*/
package callgraph // import "golang.org/x/tools/go/callgraph"
@@ -39,6 +37,8 @@ package callgraph // import "golang.org/x/tools/go/callgraph"
// More generally, we could eliminate "uninteresting" nodes such as
// nodes from packages we don't care about.
+// TODO(zpavlinovic): decide how callgraphs handle calls to and from generic function bodies.
+
import (
"fmt"
"go/token"
@@ -51,7 +51,6 @@ import (
// A graph may contain nodes that are not reachable from the root.
// If the call graph is sound, such nodes indicate unreachable
// functions.
-//
type Graph struct {
Root *Node // the distinguished root node
Nodes map[*ssa.Function]*Node // all nodes by function
diff --git a/go/callgraph/callgraph_test.go b/go/callgraph/callgraph_test.go
new file mode 100644
index 000000000..dd6baafa5
--- /dev/null
+++ b/go/callgraph/callgraph_test.go
@@ -0,0 +1,253 @@
+// Copyright 2023 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+package callgraph_test
+
+import (
+ "log"
+ "sync"
+ "testing"
+
+ "golang.org/x/tools/go/callgraph"
+ "golang.org/x/tools/go/callgraph/cha"
+ "golang.org/x/tools/go/callgraph/rta"
+ "golang.org/x/tools/go/callgraph/static"
+ "golang.org/x/tools/go/callgraph/vta"
+ "golang.org/x/tools/go/loader"
+ "golang.org/x/tools/go/pointer"
+ "golang.org/x/tools/go/ssa"
+ "golang.org/x/tools/go/ssa/ssautil"
+)
+
+// Benchmarks comparing different callgraph algorithms implemented in
+// x/tools/go/callgraph. The comparison covers speed, memory, and precision.
+// Fewer edges and fewer reachable nodes implies a more precise result.
+// Comparison is done on a hello world http server using net/http.
+//
+// Current results were collected on an i7 MacBook with go version devel go1.20-2730.
+// The numbers of nodes, edges, and reachable functions are expected to vary between
+// go versions. Timing results are expected to vary between machines.
+// BenchmarkStatic-12 53 ms/op 6 MB/op 12113 nodes 37355 edges 1522 reachable
+// BenchmarkCHA-12 86 ms/op 16 MB/op 12113 nodes 131717 edges 7640 reachable
+// BenchmarkRTA-12 110 ms/op 12 MB/op 6566 nodes 42291 edges 5099 reachable
+// BenchmarkPTA-12 1427 ms/op 600 MB/op 8714 nodes 28244 edges 4184 reachable
+// BenchmarkVTA-12 600 ms/op 78 MB/op 12114 nodes 44861 edges 4919 reachable
+// BenchmarkVTA2-12 793 ms/op 104 MB/op 5450 nodes 22208 edges 4042 reachable
+// BenchmarkVTA3-12 977 ms/op 124 MB/op 4621 nodes 19331 edges 3700 reachable
+// BenchmarkVTAAlt-12 372 ms/op 57 MB/op 7763 nodes 29912 edges 4258 reachable
+// BenchmarkVTAAlt2-12 570 ms/op 78 MB/op 4838 nodes 20169 edges 3737 reachable
+//
+// Note:
+// * Static is unsound and may miss real edges.
+// * RTA starts from a main function and only includes reachable functions.
+// * CHA starts from all functions.
+// * VTA, VTA2, and VTA3 start from all functions and the CHA callgraph.
+// VTA2 and VTA3 are the result of re-applying VTA to the functions reachable
+// from main() via the callgraph of the previous stage.
+// * VTAAlt and VTAAlt2 start from the functions reachable from main via the
+// CHA callgraph.
+// * All algorithms are unsound w.r.t. reflection.
+
+const httpEx = `package main
+
+import (
+ "fmt"
+ "net/http"
+)
+
+func hello(w http.ResponseWriter, req *http.Request) {
+ fmt.Fprintf(w, "hello world\n")
+}
+
+func main() {
+ http.HandleFunc("/hello", hello)
+ http.ListenAndServe(":8090", nil)
+}
+`
+
+var (
+ once sync.Once
+ prog *ssa.Program
+ main *ssa.Function
+)
+
+func example() (*ssa.Program, *ssa.Function) {
+ once.Do(func() {
+ var conf loader.Config
+ f, err := conf.ParseFile("<input>", httpEx)
+ if err != nil {
+ log.Fatal(err)
+ }
+ conf.CreateFromFiles(f.Name.Name, f)
+
+ lprog, err := conf.Load()
+ if err != nil {
+ log.Fatalf("test 'package %s': Load: %s", f.Name.Name, err)
+ }
+ prog = ssautil.CreateProgram(lprog, ssa.InstantiateGenerics)
+ prog.Build()
+
+ main = prog.Package(lprog.Created[0].Pkg).Members["main"].(*ssa.Function)
+ })
+ return prog, main
+}
+
+var stats bool = false // print stats?
+
+func logStats(b *testing.B, cnd bool, name string, cg *callgraph.Graph, main *ssa.Function) {
+ if cnd && stats {
+ e := 0
+ for _, n := range cg.Nodes {
+ e += len(n.Out)
+ }
+ r := len(reaches(main, cg, false))
+ b.Logf("%s:\t%d nodes\t%d edges\t%d reachable", name, len(cg.Nodes), e, r)
+ }
+}
+
+func BenchmarkStatic(b *testing.B) {
+ b.StopTimer()
+ prog, main := example()
+ b.StartTimer()
+
+ for i := 0; i < b.N; i++ {
+ cg := static.CallGraph(prog)
+ logStats(b, i == 0, "static", cg, main)
+ }
+}
+
+func BenchmarkCHA(b *testing.B) {
+ b.StopTimer()
+ prog, main := example()
+ b.StartTimer()
+
+ for i := 0; i < b.N; i++ {
+ cg := cha.CallGraph(prog)
+ logStats(b, i == 0, "cha", cg, main)
+ }
+}
+
+func BenchmarkRTA(b *testing.B) {
+ b.StopTimer()
+ _, main := example()
+ b.StartTimer()
+
+ for i := 0; i < b.N; i++ {
+ res := rta.Analyze([]*ssa.Function{main}, true)
+ cg := res.CallGraph
+ logStats(b, i == 0, "rta", cg, main)
+ }
+}
+
+func BenchmarkPTA(b *testing.B) {
+ b.StopTimer()
+ _, main := example()
+ b.StartTimer()
+
+ for i := 0; i < b.N; i++ {
+ config := &pointer.Config{Mains: []*ssa.Package{main.Pkg}, BuildCallGraph: true}
+ res, err := pointer.Analyze(config)
+ if err != nil {
+ b.Fatal(err)
+ }
+ logStats(b, i == 0, "pta", res.CallGraph, main)
+ }
+}
+
+func BenchmarkVTA(b *testing.B) {
+ b.StopTimer()
+ prog, main := example()
+ b.StartTimer()
+
+ for i := 0; i < b.N; i++ {
+ cg := vta.CallGraph(ssautil.AllFunctions(prog), cha.CallGraph(prog))
+ logStats(b, i == 0, "vta", cg, main)
+ }
+}
+
+func BenchmarkVTA2(b *testing.B) {
+ b.StopTimer()
+ prog, main := example()
+ b.StartTimer()
+
+ for i := 0; i < b.N; i++ {
+ vta1 := vta.CallGraph(ssautil.AllFunctions(prog), cha.CallGraph(prog))
+ cg := vta.CallGraph(reaches(main, vta1, true), vta1)
+ logStats(b, i == 0, "vta2", cg, main)
+ }
+}
+
+func BenchmarkVTA3(b *testing.B) {
+ b.StopTimer()
+ prog, main := example()
+ b.StartTimer()
+
+ for i := 0; i < b.N; i++ {
+ vta1 := vta.CallGraph(ssautil.AllFunctions(prog), cha.CallGraph(prog))
+ vta2 := vta.CallGraph(reaches(main, vta1, true), vta1)
+ cg := vta.CallGraph(reaches(main, vta2, true), vta2)
+ logStats(b, i == 0, "vta3", cg, main)
+ }
+}
+
+func BenchmarkVTAAlt(b *testing.B) {
+ b.StopTimer()
+ prog, main := example()
+ b.StartTimer()
+
+ for i := 0; i < b.N; i++ {
+ cha := cha.CallGraph(prog)
+ cg := vta.CallGraph(reaches(main, cha, true), cha) // start from only functions reachable by CHA.
+ logStats(b, i == 0, "vta-alt", cg, main)
+ }
+}
+
+func BenchmarkVTAAlt2(b *testing.B) {
+ b.StopTimer()
+ prog, main := example()
+ b.StartTimer()
+
+ for i := 0; i < b.N; i++ {
+ cha := cha.CallGraph(prog)
+ vta1 := vta.CallGraph(reaches(main, cha, true), cha)
+ cg := vta.CallGraph(reaches(main, vta1, true), vta1)
+ logStats(b, i == 0, "vta-alt2", cg, main)
+ }
+}
+
+// reaches computes the transitive closure of functions forward reachable
+// via calls in cg starting from `source`. If refs is true, include
+// functions referred to in an instruction.
+func reaches(source *ssa.Function, cg *callgraph.Graph, refs bool) map[*ssa.Function]bool {
+ seen := make(map[*ssa.Function]bool)
+ var visit func(f *ssa.Function)
+ visit = func(f *ssa.Function) {
+ if seen[f] {
+ return
+ }
+ seen[f] = true
+
+ if n := cg.Nodes[f]; n != nil {
+ for _, e := range n.Out {
+ if e.Site != nil {
+ visit(e.Callee.Func)
+ }
+ }
+ }
+
+ if refs {
+ var buf [10]*ssa.Value // avoid alloc in common case
+ for _, b := range f.Blocks {
+ for _, instr := range b.Instrs {
+ for _, op := range instr.Operands(buf[:0]) {
+ if fn, ok := (*op).(*ssa.Function); ok {
+ visit(fn)
+ }
+ }
+ }
+ }
+ }
+ }
+ visit(source)
+ return seen
+}
diff --git a/go/callgraph/cha/cha.go b/go/callgraph/cha/cha.go
index 215ff173d..6296d48d9 100644
--- a/go/callgraph/cha/cha.go
+++ b/go/callgraph/cha/cha.go
@@ -20,9 +20,10 @@
// Since CHA conservatively assumes that all functions are address-taken
// and all concrete types are put into interfaces, it is sound to run on
// partial programs, such as libraries without a main or test function.
-//
package cha // import "golang.org/x/tools/go/callgraph/cha"
+// TODO(zpavlinovic): update CHA for how it handles generic function bodies.
+
import (
"go/types"
@@ -34,12 +35,59 @@ import (
// CallGraph computes the call graph of the specified program using the
// Class Hierarchy Analysis algorithm.
-//
func CallGraph(prog *ssa.Program) *callgraph.Graph {
cg := callgraph.New(nil) // TODO(adonovan) eliminate concept of rooted callgraph
allFuncs := ssautil.AllFunctions(prog)
+ calleesOf := lazyCallees(allFuncs)
+
+ addEdge := func(fnode *callgraph.Node, site ssa.CallInstruction, g *ssa.Function) {
+ gnode := cg.CreateNode(g)
+ callgraph.AddEdge(fnode, site, gnode)
+ }
+
+ addEdges := func(fnode *callgraph.Node, site ssa.CallInstruction, callees []*ssa.Function) {
+ // Because every call to a highly polymorphic and
+ // frequently used abstract method such as
+ // (io.Writer).Write is assumed to call every concrete
+ // Write method in the program, the call graph can
+ // contain a lot of duplication.
+ //
+ // TODO(taking): opt: consider making lazyCallees public.
+ // Using the same benchmarks as callgraph_test.go, removing just
+		// the explicit callgraph.Graph construction uses 4x less memory
+ // and is 37% faster.
+ // CHA 86 ms/op 16 MB/op
+ // lazyCallees 63 ms/op 4 MB/op
+ for _, g := range callees {
+ addEdge(fnode, site, g)
+ }
+ }
+
+ for f := range allFuncs {
+ fnode := cg.CreateNode(f)
+ for _, b := range f.Blocks {
+ for _, instr := range b.Instrs {
+ if site, ok := instr.(ssa.CallInstruction); ok {
+ if g := site.Common().StaticCallee(); g != nil {
+ addEdge(fnode, site, g)
+ } else {
+ addEdges(fnode, site, calleesOf(site))
+ }
+ }
+ }
+ }
+ }
+
+ return cg
+}
+
+// lazyCallees returns a function that maps a call site (in a function in fns)
+// to its callees within fns.
+//
+// The resulting function is not concurrency safe.
+func lazyCallees(fns map[*ssa.Function]bool) func(site ssa.CallInstruction) []*ssa.Function {
// funcsBySig contains all functions, keyed by signature. It is
// the effective set of address-taken functions used to resolve
// a dynamic call of a particular signature.
@@ -81,7 +129,7 @@ func CallGraph(prog *ssa.Program) *callgraph.Graph {
return methods
}
- for f := range allFuncs {
+ for f := range fns {
if f.Signature.Recv() == nil {
// Package initializers can never be address-taken.
if f.Name() == "init" && f.Synthetic == "package initializer" {
@@ -95,45 +143,17 @@ func CallGraph(prog *ssa.Program) *callgraph.Graph {
}
}
- addEdge := func(fnode *callgraph.Node, site ssa.CallInstruction, g *ssa.Function) {
- gnode := cg.CreateNode(g)
- callgraph.AddEdge(fnode, site, gnode)
- }
-
- addEdges := func(fnode *callgraph.Node, site ssa.CallInstruction, callees []*ssa.Function) {
- // Because every call to a highly polymorphic and
- // frequently used abstract method such as
- // (io.Writer).Write is assumed to call every concrete
- // Write method in the program, the call graph can
- // contain a lot of duplication.
- //
- // TODO(adonovan): opt: consider factoring the callgraph
- // API so that the Callers component of each edge is a
- // slice of nodes, not a singleton.
- for _, g := range callees {
- addEdge(fnode, site, g)
- }
- }
-
- for f := range allFuncs {
- fnode := cg.CreateNode(f)
- for _, b := range f.Blocks {
- for _, instr := range b.Instrs {
- if site, ok := instr.(ssa.CallInstruction); ok {
- call := site.Common()
- if call.IsInvoke() {
- tiface := call.Value.Type().Underlying().(*types.Interface)
- addEdges(fnode, site, lookupMethods(tiface, call.Method))
- } else if g := call.StaticCallee(); g != nil {
- addEdge(fnode, site, g)
- } else if _, ok := call.Value.(*ssa.Builtin); !ok {
- callees, _ := funcsBySig.At(call.Signature()).([]*ssa.Function)
- addEdges(fnode, site, callees)
- }
- }
- }
+ return func(site ssa.CallInstruction) []*ssa.Function {
+ call := site.Common()
+ if call.IsInvoke() {
+ tiface := call.Value.Type().Underlying().(*types.Interface)
+ return lookupMethods(tiface, call.Method)
+ } else if g := call.StaticCallee(); g != nil {
+ return []*ssa.Function{g}
+ } else if _, ok := call.Value.(*ssa.Builtin); !ok {
+ fns, _ := funcsBySig.At(call.Signature()).([]*ssa.Function)
+ return fns
}
+ return nil
}
-
- return cg
}
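
A minimal sketch of cha.CallGraph on a single-file program, using the same loader/ssautil plumbing as the tests in this change; the example source is an assumption:

package main

import (
	"fmt"
	"log"

	"golang.org/x/tools/go/callgraph"
	"golang.org/x/tools/go/callgraph/cha"
	"golang.org/x/tools/go/loader"
	"golang.org/x/tools/go/ssa"
	"golang.org/x/tools/go/ssa/ssautil"
)

const src = `package main

type I interface{ Foo() }
type A struct{}
func (A) Foo() {}

func main() {
	var i I = A{}
	i.Foo() // dynamic call; CHA resolves it to (A).Foo
}
`

func main() {
	var conf loader.Config
	f, err := conf.ParseFile("main.go", src)
	if err != nil {
		log.Fatal(err)
	}
	conf.CreateFromFiles("main", f)
	iprog, err := conf.Load()
	if err != nil {
		log.Fatal(err)
	}
	prog := ssautil.CreateProgram(iprog, ssa.InstantiateGenerics)
	prog.Build()

	cg := cha.CallGraph(prog)
	callgraph.GraphVisitEdges(cg, func(e *callgraph.Edge) error {
		fmt.Printf("%s --> %s\n", e.Caller.Func, e.Callee.Func)
		return nil
	})
}
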
diff --git a/go/callgraph/cha/cha_test.go b/go/callgraph/cha/cha_test.go
index 3dc03143b..a12b3d0a3 100644
--- a/go/callgraph/cha/cha_test.go
+++ b/go/callgraph/cha/cha_test.go
@@ -24,7 +24,9 @@ import (
"golang.org/x/tools/go/callgraph"
"golang.org/x/tools/go/callgraph/cha"
"golang.org/x/tools/go/loader"
+ "golang.org/x/tools/go/ssa"
"golang.org/x/tools/go/ssa/ssautil"
+ "golang.org/x/tools/internal/typeparams"
)
var inputs = []string{
@@ -47,19 +49,9 @@ func expectation(f *ast.File) (string, token.Pos) {
// TestCHA runs CHA on each file in inputs, prints the dynamic edges of
// the call graph, and compares it with the golden results embedded in
// the WANT comment at the end of the file.
-//
func TestCHA(t *testing.T) {
for _, filename := range inputs {
- content, err := ioutil.ReadFile(filename)
- if err != nil {
- t.Errorf("couldn't read file '%s': %s", filename, err)
- continue
- }
-
- conf := loader.Config{
- ParserMode: parser.ParseComments,
- }
- f, err := conf.ParseFile(filename, content)
+ prog, f, mainPkg, err := loadProgInfo(filename, ssa.InstantiateGenerics)
if err != nil {
t.Error(err)
continue
@@ -67,34 +59,77 @@ func TestCHA(t *testing.T) {
want, pos := expectation(f)
if pos == token.NoPos {
- t.Errorf("No WANT: comment in %s", filename)
- continue
- }
-
- conf.CreateFromFiles("main", f)
- iprog, err := conf.Load()
- if err != nil {
- t.Error(err)
+ t.Error(fmt.Errorf("No WANT: comment in %s", filename))
continue
}
- prog := ssautil.CreateProgram(iprog, 0)
- mainPkg := prog.Package(iprog.Created[0].Pkg)
- prog.Build()
-
cg := cha.CallGraph(prog)
- if got := printGraph(cg, mainPkg.Pkg); got != want {
+ if got := printGraph(cg, mainPkg.Pkg, "dynamic", "Dynamic calls"); got != want {
t.Errorf("%s: got:\n%s\nwant:\n%s",
prog.Fset.Position(pos), got, want)
}
}
}
-func printGraph(cg *callgraph.Graph, from *types.Package) string {
+// TestCHAGenerics is TestCHA tailored for testing generics.
+func TestCHAGenerics(t *testing.T) {
+ if !typeparams.Enabled {
+ t.Skip("TestCHAGenerics requires type parameters")
+ }
+
+ filename := "testdata/generics.go"
+ prog, f, mainPkg, err := loadProgInfo(filename, ssa.InstantiateGenerics)
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ want, pos := expectation(f)
+ if pos == token.NoPos {
+ t.Fatal(fmt.Errorf("No WANT: comment in %s", filename))
+ }
+
+ cg := cha.CallGraph(prog)
+
+ if got := printGraph(cg, mainPkg.Pkg, "", "All calls"); got != want {
+ t.Errorf("%s: got:\n%s\nwant:\n%s",
+ prog.Fset.Position(pos), got, want)
+ }
+}
+
+func loadProgInfo(filename string, mode ssa.BuilderMode) (*ssa.Program, *ast.File, *ssa.Package, error) {
+ content, err := ioutil.ReadFile(filename)
+ if err != nil {
+ return nil, nil, nil, fmt.Errorf("couldn't read file '%s': %s", filename, err)
+ }
+
+ conf := loader.Config{
+ ParserMode: parser.ParseComments,
+ }
+ f, err := conf.ParseFile(filename, content)
+ if err != nil {
+ return nil, nil, nil, err
+ }
+
+ conf.CreateFromFiles("main", f)
+ iprog, err := conf.Load()
+ if err != nil {
+ return nil, nil, nil, err
+ }
+
+ prog := ssautil.CreateProgram(iprog, mode)
+ prog.Build()
+
+ return prog, f, prog.Package(iprog.Created[0].Pkg), nil
+}
+
+// printGraph returns a string representation of cg involving only edges
+// whose description contains edgeMatch. The string representation is
+// prefixed with a desc line.
+func printGraph(cg *callgraph.Graph, from *types.Package, edgeMatch string, desc string) string {
var edges []string
callgraph.GraphVisitEdges(cg, func(e *callgraph.Edge) error {
- if strings.Contains(e.Description(), "dynamic") {
+ if strings.Contains(e.Description(), edgeMatch) {
edges = append(edges, fmt.Sprintf("%s --> %s",
e.Caller.Func.RelString(from),
e.Callee.Func.RelString(from)))
@@ -104,7 +139,7 @@ func printGraph(cg *callgraph.Graph, from *types.Package) string {
sort.Strings(edges)
var buf bytes.Buffer
- buf.WriteString("Dynamic calls\n")
+ buf.WriteString(desc + "\n")
for _, edge := range edges {
fmt.Fprintf(&buf, " %s\n", edge)
}
diff --git a/go/callgraph/cha/testdata/generics.go b/go/callgraph/cha/testdata/generics.go
new file mode 100644
index 000000000..0323c7582
--- /dev/null
+++ b/go/callgraph/cha/testdata/generics.go
@@ -0,0 +1,49 @@
+//go:build ignore
+// +build ignore
+
+package main
+
+// Test of generic function calls.
+
+type I interface {
+ Foo()
+}
+
+type A struct{}
+
+func (a A) Foo() {}
+
+type B struct{}
+
+func (b B) Foo() {}
+
+func instantiated[X I](x X) {
+ x.Foo()
+}
+
+func Bar() {}
+
+func f(h func(), g func(I), k func(A), a A, b B) {
+ h()
+
+ k(a)
+ g(b) // g:func(I) is not matched by instantiated[B]:func(B)
+
+ instantiated[A](a) // static call
+ instantiated[B](b) // static call
+}
+
+// WANT:
+// All calls
+// (*A).Foo --> (A).Foo
+// (*B).Foo --> (B).Foo
+// f --> Bar
+// f --> instantiated[main.A]
+// f --> instantiated[main.A]
+// f --> instantiated[main.B]
+// instantiated --> (*A).Foo
+// instantiated --> (*B).Foo
+// instantiated --> (A).Foo
+// instantiated --> (B).Foo
+// instantiated[main.A] --> (A).Foo
+// instantiated[main.B] --> (B).Foo
diff --git a/go/callgraph/rta/rta.go b/go/callgraph/rta/rta.go
index e6b44606a..2e80415ff 100644
--- a/go/callgraph/rta/rta.go
+++ b/go/callgraph/rta/rta.go
@@ -39,13 +39,16 @@
// analysis, but the algorithm is much faster. For example, running the
// cmd/callgraph tool on its own source takes ~2.1s for RTA and ~5.4s
// for points-to analysis.
-//
package rta // import "golang.org/x/tools/go/callgraph/rta"
// TODO(adonovan): test it by connecting it to the interpreter and
// replacing all "unreachable" functions by a special intrinsic, and
// ensure that that intrinsic is never called.
+// TODO(zpavlinovic): decide if the clients must use ssa.InstantiateGenerics
+// mode when building programs with generics. It might be possible to
+// extend rta to accurately support generics with just ssa.BuilderMode(0).
+
import (
"fmt"
"go/types"
@@ -57,7 +60,6 @@ import (
// A Result holds the results of Rapid Type Analysis, which includes the
// set of reachable functions/methods, runtime types, and the call graph.
-//
type Result struct {
// CallGraph is the discovered callgraph.
// It does not include edges for calls made via reflection.
@@ -262,7 +264,6 @@ func (r *rta) visitFunc(f *ssa.Function) {
// If buildCallGraph is true, Result.CallGraph will contain a call
// graph; otherwise, only the other fields (reachable functions) are
// populated.
-//
func Analyze(roots []*ssa.Function, buildCallGraph bool) *Result {
if len(roots) == 0 {
return nil
@@ -341,7 +342,6 @@ func (r *rta) implementations(I *types.Interface) []types.Type {
// addRuntimeType is called for each concrete type that can be the
// dynamic type of some interface or reflect.Value.
// Adapted from needMethods in go/ssa/builder.go
-//
func (r *rta) addRuntimeType(T types.Type, skip bool) {
if prev, ok := r.result.RuntimeTypes.At(T).(bool); ok {
if skip && !prev {
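
A compilable sketch of driving rta.Analyze once an SSA main package is available; the package name rtasketch and the reporting helper are assumptions, and the roots are the main and init functions as in the tests below:

package rtasketch

import (
	"fmt"

	"golang.org/x/tools/go/callgraph/rta"
	"golang.org/x/tools/go/ssa"
)

// reportReachable runs RTA from mainPkg's main and init functions and
// prints the reachable functions and whether their address is taken.
func reportReachable(mainPkg *ssa.Package) {
	roots := []*ssa.Function{
		mainPkg.Func("main"),
		mainPkg.Func("init"),
	}
	res := rta.Analyze(roots, true /* also build the call graph */)
	for fn, info := range res.Reachable {
		fmt.Printf("%s (address taken: %t)\n", fn, info.AddrTaken)
	}
}
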
diff --git a/go/callgraph/rta/rta_test.go b/go/callgraph/rta/rta_test.go
index 9ae1bdf99..67d05d612 100644
--- a/go/callgraph/rta/rta_test.go
+++ b/go/callgraph/rta/rta_test.go
@@ -16,7 +16,7 @@ import (
"go/parser"
"go/token"
"go/types"
- "io/ioutil"
+ "os"
"sort"
"strings"
"testing"
@@ -26,6 +26,7 @@ import (
"golang.org/x/tools/go/loader"
"golang.org/x/tools/go/ssa"
"golang.org/x/tools/go/ssa/ssautil"
+ "golang.org/x/tools/internal/typeparams"
)
var inputs = []string{
@@ -51,19 +52,9 @@ func expectation(f *ast.File) (string, token.Pos) {
// The results string consists of two parts: the set of dynamic call
// edges, "f --> g", one per line, and the set of reachable functions,
// one per line. Each set is sorted.
-//
func TestRTA(t *testing.T) {
for _, filename := range inputs {
- content, err := ioutil.ReadFile(filename)
- if err != nil {
- t.Errorf("couldn't read file '%s': %s", filename, err)
- continue
- }
-
- conf := loader.Config{
- ParserMode: parser.ParseComments,
- }
- f, err := conf.ParseFile(filename, content)
+ prog, f, mainPkg, err := loadProgInfo(filename, ssa.BuilderMode(0))
if err != nil {
t.Error(err)
continue
@@ -75,30 +66,77 @@ func TestRTA(t *testing.T) {
continue
}
- conf.CreateFromFiles("main", f)
- iprog, err := conf.Load()
- if err != nil {
- t.Error(err)
- continue
- }
-
- prog := ssautil.CreateProgram(iprog, 0)
- mainPkg := prog.Package(iprog.Created[0].Pkg)
- prog.Build()
-
res := rta.Analyze([]*ssa.Function{
mainPkg.Func("main"),
mainPkg.Func("init"),
}, true)
- if got := printResult(res, mainPkg.Pkg); got != want {
+ if got := printResult(res, mainPkg.Pkg, "dynamic", "Dynamic calls"); got != want {
t.Errorf("%s: got:\n%s\nwant:\n%s",
prog.Fset.Position(pos), got, want)
}
}
}
-func printResult(res *rta.Result, from *types.Package) string {
+// TestRTAGenerics is TestRTA specialized for testing generics.
+func TestRTAGenerics(t *testing.T) {
+ if !typeparams.Enabled {
+ t.Skip("TestRTAGenerics requires type parameters")
+ }
+
+ filename := "testdata/generics.go"
+ prog, f, mainPkg, err := loadProgInfo(filename, ssa.InstantiateGenerics)
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ want, pos := expectation(f)
+ if pos == token.NoPos {
+ t.Fatalf("No WANT: comment in %s", filename)
+ }
+
+ res := rta.Analyze([]*ssa.Function{
+ mainPkg.Func("main"),
+ mainPkg.Func("init"),
+ }, true)
+
+ if got := printResult(res, mainPkg.Pkg, "", "All calls"); got != want {
+ t.Errorf("%s: got:\n%s\nwant:\n%s",
+ prog.Fset.Position(pos), got, want)
+ }
+}
+
+func loadProgInfo(filename string, mode ssa.BuilderMode) (*ssa.Program, *ast.File, *ssa.Package, error) {
+ content, err := os.ReadFile(filename)
+ if err != nil {
+ return nil, nil, nil, fmt.Errorf("couldn't read file '%s': %s", filename, err)
+ }
+
+ conf := loader.Config{
+ ParserMode: parser.ParseComments,
+ }
+ f, err := conf.ParseFile(filename, content)
+ if err != nil {
+ return nil, nil, nil, err
+ }
+
+ conf.CreateFromFiles("main", f)
+ iprog, err := conf.Load()
+ if err != nil {
+ return nil, nil, nil, err
+ }
+
+ prog := ssautil.CreateProgram(iprog, mode)
+ prog.Build()
+
+ return prog, f, prog.Package(iprog.Created[0].Pkg), nil
+}
+
+// printResult returns a string representation of res, i.e., call graph,
+// reachable functions, and reflect types. For the call graph, only edges
+// whose description contains edgeMatch are included, and their string
+// representation is prefixed with a desc line.
+func printResult(res *rta.Result, from *types.Package, edgeMatch, desc string) string {
var buf bytes.Buffer
writeSorted := func(ss []string) {
@@ -108,10 +146,10 @@ func printResult(res *rta.Result, from *types.Package) string {
}
}
- buf.WriteString("Dynamic calls\n")
+ buf.WriteString(desc + "\n")
var edges []string
callgraph.GraphVisitEdges(res.CallGraph, func(e *callgraph.Edge) error {
- if strings.Contains(e.Description(), "dynamic") {
+ if strings.Contains(e.Description(), edgeMatch) {
edges = append(edges, fmt.Sprintf("%s --> %s",
e.Caller.Func.RelString(from),
e.Callee.Func.RelString(from)))
diff --git a/go/callgraph/rta/testdata/generics.go b/go/callgraph/rta/testdata/generics.go
new file mode 100644
index 000000000..d962fa43f
--- /dev/null
+++ b/go/callgraph/rta/testdata/generics.go
@@ -0,0 +1,79 @@
+//go:build ignore
+// +build ignore
+
+package main
+
+// Test of generic function calls.
+
+type I interface {
+ Foo()
+}
+
+type A struct{}
+
+func (a A) Foo() {}
+
+type B struct{}
+
+func (b B) Foo() {}
+
+func instantiated[X I](x X) {
+ x.Foo()
+}
+
+var a A
+var b B
+
+func main() {
+ instantiated[A](a) // static call
+ instantiated[B](b) // static call
+
+ local[C]().Foo()
+
+ lambda[A]()()()
+}
+
+func local[X I]() I {
+ var x X
+ return x
+}
+
+type C struct{}
+
+func (c C) Foo() {}
+
+func lambda[X I]() func() func() {
+ return func() func() {
+ var x X
+ return x.Foo
+ }
+}
+
+// WANT:
+// All calls
+// (*C).Foo --> (C).Foo
+// (A).Foo$bound --> (A).Foo
+// instantiated[main.A] --> (A).Foo
+// instantiated[main.B] --> (B).Foo
+// main --> (*C).Foo
+// main --> (A).Foo$bound
+// main --> (C).Foo
+// main --> instantiated[main.A]
+// main --> instantiated[main.B]
+// main --> lambda[main.A]
+// main --> lambda[main.A]$1
+// main --> local[main.C]
+// Reachable functions
+// (*C).Foo
+// (A).Foo
+// (A).Foo$bound
+// (B).Foo
+// (C).Foo
+// instantiated[main.A]
+// instantiated[main.B]
+// lambda[main.A]
+// lambda[main.A]$1
+// local[main.C]
+// Reflect types
+// *C
+// C
diff --git a/go/callgraph/static/static.go b/go/callgraph/static/static.go
index 7c41c1283..62d2364bf 100644
--- a/go/callgraph/static/static.go
+++ b/go/callgraph/static/static.go
@@ -6,6 +6,8 @@
// only static call edges.
package static // import "golang.org/x/tools/go/callgraph/static"
+// TODO(zpavlinovic): update static for how it handles generic function bodies.
+
import (
"golang.org/x/tools/go/callgraph"
"golang.org/x/tools/go/ssa"
@@ -14,7 +16,6 @@ import (
// CallGraph computes the call graph of the specified program
// considering only static calls.
-//
func CallGraph(prog *ssa.Program) *callgraph.Graph {
cg := callgraph.New(nil) // TODO(adonovan) eliminate concept of rooted callgraph
diff --git a/go/callgraph/static/static_test.go b/go/callgraph/static/static_test.go
index e1bfcd707..0a108d3d2 100644
--- a/go/callgraph/static/static_test.go
+++ b/go/callgraph/static/static_test.go
@@ -14,7 +14,9 @@ import (
"golang.org/x/tools/go/callgraph"
"golang.org/x/tools/go/callgraph/static"
"golang.org/x/tools/go/loader"
+ "golang.org/x/tools/go/ssa"
"golang.org/x/tools/go/ssa/ssautil"
+ "golang.org/x/tools/internal/typeparams"
)
const input = `package P
@@ -47,42 +49,94 @@ func h()
var unknown bool
`
-func TestStatic(t *testing.T) {
- conf := loader.Config{ParserMode: parser.ParseComments}
- f, err := conf.ParseFile("P.go", input)
- if err != nil {
- t.Fatal(err)
- }
+const genericsInput = `package P
- conf.CreateFromFiles("P", f)
- iprog, err := conf.Load()
- if err != nil {
- t.Fatal(err)
- }
+type I interface {
+ F()
+}
- P := iprog.Created[0].Pkg
+type A struct{}
- prog := ssautil.CreateProgram(iprog, 0)
- prog.Build()
+func (a A) F() {}
- cg := static.CallGraph(prog)
+type B struct{}
- var edges []string
- callgraph.GraphVisitEdges(cg, func(e *callgraph.Edge) error {
- edges = append(edges, fmt.Sprintf("%s -> %s",
- e.Caller.Func.RelString(P),
- e.Callee.Func.RelString(P)))
- return nil
- })
- sort.Strings(edges)
+func (b B) F() {}
- want := []string{
- "(*C).f -> (C).f",
- "f -> (C).f",
- "f -> f$1",
- "f -> g",
- }
- if !reflect.DeepEqual(edges, want) {
- t.Errorf("Got edges %v, want %v", edges, want)
+func instantiated[X I](x X) {
+ x.F()
+}
+
+func Bar() {}
+
+func f(h func(), a A, b B) {
+ h()
+
+ instantiated[A](a)
+ instantiated[B](b)
+}
+`
+
+func TestStatic(t *testing.T) {
+ for _, e := range []struct {
+ input string
+ want []string
+ // typeparams must be true if input uses type parameters
+ typeparams bool
+ }{
+ {input, []string{
+ "(*C).f -> (C).f",
+ "f -> (C).f",
+ "f -> f$1",
+ "f -> g",
+ }, false},
+ {genericsInput, []string{
+ "(*A).F -> (A).F",
+ "(*B).F -> (B).F",
+ "f -> instantiated[P.A]",
+ "f -> instantiated[P.B]",
+ "instantiated[P.A] -> (A).F",
+ "instantiated[P.B] -> (B).F",
+ }, true},
+ } {
+ if e.typeparams && !typeparams.Enabled {
+ // Skip tests with type parameters when the build
+			// environment does not support them.
+ continue
+ }
+
+ conf := loader.Config{ParserMode: parser.ParseComments}
+ f, err := conf.ParseFile("P.go", e.input)
+ if err != nil {
+ t.Error(err)
+ continue
+ }
+
+ conf.CreateFromFiles("P", f)
+ iprog, err := conf.Load()
+ if err != nil {
+ t.Error(err)
+ continue
+ }
+
+ P := iprog.Created[0].Pkg
+
+ prog := ssautil.CreateProgram(iprog, ssa.InstantiateGenerics)
+ prog.Build()
+
+ cg := static.CallGraph(prog)
+
+ var edges []string
+ callgraph.GraphVisitEdges(cg, func(e *callgraph.Edge) error {
+ edges = append(edges, fmt.Sprintf("%s -> %s",
+ e.Caller.Func.RelString(P),
+ e.Callee.Func.RelString(P)))
+ return nil
+ })
+ sort.Strings(edges)
+
+ if !reflect.DeepEqual(edges, e.want) {
+ t.Errorf("Got edges %v, want %v", edges, e.want)
+ }
}
}
diff --git a/go/callgraph/util.go b/go/callgraph/util.go
index a8f89031c..1ab039029 100644
--- a/go/callgraph/util.go
+++ b/go/callgraph/util.go
@@ -11,7 +11,6 @@ import "golang.org/x/tools/go/ssa"
// CalleesOf returns a new set containing all direct callees of the
// caller node.
-//
func CalleesOf(caller *Node) map[*Node]bool {
callees := make(map[*Node]bool)
for _, e := range caller.Out {
@@ -24,7 +23,6 @@ func CalleesOf(caller *Node) map[*Node]bool {
// The edge function is called for each edge in postorder. If it
// returns non-nil, visitation stops and GraphVisitEdges returns that
// value.
-//
func GraphVisitEdges(g *Graph, edge func(*Edge) error) error {
seen := make(map[*Node]bool)
var visit func(n *Node) error
@@ -54,7 +52,6 @@ func GraphVisitEdges(g *Graph, edge func(*Edge) error) error {
// ending at some node for which isEnd() returns true. On success,
// PathSearch returns the path as an ordered list of edges; on
// failure, it returns nil.
-//
func PathSearch(start *Node, isEnd func(*Node) bool) []*Edge {
stack := make([]*Edge, 0, 32)
seen := make(map[*Node]bool)
@@ -82,7 +79,6 @@ func PathSearch(start *Node, isEnd func(*Node) bool) []*Edge {
// synthetic functions (except g.Root and package initializers),
// preserving the topology. In effect, calls to synthetic wrappers
// are "inlined".
-//
func (g *Graph) DeleteSyntheticNodes() {
// Measurements on the standard library and go.tools show that
// resulting graph has ~15% fewer nodes and 4-8% fewer edges
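
A compilable sketch combining the utilities above: inline synthetic wrappers, then search for one call path from the root to a target function; the package and helper names are assumptions:

package callgraphsketch

import (
	"fmt"

	"golang.org/x/tools/go/callgraph"
	"golang.org/x/tools/go/ssa"
)

// printPath inlines synthetic wrappers and prints one call path from the
// root of cg to target, or reports that none exists.
func printPath(cg *callgraph.Graph, target *ssa.Function) {
	cg.DeleteSyntheticNodes()
	path := callgraph.PathSearch(cg.Root, func(n *callgraph.Node) bool {
		return n.Func == target
	})
	if path == nil {
		fmt.Println("no path to", target)
		return
	}
	for _, e := range path {
		fmt.Printf("%s --> %s\n", e.Caller.Func, e.Callee.Func)
	}
}
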
diff --git a/go/callgraph/vta/graph.go b/go/callgraph/vta/graph.go
index ad7ef0e88..2537123f4 100644
--- a/go/callgraph/vta/graph.go
+++ b/go/callgraph/vta/graph.go
@@ -12,6 +12,7 @@ import (
"golang.org/x/tools/go/callgraph"
"golang.org/x/tools/go/ssa"
"golang.org/x/tools/go/types/typeutil"
+ "golang.org/x/tools/internal/typeparams"
)
// node interface for VTA nodes.
@@ -175,9 +176,10 @@ func (f function) String() string {
// We merge such constructs into a single node for simplicity and without
// much precision sacrifice as such variables are rare in practice. Both
// a and b would be represented as the same PtrInterface(I) node in:
-// type I interface
-// var a ***I
-// var b **I
+//
+// type I interface
+// var a ***I
+// var b **I
type nestedPtrInterface struct {
typ types.Type
}
@@ -195,8 +197,9 @@ func (l nestedPtrInterface) String() string {
// constructs into a single node for simplicity and without much precision
// sacrifice as such variables are rare in practice. Both a and b would be
// represented as the same PtrFunction(func()) node in:
-// var a *func()
-// var b **func()
+//
+// var a *func()
+// var b **func()
type nestedPtrFunction struct {
typ types.Type
}
@@ -325,14 +328,16 @@ func (b *builder) instr(instr ssa.Instruction) {
// change type command a := A(b) results in a and b being the
// same value. For concrete type A, there is no interesting flow.
//
- // Note: When A is an interface, most interface casts are handled
+ // When A is an interface, most interface casts are handled
// by the ChangeInterface instruction. The relevant case here is
// when converting a pointer to an interface type. This can happen
// when the underlying interfaces have the same method set.
- // type I interface{ foo() }
- // type J interface{ foo() }
- // var b *I
- // a := (*J)(b)
+ //
+ // type I interface{ foo() }
+ // type J interface{ foo() }
+ // var b *I
+ // a := (*J)(b)
+ //
// When this happens we add flows between a <--> b.
b.addInFlowAliasEdges(b.nodeFromVal(i), b.nodeFromVal(i.X))
case *ssa.TypeAssert:
@@ -371,6 +376,8 @@ func (b *builder) instr(instr ssa.Instruction) {
// SliceToArrayPointer: t1 = slice to array pointer *[4]T <- []T (t0)
// No interesting flow as sliceArrayElem(t1) == sliceArrayElem(t0).
return
+ case *ssa.MultiConvert:
+ b.multiconvert(i)
default:
panic(fmt.Sprintf("unsupported instruction %v\n", instr))
}
@@ -441,7 +448,9 @@ func (b *builder) send(s *ssa.Send) {
}
// selekt generates flows for select statement
-// a = select blocking/nonblocking [c_1 <- t_1, c_2 <- t_2, ..., <- o_1, <- o_2, ...]
+//
+// a = select blocking/nonblocking [c_1 <- t_1, c_2 <- t_2, ..., <- o_1, <- o_2, ...]
+//
// between receiving channel registers c_i and corresponding input register t_i. Further,
// flows are generated between o_i and a[2 + i]. Note that a is a tuple register of type
// <int, bool, r_1, r_2, ...> where the type of r_i is the element type of channel o_i.
@@ -544,8 +553,9 @@ func (b *builder) closure(c *ssa.MakeClosure) {
// panic creates a flow from arguments to panic instructions to return
// registers of all recover statements in the program. Introduces a
// global panic node Panic and
-// 1) for every panic statement p: add p -> Panic
-// 2) for every recover statement r: add Panic -> r (handled in call)
+// 1. for every panic statement p: add p -> Panic
+// 2. for every recover statement r: add Panic -> r (handled in call)
+//
// TODO(zpavlinovic): improve precision by explicitly modeling how panic
// values flow from callees to callers and into deferred recover instructions.
func (b *builder) panic(p *ssa.Panic) {
@@ -563,7 +573,9 @@ func (b *builder) panic(p *ssa.Panic) {
func (b *builder) call(c ssa.CallInstruction) {
// When c is r := recover() call register instruction, we add Recover -> r.
if bf, ok := c.Common().Value.(*ssa.Builtin); ok && bf.Name() == "recover" {
- b.addInFlowEdge(recoverReturn{}, b.nodeFromVal(c.(*ssa.Call)))
+ if v, ok := c.(ssa.Value); ok {
+ b.addInFlowEdge(recoverReturn{}, b.nodeFromVal(v))
+ }
return
}
@@ -581,10 +593,18 @@ func addArgumentFlows(b *builder, c ssa.CallInstruction, f *ssa.Function) {
return
}
cc := c.Common()
- // When c is an unresolved method call (cc.Method != nil), cc.Value contains
- // the receiver object rather than cc.Args[0].
if cc.Method != nil {
- b.addInFlowAliasEdges(b.nodeFromVal(f.Params[0]), b.nodeFromVal(cc.Value))
+ // In principle we don't add interprocedural flows for receiver
+		// objects. At a call site, the receiver object is an interface
+		// while the callee object is concrete. The flow from an interface
+		// to a concrete type in general does not make sense. The exception
+ // is when the concrete type is a named function type (see #57756).
+ //
+		// The flow the other way around would bake in information from the
+ // initial call graph.
+ if isFunction(f.Params[0].Type()) {
+ b.addInFlowEdge(b.nodeFromVal(cc.Value), b.nodeFromVal(f.Params[0]))
+ }
}
offset := 0
@@ -638,6 +658,71 @@ func addReturnFlows(b *builder, r *ssa.Return, site ssa.Value) {
}
}
+func (b *builder) multiconvert(c *ssa.MultiConvert) {
+ // TODO(zpavlinovic): decide what to do on MultiConvert long term.
+ // TODO(zpavlinovic): add unit tests.
+ typeSetOf := func(typ types.Type) []*typeparams.Term {
+	// This is an adaptation of x/exp/typeparams.NormalTerms, which x/tools cannot depend on.
+ var terms []*typeparams.Term
+ var err error
+ switch typ := typ.(type) {
+ case *typeparams.TypeParam:
+ terms, err = typeparams.StructuralTerms(typ)
+ case *typeparams.Union:
+ terms, err = typeparams.UnionTermSet(typ)
+ case *types.Interface:
+ terms, err = typeparams.InterfaceTermSet(typ)
+ default:
+ // Common case.
+ // Specializing the len=1 case to avoid a slice
+ // had no measurable space/time benefit.
+ terms = []*typeparams.Term{typeparams.NewTerm(false, typ)}
+ }
+
+ if err != nil {
+ return nil
+ }
+ return terms
+ }
+ // isValuePreserving returns true if a conversion from ut_src to
+ // ut_dst is value-preserving, i.e. just a change of type.
+ // Precondition: neither argument is a named type.
+ isValuePreserving := func(ut_src, ut_dst types.Type) bool {
+ // Identical underlying types?
+ if types.IdenticalIgnoreTags(ut_dst, ut_src) {
+ return true
+ }
+
+ switch ut_dst.(type) {
+ case *types.Chan:
+ // Conversion between channel types?
+ _, ok := ut_src.(*types.Chan)
+ return ok
+
+ case *types.Pointer:
+ // Conversion between pointers with identical base types?
+ _, ok := ut_src.(*types.Pointer)
+ return ok
+ }
+ return false
+ }
+ dst_terms := typeSetOf(c.Type())
+ src_terms := typeSetOf(c.X.Type())
+ for _, s := range src_terms {
+ us := s.Type().Underlying()
+ for _, d := range dst_terms {
+ ud := d.Type().Underlying()
+ if isValuePreserving(us, ud) {
+ // This is equivalent to a ChangeType.
+ b.addInFlowAliasEdges(b.nodeFromVal(c), b.nodeFromVal(c.X))
+ return
+ }
+			// This is equivalent to one of: SliceToArrayPointer,
+			// SliceToArrayPointer+Deref, a size-0 array constant, or a Convert.
+ }
+ }
+}
+
// addInFlowEdge adds s -> d to g if d is node that can have an inflow, i.e., a node
// that represents an interface or an unresolved function value. Otherwise, there
// is no interesting type flow so the edge is omitted.
@@ -649,7 +734,7 @@ func (b *builder) addInFlowEdge(s, d node) {
// Creates const, pointer, global, func, and local nodes based on register instructions.
func (b *builder) nodeFromVal(val ssa.Value) node {
- if p, ok := val.Type().(*types.Pointer); ok && !isInterface(p.Elem()) && !isFunction(p.Elem()) {
+ if p, ok := val.Type().(*types.Pointer); ok && !types.IsInterface(p.Elem()) && !isFunction(p.Elem()) {
// Nested pointer to interfaces are modeled as a special
// nestedPtrInterface node.
if i := interfaceUnderPtr(p.Elem()); i != nil {
@@ -676,14 +761,15 @@ func (b *builder) nodeFromVal(val ssa.Value) node {
default:
panic(fmt.Errorf("unsupported value %v in node creation", val))
}
- return nil
}
// representative returns a unique representative for node `n`. Since
// semantically equivalent types can have different implementations,
// this method guarantees the same implementation is always used.
func (b *builder) representative(n node) node {
- if !hasInitialTypes(n) {
+ if n.Type() == nil {
+ // panicArg and recoverReturn do not have
+ // types and are unique by definition.
return n
}
t := canonicalize(n.Type(), &b.canon)
diff --git a/go/callgraph/vta/graph_test.go b/go/callgraph/vta/graph_test.go
index 8608844dd..8b8c6976f 100644
--- a/go/callgraph/vta/graph_test.go
+++ b/go/callgraph/vta/graph_test.go
@@ -13,6 +13,7 @@ import (
"testing"
"golang.org/x/tools/go/callgraph/cha"
+ "golang.org/x/tools/go/ssa"
"golang.org/x/tools/go/ssa/ssautil"
)
@@ -24,7 +25,7 @@ func TestNodeInterface(t *testing.T) {
// - global variable "gl"
// - "main" function and its
// - first register instruction t0 := *gl
- prog, _, err := testProg("testdata/src/simple.go")
+ prog, _, err := testProg("testdata/src/simple.go", ssa.BuilderMode(0))
if err != nil {
t.Fatalf("couldn't load testdata/src/simple.go program: %v", err)
}
@@ -78,7 +79,7 @@ func TestNodeInterface(t *testing.T) {
func TestVtaGraph(t *testing.T) {
// Get the basic type int from a real program.
- prog, _, err := testProg("testdata/src/simple.go")
+ prog, _, err := testProg("testdata/src/simple.go", ssa.BuilderMode(0))
if err != nil {
t.Fatalf("couldn't load testdata/src/simple.go program: %v", err)
}
@@ -191,7 +192,7 @@ func TestVTAGraphConstruction(t *testing.T) {
"testdata/src/panic.go",
} {
t.Run(file, func(t *testing.T) {
- prog, want, err := testProg(file)
+ prog, want, err := testProg(file, ssa.BuilderMode(0))
if err != nil {
t.Fatalf("couldn't load test file '%s': %s", file, err)
}
diff --git a/go/callgraph/vta/helpers_test.go b/go/callgraph/vta/helpers_test.go
index 0e00aeb28..768365f5b 100644
--- a/go/callgraph/vta/helpers_test.go
+++ b/go/callgraph/vta/helpers_test.go
@@ -35,7 +35,7 @@ func want(f *ast.File) []string {
// testProg returns an ssa representation of a program at
// `path`, assumed to define package "testdata," and the
// test want result as list of strings.
-func testProg(path string) (*ssa.Program, []string, error) {
+func testProg(path string, mode ssa.BuilderMode) (*ssa.Program, []string, error) {
content, err := ioutil.ReadFile(path)
if err != nil {
return nil, nil, err
@@ -56,7 +56,7 @@ func testProg(path string) (*ssa.Program, []string, error) {
return nil, nil, err
}
- prog := ssautil.CreateProgram(iprog, 0)
+ prog := ssautil.CreateProgram(iprog, mode)
// Set debug mode to exercise DebugRef instructions.
prog.Package(iprog.Created[0].Pkg).SetDebugMode(true)
prog.Build()
@@ -87,7 +87,9 @@ func funcName(f *ssa.Function) string {
// callGraphStr stringifies `g` into a list of strings where
// each entry is of the form
-// f: cs1 -> f1, f2, ...; ...; csw -> fx, fy, ...
+//
+// f: cs1 -> f1, f2, ...; ...; csw -> fx, fy, ...
+//
// f is a function, cs1, ..., csw are call sites in f, and
// f1, f2, ..., fx, fy, ... are the resolved callees.
func callGraphStr(g *callgraph.Graph) []string {
diff --git a/go/callgraph/vta/internal/trie/bits.go b/go/callgraph/vta/internal/trie/bits.go
index f2fd0ba83..c3aa15985 100644
--- a/go/callgraph/vta/internal/trie/bits.go
+++ b/go/callgraph/vta/internal/trie/bits.go
@@ -19,11 +19,11 @@ type key uint64
// bitpos is the position of a bit. A position is represented by having a 1
// bit in that position.
// Examples:
-// * 0b0010 is the position of the `1` bit in 2.
-// It is the 3rd most specific bit position in big endian encoding
-// (0b0 and 0b1 are more specific).
-// * 0b0100 is the position of the bit that 1 and 5 disagree on.
-// * 0b0 is a special value indicating that all bit agree.
+// - 0b0010 is the position of the `1` bit in 2.
+// It is the 3rd most specific bit position in big endian encoding
+// (0b0 and 0b1 are more specific).
+// - 0b0100 is the position of the bit that 1 and 5 disagree on.
+//   - 0b0 is a special value indicating that all bits agree.
type bitpos uint64
// prefixes represent a set of keys that all agree with the
@@ -35,7 +35,8 @@ type bitpos uint64
// A prefix p at bitpos m always satisfies mask(p, m) == p.
//
// A key is its own prefix for the bit position 64,
-// e.g. seeing a `prefix(key)` is not a problem.
+// e.g. seeing a `prefix(key)` is not a problem.
+//
// Prefixes should never be turned into keys.
type prefix uint64
@@ -64,8 +65,9 @@ func matchPrefix(k prefix, p prefix, b bitpos) bool {
// In big endian encoding, this value is the [64-(m-1)] most significant bits of k
// followed by a `0` bit at bitpos m, followed by m-1 `1` bits.
// Examples:
-// prefix(0b1011) for a bitpos 0b0100 represents the keys:
-// 0b1000, 0b1001, 0b1010, 0b1011, 0b1100, 0b1101, 0b1110, 0b1111
+//
+// prefix(0b1011) for a bitpos 0b0100 represents the keys:
+// 0b1000, 0b1001, 0b1010, 0b1011, 0b1100, 0b1101, 0b1110, 0b1111
//
// This mask function has the property that if matchPrefix(k, p, b), then
// k <= p if and only if zeroBit(k, m). This induces binary search tree tries.
@@ -85,9 +87,10 @@ func ord(m, n bitpos) bool {
// can hold that can also be held by a prefix `q` for some bitpos `n`.
//
// This is equivalent to:
-// m ==n && p == q,
-// higher(m, n) && matchPrefix(q, p, m), or
-// higher(n, m) && matchPrefix(p, q, n)
+//
+//	m == n && p == q,
+// higher(m, n) && matchPrefix(q, p, m), or
+// higher(n, m) && matchPrefix(p, q, n)
func prefixesOverlap(p prefix, m bitpos, q prefix, n bitpos) bool {
fbb := n
if ord(m, n) {
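
The prefix/bitpos comments above can be made concrete with a small sketch; the formula below is an assumption for illustration (the classic Okasaki–Gill mask), not necessarily this package's implementation, and it ignores the special bitpos 0:

package main

import "fmt"

// maskSketch keeps the bits of k above the single set bit of b, forces a
// 0 at b, and 1s below b, matching the prefix description in bits.go.
// Assumes b is a power of two (a genuine bit position).
func maskSketch(k, b uint64) uint64 {
	return (k | (b - 1)) &^ b
}

// matchPrefixSketch reports whether prefix p at bitpos b admits key k.
func matchPrefixSketch(k, p, b uint64) bool {
	return maskSketch(k, b) == p
}

func main() {
	// prefix(0b1011) at bitpos 0b0100 admits the keys 0b1000..0b1111,
	// as in the example in the mask doc comment.
	for k := uint64(0b1000); k <= 0b1111; k++ {
		fmt.Printf("%04b -> %t\n", k, matchPrefixSketch(k, 0b1011, 0b0100))
	}
}
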
diff --git a/go/callgraph/vta/internal/trie/builder.go b/go/callgraph/vta/internal/trie/builder.go
index 25d3805bc..11ff59b1b 100644
--- a/go/callgraph/vta/internal/trie/builder.go
+++ b/go/callgraph/vta/internal/trie/builder.go
@@ -9,7 +9,9 @@ package trie
// will be stored for the key.
//
// Collision functions must be idempotent:
-// collision(x, x) == x for all x.
+//
+// collision(x, x) == x for all x.
+//
// Collision functions may be applied whenever a value is inserted
// or two maps are merged or intersected.
type Collision func(lhs interface{}, rhs interface{}) interface{}
@@ -72,7 +74,8 @@ func (b *Builder) Empty() Map { return Map{b.Scope(), b.empty} }
// in the current scope and handle collisions using the collision function c.
//
// This roughly corresponds to updating a map[uint64]interface{} by:
-// if _, ok := m[k]; ok { m[k] = c(m[k], v} else { m[k] = v}
+//
+//	if _, ok := m[k]; ok { m[k] = c(m[k], v) } else { m[k] = v }
//
// An insertion or update happened whenever Insert(m, ...) != m .
func (b *Builder) InsertWith(c Collision, m Map, k uint64, v interface{}) Map {
@@ -85,7 +88,8 @@ func (b *Builder) InsertWith(c Collision, m Map, k uint64, v interface{}) Map {
//
// If there was a previous value mapped by key, keep the previously mapped value.
// This roughly corresponds to updating a map[uint64]interface{} by:
-// if _, ok := m[k]; ok { m[k] = val }
+//
+//	if _, ok := m[k]; !ok { m[k] = val }
//
// This is equivalent to b.Merge(m, b.Create({k: v})).
func (b *Builder) Insert(m Map, k uint64, v interface{}) Map {
@@ -94,7 +98,8 @@ func (b *Builder) Insert(m Map, k uint64, v interface{}) Map {
// Update sets a (key, value) pair in the map. This roughly corresponds to
// updating a map[uint64]interface{} by:
-// m[key] = val
+//
+// m[key] = val
func (b *Builder) Update(m Map, key uint64, val interface{}) Map {
return b.InsertWith(TakeRhs, m, key, val)
}
@@ -148,14 +153,17 @@ func (b *Builder) Remove(m Map, k uint64) Map {
// Intersect intersects maps lhs and rhs and returns a map with all of the keys
// present in both lhs and rhs, with values taken from lhs, i.e.
-// {(k, lhs[k]) | k in lhs, k in rhs}.
+//
+// {(k, lhs[k]) | k in lhs, k in rhs}.
func (b *Builder) Intersect(lhs, rhs Map) Map {
return b.IntersectWith(TakeLhs, lhs, rhs)
}
// IntersectWith takes lhs and rhs and returns the intersection
// with the value coming from the collision function, i.e.
-// {(k, c(lhs[k], rhs[k]) ) | k in lhs, k in rhs}.
+//
+// {(k, c(lhs[k], rhs[k]) ) | k in lhs, k in rhs}.
+//
// The elements of the resulting map are always { <k, c(lhs[k], rhs[k]) > }
// for each key k that is a key in both lhs and rhs.
func (b *Builder) IntersectWith(c Collision, lhs, rhs Map) Map {
@@ -261,7 +269,9 @@ func (b *Builder) mkLeaf(k key, v interface{}) *leaf {
}
// mkBranch returns the hash-consed representative of the tuple
-// (prefix, branch, left, right)
+//
+// (prefix, branch, left, right)
+//
// in the current scope.
func (b *Builder) mkBranch(p prefix, bp bitpos, left node, right node) *branch {
br := &branch{
diff --git a/go/callgraph/vta/internal/trie/trie.go b/go/callgraph/vta/internal/trie/trie.go
index 160eb21be..511fde515 100644
--- a/go/callgraph/vta/internal/trie/trie.go
+++ b/go/callgraph/vta/internal/trie/trie.go
@@ -10,8 +10,10 @@
// environment abstract domains in program analysis).
//
// This implementation closely follows the paper:
-// C. Okasaki and A. Gill, “Fast mergeable integer maps,” in ACM SIGPLAN
-// Workshop on ML, September 1998, pp. 77–86.
+//
+// C. Okasaki and A. Gill, “Fast mergeable integer maps,” in ACM SIGPLAN
+// Workshop on ML, September 1998, pp. 77–86.
+//
// Each Map is immutable and can be read from concurrently. The map does not
// guarantee that the value pointed to by the interface{} value is not updated
// concurrently.
@@ -36,9 +38,9 @@ import (
// Maps are immutable and can be read from concurrently.
//
// Notes on concurrency:
-// - A Map value itself is an interface and assignments to a Map value can race.
-// - Map does not guarantee that the value pointed to by the interface{} value
-// is not updated concurrently.
+// - A Map value itself is an interface and assignments to a Map value can race.
+// - Map does not guarantee that the value pointed to by the interface{} value
+// is not updated concurrently.
type Map struct {
s Scope
n node
diff --git a/go/callgraph/vta/propagation.go b/go/callgraph/vta/propagation.go
index 5934ebc21..5817e8938 100644
--- a/go/callgraph/vta/propagation.go
+++ b/go/callgraph/vta/propagation.go
@@ -20,53 +20,52 @@ import (
// with ids X and Y s.t. X < Y, Y comes before X in the topological order.
func scc(g vtaGraph) (map[node]int, int) {
// standard data structures used by Tarjan's algorithm.
- var index uint64
+ type state struct {
+ index int
+ lowLink int
+ onStack bool
+ }
+ states := make(map[node]*state, len(g))
var stack []node
- indexMap := make(map[node]uint64)
- lowLink := make(map[node]uint64)
- onStack := make(map[node]bool)
- nodeToSccID := make(map[node]int)
+ nodeToSccID := make(map[node]int, len(g))
sccID := 0
var doSCC func(node)
doSCC = func(n node) {
- indexMap[n] = index
- lowLink[n] = index
- index = index + 1
- onStack[n] = true
+ index := len(states)
+ ns := &state{index: index, lowLink: index, onStack: true}
+ states[n] = ns
stack = append(stack, n)
for s := range g[n] {
- if _, ok := indexMap[s]; !ok {
+ if ss, visited := states[s]; !visited {
// Analyze successor s that has not been visited yet.
doSCC(s)
- lowLink[n] = min(lowLink[n], lowLink[s])
- } else if onStack[s] {
+ ss = states[s]
+ ns.lowLink = min(ns.lowLink, ss.lowLink)
+ } else if ss.onStack {
// The successor is on the stack, meaning it has to be
// in the current SCC.
- lowLink[n] = min(lowLink[n], indexMap[s])
+ ns.lowLink = min(ns.lowLink, ss.index)
}
}
// if n is a root node, pop the stack and generate a new SCC.
- if lowLink[n] == indexMap[n] {
- for {
- w := stack[len(stack)-1]
+ if ns.lowLink == index {
+ var w node
+ for w != n {
+ w = stack[len(stack)-1]
stack = stack[:len(stack)-1]
- onStack[w] = false
+ states[w].onStack = false
nodeToSccID[w] = sccID
- if w == n {
- break
- }
}
sccID++
}
}
- index = 0
for n := range g {
- if _, ok := indexMap[n]; !ok {
+ if _, visited := states[n]; !visited {
doSCC(n)
}
}
@@ -74,7 +73,7 @@ func scc(g vtaGraph) (map[node]int, int) {
return nodeToSccID, sccID
}
-func min(x, y uint64) uint64 {
+func min(x, y int) int {
if x < y {
return x
}
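The scc hunk above replaces the three parallel maps (indexMap, lowLink, onStack) with a single per-node state record. A minimal self-contained sketch of the same Tarjan pattern over a plain map[int][]int — the graph type and names are illustrative, not the vta node/graph types:

	package main

	import "fmt"

	// scc assigns each vertex of g a strongly connected component ID using
	// Tarjan's algorithm, with one state record per vertex as in the patch.
	func scc(g map[int][]int) map[int]int {
		type state struct {
			index, lowLink int
			onStack        bool
		}
		states := make(map[int]*state, len(g))
		comp := make(map[int]int, len(g))
		var stack []int
		id := 0

		var visit func(int)
		visit = func(n int) {
			idx := len(states)
			ns := &state{index: idx, lowLink: idx, onStack: true}
			states[n] = ns
			stack = append(stack, n)

			for _, s := range g[n] {
				if ss, seen := states[s]; !seen {
					visit(s)
					if sl := states[s].lowLink; sl < ns.lowLink {
						ns.lowLink = sl
					}
				} else if ss.onStack && ss.index < ns.lowLink {
					ns.lowLink = ss.index
				}
			}

			// n is an SCC root: pop the stack down to and including n.
			if ns.lowLink == idx {
				for {
					w := stack[len(stack)-1]
					stack = stack[:len(stack)-1]
					states[w].onStack = false
					comp[w] = id
					if w == n {
						break
					}
				}
				id++
			}
		}

		for n := range g {
			if _, seen := states[n]; !seen {
				visit(n)
			}
		}
		return comp
	}

	func main() {
		g := map[int][]int{0: {1}, 1: {2}, 2: {0}, 3: {1}}
		fmt.Println(scc(g)) // 0, 1 and 2 share one component; 3 is its own
	}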
@@ -175,6 +174,18 @@ func nodeTypes(nodes []node, builder *trie.Builder, propTypeId func(p propType)
return &typeSet
}
+// hasInitialTypes checks if a node can have initial types.
+// Returns true iff `n` is not a panic, recover, nestedPtr*
+// node, nor a node whose type is an interface.
+func hasInitialTypes(n node) bool {
+ switch n.(type) {
+ case panicArg, recoverReturn, nestedPtrFunction, nestedPtrInterface:
+ return false
+ default:
+ return !types.IsInterface(n.Type())
+ }
+}
+
// getPropType creates a propType for `node` based on its type.
// propType.typ is always node.Type(). If node is function, then
// propType.val is the underlying function; nil otherwise.
diff --git a/go/callgraph/vta/propagation_test.go b/go/callgraph/vta/propagation_test.go
index 96707417f..f4a754f96 100644
--- a/go/callgraph/vta/propagation_test.go
+++ b/go/callgraph/vta/propagation_test.go
@@ -58,7 +58,7 @@ func newLocal(name string, t types.Type) local {
// newNamedType creates a bogus type named `name`.
func newNamedType(name string) *types.Named {
- return types.NewNamed(types.NewTypeName(token.NoPos, nil, name, nil), nil, nil)
+ return types.NewNamed(types.NewTypeName(token.NoPos, nil, name, nil), types.Universe.Lookup("int").Type(), nil)
}
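The helper above now gives the bogus named type a concrete underlying type (int) instead of nil. A minimal sketch of constructing such a type directly with go/types, under the assumption that only String/Underlying-style queries are needed:

	package main

	import (
		"fmt"
		"go/token"
		"go/types"
	)

	func main() {
		// A *types.Named needs a TypeName object and an underlying type.
		// Supplying a real underlying type (int here) keeps helpers such as
		// types.IsInterface and Underlying() well defined for the fake type.
		tn := types.NewTypeName(token.NoPos, nil, "A", nil)
		named := types.NewNamed(tn, types.Typ[types.Int], nil)

		fmt.Println(named)              // A
		fmt.Println(named.Underlying()) // int
	}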
// sccString is a utility for stringifying `nodeToScc`. Every
@@ -123,7 +123,8 @@ func sccEqual(sccs1 []string, sccs2 []string) bool {
// isRevTopSorted checks if sccs of `g` are sorted in reverse
// topological order:
-// for every edge x -> y in g, nodeToScc[x] > nodeToScc[y]
+//
+// for every edge x -> y in g, nodeToScc[x] > nodeToScc[y]
func isRevTopSorted(g vtaGraph, nodeToScc map[node]int) bool {
for n, succs := range g {
for s := range succs {
@@ -148,39 +149,39 @@ func setName(f *ssa.Function, name string) {
// parentheses contain node types and F nodes stand for function
// nodes whose content is function named F:
//
-// no-cycles:
-// t0 (A) -> t1 (B) -> t2 (C)
+// no-cycles:
+// t0 (A) -> t1 (B) -> t2 (C)
//
-// trivial-cycle:
-// <-------- <--------
-// | | | |
-// t0 (A) -> t1 (B) ->
+// trivial-cycle:
+// <-------- <--------
+// | | | |
+// t0 (A) -> t1 (B) ->
//
-// circle-cycle:
-// t0 (A) -> t1 (A) -> t2 (B)
-// | |
-// <--------------------
+// circle-cycle:
+// t0 (A) -> t1 (A) -> t2 (B)
+// | |
+// <--------------------
//
-// fully-connected:
-// t0 (A) <-> t1 (B)
-// \ /
-// t2(C)
+// fully-connected:
+// t0 (A) <-> t1 (B)
+// \ /
+// t2(C)
//
-// subsumed-scc:
-// t0 (A) -> t1 (B) -> t2(B) -> t3 (A)
-// | | | |
-// | <--------- |
-// <-----------------------------
+// subsumed-scc:
+// t0 (A) -> t1 (B) -> t2(B) -> t3 (A)
+// | | | |
+// | <--------- |
+// <-----------------------------
//
-// more-realistic:
-// <--------
-// | |
-// t0 (A) -->
-// ---------->
-// | |
-// t1 (A) -> t2 (B) -> F1 -> F2 -> F3 -> F4
-// | | | |
-// <------- <------------
+// more-realistic:
+// <--------
+// | |
+// t0 (A) -->
+// ---------->
+// | |
+// t1 (A) -> t2 (B) -> F1 -> F2 -> F3 -> F4
+// | | | |
+// <------- <------------
func testSuite() map[string]vtaGraph {
a := newNamedType("A")
b := newNamedType("B")
diff --git a/go/callgraph/vta/testdata/src/callgraph_generics.go b/go/callgraph/vta/testdata/src/callgraph_generics.go
new file mode 100644
index 000000000..da3dca52a
--- /dev/null
+++ b/go/callgraph/vta/testdata/src/callgraph_generics.go
@@ -0,0 +1,71 @@
+// Copyright 2021 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// go:build ignore
+
+package testdata
+
+func instantiated[X any](x *X) int {
+ print(x)
+ return 0
+}
+
+type I interface {
+ Bar()
+}
+
+func interfaceInstantiated[X I](x X) {
+ x.Bar()
+}
+
+type A struct{}
+
+func (a A) Bar() {}
+
+type B struct{}
+
+func (b B) Bar() {}
+
+func Foo(a A, b B) {
+ x := true
+ instantiated[bool](&x)
+ y := 1
+ instantiated[int](&y)
+
+ interfaceInstantiated[A](a)
+ interfaceInstantiated[B](b)
+}
+
+// Relevant SSA:
+//func Foo(a A, b B):
+// t0 = local A (a)
+// *t0 = a
+// t1 = local B (b)
+// *t1 = b
+// t2 = new bool (x)
+// *t2 = true:bool
+// t3 = instantiated[bool](t2)
+// t4 = new int (y)
+// *t4 = 1:int
+// t5 = instantiated[int](t4)
+// t6 = *t0
+// t7 = interfaceInstantiated[testdata.A](t6)
+// t8 = *t1
+// t9 = interfaceInstantiated[testdata.B](t8)
+// return
+//
+//func interfaceInstantiated[testdata.B](x B):
+// t0 = local B (x)
+// *t0 = x
+// t1 = *t0
+// t2 = (B).Bar(t1)
+// return
+//
+//func interfaceInstantiated[X I](x X):
+// (external)
+
+// WANT:
+// Foo: instantiated[bool](t2) -> instantiated[bool]; instantiated[int](t4) -> instantiated[int]; interfaceInstantiated[testdata.A](t6) -> interfaceInstantiated[testdata.A]; interfaceInstantiated[testdata.B](t8) -> interfaceInstantiated[testdata.B]
+// interfaceInstantiated[testdata.B]: (B).Bar(t1) -> B.Bar
+// interfaceInstantiated[testdata.A]: (A).Bar(t1) -> A.Bar
diff --git a/go/callgraph/vta/testdata/src/callgraph_issue_57756.go b/go/callgraph/vta/testdata/src/callgraph_issue_57756.go
new file mode 100644
index 000000000..e18f16eba
--- /dev/null
+++ b/go/callgraph/vta/testdata/src/callgraph_issue_57756.go
@@ -0,0 +1,67 @@
+// Copyright 2023 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// go:build ignore
+
+package testdata
+
+// Test that the values of a named function type are correctly
+// flowing from interface objects i in i.Foo() to the receiver
+// parameters of callees of i.Foo().
+
+type H func()
+
+func (h H) Do() {
+ h()
+}
+
+type I interface {
+ Do()
+}
+
+func Bar() I {
+ return H(func() {})
+}
+
+func For(g G) {
+ b := Bar()
+ b.Do()
+
+ g[0] = b
+ g.Goo()
+}
+
+type G []I
+
+func (g G) Goo() {
+ g[0].Do()
+}
+
+// Relevant SSA:
+// func Bar$1():
+// return
+//
+// func Bar() I:
+// t0 = changetype H <- func() (Bar$1)
+// t1 = make I <- H (t0)
+//
+// func For():
+// t0 = Bar()
+// t1 = invoke t0.Do()
+// t2 = &g[0:int]
+// *t2 = t0
+// t3 = (G).Goo(g)
+//
+// func (h H) Do():
+// t0 = h()
+//
+// func (g G) Goo():
+// t0 = &g[0:int]
+// t1 = *t0
+// t2 = invoke t1.Do()
+
+// WANT:
+// For: (G).Goo(g) -> G.Goo; Bar() -> Bar; invoke t0.Do() -> H.Do
+// H.Do: h() -> Bar$1
+// G.Goo: invoke t1.Do() -> H.Do
diff --git a/go/callgraph/vta/testdata/src/callgraph_recursive_types.go b/go/callgraph/vta/testdata/src/callgraph_recursive_types.go
new file mode 100644
index 000000000..6c3fef6f7
--- /dev/null
+++ b/go/callgraph/vta/testdata/src/callgraph_recursive_types.go
@@ -0,0 +1,56 @@
+// Copyright 2021 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// go:build ignore
+
+package testdata
+
+type I interface {
+ Foo() I
+}
+
+type A struct {
+ i int
+ a *A
+}
+
+func (a *A) Foo() I {
+ return a
+}
+
+type B **B
+
+type C *D
+type D *C
+
+func Bar(a *A, b *B, c *C, d *D) {
+ Baz(a)
+ Baz(a.a)
+
+ sink(*b)
+ sink(*c)
+ sink(*d)
+}
+
+func Baz(i I) {
+ i.Foo()
+}
+
+func sink(i interface{}) {
+ print(i)
+}
+
+// Relevant SSA:
+// func Baz(i I):
+// t0 = invoke i.Foo()
+// return
+//
+// func Bar(a *A, b *B):
+// t0 = make I <- *A (a)
+// t1 = Baz(t0)
+// ...
+
+// WANT:
+// Bar: Baz(t0) -> Baz; Baz(t4) -> Baz; sink(t10) -> sink; sink(t13) -> sink; sink(t7) -> sink
+// Baz: invoke i.Foo() -> A.Foo
diff --git a/go/callgraph/vta/testdata/src/function_alias.go b/go/callgraph/vta/testdata/src/function_alias.go
index b38e0e00d..0a8dffe79 100644
--- a/go/callgraph/vta/testdata/src/function_alias.go
+++ b/go/callgraph/vta/testdata/src/function_alias.go
@@ -33,42 +33,42 @@ func Baz(f func()) {
// t2 = *t1
// *t2 = Baz$1
// t3 = local A (a)
-// t4 = &t3.foo [#0]
-// t5 = *t1
-// t6 = *t5
-// *t4 = t6
+// t4 = *t1
+// t5 = *t4
+// t6 = &t3.foo [#0]
+// *t6 = t5
// t7 = &t3.foo [#0]
// t8 = *t7
// t9 = t8()
-// t10 = &t3.do [#1] *Doer
-// t11 = &t3.foo [#0] *func()
-// t12 = *t11 func()
-// t13 = changetype Doer <- func() (t12) Doer
-// *t10 = t13
+// t10 = &t3.foo [#0] *func()
+// t11 = *t10 func()
+// t12 = &t3.do [#1] *Doer
+// t13 = changetype Doer <- func() (t11) Doer
+// *t12 = t13
// t14 = &t3.do [#1] *Doer
// t15 = *t14 Doer
// t16 = t15() ()
// Flow chain showing that Baz$1 reaches t8():
-// Baz$1 -> t2 <-> PtrFunction(func()) <-> t5 -> t6 -> t4 <-> Field(testdata.A:foo) <-> t7 -> t8
+// Baz$1 -> t2 <-> PtrFunction(func()) <-> t4 -> t5 -> t6 <-> Field(testdata.A:foo) <-> t7 -> t8
// Flow chain showing that Baz$1 reaches t15():
-// Field(testdata.A:foo) <-> t11 -> t12 -> t13 -> t10 <-> Field(testdata.A:do) <-> t14 -> t15
+// Field(testdata.A:foo) <-> t10 -> t11 -> t13 -> t12 <-> Field(testdata.A:do) <-> t14 -> t15
// WANT:
// Local(f) -> Local(t0)
// Local(t0) -> PtrFunction(func())
// Function(Baz$1) -> Local(t2)
-// PtrFunction(func()) -> Local(t0), Local(t2), Local(t5)
+// PtrFunction(func()) -> Local(t0), Local(t2), Local(t4)
// Local(t2) -> PtrFunction(func())
-// Local(t4) -> Field(testdata.A:foo)
-// Local(t5) -> Local(t6), PtrFunction(func())
-// Local(t6) -> Local(t4)
+// Local(t6) -> Field(testdata.A:foo)
+// Local(t4) -> Local(t5), PtrFunction(func())
+// Local(t5) -> Local(t6)
// Local(t7) -> Field(testdata.A:foo), Local(t8)
-// Field(testdata.A:foo) -> Local(t11), Local(t4), Local(t7)
-// Local(t4) -> Field(testdata.A:foo)
-// Field(testdata.A:do) -> Local(t10), Local(t14)
-// Local(t10) -> Field(testdata.A:do)
-// Local(t11) -> Field(testdata.A:foo), Local(t12)
-// Local(t12) -> Local(t13)
-// Local(t13) -> Local(t10)
+// Field(testdata.A:foo) -> Local(t10), Local(t6), Local(t7)
+// Local(t6) -> Field(testdata.A:foo)
+// Field(testdata.A:do) -> Local(t12), Local(t14)
+// Local(t12) -> Field(testdata.A:do)
+// Local(t10) -> Field(testdata.A:foo), Local(t11)
+// Local(t11) -> Local(t13)
+// Local(t13) -> Local(t12)
// Local(t14) -> Field(testdata.A:do), Local(t15)
diff --git a/go/callgraph/vta/testdata/src/panic.go b/go/callgraph/vta/testdata/src/panic.go
index 2d39c70ea..5ef354857 100644
--- a/go/callgraph/vta/testdata/src/panic.go
+++ b/go/callgraph/vta/testdata/src/panic.go
@@ -27,12 +27,12 @@ func recover2() {
func Baz(a A) {
defer recover1()
+ defer recover()
panic(a)
}
// Relevant SSA:
// func recover1():
-// 0:
// t0 = print("only this recover...":string)
// t1 = recover()
// t2 = typeassert,ok t1.(I)
@@ -53,6 +53,7 @@ func Baz(a A) {
// t0 = local A (a)
// *t0 = a
// defer recover1()
+// defer recover()
// t1 = *t0
// t2 = make interface{} <- A (t1)
// panic t2
diff --git a/go/callgraph/vta/utils.go b/go/callgraph/vta/utils.go
index e7a97e2d8..d1831983a 100644
--- a/go/callgraph/vta/utils.go
+++ b/go/callgraph/vta/utils.go
@@ -9,6 +9,7 @@ import (
"golang.org/x/tools/go/callgraph"
"golang.org/x/tools/go/ssa"
+ "golang.org/x/tools/internal/typeparams"
)
func canAlias(n1, n2 node) bool {
@@ -32,13 +33,13 @@ func isReferenceNode(n node) bool {
// hasInFlow checks if a concrete type can flow to node `n`.
// Returns true iff the type of `n` satisfies one of the following:
-// 1) is an interface
-// 2) is a (nested) pointer to interface (needed for, say,
+// 1. is an interface
+// 2. is a (nested) pointer to interface (needed for, say,
// slice elements of nested pointers to interface type)
-// 3) is a function type (needed for higher-order type flow)
-// 4) is a (nested) pointer to function (needed for, say,
+// 3. is a function type (needed for higher-order type flow)
+// 4. is a (nested) pointer to function (needed for, say,
// slice elements of nested pointers to function type)
-// 5) is a global Recover or Panic node
+// 5. is a global Recover or Panic node
func hasInFlow(n node) bool {
if _, ok := n.(panicArg); ok {
return true
@@ -56,24 +57,7 @@ func hasInFlow(n node) bool {
return true
}
- return isInterface(t) || isFunction(t)
-}
-
-// hasInitialTypes check if a node can have initial types.
-// Returns true iff `n` is not a panic or recover node as
-// those are artificial.
-func hasInitialTypes(n node) bool {
- switch n.(type) {
- case panicArg, recoverReturn:
- return false
- default:
- return true
- }
-}
-
-func isInterface(t types.Type) bool {
- _, ok := t.Underlying().(*types.Interface)
- return ok
+ return types.IsInterface(t) || isFunction(t)
}
func isFunction(t types.Type) bool {
@@ -85,48 +69,76 @@ func isFunction(t types.Type) bool {
// pointer to interface and if yes, returns the interface type.
// Otherwise, returns nil.
func interfaceUnderPtr(t types.Type) types.Type {
- p, ok := t.Underlying().(*types.Pointer)
- if !ok {
- return nil
- }
+ seen := make(map[types.Type]bool)
+ var visit func(types.Type) types.Type
+ visit = func(t types.Type) types.Type {
+ if seen[t] {
+ return nil
+ }
+ seen[t] = true
- if isInterface(p.Elem()) {
- return p.Elem()
- }
+ p, ok := t.Underlying().(*types.Pointer)
+ if !ok {
+ return nil
+ }
+
+ if types.IsInterface(p.Elem()) {
+ return p.Elem()
+ }
- return interfaceUnderPtr(p.Elem())
+ return visit(p.Elem())
+ }
+ return visit(t)
}
// functionUnderPtr checks if type `t` is a potentially nested
// pointer to function type and if yes, returns the function type.
// Otherwise, returns nil.
func functionUnderPtr(t types.Type) types.Type {
- p, ok := t.Underlying().(*types.Pointer)
- if !ok {
- return nil
- }
+ seen := make(map[types.Type]bool)
+ var visit func(types.Type) types.Type
+ visit = func(t types.Type) types.Type {
+ if seen[t] {
+ return nil
+ }
+ seen[t] = true
- if isFunction(p.Elem()) {
- return p.Elem()
- }
+ p, ok := t.Underlying().(*types.Pointer)
+ if !ok {
+ return nil
+ }
+
+ if isFunction(p.Elem()) {
+ return p.Elem()
+ }
- return functionUnderPtr(p.Elem())
+ return visit(p.Elem())
+ }
+ return visit(t)
}
// sliceArrayElem returns the element type of type `t` that is
-// expected to be a (pointer to) array or slice, consistent with
+// expected to be a (pointer to) array, slice or string, consistent with
// the ssa.Index and ssa.IndexAddr instructions. Panics otherwise.
func sliceArrayElem(t types.Type) types.Type {
- u := t.Underlying()
-
- if p, ok := u.(*types.Pointer); ok {
- u = p.Elem().Underlying()
- }
-
- if a, ok := u.(*types.Array); ok {
- return a.Elem()
+ switch u := t.Underlying().(type) {
+ case *types.Pointer:
+ return u.Elem().Underlying().(*types.Array).Elem()
+ case *types.Array:
+ return u.Elem()
+ case *types.Slice:
+ return u.Elem()
+ case *types.Basic:
+ return types.Typ[types.Byte]
+ case *types.Interface: // type param.
+ terms, err := typeparams.InterfaceTermSet(u)
+ if err != nil || len(terms) == 0 {
+ panic(t)
+ }
+ return sliceArrayElem(terms[0].Type()) // Element types must match.
+ default:
+ panic(t)
}
- return u.(*types.Slice).Elem()
}
// siteCallees computes a set of callees for call site `c` given program `callgraph`.
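interfaceUnderPtr and functionUnderPtr above now carry a seen set so self-referential pointer types (such as `type B **B` in the new recursive-types testdata) terminate instead of recursing forever. A minimal standalone sketch of the same cycle-guarded unwrapping for interfaces, using only go/types — the loop form and demo types are illustrative:

	package main

	import (
		"fmt"
		"go/token"
		"go/types"
	)

	// interfaceUnderPtr walks nested pointer types looking for an interface
	// element, keeping a seen set so self-referential pointers terminate.
	func interfaceUnderPtr(t types.Type) types.Type {
		seen := make(map[types.Type]bool)
		for {
			if seen[t] {
				return nil // cycle such as `type B *B`
			}
			seen[t] = true

			p, ok := t.Underlying().(*types.Pointer)
			if !ok {
				return nil
			}
			if types.IsInterface(p.Elem()) {
				return p.Elem()
			}
			t = p.Elem()
		}
	}

	func main() {
		iface := types.NewInterfaceType(nil, nil).Complete()
		ptrPtrIface := types.NewPointer(types.NewPointer(iface))
		fmt.Println(interfaceUnderPtr(ptrPtrIface)) // interface{}

		// A self-referential pointer type: type B *B
		tn := types.NewTypeName(token.NoPos, nil, "B", nil)
		b := types.NewNamed(tn, nil, nil)
		b.SetUnderlying(types.NewPointer(b))
		fmt.Println(interfaceUnderPtr(b)) // <nil>
	}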
diff --git a/go/callgraph/vta/vta.go b/go/callgraph/vta/vta.go
index 98fabe58c..583936003 100644
--- a/go/callgraph/vta/vta.go
+++ b/go/callgraph/vta/vta.go
@@ -3,7 +3,7 @@
// license that can be found in the LICENSE file.
// Package vta computes the call graph of a Go program using the Variable
-// Type Analysis (VTA) algorithm originally described in ``Practical Virtual
+// Type Analysis (VTA) algorithm originally described in “Practical Virtual
// Method Call Resolution for Java,” Vijay Sundaresan, Laurie Hendren,
// Chrislain Razafimahefa, Raja Vallée-Rai, Patrick Lam, Etienne Gagnon, and
// Charles Godin.
@@ -18,22 +18,23 @@
//
// A type propagation is a directed, labeled graph. A node can represent
// one of the following:
-// - A field of a struct type.
-// - A local (SSA) variable of a method/function.
-// - All pointers to a non-interface type.
-// - The return value of a method.
-// - All elements in an array.
-// - All elements in a slice.
-// - All elements in a map.
-// - All elements in a channel.
-// - A global variable.
+// - A field of a struct type.
+// - A local (SSA) variable of a method/function.
+// - All pointers to a non-interface type.
+// - The return value of a method.
+// - All elements in an array.
+// - All elements in a slice.
+// - All elements in a map.
+// - All elements in a channel.
+// - A global variable.
+//
// In addition, the implementation used in this package introduces
// a few Go specific kinds of nodes:
-// - (De)references of nested pointers to interfaces are modeled
-// as a unique nestedPtrInterface node in the type propagation graph.
-// - Each function literal is represented as a function node whose
-// internal value is the (SSA) representation of the function. This
-// is done to precisely infer flow of higher-order functions.
+// - (De)references of nested pointers to interfaces are modeled
+// as a unique nestedPtrInterface node in the type propagation graph.
+// - Each function literal is represented as a function node whose
+// internal value is the (SSA) representation of the function. This
+// is done to precisely infer flow of higher-order functions.
//
// Edges in the graph represent flow of types (and function literals) through
// the program. That is, the model 1) typing constraints that are induced by
@@ -53,6 +54,8 @@
// reaching the node representing the call site to create a set of callees.
package vta
+// TODO(zpavlinovic): update VTA for how it handles generic function bodies and instantiation wrappers.
+
import (
"go/types"
diff --git a/go/callgraph/vta/vta_go117_test.go b/go/callgraph/vta/vta_go117_test.go
index 9ce6a8864..04f6980e5 100644
--- a/go/callgraph/vta/vta_go117_test.go
+++ b/go/callgraph/vta/vta_go117_test.go
@@ -11,12 +11,13 @@ import (
"testing"
"golang.org/x/tools/go/callgraph/cha"
+ "golang.org/x/tools/go/ssa"
"golang.org/x/tools/go/ssa/ssautil"
)
func TestVTACallGraphGo117(t *testing.T) {
file := "testdata/src/go117.go"
- prog, want, err := testProg(file)
+ prog, want, err := testProg(file, ssa.BuilderMode(0))
if err != nil {
t.Fatalf("couldn't load test file '%s': %s", file, err)
}
diff --git a/go/callgraph/vta/vta_test.go b/go/callgraph/vta/vta_test.go
index 33ceaf909..549c4af45 100644
--- a/go/callgraph/vta/vta_test.go
+++ b/go/callgraph/vta/vta_test.go
@@ -13,6 +13,7 @@ import (
"golang.org/x/tools/go/callgraph/cha"
"golang.org/x/tools/go/ssa"
"golang.org/x/tools/go/ssa/ssautil"
+ "golang.org/x/tools/internal/typeparams"
)
func TestVTACallGraph(t *testing.T) {
@@ -24,9 +25,11 @@ func TestVTACallGraph(t *testing.T) {
"testdata/src/callgraph_collections.go",
"testdata/src/callgraph_fields.go",
"testdata/src/callgraph_field_funcs.go",
+ "testdata/src/callgraph_recursive_types.go",
+ "testdata/src/callgraph_issue_57756.go",
} {
t.Run(file, func(t *testing.T) {
- prog, want, err := testProg(file)
+ prog, want, err := testProg(file, ssa.BuilderMode(0))
if err != nil {
t.Fatalf("couldn't load test file '%s': %s", file, err)
}
@@ -46,7 +49,7 @@ func TestVTACallGraph(t *testing.T) {
// enabled by having an arbitrary function set as input to CallGraph
// instead of the whole program (i.e., ssautil.AllFunctions(prog)).
func TestVTAProgVsFuncSet(t *testing.T) {
- prog, want, err := testProg("testdata/src/callgraph_nested_ptr.go")
+ prog, want, err := testProg("testdata/src/callgraph_nested_ptr.go", ssa.BuilderMode(0))
if err != nil {
t.Fatalf("couldn't load test `testdata/src/callgraph_nested_ptr.go`: %s", err)
}
@@ -111,3 +114,24 @@ func TestVTAPanicMissingDefinitions(t *testing.T) {
}
}
}
+
+func TestVTACallGraphGenerics(t *testing.T) {
+ if !typeparams.Enabled {
+ t.Skip("TestVTACallGraphGenerics requires type parameters")
+ }
+
+ // TODO(zpavlinovic): add more tests
+ file := "testdata/src/callgraph_generics.go"
+ prog, want, err := testProg(file, ssa.InstantiateGenerics)
+ if err != nil {
+ t.Fatalf("couldn't load test file '%s': %s", file, err)
+ }
+ if len(want) == 0 {
+ t.Fatalf("couldn't find want in `%s`", file)
+ }
+
+ g := CallGraph(ssautil.AllFunctions(prog), cha.CallGraph(prog))
+ if got := callGraphStr(g); !subGraph(want, got) {
+ t.Errorf("computed callgraph %v should contain %v", got, want)
+ }
+}
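The tests above now pass an explicit ssa.BuilderMode to testProg and use ssa.InstantiateGenerics for the new generics test. A hedged sketch of the equivalent client-side sequence with go/packages — the "./..." pattern and the omitted per-package error handling are assumptions:

	package main

	import (
		"fmt"
		"log"

		"golang.org/x/tools/go/callgraph/cha"
		"golang.org/x/tools/go/callgraph/vta"
		"golang.org/x/tools/go/packages"
		"golang.org/x/tools/go/ssa"
		"golang.org/x/tools/go/ssa/ssautil"
	)

	func main() {
		cfg := &packages.Config{Mode: packages.LoadAllSyntax}
		pkgs, err := packages.Load(cfg, "./...")
		if err != nil {
			log.Fatal(err)
		}

		// Build SSA for all packages; InstantiateGenerics mirrors the mode
		// used by the generics test above. (Package load errors are not
		// checked here for brevity.)
		prog, _ := ssautil.AllPackages(pkgs, ssa.InstantiateGenerics)
		prog.Build()

		// VTA refines an initial call graph; the tests seed it with CHA.
		g := vta.CallGraph(ssautil.AllFunctions(prog), cha.CallGraph(prog))
		fmt.Println("call graph nodes:", len(g.Nodes))
	}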
diff --git a/go/cfg/builder.go b/go/cfg/builder.go
index 7f95a2961..dad6a444d 100644
--- a/go/cfg/builder.go
+++ b/go/cfg/builder.go
@@ -443,7 +443,6 @@ func (b *builder) rangeStmt(s *ast.RangeStmt, label *lblock) {
// Destinations associated with unlabeled for/switch/select stmts.
// We push/pop one of these as we enter/leave each construct and for
// each BranchStmt we scan for the innermost target of the right type.
-//
type targets struct {
tail *targets // rest of stack
_break *Block
@@ -454,7 +453,6 @@ type targets struct {
// Destinations associated with a labeled block.
// We populate these as labels are encountered in forward gotos or
// labeled statements.
-//
type lblock struct {
_goto *Block
_break *Block
@@ -463,7 +461,6 @@ type lblock struct {
// labeledBlock returns the branch target associated with the
// specified label, creating it if needed.
-//
func (b *builder) labeledBlock(label *ast.Ident) *lblock {
lb := b.lblocks[label.Obj]
if lb == nil {
diff --git a/go/cfg/cfg.go b/go/cfg/cfg.go
index 3ebc65f60..37d799f4b 100644
--- a/go/cfg/cfg.go
+++ b/go/cfg/cfg.go
@@ -20,14 +20,14 @@
//
// produces this CFG:
//
-// 1: x := f()
-// x != nil
-// succs: 2, 3
-// 2: T()
-// succs: 4
-// 3: F()
-// succs: 4
-// 4:
+// 1: x := f()
+// x != nil
+// succs: 2, 3
+// 2: T()
+// succs: 4
+// 3: F()
+// succs: 4
+// 4:
//
// The CFG does contain Return statements; even implicit returns are
// materialized (at the position of the function's closing brace).
@@ -36,7 +36,6 @@
// edges, nor the short-circuit semantics of the && and || operators,
// nor abnormal control flow caused by panic. If you need this
// information, use golang.org/x/tools/go/ssa instead.
-//
package cfg
import (
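The reflowed doc comment above shows the CFG produced for a small if/else function. A minimal sketch of producing that kind of output with this package, assuming a freshly parsed file and treating every call as one that may return (cfg.New needs no type information):

	package main

	import (
		"fmt"
		"go/ast"
		"go/parser"
		"go/token"
		"log"

		"golang.org/x/tools/go/cfg"
	)

	const src = `package p

	func f() {
		if x := g(); x != nil {
			T()
		} else {
			F()
		}
	}
	`

	func main() {
		fset := token.NewFileSet()
		file, err := parser.ParseFile(fset, "p.go", src, 0)
		if err != nil {
			log.Fatal(err)
		}
		fn := file.Decls[0].(*ast.FuncDecl)

		// Conservatively assume every call may return; the callback is
		// purely syntactic.
		mayReturn := func(*ast.CallExpr) bool { return true }
		g := cfg.New(fn.Body, mayReturn)
		fmt.Println(g.Format(fset))
	}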
diff --git a/go/expect/expect.go b/go/expect/expect.go
index bb203f58c..f5172ceab 100644
--- a/go/expect/expect.go
+++ b/go/expect/expect.go
@@ -16,20 +16,19 @@ The interpretation of the notes depends on the application.
For example, the test suite for a static checking tool might
use a @diag note to indicate an expected diagnostic:
- fmt.Printf("%s", 1) //@ diag("%s wants a string, got int")
+ fmt.Printf("%s", 1) //@ diag("%s wants a string, got int")
By contrast, the test suite for a source code navigation tool
might use notes to indicate the positions of features of
interest, the actions to be performed by the test,
and their expected outcomes:
- var x = 1 //@ x_decl
- ...
- print(x) //@ definition("x", x_decl)
- print(x) //@ typeof("x", "int")
+ var x = 1 //@ x_decl
+ ...
+ print(x) //@ definition("x", x_decl)
+ print(x) //@ typeof("x", "int")
-
-Note comment syntax
+# Note comment syntax
Note comments always start with the special marker @, which must be the
very first character after the comment opening pair, so //@ or /*@ with no
diff --git a/go/expect/expect_test.go b/go/expect/expect_test.go
index bd25ef831..e9ae40f7e 100644
--- a/go/expect/expect_test.go
+++ b/go/expect/expect_test.go
@@ -43,7 +43,7 @@ func TestMarker(t *testing.T) {
},
},
{
- filename: "testdata/go.mod",
+ filename: "testdata/go.fake.mod",
expectNotes: 2,
expectMarkers: map[string]string{
"αMarker": "αfake1α",
diff --git a/go/expect/testdata/go.fake.mod b/go/expect/testdata/go.fake.mod
new file mode 100644
index 000000000..ca84fcee9
--- /dev/null
+++ b/go/expect/testdata/go.fake.mod
@@ -0,0 +1,9 @@
+// This file is named go.fake.mod so it does not define a real module, which
+// would make the contents of this directory unavailable to the test when run
+// from outside the repository.
+
+module αfake1α //@mark(αMarker, "αfake1α")
+
+go 1.14
+
+require golang.org/modfile v0.0.0 //@mark(βMarker, "require golang.org/modfile v0.0.0")
diff --git a/go/expect/testdata/go.mod b/go/expect/testdata/go.mod
deleted file mode 100644
index d0323eae6..000000000
--- a/go/expect/testdata/go.mod
+++ /dev/null
@@ -1,5 +0,0 @@
-module αfake1α //@mark(αMarker, "αfake1α")
-
-go 1.14
-
-require golang.org/modfile v0.0.0 //@mark(βMarker, "require golang.org/modfile v0.0.0")
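The go/expect changes above reflow the @-note examples in the package doc and rename the module testdata to go.fake.mod. A minimal sketch of extracting such notes from Go source with expect.Parse — the file name, source text, and note names are illustrative:

	package main

	import (
		"fmt"
		"go/token"
		"log"

		"golang.org/x/tools/go/expect"
	)

	const src = `package p

	var x = 1 //@ x_decl

	func _() {
		print(x) //@ typeof("x", "int")
	}
	`

	func main() {
		fset := token.NewFileSet()
		// Content is passed directly; with nil content, Parse reads the file.
		notes, err := expect.Parse(fset, "example.go", []byte(src))
		if err != nil {
			log.Fatal(err)
		}
		for _, n := range notes {
			fmt.Printf("%s: %s%v\n", fset.Position(n.Pos), n.Name, n.Args)
		}
	}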
diff --git a/go/gccgoexportdata/gccgoexportdata_test.go b/go/gccgoexportdata/gccgoexportdata_test.go
index 0d0410249..39f0981c4 100644
--- a/go/gccgoexportdata/gccgoexportdata_test.go
+++ b/go/gccgoexportdata/gccgoexportdata_test.go
@@ -18,12 +18,12 @@ import (
//
// The testdata/{short,long}.a ELF archive files were produced by:
//
-// $ echo 'package foo; func F()' > foo.go
-// $ gccgo -c -fgo-pkgpath blah foo.go
-// $ objcopy -j .go_export foo.o foo.gox
-// $ ar q short.a foo.gox
-// $ objcopy -j .go_export foo.o name-longer-than-16-bytes.gox
-// $ ar q long.a name-longer-than-16-bytes.gox
+// $ echo 'package foo; func F()' > foo.go
+// $ gccgo -c -fgo-pkgpath blah foo.go
+// $ objcopy -j .go_export foo.o foo.gox
+// $ ar q short.a foo.gox
+// $ objcopy -j .go_export foo.o name-longer-than-16-bytes.gox
+// $ ar q long.a name-longer-than-16-bytes.gox
//
// The file long.a contains an archive string table.
//
diff --git a/go/gcexportdata/example_test.go b/go/gcexportdata/example_test.go
index 7df05abae..7371d31d4 100644
--- a/go/gcexportdata/example_test.go
+++ b/go/gcexportdata/example_test.go
@@ -2,8 +2,8 @@
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
-//go:build go1.7 && gc
-// +build go1.7,gc
+//go:build go1.7 && gc && !android && !ios && !js
+// +build go1.7,gc,!android,!ios,!js
package gcexportdata_test
@@ -30,7 +30,6 @@ func ExampleRead() {
log.Fatalf("can't find export data for fmt")
}
fmt.Printf("Package path: %s\n", path)
- fmt.Printf("Export data: %s\n", filepath.Base(filename))
// Open and read the file.
f, err := os.Open(filename)
@@ -51,25 +50,36 @@ func ExampleRead() {
log.Fatal(err)
}
- // Print package information.
+ // We can see all the names in Names.
members := pkg.Scope().Names()
- if members[0] == ".inittask" {
- // An improvement to init handling in 1.13 added ".inittask". Remove so go >= 1.13 and go < 1.13 both pass.
- members = members[1:]
+ foundPrintln := false
+ for _, member := range members {
+ if member == "Println" {
+ foundPrintln = true
+ break
+ }
}
- fmt.Printf("Package members: %s...\n", members[:5])
+ fmt.Print("Package members: ")
+ if foundPrintln {
+ fmt.Println("Println found")
+ } else {
+ fmt.Println("Println not found")
+ }
+
+ // We can also look up a name directly using Lookup.
println := pkg.Scope().Lookup("Println")
- posn := fset.Position(println.Pos())
- posn.Line = 123 // make example deterministic
- typ := strings.ReplaceAll(println.Type().String(), "interface{}", "any") // go 1.18+ uses the 'any' alias
+ // go 1.18+ uses the 'any' alias
+ typ := strings.ReplaceAll(println.Type().String(), "interface{}", "any")
fmt.Printf("Println type: %s\n", typ)
+ posn := fset.Position(println.Pos())
+ // make example deterministic
+ posn.Line = 123
fmt.Printf("Println location: %s\n", slashify(posn))
// Output:
//
// Package path: fmt
- // Export data: fmt.a
- // Package members: [Errorf Formatter Fprint Fprintf Fprintln]...
+ // Package members: Println found
// Println type: func(a ...any) (n int, err error)
// Println location: $GOROOT/src/fmt/print.go:123:1
}
diff --git a/go/gcexportdata/gcexportdata.go b/go/gcexportdata/gcexportdata.go
index cec819d64..165ede0f8 100644
--- a/go/gcexportdata/gcexportdata.go
+++ b/go/gcexportdata/gcexportdata.go
@@ -17,32 +17,46 @@
// developer tools, which will then be able to consume both Go 1.7 and
// Go 1.8 export data files, so they will work before and after the
// Go update. (See discussion at https://golang.org/issue/15651.)
-//
package gcexportdata // import "golang.org/x/tools/go/gcexportdata"
import (
"bufio"
"bytes"
+ "encoding/json"
"fmt"
"go/token"
"go/types"
"io"
- "io/ioutil"
+ "os/exec"
- "golang.org/x/tools/go/internal/gcimporter"
+ "golang.org/x/tools/internal/gcimporter"
)
// Find returns the name of an object (.o) or archive (.a) file
// containing type information for the specified import path,
-// using the workspace layout conventions of go/build.
+// using the go command.
// If no file was found, an empty filename is returned.
//
// A relative srcDir is interpreted relative to the current working directory.
//
// Find also returns the package's resolved (canonical) import path,
// reflecting the effects of srcDir and vendoring on importPath.
+//
+// Deprecated: Use the higher-level API in golang.org/x/tools/go/packages,
+// which is more efficient.
func Find(importPath, srcDir string) (filename, path string) {
- return gcimporter.FindPkg(importPath, srcDir)
+ cmd := exec.Command("go", "list", "-json", "-export", "--", importPath)
+ cmd.Dir = srcDir
+ out, err := cmd.CombinedOutput()
+ if err != nil {
+ return "", ""
+ }
+ var data struct {
+ ImportPath string
+ Export string
+ }
+ json.Unmarshal(out, &data)
+ return data.Export, data.ImportPath
}
// NewReader returns a reader for the export data section of an object
@@ -70,9 +84,26 @@ func NewReader(r io.Reader) (io.Reader, error) {
}
}
+// readAll works the same way as io.ReadAll, but avoids allocations and copies
+// by preallocating a byte slice of the necessary size if the size is known up
+// front. This is always possible when the input is an archive. In that case,
+// NewReader will return the known size using an io.LimitedReader.
+func readAll(r io.Reader) ([]byte, error) {
+ if lr, ok := r.(*io.LimitedReader); ok {
+ data := make([]byte, lr.N)
+ _, err := io.ReadFull(lr, data)
+ return data, err
+ }
+ return io.ReadAll(r)
+}
+
// Read reads export data from in, decodes it, and returns type
// information for the package.
-// The package name is specified by path.
+//
+// The package path (effectively its linker symbol prefix) is
+// specified by path, since unlike the package name, this information
+// may not be recorded in the export data.
+//
// File position information is added to fset.
//
// Read may inspect and add to the imports map to ensure that references
@@ -83,7 +114,7 @@ func NewReader(r io.Reader) (io.Reader, error) {
//
// On return, the state of the reader is undefined.
func Read(in io.Reader, fset *token.FileSet, imports map[string]*types.Package, path string) (*types.Package, error) {
- data, err := ioutil.ReadAll(in)
+ data, err := readAll(in)
if err != nil {
return nil, fmt.Errorf("reading export data for %q: %v", path, err)
}
@@ -92,22 +123,32 @@ func Read(in io.Reader, fset *token.FileSet, imports map[string]*types.Package,
return nil, fmt.Errorf("can't read export data for %q directly from an archive file (call gcexportdata.NewReader first to extract export data)", path)
}
- // The App Engine Go runtime v1.6 uses the old export data format.
- // TODO(adonovan): delete once v1.7 has been around for a while.
- if bytes.HasPrefix(data, []byte("package ")) {
- return gcimporter.ImportData(imports, path, path, bytes.NewReader(data))
- }
-
// The indexed export format starts with an 'i'; the older
// binary export format starts with a 'c', 'd', or 'v'
// (from "version"). Select appropriate importer.
- if len(data) > 0 && data[0] == 'i' {
- _, pkg, err := gcimporter.IImportData(fset, imports, data[1:], path)
- return pkg, err
- }
+ if len(data) > 0 {
+ switch data[0] {
+ case 'i':
+ _, pkg, err := gcimporter.IImportData(fset, imports, data[1:], path)
+ return pkg, err
+
+ case 'v', 'c', 'd':
+ _, pkg, err := gcimporter.BImportData(fset, imports, data, path)
+ return pkg, err
- _, pkg, err := gcimporter.BImportData(fset, imports, data, path)
- return pkg, err
+ case 'u':
+ _, pkg, err := gcimporter.UImportData(fset, imports, data[1:], path)
+ return pkg, err
+
+ default:
+ l := len(data)
+ if l > 10 {
+ l = 10
+ }
+ return nil, fmt.Errorf("unexpected export data with prefix %q for path %s", string(data[:l]), path)
+ }
+ }
+ return nil, fmt.Errorf("empty export data for %s", path)
}
// Write writes encoded type information for the specified package to out.
@@ -130,7 +171,7 @@ func Write(out io.Writer, fset *token.FileSet, pkg *types.Package) error {
//
// Experimental: This API is experimental and may change in the future.
func ReadBundle(in io.Reader, fset *token.FileSet, imports map[string]*types.Package) ([]*types.Package, error) {
- data, err := ioutil.ReadAll(in)
+ data, err := readAll(in)
if err != nil {
return nil, fmt.Errorf("reading export bundle: %v", err)
}
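The gcexportdata changes above reimplement Find on top of `go list -json -export`, add a size-aware readAll, and make Read dispatch on the export-data prefix byte ('i', 'v'/'c'/'d', or 'u'). A minimal sketch of the typical client sequence, mirroring the ExampleRead test shown earlier; Find is deprecated in favor of go/packages and is used here only for brevity:

	package main

	import (
		"fmt"
		"go/token"
		"go/types"
		"log"
		"os"

		"golang.org/x/tools/go/gcexportdata"
	)

	func main() {
		// Locate export data for "fmt" relative to the current directory.
		filename, path := gcexportdata.Find("fmt", ".")
		if filename == "" {
			log.Fatal("no export data for fmt")
		}

		f, err := os.Open(filename)
		if err != nil {
			log.Fatal(err)
		}
		defer f.Close()

		// NewReader skips any archive header and hands Read just the export
		// data section, whose first byte selects the importer.
		r, err := gcexportdata.NewReader(f)
		if err != nil {
			log.Fatal(err)
		}

		fset := token.NewFileSet()
		imports := make(map[string]*types.Package)
		pkg, err := gcexportdata.Read(r, fset, imports, path)
		if err != nil {
			log.Fatal(err)
		}
		fmt.Println(pkg.Path(), "exports", len(pkg.Scope().Names()), "names")
	}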
diff --git a/go/gcexportdata/gcexportdata_test.go b/go/gcexportdata/gcexportdata_test.go
deleted file mode 100644
index a0006c02d..000000000
--- a/go/gcexportdata/gcexportdata_test.go
+++ /dev/null
@@ -1,45 +0,0 @@
-// Copyright 2016 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-package gcexportdata_test
-
-import (
- "go/token"
- "go/types"
- "log"
- "os"
- "testing"
-
- "golang.org/x/tools/go/gcexportdata"
-)
-
-// Test to ensure that gcexportdata can read files produced by App
-// Engine Go runtime v1.6.
-func TestAppEngine16(t *testing.T) {
- // Open and read the file.
- f, err := os.Open("testdata/errors-ae16.a")
- if err != nil {
- t.Fatal(err)
- }
- defer f.Close()
- r, err := gcexportdata.NewReader(f)
- if err != nil {
- log.Fatalf("reading export data: %v", err)
- }
-
- // Decode the export data.
- fset := token.NewFileSet()
- imports := make(map[string]*types.Package)
- pkg, err := gcexportdata.Read(r, fset, imports, "errors")
- if err != nil {
- log.Fatal(err)
- }
-
- // Print package information.
- got := pkg.Scope().Lookup("New").Type().String()
- want := "func(text string) error"
- if got != want {
- t.Errorf("New.Type = %s, want %s", got, want)
- }
-}
diff --git a/go/gcexportdata/importer.go b/go/gcexportdata/importer.go
index efe221e7e..37a7247e2 100644
--- a/go/gcexportdata/importer.go
+++ b/go/gcexportdata/importer.go
@@ -23,6 +23,8 @@ import (
// or to control the FileSet or access the imports map populated during
// package loading.
//
+// Deprecated: Use the higher-level API in golang.org/x/tools/go/packages,
+// which is more efficient.
func NewImporter(fset *token.FileSet, imports map[string]*types.Package) types.ImporterFrom {
return importer{fset, imports}
}
diff --git a/go/gcexportdata/testdata/errors-ae16.a b/go/gcexportdata/testdata/errors-ae16.a
deleted file mode 100644
index 3f1dad54f..000000000
--- a/go/gcexportdata/testdata/errors-ae16.a
+++ /dev/null
Binary files differ
diff --git a/go/internal/cgo/cgo.go b/go/internal/cgo/cgo.go
index d01fb04a6..3fce48003 100644
--- a/go/internal/cgo/cgo.go
+++ b/go/internal/cgo/cgo.go
@@ -69,7 +69,6 @@ import (
// ProcessFiles invokes the cgo preprocessor on bp.CgoFiles, parses
// the output and returns the resulting ASTs.
-//
func ProcessFiles(bp *build.Package, fset *token.FileSet, DisplayPath func(path string) string, mode parser.Mode) ([]*ast.File, error) {
tmpdir, err := ioutil.TempDir("", strings.Replace(bp.ImportPath, "/", "_", -1)+"_C")
if err != nil {
diff --git a/go/internal/gccgoimporter/parser.go b/go/internal/gccgoimporter/parser.go
index 7f07553e8..9fdb6f8b0 100644
--- a/go/internal/gccgoimporter/parser.go
+++ b/go/internal/gccgoimporter/parser.go
@@ -127,8 +127,10 @@ func (p *parser) parseString() string {
return str
}
-// unquotedString = { unquotedStringChar } .
-// unquotedStringChar = <neither a whitespace nor a ';' char> .
+// parseUnquotedString parses an UnquotedString:
+//
+// unquotedString = { unquotedStringChar } .
+// unquotedStringChar = <neither a whitespace nor a ';' char> .
func (p *parser) parseUnquotedString() string {
if p.tok == scanner.EOF {
p.error("unexpected EOF")
@@ -163,7 +165,10 @@ func (p *parser) parseUnquotedQualifiedName() (path, name string) {
return p.parseQualifiedNameStr(p.parseUnquotedString())
}
-// qualifiedName = [ ["."] unquotedString "." ] unquotedString .
+// parseQualifiedNameStr is given the leading name (unquoted by the caller if necessary)
+// and then parses the remainder of a qualified name:
+//
+// qualifiedName = [ ["."] unquotedString "." ] unquotedString .
//
// The above production uses greedy matching.
func (p *parser) parseQualifiedNameStr(unquotedName string) (pkgpath, name string) {
@@ -191,7 +196,6 @@ func (p *parser) parseQualifiedNameStr(unquotedName string) (pkgpath, name strin
// getPkg returns the package for a given path. If the package is
// not found but we have a package name, create the package and
// add it to the p.imports map.
-//
func (p *parser) getPkg(pkgpath, name string) *types.Package {
// package unsafe is not in the imports map - handle explicitly
if pkgpath == "unsafe" {
@@ -208,7 +212,7 @@ func (p *parser) getPkg(pkgpath, name string) *types.Package {
// parseExportedName is like parseQualifiedName, but
// the package path is resolved to an imported *types.Package.
//
-// ExportedName = string [string] .
+// ExportedName = string [string] .
func (p *parser) parseExportedName() (pkg *types.Package, name string) {
path, name := p.parseQualifiedName()
var pkgname string
@@ -222,7 +226,9 @@ func (p *parser) parseExportedName() (pkg *types.Package, name string) {
return
}
-// Name = QualifiedName | "?" .
+// parseName parses a Name:
+//
+// Name = QualifiedName | "?" .
func (p *parser) parseName() string {
if p.tok == '?' {
// Anonymous.
@@ -241,7 +247,9 @@ func deref(typ types.Type) types.Type {
return typ
}
-// Field = Name Type [string] .
+// parseField parses a Field:
+//
+// Field = Name Type [string] .
func (p *parser) parseField(pkg *types.Package) (field *types.Var, tag string) {
name := p.parseName()
typ, n := p.parseTypeExtended(pkg)
@@ -269,7 +277,9 @@ func (p *parser) parseField(pkg *types.Package) (field *types.Var, tag string) {
return
}
-// Param = Name ["..."] Type .
+// parseParam parses a Param:
+//
+// Param = Name ["..."] Type .
func (p *parser) parseParam(pkg *types.Package) (param *types.Var, isVariadic bool) {
name := p.parseName()
// Ignore names invented for inlinable functions.
@@ -298,7 +308,9 @@ func (p *parser) parseParam(pkg *types.Package) (param *types.Var, isVariadic bo
return
}
-// Var = Name Type .
+// parseVar parses a Var:
+//
+// Var = Name Type .
func (p *parser) parseVar(pkg *types.Package) *types.Var {
name := p.parseName()
v := types.NewVar(token.NoPos, pkg, name, p.parseType(pkg))
@@ -311,7 +323,9 @@ func (p *parser) parseVar(pkg *types.Package) *types.Var {
return v
}
-// Conversion = "convert" "(" Type "," ConstValue ")" .
+// parseConversion parses a Conversion:
+//
+// Conversion = "convert" "(" Type "," ConstValue ")" .
func (p *parser) parseConversion(pkg *types.Package) (val constant.Value, typ types.Type) {
p.expectKeyword("convert")
p.expect('(')
@@ -322,8 +336,10 @@ func (p *parser) parseConversion(pkg *types.Package) (val constant.Value, typ ty
return
}
-// ConstValue = string | "false" | "true" | ["-"] (int ["'"] | FloatOrComplex) | Conversion .
-// FloatOrComplex = float ["i" | ("+"|"-") float "i"] .
+// parseConstValue parses a ConstValue:
+//
+// ConstValue = string | "false" | "true" | ["-"] (int ["'"] | FloatOrComplex) | Conversion .
+// FloatOrComplex = float ["i" | ("+"|"-") float "i"] .
func (p *parser) parseConstValue(pkg *types.Package) (val constant.Value, typ types.Type) {
// v3 changed to $false, $true, $convert, to avoid confusion
// with variable names in inline function bodies.
@@ -429,7 +445,9 @@ func (p *parser) parseConstValue(pkg *types.Package) (val constant.Value, typ ty
return
}
-// Const = Name [Type] "=" ConstValue .
+// parseConst parses a Const:
+//
+// Const = Name [Type] "=" ConstValue .
func (p *parser) parseConst(pkg *types.Package) *types.Const {
name := p.parseName()
var typ types.Type
@@ -510,9 +528,11 @@ func (p *parser) update(t types.Type, nlist []interface{}) {
}
}
-// NamedType = TypeName [ "=" ] Type { Method } .
-// TypeName = ExportedName .
-// Method = "func" "(" Param ")" Name ParamList ResultList [InlineBody] ";" .
+// parseNamedType parses a NamedType:
+//
+// NamedType = TypeName [ "=" ] Type { Method } .
+// TypeName = ExportedName .
+// Method = "func" "(" Param ")" Name ParamList ResultList [InlineBody] ";" .
func (p *parser) parseNamedType(nlist []interface{}) types.Type {
pkg, name := p.parseExportedName()
scope := pkg.Scope()
@@ -629,7 +649,9 @@ func (p *parser) parseInt() int {
return int(n)
}
-// ArrayOrSliceType = "[" [ int ] "]" Type .
+// parseArrayOrSliceType parses an ArrayOrSliceType:
+//
+// ArrayOrSliceType = "[" [ int ] "]" Type .
func (p *parser) parseArrayOrSliceType(pkg *types.Package, nlist []interface{}) types.Type {
p.expect('[')
if p.tok == ']' {
@@ -652,7 +674,9 @@ func (p *parser) parseArrayOrSliceType(pkg *types.Package, nlist []interface{})
return t
}
-// MapType = "map" "[" Type "]" Type .
+// parseMapType parses a MapType:
+//
+// MapType = "map" "[" Type "]" Type .
func (p *parser) parseMapType(pkg *types.Package, nlist []interface{}) types.Type {
p.expectKeyword("map")
@@ -668,7 +692,9 @@ func (p *parser) parseMapType(pkg *types.Package, nlist []interface{}) types.Typ
return t
}
-// ChanType = "chan" ["<-" | "-<"] Type .
+// parseChanType parses a ChanType:
+//
+// ChanType = "chan" ["<-" | "-<"] Type .
func (p *parser) parseChanType(pkg *types.Package, nlist []interface{}) types.Type {
p.expectKeyword("chan")
@@ -695,7 +721,9 @@ func (p *parser) parseChanType(pkg *types.Package, nlist []interface{}) types.Ty
return t
}
-// StructType = "struct" "{" { Field } "}" .
+// parseStructType parses a StructType:
+//
+// StructType = "struct" "{" { Field } "}" .
func (p *parser) parseStructType(pkg *types.Package, nlist []interface{}) types.Type {
p.expectKeyword("struct")
@@ -718,7 +746,9 @@ func (p *parser) parseStructType(pkg *types.Package, nlist []interface{}) types.
return t
}
-// ParamList = "(" [ { Parameter "," } Parameter ] ")" .
+// parseParamList parses a ParamList:
+//
+// ParamList = "(" [ { Parameter "," } Parameter ] ")" .
func (p *parser) parseParamList(pkg *types.Package) (*types.Tuple, bool) {
var list []*types.Var
isVariadic := false
@@ -742,7 +772,9 @@ func (p *parser) parseParamList(pkg *types.Package) (*types.Tuple, bool) {
return types.NewTuple(list...), isVariadic
}
-// ResultList = Type | ParamList .
+// parseResultList parses a ResultList:
+//
+// ResultList = Type | ParamList .
func (p *parser) parseResultList(pkg *types.Package) *types.Tuple {
switch p.tok {
case '<':
@@ -762,7 +794,9 @@ func (p *parser) parseResultList(pkg *types.Package) *types.Tuple {
}
}
-// FunctionType = ParamList ResultList .
+// parseFunctionType parses a FunctionType:
+//
+// FunctionType = ParamList ResultList .
func (p *parser) parseFunctionType(pkg *types.Package, nlist []interface{}) *types.Signature {
t := new(types.Signature)
p.update(t, nlist)
@@ -774,7 +808,9 @@ func (p *parser) parseFunctionType(pkg *types.Package, nlist []interface{}) *typ
return t
}
-// Func = Name FunctionType [InlineBody] .
+// parseFunc parses a Func:
+//
+// Func = Name FunctionType [InlineBody] .
func (p *parser) parseFunc(pkg *types.Package) *types.Func {
if p.tok == '/' {
// Skip an /*asm ID */ comment.
@@ -802,7 +838,9 @@ func (p *parser) parseFunc(pkg *types.Package) *types.Func {
return f
}
-// InterfaceType = "interface" "{" { ("?" Type | Func) ";" } "}" .
+// parseInterfaceType parses an InterfaceType:
+//
+// InterfaceType = "interface" "{" { ("?" Type | Func) ";" } "}" .
func (p *parser) parseInterfaceType(pkg *types.Package, nlist []interface{}) types.Type {
p.expectKeyword("interface")
@@ -831,7 +869,9 @@ func (p *parser) parseInterfaceType(pkg *types.Package, nlist []interface{}) typ
return t
}
-// PointerType = "*" ("any" | Type) .
+// parsePointerType parses a PointerType:
+//
+// PointerType = "*" ("any" | Type) .
func (p *parser) parsePointerType(pkg *types.Package, nlist []interface{}) types.Type {
p.expect('*')
if p.tok == scanner.Ident {
@@ -849,7 +889,9 @@ func (p *parser) parsePointerType(pkg *types.Package, nlist []interface{}) types
return t
}
-// TypeSpec = NamedType | MapType | ChanType | StructType | InterfaceType | PointerType | ArrayOrSliceType | FunctionType .
+// parseTypeSpec parses a TypeSpec:
+//
+// TypeSpec = NamedType | MapType | ChanType | StructType | InterfaceType | PointerType | ArrayOrSliceType | FunctionType .
func (p *parser) parseTypeSpec(pkg *types.Package, nlist []interface{}) types.Type {
switch p.tok {
case scanner.String:
@@ -935,10 +977,11 @@ func lookupBuiltinType(typ int) types.Type {
}[typ]
}
-// Type = "<" "type" ( "-" int | int [ TypeSpec ] ) ">" .
+// parseType parses a Type:
//
-// parseType updates the type map to t for all type numbers n.
+// Type = "<" "type" ( "-" int | int [ TypeSpec ] ) ">" .
//
+// parseType updates the type map to t for all type numbers n.
func (p *parser) parseType(pkg *types.Package, n ...interface{}) types.Type {
p.expect('<')
t, _ := p.parseTypeAfterAngle(pkg, n...)
@@ -1028,7 +1071,9 @@ func (p *parser) skipInlineBody() {
}
}
-// Types = "types" maxp1 exportedp1 (offset length)* .
+// parseTypes parses a Types:
+//
+// Types = "types" maxp1 exportedp1 (offset length)* .
func (p *parser) parseTypes(pkg *types.Package) {
maxp1 := p.parseInt()
exportedp1 := p.parseInt()
@@ -1102,7 +1147,9 @@ func (p *parser) parseSavedType(pkg *types.Package, i int, nlist []interface{})
}
}
-// PackageInit = unquotedString unquotedString int .
+// parsePackageInit parses a PackageInit:
+//
+// PackageInit = unquotedString unquotedString int .
func (p *parser) parsePackageInit() PackageInit {
name := p.parseUnquotedString()
initfunc := p.parseUnquotedString()
@@ -1120,10 +1167,12 @@ func (p *parser) maybeCreatePackage() {
}
}
-// InitDataDirective = ( "v1" | "v2" | "v3" ) ";" |
-// "priority" int ";" |
-// "init" { PackageInit } ";" |
-// "checksum" unquotedString ";" .
+// parseInitDataDirective parses an InitDataDirective:
+//
+// InitDataDirective = ( "v1" | "v2" | "v3" ) ";" |
+// "priority" int ";" |
+// "init" { PackageInit } ";" |
+// "checksum" unquotedString ";" .
func (p *parser) parseInitDataDirective() {
if p.tok != scanner.Ident {
// unexpected token kind; panic
@@ -1173,16 +1222,18 @@ func (p *parser) parseInitDataDirective() {
}
}
-// Directive = InitDataDirective |
-// "package" unquotedString [ unquotedString ] [ unquotedString ] ";" |
-// "pkgpath" unquotedString ";" |
-// "prefix" unquotedString ";" |
-// "import" unquotedString unquotedString string ";" |
-// "indirectimport" unquotedString unquotedstring ";" |
-// "func" Func ";" |
-// "type" Type ";" |
-// "var" Var ";" |
-// "const" Const ";" .
+// parseDirective parses a Directive:
+//
+// Directive = InitDataDirective |
+// "package" unquotedString [ unquotedString ] [ unquotedString ] ";" |
+// "pkgpath" unquotedString ";" |
+// "prefix" unquotedString ";" |
+// "import" unquotedString unquotedString string ";" |
+//	"indirectimport" unquotedString unquotedString ";" |
+// "func" Func ";" |
+// "type" Type ";" |
+// "var" Var ";" |
+// "const" Const ";" .
func (p *parser) parseDirective() {
if p.tok != scanner.Ident {
// unexpected token kind; panic
@@ -1266,7 +1317,9 @@ func (p *parser) parseDirective() {
}
}
-// Package = { Directive } .
+// parsePackage parses a Package:
+//
+// Package = { Directive } .
func (p *parser) parsePackage() *types.Package {
for p.tok != scanner.EOF {
p.parseDirective()
diff --git a/go/internal/gccgoimporter/testenv_test.go b/go/internal/gccgoimporter/testenv_test.go
index 7afa464d9..9be8dcb32 100644
--- a/go/internal/gccgoimporter/testenv_test.go
+++ b/go/internal/gccgoimporter/testenv_test.go
@@ -12,7 +12,7 @@ import (
"testing"
)
-// HasGoBuild reports whether the current system can build programs with ``go build''
+// HasGoBuild reports whether the current system can build programs with “go build”
// and then run them with os.StartProcess or exec.Command.
func HasGoBuild() bool {
switch runtime.GOOS {
@@ -40,7 +40,7 @@ func HasExec() bool {
return true
}
-// MustHaveGoBuild checks that the current system can build programs with ``go build''
+// MustHaveGoBuild checks that the current system can build programs with “go build”
// and then run them with os.StartProcess or exec.Command.
// If not, MustHaveGoBuild calls t.Skip with an explanation.
func MustHaveGoBuild(t *testing.T) {
diff --git a/go/internal/gcimporter/bexport.go b/go/internal/gcimporter/bexport.go
deleted file mode 100644
index 0a3cdb9a3..000000000
--- a/go/internal/gcimporter/bexport.go
+++ /dev/null
@@ -1,851 +0,0 @@
-// Copyright 2016 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-// Binary package export.
-// This file was derived from $GOROOT/src/cmd/compile/internal/gc/bexport.go;
-// see that file for specification of the format.
-
-package gcimporter
-
-import (
- "bytes"
- "encoding/binary"
- "fmt"
- "go/ast"
- "go/constant"
- "go/token"
- "go/types"
- "math"
- "math/big"
- "sort"
- "strings"
-)
-
-// If debugFormat is set, each integer and string value is preceded by a marker
-// and position information in the encoding. This mechanism permits an importer
-// to recognize immediately when it is out of sync. The importer recognizes this
-// mode automatically (i.e., it can import export data produced with debugging
-// support even if debugFormat is not set at the time of import). This mode will
-// lead to massively larger export data (by a factor of 2 to 3) and should only
-// be enabled during development and debugging.
-//
-// NOTE: This flag is the first flag to enable if importing dies because of
-// (suspected) format errors, and whenever a change is made to the format.
-const debugFormat = false // default: false
-
-// Current export format version. Increase with each format change.
-// Note: The latest binary (non-indexed) export format is at version 6.
-// This exporter is still at level 4, but it doesn't matter since
-// the binary importer can handle older versions just fine.
-// 6: package height (CL 105038) -- NOT IMPLEMENTED HERE
-// 5: improved position encoding efficiency (issue 20080, CL 41619) -- NOT IMPLEMEMTED HERE
-// 4: type name objects support type aliases, uses aliasTag
-// 3: Go1.8 encoding (same as version 2, aliasTag defined but never used)
-// 2: removed unused bool in ODCL export (compiler only)
-// 1: header format change (more regular), export package for _ struct fields
-// 0: Go1.7 encoding
-const exportVersion = 4
-
-// trackAllTypes enables cycle tracking for all types, not just named
-// types. The existing compiler invariants assume that unnamed types
-// that are not completely set up are not used, or else there are spurious
-// errors.
-// If disabled, only named types are tracked, possibly leading to slightly
-// less efficient encoding in rare cases. It also prevents the export of
-// some corner-case type declarations (but those are not handled correctly
-// with with the textual export format either).
-// TODO(gri) enable and remove once issues caused by it are fixed
-const trackAllTypes = false
-
-type exporter struct {
- fset *token.FileSet
- out bytes.Buffer
-
- // object -> index maps, indexed in order of serialization
- strIndex map[string]int
- pkgIndex map[*types.Package]int
- typIndex map[types.Type]int
-
- // position encoding
- posInfoFormat bool
- prevFile string
- prevLine int
-
- // debugging support
- written int // bytes written
- indent int // for trace
-}
-
-// internalError represents an error generated inside this package.
-type internalError string
-
-func (e internalError) Error() string { return "gcimporter: " + string(e) }
-
-func internalErrorf(format string, args ...interface{}) error {
- return internalError(fmt.Sprintf(format, args...))
-}
-
-// BExportData returns binary export data for pkg.
-// If no file set is provided, position info will be missing.
-func BExportData(fset *token.FileSet, pkg *types.Package) (b []byte, err error) {
- if !debug {
- defer func() {
- if e := recover(); e != nil {
- if ierr, ok := e.(internalError); ok {
- err = ierr
- return
- }
- // Not an internal error; panic again.
- panic(e)
- }
- }()
- }
-
- p := exporter{
- fset: fset,
- strIndex: map[string]int{"": 0}, // empty string is mapped to 0
- pkgIndex: make(map[*types.Package]int),
- typIndex: make(map[types.Type]int),
- posInfoFormat: true, // TODO(gri) might become a flag, eventually
- }
-
- // write version info
- // The version string must start with "version %d" where %d is the version
- // number. Additional debugging information may follow after a blank; that
- // text is ignored by the importer.
- p.rawStringln(fmt.Sprintf("version %d", exportVersion))
- var debug string
- if debugFormat {
- debug = "debug"
- }
- p.rawStringln(debug) // cannot use p.bool since it's affected by debugFormat; also want to see this clearly
- p.bool(trackAllTypes)
- p.bool(p.posInfoFormat)
-
- // --- generic export data ---
-
- // populate type map with predeclared "known" types
- for index, typ := range predeclared() {
- p.typIndex[typ] = index
- }
- if len(p.typIndex) != len(predeclared()) {
- return nil, internalError("duplicate entries in type map?")
- }
-
- // write package data
- p.pkg(pkg, true)
- if trace {
- p.tracef("\n")
- }
-
- // write objects
- objcount := 0
- scope := pkg.Scope()
- for _, name := range scope.Names() {
- if !ast.IsExported(name) {
- continue
- }
- if trace {
- p.tracef("\n")
- }
- p.obj(scope.Lookup(name))
- objcount++
- }
-
- // indicate end of list
- if trace {
- p.tracef("\n")
- }
- p.tag(endTag)
-
- // for self-verification only (redundant)
- p.int(objcount)
-
- if trace {
- p.tracef("\n")
- }
-
- // --- end of export data ---
-
- return p.out.Bytes(), nil
-}
-
-func (p *exporter) pkg(pkg *types.Package, emptypath bool) {
- if pkg == nil {
- panic(internalError("unexpected nil pkg"))
- }
-
- // if we saw the package before, write its index (>= 0)
- if i, ok := p.pkgIndex[pkg]; ok {
- p.index('P', i)
- return
- }
-
- // otherwise, remember the package, write the package tag (< 0) and package data
- if trace {
- p.tracef("P%d = { ", len(p.pkgIndex))
- defer p.tracef("} ")
- }
- p.pkgIndex[pkg] = len(p.pkgIndex)
-
- p.tag(packageTag)
- p.string(pkg.Name())
- if emptypath {
- p.string("")
- } else {
- p.string(pkg.Path())
- }
-}
-
-func (p *exporter) obj(obj types.Object) {
- switch obj := obj.(type) {
- case *types.Const:
- p.tag(constTag)
- p.pos(obj)
- p.qualifiedName(obj)
- p.typ(obj.Type())
- p.value(obj.Val())
-
- case *types.TypeName:
- if obj.IsAlias() {
- p.tag(aliasTag)
- p.pos(obj)
- p.qualifiedName(obj)
- } else {
- p.tag(typeTag)
- }
- p.typ(obj.Type())
-
- case *types.Var:
- p.tag(varTag)
- p.pos(obj)
- p.qualifiedName(obj)
- p.typ(obj.Type())
-
- case *types.Func:
- p.tag(funcTag)
- p.pos(obj)
- p.qualifiedName(obj)
- sig := obj.Type().(*types.Signature)
- p.paramList(sig.Params(), sig.Variadic())
- p.paramList(sig.Results(), false)
-
- default:
- panic(internalErrorf("unexpected object %v (%T)", obj, obj))
- }
-}
-
-func (p *exporter) pos(obj types.Object) {
- if !p.posInfoFormat {
- return
- }
-
- file, line := p.fileLine(obj)
- if file == p.prevFile {
- // common case: write line delta
- // delta == 0 means different file or no line change
- delta := line - p.prevLine
- p.int(delta)
- if delta == 0 {
- p.int(-1) // -1 means no file change
- }
- } else {
- // different file
- p.int(0)
- // Encode filename as length of common prefix with previous
- // filename, followed by (possibly empty) suffix. Filenames
- // frequently share path prefixes, so this can save a lot
- // of space and make export data size less dependent on file
- // path length. The suffix is unlikely to be empty because
- // file names tend to end in ".go".
- n := commonPrefixLen(p.prevFile, file)
- p.int(n) // n >= 0
- p.string(file[n:]) // write suffix only
- p.prevFile = file
- p.int(line)
- }
- p.prevLine = line
-}
-
-func (p *exporter) fileLine(obj types.Object) (file string, line int) {
- if p.fset != nil {
- pos := p.fset.Position(obj.Pos())
- file = pos.Filename
- line = pos.Line
- }
- return
-}
-
-func commonPrefixLen(a, b string) int {
- if len(a) > len(b) {
- a, b = b, a
- }
- // len(a) <= len(b)
- i := 0
- for i < len(a) && a[i] == b[i] {
- i++
- }
- return i
-}
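
The prefix compression above keeps repeated directory components out of the export data. A minimal standalone sketch (hypothetical file names; commonPrefixLen is re-implemented locally so the example runs on its own):

package main

import "fmt"

// commonPrefixLen mirrors the helper above: the length of the longest
// common prefix of a and b.
func commonPrefixLen(a, b string) int {
	if len(a) > len(b) {
		a, b = b, a
	}
	i := 0
	for i < len(a) && a[i] == b[i] {
		i++
	}
	return i
}

func main() {
	prev := "/usr/lib/go/src/fmt/print.go"
	next := "/usr/lib/go/src/fmt/scan.go"
	n := commonPrefixLen(prev, next)
	// The exporter writes only n and the suffix, never the full path again.
	fmt.Printf("prefix length %d, suffix %q\n", n, next[n:])
}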
-
-func (p *exporter) qualifiedName(obj types.Object) {
- p.string(obj.Name())
- p.pkg(obj.Pkg(), false)
-}
-
-func (p *exporter) typ(t types.Type) {
- if t == nil {
- panic(internalError("nil type"))
- }
-
- // Possible optimization: Anonymous pointer types *T where
- // T is a named type are common. We could canonicalize all
- // such types *T to a single type PT = *T. This would lead
- // to at most one *T entry in typIndex, and all future *T's
- // would be encoded as the respective index directly. Would
- // save 1 byte (pointerTag) per *T and reduce the typIndex
- // size (at the cost of a canonicalization map). We can do
- // this later, without encoding format change.
-
- // if we saw the type before, write its index (>= 0)
- if i, ok := p.typIndex[t]; ok {
- p.index('T', i)
- return
- }
-
- // otherwise, remember the type, write the type tag (< 0) and type data
- if trackAllTypes {
- if trace {
- p.tracef("T%d = {>\n", len(p.typIndex))
- defer p.tracef("<\n} ")
- }
- p.typIndex[t] = len(p.typIndex)
- }
-
- switch t := t.(type) {
- case *types.Named:
- if !trackAllTypes {
- // if we don't track all types, track named types now
- p.typIndex[t] = len(p.typIndex)
- }
-
- p.tag(namedTag)
- p.pos(t.Obj())
- p.qualifiedName(t.Obj())
- p.typ(t.Underlying())
- if !types.IsInterface(t) {
- p.assocMethods(t)
- }
-
- case *types.Array:
- p.tag(arrayTag)
- p.int64(t.Len())
- p.typ(t.Elem())
-
- case *types.Slice:
- p.tag(sliceTag)
- p.typ(t.Elem())
-
- case *dddSlice:
- p.tag(dddTag)
- p.typ(t.elem)
-
- case *types.Struct:
- p.tag(structTag)
- p.fieldList(t)
-
- case *types.Pointer:
- p.tag(pointerTag)
- p.typ(t.Elem())
-
- case *types.Signature:
- p.tag(signatureTag)
- p.paramList(t.Params(), t.Variadic())
- p.paramList(t.Results(), false)
-
- case *types.Interface:
- p.tag(interfaceTag)
- p.iface(t)
-
- case *types.Map:
- p.tag(mapTag)
- p.typ(t.Key())
- p.typ(t.Elem())
-
- case *types.Chan:
- p.tag(chanTag)
- p.int(int(3 - t.Dir())) // hack
- p.typ(t.Elem())
-
- default:
- panic(internalErrorf("unexpected type %T: %s", t, t))
- }
-}
-
-func (p *exporter) assocMethods(named *types.Named) {
- // Sort methods (for determinism).
- var methods []*types.Func
- for i := 0; i < named.NumMethods(); i++ {
- methods = append(methods, named.Method(i))
- }
- sort.Sort(methodsByName(methods))
-
- p.int(len(methods))
-
- if trace && methods != nil {
- p.tracef("associated methods {>\n")
- }
-
- for i, m := range methods {
- if trace && i > 0 {
- p.tracef("\n")
- }
-
- p.pos(m)
- name := m.Name()
- p.string(name)
- if !exported(name) {
- p.pkg(m.Pkg(), false)
- }
-
- sig := m.Type().(*types.Signature)
- p.paramList(types.NewTuple(sig.Recv()), false)
- p.paramList(sig.Params(), sig.Variadic())
- p.paramList(sig.Results(), false)
- p.int(0) // dummy value for go:nointerface pragma - ignored by importer
- }
-
- if trace && methods != nil {
- p.tracef("<\n} ")
- }
-}
-
-type methodsByName []*types.Func
-
-func (x methodsByName) Len() int { return len(x) }
-func (x methodsByName) Swap(i, j int) { x[i], x[j] = x[j], x[i] }
-func (x methodsByName) Less(i, j int) bool { return x[i].Name() < x[j].Name() }
-
-func (p *exporter) fieldList(t *types.Struct) {
- if trace && t.NumFields() > 0 {
- p.tracef("fields {>\n")
- defer p.tracef("<\n} ")
- }
-
- p.int(t.NumFields())
- for i := 0; i < t.NumFields(); i++ {
- if trace && i > 0 {
- p.tracef("\n")
- }
- p.field(t.Field(i))
- p.string(t.Tag(i))
- }
-}
-
-func (p *exporter) field(f *types.Var) {
- if !f.IsField() {
- panic(internalError("field expected"))
- }
-
- p.pos(f)
- p.fieldName(f)
- p.typ(f.Type())
-}
-
-func (p *exporter) iface(t *types.Interface) {
- // TODO(gri): enable importer to load embedded interfaces,
- // then emit Embeddeds and ExplicitMethods separately here.
- p.int(0)
-
- n := t.NumMethods()
- if trace && n > 0 {
- p.tracef("methods {>\n")
- defer p.tracef("<\n} ")
- }
- p.int(n)
- for i := 0; i < n; i++ {
- if trace && i > 0 {
- p.tracef("\n")
- }
- p.method(t.Method(i))
- }
-}
-
-func (p *exporter) method(m *types.Func) {
- sig := m.Type().(*types.Signature)
- if sig.Recv() == nil {
- panic(internalError("method expected"))
- }
-
- p.pos(m)
- p.string(m.Name())
- if m.Name() != "_" && !ast.IsExported(m.Name()) {
- p.pkg(m.Pkg(), false)
- }
-
- // interface method; no need to encode receiver.
- p.paramList(sig.Params(), sig.Variadic())
- p.paramList(sig.Results(), false)
-}
-
-func (p *exporter) fieldName(f *types.Var) {
- name := f.Name()
-
- if f.Anonymous() {
- // anonymous field - we distinguish between 3 cases:
- // 1) field name matches base type name and is exported
- // 2) field name matches base type name and is not exported
- // 3) field name doesn't match base type name (alias name)
- bname := basetypeName(f.Type())
- if name == bname {
- if ast.IsExported(name) {
- name = "" // 1) we don't need to know the field name or package
- } else {
- name = "?" // 2) use unexported name "?" to force package export
- }
- } else {
- // 3) indicate alias and export name as is
- // (this requires an extra "@" but this is a rare case)
- p.string("@")
- }
- }
-
- p.string(name)
- if name != "" && !ast.IsExported(name) {
- p.pkg(f.Pkg(), false)
- }
-}
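
A rough, self-contained sketch of the three anonymous-field cases listed above. The returned strings stand for what fieldName would emit; "<pkg>" is only a placeholder for the package reference, not a real value:

package main

import (
	"fmt"
	"go/ast"
)

// encodeAnonFieldName sketches the marker convention: "" for case 1,
// "?" for case 2, and "@" followed by the alias name for case 3.
func encodeAnonFieldName(name, baseTypeName string) []string {
	var out []string
	if name != baseTypeName {
		out = append(out, "@") // 3) alias: marker, then the name as is
	} else if ast.IsExported(name) {
		return []string{""} // 1) name (and package) implied by the base type
	} else {
		name = "?" // 2) unexported: "?" forces a package reference
	}
	out = append(out, name)
	if !ast.IsExported(name) {
		out = append(out, "<pkg>") // package reference follows
	}
	return out
}

func main() {
	fmt.Printf("%q\n", encodeAnonFieldName("Reader", "Reader")) // [""]
	fmt.Printf("%q\n", encodeAnonFieldName("reader", "reader")) // ["?" "<pkg>"]
	fmt.Printf("%q\n", encodeAnonFieldName("R", "Reader"))      // ["@" "R"]
}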
-
-func basetypeName(typ types.Type) string {
- switch typ := deref(typ).(type) {
- case *types.Basic:
- return typ.Name()
- case *types.Named:
- return typ.Obj().Name()
- default:
- return "" // unnamed type
- }
-}
-
-func (p *exporter) paramList(params *types.Tuple, variadic bool) {
- // use negative length to indicate unnamed parameters
- // (look at the first parameter only since either all
- // names are present or all are absent)
- n := params.Len()
- if n > 0 && params.At(0).Name() == "" {
- n = -n
- }
- p.int(n)
- for i := 0; i < params.Len(); i++ {
- q := params.At(i)
- t := q.Type()
- if variadic && i == params.Len()-1 {
- t = &dddSlice{t.(*types.Slice).Elem()}
- }
- p.typ(t)
- if n > 0 {
- name := q.Name()
- p.string(name)
- if name != "_" {
- p.pkg(q.Pkg(), false)
- }
- }
- p.string("") // no compiler-specific info
- }
-}
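
A small sketch of the sign convention used by paramList, built with throwaway go/types tuples (the names here are illustrative only):

package main

import (
	"fmt"
	"go/types"
)

// encodedLen negates the count when the parameters are unnamed,
// exactly as the exporter does before writing it.
func encodedLen(params *types.Tuple) int {
	n := params.Len()
	if n > 0 && params.At(0).Name() == "" {
		n = -n
	}
	return n
}

func main() {
	named := types.NewTuple(types.NewVar(0, nil, "x", types.Typ[types.Int]))
	unnamed := types.NewTuple(types.NewVar(0, nil, "", types.Typ[types.Int]))
	fmt.Println(encodedLen(named), encodedLen(unnamed)) // 1 -1
}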
-
-func (p *exporter) value(x constant.Value) {
- if trace {
- p.tracef("= ")
- }
-
- switch x.Kind() {
- case constant.Bool:
- tag := falseTag
- if constant.BoolVal(x) {
- tag = trueTag
- }
- p.tag(tag)
-
- case constant.Int:
- if v, exact := constant.Int64Val(x); exact {
- // common case: x fits into an int64 - use compact encoding
- p.tag(int64Tag)
- p.int64(v)
- return
- }
- // uncommon case: large x - use float encoding
- // (powers of 2 will be encoded efficiently with exponent)
- p.tag(floatTag)
- p.float(constant.ToFloat(x))
-
- case constant.Float:
- p.tag(floatTag)
- p.float(x)
-
- case constant.Complex:
- p.tag(complexTag)
- p.float(constant.Real(x))
- p.float(constant.Imag(x))
-
- case constant.String:
- p.tag(stringTag)
- p.string(constant.StringVal(x))
-
- case constant.Unknown:
- // package contains type errors
- p.tag(unknownTag)
-
- default:
- panic(internalErrorf("unexpected value %v (%T)", x, x))
- }
-}
-
-func (p *exporter) float(x constant.Value) {
- if x.Kind() != constant.Float {
- panic(internalErrorf("unexpected constant %v, want float", x))
- }
- // extract sign (there is no -0)
- sign := constant.Sign(x)
- if sign == 0 {
- // x == 0
- p.int(0)
- return
- }
- // x != 0
-
- var f big.Float
- if v, exact := constant.Float64Val(x); exact {
- // float64
- f.SetFloat64(v)
- } else if num, denom := constant.Num(x), constant.Denom(x); num.Kind() == constant.Int {
- // TODO(gri): add big.Rat accessor to constant.Value.
- r := valueToRat(num)
- f.SetRat(r.Quo(r, valueToRat(denom)))
- } else {
- // Value too large to represent as a fraction => inaccessible.
- // TODO(gri): add big.Float accessor to constant.Value.
- f.SetFloat64(math.MaxFloat64) // FIXME
- }
-
- // extract exponent such that 0.5 <= m < 1.0
- var m big.Float
- exp := f.MantExp(&m)
-
- // extract mantissa as *big.Int
- // - set exponent large enough so mant satisfies mant.IsInt()
- // - get *big.Int from mant
- m.SetMantExp(&m, int(m.MinPrec()))
- mant, acc := m.Int(nil)
- if acc != big.Exact {
- panic(internalError("internal error"))
- }
-
- p.int(sign)
- p.int(exp)
- p.string(string(mant.Bytes()))
-}
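
A hedged sketch of the mantissa/exponent split performed above, using 6.25 as an arbitrary example value:

package main

import (
	"fmt"
	"math/big"
)

func main() {
	var f big.Float
	f.SetFloat64(6.25) // example value only

	// Extract the exponent so that 0.5 <= m < 1.0, as the exporter does.
	var m big.Float
	exp := f.MantExp(&m)

	// Scale m up until it is an integer, then take it as a *big.Int.
	m.SetMantExp(&m, int(m.MinPrec()))
	mant, _ := m.Int(nil)

	fmt.Println(exp, mant) // 3 25, since 6.25 == (25/32) * 2^3
}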
-
-func valueToRat(x constant.Value) *big.Rat {
- // Convert little-endian to big-endian.
- // I can't believe this is necessary.
- bytes := constant.Bytes(x)
- for i := 0; i < len(bytes)/2; i++ {
- bytes[i], bytes[len(bytes)-1-i] = bytes[len(bytes)-1-i], bytes[i]
- }
- return new(big.Rat).SetInt(new(big.Int).SetBytes(bytes))
-}
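
constant.Bytes yields little-endian bytes while big.Int.SetBytes expects big-endian, which is all the loop above corrects; a quick illustration with an arbitrary constant:

package main

import (
	"fmt"
	"go/constant"
	"math/big"
)

func main() {
	x := constant.MakeInt64(0x0102) // 258
	le := constant.Bytes(x)         // little-endian: [2 1]
	be := make([]byte, len(le))
	for i, b := range le {
		be[len(le)-1-i] = b // reverse into big-endian order
	}
	fmt.Println(le, be, new(big.Int).SetBytes(be)) // [2 1] [1 2] 258
}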
-
-func (p *exporter) bool(b bool) bool {
- if trace {
- p.tracef("[")
- defer p.tracef("= %v] ", b)
- }
-
- x := 0
- if b {
- x = 1
- }
- p.int(x)
- return b
-}
-
-// ----------------------------------------------------------------------------
-// Low-level encoders
-
-func (p *exporter) index(marker byte, index int) {
- if index < 0 {
- panic(internalError("invalid index < 0"))
- }
- if debugFormat {
- p.marker('t')
- }
- if trace {
- p.tracef("%c%d ", marker, index)
- }
- p.rawInt64(int64(index))
-}
-
-func (p *exporter) tag(tag int) {
- if tag >= 0 {
- panic(internalError("invalid tag >= 0"))
- }
- if debugFormat {
- p.marker('t')
- }
- if trace {
- p.tracef("%s ", tagString[-tag])
- }
- p.rawInt64(int64(tag))
-}
-
-func (p *exporter) int(x int) {
- p.int64(int64(x))
-}
-
-func (p *exporter) int64(x int64) {
- if debugFormat {
- p.marker('i')
- }
- if trace {
- p.tracef("%d ", x)
- }
- p.rawInt64(x)
-}
-
-func (p *exporter) string(s string) {
- if debugFormat {
- p.marker('s')
- }
- if trace {
- p.tracef("%q ", s)
- }
- // if we saw the string before, write its index (>= 0)
- // (the empty string is mapped to 0)
- if i, ok := p.strIndex[s]; ok {
- p.rawInt64(int64(i))
- return
- }
- // otherwise, remember string and write its negative length and bytes
- p.strIndex[s] = len(p.strIndex)
- p.rawInt64(-int64(len(s)))
- for i := 0; i < len(s); i++ {
- p.rawByte(s[i])
- }
-}
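
A tiny stand-in for the string interning above: the first occurrence is written as a negative length followed by the bytes, later occurrences as the small index assigned on first use (writeString is not part of the package):

package main

import "fmt"

// writeString reports what the exporter would emit for s: either the
// existing index, or the negative length announcing the raw bytes.
func writeString(strIndex map[string]int, s string) (payload int, isIndex bool) {
	if i, ok := strIndex[s]; ok {
		return i, true
	}
	strIndex[s] = len(strIndex)
	return -len(s), false
}

func main() {
	idx := map[string]int{"": 0}         // the empty string is pre-assigned index 0
	fmt.Println(writeString(idx, "fmt")) // -3 false: the bytes follow
	fmt.Println(writeString(idx, "fmt")) // 1 true: back-reference only
}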
-
-// marker emits a marker byte and position information which makes
-// it easy for a reader to detect if it is "out of sync". Used for
-// debugFormat format only.
-func (p *exporter) marker(m byte) {
- p.rawByte(m)
- // Enable this for help tracking down the location
- // of an incorrect marker when running in debugFormat.
- if false && trace {
- p.tracef("#%d ", p.written)
- }
- p.rawInt64(int64(p.written))
-}
-
-// rawInt64 should only be used by low-level encoders.
-func (p *exporter) rawInt64(x int64) {
- var tmp [binary.MaxVarintLen64]byte
- n := binary.PutVarint(tmp[:], x)
- for i := 0; i < n; i++ {
- p.rawByte(tmp[i])
- }
-}
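
Every integer in this format is a signed varint; a short round trip using the same encoding/binary calls (the value is arbitrary):

package main

import (
	"encoding/binary"
	"fmt"
)

func main() {
	var tmp [binary.MaxVarintLen64]byte
	n := binary.PutVarint(tmp[:], -123456) // zig-zag varint, as rawInt64 writes
	fmt.Println(n, tmp[:n])

	v, _ := binary.Varint(tmp[:n]) // the importer reads it back the same way
	fmt.Println(v)                 // -123456
}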
-
-// rawStringln should only be used to emit the initial version string.
-func (p *exporter) rawStringln(s string) {
- for i := 0; i < len(s); i++ {
- p.rawByte(s[i])
- }
- p.rawByte('\n')
-}
-
-// rawByte is the bottleneck interface to write to p.out.
-// rawByte escapes b as follows (any encoding that
-// hides '$' works):
-//
-// '$' => '|' 'S'
-// '|' => '|' '|'
-//
-// Necessary so other tools can find the end of the
-// export data by searching for "$$".
-// rawByte should only be used by low-level encoders.
-func (p *exporter) rawByte(b byte) {
- switch b {
- case '$':
- // write '$' as '|' 'S'
- b = 'S'
- fallthrough
- case '|':
- // write '|' as '|' '|'
- p.out.WriteByte('|')
- p.written++
- }
- p.out.WriteByte(b)
- p.written++
-}
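
A self-contained sketch of the escaping rule (escape re-implements it locally); the point is that an encoded stream can never contain the literal "$$" end-of-export-data marker:

package main

import (
	"bytes"
	"fmt"
)

// escape applies the same substitution as rawByte:
// '$' becomes "|S" and '|' becomes "||".
func escape(data []byte) []byte {
	var out bytes.Buffer
	for _, b := range data {
		switch b {
		case '$':
			out.WriteString("|S")
		case '|':
			out.WriteString("||")
		default:
			out.WriteByte(b)
		}
	}
	return out.Bytes()
}

func main() {
	enc := escape([]byte("cost: $5 | $6"))
	fmt.Printf("%s\n", enc)                        // cost: |S5 || |S6
	fmt.Println(bytes.Contains(enc, []byte("$$"))) // false: "$$" stays reserved
}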
-
-// tracef is like fmt.Printf but it rewrites the format string
-// to take care of indentation.
-func (p *exporter) tracef(format string, args ...interface{}) {
- if strings.ContainsAny(format, "<>\n") {
- var buf bytes.Buffer
- for i := 0; i < len(format); i++ {
- // no need to deal with runes
- ch := format[i]
- switch ch {
- case '>':
- p.indent++
- continue
- case '<':
- p.indent--
- continue
- }
- buf.WriteByte(ch)
- if ch == '\n' {
- for j := p.indent; j > 0; j-- {
- buf.WriteString(". ")
- }
- }
- }
- format = buf.String()
- }
- fmt.Printf(format, args...)
-}
-
-// Debugging support.
-// (tagString is only used when tracing is enabled)
-var tagString = [...]string{
- // Packages
- -packageTag: "package",
-
- // Types
- -namedTag: "named type",
- -arrayTag: "array",
- -sliceTag: "slice",
- -dddTag: "ddd",
- -structTag: "struct",
- -pointerTag: "pointer",
- -signatureTag: "signature",
- -interfaceTag: "interface",
- -mapTag: "map",
- -chanTag: "chan",
-
- // Values
- -falseTag: "false",
- -trueTag: "true",
- -int64Tag: "int64",
- -floatTag: "float",
- -fractionTag: "fraction",
- -complexTag: "complex",
- -stringTag: "string",
- -unknownTag: "unknown",
-
- // Type aliases
- -aliasTag: "alias",
-}
diff --git a/go/internal/gcimporter/bexport_test.go b/go/internal/gcimporter/bexport_test.go
deleted file mode 100644
index 3da5397eb..000000000
--- a/go/internal/gcimporter/bexport_test.go
+++ /dev/null
@@ -1,551 +0,0 @@
-// Copyright 2016 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-package gcimporter_test
-
-import (
- "fmt"
- "go/ast"
- "go/build"
- "go/constant"
- "go/parser"
- "go/token"
- "go/types"
- "path/filepath"
- "reflect"
- "runtime"
- "sort"
- "strings"
- "testing"
-
- "golang.org/x/tools/go/ast/inspector"
- "golang.org/x/tools/go/buildutil"
- "golang.org/x/tools/go/internal/gcimporter"
- "golang.org/x/tools/go/loader"
- "golang.org/x/tools/internal/typeparams"
- "golang.org/x/tools/internal/typeparams/genericfeatures"
-)
-
-var isRace = false
-
-func TestBExportData_stdlib(t *testing.T) {
- if runtime.Compiler == "gccgo" {
- t.Skip("gccgo standard library is inaccessible")
- }
- if runtime.GOOS == "android" {
- t.Skipf("incomplete std lib on %s", runtime.GOOS)
- }
- if isRace {
- t.Skipf("stdlib tests take too long in race mode and flake on builders")
- }
- if testing.Short() {
- t.Skip("skipping RAM hungry test in -short mode")
- }
-
- // Load, parse and type-check the program.
- ctxt := build.Default // copy
- ctxt.GOPATH = "" // disable GOPATH
- conf := loader.Config{
- Build: &ctxt,
- AllowErrors: true,
- TypeChecker: types.Config{
- Error: func(err error) { t.Log(err) },
- },
- }
- for _, path := range buildutil.AllPackages(conf.Build) {
- conf.Import(path)
- }
-
- // Create a package containing type and value errors to ensure
- // they are properly encoded/decoded.
- f, err := conf.ParseFile("haserrors/haserrors.go", `package haserrors
-const UnknownValue = "" + 0
-type UnknownType undefined
-`)
- if err != nil {
- t.Fatal(err)
- }
- conf.CreateFromFiles("haserrors", f)
-
- prog, err := conf.Load()
- if err != nil {
- t.Fatalf("Load failed: %v", err)
- }
-
- numPkgs := len(prog.AllPackages)
- if want := minStdlibPackages; numPkgs < want {
- t.Errorf("Loaded only %d packages, want at least %d", numPkgs, want)
- }
-
- checked := 0
- for pkg, info := range prog.AllPackages {
- if info.Files == nil {
- continue // empty directory
- }
- // Binary export does not support generic code.
- inspect := inspector.New(info.Files)
- if genericfeatures.ForPackage(inspect, &info.Info) != 0 {
- t.Logf("skipping package %q which uses generics", pkg.Path())
- continue
- }
- checked++
- exportdata, err := gcimporter.BExportData(conf.Fset, pkg)
- if err != nil {
- t.Fatal(err)
- }
-
- imports := make(map[string]*types.Package)
- fset2 := token.NewFileSet()
- n, pkg2, err := gcimporter.BImportData(fset2, imports, exportdata, pkg.Path())
- if err != nil {
- t.Errorf("BImportData(%s): %v", pkg.Path(), err)
- continue
- }
- if n != len(exportdata) {
- t.Errorf("BImportData(%s) decoded %d bytes, want %d",
- pkg.Path(), n, len(exportdata))
- }
-
- // Compare the packages' corresponding members.
- for _, name := range pkg.Scope().Names() {
- if !ast.IsExported(name) {
- continue
- }
- obj1 := pkg.Scope().Lookup(name)
- obj2 := pkg2.Scope().Lookup(name)
- if obj2 == nil {
- t.Errorf("%s.%s not found, want %s", pkg.Path(), name, obj1)
- continue
- }
-
- fl1 := fileLine(conf.Fset, obj1)
- fl2 := fileLine(fset2, obj2)
- if fl1 != fl2 {
- t.Errorf("%s.%s: got posn %s, want %s",
- pkg.Path(), name, fl2, fl1)
- }
-
- if err := equalObj(obj1, obj2); err != nil {
- t.Errorf("%s.%s: %s\ngot: %s\nwant: %s",
- pkg.Path(), name, err, obj2, obj1)
- }
- }
- }
- if want := minStdlibPackages; checked < want {
- t.Errorf("Checked only %d packages, want at least %d", checked, want)
- }
-}
-
-func fileLine(fset *token.FileSet, obj types.Object) string {
- posn := fset.Position(obj.Pos())
- filename := filepath.Clean(strings.ReplaceAll(posn.Filename, "$GOROOT", runtime.GOROOT()))
- return fmt.Sprintf("%s:%d", filename, posn.Line)
-}
-
-// equalObj reports how x and y differ. They are assumed to belong to
-// different universes so cannot be compared directly.
-func equalObj(x, y types.Object) error {
- if reflect.TypeOf(x) != reflect.TypeOf(y) {
- return fmt.Errorf("%T vs %T", x, y)
- }
- xt := x.Type()
- yt := y.Type()
- switch x.(type) {
- case *types.Var, *types.Func:
- // ok
- case *types.Const:
- xval := x.(*types.Const).Val()
- yval := y.(*types.Const).Val()
- // Use string comparison for floating-point values since rounding is permitted.
- if constant.Compare(xval, token.NEQ, yval) &&
- !(xval.Kind() == constant.Float && xval.String() == yval.String()) {
- return fmt.Errorf("unequal constants %s vs %s", xval, yval)
- }
- case *types.TypeName:
- xt = xt.Underlying()
- yt = yt.Underlying()
- default:
- return fmt.Errorf("unexpected %T", x)
- }
- return equalType(xt, yt)
-}
-
-func equalType(x, y types.Type) error {
- if reflect.TypeOf(x) != reflect.TypeOf(y) {
- return fmt.Errorf("unequal kinds: %T vs %T", x, y)
- }
- switch x := x.(type) {
- case *types.Interface:
- y := y.(*types.Interface)
- // TODO(gri): enable separate emission of Embedded interfaces
- // and ExplicitMethods then use this logic.
- // if x.NumEmbeddeds() != y.NumEmbeddeds() {
- // return fmt.Errorf("unequal number of embedded interfaces: %d vs %d",
- // x.NumEmbeddeds(), y.NumEmbeddeds())
- // }
- // for i := 0; i < x.NumEmbeddeds(); i++ {
- // xi := x.Embedded(i)
- // yi := y.Embedded(i)
- // if xi.String() != yi.String() {
- // return fmt.Errorf("mismatched %th embedded interface: %s vs %s",
- // i, xi, yi)
- // }
- // }
- // if x.NumExplicitMethods() != y.NumExplicitMethods() {
- // return fmt.Errorf("unequal methods: %d vs %d",
- // x.NumExplicitMethods(), y.NumExplicitMethods())
- // }
- // for i := 0; i < x.NumExplicitMethods(); i++ {
- // xm := x.ExplicitMethod(i)
- // ym := y.ExplicitMethod(i)
- // if xm.Name() != ym.Name() {
- // return fmt.Errorf("mismatched %th method: %s vs %s", i, xm, ym)
- // }
- // if err := equalType(xm.Type(), ym.Type()); err != nil {
- // return fmt.Errorf("mismatched %s method: %s", xm.Name(), err)
- // }
- // }
- if x.NumMethods() != y.NumMethods() {
- return fmt.Errorf("unequal methods: %d vs %d",
- x.NumMethods(), y.NumMethods())
- }
- for i := 0; i < x.NumMethods(); i++ {
- xm := x.Method(i)
- ym := y.Method(i)
- if xm.Name() != ym.Name() {
- return fmt.Errorf("mismatched %dth method: %s vs %s", i, xm, ym)
- }
- if err := equalType(xm.Type(), ym.Type()); err != nil {
- return fmt.Errorf("mismatched %s method: %s", xm.Name(), err)
- }
- }
- // Constraints are handled explicitly in the *TypeParam case below, so we
- // don't yet need to consider embeddeds here.
- // TODO(rfindley): consider the type set here.
- case *types.Array:
- y := y.(*types.Array)
- if x.Len() != y.Len() {
- return fmt.Errorf("unequal array lengths: %d vs %d", x.Len(), y.Len())
- }
- if err := equalType(x.Elem(), y.Elem()); err != nil {
- return fmt.Errorf("array elements: %s", err)
- }
- case *types.Basic:
- y := y.(*types.Basic)
- if x.Kind() != y.Kind() {
- return fmt.Errorf("unequal basic types: %s vs %s", x, y)
- }
- case *types.Chan:
- y := y.(*types.Chan)
- if x.Dir() != y.Dir() {
- return fmt.Errorf("unequal channel directions: %d vs %d", x.Dir(), y.Dir())
- }
- if err := equalType(x.Elem(), y.Elem()); err != nil {
- return fmt.Errorf("channel elements: %s", err)
- }
- case *types.Map:
- y := y.(*types.Map)
- if err := equalType(x.Key(), y.Key()); err != nil {
- return fmt.Errorf("map keys: %s", err)
- }
- if err := equalType(x.Elem(), y.Elem()); err != nil {
- return fmt.Errorf("map values: %s", err)
- }
- case *types.Named:
- y := y.(*types.Named)
- return cmpNamed(x, y)
- case *types.Pointer:
- y := y.(*types.Pointer)
- if err := equalType(x.Elem(), y.Elem()); err != nil {
- return fmt.Errorf("pointer elements: %s", err)
- }
- case *types.Signature:
- y := y.(*types.Signature)
- if err := equalType(x.Params(), y.Params()); err != nil {
- return fmt.Errorf("parameters: %s", err)
- }
- if err := equalType(x.Results(), y.Results()); err != nil {
- return fmt.Errorf("results: %s", err)
- }
- if x.Variadic() != y.Variadic() {
- return fmt.Errorf("unequal variadicity: %t vs %t",
- x.Variadic(), y.Variadic())
- }
- if (x.Recv() != nil) != (y.Recv() != nil) {
- return fmt.Errorf("unequal receivers: %s vs %s", x.Recv(), y.Recv())
- }
- if x.Recv() != nil {
- // TODO(adonovan): fix: this assertion fires for interface methods.
- // The type of the receiver of an interface method is a named type
- // if the Package was loaded from export data, or an unnamed (interface)
- // type if the Package was produced by type-checking ASTs.
- // if err := equalType(x.Recv().Type(), y.Recv().Type()); err != nil {
- // return fmt.Errorf("receiver: %s", err)
- // }
- }
- if err := equalTypeParams(typeparams.ForSignature(x), typeparams.ForSignature(y)); err != nil {
- return fmt.Errorf("type params: %s", err)
- }
- if err := equalTypeParams(typeparams.RecvTypeParams(x), typeparams.RecvTypeParams(y)); err != nil {
- return fmt.Errorf("recv type params: %s", err)
- }
- case *types.Slice:
- y := y.(*types.Slice)
- if err := equalType(x.Elem(), y.Elem()); err != nil {
- return fmt.Errorf("slice elements: %s", err)
- }
- case *types.Struct:
- y := y.(*types.Struct)
- if x.NumFields() != y.NumFields() {
- return fmt.Errorf("unequal struct fields: %d vs %d",
- x.NumFields(), y.NumFields())
- }
- for i := 0; i < x.NumFields(); i++ {
- xf := x.Field(i)
- yf := y.Field(i)
- if xf.Name() != yf.Name() {
- return fmt.Errorf("mismatched fields: %s vs %s", xf, yf)
- }
- if err := equalType(xf.Type(), yf.Type()); err != nil {
- return fmt.Errorf("struct field %s: %s", xf.Name(), err)
- }
- if x.Tag(i) != y.Tag(i) {
- return fmt.Errorf("struct field %s has unequal tags: %q vs %q",
- xf.Name(), x.Tag(i), y.Tag(i))
- }
- }
- case *types.Tuple:
- y := y.(*types.Tuple)
- if x.Len() != y.Len() {
- return fmt.Errorf("unequal tuple lengths: %d vs %d", x.Len(), y.Len())
- }
- for i := 0; i < x.Len(); i++ {
- if err := equalType(x.At(i).Type(), y.At(i).Type()); err != nil {
- return fmt.Errorf("tuple element %d: %s", i, err)
- }
- }
- case *typeparams.TypeParam:
- y := y.(*typeparams.TypeParam)
- if x.String() != y.String() {
- return fmt.Errorf("unequal named types: %s vs %s", x, y)
- }
- // For now, just compare constraints by type string to short-circuit
- // cycles. We have to make interfaces explicit as export data currently
- // doesn't support marking interfaces as implicit.
- // TODO(rfindley): remove makeExplicit once export data contains an
- // implicit bit.
- xc := makeExplicit(x.Constraint()).String()
- yc := makeExplicit(y.Constraint()).String()
- if xc != yc {
- return fmt.Errorf("unequal constraints: %s vs %s", xc, yc)
- }
-
- default:
- panic(fmt.Sprintf("unexpected %T type", x))
- }
- return nil
-}
-
-// cmpNamed compares two named types x and y, returning an error for any
-// discrepancies. It does not compare their underlying types.
-func cmpNamed(x, y *types.Named) error {
- xOrig := typeparams.NamedTypeOrigin(x)
- yOrig := typeparams.NamedTypeOrigin(y)
- if xOrig.String() != yOrig.String() {
- return fmt.Errorf("unequal named types: %s vs %s", x, y)
- }
- if err := equalTypeParams(typeparams.ForNamed(x), typeparams.ForNamed(y)); err != nil {
- return fmt.Errorf("type parameters: %s", err)
- }
- if err := equalTypeArgs(typeparams.NamedTypeArgs(x), typeparams.NamedTypeArgs(y)); err != nil {
- return fmt.Errorf("type arguments: %s", err)
- }
- if x.NumMethods() != y.NumMethods() {
- return fmt.Errorf("unequal methods: %d vs %d",
- x.NumMethods(), y.NumMethods())
- }
- // Unfortunately method sorting is not canonical, so sort before comparing.
- var xms, yms []*types.Func
- for i := 0; i < x.NumMethods(); i++ {
- xms = append(xms, x.Method(i))
- yms = append(yms, y.Method(i))
- }
- for _, ms := range [][]*types.Func{xms, yms} {
- sort.Slice(ms, func(i, j int) bool {
- return ms[i].Name() < ms[j].Name()
- })
- }
- for i, xm := range xms {
- ym := yms[i]
- if xm.Name() != ym.Name() {
- return fmt.Errorf("mismatched %dth method: %s vs %s", i, xm, ym)
- }
- // Calling equalType here leads to infinite recursion, so just compare
- // strings.
- if xm.String() != ym.String() {
- return fmt.Errorf("unequal methods: %s vs %s", x, y)
- }
- }
- return nil
-}
-
-// makeExplicit returns an explicit version of typ, if typ is an implicit
-// interface. Otherwise it returns typ unmodified.
-func makeExplicit(typ types.Type) types.Type {
- if iface, _ := typ.(*types.Interface); iface != nil && typeparams.IsImplicit(iface) {
- var methods []*types.Func
- for i := 0; i < iface.NumExplicitMethods(); i++ {
- methods = append(methods, iface.Method(i))
- }
- var embeddeds []types.Type
- for i := 0; i < iface.NumEmbeddeds(); i++ {
- embeddeds = append(embeddeds, iface.EmbeddedType(i))
- }
- return types.NewInterfaceType(methods, embeddeds)
- }
- return typ
-}
-
-func equalTypeArgs(x, y *typeparams.TypeList) error {
- if x.Len() != y.Len() {
- return fmt.Errorf("unequal lengths: %d vs %d", x.Len(), y.Len())
- }
- for i := 0; i < x.Len(); i++ {
- if err := equalType(x.At(i), y.At(i)); err != nil {
- return fmt.Errorf("type %d: %s", i, err)
- }
- }
- return nil
-}
-
-func equalTypeParams(x, y *typeparams.TypeParamList) error {
- if x.Len() != y.Len() {
- return fmt.Errorf("unequal lengths: %d vs %d", x.Len(), y.Len())
- }
- for i := 0; i < x.Len(); i++ {
- if err := equalType(x.At(i), y.At(i)); err != nil {
- return fmt.Errorf("type parameter %d: %s", i, err)
- }
- }
- return nil
-}
-
-// TestVeryLongFile tests the position of an import object declared in
-// a very long input file. Line numbers greater than maxlines are
-// reported as line 1, not garbage or token.NoPos.
-func TestVeryLongFile(t *testing.T) {
- // parse and typecheck
- longFile := "package foo" + strings.Repeat("\n", 123456) + "var X int"
- fset1 := token.NewFileSet()
- f, err := parser.ParseFile(fset1, "foo.go", longFile, 0)
- if err != nil {
- t.Fatal(err)
- }
- var conf types.Config
- pkg, err := conf.Check("foo", fset1, []*ast.File{f}, nil)
- if err != nil {
- t.Fatal(err)
- }
-
- // export
- exportdata, err := gcimporter.BExportData(fset1, pkg)
- if err != nil {
- t.Fatal(err)
- }
-
- // import
- imports := make(map[string]*types.Package)
- fset2 := token.NewFileSet()
- _, pkg2, err := gcimporter.BImportData(fset2, imports, exportdata, pkg.Path())
- if err != nil {
- t.Fatalf("BImportData(%s): %v", pkg.Path(), err)
- }
-
- // compare
- posn1 := fset1.Position(pkg.Scope().Lookup("X").Pos())
- posn2 := fset2.Position(pkg2.Scope().Lookup("X").Pos())
- if want := "foo.go:1:1"; posn2.String() != want {
- t.Errorf("X position = %s, want %s (orig was %s)",
- posn2, want, posn1)
- }
-}
-
-const src = `
-package p
-
-type (
- T0 = int32
- T1 = struct{}
- T2 = struct{ T1 }
- Invalid = foo // foo is undeclared
-)
-`
-
-func checkPkg(t *testing.T, pkg *types.Package, label string) {
- T1 := types.NewStruct(nil, nil)
- T2 := types.NewStruct([]*types.Var{types.NewField(0, pkg, "T1", T1, true)}, nil)
-
- for _, test := range []struct {
- name string
- typ types.Type
- }{
- {"T0", types.Typ[types.Int32]},
- {"T1", T1},
- {"T2", T2},
- {"Invalid", types.Typ[types.Invalid]},
- } {
- obj := pkg.Scope().Lookup(test.name)
- if obj == nil {
- t.Errorf("%s: %s not found", label, test.name)
- continue
- }
- tname, _ := obj.(*types.TypeName)
- if tname == nil {
- t.Errorf("%s: %v not a type name", label, obj)
- continue
- }
- if !tname.IsAlias() {
- t.Errorf("%s: %v: not marked as alias", label, tname)
- continue
- }
- if got := tname.Type(); !types.Identical(got, test.typ) {
- t.Errorf("%s: %v: got %v; want %v", label, tname, got, test.typ)
- }
- }
-}
-
-func TestTypeAliases(t *testing.T) {
- // parse and typecheck
- fset1 := token.NewFileSet()
- f, err := parser.ParseFile(fset1, "p.go", src, 0)
- if err != nil {
- t.Fatal(err)
- }
- var conf types.Config
- pkg1, err := conf.Check("p", fset1, []*ast.File{f}, nil)
- if err == nil {
-		// foo is undeclared in src; we should see an error
- t.Fatal("invalid source type-checked without error")
- }
- if pkg1 == nil {
- // despite incorrect src we should see a (partially) type-checked package
- t.Fatal("nil package returned")
- }
- checkPkg(t, pkg1, "export")
-
- // export
- exportdata, err := gcimporter.BExportData(fset1, pkg1)
- if err != nil {
- t.Fatal(err)
- }
-
- // import
- imports := make(map[string]*types.Package)
- fset2 := token.NewFileSet()
- _, pkg2, err := gcimporter.BImportData(fset2, imports, exportdata, pkg1.Path())
- if err != nil {
- t.Fatalf("BImportData(%s): %v", pkg1.Path(), err)
- }
- checkPkg(t, pkg2, "import")
-}
diff --git a/go/internal/gcimporter/bimport.go b/go/internal/gcimporter/bimport.go
deleted file mode 100644
index b85de0147..000000000
--- a/go/internal/gcimporter/bimport.go
+++ /dev/null
@@ -1,1053 +0,0 @@
-// Copyright 2015 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-// This file is a copy of $GOROOT/src/go/internal/gcimporter/bimport.go.
-
-package gcimporter
-
-import (
- "encoding/binary"
- "fmt"
- "go/constant"
- "go/token"
- "go/types"
- "sort"
- "strconv"
- "strings"
- "sync"
- "unicode"
- "unicode/utf8"
-)
-
-type importer struct {
- imports map[string]*types.Package
- data []byte
- importpath string
- buf []byte // for reading strings
- version int // export format version
-
- // object lists
- strList []string // in order of appearance
- pathList []string // in order of appearance
- pkgList []*types.Package // in order of appearance
- typList []types.Type // in order of appearance
- interfaceList []*types.Interface // for delayed completion only
- trackAllTypes bool
-
- // position encoding
- posInfoFormat bool
- prevFile string
- prevLine int
- fake fakeFileSet
-
- // debugging support
- debugFormat bool
- read int // bytes read
-}
-
-// BImportData imports a package from the serialized package data
-// and returns the number of bytes consumed and a reference to the package.
-// If the export data version is not recognized or the format is otherwise
-// compromised, an error is returned.
-func BImportData(fset *token.FileSet, imports map[string]*types.Package, data []byte, path string) (_ int, pkg *types.Package, err error) {
- // catch panics and return them as errors
- const currentVersion = 6
- version := -1 // unknown version
- defer func() {
- if e := recover(); e != nil {
- // Return a (possibly nil or incomplete) package unchanged (see #16088).
- if version > currentVersion {
- err = fmt.Errorf("cannot import %q (%v), export data is newer version - update tool", path, e)
- } else {
- err = fmt.Errorf("cannot import %q (%v), possibly version skew - reinstall package", path, e)
- }
- }
- }()
-
- p := importer{
- imports: imports,
- data: data,
- importpath: path,
- version: version,
- strList: []string{""}, // empty string is mapped to 0
- pathList: []string{""}, // empty string is mapped to 0
- fake: fakeFileSet{
- fset: fset,
- files: make(map[string]*fileInfo),
- },
- }
- defer p.fake.setLines() // set lines for files in fset
-
- // read version info
- var versionstr string
- if b := p.rawByte(); b == 'c' || b == 'd' {
- // Go1.7 encoding; first byte encodes low-level
- // encoding format (compact vs debug).
- // For backward-compatibility only (avoid problems with
- // old installed packages). Newly compiled packages use
- // the extensible format string.
- // TODO(gri) Remove this support eventually; after Go1.8.
- if b == 'd' {
- p.debugFormat = true
- }
- p.trackAllTypes = p.rawByte() == 'a'
- p.posInfoFormat = p.int() != 0
- versionstr = p.string()
- if versionstr == "v1" {
- version = 0
- }
- } else {
- // Go1.8 extensible encoding
- // read version string and extract version number (ignore anything after the version number)
- versionstr = p.rawStringln(b)
- if s := strings.SplitN(versionstr, " ", 3); len(s) >= 2 && s[0] == "version" {
- if v, err := strconv.Atoi(s[1]); err == nil && v > 0 {
- version = v
- }
- }
- }
- p.version = version
-
- // read version specific flags - extend as necessary
- switch p.version {
- // case currentVersion:
- // ...
- // fallthrough
- case currentVersion, 5, 4, 3, 2, 1:
- p.debugFormat = p.rawStringln(p.rawByte()) == "debug"
- p.trackAllTypes = p.int() != 0
- p.posInfoFormat = p.int() != 0
- case 0:
- // Go1.7 encoding format - nothing to do here
- default:
- errorf("unknown bexport format version %d (%q)", p.version, versionstr)
- }
-
- // --- generic export data ---
-
- // populate typList with predeclared "known" types
- p.typList = append(p.typList, predeclared()...)
-
- // read package data
- pkg = p.pkg()
-
- // read objects of phase 1 only (see cmd/compile/internal/gc/bexport.go)
- objcount := 0
- for {
- tag := p.tagOrIndex()
- if tag == endTag {
- break
- }
- p.obj(tag)
- objcount++
- }
-
- // self-verification
- if count := p.int(); count != objcount {
- errorf("got %d objects; want %d", objcount, count)
- }
-
- // ignore compiler-specific import data
-
- // complete interfaces
- // TODO(gri) re-investigate if we still need to do this in a delayed fashion
- for _, typ := range p.interfaceList {
- typ.Complete()
- }
-
- // record all referenced packages as imports
- list := append(([]*types.Package)(nil), p.pkgList[1:]...)
- sort.Sort(byPath(list))
- pkg.SetImports(list)
-
- // package was imported completely and without errors
- pkg.MarkComplete()
-
- return p.read, pkg, nil
-}
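
A small sketch of the version-line parsing above, mirroring the strings.SplitN logic (the sample header strings are made up):

package main

import (
	"fmt"
	"strconv"
	"strings"
)

// parseVersion extracts the format version from the leading line of the
// export data; anything after the number is ignored, as in BImportData.
func parseVersion(line string) int {
	if s := strings.SplitN(line, " ", 3); len(s) >= 2 && s[0] == "version" {
		if v, err := strconv.Atoi(s[1]); err == nil && v > 0 {
			return v
		}
	}
	return -1 // unknown
}

func main() {
	fmt.Println(parseVersion("version 6 debug info may follow")) // 6
	fmt.Println(parseVersion("v1"))                              // -1 (old Go1.7-style header)
}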
-
-func errorf(format string, args ...interface{}) {
- panic(fmt.Sprintf(format, args...))
-}
-
-func (p *importer) pkg() *types.Package {
- // if the package was seen before, i is its index (>= 0)
- i := p.tagOrIndex()
- if i >= 0 {
- return p.pkgList[i]
- }
-
- // otherwise, i is the package tag (< 0)
- if i != packageTag {
- errorf("unexpected package tag %d version %d", i, p.version)
- }
-
- // read package data
- name := p.string()
- var path string
- if p.version >= 5 {
- path = p.path()
- } else {
- path = p.string()
- }
- if p.version >= 6 {
- p.int() // package height; unused by go/types
- }
-
- // we should never see an empty package name
- if name == "" {
- errorf("empty package name in import")
- }
-
- // an empty path denotes the package we are currently importing;
- // it must be the first package we see
- if (path == "") != (len(p.pkgList) == 0) {
- errorf("package path %q for pkg index %d", path, len(p.pkgList))
- }
-
- // if the package was imported before, use that one; otherwise create a new one
- if path == "" {
- path = p.importpath
- }
- pkg := p.imports[path]
- if pkg == nil {
- pkg = types.NewPackage(path, name)
- p.imports[path] = pkg
- } else if pkg.Name() != name {
- errorf("conflicting names %s and %s for package %q", pkg.Name(), name, path)
- }
- p.pkgList = append(p.pkgList, pkg)
-
- return pkg
-}
-
-// objTag returns the tag value for each object kind.
-func objTag(obj types.Object) int {
- switch obj.(type) {
- case *types.Const:
- return constTag
- case *types.TypeName:
- return typeTag
- case *types.Var:
- return varTag
- case *types.Func:
- return funcTag
- default:
- errorf("unexpected object: %v (%T)", obj, obj) // panics
- panic("unreachable")
- }
-}
-
-func sameObj(a, b types.Object) bool {
- // Because unnamed types are not canonicalized, we cannot simply compare types for
- // (pointer) identity.
- // Ideally we'd check equality of constant values as well, but this is good enough.
- return objTag(a) == objTag(b) && types.Identical(a.Type(), b.Type())
-}
-
-func (p *importer) declare(obj types.Object) {
- pkg := obj.Pkg()
- if alt := pkg.Scope().Insert(obj); alt != nil {
- // This can only trigger if we import a (non-type) object a second time.
-		// Excluding type aliases, this cannot happen because 1) we only import a package
-		// once; and 2) we ignore compiler-specific export data which may contain
- // functions whose inlined function bodies refer to other functions that
- // were already imported.
- // However, type aliases require reexporting the original type, so we need
- // to allow it (see also the comment in cmd/compile/internal/gc/bimport.go,
- // method importer.obj, switch case importing functions).
- // TODO(gri) review/update this comment once the gc compiler handles type aliases.
- if !sameObj(obj, alt) {
- errorf("inconsistent import:\n\t%v\npreviously imported as:\n\t%v\n", obj, alt)
- }
- }
-}
-
-func (p *importer) obj(tag int) {
- switch tag {
- case constTag:
- pos := p.pos()
- pkg, name := p.qualifiedName()
- typ := p.typ(nil, nil)
- val := p.value()
- p.declare(types.NewConst(pos, pkg, name, typ, val))
-
- case aliasTag:
- // TODO(gri) verify type alias hookup is correct
- pos := p.pos()
- pkg, name := p.qualifiedName()
- typ := p.typ(nil, nil)
- p.declare(types.NewTypeName(pos, pkg, name, typ))
-
- case typeTag:
- p.typ(nil, nil)
-
- case varTag:
- pos := p.pos()
- pkg, name := p.qualifiedName()
- typ := p.typ(nil, nil)
- p.declare(types.NewVar(pos, pkg, name, typ))
-
- case funcTag:
- pos := p.pos()
- pkg, name := p.qualifiedName()
- params, isddd := p.paramList()
- result, _ := p.paramList()
- sig := types.NewSignature(nil, params, result, isddd)
- p.declare(types.NewFunc(pos, pkg, name, sig))
-
- default:
- errorf("unexpected object tag %d", tag)
- }
-}
-
-const deltaNewFile = -64 // see cmd/compile/internal/gc/bexport.go
-
-func (p *importer) pos() token.Pos {
- if !p.posInfoFormat {
- return token.NoPos
- }
-
- file := p.prevFile
- line := p.prevLine
- delta := p.int()
- line += delta
- if p.version >= 5 {
- if delta == deltaNewFile {
- if n := p.int(); n >= 0 {
- // file changed
- file = p.path()
- line = n
- }
- }
- } else {
- if delta == 0 {
- if n := p.int(); n >= 0 {
- // file changed
- file = p.prevFile[:n] + p.string()
- line = p.int()
- }
- }
- }
- p.prevFile = file
- p.prevLine = line
-
- return p.fake.pos(file, line, 0)
-}
-
-// Synthesize a token.Pos
-type fakeFileSet struct {
- fset *token.FileSet
- files map[string]*fileInfo
-}
-
-type fileInfo struct {
- file *token.File
- lastline int
-}
-
-const maxlines = 64 * 1024
-
-func (s *fakeFileSet) pos(file string, line, column int) token.Pos {
- // TODO(mdempsky): Make use of column.
-
- // Since we don't know the set of needed file positions, we reserve maxlines
- // positions per file. We delay calling token.File.SetLines until all
- // positions have been calculated (by way of fakeFileSet.setLines), so that
- // we can avoid setting unnecessary lines. See also golang/go#46586.
- f := s.files[file]
- if f == nil {
- f = &fileInfo{file: s.fset.AddFile(file, -1, maxlines)}
- s.files[file] = f
- }
- if line > maxlines {
- line = 1
- }
- if line > f.lastline {
- f.lastline = line
- }
-
- // Return a fake position assuming that f.file consists only of newlines.
- return token.Pos(f.file.Base() + line - 1)
-}
-
-func (s *fakeFileSet) setLines() {
- fakeLinesOnce.Do(func() {
- fakeLines = make([]int, maxlines)
- for i := range fakeLines {
- fakeLines[i] = i
- }
- })
- for _, f := range s.files {
- f.file.SetLines(fakeLines[:f.lastline])
- }
-}
-
-var (
- fakeLines []int
- fakeLinesOnce sync.Once
-)
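
A runnable sketch of the fake position scheme: reserve maxlines offsets, pretend the file consists only of newlines, and a synthesized Pos maps back to the intended line (the file name and line number are arbitrary):

package main

import (
	"fmt"
	"go/token"
)

func main() {
	const maxlines = 64 * 1024
	fset := token.NewFileSet()

	// Reserve maxlines positions and set one offset per line,
	// as fakeFileSet.setLines does.
	f := fset.AddFile("fake.go", -1, maxlines)
	lines := make([]int, maxlines)
	for i := range lines {
		lines[i] = i
	}
	f.SetLines(lines)

	pos := token.Pos(f.Base() + 42 - 1) // synthesize a Pos for line 42
	fmt.Println(fset.Position(pos))     // fake.go:42:1
}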
-
-func (p *importer) qualifiedName() (pkg *types.Package, name string) {
- name = p.string()
- pkg = p.pkg()
- return
-}
-
-func (p *importer) record(t types.Type) {
- p.typList = append(p.typList, t)
-}
-
-// A dddSlice is a types.Type representing ...T parameters.
-// It only appears for parameter types and does not escape
-// the importer.
-type dddSlice struct {
- elem types.Type
-}
-
-func (t *dddSlice) Underlying() types.Type { return t }
-func (t *dddSlice) String() string { return "..." + t.elem.String() }
-
-// parent is the package which declared the type; parent == nil means
-// the package currently imported. The parent package is needed for
-// exported struct fields and interface methods which don't contain
-// explicit package information in the export data.
-//
-// A non-nil tname is used as the "owner" of the result type; i.e.,
-// the result type is the underlying type of tname. tname is used
-// to give interface methods a named receiver type where possible.
-func (p *importer) typ(parent *types.Package, tname *types.Named) types.Type {
- // if the type was seen before, i is its index (>= 0)
- i := p.tagOrIndex()
- if i >= 0 {
- return p.typList[i]
- }
-
- // otherwise, i is the type tag (< 0)
- switch i {
- case namedTag:
- // read type object
- pos := p.pos()
- parent, name := p.qualifiedName()
- scope := parent.Scope()
- obj := scope.Lookup(name)
-
- // if the object doesn't exist yet, create and insert it
- if obj == nil {
- obj = types.NewTypeName(pos, parent, name, nil)
- scope.Insert(obj)
- }
-
- if _, ok := obj.(*types.TypeName); !ok {
- errorf("pkg = %s, name = %s => %s", parent, name, obj)
- }
-
- // associate new named type with obj if it doesn't exist yet
- t0 := types.NewNamed(obj.(*types.TypeName), nil, nil)
-
- // but record the existing type, if any
- tname := obj.Type().(*types.Named) // tname is either t0 or the existing type
- p.record(tname)
-
- // read underlying type
- t0.SetUnderlying(p.typ(parent, t0))
-
- // interfaces don't have associated methods
- if types.IsInterface(t0) {
- return tname
- }
-
- // read associated methods
- for i := p.int(); i > 0; i-- {
- // TODO(gri) replace this with something closer to fieldName
- pos := p.pos()
- name := p.string()
- if !exported(name) {
- p.pkg()
- }
-
- recv, _ := p.paramList() // TODO(gri) do we need a full param list for the receiver?
- params, isddd := p.paramList()
- result, _ := p.paramList()
- p.int() // go:nointerface pragma - discarded
-
- sig := types.NewSignature(recv.At(0), params, result, isddd)
- t0.AddMethod(types.NewFunc(pos, parent, name, sig))
- }
-
- return tname
-
- case arrayTag:
- t := new(types.Array)
- if p.trackAllTypes {
- p.record(t)
- }
-
- n := p.int64()
- *t = *types.NewArray(p.typ(parent, nil), n)
- return t
-
- case sliceTag:
- t := new(types.Slice)
- if p.trackAllTypes {
- p.record(t)
- }
-
- *t = *types.NewSlice(p.typ(parent, nil))
- return t
-
- case dddTag:
- t := new(dddSlice)
- if p.trackAllTypes {
- p.record(t)
- }
-
- t.elem = p.typ(parent, nil)
- return t
-
- case structTag:
- t := new(types.Struct)
- if p.trackAllTypes {
- p.record(t)
- }
-
- *t = *types.NewStruct(p.fieldList(parent))
- return t
-
- case pointerTag:
- t := new(types.Pointer)
- if p.trackAllTypes {
- p.record(t)
- }
-
- *t = *types.NewPointer(p.typ(parent, nil))
- return t
-
- case signatureTag:
- t := new(types.Signature)
- if p.trackAllTypes {
- p.record(t)
- }
-
- params, isddd := p.paramList()
- result, _ := p.paramList()
- *t = *types.NewSignature(nil, params, result, isddd)
- return t
-
- case interfaceTag:
- // Create a dummy entry in the type list. This is safe because we
- // cannot expect the interface type to appear in a cycle, as any
- // such cycle must contain a named type which would have been
- // first defined earlier.
- // TODO(gri) Is this still true now that we have type aliases?
- // See issue #23225.
- n := len(p.typList)
- if p.trackAllTypes {
- p.record(nil)
- }
-
- var embeddeds []types.Type
- for n := p.int(); n > 0; n-- {
- p.pos()
- embeddeds = append(embeddeds, p.typ(parent, nil))
- }
-
- t := newInterface(p.methodList(parent, tname), embeddeds)
- p.interfaceList = append(p.interfaceList, t)
- if p.trackAllTypes {
- p.typList[n] = t
- }
- return t
-
- case mapTag:
- t := new(types.Map)
- if p.trackAllTypes {
- p.record(t)
- }
-
- key := p.typ(parent, nil)
- val := p.typ(parent, nil)
- *t = *types.NewMap(key, val)
- return t
-
- case chanTag:
- t := new(types.Chan)
- if p.trackAllTypes {
- p.record(t)
- }
-
- dir := chanDir(p.int())
- val := p.typ(parent, nil)
- *t = *types.NewChan(dir, val)
- return t
-
- default:
- errorf("unexpected type tag %d", i) // panics
- panic("unreachable")
- }
-}
-
-func chanDir(d int) types.ChanDir {
- // tag values must match the constants in cmd/compile/internal/gc/go.go
- switch d {
- case 1 /* Crecv */ :
- return types.RecvOnly
- case 2 /* Csend */ :
- return types.SendOnly
- case 3 /* Cboth */ :
- return types.SendRecv
- default:
- errorf("unexpected channel dir %d", d)
- return 0
- }
-}
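
The exporter writes 3 - t.Dir() and chanDir maps the result back; a quick round-trip check over the three go/types channel directions:

package main

import (
	"fmt"
	"go/types"
)

// encodeDir is the exporter's "3 - dir" trick; decodeDir mirrors chanDir.
func encodeDir(d types.ChanDir) int { return int(3 - d) }

func decodeDir(d int) types.ChanDir {
	switch d {
	case 1:
		return types.RecvOnly
	case 2:
		return types.SendOnly
	default:
		return types.SendRecv
	}
}

func main() {
	for _, d := range []types.ChanDir{types.SendRecv, types.SendOnly, types.RecvOnly} {
		fmt.Println(d, encodeDir(d), decodeDir(encodeDir(d)) == d) // always true
	}
}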
-
-func (p *importer) fieldList(parent *types.Package) (fields []*types.Var, tags []string) {
- if n := p.int(); n > 0 {
- fields = make([]*types.Var, n)
- tags = make([]string, n)
- for i := range fields {
- fields[i], tags[i] = p.field(parent)
- }
- }
- return
-}
-
-func (p *importer) field(parent *types.Package) (*types.Var, string) {
- pos := p.pos()
- pkg, name, alias := p.fieldName(parent)
- typ := p.typ(parent, nil)
- tag := p.string()
-
- anonymous := false
- if name == "" {
- // anonymous field - typ must be T or *T and T must be a type name
- switch typ := deref(typ).(type) {
- case *types.Basic: // basic types are named types
-			pkg = nil // objects defined in Universe scope have no package
- name = typ.Name()
- case *types.Named:
- name = typ.Obj().Name()
- default:
- errorf("named base type expected")
- }
- anonymous = true
- } else if alias {
- // anonymous field: we have an explicit name because it's an alias
- anonymous = true
- }
-
- return types.NewField(pos, pkg, name, typ, anonymous), tag
-}
-
-func (p *importer) methodList(parent *types.Package, baseType *types.Named) (methods []*types.Func) {
- if n := p.int(); n > 0 {
- methods = make([]*types.Func, n)
- for i := range methods {
- methods[i] = p.method(parent, baseType)
- }
- }
- return
-}
-
-func (p *importer) method(parent *types.Package, baseType *types.Named) *types.Func {
- pos := p.pos()
- pkg, name, _ := p.fieldName(parent)
- // If we don't have a baseType, use a nil receiver.
- // A receiver using the actual interface type (which
- // we don't know yet) will be filled in when we call
- // types.Interface.Complete.
- var recv *types.Var
- if baseType != nil {
- recv = types.NewVar(token.NoPos, parent, "", baseType)
- }
- params, isddd := p.paramList()
- result, _ := p.paramList()
- sig := types.NewSignature(recv, params, result, isddd)
- return types.NewFunc(pos, pkg, name, sig)
-}
-
-func (p *importer) fieldName(parent *types.Package) (pkg *types.Package, name string, alias bool) {
- name = p.string()
- pkg = parent
- if pkg == nil {
- // use the imported package instead
- pkg = p.pkgList[0]
- }
- if p.version == 0 && name == "_" {
- // version 0 didn't export a package for _ fields
- return
- }
- switch name {
- case "":
- // 1) field name matches base type name and is exported: nothing to do
- case "?":
- // 2) field name matches base type name and is not exported: need package
- name = ""
- pkg = p.pkg()
- case "@":
- // 3) field name doesn't match type name (alias)
- name = p.string()
- alias = true
- fallthrough
- default:
- if !exported(name) {
- pkg = p.pkg()
- }
- }
- return
-}
-
-func (p *importer) paramList() (*types.Tuple, bool) {
- n := p.int()
- if n == 0 {
- return nil, false
- }
- // negative length indicates unnamed parameters
- named := true
- if n < 0 {
- n = -n
- named = false
- }
- // n > 0
- params := make([]*types.Var, n)
- isddd := false
- for i := range params {
- params[i], isddd = p.param(named)
- }
- return types.NewTuple(params...), isddd
-}
-
-func (p *importer) param(named bool) (*types.Var, bool) {
- t := p.typ(nil, nil)
- td, isddd := t.(*dddSlice)
- if isddd {
- t = types.NewSlice(td.elem)
- }
-
- var pkg *types.Package
- var name string
- if named {
- name = p.string()
- if name == "" {
- errorf("expected named parameter")
- }
- if name != "_" {
- pkg = p.pkg()
- }
- if i := strings.Index(name, "·"); i > 0 {
- name = name[:i] // cut off gc-specific parameter numbering
- }
- }
-
- // read and discard compiler-specific info
- p.string()
-
- return types.NewVar(token.NoPos, pkg, name, t), isddd
-}
-
-func exported(name string) bool {
- ch, _ := utf8.DecodeRuneInString(name)
- return unicode.IsUpper(ch)
-}
-
-func (p *importer) value() constant.Value {
- switch tag := p.tagOrIndex(); tag {
- case falseTag:
- return constant.MakeBool(false)
- case trueTag:
- return constant.MakeBool(true)
- case int64Tag:
- return constant.MakeInt64(p.int64())
- case floatTag:
- return p.float()
- case complexTag:
- re := p.float()
- im := p.float()
- return constant.BinaryOp(re, token.ADD, constant.MakeImag(im))
- case stringTag:
- return constant.MakeString(p.string())
- case unknownTag:
- return constant.MakeUnknown()
- default:
- errorf("unexpected value tag %d", tag) // panics
- panic("unreachable")
- }
-}
-
-func (p *importer) float() constant.Value {
- sign := p.int()
- if sign == 0 {
- return constant.MakeInt64(0)
- }
-
- exp := p.int()
- mant := []byte(p.string()) // big endian
-
- // remove leading 0's if any
- for len(mant) > 0 && mant[0] == 0 {
- mant = mant[1:]
- }
-
- // convert to little endian
- // TODO(gri) go/constant should have a more direct conversion function
- // (e.g., once it supports a big.Float based implementation)
- for i, j := 0, len(mant)-1; i < j; i, j = i+1, j-1 {
- mant[i], mant[j] = mant[j], mant[i]
- }
-
- // adjust exponent (constant.MakeFromBytes creates an integer value,
- // but mant represents the mantissa bits such that 0.5 <= mant < 1.0)
- exp -= len(mant) << 3
- if len(mant) > 0 {
- for msd := mant[len(mant)-1]; msd&0x80 == 0; msd <<= 1 {
- exp++
- }
- }
-
- x := constant.MakeFromBytes(mant)
- switch {
- case exp < 0:
- d := constant.Shift(constant.MakeInt64(1), token.SHL, uint(-exp))
- x = constant.BinaryOp(x, token.QUO, d)
- case exp > 0:
- x = constant.Shift(x, token.SHL, uint(exp))
- }
-
- if sign < 0 {
- x = constant.UnaryOp(token.SUB, x, 0)
- }
- return x
-}
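
A hedged sketch of the reconstruction for the 6.25 example used on the export side: mantissa bytes of 25 and an exponent adjusted down to -2, so the value is 25 * 2^-2:

package main

import (
	"fmt"
	"go/constant"
	"go/token"
)

func main() {
	x := constant.MakeFromBytes([]byte{25})                  // little-endian mantissa
	d := constant.Shift(constant.MakeInt64(1), token.SHL, 2) // 2^2 for the negative exponent
	v := constant.BinaryOp(x, token.QUO, d)                  // 25 / 4

	f, exact := constant.Float64Val(v)
	fmt.Println(f, exact) // 6.25 true
}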
-
-// ----------------------------------------------------------------------------
-// Low-level decoders
-
-func (p *importer) tagOrIndex() int {
- if p.debugFormat {
- p.marker('t')
- }
-
- return int(p.rawInt64())
-}
-
-func (p *importer) int() int {
- x := p.int64()
- if int64(int(x)) != x {
- errorf("exported integer too large")
- }
- return int(x)
-}
-
-func (p *importer) int64() int64 {
- if p.debugFormat {
- p.marker('i')
- }
-
- return p.rawInt64()
-}
-
-func (p *importer) path() string {
- if p.debugFormat {
- p.marker('p')
- }
- // if the path was seen before, i is its index (>= 0)
- // (the empty string is at index 0)
- i := p.rawInt64()
- if i >= 0 {
- return p.pathList[i]
- }
-	// otherwise, i is the negative number of path segments (< 0)
- a := make([]string, -i)
- for n := range a {
- a[n] = p.string()
- }
- s := strings.Join(a, "/")
- p.pathList = append(p.pathList, s)
- return s
-}
-
-func (p *importer) string() string {
- if p.debugFormat {
- p.marker('s')
- }
- // if the string was seen before, i is its index (>= 0)
- // (the empty string is at index 0)
- i := p.rawInt64()
- if i >= 0 {
- return p.strList[i]
- }
- // otherwise, i is the negative string length (< 0)
- if n := int(-i); n <= cap(p.buf) {
- p.buf = p.buf[:n]
- } else {
- p.buf = make([]byte, n)
- }
- for i := range p.buf {
- p.buf[i] = p.rawByte()
- }
- s := string(p.buf)
- p.strList = append(p.strList, s)
- return s
-}
-
-func (p *importer) marker(want byte) {
- if got := p.rawByte(); got != want {
- errorf("incorrect marker: got %c; want %c (pos = %d)", got, want, p.read)
- }
-
- pos := p.read
- if n := int(p.rawInt64()); n != pos {
- errorf("incorrect position: got %d; want %d", n, pos)
- }
-}
-
-// rawInt64 should only be used by low-level decoders.
-func (p *importer) rawInt64() int64 {
- i, err := binary.ReadVarint(p)
- if err != nil {
- errorf("read error: %v", err)
- }
- return i
-}
-
-// rawStringln should only be used to read the initial version string.
-func (p *importer) rawStringln(b byte) string {
- p.buf = p.buf[:0]
- for b != '\n' {
- p.buf = append(p.buf, b)
- b = p.rawByte()
- }
- return string(p.buf)
-}
-
-// needed for binary.ReadVarint in rawInt64
-func (p *importer) ReadByte() (byte, error) {
- return p.rawByte(), nil
-}
-
-// rawByte is the bottleneck interface for reading p.data.
-// It unescapes '|' 'S' to '$' and '|' '|' to '|'.
-// rawByte should only be used by low-level decoders.
-func (p *importer) rawByte() byte {
- b := p.data[0]
- r := 1
- if b == '|' {
- b = p.data[1]
- r = 2
- switch b {
- case 'S':
- b = '$'
- case '|':
- // nothing to do
- default:
- errorf("unexpected escape sequence in export data")
- }
- }
- p.data = p.data[r:]
- p.read += r
- return b
-}
-
-// ----------------------------------------------------------------------------
-// Export format
-
-// Tags. Must be < 0.
-const (
- // Objects
- packageTag = -(iota + 1)
- constTag
- typeTag
- varTag
- funcTag
- endTag
-
- // Types
- namedTag
- arrayTag
- sliceTag
- dddTag
- structTag
- pointerTag
- signatureTag
- interfaceTag
- mapTag
- chanTag
-
- // Values
- falseTag
- trueTag
- int64Tag
- floatTag
- fractionTag // not used by gc
- complexTag
- stringTag
- nilTag // only used by gc (appears in exported inlined function bodies)
- unknownTag // not used by gc (only appears in packages with errors)
-
- // Type aliases
- aliasTag
-)
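
The important property of these constants is that every tag is negative, so a decoder can tell a tag from a table index by sign alone; a compact sketch:

package main

import "fmt"

// A couple of stand-in tags, declared the same way as above.
const (
	packageTag = -(iota + 1)
	constTag
	typeTag
)

func main() {
	fmt.Println(packageTag, constTag, typeTag) // -1 -2 -3
	for _, v := range []int{constTag, 7} {
		if v < 0 {
			fmt.Println(v, "is a tag")
		} else {
			fmt.Println(v, "is an index")
		}
	}
}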
-
-var predeclOnce sync.Once
-var predecl []types.Type // initialized lazily
-
-func predeclared() []types.Type {
- predeclOnce.Do(func() {
- // initialize lazily to be sure that all
- // elements have been initialized before
- predecl = []types.Type{ // basic types
- types.Typ[types.Bool],
- types.Typ[types.Int],
- types.Typ[types.Int8],
- types.Typ[types.Int16],
- types.Typ[types.Int32],
- types.Typ[types.Int64],
- types.Typ[types.Uint],
- types.Typ[types.Uint8],
- types.Typ[types.Uint16],
- types.Typ[types.Uint32],
- types.Typ[types.Uint64],
- types.Typ[types.Uintptr],
- types.Typ[types.Float32],
- types.Typ[types.Float64],
- types.Typ[types.Complex64],
- types.Typ[types.Complex128],
- types.Typ[types.String],
-
- // basic type aliases
- types.Universe.Lookup("byte").Type(),
- types.Universe.Lookup("rune").Type(),
-
- // error
- types.Universe.Lookup("error").Type(),
-
- // untyped types
- types.Typ[types.UntypedBool],
- types.Typ[types.UntypedInt],
- types.Typ[types.UntypedRune],
- types.Typ[types.UntypedFloat],
- types.Typ[types.UntypedComplex],
- types.Typ[types.UntypedString],
- types.Typ[types.UntypedNil],
-
- // package unsafe
- types.Typ[types.UnsafePointer],
-
- // invalid type
- types.Typ[types.Invalid], // only appears in packages with errors
-
- // used internally by gc; never used by this package or in .a files
- anyType{},
- }
- predecl = append(predecl, additionalPredeclared()...)
- })
- return predecl
-}
-
-type anyType struct{}
-
-func (t anyType) Underlying() types.Type { return t }
-func (t anyType) String() string { return "any" }
diff --git a/go/internal/gcimporter/exportdata.go b/go/internal/gcimporter/exportdata.go
deleted file mode 100644
index f6437feb1..000000000
--- a/go/internal/gcimporter/exportdata.go
+++ /dev/null
@@ -1,99 +0,0 @@
-// Copyright 2011 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-// This file is a copy of $GOROOT/src/go/internal/gcimporter/exportdata.go.
-
-// This file implements FindExportData.
-
-package gcimporter
-
-import (
- "bufio"
- "fmt"
- "io"
- "strconv"
- "strings"
-)
-
-func readGopackHeader(r *bufio.Reader) (name string, size int64, err error) {
- // See $GOROOT/include/ar.h.
- hdr := make([]byte, 16+12+6+6+8+10+2)
- _, err = io.ReadFull(r, hdr)
- if err != nil {
- return
- }
- // leave for debugging
- if false {
- fmt.Printf("header: %s", hdr)
- }
- s := strings.TrimSpace(string(hdr[16+12+6+6+8:][:10]))
- length, err := strconv.Atoi(s)
- size = int64(length)
- if err != nil || hdr[len(hdr)-2] != '`' || hdr[len(hdr)-1] != '\n' {
- err = fmt.Errorf("invalid archive header")
- return
- }
- name = strings.TrimSpace(string(hdr[:16]))
- return
-}
-
-// FindExportData positions the reader r at the beginning of the
-// export data section of an underlying GC-created object/archive
-// file by reading from it. The reader must be positioned at the
-// start of the file before calling this function. The hdr result
-// is the string before the export data, either "$$" or "$$B".
-// The size result is the length of the export data in bytes, or -1 if not known.
-func FindExportData(r *bufio.Reader) (hdr string, size int64, err error) {
- // Read first line to make sure this is an object file.
- line, err := r.ReadSlice('\n')
- if err != nil {
- err = fmt.Errorf("can't find export data (%v)", err)
- return
- }
-
- if string(line) == "!<arch>\n" {
- // Archive file. Scan to __.PKGDEF.
- var name string
- if name, size, err = readGopackHeader(r); err != nil {
- return
- }
-
- // First entry should be __.PKGDEF.
- if name != "__.PKGDEF" {
- err = fmt.Errorf("go archive is missing __.PKGDEF")
- return
- }
-
- // Read first line of __.PKGDEF data, so that line
- // is once again the first line of the input.
- if line, err = r.ReadSlice('\n'); err != nil {
- err = fmt.Errorf("can't find export data (%v)", err)
- return
- }
- size -= int64(len(line))
- }
-
- // Now at __.PKGDEF in archive or still at beginning of file.
- // Either way, line should begin with "go object ".
- if !strings.HasPrefix(string(line), "go object ") {
- err = fmt.Errorf("not a Go object file")
- return
- }
-
- // Skip over object header to export data.
- // Begins after first line starting with $$.
- for line[0] != '$' {
- if line, err = r.ReadSlice('\n'); err != nil {
- err = fmt.Errorf("can't find export data (%v)", err)
- return
- }
- size -= int64(len(line))
- }
- hdr = string(line)
- if size < 0 {
- size = -1
- }
-
- return
-}
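Editorial aside (not part of the deleted file): a caller typically wraps the object file in a bufio.Reader and branches on the returned header, roughly as Import does further below. A minimal sketch, written as if inside package gcimporter and assuming the usual bufio/fmt/os imports:

func printExportHeader(path string) error {
	f, err := os.Open(path)
	if err != nil {
		return err
	}
	defer f.Close()

	// Position the reader at the export data and report the header kind.
	hdr, size, err := FindExportData(bufio.NewReader(f))
	if err != nil {
		return err
	}
	fmt.Printf("header %q, export data size %d\n", hdr, size)
	return nil
}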
diff --git a/go/internal/gcimporter/gcimporter.go b/go/internal/gcimporter/gcimporter.go
deleted file mode 100644
index 3ab66830d..000000000
--- a/go/internal/gcimporter/gcimporter.go
+++ /dev/null
@@ -1,1084 +0,0 @@
-// Copyright 2011 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-// This file is a modified copy of $GOROOT/src/go/internal/gcimporter/gcimporter.go,
-// but it also contains the original source-based importer code for Go1.6.
-// Once we stop supporting 1.6, we can remove that code.
-
-// Package gcimporter provides various functions for reading
-// gc-generated object files that can be used to implement the
-// Importer interface defined by the Go 1.5 standard library package.
-package gcimporter // import "golang.org/x/tools/go/internal/gcimporter"
-
-import (
- "bufio"
- "errors"
- "fmt"
- "go/build"
- "go/constant"
- "go/token"
- "go/types"
- "io"
- "io/ioutil"
- "os"
- "path/filepath"
- "sort"
- "strconv"
- "strings"
- "text/scanner"
-)
-
-const (
- // Enable debug during development: it adds some additional checks, and
- // prevents errors from being recovered.
- debug = false
-
-	// If trace is set, debugging output is printed to stdout.
- trace = false
-)
-
-var pkgExts = [...]string{".a", ".o"}
-
-// FindPkg returns the filename and unique package id for an import
-// path based on package information provided by build.Import (using
-// the build.Default build.Context). A relative srcDir is interpreted
-// relative to the current working directory.
-// If no file was found, an empty filename is returned.
-//
-func FindPkg(path, srcDir string) (filename, id string) {
- if path == "" {
- return
- }
-
- var noext string
- switch {
- default:
- // "x" -> "$GOPATH/pkg/$GOOS_$GOARCH/x.ext", "x"
- // Don't require the source files to be present.
- if abs, err := filepath.Abs(srcDir); err == nil { // see issue 14282
- srcDir = abs
- }
- bp, _ := build.Import(path, srcDir, build.FindOnly|build.AllowBinary)
- if bp.PkgObj == "" {
- id = path // make sure we have an id to print in error message
- return
- }
- noext = strings.TrimSuffix(bp.PkgObj, ".a")
- id = bp.ImportPath
-
- case build.IsLocalImport(path):
- // "./x" -> "/this/directory/x.ext", "/this/directory/x"
- noext = filepath.Join(srcDir, path)
- id = noext
-
- case filepath.IsAbs(path):
- // for completeness only - go/build.Import
- // does not support absolute imports
- // "/x" -> "/x.ext", "/x"
- noext = path
- id = path
- }
-
- if false { // for debugging
- if path != id {
- fmt.Printf("%s -> %s\n", path, id)
- }
- }
-
- // try extensions
- for _, ext := range pkgExts {
- filename = noext + ext
- if f, err := os.Stat(filename); err == nil && !f.IsDir() {
- return
- }
- }
-
- filename = "" // not found
- return
-}
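Editorial aside: FindPkg is the lookup step that precedes opening the object file. A hedged sketch of its use from within the package (the helper name is hypothetical):

func mustFindPkg(path, srcDir string) (string, error) {
	filename, id := FindPkg(path, srcDir)
	if filename == "" {
		// No compiled package file (.a or .o) was found for this import path.
		return "", fmt.Errorf("can't find import: %q", id)
	}
	return filename, nil
}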
-
-// ImportData imports a package by reading the gc-generated export data,
-// adds the corresponding package object to the packages map indexed by id,
-// and returns the object.
-//
-// The packages map must contain all packages already imported. The data
-// reader position must be the beginning of the export data section. The
-// filename is only used in error messages.
-//
-// If packages[id] contains the completely imported package, that package
-// can be used directly, and there is no need to call this function (but
-// there is also no harm in doing so, apart from the extra time spent).
-//
-func ImportData(packages map[string]*types.Package, filename, id string, data io.Reader) (pkg *types.Package, err error) {
- // support for parser error handling
- defer func() {
- switch r := recover().(type) {
- case nil:
- // nothing to do
- case importError:
- err = r
- default:
- panic(r) // internal error
- }
- }()
-
- var p parser
- p.init(filename, id, data, packages)
- pkg = p.parseExport()
-
- return
-}
-
-// Import imports a gc-generated package given its import path and srcDir, adds
-// the corresponding package object to the packages map, and returns the object.
-// The packages map must contain all packages already imported.
-//
-func Import(packages map[string]*types.Package, path, srcDir string, lookup func(path string) (io.ReadCloser, error)) (pkg *types.Package, err error) {
- var rc io.ReadCloser
- var filename, id string
- if lookup != nil {
- // With custom lookup specified, assume that caller has
- // converted path to a canonical import path for use in the map.
- if path == "unsafe" {
- return types.Unsafe, nil
- }
- id = path
-
- // No need to re-import if the package was imported completely before.
- if pkg = packages[id]; pkg != nil && pkg.Complete() {
- return
- }
- f, err := lookup(path)
- if err != nil {
- return nil, err
- }
- rc = f
- } else {
- filename, id = FindPkg(path, srcDir)
- if filename == "" {
- if path == "unsafe" {
- return types.Unsafe, nil
- }
- return nil, fmt.Errorf("can't find import: %q", id)
- }
-
- // no need to re-import if the package was imported completely before
- if pkg = packages[id]; pkg != nil && pkg.Complete() {
- return
- }
-
- // open file
- f, err := os.Open(filename)
- if err != nil {
- return nil, err
- }
- defer func() {
- if err != nil {
- // add file name to error
- err = fmt.Errorf("%s: %v", filename, err)
- }
- }()
- rc = f
- }
- defer rc.Close()
-
- var hdr string
- buf := bufio.NewReader(rc)
- if hdr, _, err = FindExportData(buf); err != nil {
- return
- }
-
- switch hdr {
- case "$$\n":
- // Work-around if we don't have a filename; happens only if lookup != nil.
- // Either way, the filename is only needed for importer error messages, so
- // this is fine.
- if filename == "" {
- filename = path
- }
- return ImportData(packages, filename, id, buf)
-
- case "$$B\n":
- var data []byte
- data, err = ioutil.ReadAll(buf)
- if err != nil {
- break
- }
-
- // TODO(gri): allow clients of go/importer to provide a FileSet.
- // Or, define a new standard go/types/gcexportdata package.
- fset := token.NewFileSet()
-
- // The indexed export format starts with an 'i'; the older
- // binary export format starts with a 'c', 'd', or 'v'
- // (from "version"). Select appropriate importer.
- if len(data) > 0 && data[0] == 'i' {
- _, pkg, err = IImportData(fset, packages, data[1:], id)
- } else {
- _, pkg, err = BImportData(fset, packages, data, id)
- }
-
- default:
- err = fmt.Errorf("unknown export data header: %q", hdr)
- }
-
- return
-}
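Editorial aside: Import is the package's main entry point, and the tests further down call it in exactly this shape. A minimal usage sketch, written as if inside the package:

func listExports() error {
	packages := make(map[string]*types.Package)
	pkg, err := Import(packages, "fmt", ".", nil) // nil lookup: fall back to FindPkg
	if err != nil {
		return err
	}
	fmt.Println(pkg.Path(), pkg.Scope().Names())
	return nil
}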
-
-// ----------------------------------------------------------------------------
-// Parser
-
-// TODO(gri) Imported objects don't have position information.
-// Ideally use the debug table line info; alternatively
-// create some fake position (or the position of the
-// import). That way error messages referring to imported
-// objects can print meaningful information.
-
-// parser parses the exports inside a gc compiler-produced
-// object/archive file and populates its scope with the results.
-type parser struct {
- scanner scanner.Scanner
- tok rune // current token
- lit string // literal string; only valid for Ident, Int, String tokens
- id string // package id of imported package
- sharedPkgs map[string]*types.Package // package id -> package object (across importer)
- localPkgs map[string]*types.Package // package id -> package object (just this package)
-}
-
-func (p *parser) init(filename, id string, src io.Reader, packages map[string]*types.Package) {
- p.scanner.Init(src)
- p.scanner.Error = func(_ *scanner.Scanner, msg string) { p.error(msg) }
- p.scanner.Mode = scanner.ScanIdents | scanner.ScanInts | scanner.ScanChars | scanner.ScanStrings | scanner.ScanComments | scanner.SkipComments
- p.scanner.Whitespace = 1<<'\t' | 1<<' '
- p.scanner.Filename = filename // for good error messages
- p.next()
- p.id = id
- p.sharedPkgs = packages
- if debug {
- // check consistency of packages map
- for _, pkg := range packages {
- if pkg.Name() == "" {
- fmt.Printf("no package name for %s\n", pkg.Path())
- }
- }
- }
-}
-
-func (p *parser) next() {
- p.tok = p.scanner.Scan()
- switch p.tok {
- case scanner.Ident, scanner.Int, scanner.Char, scanner.String, '·':
- p.lit = p.scanner.TokenText()
- default:
- p.lit = ""
- }
- if debug {
- fmt.Printf("%s: %q -> %q\n", scanner.TokenString(p.tok), p.scanner.TokenText(), p.lit)
- }
-}
-
-func declTypeName(pkg *types.Package, name string) *types.TypeName {
- scope := pkg.Scope()
- if obj := scope.Lookup(name); obj != nil {
- return obj.(*types.TypeName)
- }
- obj := types.NewTypeName(token.NoPos, pkg, name, nil)
- // a named type may be referred to before the underlying type
- // is known - set it up
- types.NewNamed(obj, nil, nil)
- scope.Insert(obj)
- return obj
-}
-
-// ----------------------------------------------------------------------------
-// Error handling
-
-// Internal errors are boxed as importErrors.
-type importError struct {
- pos scanner.Position
- err error
-}
-
-func (e importError) Error() string {
- return fmt.Sprintf("import error %s (byte offset = %d): %s", e.pos, e.pos.Offset, e.err)
-}
-
-func (p *parser) error(err interface{}) {
- if s, ok := err.(string); ok {
- err = errors.New(s)
- }
- // panic with a runtime.Error if err is not an error
- panic(importError{p.scanner.Pos(), err.(error)})
-}
-
-func (p *parser) errorf(format string, args ...interface{}) {
- p.error(fmt.Sprintf(format, args...))
-}
-
-func (p *parser) expect(tok rune) string {
- lit := p.lit
- if p.tok != tok {
- p.errorf("expected %s, got %s (%s)", scanner.TokenString(tok), scanner.TokenString(p.tok), lit)
- }
- p.next()
- return lit
-}
-
-func (p *parser) expectSpecial(tok string) {
- sep := 'x' // not white space
- i := 0
- for i < len(tok) && p.tok == rune(tok[i]) && sep > ' ' {
- sep = p.scanner.Peek() // if sep <= ' ', there is white space before the next token
- p.next()
- i++
- }
- if i < len(tok) {
- p.errorf("expected %q, got %q", tok, tok[0:i])
- }
-}
-
-func (p *parser) expectKeyword(keyword string) {
- lit := p.expect(scanner.Ident)
- if lit != keyword {
- p.errorf("expected keyword %s, got %q", keyword, lit)
- }
-}
-
-// ----------------------------------------------------------------------------
-// Qualified and unqualified names
-
-// PackageId = string_lit .
-//
-func (p *parser) parsePackageID() string {
- id, err := strconv.Unquote(p.expect(scanner.String))
- if err != nil {
- p.error(err)
- }
- // id == "" stands for the imported package id
- // (only known at time of package installation)
- if id == "" {
- id = p.id
- }
- return id
-}
-
-// PackageName = ident .
-//
-func (p *parser) parsePackageName() string {
- return p.expect(scanner.Ident)
-}
-
-// dotIdentifier = ( ident | '·' ) { ident | int | '·' } .
-func (p *parser) parseDotIdent() string {
- ident := ""
- if p.tok != scanner.Int {
- sep := 'x' // not white space
- for (p.tok == scanner.Ident || p.tok == scanner.Int || p.tok == '·') && sep > ' ' {
- ident += p.lit
- sep = p.scanner.Peek() // if sep <= ' ', there is white space before the next token
- p.next()
- }
- }
- if ident == "" {
- p.expect(scanner.Ident) // use expect() for error handling
- }
- return ident
-}
-
-// QualifiedName = "@" PackageId "." ( "?" | dotIdentifier ) .
-//
-func (p *parser) parseQualifiedName() (id, name string) {
- p.expect('@')
- id = p.parsePackageID()
- p.expect('.')
- // Per rev f280b8a485fd (10/2/2013), qualified names may be used for anonymous fields.
- if p.tok == '?' {
- p.next()
- } else {
- name = p.parseDotIdent()
- }
- return
-}
-
-// getPkg returns the package for a given id. If the package is
-// not found, create the package and add it to the p.localPkgs
-// and p.sharedPkgs maps. name is the (expected) name of the
-// package. If name == "", the package name is expected to be
-// set later via an import clause in the export data.
-//
-// id identifies a package, usually by a canonical package path like
-// "encoding/json" but possibly by a non-canonical import path like
-// "./json".
-//
-func (p *parser) getPkg(id, name string) *types.Package {
- // package unsafe is not in the packages maps - handle explicitly
- if id == "unsafe" {
- return types.Unsafe
- }
-
- pkg := p.localPkgs[id]
- if pkg == nil {
- // first import of id from this package
- pkg = p.sharedPkgs[id]
- if pkg == nil {
- // first import of id by this importer;
- // add (possibly unnamed) pkg to shared packages
- pkg = types.NewPackage(id, name)
- p.sharedPkgs[id] = pkg
- }
- // add (possibly unnamed) pkg to local packages
- if p.localPkgs == nil {
- p.localPkgs = make(map[string]*types.Package)
- }
- p.localPkgs[id] = pkg
- } else if name != "" {
- // package exists already and we have an expected package name;
- // make sure names match or set package name if necessary
- if pname := pkg.Name(); pname == "" {
- pkg.SetName(name)
- } else if pname != name {
- p.errorf("%s package name mismatch: %s (given) vs %s (expected)", id, pname, name)
- }
- }
- return pkg
-}
-
-// parseExportedName is like parseQualifiedName, but
-// the package id is resolved to an imported *types.Package.
-//
-func (p *parser) parseExportedName() (pkg *types.Package, name string) {
- id, name := p.parseQualifiedName()
- pkg = p.getPkg(id, "")
- return
-}
-
-// ----------------------------------------------------------------------------
-// Types
-
-// BasicType = identifier .
-//
-func (p *parser) parseBasicType() types.Type {
- id := p.expect(scanner.Ident)
- obj := types.Universe.Lookup(id)
- if obj, ok := obj.(*types.TypeName); ok {
- return obj.Type()
- }
- p.errorf("not a basic type: %s", id)
- return nil
-}
-
-// ArrayType = "[" int_lit "]" Type .
-//
-func (p *parser) parseArrayType(parent *types.Package) types.Type {
- // "[" already consumed and lookahead known not to be "]"
- lit := p.expect(scanner.Int)
- p.expect(']')
- elem := p.parseType(parent)
- n, err := strconv.ParseInt(lit, 10, 64)
- if err != nil {
- p.error(err)
- }
- return types.NewArray(elem, n)
-}
-
-// MapType = "map" "[" Type "]" Type .
-//
-func (p *parser) parseMapType(parent *types.Package) types.Type {
- p.expectKeyword("map")
- p.expect('[')
- key := p.parseType(parent)
- p.expect(']')
- elem := p.parseType(parent)
- return types.NewMap(key, elem)
-}
-
-// Name = identifier | "?" | QualifiedName .
-//
-// For unqualified and anonymous names, the returned package is the parent
-// package unless parent == nil, in which case the returned package is the
-// package being imported. (The parent package is not nil if the name
-// is an unqualified struct field or interface method name belonging to a
-// type declared in another package.)
-//
-// For qualified names, the returned package is nil (and not created if
-// it doesn't exist yet) unless materializePkg is set (which creates an
-// unnamed package with valid package path). In the latter case, a
-// subsequent import clause is expected to provide a name for the package.
-//
-func (p *parser) parseName(parent *types.Package, materializePkg bool) (pkg *types.Package, name string) {
- pkg = parent
- if pkg == nil {
- pkg = p.sharedPkgs[p.id]
- }
- switch p.tok {
- case scanner.Ident:
- name = p.lit
- p.next()
- case '?':
- // anonymous
- p.next()
- case '@':
- // exported name prefixed with package path
- pkg = nil
- var id string
- id, name = p.parseQualifiedName()
- if materializePkg {
- pkg = p.getPkg(id, "")
- }
- default:
- p.error("name expected")
- }
- return
-}
-
-func deref(typ types.Type) types.Type {
- if p, _ := typ.(*types.Pointer); p != nil {
- return p.Elem()
- }
- return typ
-}
-
-// Field = Name Type [ string_lit ] .
-//
-func (p *parser) parseField(parent *types.Package) (*types.Var, string) {
- pkg, name := p.parseName(parent, true)
-
- if name == "_" {
- // Blank fields should be package-qualified because they
- // are unexported identifiers, but gc does not qualify them.
- // Assuming that the ident belongs to the current package
- // causes types to change during re-exporting, leading
- // to spurious "can't assign A to B" errors from go/types.
- // As a workaround, pretend all blank fields belong
- // to the same unique dummy package.
- const blankpkg = "<_>"
- pkg = p.getPkg(blankpkg, blankpkg)
- }
-
- typ := p.parseType(parent)
- anonymous := false
- if name == "" {
- // anonymous field - typ must be T or *T and T must be a type name
- switch typ := deref(typ).(type) {
- case *types.Basic: // basic types are named types
- pkg = nil // objects defined in Universe scope have no package
- name = typ.Name()
- case *types.Named:
- name = typ.Obj().Name()
- default:
- p.errorf("anonymous field expected")
- }
- anonymous = true
- }
- tag := ""
- if p.tok == scanner.String {
- s := p.expect(scanner.String)
- var err error
- tag, err = strconv.Unquote(s)
- if err != nil {
- p.errorf("invalid struct tag %s: %s", s, err)
- }
- }
- return types.NewField(token.NoPos, pkg, name, typ, anonymous), tag
-}
-
-// StructType = "struct" "{" [ FieldList ] "}" .
-// FieldList = Field { ";" Field } .
-//
-func (p *parser) parseStructType(parent *types.Package) types.Type {
- var fields []*types.Var
- var tags []string
-
- p.expectKeyword("struct")
- p.expect('{')
- for i := 0; p.tok != '}' && p.tok != scanner.EOF; i++ {
- if i > 0 {
- p.expect(';')
- }
- fld, tag := p.parseField(parent)
- if tag != "" && tags == nil {
- tags = make([]string, i)
- }
- if tags != nil {
- tags = append(tags, tag)
- }
- fields = append(fields, fld)
- }
- p.expect('}')
-
- return types.NewStruct(fields, tags)
-}
-
-// Parameter = ( identifier | "?" ) [ "..." ] Type [ string_lit ] .
-//
-func (p *parser) parseParameter() (par *types.Var, isVariadic bool) {
- _, name := p.parseName(nil, false)
- // remove gc-specific parameter numbering
- if i := strings.Index(name, "·"); i >= 0 {
- name = name[:i]
- }
- if p.tok == '.' {
- p.expectSpecial("...")
- isVariadic = true
- }
- typ := p.parseType(nil)
- if isVariadic {
- typ = types.NewSlice(typ)
- }
- // ignore argument tag (e.g. "noescape")
- if p.tok == scanner.String {
- p.next()
- }
- // TODO(gri) should we provide a package?
- par = types.NewVar(token.NoPos, nil, name, typ)
- return
-}
-
-// Parameters = "(" [ ParameterList ] ")" .
-// ParameterList = { Parameter "," } Parameter .
-//
-func (p *parser) parseParameters() (list []*types.Var, isVariadic bool) {
- p.expect('(')
- for p.tok != ')' && p.tok != scanner.EOF {
- if len(list) > 0 {
- p.expect(',')
- }
- par, variadic := p.parseParameter()
- list = append(list, par)
- if variadic {
- if isVariadic {
- p.error("... not on final argument")
- }
- isVariadic = true
- }
- }
- p.expect(')')
-
- return
-}
-
-// Signature = Parameters [ Result ] .
-// Result = Type | Parameters .
-//
-func (p *parser) parseSignature(recv *types.Var) *types.Signature {
- params, isVariadic := p.parseParameters()
-
- // optional result type
- var results []*types.Var
- if p.tok == '(' {
- var variadic bool
- results, variadic = p.parseParameters()
- if variadic {
- p.error("... not permitted on result type")
- }
- }
-
- return types.NewSignature(recv, types.NewTuple(params...), types.NewTuple(results...), isVariadic)
-}
-
-// InterfaceType = "interface" "{" [ MethodList ] "}" .
-// MethodList = Method { ";" Method } .
-// Method = Name Signature .
-//
-// The methods of embedded interfaces are always "inlined"
-// by the compiler and thus embedded interfaces are never
-// visible in the export data.
-//
-func (p *parser) parseInterfaceType(parent *types.Package) types.Type {
- var methods []*types.Func
-
- p.expectKeyword("interface")
- p.expect('{')
- for i := 0; p.tok != '}' && p.tok != scanner.EOF; i++ {
- if i > 0 {
- p.expect(';')
- }
- pkg, name := p.parseName(parent, true)
- sig := p.parseSignature(nil)
- methods = append(methods, types.NewFunc(token.NoPos, pkg, name, sig))
- }
- p.expect('}')
-
- // Complete requires the type's embedded interfaces to be fully defined,
- // but we do not define any
- return newInterface(methods, nil).Complete()
-}
-
-// ChanType = ( "chan" [ "<-" ] | "<-" "chan" ) Type .
-//
-func (p *parser) parseChanType(parent *types.Package) types.Type {
- dir := types.SendRecv
- if p.tok == scanner.Ident {
- p.expectKeyword("chan")
- if p.tok == '<' {
- p.expectSpecial("<-")
- dir = types.SendOnly
- }
- } else {
- p.expectSpecial("<-")
- p.expectKeyword("chan")
- dir = types.RecvOnly
- }
- elem := p.parseType(parent)
- return types.NewChan(dir, elem)
-}
-
-// Type =
-// BasicType | TypeName | ArrayType | SliceType | StructType |
-// PointerType | FuncType | InterfaceType | MapType | ChanType |
-// "(" Type ")" .
-//
-// BasicType = ident .
-// TypeName = ExportedName .
-// SliceType = "[" "]" Type .
-// PointerType = "*" Type .
-// FuncType = "func" Signature .
-//
-func (p *parser) parseType(parent *types.Package) types.Type {
- switch p.tok {
- case scanner.Ident:
- switch p.lit {
- default:
- return p.parseBasicType()
- case "struct":
- return p.parseStructType(parent)
- case "func":
- // FuncType
- p.next()
- return p.parseSignature(nil)
- case "interface":
- return p.parseInterfaceType(parent)
- case "map":
- return p.parseMapType(parent)
- case "chan":
- return p.parseChanType(parent)
- }
- case '@':
- // TypeName
- pkg, name := p.parseExportedName()
- return declTypeName(pkg, name).Type()
- case '[':
- p.next() // look ahead
- if p.tok == ']' {
- // SliceType
- p.next()
- return types.NewSlice(p.parseType(parent))
- }
- return p.parseArrayType(parent)
- case '*':
- // PointerType
- p.next()
- return types.NewPointer(p.parseType(parent))
- case '<':
- return p.parseChanType(parent)
- case '(':
- // "(" Type ")"
- p.next()
- typ := p.parseType(parent)
- p.expect(')')
- return typ
- }
- p.errorf("expected type, got %s (%q)", scanner.TokenString(p.tok), p.lit)
- return nil
-}
-
-// ----------------------------------------------------------------------------
-// Declarations
-
-// ImportDecl = "import" PackageName PackageId .
-//
-func (p *parser) parseImportDecl() {
- p.expectKeyword("import")
- name := p.parsePackageName()
- p.getPkg(p.parsePackageID(), name)
-}
-
-// int_lit = [ "+" | "-" ] { "0" ... "9" } .
-//
-func (p *parser) parseInt() string {
- s := ""
- switch p.tok {
- case '-':
- s = "-"
- p.next()
- case '+':
- p.next()
- }
- return s + p.expect(scanner.Int)
-}
-
-// number = int_lit [ "p" int_lit ] .
-//
-func (p *parser) parseNumber() (typ *types.Basic, val constant.Value) {
- // mantissa
- mant := constant.MakeFromLiteral(p.parseInt(), token.INT, 0)
- if mant == nil {
- panic("invalid mantissa")
- }
-
- if p.lit == "p" {
- // exponent (base 2)
- p.next()
- exp, err := strconv.ParseInt(p.parseInt(), 10, 0)
- if err != nil {
- p.error(err)
- }
- if exp < 0 {
- denom := constant.MakeInt64(1)
- denom = constant.Shift(denom, token.SHL, uint(-exp))
- typ = types.Typ[types.UntypedFloat]
- val = constant.BinaryOp(mant, token.QUO, denom)
- return
- }
- if exp > 0 {
- mant = constant.Shift(mant, token.SHL, uint(exp))
- }
- typ = types.Typ[types.UntypedFloat]
- val = mant
- return
- }
-
- typ = types.Typ[types.UntypedInt]
- val = mant
- return
-}
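Editorial aside, a worked example of the base-2 exponent handling above: the textual literal 5p-2 denotes 5 * 2^-2 = 1.25, which parseNumber represents exactly as the rational 5/4 via go/constant. The same arithmetic as a standalone program:

package main

import (
	"fmt"
	"go/constant"
	"go/token"
)

func main() {
	mant := constant.MakeFromLiteral("5", token.INT, 0)
	denom := constant.Shift(constant.MakeInt64(1), token.SHL, 2) // 2^2
	val := constant.BinaryOp(mant, token.QUO, denom)             // exact 5/4
	fmt.Println(val)                                             // 1.25
}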
-
-// ConstDecl = "const" ExportedName [ Type ] "=" Literal .
-// Literal = bool_lit | int_lit | float_lit | complex_lit | rune_lit | string_lit .
-// bool_lit = "true" | "false" .
-// complex_lit = "(" float_lit "+" float_lit "i" ")" .
-// rune_lit = "(" int_lit "+" int_lit ")" .
-// string_lit = `"` { unicode_char } `"` .
-//
-func (p *parser) parseConstDecl() {
- p.expectKeyword("const")
- pkg, name := p.parseExportedName()
-
- var typ0 types.Type
- if p.tok != '=' {
- // constant types are never structured - no need for parent type
- typ0 = p.parseType(nil)
- }
-
- p.expect('=')
- var typ types.Type
- var val constant.Value
- switch p.tok {
- case scanner.Ident:
- // bool_lit
- if p.lit != "true" && p.lit != "false" {
- p.error("expected true or false")
- }
- typ = types.Typ[types.UntypedBool]
- val = constant.MakeBool(p.lit == "true")
- p.next()
-
- case '-', scanner.Int:
- // int_lit
- typ, val = p.parseNumber()
-
- case '(':
- // complex_lit or rune_lit
- p.next()
- if p.tok == scanner.Char {
- p.next()
- p.expect('+')
- typ = types.Typ[types.UntypedRune]
- _, val = p.parseNumber()
- p.expect(')')
- break
- }
- _, re := p.parseNumber()
- p.expect('+')
- _, im := p.parseNumber()
- p.expectKeyword("i")
- p.expect(')')
- typ = types.Typ[types.UntypedComplex]
- val = constant.BinaryOp(re, token.ADD, constant.MakeImag(im))
-
- case scanner.Char:
- // rune_lit
- typ = types.Typ[types.UntypedRune]
- val = constant.MakeFromLiteral(p.lit, token.CHAR, 0)
- p.next()
-
- case scanner.String:
- // string_lit
- typ = types.Typ[types.UntypedString]
- val = constant.MakeFromLiteral(p.lit, token.STRING, 0)
- p.next()
-
- default:
- p.errorf("expected literal got %s", scanner.TokenString(p.tok))
- }
-
- if typ0 == nil {
- typ0 = typ
- }
-
- pkg.Scope().Insert(types.NewConst(token.NoPos, pkg, name, typ0, val))
-}
-
-// TypeDecl = "type" ExportedName Type .
-//
-func (p *parser) parseTypeDecl() {
- p.expectKeyword("type")
- pkg, name := p.parseExportedName()
- obj := declTypeName(pkg, name)
-
- // The type object may have been imported before and thus already
- // have a type associated with it. We still need to parse the type
- // structure, but throw it away if the object already has a type.
- // This ensures that all imports refer to the same type object for
- // a given type declaration.
- typ := p.parseType(pkg)
-
- if name := obj.Type().(*types.Named); name.Underlying() == nil {
- name.SetUnderlying(typ)
- }
-}
-
-// VarDecl = "var" ExportedName Type .
-//
-func (p *parser) parseVarDecl() {
- p.expectKeyword("var")
- pkg, name := p.parseExportedName()
- typ := p.parseType(pkg)
- pkg.Scope().Insert(types.NewVar(token.NoPos, pkg, name, typ))
-}
-
-// Func = Signature [ Body ] .
-// Body = "{" ... "}" .
-//
-func (p *parser) parseFunc(recv *types.Var) *types.Signature {
- sig := p.parseSignature(recv)
- if p.tok == '{' {
- p.next()
- for i := 1; i > 0; p.next() {
- switch p.tok {
- case '{':
- i++
- case '}':
- i--
- }
- }
- }
- return sig
-}
-
-// MethodDecl = "func" Receiver Name Func .
-// Receiver = "(" ( identifier | "?" ) [ "*" ] ExportedName ")" .
-//
-func (p *parser) parseMethodDecl() {
- // "func" already consumed
- p.expect('(')
- recv, _ := p.parseParameter() // receiver
- p.expect(')')
-
- // determine receiver base type object
- base := deref(recv.Type()).(*types.Named)
-
- // parse method name, signature, and possibly inlined body
- _, name := p.parseName(nil, false)
- sig := p.parseFunc(recv)
-
- // methods always belong to the same package as the base type object
- pkg := base.Obj().Pkg()
-
- // add method to type unless type was imported before
- // and method exists already
- // TODO(gri) This leads to a quadratic algorithm - ok for now because method counts are small.
- base.AddMethod(types.NewFunc(token.NoPos, pkg, name, sig))
-}
-
-// FuncDecl = "func" ExportedName Func .
-//
-func (p *parser) parseFuncDecl() {
- // "func" already consumed
- pkg, name := p.parseExportedName()
- typ := p.parseFunc(nil)
- pkg.Scope().Insert(types.NewFunc(token.NoPos, pkg, name, typ))
-}
-
-// Decl = [ ImportDecl | ConstDecl | TypeDecl | VarDecl | FuncDecl | MethodDecl ] "\n" .
-//
-func (p *parser) parseDecl() {
- if p.tok == scanner.Ident {
- switch p.lit {
- case "import":
- p.parseImportDecl()
- case "const":
- p.parseConstDecl()
- case "type":
- p.parseTypeDecl()
- case "var":
- p.parseVarDecl()
- case "func":
- p.next() // look ahead
- if p.tok == '(' {
- p.parseMethodDecl()
- } else {
- p.parseFuncDecl()
- }
- }
- }
- p.expect('\n')
-}
-
-// ----------------------------------------------------------------------------
-// Export
-
-// Export = PackageClause { Decl } "$$" .
-// PackageClause = "package" PackageName [ "safe" ] "\n" .
-//
-func (p *parser) parseExport() *types.Package {
- p.expectKeyword("package")
- name := p.parsePackageName()
- if p.tok == scanner.Ident && p.lit == "safe" {
- // package was compiled with -u option - ignore
- p.next()
- }
- p.expect('\n')
-
- pkg := p.getPkg(p.id, name)
-
- for p.tok != '$' && p.tok != scanner.EOF {
- p.parseDecl()
- }
-
- if ch := p.scanner.Peek(); p.tok != '$' || ch != '$' {
- // don't call next()/expect() since reading past the
- // export data may cause scanner errors (e.g. NUL chars)
- p.errorf("expected '$$', got %s %c", scanner.TokenString(p.tok), ch)
- }
-
- if n := p.scanner.ErrorCount; n != 0 {
- p.errorf("expected no scanner errors, got %d", n)
- }
-
- // Record all locally referenced packages as imports.
- var imports []*types.Package
- for id, pkg2 := range p.localPkgs {
- if pkg2.Name() == "" {
- p.errorf("%s package has no name", id)
- }
- if id == p.id {
- continue // avoid self-edge
- }
- imports = append(imports, pkg2)
- }
- sort.Sort(byPath(imports))
- pkg.SetImports(imports)
-
- // package was imported completely and without errors
- pkg.MarkComplete()
-
- return pkg
-}
-
-type byPath []*types.Package
-
-func (a byPath) Len() int { return len(a) }
-func (a byPath) Swap(i, j int) { a[i], a[j] = a[j], a[i] }
-func (a byPath) Less(i, j int) bool { return a[i].Path() < a[j].Path() }
diff --git a/go/internal/gcimporter/gcimporter_test.go b/go/internal/gcimporter/gcimporter_test.go
deleted file mode 100644
index 6baab0128..000000000
--- a/go/internal/gcimporter/gcimporter_test.go
+++ /dev/null
@@ -1,611 +0,0 @@
-// Copyright 2011 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-// This file is a copy of $GOROOT/src/go/internal/gcimporter/gcimporter_test.go,
-// adjusted to make it build with code from (std lib) internal/testenv copied.
-
-package gcimporter
-
-import (
- "bytes"
- "fmt"
- "go/build"
- "go/constant"
- "go/types"
- "io/ioutil"
- "os"
- "os/exec"
- "path/filepath"
- "runtime"
- "strings"
- "testing"
- "time"
-
- "golang.org/x/tools/internal/testenv"
-)
-
-func TestMain(m *testing.M) {
- testenv.ExitIfSmallMachine()
- os.Exit(m.Run())
-}
-
-// ----------------------------------------------------------------------------
-
-func needsCompiler(t *testing.T, compiler string) {
- if runtime.Compiler == compiler {
- return
- }
- switch compiler {
- case "gc":
- t.Skipf("gc-built packages not available (compiler = %s)", runtime.Compiler)
- }
-}
-
-// compile runs the compiler on filename, with dirname as the working directory,
-// and writes the output file to outdirname.
-func compile(t *testing.T, dirname, filename, outdirname string) string {
- testenv.NeedsGoBuild(t)
-
- // filename must end with ".go"
- if !strings.HasSuffix(filename, ".go") {
- t.Fatalf("filename doesn't end in .go: %s", filename)
- }
- basename := filepath.Base(filename)
- outname := filepath.Join(outdirname, basename[:len(basename)-2]+"o")
- cmd := exec.Command("go", "tool", "compile", "-p=p", "-o", outname, filename)
- cmd.Dir = dirname
- out, err := cmd.CombinedOutput()
- if err != nil {
- t.Logf("%s", out)
- t.Fatalf("go tool compile %s failed: %s", filename, err)
- }
- return outname
-}
-
-func testPath(t *testing.T, path, srcDir string) *types.Package {
- t0 := time.Now()
- pkg, err := Import(make(map[string]*types.Package), path, srcDir, nil)
- if err != nil {
- t.Errorf("testPath(%s): %s", path, err)
- return nil
- }
- t.Logf("testPath(%s): %v", path, time.Since(t0))
- return pkg
-}
-
-const maxTime = 30 * time.Second
-
-func testDir(t *testing.T, dir string, endTime time.Time) (nimports int) {
- dirname := filepath.Join(runtime.GOROOT(), "pkg", runtime.GOOS+"_"+runtime.GOARCH, dir)
- list, err := ioutil.ReadDir(dirname)
- if err != nil {
- t.Fatalf("testDir(%s): %s", dirname, err)
- }
- for _, f := range list {
- if time.Now().After(endTime) {
- t.Log("testing time used up")
- return
- }
- switch {
- case !f.IsDir():
- // try extensions
- for _, ext := range pkgExts {
- if strings.HasSuffix(f.Name(), ext) {
- name := f.Name()[0 : len(f.Name())-len(ext)] // remove extension
- if testPath(t, filepath.Join(dir, name), dir) != nil {
- nimports++
- }
- }
- }
- case f.IsDir():
- nimports += testDir(t, filepath.Join(dir, f.Name()), endTime)
- }
- }
- return
-}
-
-func mktmpdir(t *testing.T) string {
- tmpdir, err := ioutil.TempDir("", "gcimporter_test")
- if err != nil {
- t.Fatal("mktmpdir:", err)
- }
- if err := os.Mkdir(filepath.Join(tmpdir, "testdata"), 0700); err != nil {
- os.RemoveAll(tmpdir)
- t.Fatal("mktmpdir:", err)
- }
- return tmpdir
-}
-
-const testfile = "exports.go"
-
-func TestImportTestdata(t *testing.T) {
- needsCompiler(t, "gc")
-
- tmpdir := mktmpdir(t)
- defer os.RemoveAll(tmpdir)
-
- compile(t, "testdata", testfile, filepath.Join(tmpdir, "testdata"))
-
- // filename should end with ".go"
- filename := testfile[:len(testfile)-3]
- if pkg := testPath(t, "./testdata/"+filename, tmpdir); pkg != nil {
- // The package's Imports list must include all packages
- // explicitly imported by testfile, plus all packages
- // referenced indirectly via exported objects in testfile.
- // With the textual export format (when run against Go1.6),
- // the list may also include additional packages that are
- // not strictly required for import processing alone (they
- // are exported to err "on the safe side").
- // For now, we just test the presence of a few packages
- // that we know are there for sure.
- got := fmt.Sprint(pkg.Imports())
- for _, want := range []string{"go/ast", "go/token"} {
- if !strings.Contains(got, want) {
- t.Errorf(`Package("exports").Imports() = %s, does not contain %s`, got, want)
- }
- }
- }
-}
-
-func TestVersionHandling(t *testing.T) {
- if debug {
- t.Skip("TestVersionHandling panics in debug mode")
- }
-
- // This package only handles gc export data.
- needsCompiler(t, "gc")
-
- const dir = "./testdata/versions"
- list, err := ioutil.ReadDir(dir)
- if err != nil {
- t.Fatal(err)
- }
-
- tmpdir := mktmpdir(t)
- defer os.RemoveAll(tmpdir)
- corruptdir := filepath.Join(tmpdir, "testdata", "versions")
- if err := os.Mkdir(corruptdir, 0700); err != nil {
- t.Fatal(err)
- }
-
- for _, f := range list {
- name := f.Name()
- if !strings.HasSuffix(name, ".a") {
- continue // not a package file
- }
- if strings.Contains(name, "corrupted") {
- continue // don't process a leftover corrupted file
- }
- pkgpath := "./" + name[:len(name)-2]
-
- if testing.Verbose() {
- t.Logf("importing %s", name)
- }
-
- // test that export data can be imported
- _, err := Import(make(map[string]*types.Package), pkgpath, dir, nil)
- if err != nil {
- // ok to fail if it fails with a newer version error for select files
- if strings.Contains(err.Error(), "newer version") {
- switch name {
- case "test_go1.11_999b.a", "test_go1.11_999i.a":
- continue
- }
- // fall through
- }
- t.Errorf("import %q failed: %v", pkgpath, err)
- continue
- }
-
- // create file with corrupted export data
- // 1) read file
- data, err := ioutil.ReadFile(filepath.Join(dir, name))
- if err != nil {
- t.Fatal(err)
- }
- // 2) find export data
- i := bytes.Index(data, []byte("\n$$B\n")) + 5
- j := bytes.Index(data[i:], []byte("\n$$\n")) + i
- if i < 0 || j < 0 || i > j {
- t.Fatalf("export data section not found (i = %d, j = %d)", i, j)
- }
- // 3) corrupt the data (increment every 7th byte)
- for k := j - 13; k >= i; k -= 7 {
- data[k]++
- }
- // 4) write the file
- pkgpath += "_corrupted"
- filename := filepath.Join(corruptdir, pkgpath) + ".a"
- ioutil.WriteFile(filename, data, 0666)
-
- // test that importing the corrupted file results in an error
- _, err = Import(make(map[string]*types.Package), pkgpath, corruptdir, nil)
- if err == nil {
- t.Errorf("import corrupted %q succeeded", pkgpath)
- } else if msg := err.Error(); !strings.Contains(msg, "version skew") {
- t.Errorf("import %q error incorrect (%s)", pkgpath, msg)
- }
- }
-}
-
-func TestImportStdLib(t *testing.T) {
- // This package only handles gc export data.
- needsCompiler(t, "gc")
-
- dt := maxTime
- if testing.Short() && os.Getenv("GO_BUILDER_NAME") == "" {
- dt = 10 * time.Millisecond
- }
- nimports := testDir(t, "", time.Now().Add(dt)) // installed packages
- t.Logf("tested %d imports", nimports)
-}
-
-var importedObjectTests = []struct {
- name string
- want string
-}{
- // non-interfaces
- {"crypto.Hash", "type Hash uint"},
- {"go/ast.ObjKind", "type ObjKind int"},
- {"go/types.Qualifier", "type Qualifier func(*Package) string"},
- {"go/types.Comparable", "func Comparable(T Type) bool"},
- {"math.Pi", "const Pi untyped float"},
- {"math.Sin", "func Sin(x float64) float64"},
- {"go/ast.NotNilFilter", "func NotNilFilter(_ string, v reflect.Value) bool"},
- {"go/internal/gcimporter.FindPkg", "func FindPkg(path string, srcDir string) (filename string, id string)"},
-
- // interfaces
- {"context.Context", "type Context interface{Deadline() (deadline time.Time, ok bool); Done() <-chan struct{}; Err() error; Value(key any) any}"},
- {"crypto.Decrypter", "type Decrypter interface{Decrypt(rand io.Reader, msg []byte, opts DecrypterOpts) (plaintext []byte, err error); Public() PublicKey}"},
- {"encoding.BinaryMarshaler", "type BinaryMarshaler interface{MarshalBinary() (data []byte, err error)}"},
- {"io.Reader", "type Reader interface{Read(p []byte) (n int, err error)}"},
- {"io.ReadWriter", "type ReadWriter interface{Reader; Writer}"},
- {"go/ast.Node", "type Node interface{End() go/token.Pos; Pos() go/token.Pos}"},
- {"go/types.Type", "type Type interface{String() string; Underlying() Type}"},
-}
-
-// TODO(rsc): Delete this init func after x/tools no longer needs to test successfully with Go 1.17.
-func init() {
- if build.Default.ReleaseTags[len(build.Default.ReleaseTags)-1] <= "go1.17" {
- for i := range importedObjectTests {
- if importedObjectTests[i].name == "context.Context" {
- // Expand any to interface{}.
- importedObjectTests[i].want = "type Context interface{Deadline() (deadline time.Time, ok bool); Done() <-chan struct{}; Err() error; Value(key interface{}) interface{}}"
- }
- }
- }
-}
-
-func TestImportedTypes(t *testing.T) {
- testenv.NeedsGo1Point(t, 11)
- // This package only handles gc export data.
- needsCompiler(t, "gc")
-
- for _, test := range importedObjectTests {
- obj := importObject(t, test.name)
- if obj == nil {
- continue // error reported elsewhere
- }
- got := types.ObjectString(obj, types.RelativeTo(obj.Pkg()))
-
- // TODO(rsc): Delete this block once go.dev/cl/368254 lands.
- if got != test.want && test.want == strings.ReplaceAll(got, "interface{}", "any") {
- got = test.want
- }
-
- if got != test.want {
- t.Errorf("%s: got %q; want %q", test.name, got, test.want)
- }
-
- if named, _ := obj.Type().(*types.Named); named != nil {
- verifyInterfaceMethodRecvs(t, named, 0)
- }
- }
-}
-
-func TestImportedConsts(t *testing.T) {
- testenv.NeedsGo1Point(t, 11)
- tests := []struct {
- name string
- want constant.Kind
- }{
- {"math.Pi", constant.Float},
- {"math.MaxFloat64", constant.Float},
- {"math.MaxInt64", constant.Int},
- }
-
- for _, test := range tests {
- obj := importObject(t, test.name)
- if got := obj.(*types.Const).Val().Kind(); got != test.want {
- t.Errorf("%s: imported as constant.Kind(%v), want constant.Kind(%v)", test.name, got, test.want)
- }
- }
-}
-
-// importObject imports the object specified by a name of the form
-// <import path>.<object name>, e.g. go/types.Type.
-//
-// If any errors occur they are reported via t and the resulting object will
-// be nil.
-func importObject(t *testing.T, name string) types.Object {
- s := strings.Split(name, ".")
- if len(s) != 2 {
- t.Fatal("inconsistent test data")
- }
- importPath := s[0]
- objName := s[1]
-
- pkg, err := Import(make(map[string]*types.Package), importPath, ".", nil)
- if err != nil {
- t.Error(err)
- return nil
- }
-
- obj := pkg.Scope().Lookup(objName)
- if obj == nil {
- t.Errorf("%s: object not found", name)
- return nil
- }
- return obj
-}
-
-// verifyInterfaceMethodRecvs verifies that method receiver types
-// are named if the methods belong to a named interface type.
-func verifyInterfaceMethodRecvs(t *testing.T, named *types.Named, level int) {
-	// avoid endless recursion in case of an embedding bug that leads to a cycle
- if level > 10 {
- t.Errorf("%s: embeds itself", named)
- return
- }
-
- iface, _ := named.Underlying().(*types.Interface)
- if iface == nil {
- return // not an interface
- }
-
- // check explicitly declared methods
- for i := 0; i < iface.NumExplicitMethods(); i++ {
- m := iface.ExplicitMethod(i)
- recv := m.Type().(*types.Signature).Recv()
- if recv == nil {
- t.Errorf("%s: missing receiver type", m)
- continue
- }
- if recv.Type() != named {
- t.Errorf("%s: got recv type %s; want %s", m, recv.Type(), named)
- }
- }
-
- // check embedded interfaces (if they are named, too)
- for i := 0; i < iface.NumEmbeddeds(); i++ {
- // embedding of interfaces cannot have cycles; recursion will terminate
- if etype, _ := iface.EmbeddedType(i).(*types.Named); etype != nil {
- verifyInterfaceMethodRecvs(t, etype, level+1)
- }
- }
-}
-
-func TestIssue5815(t *testing.T) {
- // This package only handles gc export data.
- needsCompiler(t, "gc")
-
- pkg := importPkg(t, "strings", ".")
-
- scope := pkg.Scope()
- for _, name := range scope.Names() {
- obj := scope.Lookup(name)
- if obj.Pkg() == nil {
- t.Errorf("no pkg for %s", obj)
- }
- if tname, _ := obj.(*types.TypeName); tname != nil {
- named := tname.Type().(*types.Named)
- for i := 0; i < named.NumMethods(); i++ {
- m := named.Method(i)
- if m.Pkg() == nil {
- t.Errorf("no pkg for %s", m)
- }
- }
- }
- }
-}
-
-// Smoke test to ensure that imported methods get the correct package.
-func TestCorrectMethodPackage(t *testing.T) {
- // This package only handles gc export data.
- needsCompiler(t, "gc")
-
- imports := make(map[string]*types.Package)
- _, err := Import(imports, "net/http", ".", nil)
- if err != nil {
- t.Fatal(err)
- }
-
- mutex := imports["sync"].Scope().Lookup("Mutex").(*types.TypeName).Type()
- mset := types.NewMethodSet(types.NewPointer(mutex)) // methods of *sync.Mutex
- sel := mset.Lookup(nil, "Lock")
- lock := sel.Obj().(*types.Func)
- if got, want := lock.Pkg().Path(), "sync"; got != want {
- t.Errorf("got package path %q; want %q", got, want)
- }
-}
-
-func TestIssue13566(t *testing.T) {
- // This package only handles gc export data.
- needsCompiler(t, "gc")
-
- // On windows, we have to set the -D option for the compiler to avoid having a drive
- // letter and an illegal ':' in the import path - just skip it (see also issue #3483).
- if runtime.GOOS == "windows" {
- t.Skip("avoid dealing with relative paths/drive letters on windows")
- }
-
- tmpdir := mktmpdir(t)
- defer os.RemoveAll(tmpdir)
- testoutdir := filepath.Join(tmpdir, "testdata")
-
- // b.go needs to be compiled from the output directory so that the compiler can
- // find the compiled package a. We pass the full path to compile() so that we
- // don't have to copy the file to that directory.
- bpath, err := filepath.Abs(filepath.Join("testdata", "b.go"))
- if err != nil {
- t.Fatal(err)
- }
- compile(t, "testdata", "a.go", testoutdir)
- compile(t, testoutdir, bpath, testoutdir)
-
- // import must succeed (test for issue at hand)
- pkg := importPkg(t, "./testdata/b", tmpdir)
-
- // make sure all indirectly imported packages have names
- for _, imp := range pkg.Imports() {
- if imp.Name() == "" {
- t.Errorf("no name for %s package", imp.Path())
- }
- }
-}
-
-func TestIssue13898(t *testing.T) {
- // This package only handles gc export data.
- needsCompiler(t, "gc")
-
- // import go/internal/gcimporter which imports go/types partially
- imports := make(map[string]*types.Package)
- _, err := Import(imports, "go/internal/gcimporter", ".", nil)
- if err != nil {
- t.Fatal(err)
- }
-
- // look for go/types package
- var goTypesPkg *types.Package
- for path, pkg := range imports {
- if path == "go/types" {
- goTypesPkg = pkg
- break
- }
- }
- if goTypesPkg == nil {
- t.Fatal("go/types not found")
- }
-
- // look for go/types.Object type
- obj := lookupObj(t, goTypesPkg.Scope(), "Object")
- typ, ok := obj.Type().(*types.Named)
- if !ok {
- t.Fatalf("go/types.Object type is %v; wanted named type", typ)
- }
-
- // lookup go/types.Object.Pkg method
- m, index, indirect := types.LookupFieldOrMethod(typ, false, nil, "Pkg")
- if m == nil {
- t.Fatalf("go/types.Object.Pkg not found (index = %v, indirect = %v)", index, indirect)
- }
-
- // the method must belong to go/types
- if m.Pkg().Path() != "go/types" {
- t.Fatalf("found %v; want go/types", m.Pkg())
- }
-}
-
-func TestIssue15517(t *testing.T) {
- // This package only handles gc export data.
- needsCompiler(t, "gc")
-
- // On windows, we have to set the -D option for the compiler to avoid having a drive
- // letter and an illegal ':' in the import path - just skip it (see also issue #3483).
- if runtime.GOOS == "windows" {
- t.Skip("avoid dealing with relative paths/drive letters on windows")
- }
-
- tmpdir := mktmpdir(t)
- defer os.RemoveAll(tmpdir)
-
- compile(t, "testdata", "p.go", filepath.Join(tmpdir, "testdata"))
-
- // Multiple imports of p must succeed without redeclaration errors.
- // We use an import path that's not cleaned up so that the eventual
- // file path for the package is different from the package path; this
- // will expose the error if it is present.
- //
- // (Issue: Both the textual and the binary importer used the file path
- // of the package to be imported as key into the shared packages map.
- // However, the binary importer then used the package path to identify
- // the imported package to mark it as complete; effectively marking the
- // wrong package as complete. By using an "unclean" package path, the
- // file and package path are different, exposing the problem if present.
- // The same issue occurs with vendoring.)
- imports := make(map[string]*types.Package)
- for i := 0; i < 3; i++ {
- if _, err := Import(imports, "./././testdata/p", tmpdir, nil); err != nil {
- t.Fatal(err)
- }
- }
-}
-
-func TestIssue15920(t *testing.T) {
- // This package only handles gc export data.
- needsCompiler(t, "gc")
-
- // On windows, we have to set the -D option for the compiler to avoid having a drive
- // letter and an illegal ':' in the import path - just skip it (see also issue #3483).
- if runtime.GOOS == "windows" {
- t.Skip("avoid dealing with relative paths/drive letters on windows")
- }
-
- compileAndImportPkg(t, "issue15920")
-}
-
-func TestIssue20046(t *testing.T) {
- // This package only handles gc export data.
- needsCompiler(t, "gc")
-
- // On windows, we have to set the -D option for the compiler to avoid having a drive
- // letter and an illegal ':' in the import path - just skip it (see also issue #3483).
- if runtime.GOOS == "windows" {
- t.Skip("avoid dealing with relative paths/drive letters on windows")
- }
-
- // "./issue20046".V.M must exist
- pkg := compileAndImportPkg(t, "issue20046")
- obj := lookupObj(t, pkg.Scope(), "V")
- if m, index, indirect := types.LookupFieldOrMethod(obj.Type(), false, nil, "M"); m == nil {
- t.Fatalf("V.M not found (index = %v, indirect = %v)", index, indirect)
- }
-}
-
-func TestIssue25301(t *testing.T) {
- testenv.NeedsGo1Point(t, 11)
- // This package only handles gc export data.
- needsCompiler(t, "gc")
-
- // On windows, we have to set the -D option for the compiler to avoid having a drive
- // letter and an illegal ':' in the import path - just skip it (see also issue #3483).
- if runtime.GOOS == "windows" {
- t.Skip("avoid dealing with relative paths/drive letters on windows")
- }
-
- compileAndImportPkg(t, "issue25301")
-}
-
-func importPkg(t *testing.T, path, srcDir string) *types.Package {
- pkg, err := Import(make(map[string]*types.Package), path, srcDir, nil)
- if err != nil {
- t.Fatal(err)
- }
- return pkg
-}
-
-func compileAndImportPkg(t *testing.T, name string) *types.Package {
- tmpdir := mktmpdir(t)
- defer os.RemoveAll(tmpdir)
- compile(t, "testdata", name+".go", filepath.Join(tmpdir, "testdata"))
- return importPkg(t, "./testdata/"+name, tmpdir)
-}
-
-func lookupObj(t *testing.T, scope *types.Scope, name string) types.Object {
- if obj := scope.Lookup(name); obj != nil {
- return obj
- }
- t.Fatalf("%s not found", name)
- return nil
-}
diff --git a/go/internal/gcimporter/iexport.go b/go/internal/gcimporter/iexport.go
deleted file mode 100644
index 9a4ff329e..000000000
--- a/go/internal/gcimporter/iexport.go
+++ /dev/null
@@ -1,1010 +0,0 @@
-// Copyright 2019 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-// Indexed binary package export.
-// This file was derived from $GOROOT/src/cmd/compile/internal/gc/iexport.go;
-// see that file for specification of the format.
-
-package gcimporter
-
-import (
- "bytes"
- "encoding/binary"
- "fmt"
- "go/ast"
- "go/constant"
- "go/token"
- "go/types"
- "io"
- "math/big"
- "reflect"
- "sort"
- "strconv"
- "strings"
-
- "golang.org/x/tools/internal/typeparams"
-)
-
-// Current bundled export format version. Increase with each format change.
-// 0: initial implementation
-const bundleVersion = 0
-
-// IExportData writes indexed export data for pkg to out.
-//
-// If no file set is provided, position info will be missing.
-// The package path of the top-level package will not be recorded,
-// so that calls to IImportData can override with a provided package path.
-func IExportData(out io.Writer, fset *token.FileSet, pkg *types.Package) error {
- return iexportCommon(out, fset, false, iexportVersion, []*types.Package{pkg})
-}
-
-// IExportBundle writes an indexed export bundle for pkgs to out.
-func IExportBundle(out io.Writer, fset *token.FileSet, pkgs []*types.Package) error {
- return iexportCommon(out, fset, true, iexportVersion, pkgs)
-}
-
-func iexportCommon(out io.Writer, fset *token.FileSet, bundle bool, version int, pkgs []*types.Package) (err error) {
- if !debug {
- defer func() {
- if e := recover(); e != nil {
- if ierr, ok := e.(internalError); ok {
- err = ierr
- return
- }
- // Not an internal error; panic again.
- panic(e)
- }
- }()
- }
-
- p := iexporter{
- fset: fset,
- version: version,
- allPkgs: map[*types.Package]bool{},
- stringIndex: map[string]uint64{},
- declIndex: map[types.Object]uint64{},
- tparamNames: map[types.Object]string{},
- typIndex: map[types.Type]uint64{},
- }
- if !bundle {
- p.localpkg = pkgs[0]
- }
-
- for i, pt := range predeclared() {
- p.typIndex[pt] = uint64(i)
- }
- if len(p.typIndex) > predeclReserved {
- panic(internalErrorf("too many predeclared types: %d > %d", len(p.typIndex), predeclReserved))
- }
-
- // Initialize work queue with exported declarations.
- for _, pkg := range pkgs {
- scope := pkg.Scope()
- for _, name := range scope.Names() {
- if ast.IsExported(name) {
- p.pushDecl(scope.Lookup(name))
- }
- }
-
- if bundle {
- // Ensure pkg and its imports are included in the index.
- p.allPkgs[pkg] = true
- for _, imp := range pkg.Imports() {
- p.allPkgs[imp] = true
- }
- }
- }
-
- // Loop until no more work.
- for !p.declTodo.empty() {
- p.doDecl(p.declTodo.popHead())
- }
-
- // Append indices to data0 section.
- dataLen := uint64(p.data0.Len())
- w := p.newWriter()
- w.writeIndex(p.declIndex)
-
- if bundle {
- w.uint64(uint64(len(pkgs)))
- for _, pkg := range pkgs {
- w.pkg(pkg)
- imps := pkg.Imports()
- w.uint64(uint64(len(imps)))
- for _, imp := range imps {
- w.pkg(imp)
- }
- }
- }
- w.flush()
-
- // Assemble header.
- var hdr intWriter
- if bundle {
- hdr.uint64(bundleVersion)
- }
- hdr.uint64(uint64(p.version))
- hdr.uint64(uint64(p.strings.Len()))
- hdr.uint64(dataLen)
-
- // Flush output.
- io.Copy(out, &hdr)
- io.Copy(out, &p.strings)
- io.Copy(out, &p.data0)
-
- return nil
-}
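Editorial aside: the header assembled above is a sequence of uvarint fields (an optional bundle version, then the format version, the string-section length, and the data-section length), followed by the string and data sections. A hedged reader-side sketch, assuming the uvarint encoding used by intWriter:

func readHeader(exportData []byte) (version, strLen, dataLen uint64, err error) {
	r := bytes.NewReader(exportData) // non-bundle data: no bundle version field
	if version, err = binary.ReadUvarint(r); err != nil {
		return
	}
	if strLen, err = binary.ReadUvarint(r); err != nil {
		return
	}
	dataLen, err = binary.ReadUvarint(r)
	return
}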
-
-// writeIndex writes out the main object index, which is also read by
-// non-compiler tools and includes a complete package description
-// (i.e., name and height).
-func (w *exportWriter) writeIndex(index map[types.Object]uint64) {
- type pkgObj struct {
- obj types.Object
- name string // qualified name; differs from obj.Name for type params
- }
- // Build a map from packages to objects from that package.
- pkgObjs := map[*types.Package][]pkgObj{}
-
- // For the main index, make sure to include every package that
- // we reference, even if we're not exporting (or reexporting)
- // any symbols from it.
- if w.p.localpkg != nil {
- pkgObjs[w.p.localpkg] = nil
- }
- for pkg := range w.p.allPkgs {
- pkgObjs[pkg] = nil
- }
-
- for obj := range index {
- name := w.p.exportName(obj)
- pkgObjs[obj.Pkg()] = append(pkgObjs[obj.Pkg()], pkgObj{obj, name})
- }
-
- var pkgs []*types.Package
- for pkg, objs := range pkgObjs {
- pkgs = append(pkgs, pkg)
-
- sort.Slice(objs, func(i, j int) bool {
- return objs[i].name < objs[j].name
- })
- }
-
- sort.Slice(pkgs, func(i, j int) bool {
- return w.exportPath(pkgs[i]) < w.exportPath(pkgs[j])
- })
-
- w.uint64(uint64(len(pkgs)))
- for _, pkg := range pkgs {
- w.string(w.exportPath(pkg))
- w.string(pkg.Name())
- w.uint64(uint64(0)) // package height is not needed for go/types
-
- objs := pkgObjs[pkg]
- w.uint64(uint64(len(objs)))
- for _, obj := range objs {
- w.string(obj.name)
- w.uint64(index[obj.obj])
- }
- }
-}
-
-// exportName returns the 'exported' name of an object. It differs from
-// obj.Name() only for type parameters (see tparamExportName for details).
-func (p *iexporter) exportName(obj types.Object) (res string) {
- if name := p.tparamNames[obj]; name != "" {
- return name
- }
- return obj.Name()
-}
-
-type iexporter struct {
- fset *token.FileSet
- out *bytes.Buffer
- version int
-
- localpkg *types.Package
-
- // allPkgs tracks all packages that have been referenced by
- // the export data, so we can ensure to include them in the
- // main index.
- allPkgs map[*types.Package]bool
-
- declTodo objQueue
-
- strings intWriter
- stringIndex map[string]uint64
-
- data0 intWriter
- declIndex map[types.Object]uint64
- tparamNames map[types.Object]string // typeparam->exported name
- typIndex map[types.Type]uint64
-
- indent int // for tracing support
-}
-
-func (p *iexporter) trace(format string, args ...interface{}) {
- if !trace {
- // Call sites should also be guarded, but having this check here allows
- // easily enabling/disabling debug trace statements.
- return
- }
- fmt.Printf(strings.Repeat("..", p.indent)+format+"\n", args...)
-}
-
-// stringOff returns the offset of s within the string section.
-// If not already present, it's added to the end.
-func (p *iexporter) stringOff(s string) uint64 {
- off, ok := p.stringIndex[s]
- if !ok {
- off = uint64(p.strings.Len())
- p.stringIndex[s] = off
-
- p.strings.uint64(uint64(len(s)))
- p.strings.WriteString(s)
- }
- return off
-}
-
-// pushDecl adds n to the declaration work queue, if not already present.
-func (p *iexporter) pushDecl(obj types.Object) {
- // Package unsafe is known to the compiler and predeclared.
-	// Callers should not ask us to export it.
- if obj.Pkg() == types.Unsafe {
- panic("cannot export package unsafe")
- }
-
- if _, ok := p.declIndex[obj]; ok {
- return
- }
-
- p.declIndex[obj] = ^uint64(0) // mark obj present in work queue
- p.declTodo.pushTail(obj)
-}
-
-// exportWriter handles writing out individual data section chunks.
-type exportWriter struct {
- p *iexporter
-
- data intWriter
- currPkg *types.Package
- prevFile string
- prevLine int64
- prevColumn int64
-}
-
-func (w *exportWriter) exportPath(pkg *types.Package) string {
- if pkg == w.p.localpkg {
- return ""
- }
- return pkg.Path()
-}
-
-func (p *iexporter) doDecl(obj types.Object) {
- if trace {
- p.trace("exporting decl %v (%T)", obj, obj)
- p.indent++
- defer func() {
- p.indent--
- p.trace("=> %s", obj)
- }()
- }
- w := p.newWriter()
- w.setPkg(obj.Pkg(), false)
-
- switch obj := obj.(type) {
- case *types.Var:
- w.tag('V')
- w.pos(obj.Pos())
- w.typ(obj.Type(), obj.Pkg())
-
- case *types.Func:
- sig, _ := obj.Type().(*types.Signature)
- if sig.Recv() != nil {
- panic(internalErrorf("unexpected method: %v", sig))
- }
-
- // Function.
- if typeparams.ForSignature(sig).Len() == 0 {
- w.tag('F')
- } else {
- w.tag('G')
- }
- w.pos(obj.Pos())
- // The tparam list of the function type is the declaration of the type
- // params. So, write out the type params right now. Then those type params
- // will be referenced via their type offset (via typOff) in all other
- // places in the signature and function where they are used.
- //
-		// While exporting the type parameters, tparamList computes and records
-		// their export names, so that they can later be used when writing the index.
- if tparams := typeparams.ForSignature(sig); tparams.Len() > 0 {
- w.tparamList(obj.Name(), tparams, obj.Pkg())
- }
- w.signature(sig)
-
- case *types.Const:
- w.tag('C')
- w.pos(obj.Pos())
- w.value(obj.Type(), obj.Val())
-
- case *types.TypeName:
- t := obj.Type()
-
- if tparam, ok := t.(*typeparams.TypeParam); ok {
- w.tag('P')
- w.pos(obj.Pos())
- constraint := tparam.Constraint()
- if p.version >= iexportVersionGo1_18 {
- implicit := false
- if iface, _ := constraint.(*types.Interface); iface != nil {
- implicit = typeparams.IsImplicit(iface)
- }
- w.bool(implicit)
- }
- w.typ(constraint, obj.Pkg())
- break
- }
-
- if obj.IsAlias() {
- w.tag('A')
- w.pos(obj.Pos())
- w.typ(t, obj.Pkg())
- break
- }
-
- // Defined type.
- named, ok := t.(*types.Named)
- if !ok {
- panic(internalErrorf("%s is not a defined type", t))
- }
-
- if typeparams.ForNamed(named).Len() == 0 {
- w.tag('T')
- } else {
- w.tag('U')
- }
- w.pos(obj.Pos())
-
- if typeparams.ForNamed(named).Len() > 0 {
-			// While exporting the type parameters, tparamList computes and records
-			// their export names, so that they can later be used when writing the index.
- w.tparamList(obj.Name(), typeparams.ForNamed(named), obj.Pkg())
- }
-
- underlying := obj.Type().Underlying()
- w.typ(underlying, obj.Pkg())
-
- if types.IsInterface(t) {
- break
- }
-
- n := named.NumMethods()
- w.uint64(uint64(n))
- for i := 0; i < n; i++ {
- m := named.Method(i)
- w.pos(m.Pos())
- w.string(m.Name())
- sig, _ := m.Type().(*types.Signature)
-
- // Receiver type parameters are type arguments of the receiver type, so
-			// their names must be qualified before exporting recv.
- if rparams := typeparams.RecvTypeParams(sig); rparams.Len() > 0 {
- prefix := obj.Name() + "." + m.Name()
- for i := 0; i < rparams.Len(); i++ {
- rparam := rparams.At(i)
- name := tparamExportName(prefix, rparam)
- w.p.tparamNames[rparam.Obj()] = name
- }
- }
- w.param(sig.Recv())
- w.signature(sig)
- }
-
- default:
- panic(internalErrorf("unexpected object: %v", obj))
- }
-
- p.declIndex[obj] = w.flush()
-}
-
-func (w *exportWriter) tag(tag byte) {
- w.data.WriteByte(tag)
-}
-
-func (w *exportWriter) pos(pos token.Pos) {
- if w.p.version >= iexportVersionPosCol {
- w.posV1(pos)
- } else {
- w.posV0(pos)
- }
-}
-
-func (w *exportWriter) posV1(pos token.Pos) {
- if w.p.fset == nil {
- w.int64(0)
- return
- }
-
- p := w.p.fset.Position(pos)
- file := p.Filename
- line := int64(p.Line)
- column := int64(p.Column)
-
- deltaColumn := (column - w.prevColumn) << 1
- deltaLine := (line - w.prevLine) << 1
-
- if file != w.prevFile {
- deltaLine |= 1
- }
- if deltaLine != 0 {
- deltaColumn |= 1
- }
-
- w.int64(deltaColumn)
- if deltaColumn&1 != 0 {
- w.int64(deltaLine)
- if deltaLine&1 != 0 {
- w.string(file)
- }
- }
-
- w.prevFile = file
- w.prevLine = line
- w.prevColumn = column
-}
-
-func (w *exportWriter) posV0(pos token.Pos) {
- if w.p.fset == nil {
- w.int64(0)
- return
- }
-
- p := w.p.fset.Position(pos)
- file := p.Filename
- line := int64(p.Line)
-
- // When file is the same as the last position (common case),
- // we can save a few bytes by delta encoding just the line
- // number.
- //
- // Note: Because data objects may be read out of order (or not
- // at all), we can only apply delta encoding within a single
- // object. This is handled implicitly by tracking prevFile and
- // prevLine as fields of exportWriter.
-
- if file == w.prevFile {
- delta := line - w.prevLine
- w.int64(delta)
- if delta == deltaNewFile {
- w.int64(-1)
- }
- } else {
- w.int64(deltaNewFile)
- w.int64(line) // line >= 0
- w.string(file)
- w.prevFile = file
- }
- w.prevLine = line
-}
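
The two position encoders above trade a little complexity for compact output: posV1 stores column and line deltas whose low bits flag that the next, coarser field also changed, while posV0 falls back to deltaNewFile when the file changes. Here is a standalone sketch of the posV1 scheme applied to plain values; the pos type and encode function are illustrative helpers, not this package's API.

package main

import "fmt"

type pos struct {
	file      string
	line, col int64
}

// encode emits the column/line/file deltas for a sequence of positions,
// following the same rules as posV1 above.
func encode(positions []pos) []interface{} {
	var out []interface{}
	var prevFile string
	var prevLine, prevCol int64
	for _, p := range positions {
		deltaCol := (p.col - prevCol) << 1
		deltaLine := (p.line - prevLine) << 1
		if p.file != prevFile {
			deltaLine |= 1
		}
		if deltaLine != 0 {
			deltaCol |= 1
		}
		out = append(out, deltaCol)
		if deltaCol&1 != 0 {
			out = append(out, deltaLine)
			if deltaLine&1 != 0 {
				out = append(out, p.file)
			}
		}
		prevFile, prevLine, prevCol = p.file, p.line, p.col
	}
	return out
}

func main() {
	fmt.Println(encode([]pos{
		{"a.go", 10, 5}, // new file: emits column delta, line delta, and file name
		{"a.go", 10, 9}, // same file and line: just a column delta
		{"a.go", 12, 1}, // same file, new line: column and line deltas
	}))
	// Output: [11 21 a.go 8 -15 4]
}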
-
-func (w *exportWriter) pkg(pkg *types.Package) {
- // Ensure any referenced packages are declared in the main index.
- w.p.allPkgs[pkg] = true
-
- w.string(w.exportPath(pkg))
-}
-
-func (w *exportWriter) qualifiedIdent(obj types.Object) {
- name := w.p.exportName(obj)
-
- // Ensure any referenced declarations are written out too.
- w.p.pushDecl(obj)
- w.string(name)
- w.pkg(obj.Pkg())
-}
-
-func (w *exportWriter) typ(t types.Type, pkg *types.Package) {
- w.data.uint64(w.p.typOff(t, pkg))
-}
-
-func (p *iexporter) newWriter() *exportWriter {
- return &exportWriter{p: p}
-}
-
-func (w *exportWriter) flush() uint64 {
- off := uint64(w.p.data0.Len())
- io.Copy(&w.p.data0, &w.data)
- return off
-}
-
-func (p *iexporter) typOff(t types.Type, pkg *types.Package) uint64 {
- off, ok := p.typIndex[t]
- if !ok {
- w := p.newWriter()
- w.doTyp(t, pkg)
- off = predeclReserved + w.flush()
- p.typIndex[t] = off
- }
- return off
-}
-
-func (w *exportWriter) startType(k itag) {
- w.data.uint64(uint64(k))
-}
-
-func (w *exportWriter) doTyp(t types.Type, pkg *types.Package) {
- if trace {
- w.p.trace("exporting type %s (%T)", t, t)
- w.p.indent++
- defer func() {
- w.p.indent--
- w.p.trace("=> %s", t)
- }()
- }
- switch t := t.(type) {
- case *types.Named:
- if targs := typeparams.NamedTypeArgs(t); targs.Len() > 0 {
- w.startType(instanceType)
- // TODO(rfindley): investigate if this position is correct, and if it
- // matters.
- w.pos(t.Obj().Pos())
- w.typeList(targs, pkg)
- w.typ(typeparams.NamedTypeOrigin(t), pkg)
- return
- }
- w.startType(definedType)
- w.qualifiedIdent(t.Obj())
-
- case *typeparams.TypeParam:
- w.startType(typeParamType)
- w.qualifiedIdent(t.Obj())
-
- case *types.Pointer:
- w.startType(pointerType)
- w.typ(t.Elem(), pkg)
-
- case *types.Slice:
- w.startType(sliceType)
- w.typ(t.Elem(), pkg)
-
- case *types.Array:
- w.startType(arrayType)
- w.uint64(uint64(t.Len()))
- w.typ(t.Elem(), pkg)
-
- case *types.Chan:
- w.startType(chanType)
- // 1 RecvOnly; 2 SendOnly; 3 SendRecv
- var dir uint64
- switch t.Dir() {
- case types.RecvOnly:
- dir = 1
- case types.SendOnly:
- dir = 2
- case types.SendRecv:
- dir = 3
- }
- w.uint64(dir)
- w.typ(t.Elem(), pkg)
-
- case *types.Map:
- w.startType(mapType)
- w.typ(t.Key(), pkg)
- w.typ(t.Elem(), pkg)
-
- case *types.Signature:
- w.startType(signatureType)
- w.setPkg(pkg, true)
- w.signature(t)
-
- case *types.Struct:
- w.startType(structType)
- w.setPkg(pkg, true)
-
- n := t.NumFields()
- w.uint64(uint64(n))
- for i := 0; i < n; i++ {
- f := t.Field(i)
- w.pos(f.Pos())
- w.string(f.Name())
- w.typ(f.Type(), pkg)
- w.bool(f.Anonymous())
- w.string(t.Tag(i)) // note (or tag)
- }
-
- case *types.Interface:
- w.startType(interfaceType)
- w.setPkg(pkg, true)
-
- n := t.NumEmbeddeds()
- w.uint64(uint64(n))
- for i := 0; i < n; i++ {
- ft := t.EmbeddedType(i)
- tPkg := pkg
- if named, _ := ft.(*types.Named); named != nil {
- w.pos(named.Obj().Pos())
- } else {
- w.pos(token.NoPos)
- }
- w.typ(ft, tPkg)
- }
-
- n = t.NumExplicitMethods()
- w.uint64(uint64(n))
- for i := 0; i < n; i++ {
- m := t.ExplicitMethod(i)
- w.pos(m.Pos())
- w.string(m.Name())
- sig, _ := m.Type().(*types.Signature)
- w.signature(sig)
- }
-
- case *typeparams.Union:
- w.startType(unionType)
- nt := t.Len()
- w.uint64(uint64(nt))
- for i := 0; i < nt; i++ {
- term := t.Term(i)
- w.bool(term.Tilde())
- w.typ(term.Type(), pkg)
- }
-
- default:
- panic(internalErrorf("unexpected type: %v, %v", t, reflect.TypeOf(t)))
- }
-}
-
-func (w *exportWriter) setPkg(pkg *types.Package, write bool) {
- if write {
- w.pkg(pkg)
- }
-
- w.currPkg = pkg
-}
-
-func (w *exportWriter) signature(sig *types.Signature) {
- w.paramList(sig.Params())
- w.paramList(sig.Results())
- if sig.Params().Len() > 0 {
- w.bool(sig.Variadic())
- }
-}
-
-func (w *exportWriter) typeList(ts *typeparams.TypeList, pkg *types.Package) {
- w.uint64(uint64(ts.Len()))
- for i := 0; i < ts.Len(); i++ {
- w.typ(ts.At(i), pkg)
- }
-}
-
-func (w *exportWriter) tparamList(prefix string, list *typeparams.TypeParamList, pkg *types.Package) {
- ll := uint64(list.Len())
- w.uint64(ll)
- for i := 0; i < list.Len(); i++ {
- tparam := list.At(i)
- // Set the type parameter exportName before exporting its type.
- exportName := tparamExportName(prefix, tparam)
- w.p.tparamNames[tparam.Obj()] = exportName
- w.typ(list.At(i), pkg)
- }
-}
-
-const blankMarker = "$"
-
-// tparamExportName returns the 'exported' name of a type parameter, which
-// differs from its actual object name: it is prefixed with a qualifier, and
-// blank type parameter names are disambiguated by their index in the type
-// parameter list.
-func tparamExportName(prefix string, tparam *typeparams.TypeParam) string {
- assert(prefix != "")
- name := tparam.Obj().Name()
- if name == "_" {
- name = blankMarker + strconv.Itoa(tparam.Index())
- }
- return prefix + "." + name
-}
-
-// tparamName returns the real name of a type parameter, after stripping its
-// qualifying prefix and reverting blank-name encoding. See tparamExportName
-// for details.
-func tparamName(exportName string) string {
- // Remove the "path" from the type param name that makes it unique.
- ix := strings.LastIndex(exportName, ".")
- if ix < 0 {
- errorf("malformed type parameter export name %s: missing prefix", exportName)
- }
- name := exportName[ix+1:]
- if strings.HasPrefix(name, blankMarker) {
- return "_"
- }
- return name
-}
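
tparamExportName and tparamName form a pair: the exporter qualifies every type parameter name with its owner ("Type" or "Type.method"), encoding blank names as "$index", and the importer strips that qualifier back off. A self-contained sketch of the round trip follows; exportName and realName are illustrative helpers, not this package's API.

package main

import (
	"fmt"
	"strconv"
	"strings"
)

const blankMarker = "$"

// exportName qualifies a type parameter name with its owner prefix,
// replacing a blank name with its index, as tparamExportName does above.
func exportName(prefix, name string, index int) string {
	if name == "_" {
		name = blankMarker + strconv.Itoa(index)
	}
	return prefix + "." + name
}

// realName strips the qualifying prefix and reverts the blank-name
// encoding, as tparamName does above.
func realName(exportName string) string {
	name := exportName[strings.LastIndex(exportName, ".")+1:]
	if strings.HasPrefix(name, blankMarker) {
		return "_"
	}
	return name
}

func main() {
	fmt.Println(exportName("Pair", "K", 0))     // "Pair.K"
	fmt.Println(exportName("Pair.Get", "_", 1)) // "Pair.Get.$1"
	fmt.Println(realName("Pair.Get.$1"))        // "_"
}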
-
-func (w *exportWriter) paramList(tup *types.Tuple) {
- n := tup.Len()
- w.uint64(uint64(n))
- for i := 0; i < n; i++ {
- w.param(tup.At(i))
- }
-}
-
-func (w *exportWriter) param(obj types.Object) {
- w.pos(obj.Pos())
- w.localIdent(obj)
- w.typ(obj.Type(), obj.Pkg())
-}
-
-func (w *exportWriter) value(typ types.Type, v constant.Value) {
- w.typ(typ, nil)
- if w.p.version >= iexportVersionGo1_18 {
- w.int64(int64(v.Kind()))
- }
-
- switch b := typ.Underlying().(*types.Basic); b.Info() & types.IsConstType {
- case types.IsBoolean:
- w.bool(constant.BoolVal(v))
- case types.IsInteger:
- var i big.Int
- if i64, exact := constant.Int64Val(v); exact {
- i.SetInt64(i64)
- } else if ui64, exact := constant.Uint64Val(v); exact {
- i.SetUint64(ui64)
- } else {
- i.SetString(v.ExactString(), 10)
- }
- w.mpint(&i, typ)
- case types.IsFloat:
- f := constantToFloat(v)
- w.mpfloat(f, typ)
- case types.IsComplex:
- w.mpfloat(constantToFloat(constant.Real(v)), typ)
- w.mpfloat(constantToFloat(constant.Imag(v)), typ)
- case types.IsString:
- w.string(constant.StringVal(v))
- default:
- if b.Kind() == types.Invalid {
- // package contains type errors
- break
- }
- panic(internalErrorf("unexpected type %v (%v)", typ, typ.Underlying()))
- }
-}
-
-// constantToFloat converts a constant.Value with kind constant.Float to a
-// big.Float.
-func constantToFloat(x constant.Value) *big.Float {
- x = constant.ToFloat(x)
- // Use the same floating-point precision (512) as cmd/compile
- // (see Mpprec in cmd/compile/internal/gc/mpfloat.go).
- const mpprec = 512
- var f big.Float
- f.SetPrec(mpprec)
- if v, exact := constant.Float64Val(x); exact {
- // float64
- f.SetFloat64(v)
- } else if num, denom := constant.Num(x), constant.Denom(x); num.Kind() == constant.Int {
- // TODO(gri): add big.Rat accessor to constant.Value.
- n := valueToRat(num)
- d := valueToRat(denom)
- f.SetRat(n.Quo(n, d))
- } else {
- // Value too large to represent as a fraction => inaccessible.
- // TODO(gri): add big.Float accessor to constant.Value.
- _, ok := f.SetString(x.ExactString())
- assert(ok)
- }
- return &f
-}
-
-// mpint exports a multi-precision integer.
-//
-// For unsigned types, small values are written out as a single
-// byte. Larger values are written out as a length-prefixed big-endian
-// byte string, where the length prefix is encoded as its complement.
-// For example, bytes 0, 1, and 2 directly represent the integer
-// values 0, 1, and 2; while bytes 255, 254, and 253 indicate that a 1-,
-// 2-, or 3-byte big-endian byte string follows.
-//
-// The encoding for signed types uses the same general approach as for
-// unsigned types, except small values use zig-zag encoding and the
-// bottom bit of the length prefix byte for large values is reserved as
-// a sign bit.
-//
-// The exact boundary between small and large encodings varies
-// according to the maximum number of bytes needed to encode a value
-// of type typ. As a special case, 8-bit types are always encoded as a
-// single byte.
-//
-// TODO(mdempsky): Is this level of complexity really worthwhile?
-func (w *exportWriter) mpint(x *big.Int, typ types.Type) {
- basic, ok := typ.Underlying().(*types.Basic)
- if !ok {
- panic(internalErrorf("unexpected type %v (%T)", typ.Underlying(), typ.Underlying()))
- }
-
- signed, maxBytes := intSize(basic)
-
- negative := x.Sign() < 0
- if !signed && negative {
- panic(internalErrorf("negative unsigned integer; type %v, value %v", typ, x))
- }
-
- b := x.Bytes()
- if len(b) > 0 && b[0] == 0 {
- panic(internalErrorf("leading zeros"))
- }
- if uint(len(b)) > maxBytes {
- panic(internalErrorf("bad mpint length: %d > %d (type %v, value %v)", len(b), maxBytes, typ, x))
- }
-
- maxSmall := 256 - maxBytes
- if signed {
- maxSmall = 256 - 2*maxBytes
- }
- if maxBytes == 1 {
- maxSmall = 256
- }
-
- // Check if x can use small value encoding.
- if len(b) <= 1 {
- var ux uint
- if len(b) == 1 {
- ux = uint(b[0])
- }
- if signed {
- ux <<= 1
- if negative {
- ux--
- }
- }
- if ux < maxSmall {
- w.data.WriteByte(byte(ux))
- return
- }
- }
-
- n := 256 - uint(len(b))
- if signed {
- n = 256 - 2*uint(len(b))
- if negative {
- n |= 1
- }
- }
- if n < maxSmall || n >= 256 {
- panic(internalErrorf("encoding mistake: %d, %v, %v => %d", len(b), signed, negative, n))
- }
-
- w.data.WriteByte(byte(n))
- w.data.Write(b)
-}
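
For a concrete feel for the small/large boundary described above: an int64 constant has maxBytes = 8, so maxSmall = 256 - 16 = 240, and only values whose zig-zag code is below 240 fit in a single byte. The following standalone sketch covers just that single-byte path; smallByte is an illustrative helper, not part of this package.

package main

import "fmt"

// smallByte returns the single-byte encoding of x for a type needing at most
// maxBytes bytes, or ok=false if x must use the length-prefixed large form.
// Signed values are zig-zag encoded (0, -1, 1, -2, ... -> 0, 1, 2, 3, ...).
func smallByte(x int64, signed bool, maxBytes uint) (b byte, ok bool) {
	maxSmall := uint64(256 - maxBytes)
	if signed {
		maxSmall = uint64(256 - 2*maxBytes)
	}
	if maxBytes == 1 {
		maxSmall = 256
	}
	ux := uint64(x)
	if x < 0 {
		ux = uint64(-x)
	}
	if signed {
		ux <<= 1
		if x < 0 {
			ux--
		}
	}
	if ux < maxSmall {
		return byte(ux), true
	}
	return 0, false
}

func main() {
	for _, x := range []int64{0, -1, 1, 119, -120, 120} {
		b, ok := smallByte(x, true, 8) // int64: signed, maxBytes = 8
		fmt.Printf("%4d -> byte %3d, small=%v\n", x, b, ok)
	}
	// 120 zig-zags to 240, so it is the first int64 value needing the large form.
}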
-
-// mpfloat exports a multi-precision floating point number.
-//
-// The number's value is decomposed into mantissa × 2**exponent, where
-// mantissa is an integer. The value is written out as mantissa (as a
-// multi-precision integer) and then the exponent, except exponent is
-// omitted if mantissa is zero.
-func (w *exportWriter) mpfloat(f *big.Float, typ types.Type) {
- if f.IsInf() {
- panic("infinite constant")
- }
-
- // Break into f = mant × 2**exp, with 0.5 <= mant < 1.
- var mant big.Float
- exp := int64(f.MantExp(&mant))
-
- // Scale so that mant is an integer.
- prec := mant.MinPrec()
- mant.SetMantExp(&mant, int(prec))
- exp -= int64(prec)
-
- manti, acc := mant.Int(nil)
- if acc != big.Exact {
- panic(internalErrorf("mantissa scaling failed for %f (%s)", f, acc))
- }
- w.mpint(manti, typ)
- if manti.Sign() != 0 {
- w.int64(exp)
- }
-}
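
To see the mantissa * 2**exponent decomposition on a concrete value: 6.25 becomes 25 * 2**-2. Below is a standalone sketch of the same big.Float steps used above; decompose is an illustrative helper, and the real writer then emits the mantissa via mpint and the exponent as a varint.

package main

import (
	"fmt"
	"math/big"
)

// decompose splits f into an integer mantissa and a power-of-two exponent
// such that f == mant * 2**exp, following the same steps as mpfloat above.
func decompose(f *big.Float) (mant *big.Int, exp int64) {
	var m big.Float
	exp = int64(f.MantExp(&m)) // f = m * 2**exp with 0.5 <= |m| < 1
	prec := m.MinPrec()        // bits needed to represent m exactly
	m.SetMantExp(&m, int(prec))
	exp -= int64(prec)
	mant, _ = m.Int(nil) // exact: m is now an integer
	return mant, exp
}

func main() {
	f := new(big.Float).SetPrec(512).SetFloat64(6.25)
	mant, exp := decompose(f)
	fmt.Printf("%v = %v * 2**%d\n", f, mant, exp) // 6.25 = 25 * 2**-2
}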
-
-func (w *exportWriter) bool(b bool) bool {
- var x uint64
- if b {
- x = 1
- }
- w.uint64(x)
- return b
-}
-
-func (w *exportWriter) int64(x int64) { w.data.int64(x) }
-func (w *exportWriter) uint64(x uint64) { w.data.uint64(x) }
-func (w *exportWriter) string(s string) { w.uint64(w.p.stringOff(s)) }
-
-func (w *exportWriter) localIdent(obj types.Object) {
- // Anonymous parameters.
- if obj == nil {
- w.string("")
- return
- }
-
- name := obj.Name()
- if name == "_" {
- w.string("_")
- return
- }
-
- w.string(name)
-}
-
-type intWriter struct {
- bytes.Buffer
-}
-
-func (w *intWriter) int64(x int64) {
- var buf [binary.MaxVarintLen64]byte
- n := binary.PutVarint(buf[:], x)
- w.Write(buf[:n])
-}
-
-func (w *intWriter) uint64(x uint64) {
- var buf [binary.MaxVarintLen64]byte
- n := binary.PutUvarint(buf[:], x)
- w.Write(buf[:n])
-}
-
-func assert(cond bool) {
- if !cond {
- panic("internal error: assertion failed")
- }
-}
-
-// The below is copied from go/src/cmd/compile/internal/gc/syntax.go.
-
-// objQueue is a FIFO queue of types.Object. The zero value of objQueue is
-// a ready-to-use empty queue.
-type objQueue struct {
- ring []types.Object
- head, tail int
-}
-
-// empty reports whether q contains no objects.
-func (q *objQueue) empty() bool {
- return q.head == q.tail
-}
-
-// pushTail appends n to the tail of the queue.
-func (q *objQueue) pushTail(obj types.Object) {
- if len(q.ring) == 0 {
- q.ring = make([]types.Object, 16)
- } else if q.head+len(q.ring) == q.tail {
- // Grow the ring.
- nring := make([]types.Object, len(q.ring)*2)
- // Copy the old elements.
- part := q.ring[q.head%len(q.ring):]
- if q.tail-q.head <= len(part) {
- part = part[:q.tail-q.head]
- copy(nring, part)
- } else {
- pos := copy(nring, part)
- copy(nring[pos:], q.ring[:q.tail%len(q.ring)])
- }
- q.ring, q.head, q.tail = nring, 0, q.tail-q.head
- }
-
- q.ring[q.tail%len(q.ring)] = obj
- q.tail++
-}
-
-// popHead pops an object from the head of the queue. It panics if q is empty.
-func (q *objQueue) popHead() types.Object {
- if q.empty() {
- panic("dequeue empty")
- }
- obj := q.ring[q.head%len(q.ring)]
- q.head++
- return obj
-}
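
objQueue drives the export worklist at the top of this file: pushDecl marks an object as queued (by claiming its declIndex slot) and appends it, and the main loop drains the queue in FIFO order, so each declaration is written exactly once even when declarations discover one another. The same pattern in miniature, with strings standing in for types.Object and a plain slice standing in for the ring buffer (purely illustrative):

package main

import "fmt"

func main() {
	deps := map[string][]string{"A": {"B", "C"}, "B": {"C"}, "C": nil}

	seen := map[string]bool{}
	var queue []string
	push := func(name string) {
		if !seen[name] { // analogous to the declIndex presence check in pushDecl
			seen[name] = true
			queue = append(queue, name)
		}
	}

	push("A")
	for len(queue) > 0 {
		name := queue[0] // popHead
		queue = queue[1:]
		fmt.Println("export", name)
		for _, dep := range deps[name] {
			push(dep) // work discovered while exporting name
		}
	}
	// Output: export A, export B, export C - each exactly once, in FIFO order.
}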
diff --git a/go/internal/gcimporter/iexport_common_test.go b/go/internal/gcimporter/iexport_common_test.go
deleted file mode 100644
index abc6aa64b..000000000
--- a/go/internal/gcimporter/iexport_common_test.go
+++ /dev/null
@@ -1,16 +0,0 @@
-// Copyright 2021 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-package gcimporter
-
-// Temporarily expose version-related functionality so that we can test at
-// specific export data versions.
-
-var IExportCommon = iexportCommon
-
-const (
- IExportVersion = iexportVersion
- IExportVersionGenerics = iexportVersionGenerics
- IExportVersionGo1_18 = iexportVersionGo1_18
-)
diff --git a/go/internal/gcimporter/iexport_go118_test.go b/go/internal/gcimporter/iexport_go118_test.go
deleted file mode 100644
index 5dfa2580f..000000000
--- a/go/internal/gcimporter/iexport_go118_test.go
+++ /dev/null
@@ -1,254 +0,0 @@
-// Copyright 2021 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-//go:build go1.18
-// +build go1.18
-
-package gcimporter_test
-
-import (
- "bytes"
- "fmt"
- "go/ast"
- "go/importer"
- "go/parser"
- "go/token"
- "go/types"
- "os"
- "path/filepath"
- "runtime"
- "strings"
- "testing"
-
- "golang.org/x/tools/go/internal/gcimporter"
-)
-
-// TODO(rfindley): migrate this to testdata, as has been done in the standard library.
-func TestGenericExport(t *testing.T) {
- const src = `
-package generic
-
-type Any any
-
-type T[A, B any] struct { Left A; Right B }
-
-func (T[P, Q]) m() {}
-
-var X T[int, string] = T[int, string]{1, "hi"}
-
-func ToInt[P interface{ ~int }](p P) int { return int(p) }
-
-var IntID = ToInt[int]
-
-type G[C comparable] int
-
-func ImplicitFunc[T ~int]() {}
-
-type ImplicitType[T ~int] int
-
-// Exercise constant import/export
-const C1 = 42
-const C2 int = 42
-const C3 float64 = 42
-
-type Constraint[T any] interface {
- m(T)
-}
-
-// TODO(rfindley): revert to multiple blanks once the restriction on multiple
-// blanks is removed from the type checker.
-// type Blanks[_ any, _ Constraint[int]] int
-// func (Blanks[_, _]) m() {}
-type Blanks[_ any] int
-func (Blanks[_]) m() {}
-`
- testExportSrc(t, []byte(src))
-}
-
-func testExportSrc(t *testing.T, src []byte) {
- // This package only handles gc export data.
- if runtime.Compiler != "gc" {
- t.Skipf("gc-built packages not available (compiler = %s)", runtime.Compiler)
- }
-
- fset := token.NewFileSet()
- f, err := parser.ParseFile(fset, "g.go", src, 0)
- if err != nil {
- t.Fatal(err)
- }
- conf := types.Config{
- Importer: importer.Default(),
- }
- pkg, err := conf.Check("", fset, []*ast.File{f}, nil)
- if err != nil {
- t.Fatal(err)
- }
-
- // export
- version := gcimporter.IExportVersion
- data, err := iexport(fset, version, pkg)
- if err != nil {
- t.Fatal(err)
- }
-
- testPkgData(t, fset, version, pkg, data)
-}
-
-func TestImportTypeparamTests(t *testing.T) {
- // Check go files in test/typeparam.
- rootDir := filepath.Join(runtime.GOROOT(), "test", "typeparam")
- list, err := os.ReadDir(rootDir)
- if err != nil {
- t.Fatal(err)
- }
-
- if isUnifiedBuilder() {
- t.Skip("unified export data format is currently unsupported")
- }
-
- for _, entry := range list {
- if entry.IsDir() || !strings.HasSuffix(entry.Name(), ".go") {
- // For now, only consider standalone go files.
- continue
- }
-
- t.Run(entry.Name(), func(t *testing.T) {
- filename := filepath.Join(rootDir, entry.Name())
- src, err := os.ReadFile(filename)
- if err != nil {
- t.Fatal(err)
- }
-
- if !bytes.HasPrefix(src, []byte("// run")) && !bytes.HasPrefix(src, []byte("// compile")) {
- // We're bypassing the logic of run.go here, so be conservative about
- // the files we consider in an attempt to make this test more robust to
- // changes in test/typeparams.
- t.Skipf("not detected as a run test")
- }
-
- testExportSrc(t, src)
- })
- }
-}
-
-func TestRecursiveExport_Issue51219(t *testing.T) {
- const srca = `
-package a
-
-type Interaction[DataT InteractionDataConstraint] struct {
-}
-
-type InteractionDataConstraint interface {
- []byte |
- UserCommandInteractionData
-}
-
-type UserCommandInteractionData struct {
- resolvedInteractionWithOptions
-}
-
-type resolvedInteractionWithOptions struct {
- Resolved Resolved
-}
-
-type Resolved struct {
- Users ResolvedData[User]
-}
-
-type ResolvedData[T ResolvedDataConstraint] map[uint64]T
-
-type ResolvedDataConstraint interface {
- User | Message
-}
-
-type User struct{}
-
-type Message struct {
- Interaction *Interaction[[]byte]
-}
-`
-
- const srcb = `
-package b
-
-import (
- "a"
-)
-
-// InteractionRequest is an incoming request Interaction
-type InteractionRequest[T a.InteractionDataConstraint] struct {
- a.Interaction[T]
-}
-`
-
- const srcp = `
-package p
-
-import (
- "b"
-)
-
-// ResponseWriterMock mocks corde's ResponseWriter interface
-type ResponseWriterMock struct {
- x b.InteractionRequest[[]byte]
-}
-`
-
- importer := &testImporter{
- src: map[string][]byte{
- "a": []byte(srca),
- "b": []byte(srcb),
- "p": []byte(srcp),
- },
- pkgs: make(map[string]*types.Package),
- }
- _, err := importer.Import("p")
- if err != nil {
- t.Fatal(err)
- }
-}
-
-// testImporter is a helper to test chains of imports using export data.
-type testImporter struct {
- src map[string][]byte // original source
- pkgs map[string]*types.Package // memoized imported packages
-}
-
-func (t *testImporter) Import(path string) (*types.Package, error) {
- if pkg, ok := t.pkgs[path]; ok {
- return pkg, nil
- }
- src, ok := t.src[path]
- if !ok {
- return nil, fmt.Errorf("unknown path %v", path)
- }
-
- // Type-check, but don't return this package directly.
- fset := token.NewFileSet()
- f, err := parser.ParseFile(fset, path+".go", src, 0)
- if err != nil {
- return nil, err
- }
- conf := types.Config{
- Importer: t,
- }
- pkg, err := conf.Check(path, fset, []*ast.File{f}, nil)
- if err != nil {
- return nil, err
- }
-
- // Export and import to get the package imported from export data.
- exportdata, err := iexport(fset, gcimporter.IExportVersion, pkg)
- if err != nil {
- return nil, err
- }
- imports := make(map[string]*types.Package)
- fset2 := token.NewFileSet()
- _, pkg2, err := gcimporter.IImportData(fset2, imports, exportdata, pkg.Path())
- if err != nil {
- return nil, err
- }
- t.pkgs[path] = pkg2
- return pkg2, nil
-}
diff --git a/go/internal/gcimporter/iexport_test.go b/go/internal/gcimporter/iexport_test.go
deleted file mode 100644
index f0e83e519..000000000
--- a/go/internal/gcimporter/iexport_test.go
+++ /dev/null
@@ -1,405 +0,0 @@
-// Copyright 2019 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-// This is a copy of bexport_test.go for iexport.go.
-
-//go:build go1.11
-// +build go1.11
-
-package gcimporter_test
-
-import (
- "bufio"
- "bytes"
- "fmt"
- "go/ast"
- "go/build"
- "go/constant"
- "go/parser"
- "go/token"
- "go/types"
- "io/ioutil"
- "math/big"
- "os"
- "reflect"
- "runtime"
- "sort"
- "strings"
- "testing"
-
- "golang.org/x/tools/go/ast/inspector"
- "golang.org/x/tools/go/buildutil"
- "golang.org/x/tools/go/internal/gcimporter"
- "golang.org/x/tools/go/loader"
- "golang.org/x/tools/internal/typeparams/genericfeatures"
-)
-
-func readExportFile(filename string) ([]byte, error) {
- f, err := os.Open(filename)
- if err != nil {
- return nil, err
- }
- defer f.Close()
-
- buf := bufio.NewReader(f)
- if _, _, err := gcimporter.FindExportData(buf); err != nil {
- return nil, err
- }
-
- if ch, err := buf.ReadByte(); err != nil {
- return nil, err
- } else if ch != 'i' {
- return nil, fmt.Errorf("unexpected byte: %v", ch)
- }
-
- return ioutil.ReadAll(buf)
-}
-
-func iexport(fset *token.FileSet, version int, pkg *types.Package) ([]byte, error) {
- var buf bytes.Buffer
- if err := gcimporter.IExportCommon(&buf, fset, false, version, []*types.Package{pkg}); err != nil {
- return nil, err
- }
- return buf.Bytes(), nil
-}
-
-// isUnifiedBuilder reports whether we are executing on a go builder that uses
-// unified export data.
-func isUnifiedBuilder() bool {
- return os.Getenv("GO_BUILDER_NAME") == "linux-amd64-unified"
-}
-
-const minStdlibPackages = 248
-
-func TestIExportData_stdlib(t *testing.T) {
- if runtime.Compiler == "gccgo" {
- t.Skip("gccgo standard library is inaccessible")
- }
- if runtime.GOOS == "android" {
- t.Skipf("incomplete std lib on %s", runtime.GOOS)
- }
- if isRace {
- t.Skipf("stdlib tests take too long in race mode and flake on builders")
- }
- if testing.Short() {
- t.Skip("skipping RAM hungry test in -short mode")
- }
-
- // Load, parse and type-check the program.
- ctxt := build.Default // copy
- ctxt.GOPATH = "" // disable GOPATH
- conf := loader.Config{
- Build: &ctxt,
- AllowErrors: true,
- TypeChecker: types.Config{
- Sizes: types.SizesFor(ctxt.Compiler, ctxt.GOARCH),
- Error: func(err error) { t.Log(err) },
- },
- }
- for _, path := range buildutil.AllPackages(conf.Build) {
- conf.Import(path)
- }
-
- // Create a package containing type and value errors to ensure
- // they are properly encoded/decoded.
- f, err := conf.ParseFile("haserrors/haserrors.go", `package haserrors
-const UnknownValue = "" + 0
-type UnknownType undefined
-`)
- if err != nil {
- t.Fatal(err)
- }
- conf.CreateFromFiles("haserrors", f)
-
- prog, err := conf.Load()
- if err != nil {
- t.Fatalf("Load failed: %v", err)
- }
-
- var sorted []*types.Package
- isUnified := isUnifiedBuilder()
- for pkg, info := range prog.AllPackages {
- // Temporarily skip packages that use generics on the unified builder, to
- // fix TryBots.
- //
- // TODO(#48595): fix this test with GOEXPERIMENT=unified.
- inspect := inspector.New(info.Files)
- features := genericfeatures.ForPackage(inspect, &info.Info)
- if isUnified && features != 0 {
- t.Logf("skipping package %q which uses generics", pkg.Path())
- continue
- }
- if info.Files != nil { // non-empty directory
- sorted = append(sorted, pkg)
- }
- }
- sort.Slice(sorted, func(i, j int) bool {
- return sorted[i].Path() < sorted[j].Path()
- })
-
- version := gcimporter.IExportVersion
- numPkgs := len(sorted)
- if want := minStdlibPackages; numPkgs < want {
- t.Errorf("Loaded only %d packages, want at least %d", numPkgs, want)
- }
-
- for _, pkg := range sorted {
- if exportdata, err := iexport(conf.Fset, version, pkg); err != nil {
- t.Error(err)
- } else {
- testPkgData(t, conf.Fset, version, pkg, exportdata)
- }
-
- if pkg.Name() == "main" || pkg.Name() == "haserrors" {
- // skip; no export data
- } else if bp, err := ctxt.Import(pkg.Path(), "", build.FindOnly); err != nil {
- t.Log("warning:", err)
- } else if exportdata, err := readExportFile(bp.PkgObj); err != nil {
- t.Log("warning:", err)
- } else {
- testPkgData(t, conf.Fset, version, pkg, exportdata)
- }
- }
-
- var bundle bytes.Buffer
- if err := gcimporter.IExportBundle(&bundle, conf.Fset, sorted); err != nil {
- t.Fatal(err)
- }
- fset2 := token.NewFileSet()
- imports := make(map[string]*types.Package)
- pkgs2, err := gcimporter.IImportBundle(fset2, imports, bundle.Bytes())
- if err != nil {
- t.Fatal(err)
- }
-
- for i, pkg := range sorted {
- testPkg(t, conf.Fset, version, pkg, fset2, pkgs2[i])
- }
-}
-
-func testPkgData(t *testing.T, fset *token.FileSet, version int, pkg *types.Package, exportdata []byte) {
- imports := make(map[string]*types.Package)
- fset2 := token.NewFileSet()
- _, pkg2, err := gcimporter.IImportData(fset2, imports, exportdata, pkg.Path())
- if err != nil {
- t.Errorf("IImportData(%s): %v", pkg.Path(), err)
- }
-
- testPkg(t, fset, version, pkg, fset2, pkg2)
-}
-
-func testPkg(t *testing.T, fset *token.FileSet, version int, pkg *types.Package, fset2 *token.FileSet, pkg2 *types.Package) {
- if _, err := iexport(fset2, version, pkg2); err != nil {
- t.Errorf("reexport %q: %v", pkg.Path(), err)
- }
-
- // Compare the packages' corresponding members.
- for _, name := range pkg.Scope().Names() {
- if !ast.IsExported(name) {
- continue
- }
- obj1 := pkg.Scope().Lookup(name)
- obj2 := pkg2.Scope().Lookup(name)
- if obj2 == nil {
- t.Errorf("%s.%s not found, want %s", pkg.Path(), name, obj1)
- continue
- }
-
- fl1 := fileLine(fset, obj1)
- fl2 := fileLine(fset2, obj2)
- if fl1 != fl2 {
- t.Errorf("%s.%s: got posn %s, want %s",
- pkg.Path(), name, fl2, fl1)
- }
-
- if err := cmpObj(obj1, obj2); err != nil {
- t.Errorf("%s.%s: %s\ngot: %s\nwant: %s",
- pkg.Path(), name, err, obj2, obj1)
- }
- }
-}
-
-// TestIExportData_long tests the position of an imported object declared in
-// a very long input file. Line numbers greater than maxlines are
-// reported as line 1, not garbage or token.NoPos.
-func TestIExportData_long(t *testing.T) {
- // parse and typecheck
- longFile := "package foo" + strings.Repeat("\n", 123456) + "var X int"
- fset1 := token.NewFileSet()
- f, err := parser.ParseFile(fset1, "foo.go", longFile, 0)
- if err != nil {
- t.Fatal(err)
- }
- var conf types.Config
- pkg, err := conf.Check("foo", fset1, []*ast.File{f}, nil)
- if err != nil {
- t.Fatal(err)
- }
-
- // export
- exportdata, err := iexport(fset1, gcimporter.IExportVersion, pkg)
- if err != nil {
- t.Fatal(err)
- }
-
- // import
- imports := make(map[string]*types.Package)
- fset2 := token.NewFileSet()
- _, pkg2, err := gcimporter.IImportData(fset2, imports, exportdata, pkg.Path())
- if err != nil {
- t.Fatalf("IImportData(%s): %v", pkg.Path(), err)
- }
-
- // compare
- posn1 := fset1.Position(pkg.Scope().Lookup("X").Pos())
- posn2 := fset2.Position(pkg2.Scope().Lookup("X").Pos())
- if want := "foo.go:1:1"; posn2.String() != want {
- t.Errorf("X position = %s, want %s (orig was %s)",
- posn2, want, posn1)
- }
-}
-
-func TestIExportData_typealiases(t *testing.T) {
- // parse and typecheck
- fset1 := token.NewFileSet()
- f, err := parser.ParseFile(fset1, "p.go", src, 0)
- if err != nil {
- t.Fatal(err)
- }
- var conf types.Config
- pkg1, err := conf.Check("p", fset1, []*ast.File{f}, nil)
- if err == nil {
-		// foo is undeclared in src; we should see an error
- t.Fatal("invalid source type-checked without error")
- }
- if pkg1 == nil {
- // despite incorrect src we should see a (partially) type-checked package
- t.Fatal("nil package returned")
- }
- checkPkg(t, pkg1, "export")
-
- // export
- // use a nil fileset here to confirm that it doesn't panic
- exportdata, err := iexport(nil, gcimporter.IExportVersion, pkg1)
- if err != nil {
- t.Fatal(err)
- }
-
- // import
- imports := make(map[string]*types.Package)
- fset2 := token.NewFileSet()
- _, pkg2, err := gcimporter.IImportData(fset2, imports, exportdata, pkg1.Path())
- if err != nil {
- t.Fatalf("IImportData(%s): %v", pkg1.Path(), err)
- }
- checkPkg(t, pkg2, "import")
-}
-
-// cmpObj reports how x and y differ. They are assumed to belong to different
-// universes so cannot be compared directly. It is an adapted version of
-// equalObj in bexport_test.go.
-func cmpObj(x, y types.Object) error {
- if reflect.TypeOf(x) != reflect.TypeOf(y) {
- return fmt.Errorf("%T vs %T", x, y)
- }
- xt := x.Type()
- yt := y.Type()
- switch x := x.(type) {
- case *types.Var, *types.Func:
- // ok
- case *types.Const:
- xval := x.Val()
- yval := y.(*types.Const).Val()
- equal := constant.Compare(xval, token.EQL, yval)
- if !equal {
- // try approx. comparison
- xkind := xval.Kind()
- ykind := yval.Kind()
- if xkind == constant.Complex || ykind == constant.Complex {
- equal = same(constant.Real(xval), constant.Real(yval)) &&
- same(constant.Imag(xval), constant.Imag(yval))
- } else if xkind == constant.Float || ykind == constant.Float {
- equal = same(xval, yval)
- } else if xkind == constant.Unknown && ykind == constant.Unknown {
- equal = true
- }
- }
- if !equal {
- return fmt.Errorf("unequal constants %s vs %s", xval, yval)
- }
- case *types.TypeName:
- if xalias, yalias := x.IsAlias(), y.(*types.TypeName).IsAlias(); xalias != yalias {
- return fmt.Errorf("mismatching IsAlias(): %s vs %s", x, y)
- }
- // equalType does not recurse into the underlying types of named types, so
- // we must pass the underlying type explicitly here. However, in doing this
- // we may skip checking the features of the named types themselves, in
- // situations where the type name is not referenced by the underlying or
- // any other top-level declarations. Therefore, we must explicitly compare
- // named types here, before passing their underlying types into equalType.
- xn, _ := xt.(*types.Named)
- yn, _ := yt.(*types.Named)
- if (xn == nil) != (yn == nil) {
- return fmt.Errorf("mismatching types: %T vs %T", xt, yt)
- }
- if xn != nil {
- if err := cmpNamed(xn, yn); err != nil {
- return err
- }
- }
- xt = xt.Underlying()
- yt = yt.Underlying()
- default:
- return fmt.Errorf("unexpected %T", x)
- }
- return equalType(xt, yt)
-}
-
-// Use the same floating-point precision (512) as cmd/compile
-// (see Mpprec in cmd/compile/internal/gc/mpfloat.go).
-const mpprec = 512
-
-// same compares non-complex numeric values and reports if they are approximately equal.
-func same(x, y constant.Value) bool {
- xf := constantToFloat(x)
- yf := constantToFloat(y)
- d := new(big.Float).Sub(xf, yf)
- d.Abs(d)
- eps := big.NewFloat(1.0 / (1 << (mpprec - 1))) // allow for 1 bit of error
- return d.Cmp(eps) < 0
-}
-
-// copy of the function with the same name in iexport.go.
-func constantToFloat(x constant.Value) *big.Float {
- var f big.Float
- f.SetPrec(mpprec)
- if v, exact := constant.Float64Val(x); exact {
- // float64
- f.SetFloat64(v)
- } else if num, denom := constant.Num(x), constant.Denom(x); num.Kind() == constant.Int {
- // TODO(gri): add big.Rat accessor to constant.Value.
- n := valueToRat(num)
- d := valueToRat(denom)
- f.SetRat(n.Quo(n, d))
- } else {
- // Value too large to represent as a fraction => inaccessible.
- // TODO(gri): add big.Float accessor to constant.Value.
- _, ok := f.SetString(x.ExactString())
- if !ok {
- panic("should not reach here")
- }
- }
- return &f
-}
-
-// copy of the function with the same name in iexport.go.
-func valueToRat(x constant.Value) *big.Rat {
- // Convert little-endian to big-endian.
- // I can't believe this is necessary.
- bytes := constant.Bytes(x)
- for i := 0; i < len(bytes)/2; i++ {
- bytes[i], bytes[len(bytes)-1-i] = bytes[len(bytes)-1-i], bytes[i]
- }
- return new(big.Rat).SetInt(new(big.Int).SetBytes(bytes))
-}
diff --git a/go/internal/gcimporter/iimport.go b/go/internal/gcimporter/iimport.go
deleted file mode 100644
index 1d5650ae4..000000000
--- a/go/internal/gcimporter/iimport.go
+++ /dev/null
@@ -1,898 +0,0 @@
-// Copyright 2018 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-// Indexed package import.
-// See cmd/compile/internal/gc/iexport.go for the export data format.
-
-// This file is a copy of $GOROOT/src/go/internal/gcimporter/iimport.go.
-
-package gcimporter
-
-import (
- "bytes"
- "encoding/binary"
- "fmt"
- "go/constant"
- "go/token"
- "go/types"
- "io"
- "sort"
- "strings"
-
- "golang.org/x/tools/internal/typeparams"
-)
-
-type intReader struct {
- *bytes.Reader
- path string
-}
-
-func (r *intReader) int64() int64 {
- i, err := binary.ReadVarint(r.Reader)
- if err != nil {
- errorf("import %q: read varint error: %v", r.path, err)
- }
- return i
-}
-
-func (r *intReader) uint64() uint64 {
- i, err := binary.ReadUvarint(r.Reader)
- if err != nil {
- errorf("import %q: read varint error: %v", r.path, err)
- }
- return i
-}
-
-// Keep this in sync with constants in iexport.go.
-const (
- iexportVersionGo1_11 = 0
- iexportVersionPosCol = 1
- iexportVersionGo1_18 = 2
- iexportVersionGenerics = 2
-)
-
-type ident struct {
- pkg string
- name string
-}
-
-const predeclReserved = 32
-
-type itag uint64
-
-const (
- // Types
- definedType itag = iota
- pointerType
- sliceType
- arrayType
- chanType
- mapType
- signatureType
- structType
- interfaceType
- typeParamType
- instanceType
- unionType
-)
-
-// IImportData imports a package from the serialized package data
-// and returns 0 and a reference to the package.
-// If the export data version is not recognized or the format is otherwise
-// compromised, an error is returned.
-func IImportData(fset *token.FileSet, imports map[string]*types.Package, data []byte, path string) (int, *types.Package, error) {
- pkgs, err := iimportCommon(fset, imports, data, false, path)
- if err != nil {
- return 0, nil, err
- }
- return 0, pkgs[0], nil
-}
-
-// IImportBundle imports a set of packages from the serialized package bundle.
-func IImportBundle(fset *token.FileSet, imports map[string]*types.Package, data []byte) ([]*types.Package, error) {
- return iimportCommon(fset, imports, data, true, "")
-}
-
-func iimportCommon(fset *token.FileSet, imports map[string]*types.Package, data []byte, bundle bool, path string) (pkgs []*types.Package, err error) {
- const currentVersion = 1
- version := int64(-1)
- if !debug {
- defer func() {
- if e := recover(); e != nil {
- if bundle {
- err = fmt.Errorf("%v", e)
- } else if version > currentVersion {
- err = fmt.Errorf("cannot import %q (%v), export data is newer version - update tool", path, e)
- } else {
- err = fmt.Errorf("cannot import %q (%v), possibly version skew - reinstall package", path, e)
- }
- }
- }()
- }
-
- r := &intReader{bytes.NewReader(data), path}
-
- if bundle {
-		if v := r.uint64(); v != bundleVersion {
-			errorf("unknown bundle format version %d", v)
-		}
- }
-
- version = int64(r.uint64())
- switch version {
- case iexportVersionGo1_18, iexportVersionPosCol, iexportVersionGo1_11:
- default:
- if version > iexportVersionGo1_18 {
- errorf("unstable iexport format version %d, just rebuild compiler and std library", version)
- } else {
- errorf("unknown iexport format version %d", version)
- }
- }
-
- sLen := int64(r.uint64())
- dLen := int64(r.uint64())
-
- whence, _ := r.Seek(0, io.SeekCurrent)
- stringData := data[whence : whence+sLen]
- declData := data[whence+sLen : whence+sLen+dLen]
- r.Seek(sLen+dLen, io.SeekCurrent)
-
- p := iimporter{
- version: int(version),
- ipath: path,
-
- stringData: stringData,
- stringCache: make(map[uint64]string),
- pkgCache: make(map[uint64]*types.Package),
-
- declData: declData,
- pkgIndex: make(map[*types.Package]map[string]uint64),
- typCache: make(map[uint64]types.Type),
- // Separate map for typeparams, keyed by their package and unique
- // name.
- tparamIndex: make(map[ident]types.Type),
-
- fake: fakeFileSet{
- fset: fset,
- files: make(map[string]*fileInfo),
- },
- }
- defer p.fake.setLines() // set lines for files in fset
-
- for i, pt := range predeclared() {
- p.typCache[uint64(i)] = pt
- }
-
- pkgList := make([]*types.Package, r.uint64())
- for i := range pkgList {
- pkgPathOff := r.uint64()
- pkgPath := p.stringAt(pkgPathOff)
- pkgName := p.stringAt(r.uint64())
- _ = r.uint64() // package height; unused by go/types
-
- if pkgPath == "" {
- pkgPath = path
- }
- pkg := imports[pkgPath]
- if pkg == nil {
- pkg = types.NewPackage(pkgPath, pkgName)
- imports[pkgPath] = pkg
- } else if pkg.Name() != pkgName {
- errorf("conflicting names %s and %s for package %q", pkg.Name(), pkgName, path)
- }
-
- p.pkgCache[pkgPathOff] = pkg
-
- nameIndex := make(map[string]uint64)
- for nSyms := r.uint64(); nSyms > 0; nSyms-- {
- name := p.stringAt(r.uint64())
- nameIndex[name] = r.uint64()
- }
-
- p.pkgIndex[pkg] = nameIndex
- pkgList[i] = pkg
- }
-
- if bundle {
- pkgs = make([]*types.Package, r.uint64())
- for i := range pkgs {
- pkg := p.pkgAt(r.uint64())
- imps := make([]*types.Package, r.uint64())
- for j := range imps {
- imps[j] = p.pkgAt(r.uint64())
- }
- pkg.SetImports(imps)
- pkgs[i] = pkg
- }
- } else {
- if len(pkgList) == 0 {
- errorf("no packages found for %s", path)
- panic("unreachable")
- }
- pkgs = pkgList[:1]
-
- // record all referenced packages as imports
- list := append(([]*types.Package)(nil), pkgList[1:]...)
- sort.Sort(byPath(list))
- pkgs[0].SetImports(list)
- }
-
- for _, pkg := range pkgs {
- if pkg.Complete() {
- continue
- }
-
- names := make([]string, 0, len(p.pkgIndex[pkg]))
- for name := range p.pkgIndex[pkg] {
- names = append(names, name)
- }
- sort.Strings(names)
- for _, name := range names {
- p.doDecl(pkg, name)
- }
-
- // package was imported completely and without errors
- pkg.MarkComplete()
- }
-
- // SetConstraint can't be called if the constraint type is not yet complete.
- // When type params are created in the 'P' case of (*importReader).obj(),
- // the associated constraint type may not be complete due to recursion.
- // Therefore, we defer calling SetConstraint there, and call it here instead
- // after all types are complete.
- for _, d := range p.later {
- typeparams.SetTypeParamConstraint(d.t, d.constraint)
- }
-
- for _, typ := range p.interfaceList {
- typ.Complete()
- }
-
- return pkgs, nil
-}
-
-type setConstraintArgs struct {
- t *typeparams.TypeParam
- constraint types.Type
-}
-
-type iimporter struct {
- version int
- ipath string
-
- stringData []byte
- stringCache map[uint64]string
- pkgCache map[uint64]*types.Package
-
- declData []byte
- pkgIndex map[*types.Package]map[string]uint64
- typCache map[uint64]types.Type
- tparamIndex map[ident]types.Type
-
- fake fakeFileSet
- interfaceList []*types.Interface
-
- // Arguments for calls to SetConstraint that are deferred due to recursive types
- later []setConstraintArgs
-
- indent int // for tracing support
-}
-
-func (p *iimporter) trace(format string, args ...interface{}) {
- if !trace {
- // Call sites should also be guarded, but having this check here allows
- // easily enabling/disabling debug trace statements.
- return
- }
- fmt.Printf(strings.Repeat("..", p.indent)+format+"\n", args...)
-}
-
-func (p *iimporter) doDecl(pkg *types.Package, name string) {
- if debug {
- p.trace("import decl %s", name)
- p.indent++
- defer func() {
- p.indent--
- p.trace("=> %s", name)
- }()
- }
- // See if we've already imported this declaration.
- if obj := pkg.Scope().Lookup(name); obj != nil {
- return
- }
-
- off, ok := p.pkgIndex[pkg][name]
- if !ok {
- errorf("%v.%v not in index", pkg, name)
- }
-
- r := &importReader{p: p, currPkg: pkg}
- r.declReader.Reset(p.declData[off:])
-
- r.obj(name)
-}
-
-func (p *iimporter) stringAt(off uint64) string {
- if s, ok := p.stringCache[off]; ok {
- return s
- }
-
- slen, n := binary.Uvarint(p.stringData[off:])
- if n <= 0 {
- errorf("varint failed")
- }
- spos := off + uint64(n)
- s := string(p.stringData[spos : spos+slen])
- p.stringCache[off] = s
- return s
-}
-
-func (p *iimporter) pkgAt(off uint64) *types.Package {
- if pkg, ok := p.pkgCache[off]; ok {
- return pkg
- }
- path := p.stringAt(off)
- errorf("missing package %q in %q", path, p.ipath)
- return nil
-}
-
-func (p *iimporter) typAt(off uint64, base *types.Named) types.Type {
- if t, ok := p.typCache[off]; ok && canReuse(base, t) {
- return t
- }
-
- if off < predeclReserved {
- errorf("predeclared type missing from cache: %v", off)
- }
-
- r := &importReader{p: p}
- r.declReader.Reset(p.declData[off-predeclReserved:])
- t := r.doType(base)
-
- if canReuse(base, t) {
- p.typCache[off] = t
- }
- return t
-}
-
-// canReuse reports whether the type rhs on the RHS of the declaration for def
-// may be re-used.
-//
-// Specifically, if def is non-nil and rhs is an interface type with methods, it
-// may not be re-used because we have a convention of setting the receiver type
-// for interface methods to def.
-func canReuse(def *types.Named, rhs types.Type) bool {
- if def == nil {
- return true
- }
- iface, _ := rhs.(*types.Interface)
- if iface == nil {
- return true
- }
- // Don't use iface.Empty() here as iface may not be complete.
- return iface.NumEmbeddeds() == 0 && iface.NumExplicitMethods() == 0
-}
-
-type importReader struct {
- p *iimporter
- declReader bytes.Reader
- currPkg *types.Package
- prevFile string
- prevLine int64
- prevColumn int64
-}
-
-func (r *importReader) obj(name string) {
- tag := r.byte()
- pos := r.pos()
-
- switch tag {
- case 'A':
- typ := r.typ()
-
- r.declare(types.NewTypeName(pos, r.currPkg, name, typ))
-
- case 'C':
- typ, val := r.value()
-
- r.declare(types.NewConst(pos, r.currPkg, name, typ, val))
-
- case 'F', 'G':
- var tparams []*typeparams.TypeParam
- if tag == 'G' {
- tparams = r.tparamList()
- }
- sig := r.signature(nil, nil, tparams)
- r.declare(types.NewFunc(pos, r.currPkg, name, sig))
-
- case 'T', 'U':
-		// Types can be recursive. We need to set up a stub
-		// declaration before recursing.
- obj := types.NewTypeName(pos, r.currPkg, name, nil)
- named := types.NewNamed(obj, nil, nil)
- // Declare obj before calling r.tparamList, so the new type name is recognized
- // if used in the constraint of one of its own typeparams (see #48280).
- r.declare(obj)
- if tag == 'U' {
- tparams := r.tparamList()
- typeparams.SetForNamed(named, tparams)
- }
-
- underlying := r.p.typAt(r.uint64(), named).Underlying()
- named.SetUnderlying(underlying)
-
- if !isInterface(underlying) {
- for n := r.uint64(); n > 0; n-- {
- mpos := r.pos()
- mname := r.ident()
- recv := r.param()
-
- // If the receiver has any targs, set those as the
- // rparams of the method (since those are the
- // typeparams being used in the method sig/body).
- base := baseType(recv.Type())
- assert(base != nil)
- targs := typeparams.NamedTypeArgs(base)
- var rparams []*typeparams.TypeParam
- if targs.Len() > 0 {
- rparams = make([]*typeparams.TypeParam, targs.Len())
- for i := range rparams {
- rparams[i] = targs.At(i).(*typeparams.TypeParam)
- }
- }
- msig := r.signature(recv, rparams, nil)
-
- named.AddMethod(types.NewFunc(mpos, r.currPkg, mname, msig))
- }
- }
-
- case 'P':
- // We need to "declare" a typeparam in order to have a name that
- // can be referenced recursively (if needed) in the type param's
- // bound.
- if r.p.version < iexportVersionGenerics {
- errorf("unexpected type param type")
- }
- name0 := tparamName(name)
- tn := types.NewTypeName(pos, r.currPkg, name0, nil)
- t := typeparams.NewTypeParam(tn, nil)
-
- // To handle recursive references to the typeparam within its
- // bound, save the partial type in tparamIndex before reading the bounds.
- id := ident{r.currPkg.Name(), name}
- r.p.tparamIndex[id] = t
- var implicit bool
- if r.p.version >= iexportVersionGo1_18 {
- implicit = r.bool()
- }
- constraint := r.typ()
- if implicit {
- iface, _ := constraint.(*types.Interface)
- if iface == nil {
- errorf("non-interface constraint marked implicit")
- }
- typeparams.MarkImplicit(iface)
- }
- // The constraint type may not be complete, if we
- // are in the middle of a type recursion involving type
- // constraints. So, we defer SetConstraint until we have
- // completely set up all types in ImportData.
- r.p.later = append(r.p.later, setConstraintArgs{t: t, constraint: constraint})
-
- case 'V':
- typ := r.typ()
-
- r.declare(types.NewVar(pos, r.currPkg, name, typ))
-
- default:
- errorf("unexpected tag: %v", tag)
- }
-}
-
-func (r *importReader) declare(obj types.Object) {
- obj.Pkg().Scope().Insert(obj)
-}
-
-func (r *importReader) value() (typ types.Type, val constant.Value) {
- typ = r.typ()
- if r.p.version >= iexportVersionGo1_18 {
- // TODO: add support for using the kind.
- _ = constant.Kind(r.int64())
- }
-
- switch b := typ.Underlying().(*types.Basic); b.Info() & types.IsConstType {
- case types.IsBoolean:
- val = constant.MakeBool(r.bool())
-
- case types.IsString:
- val = constant.MakeString(r.string())
-
- case types.IsInteger:
- val = r.mpint(b)
-
- case types.IsFloat:
- val = r.mpfloat(b)
-
- case types.IsComplex:
- re := r.mpfloat(b)
- im := r.mpfloat(b)
- val = constant.BinaryOp(re, token.ADD, constant.MakeImag(im))
-
- default:
- if b.Kind() == types.Invalid {
- val = constant.MakeUnknown()
- return
- }
- errorf("unexpected type %v", typ) // panics
- panic("unreachable")
- }
-
- return
-}
-
-func intSize(b *types.Basic) (signed bool, maxBytes uint) {
- if (b.Info() & types.IsUntyped) != 0 {
- return true, 64
- }
-
- switch b.Kind() {
- case types.Float32, types.Complex64:
- return true, 3
- case types.Float64, types.Complex128:
- return true, 7
- }
-
- signed = (b.Info() & types.IsUnsigned) == 0
- switch b.Kind() {
- case types.Int8, types.Uint8:
- maxBytes = 1
- case types.Int16, types.Uint16:
- maxBytes = 2
- case types.Int32, types.Uint32:
- maxBytes = 4
- default:
- maxBytes = 8
- }
-
- return
-}
-
-func (r *importReader) mpint(b *types.Basic) constant.Value {
- signed, maxBytes := intSize(b)
-
- maxSmall := 256 - maxBytes
- if signed {
- maxSmall = 256 - 2*maxBytes
- }
- if maxBytes == 1 {
- maxSmall = 256
- }
-
- n, _ := r.declReader.ReadByte()
- if uint(n) < maxSmall {
- v := int64(n)
- if signed {
- v >>= 1
- if n&1 != 0 {
- v = ^v
- }
- }
- return constant.MakeInt64(v)
- }
-
- v := -n
- if signed {
- v = -(n &^ 1) >> 1
- }
- if v < 1 || uint(v) > maxBytes {
- errorf("weird decoding: %v, %v => %v", n, signed, v)
- }
-
- buf := make([]byte, v)
- io.ReadFull(&r.declReader, buf)
-
- // convert to little endian
- // TODO(gri) go/constant should have a more direct conversion function
- // (e.g., once it supports a big.Float based implementation)
- for i, j := 0, len(buf)-1; i < j; i, j = i+1, j-1 {
- buf[i], buf[j] = buf[j], buf[i]
- }
-
- x := constant.MakeFromBytes(buf)
- if signed && n&1 != 0 {
- x = constant.UnaryOp(token.SUB, x, 0)
- }
- return x
-}
-
-func (r *importReader) mpfloat(b *types.Basic) constant.Value {
- x := r.mpint(b)
- if constant.Sign(x) == 0 {
- return x
- }
-
- exp := r.int64()
- switch {
- case exp > 0:
- x = constant.Shift(x, token.SHL, uint(exp))
- // Ensure that the imported Kind is Float, else this constant may run into
- // bitsize limits on overlarge integers. Eventually we can instead adopt
- // the approach of CL 288632, but that CL relies on go/constant APIs that
- // were introduced in go1.13.
- //
- // TODO(rFindley): sync the logic here with tip Go once we no longer
- // support go1.12.
- x = constant.ToFloat(x)
- case exp < 0:
- d := constant.Shift(constant.MakeInt64(1), token.SHL, uint(-exp))
- x = constant.BinaryOp(x, token.QUO, d)
- }
- return x
-}
-
-func (r *importReader) ident() string {
- return r.string()
-}
-
-func (r *importReader) qualifiedIdent() (*types.Package, string) {
- name := r.string()
- pkg := r.pkg()
- return pkg, name
-}
-
-func (r *importReader) pos() token.Pos {
- if r.p.version >= iexportVersionPosCol {
- r.posv1()
- } else {
- r.posv0()
- }
-
- if r.prevFile == "" && r.prevLine == 0 && r.prevColumn == 0 {
- return token.NoPos
- }
- return r.p.fake.pos(r.prevFile, int(r.prevLine), int(r.prevColumn))
-}
-
-func (r *importReader) posv0() {
- delta := r.int64()
- if delta != deltaNewFile {
- r.prevLine += delta
- } else if l := r.int64(); l == -1 {
- r.prevLine += deltaNewFile
- } else {
- r.prevFile = r.string()
- r.prevLine = l
- }
-}
-
-func (r *importReader) posv1() {
- delta := r.int64()
- r.prevColumn += delta >> 1
- if delta&1 != 0 {
- delta = r.int64()
- r.prevLine += delta >> 1
- if delta&1 != 0 {
- r.prevFile = r.string()
- }
- }
-}
-
-func (r *importReader) typ() types.Type {
- return r.p.typAt(r.uint64(), nil)
-}
-
-func isInterface(t types.Type) bool {
- _, ok := t.(*types.Interface)
- return ok
-}
-
-func (r *importReader) pkg() *types.Package { return r.p.pkgAt(r.uint64()) }
-func (r *importReader) string() string { return r.p.stringAt(r.uint64()) }
-
-func (r *importReader) doType(base *types.Named) (res types.Type) {
- k := r.kind()
- if debug {
- r.p.trace("importing type %d (base: %s)", k, base)
- r.p.indent++
- defer func() {
- r.p.indent--
- r.p.trace("=> %s", res)
- }()
- }
- switch k {
- default:
- errorf("unexpected kind tag in %q: %v", r.p.ipath, k)
- return nil
-
- case definedType:
- pkg, name := r.qualifiedIdent()
- r.p.doDecl(pkg, name)
- return pkg.Scope().Lookup(name).(*types.TypeName).Type()
- case pointerType:
- return types.NewPointer(r.typ())
- case sliceType:
- return types.NewSlice(r.typ())
- case arrayType:
- n := r.uint64()
- return types.NewArray(r.typ(), int64(n))
- case chanType:
- dir := chanDir(int(r.uint64()))
- return types.NewChan(dir, r.typ())
- case mapType:
- return types.NewMap(r.typ(), r.typ())
- case signatureType:
- r.currPkg = r.pkg()
- return r.signature(nil, nil, nil)
-
- case structType:
- r.currPkg = r.pkg()
-
- fields := make([]*types.Var, r.uint64())
- tags := make([]string, len(fields))
- for i := range fields {
- fpos := r.pos()
- fname := r.ident()
- ftyp := r.typ()
- emb := r.bool()
- tag := r.string()
-
- fields[i] = types.NewField(fpos, r.currPkg, fname, ftyp, emb)
- tags[i] = tag
- }
- return types.NewStruct(fields, tags)
-
- case interfaceType:
- r.currPkg = r.pkg()
-
- embeddeds := make([]types.Type, r.uint64())
- for i := range embeddeds {
- _ = r.pos()
- embeddeds[i] = r.typ()
- }
-
- methods := make([]*types.Func, r.uint64())
- for i := range methods {
- mpos := r.pos()
- mname := r.ident()
-
- // TODO(mdempsky): Matches bimport.go, but I
- // don't agree with this.
- var recv *types.Var
- if base != nil {
- recv = types.NewVar(token.NoPos, r.currPkg, "", base)
- }
-
- msig := r.signature(recv, nil, nil)
- methods[i] = types.NewFunc(mpos, r.currPkg, mname, msig)
- }
-
- typ := newInterface(methods, embeddeds)
- r.p.interfaceList = append(r.p.interfaceList, typ)
- return typ
-
- case typeParamType:
- if r.p.version < iexportVersionGenerics {
- errorf("unexpected type param type")
- }
- pkg, name := r.qualifiedIdent()
- id := ident{pkg.Name(), name}
- if t, ok := r.p.tparamIndex[id]; ok {
- // We're already in the process of importing this typeparam.
- return t
- }
- // Otherwise, import the definition of the typeparam now.
- r.p.doDecl(pkg, name)
- return r.p.tparamIndex[id]
-
- case instanceType:
- if r.p.version < iexportVersionGenerics {
- errorf("unexpected instantiation type")
- }
- // pos does not matter for instances: they are positioned on the original
- // type.
- _ = r.pos()
- len := r.uint64()
- targs := make([]types.Type, len)
- for i := range targs {
- targs[i] = r.typ()
- }
- baseType := r.typ()
- // The imported instantiated type doesn't include any methods, so
- // we must always use the methods of the base (orig) type.
- // TODO provide a non-nil *Environment
- t, _ := typeparams.Instantiate(nil, baseType, targs, false)
- return t
-
- case unionType:
- if r.p.version < iexportVersionGenerics {
- errorf("unexpected instantiation type")
- }
- terms := make([]*typeparams.Term, r.uint64())
- for i := range terms {
- terms[i] = typeparams.NewTerm(r.bool(), r.typ())
- }
- return typeparams.NewUnion(terms)
- }
-}
-
-func (r *importReader) kind() itag {
- return itag(r.uint64())
-}
-
-func (r *importReader) signature(recv *types.Var, rparams []*typeparams.TypeParam, tparams []*typeparams.TypeParam) *types.Signature {
- params := r.paramList()
- results := r.paramList()
- variadic := params.Len() > 0 && r.bool()
- return typeparams.NewSignatureType(recv, rparams, tparams, params, results, variadic)
-}
-
-func (r *importReader) tparamList() []*typeparams.TypeParam {
- n := r.uint64()
- if n == 0 {
- return nil
- }
- xs := make([]*typeparams.TypeParam, n)
- for i := range xs {
- // Note: the standard library importer is tolerant of nil types here,
- // though would panic in SetTypeParams.
- xs[i] = r.typ().(*typeparams.TypeParam)
- }
- return xs
-}
-
-func (r *importReader) paramList() *types.Tuple {
- xs := make([]*types.Var, r.uint64())
- for i := range xs {
- xs[i] = r.param()
- }
- return types.NewTuple(xs...)
-}
-
-func (r *importReader) param() *types.Var {
- pos := r.pos()
- name := r.ident()
- typ := r.typ()
- return types.NewParam(pos, r.currPkg, name, typ)
-}
-
-func (r *importReader) bool() bool {
- return r.uint64() != 0
-}
-
-func (r *importReader) int64() int64 {
- n, err := binary.ReadVarint(&r.declReader)
- if err != nil {
- errorf("readVarint: %v", err)
- }
- return n
-}
-
-func (r *importReader) uint64() uint64 {
- n, err := binary.ReadUvarint(&r.declReader)
- if err != nil {
- errorf("readUvarint: %v", err)
- }
- return n
-}
-
-func (r *importReader) byte() byte {
- x, err := r.declReader.ReadByte()
- if err != nil {
- errorf("declReader.ReadByte: %v", err)
- }
- return x
-}
-
-func baseType(typ types.Type) *types.Named {
- // pointer receivers are never types.Named types
- if p, _ := typ.(*types.Pointer); p != nil {
- typ = p.Elem()
- }
- // receiver base types are always (possibly generic) types.Named types
- n, _ := typ.(*types.Named)
- return n
-}
diff --git a/go/internal/gcimporter/newInterface10.go b/go/internal/gcimporter/newInterface10.go
deleted file mode 100644
index 8b163e3d0..000000000
--- a/go/internal/gcimporter/newInterface10.go
+++ /dev/null
@@ -1,22 +0,0 @@
-// Copyright 2018 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-//go:build !go1.11
-// +build !go1.11
-
-package gcimporter
-
-import "go/types"
-
-func newInterface(methods []*types.Func, embeddeds []types.Type) *types.Interface {
- named := make([]*types.Named, len(embeddeds))
- for i, e := range embeddeds {
- var ok bool
- named[i], ok = e.(*types.Named)
- if !ok {
- panic("embedding of non-defined interfaces in interfaces is not supported before Go 1.11")
- }
- }
- return types.NewInterface(methods, named)
-}
diff --git a/go/internal/gcimporter/newInterface11.go b/go/internal/gcimporter/newInterface11.go
deleted file mode 100644
index 49984f40f..000000000
--- a/go/internal/gcimporter/newInterface11.go
+++ /dev/null
@@ -1,14 +0,0 @@
-// Copyright 2018 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-//go:build go1.11
-// +build go1.11
-
-package gcimporter
-
-import "go/types"
-
-func newInterface(methods []*types.Func, embeddeds []types.Type) *types.Interface {
- return types.NewInterfaceType(methods, embeddeds)
-}
diff --git a/go/internal/gcimporter/support_go117.go b/go/internal/gcimporter/support_go117.go
deleted file mode 100644
index d892273ef..000000000
--- a/go/internal/gcimporter/support_go117.go
+++ /dev/null
@@ -1,16 +0,0 @@
-// Copyright 2021 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-//go:build !go1.18
-// +build !go1.18
-
-package gcimporter
-
-import "go/types"
-
-const iexportVersion = iexportVersionGo1_11
-
-func additionalPredeclared() []types.Type {
- return nil
-}
diff --git a/go/internal/gcimporter/support_go118.go b/go/internal/gcimporter/support_go118.go
deleted file mode 100644
index a99384323..000000000
--- a/go/internal/gcimporter/support_go118.go
+++ /dev/null
@@ -1,23 +0,0 @@
-// Copyright 2021 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-//go:build go1.18
-// +build go1.18
-
-package gcimporter
-
-import "go/types"
-
-const iexportVersion = iexportVersionGenerics
-
-// additionalPredeclared returns additional predeclared types in go.1.18.
-func additionalPredeclared() []types.Type {
- return []types.Type{
- // comparable
- types.Universe.Lookup("comparable").Type(),
-
- // any
- types.Universe.Lookup("any").Type(),
- }
-}
diff --git a/go/internal/gcimporter/testdata/a.go b/go/internal/gcimporter/testdata/a.go
deleted file mode 100644
index 56e4292cd..000000000
--- a/go/internal/gcimporter/testdata/a.go
+++ /dev/null
@@ -1,14 +0,0 @@
-// Copyright 2016 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-// Input for TestIssue13566
-
-package a
-
-import "encoding/json"
-
-type A struct {
- a *A
- json json.RawMessage
-}
diff --git a/go/internal/gcimporter/testdata/b.go b/go/internal/gcimporter/testdata/b.go
deleted file mode 100644
index 419667820..000000000
--- a/go/internal/gcimporter/testdata/b.go
+++ /dev/null
@@ -1,11 +0,0 @@
-// Copyright 2016 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-// Input for TestIssue13566
-
-package b
-
-import "./a"
-
-type A a.A
diff --git a/go/internal/gcimporter/testdata/exports.go b/go/internal/gcimporter/testdata/exports.go
deleted file mode 100644
index 8ee28b094..000000000
--- a/go/internal/gcimporter/testdata/exports.go
+++ /dev/null
@@ -1,89 +0,0 @@
-// Copyright 2011 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-// This file is used to generate an object file which
-// serves as test file for gcimporter_test.go.
-
-package exports
-
-import (
- "go/ast"
-)
-
-// Issue 3682: Correctly read dotted identifiers from export data.
-const init1 = 0
-
-func init() {}
-
-const (
- C0 int = 0
- C1 = 3.14159265
- C2 = 2.718281828i
- C3 = -123.456e-789
- C4 = +123.456E+789
- C5 = 1234i
- C6 = "foo\n"
- C7 = `bar\n`
-)
-
-type (
- T1 int
- T2 [10]int
- T3 []int
- T4 *int
- T5 chan int
- T6a chan<- int
- T6b chan (<-chan int)
- T6c chan<- (chan int)
- T7 <-chan *ast.File
- T8 struct{}
- T9 struct {
- a int
- b, c float32
- d []string `go:"tag"`
- }
- T10 struct {
- T8
- T9
- _ *T10
- }
- T11 map[int]string
- T12 interface{}
- T13 interface {
- m1()
- m2(int) float32
- }
- T14 interface {
- T12
- T13
- m3(x ...struct{}) []T9
- }
- T15 func()
- T16 func(int)
- T17 func(x int)
- T18 func() float32
- T19 func() (x float32)
- T20 func(...interface{})
- T21 struct{ next *T21 }
- T22 struct{ link *T23 }
- T23 struct{ link *T22 }
- T24 *T24
- T25 *T26
- T26 *T27
- T27 *T25
- T28 func(T28) T28
-)
-
-var (
- V0 int
- V1 = -991.0
-)
-
-func F1() {}
-func F2(x int) {}
-func F3() int { return 0 }
-func F4() float32 { return 0 }
-func F5(a, b, c int, u, v, w struct{ x, y T1 }, more ...interface{}) (p, q, r chan<- T10)
-
-func (p *T1) M1()
diff --git a/go/internal/gcimporter/testdata/issue15920.go b/go/internal/gcimporter/testdata/issue15920.go
deleted file mode 100644
index c70f7d826..000000000
--- a/go/internal/gcimporter/testdata/issue15920.go
+++ /dev/null
@@ -1,11 +0,0 @@
-// Copyright 2016 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-package p
-
-// The underlying type of Error is the underlying type of error.
-// Make sure we can import this again without problems.
-type Error error
-
-func F() Error { return nil }
diff --git a/go/internal/gcimporter/testdata/issue20046.go b/go/internal/gcimporter/testdata/issue20046.go
deleted file mode 100644
index c63ee821c..000000000
--- a/go/internal/gcimporter/testdata/issue20046.go
+++ /dev/null
@@ -1,9 +0,0 @@
-// Copyright 2017 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-package p
-
-var V interface {
- M()
-}
diff --git a/go/internal/gcimporter/testdata/issue25301.go b/go/internal/gcimporter/testdata/issue25301.go
deleted file mode 100644
index e3dc98b4e..000000000
--- a/go/internal/gcimporter/testdata/issue25301.go
+++ /dev/null
@@ -1,17 +0,0 @@
-// Copyright 2018 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-package issue25301
-
-type (
- A = interface {
- M()
- }
- T interface {
- A
- }
- S struct{}
-)
-
-func (S) M() { println("m") }
diff --git a/go/internal/gcimporter/testdata/p.go b/go/internal/gcimporter/testdata/p.go
deleted file mode 100644
index 9e2e70576..000000000
--- a/go/internal/gcimporter/testdata/p.go
+++ /dev/null
@@ -1,13 +0,0 @@
-// Copyright 2016 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-// Input for TestIssue15517
-
-package p
-
-const C = 0
-
-var V int
-
-func F() {}
diff --git a/go/internal/gcimporter/testdata/versions/test.go b/go/internal/gcimporter/testdata/versions/test.go
deleted file mode 100644
index 6362adc21..000000000
--- a/go/internal/gcimporter/testdata/versions/test.go
+++ /dev/null
@@ -1,30 +0,0 @@
-// Copyright 2016 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-// This file is a copy of $GOROOT/src/go/internal/gcimporter/testdata/versions.test.go.
-
-// To create a test case for a new export format version,
-// build this package with the latest compiler and store
-// the resulting .a file appropriately named in the versions
-// directory. The VersionHandling test will pick it up.
-//
-// In the testdata/versions:
-//
-// go build -o test_go1.$X_$Y.a test.go
-//
-// with $X = Go version and $Y = export format version
-// (add 'b' or 'i' to distinguish between binary and
-// indexed format starting with 1.11 as long as both
-// formats are supported).
-//
-// Make sure this source is extended such that it exercises
-// whatever export format change has taken place.
-
-package test
-
-// Any release before and including Go 1.7 didn't encode
-// the package for a blank struct field.
-type BlankField struct {
- _ int
-}
diff --git a/go/internal/gcimporter/testdata/versions/test_go1.11_0i.a b/go/internal/gcimporter/testdata/versions/test_go1.11_0i.a
deleted file mode 100644
index b00fefed0..000000000
--- a/go/internal/gcimporter/testdata/versions/test_go1.11_0i.a
+++ /dev/null
Binary files differ
diff --git a/go/internal/gcimporter/testdata/versions/test_go1.11_6b.a b/go/internal/gcimporter/testdata/versions/test_go1.11_6b.a
deleted file mode 100644
index c0a211e91..000000000
--- a/go/internal/gcimporter/testdata/versions/test_go1.11_6b.a
+++ /dev/null
Binary files differ
diff --git a/go/internal/gcimporter/testdata/versions/test_go1.11_999b.a b/go/internal/gcimporter/testdata/versions/test_go1.11_999b.a
deleted file mode 100644
index c35d22dce..000000000
--- a/go/internal/gcimporter/testdata/versions/test_go1.11_999b.a
+++ /dev/null
Binary files differ
diff --git a/go/internal/gcimporter/testdata/versions/test_go1.11_999i.a b/go/internal/gcimporter/testdata/versions/test_go1.11_999i.a
deleted file mode 100644
index 99401d7c3..000000000
--- a/go/internal/gcimporter/testdata/versions/test_go1.11_999i.a
+++ /dev/null
Binary files differ
diff --git a/go/internal/gcimporter/testdata/versions/test_go1.7_0.a b/go/internal/gcimporter/testdata/versions/test_go1.7_0.a
deleted file mode 100644
index edb6c3f25..000000000
--- a/go/internal/gcimporter/testdata/versions/test_go1.7_0.a
+++ /dev/null
Binary files differ
diff --git a/go/internal/gcimporter/testdata/versions/test_go1.7_1.a b/go/internal/gcimporter/testdata/versions/test_go1.7_1.a
deleted file mode 100644
index 554d04a72..000000000
--- a/go/internal/gcimporter/testdata/versions/test_go1.7_1.a
+++ /dev/null
Binary files differ
diff --git a/go/internal/gcimporter/testdata/versions/test_go1.8_4.a b/go/internal/gcimporter/testdata/versions/test_go1.8_4.a
deleted file mode 100644
index 26b853165..000000000
--- a/go/internal/gcimporter/testdata/versions/test_go1.8_4.a
+++ /dev/null
Binary files differ
diff --git a/go/internal/gcimporter/testdata/versions/test_go1.8_5.a b/go/internal/gcimporter/testdata/versions/test_go1.8_5.a
deleted file mode 100644
index 60e52efea..000000000
--- a/go/internal/gcimporter/testdata/versions/test_go1.8_5.a
+++ /dev/null
Binary files differ
diff --git a/go/loader/doc.go b/go/loader/doc.go
index c5aa31c1a..e35b1fd7d 100644
--- a/go/loader/doc.go
+++ b/go/loader/doc.go
@@ -20,36 +20,35 @@
// be called any number of times. Finally, these are followed by a
// call to Load() to actually load and type-check the program.
//
-// var conf loader.Config
+// var conf loader.Config
//
-// // Use the command-line arguments to specify
-// // a set of initial packages to load from source.
-// // See FromArgsUsage for help.
-// rest, err := conf.FromArgs(os.Args[1:], wantTests)
+// // Use the command-line arguments to specify
+// // a set of initial packages to load from source.
+// // See FromArgsUsage for help.
+// rest, err := conf.FromArgs(os.Args[1:], wantTests)
//
-// // Parse the specified files and create an ad hoc package with path "foo".
-// // All files must have the same 'package' declaration.
-// conf.CreateFromFilenames("foo", "foo.go", "bar.go")
+// // Parse the specified files and create an ad hoc package with path "foo".
+// // All files must have the same 'package' declaration.
+// conf.CreateFromFilenames("foo", "foo.go", "bar.go")
//
-// // Create an ad hoc package with path "foo" from
-// // the specified already-parsed files.
-// // All ASTs must have the same 'package' declaration.
-// conf.CreateFromFiles("foo", parsedFiles)
+// // Create an ad hoc package with path "foo" from
+// // the specified already-parsed files.
+// // All ASTs must have the same 'package' declaration.
+// conf.CreateFromFiles("foo", parsedFiles)
//
-// // Add "runtime" to the set of packages to be loaded.
-// conf.Import("runtime")
+// // Add "runtime" to the set of packages to be loaded.
+// conf.Import("runtime")
//
-// // Adds "fmt" and "fmt_test" to the set of packages
-// // to be loaded. "fmt" will include *_test.go files.
-// conf.ImportWithTests("fmt")
+// // Adds "fmt" and "fmt_test" to the set of packages
+// // to be loaded. "fmt" will include *_test.go files.
+// conf.ImportWithTests("fmt")
//
-// // Finally, load all the packages specified by the configuration.
-// prog, err := conf.Load()
+// // Finally, load all the packages specified by the configuration.
+// prog, err := conf.Load()
//
// See examples_test.go for examples of API usage.
//
-//
-// CONCEPTS AND TERMINOLOGY
+// # CONCEPTS AND TERMINOLOGY
//
// The WORKSPACE is the set of packages accessible to the loader. The
// workspace is defined by Config.Build, a *build.Context. The
@@ -92,7 +91,6 @@
// The INITIAL packages are those specified in the configuration. A
// DEPENDENCY is a package loaded to satisfy an import in an initial
// package or another dependency.
-//
package loader
// IMPLEMENTATION NOTES
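The doc.go changes above are gofmt's Go 1.19 doc comment reformatting: code examples are indented with a tab and section headings get a leading "#". A minimal sketch of the convention, using an invented package name purely for illustration:

	// Package demo illustrates the Go 1.19 doc comment syntax used above:
	// a "# Heading" line renders as a section heading, and tab-indented
	// lines render as a code block in go doc and on pkg.go.dev.
	//
	// # Example
	//
	//	var conf loader.Config
	//	prog, err := conf.Load()
	package demo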
diff --git a/go/loader/loader.go b/go/loader/loader.go
index 3ba91f7c5..edf62c2cc 100644
--- a/go/loader/loader.go
+++ b/go/loader/loader.go
@@ -179,7 +179,6 @@ type Program struct {
// for a single package.
//
// Not mutated once exposed via the API.
-//
type PackageInfo struct {
Pkg *types.Package
Importable bool // true if 'import "Pkg.Path()"' would resolve to this
@@ -217,7 +216,6 @@ func (conf *Config) fset() *token.FileSet {
// src specifies the parser input as a string, []byte, or io.Reader, and
// filename is its apparent name. If src is nil, the contents of
// filename are read from the file system.
-//
func (conf *Config) ParseFile(filename string, src interface{}) (*ast.File, error) {
// TODO(adonovan): use conf.build() etc like parseFiles does.
return parser.ParseFile(conf.fset(), filename, src, conf.ParserMode)
@@ -262,7 +260,6 @@ A '--' argument terminates the list of packages.
//
// Only superficial errors are reported at this stage; errors dependent
// on I/O are detected during Load.
-//
func (conf *Config) FromArgs(args []string, xtest bool) ([]string, error) {
var rest []string
for i, arg := range args {
@@ -300,14 +297,12 @@ func (conf *Config) FromArgs(args []string, xtest bool) ([]string, error) {
// CreateFromFilenames is a convenience function that adds
// a conf.CreatePkgs entry to create a package of the specified *.go
// files.
-//
func (conf *Config) CreateFromFilenames(path string, filenames ...string) {
conf.CreatePkgs = append(conf.CreatePkgs, PkgSpec{Path: path, Filenames: filenames})
}
// CreateFromFiles is a convenience function that adds a conf.CreatePkgs
// entry to create package of the specified path and parsed files.
-//
func (conf *Config) CreateFromFiles(path string, files ...*ast.File) {
conf.CreatePkgs = append(conf.CreatePkgs, PkgSpec{Path: path, Files: files})
}
@@ -321,12 +316,10 @@ func (conf *Config) CreateFromFiles(path string, files ...*ast.File) {
// In addition, if any *_test.go files contain a "package x_test"
// declaration, an additional package comprising just those files will
// be added to CreatePkgs.
-//
func (conf *Config) ImportWithTests(path string) { conf.addImport(path, true) }
// Import is a convenience function that adds path to ImportPkgs, the
// set of initial packages that will be imported from source.
-//
func (conf *Config) Import(path string) { conf.addImport(path, false) }
func (conf *Config) addImport(path string, tests bool) {
@@ -345,7 +338,6 @@ func (conf *Config) addImport(path string, tests bool) {
// exact is defined as for astutil.PathEnclosingInterval.
//
// The zero value is returned if not found.
-//
func (prog *Program) PathEnclosingInterval(start, end token.Pos) (pkg *PackageInfo, path []ast.Node, exact bool) {
for _, info := range prog.AllPackages {
for _, f := range info.Files {
@@ -368,7 +360,6 @@ func (prog *Program) PathEnclosingInterval(start, end token.Pos) (pkg *PackageIn
// InitialPackages returns a new slice containing the set of initial
// packages (Created + Imported) in unspecified order.
-//
func (prog *Program) InitialPackages() []*PackageInfo {
infos := make([]*PackageInfo, 0, len(prog.Created)+len(prog.Imported))
infos = append(infos, prog.Created...)
@@ -435,7 +426,6 @@ type findpkgValue struct {
// Upon completion, exactly one of info and err is non-nil:
// info on successful creation of a package, err otherwise.
// A successful package may still contain type errors.
-//
type importInfo struct {
path string // import path
info *PackageInfo // results of typechecking (including errors)
@@ -475,7 +465,6 @@ type importError struct {
// false, Load will fail if any package had an error.
//
// It is an error if no packages were loaded.
-//
func (conf *Config) Load() (*Program, error) {
// Create a simple default error handler for parse/type errors.
if conf.TypeChecker.Error == nil {
@@ -732,10 +721,10 @@ func (conf *Config) build() *build.Context {
// errors that were encountered.
//
// 'which' indicates which files to include:
-// 'g': include non-test *.go source files (GoFiles + processed CgoFiles)
-// 't': include in-package *_test.go source files (TestGoFiles)
-// 'x': include external *_test.go source files. (XTestGoFiles)
//
+// 'g': include non-test *.go source files (GoFiles + processed CgoFiles)
+// 't': include in-package *_test.go source files (TestGoFiles)
+// 'x': include external *_test.go source files. (XTestGoFiles)
func (conf *Config) parsePackageFiles(bp *build.Package, which rune) ([]*ast.File, []error) {
if bp.ImportPath == "unsafe" {
return nil, nil
@@ -776,7 +765,6 @@ func (conf *Config) parsePackageFiles(bp *build.Package, which rune) ([]*ast.Fil
// in the package's PackageInfo).
//
// Idempotent.
-//
func (imp *importer) doImport(from *PackageInfo, to string) (*types.Package, error) {
if to == "C" {
// This should be unreachable, but ad hoc packages are
@@ -868,7 +856,6 @@ func (imp *importer) findPackage(importPath, fromDir string, mode build.ImportMo
//
// fromDir is the directory containing the import declaration that
// caused these imports.
-//
func (imp *importer) importAll(fromPath, fromDir string, imports map[string]bool, mode build.ImportMode) (infos []*PackageInfo, errors []importError) {
if fromPath != "" {
// We're loading a set of imports.
@@ -951,7 +938,6 @@ func (imp *importer) findPath(from, to string) []string {
// caller must call awaitCompletion() before accessing its info field.
//
// startLoad is concurrency-safe and idempotent.
-//
func (imp *importer) startLoad(bp *build.Package) *importInfo {
path := bp.ImportPath
imp.importedMu.Lock()
@@ -995,7 +981,6 @@ func (imp *importer) load(bp *build.Package) *PackageInfo {
//
// cycleCheck determines whether the imports within files create
// dependency edges that should be checked for potential cycles.
-//
func (imp *importer) addFiles(info *PackageInfo, files []*ast.File, cycleCheck bool) {
// Ensure the dependencies are loaded, in parallel.
var fromPath string
diff --git a/go/loader/stdlib_test.go b/go/loader/stdlib_test.go
index b55aa8ffa..f3f3e39bf 100644
--- a/go/loader/stdlib_test.go
+++ b/go/loader/stdlib_test.go
@@ -127,6 +127,8 @@ func TestCgoOption(t *testing.T) {
// or the std library is incomplete (Android).
case "android", "plan9", "solaris", "windows":
t.Skipf("no cgo or incomplete std lib on %s", runtime.GOOS)
+ case "darwin":
+ t.Skipf("golang/go#58493: file locations in this test are stale on darwin")
}
// In nocgo builds (e.g. linux-amd64-nocgo),
// there is no "runtime/cgo" package,
diff --git a/go/loader/util.go b/go/loader/util.go
index 7f38dd740..3a80acae6 100644
--- a/go/loader/util.go
+++ b/go/loader/util.go
@@ -27,7 +27,6 @@ var ioLimit = make(chan bool, 10)
//
// I/O is done via ctxt, which may specify a virtual file system.
// displayPath is used to transform the filenames attached to the ASTs.
-//
func parseFiles(fset *token.FileSet, ctxt *build.Context, displayPath func(string) string, dir string, files []string, mode parser.Mode) ([]*ast.File, []error) {
if displayPath == nil {
displayPath = func(path string) string { return path }
diff --git a/go/packages/doc.go b/go/packages/doc.go
index 4bfe28a51..da4ab89fe 100644
--- a/go/packages/doc.go
+++ b/go/packages/doc.go
@@ -67,7 +67,6 @@ Most tools should pass their command-line arguments (after any flags)
uninterpreted to the loader, so that the loader can interpret them
according to the conventions of the underlying build system.
See the Example function for typical usage.
-
*/
package packages // import "golang.org/x/tools/go/packages"
diff --git a/go/packages/golist.go b/go/packages/golist.go
index 7aa97f7be..6bb7168d2 100644
--- a/go/packages/golist.go
+++ b/go/packages/golist.go
@@ -26,7 +26,6 @@ import (
"golang.org/x/tools/go/internal/packagesdriver"
"golang.org/x/tools/internal/gocommand"
"golang.org/x/tools/internal/packagesinternal"
- "golang.org/x/xerrors"
)
// debug controls verbose logging.
@@ -61,6 +60,7 @@ func (r *responseDeduper) addAll(dr *driverResponse) {
for _, root := range dr.Roots {
r.addRoot(root)
}
+ r.dr.GoVersion = dr.GoVersion
}
func (r *responseDeduper) addPackage(p *Package) {
@@ -303,11 +303,12 @@ func (state *golistState) runContainsQueries(response *responseDeduper, queries
}
dirResponse, err := state.createDriverResponse(pattern)
- // If there was an error loading the package, or the package is returned
- // with errors, try to load the file as an ad-hoc package.
+ // If there was an error loading the package, or no packages are returned,
+ // or the package is returned with errors, try to load the file as an
+ // ad-hoc package.
// Usually the error will appear in a returned package, but may not if we're
// in module mode and the ad-hoc is located outside a module.
- if err != nil || len(dirResponse.Packages) == 1 && len(dirResponse.Packages[0].GoFiles) == 0 &&
+ if err != nil || len(dirResponse.Packages) == 0 || len(dirResponse.Packages) == 1 && len(dirResponse.Packages[0].GoFiles) == 0 &&
len(dirResponse.Packages[0].Errors) == 1 {
var queryErr error
if dirResponse, queryErr = state.adhocPackage(pattern, query); queryErr != nil {
@@ -446,15 +447,22 @@ func (state *golistState) createDriverResponse(words ...string) (*driverResponse
// Run "go list" for complete
// information on the specified packages.
- buf, err := state.invokeGo("list", golistargs(state.cfg, words)...)
+ goVersion, err := state.getGoVersion()
if err != nil {
return nil, err
}
+ buf, err := state.invokeGo("list", golistargs(state.cfg, words, goVersion)...)
+ if err != nil {
+ return nil, err
+ }
+
seen := make(map[string]*jsonPackage)
pkgs := make(map[string]*Package)
additionalErrors := make(map[string][]Error)
// Decode the JSON and convert it to Package form.
- var response driverResponse
+ response := &driverResponse{
+ GoVersion: goVersion,
+ }
for dec := json.NewDecoder(buf); dec.More(); {
p := new(jsonPackage)
if err := dec.Decode(p); err != nil {
@@ -596,17 +604,12 @@ func (state *golistState) createDriverResponse(words ...string) (*driverResponse
// Work around https://golang.org/issue/28749:
// cmd/go puts assembly, C, and C++ files in CompiledGoFiles.
- // Filter out any elements of CompiledGoFiles that are also in OtherFiles.
- // We have to keep this workaround in place until go1.12 is a distant memory.
- if len(pkg.OtherFiles) > 0 {
- other := make(map[string]bool, len(pkg.OtherFiles))
- for _, f := range pkg.OtherFiles {
- other[f] = true
- }
-
+ // Remove files from CompiledGoFiles that are non-go files
+ // (or are not files that look like they are from the cache).
+ if len(pkg.CompiledGoFiles) > 0 {
out := pkg.CompiledGoFiles[:0]
for _, f := range pkg.CompiledGoFiles {
- if other[f] {
+ if ext := filepath.Ext(f); ext != ".go" && ext != "" { // ext == "" means the file is from the cache, so probably cgo-processed file
continue
}
out = append(out, f)
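The replacement filter above keeps only entries that either end in .go or have no extension (cache files, typically cgo-processed output), instead of cross-checking against OtherFiles. A self-contained sketch of the same logic, with invented file names:

	package example

	import (
		"fmt"
		"path/filepath"
	)

	// filterCompiledGoFiles mirrors the new filter: keep *.go files and
	// extensionless cache entries, drop assembly/C/C++ sources.
	func filterCompiledGoFiles(files []string) []string {
		out := files[:0]
		for _, f := range files {
			if ext := filepath.Ext(f); ext != ".go" && ext != "" {
				continue // e.g. .s, .c, .cpp
			}
			out = append(out, f)
		}
		return out
	}

	func main() {
		fmt.Println(filterCompiledGoFiles([]string{"a.go", "a.s", "a.c", "b3c4a1"}))
		// Output: [a.go b3c4a1]
	}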
@@ -726,7 +729,7 @@ func (state *golistState) createDriverResponse(words ...string) (*driverResponse
}
sort.Slice(response.Packages, func(i, j int) bool { return response.Packages[i].ID < response.Packages[j].ID })
- return &response, nil
+ return response, nil
}
func (state *golistState) shouldAddFilenameFromError(p *jsonPackage) bool {
@@ -752,6 +755,7 @@ func (state *golistState) shouldAddFilenameFromError(p *jsonPackage) bool {
return len(p.Error.ImportStack) == 0 || p.Error.ImportStack[len(p.Error.ImportStack)-1] == p.ImportPath
}
+// getGoVersion returns the effective minor version of the go command.
func (state *golistState) getGoVersion() (int, error) {
state.goVersionOnce.Do(func() {
state.goVersion, state.goVersionError = gocommand.GoVersion(state.ctx, state.cfgInvocation(), state.cfg.gocmdRunner)
@@ -809,10 +813,76 @@ func absJoin(dir string, fileses ...[]string) (res []string) {
return res
}
-func golistargs(cfg *Config, words []string) []string {
+func jsonFlag(cfg *Config, goVersion int) string {
+ if goVersion < 19 {
+ return "-json"
+ }
+ var fields []string
+ added := make(map[string]bool)
+ addFields := func(fs ...string) {
+ for _, f := range fs {
+ if !added[f] {
+ added[f] = true
+ fields = append(fields, f)
+ }
+ }
+ }
+ addFields("Name", "ImportPath", "Error") // These fields are always needed
+ if cfg.Mode&NeedFiles != 0 || cfg.Mode&NeedTypes != 0 {
+ addFields("Dir", "GoFiles", "IgnoredGoFiles", "IgnoredOtherFiles", "CFiles",
+ "CgoFiles", "CXXFiles", "MFiles", "HFiles", "FFiles", "SFiles",
+ "SwigFiles", "SwigCXXFiles", "SysoFiles")
+ if cfg.Tests {
+ addFields("TestGoFiles", "XTestGoFiles")
+ }
+ }
+ if cfg.Mode&NeedTypes != 0 {
+ // CompiledGoFiles seems to be required for the test case TestCgoNoSyntax,
+ // even when -compiled isn't passed in.
+ // TODO(#52435): Should we make the test ask for -compiled, or automatically
+ // request CompiledGoFiles in certain circumstances?
+ addFields("Dir", "CompiledGoFiles")
+ }
+ if cfg.Mode&NeedCompiledGoFiles != 0 {
+ addFields("Dir", "CompiledGoFiles", "Export")
+ }
+ if cfg.Mode&NeedImports != 0 {
+ // When imports are requested, DepOnly is used to distinguish between packages
+ // explicitly requested and transitive imports of those packages.
+ addFields("DepOnly", "Imports", "ImportMap")
+ if cfg.Tests {
+ addFields("TestImports", "XTestImports")
+ }
+ }
+ if cfg.Mode&NeedDeps != 0 {
+ addFields("DepOnly")
+ }
+ if usesExportData(cfg) {
+ // Request Dir in the unlikely case Export is not absolute.
+ addFields("Dir", "Export")
+ }
+ if cfg.Mode&needInternalForTest != 0 {
+ addFields("ForTest")
+ }
+ if cfg.Mode&needInternalDepsErrors != 0 {
+ addFields("DepsErrors")
+ }
+ if cfg.Mode&NeedModule != 0 {
+ addFields("Module")
+ }
+ if cfg.Mode&NeedEmbedFiles != 0 {
+ addFields("EmbedFiles")
+ }
+ if cfg.Mode&NeedEmbedPatterns != 0 {
+ addFields("EmbedPatterns")
+ }
+ return "-json=" + strings.Join(fields, ",")
+}
+
+func golistargs(cfg *Config, words []string, goVersion int) []string {
const findFlags = NeedImports | NeedTypes | NeedSyntax | NeedTypesInfo
fullargs := []string{
- "-e", "-json",
+ "-e", jsonFlag(cfg, goVersion),
fmt.Sprintf("-compiled=%t", cfg.Mode&(NeedCompiledGoFiles|NeedSyntax|NeedTypes|NeedTypesInfo|NeedTypesSizes) != 0),
fmt.Sprintf("-test=%t", cfg.Tests),
fmt.Sprintf("-export=%t", usesExportData(cfg)),
@@ -883,7 +953,7 @@ func (state *golistState) invokeGo(verb string, args ...string) (*bytes.Buffer,
if !ok {
// Catastrophic error:
// - context cancellation
- return nil, xerrors.Errorf("couldn't run 'go': %w", err)
+ return nil, fmt.Errorf("couldn't run 'go': %w", err)
}
// Old go version?
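Dropping golang.org/x/xerrors here is safe because fmt.Errorf has supported %w wrapping since Go 1.13, and errors.Is unwraps it the same way. A small demonstration (the wrapped sentinel is arbitrary, chosen only for the example):

	package example

	import (
		"errors"
		"fmt"
		"io/fs"
	)

	func main() {
		// fmt.Errorf with %w wraps the underlying error, as xerrors.Errorf did.
		err := fmt.Errorf("couldn't run 'go': %w", fs.ErrNotExist)
		fmt.Println(errors.Is(err, fs.ErrNotExist)) // true
	}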
diff --git a/go/packages/overlay_test.go b/go/packages/overlay_test.go
index f2164c274..4318739eb 100644
--- a/go/packages/overlay_test.go
+++ b/go/packages/overlay_test.go
@@ -109,8 +109,6 @@ func TestOverlayChangesTestPackageName(t *testing.T) {
testAllOrModulesParallel(t, testOverlayChangesTestPackageName)
}
func testOverlayChangesTestPackageName(t *testing.T, exporter packagestest.Exporter) {
- testenv.NeedsGo1Point(t, 16)
-
exported := packagestest.Export(t, exporter, []packagestest.Module{{
Name: "fake",
Files: map[string]interface{}{
@@ -717,8 +715,6 @@ func TestInvalidFilesBeforeOverlay(t *testing.T) {
}
func testInvalidFilesBeforeOverlay(t *testing.T, exporter packagestest.Exporter) {
- testenv.NeedsGo1Point(t, 15)
-
exported := packagestest.Export(t, exporter, []packagestest.Module{
{
Name: "golang.org/fake",
@@ -756,8 +752,6 @@ func TestInvalidFilesBeforeOverlayContains(t *testing.T) {
testAllOrModulesParallel(t, testInvalidFilesBeforeOverlayContains)
}
func testInvalidFilesBeforeOverlayContains(t *testing.T, exporter packagestest.Exporter) {
- testenv.NeedsGo1Point(t, 15)
-
exported := packagestest.Export(t, exporter, []packagestest.Module{
{
Name: "golang.org/fake",
@@ -1046,6 +1040,7 @@ func Hi() {
// This does not use go/packagestest because it needs to write a replace
// directive with an absolute path in one of the module's go.mod files.
func TestOverlaysInReplace(t *testing.T) {
+ testenv.NeedsGoPackages(t)
t.Parallel()
// Create module b.com in a temporary directory. Do not add any Go files
diff --git a/go/packages/packages.go b/go/packages/packages.go
index 2442845fe..0f1505b80 100644
--- a/go/packages/packages.go
+++ b/go/packages/packages.go
@@ -15,10 +15,12 @@ import (
"go/scanner"
"go/token"
"go/types"
+ "io"
"io/ioutil"
"log"
"os"
"path/filepath"
+ "runtime"
"strings"
"sync"
"time"
@@ -71,6 +73,13 @@ const (
// NeedTypesSizes adds TypesSizes.
NeedTypesSizes
+ // needInternalDepsErrors adds the internal deps errors field for use by gopls.
+ needInternalDepsErrors
+
+ // needInternalForTest adds the internal forTest field.
+ // Tests must also be set on the context for this field to be populated.
+ needInternalForTest
+
// typecheckCgo enables full support for type checking cgo. Requires Go 1.15+.
// Modifies CompiledGoFiles and Types, and has no effect on its own.
typecheckCgo
@@ -226,6 +235,11 @@ type driverResponse struct {
// Imports will be connected and then type and syntax information added in a
// later pass (see refine).
Packages []*Package
+
+ // GoVersion is the minor version number used by the driver
+ // (e.g. the go command on the PATH) when selecting .go files.
+ // Zero means unknown.
+ GoVersion int
}
// Load loads and returns the Go packages named by the given patterns.
@@ -249,7 +263,7 @@ func Load(cfg *Config, patterns ...string) ([]*Package, error) {
return nil, err
}
l.sizes = response.Sizes
- return l.refine(response.Roots, response.Packages...)
+ return l.refine(response)
}
// defaultDriver is a driver that implements go/packages' fallback behavior.
@@ -290,6 +304,9 @@ type Package struct {
// of the package, or while parsing or type-checking its files.
Errors []Error
+ // TypeErrors contains the subset of errors produced during type checking.
+ TypeErrors []types.Error
+
// GoFiles lists the absolute file paths of the package's Go source files.
GoFiles []string
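The new TypeErrors field exposes type-checker errors as structured types.Error values alongside the flattened Errors slice. A hypothetical consumer might use it like this (mode bits chosen so packages are type-checked from source):

	package main

	import (
		"fmt"
		"log"

		"golang.org/x/tools/go/packages"
	)

	func main() {
		cfg := &packages.Config{Mode: packages.NeedSyntax | packages.NeedTypes | packages.NeedTypesInfo}
		pkgs, err := packages.Load(cfg, "./...")
		if err != nil {
			log.Fatal(err)
		}
		for _, p := range pkgs {
			// TypeErrors carries token positions and the FileSet, unlike
			// the stringly-typed packages.Error entries.
			for _, te := range p.TypeErrors {
				fmt.Printf("%s: %s\n", te.Fset.Position(te.Pos), te.Msg)
			}
		}
	}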
@@ -403,6 +420,8 @@ func init() {
config.(*Config).modFlag = value
}
packagesinternal.TypecheckCgo = int(typecheckCgo)
+ packagesinternal.DepsErrors = int(needInternalDepsErrors)
+ packagesinternal.ForTest = int(needInternalForTest)
}
// An Error describes a problem with a package's metadata, syntax, or types.
@@ -523,6 +542,7 @@ type loaderPackage struct {
needsrc bool // load from source (Mode >= LoadTypes)
needtypes bool // type information is either requested or depended on
initial bool // package was matched by a pattern
+ goVersion int // minor version number of go command on PATH
}
// loader holds the working state of a single call to load.
@@ -609,7 +629,8 @@ func newLoader(cfg *Config) *loader {
// refine connects the supplied packages into a graph and then adds type
// and syntax information as requested by the LoadMode.
-func (ld *loader) refine(roots []string, list ...*Package) ([]*Package, error) {
+func (ld *loader) refine(response *driverResponse) ([]*Package, error) {
+ roots := response.Roots
rootMap := make(map[string]int, len(roots))
for i, root := range roots {
rootMap[root] = i
@@ -617,7 +638,7 @@ func (ld *loader) refine(roots []string, list ...*Package) ([]*Package, error) {
ld.pkgs = make(map[string]*loaderPackage)
// first pass, fixup and build the map and roots
var initial = make([]*loaderPackage, len(roots))
- for _, pkg := range list {
+ for _, pkg := range response.Packages {
rootIndex := -1
if i, found := rootMap[pkg.ID]; found {
rootIndex = i
@@ -639,6 +660,7 @@ func (ld *loader) refine(roots []string, list ...*Package) ([]*Package, error) {
Package: pkg,
needtypes: needtypes,
needsrc: needsrc,
+ goVersion: response.GoVersion,
}
ld.pkgs[lpkg.ID] = lpkg
if rootIndex >= 0 {
@@ -856,12 +878,19 @@ func (ld *loader) loadPackage(lpkg *loaderPackage) {
// never has to create a types.Package for an indirect dependency,
// which would then require that such created packages be explicitly
// inserted back into the Import graph as a final step after export data loading.
+ // (Hence this return is after the Types assignment.)
// The Diamond test exercises this case.
if !lpkg.needtypes && !lpkg.needsrc {
return
}
if !lpkg.needsrc {
- ld.loadFromExportData(lpkg)
+ if err := ld.loadFromExportData(lpkg); err != nil {
+ lpkg.Errors = append(lpkg.Errors, Error{
+ Pos: "-",
+ Msg: err.Error(),
+ Kind: UnknownError, // e.g. can't find/open/parse export data
+ })
+ }
return // not a source package, don't get syntax trees
}
@@ -893,6 +922,7 @@ func (ld *loader) loadPackage(lpkg *loaderPackage) {
case types.Error:
// from type checker
+ lpkg.TypeErrors = append(lpkg.TypeErrors, err)
errs = append(errs, Error{
Pos: err.Fset.Position(err.Pos).String(),
Msg: err.Msg,
@@ -914,11 +944,41 @@ func (ld *loader) loadPackage(lpkg *loaderPackage) {
lpkg.Errors = append(lpkg.Errors, errs...)
}
+ // If the go command on the PATH is newer than the runtime,
+ // then the go/{scanner,ast,parser,types} packages from the
+ // standard library may be unable to process the files
+ // selected by go list.
+ //
+ // There is currently no way to downgrade the effective
+ // version of the go command (see issue 52078), so we proceed
+ // with the newer go command but, in case of parse or type
+ // errors, we emit an additional diagnostic.
+ //
+ // See:
+ // - golang.org/issue/52078 (flag to set release tags)
+ // - golang.org/issue/50825 (gopls legacy version support)
+ // - golang.org/issue/55883 (go/packages confusing error)
+ //
+ // Should we assert a hard minimum of (currently) go1.16 here?
+ var runtimeVersion int
+ if _, err := fmt.Sscanf(runtime.Version(), "go1.%d", &runtimeVersion); err == nil && runtimeVersion < lpkg.goVersion {
+ defer func() {
+ if len(lpkg.Errors) > 0 {
+ appendError(Error{
+ Pos: "-",
+ Msg: fmt.Sprintf("This application uses version go1.%d of the source-processing packages but runs version go1.%d of 'go list'. It may fail to process source files that rely on newer language features. If so, rebuild the application using a newer version of Go.", runtimeVersion, lpkg.goVersion),
+ Kind: UnknownError,
+ })
+ }
+ }()
+ }
+
if ld.Config.Mode&NeedTypes != 0 && len(lpkg.CompiledGoFiles) == 0 && lpkg.ExportFile != "" {
// The config requested loading sources and types, but sources are missing.
// Add an error to the package and fall back to loading from export data.
appendError(Error{"-", fmt.Sprintf("sources missing for package %s", lpkg.ID), ParseError})
- ld.loadFromExportData(lpkg)
+ _ = ld.loadFromExportData(lpkg) // ignore any secondary errors
+
return // can't get syntax trees for this package
}
@@ -972,7 +1032,7 @@ func (ld *loader) loadPackage(lpkg *loaderPackage) {
tc := &types.Config{
Importer: importer,
- // Type-check bodies of functions only in non-initial packages.
+ // Type-check bodies of functions only in initial packages.
// Example: for import graph A->B->C and initial packages {A,C},
// we can ignore function bodies in B.
IgnoreFuncBodies: ld.Mode&NeedDeps == 0 && !lpkg.initial,
@@ -1079,7 +1139,6 @@ func (ld *loader) parseFile(filename string) (*ast.File, error) {
//
// Because files are scanned in parallel, the token.Pos
// positions of the resulting ast.Files are not ordered.
-//
func (ld *loader) parseFiles(filenames []string) ([]*ast.File, []error) {
var wg sync.WaitGroup
n := len(filenames)
@@ -1123,7 +1182,6 @@ func (ld *loader) parseFiles(filenames []string) ([]*ast.File, []error) {
// sameFile returns true if x and y have the same basename and denote
// the same file.
-//
func sameFile(x, y string) bool {
if x == y {
// It could be the case that y doesn't exist.
@@ -1144,9 +1202,10 @@ func sameFile(x, y string) bool {
return false
}
-// loadFromExportData returns type information for the specified
+// loadFromExportData ensures that type information is present for the specified
// package, loading it from an export data file on the first request.
-func (ld *loader) loadFromExportData(lpkg *loaderPackage) (*types.Package, error) {
+// On success it sets lpkg.Types to a new Package.
+func (ld *loader) loadFromExportData(lpkg *loaderPackage) error {
if lpkg.PkgPath == "" {
log.Fatalf("internal error: Package %s has no PkgPath", lpkg)
}
@@ -1157,8 +1216,8 @@ func (ld *loader) loadFromExportData(lpkg *loaderPackage) (*types.Package, error
// must be sequential. (Finer-grained locking would require
// changes to the gcexportdata API.)
//
- // The exportMu lock guards the Package.Pkg field and the
- // types.Package it points to, for each Package in the graph.
+ // The exportMu lock guards the lpkg.Types field and the
+ // types.Package it points to, for each loaderPackage in the graph.
//
// Not all accesses to Package.Pkg need to be protected by exportMu:
// graph ordering ensures that direct dependencies of source
@@ -1167,18 +1226,18 @@ func (ld *loader) loadFromExportData(lpkg *loaderPackage) (*types.Package, error
defer ld.exportMu.Unlock()
if tpkg := lpkg.Types; tpkg != nil && tpkg.Complete() {
- return tpkg, nil // cache hit
+ return nil // cache hit
}
lpkg.IllTyped = true // fail safe
if lpkg.ExportFile == "" {
// Errors while building export data will have been printed to stderr.
- return nil, fmt.Errorf("no export data file")
+ return fmt.Errorf("no export data file")
}
f, err := os.Open(lpkg.ExportFile)
if err != nil {
- return nil, err
+ return err
}
defer f.Close()
@@ -1190,7 +1249,7 @@ func (ld *loader) loadFromExportData(lpkg *loaderPackage) (*types.Package, error
// queries.)
r, err := gcexportdata.NewReader(f)
if err != nil {
- return nil, fmt.Errorf("reading %s: %v", lpkg.ExportFile, err)
+ return fmt.Errorf("reading %s: %v", lpkg.ExportFile, err)
}
// Build the view.
@@ -1234,7 +1293,12 @@ func (ld *loader) loadFromExportData(lpkg *loaderPackage) (*types.Package, error
// (May modify incomplete packages in view but not create new ones.)
tpkg, err := gcexportdata.Read(r, ld.Fset, view, lpkg.PkgPath)
if err != nil {
- return nil, fmt.Errorf("reading %s: %v", lpkg.ExportFile, err)
+ return fmt.Errorf("reading %s: %v", lpkg.ExportFile, err)
+ }
+ if _, ok := view["go.shape"]; ok {
+ // Account for the pseudopackage "go.shape" that gets
+ // created by generic code.
+ viewLen++
}
if viewLen != len(view) {
log.Panicf("golang.org/x/tools/go/packages: unexpected new packages during load of %s", lpkg.PkgPath)
@@ -1242,8 +1306,7 @@ func (ld *loader) loadFromExportData(lpkg *loaderPackage) (*types.Package, error
lpkg.Types = tpkg
lpkg.IllTyped = false
-
- return tpkg, nil
+ return nil
}
// impliedLoadMode returns loadMode with its dependencies.
@@ -1259,3 +1322,5 @@ func impliedLoadMode(loadMode LoadMode) LoadMode {
func usesExportData(cfg *Config) bool {
return cfg.Mode&NeedExportFile != 0 || cfg.Mode&NeedTypes != 0 && cfg.Mode&NeedDeps == 0
}
+
+var _ interface{} = io.Discard // assert build toolchain is go1.16 or later
diff --git a/go/packages/packages_test.go b/go/packages/packages_test.go
index 796edb6b7..0da72851c 100644
--- a/go/packages/packages_test.go
+++ b/go/packages/packages_test.go
@@ -2471,10 +2471,55 @@ func testIssue37098(t *testing.T, exporter packagestest.Exporter) {
}
}
+// TestIssue56632 checks that CompiledGoFiles does not contain non-go files regardless of
+// whether the NeedFiles mode bit is set.
+func TestIssue56632(t *testing.T) {
+ t.Parallel()
+ testenv.NeedsGoBuild(t)
+ testenv.NeedsTool(t, "cgo")
+
+ exported := packagestest.Export(t, packagestest.GOPATH, []packagestest.Module{{
+ Name: "golang.org/issue56632",
+ Files: map[string]interface{}{
+ "a/a.go": `package a`,
+ "a/a_cgo.go": `package a
+
+import "C"`,
+ "a/a.s": ``,
+ "a/a.c": ``,
+ }}})
+ defer exported.Cleanup()
+
+ modes := []packages.LoadMode{packages.NeedCompiledGoFiles, packages.NeedCompiledGoFiles | packages.NeedFiles, packages.NeedImports | packages.NeedCompiledGoFiles, packages.NeedImports | packages.NeedFiles | packages.NeedCompiledGoFiles}
+ for _, mode := range modes {
+ exported.Config.Mode = mode
+
+ initial, err := packages.Load(exported.Config, "golang.org/issue56632/a")
+ if err != nil {
+ t.Fatalf("failed to load package: %v", err)
+ }
+
+ if len(initial) != 1 {
+		t.Errorf("expected 1 package, got %d", len(initial))
+ }
+
+ p := initial[0]
+
+ if len(p.Errors) != 0 {
+ t.Errorf("expected no errors, got %v", p.Errors)
+ }
+
+ for _, f := range p.CompiledGoFiles {
+ if strings.HasSuffix(f, ".s") || strings.HasSuffix(f, ".c") {
+ t.Errorf("expected no non-Go CompiledGoFiles, got file %q in CompiledGoFiles", f)
+ }
+ }
+ }
+}
+
// TestInvalidFilesInXTest checks the fix for golang/go#37971 in Go 1.15.
func TestInvalidFilesInXTest(t *testing.T) { testAllOrModulesParallel(t, testInvalidFilesInXTest) }
func testInvalidFilesInXTest(t *testing.T, exporter packagestest.Exporter) {
- testenv.NeedsGo1Point(t, 15)
exported := packagestest.Export(t, exporter, []packagestest.Module{
{
Name: "golang.org/fake",
@@ -2501,7 +2546,6 @@ func testInvalidFilesInXTest(t *testing.T, exporter packagestest.Exporter) {
func TestTypecheckCgo(t *testing.T) { testAllOrModulesParallel(t, testTypecheckCgo) }
func testTypecheckCgo(t *testing.T, exporter packagestest.Exporter) {
- testenv.NeedsGo1Point(t, 15)
testenv.NeedsTool(t, "cgo")
const cgo = `package cgo
@@ -2673,8 +2717,6 @@ func TestInvalidPackageName(t *testing.T) {
}
func testInvalidPackageName(t *testing.T, exporter packagestest.Exporter) {
- testenv.NeedsGo1Point(t, 15)
-
exported := packagestest.Export(t, exporter, []packagestest.Module{{
Name: "golang.org/fake",
Files: map[string]interface{}{
@@ -2709,6 +2751,31 @@ func TestEmptyEnvironment(t *testing.T) {
}
}
+func TestPackageLoadSingleFile(t *testing.T) {
+ tmp, err := ioutil.TempDir("", "a")
+ if err != nil {
+ t.Fatal(err)
+ }
+ defer os.RemoveAll(tmp)
+
+ filename := filepath.Join(tmp, "a.go")
+
+ if err := ioutil.WriteFile(filename, []byte(`package main; func main() { println("hello world") }`), 0775); err != nil {
+ t.Fatal(err)
+ }
+
+ pkgs, err := packages.Load(&packages.Config{Mode: packages.LoadSyntax, Dir: tmp}, "file="+filename)
+ if err != nil {
+ t.Fatalf("could not load package: %v", err)
+ }
+ if len(pkgs) != 1 {
+ t.Fatalf("expected one package to be loaded, got %d", len(pkgs))
+ }
+ if len(pkgs[0].CompiledGoFiles) != 1 || pkgs[0].CompiledGoFiles[0] != filename {
+ t.Fatalf("expected one compiled go file (%q), got %v", filename, pkgs[0].CompiledGoFiles)
+ }
+}
+
func errorMessages(errors []packages.Error) []string {
var msgs []string
for _, err := range errors {
diff --git a/go/packages/packagestest/expect.go b/go/packages/packagestest/expect.go
index c1781e7b9..92c20a64a 100644
--- a/go/packages/packagestest/expect.go
+++ b/go/packages/packagestest/expect.go
@@ -16,7 +16,6 @@ import (
"golang.org/x/tools/go/expect"
"golang.org/x/tools/go/packages"
- "golang.org/x/tools/internal/span"
)
const (
@@ -41,24 +40,27 @@ const (
// call the Mark method to add the marker to the global set.
// You can register the "mark" method to override these in your own call to
// Expect. The bound Mark function is usable directly in your method map, so
-// exported.Expect(map[string]interface{}{"mark": exported.Mark})
+//
+// exported.Expect(map[string]interface{}{"mark": exported.Mark})
+//
// replicates the built-in behavior.
//
-// Method invocation
+// # Method invocation
//
// When invoking a method the expressions in the parameter list need to be
// converted to values to be passed to the method.
// There are a very limited set of types the arguments are allowed to be.
-// expect.Note : passed the Note instance being evaluated.
-// string : can be supplied either a string literal or an identifier.
-// int : can only be supplied an integer literal.
-// *regexp.Regexp : can only be supplied a regular expression literal
-// token.Pos : has a file position calculated as described below.
-// token.Position : has a file position calculated as described below.
-// expect.Range: has a start and end position as described below.
-// interface{} : will be passed any value
//
-// Position calculation
+// expect.Note : passed the Note instance being evaluated.
+// string : can be supplied either a string literal or an identifier.
+// int : can only be supplied an integer literal.
+// *regexp.Regexp : can only be supplied a regular expression literal
+// token.Pos : has a file position calculated as described below.
+// token.Position : has a file position calculated as described below.
+// expect.Range: has a start and end position as described below.
+// interface{} : will be passed any value
+//
+// # Position calculation
//
// There is some extra handling when a parameter is being coerced into a
// token.Pos, token.Position or Range type argument.
@@ -121,14 +123,31 @@ func (e *Exported) Expect(methods map[string]interface{}) error {
return nil
}
-// Range is a type alias for span.Range for backwards compatibility, prefer
-// using span.Range directly.
-type Range = span.Range
+// A Range represents an interval within a source file in go/token notation.
+type Range struct {
+ TokFile *token.File // non-nil
+ Start, End token.Pos // both valid and within range of TokFile
+}
+
+// A rangeSetter abstracts a variable that can be set from a Range value.
+//
+// The parameter conversion machinery will automatically construct a
+// variable of type T and call the SetRange method on its address if
+// *T implements rangeSetter. This allows alternative notations of
+// source ranges to interoperate transparently with this package.
+//
+// This type intentionally does not mention Range itself, to avoid a
+// dependency from the application's range type upon this package.
+//
+// Currently this is a secret back door for use only by gopls.
+type rangeSetter interface {
+ SetRange(file *token.File, start, end token.Pos)
+}
// Mark adds a new marker to the known set.
func (e *Exported) Mark(name string, r Range) {
if e.markers == nil {
- e.markers = make(map[string]span.Range)
+ e.markers = make(map[string]Range)
}
e.markers[name] = r
}
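The rangeSetter hook added above lets the parameter converter populate any type whose pointer has a matching SetRange method. A purely hypothetical example of such a type, shown only to illustrate the mechanism (the doc comment notes the hook is currently intended only for gopls):

	package example

	import "go/token"

	// mySpan is an invented range representation. Because *mySpan has a
	// SetRange method with this signature, the converter could populate it
	// from a marker range without importing packagestest's Range type.
	type mySpan struct {
		file       *token.File
		start, end token.Pos
	}

	func (s *mySpan) SetRange(file *token.File, start, end token.Pos) {
		s.file, s.start, s.end = file, start, end
	}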
@@ -218,22 +237,22 @@ func (e *Exported) getMarkers() error {
return nil
}
// set markers early so that we don't call getMarkers again from Expect
- e.markers = make(map[string]span.Range)
+ e.markers = make(map[string]Range)
return e.Expect(map[string]interface{}{
markMethod: e.Mark,
})
}
var (
- noteType = reflect.TypeOf((*expect.Note)(nil))
- identifierType = reflect.TypeOf(expect.Identifier(""))
- posType = reflect.TypeOf(token.Pos(0))
- positionType = reflect.TypeOf(token.Position{})
- rangeType = reflect.TypeOf(span.Range{})
- spanType = reflect.TypeOf(span.Span{})
- fsetType = reflect.TypeOf((*token.FileSet)(nil))
- regexType = reflect.TypeOf((*regexp.Regexp)(nil))
- exportedType = reflect.TypeOf((*Exported)(nil))
+ noteType = reflect.TypeOf((*expect.Note)(nil))
+ identifierType = reflect.TypeOf(expect.Identifier(""))
+ posType = reflect.TypeOf(token.Pos(0))
+ positionType = reflect.TypeOf(token.Position{})
+ rangeType = reflect.TypeOf(Range{})
+ rangeSetterType = reflect.TypeOf((*rangeSetter)(nil)).Elem()
+ fsetType = reflect.TypeOf((*token.FileSet)(nil))
+ regexType = reflect.TypeOf((*regexp.Regexp)(nil))
+ exportedType = reflect.TypeOf((*Exported)(nil))
)
// converter converts from a marker's argument parsed from the comment to
@@ -292,17 +311,16 @@ func (e *Exported) buildConverter(pt reflect.Type) (converter, error) {
}
return reflect.ValueOf(r), remains, nil
}, nil
- case pt == spanType:
+ case reflect.PtrTo(pt).AssignableTo(rangeSetterType):
+ // (*pt).SetRange method exists: call it.
return func(n *expect.Note, args []interface{}) (reflect.Value, []interface{}, error) {
r, remains, err := e.rangeConverter(n, args)
if err != nil {
return reflect.Value{}, nil, err
}
- spn, err := r.Span()
- if err != nil {
- return reflect.Value{}, nil, err
- }
- return reflect.ValueOf(spn), remains, nil
+ v := reflect.New(pt)
+ v.Interface().(rangeSetter).SetRange(r.TokFile, r.Start, r.End)
+ return v.Elem(), remains, nil
}, nil
case pt == identifierType:
return func(n *expect.Note, args []interface{}) (reflect.Value, []interface{}, error) {
@@ -405,9 +423,10 @@ func (e *Exported) buildConverter(pt reflect.Type) (converter, error) {
}
}
-func (e *Exported) rangeConverter(n *expect.Note, args []interface{}) (span.Range, []interface{}, error) {
+func (e *Exported) rangeConverter(n *expect.Note, args []interface{}) (Range, []interface{}, error) {
+ tokFile := e.ExpectFileSet.File(n.Pos)
if len(args) < 1 {
- return span.Range{}, nil, fmt.Errorf("missing argument")
+ return Range{}, nil, fmt.Errorf("missing argument")
}
arg := args[0]
args = args[1:]
@@ -416,37 +435,62 @@ func (e *Exported) rangeConverter(n *expect.Note, args []interface{}) (span.Rang
// handle the special identifiers
switch arg {
case eofIdentifier:
- // end of file identifier, look up the current file
- f := e.ExpectFileSet.File(n.Pos)
- eof := f.Pos(f.Size())
- return span.Range{FileSet: e.ExpectFileSet, Start: eof, End: token.NoPos}, args, nil
+ // end of file identifier
+ eof := tokFile.Pos(tokFile.Size())
+ return newRange(tokFile, eof, eof), args, nil
default:
// look up a marker by name
mark, ok := e.markers[string(arg)]
if !ok {
- return span.Range{}, nil, fmt.Errorf("cannot find marker %v", arg)
+ return Range{}, nil, fmt.Errorf("cannot find marker %v", arg)
}
return mark, args, nil
}
case string:
start, end, err := expect.MatchBefore(e.ExpectFileSet, e.FileContents, n.Pos, arg)
if err != nil {
- return span.Range{}, nil, err
+ return Range{}, nil, err
}
- if start == token.NoPos {
- return span.Range{}, nil, fmt.Errorf("%v: pattern %s did not match", e.ExpectFileSet.Position(n.Pos), arg)
+ if !start.IsValid() {
+ return Range{}, nil, fmt.Errorf("%v: pattern %s did not match", e.ExpectFileSet.Position(n.Pos), arg)
}
- return span.Range{FileSet: e.ExpectFileSet, Start: start, End: end}, args, nil
+ return newRange(tokFile, start, end), args, nil
case *regexp.Regexp:
start, end, err := expect.MatchBefore(e.ExpectFileSet, e.FileContents, n.Pos, arg)
if err != nil {
- return span.Range{}, nil, err
+ return Range{}, nil, err
}
- if start == token.NoPos {
- return span.Range{}, nil, fmt.Errorf("%v: pattern %s did not match", e.ExpectFileSet.Position(n.Pos), arg)
+ if !start.IsValid() {
+ return Range{}, nil, fmt.Errorf("%v: pattern %s did not match", e.ExpectFileSet.Position(n.Pos), arg)
}
- return span.Range{FileSet: e.ExpectFileSet, Start: start, End: end}, args, nil
+ return newRange(tokFile, start, end), args, nil
default:
- return span.Range{}, nil, fmt.Errorf("cannot convert %v to pos", arg)
+ return Range{}, nil, fmt.Errorf("cannot convert %v to pos", arg)
+ }
+}
+
+// newRange creates a new Range from a token.File and two valid positions within it.
+func newRange(file *token.File, start, end token.Pos) Range {
+ fileBase := file.Base()
+ fileEnd := fileBase + file.Size()
+ if !start.IsValid() {
+ panic("invalid start token.Pos")
+ }
+ if !end.IsValid() {
+ panic("invalid end token.Pos")
+ }
+ if int(start) < fileBase || int(start) > fileEnd {
+ panic(fmt.Sprintf("invalid start: %d not in [%d, %d]", start, fileBase, fileEnd))
+ }
+ if int(end) < fileBase || int(end) > fileEnd {
+ panic(fmt.Sprintf("invalid end: %d not in [%d, %d]", end, fileBase, fileEnd))
+ }
+ if start > end {
+ panic("invalid start: greater than end")
+ }
+ return Range{
+ TokFile: file,
+ Start: start,
+ End: end,
}
}
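newRange asserts that both positions lie within [Base, Base+Size] of the token.File. A small sketch of that invariant for a whole-file range (file name and size are arbitrary):

	package example

	import (
		"fmt"
		"go/token"
	)

	func main() {
		fset := token.NewFileSet()
		f := fset.AddFile("a.go", -1, 20) // an arbitrary 20-byte file
		start, end := f.Pos(0), f.Pos(20) // whole-file range
		// Both ends lie in [Base, Base+Size], so newRange would accept them.
		fmt.Println(int(start) == f.Base(), int(end) == f.Base()+f.Size()) // true true
	}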
diff --git a/go/packages/packagestest/expect_test.go b/go/packages/packagestest/expect_test.go
index 2587f580b..46d96d61f 100644
--- a/go/packages/packagestest/expect_test.go
+++ b/go/packages/packagestest/expect_test.go
@@ -10,7 +10,6 @@ import (
"golang.org/x/tools/go/expect"
"golang.org/x/tools/go/packages/packagestest"
- "golang.org/x/tools/internal/span"
)
func TestExpect(t *testing.T) {
@@ -43,7 +42,7 @@ func TestExpect(t *testing.T) {
}
},
"directNote": func(n *expect.Note) {},
- "range": func(r span.Range) {
+ "range": func(r packagestest.Range) {
if r.Start == token.NoPos || r.Start == 0 {
t.Errorf("Range had no valid starting position")
}
diff --git a/go/packages/packagestest/export.go b/go/packages/packagestest/export.go
index d792c3c3d..b687a44fb 100644
--- a/go/packages/packagestest/export.go
+++ b/go/packages/packagestest/export.go
@@ -9,7 +9,7 @@ By changing the exporter used, you can create projects for multiple build
systems from the same description, and run the same tests on them in many
cases.
-Example
+# Example
As an example of packagestest use, consider the following test that runs
the 'go list' command on the specified modules:
@@ -60,7 +60,6 @@ Running the test with verbose output will print:
main_test.go:36: 'go list gopher.example/...' with Modules mode layout:
gopher.example/repoa/a
gopher.example/repob/b
-
*/
package packagestest
@@ -80,9 +79,7 @@ import (
"golang.org/x/tools/go/expect"
"golang.org/x/tools/go/packages"
- "golang.org/x/tools/internal/span"
"golang.org/x/tools/internal/testenv"
- "golang.org/x/xerrors"
)
var (
@@ -131,7 +128,7 @@ type Exported struct {
primary string // the first non GOROOT module that was exported
written map[string]map[string]string // the full set of exported files
notes []*expect.Note // The list of expectations extracted from go source files
- markers map[string]span.Range // The set of markers extracted from go source files
+ markers map[string]Range // The set of markers extracted from go source files
}
// Exporter implementations are responsible for converting from the generic description of some
@@ -248,7 +245,7 @@ func Export(t testing.TB, exporter Exporter, modules []Module) *Exported {
switch value := value.(type) {
case Writer:
if err := value(fullpath); err != nil {
- if xerrors.Is(err, ErrUnsupported) {
+ if errors.Is(err, ErrUnsupported) {
t.Skip(err)
}
t.Fatal(err)
@@ -340,7 +337,7 @@ func Symlink(source string) Writer {
mode := os.ModePerm
if err == nil {
mode = stat.Mode()
- } else if !xerrors.Is(err, os.ErrNotExist) {
+ } else if !errors.Is(err, os.ErrNotExist) {
// We couldn't open the source, but it might exist. We don't expect to be
// able to portably create a symlink to a file we can't see.
return symlinkErr
@@ -452,17 +449,19 @@ func copyFile(dest, source string, perm os.FileMode) error {
// GroupFilesByModules attempts to map directories to the modules within each directory.
// This function assumes that the folder is structured in the following way:
-// - dir
-// - primarymod
-// - .go files
-// - packages
-// - go.mod (optional)
-// - modules
-// - repoa
-// - mod1
-// - .go files
-// - packages
-// - go.mod (optional)
+//
+// dir/
+// primarymod/
+// *.go files
+// packages
+// go.mod (optional)
+// modules/
+// repoa/
+// mod1/
+// *.go files
+// packages
+// go.mod (optional)
+//
// It scans the directory tree anchored at root and adds a Copy writer to the
// map for every file found.
// This is to enable the common case in tests where you have a full copy of the
diff --git a/go/packages/packagestest/gopath.go b/go/packages/packagestest/gopath.go
index 54016859b..d56f523ed 100644
--- a/go/packages/packagestest/gopath.go
+++ b/go/packages/packagestest/gopath.go
@@ -12,26 +12,33 @@ import (
// GOPATH is the exporter that produces GOPATH layouts.
// Each "module" is put in it's own GOPATH entry to help test complex cases.
// Given the two files
-// golang.org/repoa#a/a.go
-// golang.org/repob#b/b.go
+//
+// golang.org/repoa#a/a.go
+// golang.org/repob#b/b.go
+//
// You would get the directory layout
-// /sometemporarydirectory
-// ├── repoa
-// │ └── src
-// │ └── golang.org
-// │ └── repoa
-// │ └── a
-// │ └── a.go
-// └── repob
-// └── src
-// └── golang.org
-// └── repob
-// └── b
-// └── b.go
+//
+// /sometemporarydirectory
+// ├── repoa
+// │ └── src
+// │ └── golang.org
+// │ └── repoa
+// │ └── a
+// │ └── a.go
+// └── repob
+// └── src
+// └── golang.org
+// └── repob
+// └── b
+// └── b.go
+//
// GOPATH would be set to
-// /sometemporarydirectory/repoa;/sometemporarydirectory/repob
+//
+// /sometemporarydirectory/repoa;/sometemporarydirectory/repob
+//
// and the working directory would be
-// /sometemporarydirectory/repoa/src
+//
+// /sometemporarydirectory/repoa/src
var GOPATH = gopath{}
func init() {
diff --git a/go/packages/packagestest/modules.go b/go/packages/packagestest/modules.go
index 2c4356747..69a6c935d 100644
--- a/go/packages/packagestest/modules.go
+++ b/go/packages/packagestest/modules.go
@@ -23,20 +23,25 @@ import (
// Each "repository" is put in it's own module, and the module file generated
// will have replace directives for all other modules.
// Given the two files
-// golang.org/repoa#a/a.go
-// golang.org/repob#b/b.go
+//
+// golang.org/repoa#a/a.go
+// golang.org/repob#b/b.go
+//
// You would get the directory layout
-// /sometemporarydirectory
-// ├── repoa
-// │ ├── a
-// │ │ └── a.go
-// │ └── go.mod
-// └── repob
-// ├── b
-// │ └── b.go
-// └── go.mod
+//
+// /sometemporarydirectory
+// ├── repoa
+// │ ├── a
+// │ │ └── a.go
+// │ └── go.mod
+// └── repob
+// ├── b
+// │ └── b.go
+// └── go.mod
+//
// and the working directory would be
-// /sometemporarydirectory/repoa
+//
+// /sometemporarydirectory/repoa
var Modules = modules{}
type modules struct{}
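
For context on how the exporters above are used, here is a minimal sketch of a test that exports one module under the Modules layout and then loads it through the pre-populated exported.Config (the module name and file contents are placeholders, and the pattern follows the Example section referenced earlier):

	package sketch

	import (
		"testing"

		"golang.org/x/tools/go/packages"
		"golang.org/x/tools/go/packages/packagestest"
	)

	func TestExportSketch(t *testing.T) {
		// One module containing a single package; Files values may be file
		// contents (as here) or a Writer such as packagestest.Copy.
		exported := packagestest.Export(t, packagestest.Modules, []packagestest.Module{{
			Name: "gopher.example/repo",
			Files: map[string]interface{}{
				"a/a.go": "package a\n",
			},
		}})
		defer exported.Cleanup()

		// exported.Config carries the working directory and environment for
		// the chosen layout, so packages.Load sees the exported files.
		exported.Config.Mode = packages.NeedName
		pkgs, err := packages.Load(exported.Config, "gopher.example/repo/...")
		if err != nil {
			t.Fatal(err)
		}
		for _, pkg := range pkgs {
			t.Log(pkg.PkgPath)
		}
	}
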
diff --git a/go/packages/packagestest/modules_test.go b/go/packages/packagestest/modules_test.go
index 6f627b1e5..de290ead9 100644
--- a/go/packages/packagestest/modules_test.go
+++ b/go/packages/packagestest/modules_test.go
@@ -9,11 +9,9 @@ import (
"testing"
"golang.org/x/tools/go/packages/packagestest"
- "golang.org/x/tools/internal/testenv"
)
func TestModulesExport(t *testing.T) {
- testenv.NeedsGo1Point(t, 11)
exported := packagestest.Export(t, packagestest.Modules, testdata)
defer exported.Cleanup()
// Check that the cfg contains all the right bits
diff --git a/go/pointer/analysis.go b/go/pointer/analysis.go
index 0abb04dd8..e3c85ede4 100644
--- a/go/pointer/analysis.go
+++ b/go/pointer/analysis.go
@@ -16,6 +16,7 @@ import (
"runtime"
"runtime/debug"
"sort"
+ "strings"
"golang.org/x/tools/go/callgraph"
"golang.org/x/tools/go/ssa"
@@ -46,7 +47,6 @@ const (
//
// (Note: most variables called 'obj' are not *objects but nodeids
// such that a.nodes[obj].obj != nil.)
-//
type object struct {
// flags is a bitset of the node type (ot*) flags defined above.
flags uint32
@@ -59,8 +59,8 @@ type object struct {
//
// ssa.Value for an object allocated by an SSA operation.
// types.Type for an rtype instance object or *rtype-tagged object.
- // string for an instrinsic object, e.g. the array behind os.Args.
- // nil for an object allocated by an instrinsic.
+ // string for an intrinsic object, e.g. the array behind os.Args.
+ // nil for an object allocated by an intrinsic.
// (cgn provides the identity of the intrinsic.)
data interface{}
@@ -82,7 +82,6 @@ type nodeid uint32
//
// Nodes that are pointed-to locations ("labels") have an enclosing
// object (see analysis.enclosingObject).
-//
type node struct {
// If non-nil, this node is the start of an object
// (addressable memory location).
@@ -215,7 +214,6 @@ func (a *analysis) computeTrackBits() {
//
// Pointer analysis of a transitively closed well-typed program should
// always succeed. An error can occur only due to an internal bug.
-//
func Analyze(config *Config) (result *Result, err error) {
if config.Mains == nil {
return nil, fmt.Errorf("no main/test packages to analyze (check $GOROOT/$GOPATH)")
@@ -361,7 +359,6 @@ func Analyze(config *Config) (result *Result, err error) {
// callEdge is called for each edge in the callgraph.
// calleeid is the callee's object node (has otFunction flag).
-//
func (a *analysis) callEdge(caller *cgnode, site *callsite, calleeid nodeid) {
obj := a.nodes[calleeid].obj
if obj.flags&otFunction == 0 {
@@ -381,12 +378,27 @@ func (a *analysis) callEdge(caller *cgnode, site *callsite, calleeid nodeid) {
fmt.Fprintf(a.log, "\tcall edge %s -> %s\n", site, callee)
}
- // Warn about calls to non-intrinsic external functions.
+ // Warn about calls to functions that are handled unsoundly.
// TODO(adonovan): de-dup these messages.
- if fn := callee.fn; fn.Blocks == nil && a.findIntrinsic(fn) == nil {
+ fn := callee.fn
+
+ // Warn about calls to non-intrinsic external functions.
+ if fn.Blocks == nil && a.findIntrinsic(fn) == nil {
a.warnf(site.pos(), "unsound call to unknown intrinsic: %s", fn)
a.warnf(fn.Pos(), " (declared here)")
}
+
+ // Warn about calls to generic function bodies.
+ if fn.TypeParams().Len() > 0 && len(fn.TypeArgs()) == 0 {
+ a.warnf(site.pos(), "unsound call to generic function body: %s (build with ssa.InstantiateGenerics)", fn)
+ a.warnf(fn.Pos(), " (declared here)")
+ }
+
+ // Warn about calls to instantiation wrappers of generics functions.
+ if fn.Origin() != nil && strings.HasPrefix(fn.Synthetic, "instantiation wrapper ") {
+ a.warnf(site.pos(), "unsound call to instantiation wrapper of generic: %s (build with ssa.InstantiateGenerics)", fn)
+ a.warnf(fn.Pos(), " (declared here)")
+ }
}
// dumpSolution writes the PTS solution to the specified file.
@@ -394,7 +406,6 @@ func (a *analysis) callEdge(caller *cgnode, site *callsite, calleeid nodeid) {
// It only dumps the nodes that existed before solving. The order in
// which solver-created nodes are created depends on pre-solver
// optimization, so we can't include them in the cross-check.
-//
func (a *analysis) dumpSolution(filename string, N int) {
f, err := os.Create(filename)
if err != nil {
@@ -422,7 +433,6 @@ func (a *analysis) dumpSolution(filename string, N int) {
// showCounts logs the size of the constraint system. A typical
// optimized distribution is 65% copy, 13% load, 11% addr, 5%
// offsetAddr, 4% store, 2% others.
-//
func (a *analysis) showCounts() {
if a.log != nil {
counts := make(map[reflect.Type]int)
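
The new warnings above fire when the analysis encounters generic function bodies or instantiation wrappers, which happens when the SSA program was built without instantiating generics. A rough sketch of a build setup that avoids them (the package pattern is a placeholder and error handling is minimal):

	package main

	import (
		"fmt"

		"golang.org/x/tools/go/packages"
		"golang.org/x/tools/go/pointer"
		"golang.org/x/tools/go/ssa"
		"golang.org/x/tools/go/ssa/ssautil"
	)

	func main() {
		cfg := &packages.Config{Mode: packages.LoadAllSyntax}
		pkgs, err := packages.Load(cfg, "example.com/cmd/hello") // hypothetical main package
		if err != nil {
			panic(err)
		}

		// InstantiateGenerics builds instantiated bodies for generic functions,
		// which the pointer analysis needs to avoid the "unsound call" warnings.
		prog, ssaPkgs := ssautil.AllPackages(pkgs, ssa.InstantiateGenerics)
		prog.Build()

		mains := ssautil.MainPackages(ssaPkgs)
		result, err := pointer.Analyze(&pointer.Config{Mains: mains, BuildCallGraph: true})
		if err != nil {
			panic(err)
		}
		fmt.Println(len(result.Warnings), "warnings")
	}
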
diff --git a/go/pointer/api.go b/go/pointer/api.go
index 2a13a6781..8c9a8c775 100644
--- a/go/pointer/api.go
+++ b/go/pointer/api.go
@@ -28,7 +28,11 @@ type Config struct {
// dependencies of any main package may still affect the
// analysis result, because they contribute runtime types and
// thus methods.
+ //
// TODO(adonovan): investigate whether this is desirable.
+ //
+ // Calls to generic functions will be unsound unless packages
+ // are built using the ssa.InstantiateGenerics builder mode.
Mains []*ssa.Package
// Reflection determines whether to handle reflection
@@ -93,7 +97,7 @@ func (c *Config) AddQuery(v ssa.Value) {
c.Queries[v] = struct{}{}
}
-// AddQuery adds v to Config.IndirectQueries.
+// AddIndirectQuery adds v to Config.IndirectQueries.
// Precondition: CanPoint(v.Type().Underlying().(*types.Pointer).Elem()).
func (c *Config) AddIndirectQuery(v ssa.Value) {
if c.IndirectQueries == nil {
@@ -128,9 +132,10 @@ func (c *Config) AddIndirectQuery(v ssa.Value) {
// before analysis has finished has undefined behavior.
//
// Example:
-// // given v, which represents a function call to 'fn() (int, []*T)', and
-// // 'type T struct { F *int }', the following query will access the field F.
-// c.AddExtendedQuery(v, "x[1][0].F")
+//
+// // given v, which represents a function call to 'fn() (int, []*T)', and
+// // 'type T struct { F *int }', the following query will access the field F.
+// c.AddExtendedQuery(v, "x[1][0].F")
func (c *Config) AddExtendedQuery(v ssa.Value, query string) (*Pointer, error) {
ops, _, err := parseExtendedQuery(v.Type(), query)
if err != nil {
@@ -160,7 +165,6 @@ type Warning struct {
// A Result contains the results of a pointer analysis.
//
// See Config for how to request the various Result components.
-//
type Result struct {
CallGraph *callgraph.Graph // discovered call graph
Queries map[ssa.Value]Pointer // pts(v) for each v in Config.Queries.
@@ -172,7 +176,6 @@ type Result struct {
//
// A Pointer doesn't have a unique type because pointers of distinct
// types may alias the same object.
-//
type Pointer struct {
a *analysis
n nodeid
@@ -223,7 +226,6 @@ func (s PointsToSet) Labels() []*Label {
// map value is the PointsToSet for pointers of that type.
//
// The result is empty unless CanHaveDynamicTypes(T).
-//
func (s PointsToSet) DynamicTypes() *typeutil.Map {
var tmap typeutil.Map
tmap.SetHasher(s.a.hasher)
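
Tying the Config methods above together, a short sketch of registering a query and reading back its points-to set after analysis (querySketch is illustrative; mains and v are assumed to come from an SSA program built elsewhere):

	package sketch

	import (
		"fmt"

		"golang.org/x/tools/go/pointer"
		"golang.org/x/tools/go/ssa"
	)

	// querySketch registers a query for a pointer-like value before analysis
	// and then prints the labels in its points-to set afterwards.
	func querySketch(mains []*ssa.Package, v ssa.Value) error {
		conf := &pointer.Config{Mains: mains}
		conf.AddQuery(v) // v must satisfy pointer.CanPoint(v.Type())

		result, err := pointer.Analyze(conf)
		if err != nil {
			return err
		}
		for _, label := range result.Queries[v].PointsTo().Labels() {
			fmt.Printf("%s may point to %s\n", v.Name(), label)
		}
		return nil
	}
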
diff --git a/go/pointer/callgraph.go b/go/pointer/callgraph.go
index 48e152e4a..0b7aba52a 100644
--- a/go/pointer/callgraph.go
+++ b/go/pointer/callgraph.go
@@ -39,7 +39,6 @@ func (n *cgnode) String() string {
// it is implicitly context-sensitive.
// callsites never represent calls to built-ins;
// they are handled as intrinsics.
-//
type callsite struct {
targets nodeid // pts(·) contains objects for dynamically called functions
instr ssa.CallInstruction // the call instruction; nil for synthetic/intrinsic
diff --git a/go/pointer/doc.go b/go/pointer/doc.go
index e317cf5c3..aca343b88 100644
--- a/go/pointer/doc.go
+++ b/go/pointer/doc.go
@@ -3,7 +3,6 @@
// license that can be found in the LICENSE file.
/*
-
Package pointer implements Andersen's analysis, an inclusion-based
pointer analysis algorithm first described in (Andersen, 1994).
@@ -22,8 +21,7 @@ demonstrates both of these features. Clients should not request more
information than they need since it may increase the cost of the
analysis significantly.
-
-CLASSIFICATION
+# CLASSIFICATION
Our algorithm is INCLUSION-BASED: the points-to sets for x and y will
be related by pts(y) ⊇ pts(x) if the program contains the statement
@@ -44,7 +42,9 @@ of their calling context.
It has a CONTEXT-SENSITIVE HEAP: objects are named by both allocation
site and context, so the objects returned by two distinct calls to f:
- func f() *T { return new(T) }
+
+ func f() *T { return new(T) }
+
are distinguished up to the limits of the calling context.
It is a WHOLE PROGRAM analysis: it requires SSA-form IR for the
@@ -52,16 +52,14 @@ complete Go program and summaries for native code.
See the (Hind, PASTE'01) survey paper for an explanation of these terms.
-
-SOUNDNESS
+# SOUNDNESS
The analysis is fully sound when invoked on pure Go programs that do not
use reflection or unsafe.Pointer conversions. In other words, if there
is any possible execution of the program in which pointer P may point to
object O, the analysis will report that fact.
-
-REFLECTION
+# REFLECTION
By default, the "reflect" library is ignored by the analysis, as if all
its functions were no-ops, but if the client enables the Reflection flag,
@@ -77,17 +75,18 @@ Most but not all reflection operations are supported.
In particular, addressable reflect.Values are not yet implemented, so
operations such as (reflect.Value).Set have no analytic effect.
-
-UNSAFE POINTER CONVERSIONS
+# UNSAFE POINTER CONVERSIONS
The pointer analysis makes no attempt to understand aliasing between the
operand x and result y of an unsafe.Pointer conversion:
- y = (*T)(unsafe.Pointer(x))
+
+ y = (*T)(unsafe.Pointer(x))
+
It is as if the conversion allocated an entirely new object:
- y = new(T)
+ y = new(T)
-NATIVE CODE
+# NATIVE CODE
The analysis cannot model the aliasing effects of functions written in
languages other than Go, such as runtime intrinsics in C or assembly, or
@@ -100,7 +99,7 @@ effects of native code.
------------------------------------------------------------------------
-IMPLEMENTATION
+# IMPLEMENTATION
The remaining documentation is intended for package maintainers and
pointer analysis specialists. Maintainers should have a solid
@@ -118,8 +117,7 @@ operations.) This improves the traction of presolver optimisations,
but imposes certain restrictions, e.g. potential context sensitivity
is limited since all variants must be created a priori.
-
-TERMINOLOGY
+# TERMINOLOGY
A type is said to be "pointer-like" if it is a reference to an object.
Pointer-like types include pointers and also interfaces, maps, channels,
@@ -134,8 +132,7 @@ It means: for each node index p in pts(src), the node index p+offset is
in pts(dst). Similarly *dst+offset=src is used for store constraints
and dst=src+offset for offset-address constraints.
-
-NODES
+# NODES
Nodes are the key datastructure of the analysis, and have a dual role:
they represent both constraint variables (equivalence classes of
@@ -166,8 +163,7 @@ simple edges (or copy constraints) represent value flow. Complex
edges (load, store, etc) trigger the creation of new simple edges
during the solving phase.
-
-OBJECTS
+# OBJECTS
Conceptually, an "object" is a contiguous sequence of nodes denoting
an addressable location: something that a pointer can point to. The
@@ -175,12 +171,12 @@ first node of an object has a non-nil obj field containing information
about the allocation: its size, context, and ssa.Value.
Objects include:
- - functions and globals;
- - variable allocations in the stack frame or heap;
- - maps, channels and slices created by calls to make();
- - allocations to construct an interface;
- - allocations caused by conversions, e.g. []byte(str).
- - arrays allocated by calls to append();
+ - functions and globals;
+ - variable allocations in the stack frame or heap;
+ - maps, channels and slices created by calls to make();
+ - allocations to construct an interface;
+ - allocations caused by conversions, e.g. []byte(str).
+ - arrays allocated by calls to append();
Many objects have no Go types. For example, the func, map and chan type
kinds in Go are all varieties of pointers, but their respective objects
@@ -198,14 +194,13 @@ of the empty type struct{}. (All arrays are treated as if of length 1,
so there are no empty arrays. The empty tuple is never address-taken,
so is never an object.)
-
-TAGGED OBJECTS
+# TAGGED OBJECTS
A tagged object has the following layout:
- T -- obj.flags ⊇ {otTagged}
- v
- ...
+ T -- obj.flags ⊇ {otTagged}
+ v
+ ...
The T node's typ field is the dynamic type of the "payload": the value
v which follows, flattened out. The T node's obj has the otTagged
@@ -219,331 +214,353 @@ Tagged objects may be indirect (obj.flags ⊇ {otIndirect}) meaning that
the value v is not of type T but *T; this is used only for
reflect.Values that represent lvalues. (These are not implemented yet.)
-
-ANALYSIS ABSTRACTION OF EACH TYPE
+# ANALYSIS ABSTRACTION OF EACH TYPE
Variables of the following "scalar" types may be represented by a
single node: basic types, pointers, channels, maps, slices, 'func'
pointers, interfaces.
-Pointers
- Nothing to say here, oddly.
-
-Basic types (bool, string, numbers, unsafe.Pointer)
- Currently all fields in the flattening of a type, including
- non-pointer basic types such as int, are represented in objects and
- values. Though non-pointer nodes within values are uninteresting,
- non-pointer nodes in objects may be useful (if address-taken)
- because they permit the analysis to deduce, in this example,
-
- var s struct{ ...; x int; ... }
- p := &s.x
-
- that p points to s.x. If we ignored such object fields, we could only
- say that p points somewhere within s.
-
- All other basic types are ignored. Expressions of these types have
- zero nodeid, and fields of these types within aggregate other types
- are omitted.
-
- unsafe.Pointers are not modelled as pointers, so a conversion of an
- unsafe.Pointer to *T is (unsoundly) treated equivalent to new(T).
-
-Channels
- An expression of type 'chan T' is a kind of pointer that points
- exclusively to channel objects, i.e. objects created by MakeChan (or
- reflection).
-
- 'chan T' is treated like *T.
- *ssa.MakeChan is treated as equivalent to new(T).
- *ssa.Send and receive (*ssa.UnOp(ARROW)) and are equivalent to store
- and load.
-
-Maps
- An expression of type 'map[K]V' is a kind of pointer that points
- exclusively to map objects, i.e. objects created by MakeMap (or
- reflection).
-
- map K[V] is treated like *M where M = struct{k K; v V}.
- *ssa.MakeMap is equivalent to new(M).
- *ssa.MapUpdate is equivalent to *y=x where *y and x have type M.
- *ssa.Lookup is equivalent to y=x.v where x has type *M.
-
-Slices
- A slice []T, which dynamically resembles a struct{array *T, len, cap int},
- is treated as if it were just a *T pointer; the len and cap fields are
- ignored.
-
- *ssa.MakeSlice is treated like new([1]T): an allocation of a
- singleton array.
- *ssa.Index on a slice is equivalent to a load.
- *ssa.IndexAddr on a slice returns the address of the sole element of the
- slice, i.e. the same address.
- *ssa.Slice is treated as a simple copy.
-
-Functions
- An expression of type 'func...' is a kind of pointer that points
- exclusively to function objects.
-
- A function object has the following layout:
-
- identity -- typ:*types.Signature; obj.flags ⊇ {otFunction}
- params_0 -- (the receiver, if a method)
- ...
- params_n-1
- results_0
- ...
- results_m-1
-
- There may be multiple function objects for the same *ssa.Function
- due to context-sensitive treatment of some functions.
-
- The first node is the function's identity node.
- Associated with every callsite is a special "targets" variable,
- whose pts() contains the identity node of each function to which
- the call may dispatch. Identity words are not otherwise used during
- the analysis, but we construct the call graph from the pts()
- solution for such nodes.
-
- The following block of contiguous nodes represents the flattened-out
- types of the parameters ("P-block") and results ("R-block") of the
- function object.
-
- The treatment of free variables of closures (*ssa.FreeVar) is like
- that of global variables; it is not context-sensitive.
- *ssa.MakeClosure instructions create copy edges to Captures.
-
- A Go value of type 'func' (i.e. a pointer to one or more functions)
- is a pointer whose pts() contains function objects. The valueNode()
- for an *ssa.Function returns a singleton for that function.
-
-Interfaces
- An expression of type 'interface{...}' is a kind of pointer that
- points exclusively to tagged objects. All tagged objects pointed to
- by an interface are direct (the otIndirect flag is clear) and
- concrete (the tag type T is not itself an interface type). The
- associated ssa.Value for an interface's tagged objects may be an
- *ssa.MakeInterface instruction, or nil if the tagged object was
- created by an instrinsic (e.g. reflection).
-
- Constructing an interface value causes generation of constraints for
- all of the concrete type's methods; we can't tell a priori which
- ones may be called.
-
- TypeAssert y = x.(T) is implemented by a dynamic constraint
- triggered by each tagged object O added to pts(x): a typeFilter
- constraint if T is an interface type, or an untag constraint if T is
- a concrete type. A typeFilter tests whether O.typ implements T; if
- so, O is added to pts(y). An untagFilter tests whether O.typ is
- assignable to T,and if so, a copy edge O.v -> y is added.
-
- ChangeInterface is a simple copy because the representation of
- tagged objects is independent of the interface type (in contrast
- to the "method tables" approach used by the gc runtime).
-
- y := Invoke x.m(...) is implemented by allocating contiguous P/R
- blocks for the callsite and adding a dynamic rule triggered by each
- tagged object added to pts(x). The rule adds param/results copy
- edges to/from each discovered concrete method.
-
- (Q. Why do we model an interface as a pointer to a pair of type and
- value, rather than as a pair of a pointer to type and a pointer to
- value?
- A. Control-flow joins would merge interfaces ({T1}, {V1}) and ({T2},
- {V2}) to make ({T1,T2}, {V1,V2}), leading to the infeasible and
- type-unsafe combination (T1,V2). Treating the value and its concrete
- type as inseparable makes the analysis type-safe.)
-
-reflect.Value
- A reflect.Value is modelled very similar to an interface{}, i.e. as
- a pointer exclusively to tagged objects, but with two generalizations.
-
- 1) a reflect.Value that represents an lvalue points to an indirect
- (obj.flags ⊇ {otIndirect}) tagged object, which has a similar
- layout to an tagged object except that the value is a pointer to
- the dynamic type. Indirect tagged objects preserve the correct
- aliasing so that mutations made by (reflect.Value).Set can be
- observed.
-
- Indirect objects only arise when an lvalue is derived from an
- rvalue by indirection, e.g. the following code:
-
- type S struct { X T }
- var s S
- var i interface{} = &s // i points to a *S-tagged object (from MakeInterface)
- v1 := reflect.ValueOf(i) // v1 points to same *S-tagged object as i
- v2 := v1.Elem() // v2 points to an indirect S-tagged object, pointing to s
- v3 := v2.FieldByName("X") // v3 points to an indirect int-tagged object, pointing to s.X
- v3.Set(y) // pts(s.X) ⊇ pts(y)
-
- Whether indirect or not, the concrete type of the tagged object
- corresponds to the user-visible dynamic type, and the existence
- of a pointer is an implementation detail.
-
- (NB: indirect tagged objects are not yet implemented)
-
- 2) The dynamic type tag of a tagged object pointed to by a
- reflect.Value may be an interface type; it need not be concrete.
-
- This arises in code such as this:
- tEface := reflect.TypeOf(new(interface{}).Elem() // interface{}
- eface := reflect.Zero(tEface)
- pts(eface) is a singleton containing an interface{}-tagged
- object. That tagged object's payload is an interface{} value,
- i.e. the pts of the payload contains only concrete-tagged
- objects, although in this example it's the zero interface{} value,
- so its pts is empty.
-
-reflect.Type
- Just as in the real "reflect" library, we represent a reflect.Type
- as an interface whose sole implementation is the concrete type,
- *reflect.rtype. (This choice is forced on us by go/types: clients
- cannot fabricate types with arbitrary method sets.)
-
- rtype instances are canonical: there is at most one per dynamic
- type. (rtypes are in fact large structs but since identity is all
- that matters, we represent them by a single node.)
-
- The payload of each *rtype-tagged object is an *rtype pointer that
- points to exactly one such canonical rtype object. We exploit this
- by setting the node.typ of the payload to the dynamic type, not
- '*rtype'. This saves us an indirection in each resolution rule. As
- an optimisation, *rtype-tagged objects are canonicalized too.
+Pointers:
+
+Nothing to say here, oddly.
+
+Basic types (bool, string, numbers, unsafe.Pointer):
+
+Currently all fields in the flattening of a type, including
+non-pointer basic types such as int, are represented in objects and
+values. Though non-pointer nodes within values are uninteresting,
+non-pointer nodes in objects may be useful (if address-taken)
+because they permit the analysis to deduce, in this example,
+
+ var s struct{ ...; x int; ... }
+ p := &s.x
+
+that p points to s.x. If we ignored such object fields, we could only
+say that p points somewhere within s.
+
+All other basic types are ignored. Expressions of these types have
+zero nodeid, and fields of these types within other aggregate types
+are omitted.
+
+unsafe.Pointers are not modelled as pointers, so a conversion of an
+unsafe.Pointer to *T is (unsoundly) treated equivalent to new(T).
+
+Channels:
+
+An expression of type 'chan T' is a kind of pointer that points
+exclusively to channel objects, i.e. objects created by MakeChan (or
+reflection).
+
+'chan T' is treated like *T.
+*ssa.MakeChan is treated as equivalent to new(T).
+*ssa.Send and receive (*ssa.UnOp(ARROW)) are equivalent to store
+and load.
+
+Maps:
+
+An expression of type 'map[K]V' is a kind of pointer that points
+exclusively to map objects, i.e. objects created by MakeMap (or
+reflection).
+
+map[K]V is treated like *M where M = struct{k K; v V}.
+*ssa.MakeMap is equivalent to new(M).
+*ssa.MapUpdate is equivalent to *y=x where *y and x have type M.
+*ssa.Lookup is equivalent to y=x.v where x has type *M.
+
+Slices:
+
+A slice []T, which dynamically resembles a struct{array *T, len, cap int},
+is treated as if it were just a *T pointer; the len and cap fields are
+ignored.
+
+*ssa.MakeSlice is treated like new([1]T): an allocation of a
+singleton array.
+
+*ssa.Index on a slice is equivalent to a load.
+*ssa.IndexAddr on a slice returns the address of the sole element of the
+slice, i.e. the same address.
+*ssa.Slice is treated as a simple copy.
+
+Functions:
+
+An expression of type 'func...' is a kind of pointer that points
+exclusively to function objects.
+
+A function object has the following layout:
+
+ identity -- typ:*types.Signature; obj.flags ⊇ {otFunction}
+ params_0 -- (the receiver, if a method)
+ ...
+ params_n-1
+ results_0
+ ...
+ results_m-1
+
+There may be multiple function objects for the same *ssa.Function
+due to context-sensitive treatment of some functions.
+
+The first node is the function's identity node.
+Associated with every callsite is a special "targets" variable,
+whose pts() contains the identity node of each function to which
+the call may dispatch. Identity words are not otherwise used during
+the analysis, but we construct the call graph from the pts()
+solution for such nodes.
+
+The following block of contiguous nodes represents the flattened-out
+types of the parameters ("P-block") and results ("R-block") of the
+function object.
+
+The treatment of free variables of closures (*ssa.FreeVar) is like
+that of global variables; it is not context-sensitive.
+*ssa.MakeClosure instructions create copy edges to Captures.
+
+A Go value of type 'func' (i.e. a pointer to one or more functions)
+is a pointer whose pts() contains function objects. The valueNode()
+for an *ssa.Function returns a singleton for that function.
+
+Interfaces:
+
+An expression of type 'interface{...}' is a kind of pointer that
+points exclusively to tagged objects. All tagged objects pointed to
+by an interface are direct (the otIndirect flag is clear) and
+concrete (the tag type T is not itself an interface type). The
+associated ssa.Value for an interface's tagged objects may be an
+*ssa.MakeInterface instruction, or nil if the tagged object was
+created by an intrinsic (e.g. reflection).
+
+Constructing an interface value causes generation of constraints for
+all of the concrete type's methods; we can't tell a priori which
+ones may be called.
+
+TypeAssert y = x.(T) is implemented by a dynamic constraint
+triggered by each tagged object O added to pts(x): a typeFilter
+constraint if T is an interface type, or an untag constraint if T is
+a concrete type. A typeFilter tests whether O.typ implements T; if
+so, O is added to pts(y). An untagFilter tests whether O.typ is
+assignable to T, and if so, a copy edge O.v -> y is added.
+
+ChangeInterface is a simple copy because the representation of
+tagged objects is independent of the interface type (in contrast
+to the "method tables" approach used by the gc runtime).
+
+y := Invoke x.m(...) is implemented by allocating contiguous P/R
+blocks for the callsite and adding a dynamic rule triggered by each
+tagged object added to pts(x). The rule adds param/results copy
+edges to/from each discovered concrete method.
+
+(Q. Why do we model an interface as a pointer to a pair of type and
+value, rather than as a pair of a pointer to type and a pointer to
+value?
+A. Control-flow joins would merge interfaces ({T1}, {V1}) and ({T2},
+{V2}) to make ({T1,T2}, {V1,V2}), leading to the infeasible and
+type-unsafe combination (T1,V2). Treating the value and its concrete
+type as inseparable makes the analysis type-safe.)
+
+Type parameters:
+
+Type parameters are not directly supported by the analysis.
+Calls to generic functions will be left as if they had empty bodies.
+Users of the package are expected to use the ssa.InstantiateGenerics
+builder mode when building code that uses or depends on code
+containing generics.
+
+reflect.Value:
+
+A reflect.Value is modelled very similarly to an interface{}, i.e. as
+a pointer exclusively to tagged objects, but with two generalizations.
+
+1. a reflect.Value that represents an lvalue points to an indirect
+(obj.flags ⊇ {otIndirect}) tagged object, which has a similar
+layout to a tagged object except that the value is a pointer to
+the dynamic type. Indirect tagged objects preserve the correct
+aliasing so that mutations made by (reflect.Value).Set can be
+observed.
+
+Indirect objects only arise when an lvalue is derived from an
+rvalue by indirection, e.g. the following code:
+
+ type S struct { X T }
+ var s S
+ var i interface{} = &s // i points to a *S-tagged object (from MakeInterface)
+ v1 := reflect.ValueOf(i) // v1 points to same *S-tagged object as i
+ v2 := v1.Elem() // v2 points to an indirect S-tagged object, pointing to s
+ v3 := v2.FieldByName("X") // v3 points to an indirect int-tagged object, pointing to s.X
+ v3.Set(y) // pts(s.X) ⊇ pts(y)
+
+Whether indirect or not, the concrete type of the tagged object
+corresponds to the user-visible dynamic type, and the existence
+of a pointer is an implementation detail.
+
+(NB: indirect tagged objects are not yet implemented)
+
+2. The dynamic type tag of a tagged object pointed to by a
+reflect.Value may be an interface type; it need not be concrete.
+
+This arises in code such as this:
+
+ tEface := reflect.TypeOf(new(interface{})).Elem() // interface{}
+ eface := reflect.Zero(tEface)
+
+pts(eface) is a singleton containing an interface{}-tagged
+object. That tagged object's payload is an interface{} value,
+i.e. the pts of the payload contains only concrete-tagged
+objects, although in this example it's the zero interface{} value,
+so its pts is empty.
+
+reflect.Type:
+
+Just as in the real "reflect" library, we represent a reflect.Type
+as an interface whose sole implementation is the concrete type,
+*reflect.rtype. (This choice is forced on us by go/types: clients
+cannot fabricate types with arbitrary method sets.)
+
+rtype instances are canonical: there is at most one per dynamic
+type. (rtypes are in fact large structs but since identity is all
+that matters, we represent them by a single node.)
+
+The payload of each *rtype-tagged object is an *rtype pointer that
+points to exactly one such canonical rtype object. We exploit this
+by setting the node.typ of the payload to the dynamic type, not
+'*rtype'. This saves us an indirection in each resolution rule. As
+an optimisation, *rtype-tagged objects are canonicalized too.
Aggregate types:
Aggregate types are treated as if all directly contained
aggregates are recursively flattened out.
-Structs
- *ssa.Field y = x.f creates a simple edge to y from x's node at f's offset.
+Structs:
+
- *ssa.FieldAddr y = &x->f requires a dynamic closure rule to create
- simple edges for each struct discovered in pts(x).
+*ssa.Field y = x.f creates a simple edge to y from x's node at f's offset.
- The nodes of a struct consist of a special 'identity' node (whose
- type is that of the struct itself), followed by the nodes for all
- the struct's fields, recursively flattened out. A pointer to the
- struct is a pointer to its identity node. That node allows us to
- distinguish a pointer to a struct from a pointer to its first field.
+*ssa.FieldAddr y = &x->f requires a dynamic closure rule to create
- Field offsets are logical field offsets (plus one for the identity
- node), so the sizes of the fields can be ignored by the analysis.
+simple edges for each struct discovered in pts(x).
- (The identity node is non-traditional but enables the distinction
- described above, which is valuable for code comprehension tools.
- Typical pointer analyses for C, whose purpose is compiler
- optimization, must soundly model unsafe.Pointer (void*) conversions,
- and this requires fidelity to the actual memory layout using physical
- field offsets.)
+The nodes of a struct consist of a special 'identity' node (whose
+type is that of the struct itself), followed by the nodes for all
+the struct's fields, recursively flattened out. A pointer to the
+struct is a pointer to its identity node. That node allows us to
+distinguish a pointer to a struct from a pointer to its first field.
- *ssa.Field y = x.f creates a simple edge to y from x's node at f's offset.
+Field offsets are logical field offsets (plus one for the identity
+node), so the sizes of the fields can be ignored by the analysis.
- *ssa.FieldAddr y = &x->f requires a dynamic closure rule to create
- simple edges for each struct discovered in pts(x).
+(The identity node is non-traditional but enables the distinction
+described above, which is valuable for code comprehension tools.
+Typical pointer analyses for C, whose purpose is compiler
+optimization, must soundly model unsafe.Pointer (void*) conversions,
+and this requires fidelity to the actual memory layout using physical
+field offsets.)
-Arrays
- We model an array by an identity node (whose type is that of the
- array itself) followed by a node representing all the elements of
- the array; the analysis does not distinguish elements with different
- indices. Effectively, an array is treated like struct{elem T}, a
- load y=x[i] like y=x.elem, and a store x[i]=y like x.elem=y; the
- index i is ignored.
+*ssa.Field y = x.f creates a simple edge to y from x's node at f's offset.
- A pointer to an array is pointer to its identity node. (A slice is
- also a pointer to an array's identity node.) The identity node
- allows us to distinguish a pointer to an array from a pointer to one
- of its elements, but it is rather costly because it introduces more
- offset constraints into the system. Furthermore, sound treatment of
- unsafe.Pointer would require us to dispense with this node.
+*ssa.FieldAddr y = &x->f requires a dynamic closure rule to create
- Arrays may be allocated by Alloc, by make([]T), by calls to append,
- and via reflection.
+simple edges for each struct discovered in pts(x).
-Tuples (T, ...)
- Tuples are treated like structs with naturally numbered fields.
- *ssa.Extract is analogous to *ssa.Field.
+
+Arrays:
+
- However, tuples have no identity field since by construction, they
- cannot be address-taken.
+We model an array by an identity node (whose type is that of the
+array itself) followed by a node representing all the elements of
+the array; the analysis does not distinguish elements with different
+indices. Effectively, an array is treated like struct{elem T}, a
+load y=x[i] like y=x.elem, and a store x[i]=y like x.elem=y; the
+index i is ignored.
+
+A pointer to an array is a pointer to its identity node. (A slice is
+also a pointer to an array's identity node.) The identity node
+allows us to distinguish a pointer to an array from a pointer to one
+of its elements, but it is rather costly because it introduces more
+offset constraints into the system. Furthermore, sound treatment of
+unsafe.Pointer would require us to dispense with this node.
-FUNCTION CALLS
+Arrays may be allocated by Alloc, by make([]T), by calls to append,
+and via reflection.
- There are three kinds of function call:
- (1) static "call"-mode calls of functions.
- (2) dynamic "call"-mode calls of functions.
- (3) dynamic "invoke"-mode calls of interface methods.
- Cases 1 and 2 apply equally to methods and standalone functions.
+
+Tuples (T, ...):
+
- Static calls.
- A static call consists three steps:
- - finding the function object of the callee;
- - creating copy edges from the actual parameter value nodes to the
- P-block in the function object (this includes the receiver if
- the callee is a method);
- - creating copy edges from the R-block in the function object to
- the value nodes for the result of the call.
+Tuples are treated like structs with naturally numbered fields.
+*ssa.Extract is analogous to *ssa.Field.
- A static function call is little more than two struct value copies
- between the P/R blocks of caller and callee:
+However, tuples have no identity field since by construction, they
+cannot be address-taken.
- callee.P = caller.P
- caller.R = callee.R
+
+# FUNCTION CALLS
+
- Context sensitivity
+There are three kinds of function call:
+ 1. static "call"-mode calls of functions.
+ 2. dynamic "call"-mode calls of functions.
+ 3. dynamic "invoke"-mode calls of interface methods.
- Static calls (alone) may be treated context sensitively,
- i.e. each callsite may cause a distinct re-analysis of the
- callee, improving precision. Our current context-sensitivity
- policy treats all intrinsics and getter/setter methods in this
- manner since such functions are small and seem like an obvious
- source of spurious confluences, though this has not yet been
- evaluated.
+Cases 1 and 2 apply equally to methods and standalone functions.
- Dynamic function calls
+
+Static calls:
+
- Dynamic calls work in a similar manner except that the creation of
- copy edges occurs dynamically, in a similar fashion to a pair of
- struct copies in which the callee is indirect:
+A static call consists of three steps:
+ - finding the function object of the callee;
+ - creating copy edges from the actual parameter value nodes to the
+ P-block in the function object (this includes the receiver if
+ the callee is a method);
+ - creating copy edges from the R-block in the function object to
+ the value nodes for the result of the call.
- callee->P = caller.P
- caller.R = callee->R
+A static function call is little more than two struct value copies
+between the P/R blocks of caller and callee:
- (Recall that the function object's P- and R-blocks are contiguous.)
+ callee.P = caller.P
+ caller.R = callee.R
- Interface method invocation
+
+Context sensitivity: Static calls (alone) may be treated context sensitively,
+i.e. each callsite may cause a distinct re-analysis of the
+callee, improving precision. Our current context-sensitivity
+policy treats all intrinsics and getter/setter methods in this
+manner since such functions are small and seem like an obvious
+source of spurious confluences, though this has not yet been
+evaluated.
- For invoke-mode calls, we create a params/results block for the
- callsite and attach a dynamic closure rule to the interface. For
- each new tagged object that flows to the interface, we look up
- the concrete method, find its function object, and connect its P/R
- blocks to the callsite's P/R blocks, adding copy edges to the graph
- during solving.
+
+Dynamic function calls:
+
- Recording call targets
+Dynamic calls work in a similar manner except that the creation of
+copy edges occurs dynamically, in a similar fashion to a pair of
+struct copies in which the callee is indirect:
- The analysis notifies its clients of each callsite it encounters,
- passing a CallSite interface. Among other things, the CallSite
- contains a synthetic constraint variable ("targets") whose
- points-to solution includes the set of all function objects to
- which the call may dispatch.
+ callee->P = caller.P
+ caller.R = callee->R
- It is via this mechanism that the callgraph is made available.
- Clients may also elect to be notified of callgraph edges directly;
- internally this just iterates all "targets" variables' pts(·)s.
+(Recall that the function object's P- and R-blocks are contiguous.)
+
+Interface method invocation:
+
-PRESOLVER
+For invoke-mode calls, we create a params/results block for the
+callsite and attach a dynamic closure rule to the interface. For
+each new tagged object that flows to the interface, we look up
+the concrete method, find its function object, and connect its P/R
+blocks to the callsite's P/R blocks, adding copy edges to the graph
+during solving.
+
+Recording call targets:
+
+The analysis notifies its clients of each callsite it encounters,
+passing a CallSite interface. Among other things, the CallSite
+contains a synthetic constraint variable ("targets") whose
+points-to solution includes the set of all function objects to
+which the call may dispatch.
+
+It is via this mechanism that the callgraph is made available.
+Clients may also elect to be notified of callgraph edges directly;
+internally this just iterates all "targets" variables' pts(·)s.
+
+# PRESOLVER
We implement Hash-Value Numbering (HVN), a pre-solver constraint
optimization described in Hardekopf & Lin, SAS'07. This is documented
in more detail in hvn.go. We intend to add its cousins HR and HU in
future.
-
-SOLVER
+# SOLVER
The solver is currently a naive Andersen-style implementation; it does
not perform online cycle detection, though we plan to add solver
@@ -565,8 +582,7 @@ range, and thus the efficiency of the representation.
Partly thanks to avoiding map iteration, the execution of the solver is
100% deterministic, a great help during debugging.
-
-FURTHER READING
+# FURTHER READING
Andersen, L. O. 1994. Program analysis and specialization for the C
programming language. Ph.D. dissertation. DIKU, University of
@@ -605,6 +621,5 @@ for scaling points-to analysis. In Proceedings of the ACM SIGPLAN 2000
conference on Programming language design and implementation (PLDI '00).
ACM, New York, NY, USA, 47-56. DOI=10.1145/349299.349310
http://doi.acm.org/10.1145/349299.349310
-
*/
package pointer // import "golang.org/x/tools/go/pointer"
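
As a complement to the "Recording call targets" section above, here is a brief sketch of consuming the call graph that the solver records (printEdges is illustrative; mains is assumed to be an SSA program built elsewhere with ssa.InstantiateGenerics, per the Type parameters note):

	package sketch

	import (
		"fmt"

		"golang.org/x/tools/go/callgraph"
		"golang.org/x/tools/go/pointer"
		"golang.org/x/tools/go/ssa"
	)

	// printEdges runs the analysis with call graph construction enabled and
	// visits every edge discovered via the "targets" variables.
	func printEdges(mains []*ssa.Package) error {
		result, err := pointer.Analyze(&pointer.Config{
			Mains:          mains,
			BuildCallGraph: true,
		})
		if err != nil {
			return err
		}
		return callgraph.GraphVisitEdges(result.CallGraph, func(e *callgraph.Edge) error {
			fmt.Printf("%s -> %s\n", e.Caller.Func, e.Callee.Func)
			return nil
		})
	}
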
diff --git a/go/pointer/example_test.go b/go/pointer/example_test.go
index 673de7a49..00017df6e 100644
--- a/go/pointer/example_test.go
+++ b/go/pointer/example_test.go
@@ -19,7 +19,6 @@ import (
// obtain a conservative call-graph of a Go program.
// It also shows how to compute the points-to set of a variable,
// in this case, (C).f's ch parameter.
-//
func Example() {
const myprog = `
package main
@@ -62,7 +61,7 @@ func main() {
}
// Create SSA-form program representation.
- prog := ssautil.CreateProgram(iprog, 0)
+ prog := ssautil.CreateProgram(iprog, ssa.InstantiateGenerics)
mainPkg := prog.Package(iprog.Created[0].Pkg)
// Build SSA code for bodies of all functions in the whole program.
diff --git a/go/pointer/gen.go b/go/pointer/gen.go
index ef5108a5b..5e527f21a 100644
--- a/go/pointer/gen.go
+++ b/go/pointer/gen.go
@@ -14,9 +14,11 @@ import (
"fmt"
"go/token"
"go/types"
+ "strings"
"golang.org/x/tools/go/callgraph"
"golang.org/x/tools/go/ssa"
+ "golang.org/x/tools/internal/typeparams"
)
var (
@@ -37,7 +39,6 @@ func (a *analysis) nextNode() nodeid {
// analytically uninteresting.
//
// comment explains the origin of the nodes, as a debugging aid.
-//
func (a *analysis) addNodes(typ types.Type, comment string) nodeid {
id := a.nextNode()
for _, fi := range a.flatten(typ) {
@@ -56,7 +57,6 @@ func (a *analysis) addNodes(typ types.Type, comment string) nodeid {
//
// comment explains the origin of the nodes, as a debugging aid.
// subelement indicates the subelement, e.g. ".a.b[*].c".
-//
func (a *analysis) addOneNode(typ types.Type, comment string, subelement *fieldInfo) nodeid {
id := a.nextNode()
a.nodes = append(a.nodes, &node{typ: typ, subelement: subelement, solve: new(solverState)})
@@ -69,7 +69,6 @@ func (a *analysis) addOneNode(typ types.Type, comment string, subelement *fieldI
// setValueNode associates node id with the value v.
// cgn identifies the context iff v is a local variable.
-//
func (a *analysis) setValueNode(v ssa.Value, id nodeid, cgn *cgnode) {
if cgn != nil {
a.localval[v] = id
@@ -125,7 +124,6 @@ func (a *analysis) setValueNode(v ssa.Value, id nodeid, cgn *cgnode) {
//
// obj is the start node of the object, from a prior call to nextNode.
// Its size, flags and optional data will be updated.
-//
func (a *analysis) endObject(obj nodeid, cgn *cgnode, data interface{}) *object {
// Ensure object is non-empty by padding;
// the pad will be the object node.
@@ -150,7 +148,6 @@ func (a *analysis) endObject(obj nodeid, cgn *cgnode, data interface{}) *object
//
// For a context-sensitive contour, callersite identifies the sole
// callsite; for shared contours, caller is nil.
-//
func (a *analysis) makeFunctionObject(fn *ssa.Function, callersite *callsite) nodeid {
if a.log != nil {
fmt.Fprintf(a.log, "\t---- makeFunctionObject %s\n", fn)
@@ -190,7 +187,6 @@ func (a *analysis) makeTagged(typ types.Type, cgn *cgnode, data interface{}) nod
// payload points to the sole rtype object for T.
//
// TODO(adonovan): move to reflect.go; it's part of the solver really.
-//
func (a *analysis) makeRtype(T types.Type) nodeid {
if v := a.rtypes.At(T); v != nil {
return v.(nodeid)
@@ -210,7 +206,7 @@ func (a *analysis) makeRtype(T types.Type) nodeid {
return id
}
-// rtypeValue returns the type of the *reflect.rtype-tagged object obj.
+// rtypeTaggedValue returns the type of the *reflect.rtype-tagged object obj.
func (a *analysis) rtypeTaggedValue(obj nodeid) types.Type {
tDyn, t, _ := a.taggedValue(obj)
if tDyn != a.reflectRtypePtr {
@@ -222,7 +218,6 @@ func (a *analysis) rtypeTaggedValue(obj nodeid) types.Type {
// valueNode returns the id of the value node for v, creating it (and
// the association) as needed. It may return zero for uninteresting
// values containing no pointers.
-//
func (a *analysis) valueNode(v ssa.Value) nodeid {
// Value nodes for locals are created en masse by genFunc.
if id, ok := a.localval[v]; ok {
@@ -247,7 +242,6 @@ func (a *analysis) valueNode(v ssa.Value) nodeid {
// valueOffsetNode ascertains the node for tuple/struct value v,
// then returns the node for its subfield #index.
-//
func (a *analysis) valueOffsetNode(v ssa.Value, index int) nodeid {
id := a.valueNode(v)
if id == 0 {
@@ -264,7 +258,6 @@ func (a *analysis) isTaggedObject(obj nodeid) bool {
// taggedValue returns the dynamic type tag, the (first node of the)
// payload, and the indirect flag of the tagged object starting at id.
// Panic ensues if !isTaggedObject(id).
-//
func (a *analysis) taggedValue(obj nodeid) (tDyn types.Type, v nodeid, indirect bool) {
n := a.nodes[obj]
flags := n.obj.flags
@@ -276,7 +269,6 @@ func (a *analysis) taggedValue(obj nodeid) (tDyn types.Type, v nodeid, indirect
// funcParams returns the first node of the params (P) block of the
// function whose object node (obj.flags&otFunction) is id.
-//
func (a *analysis) funcParams(id nodeid) nodeid {
n := a.nodes[id]
if n.obj == nil || n.obj.flags&otFunction == 0 {
@@ -287,7 +279,6 @@ func (a *analysis) funcParams(id nodeid) nodeid {
// funcResults returns the first node of the results (R) block of the
// function whose object node (obj.flags&otFunction) is id.
-//
func (a *analysis) funcResults(id nodeid) nodeid {
n := a.nodes[id]
if n.obj == nil || n.obj.flags&otFunction == 0 {
@@ -305,7 +296,6 @@ func (a *analysis) funcResults(id nodeid) nodeid {
// copy creates a constraint of the form dst = src.
// sizeof is the width (in logical fields) of the copied type.
-//
func (a *analysis) copy(dst, src nodeid, sizeof uint32) {
if src == dst || sizeof == 0 {
return // trivial
@@ -337,7 +327,6 @@ func (a *analysis) addressOf(T types.Type, id, obj nodeid) {
// load creates a load constraint of the form dst = src[offset].
// offset is the pointer offset in logical fields.
// sizeof is the width (in logical fields) of the loaded type.
-//
func (a *analysis) load(dst, src nodeid, offset, sizeof uint32) {
if dst == 0 {
return // load of non-pointerlike value
@@ -358,7 +347,6 @@ func (a *analysis) load(dst, src nodeid, offset, sizeof uint32) {
// store creates a store constraint of the form dst[offset] = src.
// offset is the pointer offset in logical fields.
// sizeof is the width (in logical fields) of the stored type.
-//
func (a *analysis) store(dst, src nodeid, offset uint32, sizeof uint32) {
if src == 0 {
return // store of non-pointerlike value
@@ -379,7 +367,6 @@ func (a *analysis) store(dst, src nodeid, offset uint32, sizeof uint32) {
// offsetAddr creates an offsetAddr constraint of the form dst = &src.#offset.
// offset is the field offset in logical fields.
// T is the type of the address.
-//
func (a *analysis) offsetAddr(T types.Type, dst, src nodeid, offset uint32) {
if !a.shouldTrack(T) {
return
@@ -398,7 +385,6 @@ func (a *analysis) offsetAddr(T types.Type, dst, src nodeid, offset uint32) {
// typeAssert creates a typeFilter or untag constraint of the form dst = src.(T):
// typeFilter for an interface, untag for a concrete type.
// The exact flag is specified as for untagConstraint.
-//
func (a *analysis) typeAssert(T types.Type, dst, src nodeid, exact bool) {
if isInterface(T) {
a.addConstraint(&typeFilterConstraint{T, dst, src})
@@ -417,7 +403,6 @@ func (a *analysis) addConstraint(c constraint) {
// copyElems generates load/store constraints for *dst = *src,
// where src and dst are slices or *arrays.
-//
func (a *analysis) copyElems(cgn *cgnode, typ types.Type, dst, src ssa.Value) {
tmp := a.addNodes(typ, "copy")
sz := a.sizeof(typ)
@@ -553,7 +538,6 @@ func (a *analysis) genBuiltinCall(instr ssa.CallInstruction, cgn *cgnode) {
// choose a policy. The current policy, rather arbitrarily, is true
// for intrinsics and accessor methods (actually: short, single-block,
// call-free functions). This is just a starting point.
-//
func (a *analysis) shouldUseContext(fn *ssa.Function) bool {
if a.findIntrinsic(fn) != nil {
return true // treat intrinsics context-sensitively
@@ -705,11 +689,13 @@ func (a *analysis) genInvoke(caller *cgnode, site *callsite, call *ssa.CallCommo
// practice it occurs rarely, so we special case for reflect.Type.)
//
// In effect we treat this:
-// var rt reflect.Type = ...
-// rt.F()
+//
+// var rt reflect.Type = ...
+// rt.F()
+//
// as this:
-// rt.(*reflect.rtype).F()
//
+// rt.(*reflect.rtype).F()
func (a *analysis) genInvokeReflectType(caller *cgnode, site *callsite, call *ssa.CallCommon, result nodeid) {
// Unpack receiver into rtype
rtype := a.addOneNode(a.reflectRtypePtr, "rtype.recv", nil)
@@ -789,13 +775,15 @@ func (a *analysis) genCall(caller *cgnode, instr ssa.CallInstruction) {
// a simple copy constraint when the sole destination is known a priori.
//
// Some SSA instructions always have singletons points-to sets:
-// Alloc, Function, Global, MakeChan, MakeClosure, MakeInterface, MakeMap, MakeSlice.
+//
+// Alloc, Function, Global, MakeChan, MakeClosure, MakeInterface, MakeMap, MakeSlice.
+//
// Others may be singletons depending on their operands:
-// FreeVar, Const, Convert, FieldAddr, IndexAddr, Slice, SliceToArrayPointer.
+//
+// FreeVar, Const, Convert, FieldAddr, IndexAddr, Slice, SliceToArrayPointer.
//
// Idempotent. Objects are created as needed, possibly via recursion
// down the SSA value graph, e.g IndexAddr(FieldAddr(Alloc))).
-//
func (a *analysis) objectNode(cgn *cgnode, v ssa.Value) nodeid {
switch v.(type) {
case *ssa.Global, *ssa.Function, *ssa.Const, *ssa.FreeVar:
@@ -992,7 +980,10 @@ func (a *analysis) genInstr(cgn *cgnode, instr ssa.Instruction) {
a.sizeof(instr.Type()))
case *ssa.Index:
- a.copy(a.valueNode(instr), 1+a.valueNode(instr.X), a.sizeof(instr.Type()))
+ _, isstring := typeparams.CoreType(instr.X.Type()).(*types.Basic)
+ if !isstring {
+ a.copy(a.valueNode(instr), 1+a.valueNode(instr.X), a.sizeof(instr.Type()))
+ }
case *ssa.Select:
recv := a.valueOffsetNode(instr, 2) // instr : (index, recvOk, recv0, ... recv_n-1)
@@ -1156,7 +1147,6 @@ func (a *analysis) makeCGNode(fn *ssa.Function, obj nodeid, callersite *callsite
// genRootCalls generates the synthetic root of the callgraph and the
// initial calls from it to the analysis scope, such as main, a test
// or a library.
-//
func (a *analysis) genRootCalls() *cgnode {
r := a.prog.NewFunction("<root>", new(types.Signature), "root of callgraph")
root := a.makeCGNode(r, 0, nil)
@@ -1217,6 +1207,19 @@ func (a *analysis) genFunc(cgn *cgnode) {
return
}
+ if fn.TypeParams().Len() > 0 && len(fn.TypeArgs()) == 0 {
+ // Body of generic function.
+ // We'll warn about calls to such functions at the end.
+ return
+ }
+
+ if strings.HasPrefix(fn.Synthetic, "instantiation wrapper ") {
+ // instantiation wrapper of a generic function.
+ // These may contain type coercions which are not currently supported.
+ // We'll warn about calls to such functions at the end.
+ return
+ }
+
if a.log != nil {
fmt.Fprintln(a.log, "; Creating nodes for local values")
}
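
The CoreType check added to the *ssa.Index case above skips operands whose core type is a basic (string) type: indexing such a value yields a byte, which is not pointer-like, so no copy constraint is needed. A small source-level illustration of the two situations the check distinguishes (illustrative only; which instruction a given expression compiles to is up to the ssa builder):

	package main

	import "fmt"

	// firstByte indexes a value whose type parameter has core type string.
	// The element is a byte, so there is nothing pointer-like to track.
	func firstByte[T ~string](s T) byte {
		return s[0]
	}

	// firstElem indexes an array of pointer-like elements; the element copy
	// must be modelled by a points-to constraint.
	func firstElem(a [2]*int) *int {
		return a[0]
	}

	func main() {
		x := 1
		fmt.Println(firstByte("hi"), *firstElem([2]*int{&x, &x}))
	}
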
diff --git a/go/pointer/hvn.go b/go/pointer/hvn.go
index 52fd479fa..ad25cdfa4 100644
--- a/go/pointer/hvn.go
+++ b/go/pointer/hvn.go
@@ -174,14 +174,14 @@ import (
// peLabel have identical points-to solutions.
//
// The numbers are allocated consecutively like so:
-// 0 not a pointer
+//
+// 0 not a pointer
// 1..N-1 addrConstraints (equals the constraint's .src field, hence sparse)
// ... offsetAddr constraints
// ... SCCs (with indirect nodes or multiple inputs)
//
// Each PE label denotes a set of pointers containing a single addr, a
// single offsetAddr, or some set of other PE labels.
-//
type peLabel int
type hvn struct {
@@ -212,7 +212,6 @@ type onodeid uint32
// the source, i.e. against the flow of values: they are dependencies.
// Implicit edges are used for SCC computation, but not for gathering
// incoming labels.
-//
type onode struct {
rep onodeid // index of representative of SCC in offline constraint graph
@@ -244,7 +243,6 @@ func (h *hvn) ref(id onodeid) onodeid {
// hvn computes pointer-equivalence labels (peLabels) using the Hash-based
// Value Numbering (HVN) algorithm described in Hardekopf & Lin, SAS'07.
-//
func (a *analysis) hvn() {
start("HVN")
@@ -455,28 +453,27 @@ func (c *invokeConstraint) presolve(h *hvn) {
// markIndirectNodes marks as indirect nodes whose points-to relations
// are not entirely captured by the offline graph, including:
//
-// (a) All address-taken nodes (including the following nodes within
-// the same object). This is described in the paper.
+// (a) All address-taken nodes (including the following nodes within
+// the same object). This is described in the paper.
//
// The most subtle cause of indirect nodes is the generation of
// store-with-offset constraints since the offline graph doesn't
// represent them. A global audit of constraint generation reveals the
// following uses of store-with-offset:
//
-// (b) genDynamicCall, for P-blocks of dynamically called functions,
-// to which dynamic copy edges will be added to them during
-// solving: from storeConstraint for standalone functions,
-// and from invokeConstraint for methods.
-// All such P-blocks must be marked indirect.
-// (c) MakeUpdate, to update the value part of a map object.
-// All MakeMap objects's value parts must be marked indirect.
-// (d) copyElems, to update the destination array.
-// All array elements must be marked indirect.
+// (b) genDynamicCall, for P-blocks of dynamically called functions,
+// to which dynamic copy edges will be added to them during
+// solving: from storeConstraint for standalone functions,
+// and from invokeConstraint for methods.
+// All such P-blocks must be marked indirect.
+// (c) MakeUpdate, to update the value part of a map object.
+// All MakeMap objects' value parts must be marked indirect.
+// (d) copyElems, to update the destination array.
+// All array elements must be marked indirect.
//
// Not all indirect marking happens here. ref() nodes are marked
// indirect at construction, and each constraint's presolve() method may
// mark additional nodes.
-//
func (h *hvn) markIndirectNodes() {
// (a) all address-taken nodes, plus all nodes following them
// within the same object, since these may be indirectly
@@ -761,7 +758,6 @@ func (h *hvn) coalesce(x, y onodeid) {
// labels assigned by the hvn, and uses it to simplify the main
// constraint graph, eliminating non-pointer nodes and duplicate
// constraints.
-//
func (h *hvn) simplify() {
// canon maps each peLabel to its canonical main node.
canon := make([]nodeid, h.label)
diff --git a/go/pointer/intrinsics.go b/go/pointer/intrinsics.go
index b7e2b1403..43bb8e8fc 100644
--- a/go/pointer/intrinsics.go
+++ b/go/pointer/intrinsics.go
@@ -159,7 +159,6 @@ func init() {
// findIntrinsic returns the constraint generation function for an
// intrinsic function fn, or nil if the function should be handled normally.
-//
func (a *analysis) findIntrinsic(fn *ssa.Function) intrinsic {
// Consult the *Function-keyed cache.
// A cached nil indicates a normal non-intrinsic function.
@@ -220,7 +219,6 @@ func (a *analysis) isReflect(fn *ssa.Function) bool {
//
// We sometimes violate condition #3 if the function creates only
// non-function labels, as the control-flow graph is still sound.
-//
func ext۰NoEffect(a *analysis, cgn *cgnode) {}
func ext۰NotYetImplemented(a *analysis, cgn *cgnode) {
diff --git a/go/pointer/labels.go b/go/pointer/labels.go
index 7d64ef6a4..5a1e1999c 100644
--- a/go/pointer/labels.go
+++ b/go/pointer/labels.go
@@ -17,15 +17,15 @@ import (
// channel, 'func', slice or interface.
//
// Labels include:
-// - functions
-// - globals
-// - tagged objects, representing interfaces and reflect.Values
-// - arrays created by conversions (e.g. []byte("foo"), []byte(s))
-// - stack- and heap-allocated variables (including composite literals)
-// - channels, maps and arrays created by make()
-// - intrinsic or reflective operations that allocate (e.g. append, reflect.New)
-// - intrinsic objects, e.g. the initial array behind os.Args.
-// - and their subelements, e.g. "alloc.y[*].z"
+// - functions
+// - globals
+// - tagged objects, representing interfaces and reflect.Values
+// - arrays created by conversions (e.g. []byte("foo"), []byte(s))
+// - stack- and heap-allocated variables (including composite literals)
+// - channels, maps and arrays created by make()
+// - intrinsic or reflective operations that allocate (e.g. append, reflect.New)
+// - intrinsic objects, e.g. the initial array behind os.Args.
+// - and their subelements, e.g. "alloc.y[*].z"
//
// Labels are so varied that they defy good generalizations;
// some have no value, no callgraph node, or no position.
@@ -33,7 +33,6 @@ import (
// maps, channels, functions, tagged objects.
//
// At most one of Value() or ReflectType() may return non-nil.
-//
type Label struct {
obj *object // the addressable memory location containing this label
subelement *fieldInfo // subelement path within obj, e.g. ".a.b[*].c"
@@ -47,7 +46,6 @@ func (l Label) Value() ssa.Value {
// ReflectType returns the type represented by this label if it is an
// reflect.rtype instance object or *reflect.rtype-tagged object.
-//
func (l Label) ReflectType() types.Type {
rtype, _ := l.obj.data.(types.Type)
return rtype
@@ -55,7 +53,6 @@ func (l Label) ReflectType() types.Type {
// Path returns the path to the subelement of the object containing
// this label. For example, ".x[*].y".
-//
func (l Label) Path() string {
return l.subelement.path()
}
@@ -79,23 +76,24 @@ func (l Label) Pos() token.Pos {
// String returns the printed form of this label.
//
// Examples: Object type:
-// x (a variable)
-// (sync.Mutex).Lock (a function)
-// convert (array created by conversion)
-// makemap (map allocated via make)
-// makechan (channel allocated via make)
-// makeinterface (tagged object allocated by makeinterface)
-// <alloc in reflect.Zero> (allocation in instrinsic)
-// sync.Mutex (a reflect.rtype instance)
-// <command-line arguments> (an intrinsic object)
+//
+// x (a variable)
+// (sync.Mutex).Lock (a function)
+// convert (array created by conversion)
+// makemap (map allocated via make)
+// makechan (channel allocated via make)
+// makeinterface (tagged object allocated by makeinterface)
+// <alloc in reflect.Zero> (allocation in intrinsic)
+// sync.Mutex (a reflect.rtype instance)
+// <command-line arguments> (an intrinsic object)
//
// Labels within compound objects have subelement paths:
-// x.y[*].z (a struct variable, x)
-// append.y[*].z (array allocated by append)
-// makeslice.y[*].z (array allocated via make)
//
-// TODO(adonovan): expose func LabelString(*types.Package, Label).
+// x.y[*].z (a struct variable, x)
+// append.y[*].z (array allocated by append)
+// makeslice.y[*].z (array allocated via make)
//
+// TODO(adonovan): expose func LabelString(*types.Package, Label).
func (l Label) String() string {
var s string
switch v := l.obj.data.(type) {
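As a usage sketch (not part of this change): clients obtain Labels from the points-to sets computed by the analysis and render them with String and Path. The helper below assumes mains is an already-built []*ssa.Package containing a main package and v is a pointer-like ssa.Value; both are placeholders.

package example // illustrative

import (
	"fmt"

	"golang.org/x/tools/go/pointer"
	"golang.org/x/tools/go/ssa"
)

// dumpLabels prints the labels that v may point to.
func dumpLabels(mains []*ssa.Package, v ssa.Value) error {
	cfg := &pointer.Config{Mains: mains}
	cfg.AddQuery(v) // the answer appears in result.Queries[v]
	result, err := pointer.Analyze(cfg)
	if err != nil {
		return err
	}
	for _, l := range result.Queries[v].PointsTo().Labels() {
		fmt.Printf("%s%s\n", l, l.Path()) // e.g. "makemap" plus ".y[*].z"
	}
	return nil
}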
diff --git a/go/pointer/opt.go b/go/pointer/opt.go
index 6defea11f..bbd411c2e 100644
--- a/go/pointer/opt.go
+++ b/go/pointer/opt.go
@@ -27,7 +27,6 @@ import "fmt"
//
// Renumbering makes the PTA log inscrutable. To aid debugging, later
// phases (e.g. HVN) must not rely on it having occurred.
-//
func (a *analysis) renumber() {
if a.log != nil {
fmt.Fprintf(a.log, "\n\n==== Renumbering\n\n")
diff --git a/go/internal/gcimporter/israce_test.go b/go/pointer/pointer_race_test.go
index 885ba1c01..d3c9b475e 100644
--- a/go/internal/gcimporter/israce_test.go
+++ b/go/pointer/pointer_race_test.go
@@ -1,12 +1,12 @@
-// Copyright 2019 The Go Authors. All rights reserved.
+// Copyright 2022 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
//go:build race
// +build race
-package gcimporter_test
+package pointer_test
func init() {
- isRace = true
+ raceEnabled = true
}
diff --git a/go/pointer/pointer_test.go b/go/pointer/pointer_test.go
index 1ac5b6c9f..1fa54f6e8 100644
--- a/go/pointer/pointer_test.go
+++ b/go/pointer/pointer_test.go
@@ -34,6 +34,7 @@ import (
"golang.org/x/tools/go/ssa"
"golang.org/x/tools/go/ssa/ssautil"
"golang.org/x/tools/go/types/typeutil"
+ "golang.org/x/tools/internal/typeparams"
)
var inputs = []string{
@@ -65,72 +66,79 @@ var inputs = []string{
// "testdata/timer.go", // TODO(adonovan): fix broken assumptions about runtime timers
}
+var raceEnabled = false
+
// Expectation grammar:
//
// @calls f -> g
//
-// A 'calls' expectation asserts that edge (f, g) appears in the
-// callgraph. f and g are notated as per Function.String(), which
-// may contain spaces (e.g. promoted method in anon struct).
+// A 'calls' expectation asserts that edge (f, g) appears in the
+// callgraph. f and g are notated as per Function.String(), which
+// may contain spaces (e.g. promoted method in anon struct).
//
// @pointsto a | b | c
//
-// A 'pointsto' expectation asserts that the points-to set of its
-// operand contains exactly the set of labels {a,b,c} notated as per
-// labelString.
+// A 'pointsto' expectation asserts that the points-to set of its
+// operand contains exactly the set of labels {a,b,c} notated as per
+// labelString.
+//
+// A 'pointsto' expectation must appear on the same line as a
+// print(x) statement; the expectation's operand is x.
//
-// A 'pointsto' expectation must appear on the same line as a
-// print(x) statement; the expectation's operand is x.
+// If one of the strings is "...", the expectation asserts that the
+// points-to set contains at least the other labels.
//
-// If one of the strings is "...", the expectation asserts that the
-// points-to set at least the other labels.
+// We use '|' because label names may contain spaces, e.g. methods
+// of anonymous structs.
//
-// We use '|' because label names may contain spaces, e.g. methods
-// of anonymous structs.
+// Assertions within generic functions are treated as a union of all
+// of the instantiations.
//
-// From a theoretical perspective, concrete types in interfaces are
-// labels too, but they are represented differently and so have a
-// different expectation, @types, below.
+// From a theoretical perspective, concrete types in interfaces are
+// labels too, but they are represented differently and so have a
+// different expectation, @types, below.
//
// @types t | u | v
//
-// A 'types' expectation asserts that the set of possible dynamic
-// types of its interface operand is exactly {t,u,v}, notated per
-// go/types.Type.String(). In other words, it asserts that the type
-// component of the interface may point to that set of concrete type
-// literals. It also works for reflect.Value, though the types
-// needn't be concrete in that case.
+// A 'types' expectation asserts that the set of possible dynamic
+// types of its interface operand is exactly {t,u,v}, notated per
+// go/types.Type.String(). In other words, it asserts that the type
+// component of the interface may point to that set of concrete type
+// literals. It also works for reflect.Value, though the types
+// needn't be concrete in that case.
//
-// A 'types' expectation must appear on the same line as a
-// print(x) statement; the expectation's operand is x.
+// A 'types' expectation must appear on the same line as a
+// print(x) statement; the expectation's operand is x.
//
-// If one of the strings is "...", the expectation asserts that the
-// interface's type may point to at least the other types.
+// If one of the strings is "...", the expectation asserts that the
+// interface's type may point to at least the other types.
//
-// We use '|' because type names may contain spaces.
+// We use '|' because type names may contain spaces.
+//
+// Assertions within generic functions are treated as a union of all
+// of the instantiations.
//
// @warning "regexp"
//
-// A 'warning' expectation asserts that the analysis issues a
-// warning that matches the regular expression within the string
-// literal.
+// A 'warning' expectation asserts that the analysis issues a
+// warning that matches the regular expression within the string
+// literal.
//
// @line id
//
-// A line directive associates the name "id" with the current
-// file:line. The string form of labels will use this id instead of
-// a file:line, making @pointsto expectations more robust against
-// perturbations in the source file.
-// (NB, anon functions still include line numbers.)
-//
+// A line directive associates the name "id" with the current
+// file:line. The string form of labels will use this id instead of
+// a file:line, making @pointsto expectations more robust against
+// perturbations in the source file.
+// (NB, anon functions still include line numbers.)
type expectation struct {
kind string // "pointsto" | "pointstoquery" | "types" | "calls" | "warning"
filepath string
linenum int // source line number, 1-based
args []string
- query string // extended query
- extended *pointer.Pointer // extended query pointer
- types []types.Type // for types
+ query string // extended query
+ extended []*pointer.Pointer // extended query pointer [per instantiation]
+ types []types.Type // for types
}
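For a concrete instance of this grammar, see testdata/typeparams.go added later in this diff; abridged, its directives attach to print probes and to trailing @calls comments like so:

type S[T any] struct{ t T }

var theSint S[int]
var theSbool S[bool]

func (s *S[T]) String() string {
	print(s) // @pointsto command-line-arguments.theSbool | command-line-arguments.theSint
	return ""
}

func Type[T any]() {
	var x *T
	print(x) // @types *int | *bool
}

func Caller[T any]() {
	var s *S[T]
	_ = s.String()
}

// @calls command-line-arguments.Caller[int] -> (*command-line-arguments.S[int]).String[int]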
func (e *expectation) String() string {
@@ -147,18 +155,43 @@ func (e *expectation) needsProbe() bool {
return e.kind == "pointsto" || e.kind == "pointstoquery" || e.kind == "types"
}
-// Find probe (call to print(x)) of same source file/line as expectation.
-func findProbe(prog *ssa.Program, probes map[*ssa.CallCommon]bool, queries map[ssa.Value]pointer.Pointer, e *expectation) (site *ssa.CallCommon, pts pointer.PointsToSet) {
+// Find probes (calls to print(x)) on the same source file/line as the expectation.
+//
+// May match multiple calls for different instantiations.
+func findProbes(prog *ssa.Program, probes map[*ssa.CallCommon]bool, e *expectation) []*ssa.CallCommon {
+ var calls []*ssa.CallCommon
for call := range probes {
pos := prog.Fset.Position(call.Pos())
if pos.Line == e.linenum && pos.Filename == e.filepath {
// TODO(adonovan): send this to test log (display only on failure).
// fmt.Printf("%s:%d: info: found probe for %s: %s\n",
// e.filepath, e.linenum, e, p.arg0) // debugging
- return call, queries[call.Args[0]].PointsTo()
+ calls = append(calls, call)
}
}
- return // e.g. analysis didn't reach this call
+ return calls
+}
+
+// Find the points-to sets of the probes (calls to print(x)).
+func probesPointTo(calls []*ssa.CallCommon, queries map[ssa.Value]pointer.Pointer) []pointer.PointsToSet {
+ ptss := make([]pointer.PointsToSet, len(calls))
+ for i, call := range calls {
+ ptss[i] = queries[call.Args[0]].PointsTo()
+ }
+ return ptss
+}
+
+// Find the types of the probes (calls to print(x)).
+// Returns an error if the type of a probe is not pointer-like.
+func probesPointToTypes(calls []*ssa.CallCommon) ([]types.Type, error) {
+ tProbes := make([]types.Type, len(calls))
+ for i, call := range calls {
+ tProbes[i] = call.Args[0].Type()
+ if !pointer.CanPoint(tProbes[i]) {
+ return nil, fmt.Errorf("expectation on non-pointerlike operand: %s", tProbes[i])
+ }
+ }
+ return tProbes, nil
}
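These three helpers are composed later in doOneInput; a condensed sketch of that usage (result is the *pointer.Result computed further down in this file):

calls := findProbes(prog, probes, e)         // every print(x) probe on e's file:line
ptss := probesPointTo(calls, result.Queries) // one points-to set per instantiation
tProbes, err := probesPointToTypes(calls)    // static types of the probed operands; err if not pointer-like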
func doOneInput(t *testing.T, input, fpath string) bool {
@@ -177,7 +210,8 @@ func doOneInput(t *testing.T, input, fpath string) bool {
}
// SSA creation + building.
- prog, ssaPkgs := ssautil.AllPackages(pkgs, ssa.SanityCheckFunctions)
+ mode := ssa.SanityCheckFunctions | ssa.InstantiateGenerics
+ prog, ssaPkgs := ssautil.AllPackages(pkgs, mode)
prog.Build()
// main underlying packages.Package.
@@ -197,12 +231,24 @@ func doOneInput(t *testing.T, input, fpath string) bool {
}
}
+ // files in mainPpkg.
+ mainFiles := make(map[*token.File]bool)
+ for _, syn := range mainPpkg.Syntax {
+ mainFiles[prog.Fset.File(syn.Pos())] = true
+ }
+
// Find all calls to the built-in print(x). Analytically,
// print is a no-op, but it's a convenient hook for testing
// the PTS of an expression, so our tests use it.
+ // Exclude generic bodies as these should be dead code for the pointer analysis.
+ // Instances of generics are included.
probes := make(map[*ssa.CallCommon]bool)
for fn := range ssautil.AllFunctions(prog) {
- if fn.Pkg == mainpkg {
+ if isGenericBody(fn) {
+ continue // skip generic bodies
+ }
+ // TODO(taking): Switch to a more principled check like fn.declaredPackage() == mainPkg if Origin is exported.
+ if fn.Pkg == mainpkg || (fn.Pkg == nil && mainFiles[prog.Fset.File(fn.Pos())]) {
for _, b := range fn.Blocks {
for _, instr := range b.Instrs {
if instr, ok := instr.(ssa.CallInstruction); ok {
@@ -311,18 +357,16 @@ func doOneInput(t *testing.T, input, fpath string) bool {
Mains: []*ssa.Package{ptrmain},
Log: &log,
}
-probeLoop:
for probe := range probes {
v := probe.Args[0]
pos := prog.Fset.Position(probe.Pos())
for _, e := range exps {
if e.linenum == pos.Line && e.filepath == pos.Filename && e.kind == "pointstoquery" {
- var err error
- e.extended, err = config.AddExtendedQuery(v, e.query)
+ extended, err := config.AddExtendedQuery(v, e.query)
if err != nil {
panic(err)
}
- continue probeLoop
+ e.extended = append(e.extended, extended)
}
}
if pointer.CanPoint(v.Type()) {
@@ -345,34 +389,42 @@ probeLoop:
// Check the expectations.
for _, e := range exps {
- var call *ssa.CallCommon
- var pts pointer.PointsToSet
- var tProbe types.Type
+ var tProbes []types.Type
+ var calls []*ssa.CallCommon
+ var ptss []pointer.PointsToSet
if e.needsProbe() {
- if call, pts = findProbe(prog, probes, result.Queries, e); call == nil {
+ calls = findProbes(prog, probes, e)
+ if len(calls) == 0 {
ok = false
e.errorf("unreachable print() statement has expectation %s", e)
continue
}
- if e.extended != nil {
- pts = e.extended.PointsTo()
+ if e.extended == nil {
+ ptss = probesPointTo(calls, result.Queries)
+ } else {
+ ptss = make([]pointer.PointsToSet, len(e.extended))
+ for i, p := range e.extended {
+ ptss[i] = p.PointsTo()
+ }
}
- tProbe = call.Args[0].Type()
- if !pointer.CanPoint(tProbe) {
+
+ var err error
+ tProbes, err = probesPointToTypes(calls)
+ if err != nil {
ok = false
- e.errorf("expectation on non-pointerlike operand: %s", tProbe)
+ e.errorf(err.Error())
continue
}
}
switch e.kind {
case "pointsto", "pointstoquery":
- if !checkPointsToExpectation(e, pts, lineMapping, prog) {
+ if !checkPointsToExpectation(e, ptss, lineMapping, prog) {
ok = false
}
case "types":
- if !checkTypesExpectation(e, pts, tProbe) {
+ if !checkTypesExpectation(e, ptss, tProbes) {
ok = false
}
@@ -417,7 +469,7 @@ func labelString(l *pointer.Label, lineMapping map[string]string, prog *ssa.Prog
return str
}
-func checkPointsToExpectation(e *expectation, pts pointer.PointsToSet, lineMapping map[string]string, prog *ssa.Program) bool {
+func checkPointsToExpectation(e *expectation, ptss []pointer.PointsToSet, lineMapping map[string]string, prog *ssa.Program) bool {
expected := make(map[string]int)
surplus := make(map[string]int)
exact := true
@@ -430,12 +482,14 @@ func checkPointsToExpectation(e *expectation, pts pointer.PointsToSet, lineMappi
}
// Find the set of labels that the probe's
// argument (x in print(x)) may point to.
- for _, label := range pts.Labels() {
- name := labelString(label, lineMapping, prog)
- if expected[name] > 0 {
- expected[name]--
- } else if exact {
- surplus[name]++
+ for _, pts := range ptss { // treat ptss as union of points-to sets.
+ for _, label := range pts.Labels() {
+ name := labelString(label, lineMapping, prog)
+ if expected[name] > 0 {
+ expected[name]--
+ } else if exact {
+ surplus[name]++
+ }
}
}
// Report multiset difference:
@@ -457,7 +511,7 @@ func checkPointsToExpectation(e *expectation, pts pointer.PointsToSet, lineMappi
return ok
}
-func checkTypesExpectation(e *expectation, pts pointer.PointsToSet, typ types.Type) bool {
+func checkTypesExpectation(e *expectation, ptss []pointer.PointsToSet, typs []types.Type) bool {
var expected typeutil.Map
var surplus typeutil.Map
exact := true
@@ -469,18 +523,26 @@ func checkTypesExpectation(e *expectation, pts pointer.PointsToSet, typ types.Ty
expected.Set(g, struct{}{})
}
- if !pointer.CanHaveDynamicTypes(typ) {
- e.errorf("@types expectation requires an interface- or reflect.Value-typed operand, got %s", typ)
+ if len(typs) != len(ptss) {
+ e.errorf("@types expectation internal error differing number of types(%d) and points to sets (%d)", len(typs), len(ptss))
return false
}
// Find the set of types that the probe's
// argument (x in print(x)) may contain.
- for _, T := range pts.DynamicTypes().Keys() {
- if expected.At(T) != nil {
- expected.Delete(T)
- } else if exact {
- surplus.Set(T, struct{}{})
+ for i := range ptss {
+ var Ts []types.Type
+ if pointer.CanHaveDynamicTypes(typs[i]) {
+ Ts = ptss[i].DynamicTypes().Keys()
+ } else {
+ Ts = append(Ts, typs[i]) // static type
+ }
+ for _, T := range Ts {
+ if expected.At(T) != nil {
+ expected.Delete(T)
+ } else if exact {
+ surplus.Set(T, struct{}{})
+ }
}
}
// Report set difference:
@@ -554,10 +616,6 @@ func TestInput(t *testing.T) {
if testing.Short() {
t.Skip("skipping in short mode; this test requires tons of memory; https://golang.org/issue/14113")
}
- if unsafe.Sizeof(unsafe.Pointer(nil)) <= 4 {
- t.Skip("skipping memory-intensive test on platform with small address space; https://golang.org/issue/14113")
- }
- ok := true
wd, err := os.Getwd()
if err != nil {
@@ -572,24 +630,44 @@ func TestInput(t *testing.T) {
fmt.Fprintf(os.Stderr, "Entering directory `%s'\n", wd)
for _, filename := range inputs {
- content, err := ioutil.ReadFile(filename)
- if err != nil {
- t.Errorf("couldn't read file '%s': %s", filename, err)
- continue
- }
+ filename := filename
+ t.Run(filename, func(t *testing.T) {
+ if filename == "testdata/a_test.go" {
+ // For some reason this particular file is way more expensive than the others.
+ if unsafe.Sizeof(unsafe.Pointer(nil)) <= 4 {
+ t.Skip("skipping memory-intensive test on platform with small address space; https://golang.org/issue/14113")
+ }
+ if raceEnabled {
+ t.Skip("skipping memory-intensive test under race detector; https://golang.org/issue/14113")
+ }
+ } else {
+ t.Parallel()
+ }
- fpath, err := filepath.Abs(filename)
- if err != nil {
- t.Errorf("couldn't get absolute path for '%s': %s", filename, err)
- }
+ content, err := ioutil.ReadFile(filename)
+ if err != nil {
+ t.Fatalf("couldn't read file '%s': %s", filename, err)
+ }
- if !doOneInput(t, string(content), fpath) {
- ok = false
- }
+ fpath, err := filepath.Abs(filename)
+ if err != nil {
+ t.Fatalf("couldn't get absolute path for '%s': %s", filename, err)
+ }
+
+ if !doOneInput(t, string(content), fpath) {
+ t.Fail()
+ }
+ })
}
- if !ok {
- t.Fail()
+}
+
+// isGenericBody returns true if fn is the body of a generic function.
+func isGenericBody(fn *ssa.Function) bool {
+ sig := fn.Signature
+ if typeparams.ForSignature(sig).Len() > 0 || typeparams.RecvTypeParams(sig).Len() > 0 {
+ return fn.Synthetic == ""
}
+ return false
}
// join joins the elements of multiset with " | "s.
@@ -616,3 +694,34 @@ func split(s, sep string) (r []string) {
}
return
}
+
+func TestTypeParam(t *testing.T) {
+ if !typeparams.Enabled {
+ t.Skip("TestTypeParamInput requires type parameters")
+ }
+ // Based on TestInput. Keep this up to date with that.
+ filename := "testdata/typeparams.go"
+
+ if testing.Short() {
+ t.Skip("skipping in short mode; this test requires tons of memory; https://golang.org/issue/14113")
+ }
+
+ wd, err := os.Getwd()
+ if err != nil {
+ t.Fatalf("os.Getwd: %s", err)
+ }
+ fmt.Fprintf(os.Stderr, "Entering directory `%s'\n", wd)
+
+ content, err := ioutil.ReadFile(filename)
+ if err != nil {
+ t.Fatalf("couldn't read file '%s': %s", filename, err)
+ }
+ fpath, err := filepath.Abs(filename)
+ if err != nil {
+ t.Errorf("couldn't get absolute path for '%s': %s", filename, err)
+ }
+
+ if !doOneInput(t, string(content), fpath) {
+ t.Fail()
+ }
+}
diff --git a/go/pointer/reflect.go b/go/pointer/reflect.go
index 7aa1a9cb8..3762dd8d4 100644
--- a/go/pointer/reflect.go
+++ b/go/pointer/reflect.go
@@ -1024,7 +1024,7 @@ func ext۰reflect۰ChanOf(a *analysis, cgn *cgnode) {
var dir reflect.ChanDir // unknown
if site := cgn.callersite; site != nil {
if c, ok := site.instr.Common().Args[0].(*ssa.Const); ok {
- v, _ := constant.Int64Val(c.Value)
+ v := c.Int64()
if 0 <= v && v <= int64(reflect.BothDir) {
dir = reflect.ChanDir(v)
}
@@ -1751,8 +1751,7 @@ func ext۰reflect۰rtype۰InOut(a *analysis, cgn *cgnode, out bool) {
index := -1
if site := cgn.callersite; site != nil {
if c, ok := site.instr.Common().Args[0].(*ssa.Const); ok {
- v, _ := constant.Int64Val(c.Value)
- index = int(v)
+ index = int(c.Int64())
}
}
a.addConstraint(&rtypeInOutConstraint{
@@ -1943,14 +1942,13 @@ func ext۰reflect۰rtype۰Method(a *analysis, cgn *cgnode) {
// types they create to ensure termination of the algorithm in cases
// where the output of a type constructor flows to its input, e.g.
//
-// func f(t reflect.Type) {
-// f(reflect.PtrTo(t))
-// }
+// func f(t reflect.Type) {
+// f(reflect.PtrTo(t))
+// }
//
// It does this by limiting the type height to k, but this still leaves
// a potentially exponential (4^k) number of types that may be
// enumerated in pathological cases.
-//
func typeHeight(T types.Type) int {
switch T := T.(type) {
case *types.Chan:
diff --git a/go/pointer/solve.go b/go/pointer/solve.go
index 0fdd098b0..7a41b78a8 100644
--- a/go/pointer/solve.go
+++ b/go/pointer/solve.go
@@ -91,7 +91,6 @@ func (a *analysis) solve() {
// and adds them to the graph, ensuring
// that new constraints are applied to pre-existing labels and
// that pre-existing constraints are applied to new labels.
-//
func (a *analysis) processNewConstraints() {
// Take the slice of new constraints.
// (May grow during call to solveConstraints.)
@@ -151,7 +150,6 @@ func (a *analysis) processNewConstraints() {
// solveConstraints applies each resolution rule attached to node n to
// the set of labels delta. It may generate new constraints in
// a.constraints.
-//
func (a *analysis) solveConstraints(n *node, delta *nodeset) {
if delta.IsEmpty() {
return
@@ -199,7 +197,6 @@ func (a *analysis) addWork(id nodeid) {
//
// The size of the copy is implicitly 1.
// It returns true if pts(dst) changed.
-//
func (a *analysis) onlineCopy(dst, src nodeid) bool {
if dst != src {
if nsrc := a.nodes[src]; nsrc.solve.copyTo.add(dst) {
@@ -221,7 +218,6 @@ func (a *analysis) onlineCopy(dst, src nodeid) bool {
//
// TODO(adonovan): now that we support a.copy() during solving, we
// could eliminate onlineCopyN, but it's much slower. Investigate.
-//
func (a *analysis) onlineCopyN(dst, src nodeid, sizeof uint32) uint32 {
for i := uint32(0); i < sizeof; i++ {
if a.onlineCopy(dst, src) {
diff --git a/go/pointer/stdlib_test.go b/go/pointer/stdlib_test.go
index 3ba42a171..978cfb8fe 100644
--- a/go/pointer/stdlib_test.go
+++ b/go/pointer/stdlib_test.go
@@ -46,7 +46,7 @@ func TestStdlib(t *testing.T) {
}
// Create SSA packages.
- prog, _ := ssautil.AllPackages(pkgs, 0)
+ prog, _ := ssautil.AllPackages(pkgs, ssa.InstantiateGenerics)
prog.Build()
numPkgs := len(prog.AllPackages())
diff --git a/go/pointer/testdata/typeparams.go b/go/pointer/testdata/typeparams.go
new file mode 100644
index 000000000..461ba4437
--- /dev/null
+++ b/go/pointer/testdata/typeparams.go
@@ -0,0 +1,68 @@
+//go:build ignore
+// +build ignore
+
+package main
+
+import (
+ "fmt"
+ "os"
+)
+
+type S[T any] struct{ t T }
+
+var theSint S[int]
+var theSbool S[bool]
+
+func (s *S[T]) String() string {
+ print(s) // @pointsto command-line-arguments.theSbool | command-line-arguments.theSint
+ return ""
+}
+
+func Type[T any]() {
+ var x *T
+ print(x) // @types *int | *bool
+}
+
+func Caller[T any]() {
+ var s *S[T]
+ _ = s.String()
+}
+
+var a int
+var b bool
+
+type t[T any] struct {
+ a *map[string]chan *T
+}
+
+func fn[T any](a *T) {
+ m := make(map[string]chan *T)
+ m[""] = make(chan *T, 1)
+ m[""] <- a
+ x := []t[T]{t[T]{a: &m}}
+ print(x) // @pointstoquery <-(*x[i].a)[key] command-line-arguments.a | command-line-arguments.b
+}
+
+func main() {
+ // os.Args is considered intrinsically allocated,
+ // but may also be set explicitly (e.g. on Windows), hence '...'.
+ print(os.Args) // @pointsto <command-line args> | ...
+ fmt.Println("Hello!", &theSint)
+ fmt.Println("World!", &theSbool)
+
+ Type[int]() // call
+ f := Type[bool] // call through a variable
+ _ = Type[string] // not called so will not appear in Type's print.
+ f()
+
+ Caller[int]()
+ Caller[bool]()
+
+ fn(&a)
+ fn(&b)
+}
+
+// @calls (*fmt.pp).handleMethods -> (*command-line-arguments.S[int]).String[int]
+// @calls (*fmt.pp).handleMethods -> (*command-line-arguments.S[bool]).String[bool]
+// @calls command-line-arguments.Caller[int] -> (*command-line-arguments.S[int]).String[int]
+// @calls command-line-arguments.Caller[bool] -> (*command-line-arguments.S[bool]).String[bool]
diff --git a/go/pointer/util.go b/go/pointer/util.go
index 5bdd623c0..17728aa06 100644
--- a/go/pointer/util.go
+++ b/go/pointer/util.go
@@ -8,12 +8,13 @@ import (
"bytes"
"fmt"
"go/types"
- exec "golang.org/x/sys/execabs"
"log"
"os"
"runtime"
"time"
+ exec "golang.org/x/sys/execabs"
+
"golang.org/x/tools/container/intsets"
)
@@ -35,7 +36,6 @@ func CanPoint(T types.Type) bool {
// CanHaveDynamicTypes reports whether the type T can "hold" dynamic types,
// i.e. is an interface (incl. reflect.Type) or a reflect.Value.
-//
func CanHaveDynamicTypes(T types.Type) bool {
switch T := T.(type) {
case *types.Named:
@@ -69,17 +69,21 @@ func deref(typ types.Type) types.Type {
// of a type T: the subelement's type and its path from the root of T.
//
// For example, for this type:
-// type line struct{ points []struct{x, y int} }
+//
+// type line struct{ points []struct{x, y int} }
+//
// flatten() of the inner struct yields the following []fieldInfo:
-// struct{ x, y int } ""
-// int ".x"
-// int ".y"
+//
+// struct{ x, y int } ""
+// int ".x"
+// int ".y"
+//
// and flatten(line) yields:
-// struct{ points []struct{x, y int} } ""
-// struct{ x, y int } ".points[*]"
-// int ".points[*].x
-// int ".points[*].y"
//
+// struct{ points []struct{x, y int} } ""
+// struct{ x, y int } ".points[*]"
+// int ".points[*].x"
+// int ".points[*].y"
type fieldInfo struct {
typ types.Type
@@ -89,7 +93,6 @@ type fieldInfo struct {
}
// path returns a user-friendly string describing the subelement path.
-//
func (fi *fieldInfo) path() string {
var buf bytes.Buffer
for p := fi; p != nil; p = p.tail {
@@ -113,7 +116,6 @@ func (fi *fieldInfo) path() string {
// reflect.Value is considered pointerlike, similar to interface{}.
//
// Callers must not mutate the result.
-//
func (a *analysis) flatten(t types.Type) []*fieldInfo {
fl, ok := a.flattenMemo[t]
if !ok {
@@ -124,7 +126,7 @@ func (a *analysis) flatten(t types.Type) []*fieldInfo {
// Debuggability hack: don't remove
// the named type from interfaces as
// they're very verbose.
- fl = append(fl, &fieldInfo{typ: t})
+ fl = append(fl, &fieldInfo{typ: t}) // t may be a type param
} else {
fl = a.flatten(u)
}
diff --git a/go/ssa/TODO b/go/ssa/TODO
new file mode 100644
index 000000000..6c35253c7
--- /dev/null
+++ b/go/ssa/TODO
@@ -0,0 +1,16 @@
+-*- text -*-
+
+SSA Generics to-do list
+===========================
+
+DOCUMENTATION:
+- Read me for internals
+
+TYPE PARAMETERIZED GENERIC FUNCTIONS:
+- sanity.go updates.
+- Check source functions going to generics.
+- Tests, tests, tests...
+
+USAGE:
+- Back fill users for handling ssa.InstantiateGenerics being off.
+
diff --git a/go/ssa/block.go b/go/ssa/block.go
index 35f317332..28170c787 100644
--- a/go/ssa/block.go
+++ b/go/ssa/block.go
@@ -19,14 +19,12 @@ func (b *BasicBlock) Parent() *Function { return b.parent }
// String returns a human-readable label of this block.
// It is not guaranteed unique within the function.
-//
func (b *BasicBlock) String() string {
return fmt.Sprintf("%d", b.Index)
}
// emit appends an instruction to the current basic block.
// If the instruction defines a Value, it is returned.
-//
func (b *BasicBlock) emit(i Instruction) Value {
i.setBlock(b)
b.Instrs = append(b.Instrs, i)
@@ -63,7 +61,6 @@ func (b *BasicBlock) phis() []Instruction {
// replacePred replaces all occurrences of p in b's predecessor list with q.
// Ordinarily there should be at most one.
-//
func (b *BasicBlock) replacePred(p, q *BasicBlock) {
for i, pred := range b.Preds {
if pred == p {
@@ -74,7 +71,6 @@ func (b *BasicBlock) replacePred(p, q *BasicBlock) {
// replaceSucc replaces all occurrences of p in b's successor list with q.
// Ordinarily there should be at most one.
-//
func (b *BasicBlock) replaceSucc(p, q *BasicBlock) {
for i, succ := range b.Succs {
if succ == p {
@@ -86,7 +82,6 @@ func (b *BasicBlock) replaceSucc(p, q *BasicBlock) {
// removePred removes all occurrences of p in b's
// predecessor list and φ-nodes.
// Ordinarily there should be at most one.
-//
func (b *BasicBlock) removePred(p *BasicBlock) {
phis := b.phis()
diff --git a/go/ssa/blockopt.go b/go/ssa/blockopt.go
index e79260a21..7dabce8ca 100644
--- a/go/ssa/blockopt.go
+++ b/go/ssa/blockopt.go
@@ -31,7 +31,6 @@ func markReachable(b *BasicBlock) {
// deleteUnreachableBlocks marks all reachable blocks of f and
// eliminates (nils) all others, including possibly cyclic subgraphs.
-//
func deleteUnreachableBlocks(f *Function) {
const white, black = 0, -1
// We borrow b.Index temporarily as the mark bit.
@@ -61,7 +60,6 @@ func deleteUnreachableBlocks(f *Function) {
// jumpThreading attempts to apply simple jump-threading to block b,
// in which a->b->c become a->c if b is just a Jump.
// The result is true if the optimization was applied.
-//
func jumpThreading(f *Function, b *BasicBlock) bool {
if b.Index == 0 {
return false // don't apply to entry block
@@ -108,7 +106,6 @@ func jumpThreading(f *Function, b *BasicBlock) bool {
// fuseBlocks attempts to apply the block fusion optimization to block
// a, in which a->b becomes ab if len(a.Succs)==len(b.Preds)==1.
// The result is true if the optimization was applied.
-//
func fuseBlocks(f *Function, a *BasicBlock) bool {
if len(a.Succs) != 1 {
return false
@@ -150,7 +147,6 @@ func fuseBlocks(f *Function, a *BasicBlock) bool {
// optimizeBlocks() performs some simple block optimizations on a
// completed function: dead block elimination, block fusion, jump
// threading.
-//
func optimizeBlocks(f *Function) {
deleteUnreachableBlocks(f)
diff --git a/go/ssa/builder.go b/go/ssa/builder.go
index ac85541c9..be8d36a6e 100644
--- a/go/ssa/builder.go
+++ b/go/ssa/builder.go
@@ -24,10 +24,86 @@ package ssa
// TODO(adonovan): indeed, building functions is now embarrassingly parallel.
// Audit for concurrency then benchmark using more goroutines.
//
-// The builder's and Program's indices (maps) are populated and
+// State:
+//
+// The Package's and Program's indices (maps) are populated and
// mutated during the CREATE phase, but during the BUILD phase they
// remain constant. The sole exception is Prog.methodSets and its
// related maps, which are protected by a dedicated mutex.
+//
+// Generic functions declared in a package P can be instantiated from functions
+// outside of P. This happens independently of the CREATE and BUILD phase of P.
+//
+// Locks:
+//
+// Mutexes are currently acquired according to the following order:
+// Prog.methodsMu ⊃ canonizer.mu ⊃ printMu
+// where x ⊃ y denotes that y can be acquired while x is held
+// and x cannot be acquired while y is held.
+//
+// Synthetics:
+//
+// During the BUILD phase new functions can be created and built. These include:
+// - wrappers (wrappers, bounds, thunks)
+// - generic function instantiations
+// These functions do not belong to a specific Pkg (Pkg==nil). Instead the
+// Package that led to them being CREATED is obligated to ensure these
+// are BUILT during the BUILD phase of the Package.
+//
+// Runtime types:
+//
+// A concrete type is a type that is fully monomorphized with concrete types,
+// i.e. it cannot reach a TypeParam type.
+// Some concrete types require full runtime type information. Cases
+// include checking whether a type implements an interface or
+// interpretation by the reflect package. All such types that may require
+// this information will have all of their method sets built and will be added to Prog.methodSets.
+// A type T is considered to require runtime type information if it is
+// a runtime type and has a non-empty method set and either:
+// - T flows into a MakeInterface instruction,
+// - T appears in a concrete exported member, or
+// - T is a type reachable from a type S that has a non-empty method set.
+// For any such type T, method sets must be created before the BUILD
+// phase of the package is done.
+//
+// Function literals:
+//
+// The BUILD phase of a function literal (anonymous function) is tied to the
+// BUILD phase of the enclosing parent function. The FreeVars of an anonymous
+// function are discovered by building the anonymous function. This in turn
+// changes which variables must be bound in a MakeClosure instruction in the
+// parent. Anonymous functions also track where they are referred to in their
+// parent function.
+//
+// Happens-before:
+//
+// The above discussion leads to the following happens-before relation for
+// the BUILD and CREATE phases.
+// The happens-before relation (with X<Y denoting X happens-before Y) is:
+// - CREATE fn < fn.startBody() < fn.finishBody() < fn.built
+// for any function fn.
+// - anon.parent.startBody() < CREATE anon, and
+// anon.finishBody() < anon.parent().finishBody() < anon.built < fn.built
+// for an anonymous function anon (i.e. anon.parent() != nil).
+// - CREATE fn.Pkg < CREATE fn
+// for a declared function fn (i.e. fn.Pkg != nil)
+// - fn.built < BUILD pkg done
+// for any function fn created during the CREATE or BUILD phase of a package
+// pkg. This includes declared and synthetic functions.
+//
+// Program.MethodValue:
+//
+// Program.MethodValue may trigger new wrapper and instantiation functions to
+// be created. It has the same obligation to BUILD created functions as a
+// Package.
+//
+// Program.NewFunction:
+//
+// This is a low level operation for creating functions that do not exist in
+// the source. Use with caution.
+//
+// TODO(taking): Use consistent terminology for "concrete".
+// TODO(taking): Use consistent terminology for "monomorphization"/"instantiate"/"expand".
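For reference, a minimal client-side sketch of the CREATE and BUILD phases described above, mirroring the builder mode used by the updated pointer tests in this change (loading pkgs with go/packages is elided; the assumed imports are golang.org/x/tools/go/packages, go/ssa and go/ssa/ssautil):

// buildAll runs CREATE for every package in pkgs via ssautil.AllPackages, then
// BUILD via prog.Build, which also builds synthetic wrappers and generic instantiations.
func buildAll(pkgs []*packages.Package) *ssa.Program {
	prog, _ := ssautil.AllPackages(pkgs, ssa.SanityCheckFunctions|ssa.InstantiateGenerics)
	prog.Build()
	return prog
}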
import (
"fmt"
@@ -37,6 +113,8 @@ import (
"go/types"
"os"
"sync"
+
+ "golang.org/x/tools/internal/typeparams"
)
type opaqueType struct {
@@ -58,7 +136,7 @@ var (
tString = types.Typ[types.String]
tUntypedNil = types.Typ[types.UntypedNil]
tRangeIter = &opaqueType{nil, "iter"} // the type of all "range" iterators
- tEface = types.NewInterface(nil, nil).Complete()
+ tEface = types.NewInterfaceType(nil, nil).Complete()
// SSA Value constants.
vZero = intConst(0)
@@ -68,13 +146,17 @@ var (
// builder holds state associated with the package currently being built.
// Its methods contain all the logic for AST-to-SSA conversion.
-type builder struct{}
+type builder struct {
+ // Invariant: 0 <= rtypes <= finished <= created.Len()
+ created *creator // functions created during building
+ finished int // Invariant: create[i].built holds for i in [0,finished)
+ rtypes int // Invariant: all of the runtime types for create[i] have been added for i in [0,rtypes)
+}
// cond emits to fn code to evaluate boolean condition e and jump
// to t or f depending on its value, performing various simplifications.
//
// Postcondition: fn.currentBlock is nil.
-//
func (b *builder) cond(fn *Function, e ast.Expr, t, f *BasicBlock) {
switch e := e.(type) {
case *ast.ParenExpr:
@@ -117,7 +199,6 @@ func (b *builder) cond(fn *Function, e ast.Expr, t, f *BasicBlock) {
// logicalBinop emits code to fn to evaluate e, a &&- or
// ||-expression whose reified boolean value is wanted.
// The value is returned.
-//
func (b *builder) logicalBinop(fn *Function, e *ast.BinaryExpr) Value {
rhs := fn.newBasicBlock("binop.rhs")
done := fn.newBasicBlock("binop.done")
@@ -178,7 +259,6 @@ func (b *builder) logicalBinop(fn *Function, e *ast.BinaryExpr) Value {
// assignment or return statement, and "value,ok" uses of
// TypeAssertExpr, IndexExpr (when X is a map), and UnaryExpr (when Op
// is token.ARROW).
-//
func (b *builder) exprN(fn *Function, e ast.Expr) Value {
typ := fn.typeOf(e).(*types.Tuple)
switch e := e.(type) {
@@ -195,7 +275,7 @@ func (b *builder) exprN(fn *Function, e ast.Expr) Value {
return fn.emit(&c)
case *ast.IndexExpr:
- mapt := fn.typeOf(e.X).Underlying().(*types.Map)
+ mapt := typeparams.CoreType(fn.typeOf(e.X)).(*types.Map) // ,ok must be a map.
lookup := &Lookup{
X: b.expr(fn, e.X),
Index: emitConv(fn, b.expr(fn, e.Index), mapt.Key()),
@@ -228,11 +308,11 @@ func (b *builder) exprN(fn *Function, e ast.Expr) Value {
// The result is nil if no special handling was required; in this case
// the caller should treat this like an ordinary library function
// call.
-//
func (b *builder) builtin(fn *Function, obj *types.Builtin, args []ast.Expr, typ types.Type, pos token.Pos) Value {
+ typ = fn.typ(typ)
switch obj.Name() {
case "make":
- switch typ.Underlying().(type) {
+ switch ct := typeparams.CoreType(typ).(type) {
case *types.Slice:
n := b.expr(fn, args[1])
m := n
@@ -242,7 +322,7 @@ func (b *builder) builtin(fn *Function, obj *types.Builtin, args []ast.Expr, typ
if m, ok := m.(*Const); ok {
// treat make([]T, n, m) as new([m]T)[:n]
cap := m.Int64()
- at := types.NewArray(typ.Underlying().(*types.Slice).Elem(), cap)
+ at := types.NewArray(ct.Elem(), cap)
alloc := emitNew(fn, at, pos)
alloc.Comment = "makeslice"
v := &Slice{
@@ -293,6 +373,8 @@ func (b *builder) builtin(fn *Function, obj *types.Builtin, args []ast.Expr, typ
// We must still evaluate the value, though. (If it
// was side-effect free, the whole call would have
// been constant-folded.)
+ //
+ // Type parameters are always non-constant so use Underlying.
t := deref(fn.typeOf(args[0])).Underlying()
if at, ok := t.(*types.Array); ok {
b.expr(fn, args[0]) // for effects only
@@ -319,10 +401,10 @@ func (b *builder) builtin(fn *Function, obj *types.Builtin, args []ast.Expr, typ
// addressable expression e as being a potentially escaping pointer
// value. For example, in this code:
//
-// a := A{
-// b: [1]B{B{c: 1}}
-// }
-// return &a.b[0].c
+// a := A{
+// b: [1]B{B{c: 1}}
+// }
+// return &a.b[0].c
//
// the application of & causes a.b[0].c to have its address taken,
// which means that ultimately the local variable a must be
@@ -333,7 +415,6 @@ func (b *builder) builtin(fn *Function, obj *types.Builtin, args []ast.Expr, typ
// - &x, including when implicit in method call or composite literals.
// - a[:] iff a is an array (not *array)
// - references to variables in lexically enclosing functions.
-//
func (b *builder) addr(fn *Function, e ast.Expr, escaping bool) lvalue {
switch e := e.(type) {
case *ast.Ident:
@@ -367,53 +448,67 @@ func (b *builder) addr(fn *Function, e ast.Expr, escaping bool) lvalue {
return b.addr(fn, e.X, escaping)
case *ast.SelectorExpr:
- sel, ok := fn.info.Selections[e]
- if !ok {
+ sel := fn.selection(e)
+ if sel == nil {
// qualified identifier
return b.addr(fn, e.Sel, escaping)
}
- if sel.Kind() != types.FieldVal {
+ if sel.kind != types.FieldVal {
panic(sel)
}
wantAddr := true
v := b.receiver(fn, e.X, wantAddr, escaping, sel)
- last := len(sel.Index()) - 1
- return &address{
- addr: emitFieldSelection(fn, v, sel.Index()[last], true, e.Sel),
- pos: e.Sel.Pos(),
- expr: e.Sel,
+ index := sel.index[len(sel.index)-1]
+ fld := typeparams.CoreType(deref(v.Type())).(*types.Struct).Field(index)
+
+ // Due to the two phases of resolving AssignStmt, a panic from x.f = p()
+ // when x is nil is required to come after the side-effects of
+ // evaluating x and p().
+ emit := func(fn *Function) Value {
+ return emitFieldSelection(fn, v, index, true, e.Sel)
}
+ return &lazyAddress{addr: emit, t: fld.Type(), pos: e.Sel.Pos(), expr: e.Sel}
case *ast.IndexExpr:
+ xt := fn.typeOf(e.X)
+ elem, mode := indexType(xt)
var x Value
var et types.Type
- switch t := fn.typeOf(e.X).Underlying().(type) {
- case *types.Array:
+ switch mode {
+ case ixArrVar: // array, array|slice, array|*array, or array|*array|slice.
x = b.addr(fn, e.X, escaping).address(fn)
- et = types.NewPointer(t.Elem())
- case *types.Pointer: // *array
- x = b.expr(fn, e.X)
- et = types.NewPointer(t.Elem().Underlying().(*types.Array).Elem())
- case *types.Slice:
+ et = types.NewPointer(elem)
+ case ixVar: // *array, slice, *array|slice
x = b.expr(fn, e.X)
- et = types.NewPointer(t.Elem())
- case *types.Map:
+ et = types.NewPointer(elem)
+ case ixMap:
+ mt := typeparams.CoreType(xt).(*types.Map)
return &element{
m: b.expr(fn, e.X),
- k: emitConv(fn, b.expr(fn, e.Index), t.Key()),
- t: t.Elem(),
+ k: emitConv(fn, b.expr(fn, e.Index), mt.Key()),
+ t: mt.Elem(),
pos: e.Lbrack,
}
default:
- panic("unexpected container type in IndexExpr: " + t.String())
+ panic("unexpected container type in IndexExpr: " + xt.String())
}
- v := &IndexAddr{
- X: x,
- Index: emitConv(fn, b.expr(fn, e.Index), tInt),
+ index := b.expr(fn, e.Index)
+ if isUntyped(index.Type()) {
+ index = emitConv(fn, index, tInt)
}
- v.setPos(e.Lbrack)
- v.setType(et)
- return &address{addr: fn.emit(v), pos: e.Lbrack, expr: e}
+ // Due to the two phases of resolving AssignStmt, a panic from x[i] = p()
+ // when x is nil or i is out-of-bounds is required to come after the
+ // side-effects of evaluating x, i and p().
+ emit := func(fn *Function) Value {
+ v := &IndexAddr{
+ X: x,
+ Index: index,
+ }
+ v.setPos(e.Lbrack)
+ v.setType(et)
+ return fn.emit(v)
+ }
+ return &lazyAddress{addr: emit, t: deref(et), pos: e.Lbrack, expr: e}
case *ast.StarExpr:
return &address{addr: b.expr(fn, e.X), pos: e.Star, expr: e}
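The lazyAddress values introduced in this hunk defer emitting FieldAddr/IndexAddr so that the Go spec's two-phase assignment order is preserved; a small illustration of the behavior at stake (written as comments; not from the change):

// var x *struct{ f int }   // x is nil
// x.f = p()                // phase 1 evaluates x and p() (p's side effects run);
//                          // phase 2 performs the store, and only then does the nil dereference panic.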
@@ -452,7 +547,6 @@ func (sb *storebuf) emit(fn *Function) {
// storebuf sb so that they can be executed later. This allows correct
// in-place update of existing variables when the RHS is a composite
// literal that may reference parts of the LHS.
-//
func (b *builder) assign(fn *Function, loc lvalue, e ast.Expr, isZero bool, sb *storebuf) {
// Can we initialize it in place?
if e, ok := unparen(e).(*ast.CompositeLit); ok {
@@ -473,7 +567,7 @@ func (b *builder) assign(fn *Function, loc lvalue, e ast.Expr, isZero bool, sb *
}
if _, ok := loc.(*address); ok {
- if isInterface(loc.typ()) {
+ if isNonTypeParamInterface(loc.typ()) {
// e.g. var x interface{} = T{...}
// Can't in-place initialize an interface value.
// Fall back to copying.
@@ -511,7 +605,6 @@ func (b *builder) assign(fn *Function, loc lvalue, e ast.Expr, isZero bool, sb *
// expr lowers a single-result expression e to SSA form, emitting code
// to fn and returning the Value defined by the expression.
-//
func (b *builder) expr(fn *Function, e ast.Expr) Value {
e = unparen(e)
@@ -519,7 +612,7 @@ func (b *builder) expr(fn *Function, e ast.Expr) Value {
// Is expression a constant?
if tv.Value != nil {
- return NewConst(tv.Value, tv.Type)
+ return NewConst(tv.Value, fn.typ(tv.Type))
}
var v Value
@@ -544,22 +637,30 @@ func (b *builder) expr0(fn *Function, e ast.Expr, tv types.TypeAndValue) Value {
case *ast.FuncLit:
fn2 := &Function{
- name: fmt.Sprintf("%s$%d", fn.Name(), 1+len(fn.AnonFuncs)),
- Signature: fn.typeOf(e.Type).Underlying().(*types.Signature),
- pos: e.Type.Func,
- parent: fn,
- Pkg: fn.Pkg,
- Prog: fn.Prog,
- syntax: e,
- info: fn.info,
+ name: fmt.Sprintf("%s$%d", fn.Name(), 1+len(fn.AnonFuncs)),
+ Signature: fn.typeOf(e.Type).(*types.Signature),
+ pos: e.Type.Func,
+ parent: fn,
+ anonIdx: int32(len(fn.AnonFuncs)),
+ Pkg: fn.Pkg,
+ Prog: fn.Prog,
+ syntax: e,
+ topLevelOrigin: nil, // use anonIdx to lookup an anon instance's origin.
+ typeparams: fn.typeparams, // share the parent's type parameters.
+ typeargs: fn.typeargs, // share the parent's type arguments.
+ info: fn.info,
+ subst: fn.subst, // share the parent's type substitutions.
}
fn.AnonFuncs = append(fn.AnonFuncs, fn2)
- b.buildFunction(fn2)
+ b.created.Add(fn2)
+ b.buildFunctionBody(fn2)
+ // fn2 is not done BUILDing. fn2.referrers can still be updated.
+ // fn2 is done BUILDing after fn.finishBody().
if fn2.FreeVars == nil {
return fn2
}
v := &MakeClosure{Fn: fn2}
- v.setType(tv.Type)
+ v.setType(fn.typ(tv.Type))
for _, fv := range fn2.FreeVars {
v.Bindings = append(v.Bindings, fv.outer)
fv.outer = nil
@@ -567,13 +668,13 @@ func (b *builder) expr0(fn *Function, e ast.Expr, tv types.TypeAndValue) Value {
return fn.emit(v)
case *ast.TypeAssertExpr: // single-result form only
- return emitTypeAssert(fn, b.expr(fn, e.X), tv.Type, e.Lparen)
+ return emitTypeAssert(fn, b.expr(fn, e.X), fn.typ(tv.Type), e.Lparen)
case *ast.CallExpr:
if fn.info.Types[e.Fun].IsType() {
// Explicit type conversion, e.g. string(x) or big.Int(x)
x := b.expr(fn, e.Args[0])
- y := emitConv(fn, x, tv.Type)
+ y := emitConv(fn, x, fn.typ(tv.Type))
if y != x {
switch y := y.(type) {
case *Convert:
@@ -584,6 +685,8 @@ func (b *builder) expr0(fn *Function, e ast.Expr, tv types.TypeAndValue) Value {
y.pos = e.Lparen
case *SliceToArrayPointer:
y.pos = e.Lparen
+ case *UnOp: // conversion from slice to array.
+ y.pos = e.Lparen
}
}
return y
@@ -591,7 +694,7 @@ func (b *builder) expr0(fn *Function, e ast.Expr, tv types.TypeAndValue) Value {
// Call to "intrinsic" built-ins, e.g. new, make, panic.
if id, ok := unparen(e.Fun).(*ast.Ident); ok {
if obj, ok := fn.info.Uses[id].(*types.Builtin); ok {
- if v := b.builtin(fn, obj, e.Args, tv.Type, e.Lparen); v != nil {
+ if v := b.builtin(fn, obj, e.Args, fn.typ(tv.Type), e.Lparen); v != nil {
return v
}
}
@@ -599,7 +702,7 @@ func (b *builder) expr0(fn *Function, e ast.Expr, tv types.TypeAndValue) Value {
// Regular function call.
var v Call
b.setCall(fn, e, &v.Call)
- v.setType(tv.Type)
+ v.setType(fn.typ(tv.Type))
return fn.emit(&v)
case *ast.UnaryExpr:
@@ -622,7 +725,7 @@ func (b *builder) expr0(fn *Function, e ast.Expr, tv types.TypeAndValue) Value {
X: b.expr(fn, e.X),
}
v.setPos(e.OpPos)
- v.setType(tv.Type)
+ v.setType(fn.typ(tv.Type))
return fn.emit(v)
default:
panic(e.Op)
@@ -635,12 +738,12 @@ func (b *builder) expr0(fn *Function, e ast.Expr, tv types.TypeAndValue) Value {
case token.SHL, token.SHR:
fallthrough
case token.ADD, token.SUB, token.MUL, token.QUO, token.REM, token.AND, token.OR, token.XOR, token.AND_NOT:
- return emitArith(fn, e.Op, b.expr(fn, e.X), b.expr(fn, e.Y), tv.Type, e.OpPos)
+ return emitArith(fn, e.Op, b.expr(fn, e.X), b.expr(fn, e.Y), fn.typ(tv.Type), e.OpPos)
case token.EQL, token.NEQ, token.GTR, token.LSS, token.LEQ, token.GEQ:
cmp := emitCompare(fn, e.Op, b.expr(fn, e.X), b.expr(fn, e.Y), e.OpPos)
// The type of x==y may be UntypedBool.
- return emitConv(fn, cmp, types.Default(tv.Type))
+ return emitConv(fn, cmp, types.Default(fn.typ(tv.Type)))
default:
panic("illegal op in BinaryExpr: " + e.Op.String())
}
@@ -648,21 +751,27 @@ func (b *builder) expr0(fn *Function, e ast.Expr, tv types.TypeAndValue) Value {
case *ast.SliceExpr:
var low, high, max Value
var x Value
- switch fn.typeOf(e.X).Underlying().(type) {
+ xtyp := fn.typeOf(e.X)
+ switch typeparams.CoreType(xtyp).(type) {
case *types.Array:
// Potentially escaping.
x = b.addr(fn, e.X, true).address(fn)
case *types.Basic, *types.Slice, *types.Pointer: // *array
x = b.expr(fn, e.X)
default:
- panic("unreachable")
- }
- if e.High != nil {
- high = b.expr(fn, e.High)
+ // core type exception?
+ if isBytestring(xtyp) {
+ x = b.expr(fn, e.X) // bytestring is handled as string and []byte.
+ } else {
+ panic("unexpected sequence type in SliceExpr")
+ }
}
if e.Low != nil {
low = b.expr(fn, e.Low)
}
+ if e.High != nil {
+ high = b.expr(fn, e.High)
+ }
if e.Slice3 {
max = b.expr(fn, e.Max)
}
@@ -673,7 +782,7 @@ func (b *builder) expr0(fn *Function, e ast.Expr, tv types.TypeAndValue) Value {
Max: max,
}
v.setPos(e.Lbrack)
- v.setType(tv.Type)
+ v.setType(fn.typ(tv.Type))
return fn.emit(v)
case *ast.Ident:
@@ -681,108 +790,146 @@ func (b *builder) expr0(fn *Function, e ast.Expr, tv types.TypeAndValue) Value {
// Universal built-in or nil?
switch obj := obj.(type) {
case *types.Builtin:
- return &Builtin{name: obj.Name(), sig: tv.Type.(*types.Signature)}
+ return &Builtin{name: obj.Name(), sig: fn.instanceType(e).(*types.Signature)}
case *types.Nil:
- return nilConst(tv.Type)
+ return zeroConst(fn.instanceType(e))
}
// Package-level func or var?
if v := fn.Prog.packageLevelMember(obj); v != nil {
if g, ok := v.(*Global); ok {
return emitLoad(fn, g) // var (address)
}
- return v.(*Function) // (func)
+ callee := v.(*Function) // (func)
+ if callee.typeparams.Len() > 0 {
+ targs := fn.subst.types(instanceArgs(fn.info, e))
+ callee = fn.Prog.needsInstance(callee, targs, b.created)
+ }
+ return callee
}
// Local var.
return emitLoad(fn, fn.lookup(obj, false)) // var (address)
case *ast.SelectorExpr:
- sel, ok := fn.info.Selections[e]
- if !ok {
+ sel := fn.selection(e)
+ if sel == nil {
// builtin unsafe.{Add,Slice}
if obj, ok := fn.info.Uses[e.Sel].(*types.Builtin); ok {
- return &Builtin{name: obj.Name(), sig: tv.Type.(*types.Signature)}
+ return &Builtin{name: obj.Name(), sig: fn.typ(tv.Type).(*types.Signature)}
}
// qualified identifier
return b.expr(fn, e.Sel)
}
- switch sel.Kind() {
+ switch sel.kind {
case types.MethodExpr:
// (*T).f or T.f, the method f from the method-set of type T.
// The result is a "thunk".
- return emitConv(fn, makeThunk(fn.Prog, sel), tv.Type)
+ thunk := makeThunk(fn.Prog, sel, b.created)
+ return emitConv(fn, thunk, fn.typ(tv.Type))
case types.MethodVal:
// e.f where e is an expression and f is a method.
// The result is a "bound".
- obj := sel.Obj().(*types.Func)
- rt := recvType(obj)
+ obj := sel.obj.(*types.Func)
+ rt := fn.typ(recvType(obj))
wantAddr := isPointer(rt)
escaping := true
v := b.receiver(fn, e.X, wantAddr, escaping, sel)
- if isInterface(rt) {
- // If v has interface type I,
+
+ if types.IsInterface(rt) {
+ // If v may be an interface type I (after instantiating),
// we must emit a check that v is non-nil.
- // We use: typeassert v.(I).
- emitTypeAssert(fn, v, rt, token.NoPos)
+ if recv, ok := sel.recv.(*typeparams.TypeParam); ok {
+ // Emit a nil check if any possible instantiation of the
+ // type parameter is an interface type.
+ if typeSetOf(recv).Len() > 0 {
+ // recv has a concrete term its typeset.
+ // recv has a concrete term in its typeset.
+ //
+ // Example:
+ // func _[T interface{~int; Foo()}] () {
+ // var v T
+ // _ = v.Foo // <-- MethodVal
+ // }
+ } else {
+ // rt may be instantiated as an interface.
+ // Emit nil check: typeassert (any(v)).(any).
+ emitTypeAssert(fn, emitConv(fn, v, tEface), tEface, token.NoPos)
+ }
+ } else {
+ // non-type param interface
+ // Emit nil check: typeassert v.(I).
+ emitTypeAssert(fn, v, rt, token.NoPos)
+ }
+ }
+ if targs := receiverTypeArgs(obj); len(targs) > 0 {
+ // obj is generic.
+ obj = fn.Prog.canon.instantiateMethod(obj, fn.subst.types(targs), fn.Prog.ctxt)
}
c := &MakeClosure{
- Fn: makeBound(fn.Prog, obj),
+ Fn: makeBound(fn.Prog, obj, b.created),
Bindings: []Value{v},
}
c.setPos(e.Sel.Pos())
- c.setType(tv.Type)
+ c.setType(fn.typ(tv.Type))
return fn.emit(c)
case types.FieldVal:
- indices := sel.Index()
+ indices := sel.index
last := len(indices) - 1
v := b.expr(fn, e.X)
- v = emitImplicitSelections(fn, v, indices[:last])
+ v = emitImplicitSelections(fn, v, indices[:last], e.Pos())
v = emitFieldSelection(fn, v, indices[last], false, e.Sel)
return v
}
panic("unexpected expression-relative selector")
+ case *typeparams.IndexListExpr:
+ // f[X, Y] must be a generic function
+ if !instance(fn.info, e.X) {
+ panic("unexpected expression-could not match index list to instantiation")
+ }
+ return b.expr(fn, e.X) // Handle instantiation within the *Ident or *SelectorExpr cases.
+
case *ast.IndexExpr:
- switch t := fn.typeOf(e.X).Underlying().(type) {
- case *types.Array:
- // Non-addressable array (in a register).
- v := &Index{
- X: b.expr(fn, e.X),
- Index: emitConv(fn, b.expr(fn, e.Index), tInt),
- }
- v.setPos(e.Lbrack)
- v.setType(t.Elem())
- return fn.emit(v)
+ if instance(fn.info, e.X) {
+ return b.expr(fn, e.X) // Handle instantiation within the *Ident or *SelectorExpr cases.
+ }
+ // not a generic instantiation.
+ xt := fn.typeOf(e.X)
+ switch et, mode := indexType(xt); mode {
+ case ixVar:
+ // Addressable slice/array; use IndexAddr and Load.
+ return b.addr(fn, e, false).load(fn)
- case *types.Map:
- // Maps are not addressable.
- mapt := fn.typeOf(e.X).Underlying().(*types.Map)
- v := &Lookup{
+ case ixArrVar, ixValue:
+ // An array in a register, a string or a combined type that contains
+ // either an [_]array (ixArrVar) or string (ixValue).
+
+ // Note: for ixArrVar with CoreType(xt)==nil, indexing could also be handled with IndexAddr and Load.
+ index := b.expr(fn, e.Index)
+ if isUntyped(index.Type()) {
+ index = emitConv(fn, index, tInt)
+ }
+ v := &Index{
X: b.expr(fn, e.X),
- Index: emitConv(fn, b.expr(fn, e.Index), mapt.Key()),
+ Index: index,
}
v.setPos(e.Lbrack)
- v.setType(mapt.Elem())
+ v.setType(et)
return fn.emit(v)
- case *types.Basic: // => string
- // Strings are not addressable.
+ case ixMap:
+ ct := typeparams.CoreType(xt).(*types.Map)
v := &Lookup{
X: b.expr(fn, e.X),
- Index: b.expr(fn, e.Index),
+ Index: emitConv(fn, b.expr(fn, e.Index), ct.Key()),
}
v.setPos(e.Lbrack)
- v.setType(tByte)
+ v.setType(ct.Elem())
return fn.emit(v)
-
- case *types.Slice, *types.Pointer: // *array
- // Addressable slice/array; use IndexAddr and Load.
- return b.addr(fn, e, false).load(fn)
-
default:
- panic("unexpected container type in IndexExpr: " + t.String())
+ panic("unexpected container type in IndexExpr: " + xt.String())
}
case *ast.CompositeLit, *ast.StarExpr:
@@ -806,21 +953,21 @@ func (b *builder) stmtList(fn *Function, list []ast.Stmt) {
// selections of sel.
//
// wantAddr requests that the result is an an address. If
-// !sel.Indirect(), this may require that e be built in addr() mode; it
+// !sel.indirect, this may require that e be built in addr() mode; it
// must thus be addressable.
//
// escaping is defined as per builder.addr().
-//
-func (b *builder) receiver(fn *Function, e ast.Expr, wantAddr, escaping bool, sel *types.Selection) Value {
+func (b *builder) receiver(fn *Function, e ast.Expr, wantAddr, escaping bool, sel *selection) Value {
var v Value
- if wantAddr && !sel.Indirect() && !isPointer(fn.typeOf(e)) {
+ if wantAddr && !sel.indirect && !isPointer(fn.typeOf(e)) {
v = b.addr(fn, e, escaping).address(fn)
} else {
v = b.expr(fn, e)
}
- last := len(sel.Index()) - 1
- v = emitImplicitSelections(fn, v, sel.Index()[:last])
+ last := len(sel.index) - 1
+ // The position of implicit selection is the position of the inducing receiver expression.
+ v = emitImplicitSelections(fn, v, sel.index[:last], e.Pos())
if !wantAddr && isPointer(v.Type()) {
v = emitLoad(fn, v)
}
@@ -830,32 +977,36 @@ func (b *builder) receiver(fn *Function, e ast.Expr, wantAddr, escaping bool, se
// setCallFunc populates the function parts of a CallCommon structure
// (Func, Method, Recv, Args[0]) based on the kind of invocation
// occurring in e.
-//
func (b *builder) setCallFunc(fn *Function, e *ast.CallExpr, c *CallCommon) {
c.pos = e.Lparen
// Is this a method call?
if selector, ok := unparen(e.Fun).(*ast.SelectorExpr); ok {
- sel, ok := fn.info.Selections[selector]
- if ok && sel.Kind() == types.MethodVal {
- obj := sel.Obj().(*types.Func)
+ sel := fn.selection(selector)
+ if sel != nil && sel.kind == types.MethodVal {
+ obj := sel.obj.(*types.Func)
recv := recvType(obj)
+
wantAddr := isPointer(recv)
escaping := true
v := b.receiver(fn, selector.X, wantAddr, escaping, sel)
- if isInterface(recv) {
+ if types.IsInterface(recv) {
// Invoke-mode call.
- c.Value = v
+ c.Value = v // possibly type param
c.Method = obj
} else {
// "Call"-mode call.
- c.Value = fn.Prog.declaredFunc(obj)
+ callee := fn.Prog.originFunc(obj)
+ if callee.typeparams.Len() > 0 {
+ callee = fn.Prog.needsInstance(callee, receiverTypeArgs(obj), b.created)
+ }
+ c.Value = callee
c.Args = append(c.Args, v)
}
return
}
- // sel.Kind()==MethodExpr indicates T.f() or (*T).f():
+ // sel.kind==MethodExpr indicates T.f() or (*T).f():
// a statically dispatched call to the method f in the
// method-set of T or *T. T may be an interface.
//
@@ -893,7 +1044,6 @@ func (b *builder) setCallFunc(fn *Function, e *ast.CallExpr, c *CallCommon) {
// emitCallArgs emits to f code for the actual parameters of call e to
// a (possibly built-in) function of effective type sig.
// The argument values are appended to args, which is then returned.
-//
func (b *builder) emitCallArgs(fn *Function, sig *types.Signature, e *ast.CallExpr, args []Value) []Value {
// f(x, y, z...): pass slice z straight through.
if e.Ellipsis != 0 {
@@ -938,7 +1088,7 @@ func (b *builder) emitCallArgs(fn *Function, sig *types.Signature, e *ast.CallEx
st := sig.Params().At(np).Type().(*types.Slice)
vt := st.Elem()
if len(varargs) == 0 {
- args = append(args, nilConst(st))
+ args = append(args, zeroConst(st))
} else {
// Replace a suffix of args with a slice containing it.
at := types.NewArray(vt, int64(len(varargs)))
@@ -965,13 +1115,12 @@ func (b *builder) emitCallArgs(fn *Function, sig *types.Signature, e *ast.CallEx
// setCall emits to fn code to evaluate all the parameters of a function
// call e, and populates *c with those values.
-//
func (b *builder) setCall(fn *Function, e *ast.CallExpr, c *CallCommon) {
// First deal with the f(...) part and optional receiver.
b.setCallFunc(fn, e, c)
// Then append the other actual parameters.
- sig, _ := fn.typeOf(e.Fun).Underlying().(*types.Signature)
+ sig, _ := typeparams.CoreType(fn.typeOf(e.Fun)).(*types.Signature)
if sig == nil {
panic(fmt.Sprintf("no signature for call of %s", e.Fun))
}
@@ -980,13 +1129,11 @@ func (b *builder) setCall(fn *Function, e *ast.CallExpr, c *CallCommon) {
// assignOp emits to fn code to perform loc <op>= val.
func (b *builder) assignOp(fn *Function, loc lvalue, val Value, op token.Token, pos token.Pos) {
- oldv := loc.load(fn)
- loc.store(fn, emitArith(fn, op, oldv, emitConv(fn, val, oldv.Type()), loc.typ(), pos))
+ loc.store(fn, emitArith(fn, op, loc.load(fn), val, loc.typ(), pos))
}
// localValueSpec emits to fn code to define all of the vars in the
// function-local ValueSpec, spec.
-//
func (b *builder) localValueSpec(fn *Function, spec *ast.ValueSpec) {
switch {
case len(spec.Values) == len(spec.Names):
@@ -1029,7 +1176,6 @@ func (b *builder) localValueSpec(fn *Function, spec *ast.ValueSpec) {
// isDef is true if this is a short variable declaration (:=).
//
// Note the similarity with localValueSpec.
-//
func (b *builder) assignStmt(fn *Function, lhss, rhss []ast.Expr, isDef bool) {
// Side effects of all LHSs and RHSs must occur in left-to-right order.
lvals := make([]lvalue, len(lhss))
@@ -1095,8 +1241,10 @@ func (b *builder) arrayLen(fn *Function, elts []ast.Expr) int64 {
//
// Because the elements of a composite literal may refer to the
// variables being updated, as in the second line below,
+//
// x := T{a: 1}
// x = T{a: x.a}
+//
// all the reads must occur before all the writes. Thus all stores to
// loc are emitted to the storebuf sb for later execution.
//
@@ -1104,10 +1252,33 @@ func (b *builder) arrayLen(fn *Function, elts []ast.Expr) int64 {
// case when the type name is implicit. e.g. in []*T{{}}, the inner
// literal has type *T and behaves like &T{}.
// In that case, addr must hold a T, not a *T.
-//
func (b *builder) compLit(fn *Function, addr Value, e *ast.CompositeLit, isZero bool, sb *storebuf) {
- typ := deref(fn.typeOf(e))
- switch t := typ.Underlying().(type) {
+ typ := deref(fn.typeOf(e)) // type with name [may be type param]
+ t := deref(typeparams.CoreType(typ)).Underlying() // core type for comp lit case
+ // Computing typ and t is subtle as these handle pointer types.
+ // For example, &T{...} is valid even for maps and slices.
+ // Also typ should refer to T (not *T) while t should be the core type of T.
+ //
+	// To show the ordering that must be taken into account, consider the composite literal
+ // expressions `&T{f: 1}` and `{f: 1}` within the expression `[]S{{f: 1}}` here:
+ // type N struct{f int}
+ // func _[T N, S *N]() {
+ // _ = &T{f: 1}
+ // _ = []S{{f: 1}}
+ // }
+ // For `&T{f: 1}`, we compute `typ` and `t` as:
+ // typeOf(&T{f: 1}) == *T
+ // deref(*T) == T (typ)
+ // CoreType(T) == N
+ // deref(N) == N
+ // N.Underlying() == struct{f int} (t)
+ // For `{f: 1}` in `[]S{{f: 1}}`, we compute `typ` and `t` as:
+ // typeOf({f: 1}) == S
+ // deref(S) == S (typ)
+ // CoreType(S) == *N
+ // deref(*N) == N
+ // N.Underlying() == struct{f int} (t)
+ switch t := t.(type) {
case *types.Struct:
if !isZero && len(e.Elts) != t.NumFields() {
// memclear
@@ -1135,6 +1306,7 @@ func (b *builder) compLit(fn *Function, addr Value, e *ast.CompositeLit, isZero
X: addr,
Field: fieldIndex,
}
+ faddr.setPos(pos)
faddr.setType(types.NewPointer(sf.Type()))
fn.emit(faddr)
b.assign(fn, &address{addr: faddr, pos: pos, expr: e}, e, isZero, sb)
@@ -1243,7 +1415,6 @@ func (b *builder) compLit(fn *Function, addr Value, e *ast.CompositeLit, isZero
// switchStmt emits to fn code for the switch statement s, optionally
// labelled by label.
-//
func (b *builder) switchStmt(fn *Function, s *ast.SwitchStmt, label *lblock) {
// We treat SwitchStmt like a sequential if-else chain.
// Multiway dispatch can be recovered later by ssautil.Switches()
@@ -1329,7 +1500,6 @@ func (b *builder) switchStmt(fn *Function, s *ast.SwitchStmt, label *lblock) {
// typeSwitchStmt emits to fn code for the type switch statement s, optionally
// labelled by label.
-//
func (b *builder) typeSwitchStmt(fn *Function, s *ast.TypeSwitchStmt, label *lblock) {
// We treat TypeSwitchStmt like a sequential if-else chain.
// Multiway dispatch can be recovered later by ssautil.Switches().
@@ -1407,7 +1577,7 @@ func (b *builder) typeSwitchStmt(fn *Function, s *ast.TypeSwitchStmt, label *lbl
casetype = fn.typeOf(cond)
var condv Value
if casetype == tUntypedNil {
- condv = emitCompare(fn, token.EQL, x, nilConst(x.Type()), cond.Pos())
+ condv = emitCompare(fn, token.EQL, x, zeroConst(x.Type()), cond.Pos())
ti = x
} else {
yok := emitTypeTest(fn, x, casetype, cc.Case)
@@ -1452,7 +1622,6 @@ func (b *builder) typeCaseBody(fn *Function, cc *ast.CaseClause, x Value, done *
// selectStmt emits to fn code for the select statement s, optionally
// labelled by label.
-//
func (b *builder) selectStmt(fn *Function, s *ast.SelectStmt, label *lblock) {
// A blocking select of a single case degenerates to a
// simple send or receive.
@@ -1491,12 +1660,12 @@ func (b *builder) selectStmt(fn *Function, s *ast.SelectStmt, label *lblock) {
case *ast.SendStmt: // ch<- i
ch := b.expr(fn, comm.Chan)
+ chtyp := typeparams.CoreType(fn.typ(ch.Type())).(*types.Chan)
st = &SelectState{
Dir: types.SendOnly,
Chan: ch,
- Send: emitConv(fn, b.expr(fn, comm.Value),
- ch.Type().Underlying().(*types.Chan).Elem()),
- Pos: comm.Arrow,
+ Send: emitConv(fn, b.expr(fn, comm.Value), chtyp.Elem()),
+ Pos: comm.Arrow,
}
if debugInfo {
st.DebugNode = comm
@@ -1548,8 +1717,8 @@ func (b *builder) selectStmt(fn *Function, s *ast.SelectStmt, label *lblock) {
vars = append(vars, varIndex, varOk)
for _, st := range states {
if st.Dir == types.RecvOnly {
- tElem := st.Chan.Type().Underlying().(*types.Chan).Elem()
- vars = append(vars, anonVar(tElem))
+ chtyp := typeparams.CoreType(fn.typ(st.Chan.Type())).(*types.Chan)
+ vars = append(vars, anonVar(chtyp.Elem()))
}
}
sel.setType(types.NewTuple(vars...))
@@ -1634,7 +1803,6 @@ func (b *builder) selectStmt(fn *Function, s *ast.SelectStmt, label *lblock) {
// forStmt emits to fn code for the for statement s, optionally
// labelled by label.
-//
func (b *builder) forStmt(fn *Function, s *ast.ForStmt, label *lblock) {
// ...init...
// jump loop
@@ -1691,7 +1859,6 @@ func (b *builder) forStmt(fn *Function, s *ast.ForStmt, label *lblock) {
// over array, *array or slice value x.
// The v result is defined only if tv is non-nil.
// forPos is the position of the "for" token.
-//
func (b *builder) rangeIndexed(fn *Function, x Value, tv types.Type, pos token.Pos) (k, v Value, loop, done *BasicBlock) {
//
// length = len(x)
@@ -1715,6 +1882,8 @@ func (b *builder) rangeIndexed(fn *Function, x Value, tv types.Type, pos token.P
// elimination if x is pure, static unrolling, etc.
// Ranging over a nil *array may have >0 iterations.
// We still generate code for x, in case it has effects.
+ //
+ // TypeParams do not have constant length. Use underlying instead of core type.
length = intConst(arr.Len())
} else {
// length = len(x).
@@ -1747,7 +1916,7 @@ func (b *builder) rangeIndexed(fn *Function, x Value, tv types.Type, pos token.P
k = emitLoad(fn, index)
if tv != nil {
- switch t := x.Type().Underlying().(type) {
+ switch t := typeparams.CoreType(x.Type()).(type) {
case *types.Array:
instr := &Index{
X: x,
@@ -1786,7 +1955,6 @@ func (b *builder) rangeIndexed(fn *Function, x Value, tv types.Type, pos token.P
// Range/Next/Extract to iterate over map or string value x.
// tk and tv are the types of the key/value results k and v, or nil
// if the respective component is not wanted.
-//
func (b *builder) rangeIter(fn *Function, x Value, tk, tv types.Type, pos token.Pos) (k, v Value, loop, done *BasicBlock) {
//
// it = range x
@@ -1818,11 +1986,9 @@ func (b *builder) rangeIter(fn *Function, x Value, tk, tv types.Type, pos token.
emitJump(fn, loop)
fn.currentBlock = loop
- _, isString := x.Type().Underlying().(*types.Basic)
-
okv := &Next{
Iter: it,
- IsString: isString,
+ IsString: isBasic(typeparams.CoreType(x.Type())),
}
okv.setType(types.NewTuple(
varOk,
@@ -1850,7 +2016,6 @@ func (b *builder) rangeIter(fn *Function, x Value, tk, tv types.Type, pos token.
// tk is the channel's element type, or nil if the k result is
// not wanted
// pos is the position of the '=' or ':=' token.
-//
func (b *builder) rangeChan(fn *Function, x Value, tk types.Type, pos token.Pos) (k Value, loop, done *BasicBlock) {
//
// loop: (target of continue)
@@ -1873,7 +2038,7 @@ func (b *builder) rangeChan(fn *Function, x Value, tk types.Type, pos token.Pos)
}
recv.setPos(pos)
recv.setType(types.NewTuple(
- newVar("k", x.Type().Underlying().(*types.Chan).Elem()),
+ newVar("k", typeparams.CoreType(x.Type()).(*types.Chan).Elem()),
varOk,
))
ko := fn.emit(recv)
@@ -1889,7 +2054,6 @@ func (b *builder) rangeChan(fn *Function, x Value, tk types.Type, pos token.Pos)
// rangeStmt emits to fn code for the range statement s, optionally
// labelled by label.
-//
func (b *builder) rangeStmt(fn *Function, s *ast.RangeStmt, label *lblock) {
var tk, tv types.Type
if s.Key != nil && !isBlankIdent(s.Key) {
@@ -1918,7 +2082,7 @@ func (b *builder) rangeStmt(fn *Function, s *ast.RangeStmt, label *lblock) {
var k, v Value
var loop, done *BasicBlock
- switch rt := x.Type().Underlying().(type) {
+ switch rt := typeparams.CoreType(x.Type()).(type) {
case *types.Slice, *types.Array, *types.Pointer: // *array
k, v, loop, done = b.rangeIndexed(fn, x, tv, s.For)
@@ -1996,11 +2160,11 @@ start:
b.expr(fn, s.X)
case *ast.SendStmt:
+ chtyp := typeparams.CoreType(fn.typeOf(s.Chan)).(*types.Chan)
fn.emit(&Send{
Chan: b.expr(fn, s.Chan),
- X: emitConv(fn, b.expr(fn, s.Value),
- fn.typeOf(s.Chan).Underlying().(*types.Chan).Elem()),
- pos: s.Arrow,
+ X: emitConv(fn, b.expr(fn, s.Value), chtyp.Elem()),
+ pos: s.Arrow,
})
case *ast.IncDecStmt:
@@ -2157,6 +2321,18 @@ start:
// buildFunction builds SSA code for the body of function fn. Idempotent.
func (b *builder) buildFunction(fn *Function) {
+ if !fn.built {
+ assert(fn.parent == nil, "anonymous functions should not be built by buildFunction()")
+ b.buildFunctionBody(fn)
+ fn.done()
+ }
+}
+
+// buildFunctionBody builds SSA code for the body of function fn.
+//
+// fn is not done building until fn.done() is called.
+func (b *builder) buildFunctionBody(fn *Function) {
+ // TODO(taking): see if this check is reachable.
if fn.Blocks != nil {
return // building already started
}
@@ -2166,7 +2342,9 @@ func (b *builder) buildFunction(fn *Function) {
var functype *ast.FuncType
switch n := fn.syntax.(type) {
case nil:
- return // not a Go source function. (Synthetic, or from object file.)
+ if fn.Params != nil {
+ return // not a Go source function. (Synthetic, or from object file.)
+ }
case *ast.FuncDecl:
functype = n.Type
recvField = n.Recv
@@ -2198,6 +2376,13 @@ func (b *builder) buildFunction(fn *Function) {
}
return
}
+
+ // Build instantiation wrapper around generic body?
+ if fn.topLevelOrigin != nil && fn.subst == nil {
+ buildInstantiationWrapper(fn)
+ return
+ }
+
if fn.Prog.mode&LogSource != 0 {
defer logStack("build function %s @ %s", fn, fn.Prog.Fset.Position(fn.pos))()
}
@@ -2218,22 +2403,45 @@ func (b *builder) buildFunction(fn *Function) {
fn.finishBody()
}
-// buildFuncDecl builds SSA code for the function or method declared
-// by decl in package pkg.
+// buildCreated does the BUILD phase for each function created by the builder that is not yet BUILT.
+// Functions are built using buildFunction.
//
-func (b *builder) buildFuncDecl(pkg *Package, decl *ast.FuncDecl) {
- id := decl.Name
- if isBlankIdent(id) {
- return // discard
+// May add types that require runtime type information to builder.
+func (b *builder) buildCreated() {
+ for ; b.finished < b.created.Len(); b.finished++ {
+ fn := b.created.At(b.finished)
+ b.buildFunction(fn)
}
- fn := pkg.objects[pkg.info.Defs[id]].(*Function)
- if decl.Recv == nil && id.Name == "init" {
- var v Call
- v.Call.Value = fn
- v.setType(types.NewTuple())
- pkg.init.emit(&v)
+}
+
+// needsRuntimeTypes adds any runtime type information needed by the created functions.
+//
+// May add newly CREATEd functions, which in turn may need to be built or may require more runtime type information.
+//
+// EXCLUSIVE_LOCKS_ACQUIRED(prog.methodsMu)
+func (b *builder) needsRuntimeTypes() {
+ if b.created.Len() == 0 {
+ return
+ }
+ prog := b.created.At(0).Prog
+
+ var rtypes []types.Type
+ for ; b.rtypes < b.finished; b.rtypes++ {
+ fn := b.created.At(b.rtypes)
+ rtypes = append(rtypes, mayNeedRuntimeTypes(fn)...)
+ }
+
+ // Calling prog.needMethodsOf(T) on a basic type T is a no-op.
+ // Filter out the basic types to reduce acquiring prog.methodsMu.
+ rtypes = nonbasicTypes(rtypes)
+
+ for _, T := range rtypes {
+ prog.needMethodsOf(T, b.created)
}
- b.buildFunction(fn)
+}
+
+func (b *builder) done() bool {
+ return b.rtypes >= b.created.Len()
}
// Build calls Package.Build for each package in prog.
@@ -2243,7 +2451,6 @@ func (b *builder) buildFuncDecl(pkg *Package, decl *ast.FuncDecl) {
// need only build a single package.
//
// Build is idempotent and thread-safe.
-//
func (prog *Program) Build() {
var wg sync.WaitGroup
for _, p := range prog.packages {
@@ -2267,7 +2474,6 @@ func (prog *Program) Build() {
// error-free).
//
// Build is idempotent and thread-safe.
-//
func (p *Package) Build() { p.buildOnce.Do(p.build) }
func (p *Package) build() {
@@ -2276,16 +2482,30 @@ func (p *Package) build() {
}
// Ensure we have runtime type info for all exported members.
+ // Additionally filter for just concrete types that can be runtime types.
+ //
// TODO(adonovan): ideally belongs in memberFromObject, but
// that would require package creation in topological order.
for name, mem := range p.Members {
- if ast.IsExported(name) {
- p.Prog.needMethodsOf(mem.Type())
+ isGround := func(m Member) bool {
+ switch m := m.(type) {
+ case *Type:
+ named, _ := m.Type().(*types.Named)
+ return named == nil || typeparams.ForNamed(named) == nil
+ case *Function:
+ return m.typeparams.Len() == 0
+ }
+ return true // *NamedConst, *Global
+ }
+ if ast.IsExported(name) && isGround(mem) {
+ p.Prog.needMethodsOf(mem.Type(), &p.created)
}
}
if p.Prog.mode&LogSource != 0 {
defer logStack("build %s", p)()
}
+
+ b := builder{created: &p.created}
init := p.init
init.startBody()
@@ -2314,9 +2534,10 @@ func (p *Package) build() {
}
}
- var b builder
-
// Initialize package-level vars in correct order.
+ if len(p.info.InitOrder) > 0 && len(p.files) == 0 {
+ panic("no source files provided for package. cannot initialize globals")
+ }
for _, varinit := range p.info.InitOrder {
if init.Prog.mode&LogSource != 0 {
fmt.Fprintf(os.Stderr, "build global initializer %v @ %s\n",
@@ -2343,13 +2564,18 @@ func (p *Package) build() {
}
}
- // Build all package-level functions, init functions
- // and methods, including unreachable/blank ones.
- // We build them in source order, but it's not significant.
+ // Call all of the declared init() functions in source order.
for _, file := range p.files {
for _, decl := range file.Decls {
if decl, ok := decl.(*ast.FuncDecl); ok {
- b.buildFuncDecl(p, decl)
+ id := decl.Name
+ if !isBlankIdent(id) && id.Name == "init" && decl.Recv == nil {
+ fn := p.objects[p.info.Defs[id]].(*Function)
+ var v Call
+ v.Call.Value = fn
+ v.setType(types.NewTuple())
+ p.init.emit(&v)
+ }
}
}
}
@@ -2361,8 +2587,28 @@ func (p *Package) build() {
}
init.emit(new(Return))
init.finishBody()
+ init.done()
+
+ // Build all CREATEd functions and add runtime types.
+	// These Functions include package-level functions, init functions, methods, and synthetic functions (including unreachable/blank ones).
+	// Builds any functions CREATEd while building this package.
+	//
+	// Initially the created functions for the package are:
+	//	[init, decl0, ..., declN]
+	// where decl0, ..., declN are the declared functions in source order (the order is not significant).
+ //
+ // As these are built, more functions (function literals, wrappers, etc.) can be CREATEd.
+ // Iterate until we reach a fixed point.
+ //
+ // Wait for init() to be BUILT as that cannot be built by buildFunction().
+ //
+ for !b.done() {
+ b.buildCreated() // build any CREATEd and not BUILT function. May add runtime types.
+ b.needsRuntimeTypes() // Add all of the runtime type information. May CREATE Functions.
+ }
- p.info = nil // We no longer need ASTs or go/types deductions.
+ p.info = nil // We no longer need ASTs or go/types deductions.
+ p.created = nil // We no longer need created functions.
if p.Prog.mode&SanityCheckFunctions != 0 {
sanityCheckPackage(p)
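The builder.go changes above replace per-declaration building with a CREATE/BUILD split that iterates to a fixed point inside (*Package).build. That loop is internal, but its effect is observable through the public API: with the InstantiateGenerics mode, instantiations created while one function is being built are themselves built before Build returns. Below is a minimal sketch of driving this through ssautil.BuildPackage; the sample package, its Min function, and the printed members are illustrative assumptions, not part of this patch.

package main

import (
	"fmt"
	"go/ast"
	"go/parser"
	"go/token"
	"go/types"

	"golang.org/x/tools/go/ssa"
	"golang.org/x/tools/go/ssa/ssautil"
)

func main() {
	const src = `package p
func Min[T int | float64](a, b T) T { if a < b { return a }; return b }
var X = Min[int](1, 2)
`
	fset := token.NewFileSet()
	f, err := parser.ParseFile(fset, "p.go", src, 0)
	if err != nil {
		panic(err)
	}
	// BuildPackage runs the CREATE and BUILD phases; with InstantiateGenerics
	// the instantiation Min[int] is created and built by the fixed-point loop
	// described in the patch above.
	pkg, _, err := ssautil.BuildPackage(&types.Config{}, fset,
		types.NewPackage("p", ""), []*ast.File{f},
		ssa.SanityCheckFunctions|ssa.InstantiateGenerics)
	if err != nil {
		panic(err)
	}
	for name, mem := range pkg.Members {
		fmt.Printf("%s\t%s\n", name, mem.Token())
	}
}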
diff --git a/go/ssa/builder_generic_test.go b/go/ssa/builder_generic_test.go
new file mode 100644
index 000000000..2588f74c5
--- /dev/null
+++ b/go/ssa/builder_generic_test.go
@@ -0,0 +1,679 @@
+// Copyright 2022 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package ssa_test
+
+import (
+ "fmt"
+ "go/parser"
+ "go/token"
+ "reflect"
+ "sort"
+ "testing"
+
+ "golang.org/x/tools/go/expect"
+ "golang.org/x/tools/go/loader"
+ "golang.org/x/tools/go/ssa"
+ "golang.org/x/tools/internal/typeparams"
+)
+
+// TestGenericBodies tests that bodies of generic functions and methods containing
+// different constructs can be built in BuilderMode(0).
+//
+// Each test specifies the contents of a package containing a single Go file.
+// Each call print(arg0, arg1, ...) to the builtin print function
+// in the SSA form is correlated with a comment at the end of the line of the form:
+//
+// //@ types(a, b, c)
+//
+// where a, b and c are the types of the arguments to the print call
+// serialized using go/types.Type.String().
+// See x/tools/go/expect for details on the syntax.
+func TestGenericBodies(t *testing.T) {
+ if !typeparams.Enabled {
+ t.Skip("TestGenericBodies requires type parameters")
+ }
+ for _, test := range []struct {
+ pkg string // name of the package.
+ contents string // contents of the Go package.
+ }{
+ {
+ pkg: "p",
+ contents: `
+ package p
+
+ func f(x int) {
+ var i interface{}
+ print(i, 0) //@ types("interface{}", int)
+ print() //@ types()
+ print(x) //@ types(int)
+ }
+ `,
+ },
+ {
+ pkg: "q",
+ contents: `
+ package q
+
+ func f[T any](x T) {
+ print(x) //@ types(T)
+ }
+ `,
+ },
+ {
+ pkg: "r",
+ contents: `
+ package r
+
+ func f[T ~int]() {
+ var x T
+ print(x) //@ types(T)
+ }
+ `,
+ },
+ {
+ pkg: "s",
+ contents: `
+ package s
+
+ func a[T ~[4]byte](x T) {
+ for k, v := range x {
+ print(x, k, v) //@ types(T, int, byte)
+ }
+ }
+ func b[T ~*[4]byte](x T) {
+ for k, v := range x {
+ print(x, k, v) //@ types(T, int, byte)
+ }
+ }
+ func c[T ~[]byte](x T) {
+ for k, v := range x {
+ print(x, k, v) //@ types(T, int, byte)
+ }
+ }
+ func d[T ~string](x T) {
+ for k, v := range x {
+ print(x, k, v) //@ types(T, int, rune)
+ }
+ }
+ func e[T ~map[int]string](x T) {
+ for k, v := range x {
+ print(x, k, v) //@ types(T, int, string)
+ }
+ }
+ func f[T ~chan string](x T) {
+ for v := range x {
+ print(x, v) //@ types(T, string)
+ }
+ }
+
+ func From() {
+ type A [4]byte
+ print(a[A]) //@ types("func(x s.A)")
+
+ type B *[4]byte
+ print(b[B]) //@ types("func(x s.B)")
+
+ type C []byte
+ print(c[C]) //@ types("func(x s.C)")
+
+ type D string
+ print(d[D]) //@ types("func(x s.D)")
+
+ type E map[int]string
+ print(e[E]) //@ types("func(x s.E)")
+
+ type F chan string
+ print(f[F]) //@ types("func(x s.F)")
+ }
+ `,
+ },
+ {
+ pkg: "t",
+ contents: `
+ package t
+
+ func f[S any, T ~chan S](x T) {
+ for v := range x {
+ print(x, v) //@ types(T, S)
+ }
+ }
+
+ func From() {
+ type F chan string
+ print(f[string, F]) //@ types("func(x t.F)")
+ }
+ `,
+ },
+ {
+ pkg: "u",
+ contents: `
+ package u
+
+ func fibonacci[T ~chan int](c, quit T) {
+ x, y := 0, 1
+ for {
+ select {
+ case c <- x:
+ x, y = y, x+y
+ case <-quit:
+ print(c, quit, x, y) //@ types(T, T, int, int)
+ return
+ }
+ }
+ }
+ func start[T ~chan int](c, quit T) {
+ go func() {
+ for i := 0; i < 10; i++ {
+ print(<-c) //@ types(int)
+ }
+ quit <- 0
+ }()
+ }
+ func From() {
+ type F chan int
+ c := make(F)
+ quit := make(F)
+ print(start[F], c, quit) //@ types("func(c u.F, quit u.F)", "u.F", "u.F")
+ print(fibonacci[F], c, quit) //@ types("func(c u.F, quit u.F)", "u.F", "u.F")
+ }
+ `,
+ },
+ {
+ pkg: "v",
+ contents: `
+ package v
+
+ func f[T ~struct{ x int; y string }](i int) T {
+ u := []T{ T{0, "lorem"}, T{1, "ipsum"}}
+ return u[i]
+ }
+ func From() {
+ type S struct{ x int; y string }
+ print(f[S]) //@ types("func(i int) v.S")
+ }
+ `,
+ },
+ {
+ pkg: "w",
+ contents: `
+ package w
+
+ func f[T ~[4]int8](x T, l, h int) []int8 {
+ return x[l:h]
+ }
+ func g[T ~*[4]int16](x T, l, h int) []int16 {
+ return x[l:h]
+ }
+ func h[T ~[]int32](x T, l, h int) T {
+ return x[l:h]
+ }
+ func From() {
+ type F [4]int8
+ type G *[4]int16
+ type H []int32
+ print(f[F](F{}, 0, 0)) //@ types("[]int8")
+ print(g[G](nil, 0, 0)) //@ types("[]int16")
+ print(h[H](nil, 0, 0)) //@ types("w.H")
+ }
+ `,
+ },
+ {
+ pkg: "x",
+ contents: `
+ package x
+
+ func h[E any, T ~[]E](x T, l, h int) []E {
+ s := x[l:h]
+ print(s) //@ types("T")
+ return s
+ }
+ func From() {
+ type H []int32
+ print(h[int32, H](nil, 0, 0)) //@ types("[]int32")
+ }
+ `,
+ },
+ {
+ pkg: "y",
+ contents: `
+ package y
+
+ // Test "make" builtin with different forms on core types and
+ // when capacities are constants or variable.
+ func h[E any, T ~[]E](m, n int) {
+ print(make(T, 3)) //@ types(T)
+ print(make(T, 3, 5)) //@ types(T)
+ print(make(T, m)) //@ types(T)
+ print(make(T, m, n)) //@ types(T)
+ }
+ func i[K comparable, E any, T ~map[K]E](m int) {
+ print(make(T)) //@ types(T)
+ print(make(T, 5)) //@ types(T)
+ print(make(T, m)) //@ types(T)
+ }
+ func j[E any, T ~chan E](m int) {
+ print(make(T)) //@ types(T)
+ print(make(T, 6)) //@ types(T)
+ print(make(T, m)) //@ types(T)
+ }
+ func From() {
+ type H []int32
+ h[int32, H](3, 4)
+ type I map[int8]H
+ i[int8, H, I](5)
+ type J chan I
+ j[I, J](6)
+ }
+ `,
+ },
+ {
+ pkg: "z",
+ contents: `
+ package z
+
+ func h[T ~[4]int](x T) {
+ print(len(x), cap(x)) //@ types(int, int)
+ }
+ func i[T ~[4]byte | []int | ~chan uint8](x T) {
+ print(len(x), cap(x)) //@ types(int, int)
+ }
+ func j[T ~[4]int | any | map[string]int]() {
+ print(new(T)) //@ types("*T")
+ }
+ func k[T ~[4]int | any | map[string]int](x T) {
+ print(x) //@ types(T)
+ panic(x)
+ }
+ `,
+ },
+ {
+ pkg: "a",
+ contents: `
+ package a
+
+ func f[E any, F ~func() E](x F) {
+ print(x, x()) //@ types(F, E)
+ }
+ func From() {
+ type T func() int
+ f[int, T](func() int { return 0 })
+ f[int, func() int](func() int { return 1 })
+ }
+ `,
+ },
+ {
+ pkg: "b",
+ contents: `
+ package b
+
+ func f[E any, M ~map[string]E](m M) {
+ y, ok := m["lorem"]
+ print(m, y, ok) //@ types(M, E, bool)
+ }
+ func From() {
+ type O map[string][]int
+ f(O{"lorem": []int{0, 1, 2, 3}})
+ }
+ `,
+ },
+ {
+ pkg: "c",
+ contents: `
+ package c
+
+ func a[T interface{ []int64 | [5]int64 }](x T) int64 {
+ print(x, x[2], x[3]) //@ types(T, int64, int64)
+ x[2] = 5
+ return x[3]
+ }
+ func b[T interface{ []byte | string }](x T) byte {
+ print(x, x[3]) //@ types(T, byte)
+ return x[3]
+ }
+ func c[T interface{ []byte }](x T) byte {
+ print(x, x[2], x[3]) //@ types(T, byte, byte)
+ x[2] = 'b'
+ return x[3]
+ }
+ func d[T interface{ map[int]int64 }](x T) int64 {
+ print(x, x[2], x[3]) //@ types(T, int64, int64)
+ x[2] = 43
+ return x[3]
+ }
+ func e[T ~string](t T) {
+ print(t, t[0]) //@ types(T, uint8)
+ }
+ func f[T ~string|[]byte](t T) {
+ print(t, t[0]) //@ types(T, uint8)
+ }
+ func g[T []byte](t T) {
+ print(t, t[0]) //@ types(T, byte)
+ }
+ func h[T ~[4]int|[]int](t T) {
+ print(t, t[0]) //@ types(T, int)
+ }
+ func i[T ~[4]int|*[4]int|[]int](t T) {
+ print(t, t[0]) //@ types(T, int)
+ }
+ func j[T ~[4]int|*[4]int|[]int](t T) {
+ print(t, &t[0]) //@ types(T, "*int")
+ }
+ `,
+ },
+ {
+ pkg: "d",
+ contents: `
+ package d
+
+ type MyInt int
+ type Other int
+ type MyInterface interface{ foo() }
+
+ // ChangeType tests
+ func ct0(x int) { v := MyInt(x); print(x, v) /*@ types(int, "d.MyInt")*/ }
+ func ct1[T MyInt | Other, S int ](x S) { v := T(x); print(x, v) /*@ types(S, T)*/ }
+ func ct2[T int, S MyInt | int ](x S) { v := T(x); print(x, v) /*@ types(S, T)*/ }
+ func ct3[T MyInt | Other, S MyInt | int ](x S) { v := T(x) ; print(x, v) /*@ types(S, T)*/ }
+
+ // Convert tests
+ func co0[T int | int8](x MyInt) { v := T(x); print(x, v) /*@ types("d.MyInt", T)*/}
+ func co1[T int | int8](x T) { v := MyInt(x); print(x, v) /*@ types(T, "d.MyInt")*/ }
+ func co2[S, T int | int8](x T) { v := S(x); print(x, v) /*@ types(T, S)*/ }
+
+ // MakeInterface tests
+ func mi0[T MyInterface](x T) { v := MyInterface(x); print(x, v) /*@ types(T, "d.MyInterface")*/ }
+
+ // NewConst tests
+ func nc0[T any]() { v := (*T)(nil); print(v) /*@ types("*T")*/}
+
+ // SliceToArrayPointer
+ func sl0[T *[4]int | *[2]int](x []int) { v := T(x); print(x, v) /*@ types("[]int", T)*/ }
+ func sl1[T *[4]int | *[2]int, S []int](x S) { v := T(x); print(x, v) /*@ types(S, T)*/ }
+ `,
+ },
+ {
+ pkg: "e",
+ contents: `
+ package e
+
+ func c[T interface{ foo() string }](x T) {
+ print(x, x.foo, x.foo()) /*@ types(T, "func() string", string)*/
+ }
+ `,
+ },
+ {
+ pkg: "f",
+ contents: `package f
+
+ func eq[T comparable](t T, i interface{}) bool {
+ return t == i
+ }
+ `,
+ },
+ {
+ pkg: "g",
+ contents: `package g
+ type S struct{ f int }
+ func c[P *S]() []P { return []P{{f: 1}} }
+ `,
+ },
+ {
+ pkg: "h",
+ contents: `package h
+ func sign[bytes []byte | string](s bytes) (bool, bool) {
+ neg := false
+ if len(s) > 0 && (s[0] == '-' || s[0] == '+') {
+ neg = s[0] == '-'
+ s = s[1:]
+ }
+ return !neg, len(s) > 0
+ }`,
+ },
+ {
+ pkg: "i",
+ contents: `package i
+ func digits[bytes []byte | string](s bytes) bool {
+ for _, c := range []byte(s) {
+ if c < '0' || '9' < c {
+ return false
+ }
+ }
+ return true
+ }`,
+ },
+ {
+ pkg: "j",
+ contents: `
+ package j
+
+ type E interface{}
+
+ func Foo[T E, PT interface{ *T }]() T {
+ pt := PT(new(T))
+ x := *pt
+ print(x) /*@ types(T)*/
+ return x
+ }
+ `,
+ },
+ } {
+ test := test
+ t.Run(test.pkg, func(t *testing.T) {
+ // Parse
+ conf := loader.Config{ParserMode: parser.ParseComments}
+ fname := test.pkg + ".go"
+ f, err := conf.ParseFile(fname, test.contents)
+ if err != nil {
+ t.Fatalf("parse: %v", err)
+ }
+ conf.CreateFromFiles(test.pkg, f)
+
+ // Load
+ lprog, err := conf.Load()
+ if err != nil {
+ t.Fatalf("Load: %v", err)
+ }
+
+ // Create and build SSA
+ prog := ssa.NewProgram(lprog.Fset, ssa.SanityCheckFunctions)
+ for _, info := range lprog.AllPackages {
+ if info.TransitivelyErrorFree {
+ prog.CreatePackage(info.Pkg, info.Files, &info.Info, info.Importable)
+ }
+ }
+ p := prog.Package(lprog.Package(test.pkg).Pkg)
+ p.Build()
+
+ // Collect calls to the builtin print function.
+ probes := make(map[*ssa.CallCommon]bool)
+ for _, mem := range p.Members {
+ if fn, ok := mem.(*ssa.Function); ok {
+ for _, bb := range fn.Blocks {
+ for _, i := range bb.Instrs {
+ if i, ok := i.(ssa.CallInstruction); ok {
+ call := i.Common()
+ if b, ok := call.Value.(*ssa.Builtin); ok && b.Name() == "print" {
+ probes[i.Common()] = true
+ }
+ }
+ }
+ }
+ }
+ }
+
+ // Collect all notes in f, i.e. comments starting with "//@ types".
+ notes, err := expect.ExtractGo(prog.Fset, f)
+ if err != nil {
+ t.Errorf("expect.ExtractGo: %v", err)
+ }
+
+			// Match each probe with a note on the same line.
+ sameLine := func(x, y token.Pos) bool {
+ xp := prog.Fset.Position(x)
+ yp := prog.Fset.Position(y)
+ return xp.Filename == yp.Filename && xp.Line == yp.Line
+ }
+ expectations := make(map[*ssa.CallCommon]*expect.Note)
+ for call := range probes {
+ var match *expect.Note
+ for _, note := range notes {
+ if note.Name == "types" && sameLine(call.Pos(), note.Pos) {
+ match = note // first match is good enough.
+ break
+ }
+ }
+ if match != nil {
+ expectations[call] = match
+ } else {
+ t.Errorf("Unmatched probe: %v", call)
+ }
+ }
+
+ // Check each expectation.
+ for call, note := range expectations {
+ var args []string
+ for _, a := range call.Args {
+ args = append(args, a.Type().String())
+ }
+ if got, want := fmt.Sprint(args), fmt.Sprint(note.Args); got != want {
+ t.Errorf("Arguments to print() were expected to be %q. got %q", want, got)
+ }
+ }
+ })
+ }
+}
+
+// TestInstructionString tests serializing instructions via Instruction.String().
+func TestInstructionString(t *testing.T) {
+ if !typeparams.Enabled {
+ t.Skip("TestInstructionString requires type parameters")
+ }
+ // Tests (ssa.Instruction).String(). Instructions are from a single go file.
+ // The Instructions tested are those that match a comment of the form:
+ //
+ // //@ instrs(f, kind, strs...)
+ //
+	// where f is the name of the function, kind is the type of the instructions matched
+	// within the function, and strs is the expected set of String() values for the
+	// matched instructions (in some order).
+ // See x/tools/go/expect for details on the syntax.
+
+ const contents = `
+ package p
+
+ //@ instrs("f", "*ssa.TypeAssert")
+ //@ instrs("f", "*ssa.Call", "print(nil:interface{}, 0:int)")
+ func f(x int) { // non-generic smoke test.
+ var i interface{}
+ print(i, 0)
+ }
+
+ //@ instrs("h", "*ssa.Alloc", "local T (u)")
+ //@ instrs("h", "*ssa.FieldAddr", "&t0.x [#0]")
+ func h[T ~struct{ x string }]() T {
+ u := T{"lorem"}
+ return u
+ }
+
+ //@ instrs("c", "*ssa.TypeAssert", "typeassert t0.(interface{})")
+ //@ instrs("c", "*ssa.Call", "invoke x.foo()")
+ func c[T interface{ foo() string }](x T) {
+ _ = x.foo
+ _ = x.foo()
+ }
+
+ //@ instrs("d", "*ssa.TypeAssert", "typeassert t0.(interface{})")
+ //@ instrs("d", "*ssa.Call", "invoke x.foo()")
+ func d[T interface{ foo() string; comparable }](x T) {
+ _ = x.foo
+ _ = x.foo()
+ }
+ `
+
+ // Parse
+ conf := loader.Config{ParserMode: parser.ParseComments}
+ const fname = "p.go"
+ f, err := conf.ParseFile(fname, contents)
+ if err != nil {
+ t.Fatalf("parse: %v", err)
+ }
+ conf.CreateFromFiles("p", f)
+
+ // Load
+ lprog, err := conf.Load()
+ if err != nil {
+ t.Fatalf("Load: %v", err)
+ }
+
+ // Create and build SSA
+ prog := ssa.NewProgram(lprog.Fset, ssa.SanityCheckFunctions)
+ for _, info := range lprog.AllPackages {
+ if info.TransitivelyErrorFree {
+ prog.CreatePackage(info.Pkg, info.Files, &info.Info, info.Importable)
+ }
+ }
+ p := prog.Package(lprog.Package("p").Pkg)
+ p.Build()
+
+ // Collect all notes in f, i.e. comments starting with "//@ instr".
+ notes, err := expect.ExtractGo(prog.Fset, f)
+ if err != nil {
+ t.Errorf("expect.ExtractGo: %v", err)
+ }
+
+	// expectations maps a {function, instruction kind} key to {wants, matches},
+	// where matches collects the String() of every instruction that matches the key.
+	// Each expectation is that some permutation of matches equals wants.
+ type expKey struct {
+ function string
+ kind string
+ }
+ type expValue struct {
+ wants []string
+ matches []string
+ }
+ expectations := make(map[expKey]*expValue)
+ for _, note := range notes {
+ if note.Name == "instrs" {
+ if len(note.Args) < 2 {
+ t.Error("Had @instrs annotation without at least 2 arguments")
+ continue
+ }
+ fn, kind := fmt.Sprint(note.Args[0]), fmt.Sprint(note.Args[1])
+ var wants []string
+ for _, arg := range note.Args[2:] {
+ wants = append(wants, fmt.Sprint(arg))
+ }
+ expectations[expKey{fn, kind}] = &expValue{wants, nil}
+ }
+ }
+
+ // Collect all Instructions that match the expectations.
+ for _, mem := range p.Members {
+ if fn, ok := mem.(*ssa.Function); ok {
+ for _, bb := range fn.Blocks {
+ for _, i := range bb.Instrs {
+ kind := fmt.Sprintf("%T", i)
+ if e := expectations[expKey{fn.Name(), kind}]; e != nil {
+ e.matches = append(e.matches, i.String())
+ }
+ }
+ }
+ }
+ }
+
+ // Check each expectation.
+ for key, value := range expectations {
+ if _, ok := p.Members[key.function]; !ok {
+ t.Errorf("Expectation on %s does not match a member in %s", key.function, p.Pkg.Name())
+ }
+ got, want := value.matches, value.wants
+ sort.Strings(got)
+ sort.Strings(want)
+ if !reflect.DeepEqual(want, got) {
+ t.Errorf("Within %s wanted instructions of kind %s: %q. got %q", key.function, key.kind, want, got)
+ }
+ }
+}
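Both tests in this new file funnel their expectations through golang.org/x/tools/go/expect: //@ notes are extracted from comments and matched by position against the built SSA. A stripped-down sketch of just that extraction step follows; the sample source and note are illustrative assumptions, not part of the patch.

package main

import (
	"fmt"
	"go/parser"
	"go/token"

	"golang.org/x/tools/go/expect"
)

func main() {
	const src = `package p

func f(x int) {
	print(x) //@ types(int)
}
`
	fset := token.NewFileSet()
	// ParseComments is required so the //@ notes survive parsing.
	f, err := parser.ParseFile(fset, "p.go", src, parser.ParseComments)
	if err != nil {
		panic(err)
	}
	notes, err := expect.ExtractGo(fset, f)
	if err != nil {
		panic(err)
	}
	for _, n := range notes {
		// Each note carries its name, arguments, and source position,
		// which the tests above match against print() call sites.
		fmt.Println(n.Name, n.Args, fset.Position(n.Pos))
	}
}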
diff --git a/go/ssa/builder_go117_test.go b/go/ssa/builder_go117_test.go
index f6545e5e2..699859705 100644
--- a/go/ssa/builder_go117_test.go
+++ b/go/ssa/builder_go117_test.go
@@ -57,7 +57,6 @@ func TestBuildPackageFailuresGo117(t *testing.T) {
importer types.Importer
}{
{"slice to array pointer - source is not a slice", "package p; var s [4]byte; var _ = (*[4]byte)(s)", nil},
- {"slice to array pointer - dest is not a pointer", "package p; var s []byte; var _ = ([4]byte)(s)", nil},
{"slice to array pointer - dest pointer elem is not an array", "package p; var s []byte; var _ = (*byte)(s)", nil},
}
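The failure case deleted above ("slice to array pointer - dest is not a pointer") is no longer a type error: Go 1.20 allows converting a slice directly to an array, not only to an array pointer, which is exactly what the new builder_go120_test.go below exercises. A short plain-Go illustration of the two conversion forms (not part of the patch):

package main

import "fmt"

func main() {
	s := []byte{1, 2, 3, 4}

	p := (*[4]byte)(s) // slice to array pointer: legal since Go 1.17
	a := [4]byte(s)    // slice to array: legal since Go 1.20, a compile error before

	fmt.Println(*p, a) // both conversions panic at run time if len(s) < 4
}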
diff --git a/go/ssa/builder_go120_test.go b/go/ssa/builder_go120_test.go
new file mode 100644
index 000000000..acdd182c5
--- /dev/null
+++ b/go/ssa/builder_go120_test.go
@@ -0,0 +1,102 @@
+// Copyright 2021 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+//go:build go1.20
+// +build go1.20
+
+package ssa_test
+
+import (
+ "go/ast"
+ "go/parser"
+ "go/token"
+ "go/types"
+ "testing"
+
+ "golang.org/x/tools/go/ssa"
+ "golang.org/x/tools/go/ssa/ssautil"
+)
+
+func TestBuildPackageGo120(t *testing.T) {
+ tests := []struct {
+ name string
+ src string
+ importer types.Importer
+ }{
+ {"slice to array", "package p; var s []byte; var _ = ([4]byte)(s)", nil},
+ {"slice to zero length array", "package p; var s []byte; var _ = ([0]byte)(s)", nil},
+ {"slice to zero length array type parameter", "package p; var s []byte; func f[T ~[0]byte]() { tmp := (T)(s); var z T; _ = tmp == z}", nil},
+ {"slice to non-zero length array type parameter", "package p; var s []byte; func h[T ~[1]byte | [4]byte]() { tmp := T(s); var z T; _ = tmp == z}", nil},
+ {"slice to maybe-zero length array type parameter", "package p; var s []byte; func g[T ~[0]byte | [4]byte]() { tmp := T(s); var z T; _ = tmp == z}", nil},
+ {
+ "rune sequence to sequence cast patterns", `
+ package p
+		// Each of the fXX functions describes a cast, legal in Go 1.20, between sequences of runes
+		// represented as []rune, pointers to rune arrays, rune arrays, or strings.
+		//
+		// The comments list the instructions currently emitted [approximately].
+		// If multiple conversions are needed, they are separated by |.
+ // rune was selected as it leads to string casts (byte is similar).
+ // The length 2 is not significant.
+ // Multiple array lengths may occur in a cast in practice (including 0).
+ func f00[S string, D string](s S) { _ = D(s) } // ChangeType
+ func f01[S string, D []rune](s S) { _ = D(s) } // Convert
+ func f02[S string, D []rune | string](s S) { _ = D(s) } // ChangeType | Convert
+ func f03[S [2]rune, D [2]rune](s S) { _ = D(s) } // ChangeType
+ func f04[S *[2]rune, D *[2]rune](s S) { _ = D(s) } // ChangeType
+ func f05[S []rune, D string](s S) { _ = D(s) } // Convert
+ func f06[S []rune, D [2]rune](s S) { _ = D(s) } // SliceToArrayPointer; Deref
+ func f07[S []rune, D [2]rune | string](s S) { _ = D(s) } // SliceToArrayPointer; Deref | Convert
+ func f08[S []rune, D *[2]rune](s S) { _ = D(s) } // SliceToArrayPointer
+ func f09[S []rune, D *[2]rune | string](s S) { _ = D(s) } // SliceToArrayPointer; Deref | Convert
+ func f10[S []rune, D *[2]rune | [2]rune](s S) { _ = D(s) } // SliceToArrayPointer | SliceToArrayPointer; Deref
+ func f11[S []rune, D *[2]rune | [2]rune | string](s S) { _ = D(s) } // SliceToArrayPointer | SliceToArrayPointer; Deref | Convert
+ func f12[S []rune, D []rune](s S) { _ = D(s) } // ChangeType
+ func f13[S []rune, D []rune | string](s S) { _ = D(s) } // Convert | ChangeType
+ func f14[S []rune, D []rune | [2]rune](s S) { _ = D(s) } // ChangeType | SliceToArrayPointer; Deref
+ func f15[S []rune, D []rune | [2]rune | string](s S) { _ = D(s) } // ChangeType | SliceToArrayPointer; Deref | Convert
+ func f16[S []rune, D []rune | *[2]rune](s S) { _ = D(s) } // ChangeType | SliceToArrayPointer
+ func f17[S []rune, D []rune | *[2]rune | string](s S) { _ = D(s) } // ChangeType | SliceToArrayPointer | Convert
+ func f18[S []rune, D []rune | *[2]rune | [2]rune](s S) { _ = D(s) } // ChangeType | SliceToArrayPointer | SliceToArrayPointer; Deref
+ func f19[S []rune, D []rune | *[2]rune | [2]rune | string](s S) { _ = D(s) } // ChangeType | SliceToArrayPointer | SliceToArrayPointer; Deref | Convert
+ func f20[S []rune | string, D string](s S) { _ = D(s) } // Convert | ChangeType
+ func f21[S []rune | string, D []rune](s S) { _ = D(s) } // Convert | ChangeType
+ func f22[S []rune | string, D []rune | string](s S) { _ = D(s) } // ChangeType | Convert | Convert | ChangeType
+ func f23[S []rune | [2]rune, D [2]rune](s S) { _ = D(s) } // SliceToArrayPointer; Deref | ChangeType
+ func f24[S []rune | *[2]rune, D *[2]rune](s S) { _ = D(s) } // SliceToArrayPointer | ChangeType
+ `, nil,
+ },
+ {
+ "matching named and underlying types", `
+ package p
+ type a string
+ type b string
+ func g0[S []rune | a | b, D []rune | a | b](s S) { _ = D(s) }
+ func g1[S []rune | ~string, D []rune | a | b](s S) { _ = D(s) }
+ func g2[S []rune | a | b, D []rune | ~string](s S) { _ = D(s) }
+ func g3[S []rune | ~string, D []rune |~string](s S) { _ = D(s) }
+ `, nil,
+ },
+ }
+
+ for _, tc := range tests {
+ tc := tc
+ t.Run(tc.name, func(t *testing.T) {
+ t.Parallel()
+ fset := token.NewFileSet()
+ f, err := parser.ParseFile(fset, "p.go", tc.src, 0)
+ if err != nil {
+ t.Error(err)
+ }
+ files := []*ast.File{f}
+
+ pkg := types.NewPackage("p", "")
+ conf := &types.Config{Importer: tc.importer}
+ _, _, err = ssautil.BuildPackage(conf, fset, pkg, files, ssa.SanityCheckFunctions)
+ if err != nil {
+ t.Errorf("unexpected error: %v", err)
+ }
+ })
+ }
+}
diff --git a/go/ssa/builder_test.go b/go/ssa/builder_test.go
index c45f930b3..b3bb09c5e 100644
--- a/go/ssa/builder_test.go
+++ b/go/ssa/builder_test.go
@@ -6,20 +6,26 @@ package ssa_test
import (
"bytes"
+ "fmt"
"go/ast"
+ "go/build"
"go/importer"
"go/parser"
"go/token"
"go/types"
"os"
+ "path/filepath"
"reflect"
"sort"
"strings"
"testing"
+ "golang.org/x/tools/go/buildutil"
"golang.org/x/tools/go/loader"
"golang.org/x/tools/go/ssa"
"golang.org/x/tools/go/ssa/ssautil"
+ "golang.org/x/tools/internal/testenv"
+ "golang.org/x/tools/internal/typeparams"
)
func isEmpty(f *ssa.Function) bool { return f.Blocks == nil }
@@ -27,6 +33,8 @@ func isEmpty(f *ssa.Function) bool { return f.Blocks == nil }
// Tests that programs partially loaded from gc object files contain
// functions with no code for the external portions, but are otherwise ok.
func TestBuildPackage(t *testing.T) {
+ testenv.NeedsGoBuild(t) // for importer.Default()
+
input := `
package main
@@ -38,7 +46,7 @@ import (
func main() {
var t testing.T
- t.Parallel() // static call to external declared method
+ t.Parallel() // static call to external declared method
t.Fail() // static call to promoted external declared method
testing.Short() // static call to external package-level function
@@ -57,8 +65,9 @@ func main() {
// Build an SSA program from the parsed file.
// Load its dependencies from gc binary export data.
+ mode := ssa.SanityCheckFunctions
mainPkg, _, err := ssautil.BuildPackage(&types.Config{Importer: importer.Default()}, fset,
- types.NewPackage("main", ""), []*ast.File{f}, ssa.SanityCheckFunctions)
+ types.NewPackage("main", ""), []*ast.File{f}, mode)
if err != nil {
t.Error(err)
return
@@ -158,6 +167,8 @@ func main() {
// TestRuntimeTypes tests that (*Program).RuntimeTypes() includes all necessary types.
func TestRuntimeTypes(t *testing.T) {
+ testenv.NeedsGoBuild(t) // for importer.Default()
+
tests := []struct {
input string
want []string
@@ -215,6 +226,18 @@ func TestRuntimeTypes(t *testing.T) {
nil,
},
}
+
+ if typeparams.Enabled {
+ tests = append(tests, []struct {
+ input string
+ want []string
+ }{
+ // MakeInterface does not create runtime type for parameterized types.
+ {`package N; var g interface{}; func f[S any]() { var v []S; g = v }; `,
+ nil,
+ },
+ }...)
+ }
for _, test := range tests {
// Parse the file.
fset := token.NewFileSet()
@@ -226,8 +249,9 @@ func TestRuntimeTypes(t *testing.T) {
// Create a single-file main package.
// Load dependencies from gc binary export data.
+ mode := ssa.SanityCheckFunctions
ssapkg, _, err := ssautil.BuildPackage(&types.Config{Importer: importer.Default()}, fset,
- types.NewPackage("p", ""), []*ast.File{f}, ssa.SanityCheckFunctions)
+ types.NewPackage("p", ""), []*ast.File{f}, mode)
if err != nil {
t.Errorf("test %q: %s", test.input[:15], err)
continue
@@ -374,7 +398,7 @@ var (
}
// Create and build SSA
- prog := ssautil.CreateProgram(lprog, 0)
+ prog := ssautil.CreateProgram(lprog, ssa.BuilderMode(0))
prog.Build()
// Enumerate reachable synthetic functions
@@ -480,7 +504,7 @@ func h(error)
}
// Create and build SSA
- prog := ssautil.CreateProgram(lprog, 0)
+ prog := ssautil.CreateProgram(lprog, ssa.BuilderMode(0))
p := prog.Package(lprog.Package("p").Pkg)
p.Build()
g := p.Func("g")
@@ -498,3 +522,486 @@ func h(error)
t.Errorf("expected a single Phi (for the range index), got %d", phis)
}
}
+
+// TestGenericDecls ensures that *unused* generic types, methods and functions
+// signatures can be built.
+//
+// TODO(taking): Add calls from non-generic functions to instantiations of generic functions.
+// TODO(taking): Add globals with types that are instantiations of generic functions.
+func TestGenericDecls(t *testing.T) {
+ if !typeparams.Enabled {
+ t.Skip("TestGenericDecls only works with type parameters enabled.")
+ }
+ const input = `
+package p
+
+import "unsafe"
+
+type Pointer[T any] struct {
+ v unsafe.Pointer
+}
+
+func (x *Pointer[T]) Load() *T {
+ return (*T)(LoadPointer(&x.v))
+}
+
+func Load[T any](x *Pointer[T]) *T {
+ return x.Load()
+}
+
+func LoadPointer(addr *unsafe.Pointer) (val unsafe.Pointer)
+`
+ // The SSA members for this package should look something like this:
+ // func LoadPointer func(addr *unsafe.Pointer) (val unsafe.Pointer)
+ // type Pointer struct{v unsafe.Pointer}
+ // method (*Pointer[T any]) Load() *T
+ // func init func()
+ // var init$guard bool
+
+ // Parse
+ var conf loader.Config
+ f, err := conf.ParseFile("<input>", input)
+ if err != nil {
+ t.Fatalf("parse: %v", err)
+ }
+ conf.CreateFromFiles("p", f)
+
+ // Load
+ lprog, err := conf.Load()
+ if err != nil {
+ t.Fatalf("Load: %v", err)
+ }
+
+ // Create and build SSA
+ prog := ssautil.CreateProgram(lprog, ssa.BuilderMode(0))
+ p := prog.Package(lprog.Package("p").Pkg)
+ p.Build()
+
+ if load := p.Func("Load"); typeparams.ForSignature(load.Signature).Len() != 1 {
+ t.Errorf("expected a single type param T for Load got %q", load.Signature)
+ }
+ if ptr := p.Type("Pointer"); typeparams.ForNamed(ptr.Type().(*types.Named)).Len() != 1 {
+ t.Errorf("expected a single type param T for Pointer got %q", ptr.Type())
+ }
+}
+
+func TestGenericWrappers(t *testing.T) {
+ if !typeparams.Enabled {
+ t.Skip("TestGenericWrappers only works with type parameters enabled.")
+ }
+ const input = `
+package p
+
+type S[T any] struct {
+ t *T
+}
+
+func (x S[T]) M() T {
+ return *(x.t)
+}
+
+var thunk = S[int].M
+
+var g S[int]
+var bound = g.M
+
+type R[T any] struct{ S[T] }
+
+var indirect = R[int].M
+`
+ // The relevant SSA members for this package should look something like this:
+ // var bound func() int
+ // var thunk func(S[int]) int
+	//   var indirect   func(R[int]) int
+
+ // Parse
+ var conf loader.Config
+ f, err := conf.ParseFile("<input>", input)
+ if err != nil {
+ t.Fatalf("parse: %v", err)
+ }
+ conf.CreateFromFiles("p", f)
+
+ // Load
+ lprog, err := conf.Load()
+ if err != nil {
+ t.Fatalf("Load: %v", err)
+ }
+
+ for _, mode := range []ssa.BuilderMode{ssa.BuilderMode(0), ssa.InstantiateGenerics} {
+ // Create and build SSA
+ prog := ssautil.CreateProgram(lprog, mode)
+ p := prog.Package(lprog.Package("p").Pkg)
+ p.Build()
+
+ for _, entry := range []struct {
+ name string // name of the package variable
+ typ string // type of the package variable
+ wrapper string // wrapper function to which the package variable is set
+ callee string // callee within the wrapper function
+ }{
+ {
+ "bound",
+ "*func() int",
+ "(p.S[int]).M$bound",
+ "(p.S[int]).M[int]",
+ },
+ {
+ "thunk",
+ "*func(p.S[int]) int",
+ "(p.S[int]).M$thunk",
+ "(p.S[int]).M[int]",
+ },
+ {
+ "indirect",
+ "*func(p.R[int]) int",
+ "(p.R[int]).M$thunk",
+ "(p.S[int]).M[int]",
+ },
+ } {
+ entry := entry
+ t.Run(entry.name, func(t *testing.T) {
+ v := p.Var(entry.name)
+ if v == nil {
+ t.Fatalf("Did not find variable for %q in %s", entry.name, p.String())
+ }
+ if v.Type().String() != entry.typ {
+ t.Errorf("Expected type for variable %s: %q. got %q", v, entry.typ, v.Type())
+ }
+
+ // Find the wrapper for v. This is stored exactly once in init.
+ var wrapper *ssa.Function
+ for _, bb := range p.Func("init").Blocks {
+ for _, i := range bb.Instrs {
+ if store, ok := i.(*ssa.Store); ok && v == store.Addr {
+ switch val := store.Val.(type) {
+ case *ssa.Function:
+ wrapper = val
+ case *ssa.MakeClosure:
+ wrapper = val.Fn.(*ssa.Function)
+ }
+ }
+ }
+ }
+ if wrapper == nil {
+ t.Fatalf("failed to find wrapper function for %s", entry.name)
+ }
+ if wrapper.String() != entry.wrapper {
+ t.Errorf("Expected wrapper function %q. got %q", wrapper, entry.wrapper)
+ }
+
+ // Find the callee within the wrapper. There should be exactly one call.
+ var callee *ssa.Function
+ for _, bb := range wrapper.Blocks {
+ for _, i := range bb.Instrs {
+ if call, ok := i.(*ssa.Call); ok {
+ callee = call.Call.StaticCallee()
+ }
+ }
+ }
+ if callee == nil {
+ t.Fatalf("failed to find callee within wrapper %s", wrapper)
+ }
+ if callee.String() != entry.callee {
+ t.Errorf("Expected callee in wrapper %q is %q. got %q", v, entry.callee, callee)
+ }
+ })
+ }
+ }
+}
+
+// TestTypeparamTest builds SSA over compilable examples in $GOROOT/test/typeparam/*.go.
+
+func TestTypeparamTest(t *testing.T) {
+ if !typeparams.Enabled {
+ return
+ }
+
+	// Tests use a fake GOROOT to stub out standard libraries with declarations in
+	// testdata/src. This decreases runtime from ~80s to ~1s.
+
+ dir := filepath.Join(build.Default.GOROOT, "test", "typeparam")
+
+	// Collect all of the .go files in the directory.
+ list, err := os.ReadDir(dir)
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ for _, entry := range list {
+ if entry.Name() == "issue58513.go" {
+ continue // uses runtime.Caller; unimplemented by go/ssa/interp
+ }
+ if entry.IsDir() || !strings.HasSuffix(entry.Name(), ".go") {
+ continue // Consider standalone go files.
+ }
+ input := filepath.Join(dir, entry.Name())
+ t.Run(entry.Name(), func(t *testing.T) {
+ src, err := os.ReadFile(input)
+ if err != nil {
+ t.Fatal(err)
+ }
+ // Only build test files that can be compiled, or compiled and run.
+ if !bytes.HasPrefix(src, []byte("// run")) && !bytes.HasPrefix(src, []byte("// compile")) {
+ t.Skipf("not detected as a run test")
+ }
+
+ t.Logf("Input: %s\n", input)
+
+ ctx := build.Default // copy
+ ctx.GOROOT = "testdata" // fake goroot. Makes tests ~1s. tests take ~80s.
+
+ reportErr := func(err error) {
+ t.Error(err)
+ }
+ conf := loader.Config{Build: &ctx, TypeChecker: types.Config{Error: reportErr}}
+ if _, err := conf.FromArgs([]string{input}, true); err != nil {
+ t.Fatalf("FromArgs(%s) failed: %s", input, err)
+ }
+
+ iprog, err := conf.Load()
+ if iprog != nil {
+ for _, pkg := range iprog.Created {
+ for i, e := range pkg.Errors {
+ t.Errorf("Loading pkg %s error[%d]=%s", pkg, i, e)
+ }
+ }
+ }
+ if err != nil {
+ t.Fatalf("conf.Load(%s) failed: %s", input, err)
+ }
+
+ mode := ssa.SanityCheckFunctions | ssa.InstantiateGenerics
+ prog := ssautil.CreateProgram(iprog, mode)
+ prog.Build()
+ })
+ }
+}
+
+// TestOrderOfOperations ensures order of operations are as intended.
+func TestOrderOfOperations(t *testing.T) {
+ // Testing for the order of operations within an expression is done
+ // by collecting the sequence of direct function calls within a *Function.
+ // Callees are all external functions so they cannot be safely re-ordered by ssa.
+ const input = `
+package p
+
+func a() int
+func b() int
+func c() int
+
+func slice(s []int) []int { return s[a():b()] }
+func sliceMax(s []int) []int { return s[a():b():c()] }
+
+`
+
+ // Parse
+ var conf loader.Config
+ f, err := conf.ParseFile("<input>", input)
+ if err != nil {
+ t.Fatalf("parse: %v", err)
+ }
+ conf.CreateFromFiles("p", f)
+
+ // Load
+ lprog, err := conf.Load()
+ if err != nil {
+ t.Fatalf("Load: %v", err)
+ }
+
+ // Create and build SSA
+ prog := ssautil.CreateProgram(lprog, ssa.BuilderMode(0))
+ p := prog.Package(lprog.Package("p").Pkg)
+ p.Build()
+
+ for _, item := range []struct {
+ fn string
+ want string // sequence of calls within the function.
+ }{
+ {"sliceMax", "[a() b() c()]"},
+ {"slice", "[a() b()]"},
+ } {
+ fn := p.Func(item.fn)
+ want := item.want
+ t.Run(item.fn, func(t *testing.T) {
+ t.Parallel()
+
+ var calls []string
+ for _, b := range fn.Blocks {
+ for _, instr := range b.Instrs {
+ if call, ok := instr.(ssa.CallInstruction); ok {
+ calls = append(calls, call.String())
+ }
+ }
+ }
+ if got := fmt.Sprint(calls); got != want {
+ fn.WriteTo(os.Stderr)
+ t.Errorf("Expected sequence of function calls in %s was %s. got %s", fn, want, got)
+ }
+ })
+ }
+}
+
+// TestGenericFunctionSelector ensures generic functions from other packages can be selected.
+func TestGenericFunctionSelector(t *testing.T) {
+ if !typeparams.Enabled {
+ t.Skip("TestGenericFunctionSelector uses type parameters.")
+ }
+
+ pkgs := map[string]map[string]string{
+ "main": {"m.go": `package main; import "a"; func main() { a.F[int](); a.G[int,string](); a.H(0) }`},
+ "a": {"a.go": `package a; func F[T any](){}; func G[S, T any](){}; func H[T any](a T){} `},
+ }
+
+ for _, mode := range []ssa.BuilderMode{
+ ssa.SanityCheckFunctions,
+ ssa.SanityCheckFunctions | ssa.InstantiateGenerics,
+ } {
+ conf := loader.Config{
+ Build: buildutil.FakeContext(pkgs),
+ }
+ conf.Import("main")
+
+ lprog, err := conf.Load()
+ if err != nil {
+ t.Errorf("Load failed: %s", err)
+ }
+ if lprog == nil {
+ t.Fatalf("Load returned nil *Program")
+ }
+ // Create and build SSA
+ prog := ssautil.CreateProgram(lprog, mode)
+ p := prog.Package(lprog.Package("main").Pkg)
+ p.Build()
+
+ var callees []string // callees of the CallInstruction.String() in main().
+ for _, b := range p.Func("main").Blocks {
+ for _, i := range b.Instrs {
+ if call, ok := i.(ssa.CallInstruction); ok {
+					if callee := call.Common().StaticCallee(); callee != nil {
+ callees = append(callees, callee.String())
+ } else {
+ t.Errorf("CallInstruction without StaticCallee() %q", call)
+ }
+ }
+ }
+ }
+ sort.Strings(callees) // ignore the order in the code.
+
+ want := "[a.F[int] a.G[int string] a.H[int]]"
+ if got := fmt.Sprint(callees); got != want {
+ t.Errorf("Expected main() to contain calls %v. got %v", want, got)
+ }
+ }
+}
+
+func TestIssue58491(t *testing.T) {
+ // Test that a local type reaches type param in instantiation.
+ testenv.NeedsGo1Point(t, 18)
+ src := `
+ package p
+
+ func foo[T any](blocking func() (T, error)) error {
+ type result struct {
+ res T
+ error // ensure the method set of result is non-empty
+ }
+
+ res := make(chan result, 1)
+ go func() {
+ var r result
+ r.res, r.error = blocking()
+ res <- r
+ }()
+ r := <-res
+ err := r // require the rtype for result when instantiated
+ return err
+ }
+ var Inst = foo[int]
+ `
+ fset := token.NewFileSet()
+ f, err := parser.ParseFile(fset, "p.go", src, 0)
+ if err != nil {
+ t.Error(err)
+ }
+ files := []*ast.File{f}
+
+ pkg := types.NewPackage("p", "")
+ conf := &types.Config{}
+ p, _, err := ssautil.BuildPackage(conf, fset, pkg, files, ssa.SanityCheckFunctions|ssa.InstantiateGenerics)
+ if err != nil {
+ t.Fatalf("unexpected error: %v", err)
+ }
+
+ // Find the local type result instantiated with int.
+ var found bool
+ for _, rt := range p.Prog.RuntimeTypes() {
+ if n, ok := rt.(*types.Named); ok {
+ if u, ok := n.Underlying().(*types.Struct); ok {
+ found = true
+ if got, want := n.String(), "p.result"; got != want {
+ t.Errorf("Expected the name %s got: %s", want, got)
+ }
+ if got, want := u.String(), "struct{res int; error}"; got != want {
+ t.Errorf("Expected the underlying type of %s to be %s. got %s", n, want, got)
+ }
+ }
+ }
+ }
+ if !found {
+ t.Error("Failed to find any Named to struct types")
+ }
+}
+
+func TestIssue58491Rec(t *testing.T) {
+ // Roughly the same as TestIssue58491 but with a recursive type.
+ testenv.NeedsGo1Point(t, 18)
+ src := `
+ package p
+
+ func foo[T any]() error {
+ type result struct {
+ res T
+ next *result
+ error // ensure the method set of result is non-empty
+ }
+
+ r := &result{}
+ err := r // require the rtype for result when instantiated
+ return err
+ }
+ var Inst = foo[int]
+ `
+ fset := token.NewFileSet()
+ f, err := parser.ParseFile(fset, "p.go", src, 0)
+ if err != nil {
+ t.Error(err)
+ }
+ files := []*ast.File{f}
+
+ pkg := types.NewPackage("p", "")
+ conf := &types.Config{}
+ p, _, err := ssautil.BuildPackage(conf, fset, pkg, files, ssa.SanityCheckFunctions|ssa.InstantiateGenerics)
+ if err != nil {
+ t.Fatalf("unexpected error: %v", err)
+ }
+
+ // Find the local type result instantiated with int.
+ var found bool
+ for _, rt := range p.Prog.RuntimeTypes() {
+ if n, ok := rt.(*types.Named); ok {
+ if u, ok := n.Underlying().(*types.Struct); ok {
+ found = true
+ if got, want := n.String(), "p.result"; got != want {
+ t.Errorf("Expected the name %s got: %s", want, got)
+ }
+ if got, want := u.String(), "struct{res int; next *p.result; error}"; got != want {
+ t.Errorf("Expected the underlying type of %s to be %s. got %s", n, want, got)
+ }
+ }
+ }
+ }
+ if !found {
+ t.Error("Failed to find any Named to struct types")
+ }
+}
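TestGenericWrappers above relies on the distinction between method expressions, which the builder emits as "$thunk" wrappers, and method values, which it emits as "$bound" closures. As a plain-Go refresher, using a non-generic type for brevity (illustrative only, not part of the patch):

package main

import "fmt"

type S struct{ t int }

func (x S) M() int { return x.t }

func main() {
	// Method expression: the receiver becomes an explicit first argument.
	// The SSA builder represents this with a "$thunk" wrapper.
	thunk := S.M // func(S) int

	// Method value: the receiver is captured in a closure.
	// The SSA builder represents this with a "$bound" wrapper.
	g := S{t: 7}
	bound := g.M // func() int

	fmt.Println(thunk(S{t: 3}), bound()) // 3 7
}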
diff --git a/go/ssa/const.go b/go/ssa/const.go
index f43792e7f..4a51a2cb4 100644
--- a/go/ssa/const.go
+++ b/go/ssa/const.go
@@ -12,68 +12,73 @@ import (
"go/token"
"go/types"
"strconv"
+ "strings"
+
+ "golang.org/x/tools/internal/typeparams"
)
// NewConst returns a new constant of the specified value and type.
// val must be valid according to the specification of Const.Value.
-//
func NewConst(val constant.Value, typ types.Type) *Const {
+ if val == nil {
+ switch soleTypeKind(typ) {
+ case types.IsBoolean:
+ val = constant.MakeBool(false)
+ case types.IsInteger:
+ val = constant.MakeInt64(0)
+ case types.IsString:
+ val = constant.MakeString("")
+ }
+ }
return &Const{typ, val}
}
+// soleTypeKind returns a BasicInfo for which constant.Value can
+// represent all zero values for the types in the type set.
+//
+// types.IsBoolean for false is a representative.
+// types.IsInteger for 0
+// types.IsString for ""
+// 0 otherwise.
+func soleTypeKind(typ types.Type) types.BasicInfo {
+ // State records the set of possible zero values (false, 0, "").
+ // Candidates (perhaps all) are eliminated during the type-set
+ // iteration, which executes at least once.
+ state := types.IsBoolean | types.IsInteger | types.IsString
+ underIs(typeSetOf(typ), func(t types.Type) bool {
+ var c types.BasicInfo
+ if t, ok := t.(*types.Basic); ok {
+ c = t.Info()
+ }
+ if c&types.IsNumeric != 0 { // int/float/complex
+ c = types.IsInteger
+ }
+ state = state & c
+ return state != 0
+ })
+ return state
+}
+
// intConst returns an 'int' constant that evaluates to i.
// (i is an int64 in case the host is narrower than the target.)
func intConst(i int64) *Const {
return NewConst(constant.MakeInt64(i), tInt)
}
-// nilConst returns a nil constant of the specified type, which may
-// be any reference type, including interfaces.
-//
-func nilConst(typ types.Type) *Const {
- return NewConst(nil, typ)
-}
-
// stringConst returns a 'string' constant that evaluates to s.
func stringConst(s string) *Const {
return NewConst(constant.MakeString(s), tString)
}
-// zeroConst returns a new "zero" constant of the specified type,
-// which must not be an array or struct type: the zero values of
-// aggregates are well-defined but cannot be represented by Const.
-//
+// zeroConst returns a new "zero" constant of the specified type.
func zeroConst(t types.Type) *Const {
- switch t := t.(type) {
- case *types.Basic:
- switch {
- case t.Info()&types.IsBoolean != 0:
- return NewConst(constant.MakeBool(false), t)
- case t.Info()&types.IsNumeric != 0:
- return NewConst(constant.MakeInt64(0), t)
- case t.Info()&types.IsString != 0:
- return NewConst(constant.MakeString(""), t)
- case t.Kind() == types.UnsafePointer:
- fallthrough
- case t.Kind() == types.UntypedNil:
- return nilConst(t)
- default:
- panic(fmt.Sprint("zeroConst for unexpected type:", t))
- }
- case *types.Pointer, *types.Slice, *types.Interface, *types.Chan, *types.Map, *types.Signature:
- return nilConst(t)
- case *types.Named:
- return NewConst(zeroConst(t.Underlying()).Value, t)
- case *types.Array, *types.Struct, *types.Tuple:
- panic(fmt.Sprint("zeroConst applied to aggregate:", t))
- }
- panic(fmt.Sprint("zeroConst: unexpected ", t))
+ return NewConst(nil, t)
}
func (c *Const) RelString(from *types.Package) string {
var s string
if c.Value == nil {
- s = "nil"
+ s = zeroString(c.typ, from)
} else if c.Value.Kind() == constant.String {
s = constant.StringVal(c.Value)
const max = 20
@@ -88,6 +93,44 @@ func (c *Const) RelString(from *types.Package) string {
return s + ":" + relType(c.Type(), from)
}
+// zeroString returns the string representation of the "zero" value of the type t.
+func zeroString(t types.Type, from *types.Package) string {
+ switch t := t.(type) {
+ case *types.Basic:
+ switch {
+ case t.Info()&types.IsBoolean != 0:
+ return "false"
+ case t.Info()&types.IsNumeric != 0:
+ return "0"
+ case t.Info()&types.IsString != 0:
+ return `""`
+ case t.Kind() == types.UnsafePointer:
+ fallthrough
+ case t.Kind() == types.UntypedNil:
+ return "nil"
+ default:
+ panic(fmt.Sprint("zeroString for unexpected type:", t))
+ }
+ case *types.Pointer, *types.Slice, *types.Interface, *types.Chan, *types.Map, *types.Signature:
+ return "nil"
+ case *types.Named:
+ return zeroString(t.Underlying(), from)
+ case *types.Array, *types.Struct:
+ return relType(t, from) + "{}"
+ case *types.Tuple:
+ // Tuples are not normal values.
+ // We are currently format as "(t[0], ..., t[n])". Could be something else.
+		// We currently format them as "(t[0], ..., t[n])"; this could change.
+ for i := 0; i < t.Len(); i++ {
+ components[i] = zeroString(t.At(i).Type(), from)
+ }
+ return "(" + strings.Join(components, ", ") + ")"
+ case *typeparams.TypeParam:
+ return "*new(" + relType(t, from) + ")"
+ }
+ panic(fmt.Sprint("zeroString: unexpected ", t))
+}
+
func (c *Const) Name() string {
return c.RelString(nil)
}
@@ -110,16 +153,36 @@ func (c *Const) Pos() token.Pos {
return token.NoPos
}
-// IsNil returns true if this constant represents a typed or untyped nil value.
+// IsNil returns true if this constant is a nil value of
+// a nillable reference type (pointer, slice, channel, map, or function),
+// a basic interface type, or
+// a type parameter all of whose possible instantiations are themselves nillable.
func (c *Const) IsNil() bool {
- return c.Value == nil
+ return c.Value == nil && nillable(c.typ)
+}
+
+// nillable reports whether *new(T) == nil is legal for type T.
+func nillable(t types.Type) bool {
+ if typeparams.IsTypeParam(t) {
+ return underIs(typeSetOf(t), func(u types.Type) bool {
+ // empty type set (u==nil) => any underlying types => not nillable
+ return u != nil && nillable(u)
+ })
+ }
+ switch t.Underlying().(type) {
+ case *types.Pointer, *types.Slice, *types.Chan, *types.Map, *types.Signature:
+ return true
+ case *types.Interface:
+ return true // basic interface.
+ default:
+ return false
+ }
}
// TODO(adonovan): move everything below into golang.org/x/tools/go/ssa/interp.
// Int64 returns the numeric value of this constant truncated to fit
// a signed 64-bit integer.
-//
func (c *Const) Int64() int64 {
switch x := constant.ToInt(c.Value); x.Kind() {
case constant.Int:
@@ -136,7 +199,6 @@ func (c *Const) Int64() int64 {
// Uint64 returns the numeric value of this constant truncated to fit
// an unsigned 64-bit integer.
-//
func (c *Const) Uint64() uint64 {
switch x := constant.ToInt(c.Value); x.Kind() {
case constant.Int:
@@ -153,17 +215,17 @@ func (c *Const) Uint64() uint64 {
// Float64 returns the numeric value of this constant truncated to fit
// a float64.
-//
func (c *Const) Float64() float64 {
- f, _ := constant.Float64Val(c.Value)
+ x := constant.ToFloat(c.Value) // (c.Value == nil) => x.Kind() == Unknown
+ f, _ := constant.Float64Val(x)
return f
}
// Complex128 returns the complex value of this constant truncated to
// fit a complex128.
-//
func (c *Const) Complex128() complex128 {
- re, _ := constant.Float64Val(constant.Real(c.Value))
- im, _ := constant.Float64Val(constant.Imag(c.Value))
+ x := constant.ToComplex(c.Value) // (c.Value == nil) => x.Kind() == Unknown
+ re, _ := constant.Float64Val(constant.Real(x))
+ im, _ := constant.Float64Val(constant.Imag(x))
return complex(re, im)
}
diff --git a/go/ssa/const_test.go b/go/ssa/const_test.go
new file mode 100644
index 000000000..131fe1ace
--- /dev/null
+++ b/go/ssa/const_test.go
@@ -0,0 +1,104 @@
+// Copyright 2022 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package ssa_test
+
+import (
+ "go/ast"
+ "go/constant"
+ "go/parser"
+ "go/token"
+ "go/types"
+ "math/big"
+ "strings"
+ "testing"
+
+ "golang.org/x/tools/go/ssa"
+ "golang.org/x/tools/internal/typeparams"
+)
+
+func TestConstString(t *testing.T) {
+ if !typeparams.Enabled {
+ t.Skip("TestConstString requires type parameters.")
+ }
+
+ const source = `
+ package P
+
+ type Named string
+
+ func fn() (int, bool, string)
+ func gen[T int]() {}
+ `
+ fset := token.NewFileSet()
+ f, err := parser.ParseFile(fset, "p.go", source, 0)
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ var conf types.Config
+ pkg, err := conf.Check("P", fset, []*ast.File{f}, nil)
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ for _, test := range []struct {
+ expr string // type expression
+ constant interface{} // constant value
+ want string // expected String() value
+ }{
+ {"int", int64(0), "0:int"},
+ {"int64", int64(0), "0:int64"},
+ {"float32", int64(0), "0:float32"},
+ {"float32", big.NewFloat(1.5), "1.5:float32"},
+ {"bool", false, "false:bool"},
+ {"string", "", `"":string`},
+ {"Named", "", `"":P.Named`},
+ {"struct{x string}", nil, "struct{x string}{}:struct{x string}"},
+ {"[]int", nil, "nil:[]int"},
+ {"[3]int", nil, "[3]int{}:[3]int"},
+ {"*int", nil, "nil:*int"},
+ {"interface{}", nil, "nil:interface{}"},
+ {"interface{string}", nil, `"":interface{string}`},
+ {"interface{int|int64}", nil, "0:interface{int|int64}"},
+ {"interface{bool}", nil, "false:interface{bool}"},
+ {"interface{bool|int}", nil, "nil:interface{bool|int}"},
+ {"interface{int|string}", nil, "nil:interface{int|string}"},
+ {"interface{bool|string}", nil, "nil:interface{bool|string}"},
+ {"interface{struct{x string}}", nil, "nil:interface{struct{x string}}"},
+ {"interface{int|int64}", int64(1), "1:interface{int|int64}"},
+ {"interface{~bool}", true, "true:interface{~bool}"},
+ {"interface{Named}", "lorem ipsum", `"lorem ipsum":interface{P.Named}`},
+ {"func() (int, bool, string)", nil, "nil:func() (int, bool, string)"},
+ } {
+ // Eval() expr for its type.
+ tv, err := types.Eval(fset, pkg, 0, test.expr)
+ if err != nil {
+ t.Fatalf("Eval(%s) failed: %v", test.expr, err)
+ }
+ var val constant.Value
+ if test.constant != nil {
+ val = constant.Make(test.constant)
+ }
+ c := ssa.NewConst(val, tv.Type)
+ got := strings.ReplaceAll(c.String(), " | ", "|") // Accept both interface{a | b} and interface{a|b}.
+ if got != test.want {
+ t.Errorf("ssa.NewConst(%v, %s).String() = %v, want %v", val, tv.Type, got, test.want)
+ }
+ }
+
+ // Test tuples
+ fn := pkg.Scope().Lookup("fn")
+ tup := fn.Type().(*types.Signature).Results()
+ if got, want := ssa.NewConst(nil, tup).String(), `(0, false, ""):(int, bool, string)`; got != want {
+ t.Errorf("ssa.NewConst(%v, %s).String() = %v, want %v", nil, tup, got, want)
+ }
+
+ // Test type-param
+ gen := pkg.Scope().Lookup("gen")
+ tp := typeparams.ForSignature(gen.Type().(*types.Signature)).At(0)
+ if got, want := ssa.NewConst(nil, tp).String(), "0:T"; got != want {
+		t.Errorf("ssa.NewConst(%v, %s).String() = %v, want %v", nil, tp, got, want)
+ }
+}
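
The table above pins down the new String() output; the companion change is that IsNil() now also requires the type to be nillable, so Value == nil alone is no longer enough. A minimal sketch in the style of const_test.go above (hypothetical test name; it reuses const_test.go's go/parser, go/types, and ssa imports; Go 1.18+ assumed for the union type expressions):

	func TestConstIsNilSketch(t *testing.T) {
		fset := token.NewFileSet()
		f, err := parser.ParseFile(fset, "p.go", "package P", 0)
		if err != nil {
			t.Fatal(err)
		}
		pkg, err := new(types.Config).Check("P", fset, []*ast.File{f}, nil)
		if err != nil {
			t.Fatal(err)
		}
		for _, test := range []struct {
			expr string // type expression
			want bool   // expected IsNil()
		}{
			{"*int", true},                  // pointers are nillable
			{"interface{}", true},           // basic interface
			{"int", false},                  // zero value is 0, not nil
			{"interface{int|int64}", false}, // soleTypeKind is IsInteger, so Value becomes 0
		} {
			tv, err := types.Eval(fset, pkg, 0, test.expr)
			if err != nil {
				t.Fatalf("Eval(%s) failed: %v", test.expr, err)
			}
			if got := ssa.NewConst(nil, tv.Type).IsNil(); got != test.want {
				t.Errorf("NewConst(nil, %s).IsNil() = %v, want %v", test.expr, got, test.want)
			}
		}
	}
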
diff --git a/go/ssa/coretype.go b/go/ssa/coretype.go
new file mode 100644
index 000000000..128d61e42
--- /dev/null
+++ b/go/ssa/coretype.go
@@ -0,0 +1,159 @@
+// Copyright 2022 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package ssa
+
+import (
+ "go/types"
+
+ "golang.org/x/tools/internal/typeparams"
+)
+
+// Utilities for dealing with core types.
+
+// isBytestring returns true if T has the same terms as interface{[]byte | string}.
+// These act like a core type for some operations: slice expressions, append and copy.
+//
+// See https://go.dev/ref/spec#Core_types for the details on bytestring.
+func isBytestring(T types.Type) bool {
+ U := T.Underlying()
+ if _, ok := U.(*types.Interface); !ok {
+ return false
+ }
+
+ tset := typeSetOf(U)
+ if tset.Len() != 2 {
+ return false
+ }
+ hasBytes, hasString := false, false
+ underIs(tset, func(t types.Type) bool {
+ switch {
+ case isString(t):
+ hasString = true
+ case isByteSlice(t):
+ hasBytes = true
+ }
+ return hasBytes || hasString
+ })
+ return hasBytes && hasString
+}
+
+// termList is a list of types.
+type termList []*typeparams.Term // type terms of the type set
+func (s termList) Len() int { return len(s) }
+func (s termList) At(i int) types.Type { return s[i].Type() }
+
+// typeSetOf returns the type set of typ. Returns an empty typeset on an error.
+func typeSetOf(typ types.Type) termList {
+	// This is an adaptation of x/exp/typeparams.NormalTerms which x/tools cannot depend on.
+ var terms []*typeparams.Term
+ var err error
+ switch typ := typ.(type) {
+ case *typeparams.TypeParam:
+ terms, err = typeparams.StructuralTerms(typ)
+ case *typeparams.Union:
+ terms, err = typeparams.UnionTermSet(typ)
+ case *types.Interface:
+ terms, err = typeparams.InterfaceTermSet(typ)
+ default:
+ // Common case.
+ // Specializing the len=1 case to avoid a slice
+ // had no measurable space/time benefit.
+ terms = []*typeparams.Term{typeparams.NewTerm(false, typ)}
+ }
+
+ if err != nil {
+ return termList(nil)
+ }
+ return termList(terms)
+}
+
+// underIs calls f with the underlying types of the specific type terms
+// of s and reports whether all calls to f returned true. If there are
+// no specific terms, underIs returns the result of f(nil).
+func underIs(s termList, f func(types.Type) bool) bool {
+ if s.Len() == 0 {
+ return f(nil)
+ }
+ for i := 0; i < s.Len(); i++ {
+ u := s.At(i).Underlying()
+ if !f(u) {
+ return false
+ }
+ }
+ return true
+}
+
+// indexType returns the element type and index mode of an IndexExpr over a type.
+// It returns (nil, invalid) if the type is not indexable; this should never occur in a well-typed program.
+func indexType(typ types.Type) (types.Type, indexMode) {
+ switch U := typ.Underlying().(type) {
+ case *types.Array:
+ return U.Elem(), ixArrVar
+ case *types.Pointer:
+ if arr, ok := U.Elem().Underlying().(*types.Array); ok {
+ return arr.Elem(), ixVar
+ }
+ case *types.Slice:
+ return U.Elem(), ixVar
+ case *types.Map:
+ return U.Elem(), ixMap
+ case *types.Basic:
+ return tByte, ixValue // must be a string
+ case *types.Interface:
+ tset := typeSetOf(U)
+ if tset.Len() == 0 {
+			return nil, ixInvalid // the type set is empty (no terms, or an error occurred).
+ }
+
+ elem, mode := indexType(tset.At(0))
+ for i := 1; i < tset.Len() && mode != ixInvalid; i++ {
+ e, m := indexType(tset.At(i))
+			if !types.Identical(elem, e) { // just a sanity check; should not happen in type-checked code
+ return nil, ixInvalid
+ }
+ // Update the mode to the most constrained address type.
+ mode = mode.meet(m)
+ }
+ if mode != ixInvalid {
+ return elem, mode
+ }
+ }
+ return nil, ixInvalid
+}
+
+// An indexMode specifies the (addressing) mode of an index operand.
+//
+// Addressing mode of an index operation is based on the set of
+// underlying types.
+// Hasse diagram of the indexMode meet semi-lattice:
+//
+// ixVar ixMap
+// | |
+// ixArrVar |
+// | |
+// ixValue |
+// \ /
+// ixInvalid
+type indexMode byte
+
+const (
+ ixInvalid indexMode = iota // index is invalid
+ ixValue // index is a computed value (not addressable)
+ ixArrVar // like ixVar, but index operand contains an array
+ ixVar // index is an addressable variable
+ ixMap // index is a map index expression (acts like a variable on lhs, commaok on rhs of an assignment)
+)
+
+// meet is the address type that is constrained by both x and y.
+func (x indexMode) meet(y indexMode) indexMode {
+ if (x == ixMap || y == ixMap) && x != y {
+ return ixInvalid
+ }
+ // Use int representation and return min.
+	if x < y {
+		return x
+	}
+	return y
+}
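
To see the mode lattice in action, consider a constraint whose terms index differently: []byte indexes as an addressable byte (ixVar) while string indexes as a non-addressable byte (ixValue), and indexType folds them with meet into the more constrained ixValue. A hypothetical white-box sketch (it must live in package ssa because indexType and the ix modes are unexported; imports are go/ast, go/parser, go/token, go/types, and testing):

	func TestIndexTypeOverTypeSet(t *testing.T) {
		const src = `package q; type BS interface{ ~[]byte | ~string }`
		fset := token.NewFileSet()
		f, err := parser.ParseFile(fset, "q.go", src, 0)
		if err != nil {
			t.Fatal(err)
		}
		pkg, err := new(types.Config).Check("q", fset, []*ast.File{f}, nil)
		if err != nil {
			t.Fatal(err)
		}
		BS := pkg.Scope().Lookup("BS").Type()

		// Both terms have element type byte; the modes meet at ixValue, so an
		// index expression over a type parameter constrained by BS yields a
		// byte that is not addressable.
		elem, mode := indexType(BS)
		if !types.Identical(elem, types.Typ[types.Byte]) || mode != ixValue {
			t.Errorf("indexType(BS) = (%v, %v), want (byte, ixValue)", elem, mode)
		}
	}
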
diff --git a/go/ssa/coretype_test.go b/go/ssa/coretype_test.go
new file mode 100644
index 000000000..74fe4db16
--- /dev/null
+++ b/go/ssa/coretype_test.go
@@ -0,0 +1,105 @@
+// Copyright 2022 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package ssa_test
+
+import (
+ "go/ast"
+ "go/parser"
+ "go/token"
+ "go/types"
+ "testing"
+
+ "golang.org/x/tools/internal/typeparams"
+)
+
+func TestCoreType(t *testing.T) {
+ if !typeparams.Enabled {
+ t.Skip("TestCoreType requires type parameters.")
+ }
+
+ const source = `
+ package P
+
+ type Named int
+
+ type A any
+ type B interface{~int}
+ type C interface{int}
+ type D interface{Named}
+ type E interface{~int|interface{Named}}
+ type F interface{~int|~float32}
+ type G interface{chan int|interface{chan int}}
+ type H interface{chan int|chan float32}
+ type I interface{chan<- int|chan int}
+ type J interface{chan int|chan<- int}
+ type K interface{<-chan int|chan int}
+ type L interface{chan int|<-chan int}
+ type M interface{chan int|chan Named}
+ type N interface{<-chan int|chan<- int}
+ type O interface{chan int|bool}
+ type P struct{ Named }
+ type Q interface{ Foo() }
+ type R interface{ Foo() ; Named }
+ type S interface{ Foo() ; ~int }
+
+ type T interface{chan int|interface{chan int}|<-chan int}
+`
+
+ fset := token.NewFileSet()
+ f, err := parser.ParseFile(fset, "hello.go", source, 0)
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ var conf types.Config
+ pkg, err := conf.Check("P", fset, []*ast.File{f}, nil)
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ for _, test := range []struct {
+ expr string // type expression of Named type
+ want string // expected core type (or "<nil>" if none)
+ }{
+ {"Named", "int"}, // Underlying type is not interface.
+ {"A", "<nil>"}, // Interface has no terms.
+ {"B", "int"}, // Tilde term.
+ {"C", "int"}, // Non-tilde term.
+ {"D", "int"}, // Named term.
+ {"E", "int"}, // Identical underlying types.
+ {"F", "<nil>"}, // Differing underlying types.
+ {"G", "chan int"}, // Identical Element types.
+ {"H", "<nil>"}, // Element type int has differing underlying type to float32.
+ {"I", "chan<- int"}, // SendRecv followed by SendOnly
+ {"J", "chan<- int"}, // SendOnly followed by SendRecv
+ {"K", "<-chan int"}, // RecvOnly followed by SendRecv
+ {"L", "<-chan int"}, // SendRecv followed by RecvOnly
+ {"M", "<nil>"}, // Element type int is not *identical* to Named.
+ {"N", "<nil>"}, // Differing channel directions
+ {"O", "<nil>"}, // A channel followed by a non-channel.
+ {"P", "struct{P.Named}"}, // Embedded type.
+		{"Q", "<nil>"},            // interface type with functions but no terms
+ {"R", "int"}, // interface type with both terms and functions.
+ {"S", "int"}, // interface type with a tilde term
+ {"T", "<-chan int"}, // Prefix of 2 terms that are identical before switching to channel.
+ } {
+ // Eval() expr for its type.
+ tv, err := types.Eval(fset, pkg, 0, test.expr)
+ if err != nil {
+ t.Fatalf("Eval(%s) failed: %v", test.expr, err)
+ }
+
+ ct := typeparams.CoreType(tv.Type)
+ var got string
+ if ct == nil {
+ got = "<nil>"
+ } else {
+ got = ct.String()
+ }
+ if got != test.want {
+ t.Errorf("CoreType(%s) = %v, want %v", test.expr, got, test.want)
+ }
+ }
+}
diff --git a/go/ssa/create.go b/go/ssa/create.go
index 69cd93713..ccb20e796 100644
--- a/go/ssa/create.go
+++ b/go/ssa/create.go
@@ -16,24 +16,29 @@ import (
"sync"
"golang.org/x/tools/go/types/typeutil"
+ "golang.org/x/tools/internal/typeparams"
)
// NewProgram returns a new SSA Program.
//
// mode controls diagnostics and checking during SSA construction.
-//
func NewProgram(fset *token.FileSet, mode BuilderMode) *Program {
prog := &Program{
- Fset: fset,
- imported: make(map[string]*Package),
- packages: make(map[*types.Package]*Package),
- thunks: make(map[selectionKey]*Function),
- bounds: make(map[*types.Func]*Function),
- mode: mode,
+ Fset: fset,
+ imported: make(map[string]*Package),
+ packages: make(map[*types.Package]*Package),
+ thunks: make(map[selectionKey]*Function),
+ bounds: make(map[boundsKey]*Function),
+ mode: mode,
+ canon: newCanonizer(),
+ ctxt: typeparams.NewContext(),
+ instances: make(map[*Function]*instanceSet),
+ parameterized: tpWalker{seen: make(map[types.Type]bool)},
}
h := typeutil.MakeHasher() // protected by methodsMu, in effect
prog.methodSets.SetHasher(h)
+ prog.runtimeTypes.SetHasher(h)
return prog
}
@@ -44,7 +49,6 @@ func NewProgram(fset *token.FileSet, mode BuilderMode) *Program {
// For objects from Go source code, syntax is the associated syntax
// tree (for funcs and vars only); it will be used during the build
// phase.
-//
func memberFromObject(pkg *Package, obj types.Object, syntax ast.Node) {
name := obj.Name()
switch obj := obj.(type) {
@@ -85,19 +89,33 @@ func memberFromObject(pkg *Package, obj types.Object, syntax ast.Node) {
pkg.ninit++
name = fmt.Sprintf("init#%d", pkg.ninit)
}
+
+ // Collect type parameters if this is a generic function/method.
+ var tparams *typeparams.TypeParamList
+ if rtparams := typeparams.RecvTypeParams(sig); rtparams.Len() > 0 {
+ tparams = rtparams
+ } else if sigparams := typeparams.ForSignature(sig); sigparams.Len() > 0 {
+ tparams = sigparams
+ }
+
fn := &Function{
- name: name,
- object: obj,
- Signature: sig,
- syntax: syntax,
- pos: obj.Pos(),
- Pkg: pkg,
- Prog: pkg.Prog,
- info: pkg.info,
+ name: name,
+ object: obj,
+ Signature: sig,
+ syntax: syntax,
+ pos: obj.Pos(),
+ Pkg: pkg,
+ Prog: pkg.Prog,
+ typeparams: tparams,
+ info: pkg.info,
}
+ pkg.created.Add(fn)
if syntax == nil {
fn.Synthetic = "loaded from gc object file"
}
+ if tparams.Len() > 0 {
+ fn.Prog.createInstanceSet(fn)
+ }
pkg.objects[obj] = fn
if sig.Recv() == nil {
@@ -112,7 +130,6 @@ func memberFromObject(pkg *Package, obj types.Object, syntax ast.Node) {
// membersFromDecl populates package pkg with members for each
// typechecker object (var, func, const or type) associated with the
// specified decl.
-//
func membersFromDecl(pkg *Package, decl ast.Decl) {
switch decl := decl.(type) {
case *ast.GenDecl: // import, const, type or var
@@ -152,6 +169,19 @@ func membersFromDecl(pkg *Package, decl ast.Decl) {
}
}
+// creator tracks functions that have finished their CREATE phases.
+//
+// All Functions belong to the same Program. May have differing packages.
+//
+// creators are not thread-safe.
+type creator []*Function
+
+func (c *creator) Add(fn *Function) {
+ *c = append(*c, fn)
+}
+func (c *creator) At(i int) *Function { return (*c)[i] }
+func (c *creator) Len() int { return len(*c) }
+
// CreatePackage constructs and returns an SSA Package from the
// specified type-checked, error-free file ASTs, and populates its
// Members mapping.
@@ -161,7 +191,6 @@ func membersFromDecl(pkg *Package, decl ast.Decl) {
//
// The real work of building SSA form for each function is not done
// until a subsequent call to Package.Build().
-//
func (prog *Program) CreatePackage(pkg *types.Package, files []*ast.File, info *types.Info, importable bool) *Package {
p := &Package{
Prog: prog,
@@ -182,6 +211,7 @@ func (prog *Program) CreatePackage(pkg *types.Package, files []*ast.File, info *
info: p.info,
}
p.Members[p.init.name] = p.init
+ p.created.Add(p.init)
// CREATE phase.
// Allocate all package members: vars, funcs, consts and types.
@@ -243,7 +273,6 @@ var printMu sync.Mutex
// AllPackages returns a new slice containing all packages in the
// program prog in unspecified order.
-//
func (prog *Program) AllPackages() []*Package {
pkgs := make([]*Package, 0, len(prog.packages))
for _, pkg := range prog.packages {
@@ -265,7 +294,6 @@ func (prog *Program) AllPackages() []*Package {
// false---yet this function remains very convenient.
// Clients should use (*Program).Package instead where possible.
// SSA doesn't really need a string-keyed map of packages.
-//
func (prog *Program) ImportedPackage(path string) *Package {
return prog.imported[path]
}
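
The two typeparams lookups in memberFromObject are what give both generic functions and generic methods an instanceSet at CREATE time. A tiny hypothetical package to make the distinction concrete (demo, Box, F, and Get are illustrative names only):

	package demo

	type Box[T any] struct{ v T }

	// For F, typeparams.ForSignature(sig) is non-empty, so the created Function
	// records typeparams = [T] and createInstanceSet is called for it.
	func F[T any](x T) T { return x }

	// For Get, the signature itself has no type parameters;
	// typeparams.RecvTypeParams(sig) carries [T] via the receiver Box[T].
	func (b Box[T]) Get() T { return b.v }
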
diff --git a/go/ssa/doc.go b/go/ssa/doc.go
index 6885bedb3..afda476b3 100644
--- a/go/ssa/doc.go
+++ b/go/ssa/doc.go
@@ -41,60 +41,61 @@
//
// The primary interfaces of this package are:
//
-// - Member: a named member of a Go package.
-// - Value: an expression that yields a value.
-// - Instruction: a statement that consumes values and performs computation.
-// - Node: a Value or Instruction (emphasizing its membership in the SSA value graph)
+// - Member: a named member of a Go package.
+// - Value: an expression that yields a value.
+// - Instruction: a statement that consumes values and performs computation.
+// - Node: a Value or Instruction (emphasizing its membership in the SSA value graph)
//
// A computation that yields a result implements both the Value and
// Instruction interfaces. The following table shows for each
// concrete type which of these interfaces it implements.
//
-// Value? Instruction? Member?
-// *Alloc ✔ ✔
-// *BinOp ✔ ✔
-// *Builtin ✔
-// *Call ✔ ✔
-// *ChangeInterface ✔ ✔
-// *ChangeType ✔ ✔
-// *Const ✔
-// *Convert ✔ ✔
-// *DebugRef ✔
-// *Defer ✔
-// *Extract ✔ ✔
-// *Field ✔ ✔
-// *FieldAddr ✔ ✔
-// *FreeVar ✔
-// *Function ✔ ✔ (func)
-// *Global ✔ ✔ (var)
-// *Go ✔
-// *If ✔
-// *Index ✔ ✔
-// *IndexAddr ✔ ✔
-// *Jump ✔
-// *Lookup ✔ ✔
-// *MakeChan ✔ ✔
-// *MakeClosure ✔ ✔
-// *MakeInterface ✔ ✔
-// *MakeMap ✔ ✔
-// *MakeSlice ✔ ✔
-// *MapUpdate ✔
-// *NamedConst ✔ (const)
-// *Next ✔ ✔
-// *Panic ✔
-// *Parameter ✔
-// *Phi ✔ ✔
-// *Range ✔ ✔
-// *Return ✔
-// *RunDefers ✔
-// *Select ✔ ✔
-// *Send ✔
-// *Slice ✔ ✔
-// *SliceToArrayPointer ✔ ✔
-// *Store ✔
-// *Type ✔ (type)
-// *TypeAssert ✔ ✔
-// *UnOp ✔ ✔
+// Value? Instruction? Member?
+// *Alloc ✔ ✔
+// *BinOp ✔ ✔
+// *Builtin ✔
+// *Call ✔ ✔
+// *ChangeInterface ✔ ✔
+// *ChangeType ✔ ✔
+// *Const ✔
+// *Convert ✔ ✔
+// *DebugRef ✔
+// *Defer ✔
+// *Extract ✔ ✔
+// *Field ✔ ✔
+// *FieldAddr ✔ ✔
+// *FreeVar ✔
+// *Function ✔ ✔ (func)
+// *GenericConvert ✔ ✔
+// *Global ✔ ✔ (var)
+// *Go ✔
+// *If ✔
+// *Index ✔ ✔
+// *IndexAddr ✔ ✔
+// *Jump ✔
+// *Lookup ✔ ✔
+// *MakeChan ✔ ✔
+// *MakeClosure ✔ ✔
+// *MakeInterface ✔ ✔
+// *MakeMap ✔ ✔
+// *MakeSlice ✔ ✔
+// *MapUpdate ✔
+// *NamedConst ✔ (const)
+// *Next ✔ ✔
+// *Panic ✔
+// *Parameter ✔
+// *Phi ✔ ✔
+// *Range ✔ ✔
+// *Return ✔
+// *RunDefers ✔
+// *Select ✔ ✔
+// *Send ✔
+// *Slice ✔ ✔
+// *SliceToArrayPointer ✔ ✔
+// *Store ✔
+// *Type ✔ (type)
+// *TypeAssert ✔ ✔
+// *UnOp ✔ ✔
//
// Other key types in this package include: Program, Package, Function
// and BasicBlock.
@@ -122,5 +123,4 @@
// of trying to determine corresponding elements across the four
// domains of source locations, ast.Nodes, types.Objects,
// ssa.Values/Instructions.
-//
package ssa // import "golang.org/x/tools/go/ssa"
diff --git a/go/ssa/dom.go b/go/ssa/dom.go
index 822fe9772..66a2f5e6e 100644
--- a/go/ssa/dom.go
+++ b/go/ssa/dom.go
@@ -29,12 +29,10 @@ import (
// its parent in the dominator tree, if any.
// Neither the entry node (b.Index==0) nor recover node
// (b==b.Parent().Recover()) have a parent.
-//
func (b *BasicBlock) Idom() *BasicBlock { return b.dom.idom }
// Dominees returns the list of blocks that b immediately dominates:
// its children in the dominator tree.
-//
func (b *BasicBlock) Dominees() []*BasicBlock { return b.dom.children }
// Dominates reports whether b dominates c.
@@ -50,7 +48,6 @@ func (a byDomPreorder) Less(i, j int) bool { return a[i].dom.pre < a[j].dom.pre
// DomPreorder returns a new slice containing the blocks of f in
// dominator tree preorder.
-//
func (f *Function) DomPreorder() []*BasicBlock {
n := len(f.Blocks)
order := make(byDomPreorder, n)
@@ -110,7 +107,6 @@ func (lt *ltState) link(v, w *BasicBlock) {
// buildDomTree computes the dominator tree of f using the LT algorithm.
// Precondition: all blocks are reachable (e.g. optimizeBlocks has been run).
-//
func buildDomTree(f *Function) {
// The step numbers refer to the original LT paper; the
// reordering is due to Georgiadis.
@@ -210,7 +206,6 @@ func buildDomTree(f *Function) {
// numberDomTree sets the pre- and post-order numbers of a depth-first
// traversal of the dominator tree rooted at v. These are used to
// answer dominance queries in constant time.
-//
func numberDomTree(v *BasicBlock, pre, post int32) (int32, int32) {
v.dom.pre = pre
pre++
@@ -228,7 +223,6 @@ func numberDomTree(v *BasicBlock, pre, post int32) (int32, int32) {
// computed by the LT algorithm by comparing against the dominance
// relation computed by a naive Kildall-style forward dataflow
// analysis (Algorithm 10.16 from the "Dragon" book).
-//
func sanityCheckDomTree(f *Function) {
n := len(f.Blocks)
@@ -309,7 +303,7 @@ func sanityCheckDomTree(f *Function) {
// Printing functions ----------------------------------------
-// printDomTree prints the dominator tree as text, using indentation.
+// printDomTreeText prints the dominator tree as text, using indentation.
func printDomTreeText(buf *bytes.Buffer, v *BasicBlock, indent int) {
fmt.Fprintf(buf, "%*s%s\n", 4*indent, "", v)
for _, child := range v.dom.children {
diff --git a/go/ssa/emit.go b/go/ssa/emit.go
index 576e0245a..1731c7975 100644
--- a/go/ssa/emit.go
+++ b/go/ssa/emit.go
@@ -11,11 +11,12 @@ import (
"go/ast"
"go/token"
"go/types"
+
+ "golang.org/x/tools/internal/typeparams"
)
// emitNew emits to f a new (heap Alloc) instruction allocating an
// object of type typ. pos is the optional source location.
-//
func emitNew(f *Function, typ types.Type, pos token.Pos) *Alloc {
v := &Alloc{Heap: true}
v.setType(types.NewPointer(typ))
@@ -26,17 +27,15 @@ func emitNew(f *Function, typ types.Type, pos token.Pos) *Alloc {
// emitLoad emits to f an instruction to load the address addr into a
// new temporary, and returns the value so defined.
-//
func emitLoad(f *Function, addr Value) *UnOp {
v := &UnOp{Op: token.MUL, X: addr}
- v.setType(deref(addr.Type()))
+ v.setType(deref(typeparams.CoreType(addr.Type())))
f.emit(v)
return v
}
// emitDebugRef emits to f a DebugRef pseudo-instruction associating
// expression e with value v.
-//
func emitDebugRef(f *Function, e ast.Expr, v Value, isAddr bool) {
if !f.debugInfo() {
return // debugging not enabled
@@ -68,7 +67,6 @@ func emitDebugRef(f *Function, e ast.Expr, v Value, isAddr bool) {
// where op is an eager shift, logical or arithmetic operation.
// (Use emitCompare() for comparisons and Builder.logicalBinop() for
// non-eager operations.)
-//
func emitArith(f *Function, op token.Token, x, y Value, t types.Type, pos token.Pos) Value {
switch op {
case token.SHL, token.SHR:
@@ -78,7 +76,7 @@ func emitArith(f *Function, op token.Token, x, y Value, t types.Type, pos token.
// There is a runtime panic if y is signed and <0. Instead of inserting a check for y<0
// and converting to an unsigned value (like the compiler) leave y as is.
- if b, ok := y.Type().Underlying().(*types.Basic); ok && b.Info()&types.IsUntyped != 0 {
+ if isUntyped(y.Type().Underlying()) {
// Untyped conversion:
// Spec https://go.dev/ref/spec#Operators:
// The right operand in a shift expression must have integer type or be an untyped constant
@@ -106,7 +104,6 @@ func emitArith(f *Function, op token.Token, x, y Value, t types.Type, pos token.
// emitCompare emits to f code compute the boolean result of
// comparison comparison 'x op y'.
-//
func emitCompare(f *Function, op token.Token, x, y Value, pos token.Pos) Value {
xt := x.Type().Underlying()
yt := y.Type().Underlying()
@@ -126,9 +123,9 @@ func emitCompare(f *Function, op token.Token, x, y Value, pos token.Pos) Value {
if types.Identical(xt, yt) {
// no conversion necessary
- } else if _, ok := xt.(*types.Interface); ok {
+ } else if isNonTypeParamInterface(x.Type()) {
y = emitConv(f, y, x.Type())
- } else if _, ok := yt.(*types.Interface); ok {
+ } else if isNonTypeParamInterface(y.Type()) {
x = emitConv(f, x, y.Type())
} else if _, ok := x.(*Const); ok {
x = emitConv(f, x, y.Type())
@@ -151,7 +148,6 @@ func emitCompare(f *Function, op token.Token, x, y Value, pos token.Pos) Value {
// isValuePreserving returns true if a conversion from ut_src to
// ut_dst is value-preserving, i.e. just a change of type.
// Precondition: neither argument is a named type.
-//
func isValuePreserving(ut_src, ut_dst types.Type) bool {
// Identical underlying types?
if structTypesIdentical(ut_dst, ut_src) {
@@ -176,7 +172,6 @@ func isValuePreserving(ut_src, ut_dst types.Type) bool {
// and returns the converted value. Implicit conversions are required
// by language assignability rules in assignments, parameter passing,
// etc.
-//
func emitConv(f *Function, val Value, typ types.Type) Value {
t_src := val.Type()
@@ -184,21 +179,20 @@ func emitConv(f *Function, val Value, typ types.Type) Value {
if types.Identical(t_src, typ) {
return val
}
-
ut_dst := typ.Underlying()
ut_src := t_src.Underlying()
- // Just a change of type, but not value or representation?
- if isValuePreserving(ut_src, ut_dst) {
- c := &ChangeType{X: val}
- c.setType(typ)
- return f.emit(c)
- }
-
// Conversion to, or construction of a value of, an interface type?
- if _, ok := ut_dst.(*types.Interface); ok {
+ if isNonTypeParamInterface(typ) {
+ // Interface name change?
+ if isValuePreserving(ut_src, ut_dst) {
+ c := &ChangeType{X: val}
+ c.setType(typ)
+ return f.emit(c)
+ }
+
// Assignment from one interface type to another?
- if _, ok := ut_src.(*types.Interface); ok {
+ if isNonTypeParamInterface(t_src) {
c := &ChangeInterface{X: val}
c.setType(typ)
return f.emit(c)
@@ -206,7 +200,7 @@ func emitConv(f *Function, val Value, typ types.Type) Value {
// Untyped nil constant? Return interface-typed nil constant.
if ut_src == tUntypedNil {
- return nilConst(typ)
+ return zeroConst(typ)
}
// Convert (non-nil) "untyped" literals to their default type.
@@ -214,15 +208,88 @@ func emitConv(f *Function, val Value, typ types.Type) Value {
val = emitConv(f, val, types.Default(ut_src))
}
- f.Pkg.Prog.needMethodsOf(val.Type())
mi := &MakeInterface{X: val}
mi.setType(typ)
return f.emit(mi)
}
+ // In the common case, the typesets of src and dst are singletons
+ // and we emit an appropriate conversion. But if either contains
+ // a type parameter, the conversion may represent a cross product,
+	// in which case we emit a MultiConvert.
+ dst_terms := typeSetOf(ut_dst)
+ src_terms := typeSetOf(ut_src)
+
+	// conversionCase describes an instruction pattern that may be emitted to
+ // model d <- s for d in dst_terms and s in src_terms.
+ // Multiple conversions can match the same pattern.
+ type conversionCase uint8
+ const (
+ changeType conversionCase = 1 << iota
+ sliceToArray
+ sliceToArrayPtr
+ sliceTo0Array
+ sliceTo0ArrayPtr
+ convert
+ )
+ classify := func(s, d types.Type) conversionCase {
+ // Just a change of type, but not value or representation?
+ if isValuePreserving(s, d) {
+ return changeType
+ }
+
+ // Conversion from slice to array or slice to array pointer?
+ if slice, ok := s.(*types.Slice); ok {
+ var arr *types.Array
+ var ptr bool
+ // Conversion from slice to array pointer?
+ switch d := d.(type) {
+ case *types.Array:
+ arr = d
+ case *types.Pointer:
+ arr, _ = d.Elem().Underlying().(*types.Array)
+ ptr = true
+ }
+ if arr != nil && types.Identical(slice.Elem(), arr.Elem()) {
+ if arr.Len() == 0 {
+ if ptr {
+ return sliceTo0ArrayPtr
+ } else {
+ return sliceTo0Array
+ }
+ }
+ if ptr {
+ return sliceToArrayPtr
+ } else {
+ return sliceToArray
+ }
+ }
+ }
+
+ // The only remaining case in well-typed code is a representation-
+ // changing conversion of basic types (possibly with []byte/[]rune).
+ if !isBasic(s) && !isBasic(d) {
+ panic(fmt.Sprintf("in %s: cannot convert term %s (%s [within %s]) to type %s [within %s]", f, val, val.Type(), s, typ, d))
+ }
+ return convert
+ }
+
+ var classifications conversionCase
+ for _, s := range src_terms {
+ us := s.Type().Underlying()
+ for _, d := range dst_terms {
+ ud := d.Type().Underlying()
+ classifications |= classify(us, ud)
+ }
+ }
+ if classifications == 0 {
+ panic(fmt.Sprintf("in %s: cannot convert %s (%s) to %s", f, val, val.Type(), typ))
+ }
+
// Conversion of a compile-time constant value?
if c, ok := val.(*Const); ok {
- if _, ok := ut_dst.(*types.Basic); ok || c.IsNil() {
+ // Conversion to a basic type?
+ if isBasic(ut_dst) {
// Conversion of a compile-time constant to
// another constant type results in a new
// constant of the destination type and
@@ -230,38 +297,80 @@ func emitConv(f *Function, val Value, typ types.Type) Value {
// We don't truncate the value yet.
return NewConst(c.Value, typ)
}
+ // Can we always convert from zero value without panicking?
+ const mayPanic = sliceToArray | sliceToArrayPtr
+ if c.Value == nil && classifications&mayPanic == 0 {
+ return NewConst(nil, typ)
+ }
// We're converting from constant to non-constant type,
// e.g. string -> []byte/[]rune.
}
- // Conversion from slice to array pointer?
- if slice, ok := ut_src.(*types.Slice); ok {
- if ptr, ok := ut_dst.(*types.Pointer); ok {
- if arr, ok := ptr.Elem().Underlying().(*types.Array); ok && types.Identical(slice.Elem(), arr.Elem()) {
- c := &SliceToArrayPointer{X: val}
- c.setType(ut_dst)
- return f.emit(c)
- }
- }
- }
- // A representation-changing conversion?
- // At least one of {ut_src,ut_dst} must be *Basic.
- // (The other may be []byte or []rune.)
- _, ok1 := ut_src.(*types.Basic)
- _, ok2 := ut_dst.(*types.Basic)
- if ok1 || ok2 {
+ switch classifications {
+ case changeType: // representation-preserving change
+ c := &ChangeType{X: val}
+ c.setType(typ)
+ return f.emit(c)
+
+ case sliceToArrayPtr, sliceTo0ArrayPtr: // slice to array pointer
+ c := &SliceToArrayPointer{X: val}
+ c.setType(typ)
+ return f.emit(c)
+
+ case sliceToArray: // slice to arrays (not zero-length)
+ ptype := types.NewPointer(typ)
+ p := &SliceToArrayPointer{X: val}
+ p.setType(ptype)
+ x := f.emit(p)
+ unOp := &UnOp{Op: token.MUL, X: x}
+ unOp.setType(typ)
+ return f.emit(unOp)
+
+ case sliceTo0Array: // slice to zero-length arrays (constant)
+ return zeroConst(typ)
+
+ case convert: // representation-changing conversion
c := &Convert{X: val}
c.setType(typ)
return f.emit(c)
+
+ default: // multiple conversion
+ c := &MultiConvert{X: val, from: src_terms, to: dst_terms}
+ c.setType(typ)
+ return f.emit(c)
}
+}
- panic(fmt.Sprintf("in %s: cannot convert %s (%s) to %s", f, val, val.Type(), typ))
+// emitTypeCoercion emits to f code to coerce the type of a
+// Value v to exactly type typ, and returns the coerced value.
+//
+// Requires that coercing v.Type() to typ is a value-preserving change.
+//
+// Currently used only when v.Type() is a type instance of typ or vice versa.
+// A type v is a type instance of a type t if there exists a
+// type parameter substitution σ s.t. σ(v) == t. Example:
+//
+// σ(func(T) T) == func(int) int for σ == [T ↦ int]
+//
+// This happens in instantiation wrappers for conversion
+// from an instantiation to a parameterized type (and vice versa)
+// with σ substituting f.typeparams by f.typeargs.
+func emitTypeCoercion(f *Function, v Value, typ types.Type) Value {
+ if types.Identical(v.Type(), typ) {
+ return v // no coercion needed
+ }
+ // TODO(taking): for instances should we record which side is the instance?
+ c := &ChangeType{
+ X: v,
+ }
+ c.setType(typ)
+ f.emit(c)
+ return c
}
// emitStore emits to f an instruction to store value val at location
// addr, applying implicit conversions as required by assignability rules.
-//
func emitStore(f *Function, addr, val Value, pos token.Pos) *Store {
s := &Store{
Addr: addr,
@@ -274,7 +383,6 @@ func emitStore(f *Function, addr, val Value, pos token.Pos) *Store {
// emitJump emits to f a jump to target, and updates the control-flow graph.
// Postcondition: f.currentBlock is nil.
-//
func emitJump(f *Function, target *BasicBlock) {
b := f.currentBlock
b.emit(new(Jump))
@@ -285,7 +393,6 @@ func emitJump(f *Function, target *BasicBlock) {
// emitIf emits to f a conditional jump to tblock or fblock based on
// cond, and updates the control-flow graph.
// Postcondition: f.currentBlock is nil.
-//
func emitIf(f *Function, cond Value, tblock, fblock *BasicBlock) {
b := f.currentBlock
b.emit(&If{Cond: cond})
@@ -296,7 +403,6 @@ func emitIf(f *Function, cond Value, tblock, fblock *BasicBlock) {
// emitExtract emits to f an instruction to extract the index'th
// component of tuple. It returns the extracted value.
-//
func emitExtract(f *Function, tuple Value, index int) Value {
e := &Extract{Tuple: tuple, Index: index}
e.setType(tuple.Type().(*types.Tuple).At(index).Type())
@@ -305,7 +411,6 @@ func emitExtract(f *Function, tuple Value, index int) Value {
// emitTypeAssert emits to f a type assertion value := x.(t) and
// returns the value. x.Type() must be an interface.
-//
func emitTypeAssert(f *Function, x Value, t types.Type, pos token.Pos) Value {
a := &TypeAssert{X: x, AssertedType: t}
a.setPos(pos)
@@ -315,7 +420,6 @@ func emitTypeAssert(f *Function, x Value, t types.Type, pos token.Pos) Value {
// emitTypeTest emits to f a type test value,ok := x.(t) and returns
// a (value, ok) tuple. x.Type() must be an interface.
-//
func emitTypeTest(f *Function, x Value, t types.Type, pos token.Pos) Value {
a := &TypeAssert{
X: x,
@@ -335,7 +439,6 @@ func emitTypeTest(f *Function, x Value, t types.Type, pos token.Pos) Value {
// Intended for wrapper methods.
// Precondition: f does/will not use deferred procedure calls.
// Postcondition: f.currentBlock is nil.
-//
func emitTailCall(f *Function, call *Call) {
tresults := f.Signature.Results()
nr := tresults.Len()
@@ -372,16 +475,16 @@ func emitTailCall(f *Function, call *Call) {
// If v is the address of a struct, the result will be the address of
// a field; if it is the value of a struct, the result will be the
// value of a field.
-//
-func emitImplicitSelections(f *Function, v Value, indices []int) Value {
+func emitImplicitSelections(f *Function, v Value, indices []int, pos token.Pos) Value {
for _, index := range indices {
- fld := deref(v.Type()).Underlying().(*types.Struct).Field(index)
+ fld := typeparams.CoreType(deref(v.Type())).(*types.Struct).Field(index)
if isPointer(v.Type()) {
instr := &FieldAddr{
X: v,
Field: index,
}
+ instr.setPos(pos)
instr.setType(types.NewPointer(fld.Type()))
v = f.emit(instr)
// Load the field's value iff indirectly embedded.
@@ -393,6 +496,7 @@ func emitImplicitSelections(f *Function, v Value, indices []int) Value {
X: v,
Field: index,
}
+ instr.setPos(pos)
instr.setType(fld.Type())
v = f.emit(instr)
}
@@ -406,9 +510,8 @@ func emitImplicitSelections(f *Function, v Value, indices []int) Value {
// will be the field's address; otherwise the result will be the
// field's value.
// Ident id is used for position and debug info.
-//
func emitFieldSelection(f *Function, v Value, index int, wantAddr bool, id *ast.Ident) Value {
- fld := deref(v.Type()).Underlying().(*types.Struct).Field(index)
+ fld := typeparams.CoreType(deref(v.Type())).(*types.Struct).Field(index)
if isPointer(v.Type()) {
instr := &FieldAddr{
X: v,
@@ -436,7 +539,6 @@ func emitFieldSelection(f *Function, v Value, index int, wantAddr bool, id *ast.
// zeroValue emits to f code to produce a zero value of type t,
// and returns it.
-//
func zeroValue(f *Function, t types.Type) Value {
switch t.Underlying().(type) {
case *types.Struct, *types.Array:
@@ -454,7 +556,6 @@ func zeroValue(f *Function, t types.Type) Value {
// type.
//
// Idempotent.
-//
func createRecoverBlock(f *Function) {
if f.Recover != nil {
return // already created
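
The classify/classifications machinery in emitConv is easiest to read next to concrete inputs. A hypothetical snippet (demo, toArray, and cross are illustrative names; the instruction shapes in the comments are rough sketches, not exact ssadump output; the slice-to-array conversion needs a Go 1.20 toolchain):

	package demo

	// [4]byte(s) classifies as sliceToArray: emitConv emits a SliceToArrayPointer
	// of type *[4]byte and then dereferences it with a UnOp, roughly
	//	t0 = slice to array pointer *[4]byte <- []byte (s)
	//	t1 = *t0
	func toArray(s []byte) [4]byte { return [4]byte(s) }

	// Here both the source and destination type sets are {[]byte, string}, so the
	// per-term classifications mix changeType and convert; emitConv falls through
	// to the default case and emits a single MultiConvert.
	func cross[D, S ~[]byte | ~string](x S) D { return D(x) }
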
diff --git a/go/ssa/example_test.go b/go/ssa/example_test.go
index 2ab9e9926..9a5fd4369 100644
--- a/go/ssa/example_test.go
+++ b/go/ssa/example_test.go
@@ -2,6 +2,9 @@
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
+//go:build !android && !ios && !js
+// +build !android,!ios,!js
+
package ssa_test
import (
@@ -48,7 +51,6 @@ func main() {
// Build and run the ssadump.go program if you want a standalone tool
// with similar functionality. It is located at
// golang.org/x/tools/cmd/ssadump.
-//
func Example_buildPackage() {
// Replace interface{} with any for this test.
ssa.SetNormalizeAnyForTesting(true)
@@ -159,7 +161,7 @@ func Example_loadWholeProgram() {
}
// Create SSA packages for well-typed packages and their dependencies.
- prog, pkgs := ssautil.AllPackages(initial, ssa.PrintPackages)
+ prog, pkgs := ssautil.AllPackages(initial, ssa.PrintPackages|ssa.InstantiateGenerics)
_ = pkgs
// Build SSA code for the whole program.
diff --git a/go/ssa/func.go b/go/ssa/func.go
index 8fc089e5d..57f5f718f 100644
--- a/go/ssa/func.go
+++ b/go/ssa/func.go
@@ -15,6 +15,8 @@ import (
"io"
"os"
"strings"
+
+ "golang.org/x/tools/internal/typeparams"
)
// Like ObjectOf, but panics instead of returning nil.
@@ -31,15 +33,62 @@ func (f *Function) objectOf(id *ast.Ident) types.Object {
// Only valid during f's create and build phases.
func (f *Function) typeOf(e ast.Expr) types.Type {
if T := f.info.TypeOf(e); T != nil {
- return T
+ return f.typ(T)
}
panic(fmt.Sprintf("no type for %T @ %s", e, f.Prog.Fset.Position(e.Pos())))
}
+// typ is the locally instantiated type of T. T==typ(T) if f is not an instantiation.
+func (f *Function) typ(T types.Type) types.Type {
+ return f.subst.typ(T)
+}
+
+// If id is an Instance, returns info.Instances[id].Type.
+// Otherwise returns f.typeOf(id).
+func (f *Function) instanceType(id *ast.Ident) types.Type {
+ if t, ok := typeparams.GetInstances(f.info)[id]; ok {
+ return t.Type
+ }
+ return f.typeOf(id)
+}
+
+// selection returns a *selection corresponding to f.info.Selections[selector]
+// with potential updates for type substitution.
+func (f *Function) selection(selector *ast.SelectorExpr) *selection {
+ sel := f.info.Selections[selector]
+ if sel == nil {
+ return nil
+ }
+
+ switch sel.Kind() {
+ case types.MethodExpr, types.MethodVal:
+ if recv := f.typ(sel.Recv()); recv != sel.Recv() {
+ // recv changed during type substitution.
+ pkg := f.declaredPackage().Pkg
+ obj, index, indirect := types.LookupFieldOrMethod(recv, true, pkg, sel.Obj().Name())
+
+ // sig replaces sel.Type(). See (types.Selection).Typ() for details.
+ sig := obj.Type().(*types.Signature)
+ sig = changeRecv(sig, newVar(sig.Recv().Name(), recv))
+ if sel.Kind() == types.MethodExpr {
+ sig = recvAsFirstArg(sig)
+ }
+ return &selection{
+ kind: sel.Kind(),
+ recv: recv,
+ typ: sig,
+ obj: obj,
+ index: index,
+ indirect: indirect,
+ }
+ }
+ }
+ return toSelection(sel)
+}
+
// Destinations associated with unlabelled for/switch/select stmts.
// We push/pop one of these as we enter/leave each construct and for
// each BranchStmt we scan for the innermost target of the right type.
-//
type targets struct {
tail *targets // rest of stack
_break *BasicBlock
@@ -50,7 +99,6 @@ type targets struct {
// Destinations associated with a labelled block.
// We populate these as labels are encountered in forward gotos or
// labelled statements.
-//
type lblock struct {
_goto *BasicBlock
_break *BasicBlock
@@ -59,22 +107,21 @@ type lblock struct {
// labelledBlock returns the branch target associated with the
// specified label, creating it if needed.
-//
func (f *Function) labelledBlock(label *ast.Ident) *lblock {
- lb := f.lblocks[label.Obj]
+ obj := f.objectOf(label)
+ lb := f.lblocks[obj]
if lb == nil {
lb = &lblock{_goto: f.newBasicBlock(label.Name)}
if f.lblocks == nil {
- f.lblocks = make(map[*ast.Object]*lblock)
+ f.lblocks = make(map[types.Object]*lblock)
}
- f.lblocks[label.Obj] = lb
+ f.lblocks[obj] = lb
}
return lb
}
// addParam adds a (non-escaping) parameter to f.Params of the
// specified name, type and source position.
-//
func (f *Function) addParam(name string, typ types.Type, pos token.Pos) *Parameter {
v := &Parameter{
name: name,
@@ -91,7 +138,7 @@ func (f *Function) addParamObj(obj types.Object) *Parameter {
if name == "" {
name = fmt.Sprintf("arg%d", len(f.Params))
}
- param := f.addParam(name, obj.Type(), obj.Pos())
+ param := f.addParam(name, f.typ(obj.Type()), obj.Pos())
param.object = obj
return param
}
@@ -99,11 +146,10 @@ func (f *Function) addParamObj(obj types.Object) *Parameter {
// addSpilledParam declares a parameter that is pre-spilled to the
// stack; the function body will load/store the spilled location.
// Subsequent lifting will eliminate spills where possible.
-//
func (f *Function) addSpilledParam(obj types.Object) {
param := f.addParamObj(obj)
spill := &Alloc{Comment: obj.Name()}
- spill.setType(types.NewPointer(obj.Type()))
+ spill.setType(types.NewPointer(param.Type()))
spill.setPos(obj.Pos())
f.objects[obj] = spill
f.Locals = append(f.Locals, spill)
@@ -113,7 +159,6 @@ func (f *Function) addSpilledParam(obj types.Object) {
// startBody initializes the function prior to generating SSA code for its body.
// Precondition: f.Type() already set.
-//
func (f *Function) startBody() {
f.currentBlock = f.newBasicBlock("entry")
f.objects = make(map[types.Object]Value) // needed for some synthetics, e.g. init
@@ -127,7 +172,6 @@ func (f *Function) startBody() {
// f.startBody() was called. f.info != nil.
// Postcondition:
// len(f.Params) == len(f.Signature.Params) + (f.Signature.Recv() ? 1 : 0)
-//
func (f *Function) createSyntacticParams(recv *ast.FieldList, functype *ast.FuncType) {
// Receiver (at most one inner iteration).
if recv != nil {
@@ -174,7 +218,6 @@ type setNumable interface {
// numberRegisters assigns numbers to all SSA registers
// (value-defining Instructions) in f, to aid debugging.
// (Non-Instruction Values are named at construction.)
-//
func numberRegisters(f *Function) {
v := 0
for _, b := range f.Blocks {
@@ -207,7 +250,39 @@ func buildReferrers(f *Function) {
}
}
-// finishBody() finalizes the function after SSA code generation of its body.
+// mayNeedRuntimeTypes returns all of the types in the body of fn that might need runtime types.
+//
+// EXCLUSIVE_LOCKS_ACQUIRED(fn.Prog.methodsMu)
+func mayNeedRuntimeTypes(fn *Function) []types.Type {
+ // Collect all types that may need rtypes, i.e. those that flow into an interface.
+ var ts []types.Type
+ for _, bb := range fn.Blocks {
+ for _, instr := range bb.Instrs {
+ if mi, ok := instr.(*MakeInterface); ok {
+ ts = append(ts, mi.X.Type())
+ }
+ }
+ }
+
+ // Types that contain a parameterized type are considered to not be runtime types.
+ if fn.typeparams.Len() == 0 {
+ return ts // No potentially parameterized types.
+ }
+ // Filter parameterized types, in place.
+ fn.Prog.methodsMu.Lock()
+ defer fn.Prog.methodsMu.Unlock()
+ filtered := ts[:0]
+ for _, t := range ts {
+ if !fn.Prog.parameterized.isParameterized(t) {
+ filtered = append(filtered, t)
+ }
+ }
+ return filtered
+}
+
+// finishBody() finalizes the contents of the function after SSA code generation of its body.
+//
+// The function is not done being built until done() is called.
func (f *Function) finishBody() {
f.objects = nil
f.currentBlock = nil
@@ -248,23 +323,38 @@ func (f *Function) finishBody() {
// clear remaining stateful variables
f.namedResults = nil // (used by lifting)
f.info = nil
+ f.subst = nil
- numberRegisters(f)
+ numberRegisters(f) // uses f.namedRegisters
+}
- if f.Prog.mode&PrintFunctions != 0 {
- printMu.Lock()
- f.WriteTo(os.Stdout)
- printMu.Unlock()
- }
+// After this, the function is done with the BUILD phase.
+func (f *Function) done() {
+ assert(f.parent == nil, "done called on an anonymous function")
+
+ var visit func(*Function)
+ visit = func(f *Function) {
+ for _, anon := range f.AnonFuncs {
+ visit(anon) // anon is done building before f.
+ }
+
+ f.built = true // function is done with BUILD phase
+
+ if f.Prog.mode&PrintFunctions != 0 {
+ printMu.Lock()
+ f.WriteTo(os.Stdout)
+ printMu.Unlock()
+ }
- if f.Prog.mode&SanityCheckFunctions != 0 {
- mustSanityCheck(f, nil)
+ if f.Prog.mode&SanityCheckFunctions != 0 {
+ mustSanityCheck(f, nil)
+ }
}
+ visit(f)
}
// removeNilBlocks eliminates nils from f.Blocks and updates each
// BasicBlock.Index. Use this after any pass that may delete blocks.
-//
func (f *Function) removeNilBlocks() {
j := 0
for _, b := range f.Blocks {
@@ -285,7 +375,6 @@ func (f *Function) removeNilBlocks() {
// functions will include full debug info. This greatly increases the
// size of the instruction stream, and causes Functions to depend upon
// the ASTs, potentially keeping them live in memory for longer.
-//
func (pkg *Package) SetDebugMode(debug bool) {
// TODO(adonovan): do we want ast.File granularity?
pkg.debug = debug
@@ -299,7 +388,6 @@ func (f *Function) debugInfo() bool {
// addNamedLocal creates a local variable, adds it to function f and
// returns it. Its name and type are taken from obj. Subsequent
// calls to f.lookup(obj) will return the same local.
-//
func (f *Function) addNamedLocal(obj types.Object) *Alloc {
l := f.addLocal(obj.Type(), obj.Pos())
l.Comment = obj.Name()
@@ -313,8 +401,8 @@ func (f *Function) addLocalForIdent(id *ast.Ident) *Alloc {
// addLocal creates an anonymous local variable of type typ, adds it
// to function f and returns it. pos is the optional source location.
-//
func (f *Function) addLocal(typ types.Type, pos token.Pos) *Alloc {
+ typ = f.typ(typ)
v := &Alloc{}
v.setType(types.NewPointer(typ))
v.setPos(pos)
@@ -327,7 +415,6 @@ func (f *Function) addLocal(typ types.Type, pos token.Pos) *Alloc {
// that is local to function f or one of its enclosing functions.
// If escaping, the reference comes from a potentially escaping pointer
// expression and the referent must be heap-allocated.
-//
func (f *Function) lookup(obj types.Object, escaping bool) Value {
if v, ok := f.objects[obj]; ok {
if alloc, ok := v.(*Alloc); ok && escaping {
@@ -365,13 +452,14 @@ func (f *Function) emit(instr Instruction) Value {
// The specific formatting rules are not guaranteed and may change.
//
// Examples:
-// "math.IsNaN" // a package-level function
-// "(*bytes.Buffer).Bytes" // a declared method or a wrapper
-// "(*bytes.Buffer).Bytes$thunk" // thunk (func wrapping method; receiver is param 0)
-// "(*bytes.Buffer).Bytes$bound" // bound (func wrapping method; receiver supplied by closure)
-// "main.main$1" // an anonymous function in main
-// "main.init#1" // a declared init function
-// "main.init" // the synthesized package initializer
+//
+// "math.IsNaN" // a package-level function
+// "(*bytes.Buffer).Bytes" // a declared method or a wrapper
+// "(*bytes.Buffer).Bytes$thunk" // thunk (func wrapping method; receiver is param 0)
+// "(*bytes.Buffer).Bytes$bound" // bound (func wrapping method; receiver supplied by closure)
+// "main.main$1" // an anonymous function in main
+// "main.init#1" // a declared init function
+// "main.init" // the synthesized package initializer
//
// When these functions are referred to from within the same package
// (i.e. from == f.Pkg.Object), they are rendered without the package path.
@@ -381,7 +469,6 @@ func (f *Function) emit(instr Instruction) Value {
// (But two methods may have the same name "(T).f" if one is a synthetic
// wrapper promoting a non-exported method "f" from another package; in
// that case, the strings are equal but the identifiers "f" are distinct.)
-//
func (f *Function) RelString(from *types.Package) string {
// Anonymous?
if f.parent != nil {
@@ -404,7 +491,7 @@ func (f *Function) RelString(from *types.Package) string {
// Thunk?
if f.method != nil {
- return f.relMethod(from, f.method.Recv())
+ return f.relMethod(from, f.method.recv)
}
// Bound?
@@ -448,9 +535,8 @@ func (fn *Function) declaredPackage() *Package {
switch {
case fn.Pkg != nil:
return fn.Pkg // non-generic function
- // generics:
- // case fn.Origin != nil:
- // return fn.Origin.pkg // instance of a named generic function
+ case fn.topLevelOrigin != nil:
+ return fn.topLevelOrigin.Pkg // instance of a named generic function
case fn.parent != nil:
return fn.parent.declaredPackage() // instance of an anonymous [generic] function
default:
@@ -572,7 +658,6 @@ func WriteFunction(buf *bytes.Buffer, f *Function) {
// newBasicBlock adds to f a new basic block and returns it. It does
// not automatically become the current block for subsequent calls to emit.
// comment is an optional string for more readable debugging output.
-//
func (f *Function) newBasicBlock(comment string) *BasicBlock {
b := &BasicBlock{
Index: len(f.Blocks),
@@ -598,7 +683,6 @@ func (f *Function) newBasicBlock(comment string) *BasicBlock {
// "reflect" package, etc.
//
// TODO(adonovan): think harder about the API here.
-//
func (prog *Program) NewFunction(name string, sig *types.Signature, provenance string) *Function {
return &Function{Prog: prog, name: name, Signature: sig, Synthetic: provenance}
}
@@ -616,5 +700,4 @@ func (n extentNode) End() token.Pos { return n[1] }
// the result is the *ast.FuncDecl or *ast.FuncLit that declared the
// function. Otherwise, it is an opaque Node providing only position
// information; this avoids pinning the AST in memory.
-//
func (f *Function) Syntax() ast.Node { return f.syntax }
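
mayNeedRuntimeTypes above only collects MakeInterface operand types and then drops the parameterized ones, which is exactly what the TestIssue58491 tests earlier in this change rely on. A hypothetical illustration (demo, pair, and show are made-up names; the comments describe the expected behavior under ssa.InstantiateGenerics rather than verified output):

	package demo

	type pair[T any] struct{ x, y T }

	// In the generic origin of show, the MakeInterface operand has type pair[T],
	// which is parameterized, so mayNeedRuntimeTypes filters it out. In the
	// instance show[int], the operand type is pair[int], which should then show
	// up in Program.RuntimeTypes().
	func show[T any](v T) any {
		p := pair[T]{v, v}
		return p
	}

	var _ = show[int]
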
diff --git a/go/ssa/instantiate.go b/go/ssa/instantiate.go
new file mode 100644
index 000000000..38249dea2
--- /dev/null
+++ b/go/ssa/instantiate.go
@@ -0,0 +1,177 @@
+// Copyright 2022 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package ssa
+
+import (
+ "fmt"
+ "go/ast"
+ "go/types"
+
+ "golang.org/x/tools/internal/typeparams"
+)
+
+// _Instances returns all of the instances generated by runtime types for this function in an unspecified order.
+//
+// Thread-safe.
+//
+// This is an experimental interface! It may change without warning.
+func (prog *Program) _Instances(fn *Function) []*Function {
+ if fn.typeparams.Len() == 0 || len(fn.typeargs) > 0 {
+ return nil
+ }
+
+ prog.methodsMu.Lock()
+ defer prog.methodsMu.Unlock()
+ return prog.instances[fn].list()
+}
+
+// A set of instantiations of a generic function fn.
+type instanceSet struct {
+ fn *Function // fn.typeparams.Len() > 0 and len(fn.typeargs) == 0.
+ instances map[*typeList]*Function // canonical type arguments to an instance.
+ syntax *ast.FuncDecl // fn.syntax copy for instantiating after fn is done. nil on synthetic packages.
+ info *types.Info // fn.pkg.info copy for building after fn is done. nil on synthetic packages.
+
+ // TODO(taking): Consider ways to allow for clearing syntax and info when done building.
+ // May require a public API change as MethodValue can request these be built after prog.Build() is done.
+}
+
+func (insts *instanceSet) list() []*Function {
+ if insts == nil {
+ return nil
+ }
+
+ fns := make([]*Function, 0, len(insts.instances))
+ for _, fn := range insts.instances {
+ fns = append(fns, fn)
+ }
+ return fns
+}
+
+// createInstanceSet adds a new instanceSet for a generic function fn if one does not exist.
+//
+// Precondition: fn is a package level declaration (function or method).
+//
+// EXCLUSIVE_LOCKS_ACQUIRED(prog.methodsMu)
+func (prog *Program) createInstanceSet(fn *Function) {
+ assert(fn.typeparams.Len() > 0 && len(fn.typeargs) == 0, "Can only create instance sets for generic functions")
+
+ prog.methodsMu.Lock()
+ defer prog.methodsMu.Unlock()
+
+ syntax, _ := fn.syntax.(*ast.FuncDecl)
+ assert((syntax == nil) == (fn.syntax == nil), "fn.syntax is either nil or a *ast.FuncDecl")
+
+ if _, ok := prog.instances[fn]; !ok {
+ prog.instances[fn] = &instanceSet{
+ fn: fn,
+ syntax: syntax,
+ info: fn.info,
+ }
+ }
+}
+
+// needsInstance returns a Function that is the instantiation of fn with the type arguments targs.
+//
+// Any CREATEd instance is added to cr.
+//
+// EXCLUSIVE_LOCKS_ACQUIRED(prog.methodsMu)
+func (prog *Program) needsInstance(fn *Function, targs []types.Type, cr *creator) *Function {
+ prog.methodsMu.Lock()
+ defer prog.methodsMu.Unlock()
+
+ return prog.lookupOrCreateInstance(fn, targs, cr)
+}
+
+// lookupOrCreateInstance returns a Function that is the instantiation of fn with the type arguments targs.
+//
+// Any CREATEd instance is added to cr.
+//
+// EXCLUSIVE_LOCKS_REQUIRED(prog.methodsMu)
+func (prog *Program) lookupOrCreateInstance(fn *Function, targs []types.Type, cr *creator) *Function {
+ return prog.instances[fn].lookupOrCreate(targs, &prog.parameterized, cr)
+}
+
+// lookupOrCreate returns the instantiation of insts.fn using targs.
+// If the instantiation is created, it is added to cr.
+func (insts *instanceSet) lookupOrCreate(targs []types.Type, parameterized *tpWalker, cr *creator) *Function {
+ if insts.instances == nil {
+ insts.instances = make(map[*typeList]*Function)
+ }
+
+ fn := insts.fn
+ prog := fn.Prog
+
+ // canonicalize on a tuple of targs. Sig is not unique.
+ //
+ // func A[T any]() {
+ // var x T
+ // fmt.Println("%T", x)
+ // }
+ key := prog.canon.List(targs)
+ if inst, ok := insts.instances[key]; ok {
+ return inst
+ }
+
+ // CREATE instance/instantiation wrapper
+ var syntax ast.Node
+ if insts.syntax != nil {
+ syntax = insts.syntax
+ }
+
+ var sig *types.Signature
+ var obj *types.Func
+ if recv := fn.Signature.Recv(); recv != nil {
+ // method
+ m := fn.object.(*types.Func)
+ obj = prog.canon.instantiateMethod(m, targs, prog.ctxt)
+ sig = obj.Type().(*types.Signature)
+ } else {
+ instSig, err := typeparams.Instantiate(prog.ctxt, fn.Signature, targs, false)
+ if err != nil {
+ panic(err)
+ }
+ instance, ok := instSig.(*types.Signature)
+ if !ok {
+ panic("Instantiate of a Signature returned a non-signature")
+ }
+ obj = fn.object.(*types.Func) // instantiation does not exist yet
+ sig = prog.canon.Type(instance).(*types.Signature)
+ }
+
+ var synthetic string
+ var subst *subster
+
+ concrete := !parameterized.anyParameterized(targs)
+
+ if prog.mode&InstantiateGenerics != 0 && concrete {
+ synthetic = fmt.Sprintf("instance of %s", fn.Name())
+ scope := typeparams.OriginMethod(obj).Scope()
+ subst = makeSubster(prog.ctxt, scope, fn.typeparams, targs, false)
+ } else {
+ synthetic = fmt.Sprintf("instantiation wrapper of %s", fn.Name())
+ }
+
+ name := fmt.Sprintf("%s%s", fn.Name(), targs) // may not be unique
+ instance := &Function{
+ name: name,
+ object: obj,
+ Signature: sig,
+ Synthetic: synthetic,
+ syntax: syntax,
+ topLevelOrigin: fn,
+ pos: obj.Pos(),
+ Pkg: nil,
+ Prog: fn.Prog,
+ typeparams: fn.typeparams, // share with origin
+ typeargs: targs,
+ info: insts.info, // on synthetic packages info is nil.
+ subst: subst,
+ }
+
+ cr.Add(instance)
+ insts.instances[key] = instance
+ return instance
+}
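// A minimal sketch of the canonicalization idea behind lookupOrCreate above: the
// instance cache is keyed on a canonicalized tuple of type arguments, so two
// requests with structurally identical targs return the same *Function. keyOf is
// a hypothetical stand-in for prog.canon.List, keying on the printed form of the
// arguments purely for illustration.
package main

import (
	"fmt"
	"go/types"
	"strings"
)

// keyOf builds a lookup key from a tuple of type arguments by printing each type.
func keyOf(targs []types.Type) string {
	parts := make([]string, len(targs))
	for i, t := range targs {
		parts[i] = types.TypeString(t, nil)
	}
	return "[" + strings.Join(parts, ",") + "]"
}

func main() {
	// cache stands in for instanceSet.instances (map[*typeList]*Function).
	cache := map[string]string{}

	k1 := keyOf([]types.Type{types.NewSlice(types.Typ[types.Int])})
	k2 := keyOf([]types.Type{types.NewSlice(types.Typ[types.Int])}) // structurally identical request

	cache[k1] = "Load[[]int]"
	_, hit := cache[k2]
	fmt.Println(k1 == k2, hit) // true true: the second request reuses the cached instance
}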
diff --git a/go/ssa/instantiate_test.go b/go/ssa/instantiate_test.go
new file mode 100644
index 000000000..cd33e7e65
--- /dev/null
+++ b/go/ssa/instantiate_test.go
@@ -0,0 +1,361 @@
+// Copyright 2022 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package ssa
+
+// Note: Tests use unexported method _Instances.
+
+import (
+ "bytes"
+ "fmt"
+ "go/types"
+ "reflect"
+ "sort"
+ "strings"
+ "testing"
+
+ "golang.org/x/tools/go/loader"
+ "golang.org/x/tools/internal/typeparams"
+)
+
+// loadProgram creates loader.Program out of p.
+func loadProgram(p string) (*loader.Program, error) {
+ // Parse
+ var conf loader.Config
+ f, err := conf.ParseFile("<input>", p)
+ if err != nil {
+ return nil, fmt.Errorf("parse: %v", err)
+ }
+ conf.CreateFromFiles("p", f)
+
+ // Load
+ lprog, err := conf.Load()
+ if err != nil {
+ return nil, fmt.Errorf("Load: %v", err)
+ }
+ return lprog, nil
+}
+
+// buildPackage builds and returns ssa representation of package pkg of lprog.
+func buildPackage(lprog *loader.Program, pkg string, mode BuilderMode) *Package {
+ prog := NewProgram(lprog.Fset, mode)
+
+ for _, info := range lprog.AllPackages {
+ prog.CreatePackage(info.Pkg, info.Files, &info.Info, info.Importable)
+ }
+
+ p := prog.Package(lprog.Package(pkg).Pkg)
+ p.Build()
+ return p
+}
+
+// TestNeedsInstance ensures that new method instances can be created via needsInstance,
+// that their TypeArgs are as expected, and that they can be accessed via _Instances.
+func TestNeedsInstance(t *testing.T) {
+ if !typeparams.Enabled {
+ return
+ }
+ const input = `
+package p
+
+import "unsafe"
+
+type Pointer[T any] struct {
+ v unsafe.Pointer
+}
+
+func (x *Pointer[T]) Load() *T {
+ return (*T)(LoadPointer(&x.v))
+}
+
+func LoadPointer(addr *unsafe.Pointer) (val unsafe.Pointer)
+`
+ // The SSA members for this package should look something like this:
+ // func LoadPointer func(addr *unsafe.Pointer) (val unsafe.Pointer)
+ // type Pointer struct{v unsafe.Pointer}
+ // method (*Pointer[T any]) Load() *T
+ // func init func()
+ // var init$guard bool
+
+ lprog, err := loadProgram(input)
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ for _, mode := range []BuilderMode{BuilderMode(0), InstantiateGenerics} {
+ // Create and build SSA
+ p := buildPackage(lprog, "p", mode)
+ prog := p.Prog
+
+ ptr := p.Type("Pointer").Type().(*types.Named)
+ if ptr.NumMethods() != 1 {
+ t.Fatalf("Expected Pointer to have 1 method. got %d", ptr.NumMethods())
+ }
+
+ obj := ptr.Method(0)
+ if obj.Name() != "Load" {
+ t.Errorf("Expected Pointer to have method named 'Load'. got %q", obj.Name())
+ }
+
+ meth := prog.FuncValue(obj)
+
+ var cr creator
+ intSliceTyp := types.NewSlice(types.Typ[types.Int])
+ instance := prog.needsInstance(meth, []types.Type{intSliceTyp}, &cr)
+ if len(cr) != 1 {
+ t.Errorf("Expected first instance to create a function. got %d created functions", len(cr))
+ }
+ if instance.Origin() != meth {
+ t.Errorf("Expected Origin of %s to be %s. got %s", instance, meth, instance.Origin())
+ }
+ if len(instance.TypeArgs()) != 1 || !types.Identical(instance.TypeArgs()[0], intSliceTyp) {
+ t.Errorf("Expected TypeArgs of %s to be %v. got %v", instance, []types.Type{intSliceTyp}, instance.typeargs)
+ }
+ instances := prog._Instances(meth)
+ if want := []*Function{instance}; !reflect.DeepEqual(instances, want) {
+ t.Errorf("Expected instances of %s to be %v. got %v", meth, want, instances)
+ }
+
+ // A second request with an identical type returns the same Function.
+ second := prog.needsInstance(meth, []types.Type{types.NewSlice(types.Typ[types.Int])}, &cr)
+ if second != instance || len(cr) != 1 {
+ t.Error("Expected second identical instantiation to not create a function")
+ }
+
+ // Add a second instance.
+ inst2 := prog.needsInstance(meth, []types.Type{types.NewSlice(types.Typ[types.Uint])}, &cr)
+ instances = prog._Instances(meth)
+
+ // Note: instance.Name() < inst2.Name()
+ sort.Slice(instances, func(i, j int) bool {
+ return instances[i].Name() < instances[j].Name()
+ })
+ if want := []*Function{instance, inst2}; !reflect.DeepEqual(instances, want) {
+ t.Errorf("Expected instances of %s to be %v. got %v", meth, want, instances)
+ }
+
+ // build and sanity check manually created instance.
+ var b builder
+ b.buildFunction(instance)
+ var buf bytes.Buffer
+ if !sanityCheck(instance, &buf) {
+ t.Errorf("sanityCheck of %s failed with: %s", instance, buf.String())
+ }
+ }
+}
+
+// TestCallsToInstances checks that callees of calls to generic functions,
+// without monomorphization, are wrappers around the origin generic function.
+func TestCallsToInstances(t *testing.T) {
+ if !typeparams.Enabled {
+ return
+ }
+ const input = `
+package p
+
+type I interface {
+ Foo()
+}
+
+type A int
+func (a A) Foo() {}
+
+type J[T any] interface{ Bar() T }
+type K[T any] struct{ J[T] }
+
+func Id[T any] (t T) T {
+ return t
+}
+
+func Lambda[T I]() func() func(T) {
+ return func() func(T) {
+ return T.Foo
+ }
+}
+
+func NoOp[T any]() {}
+
+func Bar[T interface { Foo(); ~int | ~string }, U any] (t T, u U) {
+ Id[U](u)
+ Id[T](t)
+}
+
+func Make[T any]() interface{} {
+ NoOp[K[T]]()
+ return nil
+}
+
+func entry(i int, a A) int {
+ Lambda[A]()()(a)
+
+ x := Make[int]()
+ if j, ok := x.(interface{ Bar() int }); ok {
+ print(j)
+ }
+
+ Bar[A, int](a, i)
+
+ return Id[int](i)
+}
+`
+ lprog, err := loadProgram(input)
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ p := buildPackage(lprog, "p", SanityCheckFunctions)
+ prog := p.Prog
+
+ for _, ti := range []struct {
+ orig string
+ instance string
+ tparams string
+ targs string
+ chTypeInstrs int // number of ChangeType instructions in f's body
+ }{
+ {"Id", "Id[int]", "[T]", "[int]", 2},
+ {"Lambda", "Lambda[p.A]", "[T]", "[p.A]", 1},
+ {"Make", "Make[int]", "[T]", "[int]", 0},
+ {"NoOp", "NoOp[p.K[T]]", "[T]", "[p.K[T]]", 0},
+ } {
+ test := ti
+ t.Run(test.instance, func(t *testing.T) {
+ f := p.Members[test.orig].(*Function)
+ if f == nil {
+ t.Fatalf("origin function not found")
+ }
+
+ i := instanceOf(f, test.instance, prog)
+ if i == nil {
+ t.Fatalf("instance not found")
+ }
+
+ // for logging on failures
+ var body strings.Builder
+ i.WriteTo(&body)
+ t.Log(body.String())
+
+ if len(i.Blocks) != 1 {
+ t.Fatalf("body has more than 1 block")
+ }
+
+ if instrs := changeTypeInstrs(i.Blocks[0]); instrs != test.chTypeInstrs {
+ t.Errorf("want %v instructions; got %v", test.chTypeInstrs, instrs)
+ }
+
+ if test.tparams != tparams(i) {
+ t.Errorf("want %v type params; got %v", test.tparams, tparams(i))
+ }
+
+ if test.targs != targs(i) {
+ t.Errorf("want %v type arguments; got %v", test.targs, targs(i))
+ }
+ })
+ }
+}
+
+func instanceOf(f *Function, name string, prog *Program) *Function {
+ for _, i := range prog._Instances(f) {
+ if i.Name() == name {
+ return i
+ }
+ }
+ return nil
+}
+
+func tparams(f *Function) string {
+ tplist := f.TypeParams()
+ var tps []string
+ for i := 0; i < tplist.Len(); i++ {
+ tps = append(tps, tplist.At(i).String())
+ }
+ return fmt.Sprint(tps)
+}
+
+func targs(f *Function) string {
+ var tas []string
+ for _, ta := range f.TypeArgs() {
+ tas = append(tas, ta.String())
+ }
+ return fmt.Sprint(tas)
+}
+
+func changeTypeInstrs(b *BasicBlock) int {
+ cnt := 0
+ for _, i := range b.Instrs {
+ if _, ok := i.(*ChangeType); ok {
+ cnt++
+ }
+ }
+ return cnt
+}
+
+func TestInstanceUniqueness(t *testing.T) {
+ if !typeparams.Enabled {
+ return
+ }
+ const input = `
+package p
+
+func H[T any](t T) {
+ print(t)
+}
+
+func F[T any](t T) {
+ H[T](t)
+ H[T](t)
+ H[T](t)
+}
+
+func G[T any](t T) {
+ H[T](t)
+ H[T](t)
+}
+
+func Foo[T any, S any](t T, s S) {
+ Foo[S, T](s, t)
+ Foo[T, S](t, s)
+}
+`
+ lprog, err := loadProgram(input)
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ p := buildPackage(lprog, "p", SanityCheckFunctions)
+ prog := p.Prog
+
+ for _, test := range []struct {
+ orig string
+ instances string
+ }{
+ {"H", "[p.H[T] p.H[T]]"},
+ {"Foo", "[p.Foo[S T] p.Foo[T S]]"},
+ } {
+ t.Run(test.orig, func(t *testing.T) {
+ f := p.Members[test.orig].(*Function)
+ if f == nil {
+ t.Fatalf("origin function not found")
+ }
+
+ instances := prog._Instances(f)
+ sort.Slice(instances, func(i, j int) bool { return instances[i].Name() < instances[j].Name() })
+
+ if got := fmt.Sprintf("%v", instances); !reflect.DeepEqual(got, test.instances) {
+ t.Errorf("got %v instances, want %v", got, test.instances)
+ }
+ })
+ }
+}
+
+// instancesStr returns a sorted slice of string
+// representation of instances.
+func instancesStr(instances []*Function) []string {
+ var is []string
+ for _, i := range instances {
+ is = append(is, fmt.Sprintf("%v", i))
+ }
+ sort.Strings(is)
+ return is
+}
diff --git a/go/ssa/interp/interp.go b/go/ssa/interp/interp.go
index bf7862289..58cac4642 100644
--- a/go/ssa/interp/interp.go
+++ b/go/ssa/interp/interp.go
@@ -76,16 +76,16 @@ type methodSet map[string]*ssa.Function
// State shared between all interpreted goroutines.
type interpreter struct {
- osArgs []value // the value of os.Args
- prog *ssa.Program // the SSA program
- globals map[ssa.Value]*value // addresses of global variables (immutable)
- mode Mode // interpreter options
- reflectPackage *ssa.Package // the fake reflect package
- errorMethods methodSet // the method set of reflect.error, which implements the error interface.
- rtypeMethods methodSet // the method set of rtype, which implements the reflect.Type interface.
- runtimeErrorString types.Type // the runtime.errorString type
- sizes types.Sizes // the effective type-sizing function
- goroutines int32 // atomically updated
+ osArgs []value // the value of os.Args
+ prog *ssa.Program // the SSA program
+ globals map[*ssa.Global]*value // addresses of global variables (immutable)
+ mode Mode // interpreter options
+ reflectPackage *ssa.Package // the fake reflect package
+ errorMethods methodSet // the method set of reflect.error, which implements the error interface.
+ rtypeMethods methodSet // the method set of rtype, which implements the reflect.Type interface.
+ runtimeErrorString types.Type // the runtime.errorString type
+ sizes types.Sizes // the effective type-sizing function
+ goroutines int32 // atomically updated
}
type deferred struct {
@@ -131,7 +131,6 @@ func (fr *frame) get(key ssa.Value) value {
// runDefer runs a deferred call d.
// It always returns normally, but may set or clear fr.panic.
-//
func (fr *frame) runDefer(d *deferred) {
if fr.i.mode&EnableTracing != 0 {
fmt.Fprintf(os.Stderr, "%s: invoking deferred function call\n",
@@ -160,7 +159,6 @@ func (fr *frame) runDefer(d *deferred) {
//
// If there was no initial state of panic, or it was recovered from,
// runDefers returns normally.
-//
func (fr *frame) runDefers() {
for d := fr.defers; d != nil; d = d.tail {
fr.runDefer(d)
@@ -279,7 +277,7 @@ func visitInstr(fr *frame, instr ssa.Instruction) continuation {
}()
case *ssa.MakeChan:
- fr.env[instr] = make(chan value, asInt(fr.get(instr.Size)))
+ fr.env[instr] = make(chan value, asInt64(fr.get(instr.Size)))
case *ssa.Alloc:
var addr *value
@@ -294,17 +292,20 @@ func visitInstr(fr *frame, instr ssa.Instruction) continuation {
*addr = zero(deref(instr.Type()))
case *ssa.MakeSlice:
- slice := make([]value, asInt(fr.get(instr.Cap)))
+ slice := make([]value, asInt64(fr.get(instr.Cap)))
tElt := instr.Type().Underlying().(*types.Slice).Elem()
for i := range slice {
slice[i] = zero(tElt)
}
- fr.env[instr] = slice[:asInt(fr.get(instr.Len))]
+ fr.env[instr] = slice[:asInt64(fr.get(instr.Len))]
case *ssa.MakeMap:
- reserve := 0
+ var reserve int64
if instr.Reserve != nil {
- reserve = asInt(fr.get(instr.Reserve))
+ reserve = asInt64(fr.get(instr.Reserve))
+ }
+ if !fitsInt(reserve, fr.i.sizes) {
+ panic(fmt.Sprintf("ssa.MakeMap.Reserve value %d does not fit in int", reserve))
}
fr.env[instr] = makeMap(instr.Type().Underlying().(*types.Map).Key(), reserve)
@@ -325,15 +326,25 @@ func visitInstr(fr *frame, instr ssa.Instruction) continuation {
idx := fr.get(instr.Index)
switch x := x.(type) {
case []value:
- fr.env[instr] = &x[asInt(idx)]
+ fr.env[instr] = &x[asInt64(idx)]
case *value: // *array
- fr.env[instr] = &(*x).(array)[asInt(idx)]
+ fr.env[instr] = &(*x).(array)[asInt64(idx)]
default:
panic(fmt.Sprintf("unexpected x type in IndexAddr: %T", x))
}
case *ssa.Index:
- fr.env[instr] = fr.get(instr.X).(array)[asInt(fr.get(instr.Index))]
+ x := fr.get(instr.X)
+ idx := fr.get(instr.Index)
+
+ switch x := x.(type) {
+ case array:
+ fr.env[instr] = x[asInt64(idx)]
+ case string:
+ fr.env[instr] = x[asInt64(idx)]
+ default:
+ panic(fmt.Sprintf("unexpected x type in Index: %T", x))
+ }
case *ssa.Lookup:
fr.env[instr] = lookup(instr, fr.get(instr.X), fr.get(instr.Index))
@@ -426,7 +437,6 @@ func visitInstr(fr *frame, instr ssa.Instruction) continuation {
// prepareCall determines the function value and argument values for a
// function call in a Call, Go or Defer instruction, performing
// interface method lookup if needed.
-//
func prepareCall(fr *frame, call *ssa.CallCommon) (fn value, args []value) {
v := fr.get(call.Value)
if call.Method == nil {
@@ -455,7 +465,6 @@ func prepareCall(fr *frame, call *ssa.CallCommon) (fn value, args []value) {
// call interprets a call to a function (function, builtin or closure)
// fn with arguments args, returning its result.
// callpos is the position of the callsite.
-//
func call(i *interpreter, caller *frame, callpos token.Pos, fn value, args []value) value {
switch fn := fn.(type) {
case *ssa.Function:
@@ -481,7 +490,6 @@ func loc(fset *token.FileSet, pos token.Pos) string {
// callSSA interprets a call to function fn with arguments args,
// and lexical environment env, returning its result.
// callpos is the position of the callsite.
-//
func callSSA(i *interpreter, caller *frame, callpos token.Pos, fn *ssa.Function, args []value, env []value) value {
if i.mode&EnableTracing != 0 {
fset := fn.Prog.Fset
@@ -510,6 +518,12 @@ func callSSA(i *interpreter, caller *frame, callpos token.Pos, fn *ssa.Function,
panic("no code for function: " + name)
}
}
+
+ // generic function body?
+ if fn.TypeParams().Len() > 0 && len(fn.TypeArgs()) == 0 {
+ panic("interp requires ssa.BuilderMode to include InstantiateGenerics to execute generics")
+ }
+
fr.env = make(map[ssa.Value]value)
fr.block = fn.Blocks[0]
fr.locals = make([]value, len(fn.Locals))
@@ -548,7 +562,6 @@ func callSSA(i *interpreter, caller *frame, callpos token.Pos, fn *ssa.Function,
// After a recovered panic in a function with NRPs, fr.result is
// undefined and fr.block contains the block at which to resume
// control.
-//
func runFrame(fr *frame) {
defer func() {
if fr.block == nil {
@@ -641,10 +654,12 @@ func setGlobal(i *interpreter, pkg *ssa.Package, name string, v value) {
//
// The SSA program must include the "runtime" package.
//
+// Type parameterized functions must have been built with
+// InstantiateGenerics in the ssa.BuilderMode to be interpreted.
func Interpret(mainpkg *ssa.Package, mode Mode, sizes types.Sizes, filename string, args []string) (exitCode int) {
i := &interpreter{
prog: mainpkg.Prog,
- globals: make(map[ssa.Value]*value),
+ globals: make(map[*ssa.Global]*value),
mode: mode,
sizes: sizes,
goroutines: 1,
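// A minimal sketch of driving the interpreter per the Interpret doc above: generic
// code must be built with ssa.InstantiateGenerics, otherwise callSSA refuses to
// execute a parameterized body. The file name "hello.go" is a placeholder for any
// runnable main package; target sizes come from types.SizesFor.
package main

import (
	"fmt"
	"go/types"

	"golang.org/x/tools/go/loader"
	"golang.org/x/tools/go/ssa"
	"golang.org/x/tools/go/ssa/interp"
	"golang.org/x/tools/go/ssa/ssautil"
)

func main() {
	var conf loader.Config
	if _, err := conf.FromArgs([]string{"hello.go"}, false); err != nil {
		fmt.Println(err)
		return
	}
	iprog, err := conf.Load()
	if err != nil {
		fmt.Println(err)
		return
	}

	// InstantiateGenerics monomorphizes generic bodies so interp can run them;
	// SanityCheckFunctions adds cheap invariant checks while building.
	prog := ssautil.CreateProgram(iprog, ssa.InstantiateGenerics|ssa.SanityCheckFunctions)
	prog.Build()

	mainPkg := prog.Package(iprog.Created[0].Pkg)
	exit := interp.Interpret(mainPkg, 0, types.SizesFor("gc", "amd64"), "hello.go", nil)
	fmt.Println("exit code:", exit)
}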
diff --git a/go/ssa/interp/interp_go120_test.go b/go/ssa/interp/interp_go120_test.go
new file mode 100644
index 000000000..d8eb2c213
--- /dev/null
+++ b/go/ssa/interp/interp_go120_test.go
@@ -0,0 +1,12 @@
+// Copyright 2021 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+//go:build go1.20
+// +build go1.20
+
+package interp_test
+
+func init() {
+ testdataTests = append(testdataTests, "slice2array.go")
+}
diff --git a/go/ssa/interp/interp_test.go b/go/ssa/interp/interp_test.go
index 1b43742c8..70ddceec7 100644
--- a/go/ssa/interp/interp_test.go
+++ b/go/ssa/interp/interp_test.go
@@ -31,6 +31,7 @@ import (
"golang.org/x/tools/go/ssa"
"golang.org/x/tools/go/ssa/interp"
"golang.org/x/tools/go/ssa/ssautil"
+ "golang.org/x/tools/internal/typeparams"
)
// Each line contains a space-separated list of $GOROOT/test/
@@ -111,6 +112,7 @@ var testdataTests = []string{
"complit.go",
"convert.go",
"coverage.go",
+ "deepequal.go",
"defer.go",
"fieldprom.go",
"ifaceconv.go",
@@ -122,6 +124,24 @@ var testdataTests = []string{
"recover.go",
"reflect.go",
"static.go",
+ "width32.go",
+
+ "fixedbugs/issue52342.go",
+}
+
+func init() {
+ if typeparams.Enabled {
+ testdataTests = append(testdataTests, "fixedbugs/issue52835.go")
+ testdataTests = append(testdataTests, "fixedbugs/issue55086.go")
+ testdataTests = append(testdataTests, "typeassert.go")
+ testdataTests = append(testdataTests, "zeros.go")
+ }
+}
+
+// Specific GOARCH to use for a test case in go.tools/go/ssa/interp/testdata/.
+// Defaults to amd64 otherwise.
+var testdataArchs = map[string]string{
+ "width32.go": "386",
}
func run(t *testing.T, input string) bool {
@@ -139,6 +159,9 @@ func run(t *testing.T, input string) bool {
ctx.GOROOT = "testdata" // fake goroot
ctx.GOOS = "linux"
ctx.GOARCH = "amd64"
+ if arch, ok := testdataArchs[filepath.Base(input)]; ok {
+ ctx.GOARCH = arch
+ }
conf := loader.Config{Build: &ctx}
if _, err := conf.FromArgs([]string{input}, true); err != nil {
@@ -169,7 +192,9 @@ func run(t *testing.T, input string) bool {
return false
}
- prog := ssautil.CreateProgram(iprog, ssa.SanityCheckFunctions)
+ bmode := ssa.InstantiateGenerics | ssa.SanityCheckFunctions
+ // bmode |= ssa.PrintFunctions // enable for debugging
+ prog := ssautil.CreateProgram(iprog, bmode)
prog.Build()
mainPkg := prog.Package(iprog.Created[0].Pkg)
@@ -179,8 +204,12 @@ func run(t *testing.T, input string) bool {
interp.CapturedOutput = new(bytes.Buffer)
+ sizes := types.SizesFor("gc", ctx.GOARCH)
hint = fmt.Sprintf("To trace execution, run:\n%% go build golang.org/x/tools/cmd/ssadump && ./ssadump -build=C -test -run --interp=T %s\n", input)
- exitCode := interp.Interpret(mainPkg, 0, &types.StdSizes{WordSize: 8, MaxAlign: 8}, input, []string{})
+ var imode interp.Mode // default mode
+ // imode |= interp.DisableRecover // enable for debugging
+ // imode |= interp.EnableTracing // enable for debugging
+ exitCode := interp.Interpret(mainPkg, imode, sizes, input, []string{})
if exitCode != 0 {
t.Fatalf("interpreting %s: exit code was %d", input, exitCode)
}
@@ -213,7 +242,6 @@ func TestTestdataFiles(t *testing.T) {
if err != nil {
log.Fatal(err)
}
-
var failures []string
for _, input := range testdataTests {
if !run(t, filepath.Join(cwd, "testdata", input)) {
@@ -234,3 +262,66 @@ func TestGorootTest(t *testing.T) {
}
printFailures(failures)
}
+
+// TestTypeparamTest runs the interpreter on runnable examples
+// in $GOROOT/test/typeparam/*.go.
+func TestTypeparamTest(t *testing.T) {
+ if !typeparams.Enabled {
+ return
+ }
+
+ // Skip known failures for the given reason.
+ // TODO(taking): Address these.
+ skip := map[string]string{
+ "chans.go": "interp tests do not support runtime.SetFinalizer",
+ "issue23536.go": "unknown reason",
+ "issue376214.go": "unknown issue with variadic cast on bytes",
+ "issue48042.go": "interp tests do not handle reflect.Value.SetInt",
+ "issue47716.go": "interp tests do not handle unsafe.Sizeof",
+ "issue50419.go": "interp tests do not handle dispatch to String() correctly",
+ "issue51733.go": "interp does not handle unsafe casts",
+ "ordered.go": "math.NaN() comparisons not being handled correctly",
+ "orderedmap.go": "interp tests do not support runtime.SetFinalizer",
+ "stringer.go": "unknown reason",
+ "issue48317.go": "interp tests do not support encoding/json",
+ "issue48318.go": "interp tests do not support encoding/json",
+ "issue58513.go": "interp tests do not support runtime.Caller",
+ }
+ // Collect all of the .go files in dir that are runnable.
+ dir := filepath.Join(build.Default.GOROOT, "test", "typeparam")
+ list, err := os.ReadDir(dir)
+ if err != nil {
+ t.Fatal(err)
+ }
+ var inputs []string
+ for _, entry := range list {
+ if entry.IsDir() || !strings.HasSuffix(entry.Name(), ".go") {
+ continue // Consider standalone go files.
+ }
+ if reason := skip[entry.Name()]; reason != "" {
+ t.Logf("skipping %q due to %s.", entry.Name(), reason)
+ continue
+ }
+ input := filepath.Join(dir, entry.Name())
+ src, err := os.ReadFile(input)
+ if err != nil {
+ t.Fatal(err)
+ }
+ // Only build test files that can be compiled, or compiled and run.
+ if bytes.HasPrefix(src, []byte("// run")) && !bytes.HasPrefix(src, []byte("// rundir")) {
+ inputs = append(inputs, input)
+ } else {
+ t.Logf("Not a `// run` file: %s", entry.Name())
+ }
+ }
+
+ var failures []string
+ for _, input := range inputs {
+ t.Log("running", input)
+ if !run(t, input) {
+ failures = append(failures, input)
+ }
+ }
+ printFailures(failures)
+}
diff --git a/go/ssa/interp/map.go b/go/ssa/interp/map.go
index 92ccf9034..f5d5f230b 100644
--- a/go/ssa/interp/map.go
+++ b/go/ssa/interp/map.go
@@ -38,7 +38,7 @@ type hashmap struct {
// makeMap returns an empty initialized map of key type kt,
// preallocating space for reserve elements.
-func makeMap(kt types.Type, reserve int) value {
+func makeMap(kt types.Type, reserve int64) value {
if usesBuiltinMap(kt) {
return make(map[value]value, reserve)
}
diff --git a/go/ssa/interp/ops.go b/go/ssa/interp/ops.go
index 3bc6a4e32..39830bc8f 100644
--- a/go/ssa/interp/ops.go
+++ b/go/ssa/interp/ops.go
@@ -34,9 +34,10 @@ type exitPanic int
// constValue returns the value of the constant with the
// dynamic type tag appropriate for c.Type().
func constValue(c *ssa.Const) value {
- if c.IsNil() {
- return zero(c.Type()) // typed nil
+ if c.Value == nil {
+ return zero(c.Type()) // typed zero
}
+ // c is not a type parameter, so its underlying type is basic.
if t, ok := c.Type().Underlying().(*types.Basic); ok {
// TODO(adonovan): eliminate untyped constants from SSA form.
@@ -87,34 +88,46 @@ func constValue(c *ssa.Const) value {
panic(fmt.Sprintf("constValue: %s", c))
}
-// asInt converts x, which must be an integer, to an int suitable for
-// use as a slice or array index or operand to make().
-func asInt(x value) int {
+// fitsInt returns true if x fits in type int according to sizes.
+func fitsInt(x int64, sizes types.Sizes) bool {
+ intSize := sizes.Sizeof(types.Typ[types.Int])
+ if intSize < sizes.Sizeof(types.Typ[types.Int64]) {
+ maxInt := int64(1)<<(intSize*8-1) - 1
+ minInt := -int64(1) << (intSize*8 - 1)
+ return minInt <= x && x <= maxInt
+ }
+ return true
+}
+
+// asInt64 converts x, which must be an integer, to an int64.
+//
+// Callers that need a value directly usable as an int should combine this with fitsInt().
+func asInt64(x value) int64 {
switch x := x.(type) {
case int:
- return x
+ return int64(x)
case int8:
- return int(x)
+ return int64(x)
case int16:
- return int(x)
+ return int64(x)
case int32:
- return int(x)
+ return int64(x)
case int64:
- return int(x)
+ return x
case uint:
- return int(x)
+ return int64(x)
case uint8:
- return int(x)
+ return int64(x)
case uint16:
- return int(x)
+ return int64(x)
case uint32:
- return int(x)
+ return int64(x)
case uint64:
- return int(x)
+ return int64(x)
case uintptr:
- return int(x)
+ return int64(x)
}
- panic(fmt.Sprintf("cannot convert %T to int", x))
+ panic(fmt.Sprintf("cannot convert %T to int64", x))
}
// asUint64 converts x, which must be an unsigned integer, to a uint64
@@ -268,19 +281,19 @@ func slice(x, lo, hi, max value) value {
Cap = cap(a)
}
- l := 0
+ l := int64(0)
if lo != nil {
- l = asInt(lo)
+ l = asInt64(lo)
}
- h := Len
+ h := int64(Len)
if hi != nil {
- h = asInt(hi)
+ h = asInt64(hi)
}
- m := Cap
+ m := int64(Cap)
if max != nil {
- m = asInt(max)
+ m = asInt64(max)
}
switch x := x.(type) {
@@ -295,7 +308,7 @@ func slice(x, lo, hi, max value) value {
panic(fmt.Sprintf("slice: unexpected X type: %T", x))
}
-// lookup returns x[idx] where x is a map or string.
+// lookup returns x[idx] where x is a map.
func lookup(instr *ssa.Lookup, x, idx value) value {
switch x := x.(type) { // map or string
case map[value]value, *hashmap:
@@ -315,8 +328,6 @@ func lookup(instr *ssa.Lookup, x, idx value) value {
v = tuple{v, ok}
}
return v
- case string:
- return x[asInt(idx)]
}
panic(fmt.Sprintf("unexpected x type in Lookup: %T", x))
}
@@ -324,7 +335,6 @@ func lookup(instr *ssa.Lookup, x, idx value) value {
// binop implements all arithmetic and logical binary operators for
// numeric datatypes and strings. Both operands must have identical
// dynamic type.
-//
func binop(op token.Token, t types.Type, x, y value) value {
switch op {
case token.ADD:
@@ -798,7 +808,6 @@ func binop(op token.Token, t types.Type, x, y value) value {
// appropriate for type t.
// If t is a reference type, at most one of x or y may be a nil value
// of that type.
-//
func eqnil(t types.Type, x, y value) bool {
switch t.Underlying().(type) {
case *types.Map, *types.Signature, *types.Slice:
@@ -907,7 +916,6 @@ func unop(instr *ssa.UnOp, x value) value {
// typeAssert checks whether dynamic type of itf is instr.AssertedType.
// It returns the extracted value on success, and panics on failure,
// unless instr.CommaOk, in which case it always returns a "value,ok" tuple.
-//
func typeAssert(i *interpreter, instr *ssa.TypeAssert, itf iface) value {
var v value
err := ""
@@ -924,6 +932,8 @@ func typeAssert(i *interpreter, instr *ssa.TypeAssert, itf iface) value {
} else {
err = fmt.Sprintf("interface conversion: interface is %s, not %s", itf.t, instr.AssertedType)
}
+ // Note: if instr.Underlying==true ever becomes reachable from interp check that
+ // types.Identical(itf.t.Underlying(), instr.AssertedType)
if err != "" {
if !instr.CommaOk {
@@ -944,7 +954,6 @@ func typeAssert(i *interpreter, instr *ssa.TypeAssert, itf iface) value {
// failure if "BUG" appears in the combined stdout/stderr output, even
// if it exits zero. This is a global variable shared by all
// interpreters in the same process.)
-//
var CapturedOutput *bytes.Buffer
var capturedOutputMu sync.Mutex
@@ -1117,10 +1126,11 @@ func rangeIter(x value, t types.Type) iter {
// widen widens a basic typed value x to the widest type of its
// category, one of:
-// bool, int64, uint64, float64, complex128, string.
+//
+// bool, int64, uint64, float64, complex128, string.
+//
// This is inefficient but reduces the size of the cross-product of
// cases we have to consider.
-//
func widen(x value) value {
switch y := x.(type) {
case bool, int64, uint64, float64, complex128, string, unsafe.Pointer:
@@ -1154,7 +1164,6 @@ func widen(x value) value {
// conv converts the value x of type t_src to type t_dst and returns
// the result.
// Possible cases are described with the ssa.Convert operator.
-//
func conv(t_dst, t_src types.Type, x value) value {
ut_src := t_src.Underlying()
ut_dst := t_dst.Underlying()
@@ -1388,18 +1397,15 @@ func conv(t_dst, t_src types.Type, x value) value {
// sliceToArrayPointer converts the value x of type slice to type t_dst
// a pointer to array and returns the result.
func sliceToArrayPointer(t_dst, t_src types.Type, x value) value {
- utSrc := t_src.Underlying()
- utDst := t_dst.Underlying()
-
- if _, ok := utSrc.(*types.Slice); ok {
- if utSrc, ok := utDst.(*types.Pointer); ok {
- if arr, ok := utSrc.Elem().(*types.Array); ok {
+ if _, ok := t_src.Underlying().(*types.Slice); ok {
+ if ptr, ok := t_dst.Underlying().(*types.Pointer); ok {
+ if arr, ok := ptr.Elem().Underlying().(*types.Array); ok {
x := x.([]value)
if arr.Len() > int64(len(x)) {
panic("array length is greater than slice length")
}
if x == nil {
- return zero(utSrc)
+ return zero(t_dst)
}
v := value(array(x[:arr.Len()]))
return &v
@@ -1413,7 +1419,6 @@ func sliceToArrayPointer(t_dst, t_src types.Type, x value) value {
// checkInterface checks that the method set of x implements the
// interface itype.
// On success it returns "", on failure, an error message.
-//
func checkInterface(i *interpreter, itype *types.Interface, x iface) string {
if meth, _ := types.MissingMethod(x.t, itype, true); meth != nil {
return fmt.Sprintf("interface conversion: %v is not %v: missing method %s",
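// A standalone sketch of the asInt64/fitsInt pattern above: operands are first
// widened to int64, and only narrowed to the host int after confirming they fit
// the *target* int width reported by types.Sizes (Sizeof returns bytes).
// fitsTargetInt is an illustrative reimplementation, not the diff's function.
package main

import (
	"fmt"
	"go/types"
)

// fitsTargetInt reports whether x is representable in the target's int type.
func fitsTargetInt(x int64, sizes types.Sizes) bool {
	intBytes := sizes.Sizeof(types.Typ[types.Int]) // 4 on 386, 8 on amd64
	if intBytes < sizes.Sizeof(types.Typ[types.Int64]) {
		maxInt := int64(1)<<(intBytes*8-1) - 1
		minInt := -int64(1) << (intBytes*8 - 1)
		return minInt <= x && x <= maxInt
	}
	return true
}

func main() {
	const tooBigFor32 = 1<<33 - 1 // same constant as testdata/width32.go
	for _, arch := range []string{"386", "amd64"} {
		sizes := types.SizesFor("gc", arch)
		fmt.Printf("%s: %d fits in int = %v\n", arch, int64(tooBigFor32), fitsTargetInt(tooBigFor32, sizes))
	}
	// Prints false for 386 and true for amd64, mirroring the MakeMap.Reserve check in interp.go.
}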
diff --git a/go/ssa/interp/reflect.go b/go/ssa/interp/reflect.go
index 0a4465b0b..9f2f9e1e4 100644
--- a/go/ssa/interp/reflect.go
+++ b/go/ssa/interp/reflect.go
@@ -407,7 +407,11 @@ func ext۰reflect۰Value۰Elem(fr *frame, args []value) value {
case iface:
return makeReflectValue(x.t, x.v)
case *value:
- return makeReflectValue(rV2T(args[0]).t.Underlying().(*types.Pointer).Elem(), *x)
+ var v value
+ if x != nil {
+ v = *x
+ }
+ return makeReflectValue(rV2T(args[0]).t.Underlying().(*types.Pointer).Elem(), v)
default:
panic(fmt.Sprintf("reflect.(Value).Elem(%T)", x))
}
diff --git a/go/ssa/interp/testdata/boundmeth.go b/go/ssa/interp/testdata/boundmeth.go
index 69937f9d3..47b940685 100644
--- a/go/ssa/interp/testdata/boundmeth.go
+++ b/go/ssa/interp/testdata/boundmeth.go
@@ -123,7 +123,8 @@ func nilInterfaceMethodValue() {
r := fmt.Sprint(recover())
// runtime panic string varies across toolchains
if r != "interface conversion: interface is nil, not error" &&
- r != "runtime error: invalid memory address or nil pointer dereference" {
+ r != "runtime error: invalid memory address or nil pointer dereference" &&
+ r != "method value: interface is nil" {
panic("want runtime panic from nil interface method value, got " + r)
}
}()
diff --git a/go/ssa/interp/testdata/convert.go b/go/ssa/interp/testdata/convert.go
index 0dcf13bdd..76310405f 100644
--- a/go/ssa/interp/testdata/convert.go
+++ b/go/ssa/interp/testdata/convert.go
@@ -22,6 +22,15 @@ func main() {
},
"runtime error: negative shift amount",
)
+ wantPanic(
+ func() {
+ const maxInt32 = 1<<31 - 1
+ var idx int64 = maxInt32*2 + 8
+ x := make([]int, 16)
+ _ = x[idx]
+ },
+ "runtime error: runtime error: index out of range [4294967302] with length 16",
+ )
}
func wantPanic(fn func(), s string) {
diff --git a/go/ssa/interp/testdata/deepequal.go b/go/ssa/interp/testdata/deepequal.go
new file mode 100644
index 000000000..4fad2d657
--- /dev/null
+++ b/go/ssa/interp/testdata/deepequal.go
@@ -0,0 +1,93 @@
+// This interpreter test is designed to test the test copy of DeepEqual.
+//
+// Validate this file with 'go run' after editing.
+
+package main
+
+import "reflect"
+
+func assert(cond bool) {
+ if !cond {
+ panic("failed")
+ }
+}
+
+type X int
+type Y struct {
+ y *Y
+ z [3]int
+}
+
+var (
+ a = []int{0, 1, 2, 3}
+ b = []X{0, 1, 2, 3}
+ c = map[int]string{0: "zero", 1: "one"}
+ d = map[X]string{0: "zero", 1: "one"}
+ e = &Y{}
+ f = (*Y)(nil)
+ g = &Y{y: e}
+ h *Y
+)
+
+func init() {
+ h = &Y{} // h->h
+ h.y = h
+}
+
+func main() {
+ assert(reflect.DeepEqual(nil, nil))
+ assert(reflect.DeepEqual((*int)(nil), (*int)(nil)))
+ assert(!reflect.DeepEqual(nil, (*int)(nil)))
+
+ assert(reflect.DeepEqual(0, 0))
+ assert(!reflect.DeepEqual(0, int64(0)))
+
+ assert(!reflect.DeepEqual("", 0))
+
+ assert(reflect.DeepEqual(a, []int{0, 1, 2, 3}))
+ assert(!reflect.DeepEqual(a, []int{0, 1, 2}))
+ assert(!reflect.DeepEqual(a, []int{0, 1, 0, 3}))
+
+ assert(reflect.DeepEqual(b, []X{0, 1, 2, 3}))
+ assert(!reflect.DeepEqual(b, []X{0, 1, 0, 3}))
+
+ assert(reflect.DeepEqual(c, map[int]string{0: "zero", 1: "one"}))
+ assert(!reflect.DeepEqual(c, map[int]string{0: "zero", 1: "one", 2: "two"}))
+ assert(!reflect.DeepEqual(c, map[int]string{1: "one", 2: "two"}))
+ assert(!reflect.DeepEqual(c, map[int]string{1: "one"}))
+
+ assert(reflect.DeepEqual(d, map[X]string{0: "zero", 1: "one"}))
+ assert(!reflect.DeepEqual(d, map[int]string{0: "zero", 1: "one"}))
+
+ assert(reflect.DeepEqual(e, &Y{}))
+ assert(reflect.DeepEqual(e, &Y{z: [3]int{0, 0, 0}}))
+ assert(!reflect.DeepEqual(e, &Y{z: [3]int{0, 1, 0}}))
+
+ assert(reflect.DeepEqual(f, (*Y)(nil)))
+ assert(!reflect.DeepEqual(f, nil))
+
+ // eq_h -> eq_h. Pointer structure and elements are equal so DeepEqual.
+ eq_h := &Y{}
+ eq_h.y = eq_h
+ assert(reflect.DeepEqual(h, eq_h))
+
+ // deepeq_h->h->h. Pointed to elem of (deepeq_h, h) are (h,h). (h,h) are deep equal so h and deepeq_h are DeepEqual.
+ deepeq_h := &Y{}
+ deepeq_h.y = h
+ assert(reflect.DeepEqual(h, deepeq_h))
+
+ distinct := []interface{}{a, b, c, d, e, f, g, h}
+ for x := range distinct {
+ for y := range distinct {
+ assert((x == y) == reflect.DeepEqual(distinct[x], distinct[y]))
+ }
+ }
+
+ // anonymous struct types.
+ assert(reflect.DeepEqual(struct{}{}, struct{}{}))
+ assert(reflect.DeepEqual(struct{ x int }{1}, struct{ x int }{1}))
+ assert(!reflect.DeepEqual(struct{ x int }{}, struct{ x int }{5}))
+ assert(!reflect.DeepEqual(struct{ x, y int }{0, 1}, struct{ x int }{0}))
+ assert(reflect.DeepEqual(struct{ x, y int }{2, 3}, struct{ x, y int }{2, 3}))
+ assert(!reflect.DeepEqual(struct{ x, y int }{4, 5}, struct{ x, y int }{4, 6}))
+}
diff --git a/go/ssa/interp/testdata/fixedbugs/issue52342.go b/go/ssa/interp/testdata/fixedbugs/issue52342.go
new file mode 100644
index 000000000..2e1cc63cf
--- /dev/null
+++ b/go/ssa/interp/testdata/fixedbugs/issue52342.go
@@ -0,0 +1,17 @@
+package main
+
+func main() {
+ var d byte
+
+ d = 1
+ d <<= 256
+ if d != 0 {
+ panic(d)
+ }
+
+ d = 1
+ d >>= 256
+ if d != 0 {
+ panic(d)
+ }
+}
diff --git a/go/ssa/interp/testdata/fixedbugs/issue52835.go b/go/ssa/interp/testdata/fixedbugs/issue52835.go
new file mode 100644
index 000000000..f1d99abb7
--- /dev/null
+++ b/go/ssa/interp/testdata/fixedbugs/issue52835.go
@@ -0,0 +1,27 @@
+package main
+
+var called bool
+
+type I interface {
+ Foo()
+}
+
+type A struct{}
+
+func (a A) Foo() {
+ called = true
+}
+
+func lambda[X I]() func() func() {
+ return func() func() {
+ var x X
+ return x.Foo
+ }
+}
+
+func main() {
+ lambda[A]()()()
+ if !called {
+ panic(called)
+ }
+}
diff --git a/go/ssa/interp/testdata/fixedbugs/issue55086.go b/go/ssa/interp/testdata/fixedbugs/issue55086.go
new file mode 100644
index 000000000..84c81e91a
--- /dev/null
+++ b/go/ssa/interp/testdata/fixedbugs/issue55086.go
@@ -0,0 +1,132 @@
+// Copyright 2022 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package main
+
+func a() (r string) {
+ s := "initial"
+ var p *struct{ i int }
+ defer func() {
+ recover()
+ r = s
+ }()
+
+ s, p.i = "set", 2 // s must be set before p.i panics
+ return "unreachable"
+}
+
+func b() (r string) {
+ s := "initial"
+ fn := func() []int { panic("") }
+ defer func() {
+ recover()
+ r = s
+ }()
+
+ s, fn()[0] = "set", 2 // fn() panics before any assignment occurs
+ return "unreachable"
+}
+
+func c() (r string) {
+ s := "initial"
+ var p map[int]int
+ defer func() {
+ recover()
+ r = s
+ }()
+
+ s, p[0] = "set", 2 // s must be set before p[0] index panics
+ return "unreachable"
+}
+
+func d() (r string) {
+ s := "initial"
+ var p map[int]int
+ defer func() {
+ recover()
+ r = s
+ }()
+ fn := func() int { panic("") }
+
+ s, p[0] = "set", fn() // fn() panics before s is set
+ return "unreachable"
+}
+
+func e() (r string) {
+ s := "initial"
+ p := map[int]int{}
+ defer func() {
+ recover()
+ r = s
+ }()
+ fn := func() int { panic("") }
+
+ s, p[fn()] = "set", 0 // fn() panics before any assignment occurs
+ return "unreachable"
+}
+
+func f() (r string) {
+ s := "initial"
+ p := []int{}
+ defer func() {
+ recover()
+ r = s
+ }()
+
+ s, p[1] = "set", 0 // p[1] panics after s is set
+ return "unreachable"
+}
+
+func g() (r string) {
+ s := "initial"
+ p := map[any]any{}
+ defer func() {
+ recover()
+ r = s
+ }()
+ var i any = func() {}
+ s, p[i] = "set", 0 // p[i] panics after s is set
+ return "unreachable"
+}
+
+func h() (r string) {
+ fail := false
+ defer func() {
+ recover()
+ if fail {
+ r = "fail"
+ } else {
+ r = "success"
+ }
+ }()
+
+ type T struct{ f int }
+ var p *struct{ *T }
+
+ // The implicit "p.T" operand should be evaluated in phase 1 (and panic),
+ // before the "fail = true" assignment in phase 2.
+ fail, p.f = true, 0
+ return "unreachable"
+}
+
+func main() {
+ for _, test := range []struct {
+ fn func() string
+ want string
+ desc string
+ }{
+ {a, "set", "s must be set before p.i panics"},
+ {b, "initial", "fn() panics before s is set"},
+ {c, "set", "s must be set before p[0] index panics"},
+ {d, "initial", "fn() panics before s is set"},
+ {e, "initial", "fn() panics before s is set"},
+ {f, "set", "p[1] panics after s is set"},
+ {g, "set", "p[i] panics after s is set"},
+ {h, "success", "p.T panics before fail is set"},
+ } {
+ if test.fn() != test.want {
+ panic(test.desc)
+ }
+ }
+}
diff --git a/go/ssa/interp/testdata/slice2array.go b/go/ssa/interp/testdata/slice2array.go
new file mode 100644
index 000000000..84e6b7330
--- /dev/null
+++ b/go/ssa/interp/testdata/slice2array.go
@@ -0,0 +1,92 @@
+// Copyright 2022 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Test for slice to array conversion introduced in go1.20
+// See: https://tip.golang.org/ref/spec#Conversions_from_slice_to_array_pointer
+
+package main
+
+func main() {
+ s := make([]byte, 3, 4)
+ s[0], s[1], s[2] = 2, 3, 5
+ a := ([2]byte)(s)
+ s[0] = 7
+
+ if a != [2]byte{2, 3} {
+ panic("converted from non-nil slice to array")
+ }
+
+ {
+ var s []int
+ a := ([0]int)(s)
+ if a != [0]int{} {
+ panic("zero len array is not equal")
+ }
+ }
+
+ if emptyToEmptyDoesNotPanic() {
+ panic("no panic expected from emptyToEmptyDoesNotPanic()")
+ }
+ if !threeToFourDoesPanic() {
+ panic("panic expected from threeToFourDoesPanic()")
+ }
+
+ if !fourPanicsWhileOneDoesNot[[4]int]() {
+ panic("panic expected from fourPanicsWhileOneDoesNot[[4]int]()")
+ }
+ if fourPanicsWhileOneDoesNot[[1]int]() {
+ panic("no panic expected from fourPanicsWhileOneDoesNot[[1]int]()")
+ }
+
+ if !fourPanicsWhileZeroDoesNot[[4]int]() {
+ panic("panic expected from fourPanicsWhileZeroDoesNot[[4]int]()")
+ }
+ if fourPanicsWhileZeroDoesNot[[0]int]() {
+ panic("no panic expected from fourPanicsWhileZeroDoesNot[[0]int]()")
+ }
+}
+
+func emptyToEmptyDoesNotPanic() (raised bool) {
+ defer func() {
+ if e := recover(); e != nil {
+ raised = true
+ }
+ }()
+ var s []int
+ _ = ([0]int)(s)
+ return false
+}
+
+func threeToFourDoesPanic() (raised bool) {
+ defer func() {
+ if e := recover(); e != nil {
+ raised = true
+ }
+ }()
+ s := make([]int, 3, 5)
+ _ = ([4]int)(s)
+ return false
+}
+
+func fourPanicsWhileOneDoesNot[T [1]int | [4]int]() (raised bool) {
+ defer func() {
+ if e := recover(); e != nil {
+ raised = true
+ }
+ }()
+ s := make([]int, 3, 5)
+ _ = T(s)
+ return false
+}
+
+func fourPanicsWhileZeroDoesNot[T [0]int | [4]int]() (raised bool) {
+ defer func() {
+ if e := recover(); e != nil {
+ raised = true
+ }
+ }()
+ var s []int
+ _ = T(s)
+ return false
+}
diff --git a/go/ssa/interp/testdata/slice2arrayptr.go b/go/ssa/interp/testdata/slice2arrayptr.go
index ff2d9b55c..d9d8804d3 100644
--- a/go/ssa/interp/testdata/slice2arrayptr.go
+++ b/go/ssa/interp/testdata/slice2arrayptr.go
@@ -32,6 +32,8 @@ func main() {
},
"runtime error: array length is greater than slice length",
)
+
+ f()
}
type arr [2]int
diff --git a/go/ssa/interp/testdata/src/encoding/encoding.go b/go/ssa/interp/testdata/src/encoding/encoding.go
new file mode 100644
index 000000000..73e9de494
--- /dev/null
+++ b/go/ssa/interp/testdata/src/encoding/encoding.go
@@ -0,0 +1,15 @@
+package encoding
+
+type BinaryMarshaler interface {
+ MarshalBinary() (data []byte, err error)
+}
+type BinaryUnmarshaler interface {
+ UnmarshalBinary(data []byte) error
+}
+
+type TextMarshaler interface {
+ MarshalText() (text []byte, err error)
+}
+type TextUnmarshaler interface {
+ UnmarshalText(text []byte) error
+}
diff --git a/go/ssa/interp/testdata/src/log/log.go b/go/ssa/interp/testdata/src/log/log.go
index 8897c1d21..9a57e8c1c 100644
--- a/go/ssa/interp/testdata/src/log/log.go
+++ b/go/ssa/interp/testdata/src/log/log.go
@@ -8,8 +8,16 @@ import (
func Println(v ...interface{}) {
fmt.Println(v...)
}
+func Printf(format string, v ...interface{}) {
+ fmt.Printf(format, v...)
+}
func Fatalln(v ...interface{}) {
Println(v...)
os.Exit(1)
}
+
+func Fatalf(format string, v ...interface{}) {
+ Printf(format, v...)
+ os.Exit(1)
+}
diff --git a/go/ssa/interp/testdata/src/reflect/deepequal.go b/go/ssa/interp/testdata/src/reflect/deepequal.go
new file mode 100644
index 000000000..a48e4dafa
--- /dev/null
+++ b/go/ssa/interp/testdata/src/reflect/deepequal.go
@@ -0,0 +1,109 @@
+package reflect
+
+// Not an actual implementation of DeepEqual. This is a model that supports
+// the bare minimum needed to get through testing interp.
+//
+// Does not handle cycles.
+//
+// Note: unclear if reflect.go can support this.
+func DeepEqual(x, y interface{}) bool {
+ if x == nil || y == nil {
+ return x == y
+ }
+ v1 := ValueOf(x)
+ v2 := ValueOf(y)
+
+ return deepValueEqual(v1, v2, make(map[visit]bool))
+}
+
+// Key for the visitedMap in deepValueEqual.
+type visit struct {
+ a1, a2 uintptr
+ typ Type
+}
+
+func deepValueEqual(v1, v2 Value, visited map[visit]bool) bool {
+ if !v1.IsValid() || !v2.IsValid() {
+ return v1.IsValid() == v2.IsValid()
+ }
+ if v1.Type() != v2.Type() {
+ return false
+ }
+
+ // Short circuit on reference types that can lead to cycles in comparison.
+ switch v1.Kind() {
+ case Pointer, Map, Slice, Interface:
+ k := visit{v1.Pointer(), v2.Pointer(), v1.Type()} // Not safe for moving GC.
+ if visited[k] {
+ // The comparison algorithm assumes that all checks in progress are true when it reencounters them.
+ return true
+ }
+ visited[k] = true
+ }
+
+ switch v1.Kind() {
+ case Array:
+ for i := 0; i < v1.Len(); i++ {
+ if !deepValueEqual(v1.Index(i), v2.Index(i), visited) {
+ return false
+ }
+ }
+ return true
+ case Slice:
+ if v1.IsNil() != v2.IsNil() {
+ return false
+ }
+ if v1.Len() != v2.Len() {
+ return false
+ }
+ if v1.Pointer() == v2.Pointer() {
+ return true
+ }
+ for i := 0; i < v1.Len(); i++ {
+ if !deepValueEqual(v1.Index(i), v2.Index(i), visited) {
+ return false
+ }
+ }
+ return true
+ case Interface:
+ if v1.IsNil() || v2.IsNil() {
+ return v1.IsNil() == v2.IsNil()
+ }
+ return deepValueEqual(v1.Elem(), v2.Elem(), visited)
+ case Ptr:
+ if v1.Pointer() == v2.Pointer() {
+ return true
+ }
+ return deepValueEqual(v1.Elem(), v2.Elem(), visited)
+ case Struct:
+ for i, n := 0, v1.NumField(); i < n; i++ {
+ if !deepValueEqual(v1.Field(i), v2.Field(i), visited) {
+ return false
+ }
+ }
+ return true
+ case Map:
+ if v1.IsNil() != v2.IsNil() {
+ return false
+ }
+ if v1.Len() != v2.Len() {
+ return false
+ }
+ if v1.Pointer() == v2.Pointer() {
+ return true
+ }
+ for _, k := range v1.MapKeys() {
+ val1 := v1.MapIndex(k)
+ val2 := v2.MapIndex(k)
+ if !val1.IsValid() || !val2.IsValid() || !deepValueEqual(val1, val2, visited) {
+ return false
+ }
+ }
+ return true
+ case Func:
+ return v1.IsNil() && v2.IsNil()
+ default:
+ // Normal equality suffices
+ return v1.Interface() == v2.Interface() // try interface comparison as a fallback.
+ }
+}
diff --git a/go/ssa/interp/testdata/src/reflect/reflect.go b/go/ssa/interp/testdata/src/reflect/reflect.go
index 8a23d272f..207e7dcfd 100644
--- a/go/ssa/interp/testdata/src/reflect/reflect.go
+++ b/go/ssa/interp/testdata/src/reflect/reflect.go
@@ -11,9 +11,20 @@ type Value struct {
func (Value) String() string
-func (Value) Elem() string
+func (Value) Elem() Value
func (Value) Kind() Kind
func (Value) Int() int64
+func (Value) IsValid() bool
+func (Value) IsNil() bool
+func (Value) Len() int
+func (Value) Pointer() uintptr
+func (Value) Index(i int) Value
+func (Value) Type() Type
+func (Value) Field(int) Value
+func (Value) MapIndex(Value) Value
+func (Value) MapKeys() []Value
+func (Value) NumField() int
+func (Value) Interface() interface{}
func SliceOf(Type) Type
diff --git a/go/ssa/interp/testdata/typeassert.go b/go/ssa/interp/testdata/typeassert.go
new file mode 100644
index 000000000..792a7558f
--- /dev/null
+++ b/go/ssa/interp/testdata/typeassert.go
@@ -0,0 +1,32 @@
+// Tests of type asserts.
+// Requires type parameters.
+package typeassert
+
+type fooer interface{ foo() string }
+
+type X int
+
+func (_ X) foo() string { return "x" }
+
+func f[T fooer](x T) func() string {
+ return x.foo
+}
+
+func main() {
+ if f[X](0)() != "x" {
+ panic("f[X]() != 'x'")
+ }
+
+ p := false
+ func() {
+ defer func() {
+ if recover() != nil {
+ p = true
+ }
+ }()
+ f[fooer](nil) // panics on x.foo when T is an interface and nil.
+ }()
+ if !p {
+ panic("f[fooer] did not panic")
+ }
+}
diff --git a/go/ssa/interp/testdata/width32.go b/go/ssa/interp/testdata/width32.go
new file mode 100644
index 000000000..a032ba44c
--- /dev/null
+++ b/go/ssa/interp/testdata/width32.go
@@ -0,0 +1,42 @@
+// Copyright 2022 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Test interpretation on 32 bit widths.
+
+package main
+
+func main() {
+ mapSize()
+}
+
+func mapSize() {
+ // Tests for the size argument of make on a map type.
+ const tooBigFor32 = 1<<33 - 1
+ wantPanic(
+ func() {
+ _ = make(map[int]int, int64(tooBigFor32))
+ },
+ "runtime error: ssa.MakeMap.Reserve value 8589934591 does not fit in int",
+ )
+
+ // TODO: Enable the following if sizeof(int) can be different for host and target.
+ // _ = make(map[int]int, tooBigFor32)
+ //
+ // Second arg to make in `make(map[int]int, tooBigFor32)` is an untyped int and
+ // is converted into an int explicitly in ssa.
+ // This has a different value on 32 and 64 bit systems.
+}
+
+func wantPanic(fn func(), s string) {
+ defer func() {
+ err := recover()
+ if err == nil {
+ panic("expected panic")
+ }
+ if got := err.(error).Error(); got != s {
+ panic("expected panic " + s + " got " + got)
+ }
+ }()
+ fn()
+}
diff --git a/go/ssa/interp/testdata/zeros.go b/go/ssa/interp/testdata/zeros.go
new file mode 100644
index 000000000..509c78a36
--- /dev/null
+++ b/go/ssa/interp/testdata/zeros.go
@@ -0,0 +1,45 @@
+// Copyright 2022 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Test interpretation on zero values with type params.
+package zeros
+
+func assert(cond bool, msg string) {
+ if !cond {
+ panic(msg)
+ }
+}
+
+func tp0[T int | string | float64]() T { return T(0) }
+
+func tpFalse[T ~bool]() T { return T(false) }
+
+func tpEmptyString[T string | []byte]() T { return T("") }
+
+func tpNil[T *int | []byte]() T { return T(nil) }
+
+func main() {
+ // zero values
+ var zi int
+ var zf float64
+ var zs string
+
+ assert(zi == int(0), "zero value of int is int(0)")
+ assert(zf == float64(0), "zero value of float64 is float64(0)")
+ assert(zs != string(0), "zero value of string is not string(0)")
+
+ assert(zi == tp0[int](), "zero value of int is int(0)")
+ assert(zf == tp0[float64](), "zero value of float64 is float64(0)")
+ assert(zs != tp0[string](), "zero value of string is not string(0)")
+
+ assert(zf == -0.0, "constant -0.0 is converted to 0.0")
+
+ assert(!tpFalse[bool](), "zero value of bool is false")
+
+ assert(tpEmptyString[string]() == zs, `zero value of string is string("")`)
+ assert(len(tpEmptyString[[]byte]()) == 0, `[]byte("") is empty`)
+
+ assert(tpNil[*int]() == nil, "nil is nil")
+ assert(tpNil[[]byte]() == nil, "nil is nil")
+}
diff --git a/go/ssa/lift.go b/go/ssa/lift.go
index 048e9b032..945536bbb 100644
--- a/go/ssa/lift.go
+++ b/go/ssa/lift.go
@@ -44,6 +44,8 @@ import (
"go/types"
"math/big"
"os"
+
+ "golang.org/x/tools/internal/typeparams"
)
// If true, show diagnostic information at each step of lifting.
@@ -61,7 +63,6 @@ const debugLifting = false
//
// domFrontier's methods mutate the slice's elements but not its
// length, so their receivers needn't be pointers.
-//
type domFrontier [][]*BasicBlock
func (df domFrontier) add(u, v *BasicBlock) {
@@ -127,7 +128,6 @@ func removeInstr(refs []Instruction, instr Instruction) []Instruction {
// - fn has no dead blocks (blockopt has run).
// - Def/use info (Operands and Referrers) is up-to-date.
// - The dominator tree is up-to-date.
-//
func lift(fn *Function) {
// TODO(adonovan): opt: lots of little optimizations may be
// worthwhile here, especially if they cause us to avoid
@@ -382,12 +382,10 @@ type newPhiMap map[*BasicBlock][]newPhi
// and returns true.
//
// fresh is a source of fresh ids for phi nodes.
-//
func liftAlloc(df domFrontier, alloc *Alloc, newPhis newPhiMap, fresh *int) bool {
- // Don't lift aggregates into registers, because we don't have
- // a way to express their zero-constants.
+ // TODO(taking): zero constants of aggregated types can now be lifted.
switch deref(alloc.Type()).Underlying().(type) {
- case *types.Array, *types.Struct:
+ case *types.Array, *types.Struct, *typeparams.TypeParam:
return false
}
@@ -491,7 +489,6 @@ func liftAlloc(df domFrontier, alloc *Alloc, newPhis newPhiMap, fresh *int) bool
// replaceAll replaces all intraprocedural uses of x with y,
// updating x.Referrers and y.Referrers.
// Precondition: x.Referrers() != nil, i.e. x must be local to some function.
-//
func replaceAll(x, y Value) {
var rands []*Value
pxrefs := x.Referrers()
@@ -514,7 +511,6 @@ func replaceAll(x, y Value) {
// renamed returns the value to which alloc is being renamed,
// constructing it lazily if it's the implicit zero initialization.
-//
func renamed(renaming []Value, alloc *Alloc) Value {
v := renaming[alloc.index]
if v == nil {
@@ -533,7 +529,6 @@ func renamed(renaming []Value, alloc *Alloc) Value {
// renaming is a map from *Alloc (keyed by index number) to its
// dominating stored value; newPhis[x] is the set of new φ-nodes to be
// prepended to block x.
-//
func rename(u *BasicBlock, renaming []Value, newPhis newPhiMap) {
// Each φ-node becomes the new name for its associated Alloc.
for _, np := range newPhis[u] {
diff --git a/go/ssa/lvalue.go b/go/ssa/lvalue.go
index 4d85be3ec..51122b8e8 100644
--- a/go/ssa/lvalue.go
+++ b/go/ssa/lvalue.go
@@ -16,7 +16,6 @@ import (
// An lvalue represents an assignable location that may appear on the
// left-hand side of an assignment. This is a generalization of a
// pointer to permit updates to elements of maps.
-//
type lvalue interface {
store(fn *Function, v Value) // stores v into the location
load(fn *Function) Value // loads the contents of the location
@@ -57,13 +56,12 @@ func (a *address) typ() types.Type {
}
// An element is an lvalue represented by m[k], the location of an
-// element of a map or string. These locations are not addressable
+// element of a map. These locations are not addressable
// since pointers cannot be formed from them, but they do support
-// load(), and in the case of maps, store().
-//
+// load() and store().
type element struct {
- m, k Value // map or string
- t types.Type // map element type or string byte type
+ m, k Value // map
+ t types.Type // map element type
pos token.Pos // source position of colon ({k:v}) or lbrack (m[k]=v)
}
@@ -88,16 +86,51 @@ func (e *element) store(fn *Function, v Value) {
}
func (e *element) address(fn *Function) Value {
- panic("map/string elements are not addressable")
+ panic("map elements are not addressable")
}
func (e *element) typ() types.Type {
return e.t
}
+// A lazyAddress is an lvalue whose address is the result of an instruction.
+// These work like an *address except a new address.address() Value
+// is created on each load, store and address call.
+// A lazyAddress can be used to control when a side effect (nil pointer
+// dereference, index out of bounds) of using a location happens.
+type lazyAddress struct {
+ addr func(fn *Function) Value // emit to fn the computation of the address
+ t types.Type // type of the location
+ pos token.Pos // source position
+ expr ast.Expr // source syntax of the value (not address) [debug mode]
+}
+
+func (l *lazyAddress) load(fn *Function) Value {
+ load := emitLoad(fn, l.addr(fn))
+ load.pos = l.pos
+ return load
+}
+
+func (l *lazyAddress) store(fn *Function, v Value) {
+ store := emitStore(fn, l.addr(fn), v, l.pos)
+ if l.expr != nil {
+ // store.Val is v, converted for assignability.
+ emitDebugRef(fn, l.expr, store.Val, false)
+ }
+}
+
+func (l *lazyAddress) address(fn *Function) Value {
+ addr := l.addr(fn)
+ if l.expr != nil {
+ emitDebugRef(fn, l.expr, addr, true)
+ }
+ return addr
+}
+
+func (l *lazyAddress) typ() types.Type { return l.t }
+
// A blank is a dummy variable whose name is "_".
// It is not reified: loads are illegal and stores are ignored.
-//
type blank struct{}
func (bl blank) load(fn *Function) Value {
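The lazyAddress comment above describes deferring the address computation until the location is actually loaded, stored, or has its address taken, so that side effects fire at the point of use. The type itself is unexported and operates on SSA values; the following is only an illustrative toy of the same idea in plain Go (the name lazyAddr and its methods are invented for this sketch):

package main

import "fmt"

// lazyAddr recomputes its address on every use, so a side effect such as
// an out-of-bounds index or a nil dereference happens when the location is
// used, not when the lvalue is constructed.
type lazyAddr struct {
	addr func() *int
}

func (l lazyAddr) load() int     { return *l.addr() }
func (l lazyAddr) store(v int)   { *l.addr() = v }
func (l lazyAddr) address() *int { return l.addr() }

func main() {
	xs := []int{1, 2, 3}
	i := 5
	loc := lazyAddr{addr: func() *int { return &xs[i] }} // nothing evaluated yet
	i = 1                                                // now in bounds
	loc.store(42)                                        // index computed here
	fmt.Println(loc.load(), xs)                          // 42 [1 42 3]
}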
diff --git a/go/ssa/methods.go b/go/ssa/methods.go
index 22e1f3f0a..4185618cd 100644
--- a/go/ssa/methods.go
+++ b/go/ssa/methods.go
@@ -9,40 +9,55 @@ package ssa
import (
"fmt"
"go/types"
+
+ "golang.org/x/tools/internal/typeparams"
)
// MethodValue returns the Function implementing method sel, building
// wrapper methods on demand. It returns nil if sel denotes an
-// abstract (interface) method.
+// abstract (interface or parameterized) method.
//
// Precondition: sel.Kind() == MethodVal.
//
// Thread-safe.
//
// EXCLUSIVE_LOCKS_ACQUIRED(prog.methodsMu)
-//
func (prog *Program) MethodValue(sel *types.Selection) *Function {
if sel.Kind() != types.MethodVal {
panic(fmt.Sprintf("MethodValue(%s) kind != MethodVal", sel))
}
T := sel.Recv()
- if isInterface(T) {
- return nil // abstract method
+ if types.IsInterface(T) {
+ return nil // abstract method (interface, possibly type param)
}
if prog.mode&LogSource != 0 {
defer logStack("MethodValue %s %v", T, sel)()
}
+ var m *Function
+ b := builder{created: &creator{}}
+
prog.methodsMu.Lock()
- defer prog.methodsMu.Unlock()
+ // Checks whether a type param is reachable from T.
+ // This is an expensive check. May need to be optimized later.
+ if !prog.parameterized.isParameterized(T) {
+ m = prog.addMethod(prog.createMethodSet(T), sel, b.created)
+ }
+ prog.methodsMu.Unlock()
- return prog.addMethod(prog.createMethodSet(T), sel)
+ if m == nil {
+ return nil // abstract method (generic)
+ }
+ for !b.done() {
+ b.buildCreated()
+ b.needsRuntimeTypes()
+ }
+ return m
}
// LookupMethod returns the implementation of the method of type T
// identified by (pkg, name). It returns nil if the method exists but
// is abstract, and panics if T has no such method.
-//
func (prog *Program) LookupMethod(T types.Type, pkg *types.Package, name string) *Function {
sel := prog.MethodSets.MethodSet(T).Lookup(pkg, name)
if sel == nil {
@@ -51,15 +66,20 @@ func (prog *Program) LookupMethod(T types.Type, pkg *types.Package, name string)
return prog.MethodValue(sel)
}
-// methodSet contains the (concrete) methods of a non-interface type.
+// methodSet contains the (concrete) methods of a concrete type (non-interface, non-parameterized).
type methodSet struct {
mapping map[string]*Function // populated lazily
complete bool // mapping contains all methods
}
-// Precondition: !isInterface(T).
+// Precondition: T is a concrete type, i.e. !isInterface(T) and not parameterized.
// EXCLUSIVE_LOCKS_REQUIRED(prog.methodsMu)
func (prog *Program) createMethodSet(T types.Type) *methodSet {
+ if prog.mode&SanityCheckFunctions != 0 {
+ if types.IsInterface(T) || prog.parameterized.isParameterized(T) {
+ panic("type is interface or parameterized")
+ }
+ }
mset, ok := prog.methodSets.At(T).(*methodSet)
if !ok {
mset = &methodSet{mapping: make(map[string]*Function)}
@@ -68,22 +88,29 @@ func (prog *Program) createMethodSet(T types.Type) *methodSet {
return mset
}
+// Adds any created functions to cr.
+// Precondition: T is a concrete type, i.e. !isInterface(T) and not parameterized.
// EXCLUSIVE_LOCKS_REQUIRED(prog.methodsMu)
-func (prog *Program) addMethod(mset *methodSet, sel *types.Selection) *Function {
+func (prog *Program) addMethod(mset *methodSet, sel *types.Selection, cr *creator) *Function {
if sel.Kind() == types.MethodExpr {
panic(sel)
}
id := sel.Obj().Id()
fn := mset.mapping[id]
if fn == nil {
- obj := sel.Obj().(*types.Func)
+ sel := toSelection(sel)
+ obj := sel.obj.(*types.Func)
- needsPromotion := len(sel.Index()) > 1
- needsIndirection := !isPointer(recvType(obj)) && isPointer(sel.Recv())
+ needsPromotion := len(sel.index) > 1
+ needsIndirection := !isPointer(recvType(obj)) && isPointer(sel.recv)
if needsPromotion || needsIndirection {
- fn = makeWrapper(prog, sel)
+ fn = makeWrapper(prog, sel, cr)
} else {
- fn = prog.declaredFunc(obj)
+ fn = prog.originFunc(obj)
+ if fn.typeparams.Len() > 0 { // instantiate
+ targs := receiverTypeArgs(obj)
+ fn = prog.lookupOrCreateInstance(fn, targs, cr)
+ }
}
if fn.Signature.Recv() == nil {
panic(fn) // missing receiver
@@ -100,7 +127,6 @@ func (prog *Program) addMethod(mset *methodSet, sel *types.Selection) *Function
// Thread-safe.
//
// EXCLUSIVE_LOCKS_ACQUIRED(prog.methodsMu)
-//
func (prog *Program) RuntimeTypes() []types.Type {
prog.methodsMu.Lock()
defer prog.methodsMu.Unlock()
@@ -116,7 +142,6 @@ func (prog *Program) RuntimeTypes() []types.Type {
// declaredFunc returns the concrete function/method denoted by obj.
// Panic ensues if there is none.
-//
func (prog *Program) declaredFunc(obj *types.Func) *Function {
if v := prog.packageLevelMember(obj); v != nil {
return v.(*Function)
@@ -132,26 +157,28 @@ func (prog *Program) declaredFunc(obj *types.Func) *Function {
// operand of some MakeInterface instruction, and for the type of
// every exported package member.
//
+// Adds any created functions to cr.
+//
// Precondition: T is not a method signature (*Signature with Recv()!=nil).
+// Precondition: T is not parameterized.
//
-// Thread-safe. (Called via emitConv from multiple builder goroutines.)
+// Thread-safe. (Called via Package.build from multiple builder goroutines.)
//
// TODO(adonovan): make this faster. It accounts for 20% of SSA build time.
//
// EXCLUSIVE_LOCKS_ACQUIRED(prog.methodsMu)
-//
-func (prog *Program) needMethodsOf(T types.Type) {
+func (prog *Program) needMethodsOf(T types.Type, cr *creator) {
prog.methodsMu.Lock()
- prog.needMethods(T, false)
+ prog.needMethods(T, false, cr)
prog.methodsMu.Unlock()
}
// Precondition: T is not a method signature (*Signature with Recv()!=nil).
+// Precondition: T is not parameterized.
// Recursive case: skip => don't create methods for T.
//
// EXCLUSIVE_LOCKS_REQUIRED(prog.methodsMu)
-//
-func (prog *Program) needMethods(T types.Type, skip bool) {
+func (prog *Program) needMethods(T types.Type, skip bool, cr *creator) {
// Each package maintains its own set of types it has visited.
if prevSkip, ok := prog.runtimeTypes.At(T).(bool); ok {
// needMethods(T) was previously called
@@ -163,14 +190,14 @@ func (prog *Program) needMethods(T types.Type, skip bool) {
tmset := prog.MethodSets.MethodSet(T)
- if !skip && !isInterface(T) && tmset.Len() > 0 {
+ if !skip && !types.IsInterface(T) && tmset.Len() > 0 {
// Create methods of T.
mset := prog.createMethodSet(T)
if !mset.complete {
mset.complete = true
n := tmset.Len()
for i := 0; i < n; i++ {
- prog.addMethod(mset, tmset.At(i))
+ prog.addMethod(mset, tmset.At(i), cr)
}
}
}
@@ -178,8 +205,8 @@ func (prog *Program) needMethods(T types.Type, skip bool) {
// Recursion over signatures of each method.
for i := 0; i < tmset.Len(); i++ {
sig := tmset.At(i).Type().(*types.Signature)
- prog.needMethods(sig.Params(), false)
- prog.needMethods(sig.Results(), false)
+ prog.needMethods(sig.Params(), false, cr)
+ prog.needMethods(sig.Results(), false, cr)
}
switch t := T.(type) {
@@ -190,49 +217,55 @@ func (prog *Program) needMethods(T types.Type, skip bool) {
// nop---handled by recursion over method set.
case *types.Pointer:
- prog.needMethods(t.Elem(), false)
+ prog.needMethods(t.Elem(), false, cr)
case *types.Slice:
- prog.needMethods(t.Elem(), false)
+ prog.needMethods(t.Elem(), false, cr)
case *types.Chan:
- prog.needMethods(t.Elem(), false)
+ prog.needMethods(t.Elem(), false, cr)
case *types.Map:
- prog.needMethods(t.Key(), false)
- prog.needMethods(t.Elem(), false)
+ prog.needMethods(t.Key(), false, cr)
+ prog.needMethods(t.Elem(), false, cr)
case *types.Signature:
if t.Recv() != nil {
panic(fmt.Sprintf("Signature %s has Recv %s", t, t.Recv()))
}
- prog.needMethods(t.Params(), false)
- prog.needMethods(t.Results(), false)
+ prog.needMethods(t.Params(), false, cr)
+ prog.needMethods(t.Results(), false, cr)
case *types.Named:
// A pointer-to-named type can be derived from a named
// type via reflection. It may have methods too.
- prog.needMethods(types.NewPointer(T), false)
+ prog.needMethods(types.NewPointer(T), false, cr)
// Consider 'type T struct{S}' where S has methods.
// Reflection provides no way to get from T to struct{S},
// only to S, so the method set of struct{S} is unwanted,
// so set 'skip' flag during recursion.
- prog.needMethods(t.Underlying(), true)
+ prog.needMethods(t.Underlying(), true, cr)
case *types.Array:
- prog.needMethods(t.Elem(), false)
+ prog.needMethods(t.Elem(), false, cr)
case *types.Struct:
for i, n := 0, t.NumFields(); i < n; i++ {
- prog.needMethods(t.Field(i).Type(), false)
+ prog.needMethods(t.Field(i).Type(), false, cr)
}
case *types.Tuple:
for i, n := 0, t.Len(); i < n; i++ {
- prog.needMethods(t.At(i).Type(), false)
+ prog.needMethods(t.At(i).Type(), false, cr)
}
+ case *typeparams.TypeParam:
+ panic(T) // type parameters are always abstract.
+
+ case *typeparams.Union:
+ // nop
+
default:
panic(T)
}
diff --git a/go/ssa/methods_test.go b/go/ssa/methods_test.go
new file mode 100644
index 000000000..8391cf6d7
--- /dev/null
+++ b/go/ssa/methods_test.go
@@ -0,0 +1,96 @@
+// Copyright 2022 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package ssa_test
+
+import (
+ "go/ast"
+ "go/parser"
+ "go/token"
+ "go/types"
+ "testing"
+
+ "golang.org/x/tools/go/ssa"
+ "golang.org/x/tools/go/ssa/ssautil"
+ "golang.org/x/tools/internal/typeparams"
+)
+
+// Tests that MethodValue returns the expected method.
+func TestMethodValue(t *testing.T) {
+ if !typeparams.Enabled {
+ t.Skip("TestMethodValue requires type parameters")
+ }
+ input := `
+package p
+
+type I interface{ M() }
+
+type S int
+func (S) M() {}
+type R[T any] struct{ S }
+
+var i I
+var s S
+var r R[string]
+
+func selections[T any]() {
+ _ = i.M
+ _ = s.M
+ _ = r.M
+
+ var v R[T]
+ _ = v.M
+}
+`
+
+ // Parse the file.
+ fset := token.NewFileSet()
+ f, err := parser.ParseFile(fset, "input.go", input, 0)
+ if err != nil {
+ t.Error(err)
+ return
+ }
+
+ // Build an SSA program from the parsed file.
+ p, info, err := ssautil.BuildPackage(&types.Config{}, fset,
+ types.NewPackage("p", ""), []*ast.File{f}, ssa.SanityCheckFunctions)
+ if err != nil {
+ t.Error(err)
+ return
+ }
+
+ // Collect all of the *types.Selection in the function "selections".
+ var selections []*types.Selection
+ for _, decl := range f.Decls {
+ if fn, ok := decl.(*ast.FuncDecl); ok && fn.Name.Name == "selections" {
+ for _, stmt := range fn.Body.List {
+ if assign, ok := stmt.(*ast.AssignStmt); ok {
+ sel := assign.Rhs[0].(*ast.SelectorExpr)
+ selections = append(selections, info.Selections[sel])
+ }
+ }
+ }
+ }
+
+ wants := map[string]string{
+ "method (p.S) M()": "(p.S).M",
+ "method (p.R[string]) M()": "(p.R[string]).M",
+ "method (p.I) M()": "nil", // interface
+ "method (p.R[T]) M()": "nil", // parameterized
+ }
+ if len(wants) != len(selections) {
+ t.Fatalf("Wanted %d selections. got %d", len(wants), len(selections))
+ }
+ for _, selection := range selections {
+ var got string
+ if m := p.Prog.MethodValue(selection); m != nil {
+ got = m.String()
+ } else {
+ got = "nil"
+ }
+ if want := wants[selection.String()]; want != got {
+ t.Errorf("p.Prog.MethodValue(%s) expected %q. got %q", selection, want, got)
+ }
+ }
+}
diff --git a/go/ssa/mode.go b/go/ssa/mode.go
index 298f24b91..8381639a5 100644
--- a/go/ssa/mode.go
+++ b/go/ssa/mode.go
@@ -15,9 +15,8 @@ import (
//
// *BuilderMode satisfies the flag.Value interface. Example:
//
-// var mode = ssa.BuilderMode(0)
-// func init() { flag.Var(&mode, "build", ssa.BuilderModeDoc) }
-//
+// var mode = ssa.BuilderMode(0)
+// func init() { flag.Var(&mode, "build", ssa.BuilderModeDoc) }
type BuilderMode uint
const (
@@ -29,6 +28,7 @@ const (
BuildSerially // Build packages serially, not in parallel.
GlobalDebug // Enable debug info for all packages
BareInits // Build init functions without guards or calls to dependent inits
+ InstantiateGenerics // Instantiate generics functions (monomorphize) while building
)
const BuilderModeDoc = `Options controlling the SSA builder.
@@ -41,6 +41,7 @@ S log [S]ource locations as SSA builder progresses.
L build distinct packages seria[L]ly instead of in parallel.
N build [N]aive SSA form: don't replace local loads/stores with registers.
I build bare [I]nit functions: no init guards or calls to dependent inits.
+G instantiate [G]eneric function bodies via monomorphization
`
func (m BuilderMode) String() string {
@@ -69,6 +70,9 @@ func (m BuilderMode) String() string {
if m&BareInits != 0 {
buf.WriteByte('I')
}
+ if m&InstantiateGenerics != 0 {
+ buf.WriteByte('G')
+ }
return buf.String()
}
@@ -93,6 +97,8 @@ func (m *BuilderMode) Set(s string) error {
mode |= BuildSerially
case 'I':
mode |= BareInits
+ case 'G':
+ mode |= InstantiateGenerics
default:
return fmt.Errorf("unknown BuilderMode option: %q", c)
}
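For reference, a minimal sketch of driving the new option from client code, using only the exported names visible in this hunk (BuilderMode, BuilderModeDoc, InstantiateGenerics); the -build flag name is just the conventional one from the doc comment above:

package main

import (
	"flag"
	"fmt"

	"golang.org/x/tools/go/ssa"
)

var mode = ssa.BuilderMode(0)

func init() { flag.Var(&mode, "build", ssa.BuilderModeDoc) }

func main() {
	flag.Parse() // e.g. -build=GI enables InstantiateGenerics and BareInits

	// The bit can also be set programmatically, or parsed from its
	// single-letter form via Set.
	if err := mode.Set("G"); err != nil {
		fmt.Println(err)
	}
	fmt.Println(mode&ssa.InstantiateGenerics != 0, mode) // true G
}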
diff --git a/go/ssa/parameterized.go b/go/ssa/parameterized.go
index 956718cd7..3fc4348fc 100644
--- a/go/ssa/parameterized.go
+++ b/go/ssa/parameterized.go
@@ -17,7 +17,7 @@ type tpWalker struct {
seen map[types.Type]bool
}
-// isParameterized returns true when typ contains any type parameters.
+// isParameterized returns true when typ reaches any type parameter.
func (w *tpWalker) isParameterized(typ types.Type) (res bool) {
// NOTE: Adapted from go/types/infer.go. Try to keep in sync.
@@ -101,6 +101,7 @@ func (w *tpWalker) isParameterized(typ types.Type) (res bool) {
return true
}
}
+ return w.isParameterized(t.Underlying()) // recurse for types local to parameterized functions
case *typeparams.TypeParam:
return true
@@ -111,3 +112,12 @@ func (w *tpWalker) isParameterized(typ types.Type) (res bool) {
return false
}
+
+func (w *tpWalker) anyParameterized(ts []types.Type) bool {
+ for _, t := range ts {
+ if w.isParameterized(t) {
+ return true
+ }
+ }
+ return false
+}
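isParameterized and anyParameterized are unexported, but the property they compute, whether a type reaches a *types.TypeParam, can be sketched directly against go/types. This is a simplified illustration assuming go1.18+ (the real tpWalker memoizes results across calls and handles more type forms):

package main

import (
	"fmt"
	"go/types"
)

// reachesTypeParam reports whether t reaches a type parameter.
// Simplified: only a few composite type forms are handled.
func reachesTypeParam(t types.Type, seen map[types.Type]bool) bool {
	if seen[t] {
		return false
	}
	seen[t] = true
	switch t := t.(type) {
	case *types.TypeParam:
		return true
	case *types.Pointer:
		return reachesTypeParam(t.Elem(), seen)
	case *types.Slice:
		return reachesTypeParam(t.Elem(), seen)
	case *types.Array:
		return reachesTypeParam(t.Elem(), seen)
	case *types.Map:
		return reachesTypeParam(t.Key(), seen) || reachesTypeParam(t.Elem(), seen)
	case *types.Named:
		for i := 0; i < t.TypeArgs().Len(); i++ {
			if reachesTypeParam(t.TypeArgs().At(i), seen) {
				return true
			}
		}
		return reachesTypeParam(t.Underlying(), seen)
	case *types.Struct:
		for i := 0; i < t.NumFields(); i++ {
			if reachesTypeParam(t.Field(i).Type(), seen) {
				return true
			}
		}
	}
	return false
}

func main() {
	tp := types.NewTypeParam(types.NewTypeName(0, nil, "T", nil), types.NewInterfaceType(nil, nil))
	fmt.Println(reachesTypeParam(types.NewSlice(tp), map[types.Type]bool{}))                   // true
	fmt.Println(reachesTypeParam(types.NewSlice(types.Typ[types.Int]), map[types.Type]bool{})) // false
}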
diff --git a/go/ssa/print.go b/go/ssa/print.go
index d0f3bbf7e..8b783196e 100644
--- a/go/ssa/print.go
+++ b/go/ssa/print.go
@@ -17,6 +17,7 @@ import (
"strings"
"golang.org/x/tools/go/types/typeutil"
+ "golang.org/x/tools/internal/typeparams"
)
// relName returns the name of v relative to i.
@@ -24,7 +25,6 @@ import (
// Functions (including methods) and Globals use RelString and
// all types are displayed with relType, so that only cross-package
// references are package-qualified.
-//
func relName(v Value, i Instruction) string {
var from *types.Package
if i != nil {
@@ -51,6 +51,14 @@ func relType(t types.Type, from *types.Package) string {
return s
}
+func relTerm(term *typeparams.Term, from *types.Package) string {
+ s := relType(term.Type(), from)
+ if term.Tilde() {
+ return "~" + s
+ }
+ return s
+}
+
func relString(m Member, from *types.Package) string {
// NB: not all globals have an Object (e.g. init$guard),
// so use Package().Object not Object.Package().
@@ -174,6 +182,24 @@ func (v *ChangeInterface) String() string { return printConv("change interfa
func (v *SliceToArrayPointer) String() string { return printConv("slice to array pointer", v, v.X) }
func (v *MakeInterface) String() string { return printConv("make", v, v.X) }
+func (v *MultiConvert) String() string {
+ from := v.Parent().relPkg()
+
+ var b strings.Builder
+ b.WriteString(printConv("multiconvert", v, v.X))
+ b.WriteString(" [")
+ for i, s := range v.from {
+ for j, d := range v.to {
+ if i != 0 || j != 0 {
+ b.WriteString(" | ")
+ }
+ fmt.Fprintf(&b, "%s <- %s", relTerm(d, from), relTerm(s, from))
+ }
+ }
+ b.WriteString("]")
+ return b.String()
+}
+
func (v *MakeClosure) String() string {
var b bytes.Buffer
fmt.Fprintf(&b, "make closure %s", relName(v.Fn, v))
@@ -233,7 +259,7 @@ func (v *MakeChan) String() string {
}
func (v *FieldAddr) String() string {
- st := deref(v.X.Type()).Underlying().(*types.Struct)
+ st := typeparams.CoreType(deref(v.X.Type())).(*types.Struct)
// Be robust against a bad index.
name := "?"
if 0 <= v.Field && v.Field < st.NumFields() {
@@ -243,7 +269,7 @@ func (v *FieldAddr) String() string {
}
func (v *Field) String() string {
- st := v.X.Type().Underlying().(*types.Struct)
+ st := typeparams.CoreType(v.X.Type()).(*types.Struct)
// Be robust against a bad index.
name := "?"
if 0 <= v.Field && v.Field < st.NumFields() {
diff --git a/go/ssa/sanity.go b/go/ssa/sanity.go
index 6e65d760d..88ad374de 100644
--- a/go/ssa/sanity.go
+++ b/go/ssa/sanity.go
@@ -30,7 +30,6 @@ type sanity struct {
//
// Sanity-checking is intended to facilitate the debugging of code
// transformation passes.
-//
func sanityCheck(fn *Function, reporter io.Writer) bool {
if reporter == nil {
reporter = os.Stderr
@@ -40,7 +39,6 @@ func sanityCheck(fn *Function, reporter io.Writer) bool {
// mustSanityCheck is like sanityCheck but panics instead of returning
// a negative result.
-//
func mustSanityCheck(fn *Function, reporter io.Writer) {
if !sanityCheck(fn, reporter) {
fn.WriteTo(os.Stderr)
@@ -110,6 +108,9 @@ func (s *sanity) checkInstr(idx int, instr Instruction) {
for i, e := range instr.Edges {
if e == nil {
s.errorf("phi node '%s' has no value for edge #%d from %s", instr.Comment, i, s.block.Preds[i])
+ } else if !types.Identical(instr.typ, e.Type()) {
+ s.errorf("phi node '%s' has a different type (%s) for edge #%d from %s (%s)",
+ instr.Comment, instr.Type(), i, s.block.Preds[i], e.Type())
}
}
}
@@ -134,12 +135,12 @@ func (s *sanity) checkInstr(idx int, instr Instruction) {
case *ChangeType:
case *SliceToArrayPointer:
case *Convert:
- if _, ok := instr.X.Type().Underlying().(*types.Basic); !ok {
- if _, ok := instr.Type().Underlying().(*types.Basic); !ok {
- s.errorf("convert %s -> %s: at least one type must be basic", instr.X.Type(), instr.Type())
+ if from := instr.X.Type(); !isBasicConvTypes(typeSetOf(from)) {
+ if to := instr.Type(); !isBasicConvTypes(typeSetOf(to)) {
+ s.errorf("convert %s -> %s: at least one type must be basic (or all basic, []byte, or []rune)", from, to)
}
}
-
+ case *MultiConvert:
case *Defer:
case *Extract:
case *Field:
@@ -404,6 +405,8 @@ func (s *sanity) checkFunction(fn *Function) bool {
// - check params match signature
// - check transient fields are nil
// - warn if any fn.Locals do not appear among block instructions.
+
+ // TODO(taking): Sanity check origin, typeparams, and typeargs.
s.fn = fn
if fn.Prog == nil {
s.errorf("nil Prog")
@@ -419,14 +422,23 @@ func (s *sanity) checkFunction(fn *Function) bool {
if strings.HasPrefix(fn.Synthetic, "wrapper ") ||
strings.HasPrefix(fn.Synthetic, "bound ") ||
strings.HasPrefix(fn.Synthetic, "thunk ") ||
- strings.HasSuffix(fn.name, "Error") {
+ strings.HasSuffix(fn.name, "Error") ||
+ strings.HasPrefix(fn.Synthetic, "instance ") ||
+ strings.HasPrefix(fn.Synthetic, "instantiation ") ||
+ (fn.parent != nil && len(fn.typeargs) > 0) /* anon fun in instance */ {
// ok
} else {
s.errorf("nil Pkg")
}
}
if src, syn := fn.Synthetic == "", fn.Syntax() != nil; src != syn {
- s.errorf("got fromSource=%t, hasSyntax=%t; want same values", src, syn)
+ if len(fn.typeargs) > 0 && fn.Prog.mode&InstantiateGenerics != 0 {
+ // ok (instantiation with InstantiateGenerics on)
+ } else if fn.topLevelOrigin != nil && len(fn.typeargs) > 0 {
+ // ok (we always have the syntax set for instantiation)
+ } else {
+ s.errorf("got fromSource=%t, hasSyntax=%t; want same values", src, syn)
+ }
}
for i, l := range fn.Locals {
if l.Parent() != fn {
@@ -488,6 +500,9 @@ func (s *sanity) checkFunction(fn *Function) bool {
if anon.Parent() != fn {
s.errorf("AnonFuncs[%d]=%s but %s.Parent()=%s", i, anon, anon, anon.Parent())
}
+ if i != int(anon.anonIdx) {
+ s.errorf("AnonFuncs[%d]=%s but %s.anonIdx=%d", i, anon, anon, anon.anonIdx)
+ }
}
s.fn = nil
return !s.insane
diff --git a/go/ssa/source.go b/go/ssa/source.go
index 7e2a369dd..b9a08363e 100644
--- a/go/ssa/source.go
+++ b/go/ssa/source.go
@@ -14,6 +14,8 @@ import (
"go/ast"
"go/token"
"go/types"
+
+ "golang.org/x/tools/internal/typeparams"
)
// EnclosingFunction returns the function that contains the syntax
@@ -23,11 +25,10 @@ import (
// enclosed by the package's init() function.
//
// Returns nil if not found; reasons might include:
-// - the node is not enclosed by any function.
-// - the node is within an anonymous function (FuncLit) and
-// its SSA function has not been created yet
-// (pkg.Build() has not yet been called).
-//
+// - the node is not enclosed by any function.
+// - the node is within an anonymous function (FuncLit) and
+// its SSA function has not been created yet
+// (pkg.Build() has not yet been called).
func EnclosingFunction(pkg *Package, path []ast.Node) *Function {
// Start with package-level function...
fn := findEnclosingPackageLevelFunction(pkg, path)
@@ -65,14 +66,12 @@ outer:
// depend on whether SSA code for pkg has been built, so it can be
// used to quickly reject check inputs that will cause
// EnclosingFunction to fail, prior to SSA building.
-//
func HasEnclosingFunction(pkg *Package, path []ast.Node) bool {
return findEnclosingPackageLevelFunction(pkg, path) != nil
}
// findEnclosingPackageLevelFunction returns the Function
// corresponding to the package-level function enclosing path.
-//
func findEnclosingPackageLevelFunction(pkg *Package, path []ast.Node) *Function {
if n := len(path); n >= 2 { // [... {Gen,Func}Decl File]
switch decl := path[n-2].(type) {
@@ -107,7 +106,6 @@ func findEnclosingPackageLevelFunction(pkg *Package, path []ast.Node) *Function
// findNamedFunc returns the named function whose FuncDecl.Ident is at
// position pos.
-//
func findNamedFunc(pkg *Package, pos token.Pos) *Function {
// Look at all package members and method sets of named types.
// Not very efficient.
@@ -135,13 +133,13 @@ func findNamedFunc(pkg *Package, pos token.Pos) *Function {
// expression e.
//
// It returns nil if no value was found, e.g.
-// - the expression is not lexically contained within f;
-// - f was not built with debug information; or
-// - e is a constant expression. (For efficiency, no debug
-// information is stored for constants. Use
-// go/types.Info.Types[e].Value instead.)
-// - e is a reference to nil or a built-in function.
-// - the value was optimised away.
+// - the expression is not lexically contained within f;
+// - f was not built with debug information; or
+// - e is a constant expression. (For efficiency, no debug
+// information is stored for constants. Use
+// go/types.Info.Types[e].Value instead.)
+// - e is a reference to nil or a built-in function.
+// - the value was optimised away.
//
// If e is an addressable expression used in an lvalue context,
// value is the address denoted by e, and isAddr is true.
@@ -153,7 +151,6 @@ func findNamedFunc(pkg *Package, pos token.Pos) *Function {
// astutil.PathEnclosingInterval to locate the ast.Node, then
// EnclosingFunction to locate the Function, then ValueForExpr to find
// the ssa.Value.)
-//
func (f *Function) ValueForExpr(e ast.Expr) (value Value, isAddr bool) {
if f.debugInfo() { // (opt)
e = unparen(e)
@@ -175,7 +172,6 @@ func (f *Function) ValueForExpr(e ast.Expr) (value Value, isAddr bool) {
// Package returns the SSA Package corresponding to the specified
// type-checker package object.
// It returns nil if no such SSA package has been created.
-//
func (prog *Program) Package(obj *types.Package) *Package {
return prog.packages[obj]
}
@@ -184,7 +180,6 @@ func (prog *Program) Package(obj *types.Package) *Package {
// the specified named object, which may be a package-level const
// (*NamedConst), var (*Global) or func (*Function) of some package in
// prog. It returns nil if the object is not found.
-//
func (prog *Program) packageLevelMember(obj types.Object) Member {
if pkg, ok := prog.packages[obj.Pkg()]; ok {
return pkg.objects[obj]
@@ -192,12 +187,17 @@ func (prog *Program) packageLevelMember(obj types.Object) Member {
return nil
}
+// originFunc returns the package-level generic function that is the
+// origin of obj. It returns nil if the generic function is not found.
+func (prog *Program) originFunc(obj *types.Func) *Function {
+ return prog.declaredFunc(typeparams.OriginMethod(obj))
+}
+
// FuncValue returns the concrete Function denoted by the source-level
// named function obj, or nil if obj denotes an interface method.
//
// TODO(adonovan): check the invariant that obj.Type() matches the
// result's Signature, both in the params/results and in the receiver.
-//
func (prog *Program) FuncValue(obj *types.Func) *Function {
fn, _ := prog.packageLevelMember(obj).(*Function)
return fn
@@ -205,7 +205,6 @@ func (prog *Program) FuncValue(obj *types.Func) *Function {
// ConstValue returns the SSA Value denoted by the source-level named
// constant obj.
-//
func (prog *Program) ConstValue(obj *types.Const) *Const {
// TODO(adonovan): opt: share (don't reallocate)
// Consts for const objects and constant ast.Exprs.
@@ -237,8 +236,9 @@ func (prog *Program) ConstValue(obj *types.Const) *Const {
// If the identifier is a field selector and its base expression is
// non-addressable, then VarValue returns the value of that field.
// For example:
-// func f() struct {x int}
-// f().x // VarValue(x) returns a *Field instruction of type int
+//
+// func f() struct {x int}
+// f().x // VarValue(x) returns a *Field instruction of type int
//
// All other identifiers denote addressable locations (variables).
// For them, VarValue may return either the variable's address or its
@@ -247,14 +247,14 @@ func (prog *Program) ConstValue(obj *types.Const) *Const {
//
// If !isAddr, the returned value is the one associated with the
// specific identifier. For example,
-// var x int // VarValue(x) returns Const 0 here
-// x = 1 // VarValue(x) returns Const 1 here
+//
+// var x int // VarValue(x) returns Const 0 here
+// x = 1 // VarValue(x) returns Const 1 here
//
// It is not specified whether the value or the address is returned in
// any particular case, as it may depend upon optimizations performed
// during SSA code generation, such as registerization, constant
// folding, avoidance of materialization of subexpressions, etc.
-//
func (prog *Program) VarValue(obj *types.Var, pkg *Package, ref []ast.Node) (value Value, isAddr bool) {
// All references to a var are local to some function, possibly init.
fn := EnclosingFunction(pkg, ref)
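The ValueForExpr comment above spells out the usual lookup pipeline: astutil.PathEnclosingInterval to find the path, EnclosingFunction to find the function, then ValueForExpr. A minimal sketch of that pipeline, assuming pkg was built with SetDebugMode(true) before Build() and that f is the *ast.File containing the interval (valueAt is an invented helper name):

package query

import (
	"go/ast"
	"go/token"

	"golang.org/x/tools/go/ast/astutil"
	"golang.org/x/tools/go/ssa"
)

// valueAt maps a source interval to the ssa.Value for the innermost
// enclosing expression, following the steps described in the
// ValueForExpr documentation. It returns (nil, false) if none is found.
func valueAt(pkg *ssa.Package, f *ast.File, start, end token.Pos) (ssa.Value, bool) {
	path, exact := astutil.PathEnclosingInterval(f, start, end)
	if !exact {
		return nil, false // interval does not denote a single expression
	}
	expr, ok := path[0].(ast.Expr)
	if !ok {
		return nil, false
	}
	fn := ssa.EnclosingFunction(pkg, path)
	if fn == nil {
		return nil, false // not inside any (built) function
	}
	return fn.ValueForExpr(expr) // (value, isAddr)
}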
diff --git a/go/ssa/source_test.go b/go/ssa/source_test.go
index 24cf57ef0..eb266edd1 100644
--- a/go/ssa/source_test.go
+++ b/go/ssa/source_test.go
@@ -89,7 +89,7 @@ func TestObjValueLookup(t *testing.T) {
return
}
- prog := ssautil.CreateProgram(iprog, 0 /*|ssa.PrintFunctions*/)
+ prog := ssautil.CreateProgram(iprog, ssa.BuilderMode(0) /*|ssa.PrintFunctions*/)
mainInfo := iprog.Created[0]
mainPkg := prog.Package(mainInfo.Pkg)
mainPkg.SetDebugMode(true)
@@ -247,7 +247,7 @@ func testValueForExpr(t *testing.T, testfile string) {
mainInfo := iprog.Created[0]
- prog := ssautil.CreateProgram(iprog, 0)
+ prog := ssautil.CreateProgram(iprog, ssa.BuilderMode(0))
mainPkg := prog.Package(mainInfo.Pkg)
mainPkg.SetDebugMode(true)
mainPkg.Build()
@@ -325,7 +325,6 @@ func testValueForExpr(t *testing.T, testfile string) {
// findInterval parses input and returns the [start, end) positions of
// the first occurrence of substr in input. f==nil indicates failure;
// an error has already been reported in that case.
-//
func findInterval(t *testing.T, fset *token.FileSet, input, substr string) (f *ast.File, start, end token.Pos) {
f, err := parser.ParseFile(fset, "<input>", input, 0)
if err != nil {
@@ -404,7 +403,7 @@ func TestEnclosingFunction(t *testing.T) {
t.Error(err)
continue
}
- prog := ssautil.CreateProgram(iprog, 0)
+ prog := ssautil.CreateProgram(iprog, ssa.BuilderMode(0))
pkg := prog.Package(iprog.Created[0].Pkg)
pkg.Build()
diff --git a/go/ssa/ssa.go b/go/ssa/ssa.go
index ea5b68e26..c3471c156 100644
--- a/go/ssa/ssa.go
+++ b/go/ssa/ssa.go
@@ -16,6 +16,7 @@ import (
"sync"
"golang.org/x/tools/go/types/typeutil"
+ "golang.org/x/tools/internal/typeparams"
)
// A Program is a partial or complete Go program converted to SSA form.
@@ -26,13 +27,16 @@ type Program struct {
mode BuilderMode // set of mode bits for SSA construction
MethodSets typeutil.MethodSetCache // cache of type-checker's method-sets
- canon canonizer // type canonicalization map
+ canon *canonizer // type canonicalization map
+ ctxt *typeparams.Context // cache for type checking instantiations
- methodsMu sync.Mutex // guards the following maps:
- methodSets typeutil.Map // maps type to its concrete methodSet
- runtimeTypes typeutil.Map // types for which rtypes are needed
- bounds map[*types.Func]*Function // bounds for curried x.Method closures
- thunks map[selectionKey]*Function // thunks for T.Method expressions
+ methodsMu sync.Mutex // guards the following maps:
+ methodSets typeutil.Map // maps type to its concrete methodSet
+ runtimeTypes typeutil.Map // types for which rtypes are needed
+ bounds map[boundsKey]*Function // bounds for curried x.Method closures
+ thunks map[selectionKey]*Function // thunks for T.Method expressions
+ instances map[*Function]*instanceSet // instances of generic functions
+ parameterized tpWalker // determines whether a type reaches a type parameter.
}
// A Package is a single analyzed Go package containing Members for
@@ -43,7 +47,6 @@ type Program struct {
// Members also contains entries for "init" (the synthetic package
// initializer) and "init#%d", the nth declared init function,
// and unspecified other things too.
-//
type Package struct {
Prog *Program // the owning program
Pkg *types.Package // the corresponding go/types.Package
@@ -58,12 +61,12 @@ type Package struct {
ninit int32 // number of init functions
info *types.Info // package type information
files []*ast.File // package ASTs
+ created creator // members created as a result of building this package (includes declared functions, wrappers)
}
// A Member is a member of a Go package, implemented by *NamedConst,
// *Global, *Function, or *Type; they are created by package-level
// const, var, func and type declarations respectively.
-//
type Member interface {
Name() string // declared name of the package member
String() string // package-qualified name of the package member
@@ -89,7 +92,6 @@ type Type struct {
//
// NB: a NamedConst is not a Value; it contains a constant Value, which
// it augments with the name and position of its 'const' declaration.
-//
type NamedConst struct {
object *types.Const
Value *Const
@@ -165,7 +167,6 @@ type Value interface {
// An Instruction that defines a value (e.g. BinOp) also implements
// the Value interface; an Instruction that only has an effect (e.g. Store)
// does not.
-//
type Instruction interface {
// String returns the disassembled form of this value.
//
@@ -242,7 +243,6 @@ type Instruction interface {
// Node is provided to simplify SSA graph algorithms. Clients should
// use the more specific and informative Value or Instruction
// interfaces where appropriate.
-//
type Node interface {
// Common methods:
String() string
@@ -294,10 +294,19 @@ type Node interface {
//
// Type() returns the function's Signature.
//
+// A generic function is a function or method that has uninstantiated type
+// parameters (TypeParams() != nil). Consider a hypothetical generic
+// method, (*Map[K,V]).Get. It may be instantiated with all ground
+// (non-parameterized) types as (*Map[string,int]).Get or with
+// parameterized types as (*Map[string,U]).Get, where U is a type parameter.
+// In both instantiations, Origin() refers to the generic
+// method, (*Map[K,V]).Get, and TypeParams() refers to the parameters [K,V] of
+// the generic method. TypeArgs() refers to [string,U] or [string,int],
+// respectively, and is nil in the generic method.
type Function struct {
name string
- object types.Object // a declared *types.Func or one of its wrappers
- method *types.Selection // info about provenance of synthetic methods
+ object types.Object // a declared *types.Func or one of its wrappers
+ method *selection // info about provenance of synthetic methods; thunk => non-nil
Signature *types.Signature
pos token.Pos
@@ -313,15 +322,22 @@ type Function struct {
Recover *BasicBlock // optional; control transfers here after recovered panic
AnonFuncs []*Function // anonymous functions directly beneath this one
referrers []Instruction // referring instructions (iff Parent() != nil)
+ built bool // function has completed both CREATE and BUILD phase.
+ anonIdx int32 // position of a nested function in parent's AnonFuncs. fn.Parent()!=nil => fn.Parent().AnonFuncs[fn.anonIdx] == fn.
+
+ typeparams *typeparams.TypeParamList // type parameters of this function. typeparams.Len() > 0 => generic or instance of generic function
+ typeargs []types.Type // type arguments that instantiated typeparams. len(typeargs) > 0 => instance of generic function
+ topLevelOrigin *Function // the origin function if this is an instance of a source function. nil if Parent()!=nil.
// The following fields are set transiently during building,
// then cleared.
- currentBlock *BasicBlock // where to emit code
- objects map[types.Object]Value // addresses of local variables
- namedResults []*Alloc // tuple of named results
- targets *targets // linked stack of branch targets
- lblocks map[*ast.Object]*lblock // labelled blocks
- info *types.Info // *types.Info to build from. nil for wrappers.
+ currentBlock *BasicBlock // where to emit code
+ objects map[types.Object]Value // addresses of local variables
+ namedResults []*Alloc // tuple of named results
+ targets *targets // linked stack of branch targets
+ lblocks map[types.Object]*lblock // labelled blocks
+ info *types.Info // *types.Info to build from. nil for wrappers.
+ subst *subster // non-nil => expand generic body using this type substitution of ground types
}
// BasicBlock represents an SSA basic block.
@@ -343,7 +359,6 @@ type Function struct {
//
// The order of Preds and Succs is significant (to Phi and If
// instructions, respectively).
-//
type BasicBlock struct {
Index int // index of this block within Parent().Blocks
Comment string // optional label; no semantic significance
@@ -373,7 +388,6 @@ type BasicBlock struct {
//
// Pos() returns the position of the value that was captured, which
// belongs to an enclosing function.
-//
type FreeVar struct {
name string
typ types.Type
@@ -386,7 +400,6 @@ type FreeVar struct {
}
// A Parameter represents an input parameter of a function.
-//
type Parameter struct {
name string
object types.Object // a *types.Var; nil for non-source locals
@@ -396,26 +409,28 @@ type Parameter struct {
referrers []Instruction
}
-// A Const represents the value of a constant expression.
-//
-// The underlying type of a constant may be any boolean, numeric, or
-// string type. In addition, a Const may represent the nil value of
-// any reference type---interface, map, channel, pointer, slice, or
-// function---but not "untyped nil".
+// A Const represents a value known at build time.
//
-// All source-level constant expressions are represented by a Const
-// of the same type and value.
+// Consts include true constants of boolean, numeric, and string types, as
+// defined by the Go spec; these are represented by a non-nil Value field.
//
-// Value holds the value of the constant, independent of its Type(),
-// using go/constant representation, or nil for a typed nil value.
+// Consts also include the "zero" value of any type, of which the nil values
+// of various pointer-like types are a special case; these are represented
+// by a nil Value field.
//
// Pos() returns token.NoPos.
//
-// Example printed form:
-// 42:int
-// "hello":untyped string
-// 3+4i:MyComplex
-//
+// Example printed forms:
+//
+// 42:int
+// "hello":untyped string
+// 3+4i:MyComplex
+// nil:*int
+// nil:[]string
+// [3]int{}:[3]int
+// struct{x string}{}:struct{x string}
+// 0:interface{int|int64}
+// nil:interface{bool|int} // no go/constant representation
type Const struct {
typ types.Type
Value constant.Value
@@ -426,7 +441,6 @@ type Const struct {
//
// Pos() returns the position of the ast.ValueSpec.Names[*]
// identifier.
-//
type Global struct {
name string
object types.Object // a *types.Var; may be nil for synthetics e.g. init$guard
@@ -445,16 +459,15 @@ type Global struct {
// Go spec (excluding "make" and "new") or one of these ssa-defined
// intrinsics:
//
-// // wrapnilchk returns ptr if non-nil, panics otherwise.
-// // (For use in indirection wrappers.)
-// func ssa:wrapnilchk(ptr *T, recvType, methodName string) *T
+// // wrapnilchk returns ptr if non-nil, panics otherwise.
+// // (For use in indirection wrappers.)
+// func ssa:wrapnilchk(ptr *T, recvType, methodName string) *T
//
// Object() returns a *types.Builtin for built-ins defined by the spec,
// nil for others.
//
// Type() returns a *types.Signature representing the effective
// signature of the built-in for this call.
-//
type Builtin struct {
name string
sig *types.Signature
@@ -489,9 +502,9 @@ type Builtin struct {
// allocates a varargs slice.
//
// Example printed form:
-// t0 = local int
-// t1 = new int
//
+// t0 = local int
+// t1 = new int
type Alloc struct {
register
Comment string
@@ -509,8 +522,8 @@ type Alloc struct {
// during SSA renaming.
//
// Example printed form:
-// t2 = phi [0: t0, 1: t1]
//
+// t2 = phi [0: t0, 1: t1]
type Phi struct {
register
Comment string // a hint as to its purpose
@@ -528,10 +541,10 @@ type Phi struct {
// Pos() returns the ast.CallExpr.Lparen, if explicit in the source.
//
// Example printed form:
-// t2 = println(t0, t1)
-// t4 = t3()
-// t7 = invoke t5.Println(...t6)
//
+// t2 = println(t0, t1)
+// t4 = t3()
+// t7 = invoke t5.Println(...t6)
type Call struct {
register
Call CallCommon
@@ -542,8 +555,8 @@ type Call struct {
// Pos() returns the ast.BinaryExpr.OpPos, if explicit in the source.
//
// Example printed form:
-// t1 = t0 + 1:int
//
+// t1 = t0 + 1:int
type BinOp struct {
register
// One of:
@@ -573,9 +586,9 @@ type BinOp struct {
// specified.
//
// Example printed form:
-// t0 = *x
-// t2 = <-t1,ok
//
+// t0 = *x
+// t2 = <-t1,ok
type UnOp struct {
register
Op token.Token // One of: NOT SUB ARROW MUL XOR ! - <- * ^
@@ -587,20 +600,28 @@ type UnOp struct {
// change to Type().
//
// Type changes are permitted:
-// - between a named type and its underlying type.
-// - between two named types of the same underlying type.
-// - between (possibly named) pointers to identical base types.
-// - from a bidirectional channel to a read- or write-channel,
-// optionally adding/removing a name.
+// - between a named type and its underlying type.
+// - between two named types of the same underlying type.
+// - between (possibly named) pointers to identical base types.
+// - from a bidirectional channel to a read- or write-channel,
+// optionally adding/removing a name.
+// - between a type (t) and an instance of the type (tσ), i.e.
+// Type() == σ(X.Type()) (or X.Type()== σ(Type())) where
+// σ is the type substitution of Parent().TypeParams by
+// Parent().TypeArgs.
//
// This operation cannot fail dynamically.
//
+// Type changes may be to or from a type parameter (or both). All
+// types in the type set of X.Type() have a value-preserving type
+// change to all types in the type set of Type().
+//
// Pos() returns the ast.CallExpr.Lparen, if the instruction arose
// from an explicit conversion in the source.
//
// Example printed form:
-// t1 = changetype *int <- IntPtr (t0)
//
+// t1 = changetype *int <- IntPtr (t0)
type ChangeType struct {
register
X Value
@@ -611,14 +632,19 @@ type ChangeType struct {
//
// A conversion may change the value and representation of its operand.
// Conversions are permitted:
-// - between real numeric types.
-// - between complex numeric types.
-// - between string and []byte or []rune.
-// - between pointers and unsafe.Pointer.
-// - between unsafe.Pointer and uintptr.
-// - from (Unicode) integer to (UTF-8) string.
+// - between real numeric types.
+// - between complex numeric types.
+// - between string and []byte or []rune.
+// - between pointers and unsafe.Pointer.
+// - between unsafe.Pointer and uintptr.
+// - from (Unicode) integer to (UTF-8) string.
+//
// A conversion may imply a type name change also.
//
+// Conversions may be to or from a type parameter. All types in
+// the type set of X.Type() can be converted to all types in the type
+// set of Type().
+//
// This operation cannot fail dynamically.
//
// Conversions of untyped string/number/bool constants to a specific
@@ -628,13 +654,37 @@ type ChangeType struct {
// from an explicit conversion in the source.
//
// Example printed form:
-// t1 = convert []byte <- string (t0)
//
+// t1 = convert []byte <- string (t0)
type Convert struct {
register
X Value
}
+// The MultiConvert instruction yields the conversion of value X to type
+// Type(). Either X.Type() or Type() must be a type parameter. Each
+// type in the type set of X.Type() can be converted to each type in the
+// type set of Type().
+//
+// See the documentation for Convert, ChangeType, and SliceToArrayPointer
+// for the conversions that are permitted. Additionally conversions of
+// slices to arrays are permitted.
+//
+// This operation can fail dynamically (see SliceToArrayPointer).
+//
+// Pos() returns the ast.CallExpr.Lparen, if the instruction arose
+// from an explicit conversion in the source.
+//
+// Example printed form:
+//
+// t1 = multiconvert D <- S (t0) [*[2]rune <- []rune | string <- []rune]
+type MultiConvert struct {
+ register
+ X Value
+ from []*typeparams.Term
+ to []*typeparams.Term
+}
+
// ChangeInterface constructs a value of one interface type from a
// value of another interface type known to be assignable to it.
// This operation cannot fail.
@@ -645,8 +695,8 @@ type Convert struct {
// otherwise.
//
// Example printed form:
-// t1 = change interface interface{} <- I (t0)
//
+// t1 = change interface interface{} <- I (t0)
type ChangeInterface struct {
register
X Value
@@ -658,9 +708,17 @@ type ChangeInterface struct {
// Pos() returns the ast.CallExpr.Lparen, if the instruction arose
// from an explicit conversion in the source.
//
+// Conversions may be to or from a type parameter. All types in
+// the type set of X.Type() must be slice types that can be converted to
+// all types in the type set of Type(), which must all be pointer-to-array
+// types.
+//
+// This operation can fail dynamically if the length of the slice is less
+// than the length of the array.
+//
// Example printed form:
-// t1 = slice to array pointer *[4]byte <- []byte (t0)
//
+// t1 = slice to array pointer *[4]byte <- []byte (t0)
type SliceToArrayPointer struct {
register
X Value
@@ -673,15 +731,16 @@ type SliceToArrayPointer struct {
// of X, and Program.MethodValue(m) to find the implementation of a method.
//
// To construct the zero value of an interface type T, use:
-// NewConst(constant.MakeNil(), T, pos)
+//
+// NewConst(constant.MakeNil(), T, pos)
//
// Pos() returns the ast.CallExpr.Lparen, if the instruction arose
// from an explicit conversion in the source.
//
// Example printed form:
-// t1 = make interface{} <- int (42:int)
-// t2 = make Stringer <- t0
//
+// t1 = make interface{} <- int (42:int)
+// t2 = make Stringer <- t0
type MakeInterface struct {
register
X Value
@@ -696,9 +755,9 @@ type MakeInterface struct {
// closure or the ast.SelectorExpr.Sel for a bound method closure.
//
// Example printed form:
-// t0 = make closure anon@1.2 [x y z]
-// t1 = make closure bound$(main.I).add [i]
//
+// t0 = make closure anon@1.2 [x y z]
+// t1 = make closure bound$(main.I).add [i]
type MakeClosure struct {
register
Fn Value // always a *Function
@@ -714,9 +773,9 @@ type MakeClosure struct {
// the ast.CompositeLit.Lbrack if created by a literal.
//
// Example printed form:
-// t1 = make map[string]int t0
-// t1 = make StringIntMap t0
//
+// t1 = make map[string]int t0
+// t1 = make StringIntMap t0
type MakeMap struct {
register
Reserve Value // initial space reservation; nil => default
@@ -731,9 +790,9 @@ type MakeMap struct {
// created it.
//
// Example printed form:
-// t0 = make chan int 0
-// t0 = make IntChan 0
//
+// t0 = make chan int 0
+// t0 = make IntChan 0
type MakeChan struct {
register
Size Value // int; size of buffer; zero => synchronous.
@@ -753,9 +812,9 @@ type MakeChan struct {
// created it.
//
// Example printed form:
-// t1 = make []string 1:int t0
-// t1 = make StringSlice 1:int t0
//
+// t1 = make []string 1:int t0
+// t1 = make StringSlice 1:int t0
type MakeSlice struct {
register
Len Value
@@ -776,8 +835,8 @@ type MakeSlice struct {
// NoPos if not explicit in the source (e.g. a variadic argument slice).
//
// Example printed form:
-// t1 = slice t0[1:]
//
+// t1 = slice t0[1:]
type Slice struct {
register
X Value // slice, string, or *array
@@ -795,15 +854,18 @@ type Slice struct {
// Type() returns a (possibly named) *types.Pointer.
//
// Pos() returns the position of the ast.SelectorExpr.Sel for the
-// field, if explicit in the source.
+// field, if explicit in the source. For implicit selections, returns
+// the position of the inducing explicit selection. If produced for a
+// struct literal S{f: e}, it returns the position of the colon; for
+// S{e} it returns the start of expression e.
//
// Example printed form:
-// t1 = &t0.name [#1]
//
+// t1 = &t0.name [#1]
type FieldAddr struct {
register
X Value // *struct
- Field int // field is X.Type().Underlying().(*types.Pointer).Elem().Underlying().(*types.Struct).Field(Field)
+ Field int // field is typeparams.CoreType(X.Type().Underlying().(*types.Pointer).Elem()).(*types.Struct).Field(Field)
}
// The Field instruction yields the Field of struct X.
@@ -813,22 +875,23 @@ type FieldAddr struct {
// package-local identifiers and permit compact representations.
//
// Pos() returns the position of the ast.SelectorExpr.Sel for the
-// field, if explicit in the source.
-//
+// field, if explicit in the source. For implicit selections, returns
+// the position of the inducing explicit selection.
+//
// Example printed form:
-// t1 = t0.name [#1]
//
+// t1 = t0.name [#1]
type Field struct {
register
X Value // struct
- Field int // index into X.Type().(*types.Struct).Fields
+ Field int // index into typeparams.CoreType(X.Type()).(*types.Struct).Fields
}
// The IndexAddr instruction yields the address of the element at
// index Index of collection X. Index is an integer expression.
//
-// The elements of maps and strings are not addressable; use Lookup or
-// MapUpdate instead.
+// The elements of maps and strings are not addressable; use Lookup (map),
+// Index (string), or MapUpdate instead.
//
// Dynamically, this instruction panics if X evaluates to a nil *array
// pointer.
@@ -839,31 +902,32 @@ type Field struct {
// explicit in the source.
//
// Example printed form:
-// t2 = &t0[t1]
//
+// t2 = &t0[t1]
type IndexAddr struct {
register
- X Value // slice or *array,
+ X Value // *array, slice or type parameter with types array, *array, or slice.
Index Value // numeric index
}
-// The Index instruction yields element Index of array X.
+// The Index instruction yields element Index of collection X, an array,
+// string or type parameter containing an array, a string, a pointer to an
+// array, or a slice.
//
// Pos() returns the ast.IndexExpr.Lbrack for the index operation, if
// explicit in the source.
//
// Example printed form:
-// t2 = t0[t1]
//
+// t2 = t0[t1]
type Index struct {
register
- X Value // array
+ X Value // array, string or type parameter with types array, *array, slice, or string.
Index Value // integer index
}
-// The Lookup instruction yields element Index of collection X, a map
-// or string. Index is an integer expression if X is a string or the
-// appropriate key type if X is a map.
+// The Lookup instruction yields element Index of collection map X.
+// Index is the appropriate key type.
//
// If CommaOk, the result is a 2-tuple of the value above and a
// boolean indicating the result of a map membership test for the key.
@@ -872,19 +936,18 @@ type Index struct {
// Pos() returns the ast.IndexExpr.Lbrack, if explicit in the source.
//
// Example printed form:
-// t2 = t0[t1]
-// t5 = t3[t4],ok
//
+// t2 = t0[t1]
+// t5 = t3[t4],ok
type Lookup struct {
register
- X Value // string or map
- Index Value // numeric or key-typed index
+ X Value // map
+ Index Value // key-typed index
CommaOk bool // return a value,ok pair
}
// SelectState is a helper for Select.
// It represents one goal state and its corresponding communication.
-//
type SelectState struct {
Dir types.ChanDir // direction of case (SendOnly or RecvOnly)
Chan Value // channel to use (for send or receive)
@@ -899,7 +962,9 @@ type SelectState struct {
// Let n be the number of States for which Dir==RECV and T_i (0<=i<n)
// be the element type of each such state's Chan.
// Select returns an n+2-tuple
-// (index int, recvOk bool, r_0 T_0, ... r_n-1 T_n-1)
+//
+// (index int, recvOk bool, r_0 T_0, ... r_n-1 T_n-1)
+//
// The tuple's components, described below, must be accessed via the
// Extract instruction.
//
@@ -925,9 +990,9 @@ type SelectState struct {
// Pos() returns the ast.SelectStmt.Select.
//
// Example printed form:
-// t3 = select nonblocking [<-t0, t1<-t2]
-// t4 = select blocking []
//
+// t3 = select nonblocking [<-t0, t1<-t2]
+// t4 = select blocking []
type Select struct {
register
States []*SelectState
@@ -944,8 +1009,8 @@ type Select struct {
// Pos() returns the ast.RangeStmt.For.
//
// Example printed form:
-// t0 = range "hello":string
//
+// t0 = range "hello":string
type Range struct {
register
X Value // string or map
@@ -967,8 +1032,8 @@ type Range struct {
// The types of k and/or v may be types.Invalid.
//
// Example printed form:
-// t1 = next t0
//
+// t1 = next t0
type Next struct {
register
Iter Value
@@ -986,6 +1051,9 @@ type Next struct {
// is AssertedType's zero value. The components of the pair must be
// accessed using the Extract instruction.
//
+// If Underlying: tests whether interface value X has the underlying
+// type AssertedType.
+//
// If AssertedType is a concrete type, TypeAssert checks whether the
// dynamic type in interface X is equal to it, and if so, the result
// of the conversion is a copy of the value in the interface.
@@ -1007,9 +1075,9 @@ type Next struct {
// type-switch statement.
//
// Example printed form:
-// t1 = typeassert t0.(int)
-// t3 = typeassert,ok t2.(T)
//
+// t1 = typeassert t0.(int)
+// t3 = typeassert,ok t2.(T)
type TypeAssert struct {
register
X Value
@@ -1024,8 +1092,8 @@ type TypeAssert struct {
// IndexExpr(Map).
//
// Example printed form:
-// t1 = extract t0 #1
//
+// t1 = extract t0 #1
type Extract struct {
register
Tuple Value
@@ -1042,8 +1110,8 @@ type Extract struct {
// Pos() returns NoPos.
//
// Example printed form:
-// jump done
//
+// jump done
type Jump struct {
anInstruction
}
@@ -1058,8 +1126,8 @@ type Jump struct {
// Pos() returns NoPos.
//
// Example printed form:
-// if t0 goto done else body
//
+// if t0 goto done else body
type If struct {
anInstruction
Cond Value
@@ -1084,9 +1152,9 @@ type If struct {
// Pos() returns the ast.ReturnStmt.Return, if explicit in the source.
//
// Example printed form:
-// return
-// return nil:I, 2:int
//
+// return
+// return nil:I, 2:int
type Return struct {
anInstruction
Results []Value
@@ -1103,8 +1171,8 @@ type Return struct {
// Pos() returns NoPos.
//
// Example printed form:
-// rundefers
//
+// rundefers
type RunDefers struct {
anInstruction
}
@@ -1121,8 +1189,8 @@ type RunDefers struct {
// in the source.
//
// Example printed form:
-// panic t0
//
+// panic t0
type Panic struct {
anInstruction
X Value // an interface{}
@@ -1137,10 +1205,10 @@ type Panic struct {
// Pos() returns the ast.GoStmt.Go.
//
// Example printed form:
-// go println(t0, t1)
-// go t3()
-// go invoke t5.Println(...t6)
//
+// go println(t0, t1)
+// go t3()
+// go invoke t5.Println(...t6)
type Go struct {
anInstruction
Call CallCommon
@@ -1155,10 +1223,10 @@ type Go struct {
// Pos() returns the ast.DeferStmt.Defer.
//
// Example printed form:
-// defer println(t0, t1)
-// defer t3()
-// defer invoke t5.Println(...t6)
//
+// defer println(t0, t1)
+// defer t3()
+// defer invoke t5.Println(...t6)
type Defer struct {
anInstruction
Call CallCommon
@@ -1170,8 +1238,8 @@ type Defer struct {
// Pos() returns the ast.SendStmt.Arrow, if explicit in the source.
//
// Example printed form:
-// send t0 <- t1
//
+// send t0 <- t1
type Send struct {
anInstruction
Chan, X Value
@@ -1187,8 +1255,8 @@ type Send struct {
// implementation choices, the details are not specified.
//
// Example printed form:
-// *x = y
//
+// *x = y
type Store struct {
anInstruction
Addr Value
@@ -1203,8 +1271,8 @@ type Store struct {
// if explicit in the source.
//
// Example printed form:
-// t0[t1] = t2
//
+// t0[t1] = t2
type MapUpdate struct {
anInstruction
Map Value
@@ -1242,11 +1310,12 @@ type MapUpdate struct {
// ordinary SSA renaming machinery.)
//
// Example printed form:
-// ; *ast.CallExpr @ 102:9 is t5
-// ; var x float64 @ 109:72 is x
-// ; address of *ast.CompositeLit @ 216:10 is t0
//
+// ; *ast.CallExpr @ 102:9 is t5
+// ; var x float64 @ 109:72 is x
+// ; address of *ast.CompositeLit @ 216:10 is t0
type DebugRef struct {
+ // TODO(generics): Reconsider what DebugRefs are for generics.
anInstruction
Expr ast.Expr // the referring expression (never *ast.ParenExpr)
object types.Object // the identity of the source var/func
@@ -1268,7 +1337,6 @@ type DebugRef struct {
// from it) is unique within a function. As always in this API,
// semantics are determined only by identity; names exist only to
// facilitate debugging.
-//
type register struct {
anInstruction
num int // "name" of virtual register, e.g. "t0". Not guaranteed unique.
@@ -1295,15 +1363,17 @@ type anInstruction struct {
// 'func'.
//
// Value may be one of:
-// (a) a *Function, indicating a statically dispatched call
-// to a package-level function, an anonymous function, or
-// a method of a named type.
-// (b) a *MakeClosure, indicating an immediately applied
-// function literal with free variables.
-// (c) a *Builtin, indicating a statically dispatched call
-// to a built-in function.
-// (d) any other value, indicating a dynamically dispatched
-// function call.
+//
+// (a) a *Function, indicating a statically dispatched call
+// to a package-level function, an anonymous function, or
+// a method of a named type.
+// (b) a *MakeClosure, indicating an immediately applied
+// function literal with free variables.
+// (c) a *Builtin, indicating a statically dispatched call
+// to a built-in function.
+// (d) any other value, indicating a dynamically dispatched
+// function call.
+//
// StaticCallee returns the identity of the callee in cases
// (a) and (b), nil otherwise.
//
@@ -1311,29 +1381,31 @@ type anInstruction struct {
// Args[0] contains the receiver parameter.
//
// Example printed form:
-// t2 = println(t0, t1)
-// go t3()
+//
+// t2 = println(t0, t1)
+// go t3()
// defer t5(...t6)
//
// 2. "invoke" mode: when Method is non-nil (IsInvoke), a CallCommon
// represents a dynamically dispatched call to an interface method.
// In this mode, Value is the interface value and Method is the
-// interface's abstract method. Note: an abstract method may be
-// shared by multiple interfaces due to embedding; Value.Type()
-// provides the specific interface used for this call.
+// interface's abstract method. The interface value may be a type
+// parameter. Note: an abstract method may be shared by multiple
+// interfaces due to embedding; Value.Type() provides the specific
+// interface used for this call.
//
// Value is implicitly supplied to the concrete method implementation
// as the receiver parameter; in other words, Args[0] holds not the
// receiver but the first true argument.
//
// Example printed form:
-// t1 = invoke t0.String()
-// go invoke t3.Run(t2)
-// defer invoke t4.Handle(...t5)
+//
+// t1 = invoke t0.String()
+// go invoke t3.Run(t2)
+// defer invoke t4.Handle(...t5)
//
// For all calls to variadic functions (Signature().Variadic()),
// the last element of Args is a slice.
-//
type CallCommon struct {
Value Value // receiver (invoke mode) or func value (call mode)
Method *types.Func // abstract method (invoke mode)
@@ -1355,12 +1427,11 @@ func (c *CallCommon) Pos() token.Pos { return c.pos }
//
// In either "call" or "invoke" mode, if the callee is a method, its
// receiver is represented by sig.Recv, not sig.Params().At(0).
-//
func (c *CallCommon) Signature() *types.Signature {
if c.Method != nil {
return c.Method.Type().(*types.Signature)
}
- return c.Value.Type().Underlying().(*types.Signature)
+ return typeparams.CoreType(c.Value.Type()).(*types.Signature)
}
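Putting the two CallCommon modes together, here is a small sketch of how a client might classify a call site using only the exported API described above (describeCall is an invented helper; a caller would typically range over fn.Blocks and each block's Instrs, type-asserting each instruction to ssa.CallInstruction):

package callinfo

import "golang.org/x/tools/go/ssa"

// describeCall classifies one call site according to the "call" vs
// "invoke" modes documented on CallCommon.
func describeCall(call ssa.CallInstruction) string {
	common := call.Common()
	if common.IsInvoke() {
		// Dynamically dispatched interface (or type-parameter) method call.
		return "invoke " + common.Method.FullName()
	}
	if callee := common.StaticCallee(); callee != nil {
		// Statically dispatched: a *Function or an immediately applied *MakeClosure.
		return "static call to " + callee.String()
	}
	return "dynamic call through " + common.Value.Name()
}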
// StaticCallee returns the callee if this is a trivially static
@@ -1398,7 +1469,6 @@ func (c *CallCommon) Description() string {
// The CallInstruction interface, implemented by *Go, *Defer and *Call,
// exposes the common parts of function-calling instructions,
// yet provides a way back to the Value defined by *Call alone.
-//
type CallInstruction interface {
Instruction
Common() *CallCommon // returns the common parts of the call
@@ -1452,6 +1522,29 @@ func (v *Function) Referrers() *[]Instruction {
return nil
}
+// TypeParams are the function's type parameters if generic or the
+// type parameters that were instantiated if fn is an instantiation.
+//
+// TODO(taking): declare result type as *types.TypeParamList
+// after we drop support for go1.17.
+func (fn *Function) TypeParams() *typeparams.TypeParamList {
+ return fn.typeparams
+}
+
+// TypeArgs are the types that TypeParams() were instantiated by to create fn
+// from fn.Origin().
+func (fn *Function) TypeArgs() []types.Type { return fn.typeargs }
+
+// Origin is the function fn is an instantiation of. Returns nil if fn is not
+// an instantiation.
+func (fn *Function) Origin() *Function {
+ if fn.parent != nil && len(fn.typeargs) > 0 {
+ // Nested functions are BUILT at a different time than their instances.
+ return fn.parent.Origin().AnonFuncs[fn.anonIdx]
+ }
+ return fn.topLevelOrigin
+}
+
func (v *Parameter) Type() types.Type { return v.typ }
func (v *Parameter) Name() string { return v.name }
func (v *Parameter) Object() types.Object { return v.object }
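
A small sketch of how these new accessors look from client code, assuming a generics-capable toolchain (the package source below is invented for illustration; the exact result type of TypeParams varies with the x/tools version, as the TODO above notes, but Len is available either way). With ssa.InstantiateGenerics the program contains both the generic Max and its Max[int] instance, and Origin/TypeArgs relate the two.

    package main

    import (
        "fmt"
        "go/ast"
        "go/parser"
        "go/token"
        "go/types"

        "golang.org/x/tools/go/ssa"
        "golang.org/x/tools/go/ssa/ssautil"
    )

    const src = `package p

    func Max[T int | float64](a, b T) T {
        if a > b {
            return a
        }
        return b
    }

    func Use() int { return Max[int](1, 2) }
    `

    func main() {
        fset := token.NewFileSet()
        file, err := parser.ParseFile(fset, "p.go", src, 0)
        if err != nil {
            panic(err)
        }
        pkg := types.NewPackage("p", "")
        ssapkg, _, err := ssautil.BuildPackage(new(types.Config), fset, pkg, []*ast.File{file},
            ssa.InstantiateGenerics|ssa.SanityCheckFunctions)
        if err != nil {
            panic(err)
        }
        prog := ssapkg.Prog
        prog.Build()
        for fn := range ssautil.AllFunctions(prog) {
            switch {
            case fn.TypeParams().Len() > 0 && len(fn.TypeArgs()) == 0:
                fmt.Println("generic origin:", fn)
            case len(fn.TypeArgs()) > 0:
                fmt.Printf("instance %v of %v with %v\n", fn, fn.Origin(), fn.TypeArgs())
            }
        }
    }
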
@@ -1498,7 +1591,6 @@ func (d *DebugRef) Object() types.Object { return d.object }
// Func returns the package-level function of the specified name,
// or nil if not found.
-//
func (p *Package) Func(name string) (f *Function) {
f, _ = p.Members[name].(*Function)
return
@@ -1506,7 +1598,6 @@ func (p *Package) Func(name string) (f *Function) {
// Var returns the package-level variable of the specified name,
// or nil if not found.
-//
func (p *Package) Var(name string) (g *Global) {
g, _ = p.Members[name].(*Global)
return
@@ -1514,7 +1605,6 @@ func (p *Package) Var(name string) (g *Global) {
// Const returns the package-level constant of the specified name,
// or nil if not found.
-//
func (p *Package) Const(name string) (c *NamedConst) {
c, _ = p.Members[name].(*NamedConst)
return
@@ -1522,7 +1612,6 @@ func (p *Package) Const(name string) (c *NamedConst) {
// Type returns the package-level type of the specified name,
// or nil if not found.
-//
func (p *Package) Type(name string) (t *Type) {
t, _ = p.Members[name].(*Type)
return
@@ -1583,6 +1672,10 @@ func (v *Convert) Operands(rands []*Value) []*Value {
return append(rands, &v.X)
}
+func (v *MultiConvert) Operands(rands []*Value) []*Value {
+ return append(rands, &v.X)
+}
+
func (v *SliceToArrayPointer) Operands(rands []*Value) []*Value {
return append(rands, &v.X)
}
diff --git a/go/ssa/ssautil/load.go b/go/ssa/ssautil/load.go
index 88d7c8f49..96d69a20a 100644
--- a/go/ssa/ssautil/load.go
+++ b/go/ssa/ssautil/load.go
@@ -34,7 +34,6 @@ import (
// packages with well-typed syntax trees.
//
// The mode parameter controls diagnostics and checking during SSA construction.
-//
func Packages(initial []*packages.Package, mode ssa.BuilderMode) (*ssa.Program, []*ssa.Package) {
return doPackages(initial, mode, false)
}
@@ -56,7 +55,6 @@ func Packages(initial []*packages.Package, mode ssa.BuilderMode) (*ssa.Program,
// well-typed syntax trees.
//
// The mode parameter controls diagnostics and checking during SSA construction.
-//
func AllPackages(initial []*packages.Package, mode ssa.BuilderMode) (*ssa.Program, []*ssa.Package) {
return doPackages(initial, mode, true)
}
@@ -79,10 +77,12 @@ func doPackages(initial []*packages.Package, mode ssa.BuilderMode, deps bool) (*
packages.Visit(initial, nil, func(p *packages.Package) {
if p.Types != nil && !p.IllTyped {
var files []*ast.File
+ var info *types.Info
if deps || isInitial[p] {
files = p.Syntax
+ info = p.TypesInfo
}
- ssamap[p] = prog.CreatePackage(p.Types, files, p.TypesInfo, true)
+ ssamap[p] = prog.CreatePackage(p.Types, files, info, true)
}
})
@@ -104,7 +104,6 @@ func doPackages(initial []*packages.Package, mode ssa.BuilderMode, deps bool) (*
//
// Deprecated: Use golang.org/x/tools/go/packages and the Packages
// function instead; see ssa.Example_loadPackages.
-//
func CreateProgram(lprog *loader.Program, mode ssa.BuilderMode) *ssa.Program {
prog := ssa.NewProgram(lprog.Fset, mode)
@@ -131,7 +130,6 @@ func CreateProgram(lprog *loader.Program, mode ssa.BuilderMode) *ssa.Program {
// The operation fails if there were any type-checking or import errors.
//
// See ../example_test.go for an example.
-//
func BuildPackage(tc *types.Config, fset *token.FileSet, pkg *types.Package, files []*ast.File, mode ssa.BuilderMode) (*ssa.Package, *types.Info, error) {
if fset == nil {
panic("no token.FileSet")
diff --git a/go/ssa/ssautil/load_test.go b/go/ssa/ssautil/load_test.go
index 55684e0a6..efa2ba40a 100644
--- a/go/ssa/ssautil/load_test.go
+++ b/go/ssa/ssautil/load_test.go
@@ -12,10 +12,13 @@ import (
"go/token"
"go/types"
"os"
+ "path"
"strings"
"testing"
"golang.org/x/tools/go/packages"
+ "golang.org/x/tools/go/packages/packagestest"
+ "golang.org/x/tools/go/ssa"
"golang.org/x/tools/go/ssa/ssautil"
"golang.org/x/tools/internal/testenv"
)
@@ -30,6 +33,8 @@ func main() {
`
func TestBuildPackage(t *testing.T) {
+ testenv.NeedsGoBuild(t) // for importer.Default()
+
// There is a more substantial test of BuildPackage and the
// SSA program it builds in ../ssa/builder_test.go.
@@ -39,17 +44,23 @@ func TestBuildPackage(t *testing.T) {
t.Fatal(err)
}
- pkg := types.NewPackage("hello", "")
- ssapkg, _, err := ssautil.BuildPackage(&types.Config{Importer: importer.Default()}, fset, pkg, []*ast.File{f}, 0)
- if err != nil {
- t.Fatal(err)
- }
- if pkg.Name() != "main" {
- t.Errorf("pkg.Name() = %s, want main", pkg.Name())
- }
- if ssapkg.Func("main") == nil {
- ssapkg.WriteTo(os.Stderr)
- t.Errorf("ssapkg has no main function")
+ for _, mode := range []ssa.BuilderMode{
+ ssa.SanityCheckFunctions,
+ ssa.InstantiateGenerics | ssa.SanityCheckFunctions,
+ } {
+ pkg := types.NewPackage("hello", "")
+ ssapkg, _, err := ssautil.BuildPackage(&types.Config{Importer: importer.Default()}, fset, pkg, []*ast.File{f}, mode)
+ if err != nil {
+ t.Fatal(err)
+ }
+ if pkg.Name() != "main" {
+ t.Errorf("pkg.Name() = %s, want main", pkg.Name())
+ }
+ if ssapkg.Func("main") == nil {
+ ssapkg.WriteTo(os.Stderr)
+ t.Errorf("ssapkg has no main function")
+ }
+
}
}
@@ -65,19 +76,23 @@ func TestPackages(t *testing.T) {
t.Fatal("there were errors")
}
- prog, pkgs := ssautil.Packages(initial, 0)
- bytesNewBuffer := pkgs[0].Func("NewBuffer")
- bytesNewBuffer.Pkg.Build()
+ for _, mode := range []ssa.BuilderMode{
+ ssa.SanityCheckFunctions,
+ ssa.SanityCheckFunctions | ssa.InstantiateGenerics,
+ } {
+ prog, pkgs := ssautil.Packages(initial, mode)
+ bytesNewBuffer := pkgs[0].Func("NewBuffer")
+ bytesNewBuffer.Pkg.Build()
- // We'll dump the SSA of bytes.NewBuffer because it is small and stable.
- out := new(bytes.Buffer)
- bytesNewBuffer.WriteTo(out)
+ // We'll dump the SSA of bytes.NewBuffer because it is small and stable.
+ out := new(bytes.Buffer)
+ bytesNewBuffer.WriteTo(out)
- // For determinism, sanitize the location.
- location := prog.Fset.Position(bytesNewBuffer.Pos()).String()
- got := strings.Replace(out.String(), location, "$GOROOT/src/bytes/buffer.go:1", -1)
+ // For determinism, sanitize the location.
+ location := prog.Fset.Position(bytesNewBuffer.Pos()).String()
+ got := strings.Replace(out.String(), location, "$GOROOT/src/bytes/buffer.go:1", -1)
- want := `
+ want := `
# Name: bytes.NewBuffer
# Package: bytes
# Location: $GOROOT/src/bytes/buffer.go:1
@@ -89,8 +104,9 @@ func NewBuffer(buf []byte) *Buffer:
return t0
`[1:]
- if got != want {
- t.Errorf("bytes.NewBuffer SSA = <<%s>>, want <<%s>>", got, want)
+ if got != want {
+ t.Errorf("bytes.NewBuffer SSA = <<%s>>, want <<%s>>", got, want)
+ }
}
}
@@ -102,7 +118,7 @@ func TestBuildPackage_MissingImport(t *testing.T) {
}
pkg := types.NewPackage("bad", "")
- ssapkg, _, err := ssautil.BuildPackage(new(types.Config), fset, pkg, []*ast.File{f}, 0)
+ ssapkg, _, err := ssautil.BuildPackage(new(types.Config), fset, pkg, []*ast.File{f}, ssa.BuilderMode(0))
if err == nil || ssapkg != nil {
t.Fatal("BuildPackage succeeded unexpectedly")
}
@@ -120,6 +136,60 @@ func TestIssue28106(t *testing.T) {
if err != nil {
t.Fatal(err)
}
- prog, _ := ssautil.Packages(pkgs, 0)
+ prog, _ := ssautil.Packages(pkgs, ssa.BuilderMode(0))
prog.Build() // no crash
}
+
+func TestIssue53604(t *testing.T) {
+ // Tests that variable initializers are not added to init() when syntax
+ // is not present but types.Info is available.
+ //
+ // Packages x, y, z are loaded with mode `packages.LoadSyntax`.
+ // Package x imports y, and y imports z.
+ // Packages are built using ssautil.Packages() with x and z as roots.
+ // This setup creates y using CreatePackage(pkg, files, info, ...)
+ // where len(files) == 0 but info != nil.
+ //
+ // Tests that globals from y are not initialized.
+ e := packagestest.Export(t, packagestest.Modules, []packagestest.Module{
+ {
+ Name: "golang.org/fake",
+ Files: map[string]interface{}{
+ "x/x.go": `package x; import "golang.org/fake/y"; var V = y.F()`,
+ "y/y.go": `package y; import "golang.org/fake/z"; var F = func () *int { return &z.Z } `,
+ "z/z.go": `package z; var Z int`,
+ },
+ },
+ })
+ defer e.Cleanup()
+
+ // Load x and z as entry packages using packages.LoadSyntax
+ e.Config.Mode = packages.LoadSyntax
+ pkgs, err := packages.Load(e.Config, path.Join(e.Temp(), "fake/x"), path.Join(e.Temp(), "fake/z"))
+ if err != nil {
+ t.Fatal(err)
+ }
+ for _, p := range pkgs {
+ if len(p.Errors) > 0 {
+ t.Fatalf("%v", p.Errors)
+ }
+ }
+
+ prog, _ := ssautil.Packages(pkgs, ssa.BuilderMode(0))
+ prog.Build()
+
+ // y does not initialize F.
+ y := prog.ImportedPackage("golang.org/fake/y")
+ if y == nil {
+ t.Fatal("Failed to load intermediate package y")
+ }
+ yinit := y.Members["init"].(*ssa.Function)
+ for _, bb := range yinit.Blocks {
+ for _, i := range bb.Instrs {
+ if store, ok := i.(*ssa.Store); ok && store.Addr == y.Var("F") {
+ t.Errorf("y.init() stores to F %v", store)
+ }
+ }
+ }
+
+}
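
The fix above comes down to which arguments reach CreatePackage for packages that have no syntax. A hedged sketch of that call shape, using the standard importer to obtain a types-only package (the package choice is arbitrary, and a working go toolchain is assumed, as the tests above also require): with nil files and nil info, only member declarations are created and init() stays empty.

    package main

    import (
        "fmt"
        "go/importer"
        "go/token"

        "golang.org/x/tools/go/ssa"
    )

    func main() {
        // A dependency for which we have type information but no syntax trees.
        dep, err := importer.Default().Import("strings")
        if err != nil {
            panic(err)
        }
        prog := ssa.NewProgram(token.NewFileSet(), ssa.BuilderMode(0))
        // No files and no info: members are declared, but no function bodies
        // are built and no spurious global initializers appear in init().
        ssadep := prog.CreatePackage(dep, nil, nil, true)
        ssadep.Build()
        fmt.Println(ssadep.Pkg.Path(), "members:", len(ssadep.Members))
    }
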
diff --git a/go/ssa/ssautil/switch.go b/go/ssa/ssautil/switch.go
index db03bf555..dd4b04e76 100644
--- a/go/ssa/ssautil/switch.go
+++ b/go/ssa/ssautil/switch.go
@@ -55,7 +55,6 @@ type TypeCase struct {
// A type switch may contain duplicate types, or types assignable
// to an interface type also in the list.
// TODO(adonovan): eliminate such duplicates.
-//
type Switch struct {
Start *ssa.BasicBlock // block containing start of if/else chain
X ssa.Value // the switch operand
@@ -103,7 +102,6 @@ func (sw *Switch) String() string {
// Switches may even be inferred from if/else- or goto-based control flow.
// (In general, the control flow constructs of the source program
// cannot be faithfully reproduced from the SSA representation.)
-//
func Switches(fn *ssa.Function) []Switch {
// Traverse the CFG in dominance order, so we don't
// enter an if/else-chain in the middle.
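
A small usage sketch (the input program is invented for illustration): build a single function and print any multiway branches the heuristic reconstructs.

    package main

    import (
        "fmt"
        "go/ast"
        "go/parser"
        "go/token"
        "go/types"

        "golang.org/x/tools/go/ssa"
        "golang.org/x/tools/go/ssa/ssautil"
    )

    const src = `package p

    func Classify(x int) string {
        switch x {
        case 1:
            return "one"
        case 2:
            return "two"
        }
        return "many"
    }
    `

    func main() {
        fset := token.NewFileSet()
        file, err := parser.ParseFile(fset, "p.go", src, 0)
        if err != nil {
            panic(err)
        }
        pkg := types.NewPackage("p", "")
        ssapkg, _, err := ssautil.BuildPackage(new(types.Config), fset, pkg, []*ast.File{file}, ssa.SanityCheckFunctions)
        if err != nil {
            panic(err)
        }
        for _, sw := range ssautil.Switches(ssapkg.Func("Classify")) {
            fmt.Print(sw.String()) // the reconstructed const/type switch
        }
    }
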
@@ -197,7 +195,6 @@ func typeSwitch(sw *Switch, y ssa.Value, T types.Type, seen map[*ssa.BasicBlock]
// isComparisonBlock returns the operands (v, k) if a block ends with
// a comparison v==k, where k is a compile-time constant.
-//
func isComparisonBlock(b *ssa.BasicBlock) (v ssa.Value, k *ssa.Const) {
if n := len(b.Instrs); n >= 2 {
if i, ok := b.Instrs[n-1].(*ssa.If); ok {
@@ -216,7 +213,6 @@ func isComparisonBlock(b *ssa.BasicBlock) (v ssa.Value, k *ssa.Const) {
// isTypeAssertBlock returns the operands (y, x, T) if a block ends with
// a type assertion "if y, ok := x.(T); ok {".
-//
func isTypeAssertBlock(b *ssa.BasicBlock) (y, x ssa.Value, T types.Type) {
if n := len(b.Instrs); n >= 4 {
if i, ok := b.Instrs[n-1].(*ssa.If); ok {
diff --git a/go/ssa/ssautil/switch_test.go b/go/ssa/ssautil/switch_test.go
index bad8bdd6a..6db410524 100644
--- a/go/ssa/ssautil/switch_test.go
+++ b/go/ssa/ssautil/switch_test.go
@@ -34,7 +34,7 @@ func TestSwitches(t *testing.T) {
return
}
- prog := ssautil.CreateProgram(iprog, 0)
+ prog := ssautil.CreateProgram(iprog, ssa.BuilderMode(0))
mainPkg := prog.Package(iprog.Created[0].Pkg)
mainPkg.Build()
diff --git a/go/ssa/ssautil/visit.go b/go/ssa/ssautil/visit.go
index 3424e8a30..5f27050b0 100644
--- a/go/ssa/ssautil/visit.go
+++ b/go/ssa/ssautil/visit.go
@@ -18,7 +18,6 @@ import "golang.org/x/tools/go/ssa"
// synthetic wrappers.
//
// Precondition: all packages are built.
-//
func AllFunctions(prog *ssa.Program) map[*ssa.Function]bool {
visit := visitor{
prog: prog,
diff --git a/go/ssa/stdlib_test.go b/go/ssa/stdlib_test.go
index aaa158076..8b9f4238d 100644
--- a/go/ssa/stdlib_test.go
+++ b/go/ssa/stdlib_test.go
@@ -21,12 +21,10 @@ import (
"testing"
"time"
- "golang.org/x/tools/go/ast/inspector"
"golang.org/x/tools/go/packages"
"golang.org/x/tools/go/ssa"
"golang.org/x/tools/go/ssa/ssautil"
"golang.org/x/tools/internal/testenv"
- "golang.org/x/tools/internal/typeparams/genericfeatures"
)
func bytesAllocated() uint64 {
@@ -51,22 +49,6 @@ func TestStdlib(t *testing.T) {
if err != nil {
t.Fatal(err)
}
- var nonGeneric int
- for i := 0; i < len(pkgs); i++ {
- pkg := pkgs[i]
- inspect := inspector.New(pkg.Syntax)
- features := genericfeatures.ForPackage(inspect, pkg.TypesInfo)
- // Skip standard library packages that use generics. This won't be
- // sufficient if any standard library packages start _importing_ packages
- // that use generics.
- if features != 0 {
- t.Logf("skipping package %q which uses generics", pkg.PkgPath)
- continue
- }
- pkgs[nonGeneric] = pkg
- nonGeneric++
- }
- pkgs = pkgs[:nonGeneric]
t1 := time.Now()
alloc1 := bytesAllocated()
@@ -76,6 +58,7 @@ func TestStdlib(t *testing.T) {
// Comment out these lines during benchmarking. Approx SSA build costs are noted.
mode |= ssa.SanityCheckFunctions // + 2% space, + 4% time
mode |= ssa.GlobalDebug // +30% space, +18% time
+ mode |= ssa.InstantiateGenerics // + 0% space, + 2% time (unlikely to reproduce outside of stdlib)
prog, _ := ssautil.Packages(pkgs, mode)
t2 := time.Now()
diff --git a/go/ssa/subst.go b/go/ssa/subst.go
index 0e9263fd2..7efab3578 100644
--- a/go/ssa/subst.go
+++ b/go/ssa/subst.go
@@ -1,10 +1,10 @@
// Copyright 2022 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
+
package ssa
import (
- "fmt"
"go/types"
"golang.org/x/tools/internal/typeparams"
@@ -20,37 +20,40 @@ import (
type subster struct {
replacements map[*typeparams.TypeParam]types.Type // values should contain no type params
cache map[types.Type]types.Type // cache of subst results
- ctxt *typeparams.Context
- debug bool // perform extra debugging checks
+ ctxt *typeparams.Context // cache for instantiation
+ scope *types.Scope // *types.Named declared within this scope can be substituted (optional)
+ debug bool // perform extra debugging checks
// TODO(taking): consider adding Pos
+ // TODO(zpavlinovic): replacements can contain type params
+ // when generating instances inside of a generic function body.
}
// Returns a subster that replaces tparams[i] with targs[i]. Uses ctxt as a cache.
// targs should not contain any types in tparams.
-func makeSubster(ctxt *typeparams.Context, tparams []*typeparams.TypeParam, targs []types.Type, debug bool) *subster {
- assert(len(tparams) == len(targs), "makeSubster argument count must match")
+// scope is the (optional) lexical block of the generic function for which we are substituting.
+func makeSubster(ctxt *typeparams.Context, scope *types.Scope, tparams *typeparams.TypeParamList, targs []types.Type, debug bool) *subster {
+ assert(tparams.Len() == len(targs), "makeSubster argument count must match")
subst := &subster{
- replacements: make(map[*typeparams.TypeParam]types.Type, len(tparams)),
+ replacements: make(map[*typeparams.TypeParam]types.Type, tparams.Len()),
cache: make(map[types.Type]types.Type),
ctxt: ctxt,
+ scope: scope,
debug: debug,
}
- for i, tpar := range tparams {
- subst.replacements[tpar] = targs[i]
+ for i := 0; i < tparams.Len(); i++ {
+ subst.replacements[tparams.At(i)] = targs[i]
}
if subst.debug {
- if err := subst.wellFormed(); err != nil {
- panic(err)
- }
+ subst.wellFormed()
}
return subst
}
-// wellFormed returns an error if subst was not properly initialized.
-func (subst *subster) wellFormed() error {
- if subst == nil || len(subst.replacements) == 0 {
- return nil
+// wellFormed asserts that subst was properly initialized.
+func (subst *subster) wellFormed() {
+ if subst == nil {
+ return
}
// Check that all of the type params do not appear in the arguments.
s := make(map[types.Type]bool, len(subst.replacements))
@@ -59,10 +62,9 @@ func (subst *subster) wellFormed() error {
}
for _, r := range subst.replacements {
if reaches(r, s) {
- return fmt.Errorf("\n‰r %s s %v replacements %v\n", r, s, subst.replacements)
+ panic(subst)
}
}
- return nil
}
// typ returns the type of t with the type parameter tparams[i] substituted
@@ -143,6 +145,15 @@ func (subst *subster) typ(t types.Type) (res types.Type) {
}
}
+// types returns the result of {subst.typ(ts[i])}.
+func (subst *subster) types(ts []types.Type) []types.Type {
+ res := make([]types.Type, len(ts))
+ for i := range ts {
+ res[i] = subst.typ(ts[i])
+ }
+ return res
+}
+
func (subst *subster) tuple(t *types.Tuple) *types.Tuple {
if t != nil {
if vars := subst.varlist(t); vars != nil {
@@ -294,37 +305,64 @@ func (subst *subster) interface_(iface *types.Interface) *types.Interface {
}
func (subst *subster) named(t *types.Named) types.Type {
- // A name type may be:
- // (1) ordinary (no type parameters, no type arguments),
- // (2) generic (type parameters but no type arguments), or
- // (3) instantiated (type parameters and type arguments).
+ // A named type may be:
+ // (1) ordinary named type (non-local scope, no type parameters, no type arguments),
+ // (2) locally scoped type,
+ // (3) generic (type parameters but no type arguments), or
+ // (4) instantiated (type parameters and type arguments).
tparams := typeparams.ForNamed(t)
if tparams.Len() == 0 {
- // case (1) ordinary
+ if subst.scope != nil && !subst.scope.Contains(t.Obj().Pos()) {
+ // Outside the current function scope?
+ return t // case (1) ordinary
+ }
- // Note: If Go allows for local type declarations in generic
- // functions we may need to descend into underlying as well.
- return t
+ // case (2) locally scoped type.
+ // Create a new named type to represent this instantiation.
+ // We assume that local types of distinct instantiations of a
+ // generic function are distinct, even if they don't refer to
+ // type parameters, but the spec is unclear; see golang/go#58573.
+ //
+ // Subtle: We short circuit substitution and use a newly created type in
+ // subst, i.e. cache[t]=n, to pre-emptively replace t with n in recursive
+ // types during traversal. This both breaks infinite cycles and allows for
+ // constructing types with the replacement applied in subst.typ(under).
+ //
+ // Example:
+ // func foo[T any]() {
+ // type linkedlist struct {
+ // next *linkedlist
+ // val T
+ // }
+ // }
+ //
+ // When the field `next *linkedlist` is visited during subst.typ(under),
+ // we want the substituted type for the field `next` to be `*n`.
+ n := types.NewNamed(t.Obj(), nil, nil)
+ subst.cache[t] = n
+ subst.cache[n] = n
+ n.SetUnderlying(subst.typ(t.Underlying()))
+ return n
}
targs := typeparams.NamedTypeArgs(t)
// insts are arguments to instantiate using.
insts := make([]types.Type, tparams.Len())
- // case (2) generic ==> targs.Len() == 0
+ // case (3) generic ==> targs.Len() == 0
// Instantiating a generic with no type arguments should be unreachable.
// Please report a bug if you encounter this.
assert(targs.Len() != 0, "substitution into a generic Named type is currently unsupported")
- // case (3) instantiated.
+ // case (4) instantiated.
// Substitute into the type arguments and instantiate the replacements.
// Example:
// type N[A any] func() A
// func Foo[T](g N[T]) {}
// To instantiate Foo[string], one goes through {T->string}. To get the type of g
- // one subsitutes T with string in {N with TypeArgs == {T} and TypeParams == {A} }
- // to get {N with TypeArgs == {string} and TypeParams == {A} }.
- assert(targs.Len() == tparams.Len(), "TypeArgs().Len() must match TypeParams().Len() if present")
+ // one substitutes T with string in {N with typeargs == {T} and typeparams == {A} }
+ // to get {N with TypeArgs == {string} and typeparams == {A} }.
+ assert(targs.Len() == tparams.Len(), "typeargs.Len() must match typeparams.Len() if present")
for i, n := 0, targs.Len(); i < n; i++ {
inst := subst.typ(targs.At(i)) // TODO(generic): Check with rfindley for mutual recursion
insts[i] = inst
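
Case (4) ultimately rests on the same instantiation machinery that go/types exposes publicly. A minimal sketch using the public API rather than the internal subster (the generic type N is taken from the comment above; everything else is invented for illustration):

    package main

    import (
        "fmt"
        "go/ast"
        "go/parser"
        "go/token"
        "go/types"
    )

    const src = `package p

    type N[A any] func() A
    `

    func main() {
        fset := token.NewFileSet()
        file, err := parser.ParseFile(fset, "p.go", src, 0)
        if err != nil {
            panic(err)
        }
        pkg, err := new(types.Config).Check("p", fset, []*ast.File{file}, nil)
        if err != nil {
            panic(err)
        }
        n := pkg.Scope().Lookup("N").Type() // the generic named type N[A any]
        // Substitute A -> string and instantiate, with a shared Context as cache.
        inst, err := types.Instantiate(types.NewContext(), n, []types.Type{types.Typ[types.String]}, true)
        if err != nil {
            panic(err)
        }
        fmt.Println(inst, "underlying", inst.Underlying()) // p.N[string] underlying func() string
    }
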
@@ -360,25 +398,32 @@ func (subst *subster) signature(t *types.Signature) types.Type {
params := subst.tuple(t.Params())
results := subst.tuple(t.Results())
if recv != t.Recv() || params != t.Params() || results != t.Results() {
- return types.NewSignature(recv, params, results, t.Variadic())
+ return typeparams.NewSignatureType(recv, nil, nil, params, results, t.Variadic())
}
return t
}
// reaches returns true if a type t reaches any type t' s.t. c[t'] == true.
-// Updates c to cache results.
+// It updates c to cache results.
+//
+// reaches is currently only part of the wellFormed debug logic, and
+// in practice c is initially only type parameters. It is not currently
+// relied on in production.
func reaches(t types.Type, c map[types.Type]bool) (res bool) {
if c, ok := c[t]; ok {
return c
}
- c[t] = false // prevent cycles
+
+ // c is populated with temporary false entries as types are visited.
+ // This avoids repeat visits and breaks cycles.
+ c[t] = false
defer func() {
c[t] = res
}()
switch t := t.(type) {
case *typeparams.TypeParam, *types.Basic:
- // no-op => c == false
+ return false
case *types.Array:
return reaches(t.Elem(), c)
case *types.Slice:
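
The caching pattern here (store a provisional false before recursing, overwrite with the real answer on the way out) is easiest to see on a toy graph; the sketch below uses plain strings rather than types, purely for illustration of the same idiom.

    package main

    import "fmt"

    // reachable reports whether target can be reached from n. A provisional
    // false entry is stored in c before recursing, so a cycle back to n
    // terminates instead of looping; the deferred write records the final answer.
    func reachable(n, target string, edges map[string][]string, c map[string]bool) (res bool) {
        if r, ok := c[n]; ok {
            return r
        }
        c[n] = false // provisional entry: breaks cycles, avoids repeat visits
        defer func() { c[n] = res }()
        if n == target {
            return true
        }
        for _, m := range edges[n] {
            if reachable(m, target, edges, c) {
                return true
            }
        }
        return false
    }

    func main() {
        edges := map[string][]string{"a": {"b"}, "b": {"a", "c"}} // a <-> b cycle
        fmt.Println(reachable("a", "c", edges, map[string]bool{})) // true, despite the cycle
    }
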
diff --git a/go/ssa/subst_test.go b/go/ssa/subst_test.go
index fe84adcc3..14cda54e6 100644
--- a/go/ssa/subst_test.go
+++ b/go/ssa/subst_test.go
@@ -99,12 +99,8 @@ var _ L[int] = Fn0[L[int]](nil)
}
T := tv.Type.(*types.Named)
- var tparams []*typeparams.TypeParam
- for i, l := 0, typeparams.ForNamed(T); i < l.Len(); i++ {
- tparams = append(tparams, l.At(i))
- }
- subst := makeSubster(typeparams.NewContext(), tparams, targs, true)
+ subst := makeSubster(typeparams.NewContext(), nil, typeparams.ForNamed(T), targs, true)
sub := subst.typ(T.Underlying())
if got := sub.String(); got != test.want {
t.Errorf("subst{%v->%v}.typ(%s) = %v, want %v", test.expr, test.args, T.Underlying(), got, test.want)
diff --git a/go/ssa/testdata/src/README.txt b/go/ssa/testdata/src/README.txt
new file mode 100644
index 000000000..ee5909318
--- /dev/null
+++ b/go/ssa/testdata/src/README.txt
@@ -0,0 +1,5 @@
+These files are present to test building ssa on go files that use signatures from standard library packages.
+
+Only the exported members used by the tests are needed.
+
+Providing these decreases testing time ~10x (90s -> 8s) compared to building the standard library packages from source during tests. \ No newline at end of file
diff --git a/go/ssa/testdata/src/bytes/bytes.go b/go/ssa/testdata/src/bytes/bytes.go
new file mode 100644
index 000000000..deb7fdd7d
--- /dev/null
+++ b/go/ssa/testdata/src/bytes/bytes.go
@@ -0,0 +1,3 @@
+package bytes
+
+func Compare(a, b []byte) int
diff --git a/go/ssa/testdata/src/context/context.go b/go/ssa/testdata/src/context/context.go
new file mode 100644
index 000000000..d4f6c256c
--- /dev/null
+++ b/go/ssa/testdata/src/context/context.go
@@ -0,0 +1,7 @@
+package context
+
+type Context interface {
+ Done() <-chan struct{}
+}
+
+func Background() Context
diff --git a/go/ssa/testdata/src/encoding/encoding.go b/go/ssa/testdata/src/encoding/encoding.go
new file mode 100644
index 000000000..3fa2ba36c
--- /dev/null
+++ b/go/ssa/testdata/src/encoding/encoding.go
@@ -0,0 +1,9 @@
+package encoding
+
+type BinaryMarshaler interface {
+ MarshalBinary() (data []byte, err error)
+}
+
+type BinaryUnmarshaler interface {
+ UnmarshalBinary(data []byte) error
+}
diff --git a/go/ssa/testdata/src/encoding/json/json.go b/go/ssa/testdata/src/encoding/json/json.go
new file mode 100644
index 000000000..2080fc8cb
--- /dev/null
+++ b/go/ssa/testdata/src/encoding/json/json.go
@@ -0,0 +1,4 @@
+package json
+
+func Marshal(v any) ([]byte, error)
+func Unmarshal(data []byte, v any) error
diff --git a/go/ssa/testdata/src/encoding/xml/xml.go b/go/ssa/testdata/src/encoding/xml/xml.go
new file mode 100644
index 000000000..b226144b6
--- /dev/null
+++ b/go/ssa/testdata/src/encoding/xml/xml.go
@@ -0,0 +1,4 @@
+package xml
+
+func Marshal(v any) ([]byte, error)
+func Unmarshal(data []byte, v any) error
diff --git a/go/ssa/testdata/src/errors/errors.go b/go/ssa/testdata/src/errors/errors.go
new file mode 100644
index 000000000..5b292709f
--- /dev/null
+++ b/go/ssa/testdata/src/errors/errors.go
@@ -0,0 +1,3 @@
+package errors
+
+func New(text string) error
diff --git a/go/ssa/testdata/src/fmt/fmt.go b/go/ssa/testdata/src/fmt/fmt.go
new file mode 100644
index 000000000..cacfeef20
--- /dev/null
+++ b/go/ssa/testdata/src/fmt/fmt.go
@@ -0,0 +1,11 @@
+package fmt
+
+func Sprint(args ...interface{}) string
+func Sprintln(args ...interface{}) string
+func Sprintf(format string, args ...interface{}) string
+
+func Print(args ...interface{}) (int, error)
+func Println(args ...interface{})
+func Printf(format string, args ...interface{}) (int, error)
+
+func Errorf(format string, args ...interface{}) error
diff --git a/go/ssa/testdata/src/io/io.go b/go/ssa/testdata/src/io/io.go
new file mode 100644
index 000000000..8cde43061
--- /dev/null
+++ b/go/ssa/testdata/src/io/io.go
@@ -0,0 +1,5 @@
+package io
+
+import "errors"
+
+var EOF = errors.New("EOF")
diff --git a/go/ssa/testdata/src/log/log.go b/go/ssa/testdata/src/log/log.go
new file mode 100644
index 000000000..4ff0d8ea9
--- /dev/null
+++ b/go/ssa/testdata/src/log/log.go
@@ -0,0 +1,5 @@
+package log
+
+func Println(v ...interface{})
+func Fatalln(v ...interface{})
+func Fatalf(format string, v ...any)
diff --git a/go/ssa/testdata/src/math/math.go b/go/ssa/testdata/src/math/math.go
new file mode 100644
index 000000000..9768a56ef
--- /dev/null
+++ b/go/ssa/testdata/src/math/math.go
@@ -0,0 +1,15 @@
+package math
+
+func NaN() float64
+
+func Inf(int) float64
+
+func IsNaN(float64) bool
+
+func Float64bits(float64) uint64
+
+func Signbit(x float64) bool
+
+func Sqrt(x float64) float64
+
+func Sin(x float64) float64
diff --git a/go/ssa/testdata/src/os/os.go b/go/ssa/testdata/src/os/os.go
new file mode 100644
index 000000000..555ef5491
--- /dev/null
+++ b/go/ssa/testdata/src/os/os.go
@@ -0,0 +1,5 @@
+package os
+
+func Getenv(string) string
+
+func Exit(int)
diff --git a/go/ssa/testdata/src/reflect/reflect.go b/go/ssa/testdata/src/reflect/reflect.go
new file mode 100644
index 000000000..f5d7ba2a0
--- /dev/null
+++ b/go/ssa/testdata/src/reflect/reflect.go
@@ -0,0 +1,40 @@
+package reflect
+
+type Type interface {
+ Elem() Type
+ Kind() Kind
+ String() string
+}
+
+type Value struct{}
+
+func (Value) String() string
+func (Value) Elem() Value
+func (Value) Field(int) Value
+func (Value) Index(i int) Value
+func (Value) Int() int64
+func (Value) Interface() interface{}
+func (Value) IsNil() bool
+func (Value) IsValid() bool
+func (Value) Kind() Kind
+func (Value) Len() int
+func (Value) MapIndex(Value) Value
+func (Value) MapKeys() []Value
+func (Value) NumField() int
+func (Value) Pointer() uintptr
+func (Value) SetInt(int64)
+func (Value) Type() Type
+
+func SliceOf(Type) Type
+func TypeOf(interface{}) Type
+func ValueOf(interface{}) Value
+
+type Kind uint
+
+const (
+ Invalid Kind = iota
+ Int
+ Pointer
+)
+
+func DeepEqual(x, y interface{}) bool
diff --git a/go/ssa/testdata/src/runtime/runtime.go b/go/ssa/testdata/src/runtime/runtime.go
new file mode 100644
index 000000000..9feed5c99
--- /dev/null
+++ b/go/ssa/testdata/src/runtime/runtime.go
@@ -0,0 +1,5 @@
+package runtime
+
+func GC()
+
+func SetFinalizer(obj, finalizer any)
diff --git a/go/ssa/testdata/src/sort/sort.go b/go/ssa/testdata/src/sort/sort.go
new file mode 100644
index 000000000..d0b0e9942
--- /dev/null
+++ b/go/ssa/testdata/src/sort/sort.go
@@ -0,0 +1,13 @@
+package sort
+
+func Strings(x []string)
+func Ints(x []int)
+func Float64s(x []float64)
+
+func Sort(data Interface)
+
+type Interface interface {
+ Len() int
+ Less(i, j int) bool
+ Swap(i, j int)
+}
diff --git a/go/ssa/testdata/src/strconv/strconv.go b/go/ssa/testdata/src/strconv/strconv.go
new file mode 100644
index 000000000..3f6f8772b
--- /dev/null
+++ b/go/ssa/testdata/src/strconv/strconv.go
@@ -0,0 +1,6 @@
+package strconv
+
+func Itoa(i int) string
+func Atoi(s string) (int, error)
+
+func FormatFloat(float64, byte, int, int) string
diff --git a/go/ssa/testdata/src/strings/strings.go b/go/ssa/testdata/src/strings/strings.go
new file mode 100644
index 000000000..11695a43c
--- /dev/null
+++ b/go/ssa/testdata/src/strings/strings.go
@@ -0,0 +1,13 @@
+package strings
+
+func Replace(s, old, new string, n int) string
+func Index(haystack, needle string) int
+func Contains(haystack, needle string) bool
+func HasPrefix(s, prefix string) bool
+func EqualFold(s, t string) bool
+func ToLower(s string) string
+
+type Builder struct{}
+
+func (b *Builder) WriteString(s string) (int, error)
+func (b *Builder) String() string
diff --git a/go/ssa/testdata/src/sync/atomic/atomic.go b/go/ssa/testdata/src/sync/atomic/atomic.go
new file mode 100644
index 000000000..6080435b2
--- /dev/null
+++ b/go/ssa/testdata/src/sync/atomic/atomic.go
@@ -0,0 +1,5 @@
+package atomic
+
+import "unsafe"
+
+func LoadPointer(addr *unsafe.Pointer) (val unsafe.Pointer)
diff --git a/go/ssa/testdata/src/sync/sync.go b/go/ssa/testdata/src/sync/sync.go
new file mode 100644
index 000000000..8e6ff6893
--- /dev/null
+++ b/go/ssa/testdata/src/sync/sync.go
@@ -0,0 +1,12 @@
+package sync
+
+type Mutex struct{}
+
+func (m *Mutex) Lock()
+func (m *Mutex) Unlock()
+
+type WaitGroup struct{}
+
+func (wg *WaitGroup) Add(delta int)
+func (wg *WaitGroup) Done()
+func (wg *WaitGroup) Wait()
diff --git a/go/ssa/testdata/src/time/time.go b/go/ssa/testdata/src/time/time.go
new file mode 100644
index 000000000..d8d577d61
--- /dev/null
+++ b/go/ssa/testdata/src/time/time.go
@@ -0,0 +1,24 @@
+package time
+
+type Duration int64
+
+func Sleep(Duration)
+
+func NewTimer(d Duration) *Timer
+
+type Timer struct {
+ C <-chan Time
+}
+
+func (t *Timer) Stop() bool
+
+type Time struct{}
+
+func After(d Duration) <-chan Time
+
+const (
+ Nanosecond Duration = iota // Specific values do not matter here.
+ Second
+ Minute
+ Hour
+)
diff --git a/go/ssa/testdata/src/unsafe/unsafe.go b/go/ssa/testdata/src/unsafe/unsafe.go
new file mode 100644
index 000000000..5fd90b6f0
--- /dev/null
+++ b/go/ssa/testdata/src/unsafe/unsafe.go
@@ -0,0 +1,4 @@
+package unsafe
+
+// Empty unsafe package helps other packages load.
+// TODO(taking): determine why.
diff --git a/go/ssa/testdata/valueforexpr.go b/go/ssa/testdata/valueforexpr.go
index da76f13a3..243ec614f 100644
--- a/go/ssa/testdata/valueforexpr.go
+++ b/go/ssa/testdata/valueforexpr.go
@@ -1,3 +1,4 @@
+//go:build ignore
// +build ignore
package main
diff --git a/go/ssa/util.go b/go/ssa/util.go
index 010219364..db53aebee 100644
--- a/go/ssa/util.go
+++ b/go/ssa/util.go
@@ -17,6 +17,7 @@ import (
"golang.org/x/tools/go/ast/astutil"
"golang.org/x/tools/go/types/typeutil"
+ "golang.org/x/tools/internal/typeparams"
)
//// Sanity checking utilities
@@ -35,7 +36,6 @@ func unparen(e ast.Expr) ast.Expr { return astutil.Unparen(e) }
// isBlankIdent returns true iff e is an Ident with name "_".
// They have no associated types.Object, and thus no type.
-//
func isBlankIdent(e ast.Expr) bool {
id, ok := e.(*ast.Ident)
return ok && id.Name == "_"
@@ -49,7 +49,56 @@ func isPointer(typ types.Type) bool {
return ok
}
-func isInterface(T types.Type) bool { return types.IsInterface(T) }
+// isNonTypeParamInterface reports whether t is an interface type but not a type parameter.
+func isNonTypeParamInterface(t types.Type) bool {
+ return !typeparams.IsTypeParam(t) && types.IsInterface(t)
+}
+
+// isBasic reports whether t is a basic type.
+func isBasic(t types.Type) bool {
+ _, ok := t.(*types.Basic)
+ return ok
+}
+
+// isString reports whether t is exactly a string type.
+func isString(t types.Type) bool {
+ return isBasic(t) && t.(*types.Basic).Info()&types.IsString != 0
+}
+
+// isByteSlice reports whether t is of the form []~bytes.
+func isByteSlice(t types.Type) bool {
+ if b, ok := t.(*types.Slice); ok {
+ e, _ := b.Elem().Underlying().(*types.Basic)
+ return e != nil && e.Kind() == types.Byte
+ }
+ return false
+}
+
+// isRuneSlice reports whether t is of the form []~runes.
+func isRuneSlice(t types.Type) bool {
+ if b, ok := t.(*types.Slice); ok {
+ e, _ := b.Elem().Underlying().(*types.Basic)
+ return e != nil && e.Kind() == types.Rune
+ }
+ return false
+}
+
+// isBasicConvTypes returns true when a type set can be
+// one side of a Convert operation. This is when:
+// - All are basic, []byte, or []rune.
+// - At least 1 is basic.
+// - At most 1 is []byte or []rune.
+func isBasicConvTypes(tset termList) bool {
+ basics := 0
+ all := underIs(tset, func(t types.Type) bool {
+ if isBasic(t) {
+ basics++
+ return true
+ }
+ return isByteSlice(t) || isRuneSlice(t)
+ })
+ return all && basics >= 1 && tset.Len()-basics <= 1
+}
// deref returns a pointer's element type; otherwise it returns typ.
func deref(typ types.Type) types.Type {
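
These predicates gate the conversion shapes the builder has to handle: basic to basic, string to/from []byte, and string to/from []rune. Loosely, the same categories can be observed on concrete types with the exported types.ConvertibleTo; a minimal sketch (not the internal term-list logic):

    package main

    import (
        "fmt"
        "go/types"
    )

    func main() {
        str := types.Typ[types.String]
        bytes := types.NewSlice(types.Typ[types.Byte])
        runes := types.NewSlice(types.Typ[types.Rune])
        ints := types.NewSlice(types.Typ[types.Int])

        fmt.Println(types.ConvertibleTo(str, bytes)) // true: string -> []byte
        fmt.Println(types.ConvertibleTo(runes, str)) // true: []rune -> string
        fmt.Println(types.ConvertibleTo(ints, str))  // false: []int has no string conversion
    }
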
@@ -64,11 +113,16 @@ func recvType(obj *types.Func) types.Type {
return obj.Type().(*types.Signature).Recv().Type()
}
+// isUntyped returns true for types that are untyped.
+func isUntyped(typ types.Type) bool {
+ b, ok := typ.(*types.Basic)
+ return ok && b.Info()&types.IsUntyped != 0
+}
+
// logStack prints the formatted "start" message to stderr and
// returns a closure that prints the corresponding "end" message.
// Call using 'defer logStack(...)()' to show builder stack on panic.
// Don't forget trailing parens!
-//
func logStack(format string, args ...interface{}) func() {
msg := fmt.Sprintf(format, args...)
io.WriteString(os.Stderr, msg)
@@ -100,25 +154,118 @@ func makeLen(T types.Type) *Builtin {
}
}
+// nonbasicTypes returns a list containing all of the types T in ts that are non-basic.
+func nonbasicTypes(ts []types.Type) []types.Type {
+ if len(ts) == 0 {
+ return nil
+ }
+ added := make(map[types.Type]bool) // additionally filter duplicates
+ var filtered []types.Type
+ for _, T := range ts {
+ if !isBasic(T) {
+ if !added[T] {
+ added[T] = true
+ filtered = append(filtered, T)
+ }
+ }
+ }
+ return filtered
+}
+
+// receiverTypeArgs returns the type arguments to a function's receiver.
+// Returns an empty list if obj does not have a receiver or its receiver does not have type arguments.
+func receiverTypeArgs(obj *types.Func) []types.Type {
+ rtype := recvType(obj)
+ if rtype == nil {
+ return nil
+ }
+ if isPointer(rtype) {
+ rtype = rtype.(*types.Pointer).Elem()
+ }
+ named, ok := rtype.(*types.Named)
+ if !ok {
+ return nil
+ }
+ ts := typeparams.NamedTypeArgs(named)
+ if ts.Len() == 0 {
+ return nil
+ }
+ targs := make([]types.Type, ts.Len())
+ for i := 0; i < ts.Len(); i++ {
+ targs[i] = ts.At(i)
+ }
+ return targs
+}
+
+// recvAsFirstArg takes a method signature and returns a function
+// signature with receiver as the first parameter.
+func recvAsFirstArg(sig *types.Signature) *types.Signature {
+ params := make([]*types.Var, 0, 1+sig.Params().Len())
+ params = append(params, sig.Recv())
+ for i := 0; i < sig.Params().Len(); i++ {
+ params = append(params, sig.Params().At(i))
+ }
+ return typeparams.NewSignatureType(nil, nil, nil, types.NewTuple(params...), sig.Results(), sig.Variadic())
+}
+
+// instance returns whether an expression is a simple or qualified identifier
+// that is a generic instantiation.
+func instance(info *types.Info, expr ast.Expr) bool {
+ // Compare the logic here against go/types.instantiatedIdent,
+ // which also handles *IndexExpr and *IndexListExpr.
+ var id *ast.Ident
+ switch x := expr.(type) {
+ case *ast.Ident:
+ id = x
+ case *ast.SelectorExpr:
+ id = x.Sel
+ default:
+ return false
+ }
+ _, ok := typeparams.GetInstances(info)[id]
+ return ok
+}
+
+// instanceArgs returns the Instance[id].TypeArgs as a slice.
+func instanceArgs(info *types.Info, id *ast.Ident) []types.Type {
+ targList := typeparams.GetInstances(info)[id].TypeArgs
+ if targList == nil {
+ return nil
+ }
+
+ targs := make([]types.Type, targList.Len())
+ for i, n := 0, targList.Len(); i < n; i++ {
+ targs[i] = targList.At(i)
+ }
+ return targs
+}
+
// Mapping of a type T to a canonical instance C s.t. types.Identical(T, C).
// Thread-safe.
type canonizer struct {
mu sync.Mutex
- canon typeutil.Map // map from type to a canonical instance
+ types typeutil.Map // map from type to a canonical instance
+ lists typeListMap // map from a list of types to a canonical instance
+}
+
+func newCanonizer() *canonizer {
+ c := &canonizer{}
+ h := typeutil.MakeHasher()
+ c.types.SetHasher(h)
+ c.lists.hasher = h
+ return c
}
-// Tuple returns a canonical representative of a Tuple of types.
-// Representative of the empty Tuple is nil.
-func (c *canonizer) Tuple(ts []types.Type) *types.Tuple {
+// List returns a canonical representative of a list of types.
+// Representative of the empty list is nil.
+func (c *canonizer) List(ts []types.Type) *typeList {
if len(ts) == 0 {
return nil
}
- vars := make([]*types.Var, len(ts))
- for i, t := range ts {
- vars[i] = anonVar(t)
- }
- tuple := types.NewTuple(vars...)
- return c.Type(tuple).(*types.Tuple)
+
+ c.mu.Lock()
+ defer c.mu.Unlock()
+ return c.lists.rep(ts)
}
// Type returns a canonical representative of type T.
@@ -126,9 +273,90 @@ func (c *canonizer) Type(T types.Type) types.Type {
c.mu.Lock()
defer c.mu.Unlock()
- if r := c.canon.At(T); r != nil {
+ if r := c.types.At(T); r != nil {
return r.(types.Type)
}
- c.canon.Set(T, T)
+ c.types.Set(T, T)
return T
}
+
+// A type for representing a canonized list of types.
+type typeList []types.Type
+
+func (l *typeList) identical(ts []types.Type) bool {
+ if l == nil {
+ return len(ts) == 0
+ }
+ n := len(*l)
+ if len(ts) != n {
+ return false
+ }
+ for i, left := range *l {
+ right := ts[i]
+ if !types.Identical(left, right) {
+ return false
+ }
+ }
+ return true
+}
+
+type typeListMap struct {
+ hasher typeutil.Hasher
+ buckets map[uint32][]*typeList
+}
+
+// rep returns a canonical representative of a slice of types.
+func (m *typeListMap) rep(ts []types.Type) *typeList {
+ if m == nil || len(ts) == 0 {
+ return nil
+ }
+
+ if m.buckets == nil {
+ m.buckets = make(map[uint32][]*typeList)
+ }
+
+ h := m.hash(ts)
+ bucket := m.buckets[h]
+ for _, l := range bucket {
+ if l.identical(ts) {
+ return l
+ }
+ }
+
+ // not present. create a representative.
+ cp := make(typeList, len(ts))
+ copy(cp, ts)
+ rep := &cp
+
+ m.buckets[h] = append(bucket, rep)
+ return rep
+}
+
+func (m *typeListMap) hash(ts []types.Type) uint32 {
+ if m == nil {
+ return 0
+ }
+ // Some smallish prime far away from typeutil.Hash.
+ n := len(ts)
+ h := uint32(13619) + 2*uint32(n)
+ for i := 0; i < n; i++ {
+ h += 3 * m.hasher.Hash(ts[i])
+ }
+ return h
+}
+
+// instantiateMethod instantiates m with targs and returns a canonical representative for this method.
+func (canon *canonizer) instantiateMethod(m *types.Func, targs []types.Type, ctxt *typeparams.Context) *types.Func {
+ recv := recvType(m)
+ if p, ok := recv.(*types.Pointer); ok {
+ recv = p.Elem()
+ }
+ named := recv.(*types.Named)
+ inst, err := typeparams.Instantiate(ctxt, typeparams.NamedTypeOrigin(named), targs, false)
+ if err != nil {
+ panic(err)
+ }
+ rep := canon.Type(inst)
+ obj, _, _ := types.LookupFieldOrMethod(rep, true, m.Pkg(), m.Name())
+ return obj.(*types.Func)
+}
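
The canonizer mirrors, for internal use, what clients can already do with the exported typeutil.Map, which keys entries by types.Identical. A minimal sketch of the same "first occurrence wins" canonicalization (illustration only, not the internal type):

    package main

    import (
        "fmt"
        "go/types"

        "golang.org/x/tools/go/types/typeutil"
    )

    func main() {
        // Two distinct but identical types.Type values.
        a := types.NewSlice(types.Typ[types.Int])
        b := types.NewSlice(types.Typ[types.Int])

        var canon typeutil.Map // keyed by types.Identical
        canon.SetHasher(typeutil.MakeHasher())

        rep := func(t types.Type) types.Type {
            if r := canon.At(t); r != nil {
                return r.(types.Type)
            }
            canon.Set(t, t) // first occurrence becomes the representative
            return t
        }

        fmt.Println(rep(a) == rep(b)) // true: both resolve to the same representative
    }
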
diff --git a/go/ssa/wrappers.go b/go/ssa/wrappers.go
index 90ddc9df7..228daf615 100644
--- a/go/ssa/wrappers.go
+++ b/go/ssa/wrappers.go
@@ -22,6 +22,7 @@ package ssa
import (
"fmt"
+ "go/token"
"go/types"
)
@@ -41,16 +42,15 @@ import (
// - the result may be a thunk or a wrapper.
//
// EXCLUSIVE_LOCKS_REQUIRED(prog.methodsMu)
-//
-func makeWrapper(prog *Program, sel *types.Selection) *Function {
- obj := sel.Obj().(*types.Func) // the declared function
- sig := sel.Type().(*types.Signature) // type of this wrapper
+func makeWrapper(prog *Program, sel *selection, cr *creator) *Function {
+ obj := sel.obj.(*types.Func) // the declared function
+ sig := sel.typ.(*types.Signature) // type of this wrapper
var recv *types.Var // wrapper's receiver or thunk's params[0]
name := obj.Name()
var description string
var start int // first regular param
- if sel.Kind() == types.MethodExpr {
+ if sel.kind == types.MethodExpr {
name += "$thunk"
description = "thunk"
recv = sig.Params().At(0)
@@ -60,7 +60,7 @@ func makeWrapper(prog *Program, sel *types.Selection) *Function {
recv = sig.Recv()
}
- description = fmt.Sprintf("%s for %s", description, sel.Obj())
+ description = fmt.Sprintf("%s for %s", description, sel.obj)
if prog.mode&LogSource != 0 {
defer logStack("make %s to (%s)", description, recv.Type())()
}
@@ -74,14 +74,15 @@ func makeWrapper(prog *Program, sel *types.Selection) *Function {
pos: obj.Pos(),
info: nil, // info is not set on wrappers.
}
+ cr.Add(fn)
fn.startBody()
fn.addSpilledParam(recv)
createParams(fn, start)
- indices := sel.Index()
+ indices := sel.index
var v Value = fn.Locals[0] // spilled receiver
- if isPointer(sel.Recv()) {
+ if isPointer(sel.recv) {
v = emitLoad(fn, v)
// For simple indirection wrappers, perform an informative nil-check:
@@ -91,13 +92,13 @@ func makeWrapper(prog *Program, sel *types.Selection) *Function {
c.Call.Value = &Builtin{
name: "ssa:wrapnilchk",
sig: types.NewSignature(nil,
- types.NewTuple(anonVar(sel.Recv()), anonVar(tString), anonVar(tString)),
- types.NewTuple(anonVar(sel.Recv())), false),
+ types.NewTuple(anonVar(sel.recv), anonVar(tString), anonVar(tString)),
+ types.NewTuple(anonVar(sel.recv)), false),
}
c.Call.Args = []Value{
v,
- stringConst(deref(sel.Recv()).String()),
- stringConst(sel.Obj().Name()),
+ stringConst(deref(sel.recv).String()),
+ stringConst(sel.obj.Name()),
}
c.setType(v.Type())
v = fn.emit(&c)
@@ -112,35 +113,39 @@ func makeWrapper(prog *Program, sel *types.Selection) *Function {
// Load) in preference to value extraction (Field possibly
// preceded by Load).
- v = emitImplicitSelections(fn, v, indices[:len(indices)-1])
+ v = emitImplicitSelections(fn, v, indices[:len(indices)-1], token.NoPos)
// Invariant: v is a pointer, either
// value of implicit *C field, or
// address of implicit C field.
var c Call
- if r := recvType(obj); !isInterface(r) { // concrete method
+ if r := recvType(obj); !types.IsInterface(r) { // concrete method
if !isPointer(r) {
v = emitLoad(fn, v)
}
- c.Call.Value = prog.declaredFunc(obj)
+ callee := prog.originFunc(obj)
+ if callee.typeparams.Len() > 0 {
+ callee = prog.lookupOrCreateInstance(callee, receiverTypeArgs(obj), cr)
+ }
+ c.Call.Value = callee
c.Call.Args = append(c.Call.Args, v)
} else {
c.Call.Method = obj
- c.Call.Value = emitLoad(fn, v)
+ c.Call.Value = emitLoad(fn, v) // interface (possibly a typeparam)
}
for _, arg := range fn.Params[1:] {
c.Call.Args = append(c.Call.Args, arg)
}
emitTailCall(fn, &c)
fn.finishBody()
+ fn.done()
return fn
}
// createParams creates parameters for wrapper method fn based on its
// Signature.Params, which do not include the receiver.
// start is the index of the first regular parameter to use.
-//
func createParams(fn *Function, start int) {
tparams := fn.Signature.Params()
for i, n := start, tparams.Len(); i < n; i++ {
@@ -159,26 +164,28 @@ func createParams(fn *Function, start int) {
// Use MakeClosure with such a wrapper to construct a bound method
// closure. e.g.:
//
-// type T int or: type T interface { meth() }
-// func (t T) meth()
-// var t T
-// f := t.meth
-// f() // calls t.meth()
+// type T int or: type T interface { meth() }
+// func (t T) meth()
+// var t T
+// f := t.meth
+// f() // calls t.meth()
//
// f is a closure of a synthetic wrapper defined as if by:
//
-// f := func() { return t.meth() }
+// f := func() { return t.meth() }
//
// Unlike makeWrapper, makeBound need perform no indirection or field
// selections because that can be done before the closure is
// constructed.
//
// EXCLUSIVE_LOCKS_ACQUIRED(meth.Prog.methodsMu)
-//
-func makeBound(prog *Program, obj *types.Func) *Function {
+func makeBound(prog *Program, obj *types.Func, cr *creator) *Function {
+ targs := receiverTypeArgs(obj)
+ key := boundsKey{obj, prog.canon.List(targs)}
+
prog.methodsMu.Lock()
defer prog.methodsMu.Unlock()
- fn, ok := prog.bounds[obj]
+ fn, ok := prog.bounds[key]
if !ok {
description := fmt.Sprintf("bound method wrapper for %s", obj)
if prog.mode&LogSource != 0 {
@@ -193,6 +200,7 @@ func makeBound(prog *Program, obj *types.Func) *Function {
pos: obj.Pos(),
info: nil, // info is not set on wrappers.
}
+ cr.Add(fn)
fv := &FreeVar{name: "recv", typ: recvType(obj), parent: fn}
fn.FreeVars = []*FreeVar{fv}
@@ -200,20 +208,25 @@ func makeBound(prog *Program, obj *types.Func) *Function {
createParams(fn, 0)
var c Call
- if !isInterface(recvType(obj)) { // concrete
- c.Call.Value = prog.declaredFunc(obj)
+ if !types.IsInterface(recvType(obj)) { // concrete
+ callee := prog.originFunc(obj)
+ if callee.typeparams.Len() > 0 {
+ callee = prog.lookupOrCreateInstance(callee, targs, cr)
+ }
+ c.Call.Value = callee
c.Call.Args = []Value{fv}
} else {
- c.Call.Value = fv
c.Call.Method = obj
+ c.Call.Value = fv // interface (possibly a typeparam)
}
for _, arg := range fn.Params {
c.Call.Args = append(c.Call.Args, arg)
}
emitTailCall(fn, &c)
fn.finishBody()
+ fn.done()
- prog.bounds[obj] = fn
+ prog.bounds[key] = fn
}
return fn
}
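
From the outside, the wrappers, bound-method closures, and thunks made here are visible only through Function.Synthetic. A small sketch that surfaces them (the input package is invented for illustration; the exact Synthetic strings are version-dependent):

    package main

    import (
        "fmt"
        "go/ast"
        "go/parser"
        "go/token"
        "go/types"

        "golang.org/x/tools/go/ssa"
        "golang.org/x/tools/go/ssa/ssautil"
    )

    const src = `package p

    type T int

    func (T) Meth() {}

    var F = T(0).Meth // bound method value
    var G = T.Meth    // method expression (thunk)
    `

    func main() {
        fset := token.NewFileSet()
        file, err := parser.ParseFile(fset, "p.go", src, 0)
        if err != nil {
            panic(err)
        }
        pkg := types.NewPackage("p", "")
        ssapkg, _, err := ssautil.BuildPackage(new(types.Config), fset, pkg, []*ast.File{file}, ssa.SanityCheckFunctions)
        if err != nil {
            panic(err)
        }
        prog := ssapkg.Prog
        prog.Build()
        for fn := range ssautil.AllFunctions(prog) {
            if fn.Synthetic != "" {
                fmt.Printf("%s\t(%s)\n", fn, fn.Synthetic) // e.g. bound method wrapper, thunk
            }
        }
    }
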
@@ -221,41 +234,40 @@ func makeBound(prog *Program, obj *types.Func) *Function {
// -- thunks -----------------------------------------------------------
// makeThunk returns a thunk, a synthetic function that delegates to a
-// concrete or interface method denoted by sel.Obj(). The resulting
+// concrete or interface method denoted by sel.obj. The resulting
// function has no receiver, but has an additional (first) regular
// parameter.
//
-// Precondition: sel.Kind() == types.MethodExpr.
+// Precondition: sel.kind == types.MethodExpr.
//
-// type T int or: type T interface { meth() }
-// func (t T) meth()
-// f := T.meth
-// var t T
-// f(t) // calls t.meth()
+// type T int or: type T interface { meth() }
+// func (t T) meth()
+// f := T.meth
+// var t T
+// f(t) // calls t.meth()
//
// f is a synthetic wrapper defined as if by:
//
-// f := func(t T) { return t.meth() }
+// f := func(t T) { return t.meth() }
//
// TODO(adonovan): opt: currently the stub is created even when used
// directly in a function call: C.f(i, 0). This is less efficient
// than inlining the stub.
//
// EXCLUSIVE_LOCKS_ACQUIRED(meth.Prog.methodsMu)
-//
-func makeThunk(prog *Program, sel *types.Selection) *Function {
- if sel.Kind() != types.MethodExpr {
+func makeThunk(prog *Program, sel *selection, cr *creator) *Function {
+ if sel.kind != types.MethodExpr {
panic(sel)
}
- // Canonicalize sel.Recv() to avoid constructing duplicate thunks.
- canonRecv := prog.canon.Type(sel.Recv())
+ // Canonicalize sel.recv to avoid constructing duplicate thunks.
+ canonRecv := prog.canon.Type(sel.recv)
key := selectionKey{
- kind: sel.Kind(),
+ kind: sel.kind,
recv: canonRecv,
- obj: sel.Obj(),
- index: fmt.Sprint(sel.Index()),
- indirect: sel.Indirect(),
+ obj: sel.obj,
+ index: fmt.Sprint(sel.index),
+ indirect: sel.indirect,
}
prog.methodsMu.Lock()
@@ -263,7 +275,7 @@ func makeThunk(prog *Program, sel *types.Selection) *Function {
fn, ok := prog.thunks[key]
if !ok {
- fn = makeWrapper(prog, sel)
+ fn = makeWrapper(prog, sel, cr)
if fn.Signature.Recv() != nil {
panic(fn) // unexpected receiver
}
@@ -284,3 +296,91 @@ type selectionKey struct {
index string
indirect bool
}
+
+// boundsKey is a unique key for the object and a type instantiation.
+type boundsKey struct {
+ obj types.Object // t.meth
+ inst *typeList // canonical type instantiation list.
+}
+
+// A local version of *types.Selection.
+// Needed for some additional control, such as creating a MethodExpr for an instantiation.
+type selection struct {
+ kind types.SelectionKind
+ recv types.Type
+ typ types.Type
+ obj types.Object
+ index []int
+ indirect bool
+}
+
+func toSelection(sel *types.Selection) *selection {
+ return &selection{
+ kind: sel.Kind(),
+ recv: sel.Recv(),
+ typ: sel.Type(),
+ obj: sel.Obj(),
+ index: sel.Index(),
+ indirect: sel.Indirect(),
+ }
+}
+
+// -- instantiations --------------------------------------------------
+
+// buildInstantiationWrapper creates a body for an instantiation
+// wrapper fn. The body calls the original generic function,
+// bracketed by ChangeType conversions on its arguments and results.
+func buildInstantiationWrapper(fn *Function) {
+ orig := fn.topLevelOrigin
+ sig := fn.Signature
+
+ fn.startBody()
+ if sig.Recv() != nil {
+ fn.addParamObj(sig.Recv())
+ }
+ createParams(fn, 0)
+
+ // Create body. Add a call to the origin generic function
+ // and make type changes between arguments and parameters,
+ // as well as return values.
+ var c Call
+ c.Call.Value = orig
+ if res := orig.Signature.Results(); res.Len() == 1 {
+ c.typ = res.At(0).Type()
+ } else {
+ c.typ = res
+ }
+
+ // parameter of instance becomes an argument to the call
+ // to the original generic function.
+ argOffset := 0
+ for i, arg := range fn.Params {
+ var typ types.Type
+ if i == 0 && sig.Recv() != nil {
+ typ = orig.Signature.Recv().Type()
+ argOffset = 1
+ } else {
+ typ = orig.Signature.Params().At(i - argOffset).Type()
+ }
+ c.Call.Args = append(c.Call.Args, emitTypeCoercion(fn, arg, typ))
+ }
+
+ results := fn.emit(&c)
+ var ret Return
+ switch res := sig.Results(); res.Len() {
+ case 0:
+ // no results, do nothing.
+ case 1:
+ ret.Results = []Value{emitTypeCoercion(fn, results, res.At(0).Type())}
+ default:
+ for i := 0; i < sig.Results().Len(); i++ {
+ v := emitExtract(fn, results, i)
+ ret.Results = append(ret.Results, emitTypeCoercion(fn, v, res.At(i).Type()))
+ }
+ }
+
+ fn.emit(&ret)
+ fn.currentBlock = nil
+
+ fn.finishBody()
+}
diff --git a/go/types/objectpath/objectpath.go b/go/types/objectpath/objectpath.go
index 557202b4d..be8f5a867 100644
--- a/go/types/objectpath/objectpath.go
+++ b/go/types/objectpath/objectpath.go
@@ -14,8 +14,10 @@
// distinct but logically equivalent.
//
// A single object may have multiple paths. In this example,
-// type A struct{ X int }
-// type B A
+//
+// type A struct{ X int }
+// type B A
+//
// the field X has two paths due to its membership of both A and B.
// The For(obj) function always returns one of these paths, arbitrarily
// but consistently.
@@ -29,6 +31,8 @@ import (
"strings"
"golang.org/x/tools/internal/typeparams"
+
+ _ "unsafe" // for go:linkname
)
// A Path is an opaque name that identifies a types.Object
@@ -45,30 +49,30 @@ type Path string
// The sequences represent a path through the package/object/type graph.
// We classify these operators by their type:
//
-// PO package->object Package.Scope.Lookup
-// OT object->type Object.Type
-// TT type->type Type.{Elem,Key,Params,Results,Underlying} [EKPRU]
-// TO type->object Type.{At,Field,Method,Obj} [AFMO]
+// PO package->object Package.Scope.Lookup
+// OT object->type Object.Type
+// TT type->type Type.{Elem,Key,Params,Results,Underlying} [EKPRU]
+// TO type->object Type.{At,Field,Method,Obj} [AFMO]
//
// All valid paths start with a package and end at an object
// and thus may be defined by the regular language:
//
-// objectpath = PO (OT TT* TO)*
+// objectpath = PO (OT TT* TO)*
//
// The concrete encoding follows directly:
-// - The only PO operator is Package.Scope.Lookup, which requires an identifier.
-// - The only OT operator is Object.Type,
-// which we encode as '.' because dot cannot appear in an identifier.
-// - The TT operators are encoded as [EKPRUTC];
-// one of these (TypeParam) requires an integer operand,
-// which is encoded as a string of decimal digits.
-// - The TO operators are encoded as [AFMO];
-// three of these (At,Field,Method) require an integer operand,
-// which is encoded as a string of decimal digits.
-// These indices are stable across different representations
-// of the same package, even source and export data.
-// The indices used are implementation specific and may not correspond to
-// the argument to the go/types function.
+// - The only PO operator is Package.Scope.Lookup, which requires an identifier.
+// - The only OT operator is Object.Type,
+// which we encode as '.' because dot cannot appear in an identifier.
+// - The TT operators are encoded as [EKPRUTC];
+// one of these (TypeParam) requires an integer operand,
+// which is encoded as a string of decimal digits.
+// - The TO operators are encoded as [AFMO];
+// three of these (At,Field,Method) require an integer operand,
+// which is encoded as a string of decimal digits.
+// These indices are stable across different representations
+// of the same package, even source and export data.
+// The indices used are implementation specific and may not correspond to
+// the argument to the go/types function.
//
// In the example below,
//
@@ -81,15 +85,14 @@ type Path string
// field X has the path "T.UM0.RA1.F0",
// representing the following sequence of operations:
//
-// p.Lookup("T") T
-// .Type().Underlying().Method(0). f
-// .Type().Results().At(1) b
-// .Type().Field(0) X
+// p.Lookup("T") T
+// .Type().Underlying().Method(0). f
+// .Type().Results().At(1) b
+// .Type().Field(0) X
//
// The encoding is not maximally compact---every R or P is
// followed by an A, for example---but this simplifies the
// encoder and decoder.
-//
const (
// object->type operators
opType = '.' // .Type() (Object)
@@ -110,7 +113,7 @@ const (
opObj = 'O' // .Obj() (Named, TypeParam)
)
-// The For function returns the path to an object relative to its package,
+// For returns the path to an object relative to its package,
// or an error if the object is not accessible from the package's Scope.
//
// The For function guarantees to return a path only for the following objects:
@@ -136,13 +139,30 @@ const (
//
// For(X) would return a path that denotes the following sequence of operations:
//
-// p.Scope().Lookup("T") (TypeName T)
-// .Type().Underlying().Method(0). (method Func f)
-// .Type().Results().At(1) (field Var b)
-// .Type().Field(0) (field Var X)
+// p.Scope().Lookup("T") (TypeName T)
+// .Type().Underlying().Method(0). (method Func f)
+// .Type().Results().At(1) (field Var b)
+// .Type().Field(0) (field Var X)
//
// where p is the package (*types.Package) to which X belongs.
func For(obj types.Object) (Path, error) {
+ return newEncoderFor()(obj)
+}
+
+// An encoder amortizes the cost of encoding the paths of multiple objects.
+// Nonexported pending approval of proposal 58668.
+type encoder struct {
+ scopeNamesMemo map[*types.Scope][]string // memoization of Scope.Names()
+ namedMethodsMemo map[*types.Named][]*types.Func // memoization of namedMethods()
+}
+
+// Exposed to gopls via golang.org/x/tools/internal/typesinternal
+// pending approval of proposal 58668.
+//
+//go:linkname newEncoderFor
+func newEncoderFor() func(types.Object) (Path, error) { return new(encoder).For }
+
+func (enc *encoder) For(obj types.Object) (Path, error) {
pkg := obj.Pkg()
// This table lists the cases of interest.
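
A round-trip sketch of the exported entry points touched here (package source invented for illustration): the concrete method M should take the fast path added below, and decoding the path with Object recovers the same object.

    package main

    import (
        "fmt"
        "go/ast"
        "go/parser"
        "go/token"
        "go/types"

        "golang.org/x/tools/go/types/objectpath"
    )

    const src = `package p

    type T struct{ X int }

    func (T) M() {}
    `

    func main() {
        fset := token.NewFileSet()
        file, err := parser.ParseFile(fset, "p.go", src, 0)
        if err != nil {
            panic(err)
        }
        pkg, err := new(types.Config).Check("p", fset, []*ast.File{file}, nil)
        if err != nil {
            panic(err)
        }
        // Encode the path of the concrete method M, then decode it again.
        m := pkg.Scope().Lookup("T").Type().(*types.Named).Method(0)
        path, err := objectpath.For(m)
        if err != nil {
            panic(err)
        }
        obj, err := objectpath.Object(pkg, path)
        if err != nil {
            panic(err)
        }
        fmt.Println(path, obj == m) // e.g. "T.M0" true
    }
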
@@ -223,10 +243,11 @@ func For(obj types.Object) (Path, error) {
if recv := obj.Type().(*types.Signature).Recv(); recv == nil {
return "", fmt.Errorf("func is not a method: %v", obj)
}
- // TODO(adonovan): opt: if the method is concrete,
- // do a specialized version of the rest of this function so
- // that it's O(1) not O(|scope|). Basically 'find' is needed
- // only for struct fields and interface methods.
+
+ if path, ok := enc.concreteMethod(obj); ok {
+ // Fast path for concrete methods that avoids looping over scope.
+ return path, nil
+ }
default:
panic(obj)
@@ -239,7 +260,7 @@ func For(obj types.Object) (Path, error) {
// the best paths because non-types may
// refer to types, but not the reverse.
empty := make([]byte, 0, 48) // initial space
- names := scope.Names()
+ names := enc.scopeNames(scope)
for _, name := range names {
o := scope.Lookup(name)
tname, ok := o.(*types.TypeName)
@@ -292,9 +313,7 @@ func For(obj types.Object) (Path, error) {
// Note that method index here is always with respect
// to canonical ordering of methods, regardless of how
// they appear in the underlying type.
- canonical := canonicalize(T)
- for i := 0; i < len(canonical); i++ {
- m := canonical[i]
+ for i, m := range enc.namedMethods(T) {
path2 := appendOpArg(path, opMethod, i)
if m == obj {
return Path(path2), nil // found declared method
@@ -315,6 +334,96 @@ func appendOpArg(path []byte, op byte, arg int) []byte {
return path
}
+// concreteMethod returns the path for meth, which must have a non-nil receiver.
+// The second return value indicates success and may be false if the method is
+// an interface method or if it is an instantiated method.
+//
+// This function is just an optimization that avoids the general scope walking
+// approach. You are expected to fall back to the general approach if this
+// function fails.
+func (enc *encoder) concreteMethod(meth *types.Func) (Path, bool) {
+ // Concrete methods can only be declared on package-scoped named types. For
+ // that reason we can skip the expensive walk over the package scope: the
+ // path will always be package -> named type -> method. We can trivially get
+ // the type name from the receiver, and only have to look over the type's
+ // methods to find the method index.
+ //
+ // Methods on generic types require special consideration, however. Consider
+ // the following package:
+ //
+ // L1: type S[T any] struct{}
+ // L2: func (recv S[A]) Foo() { recv.Bar() }
+ // L3: func (recv S[B]) Bar() { }
+ // L4: type Alias = S[int]
+ // L5: func _[T any]() { var s S[int]; s.Foo() }
+ //
+ // The receivers of methods on generic types are instantiations. L2 and L3
+ // instantiate S with the type-parameters A and B, which are scoped to the
+ // respective methods. L4 and L5 each instantiate S with int. Each of these
+ // instantiations has its own method set, full of methods (and thus objects)
+ // with receivers whose types are the respective instantiations. In other
+ // words, we have
+ //
+ // S[A].Foo, S[A].Bar
+ // S[B].Foo, S[B].Bar
+ // S[int].Foo, S[int].Bar
+ //
+ // We may thus be trying to produce object paths for any of these objects.
+ //
+ // S[A].Foo and S[B].Bar are the origin methods, and their paths are S.Foo
+ // and S.Bar, which are the paths that this function naturally produces.
+ //
+ // S[A].Bar, S[B].Foo, and both methods on S[int] are instantiations that
+ // don't correspond to the origin methods. For S[int], this is significant.
+ // The most precise object path for S[int].Foo, for example, is Alias.Foo,
+ // not S.Foo. Our function, however, would produce S.Foo, which would
+ // resolve to a different object.
+ //
+ // For S[A].Bar and S[B].Foo it could be argued that S.Bar and S.Foo are
+ // still the correct paths, since only the origin methods have meaningful
+ // paths. But this is likely only true for trivial cases and has edge cases.
+ // Since this function is only an optimization, we err on the side of giving
+ // up, deferring to the slower but definitely correct algorithm. Most users
+ // of objectpath will only be giving us origin methods, anyway, as referring
+ // to instantiated methods is usually not useful.
+
+ if typeparams.OriginMethod(meth) != meth {
+ return "", false
+ }
+
+ recvT := meth.Type().(*types.Signature).Recv().Type()
+ if ptr, ok := recvT.(*types.Pointer); ok {
+ recvT = ptr.Elem()
+ }
+
+ named, ok := recvT.(*types.Named)
+ if !ok {
+ return "", false
+ }
+
+ if types.IsInterface(named) {
+ // Named interfaces don't have to be package-scoped
+ //
+ // TODO(dominikh): opt: if scope.Lookup(name) == named, then we can apply this optimization to interface
+ // methods, too, I think.
+ return "", false
+ }
+
+ // Preallocate space for the name, opType, opMethod, and some digits.
+ name := named.Obj().Name()
+ path := make([]byte, 0, len(name)+8)
+ path = append(path, name...)
+ path = append(path, opType)
+ for i, m := range enc.namedMethods(named) {
+ if m == meth {
+ path = appendOpArg(path, opMethod, i)
+ return Path(path), true
+ }
+ }
+
+ panic(fmt.Sprintf("couldn't find method %s on type %s", meth, named))
+}
+
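
A minimal sketch, not part of the patch, of the origin-versus-instantiation distinction the fast path relies on, reusing the example package from the comment above; (*types.Func).Origin (Go 1.19+) stands in here for the internal typeparams.OriginMethod helper:

package main

import (
	"fmt"
	"go/ast"
	"go/parser"
	"go/token"
	"go/types"

	"golang.org/x/tools/go/types/objectpath"
)

const src = `package p

type S[T any] struct{}

func (recv S[A]) Foo() {}
func (recv S[B]) Bar() {}
`

func main() {
	fset := token.NewFileSet()
	f, err := parser.ParseFile(fset, "p.go", src, 0)
	if err != nil {
		panic(err)
	}
	pkg, err := new(types.Config).Check("p", fset, []*ast.File{f}, nil)
	if err != nil {
		panic(err)
	}
	// Origin (generic) methods such as S.Foo qualify for the fast path;
	// methods of an instantiation such as S[int].Foo do not, and fall back
	// to the general scope walk.
	S := pkg.Scope().Lookup("S").Type().(*types.Named)
	for i := 0; i < S.NumMethods(); i++ {
		m := S.Method(i)
		if m.Origin() == m { // cf. typeparams.OriginMethod(meth) == meth
			path, err := objectpath.For(m)
			fmt.Printf("%s: %q %v\n", m.Name(), path, err)
		}
	}
}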
// find finds obj within type T, returning the path to it, or nil if not found.
//
// The seen map is used to short circuit cycles through type parameters. If
@@ -570,15 +679,23 @@ func Object(pkg *types.Package, p Path) (types.Object, error) {
t = nil
case opMethod:
- hasMethods, ok := t.(hasMethods) // Interface or Named
- if !ok {
+ switch t := t.(type) {
+ case *types.Interface:
+ if index >= t.NumMethods() {
+ return nil, fmt.Errorf("method index %d out of range [0-%d)", index, t.NumMethods())
+ }
+ obj = t.Method(index) // Id-ordered
+
+ case *types.Named:
+ methods := namedMethods(t) // (unmemoized)
+ if index >= len(methods) {
+ return nil, fmt.Errorf("method index %d out of range [0-%d)", index, len(methods))
+ }
+ obj = methods[index] // Id-ordered
+
+ default:
return nil, fmt.Errorf("cannot apply %q to %s (got %T, want interface or named)", code, t, t)
}
- canonical := canonicalize(hasMethods)
- if n := len(canonical); index >= n {
- return nil, fmt.Errorf("method index %d out of range [0-%d)", index, n)
- }
- obj = canonical[index]
t = nil
case opObj:
@@ -601,27 +718,45 @@ func Object(pkg *types.Package, p Path) (types.Object, error) {
return obj, nil // success
}
-// hasMethods is an abstraction of *types.{Interface,Named}. This is pulled up
-// because it is used by methodOrdering, which is in turn used by both encoding
-// and decoding.
-type hasMethods interface {
- Method(int) *types.Func
- NumMethods() int
+// namedMethods returns the methods of a Named type in ascending Id order.
+func namedMethods(named *types.Named) []*types.Func {
+ methods := make([]*types.Func, named.NumMethods())
+ for i := range methods {
+ methods[i] = named.Method(i)
+ }
+ sort.Slice(methods, func(i, j int) bool {
+ return methods[i].Id() < methods[j].Id()
+ })
+ return methods
}
-// canonicalize returns a canonical order for the methods in a hasMethod.
-func canonicalize(hm hasMethods) []*types.Func {
- count := hm.NumMethods()
- if count <= 0 {
- return nil
+// scopeNames is a memoization of scope.Names. Callers must not modify the result.
+func (enc *encoder) scopeNames(scope *types.Scope) []string {
+ m := enc.scopeNamesMemo
+ if m == nil {
+ m = make(map[*types.Scope][]string)
+ enc.scopeNamesMemo = m
}
- canon := make([]*types.Func, count)
- for i := 0; i < count; i++ {
- canon[i] = hm.Method(i)
+ names, ok := m[scope]
+ if !ok {
+ names = scope.Names() // allocates and sorts
+ m[scope] = names
}
- less := func(i, j int) bool {
- return canon[i].Id() < canon[j].Id()
+ return names
+}
+
+// namedMethods is a memoization of the namedMethods function. Callers must not modify the result.
+func (enc *encoder) namedMethods(named *types.Named) []*types.Func {
+ m := enc.namedMethodsMemo
+ if m == nil {
+ m = make(map[*types.Named][]*types.Func)
+ enc.namedMethodsMemo = m
}
- sort.Slice(canon, less)
- return canon
+ methods, ok := m[named]
+ if !ok {
+ methods = namedMethods(named) // allocates and sorts
+ m[named] = methods
+ }
+ return methods
}
diff --git a/go/types/objectpath/objectpath_test.go b/go/types/objectpath/objectpath_test.go
index 39e7b1bcd..adfad2cd2 100644
--- a/go/types/objectpath/objectpath_test.go
+++ b/go/types/objectpath/objectpath_test.go
@@ -182,7 +182,7 @@ func testPath(prog *loader.Program, test pathTest) error {
return fmt.Errorf("Object(%s, %q) returned error %q, want %q", pkg.Path(), test.path, err, test.wantErr)
}
if test.wantErr != "" {
- if got := stripSubscripts(err.Error()); got != test.wantErr {
+ if got := err.Error(); got != test.wantErr {
return fmt.Errorf("Object(%s, %q) error was %q, want %q",
pkg.Path(), test.path, got, test.wantErr)
}
@@ -190,7 +190,7 @@ func testPath(prog *loader.Program, test pathTest) error {
}
// Inv: err == nil
- if objString := stripSubscripts(obj.String()); objString != test.wantobj {
+ if objString := obj.String(); objString != test.wantobj {
return fmt.Errorf("Object(%s, %q) = %s, want %s", pkg.Path(), test.path, objString, test.wantobj)
}
if obj.Pkg() != pkg {
@@ -215,25 +215,6 @@ func testPath(prog *loader.Program, test pathTest) error {
return nil
}
-// stripSubscripts removes type parameter id subscripts.
-//
-// TODO(rfindley): remove this function once subscripts are removed from the
-// type parameter type string.
-func stripSubscripts(s string) string {
- var runes []rune
- for _, r := range s {
- // For debugging/uniqueness purposes, TypeString on a type parameter adds a
- // subscript corresponding to the type parameter's unique id. This is going
- // to be removed, but in the meantime we skip the subscript runes to get a
- // deterministic output.
- if '₀' <= r && r < '₀'+10 {
- continue // trim type parameter subscripts
- }
- runes = append(runes, r)
- }
- return string(runes)
-}
-
// TestSourceAndExportData uses objectpath to compute a correspondence
// of objects between two versions of the same package, one loaded from
// source, the other from export data.
diff --git a/go/types/typeutil/imports.go b/go/types/typeutil/imports.go
index 9c441dba9..b81ce0c33 100644
--- a/go/types/typeutil/imports.go
+++ b/go/types/typeutil/imports.go
@@ -12,7 +12,6 @@ import "go/types"
// package Q, Q appears earlier than P in the result.
// The algorithm follows import statements in the order they
// appear in the source code, so the result is a total order.
-//
func Dependencies(pkgs ...*types.Package) []*types.Package {
var result []*types.Package
seen := make(map[*types.Package]bool)
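
A minimal sketch, not part of the patch, of the ordering guarantee described in the comment above; the two packages are built by hand with hypothetical import paths rather than loaded from source:

package main

import (
	"fmt"
	"go/types"

	"golang.org/x/tools/go/types/typeutil"
)

func main() {
	q := types.NewPackage("example.com/q", "q")
	p := types.NewPackage("example.com/p", "p")
	p.SetImports([]*types.Package{q}) // p imports q

	for _, dep := range typeutil.Dependencies(p) {
		fmt.Println(dep.Path()) // q is printed before p
	}
}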
diff --git a/go/types/typeutil/map.go b/go/types/typeutil/map.go
index c9f8f25a0..7bd2fdb38 100644
--- a/go/types/typeutil/map.go
+++ b/go/types/typeutil/map.go
@@ -24,7 +24,6 @@ import (
// Just as with map[K]V, a nil *Map is a valid empty map.
//
// Not thread-safe.
-//
type Map struct {
hasher Hasher // shared by many Maps
table map[uint32][]entry // maps hash to bucket; entry.key==nil means unused
@@ -57,14 +56,12 @@ type entry struct {
//
// If SetHasher is not called, the Map will create a private hasher at
// the first call to Insert.
-//
func (m *Map) SetHasher(hasher Hasher) {
m.hasher = hasher
}
// Delete removes the entry with the given key, if any.
// It returns true if the entry was found.
-//
func (m *Map) Delete(key types.Type) bool {
if m != nil && m.table != nil {
hash := m.hasher.Hash(key)
@@ -84,7 +81,6 @@ func (m *Map) Delete(key types.Type) bool {
// At returns the map entry for the given key.
// The result is nil if the entry is not present.
-//
func (m *Map) At(key types.Type) interface{} {
if m != nil && m.table != nil {
for _, e := range m.table[m.hasher.Hash(key)] {
@@ -145,7 +141,6 @@ func (m *Map) Len() int {
// f will not be invoked for it, but if f inserts a map entry that
// Iterate has not yet reached, whether or not f will be invoked for
// it is unspecified.
-//
func (m *Map) Iterate(f func(key types.Type, value interface{})) {
if m != nil {
for _, bucket := range m.table {
@@ -190,14 +185,12 @@ func (m *Map) toString(values bool) string {
// String returns a string representation of the map's entries.
// Values are printed using fmt.Sprintf("%v", v).
// Order is unspecified.
-//
func (m *Map) String() string {
return m.toString(true)
}
// KeysString returns a string representation of the map's key set.
// Order is unspecified.
-//
func (m *Map) KeysString() string {
return m.toString(false)
}
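
A minimal sketch, not part of the patch, tying the methods above together; Map keys are compared with types.Identical, so structurally identical types share one entry:

package main

import (
	"fmt"
	"go/types"

	"golang.org/x/tools/go/types/typeutil"
)

func main() {
	var m typeutil.Map // the zero value is an empty map

	a := types.NewSlice(types.Typ[types.Int])
	b := types.NewSlice(types.Typ[types.Int]) // distinct value, identical type

	m.Set(a, "a slice of int")
	fmt.Println(m.At(b)) // "a slice of int": identical keys map to the same entry
	fmt.Println(m.Len()) // 1

	m.Iterate(func(key types.Type, value interface{}) {
		fmt.Println(key, value) // order is unspecified
	})
	fmt.Println(m.Delete(a)) // true
}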
@@ -339,7 +332,9 @@ func (h Hasher) hashFor(t types.Type) uint32 {
// Method order is not significant.
// Ignore m.Pkg().
m := t.Method(i)
- hash += 3*hashString(m.Name()) + 5*h.Hash(m.Type())
+ // Use shallow hash on method signature to
+ // avoid anonymous interface cycles.
+ hash += 3*hashString(m.Name()) + 5*h.shallowHash(m.Type())
}
// Hash type restrictions.
@@ -441,3 +436,76 @@ func (h Hasher) hashPtr(ptr interface{}) uint32 {
h.ptrMap[ptr] = hash
return hash
}
+
+// shallowHash computes a hash of t without looking at any of its
+// element Types, to avoid potential anonymous cycles in the types of
+// interface methods.
+//
+// When an unnamed non-empty interface type appears anywhere among the
+// arguments or results of an interface method, there is a potential
+// for endless recursion. Consider:
+//
+// type X interface { m() []*interface { X } }
+//
+// The problem is that the Methods of the interface in m's result type
+// include m itself; there is no mention of the named type X that
+// might help us break the cycle.
+// (See comment in go/types.identical, case *Interface, for more.)
+func (h Hasher) shallowHash(t types.Type) uint32 {
+ // t is the type of an interface method (Signature),
+ // its params or results (Tuples), or their immediate
+ // elements (mostly Slice, Pointer, Basic, Named),
+ // so there's no need to optimize anything else.
+ switch t := t.(type) {
+ case *types.Signature:
+ var hash uint32 = 604171
+ if t.Variadic() {
+ hash *= 971767
+ }
+ // The Signature/Tuple recursion is always finite
+ // and invariably shallow.
+ return hash + 1062599*h.shallowHash(t.Params()) + 1282529*h.shallowHash(t.Results())
+
+ case *types.Tuple:
+ n := t.Len()
+ hash := 9137 + 2*uint32(n)
+ for i := 0; i < n; i++ {
+ hash += 53471161 * h.shallowHash(t.At(i).Type())
+ }
+ return hash
+
+ case *types.Basic:
+ return 45212177 * uint32(t.Kind())
+
+ case *types.Array:
+ return 1524181 + 2*uint32(t.Len())
+
+ case *types.Slice:
+ return 2690201
+
+ case *types.Struct:
+ return 3326489
+
+ case *types.Pointer:
+ return 4393139
+
+ case *typeparams.Union:
+ return 562448657
+
+ case *types.Interface:
+ return 2124679 // no recursion here
+
+ case *types.Map:
+ return 9109
+
+ case *types.Chan:
+ return 9127
+
+ case *types.Named:
+ return h.hashPtr(t.Obj())
+
+ case *typeparams.TypeParam:
+ return h.hashPtr(t.Obj())
+ }
+ panic(fmt.Sprintf("shallowHash: %T: %v", t, t))
+}
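
A minimal sketch, not part of the patch, of the cyclic shape described in the comment above (the same shape exercised by the Issue56048 test cases added below); with shallowHash in place, hashing the method expression's type terminates:

package main

import (
	"fmt"
	"go/ast"
	"go/parser"
	"go/token"
	"go/types"

	"golang.org/x/tools/go/types/typeutil"
)

const src = `package p

type X interface{ m() []*interface{ X } }

var V = X.m // method expression: its signature mentions the anonymous interface
`

func main() {
	fset := token.NewFileSet()
	f, err := parser.ParseFile(fset, "p.go", src, 0)
	if err != nil {
		panic(err)
	}
	pkg, err := new(types.Config).Check("p", fset, []*ast.File{f}, nil)
	if err != nil {
		panic(err)
	}

	var m typeutil.Map
	t := pkg.Scope().Lookup("V").Type()
	m.Set(t, "hashed without recursing into the anonymous interface")
	fmt.Println(m.At(t))
}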
diff --git a/go/types/typeutil/map_test.go b/go/types/typeutil/map_test.go
index 8cd643e5b..ee73ff9cf 100644
--- a/go/types/typeutil/map_test.go
+++ b/go/types/typeutil/map_test.go
@@ -244,6 +244,14 @@ func Bar[P Constraint[P]]() {}
func Baz[Q any]() {} // The underlying type of Constraint[P] is any.
// But Quux is not.
func Quux[Q interface{ quux() }]() {}
+
+
+type Issue56048_I interface{ m() interface { Issue56048_I } }
+var Issue56048 = Issue56048_I.m
+
+type Issue56048_Ib interface{ m() chan []*interface { Issue56048_Ib } }
+var Issue56048b = Issue56048_Ib.m
+
`
fset := token.NewFileSet()
@@ -296,12 +304,14 @@ func Quux[Q interface{ quux() }]() {}
ME1Type = scope.Lookup("ME1Type").Type()
ME2 = scope.Lookup("ME2").Type()
- Constraint = scope.Lookup("Constraint").Type()
- Foo = scope.Lookup("Foo").Type()
- Fn = scope.Lookup("Fn").Type()
- Bar = scope.Lookup("Foo").Type()
- Baz = scope.Lookup("Foo").Type()
- Quux = scope.Lookup("Quux").Type()
+ Constraint = scope.Lookup("Constraint").Type()
+ Foo = scope.Lookup("Foo").Type()
+ Fn = scope.Lookup("Fn").Type()
+ Bar = scope.Lookup("Foo").Type()
+ Baz = scope.Lookup("Foo").Type()
+ Quux = scope.Lookup("Quux").Type()
+ Issue56048 = scope.Lookup("Issue56048").Type()
+ Issue56048b = scope.Lookup("Issue56048b").Type()
)
tmap := new(typeutil.Map)
@@ -371,6 +381,9 @@ func Quux[Q interface{ quux() }]() {}
{Bar, "Bar", false},
{Baz, "Baz", false},
{Quux, "Quux", true},
+
+ {Issue56048, "Issue56048", true}, // (not actually about generics)
+ {Issue56048b, "Issue56048b", true}, // (not actually about generics)
}
for _, step := range steps {
diff --git a/go/types/typeutil/methodsetcache.go b/go/types/typeutil/methodsetcache.go
index 32084610f..a5d931083 100644
--- a/go/types/typeutil/methodsetcache.go
+++ b/go/types/typeutil/methodsetcache.go
@@ -25,7 +25,6 @@ type MethodSetCache struct {
// If cache is nil, this function is equivalent to types.NewMethodSet(T).
// Utility functions can thus expose an optional *MethodSetCache
// parameter to clients that care about performance.
-//
func (cache *MethodSetCache) MethodSet(T types.Type) *types.MethodSet {
if cache == nil {
return types.NewMethodSet(T)
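
A minimal sketch, not part of the patch, of the optional-cache pattern the comment describes; hasStringMethod is a hypothetical helper:

package p

import (
	"go/types"

	"golang.org/x/tools/go/types/typeutil"
)

// hasStringMethod reports whether T's method set has a method named String.
// cache may be nil, in which case a fresh method set is computed per call.
func hasStringMethod(T types.Type, cache *typeutil.MethodSetCache) bool {
	mset := cache.MethodSet(T) // a nil cache falls back to types.NewMethodSet(T)
	for i := 0; i < mset.Len(); i++ {
		if mset.At(i).Obj().Name() == "String" {
			return true
		}
	}
	return false
}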
diff --git a/go/types/typeutil/ui.go b/go/types/typeutil/ui.go
index 9849c24ce..fa55b0a1e 100644
--- a/go/types/typeutil/ui.go
+++ b/go/types/typeutil/ui.go
@@ -22,7 +22,6 @@ import "go/types"
// this function is intended only for user interfaces.
//
// The order of the result is as for types.MethodSet(T).
-//
func IntuitiveMethodSet(T types.Type, msets *MethodSetCache) []*types.Selection {
isPointerToConcrete := func(T types.Type) bool {
ptr, ok := T.(*types.Pointer)
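
A minimal sketch, not part of the patch, of a UI-side caller; displayMethods is a hypothetical helper:

package p

import (
	"go/types"

	"golang.org/x/tools/go/types/typeutil"
)

// displayMethods returns the method names a user would intuitively expect
// to see for T, in the order of types.NewMethodSet(T).
func displayMethods(T types.Type, cache *typeutil.MethodSetCache) []string {
	var names []string
	for _, sel := range typeutil.IntuitiveMethodSet(T, cache) {
		names = append(names, sel.Obj().Name())
	}
	return names
}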
diff --git a/go/vcs/vcs.go b/go/vcs/vcs.go
index f2aac1c0d..54d850535 100644
--- a/go/vcs/vcs.go
+++ b/go/vcs/vcs.go
@@ -11,7 +11,6 @@
// for developers who want to write tools with similar semantics.
// It needs to be manually kept in sync with upstream when changes are
// made to cmd/go/internal/get; see https://golang.org/issue/11490.
-//
package vcs // import "golang.org/x/tools/go/vcs"
import (